Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/gohugoio/hugo.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.circleci/config.yml51
-rw-r--r--.dockerignore9
-rw-r--r--.gitattributes8
-rw-r--r--.github/ISSUE_TEMPLATE/bug_report.md18
-rw-r--r--.github/ISSUE_TEMPLATE/config.yml5
-rw-r--r--.github/ISSUE_TEMPLATE/feature_request.md11
-rw-r--r--.github/dependabot.yml7
-rw-r--r--.github/workflows/stale.yml47
-rw-r--r--.github/workflows/test.yml89
-rw-r--r--.gitignore30
-rw-r--r--.gitmodules (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/filesaver.js)0
-rw-r--r--.mailmap3
-rw-r--r--CONTRIBUTING.md202
-rwxr-xr-xDockerfile45
-rw-r--r--LICENSE201
-rw-r--r--README.md256
-rw-r--r--SECURITY.md7
-rwxr-xr-xbench.sh37
-rwxr-xr-xbenchSite.sh12
-rwxr-xr-xbenchbep.sh1
-rwxr-xr-xbepdock.sh1
-rw-r--r--bufferpool/bufpool.go38
-rw-r--r--bufferpool/bufpool_test.go31
-rw-r--r--cache/filecache/filecache.go384
-rw-r--r--cache/filecache/filecache_config.go248
-rw-r--r--cache/filecache/filecache_config_test.go198
-rw-r--r--cache/filecache/filecache_pruner.go126
-rw-r--r--cache/filecache/filecache_pruner_test.go110
-rw-r--r--cache/filecache/filecache_test.go349
-rw-r--r--cache/namedmemcache/named_cache.go78
-rw-r--r--cache/namedmemcache/named_cache_test.go80
-rw-r--r--codegen/methods.go536
-rw-r--r--codegen/methods2_test.go20
-rw-r--r--codegen/methods_test.go96
-rw-r--r--commands/commandeer.go520
-rw-r--r--commands/commands.go343
-rw-r--r--commands/commands_test.go396
-rw-r--r--commands/config.go185
-rw-r--r--commands/convert.go207
-rw-r--r--commands/deploy.go86
-rw-r--r--commands/env.go60
-rw-r--r--commands/gen.go40
-rw-r--r--commands/genchromastyles.go72
-rw-r--r--commands/gendoc.go98
-rw-r--r--commands/gendocshelper.go71
-rw-r--r--commands/genman.go77
-rw-r--r--commands/helpers.go79
-rw-r--r--commands/hugo.go1253
-rw-r--r--commands/hugo_test.go206
-rw-r--r--commands/hugo_windows.go33
-rw-r--r--commands/import_jekyll.go604
-rw-r--r--commands/import_jekyll_test.go177
-rw-r--r--commands/limit_darwin.go84
-rw-r--r--commands/limit_others.go21
-rw-r--r--commands/list.go210
-rw-r--r--commands/list_test.go68
-rw-r--r--commands/mod.go293
-rw-r--r--commands/mod_npm.go56
-rw-r--r--commands/new.go126
-rw-r--r--commands/new_content_test.go29
-rw-r--r--commands/new_site.go165
-rw-r--r--commands/new_theme.go176
-rw-r--r--commands/nodeploy.go51
-rw-r--r--commands/release.go72
-rw-r--r--commands/release_noop.go21
-rw-r--r--commands/server.go741
-rw-r--r--commands/server_errors.go31
-rw-r--r--commands/server_test.go280
-rw-r--r--commands/static_syncer.go129
-rw-r--r--commands/version.go44
-rw-r--r--common/collections/append.go112
-rw-r--r--common/collections/append_test.go90
-rw-r--r--common/collections/collections.go21
-rw-r--r--common/collections/order.go20
-rw-r--r--common/collections/slice.go76
-rw-r--r--common/collections/slice_test.go124
-rw-r--r--common/constants/constants.go25
-rw-r--r--common/herrors/error_locator.go168
-rw-r--r--common/herrors/error_locator_test.go152
-rw-r--r--common/herrors/errors.go71
-rw-r--r--common/herrors/file_error.go394
-rw-r--r--common/herrors/file_error_test.go82
-rw-r--r--common/herrors/line_number_extractors.go63
-rw-r--r--common/hexec/exec.go276
-rw-r--r--common/hreflect/helpers.go222
-rw-r--r--common/hreflect/helpers_test.go86
-rw-r--r--common/htime/time.go165
-rw-r--r--common/htime/time_test.go148
-rw-r--r--common/hugio/copy.go90
-rw-r--r--common/hugio/readers.go59
-rw-r--r--common/hugio/writers.go84
-rw-r--r--common/hugo/hugo.go251
-rw-r--r--common/hugo/hugo_test.go44
-rw-r--r--common/hugo/vars_extended.go19
-rw-r--r--common/hugo/vars_regular.go19
-rw-r--r--common/hugo/version.go301
-rw-r--r--common/hugo/version_current.go23
-rw-r--r--common/hugo/version_test.go88
-rw-r--r--common/loggers/ignorableLogger.go65
-rw-r--r--common/loggers/loggers.go355
-rw-r--r--common/loggers/loggers_test.go60
-rw-r--r--common/maps/maps.go193
-rw-r--r--common/maps/maps_test.go196
-rw-r--r--common/maps/params.go289
-rw-r--r--common/maps/params_test.go170
-rw-r--r--common/maps/scratch.go172
-rw-r--r--common/maps/scratch_test.go221
-rw-r--r--common/math/math.go135
-rw-r--r--common/math/math_test.go106
-rw-r--r--common/para/para.go73
-rw-r--r--common/para/para_test.go95
-rw-r--r--common/paths/path.go265
-rw-r--r--common/paths/path_test.go228
-rw-r--r--common/paths/url.go181
-rw-r--r--common/paths/url_test.go99
-rw-r--r--common/terminal/colors.go79
-rw-r--r--common/text/position.go98
-rw-r--r--common/text/position_test.go32
-rw-r--r--common/text/transform.go78
-rw-r--r--common/text/transform_test.go76
-rw-r--r--common/types/convert.go130
-rw-r--r--common/types/convert_test.go49
-rw-r--r--common/types/evictingqueue.go96
-rw-r--r--common/types/evictingqueue_test.go74
-rw-r--r--common/types/hstring/stringtypes.go20
-rw-r--r--common/types/hstring/stringtypes_test.go30
-rw-r--r--common/types/types.go92
-rw-r--r--common/types/types_test.go29
-rw-r--r--common/urls/ref.go22
-rw-r--r--compare/compare.go38
-rw-r--r--compare/compare_strings.go113
-rw-r--r--compare/compare_strings_test.go83
-rw-r--r--config/commonConfig.go215
-rw-r--r--config/commonConfig_test.go140
-rw-r--r--config/compositeConfig.go117
-rw-r--r--config/compositeConfig_test.go40
-rw-r--r--config/configLoader.go214
-rw-r--r--config/configLoader_test.go34
-rw-r--r--config/configProvider.go67
-rw-r--r--config/configProvider_test.go35
-rw-r--r--config/defaultConfigProvider.go447
-rw-r--r--config/defaultConfigProvider_test.go400
-rw-r--r--config/docshelper.go45
-rw-r--r--config/env.go57
-rw-r--r--config/env_test.go32
-rw-r--r--config/privacy/privacyConfig.go114
-rw-r--r--config/privacy/privacyConfig_test.go100
-rw-r--r--config/security/docshelper.go26
-rw-r--r--config/security/securityConfig.go227
-rw-r--r--config/security/securityonfig_test.go166
-rw-r--r--config/security/whitelist.go102
-rw-r--r--config/security/whitelist_test.go47
-rw-r--r--config/services/servicesConfig.go97
-rw-r--r--config/services/servicesConfig_test.go67
-rw-r--r--create/content.go392
-rw-r--r--create/content_test.go441
-rw-r--r--deploy/cloudfront.go54
-rw-r--r--deploy/deploy.go741
-rw-r--r--deploy/deployConfig.go164
-rw-r--r--deploy/deployConfig_test.go199
-rw-r--r--deploy/deploy_azure.go22
-rw-r--r--deploy/deploy_test.go1065
-rw-r--r--deploy/google.go40
-rw-r--r--deps/deps.go441
-rw-r--r--deps/deps_test.go30
-rw-r--r--docs/.cspell.json (renamed from .cspell.json)0
-rw-r--r--docs/.editorconfig (renamed from .editorconfig)0
-rw-r--r--docs/.github/SUPPORT.md3
-rw-r--r--docs/.github/stale.yml (renamed from .github/stale.yml)0
-rw-r--r--docs/.github/workflows/spellcheck.yml (renamed from .github/workflows/spellcheck.yml)0
-rw-r--r--docs/.gitignore9
-rw-r--r--docs/.markdownlint.yaml (renamed from .markdownlint.yaml)0
-rw-r--r--docs/.vscode/extensions.json (renamed from .vscode/extensions.json)0
-rw-r--r--docs/LICENSE.md (renamed from LICENSE.md)0
-rw-r--r--docs/README.md48
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_algolia.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_algolia.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_animation.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_animation.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_carousel.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_carousel.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_chroma.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_chroma.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_code.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_code.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_color-scheme.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_color-scheme.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_columns.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_columns.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content-tables.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content-tables.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_definition-lists.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_definition-lists.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_documentation-styles.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_documentation-styles.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_fluid-type.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_fluid-type.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_font-family.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_font-family.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_header-link.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_header-link.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hljs.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hljs.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hugo-internal-template-styling.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hugo-internal-template-styling.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_no-js.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_no-js.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_social-icons.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_social-icons.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_stickyheader.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_stickyheader.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_svg.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_svg.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tabs.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tabs.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tachyons.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tachyons.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_variables.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/_variables.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/main.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/css/main.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/index.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/index.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/clipboardjs.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/clipboardjs.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/codeblocks.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/codeblocks.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/docsearch.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/docsearch.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/filesaver.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/hljs.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/hljs.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/lazysizes.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/lazysizes.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/main.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/main.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/menutoggle.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/menutoggle.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/nojs.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/nojs.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/scrolldir.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/scrolldir.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/smoothscroll.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/smoothscroll.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/tabs.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/js/tabs.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/gohugoio-card-base-1.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/gohugoio-card-base-1.png)bin242906 -> 242906 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/mulish-black.ttf (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/mulish-black.ttf)bin105920 -> 105920 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/css/app.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/output/css/app.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/js/app.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/assets/output/js/app.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/config.toml (renamed from _vendor/github.com/gohugoio/gohugoioTheme/config.toml)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/data/sponsors.toml (renamed from _vendor/github.com/gohugoio/gohugoioTheme/data/sponsors.toml)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/404.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/404.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/_markup/render-heading.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/_markup/render-heading.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/baseof.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/baseof.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/documentation-home.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/documentation-home.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/list.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/list.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/page.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/page.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/single.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/single.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/taxonomy.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/taxonomy.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/terms.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/terms.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.headers (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/index.headers)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/index.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.redir (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/index.redir)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/list.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/news/list.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/single.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/news/single.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-section-summaries.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-section-summaries.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-small-news.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-small-news.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data-card.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data-card.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/functions-signature.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/functions-signature.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/page-meta-data.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/page-meta-data.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/entry-summary.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/entry-summary.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/gtag.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/gtag.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/head-additions.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/head-additions.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hero.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hero.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-icons.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-icons.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-single.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-single.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/installation.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/installation.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/open-source-involvement.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/open-source-involvement.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/showcase.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/showcase.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/sponsors.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/sponsors.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/tweets.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/tweets.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/after-body-start.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/after-body-start.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/before-body-end.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/before-body-end.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/icon-link.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/icon-link.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs-mobile.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs-mobile.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-global-mobile.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-global-mobile.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-mobile.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-mobile.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-top.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-top.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/get-featured-image.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/get-featured-image.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/opengraph.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/opengraph.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/twitter_cards.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/twitter_cards.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-edit.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-edit.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-header.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-header.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/pagelayout.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/pagelayout.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section-with-title.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section-with-title.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/related.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/related.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-footer.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-footer.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-manifest.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-manifest.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-nav.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-nav.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-scripts.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-scripts.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-search.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-search.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/social-follow.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/social-follow.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/summary.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/summary.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/Twitter_Logo_Blue.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/Twitter_Logo_Blue.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/apple.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/apple.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clipboard.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clipboard.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clippy.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clippy.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/cloud.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/cloud.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/content.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/content.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/design.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/design.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/facebook.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/facebook.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/focus.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/focus.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/freebsd.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/freebsd.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/functions.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/functions.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-corner.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-corner.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-squared.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-squared.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gitter.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gitter.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gme.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gme.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/godoc-icon.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/godoc-icon.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-2.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-2.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-front.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-front.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-homepage.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-homepage.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-side_path.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-side_path.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-small.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-small.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo-h-only.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo-h-only.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_down.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_down.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_up.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_up.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_left_black_24px.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_left_black_24px.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_right_black_24px.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_right_black_24px.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/idea.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/idea.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/instagram.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/instagram.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/javascript.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/javascript.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/json.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/json.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-ext.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-ext.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-permalink.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-permalink.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/md.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/md.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/mdsolid.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/mdsolid.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/newlogo.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/newlogo.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/sass.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/sass.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/search.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/search.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/twitter.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/twitter.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/website.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/website.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/windows.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/windows.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/yaml.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/yaml.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/tags.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/tags.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/toc.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/toc.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/robots.txt (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/robots.txt)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/articlelist.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/articlelist.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code-toggle.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code-toggle.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/datatable.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/datatable.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/directoryindex.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/directoryindex.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/docfile.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/docfile.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfile.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfile.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfm.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfm.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/gh.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/gh.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/ghrepo.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/ghrepo.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/nohighlight.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/nohighlight.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/note.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/note.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/output.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/output.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/readfile.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/readfile.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/tip.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/tip.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/warning.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/warning.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/yt.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/yt.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/list.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/list.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/single.html (renamed from _vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/single.html)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/package.json (renamed from _vendor/github.com/gohugoio/gohugoioTheme/package.json)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-144x144.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-144x144.png)bin7612 -> 7612 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-192x192.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-192x192.png)bin10264 -> 10264 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-256x256.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-256x256.png)bin15088 -> 15088 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-36x36.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-36x36.png)bin1592 -> 1592 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-48x48.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-48x48.png)bin2038 -> 2038 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-72x72.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-72x72.png)bin3467 -> 3467 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-96x96.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-96x96.png)bin4747 -> 4747 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/apple-touch-icon.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/apple-touch-icon.png)bin6238 -> 6238 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/browserconfig.xml (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/browserconfig.xml)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/app.bundle.js (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/dist/app.bundle.js)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/main.css (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/dist/main.css)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-16x16.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/favicon-16x16.png)bin1000 -> 1000 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-32x32.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/favicon-32x32.png)bin1648 -> 1648 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon.ico (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/favicon.ico)bin15086 -> 15086 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff)bin20892 -> 20892 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff2)bin16936 -> 16936 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff)bin21496 -> 21496 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff2)bin17320 -> 17320 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff)bin20932 -> 20932 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff2)bin16872 -> 16872 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff)bin21520 -> 21520 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff2)bin17332 -> 17332 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff)bin21240 -> 21240 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff2)bin17172 -> 17172 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff)bin21964 -> 21964 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff2)bin17732 -> 17732 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff)bin21208 -> 21208 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff2)bin17080 -> 17080 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff)bin21924 -> 21924 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff2)bin17776 -> 17776 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff)bin21220 -> 21220 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff2)bin17128 -> 17128 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff)bin21960 -> 21960 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff2)bin17756 -> 17756 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff)bin21244 -> 21244 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff2)bin17140 -> 17140 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff)bin21872 -> 21872 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff2)bin17644 -> 17644 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff)bin21676 -> 21676 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff2)bin17436 -> 17436 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff)bin22220 -> 22220 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff2 (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff2)bin17948 -> 17948 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/GitHub-Mark-64px.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/GitHub-Mark-64px.png)bin924 -> 924 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gohugoio-card.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/gohugoio-card.png)bin218051 -> 218051 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-hero.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-hero.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-side_color.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-side_color.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/home-page-templating-example.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/home-page-templating-example.png)bin88131 -> 88131 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes.jpg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes.jpg)bin32994 -> 32994 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes_not-optimized-according-to-google.jpg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes_not-optimized-according-to-google.jpg)bin88453 -> 88453 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/hugo-logo-wide.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/hugo-logo-wide.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-built-in-templates.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-built-in-templates.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-content-management.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-content-management.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-fast.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-fast.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual2.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual2.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-search.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-search.png)bin337 -> 337 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-shortcodes.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-shortcodes.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/netlify-dark.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/netlify-dark.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/site-hierarchy.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/site-hierarchy.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/brave-logo.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/brave-logo.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/esolia-logo.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/esolia-logo.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/forestry-logotype.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/forestry-logotype.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/linode-logo_standard_light_medium.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/linode-logo_standard_light_medium.png)bin11972 -> 11972 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/manifest.json (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/manifest.json)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-144x144.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/mstile-144x144.png)bin6225 -> 6225 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-150x150.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/mstile-150x150.png)bin6020 -> 6020 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-310x310.png (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/mstile-310x310.png)bin12885 -> 12885 bytes
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/static/safari-pinned-tab.svg (renamed from _vendor/github.com/gohugoio/gohugoioTheme/static/safari-pinned-tab.svg)0
-rw-r--r--docs/_vendor/github.com/gohugoio/gohugoioTheme/theme.toml (renamed from _vendor/github.com/gohugoio/gohugoioTheme/theme.toml)0
-rw-r--r--docs/_vendor/modules.txt (renamed from _vendor/modules.txt)0
-rw-r--r--docs/archetypes/functions.md (renamed from archetypes/functions.md)0
-rw-r--r--docs/archetypes/news/index.md (renamed from archetypes/news/index.md)0
-rw-r--r--docs/archetypes/showcase/bio.md (renamed from archetypes/showcase/bio.md)0
-rw-r--r--docs/archetypes/showcase/featured.png (renamed from archetypes/showcase/featured.png)bin41270 -> 41270 bytes
-rw-r--r--docs/archetypes/showcase/index.md (renamed from archetypes/showcase/index.md)0
-rw-r--r--docs/config.toml (renamed from config.toml)0
-rw-r--r--docs/config/_default/config.toml (renamed from config/_default/config.toml)0
-rw-r--r--docs/config/_default/languages.toml (renamed from config/_default/languages.toml)0
-rw-r--r--docs/config/_default/markup.toml (renamed from config/_default/markup.toml)0
-rw-r--r--docs/config/_default/menus/menus.en.toml (renamed from config/_default/menus/menus.en.toml)0
-rw-r--r--docs/config/_default/menus/menus.zh.toml (renamed from config/_default/menus/menus.zh.toml)0
-rw-r--r--docs/config/_default/params.toml (renamed from config/_default/params.toml)0
-rw-r--r--docs/config/_default/security.toml (renamed from config/_default/security.toml)0
-rw-r--r--docs/config/development/params.toml (renamed from config/development/params.toml)0
-rw-r--r--docs/config/production/config.toml (renamed from config/production/config.toml)0
-rw-r--r--docs/config/production/params.toml (renamed from config/production/params.toml)0
-rw-r--r--docs/content/en/_index.md (renamed from content/en/_index.md)0
-rw-r--r--docs/content/en/about/_index.md (renamed from content/en/about/_index.md)0
-rw-r--r--docs/content/en/about/benefits.md (renamed from content/en/about/benefits.md)0
-rw-r--r--docs/content/en/about/features.md (renamed from content/en/about/features.md)0
-rw-r--r--docs/content/en/about/hugo-and-gdpr.md (renamed from content/en/about/hugo-and-gdpr.md)0
-rw-r--r--docs/content/en/about/license.md (renamed from content/en/about/license.md)0
-rw-r--r--docs/content/en/about/security-model/hugo-security-model-featured.png (renamed from content/en/about/security-model/hugo-security-model-featured.png)bin85043 -> 85043 bytes
-rw-r--r--docs/content/en/about/security-model/index.md (renamed from content/en/about/security-model/index.md)0
-rw-r--r--docs/content/en/about/what-is-hugo.md (renamed from content/en/about/what-is-hugo.md)0
-rw-r--r--docs/content/en/commands/hugo.md (renamed from content/en/commands/hugo.md)0
-rw-r--r--docs/content/en/commands/hugo_completion.md (renamed from content/en/commands/hugo_completion.md)0
-rw-r--r--docs/content/en/commands/hugo_completion_bash.md (renamed from content/en/commands/hugo_completion_bash.md)0
-rw-r--r--docs/content/en/commands/hugo_completion_fish.md (renamed from content/en/commands/hugo_completion_fish.md)0
-rw-r--r--docs/content/en/commands/hugo_completion_powershell.md (renamed from content/en/commands/hugo_completion_powershell.md)0
-rw-r--r--docs/content/en/commands/hugo_completion_zsh.md (renamed from content/en/commands/hugo_completion_zsh.md)0
-rw-r--r--docs/content/en/commands/hugo_config.md (renamed from content/en/commands/hugo_config.md)0
-rw-r--r--docs/content/en/commands/hugo_config_mounts.md (renamed from content/en/commands/hugo_config_mounts.md)0
-rw-r--r--docs/content/en/commands/hugo_convert.md (renamed from content/en/commands/hugo_convert.md)0
-rw-r--r--docs/content/en/commands/hugo_convert_toJSON.md (renamed from content/en/commands/hugo_convert_toJSON.md)0
-rw-r--r--docs/content/en/commands/hugo_convert_toTOML.md (renamed from content/en/commands/hugo_convert_toTOML.md)0
-rw-r--r--docs/content/en/commands/hugo_convert_toYAML.md (renamed from content/en/commands/hugo_convert_toYAML.md)0
-rw-r--r--docs/content/en/commands/hugo_deploy.md (renamed from content/en/commands/hugo_deploy.md)0
-rw-r--r--docs/content/en/commands/hugo_env.md (renamed from content/en/commands/hugo_env.md)0
-rw-r--r--docs/content/en/commands/hugo_gen.md (renamed from content/en/commands/hugo_gen.md)0
-rw-r--r--docs/content/en/commands/hugo_gen_chromastyles.md (renamed from content/en/commands/hugo_gen_chromastyles.md)0
-rw-r--r--docs/content/en/commands/hugo_gen_doc.md (renamed from content/en/commands/hugo_gen_doc.md)0
-rw-r--r--docs/content/en/commands/hugo_gen_man.md (renamed from content/en/commands/hugo_gen_man.md)0
-rw-r--r--docs/content/en/commands/hugo_import.md (renamed from content/en/commands/hugo_import.md)0
-rw-r--r--docs/content/en/commands/hugo_import_jekyll.md (renamed from content/en/commands/hugo_import_jekyll.md)0
-rw-r--r--docs/content/en/commands/hugo_list.md (renamed from content/en/commands/hugo_list.md)0
-rw-r--r--docs/content/en/commands/hugo_list_all.md (renamed from content/en/commands/hugo_list_all.md)0
-rw-r--r--docs/content/en/commands/hugo_list_drafts.md (renamed from content/en/commands/hugo_list_drafts.md)0
-rw-r--r--docs/content/en/commands/hugo_list_expired.md (renamed from content/en/commands/hugo_list_expired.md)0
-rw-r--r--docs/content/en/commands/hugo_list_future.md (renamed from content/en/commands/hugo_list_future.md)0
-rw-r--r--docs/content/en/commands/hugo_mod.md (renamed from content/en/commands/hugo_mod.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_clean.md (renamed from content/en/commands/hugo_mod_clean.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_get.md (renamed from content/en/commands/hugo_mod_get.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_graph.md (renamed from content/en/commands/hugo_mod_graph.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_init.md (renamed from content/en/commands/hugo_mod_init.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_npm.md (renamed from content/en/commands/hugo_mod_npm.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_npm_pack.md (renamed from content/en/commands/hugo_mod_npm_pack.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_tidy.md (renamed from content/en/commands/hugo_mod_tidy.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_vendor.md (renamed from content/en/commands/hugo_mod_vendor.md)0
-rw-r--r--docs/content/en/commands/hugo_mod_verify.md (renamed from content/en/commands/hugo_mod_verify.md)0
-rw-r--r--docs/content/en/commands/hugo_new.md (renamed from content/en/commands/hugo_new.md)0
-rw-r--r--docs/content/en/commands/hugo_new_site.md (renamed from content/en/commands/hugo_new_site.md)0
-rw-r--r--docs/content/en/commands/hugo_new_theme.md (renamed from content/en/commands/hugo_new_theme.md)0
-rw-r--r--docs/content/en/commands/hugo_server.md (renamed from content/en/commands/hugo_server.md)0
-rw-r--r--docs/content/en/commands/hugo_version.md (renamed from content/en/commands/hugo_version.md)0
-rw-r--r--docs/content/en/content-management/_index.md (renamed from content/en/content-management/_index.md)0
-rw-r--r--docs/content/en/content-management/archetypes.md (renamed from content/en/content-management/archetypes.md)0
-rw-r--r--docs/content/en/content-management/build-options.md (renamed from content/en/content-management/build-options.md)0
-rw-r--r--docs/content/en/content-management/comments.md (renamed from content/en/content-management/comments.md)0
-rw-r--r--docs/content/en/content-management/cross-references.md (renamed from content/en/content-management/cross-references.md)0
-rw-r--r--docs/content/en/content-management/diagrams.md (renamed from content/en/content-management/diagrams.md)0
-rw-r--r--docs/content/en/content-management/formats.md (renamed from content/en/content-management/formats.md)0
-rw-r--r--docs/content/en/content-management/front-matter.md (renamed from content/en/content-management/front-matter.md)0
-rw-r--r--docs/content/en/content-management/image-processing/index.md (renamed from content/en/content-management/image-processing/index.md)0
-rw-r--r--docs/content/en/content-management/image-processing/sunset.jpg (renamed from content/en/content-management/image-processing/sunset.jpg)bin34584 -> 34584 bytes
-rw-r--r--docs/content/en/content-management/menus.md (renamed from content/en/content-management/menus.md)0
-rw-r--r--docs/content/en/content-management/multilingual.md (renamed from content/en/content-management/multilingual.md)0
-rw-r--r--docs/content/en/content-management/organization/1-featured-content-bundles.png (renamed from content/en/content-management/organization/1-featured-content-bundles.png)bin34394 -> 34394 bytes
-rw-r--r--docs/content/en/content-management/organization/index.md (renamed from content/en/content-management/organization/index.md)0
-rw-r--r--docs/content/en/content-management/page-bundles.md (renamed from content/en/content-management/page-bundles.md)0
-rw-r--r--docs/content/en/content-management/page-resources.md (renamed from content/en/content-management/page-resources.md)0
-rw-r--r--docs/content/en/content-management/related.md (renamed from content/en/content-management/related.md)0
-rw-r--r--docs/content/en/content-management/sections.md (renamed from content/en/content-management/sections.md)0
-rw-r--r--docs/content/en/content-management/shortcodes.md (renamed from content/en/content-management/shortcodes.md)0
-rw-r--r--docs/content/en/content-management/static-files.md (renamed from content/en/content-management/static-files.md)0
-rw-r--r--docs/content/en/content-management/summaries.md (renamed from content/en/content-management/summaries.md)0
-rw-r--r--docs/content/en/content-management/syntax-highlighting.md (renamed from content/en/content-management/syntax-highlighting.md)0
-rw-r--r--docs/content/en/content-management/taxonomies.md (renamed from content/en/content-management/taxonomies.md)0
-rw-r--r--docs/content/en/content-management/toc.md (renamed from content/en/content-management/toc.md)0
-rw-r--r--docs/content/en/content-management/types.md (renamed from content/en/content-management/types.md)0
-rw-r--r--docs/content/en/content-management/urls.md (renamed from content/en/content-management/urls.md)0
-rw-r--r--docs/content/en/contribute/_index.md (renamed from content/en/contribute/_index.md)0
-rw-r--r--docs/content/en/contribute/development.md (renamed from content/en/contribute/development.md)0
-rw-r--r--docs/content/en/contribute/documentation.md (renamed from content/en/contribute/documentation.md)0
-rw-r--r--docs/content/en/contribute/themes.md (renamed from content/en/contribute/themes.md)0
-rw-r--r--docs/content/en/documentation.md (renamed from content/en/documentation.md)0
-rw-r--r--docs/content/en/featured.png (renamed from content/en/featured.png)bin73881 -> 73881 bytes
-rw-r--r--docs/content/en/functions/GetPage.md (renamed from content/en/functions/GetPage.md)0
-rw-r--r--docs/content/en/functions/RenderString.md (renamed from content/en/functions/RenderString.md)0
-rw-r--r--docs/content/en/functions/_index.md (renamed from content/en/functions/_index.md)0
-rw-r--r--docs/content/en/functions/abslangurl.md (renamed from content/en/functions/abslangurl.md)0
-rw-r--r--docs/content/en/functions/absurl.md (renamed from content/en/functions/absurl.md)0
-rw-r--r--docs/content/en/functions/adddate.md (renamed from content/en/functions/adddate.md)0
-rw-r--r--docs/content/en/functions/after.md (renamed from content/en/functions/after.md)0
-rw-r--r--docs/content/en/functions/anchorize.md (renamed from content/en/functions/anchorize.md)0
-rw-r--r--docs/content/en/functions/append.md (renamed from content/en/functions/append.md)0
-rw-r--r--docs/content/en/functions/apply.md (renamed from content/en/functions/apply.md)0
-rw-r--r--docs/content/en/functions/base64.md (renamed from content/en/functions/base64.md)0
-rw-r--r--docs/content/en/functions/chomp.md (renamed from content/en/functions/chomp.md)0
-rw-r--r--docs/content/en/functions/complement.md (renamed from content/en/functions/complement.md)0
-rw-r--r--docs/content/en/functions/cond.md (renamed from content/en/functions/cond.md)0
-rw-r--r--docs/content/en/functions/countrunes.md (renamed from content/en/functions/countrunes.md)0
-rw-r--r--docs/content/en/functions/countwords.md (renamed from content/en/functions/countwords.md)0
-rw-r--r--docs/content/en/functions/dateformat.md (renamed from content/en/functions/dateformat.md)0
-rw-r--r--docs/content/en/functions/default.md (renamed from content/en/functions/default.md)0
-rw-r--r--docs/content/en/functions/delimit.md (renamed from content/en/functions/delimit.md)0
-rw-r--r--docs/content/en/functions/dict.md (renamed from content/en/functions/dict.md)0
-rw-r--r--docs/content/en/functions/echoparam.md (renamed from content/en/functions/echoparam.md)0
-rw-r--r--docs/content/en/functions/emojify.md (renamed from content/en/functions/emojify.md)0
-rw-r--r--docs/content/en/functions/eq.md (renamed from content/en/functions/eq.md)0
-rw-r--r--docs/content/en/functions/errorf.md (renamed from content/en/functions/errorf.md)0
-rw-r--r--docs/content/en/functions/fileExists.md (renamed from content/en/functions/fileExists.md)0
-rw-r--r--docs/content/en/functions/findRe.md (renamed from content/en/functions/findRe.md)0
-rw-r--r--docs/content/en/functions/first.md (renamed from content/en/functions/first.md)0
-rw-r--r--docs/content/en/functions/float.md (renamed from content/en/functions/float.md)0
-rw-r--r--docs/content/en/functions/format.md (renamed from content/en/functions/format.md)0
-rw-r--r--docs/content/en/functions/ge.md (renamed from content/en/functions/ge.md)0
-rw-r--r--docs/content/en/functions/get.md (renamed from content/en/functions/get.md)0
-rw-r--r--docs/content/en/functions/getenv.md (renamed from content/en/functions/getenv.md)0
-rw-r--r--docs/content/en/functions/group.md (renamed from content/en/functions/group.md)0
-rw-r--r--docs/content/en/functions/gt.md (renamed from content/en/functions/gt.md)0
-rw-r--r--docs/content/en/functions/hasPrefix.md (renamed from content/en/functions/hasPrefix.md)0
-rw-r--r--docs/content/en/functions/haschildren.md (renamed from content/en/functions/haschildren.md)0
-rw-r--r--docs/content/en/functions/hasmenucurrent.md (renamed from content/en/functions/hasmenucurrent.md)0
-rw-r--r--docs/content/en/functions/highlight.md (renamed from content/en/functions/highlight.md)0
-rw-r--r--docs/content/en/functions/hmac.md (renamed from content/en/functions/hmac.md)0
-rw-r--r--docs/content/en/functions/htmlEscape.md (renamed from content/en/functions/htmlEscape.md)0
-rw-r--r--docs/content/en/functions/htmlUnescape.md (renamed from content/en/functions/htmlUnescape.md)0
-rw-r--r--docs/content/en/functions/hugo.md (renamed from content/en/functions/hugo.md)0
-rw-r--r--docs/content/en/functions/humanize.md (renamed from content/en/functions/humanize.md)0
-rw-r--r--docs/content/en/functions/i18n.md (renamed from content/en/functions/i18n.md)0
-rw-r--r--docs/content/en/functions/images/index.md (renamed from content/en/functions/images/index.md)0
-rw-r--r--docs/content/en/functions/in.md (renamed from content/en/functions/in.md)0
-rw-r--r--docs/content/en/functions/index-function.md (renamed from content/en/functions/index-function.md)0
-rw-r--r--docs/content/en/functions/int.md (renamed from content/en/functions/int.md)0
-rw-r--r--docs/content/en/functions/intersect.md (renamed from content/en/functions/intersect.md)0
-rw-r--r--docs/content/en/functions/ismenucurrent.md (renamed from content/en/functions/ismenucurrent.md)0
-rw-r--r--docs/content/en/functions/isset.md (renamed from content/en/functions/isset.md)0
-rw-r--r--docs/content/en/functions/jsonify.md (renamed from content/en/functions/jsonify.md)0
-rw-r--r--docs/content/en/functions/lang.Merge.md (renamed from content/en/functions/lang.Merge.md)0
-rw-r--r--docs/content/en/functions/lang.md (renamed from content/en/functions/lang.md)0
-rw-r--r--docs/content/en/functions/last.md (renamed from content/en/functions/last.md)0
-rw-r--r--docs/content/en/functions/le.md (renamed from content/en/functions/le.md)0
-rw-r--r--docs/content/en/functions/len.md (renamed from content/en/functions/len.md)0
-rw-r--r--docs/content/en/functions/lower.md (renamed from content/en/functions/lower.md)0
-rw-r--r--docs/content/en/functions/lt.md (renamed from content/en/functions/lt.md)0
-rw-r--r--docs/content/en/functions/markdownify.md (renamed from content/en/functions/markdownify.md)0
-rw-r--r--docs/content/en/functions/math.md (renamed from content/en/functions/math.md)0
-rw-r--r--docs/content/en/functions/md5.md (renamed from content/en/functions/md5.md)0
-rw-r--r--docs/content/en/functions/merge.md (renamed from content/en/functions/merge.md)0
-rw-r--r--docs/content/en/functions/ne.md (renamed from content/en/functions/ne.md)0
-rw-r--r--docs/content/en/functions/now.md (renamed from content/en/functions/now.md)0
-rw-r--r--docs/content/en/functions/os.Stat.md (renamed from content/en/functions/os.Stat.md)0
-rw-r--r--docs/content/en/functions/param.md (renamed from content/en/functions/param.md)0
-rw-r--r--docs/content/en/functions/partialCached.md (renamed from content/en/functions/partialCached.md)0
-rw-r--r--docs/content/en/functions/path.Base.md (renamed from content/en/functions/path.Base.md)0
-rw-r--r--docs/content/en/functions/path.BaseName.md (renamed from content/en/functions/path.BaseName.md)0
-rw-r--r--docs/content/en/functions/path.Clean.md (renamed from content/en/functions/path.Clean.md)0
-rw-r--r--docs/content/en/functions/path.Dir.md (renamed from content/en/functions/path.Dir.md)0
-rw-r--r--docs/content/en/functions/path.Ext.md (renamed from content/en/functions/path.Ext.md)0
-rw-r--r--docs/content/en/functions/path.Join.md (renamed from content/en/functions/path.Join.md)0
-rw-r--r--docs/content/en/functions/path.Split.md (renamed from content/en/functions/path.Split.md)0
-rw-r--r--docs/content/en/functions/plainify.md (renamed from content/en/functions/plainify.md)0
-rw-r--r--docs/content/en/functions/pluralize.md (renamed from content/en/functions/pluralize.md)0
-rw-r--r--docs/content/en/functions/print.md (renamed from content/en/functions/print.md)0
-rw-r--r--docs/content/en/functions/printf.md (renamed from content/en/functions/printf.md)0
-rw-r--r--docs/content/en/functions/println.md (renamed from content/en/functions/println.md)0
-rw-r--r--docs/content/en/functions/querify.md (renamed from content/en/functions/querify.md)0
-rw-r--r--docs/content/en/functions/range.md (renamed from content/en/functions/range.md)0
-rw-r--r--docs/content/en/functions/readdir.md (renamed from content/en/functions/readdir.md)0
-rw-r--r--docs/content/en/functions/readfile.md (renamed from content/en/functions/readfile.md)0
-rw-r--r--docs/content/en/functions/ref.md (renamed from content/en/functions/ref.md)0
-rw-r--r--docs/content/en/functions/reflect.IsMap.md (renamed from content/en/functions/reflect.IsMap.md)0
-rw-r--r--docs/content/en/functions/reflect.IsSlice.md (renamed from content/en/functions/reflect.IsSlice.md)0
-rw-r--r--docs/content/en/functions/relLangURL.md (renamed from content/en/functions/relLangURL.md)0
-rw-r--r--docs/content/en/functions/relref.md (renamed from content/en/functions/relref.md)0
-rw-r--r--docs/content/en/functions/relurl.md (renamed from content/en/functions/relurl.md)0
-rw-r--r--docs/content/en/functions/render.md (renamed from content/en/functions/render.md)0
-rw-r--r--docs/content/en/functions/replace.md (renamed from content/en/functions/replace.md)0
-rw-r--r--docs/content/en/functions/replacere.md (renamed from content/en/functions/replacere.md)0
-rw-r--r--docs/content/en/functions/safeCSS.md (renamed from content/en/functions/safeCSS.md)0
-rw-r--r--docs/content/en/functions/safeHTML.md (renamed from content/en/functions/safeHTML.md)0
-rw-r--r--docs/content/en/functions/safeHTMLAttr.md (renamed from content/en/functions/safeHTMLAttr.md)0
-rw-r--r--docs/content/en/functions/safeJS.md (renamed from content/en/functions/safeJS.md)0
-rw-r--r--docs/content/en/functions/safeURL.md (renamed from content/en/functions/safeURL.md)0
-rw-r--r--docs/content/en/functions/scratch.md (renamed from content/en/functions/scratch.md)0
-rw-r--r--docs/content/en/functions/seq.md (renamed from content/en/functions/seq.md)0
-rw-r--r--docs/content/en/functions/sha.md (renamed from content/en/functions/sha.md)0
-rw-r--r--docs/content/en/functions/shuffle.md (renamed from content/en/functions/shuffle.md)0
-rw-r--r--docs/content/en/functions/singularize.md (renamed from content/en/functions/singularize.md)0
-rw-r--r--docs/content/en/functions/site.md (renamed from content/en/functions/site.md)0
-rw-r--r--docs/content/en/functions/slice.md (renamed from content/en/functions/slice.md)0
-rw-r--r--docs/content/en/functions/slicestr.md (renamed from content/en/functions/slicestr.md)0
-rw-r--r--docs/content/en/functions/sort.md (renamed from content/en/functions/sort.md)0
-rw-r--r--docs/content/en/functions/split.md (renamed from content/en/functions/split.md)0
-rw-r--r--docs/content/en/functions/string.md (renamed from content/en/functions/string.md)0
-rw-r--r--docs/content/en/functions/strings.Count.md (renamed from content/en/functions/strings.Count.md)0
-rw-r--r--docs/content/en/functions/strings.HasSuffix.md (renamed from content/en/functions/strings.HasSuffix.md)0
-rw-r--r--docs/content/en/functions/strings.Repeat.md (renamed from content/en/functions/strings.Repeat.md)0
-rw-r--r--docs/content/en/functions/strings.RuneCount.md (renamed from content/en/functions/strings.RuneCount.md)0
-rw-r--r--docs/content/en/functions/strings.TrimLeft.md (renamed from content/en/functions/strings.TrimLeft.md)0
-rw-r--r--docs/content/en/functions/strings.TrimPrefix.md (renamed from content/en/functions/strings.TrimPrefix.md)0
-rw-r--r--docs/content/en/functions/strings.TrimRight.md (renamed from content/en/functions/strings.TrimRight.md)0
-rw-r--r--docs/content/en/functions/strings.TrimSuffix.md (renamed from content/en/functions/strings.TrimSuffix.md)0
-rw-r--r--docs/content/en/functions/substr.md (renamed from content/en/functions/substr.md)0
-rw-r--r--docs/content/en/functions/symdiff.md (renamed from content/en/functions/symdiff.md)0
-rw-r--r--docs/content/en/functions/templates.Exists.md (renamed from content/en/functions/templates.Exists.md)0
-rw-r--r--docs/content/en/functions/time.md (renamed from content/en/functions/time.md)0
-rw-r--r--docs/content/en/functions/title.md (renamed from content/en/functions/title.md)0
-rw-r--r--docs/content/en/functions/transform.Unmarshal.md (renamed from content/en/functions/transform.Unmarshal.md)0
-rw-r--r--docs/content/en/functions/trim.md (renamed from content/en/functions/trim.md)0
-rw-r--r--docs/content/en/functions/truncate.md (renamed from content/en/functions/truncate.md)0
-rw-r--r--docs/content/en/functions/union.md (renamed from content/en/functions/union.md)0
-rw-r--r--docs/content/en/functions/uniq.md (renamed from content/en/functions/uniq.md)0
-rw-r--r--docs/content/en/functions/unix.md (renamed from content/en/functions/unix.md)0
-rw-r--r--docs/content/en/functions/upper.md (renamed from content/en/functions/upper.md)0
-rw-r--r--docs/content/en/functions/urlize.md (renamed from content/en/functions/urlize.md)0
-rw-r--r--docs/content/en/functions/urls.Parse.md (renamed from content/en/functions/urls.Parse.md)0
-rw-r--r--docs/content/en/functions/where.md (renamed from content/en/functions/where.md)0
-rw-r--r--docs/content/en/functions/with.md (renamed from content/en/functions/with.md)0
-rw-r--r--docs/content/en/getting-started/_index.md (renamed from content/en/getting-started/_index.md)0
-rw-r--r--docs/content/en/getting-started/configuration-markup.md (renamed from content/en/getting-started/configuration-markup.md)0
-rw-r--r--docs/content/en/getting-started/configuration.md (renamed from content/en/getting-started/configuration.md)0
-rw-r--r--docs/content/en/getting-started/directory-structure.md (renamed from content/en/getting-started/directory-structure.md)0
-rw-r--r--docs/content/en/getting-started/external-learning-resources/hia.jpg (renamed from content/en/getting-started/external-learning-resources/hia.jpg)bin66768 -> 66768 bytes
-rw-r--r--docs/content/en/getting-started/external-learning-resources/index.md (renamed from content/en/getting-started/external-learning-resources/index.md)0
-rw-r--r--docs/content/en/getting-started/installing.md (renamed from content/en/getting-started/installing.md)0
-rw-r--r--docs/content/en/getting-started/quick-start.md (renamed from content/en/getting-started/quick-start.md)0
-rw-r--r--docs/content/en/getting-started/usage.md (renamed from content/en/getting-started/usage.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/_index.md (renamed from content/en/hosting-and-deployment/_index.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/deployment-with-rclone.md (renamed from content/en/hosting-and-deployment/deployment-with-rclone.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/deployment-with-rsync.md (renamed from content/en/hosting-and-deployment/deployment-with-rsync.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-aws-amplify.md (renamed from content/en/hosting-and-deployment/hosting-on-aws-amplify.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-cloudflare-pages.md (renamed from content/en/hosting-and-deployment/hosting-on-cloudflare-pages.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-firebase.md (renamed from content/en/hosting-and-deployment/hosting-on-firebase.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-github.md (renamed from content/en/hosting-and-deployment/hosting-on-github.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-gitlab.md (renamed from content/en/hosting-and-deployment/hosting-on-gitlab.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-keycdn.md (renamed from content/en/hosting-and-deployment/hosting-on-keycdn.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-netlify.md (renamed from content/en/hosting-and-deployment/hosting-on-netlify.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hosting-on-render.md (renamed from content/en/hosting-and-deployment/hosting-on-render.md)0
-rw-r--r--docs/content/en/hosting-and-deployment/hugo-deploy.md (renamed from content/en/hosting-and-deployment/hugo-deploy.md)0
-rw-r--r--docs/content/en/hugo-modules/_index.md (renamed from content/en/hugo-modules/_index.md)0
-rw-r--r--docs/content/en/hugo-modules/configuration.md (renamed from content/en/hugo-modules/configuration.md)0
-rw-r--r--docs/content/en/hugo-modules/theme-components.md (renamed from content/en/hugo-modules/theme-components.md)0
-rw-r--r--docs/content/en/hugo-modules/use-modules.md (renamed from content/en/hugo-modules/use-modules.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/_index.md (renamed from content/en/hugo-pipes/_index.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/babel.md (renamed from content/en/hugo-pipes/babel.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/bundling.md (renamed from content/en/hugo-pipes/bundling.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/fingerprint.md (renamed from content/en/hugo-pipes/fingerprint.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/introduction.md (renamed from content/en/hugo-pipes/introduction.md)0
-rw-r--r--docs/content/en/hugo-pipes/js.md (renamed from content/en/hugo-pipes/js.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/minification.md (renamed from content/en/hugo-pipes/minification.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/postcss.md (renamed from content/en/hugo-pipes/postcss.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/postprocess.md (renamed from content/en/hugo-pipes/postprocess.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/resource-from-string.md (renamed from content/en/hugo-pipes/resource-from-string.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/resource-from-template.md (renamed from content/en/hugo-pipes/resource-from-template.md)0
-rwxr-xr-xdocs/content/en/hugo-pipes/scss-sass.md (renamed from content/en/hugo-pipes/scss-sass.md)0
-rw-r--r--docs/content/en/maintenance/_index.md (renamed from content/en/maintenance/_index.md)0
-rw-r--r--docs/content/en/myshowcase/bio.md (renamed from content/en/myshowcase/bio.md)0
-rw-r--r--docs/content/en/myshowcase/featured.png (renamed from content/en/myshowcase/featured.png)bin41270 -> 41270 bytes
-rw-r--r--docs/content/en/myshowcase/index.md (renamed from content/en/myshowcase/index.md)0
-rw-r--r--docs/content/en/news/0.10-relnotes/index.md (renamed from content/en/news/0.10-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.11-relnotes/index.md (renamed from content/en/news/0.11-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.12-relnotes/index.md (renamed from content/en/news/0.12-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.13-relnotes/index.md (renamed from content/en/news/0.13-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.14-relnotes/index.md (renamed from content/en/news/0.14-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.15-relnotes/index.md (renamed from content/en/news/0.15-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.16-relnotes/index.md (renamed from content/en/news/0.16-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.17-relnotes/index.md (renamed from content/en/news/0.17-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.18-relnotes/index.md (renamed from content/en/news/0.18-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.19-relnotes/index.md (renamed from content/en/news/0.19-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20-relnotes/index.md (renamed from content/en/news/0.20-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.1-relnotes/index.md (renamed from content/en/news/0.20.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.2-relnotes/index.md (renamed from content/en/news/0.20.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.3-relnotes/index.md (renamed from content/en/news/0.20.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.4-relnotes/index.md (renamed from content/en/news/0.20.4-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.5-relnotes/index.md (renamed from content/en/news/0.20.5-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.6-relnotes/index.md (renamed from content/en/news/0.20.6-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.20.7-relnotes/index.md (renamed from content/en/news/0.20.7-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.21-relnotes/index.md (renamed from content/en/news/0.21-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.22-relnotes/index.md (renamed from content/en/news/0.22-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.22.1-relnotes/index.md (renamed from content/en/news/0.22.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.23-relnotes/index.md (renamed from content/en/news/0.23-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.24-relnotes/index.md (renamed from content/en/news/0.24-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.24.1-relnotes/index.md (renamed from content/en/news/0.24.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.25-relnotes/index.md (renamed from content/en/news/0.25-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.25.1-relnotes/index.md (renamed from content/en/news/0.25.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.26-relnotes/index.md (renamed from content/en/news/0.26-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.27-relnotes/index.md (renamed from content/en/news/0.27-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.27.1-relnotes/index.md (renamed from content/en/news/0.27.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.28-relnotes/index.md (renamed from content/en/news/0.28-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.29-relnotes/index.md (renamed from content/en/news/0.29-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.30-relnotes/index.md (renamed from content/en/news/0.30-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.30.1-relnotes/index.md (renamed from content/en/news/0.30.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.30.2-relnotes/index.md (renamed from content/en/news/0.30.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.31-relnotes/index.md (renamed from content/en/news/0.31-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.31.1-relnotes/index.md (renamed from content/en/news/0.31.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.32-relnotes/index.md (renamed from content/en/news/0.32-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.32.1-relnotes/index.md (renamed from content/en/news/0.32.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.32.2-relnotes/index.md (renamed from content/en/news/0.32.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.32.3-relnotes/index.md (renamed from content/en/news/0.32.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.32.4-relnotes/index.md (renamed from content/en/news/0.32.4-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.33-relnotes/featured-hugo-33-poster.png (renamed from content/en/news/0.33-relnotes/featured-hugo-33-poster.png)bin70230 -> 70230 bytes
-rw-r--r--docs/content/en/news/0.33-relnotes/index.md (renamed from content/en/news/0.33-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.34-relnotes/featured-34-poster.png (renamed from content/en/news/0.34-relnotes/featured-34-poster.png)bin78317 -> 78317 bytes
-rw-r--r--docs/content/en/news/0.34-relnotes/index.md (renamed from content/en/news/0.34-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.35-relnotes/featured-hugo-35-poster.png (renamed from content/en/news/0.35-relnotes/featured-hugo-35-poster.png)bin88519 -> 88519 bytes
-rw-r--r--docs/content/en/news/0.35-relnotes/index.md (renamed from content/en/news/0.35-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.36-relnotes/featured-hugo-36-poster.png (renamed from content/en/news/0.36-relnotes/featured-hugo-36-poster.png)bin67640 -> 67640 bytes
-rw-r--r--docs/content/en/news/0.36-relnotes/index.md (renamed from content/en/news/0.36-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.36.1-relnotes/index.md (renamed from content/en/news/0.36.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.37-relnotes/featured-hugo-37-poster.png (renamed from content/en/news/0.37-relnotes/featured-hugo-37-poster.png)bin186693 -> 186693 bytes
-rw-r--r--docs/content/en/news/0.37-relnotes/index.md (renamed from content/en/news/0.37-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.37.1-relnotes/index.md (renamed from content/en/news/0.37.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.38-relnotes/featured-poster.png (renamed from content/en/news/0.38-relnotes/featured-poster.png)bin69978 -> 69978 bytes
-rw-r--r--docs/content/en/news/0.38-relnotes/index.md (renamed from content/en/news/0.38-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.38.1-relnotes/index.md (renamed from content/en/news/0.38.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.38.2-relnotes/index.md (renamed from content/en/news/0.38.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.39-relnotes/featured-hugo-39-poster.png (renamed from content/en/news/0.39-relnotes/featured-hugo-39-poster.png)bin217215 -> 217215 bytes
-rw-r--r--docs/content/en/news/0.39-relnotes/index.md (renamed from content/en/news/0.39-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.40-relnotes/featured-hugo-40-poster.png (renamed from content/en/news/0.40-relnotes/featured-hugo-40-poster.png)bin69238 -> 69238 bytes
-rw-r--r--docs/content/en/news/0.40-relnotes/index.md (renamed from content/en/news/0.40-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.40.1-relnotes/index.md (renamed from content/en/news/0.40.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.40.2-relnotes/index.md (renamed from content/en/news/0.40.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.40.3-relnotes/index.md (renamed from content/en/news/0.40.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.41-relnotes/featured-hugo-41-poster.png (renamed from content/en/news/0.41-relnotes/featured-hugo-41-poster.png)bin67955 -> 67955 bytes
-rw-r--r--docs/content/en/news/0.41-relnotes/index.md (renamed from content/en/news/0.41-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.42-relnotes/featured-hugo-42-poster.png (renamed from content/en/news/0.42-relnotes/featured-hugo-42-poster.png)bin74852 -> 74852 bytes
-rw-r--r--docs/content/en/news/0.42-relnotes/index.md (renamed from content/en/news/0.42-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.42.1-relnotes/index.md (renamed from content/en/news/0.42.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.42.2-relnotes/index.md (renamed from content/en/news/0.42.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.43-relnotes/featured-hugo-43-poster.png (renamed from content/en/news/0.43-relnotes/featured-hugo-43-poster.png)bin78299 -> 78299 bytes
-rw-r--r--docs/content/en/news/0.43-relnotes/index.md (renamed from content/en/news/0.43-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.44-relnotes/featured-hugo-44-poster.png (renamed from content/en/news/0.44-relnotes/featured-hugo-44-poster.png)bin77631 -> 77631 bytes
-rw-r--r--docs/content/en/news/0.44-relnotes/index.md (renamed from content/en/news/0.44-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.45-relnotes/featured-hugo-45-poster.png (renamed from content/en/news/0.45-relnotes/featured-hugo-45-poster.png)bin66863 -> 66863 bytes
-rw-r--r--docs/content/en/news/0.45-relnotes/index.md (renamed from content/en/news/0.45-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.45.1-relnotes/index.md (renamed from content/en/news/0.45.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.46-relnotes/featured-hugo-46-poster.png (renamed from content/en/news/0.46-relnotes/featured-hugo-46-poster.png)bin68614 -> 68614 bytes
-rw-r--r--docs/content/en/news/0.46-relnotes/index.md (renamed from content/en/news/0.46-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.47-relnotes/featured-hugo-47-poster.png (renamed from content/en/news/0.47-relnotes/featured-hugo-47-poster.png)bin88288 -> 88288 bytes
-rw-r--r--docs/content/en/news/0.47-relnotes/index.md (renamed from content/en/news/0.47-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.47.1-relnotes/index.md (renamed from content/en/news/0.47.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.48-relnotes/featured-hugo-48-poster.png (renamed from content/en/news/0.48-relnotes/featured-hugo-48-poster.png)bin95358 -> 95358 bytes
-rw-r--r--docs/content/en/news/0.48-relnotes/index.md (renamed from content/en/news/0.48-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.49-relnotes/featured-hugo-49-poster.png (renamed from content/en/news/0.49-relnotes/featured-hugo-49-poster.png)bin66352 -> 66352 bytes
-rw-r--r--docs/content/en/news/0.49-relnotes/index.md (renamed from content/en/news/0.49-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.49.1-relnotes/index.md (renamed from content/en/news/0.49.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.49.2-relnotes/index.md (renamed from content/en/news/0.49.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.50-relnotes/featured-hugo-50-poster.png (renamed from content/en/news/0.50-relnotes/featured-hugo-50-poster.png)bin227240 -> 227240 bytes
-rw-r--r--docs/content/en/news/0.50-relnotes/index.md (renamed from content/en/news/0.50-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.51-relnotes/featured-hugo-51-poster.png (renamed from content/en/news/0.51-relnotes/featured-hugo-51-poster.png)bin117678 -> 117678 bytes
-rw-r--r--docs/content/en/news/0.51-relnotes/index.md (renamed from content/en/news/0.51-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.52-relnotes/featured-hugo-52-poster.png (renamed from content/en/news/0.52-relnotes/featured-hugo-52-poster.png)bin336810 -> 336810 bytes
-rw-r--r--docs/content/en/news/0.52-relnotes/index.md (renamed from content/en/news/0.52-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.53-relnotes/featured-hugo-53-poster.png (renamed from content/en/news/0.53-relnotes/featured-hugo-53-poster.png)bin110427 -> 110427 bytes
-rw-r--r--docs/content/en/news/0.53-relnotes/index.md (renamed from content/en/news/0.53-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.54.0-relnotes/featured-hugo-54.0-poster.png (renamed from content/en/news/0.54.0-relnotes/featured-hugo-54.0-poster.png)bin59805 -> 59805 bytes
-rw-r--r--docs/content/en/news/0.54.0-relnotes/index.md (renamed from content/en/news/0.54.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.0-relnotes/featured.png (renamed from content/en/news/0.55.0-relnotes/featured.png)bin1221797 -> 1221797 bytes
-rw-r--r--docs/content/en/news/0.55.0-relnotes/index.md (renamed from content/en/news/0.55.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.1-relnotes/index.md (renamed from content/en/news/0.55.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.2-relnotes/index.md (renamed from content/en/news/0.55.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.3-relnotes/index.md (renamed from content/en/news/0.55.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.4-relnotes/index.md (renamed from content/en/news/0.55.4-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.5-relnotes/index.md (renamed from content/en/news/0.55.5-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.55.6-relnotes/index.md (renamed from content/en/news/0.55.6-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.56.0-relnotes/featured.png (renamed from content/en/news/0.56.0-relnotes/featured.png)bin254587 -> 254587 bytes
-rw-r--r--docs/content/en/news/0.56.0-relnotes/index.md (renamed from content/en/news/0.56.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.56.1-relnotes/index.md (renamed from content/en/news/0.56.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.56.2-relnotes/index.md (renamed from content/en/news/0.56.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.56.3-relnotes/index.md (renamed from content/en/news/0.56.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.57.0-relnotes/hugo-57-poster-featured.png (renamed from content/en/news/0.57.0-relnotes/hugo-57-poster-featured.png)bin45223 -> 45223 bytes
-rw-r--r--docs/content/en/news/0.57.0-relnotes/index.md (renamed from content/en/news/0.57.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.57.1-relnotes/index.md (renamed from content/en/news/0.57.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.57.2-relnotes/index.md (renamed from content/en/news/0.57.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.58.0-relnotes/hugo58-featured.png (renamed from content/en/news/0.58.0-relnotes/hugo58-featured.png)bin23413 -> 23413 bytes
-rw-r--r--docs/content/en/news/0.58.0-relnotes/index.md (renamed from content/en/news/0.58.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.58.1-relnotes/index.md (renamed from content/en/news/0.58.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.58.2-relnotes/index.md (renamed from content/en/news/0.58.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.58.3-relnotes/index.md (renamed from content/en/news/0.58.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.59.0-relnotes/hugo-59-poster-featured.png (renamed from content/en/news/0.59.0-relnotes/hugo-59-poster-featured.png)bin78054 -> 78054 bytes
-rw-r--r--docs/content/en/news/0.59.0-relnotes/index.md (renamed from content/en/news/0.59.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.59.1-relnotes/index.md (renamed from content/en/news/0.59.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.60.0-relnotes/index.md (renamed from content/en/news/0.60.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.60.0-relnotes/poster-featured.png (renamed from content/en/news/0.60.0-relnotes/poster-featured.png)bin31907 -> 31907 bytes
-rw-r--r--docs/content/en/news/0.60.1-relnotes/featured-061.png (renamed from content/en/news/0.60.1-relnotes/featured-061.png)bin28841 -> 28841 bytes
-rw-r--r--docs/content/en/news/0.60.1-relnotes/index.md (renamed from content/en/news/0.60.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.61.0-relnotes/hugo-61-featured.png (renamed from content/en/news/0.61.0-relnotes/hugo-61-featured.png)bin79929 -> 79929 bytes
-rw-r--r--docs/content/en/news/0.61.0-relnotes/index.md (renamed from content/en/news/0.61.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.62.0-relnotes/hugo-62-poster-featured.png (renamed from content/en/news/0.62.0-relnotes/hugo-62-poster-featured.png)bin105390 -> 105390 bytes
-rw-r--r--docs/content/en/news/0.62.0-relnotes/index.md (renamed from content/en/news/0.62.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.62.1-relnotes/index.md (renamed from content/en/news/0.62.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.62.2-relnotes/index.md (renamed from content/en/news/0.62.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.63.0-relnotes/featured-063.png (renamed from content/en/news/0.63.0-relnotes/featured-063.png)bin212246 -> 212246 bytes
-rw-r--r--docs/content/en/news/0.63.0-relnotes/index.md (renamed from content/en/news/0.63.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.63.1-relnotes/index.md (renamed from content/en/news/0.63.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.63.2-relnotes/index.md (renamed from content/en/news/0.63.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.64.0-relnotes/hugo-64-poster-featured.png (renamed from content/en/news/0.64.0-relnotes/hugo-64-poster-featured.png)bin69464 -> 69464 bytes
-rw-r--r--docs/content/en/news/0.64.0-relnotes/index.md (renamed from content/en/news/0.64.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.64.1-relnotes/index.md (renamed from content/en/news/0.64.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.65.0-relnotes/hugo-65-poster-featured.png (renamed from content/en/news/0.65.0-relnotes/hugo-65-poster-featured.png)bin115945 -> 115945 bytes
-rw-r--r--docs/content/en/news/0.65.0-relnotes/index.md (renamed from content/en/news/0.65.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.65.0-relnotes/pg-admin-tos.png (renamed from content/en/news/0.65.0-relnotes/pg-admin-tos.png)bin65614 -> 65614 bytes
-rw-r--r--docs/content/en/news/0.65.1-relnotes/index.md (renamed from content/en/news/0.65.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.65.2-relnotes/index.md (renamed from content/en/news/0.65.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.65.3-relnotes/index.md (renamed from content/en/news/0.65.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.66.0-relnotes/hugo-66-poster-featured.png (renamed from content/en/news/0.66.0-relnotes/hugo-66-poster-featured.png)bin75588 -> 75588 bytes
-rw-r--r--docs/content/en/news/0.66.0-relnotes/index.md (renamed from content/en/news/0.66.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.67.0-relnotes/hugo-67-poster-featured.png (renamed from content/en/news/0.67.0-relnotes/hugo-67-poster-featured.png)bin79436 -> 79436 bytes
-rw-r--r--docs/content/en/news/0.67.0-relnotes/index.md (renamed from content/en/news/0.67.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.67.1-relnotes/index.md (renamed from content/en/news/0.67.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.68.0-relnotes/hugo-68-featured.png (renamed from content/en/news/0.68.0-relnotes/hugo-68-featured.png)bin65337 -> 65337 bytes
-rw-r--r--docs/content/en/news/0.68.0-relnotes/index.md (renamed from content/en/news/0.68.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.68.1-relnotes/index.md (renamed from content/en/news/0.68.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.68.2-relnotes/index.md (renamed from content/en/news/0.68.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.68.3-relnotes/index.md (renamed from content/en/news/0.68.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.69.0-relnotes/hugo-69-easter-featured.png (renamed from content/en/news/0.69.0-relnotes/hugo-69-easter-featured.png)bin398560 -> 398560 bytes
-rw-r--r--docs/content/en/news/0.69.0-relnotes/index.md (renamed from content/en/news/0.69.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.69.1-relnotes/index.md (renamed from content/en/news/0.69.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.69.2-relnotes/index.md (renamed from content/en/news/0.69.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.7-relnotes/index.md (renamed from content/en/news/0.7-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.70.0-relnotes/hugo-70-featured.png (renamed from content/en/news/0.70.0-relnotes/hugo-70-featured.png)bin65533 -> 65533 bytes
-rw-r--r--docs/content/en/news/0.70.0-relnotes/index.md (renamed from content/en/news/0.70.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.71.0-relnotes/hugo-71-featured.png (renamed from content/en/news/0.71.0-relnotes/hugo-71-featured.png)bin209832 -> 209832 bytes
-rw-r--r--docs/content/en/news/0.71.0-relnotes/index.md (renamed from content/en/news/0.71.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.71.1-relnotes/index.md (renamed from content/en/news/0.71.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.72.0-relnotes/hugo-72-featured.png (renamed from content/en/news/0.72.0-relnotes/hugo-72-featured.png)bin256988 -> 256988 bytes
-rw-r--r--docs/content/en/news/0.72.0-relnotes/index.md (renamed from content/en/news/0.72.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.73.0-relnotes/hugo-73-featured.png (renamed from content/en/news/0.73.0-relnotes/hugo-73-featured.png)bin186170 -> 186170 bytes
-rw-r--r--docs/content/en/news/0.73.0-relnotes/index.md (renamed from content/en/news/0.73.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.74.0-relnotes/featured.png (renamed from content/en/news/0.74.0-relnotes/featured.png)bin56047 -> 56047 bytes
-rw-r--r--docs/content/en/news/0.74.0-relnotes/index.md (renamed from content/en/news/0.74.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.74.1-relnotes/index.md (renamed from content/en/news/0.74.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.74.2-relnotes/index.md (renamed from content/en/news/0.74.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.74.3-relnotes/index.md (renamed from content/en/news/0.74.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.75.0-relnotes/featured.png (renamed from content/en/news/0.75.0-relnotes/featured.png)bin125400 -> 125400 bytes
-rw-r--r--docs/content/en/news/0.75.0-relnotes/index.md (renamed from content/en/news/0.75.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.75.1-relnotes/index.md (renamed from content/en/news/0.75.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.76.0-relnotes/featured.png (renamed from content/en/news/0.76.0-relnotes/featured.png)bin146539 -> 146539 bytes
-rw-r--r--docs/content/en/news/0.76.0-relnotes/index.md (renamed from content/en/news/0.76.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.76.1-relnotes/index.md (renamed from content/en/news/0.76.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.76.2-relnotes/index.md (renamed from content/en/news/0.76.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.76.3-relnotes/index.md (renamed from content/en/news/0.76.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.76.4-relnotes/index.md (renamed from content/en/news/0.76.4-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.76.5-relnotes/index.md (renamed from content/en/news/0.76.5-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.77.0-relnotes/featured.png (renamed from content/en/news/0.77.0-relnotes/featured.png)bin130926 -> 130926 bytes
-rw-r--r--docs/content/en/news/0.77.0-relnotes/index.md (renamed from content/en/news/0.77.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.78.0-relnotes/featured.png (renamed from content/en/news/0.78.0-relnotes/featured.png)bin47074 -> 47074 bytes
-rw-r--r--docs/content/en/news/0.78.0-relnotes/index.md (renamed from content/en/news/0.78.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.78.1-relnotes/index.md (renamed from content/en/news/0.78.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.78.2-relnotes/index.md (renamed from content/en/news/0.78.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.79.0-relnotes/featured.png (renamed from content/en/news/0.79.0-relnotes/featured.png)bin75235 -> 75235 bytes
-rw-r--r--docs/content/en/news/0.79.0-relnotes/index.md (renamed from content/en/news/0.79.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.79.1-relnotes/index.md (renamed from content/en/news/0.79.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.8-relnotes/index.md (renamed from content/en/news/0.8-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.80.0-relnotes/featured.png (renamed from content/en/news/0.80.0-relnotes/featured.png)bin162027 -> 162027 bytes
-rw-r--r--docs/content/en/news/0.80.0-relnotes/index.md (renamed from content/en/news/0.80.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.81.0-relnotes/featured.jpg (renamed from content/en/news/0.81.0-relnotes/featured.jpg)bin345282 -> 345282 bytes
-rw-r--r--docs/content/en/news/0.81.0-relnotes/index.md (renamed from content/en/news/0.81.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.82.0-relnotes/featured.png (renamed from content/en/news/0.82.0-relnotes/featured.png)bin57106 -> 57106 bytes
-rw-r--r--docs/content/en/news/0.82.0-relnotes/index.md (renamed from content/en/news/0.82.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.82.1-relnotes/index.md (renamed from content/en/news/0.82.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.83.0-relnotes/featured.png (renamed from content/en/news/0.83.0-relnotes/featured.png)bin82972 -> 82972 bytes
-rw-r--r--docs/content/en/news/0.83.0-relnotes/index.md (renamed from content/en/news/0.83.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.83.1-relnotes/index.md (renamed from content/en/news/0.83.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.84.0-relnotes/featured.png (renamed from content/en/news/0.84.0-relnotes/featured.png)bin47732 -> 47732 bytes
-rw-r--r--docs/content/en/news/0.84.0-relnotes/index.md (renamed from content/en/news/0.84.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.84.1-relnotes/index.md (renamed from content/en/news/0.84.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.84.2-relnotes/index.md (renamed from content/en/news/0.84.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.84.3-relnotes/index.md (renamed from content/en/news/0.84.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.84.4-relnotes/index.md (renamed from content/en/news/0.84.4-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.85.0-relnotes/featured.png (renamed from content/en/news/0.85.0-relnotes/featured.png)bin67393 -> 67393 bytes
-rw-r--r--docs/content/en/news/0.85.0-relnotes/index.md (renamed from content/en/news/0.85.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.86.0-relnotes/featured.png (renamed from content/en/news/0.86.0-relnotes/featured.png)bin188476 -> 188476 bytes
-rw-r--r--docs/content/en/news/0.86.0-relnotes/index.md (renamed from content/en/news/0.86.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.86.1-relnotes/index.md (renamed from content/en/news/0.86.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.87.0-relnotes/featured.png (renamed from content/en/news/0.87.0-relnotes/featured.png)bin104936 -> 104936 bytes
-rw-r--r--docs/content/en/news/0.87.0-relnotes/index.md (renamed from content/en/news/0.87.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.88.0-relnotes/featured.png (renamed from content/en/news/0.88.0-relnotes/featured.png)bin87331 -> 87331 bytes
-rw-r--r--docs/content/en/news/0.88.0-relnotes/index.md (renamed from content/en/news/0.88.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.88.1-relnotes/index.md (renamed from content/en/news/0.88.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.89.0-relnotes/featured.png (renamed from content/en/news/0.89.0-relnotes/featured.png)bin104046 -> 104046 bytes
-rw-r--r--docs/content/en/news/0.89.0-relnotes/index.md (renamed from content/en/news/0.89.0-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.89.1-relnotes/index.md (renamed from content/en/news/0.89.1-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.89.2-relnotes/index.md (renamed from content/en/news/0.89.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.89.3-relnotes/index.md (renamed from content/en/news/0.89.3-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.89.4-relnotes/index.md (renamed from content/en/news/0.89.4-relnotes/index.md)0
-rw-r--r--docs/content/en/news/0.9-relnotes/index.md (renamed from content/en/news/0.9-relnotes/index.md)0
-rw-r--r--docs/content/en/news/2021-12-17-no-more-releasenotes.md (renamed from content/en/news/2021-12-17-no-more-releasenotes.md)0
-rw-r--r--docs/content/en/news/2021/0.91.2-relnotes/featured.png (renamed from content/en/news/2021/0.91.2-relnotes/featured.png)bin131575 -> 131575 bytes
-rw-r--r--docs/content/en/news/2021/0.91.2-relnotes/index.md (renamed from content/en/news/2021/0.91.2-relnotes/index.md)0
-rw-r--r--docs/content/en/news/_index.md (renamed from content/en/news/_index.md)0
-rw-r--r--docs/content/en/news/hugo-macos-intel-vs-arm/featured.png (renamed from content/en/news/hugo-macos-intel-vs-arm/featured.png)bin299333 -> 299333 bytes
-rw-r--r--docs/content/en/news/hugo-macos-intel-vs-arm/index.html (renamed from content/en/news/hugo-macos-intel-vs-arm/index.html)0
-rw-r--r--docs/content/en/news/lets-celebrate-hugos-5th-birthday/featured.png (renamed from content/en/news/lets-celebrate-hugos-5th-birthday/featured.png)bin179291 -> 179291 bytes
-rw-r--r--docs/content/en/news/lets-celebrate-hugos-5th-birthday/graph-stars.png (renamed from content/en/news/lets-celebrate-hugos-5th-birthday/graph-stars.png)bin15599 -> 15599 bytes
-rw-r--r--docs/content/en/news/lets-celebrate-hugos-5th-birthday/graph-themes.png (renamed from content/en/news/lets-celebrate-hugos-5th-birthday/graph-themes.png)bin16956 -> 16956 bytes
-rw-r--r--docs/content/en/news/lets-celebrate-hugos-5th-birthday/index.md (renamed from content/en/news/lets-celebrate-hugos-5th-birthday/index.md)0
-rw-r--r--docs/content/en/news/lets-celebrate-hugos-5th-birthday/sunset-get.png (renamed from content/en/news/lets-celebrate-hugos-5th-birthday/sunset-get.png)bin358844 -> 358844 bytes
-rw-r--r--docs/content/en/readfiles/README.md (renamed from content/en/readfiles/README.md)0
-rw-r--r--docs/content/en/readfiles/dateformatting.md (renamed from content/en/readfiles/dateformatting.md)0
-rw-r--r--docs/content/en/readfiles/index.md (renamed from content/en/readfiles/index.md)0
-rw-r--r--docs/content/en/readfiles/pages-vs-site-pages.md (renamed from content/en/readfiles/pages-vs-site-pages.md)0
-rw-r--r--docs/content/en/readfiles/sectionvars.md (renamed from content/en/readfiles/sectionvars.md)0
-rw-r--r--docs/content/en/readfiles/testing.txt (renamed from content/en/readfiles/testing.txt)0
-rw-r--r--docs/content/en/showcase/1password-support/bio.md (renamed from content/en/showcase/1password-support/bio.md)0
-rw-r--r--docs/content/en/showcase/1password-support/featured.png (renamed from content/en/showcase/1password-support/featured.png)bin165718 -> 165718 bytes
-rw-r--r--docs/content/en/showcase/1password-support/index.md (renamed from content/en/showcase/1password-support/index.md)0
-rw-r--r--docs/content/en/showcase/bypasscensorship/bio.md (renamed from content/en/showcase/bypasscensorship/bio.md)0
-rw-r--r--docs/content/en/showcase/bypasscensorship/featured.png (renamed from content/en/showcase/bypasscensorship/featured.png)bin180903 -> 180903 bytes
-rw-r--r--docs/content/en/showcase/bypasscensorship/index.md (renamed from content/en/showcase/bypasscensorship/index.md)0
-rw-r--r--docs/content/en/showcase/digitalgov/bio.md (renamed from content/en/showcase/digitalgov/bio.md)0
-rw-r--r--docs/content/en/showcase/digitalgov/featured.png (renamed from content/en/showcase/digitalgov/featured.png)bin65077 -> 65077 bytes
-rw-r--r--docs/content/en/showcase/digitalgov/index.md (renamed from content/en/showcase/digitalgov/index.md)0
-rw-r--r--docs/content/en/showcase/fireship/bio.md (renamed from content/en/showcase/fireship/bio.md)0
-rw-r--r--docs/content/en/showcase/fireship/featured.png (renamed from content/en/showcase/fireship/featured.png)bin136959 -> 136959 bytes
-rw-r--r--docs/content/en/showcase/fireship/index.md (renamed from content/en/showcase/fireship/index.md)0
-rw-r--r--docs/content/en/showcase/flesland-flis/bio.md (renamed from content/en/showcase/flesland-flis/bio.md)0
-rw-r--r--docs/content/en/showcase/flesland-flis/featured.png (renamed from content/en/showcase/flesland-flis/featured.png)bin309284 -> 309284 bytes
-rw-r--r--docs/content/en/showcase/flesland-flis/index.md (renamed from content/en/showcase/flesland-flis/index.md)0
-rw-r--r--docs/content/en/showcase/forestry/bio.md (renamed from content/en/showcase/forestry/bio.md)0
-rw-r--r--docs/content/en/showcase/forestry/featured.png (renamed from content/en/showcase/forestry/featured.png)bin227009 -> 227009 bytes
-rw-r--r--docs/content/en/showcase/forestry/index.md (renamed from content/en/showcase/forestry/index.md)0
-rw-r--r--docs/content/en/showcase/godot-tutorials/bio.md (renamed from content/en/showcase/godot-tutorials/bio.md)0
-rw-r--r--docs/content/en/showcase/godot-tutorials/featured.png (renamed from content/en/showcase/godot-tutorials/featured.png)bin72068 -> 72068 bytes
-rw-r--r--docs/content/en/showcase/godot-tutorials/index.md (renamed from content/en/showcase/godot-tutorials/index.md)0
-rw-r--r--docs/content/en/showcase/hapticmedia/bio.md (renamed from content/en/showcase/hapticmedia/bio.md)0
-rw-r--r--docs/content/en/showcase/hapticmedia/featured.png (renamed from content/en/showcase/hapticmedia/featured.png)bin543922 -> 543922 bytes
-rw-r--r--docs/content/en/showcase/hapticmedia/index.md (renamed from content/en/showcase/hapticmedia/index.md)0
-rw-r--r--docs/content/en/showcase/hartwell-insurance/bio.md (renamed from content/en/showcase/hartwell-insurance/bio.md)0
-rw-r--r--docs/content/en/showcase/hartwell-insurance/featured.png (renamed from content/en/showcase/hartwell-insurance/featured.png)bin446603 -> 446603 bytes
-rw-r--r--docs/content/en/showcase/hartwell-insurance/hartwell-columns.png (renamed from content/en/showcase/hartwell-insurance/hartwell-columns.png)bin89018 -> 89018 bytes
-rw-r--r--docs/content/en/showcase/hartwell-insurance/hartwell-lighthouse.png (renamed from content/en/showcase/hartwell-insurance/hartwell-lighthouse.png)bin9025 -> 9025 bytes
-rw-r--r--docs/content/en/showcase/hartwell-insurance/hartwell-webpagetest.png (renamed from content/en/showcase/hartwell-insurance/hartwell-webpagetest.png)bin11653 -> 11653 bytes
-rw-r--r--docs/content/en/showcase/hartwell-insurance/index.md (renamed from content/en/showcase/hartwell-insurance/index.md)0
-rw-r--r--docs/content/en/showcase/keycdn/bio.md (renamed from content/en/showcase/keycdn/bio.md)0
-rw-r--r--docs/content/en/showcase/keycdn/featured.png (renamed from content/en/showcase/keycdn/featured.png)bin358740 -> 358740 bytes
-rw-r--r--docs/content/en/showcase/keycdn/index.md (renamed from content/en/showcase/keycdn/index.md)0
-rw-r--r--docs/content/en/showcase/letsencrypt/bio.md (renamed from content/en/showcase/letsencrypt/bio.md)0
-rw-r--r--docs/content/en/showcase/letsencrypt/featured.png (renamed from content/en/showcase/letsencrypt/featured.png)bin147459 -> 147459 bytes
-rw-r--r--docs/content/en/showcase/letsencrypt/index.md (renamed from content/en/showcase/letsencrypt/index.md)0
-rw-r--r--docs/content/en/showcase/linode/bio.md (renamed from content/en/showcase/linode/bio.md)0
-rw-r--r--docs/content/en/showcase/linode/featured.png (renamed from content/en/showcase/linode/featured.png)bin90149 -> 90149 bytes
-rw-r--r--docs/content/en/showcase/linode/index.md (renamed from content/en/showcase/linode/index.md)0
-rw-r--r--docs/content/en/showcase/over/bio.md (renamed from content/en/showcase/over/bio.md)0
-rw-r--r--docs/content/en/showcase/over/featured-over.png (renamed from content/en/showcase/over/featured-over.png)bin194841 -> 194841 bytes
-rw-r--r--docs/content/en/showcase/over/index.md (renamed from content/en/showcase/over/index.md)0
-rw-r--r--docs/content/en/showcase/pharmaseal/bio.md (renamed from content/en/showcase/pharmaseal/bio.md)0
-rw-r--r--docs/content/en/showcase/pharmaseal/featured-pharmaseal.png (renamed from content/en/showcase/pharmaseal/featured-pharmaseal.png)bin769739 -> 769739 bytes
-rw-r--r--docs/content/en/showcase/pharmaseal/index.md (renamed from content/en/showcase/pharmaseal/index.md)0
-rw-r--r--docs/content/en/showcase/quiply-employee-communications-app/bio.md (renamed from content/en/showcase/quiply-employee-communications-app/bio.md)0
-rw-r--r--docs/content/en/showcase/quiply-employee-communications-app/featured.png (renamed from content/en/showcase/quiply-employee-communications-app/featured.png)bin631206 -> 631206 bytes
-rw-r--r--docs/content/en/showcase/quiply-employee-communications-app/index.md (renamed from content/en/showcase/quiply-employee-communications-app/index.md)0
-rw-r--r--docs/content/en/showcase/small-multiples/bio.md (renamed from content/en/showcase/small-multiples/bio.md)0
-rw-r--r--docs/content/en/showcase/small-multiples/featured-small-multiples.png (renamed from content/en/showcase/small-multiples/featured-small-multiples.png)bin374273 -> 374273 bytes
-rw-r--r--docs/content/en/showcase/small-multiples/index.md (renamed from content/en/showcase/small-multiples/index.md)0
-rw-r--r--docs/content/en/showcase/template/bio.md (renamed from content/en/showcase/template/bio.md)0
-rw-r--r--docs/content/en/showcase/template/featured-template.png (renamed from content/en/showcase/template/featured-template.png)bin41270 -> 41270 bytes
-rw-r--r--docs/content/en/showcase/template/index.md (renamed from content/en/showcase/template/index.md)0
-rw-r--r--docs/content/en/showcase/tomango/bio.md (renamed from content/en/showcase/tomango/bio.md)0
-rw-r--r--docs/content/en/showcase/tomango/featured.png (renamed from content/en/showcase/tomango/featured.png)bin143336 -> 143336 bytes
-rw-r--r--docs/content/en/showcase/tomango/index.md (renamed from content/en/showcase/tomango/index.md)0
-rw-r--r--docs/content/en/templates/404.md (renamed from content/en/templates/404.md)0
-rw-r--r--docs/content/en/templates/_index.md (renamed from content/en/templates/_index.md)0
-rw-r--r--docs/content/en/templates/alternatives.md (renamed from content/en/templates/alternatives.md)0
-rw-r--r--docs/content/en/templates/base.md (renamed from content/en/templates/base.md)0
-rw-r--r--docs/content/en/templates/data-templates.md (renamed from content/en/templates/data-templates.md)0
-rw-r--r--docs/content/en/templates/files.md (renamed from content/en/templates/files.md)0
-rw-r--r--docs/content/en/templates/homepage.md (renamed from content/en/templates/homepage.md)0
-rw-r--r--docs/content/en/templates/internal.md (renamed from content/en/templates/internal.md)0
-rw-r--r--docs/content/en/templates/introduction.md (renamed from content/en/templates/introduction.md)0
-rw-r--r--docs/content/en/templates/lists.md (renamed from content/en/templates/lists.md)0
-rw-r--r--docs/content/en/templates/lookup-order.md (renamed from content/en/templates/lookup-order.md)0
-rw-r--r--docs/content/en/templates/menu-templates.md (renamed from content/en/templates/menu-templates.md)0
-rw-r--r--docs/content/en/templates/ordering-and-grouping.md (renamed from content/en/templates/ordering-and-grouping.md)0
-rw-r--r--docs/content/en/templates/output-formats.md (renamed from content/en/templates/output-formats.md)0
-rw-r--r--docs/content/en/templates/pagination.md (renamed from content/en/templates/pagination.md)0
-rw-r--r--docs/content/en/templates/partials.md (renamed from content/en/templates/partials.md)0
-rw-r--r--docs/content/en/templates/render-hooks.md (renamed from content/en/templates/render-hooks.md)0
-rw-r--r--docs/content/en/templates/robots.md (renamed from content/en/templates/robots.md)0
-rw-r--r--docs/content/en/templates/rss.md (renamed from content/en/templates/rss.md)0
-rw-r--r--docs/content/en/templates/section-templates.md (renamed from content/en/templates/section-templates.md)0
-rw-r--r--docs/content/en/templates/shortcode-templates.md (renamed from content/en/templates/shortcode-templates.md)0
-rw-r--r--docs/content/en/templates/single-page-templates.md (renamed from content/en/templates/single-page-templates.md)0
-rw-r--r--docs/content/en/templates/sitemap-template.md (renamed from content/en/templates/sitemap-template.md)0
-rw-r--r--docs/content/en/templates/taxonomy-templates.md (renamed from content/en/templates/taxonomy-templates.md)0
-rw-r--r--docs/content/en/templates/template-debugging.md (renamed from content/en/templates/template-debugging.md)0
-rw-r--r--docs/content/en/templates/views.md (renamed from content/en/templates/views.md)0
-rw-r--r--docs/content/en/tools/_index.md (renamed from content/en/tools/_index.md)0
-rw-r--r--docs/content/en/tools/editors.md (renamed from content/en/tools/editors.md)0
-rw-r--r--docs/content/en/tools/frontends.md (renamed from content/en/tools/frontends.md)0
-rw-r--r--docs/content/en/tools/migrations.md (renamed from content/en/tools/migrations.md)0
-rw-r--r--docs/content/en/tools/other.md (renamed from content/en/tools/other.md)0
-rw-r--r--docs/content/en/tools/search.md (renamed from content/en/tools/search.md)0
-rw-r--r--docs/content/en/tools/starter-kits.md (renamed from content/en/tools/starter-kits.md)0
-rw-r--r--docs/content/en/troubleshooting/_index.md (renamed from content/en/troubleshooting/_index.md)0
-rw-r--r--docs/content/en/troubleshooting/build-performance.md (renamed from content/en/troubleshooting/build-performance.md)0
-rw-r--r--docs/content/en/troubleshooting/faq.md (renamed from content/en/troubleshooting/faq.md)0
-rw-r--r--docs/content/en/variables/_index.md (renamed from content/en/variables/_index.md)0
-rw-r--r--docs/content/en/variables/files.md (renamed from content/en/variables/files.md)0
-rw-r--r--docs/content/en/variables/git.md (renamed from content/en/variables/git.md)0
-rw-r--r--docs/content/en/variables/menus.md (renamed from content/en/variables/menus.md)0
-rw-r--r--docs/content/en/variables/page.md (renamed from content/en/variables/page.md)0
-rw-r--r--docs/content/en/variables/pages.md (renamed from content/en/variables/pages.md)0
-rw-r--r--docs/content/en/variables/shortcodes.md (renamed from content/en/variables/shortcodes.md)0
-rw-r--r--docs/content/en/variables/site.md (renamed from content/en/variables/site.md)0
-rw-r--r--docs/content/en/variables/sitemap.md (renamed from content/en/variables/sitemap.md)0
-rw-r--r--docs/content/en/variables/taxonomy.md (renamed from content/en/variables/taxonomy.md)0
-rw-r--r--docs/content/zh/_index.md (renamed from content/zh/_index.md)0
-rw-r--r--docs/content/zh/about/_index.md (renamed from content/zh/about/_index.md)0
-rw-r--r--docs/content/zh/content-management/_index.md (renamed from content/zh/content-management/_index.md)0
-rw-r--r--docs/content/zh/documentation.md (renamed from content/zh/documentation.md)0
-rw-r--r--docs/content/zh/news/_index.md (renamed from content/zh/news/_index.md)0
-rw-r--r--docs/content/zh/templates/_index.md (renamed from content/zh/templates/_index.md)0
-rw-r--r--docs/content/zh/templates/base.md (renamed from content/zh/templates/base.md)0
-rw-r--r--docs/content/zh/tools/_index.md (renamed from content/zh/tools/_index.md)0
-rw-r--r--docs/content/zh/tools/search.md (renamed from content/zh/tools/search.md)0
-rw-r--r--docs/data/articles.toml (renamed from data/articles.toml)0
-rw-r--r--docs/data/docs.json (renamed from data/docs.json)0
-rw-r--r--docs/data/homepagetweets.toml (renamed from data/homepagetweets.toml)0
-rw-r--r--docs/data/titles.toml (renamed from data/titles.toml)0
-rw-r--r--docs/go.mod5
-rw-r--r--docs/go.sum35
-rw-r--r--docs/hugo_stats.json (renamed from hugo_stats.json)0
-rw-r--r--docs/layouts/_default/_markup/render-codeblock-mermaid.html (renamed from layouts/_default/_markup/render-codeblock-mermaid.html)0
-rw-r--r--docs/layouts/index.rss.xml (renamed from layouts/index.rss.xml)0
-rw-r--r--docs/layouts/maintenance/list.html (renamed from layouts/maintenance/list.html)0
-rw-r--r--docs/layouts/partials/hooks/before-body-end.html (renamed from layouts/partials/hooks/before-body-end.html)0
-rw-r--r--docs/layouts/partials/maintenance-pages-table.html (renamed from layouts/partials/maintenance-pages-table.html)0
-rw-r--r--docs/layouts/shortcodes/asciicast.html (renamed from layouts/shortcodes/asciicast.html)0
-rw-r--r--docs/layouts/shortcodes/chroma-lexers.html (renamed from layouts/shortcodes/chroma-lexers.html)0
-rw-r--r--docs/layouts/shortcodes/code-toggle.html (renamed from layouts/shortcodes/code-toggle.html)0
-rw-r--r--docs/layouts/shortcodes/code.html (renamed from layouts/shortcodes/code.html)0
-rw-r--r--docs/layouts/shortcodes/content-tree.html (renamed from layouts/shortcodes/content-tree.html)0
-rw-r--r--docs/layouts/shortcodes/datatable-filtered.html (renamed from layouts/shortcodes/datatable-filtered.html)0
-rw-r--r--docs/layouts/shortcodes/datatable.html (renamed from layouts/shortcodes/datatable.html)0
-rw-r--r--docs/layouts/shortcodes/directoryindex.html (renamed from layouts/shortcodes/directoryindex.html)0
-rw-r--r--docs/layouts/shortcodes/docfile.html (renamed from layouts/shortcodes/docfile.html)0
-rw-r--r--docs/layouts/shortcodes/exfile.html (renamed from layouts/shortcodes/exfile.html)0
-rw-r--r--docs/layouts/shortcodes/exfm.html (renamed from layouts/shortcodes/exfm.html)0
-rw-r--r--docs/layouts/shortcodes/funcsig.html (renamed from layouts/shortcodes/funcsig.html)0
-rw-r--r--docs/layouts/shortcodes/getcontent.html (renamed from layouts/shortcodes/getcontent.html)0
-rw-r--r--docs/layouts/shortcodes/gh.html (renamed from layouts/shortcodes/gh.html)0
-rw-r--r--docs/layouts/shortcodes/ghrepo.html (renamed from layouts/shortcodes/ghrepo.html)0
-rw-r--r--docs/layouts/shortcodes/gomodules-info.html (renamed from layouts/shortcodes/gomodules-info.html)0
-rw-r--r--docs/layouts/shortcodes/imgproc.html (renamed from layouts/shortcodes/imgproc.html)0
-rw-r--r--docs/layouts/shortcodes/module-mounts-note.html (renamed from layouts/shortcodes/module-mounts-note.html)0
-rw-r--r--docs/layouts/shortcodes/new-in.html (renamed from layouts/shortcodes/new-in.html)0
-rw-r--r--docs/layouts/shortcodes/nohighlight.html (renamed from layouts/shortcodes/nohighlight.html)0
-rw-r--r--docs/layouts/shortcodes/note.html (renamed from layouts/shortcodes/note.html)0
-rw-r--r--docs/layouts/shortcodes/output.html (renamed from layouts/shortcodes/output.html)0
-rw-r--r--docs/layouts/shortcodes/page-kinds.html (renamed from layouts/shortcodes/page-kinds.html)0
-rw-r--r--docs/layouts/shortcodes/readfile.html (renamed from layouts/shortcodes/readfile.html)0
-rw-r--r--docs/layouts/shortcodes/tip.html (renamed from layouts/shortcodes/tip.html)0
-rw-r--r--docs/layouts/shortcodes/todo.html (renamed from layouts/shortcodes/todo.html)0
-rw-r--r--docs/layouts/shortcodes/warning.html (renamed from layouts/shortcodes/warning.html)0
-rw-r--r--docs/layouts/shortcodes/yt.html (renamed from layouts/shortcodes/yt.html)0
-rw-r--r--docs/layouts/template-func/page.html (renamed from layouts/template-func/page.html)0
-rw-r--r--docs/netlify.toml (renamed from netlify.toml)0
-rwxr-xr-xdocs/pull-theme.sh (renamed from pull-theme.sh)0
-rw-r--r--docs/resources/.gitattributes (renamed from resources/.gitattributes)0
-rw-r--r--docs/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.content (renamed from resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.content)0
-rw-r--r--docs/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.json (renamed from resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.json)0
-rw-r--r--docs/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.content (renamed from resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.content)0
-rw-r--r--docs/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.json (renamed from resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.json)0
-rw-r--r--docs/src/css/_chroma.css (renamed from src/css/_chroma.css)0
-rw-r--r--docs/src/package-lock.json (renamed from src/package-lock.json)0
-rw-r--r--docs/static/apple-touch-icon.png (renamed from static/apple-touch-icon.png)bin7993 -> 7993 bytes
-rw-r--r--docs/static/css/hugofont.css (renamed from static/css/hugofont.css)0
-rw-r--r--docs/static/css/style.css (renamed from static/css/style.css)0
-rw-r--r--docs/static/favicon.ico (renamed from static/favicon.ico)bin15086 -> 15086 bytes
-rw-r--r--docs/static/fonts/hugo.eot (renamed from static/fonts/hugo.eot)bin16380 -> 16380 bytes
-rw-r--r--docs/static/fonts/hugo.svg (renamed from static/fonts/hugo.svg)0
-rw-r--r--docs/static/fonts/hugo.ttf (renamed from static/fonts/hugo.ttf)bin16228 -> 16228 bytes
-rw-r--r--docs/static/fonts/hugo.woff (renamed from static/fonts/hugo.woff)bin11728 -> 11728 bytes
-rw-r--r--docs/static/images/blog/hugo-26-poster.png (renamed from static/images/blog/hugo-26-poster.png)bin69207 -> 69207 bytes
-rw-r--r--docs/static/images/blog/hugo-27-poster.png (renamed from static/images/blog/hugo-27-poster.png)bin79893 -> 79893 bytes
-rw-r--r--docs/static/images/blog/hugo-28-poster.png (renamed from static/images/blog/hugo-28-poster.png)bin116760 -> 116760 bytes
-rw-r--r--docs/static/images/blog/hugo-29-poster.png (renamed from static/images/blog/hugo-29-poster.png)bin123034 -> 123034 bytes
-rw-r--r--docs/static/images/blog/hugo-30-poster.png (renamed from static/images/blog/hugo-30-poster.png)bin123192 -> 123192 bytes
-rw-r--r--docs/static/images/blog/hugo-31-poster.png (renamed from static/images/blog/hugo-31-poster.png)bin65077 -> 65077 bytes
-rw-r--r--docs/static/images/blog/hugo-32-poster.png (renamed from static/images/blog/hugo-32-poster.png)bin95867 -> 95867 bytes
-rw-r--r--docs/static/images/blog/hugo-bug-poster.png (renamed from static/images/blog/hugo-bug-poster.png)bin74141 -> 74141 bytes
-rw-r--r--docs/static/images/blog/hugo-http2-push.png (renamed from static/images/blog/hugo-http2-push.png)bin20544 -> 20544 bytes
-rw-r--r--docs/static/images/blog/sunset.jpg (renamed from static/images/blog/sunset.jpg)bin34584 -> 34584 bytes
-rw-r--r--docs/static/images/contribute/development/accept-cla.png (renamed from static/images/contribute/development/accept-cla.png)bin24972 -> 24972 bytes
-rw-r--r--docs/static/images/contribute/development/copy-remote-url.png (renamed from static/images/contribute/development/copy-remote-url.png)bin7232 -> 7232 bytes
-rw-r--r--docs/static/images/contribute/development/forking-a-repository.png (renamed from static/images/contribute/development/forking-a-repository.png)bin4608 -> 4608 bytes
-rw-r--r--docs/static/images/contribute/development/open-pull-request.png (renamed from static/images/contribute/development/open-pull-request.png)bin46508 -> 46508 bytes
-rw-r--r--docs/static/images/gohugoio-card-1.png (renamed from static/images/gohugoio-card-1.png)bin73881 -> 73881 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-server.png (renamed from static/images/hosting-and-deployment/deployment-with-nanobox/hugo-server.png)bin74234 -> 74234 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-with-nanobox.png (renamed from static/images/hosting-and-deployment/deployment-with-nanobox/hugo-with-nanobox.png)bin5613 -> 5613 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-deploy-dry-run.png (renamed from static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-deploy-dry-run.png)bin37494 -> 37494 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-run.png (renamed from static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-run.png)bin69079 -> 69079 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/adding-a-github-pages-step.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/adding-a-github-pages-step.png)bin41068 -> 41068 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/adding-the-project-to-github.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/adding-the-project-to-github.png)bin50615 -> 50615 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/and-we-ve-got-an-app.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/and-we-ve-got-an-app.png)bin32517 -> 32517 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/configure-the-deploy-step.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/configure-the-deploy-step.png)bin68953 -> 68953 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/creating-a-basic-hugo-site.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/creating-a-basic-hugo-site.png)bin27450 -> 27450 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/public-or-not.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/public-or-not.png)bin11394 -> 11394 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/using-hugo-build.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/using-hugo-build.png)bin11670 -> 11670 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-access.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-access.png)bin57084 -> 57084 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-account-settings.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-account-settings.png)bin6725 -> 6725 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-add-app.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-add-app.png)bin43674 -> 43674 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-git-connections.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-git-connections.png)bin25260 -> 25260 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-search.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-search.png)bin28724 -> 28724 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-owner.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-owner.png)bin18047 -> 18047 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-repository.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-repository.png)bin28485 -> 28485 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up-page.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up-page.png)bin15601 -> 15601 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up.png)bin124423 -> 124423 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/deployment-with-wercker/werckeryml.png (renamed from static/images/hosting-and-deployment/deployment-with-wercker/werckeryml.png)bin45528 -> 45528 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-build-settings.png (renamed from static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-build-settings.png)bin67178 -> 67178 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-connect-repo.gif (renamed from static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-connect-repo.gif)bin2880775 -> 2880775 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-gettingstarted.png (renamed from static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-gettingstarted.png)bin57947 -> 57947 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-blog-post.png (renamed from static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-blog-post.png)bin37585 -> 37585 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-create-repo.png (renamed from static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-create-repo.png)bin24689 -> 24689 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-keycdn/keycdn-pull-zone.png (renamed from static/images/hosting-and-deployment/hosting-on-keycdn/keycdn-pull-zone.png)bin114748 -> 114748 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-keycdn/secret-api-key.png (renamed from static/images/hosting-and-deployment/hosting-on-keycdn/secret-api-key.png)bin118836 -> 118836 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-keycdn/secret-zone-id.png (renamed from static/images/hosting-and-deployment/hosting-on-keycdn/secret-zone-id.png)bin113753 -> 113753 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-add-new-site.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-add-new-site.jpg)bin25643 -> 25643 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-authorize-added-permissions.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-authorize-added-permissions.jpg)bin46713 -> 46713 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-1.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-1.jpg)bin37855 -> 37855 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-2.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-2.jpg)bin42233 -> 42233 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-3.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-3.jpg)bin36939 -> 36939 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploy-published.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploy-published.jpg)bin18930 -> 18930 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploying-site.gif (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploying-site.gif)bin783315 -> 783315 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-first-authorize.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-first-authorize.jpg)bin44374 -> 44374 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-live-site.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-live-site.jpg)bin37306 -> 37306 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-push-to-deploy.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-push-to-deploy.jpg)bin21536 -> 21536 bytes
-rw-r--r--docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-signup.jpg (renamed from static/images/hosting-and-deployment/hosting-on-netlify/netlify-signup.jpg)bin37118 -> 37118 bytes
-rw-r--r--docs/static/images/hugo-content-bundles.png (renamed from static/images/hugo-content-bundles.png)bin34394 -> 34394 bytes
-rw-r--r--docs/static/images/icon-custom-outputs.svg (renamed from static/images/icon-custom-outputs.svg)0
-rw-r--r--docs/static/images/site-hierarchy.svg (renamed from static/images/site-hierarchy.svg)0
-rw-r--r--docs/static/img/examples/trees.svg (renamed from static/img/examples/trees.svg)0
-rw-r--r--docs/static/img/hugo-logo-med.png (renamed from static/img/hugo-logo-med.png)bin17402 -> 17402 bytes
-rw-r--r--docs/static/img/hugo-logo.png (renamed from static/img/hugo-logo.png)bin10003 -> 10003 bytes
-rw-r--r--docs/static/img/hugo.png (renamed from static/img/hugo.png)bin18210 -> 18210 bytes
-rw-r--r--docs/static/img/hugoSM.png (renamed from static/img/hugoSM.png)bin1869 -> 1869 bytes
-rw-r--r--docs/static/npmjs/index.html (renamed from static/npmjs/index.html)0
-rw-r--r--docs/static/share/hugo-tall.png (renamed from static/share/hugo-tall.png)bin9971 -> 9971 bytes
-rw-r--r--docs/static/share/made-with-hugo-dark.png (renamed from static/share/made-with-hugo-dark.png)bin8764 -> 8764 bytes
-rw-r--r--docs/static/share/made-with-hugo-long-dark.png (renamed from static/share/made-with-hugo-long-dark.png)bin9116 -> 9116 bytes
-rw-r--r--docs/static/share/made-with-hugo-long.png (renamed from static/share/made-with-hugo-long.png)bin9318 -> 9318 bytes
-rw-r--r--docs/static/share/made-with-hugo.png (renamed from static/share/made-with-hugo.png)bin8900 -> 8900 bytes
-rw-r--r--docs/static/share/powered-by-hugo-dark.png (renamed from static/share/powered-by-hugo-dark.png)bin3545 -> 3545 bytes
-rw-r--r--docs/static/share/powered-by-hugo-long-dark.png (renamed from static/share/powered-by-hugo-long-dark.png)bin3857 -> 3857 bytes
-rw-r--r--docs/static/share/powered-by-hugo-long.png (renamed from static/share/powered-by-hugo-long.png)bin3773 -> 3773 bytes
-rw-r--r--docs/static/share/powered-by-hugo.png (renamed from static/share/powered-by-hugo.png)bin3527 -> 3527 bytes
-rw-r--r--docshelper/docs.go51
-rw-r--r--go.mod128
-rw-r--r--go.sum1070
-rwxr-xr-xgoreleaser-hook-post-linux.sh9
-rw-r--r--goreleaser.yml197
-rw-r--r--helpers/content.go301
-rw-r--r--helpers/content_test.go244
-rw-r--r--helpers/docshelper.go37
-rw-r--r--helpers/emoji.go96
-rw-r--r--helpers/emoji_test.go143
-rw-r--r--helpers/general.go542
-rw-r--r--helpers/general_test.go467
-rw-r--r--helpers/path.go486
-rw-r--r--helpers/path_test.go560
-rw-r--r--helpers/pathspec.go87
-rw-r--r--helpers/pathspec_test.go62
-rw-r--r--helpers/processing_stats.go120
-rw-r--r--helpers/testhelpers_test.go49
-rw-r--r--helpers/url.go241
-rw-r--r--helpers/url_test.go260
-rw-r--r--htesting/hqt/checkers.go135
-rw-r--r--htesting/test_helpers.go144
-rw-r--r--htesting/test_helpers_test.go31
-rw-r--r--htesting/testdata_builder.go59
-rw-r--r--hugofs/createcounting_fs.go107
-rw-r--r--hugofs/decorators.go240
-rw-r--r--hugofs/fileinfo.go303
-rw-r--r--hugofs/fileinfo_test.go51
-rw-r--r--hugofs/filename_filter_fs.go178
-rw-r--r--hugofs/filename_filter_fs_test.go83
-rw-r--r--hugofs/files/classifier.go224
-rw-r--r--hugofs/files/classifier_test.go59
-rw-r--r--hugofs/filter_fs.go344
-rw-r--r--hugofs/filter_fs_test.go46
-rw-r--r--hugofs/fs.go225
-rw-r--r--hugofs/fs_test.go68
-rw-r--r--hugofs/glob.go84
-rw-r--r--hugofs/glob/filename_filter.go159
-rw-r--r--hugofs/glob/filename_filter_test.go70
-rw-r--r--hugofs/glob/glob.go166
-rw-r--r--hugofs/glob/glob_test.go103
-rw-r--r--hugofs/glob_test.go60
-rw-r--r--hugofs/hashing_fs.go97
-rw-r--r--hugofs/hashing_fs_test.go52
-rw-r--r--hugofs/language_merge.go39
-rw-r--r--hugofs/noop_fs.go86
-rw-r--r--hugofs/nosymlink_fs.go161
-rw-r--r--hugofs/nosymlink_test.go146
-rw-r--r--hugofs/rootmapping_fs.go652
-rw-r--r--hugofs/rootmapping_fs_test.go553
-rw-r--r--hugofs/slice_fs.go302
-rw-r--r--hugofs/stacktracer_fs.go77
-rw-r--r--hugofs/walk.go324
-rw-r--r--hugofs/walk_test.go276
-rw-r--r--hugolib/404_test.go79
-rw-r--r--hugolib/alias.go173
-rw-r--r--hugolib/alias_test.go159
-rw-r--r--hugolib/assets/images/sunset.jpgbin0 -> 90587 bytes
-rw-r--r--hugolib/breaking_changes_test.go130
-rw-r--r--hugolib/cascade_test.go630
-rw-r--r--hugolib/codeowners.go69
-rw-r--r--hugolib/collections.go46
-rw-r--r--hugolib/collections_test.go217
-rw-r--r--hugolib/config.go517
-rw-r--r--hugolib/config_test.go784
-rw-r--r--hugolib/configdir_test.go152
-rw-r--r--hugolib/content_factory.go177
-rw-r--r--hugolib/content_factory_test.go78
-rw-r--r--hugolib/content_map.go1061
-rw-r--r--hugolib/content_map_page.go1039
-rw-r--r--hugolib/content_map_test.go456
-rw-r--r--hugolib/content_render_hooks_test.go429
-rw-r--r--hugolib/datafiles_test.go444
-rw-r--r--hugolib/dates_test.go275
-rw-r--r--hugolib/disableKinds_test.go417
-rw-r--r--hugolib/embedded_shortcodes_test.go418
-rw-r--r--hugolib/embedded_templates_test.go169
-rw-r--r--hugolib/fileInfo.go115
-rw-r--r--hugolib/fileInfo_test.go31
-rw-r--r--hugolib/filesystems/basefs.go846
-rw-r--r--hugolib/filesystems/basefs_test.go460
-rw-r--r--hugolib/gitinfo.go46
-rw-r--r--hugolib/hugo_modules_test.go1173
-rw-r--r--hugolib/hugo_sites.go1160
-rw-r--r--hugolib/hugo_sites_build.go507
-rw-r--r--hugolib/hugo_sites_build_errors_test.go647
-rw-r--r--hugolib/hugo_sites_build_test.go1407
-rw-r--r--hugolib/hugo_sites_multihost_test.go119
-rw-r--r--hugolib/hugo_sites_rebuild_test.go316
-rw-r--r--hugolib/hugo_smoke_test.go443
-rw-r--r--hugolib/image_test.go247
-rw-r--r--hugolib/integrationtest_builder.go491
-rw-r--r--hugolib/language_content_dir_test.go526
-rw-r--r--hugolib/language_test.go139
-rw-r--r--hugolib/menu_test.go590
-rw-r--r--hugolib/minify_publisher_test.go63
-rw-r--r--hugolib/mount_filters_test.go119
-rw-r--r--hugolib/multilingual.go82
-rw-r--r--hugolib/page.go982
-rw-r--r--hugolib/page__common.go156
-rw-r--r--hugolib/page__content.go130
-rw-r--r--hugolib/page__data.go66
-rw-r--r--hugolib/page__menus.go70
-rw-r--r--hugolib/page__meta.go830
-rw-r--r--hugolib/page__new.go213
-rw-r--r--hugolib/page__output.go107
-rw-r--r--hugolib/page__paginator.go111
-rw-r--r--hugolib/page__paths.go165
-rw-r--r--hugolib/page__per_output.go783
-rw-r--r--hugolib/page__position.go75
-rw-r--r--hugolib/page__ref.go114
-rw-r--r--hugolib/page__tree.go187
-rw-r--r--hugolib/page_kinds.go52
-rw-r--r--hugolib/page_permalink_test.go149
-rw-r--r--hugolib/page_test.go2003
-rw-r--r--hugolib/page_unwrap.go50
-rw-r--r--hugolib/page_unwrap_test.go38
-rw-r--r--hugolib/pagebundler_test.go1348
-rw-r--r--hugolib/pagecollections.go340
-rw-r--r--hugolib/pagecollections_test.go420
-rw-r--r--hugolib/pages_capture.go580
-rw-r--r--hugolib/pages_capture_test.go79
-rw-r--r--hugolib/pages_language_merge_test.go196
-rw-r--r--hugolib/pages_process.go210
-rw-r--r--hugolib/pages_test.go119
-rw-r--r--hugolib/paginator_test.go138
-rw-r--r--hugolib/paths/baseURL.go87
-rw-r--r--hugolib/paths/baseURL_test.go67
-rw-r--r--hugolib/paths/paths.go274
-rw-r--r--hugolib/paths/paths_test.go50
-rw-r--r--hugolib/permalinker.go22
-rw-r--r--hugolib/prune_resources.go19
-rw-r--r--hugolib/renderstring_test.go192
-rw-r--r--hugolib/resource_chain_test.go756
-rw-r--r--hugolib/robotstxt_test.go41
-rw-r--r--hugolib/rss_test.go100
-rw-r--r--hugolib/securitypolicies_test.go202
-rw-r--r--hugolib/shortcode.go726
-rw-r--r--hugolib/shortcode_page.go75
-rw-r--r--hugolib/shortcode_test.go1055
-rw-r--r--hugolib/site.go1922
-rw-r--r--hugolib/siteJSONEncode_test.go44
-rw-r--r--hugolib/site_benchmark_new_test.go558
-rw-r--r--hugolib/site_output.go108
-rw-r--r--hugolib/site_output_test.go648
-rw-r--r--hugolib/site_render.go407
-rw-r--r--hugolib/site_sections.go28
-rw-r--r--hugolib/site_sections_test.go375
-rw-r--r--hugolib/site_stats_test.go98
-rw-r--r--hugolib/site_test.go1111
-rw-r--r--hugolib/site_url_test.go187
-rw-r--r--hugolib/sitemap_test.go120
-rw-r--r--hugolib/taxonomy.go173
-rw-r--r--hugolib/taxonomy_test.go696
-rw-r--r--hugolib/template_test.go774
-rw-r--r--hugolib/testdata/cities.csv130
-rw-r--r--hugolib/testdata/fruits.json5
-rw-r--r--hugolib/testdata/redis.cn.md697
-rw-r--r--hugolib/testdata/sunset.jpgbin0 -> 90587 bytes
-rw-r--r--hugolib/testdata/what-is-markdown.md9702
-rw-r--r--hugolib/testhelpers_test.go1117
-rw-r--r--hugolib/testsite/.gitignore1
-rw-r--r--hugolib/testsite/CODEOWNERS1
-rw-r--r--hugolib/testsite/content/first-post.md4
-rw-r--r--hugolib/testsite/content_nn/first-post.md4
-rw-r--r--hugolib/translations.go57
-rw-r--r--identity/identity.go164
-rw-r--r--identity/identity_test.go89
-rw-r--r--langs/config.go226
-rw-r--r--langs/i18n/i18n.go196
-rw-r--r--langs/i18n/i18n_test.go552
-rw-r--r--langs/i18n/integration_test.go57
-rw-r--r--langs/i18n/translationProvider.go143
-rw-r--r--langs/language.go331
-rw-r--r--langs/language_test.go108
-rw-r--r--lazy/init.go208
-rw-r--r--lazy/init_test.go241
-rw-r--r--lazy/once.go68
-rw-r--r--livereload/connection.go66
-rw-r--r--livereload/hub.go56
-rw-r--r--livereload/livereload.go193
-rw-r--r--magefile.go375
-rw-r--r--main.go32
-rw-r--r--markup/asciidocext/asciidocext_config/config.go79
-rw-r--r--markup/asciidocext/convert.go322
-rw-r--r--markup/asciidocext/convert_test.go463
-rw-r--r--markup/blackfriday/anchors.go39
-rw-r--r--markup/converter/converter.go140
-rw-r--r--markup/converter/hooks/hooks.go111
-rw-r--r--markup/goldmark/autoid.go133
-rw-r--r--markup/goldmark/autoid_test.go143
-rw-r--r--markup/goldmark/codeblocks/integration_test.go352
-rw-r--r--markup/goldmark/codeblocks/render.go203
-rw-r--r--markup/goldmark/codeblocks/transform.go54
-rw-r--r--markup/goldmark/convert.go231
-rw-r--r--markup/goldmark/convert_test.go501
-rw-r--r--markup/goldmark/goldmark_config/config.go98
-rw-r--r--markup/goldmark/integration_test.go577
-rw-r--r--markup/goldmark/internal/extensions/attributes/attributes.go125
-rw-r--r--markup/goldmark/internal/render/context.go81
-rw-r--r--markup/goldmark/render_hooks.go422
-rw-r--r--markup/goldmark/toc.go128
-rw-r--r--markup/goldmark/toc_test.go137
-rw-r--r--markup/highlight/config.go292
-rw-r--r--markup/highlight/config_test.go56
-rw-r--r--markup/highlight/highlight.go363
-rw-r--r--markup/highlight/highlight_test.go149
-rw-r--r--markup/highlight/integration_test.go85
-rw-r--r--markup/internal/attributes/attributes.go221
-rw-r--r--markup/internal/external.go71
-rw-r--r--markup/markup.go132
-rw-r--r--markup/markup_config/config.go94
-rw-r--r--markup/markup_config/config_test.go55
-rw-r--r--markup/markup_test.go46
-rw-r--r--markup/org/convert.go73
-rw-r--r--markup/org/convert_test.go40
-rw-r--r--markup/pandoc/convert.go90
-rw-r--r--markup/pandoc/convert_test.go42
-rw-r--r--markup/rst/convert.go135
-rw-r--r--markup/rst/convert_test.go47
-rw-r--r--markup/tableofcontents/tableofcontents.go170
-rw-r--r--markup/tableofcontents/tableofcontents_test.go155
-rw-r--r--media/docshelper.go13
-rw-r--r--media/mediaType.go536
-rw-r--r--media/mediaType_test.go346
-rw-r--r--media/testdata/fake.jsbin0 -> 13327 bytes
-rw-r--r--media/testdata/fake.png3
-rw-r--r--media/testdata/reosurce.otfbin0 -> 696 bytes
-rw-r--r--media/testdata/resource.bmpbin0 -> 65334 bytes
-rw-r--r--media/testdata/resource.css8
-rw-r--r--media/testdata/resource.csv130
-rw-r--r--media/testdata/resource.gifbin0 -> 161 bytes
-rw-r--r--media/testdata/resource.ics24
-rw-r--r--media/testdata/resource.jpebin0 -> 116955 bytes
-rw-r--r--media/testdata/resource.jpgbin0 -> 116955 bytes
-rw-r--r--media/testdata/resource.js3
-rw-r--r--media/testdata/resource.json14
-rw-r--r--media/testdata/resource.pdf198
-rw-r--r--media/testdata/resource.pngbin0 -> 13327 bytes
-rw-r--r--media/testdata/resource.rss20
-rw-r--r--media/testdata/resource.sass6
-rw-r--r--media/testdata/resource.scss7
-rw-r--r--media/testdata/resource.svg5
-rw-r--r--media/testdata/resource.ttfbin0 -> 552 bytes
-rw-r--r--media/testdata/resource.webpbin0 -> 59826 bytes
-rw-r--r--media/testdata/resource.xml7
-rwxr-xr-xmerge-release.sh23
-rw-r--r--metrics/metrics.go293
-rw-r--r--metrics/metrics_test.go69
-rw-r--r--minifiers/config.go131
-rw-r--r--minifiers/config_test.go63
-rw-r--r--minifiers/minifiers.go135
-rw-r--r--minifiers/minifiers_test.go220
-rw-r--r--modules/client.go836
-rw-r--r--modules/client_test.go213
-rw-r--r--modules/collect.go722
-rw-r--r--modules/collect_test.go51
-rw-r--r--modules/config.go421
-rw-r--r--modules/config_test.go161
-rw-r--r--modules/module.go188
-rw-r--r--modules/npm/package_builder.go237
-rw-r--r--modules/npm/package_builder_test.go95
-rw-r--r--navigation/menu.go315
-rw-r--r--navigation/menu_cache.go113
-rw-r--r--navigation/menu_cache_test.go81
-rw-r--r--navigation/pagemenus.go230
-rw-r--r--output/docshelper.go102
-rw-r--r--output/layout.go302
-rw-r--r--output/layout_test.go1008
-rw-r--r--output/outputFormat.go412
-rw-r--r--output/outputFormat_test.go267
-rw-r--r--parser/frontmatter.go118
-rw-r--r--parser/frontmatter_test.go78
-rw-r--r--parser/lowercase_camel_json.go59
-rw-r--r--parser/metadecoders/decoder.go311
-rw-r--r--parser/metadecoders/decoder_test.go299
-rw-r--r--parser/metadecoders/format.go118
-rw-r--r--parser/metadecoders/format_test.go86
-rw-r--r--parser/pageparser/doc.go18
-rw-r--r--parser/pageparser/item.go182
-rw-r--r--parser/pageparser/item_test.go34
-rw-r--r--parser/pageparser/itemtype_string.go43
-rw-r--r--parser/pageparser/pagelexer.go557
-rw-r--r--parser/pageparser/pagelexer_intro.go189
-rw-r--r--parser/pageparser/pagelexer_shortcode.go364
-rw-r--r--parser/pageparser/pagelexer_test.go28
-rw-r--r--parser/pageparser/pageparser.go195
-rw-r--r--parser/pageparser/pageparser_intro_test.go126
-rw-r--r--parser/pageparser/pageparser_main_test.go40
-rw-r--r--parser/pageparser/pageparser_shortcode_test.go279
-rw-r--r--parser/pageparser/pageparser_test.go90
-rw-r--r--publisher/htmlElementsCollector.go443
-rw-r--r--publisher/htmlElementsCollector_test.go216
-rw-r--r--publisher/publisher.go190
-rwxr-xr-xpull-docs.sh7
-rw-r--r--related/inverted_index.go458
-rw-r--r--related/inverted_index_test.go316
-rw-r--r--releaser/git.go253
-rw-r--r--releaser/git_test.go86
-rw-r--r--releaser/github.go143
-rw-r--r--releaser/github_test.go46
-rw-r--r--releaser/releasenotes_writer.go191
-rw-r--r--releaser/releasenotes_writer_test.go46
-rw-r--r--releaser/releaser.go304
-rw-r--r--resources/errorResource.go132
-rw-r--r--resources/image.go452
-rw-r--r--resources/image_cache.go168
-rw-r--r--resources/image_extended_test.go42
-rw-r--r--resources/image_test.go843
-rw-r--r--resources/images/color.go83
-rw-r--r--resources/images/color_test.go89
-rw-r--r--resources/images/config.go462
-rw-r--r--resources/images/config_test.go158
-rw-r--r--resources/images/exif/exif.go272
-rw-r--r--resources/images/exif/exif_test.go135
-rw-r--r--resources/images/filters.go236
-rw-r--r--resources/images/filters_test.go33
-rw-r--r--resources/images/image.go410
-rw-r--r--resources/images/image_resource.go53
-rw-r--r--resources/images/overlay.go43
-rw-r--r--resources/images/resampling.go214
-rw-r--r--resources/images/smartcrop.go104
-rw-r--r--resources/images/text.go108
-rw-r--r--resources/images/webp/webp.go36
-rw-r--r--resources/images/webp/webp_notavailable.go36
-rw-r--r--resources/integration_test.go96
-rw-r--r--resources/internal/key.go42
-rw-r--r--resources/internal/key_test.go36
-rw-r--r--resources/jsconfig/jsconfig.go92
-rw-r--r--resources/jsconfig/jsconfig_test.go35
-rw-r--r--resources/page/integration_test.go138
-rw-r--r--resources/page/page.go420
-rw-r--r--resources/page/page_author.go44
-rw-r--r--resources/page/page_data.go42
-rw-r--r--resources/page/page_data_test.go55
-rw-r--r--resources/page/page_generate/.gitignore1
-rw-r--r--resources/page/page_generate/generate_page_wrappers.go280
-rw-r--r--resources/page/page_kinds.go47
-rw-r--r--resources/page/page_kinds_test.go37
-rw-r--r--resources/page/page_lazy_contentprovider.go124
-rw-r--r--resources/page/page_marshaljson.autogen.go211
-rw-r--r--resources/page/page_matcher.go142
-rw-r--r--resources/page/page_matcher_test.go83
-rw-r--r--resources/page/page_nop.go515
-rw-r--r--resources/page/page_outputformat.go95
-rw-r--r--resources/page/page_paths.go342
-rw-r--r--resources/page/page_paths_test.go293
-rw-r--r--resources/page/page_wrappers.autogen.go25
-rw-r--r--resources/page/pagegroup.go460
-rw-r--r--resources/page/pagegroup_test.go466
-rw-r--r--resources/page/pagemeta/page_frontmatter.go427
-rw-r--r--resources/page/pagemeta/page_frontmatter_test.go257
-rw-r--r--resources/page/pagemeta/pagemeta.go110
-rw-r--r--resources/page/pagemeta/pagemeta_test.go92
-rw-r--r--resources/page/pages.go157
-rw-r--r--resources/page/pages_cache.go135
-rw-r--r--resources/page/pages_cache_test.go87
-rw-r--r--resources/page/pages_language_merge.go62
-rw-r--r--resources/page/pages_prev_next.go34
-rw-r--r--resources/page/pages_prev_next_test.go91
-rw-r--r--resources/page/pages_related.go195
-rw-r--r--resources/page/pages_related_test.go86
-rw-r--r--resources/page/pages_sort.go412
-rw-r--r--resources/page/pages_sort_search.go125
-rw-r--r--resources/page/pages_sort_search_test.go122
-rw-r--r--resources/page/pages_sort_test.go289
-rw-r--r--resources/page/pages_test.go72
-rw-r--r--resources/page/pagination.go396
-rw-r--r--resources/page/pagination_test.go310
-rw-r--r--resources/page/permalinks.go371
-rw-r--r--resources/page/permalinks_test.go241
-rw-r--r--resources/page/site.go167
-rw-r--r--resources/page/testhelpers_test.go622
-rw-r--r--resources/page/weighted.go138
-rw-r--r--resources/page/zero_file.autogen.go88
-rw-r--r--resources/post_publish.go51
-rw-r--r--resources/postpub/fields.go59
-rw-r--r--resources/postpub/fields_test.go45
-rw-r--r--resources/postpub/postpub.go181
-rw-r--r--resources/resource.go709
-rw-r--r--resources/resource/dates.go93
-rw-r--r--resources/resource/params.go33
-rw-r--r--resources/resource/resource_helpers.go70
-rw-r--r--resources/resource/resources.go198
-rw-r--r--resources/resource/resourcetypes.go224
-rw-r--r--resources/resource_cache.go305
-rw-r--r--resources/resource_cache_test.go58
-rw-r--r--resources/resource_factories/bundler/bundler.go148
-rw-r--r--resources/resource_factories/bundler/bundler_test.go40
-rw-r--r--resources/resource_factories/create/create.go151
-rw-r--r--resources/resource_factories/create/remote.go279
-rw-r--r--resources/resource_factories/create/remote_test.go96
-rw-r--r--resources/resource_metadata.go144
-rw-r--r--resources/resource_metadata_test.go221
-rw-r--r--resources/resource_spec.go345
-rw-r--r--resources/resource_test.go270
-rw-r--r--resources/resource_transformers/babel/babel.go239
-rw-r--r--resources/resource_transformers/babel/integration_test.go94
-rw-r--r--resources/resource_transformers/htesting/testhelpers.go78
-rw-r--r--resources/resource_transformers/integrity/integrity.go120
-rw-r--r--resources/resource_transformers/integrity/integrity_test.go69
-rw-r--r--resources/resource_transformers/js/build.go222
-rw-r--r--resources/resource_transformers/js/build_test.go14
-rw-r--r--resources/resource_transformers/js/integration_test.go261
-rw-r--r--resources/resource_transformers/js/options.go424
-rw-r--r--resources/resource_transformers/js/options_test.go184
-rw-r--r--resources/resource_transformers/minifier/integration_test.go47
-rw-r--r--resources/resource_transformers/minifier/minify.go59
-rw-r--r--resources/resource_transformers/minifier/minify_test.go42
-rw-r--r--resources/resource_transformers/postcss/integration_test.go244
-rw-r--r--resources/resource_transformers/postcss/postcss.go440
-rw-r--r--resources/resource_transformers/postcss/postcss_test.go166
-rw-r--r--resources/resource_transformers/templates/execute_as_template.go74
-rw-r--r--resources/resource_transformers/templates/integration_test.go77
-rw-r--r--resources/resource_transformers/tocss/dartsass/client.go143
-rw-r--r--resources/resource_transformers/tocss/dartsass/integration_test.go273
-rw-r--r--resources/resource_transformers/tocss/dartsass/transform.go182
-rw-r--r--resources/resource_transformers/tocss/scss/client.go90
-rw-r--r--resources/resource_transformers/tocss/scss/client_extended.go60
-rw-r--r--resources/resource_transformers/tocss/scss/client_notavailable.go31
-rw-r--r--resources/resource_transformers/tocss/scss/client_test.go49
-rw-r--r--resources/resource_transformers/tocss/scss/integration_test.go247
-rw-r--r--resources/resource_transformers/tocss/scss/tocss.go204
-rw-r--r--resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpgbin0 -> 90587 bytes
-rw-r--r--resources/testdata/circle.svg5
-rw-r--r--resources/testdata/fuzzy-cirlcle.pngbin0 -> 26792 bytes
-rw-r--r--resources/testdata/giphy.gifbin0 -> 52213 bytes
-rw-r--r--resources/testdata/gohugoio-card.gifbin0 -> 10820 bytes
-rw-r--r--resources/testdata/gohugoio.pngbin0 -> 73886 bytes
-rw-r--r--resources/testdata/gohugoio24.pngbin0 -> 267952 bytes
-rw-r--r--resources/testdata/gohugoio8.pngbin0 -> 73538 bytes
-rw-r--r--resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_200x0_resize_box.gifbin0 -> 73619 bytes
-rw-r--r--resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_512x0_resize_box.gifbin0 -> 310936 bytes
-rw-r--r--resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_100x0_resize_box.gifbin0 -> 3555 bytes
-rw-r--r--resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_220x0_resize_box.gifbin0 -> 12249 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_100x100_fill_box_center_3.pngbin0 -> 11002 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_1a923841aa34545db29f46a8fc4c5b0d.pngbin0 -> 46054 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x0_resize_q50_r90_box_3.pngbin0 -> 62018 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x100_resize_box_3.pngbin0 -> 20979 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x100_fill_nearestneighbor_topleft_3.pngbin0 -> 23035 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fill_gaussian_smart1_3.pngbin0 -> 46395 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fit_linear_3.pngbin0 -> 38597 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_324b4d42c8746a684068d123fad8b744.pngbin0 -> 78589 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_bottomleft_3.pngbin0 -> 60099 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_center_3.pngbin0 -> 60099 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_43055c40cb4a15bd8491bfc502799f43.pngbin0 -> 45378 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_4ea8f246299cc5fba9744bdf162bd57d.pngbin0 -> 8960 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_600x0_resize_box_3.pngbin0 -> 112941 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_60c098f0ca6626668d9e3ad6bfb38b5b.pngbin0 -> 64612 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_6575f3a3c39a30cba9d76a6045c36de6.pngbin0 -> 61497 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_8166ccaf22bdabb94c9bb90bffe64133.pngbin0 -> 65067 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9165e5559db8ba31a401327b5617c098.pngbin0 -> 85767 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9a8d95423df65a9c230a4cc88056c13a.pngbin0 -> 58718 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a38a1924befb1721a09be7d432f5f70f.pngbin0 -> 60267 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a487ef4bea3dba1e1a84be5358cfef39.pngbin0 -> 60182 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a751b6cd969d7feab12540a8bb0ca927.pngbin0 -> 53835 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_abcdd770eaed9301cfff4bc2f96459ba.pngbin0 -> 62941 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_aeaaf23afe6fb4702bd3992426d0cad3.pngbin0 -> 62049 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_bdde5e36f15689c1451933f92fd357b3.pngbin0 -> 59041 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d111079da5d8d143b6cae10d6fedbc24.pngbin0 -> 44573 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d87fd348ad697a9b16399709441d9d56.pngbin0 -> 58776 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_e8ef2efdde4357a79694ea9c2be82f63.pngbin0 -> 34370 bytes
-rw-r--r--resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_ee57777f148caaa6993972d9709fdf2d.pngbin0 -> 62162 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_00cd4ff18b53ecbd78e42aefe5fbf522.pngbin0 -> 24546 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_100x100_fill_box_center_3.pngbin0 -> 5969 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x0_resize_q50_r90_box_3.pngbin0 -> 25346 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x100_resize_box_3.pngbin0 -> 10198 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_276650b97daa7ae98e79b929d7f87c19.pngbin0 -> 34375 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_2e05d39f4cb329be10e8c515494cef76.pngbin0 -> 36145 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x100_fill_nearestneighbor_topleft_3.pngbin0 -> 10210 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fill_gaussian_smart1_3.pngbin0 -> 20658 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fit_linear_3.pngbin0 -> 17575 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3980c5868e0b6f20ec95424dfdcb1d67.pngbin0 -> 24422 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_398ca764abfff83bb15318068105dcb9.pngbin0 -> 26511 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3ed273f49d1dc83891f5736e21fc5f44.pngbin0 -> 33095 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_bottomleft_3.pngbin0 -> 26281 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_center_3.pngbin0 -> 26281 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_428e769d14483c2fcdd6f5c5138e2066.pngbin0 -> 23863 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_600x0_resize_box_3.pngbin0 -> 47492 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_73c19c5f80881858a85aa23cd0ca400d.pngbin0 -> 20199 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_871826faffc414ca3746f65fc9910eed.pngbin0 -> 21552 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0505112c99af88626ac9b9a16a27acb.pngbin0 -> 34281 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0ffc0f22f22e6920f3cad414d6db6ba.pngbin0 -> 24075 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_ae631e5252bb5d7b92bc766ad1a89069.pngbin0 -> 34054 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_b34412412a1cf1658e516a335b0a8dd4.pngbin0 -> 27285 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_c5140f11378ddb13843432a5b489594a.pngbin0 -> 26663 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d17d0184674fcf0a4d770c90bed503db.pngbin0 -> 20267 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d1bbfa2629bffb90118cacce3fcfb924.pngbin0 -> 28414 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_e4d38674b70d9ef559c5df72c9262790.pngbin0 -> 29412 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_eff9583d9b94ac79c60cb099846ce8f3.pngbin0 -> 18095 bytes
-rw-r--r--resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_f17bba59421e7a500387232295512fc0.pngbin0 -> 27034 bytes
-rw-r--r--resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_bge3e615_box_3.pngbin0 -> 5597 bytes
-rw-r--r--resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_q75_bge3e615_box_3.jpgbin0 -> 7640 bytes
-rw-r--r--resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_30x0_resize_box_3.pngbin0 -> 1314 bytes
-rw-r--r--resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_bge3e615_box_3.pngbin0 -> 4220 bytes
-rw-r--r--resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_q75_bge3e615_box_3.jpgbin0 -> 2909 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_0d1b300da7a815ed567b6dadb6f2ce5e.jpgbin0 -> 6446 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x100_fill_q75_box_center.jpgbin0 -> 1805 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_17fd3c558d78ce249b5f0bcbe1ddbffb.jpgbin0 -> 7033 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x0_resize_q50_r90_box.jpgbin0 -> 4222 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_resize_q75_box.jpgbin0 -> 2698 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x100_fill_q75_nearestneighbor_topleft.jpgbin0 -> 2065 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fill_q75_gaussian_smart1.jpgbin0 -> 4667 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fit_q75_linear.jpgbin0 -> 4919 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_30fc2aab35ca0861bf396d09aebc85a4.jpgbin0 -> 7087 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_352eb0101b7c88107520ba719432bbb2.jpgbin0 -> 6435 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3efc2d0f29a8e12c5a690fc6c9288854.jpgbin0 -> 4449 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f1b1455c4a7d13c5aeb7510f9a6a581.jpgbin0 -> 6941 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_bottomleft.jpgbin0 -> 7311 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_center.jpgbin0 -> 6448 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_510813cc53c37e2d489d2f9fdb13f749.jpgbin0 -> 20818 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_600x0_resize_q75_box.jpgbin0 -> 15636 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6673ece428cb7d523234ca0d7c299542.jpgbin0 -> 7088 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6c5c12ac79d3455ccb1993d51eec3cdf.jpgbin0 -> 6563 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_7d9bc4700565266807dc476421066137.jpgbin0 -> 6580 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_9f00027c376fe8556cc9996c47f23f78.jpgbin0 -> 6132 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_abf356affd7d70d6bec3b3498b572191.jpgbin0 -> 5908 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c2d24766b49f3147f5a4137a8db592ac.jpgbin0 -> 7252 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c36da6818db1ab630c3f87f65170003b.jpgbin0 -> 6337 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c50a17db1e6d1bd0fe31a9a3444f1587.jpgbin0 -> 6850 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_cb45fcba865177290c89dc9f41d6ff7a.jpgbin0 -> 4464 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_d30c10468b33df9010d185a8fe8f0491.jpgbin0 -> 5858 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_de1fe6c0f40e7165355507d0f1748083.jpgbin0 -> 5469 bytes
-rw-r--r--resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_f6d8fe32ce3e83abf130e91e33456914.jpgbin0 -> 6421 bytes
-rw-r--r--resources/testdata/golden_webp/fuzzy-cirlcle_hu525d1a6cf670e85f5e8f19890241399b_26792_200x0_resize_q75_h2_box_3.webpbin0 -> 7848 bytes
-rw-r--r--resources/testdata/gopher-hero8.pngbin0 -> 13327 bytes
-rw-r--r--resources/testdata/gradient-circle.pngbin0 -> 20069 bytes
-rw-r--r--resources/testdata/iss8079.jpgbin0 -> 116955 bytes
-rw-r--r--resources/testdata/sub/gohugoio2.pngbin0 -> 73886 bytes
-rw-r--r--resources/testdata/sunrise.JPGbin0 -> 90587 bytes
-rw-r--r--resources/testdata/sunset.jpgbin0 -> 90587 bytes
-rw-r--r--resources/testdata/sunset.webpbin0 -> 59826 bytes
-rw-r--r--resources/testhelpers_test.go205
-rw-r--r--resources/transform.go670
-rw-r--r--resources/transform_test.go440
-rw-r--r--scripts/fork_go_templates/.gitignore1
-rw-r--r--scripts/fork_go_templates/main.go228
-rw-r--r--snap/plugins/x-nodejs.yaml8
-rw-r--r--snap/plugins/x_nodejs.py332
-rw-r--r--snap/snapcraft.yaml110
-rw-r--r--source/content_directory_test.go66
-rw-r--r--source/fileInfo.go296
-rw-r--r--source/fileInfo_test.go57
-rw-r--r--source/filesystem.go121
-rw-r--r--source/filesystem_test.go102
-rw-r--r--source/sourceSpec.go158
-rw-r--r--temp/0.86.1-relnotes-ready.md8
-rw-r--r--tpl/cast/cast.go63
-rw-r--r--tpl/cast/cast_test.go119
-rw-r--r--tpl/cast/docshelper.go56
-rw-r--r--tpl/cast/init.go57
-rw-r--r--tpl/collections/append.go37
-rw-r--r--tpl/collections/append_test.go69
-rw-r--r--tpl/collections/apply.go164
-rw-r--r--tpl/collections/apply_test.go98
-rw-r--r--tpl/collections/collections.go781
-rw-r--r--tpl/collections/collections_test.go992
-rw-r--r--tpl/collections/complement.go55
-rw-r--r--tpl/collections/complement_test.go99
-rw-r--r--tpl/collections/index.go133
-rw-r--r--tpl/collections/index_test.go70
-rw-r--r--tpl/collections/init.go214
-rw-r--r--tpl/collections/integration_test.go75
-rw-r--r--tpl/collections/merge.go127
-rw-r--r--tpl/collections/merge_test.go243
-rw-r--r--tpl/collections/reflect_helpers.go215
-rw-r--r--tpl/collections/sort.go189
-rw-r--r--tpl/collections/sort_test.go268
-rw-r--r--tpl/collections/symdiff.go66
-rw-r--r--tpl/collections/symdiff_test.go80
-rw-r--r--tpl/collections/where.go515
-rw-r--r--tpl/collections/where_test.go864
-rw-r--r--tpl/compare/compare.go352
-rw-r--r--tpl/compare/compare_test.go460
-rw-r--r--tpl/compare/init.go90
-rw-r--r--tpl/crypto/crypto.go137
-rw-r--r--tpl/crypto/crypto_test.go138
-rw-r--r--tpl/crypto/init.go72
-rw-r--r--tpl/data/data.go209
-rw-r--r--tpl/data/data_test.go349
-rw-r--r--tpl/data/init.go45
-rw-r--r--tpl/data/resources.go130
-rw-r--r--tpl/data/resources_test.go230
-rw-r--r--tpl/debug/debug.go40
-rw-r--r--tpl/debug/init.go45
-rw-r--r--tpl/diagrams/diagrams.go81
-rw-r--r--tpl/diagrams/init.go38
-rw-r--r--tpl/encoding/encoding.go90
-rw-r--r--tpl/encoding/encoding_test.go118
-rw-r--r--tpl/encoding/init.go59
-rw-r--r--tpl/fmt/fmt.go85
-rw-r--r--tpl/fmt/init.go77
-rw-r--r--tpl/hugo/init.go39
-rw-r--r--tpl/images/images.go103
-rw-r--r--tpl/images/images_test.go119
-rw-r--r--tpl/images/init.go41
-rw-r--r--tpl/inflect/inflect.go79
-rw-r--r--tpl/inflect/inflect_test.go49
-rw-r--r--tpl/inflect/init.go60
-rw-r--r--tpl/internal/go_templates/cfg/cfg.go68
-rw-r--r--tpl/internal/go_templates/fmtsort/export_test.go11
-rw-r--r--tpl/internal/go_templates/fmtsort/sort.go220
-rw-r--r--tpl/internal/go_templates/fmtsort/sort_test.go279
-rw-r--r--tpl/internal/go_templates/htmltemplate/attr.go175
-rw-r--r--tpl/internal/go_templates/htmltemplate/attr_string.go16
-rw-r--r--tpl/internal/go_templates/htmltemplate/clone_test.go283
-rw-r--r--tpl/internal/go_templates/htmltemplate/content.go102
-rw-r--r--tpl/internal/go_templates/htmltemplate/content_test.go462
-rw-r--r--tpl/internal/go_templates/htmltemplate/context.go265
-rw-r--r--tpl/internal/go_templates/htmltemplate/css.go260
-rw-r--r--tpl/internal/go_templates/htmltemplate/css_test.go284
-rw-r--r--tpl/internal/go_templates/htmltemplate/delim_string.go16
-rw-r--r--tpl/internal/go_templates/htmltemplate/doc.go241
-rw-r--r--tpl/internal/go_templates/htmltemplate/element_string.go16
-rw-r--r--tpl/internal/go_templates/htmltemplate/error.go234
-rw-r--r--tpl/internal/go_templates/htmltemplate/escape.go962
-rw-r--r--tpl/internal/go_templates/htmltemplate/escape_test.go1998
-rw-r--r--tpl/internal/go_templates/htmltemplate/example_test.go185
-rw-r--r--tpl/internal/go_templates/htmltemplate/examplefiles_test.go229
-rw-r--r--tpl/internal/go_templates/htmltemplate/exec_test.go1838
-rw-r--r--tpl/internal/go_templates/htmltemplate/html.go265
-rw-r--r--tpl/internal/go_templates/htmltemplate/html_test.go100
-rw-r--r--tpl/internal/go_templates/htmltemplate/hugo_template.go41
-rw-r--r--tpl/internal/go_templates/htmltemplate/js.go430
-rw-r--r--tpl/internal/go_templates/htmltemplate/js_test.go426
-rw-r--r--tpl/internal/go_templates/htmltemplate/jsctx_string.go16
-rw-r--r--tpl/internal/go_templates/htmltemplate/multi_test.go293
-rw-r--r--tpl/internal/go_templates/htmltemplate/state_string.go16
-rw-r--r--tpl/internal/go_templates/htmltemplate/template.go537
-rw-r--r--tpl/internal/go_templates/htmltemplate/template_test.go222
-rw-r--r--tpl/internal/go_templates/htmltemplate/testdata/file1.tmpl2
-rw-r--r--tpl/internal/go_templates/htmltemplate/testdata/file2.tmpl2
-rw-r--r--tpl/internal/go_templates/htmltemplate/testdata/fs.zipbin0 -> 406 bytes
-rw-r--r--tpl/internal/go_templates/htmltemplate/testdata/tmpl1.tmpl3
-rw-r--r--tpl/internal/go_templates/htmltemplate/testdata/tmpl2.tmpl3
-rw-r--r--tpl/internal/go_templates/htmltemplate/transition.go592
-rw-r--r--tpl/internal/go_templates/htmltemplate/transition_test.go63
-rw-r--r--tpl/internal/go_templates/htmltemplate/url.go217
-rw-r--r--tpl/internal/go_templates/htmltemplate/url_test.go172
-rw-r--r--tpl/internal/go_templates/htmltemplate/urlpart_string.go16
-rw-r--r--tpl/internal/go_templates/testenv/testenv.go366
-rw-r--r--tpl/internal/go_templates/testenv/testenv_cgo.go11
-rw-r--r--tpl/internal/go_templates/testenv/testenv_notunix.go13
-rw-r--r--tpl/internal/go_templates/testenv/testenv_notwin.go20
-rw-r--r--tpl/internal/go_templates/testenv/testenv_unix.go13
-rw-r--r--tpl/internal/go_templates/testenv/testenv_windows.go47
-rw-r--r--tpl/internal/go_templates/texttemplate/doc.go465
-rw-r--r--tpl/internal/go_templates/texttemplate/example_test.go113
-rw-r--r--tpl/internal/go_templates/texttemplate/examplefiles_test.go184
-rw-r--r--tpl/internal/go_templates/texttemplate/examplefunc_test.go57
-rw-r--r--tpl/internal/go_templates/texttemplate/exec.go1045
-rw-r--r--tpl/internal/go_templates/texttemplate/exec_test.go1815
-rw-r--r--tpl/internal/go_templates/texttemplate/funcs.go753
-rw-r--r--tpl/internal/go_templates/texttemplate/helper.go177
-rw-r--r--tpl/internal/go_templates/texttemplate/hugo_template.go398
-rw-r--r--tpl/internal/go_templates/texttemplate/hugo_template_test.go92
-rw-r--r--tpl/internal/go_templates/texttemplate/link_test.go62
-rw-r--r--tpl/internal/go_templates/texttemplate/multi_test.go467
-rw-r--r--tpl/internal/go_templates/texttemplate/option.go72
-rw-r--r--tpl/internal/go_templates/texttemplate/parse/lex.go682
-rw-r--r--tpl/internal/go_templates/texttemplate/parse/lex_test.go562
-rw-r--r--tpl/internal/go_templates/texttemplate/parse/node.go1008
-rw-r--r--tpl/internal/go_templates/texttemplate/parse/parse.go795
-rw-r--r--tpl/internal/go_templates/texttemplate/parse/parse_test.go683
-rw-r--r--tpl/internal/go_templates/texttemplate/template.go238
-rw-r--r--tpl/internal/go_templates/texttemplate/testdata/file1.tmpl2
-rw-r--r--tpl/internal/go_templates/texttemplate/testdata/file2.tmpl2
-rw-r--r--tpl/internal/go_templates/texttemplate/testdata/tmpl1.tmpl3
-rw-r--r--tpl/internal/go_templates/texttemplate/testdata/tmpl2.tmpl3
-rw-r--r--tpl/internal/resourcehelpers/helpers.go69
-rw-r--r--tpl/internal/templatefuncRegistry_test.go39
-rw-r--r--tpl/internal/templatefuncsRegistry.go293
-rw-r--r--tpl/js/init.go36
-rw-r--r--tpl/js/js.go65
-rw-r--r--tpl/lang/init.go81
-rw-r--r--tpl/lang/lang.go266
-rw-r--r--tpl/lang/lang_test.go140
-rw-r--r--tpl/math/init.go134
-rw-r--r--tpl/math/math.go176
-rw-r--r--tpl/math/math_test.go437
-rw-r--r--tpl/math/round.go61
-rw-r--r--tpl/openapi/openapi3/init.go41
-rw-r--r--tpl/openapi/openapi3/integration_test.go74
-rw-r--r--tpl/openapi/openapi3/openapi3.go95
-rw-r--r--tpl/os/init.go62
-rw-r--r--tpl/os/integration_test.go51
-rw-r--r--tpl/os/os.go158
-rw-r--r--tpl/os/os_test.go128
-rw-r--r--tpl/partials/init.go55
-rw-r--r--tpl/partials/integration_test.go274
-rw-r--r--tpl/partials/partials.go277
-rw-r--r--tpl/partials/partials_test.go40
-rw-r--r--tpl/path/init.go60
-rw-r--r--tpl/path/path.go174
-rw-r--r--tpl/path/path_test.go236
-rw-r--r--tpl/reflect/init.go51
-rw-r--r--tpl/reflect/reflect.go36
-rw-r--r--tpl/reflect/reflect_test.go54
-rw-r--r--tpl/resources/init.go77
-rw-r--r--tpl/resources/integration_test.go100
-rw-r--r--tpl/resources/resources.go432
-rw-r--r--tpl/safe/init.go80
-rw-r--r--tpl/safe/safe.go73
-rw-r--r--tpl/safe/safe_test.go211
-rw-r--r--tpl/site/init.go43
-rw-r--r--tpl/strings/init.go229
-rw-r--r--tpl/strings/regexp.go125
-rw-r--r--tpl/strings/regexp_test.go93
-rw-r--r--tpl/strings/strings.go505
-rw-r--r--tpl/strings/strings_test.go787
-rw-r--r--tpl/strings/truncate.go157
-rw-r--r--tpl/strings/truncate_test.go83
-rw-r--r--tpl/template.go211
-rw-r--r--tpl/template_info.go83
-rw-r--r--tpl/template_test.go71
-rw-r--r--tpl/templates/init.go44
-rw-r--r--tpl/templates/integration_test.go85
-rw-r--r--tpl/templates/templates.go38
-rw-r--r--tpl/time/init.go95
-rw-r--r--tpl/time/time.go123
-rw-r--r--tpl/time/time_test.go185
-rw-r--r--tpl/tplimpl/embedded/.gitattributes1
-rw-r--r--tpl/tplimpl/embedded/templates/_default/_markup/render-codeblock-goat.html18
-rw-r--r--tpl/tplimpl/embedded/templates/_default/robots.txt1
-rw-r--r--tpl/tplimpl/embedded/templates/_default/rss.xml39
-rw-r--r--tpl/tplimpl/embedded/templates/_default/sitemap.xml24
-rw-r--r--tpl/tplimpl/embedded/templates/_default/sitemapindex.xml11
-rw-r--r--tpl/tplimpl/embedded/templates/_server/error.html87
-rw-r--r--tpl/tplimpl/embedded/templates/alias.html10
-rw-r--r--tpl/tplimpl/embedded/templates/disqus.html23
-rw-r--r--tpl/tplimpl/embedded/templates/google_analytics.html51
-rw-r--r--tpl/tplimpl/embedded/templates/google_analytics_async.html28
-rw-r--r--tpl/tplimpl/embedded/templates/google_news.html6
-rw-r--r--tpl/tplimpl/embedded/templates/opengraph.html44
-rw-r--r--tpl/tplimpl/embedded/templates/pagination.html154
-rw-r--r--tpl/tplimpl/embedded/templates/schema.html25
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/1__h_simple_assets.html34
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/figure.html28
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/gist.html1
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/highlight.html1
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/instagram.html18
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/instagram_simple.html67
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/param.html4
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/ref.html1
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/relref.html1
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/twitter.html35
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/twitter_simple.html58
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/vimeo.html14
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/vimeo_simple.html22
-rw-r--r--tpl/tplimpl/embedded/templates/shortcodes/youtube.html10
-rw-r--r--tpl/tplimpl/embedded/templates/twitter_cards.html24
-rw-r--r--tpl/tplimpl/integration_test.go117
-rw-r--r--tpl/tplimpl/shortcodes.go154
-rw-r--r--tpl/tplimpl/shortcodes_test.go91
-rw-r--r--tpl/tplimpl/template.go1161
-rw-r--r--tpl/tplimpl/templateFuncster.go14
-rw-r--r--tpl/tplimpl/templateProvider.go41
-rw-r--r--tpl/tplimpl/template_ast_transformers.go348
-rw-r--r--tpl/tplimpl/template_ast_transformers_test.go160
-rw-r--r--tpl/tplimpl/template_errors.go64
-rw-r--r--tpl/tplimpl/template_funcs.go204
-rw-r--r--tpl/tplimpl/template_funcs_test.go84
-rw-r--r--tpl/tplimpl/template_test.go40
-rw-r--r--tpl/transform/init.go117
-rw-r--r--tpl/transform/remarshal.go88
-rw-r--r--tpl/transform/remarshal_test.go203
-rw-r--r--tpl/transform/transform.go151
-rw-r--r--tpl/transform/transform_test.go273
-rw-r--r--tpl/transform/unmarshal.go170
-rw-r--r--tpl/transform/unmarshal_test.go233
-rw-r--r--tpl/urls/init.go73
-rw-r--r--tpl/urls/urls.go187
-rw-r--r--tpl/urls/urls_test.go70
-rw-r--r--transform/chain.go125
-rw-r--r--transform/chain_test.go70
-rw-r--r--transform/livereloadinject/livereloadinject.go85
-rw-r--r--transform/livereloadinject/livereloadinject_test.go64
-rw-r--r--transform/metainject/hugogenerator.go56
-rw-r--r--transform/metainject/hugogenerator_test.go60
-rw-r--r--transform/urlreplacers/absurl.go36
-rw-r--r--transform/urlreplacers/absurlreplacer.go260
-rw-r--r--transform/urlreplacers/absurlreplacer_test.go236
-rw-r--r--watcher/batcher.go87
-rw-r--r--watcher/filenotify/filenotify.go49
-rw-r--r--watcher/filenotify/fsnotify.go20
-rw-r--r--watcher/filenotify/poller.go326
-rw-r--r--watcher/filenotify/poller_test.go304
1958 files changed, 177407 insertions, 71 deletions
diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 000000000..544f820d3
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,51 @@
+defaults: &defaults
+ docker:
+ - image: bepsays/ci-goreleaser:1.1800.300
+ environment:
+ CGO_ENABLED: "0"
+
+version: 2
+jobs:
+ build:
+ <<: *defaults
+ steps:
+ - checkout:
+ path: hugo
+ - run:
+ command: |
+ git clone git@github.com:gohugoio/hugoDocs.git
+ cd hugo
+ go mod download
+ sleep 5
+ go mod verify
+ - persist_to_workspace:
+ root: .
+ paths: .
+ release:
+ <<: *defaults
+ steps:
+ - attach_workspace:
+ at: /root/project
+ - run:
+ command: |
+ cd hugo
+ git config --global user.email "bjorn.erik.pedersen+hugoreleaser@gmail.com"
+ git config --global user.name "hugoreleaser"
+ go run -tags release main.go release -r ${CIRCLE_BRANCH}
+
+workflows:
+ version: 2
+ release:
+ jobs:
+ - build:
+ filters:
+ branches:
+ only: /release-.*/
+ - hold:
+ type: approval
+ requires:
+ - build
+ - release:
+ context: org-global
+ requires:
+ - hold
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 000000000..a183f6fcf
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,9 @@
+*.md
+*.log
+*.txt
+.git
+.github
+.circleci
+docs
+examples
+Dockerfile
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..6994810cf
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,8 @@
+# Text files have auto line endings
+* text=auto
+
+# Go source files always have LF line endings
+*.go text eol=lf
+
+# SVG files should not be modified
+*.svg -text
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 000000000..250b67a9b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,18 @@
+---
+name: 'Bug report'
+labels: 'Bug, NeedsTriage'
+assignees: ''
+about: Create a report to help us improve
+---
+
+
+<!-- Please answer these questions before submitting your issue. Thanks! -->
+
+### What version of Hugo are you using (`hugo version`)?
+
+<pre>
+$ hugo version
+
+</pre>
+
+### Does this issue reproduce with the latest release?
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..c84d3276b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: false
+contact_links:
+ - name: SUPPORT, ISSUES and TROUBLESHOOTING
+ url: https://discourse.gohugo.io/
+    about: Please DO NOT use GitHub for support requests. Please visit https://discourse.gohugo.io for support! You will be helped much faster there. If you ignore this request your issue might be closed with a Discourse label.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 000000000..c114b3d7f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,11 @@
+---
+name: Proposal
+about: Propose a new feature for Hugo
+title: ''
+labels: 'Proposal, NeedsTriage'
+assignees: ''
+
+---
+
+
+<!-- Describe this new feature. Think about if it really belongs in the Hugo core module; you may want to discuss it on https://discourse.gohugo.io/ first. --> \ No newline at end of file
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 000000000..1801e72d9
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,7 @@
+# See https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#package-ecosystem
+version: 2
+updates:
+ - package-ecosystem: "gomod"
+ directory: "/"
+ schedule:
+ interval: "daily"
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
new file mode 100644
index 000000000..86cd2cac4
--- /dev/null
+++ b/.github/workflows/stale.yml
@@ -0,0 +1,47 @@
+name: 'Close stale and lock closed issues and PRs'
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: '30 1 * * *'
+jobs:
+ stale:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: dessant/lock-threads@08e671be8ac8944d0e132aa71d0ae8ccfb347675
+ with:
+ issue-inactive-days: 21
+ add-issue-labels: 'Outdated'
+ issue-comment: >
+ This issue has been automatically locked since there
+ has not been any recent activity after it was closed.
+ Please open a new issue for related bugs.
+ pr-comment: >
+ This pull request has been automatically locked since there
+ has not been any recent activity after it was closed.
+ Please open a new issue for related bugs.
+ - uses: actions/stale@04a1828bc18ada028d85a0252a47cd2963a91abe
+ with:
+ operations-per-run: 999
+ days-before-issue-stale: 365
+ days-before-pr-stale: 365
+ days-before-issue-close: 56
+ days-before-pr-close: 56
+ stale-issue-message: >
+ This issue has been automatically marked as stale because it has not had
+ recent activity. The resources of the Hugo team are limited, and so we are asking for your help.
+
+ If this is a **bug** and you can still reproduce this error on the <code>master</code> branch, please reply with all of the information you have about it in order to keep the issue open.
+
+ If this is a **feature request**, and you feel that it is still relevant and valuable, please tell us why.
+
+ This issue will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.
+ stale-pr-message: This PR has been automatically marked as stale because it has not had
+ recent activity. The resources of the Hugo team are limited, and so we are asking for your help.
+
+            Please check https://github.com/gohugoio/hugo/blob/master/CONTRIBUTING.md#code-contribution and verify that this code contribution fits with the description. If yes, tell us in a comment.
+
+ This PR will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.
+ stale-issue-label: 'Stale'
+ exempt-issue-labels: 'Keep,Security'
+ stale-pr-label: 'Stale'
+ exempt-pr-labels: 'Keep,Security'
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 000000000..1403c4d57
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,89 @@
+on: [push, pull_request]
+name: Test
+permissions:
+ contents: read
+jobs:
+ test:
+ env:
+ GOPROXY: https://proxy.golang.org
+ GO111MODULE: on
+ strategy:
+ matrix:
+ # Note: We upgraded to Go 1.18 in Hugo v0.95.0
+ # Go 1.18 had some breaking changes on the source level which means Hugo cannot be built
+ # with older Go versions, but the improvements in Go 1.18 were too good to pass on (e.g. break and continue).
+ # Note that you don't need Go (or Go 1.18) to run a pre-built binary.
+ go-version: [1.18.x]
+ os: [ubuntu-latest, macos-latest, windows-latest]
+ runs-on: ${{ matrix.os }}
+ steps:
+ - name: Install Go
+ uses: actions/setup-go@37335c7bb261b353407cff977110895fa0b4f7d8
+ with:
+ go-version: ${{ matrix.go-version }}
+ - name: Install Ruby
+ uses: actions/setup-ruby@5f29a1cd8dfebf420691c4c9a0e832e2fae5a526
+ with:
+ ruby-version: '2.7'
+ - name: Install Python
+ uses: actions/setup-python@3105fb18c05ddd93efea5f9e0bef7a03a6e9e7df
+ with:
+ python-version: '3.x'
+ - name: Install Mage
+ run: go install github.com/magefile/mage@07afc7d24f4d6d6442305d49552f04fbda5ccb3e
+ - name: Install asciidoctor
+ uses: reitzig/actions-asciidoctor@7570212ae20b63653481675fb1ff62d1073632b0
+ - name: Checkout code
+ uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+ - name: Install docutils
+ run: |
+ pip install docutils
+ rst2html.py --version
+ - if: matrix.os == 'ubuntu-latest'
+ name: Install pandoc on Linux
+ run: |
+ sudo apt-get update -y
+ sudo apt-get install -y pandoc
+ - if: matrix.os == 'macos-latest'
+ run: |
+ brew install pandoc
+ - if: matrix.os == 'windows-latest'
+ run: |
+ choco install pandoc
+ - run: pandoc -v
+ - if: matrix.os == 'ubuntu-latest'
+ name: Install dart-sass-embedded Linux
+ run: |
+ curl -LJO https://github.com/sass/dart-sass-embedded/releases/download/1.0.0-beta.6/sass_embedded-1.0.0-beta.6-linux-x64.tar.gz;
+ echo "04fc1e5e28d29a4585a701941b6dace56771d94bfbe7f9e4db28d24417ceeec3 sass_embedded-1.0.0-beta.6-linux-x64.tar.gz" | sha256sum -c;
+ tar -xvf sass_embedded-1.0.0-beta.6-linux-x64.tar.gz;
+ echo "$GITHUB_WORKSPACE/sass_embedded/" >> $GITHUB_PATH
+ - if: matrix.os == 'macos-latest'
+ name: Install dart-sass-embedded MacOS
+ run: |
+ curl -LJO https://github.com/sass/dart-sass-embedded/releases/download/1.0.0-beta.6/sass_embedded-1.0.0-beta.6-macos-x64.tar.gz;
+ echo "b3b984675a9b04aa22f6f2302dda4191b507ac2ca124467db2dfe7e58e72fbad sass_embedded-1.0.0-beta.6-macos-x64.tar.gz" | shasum -a 256 -c;
+ tar -xvf sass_embedded-1.0.0-beta.6-macos-x64.tar.gz;
+ echo "$GITHUB_WORKSPACE/sass_embedded/" >> $GITHUB_PATH
+ - if: matrix.os == 'windows-latest'
+ name: Install dart-sass-embedded Windows
+ run: |
+ curl -LJO https://github.com/sass/dart-sass-embedded/releases/download/1.0.0-beta.6/sass_embedded-1.0.0-beta.6-windows-x64.zip;
+ echo "6ae442129dbb3334bc21ef851261da6c0c1b560da790ca2e1350871d00ab816d sass_embedded-1.0.0-beta.6-windows-x64.zip" | sha256sum -c;
+ unzip sass_embedded-1.0.0-beta.6-windows-x64.zip;
+ echo "$env:GITHUB_WORKSPACE/sass_embedded/" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf-8 -Append
+ - name: Check
+ run: |
+ mage -v check;
+ env:
+ HUGO_BUILD_TAGS: extended
+ - name: Build Docs
+ env:
+ HUGO_BUILD_TAGS: extended
+ HUGO_TIMEOUT: 31000
+ HUGO_IGNOREERRORS: error-remote-getjson
+ HUGO_SERVICES_INSTAGRAM_ACCESSTOKEN: dummytoken
+ run: |
+ mage -v hugo
+ ./hugo -s docs/
+ ./hugo --renderToMemory -s docs/
diff --git a/.gitignore b/.gitignore
index 4164d21f8..b2aeb9142 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,29 @@
+/hugo
+docs/public*
/.idea
-/.vscode
-/public
-node_modules
+.vscode/*
+hugo.exe
+*.test
+*.prof
nohup.out
+cover.out
+*.swp
+*.swo
.DS_Store
-trace.out
+*~
+vendor/*/
+*.bench
+*.debug
+coverage*.out
+
+dock.sh
+
+GoBuilds
+dist
+
+hugolib/hugo_stats.json
+resources/sunset.jpg
+
+vendor
+
.hugo_build.lock
-resources/_gen/images/ \ No newline at end of file
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/filesaver.js b/.gitmodules
index e69de29bb..e69de29bb 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/filesaver.js
+++ b/.gitmodules
diff --git a/.mailmap b/.mailmap
new file mode 100644
index 000000000..e93adabc1
--- /dev/null
+++ b/.mailmap
@@ -0,0 +1,3 @@
+spf13 <steve.francia@gmail.com> Steve Francia <steve.francia@gmail.com>
+bep <bjorn.erik.pedersen@gmail.com> Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
+
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 000000000..23de481cd
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,202 @@
+# Contributing to Hugo
+
+**Note March 16th 2022:** We are currently very constrained on human resources to do code reviews, so we currently require any new Pull Requests to be limited to bug fixes closing an existing issue. Also, we have updated to Go 1.18, but we will currently not accept any generic rewrites, "interface{} to any" replacements and similar.
+
+We welcome contributions to Hugo of any kind including documentation, themes,
+organization, tutorials, blog posts, bug reports, issues, feature requests,
+feature implementations, pull requests, answering questions on the forum,
+helping to manage issues, etc.
+
+The Hugo community and maintainers are [very active](https://github.com/gohugoio/hugo/pulse/monthly) and helpful, and the project benefits greatly from this activity. We created a [step by step guide](https://gohugo.io/tutorials/how-to-contribute-to-hugo/) if you're unfamiliar with GitHub or contributing to open source projects in general.
+
+*Note that this repository only contains the actual source code of Hugo. For **only** documentation-related pull requests / issues please refer to the [hugoDocs](https://github.com/gohugoio/hugoDocs) repository.*
+
+*Changes to the codebase **and** related documentation, e.g. for a new feature, should still use a single pull request.*
+
+## Table of Contents
+
+* [Asking Support Questions](#asking-support-questions)
+* [Reporting Issues](#reporting-issues)
+* [Submitting Patches](#submitting-patches)
+ * [Code Contribution Guidelines](#code-contribution-guidelines)
+ * [Git Commit Message Guidelines](#git-commit-message-guidelines)
+ * [Fetching the Sources From GitHub](#fetching-the-sources-from-github)
+ * [Building Hugo with Your Changes](#building-hugo-with-your-changes)
+
+## Asking Support Questions
+
+We have an active [discussion forum](https://discourse.gohugo.io) where users and developers can ask questions.
+Please don't use the GitHub issue tracker to ask questions.
+
+## Reporting Issues
+
+If you believe you have found a defect in Hugo or its documentation, use
+the GitHub issue tracker to report
+the problem to the Hugo maintainers. If you're not sure if it's a bug or not,
+start by asking in the [discussion forum](https://discourse.gohugo.io).
+When reporting the issue, please provide the version of Hugo in use (`hugo
+version`) and your operating system.
+
+- [Hugo Issues · gohugoio/hugo](https://github.com/gohugoio/hugo/issues)
+- [Hugo Documentation Issues · gohugoio/hugoDocs](https://github.com/gohugoio/hugoDocs/issues)
+- [Hugo Website Theme Issues · gohugoio/hugoThemesSite](https://github.com/gohugoio/hugoThemesSite/issues)
+
+## Code Contribution
+
+Hugo has become a fully featured static site generator, so any new functionality must:
+
+* be useful to many.
+* fit naturally into _what Hugo does best._
+* strive not to break existing sites.
+* close or update an open [Hugo issue](https://github.com/gohugoio/hugo/issues)
+
+If it is of some complexity, the contributor is expected to maintain and support the new feature in the future (answer questions on the forum, fix any bugs etc.).
+
+It is recommended to open up a discussion on the [Hugo Forum](https://discourse.gohugo.io/) to get feedback on your idea before you begin.
+
+Any non-trivial code change needs to update an open [issue](https://github.com/gohugoio/hugo/issues). A non-trivial code change without an issue reference with one of the labels `bug` or `enhancement` will not be merged.
+
+Note that we do not accept new features that require [CGO](https://github.com/golang/go/wiki/cgo).
+We have one exception to this rule which is LibSASS.
+
+**Bug fixes are, of course, always welcome.**
+
+## Submitting Patches
+
+The Hugo project welcomes all contributors and contributions regardless of skill or experience level. If you are interested in helping with the project, we will help you with your contribution.
+
+### Code Contribution Guidelines
+
+Because we want to create the best possible product for our users and the best contribution experience for our developers, we have a set of guidelines which ensure that all contributions are acceptable. The guidelines are not intended as a filter or barrier to participation. If you are unfamiliar with the contribution process, the Hugo team will help you and teach you how to bring your contribution in accordance with the guidelines.
+
+To make the contribution process as seamless as possible, we ask for the following:
+
+* Go ahead and fork the project and make your changes. We encourage pull requests to allow for review and discussion of code changes.
+* When you’re ready to create a pull request, be sure to:
+ * Sign the [CLA](https://cla-assistant.io/gohugoio/hugo).
+ * Have test cases for the new code. If you have questions about how to do this, please ask in your pull request.
+ * Run `go fmt`.
+ * Add documentation if you are adding new features or changing functionality. The docs site lives in `/docs`.
+ * Squash your commits into a single commit. `git rebase -i`. It’s okay to force update your pull request with `git push -f`.
+ * Ensure that `mage check` succeeds. [Travis CI](https://travis-ci.org/gohugoio/hugo) (Windows, Linux and macOS) will fail the build if `mage check` fails.
+ * Follow the **Git Commit Message Guidelines** below.
+
+### Git Commit Message Guidelines
+
+This [blog article](https://cbea.ms/git-commit/) is a good resource for learning how to write good commit messages,
+the most important part being that each commit message should have a title/subject in imperative mood starting with a capital letter and no trailing period:
+*"js: Return error when option x is not set"*, **NOT** *"returning some error."*
+
+Most title/subjects should have a lower-cased prefix with a colon and one whitespace. The prefix can be:
+
+* The name of the package where (most of) the changes are made (e.g. `media: Add text/calendar`)
+* If the package name is deeply nested/long, try to shorten it from the left side, e.g. `markup/goldmark` is OK, `resources/resource_transformers/js` can be shortened to `js`.
+* If this commit touches several packages with a common functional topic, use that as a prefix, e.g. `errors: Resolve correct line numbers`
+* If this commit touches many packages without a common functional topic, prefix with `all:` (e.g. `all: Reformat Go code`)
+* If this is a documentation update, prefix with `docs:`.
+* If nothing of the above applies, just leave the prefix out.
+
+Also, if your commit references one or more GitHub issues, always end your commit message body with *See #1234* or *Fixes #1234*.
+Replace *1234* with the GitHub issue ID. The last example will close the issue when the commit is merged into *master*.
+
+An example:
+
+```text
+tpl: Add custom index function
+
+Add a custom index template function that deviates from the stdlib simply by not
+returning an "index out of range" error if an array, slice or string index is
+out of range. Instead, we just return nil values. This should help make the
+new default function more useful for Hugo users.
+
+Fixes #1949
+```
+
+### Fetching the Sources From GitHub
+
+Since Hugo 0.48, Hugo uses the Go Modules support built into Go 1.11 to build. The easiest is to clone Hugo in a directory outside of `GOPATH`, as in the following example:
+
+```bash
+mkdir $HOME/src
+cd $HOME/src
+git clone https://github.com/gohugoio/hugo.git
+cd hugo
+go install
+```
+
+>Note: Some Go tools may not be fully updated to support Go Modules yet. One example would be LiteIDE. Follow [this workaround](https://github.com/visualfc/liteide/issues/986#issuecomment-428117702) for how to continue to work with Hugo below `GOPATH`.
+
+For some convenient build and test targets, you also will want to install Mage:
+
+```bash
+go install github.com/magefile/mage
+```
+
+Now, to make a change to Hugo's source:
+
+1. Create a new branch for your changes (the branch name is arbitrary):
+
+ ```bash
+ git checkout -b iss1234
+ ```
+
+1. After making your changes, commit them to your new branch:
+
+ ```bash
+ git commit -a -v
+ ```
+
+1. Fork Hugo in GitHub.
+
+1. Add your fork as a new remote (the remote name, "fork" in this example, is arbitrary):
+
+ ```bash
+ git remote add fork git@github.com:USERNAME/hugo.git
+ ```
+
+1. Push the changes to your new remote:
+
+ ```bash
+ git push --set-upstream fork iss1234
+ ```
+
+1. You're now ready to submit a PR based upon the new branch in your forked repository.
+
+### Building Hugo with Your Changes
+
+Hugo uses [mage](https://github.com/magefile/mage) to sync vendor dependencies, build Hugo, run the test suite and other things. You must run mage from the Hugo directory.
+
+```bash
+cd $HOME/src/hugo
+```
+
+To build Hugo:
+
+```bash
+mage hugo
+```
+
+To install hugo in `$HOME/go/bin`:
+
+```bash
+mage install
+```
+
+To run the tests:
+
+```bash
+mage hugoRace
+mage -v check
+```
+
+To list all available commands along with descriptions:
+
+```bash
+mage -l
+```
+
+**Note:** From Hugo 0.43 we have added a build tag, `extended` that adds **SCSS support**. This needs a C compiler installed to build. You can enable this when building by:
+
+```bash
+HUGO_BUILD_TAGS=extended mage install
+```
diff --git a/Dockerfile b/Dockerfile
new file mode 100755
index 000000000..885809fab
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,45 @@
+# GitHub: https://github.com/gohugoio
+# Twitter: https://twitter.com/gohugoio
+# Website: https://gohugo.io/
+
+FROM golang:1.18-alpine AS build
+
+# Optionally set HUGO_BUILD_TAGS to "extended" or "nodeploy" when building like so:
+# docker build --build-arg HUGO_BUILD_TAGS=extended .
+ARG HUGO_BUILD_TAGS
+
+ARG CGO=1
+ENV CGO_ENABLED=${CGO}
+ENV GOOS=linux
+ENV GO111MODULE=on
+
+WORKDIR /go/src/github.com/gohugoio/hugo
+
+COPY . /go/src/github.com/gohugoio/hugo/
+
+# gcc/g++ are required to build SASS libraries for extended version
+RUN apk update && \
+ apk add --no-cache gcc g++ musl-dev git && \
+ go install github.com/magefile/mage
+
+RUN mage hugo && mage install
+
+# ---
+
+FROM alpine:3.12
+
+COPY --from=build /go/bin/hugo /usr/bin/hugo
+
+# libc6-compat & libstdc++ are required for extended SASS libraries
+# ca-certificates are required to fetch outside resources (like Twitter oEmbeds)
+RUN apk update && \
+ apk add --no-cache ca-certificates libc6-compat libstdc++ git
+
+VOLUME /site
+WORKDIR /site
+
+# Expose port for live server
+EXPOSE 1313
+
+ENTRYPOINT ["hugo"]
+CMD ["--help"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
index a2c767b7b..dc9f3b445 100644
--- a/README.md
+++ b/README.md
@@ -1,48 +1,248 @@
-[![Netlify Status](https://api.netlify.com/api/v1/badges/e0dbbfc7-34f1-4393-a679-c16e80162705/deploy-status)](https://app.netlify.com/sites/gohugoio/deploys)
-[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](https://gohugo.io/contribute/documentation/)
+<a href="https://gohugo.io/"><img src="https://raw.githubusercontent.com/gohugoio/gohugoioTheme/master/static/images/hugo-logo-wide.svg?sanitize=true" alt="Hugo" width="565"></a>
-# Hugo Docs
+A Fast and Flexible Static Site Generator built with love by [bep](https://github.com/bep), [spf13](https://spf13.com/) and [friends](https://github.com/gohugoio/hugo/graphs/contributors) in [Go][].
-Documentation site for [Hugo](https://github.com/gohugoio/hugo), the very fast and flexible static site generator built with love in Go.
+[Website](https://gohugo.io) |
+[Forum](https://discourse.gohugo.io) |
+[Documentation](https://gohugo.io/getting-started/) |
+[Installation Guide](https://gohugo.io/getting-started/installing/) |
+[Contribution Guide](CONTRIBUTING.md) |
+[Twitter](https://twitter.com/gohugoio)
-## Contributing
+[![GoDoc](https://godoc.org/github.com/gohugoio/hugo?status.svg)](https://godoc.org/github.com/gohugoio/hugo)
+[![Tests on Linux, MacOS and Windows](https://github.com/gohugoio/hugo/workflows/Test/badge.svg)](https://github.com/gohugoio/hugo/actions?query=workflow%3ATest)
+[![Go Report Card](https://goreportcard.com/badge/github.com/gohugoio/hugo)](https://goreportcard.com/report/github.com/gohugoio/hugo)
-We welcome contributions to Hugo of any kind including documentation, suggestions, bug reports, pull requests etc. Also check out our [contribution guide](https://gohugo.io/contribute/documentation/). We would love to hear from you.
+## Overview
-Note that this repository contains solely the documentation for Hugo. For contributions that aren't documentation-related please refer to the [hugo](https://github.com/gohugoio/hugo) repository.
+Hugo is a static HTML and CSS website generator written in [Go][].
+It is optimized for speed, ease of use, and configurability.
+Hugo takes a directory with content and templates and renders them into a full HTML website.
-*Pull requests shall **only** contain changes to the actual documentation. However, changes on the code base of Hugo **and** the documentation shall be a single, atomic pull request in the [hugo](https://github.com/gohugoio/hugo) repository.*
+Hugo relies on Markdown files with front matter for metadata, and you can run Hugo from any directory.
+This works well for shared hosts and other systems where you don’t have a privileged account.
-Spelling fixes are most welcomed, and if you want to contribute longer sections to the documentation, it would be great if you had the following criteria in mind when writing:
+Hugo renders a typical website of moderate size in a fraction of a second.
+A good rule of thumb is that each piece of content renders in around 1 millisecond.
-* Short is good. People go to the library to read novels. If there is more than one way to _do a thing_ in Hugo, describe the current _best practice_ (avoid "… but you can also do …" and "… in older versions of Hugo you had to …".
-* For example, try to find short snippets that teaches people about the concept. If the example is also useful as-is (copy and paste), then great. Don't list long and similar examples just so people can use them on their sites.
-* Hugo has users from all over the world, so easy to understand and [simple English](https://simple.wikipedia.org/wiki/Basic_English) is good.
+Hugo is designed to work well for any kind of website including blogs, tumbles, and docs.
-## Branches
+#### Supported Architectures
-* The `master` branch is where the site is automatically built from, and is the place to put changes relevant to the current Hugo version.
-* The `next` branch is where we store changes that are related to the next Hugo release. This can be previewed here: https://next--gohugoio.netlify.com/
+Currently, we provide pre-built Hugo binaries for Windows, Linux, FreeBSD, NetBSD, DragonFly BSD, OpenBSD, macOS (Darwin), and [Android](https://gist.github.com/bep/a0d8a26cf6b4f8bc992729b8e50b480b) for x64, i386 and ARM architectures.
-## Build
+Hugo may also be compiled from source wherever the Go compiler tool chain can run, e.g. for other operating systems including Plan 9 and Solaris.
-To view the documentation site locally, you need to clone this repository:
+**Complete documentation is available at [Hugo Documentation](https://gohugo.io/getting-started/).**
+
+## Choose How to Install
+
+If you want to use Hugo as your site generator, simply install the Hugo binaries.
+The Hugo binaries have no external dependencies.
+
+To contribute to the Hugo source code or documentation, you should [fork the Hugo GitHub project](https://github.com/gohugoio/hugo#fork-destination-box) and clone it to your local machine.
+
+Finally, you can install the Hugo source code with `go`, build the binaries yourself, and run Hugo that way.
+Building the binaries is an easy task for an experienced `go` getter.
+
+### Install Hugo as Your Site Generator (Binary Install)
+
+Use the [installation instructions in the Hugo documentation](https://gohugo.io/getting-started/installing/).
+
+### Build and Install the Binaries from Source (Advanced Install)
+
+#### Prerequisite Tools
+
+* [Git](https://git-scm.com/)
+* [Go (we test it with the last 2 major versions; but note that Hugo 0.95.0 only builds with >= Go 1.18.)](https://golang.org/dl/)
+
+#### Fetch from GitHub
+
+To fetch and build the source from GitHub:
```bash
-git clone https://github.com/gohugoio/hugoDocs.git
+mkdir $HOME/src
+cd $HOME/src
+git clone https://github.com/gohugoio/hugo.git
+cd hugo
+go install
```
-Also note that the documentation version for a given version of Hugo can also be found in the `/docs` sub-folder of the [Hugo source repository](https://github.com/gohugoio/hugo).
+**If you are a Windows user, substitute the `$HOME` environment variable above with `%USERPROFILE%`.**
-Then to view the docs in your browser, run Hugo and open up the link:
+If you want to compile with Sass/SCSS support use `--tags extended` and make sure `CGO_ENABLED=1` is set in your go environment. If you don't want to have CGO enabled, you may use the following command to temporarily enable CGO only for hugo compilation:
```bash
-▶ hugo server
-
-Started building sites ...
-.
-.
-Serving pages from memory
-Web Server is available at http://localhost:1313/ (bind address 127.0.0.1)
-Press Ctrl+C to stop
+CGO_ENABLED=1 go install --tags extended
```
+
+## The Hugo Documentation
+
+The Hugo documentation now lives in its own repository, see https://github.com/gohugoio/hugoDocs. But we do keep a version of that documentation as a `git subtree` in this repository. To build the sub folder `/docs` as a Hugo site, you need to clone this repo:
+
+```bash
+git clone git@github.com:gohugoio/hugo.git
+```
+## Contributing to Hugo
+
+**Note March 16th 2022:** We are currently very constrained on human resources to do code reviews, so we currently require any new Pull Requests to be limited to bug fixes closing an existing issue. Also, we have updated to Go 1.18, but we will currently not accept any generic rewrites, "interface{} to any" replacements and similar.
+
+For a complete guide to contributing to Hugo, see the [Contribution Guide](CONTRIBUTING.md).
+
+We welcome contributions to Hugo of any kind including documentation, themes,
+organization, tutorials, blog posts, bug reports, issues, feature requests,
+feature implementations, pull requests, answering questions on the forum,
+helping to manage issues, etc.
+
+The Hugo community and maintainers are [very active](https://github.com/gohugoio/hugo/pulse/monthly) and helpful, and the project benefits greatly from this activity.
+
+### Asking Support Questions
+
+We have an active [discussion forum](https://discourse.gohugo.io) where users and developers can ask questions.
+Please don't use the GitHub issue tracker to ask questions.
+
+### Reporting Issues
+
+If you believe you have found a defect in Hugo or its documentation, use
+the GitHub issue tracker to report the problem to the Hugo maintainers.
+If you're not sure if it's a bug or not, start by asking in the [discussion forum](https://discourse.gohugo.io).
+When reporting the issue, please provide the version of Hugo in use (`hugo version`).
+
+### Submitting Patches
+
+The Hugo project welcomes all contributors and contributions regardless of skill or experience level.
+If you are interested in helping with the project, we will help you with your contribution.
+Hugo is a very active project with many contributions happening daily.
+
+We want to create the best possible product for our users and the best contribution experience for our developers,
+we have a set of guidelines which ensure that all contributions are acceptable.
+The guidelines are not intended as a filter or barrier to participation.
+If you are unfamiliar with the contribution process, the Hugo team will help you and teach you how to bring your contribution in accordance with the guidelines.
+
+For a complete guide to contributing code to Hugo, see the [Contribution Guide](CONTRIBUTING.md).
+
+[Go]: https://golang.org/
+[Hugo Documentation]: https://gohugo.io/overview/introduction/
+
+## Dependencies
+
+Hugo stands on the shoulders of many great open source libraries.
+
+If you run `hugo env -v` you will get a complete and up to date list.
+
+In Hugo 0.100.1 that list is, in lexical order:
+
+```
+cloud.google.com/go/compute="v1.6.1"
+cloud.google.com/go/iam="v0.3.0"
+cloud.google.com/go/storage="v1.22.0"
+cloud.google.com/go="v0.101.0"
+github.com/Azure/azure-pipeline-go="v0.2.3"
+github.com/Azure/azure-storage-blob-go="v0.14.0"
+github.com/Azure/go-autorest/autorest/adal="v0.9.15"
+github.com/Azure/go-autorest/autorest/date="v0.3.0"
+github.com/Azure/go-autorest/autorest="v0.11.20"
+github.com/Azure/go-autorest/logger="v0.2.1"
+github.com/Azure/go-autorest/tracing="v0.6.0"
+github.com/BurntSushi/locker="v0.0.0-20171006230638-a6e239ea1c69"
+github.com/PuerkitoBio/purell="v1.1.1"
+github.com/PuerkitoBio/urlesc="v0.0.0-20170810143723-de5bf2ad4578"
+github.com/alecthomas/chroma="v0.10.0"
+github.com/armon/go-radix="v1.0.0"
+github.com/aws/aws-sdk-go-v2/config="v1.7.0"
+github.com/aws/aws-sdk-go-v2/credentials="v1.4.0"
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds="v1.5.0"
+github.com/aws/aws-sdk-go-v2/internal/ini="v1.2.2"
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url="v1.3.0"
+github.com/aws/aws-sdk-go-v2/service/sso="v1.4.0"
+github.com/aws/aws-sdk-go-v2/service/sts="v1.7.0"
+github.com/aws/aws-sdk-go-v2="v1.9.0"
+github.com/aws/aws-sdk-go="v1.43.5"
+github.com/aws/smithy-go="v1.8.0"
+github.com/bep/clock="v0.3.0"
+github.com/bep/debounce="v1.2.0"
+github.com/bep/gitmap="v1.1.2"
+github.com/bep/goat="v0.5.0"
+github.com/bep/godartsass="v0.14.0"
+github.com/bep/golibsass="v1.1.0"
+github.com/bep/gowebp="v0.1.0"
+github.com/bep/overlayfs="v0.6.0"
+github.com/bep/tmc="v0.5.1"
+github.com/clbanning/mxj/v2="v2.5.5"
+github.com/cli/safeexec="v1.0.0"
+github.com/cpuguy83/go-md2man/v2="v2.0.1"
+github.com/disintegration/gift="v1.2.1"
+github.com/dlclark/regexp2="v1.4.0"
+github.com/dustin/go-humanize="v1.0.0"
+github.com/evanw/esbuild="v0.14.42"
+github.com/frankban/quicktest="v1.14.3"
+github.com/fsnotify/fsnotify="v1.5.4"
+github.com/getkin/kin-openapi="v0.94.0"
+github.com/ghodss/yaml="v1.0.0"
+github.com/go-openapi/jsonpointer="v0.19.5"
+github.com/go-openapi/swag="v0.19.5"
+github.com/gobuffalo/flect="v0.2.5"
+github.com/gobwas/glob="v0.2.3"
+github.com/gohugoio/go-i18n/v2="v2.1.3-0.20210430103248-4c28c89f8013"
+github.com/gohugoio/locales="v0.14.0"
+github.com/gohugoio/localescompressed="v1.0.1"
+github.com/golang-jwt/jwt/v4="v4.0.0"
+github.com/golang/groupcache="v0.0.0-20210331224755-41bb18bfe9da"
+github.com/golang/protobuf="v1.5.2"
+github.com/google/go-cmp="v0.5.8"
+github.com/google/uuid="v1.3.0"
+github.com/google/wire="v0.5.0"
+github.com/googleapis/gax-go/v2="v2.3.0"
+github.com/googleapis/go-type-adapters="v1.0.0"
+github.com/gorilla/websocket="v1.5.0"
+github.com/hairyhenderson/go-codeowners="v0.2.3-0.20201026200250-cdc7c0759690"
+github.com/inconshreveable/mousetrap="v1.0.0"
+github.com/jdkato/prose="v1.2.1"
+github.com/jmespath/go-jmespath="v0.4.0"
+github.com/kr/pretty="v0.3.0"
+github.com/kr/text="v0.2.0"
+github.com/kyokomi/emoji/v2="v2.2.9"
+github.com/mailru/easyjson="v0.0.0-20190626092158-b2ccc519800e"
+github.com/mattn/go-ieproxy="v0.0.1"
+github.com/mattn/go-isatty="v0.0.14"
+github.com/mattn/go-runewidth="v0.0.9"
+github.com/mitchellh/hashstructure="v1.1.0"
+github.com/mitchellh/mapstructure="v1.5.0"
+github.com/muesli/smartcrop="v0.3.0"
+github.com/niklasfasching/go-org="v1.6.2"
+github.com/olekukonko/tablewriter="v0.0.5"
+github.com/pelletier/go-toml/v2="v2.0.0-beta.7.0.20220408132554-2377ac4bc04c"
+github.com/rogpeppe/go-internal="v1.8.1"
+github.com/russross/blackfriday/v2="v2.1.0"
+github.com/rwcarlsen/goexif="v0.0.0-20190401172101-9e8deecbddbd"
+github.com/sanity-io/litter="v1.5.5"
+github.com/sass/libsass="3.6.5"
+github.com/spf13/afero="v1.8.2"
+github.com/spf13/cast="v1.5.0"
+github.com/spf13/cobra="v1.4.0"
+github.com/spf13/fsync="v0.9.0"
+github.com/spf13/jwalterweatherman="v1.1.0"
+github.com/spf13/pflag="v1.0.5"
+github.com/tdewolff/minify/v2="v2.11.5"
+github.com/tdewolff/parse/v2="v2.5.31"
+github.com/webmproject/libwebp="v1.2.0"
+github.com/yuin/goldmark="v1.4.12"
+go.opencensus.io="v0.23.0"
+go.uber.org/atomic="v1.9.0"
+gocloud.dev="v0.24.0"
+golang.org/x/crypto="v0.0.0-20211108221036-ceb1ce70b4fa"
+golang.org/x/image="v0.0.0-20211028202545-6944b10bf410"
+golang.org/x/net="v0.0.0-20220425223048-2871e0cb64e4"
+golang.org/x/oauth2="v0.0.0-20220411215720-9780585627b5"
+golang.org/x/sync="v0.0.0-20210220032951-036812b2e83c"
+golang.org/x/sys="v0.0.0-20220422013727-9388b58f7150"
+golang.org/x/text="v0.3.7"
+golang.org/x/tools="v0.1.10"
+golang.org/x/xerrors="v0.0.0-20220411194840-2f41105eb62f"
+google.golang.org/api="v0.76.0"
+google.golang.org/genproto="v0.0.0-20220426171045-31bebdecfb46"
+google.golang.org/grpc="v1.46.0"
+google.golang.org/protobuf="v1.28.0"
+gopkg.in/yaml.v2="v2.4.0"
+```
+
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..320b2ff54
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+## Security Policy
+
+### Reporting a Vulnerability
+
+Please report (suspected) security vulnerabilities to **[bjorn.erik.pedersen@gmail.com](mailto:bjorn.erik.pedersen@gmail.com)**. You will receive a response from us within 48 hours. If we can confirm the issue, we will release a patch as soon as possible depending on the complexity of the issue but historically within days.
+
+Also see [Hugo's Security Model](https://gohugo.io/about/security-model/).
diff --git a/bench.sh b/bench.sh
new file mode 100755
index 000000000..c6a20a7e3
--- /dev/null
+++ b/bench.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+
+# allow user to override go executable by running as GOEXE=xxx make ...
+GOEXE="${GOEXE-go}"
+
+# Convenience script to
+# - For a given branch
+# - Run benchmark tests for a given package
+# - Do the same for master
+# - then compare the two runs with benchcmp
+
+benchFilter=".*"
+
+if (( $# < 2 ));
+ then
+ echo "USAGE: ./bench.sh <git-branch> <package-to-bench> (and <benchmark filter> (regexp, optional))"
+ exit 1
+fi
+
+
+
+if [ $# -eq 3 ]; then
+ benchFilter=$3
+fi
+
+
+BRANCH=$1
+PACKAGE=$2
+
+git checkout $BRANCH
+"${GOEXE}" test -test.run=NONE -bench="$benchFilter" -test.benchmem=true ./$PACKAGE > /tmp/bench-$PACKAGE-$BRANCH.txt
+
+git checkout master
+"${GOEXE}" test -test.run=NONE -bench="$benchFilter" -test.benchmem=true ./$PACKAGE > /tmp/bench-$PACKAGE-master.txt
+
+
+benchcmp /tmp/bench-$PACKAGE-master.txt /tmp/bench-$PACKAGE-$BRANCH.txt
diff --git a/benchSite.sh b/benchSite.sh
new file mode 100755
index 000000000..aae21231c
--- /dev/null
+++ b/benchSite.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# allow user to override go executable by running as GOEXE=xxx make ...
+GOEXE="${GOEXE-go}"
+
+# Send in a regexp matching the benchmarks you want to run, i.e. './benchSite.sh "YAML"'.
+# Note the quotes, which will be needed for more complex expressions.
+# The above will run all variations, but only for front matter YAML.
+
+echo "Running with BenchmarkSiteBuilding/${1}"
+
+"${GOEXE}" test -run="NONE" -bench="BenchmarkSiteBuilding/${1}" -test.benchmem=true ./hugolib -memprofile mem.prof -count 3 -cpuprofile cpu.prof
diff --git a/benchbep.sh b/benchbep.sh
new file mode 100755
index 000000000..efd616c88
--- /dev/null
+++ b/benchbep.sh
@@ -0,0 +1 @@
+gobench -package=./hugolib -bench="BenchmarkSiteNew/Deep_content_tree" \ No newline at end of file
diff --git a/bepdock.sh b/bepdock.sh
new file mode 100755
index 000000000..a7ac0c639
--- /dev/null
+++ b/bepdock.sh
@@ -0,0 +1 @@
+docker run --rm --mount type=bind,source="$(pwd)",target=/hugo -w /hugo -i -t bepsays/ci-goreleaser:1.11-2 /bin/bash \ No newline at end of file
diff --git a/bufferpool/bufpool.go b/bufferpool/bufpool.go
new file mode 100644
index 000000000..f05675e3e
--- /dev/null
+++ b/bufferpool/bufpool.go
@@ -0,0 +1,38 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package bufferpool provides a pool of bytes buffers.
+package bufferpool
+
+import (
+ "bytes"
+ "sync"
+)
+
+var bufferPool = &sync.Pool{
+ New: func() any {
+ return &bytes.Buffer{}
+ },
+}
+
+// GetBuffer returns a buffer from the pool.
+func GetBuffer() (buf *bytes.Buffer) {
+ return bufferPool.Get().(*bytes.Buffer)
+}
+
+// PutBuffer returns a buffer to the pool.
+// The buffer is reset before it is put back into circulation.
+func PutBuffer(buf *bytes.Buffer) {
+ buf.Reset()
+ bufferPool.Put(buf)
+}
diff --git a/bufferpool/bufpool_test.go b/bufferpool/bufpool_test.go
new file mode 100644
index 000000000..023724b97
--- /dev/null
+++ b/bufferpool/bufpool_test.go
@@ -0,0 +1,31 @@
+// Copyright 2016-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bufferpool
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestBufferPool(t *testing.T) {
+ c := qt.New(t)
+
+ buff := GetBuffer()
+ buff.WriteString("do be do be do")
+ c.Assert(buff.String(), qt.Equals, "do be do be do")
+ PutBuffer(buff)
+
+ c.Assert(buff.Len(), qt.Equals, 0)
+}
diff --git a/cache/filecache/filecache.go b/cache/filecache/filecache.go
new file mode 100644
index 000000000..63d939ef6
--- /dev/null
+++ b/cache/filecache/filecache.go
@@ -0,0 +1,384 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filecache
+
+import (
+ "bytes"
+ "errors"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/BurntSushi/locker"
+ "github.com/spf13/afero"
+)
+
+// ErrFatal can be used to signal an unrecoverable error.
+var ErrFatal = errors.New("fatal filecache error")
+
+const (
+ filecacheRootDirname = "filecache"
+)
+
+// Cache caches a set of files in a directory. This is usually a file on
+// disk, but since this is backed by an Afero file system, it can be anything.
+type Cache struct {
+ Fs afero.Fs
+
+ // Max age for items in this cache. Negative duration means forever,
+ // 0 is effectively turning this cache off.
+ maxAge time.Duration
+
+ // When set, we just remove this entire root directory on expiration.
+ pruneAllRootDir string
+
+ nlocker *lockTracker
+}
+
+type lockTracker struct {
+ seenMu sync.RWMutex
+ seen map[string]struct{}
+
+ *locker.Locker
+}
+
+// Lock tracks the ids in use. We use this information to do garbage collection
+// after a Hugo build.
+func (l *lockTracker) Lock(id string) {
+ l.seenMu.RLock()
+ if _, seen := l.seen[id]; !seen {
+ l.seenMu.RUnlock()
+ l.seenMu.Lock()
+ l.seen[id] = struct{}{}
+ l.seenMu.Unlock()
+ } else {
+ l.seenMu.RUnlock()
+ }
+
+ l.Locker.Lock(id)
+}
+
+// ItemInfo contains info about a cached file.
+type ItemInfo struct {
+ // This is the file's name relative to the cache's filesystem.
+ Name string
+}
+
+// NewCache creates a new file cache with the given filesystem and max age.
+func NewCache(fs afero.Fs, maxAge time.Duration, pruneAllRootDir string) *Cache {
+ return &Cache{
+ Fs: fs,
+ nlocker: &lockTracker{Locker: locker.NewLocker(), seen: make(map[string]struct{})},
+ maxAge: maxAge,
+ pruneAllRootDir: pruneAllRootDir,
+ }
+}
+
+// lockedFile is a file with a lock that is released on Close.
+type lockedFile struct {
+ afero.File
+ unlock func()
+}
+
+func (l *lockedFile) Close() error {
+ defer l.unlock()
+ return l.File.Close()
+}
+
+// WriteCloser returns a transactional writer into the cache.
+// It's important that it's closed when done.
+func (c *Cache) WriteCloser(id string) (ItemInfo, io.WriteCloser, error) {
+ id = cleanID(id)
+ c.nlocker.Lock(id)
+
+ info := ItemInfo{Name: id}
+
+ f, err := helpers.OpenFileForWriting(c.Fs, id)
+ if err != nil {
+ c.nlocker.Unlock(id)
+ return info, nil, err
+ }
+
+ return info, &lockedFile{
+ File: f,
+ unlock: func() { c.nlocker.Unlock(id) },
+ }, nil
+}
+
+// ReadOrCreate tries to lookup the file in cache.
+// If found, it is passed to read and then closed.
+// If not found a new file is created and passed to create, which should close
+// it when done.
+func (c *Cache) ReadOrCreate(id string,
+ read func(info ItemInfo, r io.ReadSeeker) error,
+ create func(info ItemInfo, w io.WriteCloser) error) (info ItemInfo, err error) {
+ id = cleanID(id)
+
+ c.nlocker.Lock(id)
+ defer c.nlocker.Unlock(id)
+
+ info = ItemInfo{Name: id}
+
+ if r := c.getOrRemove(id); r != nil {
+ err = read(info, r)
+ defer r.Close()
+ if err == nil || err == ErrFatal {
+ // See https://github.com/gohugoio/hugo/issues/6401
+ // To recover from file corruption we handle read errors
+ // as the cache item was not found.
+ // Any file permission issue will also fail in the next step.
+ return
+ }
+ }
+
+ f, err := helpers.OpenFileForWriting(c.Fs, id)
+ if err != nil {
+ return
+ }
+
+ err = create(info, f)
+
+ return
+}
+
+// GetOrCreate tries to get the file with the given id from cache. If not found or expired, create will
+// be invoked and the result cached.
+// This method is protected by a named lock using the given id as identifier.
+func (c *Cache) GetOrCreate(id string, create func() (io.ReadCloser, error)) (ItemInfo, io.ReadCloser, error) {
+ id = cleanID(id)
+
+ c.nlocker.Lock(id)
+ defer c.nlocker.Unlock(id)
+
+ info := ItemInfo{Name: id}
+
+ if r := c.getOrRemove(id); r != nil {
+ return info, r, nil
+ }
+
+ var (
+ r io.ReadCloser
+ err error
+ )
+
+ r, err = create()
+ if err != nil {
+ return info, nil, err
+ }
+
+ if c.maxAge == 0 {
+ // No caching.
+ return info, hugio.ToReadCloser(r), nil
+ }
+
+ var buff bytes.Buffer
+ return info,
+ hugio.ToReadCloser(&buff),
+ afero.WriteReader(c.Fs, id, io.TeeReader(r, &buff))
+}
+
+// GetOrCreateBytes is the same as GetOrCreate, but produces a byte slice.
+func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (ItemInfo, []byte, error) {
+ id = cleanID(id)
+
+ c.nlocker.Lock(id)
+ defer c.nlocker.Unlock(id)
+
+ info := ItemInfo{Name: id}
+
+ if r := c.getOrRemove(id); r != nil {
+ defer r.Close()
+ b, err := ioutil.ReadAll(r)
+ return info, b, err
+ }
+
+ var (
+ b []byte
+ err error
+ )
+
+ b, err = create()
+ if err != nil {
+ return info, nil, err
+ }
+
+ if c.maxAge == 0 {
+ return info, b, nil
+ }
+
+ if err := afero.WriteReader(c.Fs, id, bytes.NewReader(b)); err != nil {
+ return info, nil, err
+ }
+ return info, b, nil
+}
+
+// GetBytes gets the file content with the given id from the cache, nil if none found.
+func (c *Cache) GetBytes(id string) (ItemInfo, []byte, error) {
+ id = cleanID(id)
+
+ c.nlocker.Lock(id)
+ defer c.nlocker.Unlock(id)
+
+ info := ItemInfo{Name: id}
+
+ if r := c.getOrRemove(id); r != nil {
+ defer r.Close()
+ b, err := ioutil.ReadAll(r)
+ return info, b, err
+ }
+
+ return info, nil, nil
+}
+
+// Get gets the file with the given id from the cache, nil if none found.
+func (c *Cache) Get(id string) (ItemInfo, io.ReadCloser, error) {
+ id = cleanID(id)
+
+ c.nlocker.Lock(id)
+ defer c.nlocker.Unlock(id)
+
+ info := ItemInfo{Name: id}
+
+ r := c.getOrRemove(id)
+
+ return info, r, nil
+}
+
+// getOrRemove gets the file with the given id. If it's expired, it will
+// be removed.
+func (c *Cache) getOrRemove(id string) hugio.ReadSeekCloser {
+ if c.maxAge == 0 {
+ // No caching.
+ return nil
+ }
+
+ if c.maxAge > 0 {
+ fi, err := c.Fs.Stat(id)
+ if err != nil {
+ return nil
+ }
+
+ if c.isExpired(fi.ModTime()) {
+ c.Fs.Remove(id)
+ return nil
+ }
+ }
+
+ f, err := c.Fs.Open(id)
+ if err != nil {
+ return nil
+ }
+
+ return f
+}
+
+func (c *Cache) isExpired(modTime time.Time) bool {
+ if c.maxAge < 0 {
+ return false
+ }
+
+ // Note the use of time.Since here.
+ // We cannot use Hugo's global Clock for this.
+ return c.maxAge == 0 || time.Since(modTime) > c.maxAge
+}
+
+// For testing
+func (c *Cache) getString(id string) string {
+ id = cleanID(id)
+
+ c.nlocker.Lock(id)
+ defer c.nlocker.Unlock(id)
+
+ f, err := c.Fs.Open(id)
+ if err != nil {
+ return ""
+ }
+ defer f.Close()
+
+ b, _ := ioutil.ReadAll(f)
+ return string(b)
+}
+
+// Caches is a named set of caches.
+type Caches map[string]*Cache
+
+// Get gets a named cache, nil if none found.
+func (f Caches) Get(name string) *Cache {
+ return f[strings.ToLower(name)]
+}
+
+// NewCaches creates a new set of file caches from the given
+// configuration.
+func NewCaches(p *helpers.PathSpec) (Caches, error) {
+ var dcfg Configs
+ if c, ok := p.Cfg.Get("filecacheConfigs").(Configs); ok {
+ dcfg = c
+ } else {
+ var err error
+ dcfg, err = DecodeConfig(p.Fs.Source, p.Cfg)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ fs := p.Fs.Source
+
+ m := make(Caches)
+ for k, v := range dcfg {
+ var cfs afero.Fs
+
+ if v.isResourceDir {
+ cfs = p.BaseFs.ResourcesCache
+ } else {
+ cfs = fs
+ }
+
+ if cfs == nil {
+ // TODO(bep) we still have some places that do not initialize the
+ // full dependencies of a site, e.g. the import Jekyll command.
+ // That command does not need these caches, so let us just continue
+ // for now.
+ continue
+ }
+
+ baseDir := v.Dir
+
+ if err := cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
+ return nil, err
+ }
+
+ bfs := afero.NewBasePathFs(cfs, baseDir)
+
+ var pruneAllRootDir string
+ if k == cacheKeyModules {
+ pruneAllRootDir = "pkg"
+ }
+
+ m[k] = NewCache(bfs, v.MaxAge, pruneAllRootDir)
+ }
+
+ return m, nil
+}
+
+func cleanID(name string) string {
+ return strings.TrimPrefix(filepath.Clean(name), helpers.FilePathSeparator)
+}
diff --git a/cache/filecache/filecache_config.go b/cache/filecache/filecache_config.go
new file mode 100644
index 000000000..a82133ab7
--- /dev/null
+++ b/cache/filecache/filecache_config.go
@@ -0,0 +1,248 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filecache
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "errors"
+
+ "github.com/mitchellh/mapstructure"
+ "github.com/spf13/afero"
+)
+
+const (
+ cachesConfigKey = "caches"
+
+ resourcesGenDir = ":resourceDir/_gen"
+ cacheDirProject = ":cacheDir/:project"
+)
+
+var defaultCacheConfig = Config{
+ MaxAge: -1, // Never expire
+ Dir: cacheDirProject,
+}
+
+const (
+ cacheKeyGetJSON = "getjson"
+ cacheKeyGetCSV = "getcsv"
+ cacheKeyImages = "images"
+ cacheKeyAssets = "assets"
+ cacheKeyModules = "modules"
+ cacheKeyGetResource = "getresource"
+)
+
+type Configs map[string]Config
+
+func (c Configs) CacheDirModules() string {
+ return c[cacheKeyModules].Dir
+}
+
+var defaultCacheConfigs = Configs{
+ cacheKeyModules: {
+ MaxAge: -1,
+ Dir: ":cacheDir/modules",
+ },
+ cacheKeyGetJSON: defaultCacheConfig,
+ cacheKeyGetCSV: defaultCacheConfig,
+ cacheKeyImages: {
+ MaxAge: -1,
+ Dir: resourcesGenDir,
+ },
+ cacheKeyAssets: {
+ MaxAge: -1,
+ Dir: resourcesGenDir,
+ },
+ cacheKeyGetResource: Config{
+ MaxAge: -1, // Never expire
+ Dir: cacheDirProject,
+ },
+}
+
+type Config struct {
+ // Max age of cache entries in this cache. Any items older than this will
+ // be removed and not returned from the cache.
+ // a negative value means forever, 0 means cache is disabled.
+ MaxAge time.Duration
+
+ // The directory where files are stored.
+ Dir string
+
+	// Whether resources/_gen will get its own composite filesystem that
+	// also checks any theme.
+ isResourceDir bool
+}
+
+// GetJSONCache gets the file cache for getJSON.
+func (f Caches) GetJSONCache() *Cache {
+ return f[cacheKeyGetJSON]
+}
+
+// GetCSVCache gets the file cache for getCSV.
+func (f Caches) GetCSVCache() *Cache {
+ return f[cacheKeyGetCSV]
+}
+
+// ImageCache gets the file cache for processed images.
+func (f Caches) ImageCache() *Cache {
+ return f[cacheKeyImages]
+}
+
+// ModulesCache gets the file cache for Hugo Modules.
+func (f Caches) ModulesCache() *Cache {
+ return f[cacheKeyModules]
+}
+
+// AssetsCache gets the file cache for assets (processed resources, SCSS etc.).
+func (f Caches) AssetsCache() *Cache {
+ return f[cacheKeyAssets]
+}
+
+// GetResourceCache gets the file cache for remote resources.
+func (f Caches) GetResourceCache() *Cache {
+ return f[cacheKeyGetResource]
+}
+
+func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
+ c := make(Configs)
+ valid := make(map[string]bool)
+ // Add defaults
+ for k, v := range defaultCacheConfigs {
+ c[k] = v
+ valid[k] = true
+ }
+
+ m := cfg.GetStringMap(cachesConfigKey)
+
+ _, isOsFs := fs.(*afero.OsFs)
+
+ for k, v := range m {
+ if _, ok := v.(maps.Params); !ok {
+ continue
+ }
+ cc := defaultCacheConfig
+
+ dc := &mapstructure.DecoderConfig{
+ Result: &cc,
+ DecodeHook: mapstructure.StringToTimeDurationHookFunc(),
+ WeaklyTypedInput: true,
+ }
+
+ decoder, err := mapstructure.NewDecoder(dc)
+ if err != nil {
+ return c, err
+ }
+
+ if err := decoder.Decode(v); err != nil {
+ return nil, fmt.Errorf("failed to decode filecache config: %w", err)
+ }
+
+ if cc.Dir == "" {
+ return c, errors.New("must provide cache Dir")
+ }
+
+ name := strings.ToLower(k)
+ if !valid[name] {
+ return nil, fmt.Errorf("%q is not a valid cache name", name)
+ }
+
+ c[name] = cc
+ }
+
+ // This is a very old flag in Hugo, but we need to respect it.
+ disabled := cfg.GetBool("ignoreCache")
+
+ for k, v := range c {
+ dir := filepath.ToSlash(filepath.Clean(v.Dir))
+ hadSlash := strings.HasPrefix(dir, "/")
+ parts := strings.Split(dir, "/")
+
+ for i, part := range parts {
+ if strings.HasPrefix(part, ":") {
+ resolved, isResource, err := resolveDirPlaceholder(fs, cfg, part)
+ if err != nil {
+ return c, err
+ }
+ if isResource {
+ v.isResourceDir = true
+ }
+ parts[i] = resolved
+ }
+ }
+
+ dir = path.Join(parts...)
+ if hadSlash {
+ dir = "/" + dir
+ }
+ v.Dir = filepath.Clean(filepath.FromSlash(dir))
+
+ if !v.isResourceDir {
+ if isOsFs && !filepath.IsAbs(v.Dir) {
+ return c, fmt.Errorf("%q must resolve to an absolute directory", v.Dir)
+ }
+
+ // Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
+ if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
+ return c, fmt.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+ }
+ }
+
+ if !strings.HasPrefix(v.Dir, "_gen") {
+ // We do cache eviction (file removes) and since the user can set
+ // his/hers own cache directory, we really want to make sure
+ // we do not delete any files that do not belong to this cache.
+ // We do add the cache name as the root, but this is an extra safe
+ // guard. We skip the files inside /resources/_gen/ because
+ // that would be breaking.
+ v.Dir = filepath.Join(v.Dir, filecacheRootDirname, k)
+ } else {
+ v.Dir = filepath.Join(v.Dir, k)
+ }
+
+ if disabled {
+ v.MaxAge = 0
+ }
+
+ c[k] = v
+ }
+
+ return c, nil
+}
+
+// Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
+func resolveDirPlaceholder(fs afero.Fs, cfg config.Provider, placeholder string) (cacheDir string, isResource bool, err error) {
+ workingDir := cfg.GetString("workingDir")
+
+ switch strings.ToLower(placeholder) {
+ case ":resourcedir":
+ return "", true, nil
+ case ":cachedir":
+ d, err := helpers.GetCacheDir(fs, cfg)
+ return d, false, err
+ case ":project":
+ return filepath.Base(workingDir), false, nil
+ }
+
+ return "", false, fmt.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
+}
diff --git a/cache/filecache/filecache_config_test.go b/cache/filecache/filecache_config_test.go
new file mode 100644
index 000000000..1ed020ef1
--- /dev/null
+++ b/cache/filecache/filecache_config_test.go
@@ -0,0 +1,198 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filecache
+
+import (
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDecodeConfig(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
+[caches]
+[caches.getJSON]
+maxAge = "10m"
+dir = "/path/to/c1"
+[caches.getCSV]
+maxAge = "11h"
+dir = "/path/to/c2"
+[caches.images]
+dir = "/path/to/c3"
+[caches.getResource]
+dir = "/path/to/c4"
+`
+
+ cfg, err := config.FromConfigString(configStr, "toml")
+ c.Assert(err, qt.IsNil)
+ fs := afero.NewMemMapFs()
+ decoded, err := DecodeConfig(fs, cfg)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(len(decoded), qt.Equals, 6)
+
+ c2 := decoded["getcsv"]
+ c.Assert(c2.MaxAge.String(), qt.Equals, "11h0m0s")
+ c.Assert(c2.Dir, qt.Equals, filepath.FromSlash("/path/to/c2/filecache/getcsv"))
+
+ c3 := decoded["images"]
+ c.Assert(c3.MaxAge, qt.Equals, time.Duration(-1))
+ c.Assert(c3.Dir, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))
+
+ c4 := decoded["getresource"]
+ c.Assert(c4.MaxAge, qt.Equals, time.Duration(-1))
+ c.Assert(c4.Dir, qt.Equals, filepath.FromSlash("/path/to/c4/filecache/getresource"))
+}
+
+func TestDecodeConfigIgnoreCache(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
+ignoreCache = true
+[caches]
+[caches.getJSON]
+maxAge = 1234
+dir = "/path/to/c1"
+[caches.getCSV]
+maxAge = 3456
+dir = "/path/to/c2"
+[caches.images]
+dir = "/path/to/c3"
+[caches.getResource]
+dir = "/path/to/c4"
+`
+
+ cfg, err := config.FromConfigString(configStr, "toml")
+ c.Assert(err, qt.IsNil)
+ fs := afero.NewMemMapFs()
+ decoded, err := DecodeConfig(fs, cfg)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(len(decoded), qt.Equals, 6)
+
+ for _, v := range decoded {
+ c.Assert(v.MaxAge, qt.Equals, time.Duration(0))
+ }
+}
+
+func TestDecodeConfigDefault(t *testing.T) {
+ c := qt.New(t)
+ cfg := newTestConfig()
+
+ if runtime.GOOS == "windows" {
+ cfg.Set("resourceDir", "c:\\cache\\resources")
+ cfg.Set("cacheDir", "c:\\cache\\thecache")
+
+ } else {
+ cfg.Set("resourceDir", "/cache/resources")
+ cfg.Set("cacheDir", "/cache/thecache")
+ }
+
+ fs := afero.NewMemMapFs()
+
+ decoded, err := DecodeConfig(fs, cfg)
+
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(len(decoded), qt.Equals, 6)
+
+ imgConfig := decoded[cacheKeyImages]
+ jsonConfig := decoded[cacheKeyGetJSON]
+
+ if runtime.GOOS == "windows" {
+ c.Assert(imgConfig.Dir, qt.Equals, filepath.FromSlash("_gen/images"))
+ } else {
+ c.Assert(imgConfig.Dir, qt.Equals, "_gen/images")
+ c.Assert(jsonConfig.Dir, qt.Equals, "/cache/thecache/hugoproject/filecache/getjson")
+ }
+
+ c.Assert(imgConfig.isResourceDir, qt.Equals, true)
+ c.Assert(jsonConfig.isResourceDir, qt.Equals, false)
+}
+
+func TestDecodeConfigInvalidDir(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
+[caches]
+[caches.getJSON]
+maxAge = "10m"
+dir = "/"
+
+`
+ if runtime.GOOS == "windows" {
+ configStr = strings.Replace(configStr, "/", "c:\\\\", 1)
+ }
+
+ cfg, err := config.FromConfigString(configStr, "toml")
+ c.Assert(err, qt.IsNil)
+ fs := afero.NewMemMapFs()
+
+ _, err = DecodeConfig(fs, cfg)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+func newTestConfig() config.Provider {
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+ cfg.Set("contentDir", "content")
+ cfg.Set("dataDir", "data")
+ cfg.Set("resourceDir", "resources")
+ cfg.Set("i18nDir", "i18n")
+ cfg.Set("layoutDir", "layouts")
+ cfg.Set("archetypeDir", "archetypes")
+ cfg.Set("assetDir", "assets")
+
+ return cfg
+}
diff --git a/cache/filecache/filecache_pruner.go b/cache/filecache/filecache_pruner.go
new file mode 100644
index 000000000..e5e571972
--- /dev/null
+++ b/cache/filecache/filecache_pruner.go
@@ -0,0 +1,126 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filecache
+
+import (
+ "fmt"
+ "io"
+ "os"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/spf13/afero"
+)
+
+// Prune removes expired and unused items from this cache.
+// The last one requires a full build so the cache usage can be tracked.
+// Note that we operate directly on the filesystem here, so this is not
+// thread safe.
+func (c Caches) Prune() (int, error) {
+ counter := 0
+ for k, cache := range c {
+
+ count, err := cache.Prune(false)
+
+ counter += count
+
+ if err != nil {
+ if os.IsNotExist(err) {
+ continue
+ }
+ return counter, fmt.Errorf("failed to prune cache %q: %w", k, err)
+ }
+
+ }
+
+ return counter, nil
+}
+
+// Prune removes expired and unused items from this cache.
+// If force is set, everything will be removed regardless of expiry time.
+func (c *Cache) Prune(force bool) (int, error) {
+ if c.pruneAllRootDir != "" {
+ return c.pruneRootDir(force)
+ }
+
+ counter := 0
+
+ err := afero.Walk(c.Fs, "", func(name string, info os.FileInfo, err error) error {
+ if info == nil {
+ return nil
+ }
+
+ name = cleanID(name)
+
+ if info.IsDir() {
+ f, err := c.Fs.Open(name)
+ if err != nil {
+ // This cache dir may not exist.
+ return nil
+ }
+ defer f.Close()
+ _, err = f.Readdirnames(1)
+ if err == io.EOF {
+ // Empty dir.
+ err = c.Fs.Remove(name)
+ }
+
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ return nil
+ }
+
+ shouldRemove := force || c.isExpired(info.ModTime())
+
+ if !shouldRemove && len(c.nlocker.seen) > 0 {
+ // Remove it if it's not been touched/used in the last build.
+ _, seen := c.nlocker.seen[name]
+ shouldRemove = !seen
+ }
+
+ if shouldRemove {
+ err := c.Fs.Remove(name)
+ if err == nil {
+ counter++
+ }
+
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ }
+
+ return nil
+ })
+
+ return counter, err
+}
+
+func (c *Cache) pruneRootDir(force bool) (int, error) {
+ info, err := c.Fs.Stat(c.pruneAllRootDir)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return 0, nil
+ }
+ return 0, err
+ }
+
+ if !force && !c.isExpired(info.ModTime()) {
+ return 0, nil
+ }
+
+ return hugofs.MakeReadableAndRemoveAllModulePkgDir(c.Fs, c.pruneAllRootDir)
+}
diff --git a/cache/filecache/filecache_pruner_test.go b/cache/filecache/filecache_pruner_test.go
new file mode 100644
index 000000000..46e1317ce
--- /dev/null
+++ b/cache/filecache/filecache_pruner_test.go
@@ -0,0 +1,110 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filecache
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPrune(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
+[caches]
+[caches.getjson]
+maxAge = "200ms"
+dir = "/cache/c"
+[caches.getcsv]
+maxAge = "200ms"
+dir = "/cache/d"
+[caches.assets]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
+[caches.images]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
+`
+
+ for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
+ msg := qt.Commentf("cache: %s", name)
+ p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
+ caches, err := NewCaches(p)
+ c.Assert(err, qt.IsNil)
+ cache := caches[name]
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ cache.GetOrCreateBytes(id, func() ([]byte, error) {
+ return []byte("abc"), nil
+ })
+ if i == 4 {
+ // This will expire the first 5
+ time.Sleep(201 * time.Millisecond)
+ }
+ }
+
+ count, err := caches.Prune()
+ c.Assert(err, qt.IsNil)
+ c.Assert(count, qt.Equals, 5, msg)
+
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ v := cache.getString(id)
+ if i < 5 {
+ c.Assert(v, qt.Equals, "")
+ } else {
+ c.Assert(v, qt.Equals, "abc")
+ }
+ }
+
+ caches, err = NewCaches(p)
+ c.Assert(err, qt.IsNil)
+ cache = caches[name]
+ // Touch one and then prune.
+ cache.GetOrCreateBytes("i5", func() ([]byte, error) {
+ return []byte("abc"), nil
+ })
+
+ count, err = caches.Prune()
+ c.Assert(err, qt.IsNil)
+ c.Assert(count, qt.Equals, 4)
+
+ // Now only the i5 should be left.
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ v := cache.getString(id)
+ if i != 5 {
+ c.Assert(v, qt.Equals, "")
+ } else {
+ c.Assert(v, qt.Equals, "abc")
+ }
+ }
+
+ }
+}
diff --git a/cache/filecache/filecache_test.go b/cache/filecache/filecache_test.go
new file mode 100644
index 000000000..47b5a7fcf
--- /dev/null
+++ b/cache/filecache/filecache_test.go
@@ -0,0 +1,349 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filecache
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/gobwas/glob"
+
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestFileCache(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ tempWorkingDir, err := ioutil.TempDir("", "hugo_filecache_test_work")
+ c.Assert(err, qt.IsNil)
+ defer os.Remove(tempWorkingDir)
+
+ tempCacheDir, err := ioutil.TempDir("", "hugo_filecache_test_cache")
+ c.Assert(err, qt.IsNil)
+ defer os.Remove(tempCacheDir)
+
+ osfs := afero.NewOsFs()
+
+ for _, test := range []struct {
+ cacheDir string
+ workingDir string
+ }{
+ // Run with same dirs twice to make sure that works.
+ {tempCacheDir, tempWorkingDir},
+ {tempCacheDir, tempWorkingDir},
+ } {
+
+ configStr := `
+workingDir = "WORKING_DIR"
+resourceDir = "resources"
+cacheDir = "CACHEDIR"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
+[caches]
+[caches.getJSON]
+maxAge = "10h"
+dir = ":cacheDir/c"
+
+`
+
+ winPathSep := "\\\\"
+
+ replacer := strings.NewReplacer("CACHEDIR", test.cacheDir, "WORKING_DIR", test.workingDir)
+
+ configStr = replacer.Replace(configStr)
+ configStr = strings.Replace(configStr, "\\", winPathSep, -1)
+
+ p := newPathsSpec(t, osfs, configStr)
+
+ caches, err := NewCaches(p)
+ c.Assert(err, qt.IsNil)
+
+ cache := caches.Get("GetJSON")
+ c.Assert(cache, qt.Not(qt.IsNil))
+ c.Assert(cache.maxAge.String(), qt.Equals, "10h0m0s")
+
+ bfs, ok := cache.Fs.(*afero.BasePathFs)
+ c.Assert(ok, qt.Equals, true)
+ filename, err := bfs.RealPath("key")
+ c.Assert(err, qt.IsNil)
+ if test.cacheDir != "" {
+ c.Assert(filename, qt.Equals, filepath.Join(test.cacheDir, "c/"+filecacheRootDirname+"/getjson/key"))
+ } else {
+ // Temp dir.
+ c.Assert(filename, qt.Matches, ".*hugo_cache.*"+filecacheRootDirname+".*key")
+ }
+
+ cache = caches.Get("Images")
+ c.Assert(cache, qt.Not(qt.IsNil))
+ c.Assert(cache.maxAge, qt.Equals, time.Duration(-1))
+ bfs, ok = cache.Fs.(*afero.BasePathFs)
+ c.Assert(ok, qt.Equals, true)
+ filename, _ = bfs.RealPath("key")
+ c.Assert(filename, qt.Equals, filepath.FromSlash("_gen/images/key"))
+
+ rf := func(s string) func() (io.ReadCloser, error) {
+ return func() (io.ReadCloser, error) {
+ return struct {
+ io.ReadSeeker
+ io.Closer
+ }{
+ strings.NewReader(s),
+ ioutil.NopCloser(nil),
+ }, nil
+ }
+ }
+
+ bf := func() ([]byte, error) {
+ return []byte("bcd"), nil
+ }
+
+ for _, ca := range []*Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
+ for i := 0; i < 2; i++ {
+ info, r, err := ca.GetOrCreate("a", rf("abc"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+ c.Assert(info.Name, qt.Equals, "a")
+ b, _ := ioutil.ReadAll(r)
+ r.Close()
+ c.Assert(string(b), qt.Equals, "abc")
+
+ info, b, err = ca.GetOrCreateBytes("b", bf)
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+ c.Assert(info.Name, qt.Equals, "b")
+ c.Assert(string(b), qt.Equals, "bcd")
+
+ _, b, err = ca.GetOrCreateBytes("a", bf)
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(b), qt.Equals, "abc")
+
+ _, r, err = ca.GetOrCreate("a", rf("bcd"))
+ c.Assert(err, qt.IsNil)
+ b, _ = ioutil.ReadAll(r)
+ r.Close()
+ c.Assert(string(b), qt.Equals, "abc")
+ }
+ }
+
+ c.Assert(caches.Get("getJSON"), qt.Not(qt.IsNil))
+
+ info, w, err := caches.ImageCache().WriteCloser("mykey")
+ c.Assert(err, qt.IsNil)
+ c.Assert(info.Name, qt.Equals, "mykey")
+ io.WriteString(w, "Hugo is great!")
+ w.Close()
+ c.Assert(caches.ImageCache().getString("mykey"), qt.Equals, "Hugo is great!")
+
+ info, r, err := caches.ImageCache().Get("mykey")
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+ c.Assert(info.Name, qt.Equals, "mykey")
+ b, _ := ioutil.ReadAll(r)
+ r.Close()
+ c.Assert(string(b), qt.Equals, "Hugo is great!")
+
+ info, b, err = caches.ImageCache().GetBytes("mykey")
+ c.Assert(err, qt.IsNil)
+ c.Assert(info.Name, qt.Equals, "mykey")
+ c.Assert(string(b), qt.Equals, "Hugo is great!")
+
+ }
+}
+
+func TestFileCacheConcurrent(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
+
+[caches]
+[caches.getjson]
+maxAge = "1s"
+dir = "/cache/c"
+
+`
+
+ p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
+
+ caches, err := NewCaches(p)
+ c.Assert(err, qt.IsNil)
+
+ const cacheName = "getjson"
+
+ filenameData := func(i int) (string, string) {
+ data := fmt.Sprintf("data: %d", i)
+ filename := fmt.Sprintf("file%d", i)
+ return filename, data
+ }
+
+ var wg sync.WaitGroup
+
+ for i := 0; i < 50; i++ {
+ wg.Add(1)
+ go func(i int) {
+ defer wg.Done()
+ for j := 0; j < 20; j++ {
+ ca := caches.Get(cacheName)
+ c.Assert(ca, qt.Not(qt.IsNil))
+ filename, data := filenameData(i)
+ _, r, err := ca.GetOrCreate(filename, func() (io.ReadCloser, error) {
+ return hugio.ToReadCloser(strings.NewReader(data)), nil
+ })
+ c.Assert(err, qt.IsNil)
+ b, _ := ioutil.ReadAll(r)
+ r.Close()
+ c.Assert(string(b), qt.Equals, data)
+ // Trigger some expiration.
+ time.Sleep(50 * time.Millisecond)
+ }
+ }(i)
+
+ }
+ wg.Wait()
+}
+
+func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ var result string
+
+ rf := func(failLevel int) func(info ItemInfo, r io.ReadSeeker) error {
+ return func(info ItemInfo, r io.ReadSeeker) error {
+ if failLevel > 0 {
+ if failLevel > 1 {
+ return ErrFatal
+ }
+ return errors.New("fail")
+ }
+
+ b, _ := ioutil.ReadAll(r)
+ result = string(b)
+
+ return nil
+ }
+ }
+
+ bf := func(s string) func(info ItemInfo, w io.WriteCloser) error {
+ return func(info ItemInfo, w io.WriteCloser) error {
+ defer w.Close()
+ result = s
+ _, err := w.Write([]byte(s))
+ return err
+ }
+ }
+
+ cache := NewCache(afero.NewMemMapFs(), 100*time.Hour, "")
+
+ const id = "a32"
+
+ _, err := cache.ReadOrCreate(id, rf(0), bf("v1"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, "v1")
+ _, err = cache.ReadOrCreate(id, rf(0), bf("v2"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, "v1")
+ _, err = cache.ReadOrCreate(id, rf(1), bf("v3"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, "v3")
+ _, err = cache.ReadOrCreate(id, rf(2), bf("v3"))
+ c.Assert(err, qt.Equals, ErrFatal)
+}
+
+func TestCleanID(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(cleanID(filepath.FromSlash("/a/b//c.txt")), qt.Equals, filepath.FromSlash("a/b/c.txt"))
+ c.Assert(cleanID(filepath.FromSlash("a/b//c.txt")), qt.Equals, filepath.FromSlash("a/b/c.txt"))
+}
+
+func initConfig(fs afero.Fs, cfg config.Provider) error {
+ if _, err := langs.LoadLanguageSettings(cfg, nil); err != nil {
+ return err
+ }
+
+ modConfig, err := modules.DecodeConfig(cfg)
+ if err != nil {
+ return err
+ }
+
+ workingDir := cfg.GetString("workingDir")
+ themesDir := cfg.GetString("themesDir")
+ if !filepath.IsAbs(themesDir) {
+ themesDir = filepath.Join(workingDir, themesDir)
+ }
+ globAll := glob.MustCompile("**", '/')
+ modulesClient := modules.NewClient(modules.ClientConfig{
+ Fs: fs,
+ WorkingDir: workingDir,
+ ThemesDir: themesDir,
+ ModuleConfig: modConfig,
+ IgnoreVendor: globAll,
+ })
+
+ moduleConfig, err := modulesClient.Collect()
+ if err != nil {
+ return err
+ }
+
+ if err := modules.ApplyProjectConfigDefaults(cfg, moduleConfig.ActiveModules[len(moduleConfig.ActiveModules)-1]); err != nil {
+ return err
+ }
+
+ cfg.Set("allModules", moduleConfig.ActiveModules)
+
+ return nil
+}
+
+func newPathsSpec(t *testing.T, fs afero.Fs, configStr string) *helpers.PathSpec {
+ c := qt.New(t)
+ cfg, err := config.FromConfigString(configStr, "toml")
+ c.Assert(err, qt.IsNil)
+ initConfig(fs, cfg)
+ config.SetBaseTestDefaults(cfg)
+ p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg, nil)
+ c.Assert(err, qt.IsNil)
+ return p
+}
diff --git a/cache/namedmemcache/named_cache.go b/cache/namedmemcache/named_cache.go
new file mode 100644
index 000000000..7fb4fe8ed
--- /dev/null
+++ b/cache/namedmemcache/named_cache.go
@@ -0,0 +1,78 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package namedmemcache provides a memory cache with a named lock. This is suitable
+// for situations where creating the cached resource can be time consuming or otherwise
+// resource hungry, or in situations where a "once only per key" is a requirement.
+package namedmemcache
+
+import (
+ "sync"
+
+ "github.com/BurntSushi/locker"
+)
+
+// Cache holds the cached values.
+type Cache struct {
+ nlocker *locker.Locker
+ cache map[string]cacheEntry
+ mu sync.RWMutex
+}
+
+type cacheEntry struct {
+ value any
+ err error
+}
+
+// New creates a new cache.
+func New() *Cache {
+ return &Cache{
+ nlocker: locker.NewLocker(),
+ cache: make(map[string]cacheEntry),
+ }
+}
+
+// Clear clears the cache state.
+func (c *Cache) Clear() {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ c.cache = make(map[string]cacheEntry)
+ c.nlocker = locker.NewLocker()
+}
+
+// GetOrCreate tries to get the value with the given cache key, if not found
+// create will be called and cached.
+// This method is thread safe. It also guarantees that the create func for a given
+// key is invoked only once for this cache.
+func (c *Cache) GetOrCreate(key string, create func() (any, error)) (any, error) {
+ c.mu.RLock()
+ entry, found := c.cache[key]
+ c.mu.RUnlock()
+
+ if found {
+ return entry.value, entry.err
+ }
+
+ c.nlocker.Lock(key)
+ defer c.nlocker.Unlock(key)
+
+ // Create it.
+ value, err := create()
+
+ c.mu.Lock()
+ c.cache[key] = cacheEntry{value: value, err: err}
+ c.mu.Unlock()
+
+ return value, err
+}
diff --git a/cache/namedmemcache/named_cache_test.go b/cache/namedmemcache/named_cache_test.go
new file mode 100644
index 000000000..2db923d76
--- /dev/null
+++ b/cache/namedmemcache/named_cache_test.go
@@ -0,0 +1,80 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package namedmemcache
+
+import (
+ "fmt"
+ "sync"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestNamedCache(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ cache := New()
+
+ counter := 0
+ create := func() (any, error) {
+ counter++
+ return counter, nil
+ }
+
+ for i := 0; i < 5; i++ {
+ v1, err := cache.GetOrCreate("a1", create)
+ c.Assert(err, qt.IsNil)
+ c.Assert(v1, qt.Equals, 1)
+ v2, err := cache.GetOrCreate("a2", create)
+ c.Assert(err, qt.IsNil)
+ c.Assert(v2, qt.Equals, 2)
+ }
+
+ cache.Clear()
+
+ v3, err := cache.GetOrCreate("a2", create)
+ c.Assert(err, qt.IsNil)
+ c.Assert(v3, qt.Equals, 3)
+}
+
+func TestNamedCacheConcurrent(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ var wg sync.WaitGroup
+
+ cache := New()
+
+ create := func(i int) func() (any, error) {
+ return func() (any, error) {
+ return i, nil
+ }
+ }
+
+ for i := 0; i < 10; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for j := 0; j < 100; j++ {
+ id := fmt.Sprintf("id%d", j)
+ v, err := cache.GetOrCreate(id, create(j))
+ c.Assert(err, qt.IsNil)
+ c.Assert(v, qt.Equals, j)
+ }
+ }()
+ }
+ wg.Wait()
+}
diff --git a/codegen/methods.go b/codegen/methods.go
new file mode 100644
index 000000000..9bc80cc3e
--- /dev/null
+++ b/codegen/methods.go
@@ -0,0 +1,536 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+// Some functions in this file (see comments) are based on the Go source code,
+// copyright The Go Authors and governed by a BSD-style license.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package codegen contains helpers for code generation.
+package codegen
+
+import (
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "regexp"
+ "sort"
+ "strings"
+ "sync"
+)
+
+// Make room for insertions
+const weightWidth = 1000
+
+// NewInspector creates a new Inspector given a source root.
+func NewInspector(root string) *Inspector {
+ return &Inspector{ProjectRootDir: root}
+}
+
+// Inspector provides methods to help code generation. It uses a combination
+// of reflection and source code AST to do the heavy lifting.
+type Inspector struct {
+ ProjectRootDir string
+
+ init sync.Once
+
+ // Determines method order. Go's reflect sorts lexicographically, so
+ // we must parse the source to preserve this order.
+ methodWeight map[string]map[string]int
+}
+
+// MethodsFromTypes creates a method set from the include slice, excluding any
+// method in exclude.
+func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.Type) Methods {
+ c.parseSource()
+
+ var methods Methods
+
+ excludes := make(map[string]bool)
+
+ if len(exclude) > 0 {
+ for _, m := range c.MethodsFromTypes(exclude, nil) {
+ excludes[m.Name] = true
+ }
+ }
+
+ // There may be overlapping interfaces in types. Do a simple check for now.
+ seen := make(map[string]bool)
+
+ nameAndPackage := func(t reflect.Type) (string, string) {
+ var name, pkg string
+
+ isPointer := t.Kind() == reflect.Ptr
+
+ if isPointer {
+ t = t.Elem()
+ }
+
+ pkgPrefix := ""
+ if pkgPath := t.PkgPath(); pkgPath != "" {
+ pkgPath = strings.TrimSuffix(pkgPath, "/")
+ _, shortPath := path.Split(pkgPath)
+ pkgPrefix = shortPath + "."
+ pkg = pkgPath
+ }
+
+ name = t.Name()
+ if name == "" {
+ // interface{}
+ name = t.String()
+ }
+
+ if isPointer {
+ pkgPrefix = "*" + pkgPrefix
+ }
+
+ name = pkgPrefix + name
+
+ return name, pkg
+ }
+
+ for _, t := range include {
+ for i := 0; i < t.NumMethod(); i++ {
+
+ m := t.Method(i)
+ if excludes[m.Name] || seen[m.Name] {
+ continue
+ }
+
+ seen[m.Name] = true
+
+ if m.PkgPath != "" {
+ // Not exported
+ continue
+ }
+
+ numIn := m.Type.NumIn()
+
+ ownerName, _ := nameAndPackage(t)
+
+ method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
+
+ for i := 0; i < numIn; i++ {
+ in := m.Type.In(i)
+
+ name, pkg := nameAndPackage(in)
+
+ if pkg != "" {
+ method.Imports = append(method.Imports, pkg)
+ }
+
+ method.In = append(method.In, name)
+ }
+
+ numOut := m.Type.NumOut()
+
+ if numOut > 0 {
+ for i := 0; i < numOut; i++ {
+ out := m.Type.Out(i)
+ name, pkg := nameAndPackage(out)
+
+ if pkg != "" {
+ method.Imports = append(method.Imports, pkg)
+ }
+
+ method.Out = append(method.Out, name)
+ }
+ }
+
+ methods = append(methods, method)
+ }
+ }
+
+ sort.SliceStable(methods, func(i, j int) bool {
+ mi, mj := methods[i], methods[j]
+
+ wi := c.methodWeight[mi.OwnerName][mi.Name]
+ wj := c.methodWeight[mj.OwnerName][mj.Name]
+
+ if wi == wj {
+ return mi.Name < mj.Name
+ }
+
+ return wi < wj
+ })
+
+ return methods
+}
+
+func (c *Inspector) parseSource() {
+ c.init.Do(func() {
+ if !strings.Contains(c.ProjectRootDir, "hugo") {
+ panic("dir must be set to the Hugo root")
+ }
+
+ c.methodWeight = make(map[string]map[string]int)
+ dirExcludes := regexp.MustCompile("docs|examples")
+ fileExcludes := regexp.MustCompile("autogen")
+ var filenames []string
+
+ filepath.Walk(c.ProjectRootDir, func(path string, info os.FileInfo, err error) error {
+ if info.IsDir() {
+ if dirExcludes.MatchString(info.Name()) {
+ return filepath.SkipDir
+ }
+ }
+
+ if !strings.HasSuffix(path, ".go") || fileExcludes.MatchString(path) {
+ return nil
+ }
+
+ filenames = append(filenames, path)
+
+ return nil
+ })
+
+ for _, filename := range filenames {
+
+ pkg := c.packageFromPath(filename)
+
+ fset := token.NewFileSet()
+ node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
+ if err != nil {
+ panic(err)
+ }
+
+ ast.Inspect(node, func(n ast.Node) bool {
+ switch t := n.(type) {
+ case *ast.TypeSpec:
+ if t.Name.IsExported() {
+ switch it := t.Type.(type) {
+ case *ast.InterfaceType:
+ iface := pkg + "." + t.Name.Name
+ methodNames := collectMethodsRecursive(pkg, it.Methods.List)
+ weights := make(map[string]int)
+ weight := weightWidth
+ for _, name := range methodNames {
+ weights[name] = weight
+ weight += weightWidth
+ }
+ c.methodWeight[iface] = weights
+ }
+ }
+ }
+ return true
+ })
+
+ }
+
+ // Complement
+ for _, v1 := range c.methodWeight {
+ for k2, w := range v1 {
+ if v, found := c.methodWeight[k2]; found {
+ for k3, v3 := range v {
+ v1[k3] = (v3 / weightWidth) + w
+ }
+ }
+ }
+ }
+ })
+}
+
+func (c *Inspector) packageFromPath(p string) string {
+ p = filepath.ToSlash(p)
+ base := path.Base(p)
+ if !strings.Contains(base, ".") {
+ return base
+ }
+ return path.Base(strings.TrimSuffix(p, base))
+}
+
+// Method holds enough information about it to recreate it.
+type Method struct {
+ // The interface we extracted this method from.
+ Owner reflect.Type
+
+ // String version of the above, in the form PACKAGE.NAME, e.g.
+ // page.Page
+ OwnerName string
+
+ // Method name.
+ Name string
+
+ // Imports needed to satisfy the method signature.
+ Imports []string
+
+ // Argument types, including any package prefix, e.g. string, int, interface{},
+ // net.Url
+ In []string
+
+ // Return types.
+ Out []string
+}
+
+// Declaration creates a method declaration (without any body) for the given receiver.
+func (m Method) Declaration(receiver string) string {
+ return fmt.Sprintf("func (%s %s) %s%s %s", receiverShort(receiver), receiver, m.Name, m.inStr(), m.outStr())
+}
+
+// DeclarationNamed creates a method declaration (without any body) for the given receiver
+// with named return values.
+func (m Method) DeclarationNamed(receiver string) string {
+ return fmt.Sprintf("func (%s %s) %s%s %s", receiverShort(receiver), receiver, m.Name, m.inStr(), m.outStrNamed())
+}
+
+// Delegate creates a delegate call string.
+func (m Method) Delegate(receiver, delegate string) string {
+ ret := ""
+ if len(m.Out) > 0 {
+ ret = "return "
+ }
+ return fmt.Sprintf("%s%s.%s.%s%s", ret, receiverShort(receiver), delegate, m.Name, m.inOutStr())
+}
+
+func (m Method) String() string {
+ return m.Name + m.inStr() + " " + m.outStr() + "\n"
+}
+
+func (m Method) inOutStr() string {
+ if len(m.In) == 0 {
+ return "()"
+ }
+
+ args := make([]string, len(m.In))
+ for i := 0; i < len(args); i++ {
+ args[i] = fmt.Sprintf("arg%d", i)
+ }
+ return "(" + strings.Join(args, ", ") + ")"
+}
+
+func (m Method) inStr() string {
+ if len(m.In) == 0 {
+ return "()"
+ }
+
+ args := make([]string, len(m.In))
+ for i := 0; i < len(args); i++ {
+ args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
+ }
+ return "(" + strings.Join(args, ", ") + ")"
+}
+
+func (m Method) outStr() string {
+ if len(m.Out) == 0 {
+ return ""
+ }
+ if len(m.Out) == 1 {
+ return m.Out[0]
+ }
+
+ return "(" + strings.Join(m.Out, ", ") + ")"
+}
+
+func (m Method) outStrNamed() string {
+ if len(m.Out) == 0 {
+ return ""
+ }
+
+ outs := make([]string, len(m.Out))
+ for i := 0; i < len(outs); i++ {
+ outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i])
+ }
+
+ return "(" + strings.Join(outs, ", ") + ")"
+}
+
// Methods represents a list of methods for one or more interfaces.
// The order matches the defined order in their source file(s).
// Use Imports to collect the packages their signatures need.
type Methods []Method
+
+// Imports returns a sorted list of package imports needed to satisfy the
+// signatures of all methods.
+func (m Methods) Imports() []string {
+ var pkgImports []string
+ for _, method := range m {
+ pkgImports = append(pkgImports, method.Imports...)
+ }
+ if len(pkgImports) > 0 {
+ pkgImports = uniqueNonEmptyStrings(pkgImports)
+ sort.Strings(pkgImports)
+ }
+ return pkgImports
+}
+
+// ToMarshalJSON creates a MarshalJSON method for these methods. Any method name
+// matching any of the regexps in excludes will be ignored.
+func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) {
+ var sb strings.Builder
+
+ r := receiverShort(receiver)
+ what := firstToUpper(trimAsterisk(receiver))
+ pgkName := path.Base(pkgPath)
+
+ fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)
+
+ var methods Methods
+ excludeRes := make([]*regexp.Regexp, len(excludes))
+
+ for i, exclude := range excludes {
+ excludeRes[i] = regexp.MustCompile(exclude)
+ }
+
+ for _, method := range m {
+ // Exclude methods with arguments and incompatible return values
+ if len(method.In) > 0 || len(method.Out) == 0 || len(method.Out) > 2 {
+ continue
+ }
+
+ if len(method.Out) == 2 {
+ if method.Out[1] != "error" {
+ continue
+ }
+ }
+
+ for _, re := range excludeRes {
+ if re.MatchString(method.Name) {
+ continue
+ }
+ }
+
+ methods = append(methods, method)
+ }
+
+ for _, method := range methods {
+ varn := varName(method.Name)
+ if len(method.Out) == 1 {
+ fmt.Fprintf(&sb, "\t%s := %s.%s()\n", varn, r, method.Name)
+ } else {
+ fmt.Fprintf(&sb, "\t%s, err := %s.%s()\n", varn, r, method.Name)
+ fmt.Fprint(&sb, "\tif err != nil {\n\t\treturn nil, err\n\t}\n")
+ }
+ }
+
+ fmt.Fprint(&sb, "\n\ts := struct {\n")
+
+ for _, method := range methods {
+ fmt.Fprintf(&sb, "\t\t%s %s\n", method.Name, typeName(method.Out[0], pgkName))
+ }
+
+ fmt.Fprint(&sb, "\n\t}{\n")
+
+ for _, method := range methods {
+ varn := varName(method.Name)
+ fmt.Fprintf(&sb, "\t\t%s: %s,\n", method.Name, varn)
+ }
+
+ fmt.Fprint(&sb, "\n\t}\n\n")
+ fmt.Fprint(&sb, "\treturn json.Marshal(&s)\n}")
+
+ pkgImports := append(methods.Imports(), "encoding/json")
+
+ if pkgPath != "" {
+ // Exclude self
+ for i, pkgImp := range pkgImports {
+ if pkgImp == pkgPath {
+ pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
+ }
+ }
+ }
+
+ return sb.String(), pkgImports
+}
+
// collectMethodsRecursive walks an interface's field list and returns its
// method names, descending into embedded interfaces. Embedded interfaces
// declared in another file/package cannot be resolved here; for those it
// returns a qualified "package.Name" entry for the caller to resolve later.
func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
	var methodNames []string
	for _, m := range f {
		if m.Names != nil {
			// A named entry is a plain method declaration.
			methodNames = append(methodNames, m.Names[0].Name)
			continue
		}

		if ident, ok := m.Type.(*ast.Ident); ok && ident.Obj != nil {
			// Embedded interface resolved in the same file: recurse into it.
			methodNames = append(
				methodNames,
				collectMethodsRecursive(
					pkg,
					ident.Obj.Decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List)...)
		} else {
			// Embedded, but in a different file/package. Return the
			// package.Name and deal with that later.
			name := packageName(m.Type)
			if !strings.Contains(name, ".") {
				// Assume current package
				name = pkg + "." + name
			}
			methodNames = append(methodNames, name)
		}
	}

	return methodNames
}
+
// firstToLower lowercases the first byte of name. Panics on empty input,
// matching the original behavior.
func firstToLower(name string) string {
	head, tail := name[:1], name[1:]
	return strings.ToLower(head) + tail
}
+
// firstToUpper uppercases the first byte of name. Panics on empty input,
// matching the original behavior.
func firstToUpper(name string) string {
	head, tail := name[:1], name[1:]
	return strings.ToUpper(head) + tail
}
+
// packageName renders an AST expression as a (possibly dotted) name:
// a bare identifier for *ast.Ident, "pkg.Name" for *ast.SelectorExpr,
// and "" for any other expression kind.
func packageName(e ast.Expr) string {
	switch tp := e.(type) {
	case *ast.Ident:
		return tp.Name
	case *ast.SelectorExpr:
		return fmt.Sprintf("%s.%s", packageName(tp.X), packageName(tp.Sel))
	}
	return ""
}
+
// receiverShort derives a one-letter receiver name from the receiver type,
// e.g. "*Page" => "p". Panics on empty input, matching the original behavior.
func receiverShort(receiver string) string {
	name := strings.TrimPrefix(receiver, "*")
	return strings.ToLower(name)[:1]
}
+
// trimAsterisk removes at most one leading "*" from name.
func trimAsterisk(name string) string {
	if len(name) > 0 && name[0] == '*' {
		return name[1:]
	}
	return name
}
+
// typeName strips the "pkg." qualifier from name when name belongs to pkg;
// other package qualifiers are left intact.
func typeName(name, pkg string) string {
	qualifier := pkg + "."
	if strings.HasPrefix(name, qualifier) {
		return name[len(qualifier):]
	}
	return name
}
+
// uniqueNonEmptyStrings returns s with empty strings and duplicates removed,
// preserving first-seen order. Returns nil for an empty result.
func uniqueNonEmptyStrings(s []string) []string {
	seen := make(map[string]struct{}, len(s))
	var out []string
	for _, v := range s {
		if v == "" {
			continue
		}
		if _, dup := seen[v]; dup {
			continue
		}
		seen[v] = struct{}{}
		out = append(out, v)
	}
	return out
}
+
// varName converts a method name into a local variable name: the first
// byte is lowercased and a few keyword-like results are adjusted so the
// generated code stays valid and readable.
func varName(name string) string {
	name = strings.ToLower(name[:1]) + name[1:]

	// Adjust some reserved keywords, see https://golang.org/ref/spec#Keywords
	switch name {
	case "type":
		return "typ"
	case "package":
		return "pkg"
	case "len":
		// Not reserved, but syntax highlighters treat it as a keyword.
		return "length"
	}

	return name
}
diff --git a/codegen/methods2_test.go b/codegen/methods2_test.go
new file mode 100644
index 000000000..bd36b5e80
--- /dev/null
+++ b/codegen/methods2_test.go
@@ -0,0 +1,20 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codegen
+
// IEmbed is embedded by the test interfaces in methods_test.go to exercise
// the Inspector's handling of embedded interfaces declared in another file.
type IEmbed interface {
	MethodEmbed3(s string) string
	MethodEmbed1() string
	MethodEmbed2()
}
diff --git a/codegen/methods_test.go b/codegen/methods_test.go
new file mode 100644
index 000000000..0aff43d0e
--- /dev/null
+++ b/codegen/methods_test.go
@@ -0,0 +1,96 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codegen
+
+import (
+ "fmt"
+ "net"
+ "os"
+ "reflect"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/herrors"
+)
+
+func TestMethods(t *testing.T) {
+ var (
+ zeroIE = reflect.TypeOf((*IEmbed)(nil)).Elem()
+ zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()
+ zeroI = reflect.TypeOf((*I)(nil)).Elem()
+ )
+
+ dir, _ := os.Getwd()
+ insp := NewInspector(dir)
+
+ t.Run("MethodsFromTypes", func(t *testing.T) {
+ c := qt.New(t)
+
+ methods := insp.MethodsFromTypes([]reflect.Type{zeroI}, nil)
+
+ methodsStr := fmt.Sprint(methods)
+
+ c.Assert(methodsStr, qt.Contains, "Method1(arg0 herrors.ErrorContext)")
+ c.Assert(methodsStr, qt.Contains, "Method7() interface {}")
+ c.Assert(methodsStr, qt.Contains, "Method0() string\n Method4() string")
+ c.Assert(methodsStr, qt.Contains, "MethodEmbed3(arg0 string) string\n MethodEmbed1() string")
+
+ c.Assert(methods.Imports(), qt.Contains, "github.com/gohugoio/hugo/common/herrors")
+ })
+
+ t.Run("EmbedOnly", func(t *testing.T) {
+ c := qt.New(t)
+
+ methods := insp.MethodsFromTypes([]reflect.Type{zeroIEOnly}, nil)
+
+ methodsStr := fmt.Sprint(methods)
+
+ c.Assert(methodsStr, qt.Contains, "MethodEmbed3(arg0 string) string")
+ })
+
+ t.Run("ToMarshalJSON", func(t *testing.T) {
+ c := qt.New(t)
+
+ m, pkg := insp.MethodsFromTypes(
+ []reflect.Type{zeroI},
+ []reflect.Type{zeroIE}).ToMarshalJSON("*page", "page")
+
+ c.Assert(m, qt.Contains, "method6 := p.Method6()")
+ c.Assert(m, qt.Contains, "Method0: method0,")
+ c.Assert(m, qt.Contains, "return json.Marshal(&s)")
+
+ c.Assert(pkg, qt.Contains, "github.com/gohugoio/hugo/common/herrors")
+ c.Assert(pkg, qt.Contains, "encoding/json")
+
+ fmt.Println(pkg)
+ })
+}
+
// I is a test interface covering the method shapes the code generator must
// handle: embedded interfaces, arguments, multiple/error returns, pointer,
// any and unexported methods.
type I interface {
	IEmbed
	Method0() string
	Method4() string
	Method1(myerr herrors.ErrorContext)
	Method3(myint int, mystring string)
	Method5() (string, error)
	Method6() *net.IP
	Method7() any
	Method8() herrors.ErrorContext
	method2()
	method9() os.FileInfo
}
+
// IEOnly embeds IEmbed and nothing else, to test extraction of methods that
// come exclusively from an embedded interface in another file.
type IEOnly interface {
	IEmbed
}
diff --git a/commands/commandeer.go b/commands/commandeer.go
new file mode 100644
index 000000000..5e5e1b3ab
--- /dev/null
+++ b/commands/commandeer.go
@@ -0,0 +1,520 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "net"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sync"
+ "time"
+
+ hconfig "github.com/gohugoio/hugo/config"
+
+ "golang.org/x/sync/semaphore"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/spf13/cast"
+ jww "github.com/spf13/jwalterweatherman"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/spf13/cobra"
+
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/spf13/afero"
+
+ "github.com/bep/clock"
+ "github.com/bep/debounce"
+ "github.com/bep/overlayfs"
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+)
+
// commandeerHugoState holds the Hugo build state that is created once
// (guarded by fsCreate) and shared across rebuilds. created is closed when
// the HugoSites creation attempt has finished.
type commandeerHugoState struct {
	*deps.DepsCfg
	hugoSites *hugolib.HugoSites
	fsCreate sync.Once
	created chan struct{}
}
+
// commandeer is the top-level state for a Hugo command invocation: logging,
// server configuration, filesystems and flags derived from the config.
type commandeer struct {
	*commandeerHugoState

	logger loggers.Logger
	serverConfig *config.Server

	// buildLock serializes builds; it returns an unlock func.
	buildLock func() (unlock func(), err error)

	// Loading state
	mustHaveConfigFile bool
	failOnInitErr bool
	running bool

	// Currently only set when in "fast render mode". But it seems to
	// be fast enough that we could maybe just add it for all server modes.
	changeDetector *fileChangeDetector

	// We need to reuse these on server rebuilds.
	// These 2 will be different if --renderStaticToDisk is set.
	publishDirFs afero.Fs
	publishDirServerFs afero.Fs

	h *hugoBuilderCommon
	ftch flagsToConfigHandler

	visitedURLs *types.EvictingStringQueue

	// cfgInit is invoked (possibly more than once) while loading configuration.
	cfgInit func(c *commandeer) error

	// We watch these for changes.
	configFiles []string

	// Used in cases where we get flooded with events in server mode.
	debounce func(f func())

	serverPorts []serverPortListener

	languages langs.Languages
	doLiveReload bool
	renderStaticToDisk bool
	fastRenderMode bool
	showErrorInBrowser bool
	wasError bool

	configured bool
	paused bool

	// fullRebuildSem limits concurrent full rebuilds to one.
	fullRebuildSem *semaphore.Weighted

	// Any error from the last build.
	buildErr error
}
+
// serverPortListener pairs a server port number with its net.Listener.
type serverPortListener struct {
	p int
	ln net.Listener
}
+
+func newCommandeerHugoState() *commandeerHugoState {
+ return &commandeerHugoState{
+ created: make(chan struct{}),
+ }
+}
+
// hugo blocks until the HugoSites creation attempt has finished, then
// returns the result (which may be nil if creation failed).
func (c *commandeerHugoState) hugo() *hugolib.HugoSites {
	<-c.created
	return c.hugoSites
}
+
// hugoTry is the non-blocking variant of hugo: it returns the HugoSites if
// they become available within 100ms, otherwise nil.
func (c *commandeerHugoState) hugoTry() *hugolib.HugoSites {
	select {
	case <-c.created:
		return c.hugoSites
	case <-time.After(time.Millisecond * 100):
		return nil
	}
}
+
// errCount returns the number of errors recorded by the logger so far.
func (c *commandeer) errCount() int {
	return int(c.logger.LogCounters().ErrorCounter.Count())
}
+
+func (c *commandeer) getErrorWithContext() any {
+ errCount := c.errCount()
+
+ if errCount == 0 {
+ return nil
+ }
+
+ m := make(map[string]any)
+
+ //xwm["Error"] = errors.New(cleanErrorLog(removeErrorPrefixFromLog(c.logger.Errors())))
+ m["Error"] = errors.New(cleanErrorLog(removeErrorPrefixFromLog(c.logger.Errors())))
+ m["Version"] = hugo.BuildVersionString()
+ ferrors := herrors.UnwrapFileErrorsWithErrorContext(c.buildErr)
+ m["Files"] = ferrors
+
+ return m
+}
+
// Set writes a config value. It panics if called after configuration has
// been finalized (configured == true).
func (c *commandeer) Set(key string, value any) {
	if c.configured {
		panic("commandeer cannot be changed")
	}
	c.Cfg.Set(key, value)
}
+
// initFs stores the filesystem on the deps config and remembers the publish
// filesystems so they can be reused on server rebuilds.
func (c *commandeer) initFs(fs *hugofs.Fs) error {
	c.publishDirFs = fs.PublishDir
	c.publishDirServerFs = fs.PublishDirServer
	c.DepsCfg.Fs = fs

	return nil
}
+
+func (c *commandeer) initClock(loc *time.Location) error {
+ bt := c.Cfg.GetString("clock")
+ if bt == "" {
+ return nil
+ }
+
+ t, err := cast.StringToDateInDefaultLocation(bt, loc)
+ if err != nil {
+ return fmt.Errorf(`failed to parse "clock" flag: %s`, err)
+ }
+
+ htime.Clock = clock.Start(t)
+ return nil
+}
+
// newCommandeer builds a commandeer, wires up the debounce used to coalesce
// filesystem events in server mode, installs a bootstrap logger, and then
// loads the configuration. The returned error is the config-load error.
func newCommandeer(mustHaveConfigFile, failOnInitErr, running bool, h *hugoBuilderCommon, f flagsToConfigHandler, cfgInit func(c *commandeer) error, subCmdVs ...*cobra.Command) (*commandeer, error) {
	var rebuildDebouncer func(f func())
	if running {
		// The time value used is tested with mass content replacements in a fairly big Hugo site.
		// It is better to wait for some seconds in those cases rather than get flooded
		// with rebuilds.
		rebuildDebouncer = debounce.New(4 * time.Second)
	}

	out := ioutil.Discard
	if !h.quiet {
		out = os.Stdout
	}

	c := &commandeer{
		h: h,
		ftch: f,
		commandeerHugoState: newCommandeerHugoState(),
		cfgInit: cfgInit,
		visitedURLs: types.NewEvictingStringQueue(10),
		debounce: rebuildDebouncer,
		fullRebuildSem: semaphore.NewWeighted(1),

		// Init state
		mustHaveConfigFile: mustHaveConfigFile,
		failOnInitErr: failOnInitErr,
		running: running,

		// This will be replaced later, but we need something to log to before the configuration is read.
		logger: loggers.NewLogger(jww.LevelWarn, jww.LevelError, out, ioutil.Discard, running),
	}

	return c, c.loadConfig()
}
+
// fileChangeDetector tracks per-file checksums across rebuilds so that the
// server can decide whether a path actually changed. Files matching
// irrelevantRe (e.g. source maps) are ignored. All maps are guarded by the
// embedded mutex.
type fileChangeDetector struct {
	sync.Mutex
	current map[string]string
	prev map[string]string

	irrelevantRe *regexp.Regexp
}
+
// OnFileClose records the checksum for name in the current build.
func (f *fileChangeDetector) OnFileClose(name, md5sum string) {
	f.Lock()
	defer f.Unlock()
	f.current[name] = md5sum
}
+
// changed returns the names whose checksum is new or differs from the
// previous build, with irrelevant paths filtered out. Safe on a nil receiver.
func (f *fileChangeDetector) changed() []string {
	if f == nil {
		return nil
	}
	f.Lock()
	defer f.Unlock()
	var c []string
	for k, v := range f.current {
		vv, found := f.prev[k]
		if !found || v != vv {
			c = append(c, k)
		}
	}

	return f.filterIrrelevant(c)
}
+
+func (f *fileChangeDetector) filterIrrelevant(in []string) []string {
+ var filtered []string
+ for _, v := range in {
+ if !f.irrelevantRe.MatchString(v) {
+ filtered = append(filtered, v)
+ }
+ }
+ return filtered
+}
+
+func (f *fileChangeDetector) PrepareNew() {
+ if f == nil {
+ return
+ }
+
+ f.Lock()
+ defer f.Unlock()
+
+ if f.current == nil {
+ f.current = make(map[string]string)
+ f.prev = make(map[string]string)
+ return
+ }
+
+ f.prev = make(map[string]string)
+ for k, v := range f.current {
+ f.prev[k] = v
+ }
+ f.current = make(map[string]string)
+}
+
// loadConfig (re)loads the site configuration, derives commonly used flags,
// decodes the server config, and — exactly once per commandeer — creates the
// filesystems and the HugoSites. It is called both at startup and on config
// file changes in server mode.
func (c *commandeer) loadConfig() error {
	if c.DepsCfg == nil {
		c.DepsCfg = &deps.DepsCfg{}
	}

	if c.logger != nil {
		// Truncate the error log if this is a reload.
		c.logger.Reset()
	}

	cfg := c.DepsCfg
	c.configured = false
	cfg.Running = c.running

	var dir string
	if c.h.source != "" {
		dir, _ = filepath.Abs(c.h.source)
	} else {
		dir, _ = os.Getwd()
	}

	var sourceFs afero.Fs = hugofs.Os
	if c.DepsCfg.Fs != nil {
		sourceFs = c.DepsCfg.Fs.Source
	}

	environment := c.h.getEnvironment(c.running)

	// Applied to the config after it is loaded: merge CLI flags and set
	// workingDir/environment.
	doWithConfig := func(cfg config.Provider) error {
		if c.ftch != nil {
			c.ftch.flagsToConfig(cfg)
		}

		cfg.Set("workingDir", dir)
		cfg.Set("environment", environment)
		return nil
	}

	cfgSetAndInit := func(cfg config.Provider) error {
		c.Cfg = cfg
		if c.cfgInit == nil {
			return nil
		}
		err := c.cfgInit(c)
		return err
	}

	configPath := c.h.source
	if configPath == "" {
		configPath = dir
	}
	config, configFiles, err := hugolib.LoadConfig(
		hugolib.ConfigSourceDescriptor{
			Fs: sourceFs,
			Logger: c.logger,
			Path: configPath,
			WorkingDir: dir,
			Filename: c.h.cfgFile,
			AbsConfigDir: c.h.getConfigDir(dir),
			Environment: environment,
		},
		cfgSetAndInit,
		doWithConfig)

	if err != nil {
		// We should improve the error handling here,
		// but with hugo mod init and similar there is a chicken and egg situation
		// with modules already configured in config.toml, so ignore those errors.
		if c.mustHaveConfigFile || (c.failOnInitErr && !moduleNotFoundRe.MatchString(err.Error())) {
			return err
		} else {
			// Just make it a warning.
			c.logger.Warnln(err)
		}
	} else if c.mustHaveConfigFile && len(configFiles) == 0 {
		return hugolib.ErrNoConfigFile
	}

	c.configFiles = configFiles

	var ok bool
	loc := time.Local
	c.languages, ok = c.Cfg.Get("languagesSorted").(langs.Languages)
	if ok {
		// Use the first language's location for the clock.
		loc = langs.GetLocation(c.languages[0])
	}

	err = c.initClock(loc)
	if err != nil {
		return err
	}

	// Set some commonly used flags
	c.doLiveReload = c.running && !c.Cfg.GetBool("disableLiveReload")
	c.fastRenderMode = c.doLiveReload && !c.Cfg.GetBool("disableFastRender")
	c.showErrorInBrowser = c.doLiveReload && !c.Cfg.GetBool("disableBrowserError")

	// This is potentially double work, but we need to do this one more time now
	// that all the languages have been configured.
	if c.cfgInit != nil {
		if err := c.cfgInit(c); err != nil {
			return err
		}
	}

	logger, err := c.createLogger(config)
	if err != nil {
		return err
	}

	cfg.Logger = logger
	c.logger = logger
	c.serverConfig, err = hconfig.DecodeServer(cfg.Cfg)
	if err != nil {
		return err
	}

	createMemFs := config.GetBool("renderToMemory")
	c.renderStaticToDisk = config.GetBool("renderStaticToDisk")

	if createMemFs {
		// Rendering to memoryFS, publish to Root regardless of publishDir.
		config.Set("publishDir", "/")
		config.Set("publishDirStatic", "/")
	} else if c.renderStaticToDisk {
		// Hybrid, render dynamic content to Root.
		config.Set("publishDirStatic", config.Get("publishDir"))
		config.Set("publishDir", "/")

	}

	c.fsCreate.Do(func() {
		// Assume both source and destination are using same filesystem.
		fs := hugofs.NewFromSourceAndDestination(sourceFs, sourceFs, config)

		if c.publishDirFs != nil {
			// Need to reuse the destination on server rebuilds.
			fs.PublishDir = c.publishDirFs
			fs.PublishDirServer = c.publishDirServerFs
		} else {
			if c.renderStaticToDisk {
				publishDirStatic := config.GetString("publishDirStatic")
				workingDir := config.GetString("workingDir")
				absPublishDirStatic := paths.AbsPathify(workingDir, publishDirStatic)

				fs = hugofs.NewFromSourceAndDestination(sourceFs, afero.NewMemMapFs(), config)
				// Writes the dynamic output to memory,
				// while serve others directly from /public on disk.
				dynamicFs := fs.PublishDir
				staticFs := afero.NewBasePathFs(afero.NewOsFs(), absPublishDirStatic)

				// Serve from both the static and dynamic fs,
				// the first will take priority.
				// This is a read-only filesystem,
				// we do all the writes to
				// fs.Destination and fs.DestinationStatic.
				fs.PublishDirServer = overlayfs.New(
					overlayfs.Options{
						Fss: []afero.Fs{
							dynamicFs,
							staticFs,
						},
					},
				)
				fs.PublishDirStatic = staticFs
			} else if createMemFs {
				// Hugo writes the output to memory instead of the disk.
				fs = hugofs.NewFromSourceAndDestination(sourceFs, afero.NewMemMapFs(), config)
			}
		}

		if c.fastRenderMode {
			// For now, fast render mode only. It should, however, be fast enough
			// for the full variant, too.
			changeDetector := &fileChangeDetector{
				// We use this detector to decide to do a Hot reload of a single path or not.
				// We need to filter out source maps and possibly some other to be able
				// to make that decision.
				irrelevantRe: regexp.MustCompile(`\.map$`),
			}

			changeDetector.PrepareNew()
			fs.PublishDir = hugofs.NewHashingFs(fs.PublishDir, changeDetector)
			fs.PublishDirStatic = hugofs.NewHashingFs(fs.PublishDirStatic, changeDetector)
			c.changeDetector = changeDetector
		}

		if c.Cfg.GetBool("logPathWarnings") {
			// Note that we only care about the "dynamic creates" here,
			// so skip the static fs.
			fs.PublishDir = hugofs.NewCreateCountingFs(fs.PublishDir)
		}

		// To debug hard-to-find path issues.
		// fs.Destination = hugofs.NewStacktracerFs(fs.Destination, `fr/fr`)

		err = c.initFs(fs)
		if err != nil {
			// Close created so waiters in hugo()/hugoTry() are released.
			close(c.created)
			return
		}

		var h *hugolib.HugoSites

		var createErr error
		h, createErr = hugolib.NewHugoSites(*c.DepsCfg)
		if h == nil || c.failOnInitErr {
			err = createErr
		}

		c.hugoSites = h
		// TODO(bep) improve.
		if c.buildLock == nil && h != nil {
			c.buildLock = h.LockBuild
		}
		close(c.created)
	})

	if err != nil {
		return err
	}

	cacheDir, err := helpers.GetCacheDir(sourceFs, config)
	if err != nil {
		return err
	}
	config.Set("cacheDir", cacheDir)

	return nil
}
diff --git a/commands/commands.go b/commands/commands.go
new file mode 100644
index 000000000..b81b867f9
--- /dev/null
+++ b/commands/commands.go
@@ -0,0 +1,343 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "fmt"
+ "os"
+ "time"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/loggers"
+ hpaths "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/cobra"
+)
+
// commandsBuilder collects the cmder commands to register on the root
// hugo command, sharing the common builder flags.
type commandsBuilder struct {
	hugoBuilderCommon

	commands []cmder
}
+
+func newCommandsBuilder() *commandsBuilder {
+ return &commandsBuilder{}
+}
+
+func (b *commandsBuilder) addCommands(commands ...cmder) *commandsBuilder {
+ b.commands = append(b.commands, commands...)
+ return b
+}
+
// addAll registers the full set of Hugo subcommands and returns the builder
// for chaining.
func (b *commandsBuilder) addAll() *commandsBuilder {
	b.addCommands(
		b.newServerCmd(),
		newVersionCmd(),
		newEnvCmd(),
		b.newConfigCmd(),
		b.newDeployCmd(),
		b.newConvertCmd(),
		b.newNewCmd(),
		b.newListCmd(),
		newImportCmd(),
		newGenCmd(),
		createReleaser(),
		b.newModCmd(),
	)

	return b
}
+
// build creates the root hugo command and attaches all registered
// subcommands to it.
func (b *commandsBuilder) build() *hugoCmd {
	h := b.newHugoCmd()
	addCommands(h.getCommand(), b.commands...)
	return h
}
+
+func addCommands(root *cobra.Command, commands ...cmder) {
+ for _, command := range commands {
+ cmd := command.getCommand()
+ if cmd == nil {
+ continue
+ }
+ root.AddCommand(cmd)
+ }
+}
+
// baseCmd wraps a cobra command; it is the common base for all cmder
// implementations in this package.
type baseCmd struct {
	cmd *cobra.Command
}
+
// Compile-time check that baseBuilderCmd exposes its commandsBuilder.
var _ commandsBuilderGetter = (*baseBuilderCmd)(nil)

// Used in tests.
type commandsBuilderGetter interface {
	getCommandsBuilder() *commandsBuilder
}
+
// baseBuilderCmd is a baseCmd that also carries the shared commandsBuilder,
// giving builder-style commands access to the common flags.
type baseBuilderCmd struct {
	*baseCmd
	*commandsBuilder
}
+
// getCommandsBuilder returns the shared builder; see commandsBuilderGetter.
func (b *baseBuilderCmd) getCommandsBuilder() *commandsBuilder {
	return b.commandsBuilder
}
+
// getCommand returns the wrapped cobra command.
func (c *baseCmd) getCommand() *cobra.Command {
	return c.cmd
}
+
+func newBaseCmd(cmd *cobra.Command) *baseCmd {
+ return &baseCmd{cmd: cmd}
+}
+
// newBuilderCmd wraps cmd with the full set of builder flags (handleFlags).
func (b *commandsBuilder) newBuilderCmd(cmd *cobra.Command) *baseBuilderCmd {
	bcmd := &baseBuilderCmd{commandsBuilder: b, baseCmd: &baseCmd{cmd: cmd}}
	bcmd.hugoBuilderCommon.handleFlags(cmd)
	return bcmd
}
+
// newBuilderBasicCmd wraps cmd with only the common builder flags
// (handleCommonBuilderFlags), not the full build flag set.
func (b *commandsBuilder) newBuilderBasicCmd(cmd *cobra.Command) *baseBuilderCmd {
	bcmd := &baseBuilderCmd{commandsBuilder: b, baseCmd: &baseCmd{cmd: cmd}}
	bcmd.hugoBuilderCommon.handleCommonBuilderFlags(cmd)
	return bcmd
}
+
// flagsToConfig copies the command's flag values into cfg.
func (c *baseCmd) flagsToConfig(cfg config.Provider) {
	initializeFlags(c.cmd, cfg)
}
+
// hugoCmd is the root "hugo" command.
type hugoCmd struct {
	*baseBuilderCmd

	// Need to get the sites once built.
	c *commandeer
}
+
var _ cmder = (*nilCommand)(nil)

// nilCommand is a cmder with no cobra command; addCommands skips it.
type nilCommand struct{}

func (c *nilCommand) getCommand() *cobra.Command {
	return nil
}

// flagsToConfig is a no-op: there are no flags to copy.
func (c *nilCommand) flagsToConfig(cfg config.Provider) {
}
+
// newHugoCmd creates the root "hugo" command with its persistent and local
// flags, wiring RunE to initialize the config and run a full build.
func (b *commandsBuilder) newHugoCmd() *hugoCmd {
	cc := &hugoCmd{}

	cc.baseBuilderCmd = b.newBuilderCmd(&cobra.Command{
		Use: "hugo",
		Short: "hugo builds your site",
		Long: `hugo is the main command, used to build your Hugo site.

Hugo is a Fast and Flexible Static Site Generator
built with love by spf13 and friends in Go.

Complete documentation is available at https://gohugo.io/.`,
		RunE: func(cmd *cobra.Command, args []string) error {
			defer cc.timeTrack(time.Now(), "Total")
			cfgInit := func(c *commandeer) error {
				if cc.buildWatch {
					// Watch mode without the server: no live reload.
					c.Set("disableLiveReload", true)
				}
				return nil
			}

			// prevent cobra printing error so it can be handled here (before the timeTrack prints)
			cmd.SilenceErrors = true

			c, err := initializeConfig(true, true, cc.buildWatch, &cc.hugoBuilderCommon, cc, cfgInit)
			if err != nil {
				cmd.PrintErrln("Error:", err.Error())
				return err
			}
			cc.c = c

			err = c.build()
			if err != nil {
				cmd.PrintErrln("Error:", err.Error())
			}
			return err
		},
	})

	cc.cmd.PersistentFlags().StringVar(&cc.cfgFile, "config", "", "config file (default is path/config.yaml|json|toml)")
	cc.cmd.PersistentFlags().StringVar(&cc.cfgDir, "configDir", "config", "config dir")
	cc.cmd.PersistentFlags().BoolVar(&cc.quiet, "quiet", false, "build in quiet mode")

	// Set bash-completion
	_ = cc.cmd.PersistentFlags().SetAnnotation("config", cobra.BashCompFilenameExt, config.ValidConfigFileExtensions)

	cc.cmd.PersistentFlags().BoolVarP(&cc.verbose, "verbose", "v", false, "verbose output")
	cc.cmd.PersistentFlags().BoolVarP(&cc.debug, "debug", "", false, "debug output")
	cc.cmd.PersistentFlags().BoolVar(&cc.logging, "log", false, "enable Logging")
	cc.cmd.PersistentFlags().StringVar(&cc.logFile, "logFile", "", "log File path (if set, logging enabled automatically)")
	cc.cmd.PersistentFlags().BoolVar(&cc.verboseLog, "verboseLog", false, "verbose logging")

	cc.cmd.Flags().BoolVarP(&cc.buildWatch, "watch", "w", false, "watch filesystem for changes and recreate as needed")

	cc.cmd.Flags().Bool("renderToMemory", false, "render to memory (only useful for benchmark testing)")

	// Set bash-completion
	_ = cc.cmd.PersistentFlags().SetAnnotation("logFile", cobra.BashCompFilenameExt, []string{})

	cc.cmd.SetGlobalNormalizationFunc(helpers.NormalizeHugoFlags)
	cc.cmd.SilenceUsage = true

	return cc
}
+
// hugoBuilderCommon holds the flag values shared by all builder-style
// commands (hugo, server, etc.).
type hugoBuilderCommon struct {
	source string
	baseURL string
	environment string

	buildWatch bool
	poll string
	clock string

	gc bool

	// Profile flags (for debugging of performance problems)
	cpuprofile string
	memprofile string
	mutexprofile string
	traceprofile string
	printm bool

	// TODO(bep) var vs string
	logging bool
	verbose bool
	verboseLog bool
	debug bool
	quiet bool

	cfgFile string
	cfgDir string
	logFile string
}
+
+func (cc *hugoBuilderCommon) timeTrack(start time.Time, name string) {
+ if cc.quiet {
+ return
+ }
+ elapsed := time.Since(start)
+ fmt.Printf("%s in %v ms\n", name, int(1000*elapsed.Seconds()))
+}
+
+func (cc *hugoBuilderCommon) getConfigDir(baseDir string) string {
+ if cc.cfgDir != "" {
+ return hpaths.AbsPathify(baseDir, cc.cfgDir)
+ }
+
+ if v, found := os.LookupEnv("HUGO_CONFIGDIR"); found {
+ return hpaths.AbsPathify(baseDir, v)
+ }
+
+ return hpaths.AbsPathify(baseDir, "config")
+}
+
+func (cc *hugoBuilderCommon) getEnvironment(isServer bool) string {
+ if cc.environment != "" {
+ return cc.environment
+ }
+
+ if v, found := os.LookupEnv("HUGO_ENVIRONMENT"); found {
+ return v
+ }
+
+ // Used by Netlify and Forestry
+ if v, found := os.LookupEnv("HUGO_ENV"); found {
+ return v
+ }
+
+ if isServer {
+ return hugo.EnvironmentDevelopment
+ }
+
+ return hugo.EnvironmentProduction
+}
+
+func (cc *hugoBuilderCommon) handleCommonBuilderFlags(cmd *cobra.Command) {
+ cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
+ cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
+ cmd.PersistentFlags().StringVarP(&cc.environment, "environment", "e", "", "build environment")
+ cmd.PersistentFlags().StringP("themesDir", "", "", "filesystem path to themes directory")
+ cmd.PersistentFlags().StringP("ignoreVendorPaths", "", "", "ignores any _vendor for module paths matching the given Glob pattern")
+ cmd.PersistentFlags().StringVar(&cc.clock, "clock", "", "set the clock used by Hugo, e.g. --clock 2021-11-06T22:30:00.00+09:00")
+}
+
+func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
+ cc.handleCommonBuilderFlags(cmd)
+ cmd.Flags().Bool("cleanDestinationDir", false, "remove files from destination not found in static directories")
+ cmd.Flags().BoolP("buildDrafts", "D", false, "include content marked as draft")
+ cmd.Flags().BoolP("buildFuture", "F", false, "include content with publishdate in the future")
+ cmd.Flags().BoolP("buildExpired", "E", false, "include expired content")
+ cmd.Flags().StringP("contentDir", "c", "", "filesystem path to content directory")
+ cmd.Flags().StringP("layoutDir", "l", "", "filesystem path to layout directory")
+ cmd.Flags().StringP("cacheDir", "", "", "filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/")
+ cmd.Flags().BoolP("ignoreCache", "", false, "ignores the cache directory")
+ cmd.Flags().StringP("destination", "d", "", "filesystem path to write files to")
+ cmd.Flags().StringSliceP("theme", "t", []string{}, "themes to use (located in /themes/THEMENAME/)")
+ cmd.Flags().StringVarP(&cc.baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. https://spf13.com/")
+ cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date, author, and CODEOWNERS info to the pages")
+ cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
+ cmd.Flags().StringVar(&cc.poll, "poll", "", "set this to a poll interval, e.g --poll 700ms, to use a poll based approach to watch for file system changes")
+ cmd.Flags().BoolVar(&loggers.PanicOnWarning, "panicOnWarning", false, "panic on first WARNING log")
+ cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
+ cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics")
+ cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.")
+ cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
+ cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
+ cmd.Flags().BoolP("noBuildLock", "", false, "don't create .hugo_build.lock file")
+ cmd.Flags().BoolP("printI18nWarnings", "", false, "print missing translations")
+ cmd.Flags().BoolP("printPathWarnings", "", false, "print warnings on duplicate target paths etc.")
+ cmd.Flags().BoolP("printUnusedTemplates", "", false, "print warnings on unused templates.")
+ cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`")
+ cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`")
+ cmd.Flags().BoolVarP(&cc.printm, "printMemoryUsage", "", false, "print memory usage to screen at intervals")
+ cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`")
+ cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)")
+
+ // Hide these for now.
+ cmd.Flags().MarkHidden("profile-cpu")
+ cmd.Flags().MarkHidden("profile-mem")
+ cmd.Flags().MarkHidden("profile-mutex")
+
+ cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)")
+
+ cmd.Flags().Bool("minify", false, "minify any supported output format (HTML, XML etc.)")
+
+ // Set bash-completion.
+ // Each flag must first be defined before using the SetAnnotation() call.
+ _ = cmd.Flags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
+ _ = cmd.Flags().SetAnnotation("cacheDir", cobra.BashCompSubdirsInDir, []string{})
+ _ = cmd.Flags().SetAnnotation("destination", cobra.BashCompSubdirsInDir, []string{})
+ _ = cmd.Flags().SetAnnotation("theme", cobra.BashCompSubdirsInDir, []string{"themes"})
+}
+
+func checkErr(logger loggers.Logger, err error, s ...string) {
+ if err == nil {
+ return
+ }
+ for _, message := range s {
+ logger.Errorln(message)
+ }
+ logger.Errorln(err)
+}
diff --git a/commands/commands_test.go b/commands/commands_test.go
new file mode 100644
index 000000000..97d81ec6e
--- /dev/null
+++ b/commands/commands_test.go
@@ -0,0 +1,396 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/spf13/cobra"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestExecute(t *testing.T) {
+ c := qt.New(t)
+
+ createSite := func(c *qt.C) string {
+ dir := createSimpleTestSite(t, testSiteConfig{})
+ return dir
+ }
+
+ c.Run("hugo", func(c *qt.C) {
+ dir := createSite(c)
+ resp := Execute([]string{"-s=" + dir})
+ c.Assert(resp.Err, qt.IsNil)
+ result := resp.Result
+ c.Assert(len(result.Sites) == 1, qt.Equals, true)
+ c.Assert(len(result.Sites[0].RegularPages()) == 1, qt.Equals, true)
+ c.Assert(result.Sites[0].Info.Params()["myparam"], qt.Equals, "paramproduction")
+ })
+
+ c.Run("hugo, set environment", func(c *qt.C) {
+ dir := createSite(c)
+ resp := Execute([]string{"-s=" + dir, "-e=staging"})
+ c.Assert(resp.Err, qt.IsNil)
+ result := resp.Result
+ c.Assert(result.Sites[0].Info.Params()["myparam"], qt.Equals, "paramstaging")
+ })
+
+ c.Run("convert toJSON", func(c *qt.C) {
+ dir := createSite(c)
+ output := filepath.Join(dir, "myjson")
+ resp := Execute([]string{"convert", "toJSON", "-s=" + dir, "-e=staging", "-o=" + output})
+ c.Assert(resp.Err, qt.IsNil)
+ converted := readFileFrom(c, filepath.Join(output, "content", "p1.md"))
+ c.Assert(converted, qt.Equals, "{\n \"title\": \"P1\",\n \"weight\": 1\n}\n\nContent\n\n", qt.Commentf(converted))
+ })
+
+ c.Run("config, set environment", func(c *qt.C) {
+ dir := createSite(c)
+ out, err := captureStdout(func() error {
+ resp := Execute([]string{"config", "-s=" + dir, "-e=staging"})
+ return resp.Err
+ })
+ c.Assert(err, qt.IsNil)
+ c.Assert(out, qt.Contains, "params = map[myparam:paramstaging]", qt.Commentf(out))
+ })
+
+ c.Run("deploy, environment set", func(c *qt.C) {
+ dir := createSite(c)
+ resp := Execute([]string{"deploy", "-s=" + dir, "-e=staging", "--target=mydeployment", "--dryRun"})
+ c.Assert(resp.Err, qt.Not(qt.IsNil))
+ c.Assert(resp.Err.Error(), qt.Contains, `no driver registered for "hugocloud"`)
+ })
+
+ c.Run("list", func(c *qt.C) {
+ dir := createSite(c)
+ out, err := captureStdout(func() error {
+ resp := Execute([]string{"list", "all", "-s=" + dir, "-e=staging"})
+ return resp.Err
+ })
+ c.Assert(err, qt.IsNil)
+ c.Assert(out, qt.Contains, "p1.md")
+ })
+
+ c.Run("new theme", func(c *qt.C) {
+ dir := createSite(c)
+ themesDir := filepath.Join(dir, "mythemes")
+ resp := Execute([]string{"new", "theme", "mytheme", "-s=" + dir, "-e=staging", "--themesDir=" + themesDir})
+ c.Assert(resp.Err, qt.IsNil)
+ themeTOML := readFileFrom(c, filepath.Join(themesDir, "mytheme", "theme.toml"))
+ c.Assert(themeTOML, qt.Contains, "name = \"Mytheme\"")
+ })
+
+ c.Run("new site", func(c *qt.C) {
+ dir := createSite(c)
+ siteDir := filepath.Join(dir, "mysite")
+ resp := Execute([]string{"new", "site", siteDir, "-e=staging"})
+ c.Assert(resp.Err, qt.IsNil)
+ config := readFileFrom(c, filepath.Join(siteDir, "config.toml"))
+ c.Assert(config, qt.Contains, "baseURL = 'http://example.org/'")
+ checkNewSiteInited(c, siteDir)
+ })
+}
+
+func checkNewSiteInited(c *qt.C, basepath string) {
+ paths := []string{
+ filepath.Join(basepath, "layouts"),
+ filepath.Join(basepath, "content"),
+ filepath.Join(basepath, "archetypes"),
+ filepath.Join(basepath, "static"),
+ filepath.Join(basepath, "data"),
+ filepath.Join(basepath, "config.toml"),
+ }
+
+ for _, path := range paths {
+ _, err := os.Stat(path)
+ c.Assert(err, qt.IsNil)
+ }
+}
+
+func readFileFrom(c *qt.C, filename string) string {
+ c.Helper()
+ filename = filepath.Clean(filename)
+ b, err := afero.ReadFile(hugofs.Os, filename)
+ c.Assert(err, qt.IsNil)
+ return string(b)
+}
+
+func TestFlags(t *testing.T) {
+ c := qt.New(t)
+
+ noOpRunE := func(cmd *cobra.Command, args []string) error {
+ return nil
+ }
+
+ tests := []struct {
+ name string
+ args []string
+ check func(c *qt.C, cmd *serverCmd)
+ }{
+ {
+ // https://github.com/gohugoio/hugo/issues/7642
+ name: "ignoreVendorPaths",
+ args: []string{"server", "--ignoreVendorPaths=github.com/**"},
+ check: func(c *qt.C, cmd *serverCmd) {
+ cfg := config.NewWithTestDefaults()
+ cmd.flagsToConfig(cfg)
+ c.Assert(cfg.Get("ignoreVendorPaths"), qt.Equals, "github.com/**")
+ },
+ },
+ {
+ name: "Persistent flags",
+ args: []string{
+ "server",
+ "--config=myconfig.toml",
+ "--configDir=myconfigdir",
+ "--contentDir=mycontent",
+ "--disableKinds=page,home",
+ "--environment=testing",
+ "--configDir=myconfigdir",
+ "--layoutDir=mylayouts",
+ "--theme=mytheme",
+ "--gc",
+ "--themesDir=mythemes",
+ "--cleanDestinationDir",
+ "--navigateToChanged",
+ "--disableLiveReload",
+ "--noHTTPCache",
+ "--printI18nWarnings",
+ "--destination=/tmp/mydestination",
+ "-b=https://example.com/b/",
+ "--port=1366",
+ "--renderToDisk",
+ "--source=mysource",
+ "--printPathWarnings",
+ "--printUnusedTemplates",
+ },
+ check: func(c *qt.C, sc *serverCmd) {
+ c.Assert(sc, qt.Not(qt.IsNil))
+ c.Assert(sc.navigateToChanged, qt.Equals, true)
+ c.Assert(sc.disableLiveReload, qt.Equals, true)
+ c.Assert(sc.noHTTPCache, qt.Equals, true)
+ c.Assert(sc.renderToDisk, qt.Equals, true)
+ c.Assert(sc.serverPort, qt.Equals, 1366)
+ c.Assert(sc.environment, qt.Equals, "testing")
+
+ cfg := config.NewWithTestDefaults()
+ sc.flagsToConfig(cfg)
+ c.Assert(cfg.GetString("publishDir"), qt.Equals, "/tmp/mydestination")
+ c.Assert(cfg.GetString("contentDir"), qt.Equals, "mycontent")
+ c.Assert(cfg.GetString("layoutDir"), qt.Equals, "mylayouts")
+ c.Assert(cfg.GetStringSlice("theme"), qt.DeepEquals, []string{"mytheme"})
+ c.Assert(cfg.GetString("themesDir"), qt.Equals, "mythemes")
+ c.Assert(cfg.GetString("baseURL"), qt.Equals, "https://example.com/b/")
+
+ c.Assert(cfg.Get("disableKinds"), qt.DeepEquals, []string{"page", "home"})
+
+ c.Assert(cfg.GetBool("gc"), qt.Equals, true)
+
+ // The flag is named printPathWarnings
+ c.Assert(cfg.GetBool("logPathWarnings"), qt.Equals, true)
+
+ // The flag is named printI18nWarnings
+ c.Assert(cfg.GetBool("logI18nWarnings"), qt.Equals, true)
+ },
+ },
+ }
+
+ for _, test := range tests {
+ c.Run(test.name, func(c *qt.C) {
+ b := newCommandsBuilder()
+ root := b.addAll().build()
+
+ for _, cmd := range b.commands {
+ if cmd.getCommand() == nil {
+ continue
+ }
+ // We are only interested in the flag handling here.
+ cmd.getCommand().RunE = noOpRunE
+ }
+ rootCmd := root.getCommand()
+ rootCmd.SetArgs(test.args)
+ c.Assert(rootCmd.Execute(), qt.IsNil)
+ test.check(c, b.commands[0].(*serverCmd))
+ })
+ }
+}
+
+func TestCommandsExecute(t *testing.T) {
+ c := qt.New(t)
+
+ dir := createSimpleTestSite(t, testSiteConfig{})
+ dirOut := t.TempDir()
+
+ sourceFlag := fmt.Sprintf("-s=%s", dir)
+
+ tests := []struct {
+ commands []string
+ flags []string
+ expectErrToContain string
+ }{
+ // TODO(bep) permission issue on my OSX? "operation not permitted" {[]string{"check", "ulimit"}, nil, false},
+ {[]string{"env"}, nil, ""},
+ {[]string{"version"}, nil, ""},
+ // no args = hugo build
+ {nil, []string{sourceFlag}, ""},
+ {nil, []string{sourceFlag, "--renderToMemory"}, ""},
+ {[]string{"completion", "bash"}, nil, ""},
+ {[]string{"completion", "fish"}, nil, ""},
+ {[]string{"completion", "powershell"}, nil, ""},
+ {[]string{"completion", "zsh"}, nil, ""},
+ {[]string{"config"}, []string{sourceFlag}, ""},
+ {[]string{"convert", "toTOML"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "toml")}, ""},
+ {[]string{"convert", "toYAML"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "yaml")}, ""},
+ {[]string{"convert", "toJSON"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "json")}, ""},
+ {[]string{"gen", "chromastyles"}, []string{"--style=manni"}, ""},
+ {[]string{"gen", "doc"}, []string{"--dir=" + filepath.Join(dirOut, "doc")}, ""},
+ {[]string{"gen", "man"}, []string{"--dir=" + filepath.Join(dirOut, "man")}, ""},
+ {[]string{"list", "drafts"}, []string{sourceFlag}, ""},
+ {[]string{"list", "expired"}, []string{sourceFlag}, ""},
+ {[]string{"list", "future"}, []string{sourceFlag}, ""},
+ {[]string{"new", "new-page.md"}, []string{sourceFlag}, ""},
+ {[]string{"new", "site", filepath.Join(dirOut, "new-site")}, nil, ""},
+ {[]string{"unknowncommand"}, nil, "unknown command"},
+ // TODO(bep) cli refactor fix https://github.com/gohugoio/hugo/issues/4450
+ //{[]string{"new", "theme", filepath.Join(dirOut, "new-theme")}, nil,false},
+ }
+
+ for _, test := range tests {
+ name := "hugo"
+ if len(test.commands) > 0 {
+ name = test.commands[0]
+ }
+ c.Run(name, func(c *qt.C) {
+ b := newCommandsBuilder().addAll().build()
+ hugoCmd := b.getCommand()
+ test.flags = append(test.flags, "--quiet")
+ hugoCmd.SetArgs(append(test.commands, test.flags...))
+
+ // TODO(bep) capture output and add some simple asserts
+ // TODO(bep) misspelled subcommands does not return an error. We should investigate this
+ // but before that, check for "Error: unknown command".
+
+ _, err := hugoCmd.ExecuteC()
+ if test.expectErrToContain != "" {
+ c.Assert(err, qt.Not(qt.IsNil))
+ c.Assert(err.Error(), qt.Contains, test.expectErrToContain)
+ } else {
+ c.Assert(err, qt.IsNil)
+ }
+
+ // Assert that we have not left any development debug artifacts in
+ // the code.
+ if b.c != nil {
+ _, ok := b.c.publishDirFs.(types.DevMarker)
+ c.Assert(ok, qt.Equals, false)
+ }
+ })
+
+ }
+}
+
+type testSiteConfig struct {
+ configTOML string
+ contentDir string
+}
+
+func createSimpleTestSite(t testing.TB, cfg testSiteConfig) string {
+ dir := t.TempDir()
+
+ cfgStr := `
+
+baseURL = "https://example.org"
+title = "Hugo Commands"
+
+
+`
+
+ contentDir := "content"
+
+ if cfg.configTOML != "" {
+ cfgStr = cfg.configTOML
+ }
+ if cfg.contentDir != "" {
+ contentDir = cfg.contentDir
+ }
+
+ os.MkdirAll(filepath.Join(dir, "public"), 0777)
+
+ // Just the basic. These are for CLI tests, not site testing.
+ writeFile(t, filepath.Join(dir, "config.toml"), cfgStr)
+ writeFile(t, filepath.Join(dir, "config", "staging", "params.toml"), `myparam="paramstaging"`)
+ writeFile(t, filepath.Join(dir, "config", "staging", "deployment.toml"), `
+[[targets]]
+name = "mydeployment"
+URL = "hugocloud://hugotestbucket"
+`)
+
+ writeFile(t, filepath.Join(dir, "config", "testing", "params.toml"), `myparam="paramtesting"`)
+ writeFile(t, filepath.Join(dir, "config", "production", "params.toml"), `myparam="paramproduction"`)
+
+ writeFile(t, filepath.Join(dir, "static", "myfile.txt"), `Hello World!`)
+
+ writeFile(t, filepath.Join(dir, contentDir, "p1.md"), `
+---
+title: "P1"
+weight: 1
+---
+
+Content
+
+`)
+
+ writeFile(t, filepath.Join(dir, "layouts", "_default", "single.html"), `
+
+Single: {{ .Title }}
+
+`)
+
+ writeFile(t, filepath.Join(dir, "layouts", "_default", "list.html"), `
+
+List: {{ .Title }}
+Environment: {{ hugo.Environment }}
+
+For issue 9788:
+{{ $foo :="abc" | resources.FromString "foo.css" | minify | resources.PostProcess }}
+PostProcess: {{ $foo.RelPermalink }}
+
+`)
+
+ return dir
+}
+
+func writeFile(t testing.TB, filename, content string) {
+ must(t, os.MkdirAll(filepath.Dir(filename), os.FileMode(0755)))
+ must(t, ioutil.WriteFile(filename, []byte(content), os.FileMode(0755)))
+}
+
+func must(t testing.TB, err error) {
+ if err != nil {
+ t.Fatal(err)
+ }
+}
diff --git a/commands/config.go b/commands/config.go
new file mode 100644
index 000000000..7fda2d40e
--- /dev/null
+++ b/commands/config.go
@@ -0,0 +1,185 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "reflect"
+ "regexp"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/parser"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*configCmd)(nil)
+
+type configCmd struct {
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newConfigCmd() *configCmd {
+ cc := &configCmd{}
+ cmd := &cobra.Command{
+ Use: "config",
+ Short: "Print the site configuration",
+ Long: `Print the site configuration, both default and custom settings.`,
+ RunE: cc.printConfig,
+ }
+
+ printMountsCmd := &cobra.Command{
+ Use: "mounts",
+ Short: "Print the configured file mounts",
+ RunE: cc.printMounts,
+ }
+
+ cmd.AddCommand(printMountsCmd)
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
+
+func (c *configCmd) printMounts(cmd *cobra.Command, args []string) error {
+ cfg, err := initializeConfig(true, false, false, &c.hugoBuilderCommon, c, nil)
+ if err != nil {
+ return err
+ }
+
+ allModules := cfg.Cfg.Get("allmodules").(modules.Modules)
+
+ for _, m := range allModules {
+ if err := parser.InterfaceToConfig(&modMounts{m: m, verbose: c.verbose}, metadecoders.JSON, os.Stdout); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (c *configCmd) printConfig(cmd *cobra.Command, args []string) error {
+ cfg, err := initializeConfig(true, false, false, &c.hugoBuilderCommon, c, nil)
+ if err != nil {
+ return err
+ }
+
+ allSettings := cfg.Cfg.Get("").(maps.Params)
+
+ // We need to clean this up, but we store objects in the config that
+ // aren't really interesting to the end user, so filter these.
+ ignoreKeysRe := regexp.MustCompile("client|sorted|filecacheconfigs|allmodules|multilingual")
+
+ separator := ": "
+
+ if len(cfg.configFiles) > 0 && strings.HasSuffix(cfg.configFiles[0], ".toml") {
+ separator = " = "
+ }
+
+ var keys []string
+ for k := range allSettings {
+ if ignoreKeysRe.MatchString(k) {
+ continue
+ }
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ for _, k := range keys {
+ kv := reflect.ValueOf(allSettings[k])
+ if kv.Kind() == reflect.String {
+ fmt.Printf("%s%s\"%+v\"\n", k, separator, allSettings[k])
+ } else {
+ fmt.Printf("%s%s%+v\n", k, separator, allSettings[k])
+ }
+ }
+
+ return nil
+}
+
+type modMounts struct {
+ verbose bool
+ m modules.Module
+}
+
+type modMount struct {
+ Source string `json:"source"`
+ Target string `json:"target"`
+ Lang string `json:"lang,omitempty"`
+}
+
+func (m *modMounts) MarshalJSON() ([]byte, error) {
+ var mounts []modMount
+
+ for _, mount := range m.m.Mounts() {
+ mounts = append(mounts, modMount{
+ Source: mount.Source,
+ Target: mount.Target,
+ Lang: mount.Lang,
+ })
+ }
+
+ var ownerPath string
+ if m.m.Owner() != nil {
+ ownerPath = m.m.Owner().Path()
+ }
+
+ if m.verbose {
+ config := m.m.Config()
+ return json.Marshal(&struct {
+ Path string `json:"path"`
+ Version string `json:"version"`
+ Time time.Time `json:"time"`
+ Owner string `json:"owner"`
+ Dir string `json:"dir"`
+ Meta map[string]any `json:"meta"`
+ HugoVersion modules.HugoVersion `json:"hugoVersion"`
+
+ Mounts []modMount `json:"mounts"`
+ }{
+ Path: m.m.Path(),
+ Version: m.m.Version(),
+ Time: m.m.Time(),
+ Owner: ownerPath,
+ Dir: m.m.Dir(),
+ Meta: config.Params,
+ HugoVersion: config.HugoVersion,
+ Mounts: mounts,
+ })
+ }
+
+ return json.Marshal(&struct {
+ Path string `json:"path"`
+ Version string `json:"version"`
+ Time time.Time `json:"time"`
+ Owner string `json:"owner"`
+ Dir string `json:"dir"`
+ Mounts []modMount `json:"mounts"`
+ }{
+ Path: m.m.Path(),
+ Version: m.m.Version(),
+ Time: m.m.Time(),
+ Owner: ownerPath,
+ Dir: m.m.Dir(),
+ Mounts: mounts,
+ })
+
+}
diff --git a/commands/convert.go b/commands/convert.go
new file mode 100644
index 000000000..1ec965a0b
--- /dev/null
+++ b/commands/convert.go
@@ -0,0 +1,207 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "fmt"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/parser/pageparser"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/parser"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/gohugoio/hugo/hugolib"
+
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*convertCmd)(nil)
+
+type convertCmd struct {
+ outputDir string
+ unsafe bool
+
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newConvertCmd() *convertCmd {
+ cc := &convertCmd{}
+
+ cmd := &cobra.Command{
+ Use: "convert",
+ Short: "Convert your content to different formats",
+ Long: `Convert your content (e.g. front matter) to different formats.
+
+See convert's subcommands toJSON, toTOML and toYAML for more information.`,
+ RunE: nil,
+ }
+
+ cmd.AddCommand(
+ &cobra.Command{
+ Use: "toJSON",
+ Short: "Convert front matter to JSON",
+ Long: `toJSON converts all front matter in the content directory
+to use JSON for the front matter.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return cc.convertContents(metadecoders.JSON)
+ },
+ },
+ &cobra.Command{
+ Use: "toTOML",
+ Short: "Convert front matter to TOML",
+ Long: `toTOML converts all front matter in the content directory
+to use TOML for the front matter.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return cc.convertContents(metadecoders.TOML)
+ },
+ },
+ &cobra.Command{
+ Use: "toYAML",
+ Short: "Convert front matter to YAML",
+ Long: `toYAML converts all front matter in the content directory
+to use YAML for the front matter.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return cc.convertContents(metadecoders.YAML)
+ },
+ },
+ )
+
+ cmd.PersistentFlags().StringVarP(&cc.outputDir, "output", "o", "", "filesystem path to write files to")
+ cmd.PersistentFlags().BoolVar(&cc.unsafe, "unsafe", false, "enable less safe operations, please backup first")
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
+
+func (cc *convertCmd) convertContents(format metadecoders.Format) error {
+ if cc.outputDir == "" && !cc.unsafe {
+ return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path")
+ }
+
+ c, err := initializeConfig(true, false, false, &cc.hugoBuilderCommon, cc, nil)
+ if err != nil {
+ return err
+ }
+
+ c.Cfg.Set("buildDrafts", true)
+
+ h, err := hugolib.NewHugoSites(*c.DepsCfg)
+ if err != nil {
+ return err
+ }
+
+ if err := h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
+ return err
+ }
+
+ site := h.Sites[0]
+
+ site.Log.Println("processing", len(site.AllPages()), "content files")
+ for _, p := range site.AllPages() {
+ if err := cc.convertAndSavePage(p, site, format); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
+ // The resources are not in .Site.AllPages.
+ for _, r := range p.Resources().ByType("page") {
+ if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
+ return err
+ }
+ }
+
+ if p.File().IsZero() {
+ // No content file.
+ return nil
+ }
+
+ errMsg := fmt.Errorf("Error processing file %q", p.File().Path())
+
+ site.Log.Infoln("Attempting to convert", p.File().Filename())
+
+ f := p.File()
+ file, err := f.FileInfo().Meta().Open()
+ if err != nil {
+ site.Log.Errorln(errMsg)
+ file.Close()
+ return nil
+ }
+
+ pf, err := pageparser.ParseFrontMatterAndContent(file)
+ if err != nil {
+ site.Log.Errorln(errMsg)
+ file.Close()
+ return err
+ }
+
+ file.Close()
+
+ // better handling of dates in formats that don't have support for them
+ if pf.FrontMatterFormat == metadecoders.JSON || pf.FrontMatterFormat == metadecoders.YAML || pf.FrontMatterFormat == metadecoders.TOML {
+ for k, v := range pf.FrontMatter {
+ switch vv := v.(type) {
+ case time.Time:
+ pf.FrontMatter[k] = vv.Format(time.RFC3339)
+ }
+ }
+ }
+
+ var newContent bytes.Buffer
+ err = parser.InterfaceToFrontMatter(pf.FrontMatter, targetFormat, &newContent)
+ if err != nil {
+ site.Log.Errorln(errMsg)
+ return err
+ }
+
+ newContent.Write(pf.Content)
+
+ newFilename := p.File().Filename()
+
+ if cc.outputDir != "" {
+ contentDir := strings.TrimSuffix(newFilename, p.File().Path())
+ contentDir = filepath.Base(contentDir)
+
+ newFilename = filepath.Join(cc.outputDir, contentDir, p.File().Path())
+ }
+
+ fs := hugofs.Os
+ if err := helpers.WriteToDisk(newFilename, &newContent, fs); err != nil {
+ return fmt.Errorf("Failed to save file %q:: %w", newFilename, err)
+ }
+
+ return nil
+}
+
+type parsedFile struct {
+ frontMatterFormat metadecoders.Format
+ frontMatterSource []byte
+ frontMatter map[string]any
+
+ // Everything after Front Matter
+ content []byte
+}
diff --git a/commands/deploy.go b/commands/deploy.go
new file mode 100644
index 000000000..071e10a6a
--- /dev/null
+++ b/commands/deploy.go
@@ -0,0 +1,86 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package commands
+
+import (
+ "context"
+
+ "github.com/gohugoio/hugo/deploy"
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*deployCmd)(nil)
+
+// deployCmd supports deploying sites to Cloud providers.
+type deployCmd struct {
+ *baseBuilderCmd
+
+ invalidateCDN bool
+ maxDeletes int
+}
+
+// TODO: In addition to the "deploy" command, consider adding a "--deploy"
+// flag for the default command; this would build the site and then deploy it.
+// It's not obvious how to do this; would all of the deploy-specific flags
+// have to exist at the top level as well?
+
+// TODO: The output files change every time "hugo" is executed, it looks
+// like because of map order randomization. This means that you can
+// run "hugo && hugo deploy" again and again and upload new stuff every time. Is
+// this intended?
+
+func (b *commandsBuilder) newDeployCmd() *deployCmd {
+ cc := &deployCmd{}
+
+ cmd := &cobra.Command{
+ Use: "deploy",
+ Short: "Deploy your site to a Cloud provider.",
+ Long: `Deploy your site to a Cloud provider.
+
+See https://gohugo.io/hosting-and-deployment/hugo-deploy/ for detailed
+documentation.
+`,
+
+ RunE: func(cmd *cobra.Command, args []string) error {
+ cfgInit := func(c *commandeer) error {
+ c.Set("invalidateCDN", cc.invalidateCDN)
+ c.Set("maxDeletes", cc.maxDeletes)
+ return nil
+ }
+ comm, err := initializeConfig(true, true, false, &cc.hugoBuilderCommon, cc, cfgInit)
+ if err != nil {
+ return err
+ }
+ deployer, err := deploy.New(comm.Cfg, comm.hugo().PathSpec.PublishFs)
+ if err != nil {
+ return err
+ }
+ return deployer.Deploy(context.Background())
+ },
+ }
+
+ cmd.Flags().String("target", "", "target deployment from deployments section in config file; defaults to the first one")
+ cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
+ cmd.Flags().Bool("dryRun", false, "dry run")
+ cmd.Flags().Bool("force", false, "force upload of all files")
+ cmd.Flags().BoolVar(&cc.invalidateCDN, "invalidateCDN", true, "invalidate the CDN cache listed in the deployment target")
+ cmd.Flags().IntVar(&cc.maxDeletes, "maxDeletes", 256, "maximum # of files to delete, or -1 to disable")
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
diff --git a/commands/env.go b/commands/env.go
new file mode 100644
index 000000000..65808b1be
--- /dev/null
+++ b/commands/env.go
@@ -0,0 +1,60 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "runtime"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*envCmd)(nil)
+
+type envCmd struct {
+ *baseCmd
+}
+
+func newEnvCmd() *envCmd {
+ return &envCmd{
+ baseCmd: newBaseCmd(&cobra.Command{
+ Use: "env",
+ Short: "Print Hugo version and environment info",
+ Long: `Print Hugo version and environment info. This is useful in Hugo bug reports.
+
+If you add the -v flag, you will get a full dependency list.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ printHugoVersion()
+ jww.FEEDBACK.Printf("GOOS=%q\n", runtime.GOOS)
+ jww.FEEDBACK.Printf("GOARCH=%q\n", runtime.GOARCH)
+ jww.FEEDBACK.Printf("GOVERSION=%q\n", runtime.Version())
+
+ isVerbose, _ := cmd.Flags().GetBool("verbose")
+
+ if isVerbose {
+ deps := hugo.GetDependencyList()
+ for _, dep := range deps {
+ jww.FEEDBACK.Printf("%s\n", dep)
+ }
+ }
+
+ return nil
+ },
+ }),
+ }
+
+}
diff --git a/commands/gen.go b/commands/gen.go
new file mode 100644
index 000000000..c44eba36c
--- /dev/null
+++ b/commands/gen.go
@@ -0,0 +1,40 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*genCmd)(nil)
+
+type genCmd struct {
+ *baseCmd
+}
+
+func newGenCmd() *genCmd {
+ cc := &genCmd{}
+ cc.baseCmd = newBaseCmd(&cobra.Command{
+ Use: "gen",
+ Short: "A collection of several useful generators.",
+ })
+
+ cc.cmd.AddCommand(
+ newGenDocCmd().getCommand(),
+ newGenManCmd().getCommand(),
+ createGenDocsHelper().getCommand(),
+ createGenChromaStyles().getCommand())
+
+ return cc
+}
diff --git a/commands/genchromastyles.go b/commands/genchromastyles.go
new file mode 100644
index 000000000..4dfa77d2e
--- /dev/null
+++ b/commands/genchromastyles.go
@@ -0,0 +1,72 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "os"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/alecthomas/chroma/v2/styles"
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*genChromaStyles)(nil)
+
+type genChromaStyles struct {
+ style string
+ highlightStyle string
+ linesStyle string
+ *baseCmd
+}
+
+// TODO(bep) highlight
+func createGenChromaStyles() *genChromaStyles {
+ g := &genChromaStyles{
+ baseCmd: newBaseCmd(&cobra.Command{
+ Use: "chromastyles",
+ Short: "Generate CSS stylesheet for the Chroma code highlighter",
+ Long: `Generate CSS stylesheet for the Chroma code highlighter for a given style. This stylesheet is needed if markup.highlight.noClasses is disabled in config.
+
+See https://xyproto.github.io/splash/docs/all.html for a preview of the available styles`,
+ }),
+ }
+
+ g.cmd.RunE = func(cmd *cobra.Command, args []string) error {
+ return g.generate()
+ }
+
+ g.cmd.PersistentFlags().StringVar(&g.style, "style", "friendly", "highlighter style (see https://xyproto.github.io/splash/docs/)")
+ g.cmd.PersistentFlags().StringVar(&g.highlightStyle, "highlightStyle", "bg:#ffffcc", "style used for highlighting lines (see https://github.com/alecthomas/chroma)")
+ g.cmd.PersistentFlags().StringVar(&g.linesStyle, "linesStyle", "", "style used for line numbers (see https://github.com/alecthomas/chroma)")
+
+ return g
+}
+
+func (g *genChromaStyles) generate() error {
+ builder := styles.Get(g.style).Builder()
+ if g.highlightStyle != "" {
+ builder.Add(chroma.LineHighlight, g.highlightStyle)
+ }
+ if g.linesStyle != "" {
+ builder.Add(chroma.LineNumbers, g.linesStyle)
+ }
+ style, err := builder.Build()
+ if err != nil {
+ return err
+ }
+ formatter := html.New(html.WithAllClasses(true))
+ formatter.WriteCSS(os.Stdout, style)
+ return nil
+}
diff --git a/commands/gendoc.go b/commands/gendoc.go
new file mode 100644
index 000000000..8ecb0ec0d
--- /dev/null
+++ b/commands/gendoc.go
@@ -0,0 +1,98 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/cobra"
+ "github.com/spf13/cobra/doc"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+// Compile-time check that genDocCmd satisfies the cmder interface.
+var _ cmder = (*genDocCmd)(nil)
+
+// genDocCmd implements the "gen doc" command, which renders the CLI
+// documentation as Markdown files into gendocdir.
+type genDocCmd struct {
+ gendocdir string
+ *baseCmd
+}
+
+// newGenDocCmd constructs the "doc" cobra command. It generates one Markdown
+// file per CLI command, each prefixed with Hugo-compatible front matter, into
+// the directory given by the --dir flag (created if missing).
+func newGenDocCmd() *genDocCmd {
+ const gendocFrontmatterTemplate = `---
+title: "%s"
+slug: %s
+url: %s
+---
+`
+
+ cc := &genDocCmd{}
+
+ cc.baseCmd = newBaseCmd(&cobra.Command{
+ Use: "doc",
+ Short: "Generate Markdown documentation for the Hugo CLI.",
+ Long: `Generate Markdown documentation for the Hugo CLI.
+
+This command is, mostly, used to create up-to-date documentation
+of Hugo's command-line interface for https://gohugo.io/.
+
+It creates one Markdown file per command with front matter suitable
+for rendering in Hugo.`,
+
+ RunE: func(cmd *cobra.Command, args []string) error {
+ cmd.VisitParents(func(c *cobra.Command) {
+ // Disable the "Auto generated by spf13/cobra on DATE"
+ // as it creates a lot of diffs.
+ c.DisableAutoGenTag = true
+ })
+
+ if !strings.HasSuffix(cc.gendocdir, helpers.FilePathSeparator) {
+ cc.gendocdir += helpers.FilePathSeparator
+ }
+ if found, _ := helpers.Exists(cc.gendocdir, hugofs.Os); !found {
+ jww.FEEDBACK.Println("Directory", cc.gendocdir, "does not exist, creating...")
+ if err := hugofs.Os.MkdirAll(cc.gendocdir, 0777); err != nil {
+ return err
+ }
+ }
+ // prepender emits the front matter for each generated page;
+ // the title keeps spaces where the filename has underscores.
+ prepender := func(filename string) string {
+ name := filepath.Base(filename)
+ base := strings.TrimSuffix(name, path.Ext(name))
+ url := "/commands/" + strings.ToLower(base) + "/"
+ return fmt.Sprintf(gendocFrontmatterTemplate, strings.Replace(base, "_", " ", -1), base, url)
+ }
+
+ // linkHandler rewrites cross-command links to their Hugo URLs.
+ linkHandler := func(name string) string {
+ base := strings.TrimSuffix(name, path.Ext(name))
+ return "/commands/" + strings.ToLower(base) + "/"
+ }
+ jww.FEEDBACK.Println("Generating Hugo command-line documentation in", cc.gendocdir, "...")
+ // Surface generation failures instead of silently dropping them.
+ if err := doc.GenMarkdownTreeCustom(cmd.Root(), cc.gendocdir, prepender, linkHandler); err != nil {
+ return err
+ }
+ jww.FEEDBACK.Println("Done.")
+
+ return nil
+ },
+ })
+
+ cc.cmd.PersistentFlags().StringVar(&cc.gendocdir, "dir", "/tmp/hugodoc/", "the directory to write the doc.")
+
+ // For bash-completion
+ cc.cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
+
+ return cc
+}
diff --git a/commands/gendocshelper.go b/commands/gendocshelper.go
new file mode 100644
index 000000000..34d45154f
--- /dev/null
+++ b/commands/gendocshelper.go
@@ -0,0 +1,71 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/docshelper"
+ "github.com/spf13/cobra"
+)
+
+// Compile-time check that genDocsHelper satisfies the cmder interface.
+var _ cmder = (*genDocsHelper)(nil)
+
+// genDocsHelper implements the hidden "gen docshelper" command, which dumps
+// docs data as JSON into the target directory.
+type genDocsHelper struct {
+ target string
+ *baseCmd
+}
+
+// createGenDocsHelper constructs the hidden "docshelper" cobra command and
+// wires its --dir flag (default "docs/data") to the returned struct.
+func createGenDocsHelper() *genDocsHelper {
+ g := &genDocsHelper{
+ baseCmd: newBaseCmd(&cobra.Command{
+ Use: "docshelper",
+ Short: "Generate some data files for the Hugo docs.",
+ Hidden: true,
+ }),
+ }
+
+ // The actual work happens in generate(); RunE just delegates.
+ g.cmd.RunE = func(cmd *cobra.Command, args []string) error {
+ return g.generate()
+ }
+
+ g.cmd.PersistentFlags().StringVarP(&g.target, "dir", "", "docs/data", "data dir")
+
+ return g
+}
+
+// generate writes the current doc provider data as indented JSON to
+// <target>/docs.json, creating/truncating the file.
+func (g *genDocsHelper) generate() error {
+ fmt.Println("Generate docs data to", g.target)
+
+ targetFile := filepath.Join(g.target, "docs.json")
+
+ f, err := os.Create(targetFile)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ enc := json.NewEncoder(f)
+ // Two-space indent keeps the generated file diff-friendly.
+ enc.SetIndent("", " ")
+
+ if err := enc.Encode(docshelper.GetDocProvider()); err != nil {
+ return err
+ }
+
+ fmt.Println("Done!")
+ return nil
+}
diff --git a/commands/genman.go b/commands/genman.go
new file mode 100644
index 000000000..720046289
--- /dev/null
+++ b/commands/genman.go
@@ -0,0 +1,77 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/cobra"
+ "github.com/spf13/cobra/doc"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+// Compile-time check that genManCmd satisfies the cmder interface.
+var _ cmder = (*genManCmd)(nil)
+
+// genManCmd implements the "gen man" command, which writes man pages for the
+// CLI into genmandir.
+type genManCmd struct {
+ genmandir string
+ *baseCmd
+}
+
+// newGenManCmd constructs the "man" cobra command. It generates section-1 man
+// pages for every CLI command into the directory given by the --dir flag
+// (default "man/", created if missing).
+func newGenManCmd() *genManCmd {
+ cc := &genManCmd{}
+
+ cc.baseCmd = newBaseCmd(&cobra.Command{
+ Use: "man",
+ Short: "Generate man pages for the Hugo CLI",
+ Long: `This command automatically generates up-to-date man pages of Hugo's
+command-line interface. By default, it creates the man page files
+in the "man" directory under the current directory.`,
+
+ RunE: func(cmd *cobra.Command, args []string) error {
+ header := &doc.GenManHeader{
+ Section: "1",
+ Manual: "Hugo Manual",
+ Source: fmt.Sprintf("Hugo %s", hugo.CurrentVersion),
+ }
+ if !strings.HasSuffix(cc.genmandir, helpers.FilePathSeparator) {
+ cc.genmandir += helpers.FilePathSeparator
+ }
+ if found, _ := helpers.Exists(cc.genmandir, hugofs.Os); !found {
+ jww.FEEDBACK.Println("Directory", cc.genmandir, "does not exist, creating...")
+ if err := hugofs.Os.MkdirAll(cc.genmandir, 0777); err != nil {
+ return err
+ }
+ }
+ // Disable the "Auto generated by spf13/cobra" footer to avoid noisy diffs.
+ cmd.Root().DisableAutoGenTag = true
+
+ jww.FEEDBACK.Println("Generating Hugo man pages in", cc.genmandir, "...")
+ // Surface generation failures instead of silently dropping them.
+ if err := doc.GenManTree(cmd.Root(), header, cc.genmandir); err != nil {
+ return err
+ }
+
+ jww.FEEDBACK.Println("Done.")
+
+ return nil
+ },
+ })
+
+ cc.cmd.PersistentFlags().StringVar(&cc.genmandir, "dir", "man/", "the directory to write the man pages.")
+
+ // For bash-completion
+ cc.cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
+
+ return cc
+}
diff --git a/commands/helpers.go b/commands/helpers.go
new file mode 100644
index 000000000..71f686953
--- /dev/null
+++ b/commands/helpers.go
@@ -0,0 +1,79 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package commands defines and implements command-line commands and flags
+// used by Hugo. Commands and flags are implemented using Cobra.
+package commands
+
+import (
+ "fmt"
+ "regexp"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/spf13/cobra"
+)
+
+// Terminal control sequences used for progress/status output.
+const (
+ ansiEsc = "\u001B"
+ clearLine = "\r\033[K"
+ hideCursor = ansiEsc + "[?25l"
+ showCursor = ansiEsc + "[?25h"
+)
+
+// flagsToConfigHandler copies a command's flag values into a config.Provider.
+type flagsToConfigHandler interface {
+ flagsToConfig(cfg config.Provider)
+}
+
+// cmder is the common interface implemented by every Hugo subcommand wrapper.
+type cmder interface {
+ flagsToConfigHandler
+ getCommand() *cobra.Command
+}
+
+// commandError is an error used to signal different error situations in command handling.
+type commandError struct {
+ s string
+ userError bool
+}
+
+// Error implements the error interface.
+func (c commandError) Error() string {
+ return c.s
+}
+
+// isUserError reports whether the error was caused by the user (as opposed to
+// a system/internal failure).
+func (c commandError) isUserError() bool {
+ return c.userError
+}
+
+// newUserError creates a commandError flagged as user-caused.
+// Note: fmt.Sprintln appends a trailing newline to the message.
+func newUserError(a ...any) commandError {
+ return commandError{s: fmt.Sprintln(a...), userError: true}
+}
+
+// newSystemError creates a commandError flagged as a system failure.
+func newSystemError(a ...any) commandError {
+ return commandError{s: fmt.Sprintln(a...), userError: false}
+}
+
+// newSystemErrorF is like newSystemError but takes a format string.
+func newSystemErrorF(format string, a ...any) commandError {
+ return commandError{s: fmt.Sprintf(format, a...), userError: false}
+}
+
+// Catch some of the obvious user errors from Cobra.
+// We don't want to show the usage message for every error.
+// The below may be too generic. Time will show.
+var userErrorRegexp = regexp.MustCompile("unknown flag")
+
+// isUserError reports whether err is a user error: either a commandError
+// flagged as such, or an error whose text matches known Cobra user mistakes.
+func isUserError(err error) bool {
+ if cErr, ok := err.(commandError); ok && cErr.isUserError() {
+ return true
+ }
+
+ return userErrorRegexp.MatchString(err.Error())
+}
diff --git a/commands/hugo.go b/commands/hugo.go
new file mode 100644
index 000000000..5169d65a5
--- /dev/null
+++ b/commands/hugo.go
@@ -0,0 +1,1253 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package commands defines and implements command-line commands and flags
+// used by Hugo. Commands and flags are implemented using Cobra.
+package commands
+
+import (
+ "context"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "os/signal"
+ "path/filepath"
+ "runtime"
+ "runtime/pprof"
+ "runtime/trace"
+ "strings"
+ "sync/atomic"
+ "syscall"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/common/terminal"
+
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+
+ "golang.org/x/sync/errgroup"
+
+ "github.com/gohugoio/hugo/config"
+
+ flag "github.com/spf13/pflag"
+
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/livereload"
+ "github.com/gohugoio/hugo/watcher"
+ "github.com/spf13/afero"
+ "github.com/spf13/cobra"
+ "github.com/spf13/fsync"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+// Response is the value returned from Execute.
+type Response struct {
+ // The build Result will only be set in the hugo build command.
+ Result *hugolib.HugoSites
+
+ // Err is set when the command failed to execute.
+ Err error
+
+ // The command that was executed.
+ Cmd *cobra.Command
+}
+
+// IsUserError returns true if the Response error is a user error rather than a
+// system error.
+func (r Response) IsUserError() bool {
+ return r.Err != nil && isUserError(r.Err)
+}
+
+// Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
+// The args are usually filled with os.Args[1:].
+func Execute(args []string) Response {
+ hugoCmd := newCommandsBuilder().addAll().build()
+ cmd := hugoCmd.getCommand()
+ cmd.SetArgs(args)
+
+ c, err := cmd.ExecuteC()
+
+ var resp Response
+
+ if c == cmd && hugoCmd.c != nil {
+ // Root command executed
+ resp.Result = hugoCmd.c.hugo()
+ }
+
+ if err == nil {
+ // Even a "successful" run is treated as a failure if errors were
+ // logged, either globally or by the build result itself.
+ errCount := int(loggers.GlobalErrorCounter.Count())
+ if errCount > 0 {
+ err = fmt.Errorf("logged %d errors", errCount)
+ } else if resp.Result != nil {
+ errCount = resp.Result.NumLogErrors()
+ if errCount > 0 {
+ err = fmt.Errorf("logged %d errors", errCount)
+ }
+ }
+
+ }
+
+ resp.Err = err
+ resp.Cmd = c
+
+ return resp
+}
+
+// initializeConfig initializes a config file with sensible default configuration flags.
+// It builds a commandeer and registers each site's media types so they are
+// available before the build starts.
+func initializeConfig(mustHaveConfigFile, failOnInitErr, running bool,
+ h *hugoBuilderCommon,
+ f flagsToConfigHandler,
+ cfgInit func(c *commandeer) error) (*commandeer, error) {
+ c, err := newCommandeer(mustHaveConfigFile, failOnInitErr, running, h, f, cfgInit)
+ if err != nil {
+ return nil, err
+ }
+
+ // hugoTry may return nil (e.g. no config yet); only register media
+ // types when the sites are available.
+ if h := c.hugoTry(); h != nil {
+ for _, s := range h.Sites {
+ s.RegisterMediaTypes()
+ }
+ }
+
+ return c, nil
+}
+
+// createLogger builds the logger used for this run, wiring stdout and
+// file-log destinations and thresholds from the quiet/verbose/debug/logging
+// flags and config values. It also initializes the global loggers.
+func (c *commandeer) createLogger(cfg config.Provider) (loggers.Logger, error) {
+ var (
+ logHandle = ioutil.Discard
+ logThreshold = jww.LevelWarn
+ logFile = cfg.GetString("logFile")
+ outHandle = ioutil.Discard
+ stdoutThreshold = jww.LevelWarn
+ )
+
+ if !c.h.quiet {
+ outHandle = os.Stdout
+ }
+
+ if c.h.verboseLog || c.h.logging || (c.h.logFile != "") {
+ var err error
+ if logFile != "" {
+ logHandle, err = os.OpenFile(logFile, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
+ if err != nil {
+ return nil, newSystemError("Failed to open log file:", logFile, err)
+ }
+ } else {
+ // No explicit log file: log to a temp file instead.
+ logHandle, err = ioutil.TempFile("", "hugo")
+ if err != nil {
+ return nil, newSystemError(err)
+ }
+ }
+ } else if !c.h.quiet && cfg.GetBool("verbose") {
+ stdoutThreshold = jww.LevelInfo
+ }
+
+ if cfg.GetBool("debug") {
+ stdoutThreshold = jww.LevelDebug
+ }
+
+ if c.h.verboseLog {
+ logThreshold = jww.LevelInfo
+ if cfg.GetBool("debug") {
+ logThreshold = jww.LevelDebug
+ }
+ }
+
+ loggers.InitGlobalLogger(stdoutThreshold, logThreshold, outHandle, logHandle)
+ helpers.InitLoggers()
+
+ return loggers.NewLogger(stdoutThreshold, logThreshold, outHandle, logHandle, c.running), nil
+}
+
+// initializeFlags copies the values of a known set of command flags into the
+// configuration, so flags set on the command line override config-file values.
+// Only flags that were actually changed are copied (except the forced aliases
+// at the bottom).
+func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
+ persFlagKeys := []string{
+ "debug",
+ "verbose",
+ "logFile",
+ // Moved from vars
+ }
+ flagKeys := []string{
+ "cleanDestinationDir",
+ "buildDrafts",
+ "buildFuture",
+ "buildExpired",
+ "clock",
+ "uglyURLs",
+ "canonifyURLs",
+ "enableRobotsTXT",
+ "enableGitInfo",
+ "pluralizeListTitles",
+ "preserveTaxonomyNames",
+ "ignoreCache",
+ "forceSyncStatic",
+ "noTimes",
+ "noChmod",
+ "noBuildLock",
+ "ignoreVendorPaths",
+ "templateMetrics",
+ "templateMetricsHints",
+
+ // Moved from vars.
+ "baseURL",
+ "buildWatch",
+ "cacheDir",
+ "cfgFile",
+ "confirm",
+ "contentDir",
+ "debug",
+ "destination",
+ "disableKinds",
+ "dryRun",
+ "force",
+ "gc",
+ "printI18nWarnings",
+ "printUnusedTemplates",
+ "invalidateCDN",
+ "layoutDir",
+ "logFile",
+ "maxDeletes",
+ "quiet",
+ "renderToMemory",
+ "source",
+ "target",
+ "theme",
+ "themesDir",
+ "verbose",
+ "verboseLog",
+ "duplicateTargetPaths",
+ }
+
+ for _, key := range persFlagKeys {
+ setValueFromFlag(cmd.PersistentFlags(), key, cfg, "", false)
+ }
+ for _, key := range flagKeys {
+ setValueFromFlag(cmd.Flags(), key, cfg, "", false)
+ }
+
+ // force=true: minify is mapped even when left at its default value.
+ setValueFromFlag(cmd.Flags(), "minify", cfg, "minifyOutput", true)
+
+ // Set some "config aliases"
+ setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false)
+ setValueFromFlag(cmd.Flags(), "printI18nWarnings", cfg, "logI18nWarnings", false)
+ setValueFromFlag(cmd.Flags(), "printPathWarnings", cfg, "logPathWarnings", false)
+}
+
+// setValueFromFlag copies the value of flag key into cfg under configKey
+// (targetKey when non-empty, otherwise key). The copy happens only when the
+// flag was changed on the command line, or when force is set and the flag
+// exists. Panics on flag value types not handled in the switch below.
+func setValueFromFlag(flags *flag.FlagSet, key string, cfg config.Provider, targetKey string, force bool) {
+ key = strings.TrimSpace(key)
+ if (force && flags.Lookup(key) != nil) || flags.Changed(key) {
+ f := flags.Lookup(key)
+ configKey := key
+ if targetKey != "" {
+ configKey = targetKey
+ }
+ // Gotta love this API.
+ switch f.Value.Type() {
+ case "bool":
+ bv, _ := flags.GetBool(key)
+ cfg.Set(configKey, bv)
+ case "string":
+ cfg.Set(configKey, f.Value.String())
+ case "stringSlice":
+ bv, _ := flags.GetStringSlice(key)
+ cfg.Set(configKey, bv)
+ case "int":
+ iv, _ := flags.GetInt(key)
+ cfg.Set(configKey, iv)
+ default:
+ panic(fmt.Sprintf("update switch with %s", f.Value.Type()))
+ }
+
+ }
+}
+
+// fullBuild copies static files and builds all sites, normally in parallel.
+// It updates the per-language static-file processing stats and, when --gc is
+// set, runs garbage collection on the result afterwards.
+func (c *commandeer) fullBuild(noBuildLock bool) error {
+ var (
+ g errgroup.Group
+ langCount map[string]uint64
+ )
+
+ if !c.h.quiet {
+ fmt.Println("Start building sites … ")
+ fmt.Println(hugo.BuildVersionString())
+ if terminal.IsTerminal(os.Stdout) {
+ // Restore the cursor in case a progress indicator hid it.
+ defer func() {
+ fmt.Print(showCursor + clearLine)
+ }()
+ }
+ }
+
+ copyStaticFunc := func() error {
+ cnt, err := c.copyStatic()
+ if err != nil {
+ return fmt.Errorf("Error copying static files: %w", err)
+ }
+ langCount = cnt
+ return nil
+ }
+ buildSitesFunc := func() error {
+ if err := c.buildSites(noBuildLock); err != nil {
+ return fmt.Errorf("Error building site: %w", err)
+ }
+ return nil
+ }
+ // Do not copy static files and build sites in parallel if cleanDestinationDir is enabled.
+ // This flag deletes all static resources in /public folder that are missing in /static,
+ // and it does so at the end of copyStatic() call.
+ if c.Cfg.GetBool("cleanDestinationDir") {
+ if err := copyStaticFunc(); err != nil {
+ return err
+ }
+ if err := buildSitesFunc(); err != nil {
+ return err
+ }
+ } else {
+ g.Go(copyStaticFunc)
+ g.Go(buildSitesFunc)
+ if err := g.Wait(); err != nil {
+ return err
+ }
+ }
+
+ for _, s := range c.hugo().Sites {
+ s.ProcessingStats.Static = langCount[s.Language().Lang]
+ }
+
+ if c.h.gc {
+ count, err := c.hugo().GC()
+ if err != nil {
+ return err
+ }
+ for _, s := range c.hugo().Sites {
+ // We have no way of knowing what site the garbage belonged to.
+ s.ProcessingStats.Cleaned = uint64(count)
+ }
+ }
+
+ return nil
+}
+
+// initCPUProfile starts CPU profiling to the file named by --cpuprofile and
+// returns a stop function, or (nil, nil) when profiling is not requested.
+func (c *commandeer) initCPUProfile() (func(), error) {
+ if c.h.cpuprofile == "" {
+ return nil, nil
+ }
+
+ f, err := os.Create(c.h.cpuprofile)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create CPU profile: %w", err)
+ }
+ if err := pprof.StartCPUProfile(f); err != nil {
+ return nil, fmt.Errorf("failed to start CPU profile: %w", err)
+ }
+ return func() {
+ pprof.StopCPUProfile()
+ f.Close()
+ }, nil
+}
+
+// initMemProfile writes a heap profile to the file named by --memprofile.
+// It is a no-op when no profile file is configured; failures are logged,
+// not returned, as this runs on the shutdown path.
+func (c *commandeer) initMemProfile() {
+ if c.h.memprofile == "" {
+ return
+ }
+
+ f, err := os.Create(c.h.memprofile)
+ if err != nil {
+ // Bail out: without a file there is nothing to write the profile to
+ // (the original fell through and passed a nil *os.File below).
+ c.logger.Errorf("could not create memory profile: %s", err)
+ return
+ }
+ defer f.Close()
+ runtime.GC() // get up-to-date statistics
+ if err := pprof.WriteHeapProfile(f); err != nil {
+ c.logger.Errorf("could not write memory profile: %s", err)
+ }
+}
+
+// initTraceProfile starts execution tracing to the file named by
+// --traceprofile and returns a stop function, or (nil, nil) when tracing is
+// not requested.
+func (c *commandeer) initTraceProfile() (func(), error) {
+ if c.h.traceprofile == "" {
+ return nil, nil
+ }
+
+ f, err := os.Create(c.h.traceprofile)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create trace file: %w", err)
+ }
+
+ if err := trace.Start(f); err != nil {
+ return nil, fmt.Errorf("failed to start trace: %w", err)
+ }
+
+ return func() {
+ trace.Stop()
+ f.Close()
+ }, nil
+}
+
+// initMutexProfile enables mutex contention profiling and returns a function
+// that writes the profile to the file named by --mutexprofile, or (nil, nil)
+// when not requested.
+func (c *commandeer) initMutexProfile() (func(), error) {
+ if c.h.mutexprofile == "" {
+ return nil, nil
+ }
+
+ f, err := os.Create(c.h.mutexprofile)
+ if err != nil {
+ return nil, err
+ }
+
+ // Sample every mutex contention event.
+ runtime.SetMutexProfileFraction(1)
+
+ return func() {
+ pprof.Lookup("mutex").WriteTo(f, 0)
+ f.Close()
+ }, nil
+}
+
+// initMemTicker prints runtime memory statistics to stdout every five
+// seconds. It returns a stop function that prints one final sample and
+// terminates the goroutine.
+func (c *commandeer) initMemTicker() func() {
+ memticker := time.NewTicker(5 * time.Second)
+ quit := make(chan struct{})
+ printMem := func() {
+ var m runtime.MemStats
+ runtime.ReadMemStats(&m)
+ fmt.Printf("\n\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\n\n", formatByteCount(m.Alloc), formatByteCount(m.TotalAlloc), formatByteCount(m.Sys), m.NumGC)
+ }
+
+ go func() {
+ for {
+ select {
+ case <-memticker.C:
+ printMem()
+ case <-quit:
+ memticker.Stop()
+ printMem()
+ return
+ }
+ }
+ }()
+
+ return func() {
+ close(quit)
+ }
+}
+
+// initProfiling starts all requested profilers (CPU, mutex, trace, memory
+// ticker) and returns a single stop function that flushes and stops them,
+// including writing the heap profile.
+func (c *commandeer) initProfiling() (func(), error) {
+ stopCPUProf, err := c.initCPUProfile()
+ if err != nil {
+ return nil, err
+ }
+
+ stopMutexProf, err := c.initMutexProfile()
+ if err != nil {
+ return nil, err
+ }
+
+ stopTraceProf, err := c.initTraceProfile()
+ if err != nil {
+ return nil, err
+ }
+
+ var stopMemTicker func()
+ if c.h.printm {
+ stopMemTicker = c.initMemTicker()
+ }
+
+ return func() {
+ // The heap profile is written at stop time, not start time.
+ c.initMemProfile()
+
+ if stopCPUProf != nil {
+ stopCPUProf()
+ }
+ if stopMutexProf != nil {
+ stopMutexProf()
+ }
+
+ if stopTraceProf != nil {
+ stopTraceProf()
+ }
+
+ if stopMemTicker != nil {
+ stopMemTicker()
+ }
+ }, nil
+}
+
+// build runs a full site build (with profiling if requested), prints
+// processing stats and warnings, and — when --watch is set — starts a file
+// watcher and blocks until SIGINT/SIGTERM.
+func (c *commandeer) build() error {
+ stopProfiling, err := c.initProfiling()
+ if err != nil {
+ return err
+ }
+
+ defer func() {
+ if stopProfiling != nil {
+ stopProfiling()
+ }
+ }()
+
+ if err := c.fullBuild(false); err != nil {
+ return err
+ }
+
+ if !c.h.quiet {
+ fmt.Println()
+ c.hugo().PrintProcessingStats(os.Stdout)
+ fmt.Println()
+
+ // Warn about output files written to the same target path twice.
+ if createCounter, ok := c.publishDirFs.(hugofs.DuplicatesReporter); ok {
+ dupes := createCounter.ReportDuplicates()
+ if dupes != "" {
+ c.logger.Warnln("Duplicate target paths:", dupes)
+ }
+ }
+
+ unusedTemplates := c.hugo().Tmpl().(tpl.UnusedTemplatesProvider).UnusedTemplates()
+ for _, unusedTemplate := range unusedTemplates {
+ c.logger.Warnf("Template %s is unused, source file %s", unusedTemplate.Name(), unusedTemplate.Filename())
+ }
+ }
+
+ if c.h.buildWatch {
+ watchDirs, err := c.getDirList()
+ if err != nil {
+ return err
+ }
+
+ baseWatchDir := c.Cfg.GetString("workingDir")
+ rootWatchDirs := getRootWatchDirsStr(baseWatchDir, watchDirs)
+
+ c.logger.Printf("Watching for changes in %s%s{%s}\n", baseWatchDir, helpers.FilePathSeparator, rootWatchDirs)
+ c.logger.Println("Press Ctrl+C to stop")
+ watcher, err := c.newWatcher(c.h.poll, watchDirs...)
+ checkErr(c.Logger, err)
+ defer watcher.Close()
+
+ // Block until interrupted.
+ sigs := make(chan os.Signal, 1)
+ signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
+
+ <-sigs
+ }
+
+ return nil
+}
+
+// serverBuild runs the initial full build used by the server command, with
+// profiling if requested, and prints processing stats unless --quiet.
+func (c *commandeer) serverBuild() error {
+ stopProfiling, err := c.initProfiling()
+ if err != nil {
+ return err
+ }
+
+ defer func() {
+ if stopProfiling != nil {
+ stopProfiling()
+ }
+ }()
+
+ if err := c.fullBuild(false); err != nil {
+ return err
+ }
+
+ // TODO(bep) Feedback?
+ if !c.h.quiet {
+ fmt.Println()
+ c.hugo().PrintProcessingStats(os.Stdout)
+ fmt.Println()
+ }
+
+ return nil
+}
+
+// copyStatic syncs static files to the publish dir(s) and returns the number
+// of files copied per language. A missing static dir is not an error.
+func (c *commandeer) copyStatic() (map[string]uint64, error) {
+ m, err := c.doWithPublishDirs(c.copyStaticTo)
+ if err == nil || os.IsNotExist(err) {
+ return m, nil
+ }
+ return m, err
+}
+
+// doWithPublishDirs runs f for every static source filesystem and returns a
+// per-language count of processed files. In the non-multihost case (empty
+// lang key) the single count is attributed to every configured language.
+func (c *commandeer) doWithPublishDirs(f func(sourceFs *filesystems.SourceFilesystem) (uint64, error)) (map[string]uint64, error) {
+ langCount := make(map[string]uint64)
+
+ staticFilesystems := c.hugo().BaseFs.SourceFilesystems.Static
+
+ if len(staticFilesystems) == 0 {
+ c.logger.Infoln("No static directories found to sync")
+ return langCount, nil
+ }
+
+ for lang, fs := range staticFilesystems {
+ cnt, err := f(fs)
+ if err != nil {
+ return langCount, err
+ }
+
+ if lang == "" {
+ // Not multihost
+ for _, l := range c.languages {
+ langCount[l.Lang] = cnt
+ }
+ } else {
+ langCount[lang] = cnt
+ }
+ }
+
+ return langCount, nil
+}
+
+// countingStatFs wraps an afero.Fs and counts Stat calls on regular files.
+// It is used to estimate how many files a sync touched.
+type countingStatFs struct {
+ afero.Fs
+ statCounter uint64
+}
+
+// Stat delegates to the wrapped Fs, atomically incrementing the counter for
+// every successful Stat of a non-directory.
+func (fs *countingStatFs) Stat(name string) (os.FileInfo, error) {
+ f, err := fs.Fs.Stat(name)
+ if err == nil {
+ if !f.IsDir() {
+ atomic.AddUint64(&fs.statCounter, 1)
+ }
+ }
+ return f, err
+}
+
+// chmodFilter reports whether permission syncing should be skipped for the
+// given source entry (true for directories).
+func chmodFilter(dst, src os.FileInfo) bool {
+ // Hugo publishes data from multiple sources, potentially
+ // with overlapping directory structures. We cannot sync permissions
+ // for directories as that would mean that we might end up with write-protected
+ // directories inside /public.
+ // One example of this would be syncing from the Go Module cache,
+ // which have 0555 directories.
+ return src.IsDir()
+}
+
+// copyStaticTo syncs one static source filesystem into the publish dir and
+// returns an estimate of the number of files copied. When cleanDestinationDir
+// is enabled, destination files missing from the source are deleted (dotfile
+// directories excepted).
+func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint64, error) {
+ publishDir := helpers.FilePathSeparator
+
+ if sourceFs.PublishFolder != "" {
+ publishDir = filepath.Join(publishDir, sourceFs.PublishFolder)
+ }
+
+ fs := &countingStatFs{Fs: sourceFs.Fs}
+
+ syncer := fsync.NewSyncer()
+ syncer.NoTimes = c.Cfg.GetBool("noTimes")
+ syncer.NoChmod = c.Cfg.GetBool("noChmod")
+ syncer.ChmodFilter = chmodFilter
+ syncer.SrcFs = fs
+ syncer.DestFs = c.Fs.PublishDir
+ if c.renderStaticToDisk {
+ syncer.DestFs = c.Fs.PublishDirStatic
+ }
+ // Now that we are using a unionFs for the static directories
+ // We can effectively clean the publishDir on initial sync
+ syncer.Delete = c.Cfg.GetBool("cleanDestinationDir")
+
+ if syncer.Delete {
+ c.logger.Infoln("removing all files from destination that don't exist in static dirs")
+
+ syncer.DeleteFilter = func(f os.FileInfo) bool {
+ return f.IsDir() && strings.HasPrefix(f.Name(), ".")
+ }
+ }
+ c.logger.Infoln("syncing static files to", publishDir)
+
+ // because we are using a baseFs (to get the union right).
+ // set sync src to root
+ err := syncer.Sync(publishDir, helpers.FilePathSeparator)
+ if err != nil {
+ return 0, err
+ }
+
+ // Sync runs Stat 3 times for every source file (which sounds much)
+ numFiles := fs.statCounter / 3
+
+ return numFiles, err
+}
+
+// firstPathSpec returns the PathSpec of the first configured site.
+func (c *commandeer) firstPathSpec() *helpers.PathSpec {
+ return c.hugo().Sites[0].PathSpec
+}
+
+// timeTrack logs the elapsed wall-clock time since start, labeled by name.
+func (c *commandeer) timeTrack(start time.Time, name string) {
+ // Note the use of time.Since here and time.Now in the callers.
+ // We have an htime.Since, but that may be adjusted to the future,
+ // and that does not make sense here, esp. when used before the
+ // global Clock is initialized.
+ elapsed := time.Since(start)
+ c.logger.Printf("%s in %v ms", name, int(1000*elapsed.Seconds()))
+}
+
+// getDirList provides NewWatcher() with a list of directories to watch for changes.
+// It walks all watch roots, skipping .git, node_modules and bower_components,
+// and returns the sorted, de-duplicated set of filenames. Walk errors are
+// logged and otherwise ignored.
+func (c *commandeer) getDirList() ([]string, error) {
+ var filenames []string
+
+ walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ c.logger.Errorln("walker: ", err)
+ return nil
+ }
+
+ if fi.IsDir() {
+ if fi.Name() == ".git" ||
+ fi.Name() == "node_modules" || fi.Name() == "bower_components" {
+ return filepath.SkipDir
+ }
+
+ filenames = append(filenames, fi.Meta().Filename)
+ }
+
+ return nil
+ }
+
+ watchFiles := c.hugo().PathSpec.BaseFs.WatchDirs()
+ for _, fi := range watchFiles {
+ if !fi.IsDir() {
+ // Single files are watched directly; no walk needed.
+ filenames = append(filenames, fi.Meta().Filename)
+ continue
+ }
+
+ w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.logger, Info: fi, WalkFn: walkFn})
+ if err := w.Walk(); err != nil {
+ c.logger.Errorln("walker: ", err)
+ }
+ }
+
+ filenames = helpers.UniqueStringsSorted(filenames)
+
+ return filenames, nil
+}
+
+// buildSites performs a full build of all sites.
+func (c *commandeer) buildSites(noBuildLock bool) (err error) {
+ return c.hugo().Build(hugolib.BuildCfg{NoBuildLock: noBuildLock})
+}
+
+// handleBuildErr records err as the current build error and logs it with a
+// cleaned-up message.
+func (c *commandeer) handleBuildErr(err error, msg string) {
+ c.buildErr = err
+ c.logger.Errorln(msg + ": " + cleanErrorLog(err.Error()))
+}
+
+// rebuildSites rebuilds in response to filesystem events. Files referenced by
+// a previous build error are replayed as synthetic write events so the fix is
+// picked up; in fast-render mode the home pages are always re-rendered.
+func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
+ if c.buildErr != nil {
+ ferrs := herrors.UnwrapFileErrorsWithErrorContext(c.buildErr)
+ for _, err := range ferrs {
+ events = append(events, fsnotify.Event{Name: err.Position().Filename, Op: fsnotify.Write})
+ }
+ }
+ c.buildErr = nil
+ visited := c.visitedURLs.PeekAllSet()
+ if c.fastRenderMode {
+ // Make sure we always render the home pages
+ for _, l := range c.languages {
+ langPath := c.hugo().PathSpec.GetLangSubDir(l.Lang)
+ if langPath != "" {
+ langPath = langPath + "/"
+ }
+ home := c.hugo().PathSpec.PrependBasePath("/"+langPath, false)
+ visited[home] = true
+ }
+ }
+ return c.hugo().Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: visited, ErrRecovery: c.wasError}, events...)
+}
+
+// partialReRender re-renders only the given URLs (used by fast render mode on
+// browser navigation). The error flag is cleared when done.
+func (c *commandeer) partialReRender(urls ...string) error {
+ defer func() {
+ c.wasError = false
+ }()
+ c.buildErr = nil
+ visited := make(map[string]bool)
+ for _, url := range urls {
+ visited[url] = true
+ }
+
+ // Note: We do not set NoBuildLock as the file lock is not acquired at this stage.
+ return c.hugo().Build(hugolib.BuildCfg{NoBuildLock: false, RecentlyVisited: visited, PartialReRender: true, ErrRecovery: c.wasError})
+}
+
+// fullRebuild reloads the configuration and rebuilds everything from scratch,
+// asynchronously. A semaphore serializes rebuilds; for go.mod changes a
+// rebuild already in flight suppresses new ones.
+func (c *commandeer) fullRebuild(changeType string) {
+ if changeType == configChangeGoMod {
+ // go.mod may be changed during the build itself, and
+ // we really want to prevent superfluous builds.
+ if !c.fullRebuildSem.TryAcquire(1) {
+ return
+ }
+ c.fullRebuildSem.Release(1)
+ }
+
+ // Acquire error is ignored; with context.Background() this only fails
+ // if the context is cancelled, which it never is here.
+ c.fullRebuildSem.Acquire(context.Background(), 1)
+
+ go func() {
+ defer c.fullRebuildSem.Release(1)
+
+ c.printChangeDetected(changeType)
+
+ defer func() {
+ // Allow any file system events to arrive back.
+ // This will block any rebuild on config changes for the
+ // duration of the sleep.
+ time.Sleep(2 * time.Second)
+ }()
+
+ defer c.timeTrack(time.Now(), "Rebuilt")
+
+ c.commandeerHugoState = newCommandeerHugoState()
+ err := c.loadConfig()
+ if err != nil {
+ // Set the processing on pause until the state is recovered.
+ c.paused = true
+ c.handleBuildErr(err, "Failed to reload config")
+
+ } else {
+ c.paused = false
+ }
+
+ if !c.paused {
+ _, err := c.copyStatic()
+ if err != nil {
+ c.logger.Errorln(err)
+ return
+ }
+
+ err = c.buildSites(true)
+ if err != nil {
+ c.logger.Errorln(err)
+ } else if !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload") {
+ livereload.ForceRefresh()
+ }
+ }
+ }()
+}
+
+// newWatcher creates a new watcher to watch filesystem events. It watches the
+// given directories (minus ignored files) plus the config files, optionally
+// polling at pollIntervalStr, and dispatches batched events to handleEvents
+// under the build lock.
+func (c *commandeer) newWatcher(pollIntervalStr string, dirList ...string) (*watcher.Batcher, error) {
+ if runtime.GOOS == "darwin" {
+ tweakLimit()
+ }
+
+ staticSyncer, err := newStaticSyncer(c)
+ if err != nil {
+ return nil, err
+ }
+
+ var pollInterval time.Duration
+ poll := pollIntervalStr != ""
+ if poll {
+ pollInterval, err = types.ToDurationE(pollIntervalStr)
+ if err != nil {
+ return nil, fmt.Errorf("invalid value for flag poll: %s", err)
+ }
+ c.logger.Printf("Use watcher with poll interval %v", pollInterval)
+ }
+
+ if pollInterval == 0 {
+ pollInterval = 500 * time.Millisecond
+ }
+
+ watcher, err := watcher.New(500*time.Millisecond, pollInterval, poll)
+ if err != nil {
+ return nil, err
+ }
+
+ spec := c.hugo().Deps.SourceSpec
+
+ for _, d := range dirList {
+ if d != "" {
+ if spec.IgnoreFile(d) {
+ continue
+ }
+ _ = watcher.Add(d)
+ }
+ }
+
+ // Identifies changes to config (config.toml) files.
+ configSet := make(map[string]bool)
+
+ c.logger.Println("Watching for config changes in", strings.Join(c.configFiles, ", "))
+ for _, configFile := range c.configFiles {
+ watcher.Add(configFile)
+ configSet[configFile] = true
+ }
+
+ go func() {
+ for {
+ select {
+ case evs := <-watcher.Events:
+ unlock, err := c.buildLock()
+ if err != nil {
+ // Errorf, not Errorln: the message contains a format verb.
+ c.logger.Errorf("Failed to acquire a build lock: %s", err)
+ return
+ }
+ c.handleEvents(watcher, staticSyncer, evs, configSet)
+ if c.showErrorInBrowser && c.errCount() > 0 {
+ // Need to reload browser to show the error
+ livereload.ForceRefresh()
+ }
+ unlock()
+ case err := <-watcher.Errors():
+ if err != nil && !os.IsNotExist(err) {
+ c.logger.Errorln("Error while watching:", err)
+ }
+ }
+ }
+ }()
+
+ return watcher, nil
+}
+
+// printChangeDetected logs a "change detected" banner (optionally naming the
+// kind of change) followed by a timestamp.
+func (c *commandeer) printChangeDetected(typ string) {
+ msg := "\nChange"
+ if typ != "" {
+ msg += " of " + typ
+ }
+ msg += " detected, rebuilding site."
+
+ c.logger.Println(msg)
+ const layout = "2006-01-02 15:04:05.000 -0700"
+ c.logger.Println(htime.Now().Format(layout))
+}
+
+// Human-readable change descriptions passed to fullRebuild/printChangeDetected.
+const (
+ configChangeConfig = "config file"
+ configChangeGoMod = "go.mod file"
+)
+
+// handleEvents processes one batch of filesystem events. Config-file changes
+// trigger a full rebuild and return early. Otherwise events are filtered
+// (temp files, ignored files, CHMOD-only), mapped through symlinks, and
+// partitioned into static events (synced to the publish dir) and dynamic
+// events (site rebuild), with livereload notifications where enabled.
+func (c *commandeer) handleEvents(watcher *watcher.Batcher,
+ staticSyncer *staticSyncer,
+ evs []fsnotify.Event,
+ configSet map[string]bool) {
+ defer func() {
+ c.wasError = false
+ }()
+
+ var isHandled bool
+
+ for _, ev := range evs {
+ isConfig := configSet[ev.Name]
+ configChangeType := configChangeConfig
+ if isConfig {
+ if strings.Contains(ev.Name, "go.mod") {
+ configChangeType = configChangeGoMod
+ }
+ }
+ if !isConfig {
+ // It may be one of the /config folders
+ dirname := filepath.Dir(ev.Name)
+ if dirname != "." && configSet[dirname] {
+ isConfig = true
+ }
+ }
+
+ if isConfig {
+ isHandled = true
+
+ if ev.Op&fsnotify.Chmod == fsnotify.Chmod {
+ continue
+ }
+
+ if ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename {
+ // Editors often replace a file by remove+rename; re-add the
+ // watch, retrying for up to ~10s until the file reappears.
+ for _, configFile := range c.configFiles {
+ counter := 0
+ for watcher.Add(configFile) != nil {
+ counter++
+ if counter >= 100 {
+ break
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ }
+ }
+
+ // Config file(s) changed. Need full rebuild.
+ c.fullRebuild(configChangeType)
+
+ return
+ }
+ }
+
+ if isHandled {
+ return
+ }
+
+ if c.paused {
+ // Wait for the server to get into a consistent state before
+ // we continue with processing.
+ return
+ }
+
+ if len(evs) > 50 {
+ // This is probably a mass edit of the content dir.
+ // Schedule a full rebuild for when it slows down.
+ c.debounce(func() {
+ c.fullRebuild("")
+ })
+ return
+ }
+
+ c.logger.Infoln("Received System Events:", evs)
+
+ staticEvents := []fsnotify.Event{}
+ dynamicEvents := []fsnotify.Event{}
+
+ // Remap events through any content symlinks so rebuild logic sees the
+ // canonical paths.
+ filtered := []fsnotify.Event{}
+ for _, ev := range evs {
+ if c.hugo().ShouldSkipFileChangeEvent(ev) {
+ continue
+ }
+ // Check the most specific first, i.e. files.
+ contentMapped := c.hugo().ContentChanges.GetSymbolicLinkMappings(ev.Name)
+ if len(contentMapped) > 0 {
+ for _, mapped := range contentMapped {
+ filtered = append(filtered, fsnotify.Event{Name: mapped, Op: ev.Op})
+ }
+ continue
+ }
+
+ // Check for any symbolic directory mapping.
+
+ dir, name := filepath.Split(ev.Name)
+
+ contentMapped = c.hugo().ContentChanges.GetSymbolicLinkMappings(dir)
+
+ if len(contentMapped) == 0 {
+ filtered = append(filtered, ev)
+ continue
+ }
+
+ for _, mapped := range contentMapped {
+ mappedFilename := filepath.Join(mapped, name)
+ filtered = append(filtered, fsnotify.Event{Name: mappedFilename, Op: ev.Op})
+ }
+ }
+
+ evs = filtered
+
+ for _, ev := range evs {
+ ext := filepath.Ext(ev.Name)
+ baseName := filepath.Base(ev.Name)
+ // Skip editor/OS temp and backup files.
+ istemp := strings.HasSuffix(ext, "~") ||
+ (ext == ".swp") || // vim
+ (ext == ".swx") || // vim
+ (ext == ".tmp") || // generic temp file
+ (ext == ".DS_Store") || // OSX Thumbnail
+ baseName == "4913" || // vim
+ strings.HasPrefix(ext, ".goutputstream") || // gnome
+ strings.HasSuffix(ext, "jb_old___") || // intelliJ
+ strings.HasSuffix(ext, "jb_tmp___") || // intelliJ
+ strings.HasSuffix(ext, "jb_bak___") || // intelliJ
+ strings.HasPrefix(ext, ".sb-") || // byword
+ strings.HasPrefix(baseName, ".#") || // emacs
+ strings.HasPrefix(baseName, "#") // emacs
+ if istemp {
+ continue
+ }
+ if c.hugo().Deps.SourceSpec.IgnoreFile(ev.Name) {
+ continue
+ }
+ // Sometimes during rm -rf operations a '"": REMOVE' is triggered. Just ignore these
+ if ev.Name == "" {
+ continue
+ }
+
+ // Write and rename operations are often followed by CHMOD.
+ // There may be valid use cases for rebuilding the site on CHMOD,
+ // but that will require more complex logic than this simple conditional.
+ // On OS X this seems to be related to Spotlight, see:
+ // https://github.com/go-fsnotify/fsnotify/issues/15
+ // A workaround is to put your site(s) on the Spotlight exception list,
+ // but that may be a little mysterious for most end users.
+ // So, for now, we skip reload on CHMOD.
+ // We do have to check for WRITE though. On slower laptops a Chmod
+ // could be aggregated with other important events, and we still want
+ // to rebuild on those
+ if ev.Op&(fsnotify.Chmod|fsnotify.Write|fsnotify.Create) == fsnotify.Chmod {
+ continue
+ }
+
+ walkAdder := func(path string, f hugofs.FileMetaInfo, err error) error {
+ if f.IsDir() {
+ c.logger.Println("adding created directory to watchlist", path)
+ if err := watcher.Add(path); err != nil {
+ return err
+ }
+ } else if !staticSyncer.isStatic(path) {
+ // Hugo's rebuilding logic is entirely file based. When you drop a new folder into
+ // /content on OSX, the above logic will handle future watching of those files,
+ // but the initial CREATE is lost.
+ dynamicEvents = append(dynamicEvents, fsnotify.Event{Name: path, Op: fsnotify.Create})
+ }
+ return nil
+ }
+
+ // recursively add new directories to watch list
+ // When mkdir -p is used, only the top directory triggers an event (at least on OSX)
+ if ev.Op&fsnotify.Create == fsnotify.Create {
+ if s, err := c.Fs.Source.Stat(ev.Name); err == nil && s.Mode().IsDir() {
+ _ = helpers.SymbolicWalk(c.Fs.Source, ev.Name, walkAdder)
+ }
+ }
+
+ if staticSyncer.isStatic(ev.Name) {
+ staticEvents = append(staticEvents, ev)
+ } else {
+ dynamicEvents = append(dynamicEvents, ev)
+ }
+ }
+
+ if len(staticEvents) > 0 {
+ c.printChangeDetected("Static files")
+
+ if c.Cfg.GetBool("forceSyncStatic") {
+ c.logger.Printf("Syncing all static files\n")
+ _, err := c.copyStatic()
+ if err != nil {
+ c.logger.Errorln("Error copying static files to publish dir:", err)
+ return
+ }
+ } else {
+ if err := staticSyncer.syncsStaticEvents(staticEvents); err != nil {
+ c.logger.Errorln("Error syncing static files to publish dir:", err)
+ return
+ }
+ }
+
+ if !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload") {
+ // Will block forever trying to write to a channel that nobody is reading if livereload isn't initialized
+
+ // force refresh when more than one file
+ if !c.wasError && len(staticEvents) == 1 {
+ ev := staticEvents[0]
+ path := c.hugo().BaseFs.SourceFilesystems.MakeStaticPathRelative(ev.Name)
+ path = c.firstPathSpec().RelURL(helpers.ToSlashTrimLeading(path), false)
+
+ livereload.RefreshPath(path)
+ } else {
+ livereload.ForceRefresh()
+ }
+ }
+ }
+
+ if len(dynamicEvents) > 0 {
+ partitionedEvents := partitionDynamicEvents(
+ c.firstPathSpec().BaseFs.SourceFilesystems,
+ dynamicEvents)
+
+ doLiveReload := !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload")
+ onePageName := pickOneWriteOrCreatePath(partitionedEvents.ContentEvents)
+
+ c.printChangeDetected("")
+ c.changeDetector.PrepareNew()
+
+ func() {
+ defer c.timeTrack(time.Now(), "Total")
+ if err := c.rebuildSites(dynamicEvents); err != nil {
+ c.handleBuildErr(err, "Rebuild failed")
+ }
+ }()
+
+ if doLiveReload {
+ if len(partitionedEvents.ContentEvents) == 0 && len(partitionedEvents.AssetEvents) > 0 {
+ if c.wasError {
+ livereload.ForceRefresh()
+ return
+ }
+ // NOTE(review): c.changeDetector is dereferenced on the next line
+ // before the nil check below; if it can ever be nil this panics
+ // first — confirm and reorder if so.
+ changed := c.changeDetector.changed()
+ if c.changeDetector != nil && len(changed) == 0 {
+ // Nothing has changed.
+ return
+ } else if len(changed) == 1 {
+ pathToRefresh := c.firstPathSpec().RelURL(helpers.ToSlashTrimLeading(changed[0]), false)
+ livereload.RefreshPath(pathToRefresh)
+ } else {
+ livereload.ForceRefresh()
+ }
+ }
+
+ if len(partitionedEvents.ContentEvents) > 0 {
+
+ navigate := c.Cfg.GetBool("navigateToChanged")
+ // We have fetched the same page above, but it may have
+ // changed.
+ var p page.Page
+
+ if navigate {
+ if onePageName != "" {
+ p = c.hugo().GetContentPage(onePageName)
+ }
+ }
+
+ if p != nil {
+ livereload.NavigateToPathForPort(p.RelPermalink(), p.Site().ServerPort())
+ } else {
+ livereload.ForceRefresh()
+ }
+ }
+ }
+ }
+}
+
+// dynamicEvents contains events that are considered dynamic, as in "not static".
+// Both of these categories will trigger a new build, but the asset events
+// do not fit into the "navigate to changed" logic.
+type dynamicEvents struct {
+ ContentEvents []fsnotify.Event
+ AssetEvents []fsnotify.Event
+}
+
+// partitionDynamicEvents splits events into asset events and content events,
+// as classified by the given source filesystems.
+func partitionDynamicEvents(sourceFs *filesystems.SourceFilesystems, events []fsnotify.Event) (de dynamicEvents) {
+ for _, ev := range events {
+ switch {
+ case sourceFs.IsAsset(ev.Name):
+ de.AssetEvents = append(de.AssetEvents, ev)
+ default:
+ de.ContentEvents = append(de.ContentEvents, ev)
+ }
+ }
+ return
+}
+
+func pickOneWriteOrCreatePath(events []fsnotify.Event) string {
+ name := ""
+
+ for _, ev := range events {
+ if ev.Op&fsnotify.Write == fsnotify.Write || ev.Op&fsnotify.Create == fsnotify.Create {
+ if files.IsIndexContentFile(ev.Name) {
+ return ev.Name
+ }
+
+ if files.IsContentFile(ev.Name) {
+ name = ev.Name
+ }
+
+ }
+ }
+
+ return name
+}
+
+// formatByteCount renders b as a human-readable SI (base-1000) byte count,
+// e.g. 1500 -> "1.5 kB".
+func formatByteCount(b uint64) string {
+ const unit = 1000
+ if b < unit {
+ return fmt.Sprintf("%d B", b)
+ }
+ exp := 0
+ div := int64(unit)
+ for n := b / unit; n >= unit; n /= unit {
+ div *= unit
+ exp++
+ }
+ return fmt.Sprintf("%.1f %cB", float64(b)/float64(div), "kMGTPE"[exp])
+}
diff --git a/commands/hugo_test.go b/commands/hugo_test.go
new file mode 100644
index 000000000..1e1326642
--- /dev/null
+++ b/commands/hugo_test.go
@@ -0,0 +1,206 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "fmt"
+ "math/rand"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/bep/clock"
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+ "golang.org/x/tools/txtar"
+)
+
+// Issue #5662
+// TestHugoWithContentDirOverride verifies that the -c flag overrides the
+// content directory (here "mycontent" instead of the default "content").
+func TestHugoWithContentDirOverride(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ files := `
+-- config.toml --
+baseURL = "https://example.org"
+title = "Hugo Commands"
+-- mycontent/p1.md --
+---
+title: "P1"
+---
+-- layouts/_default/single.html --
+Page: {{ .Title }}|
+
+`
+ s := newTestHugoCmdBuilder(c, files, []string{"-c", "mycontent"}).Build()
+ s.AssertFileContent("public/p1/index.html", `Page: P1|`)
+
+}
+
+// Issue #9794
+// TestHugoStaticFilesMultipleStaticAndManyFolders builds a site with many
+// static files from both the project and a theme, then verifies the synced
+// output. The same seeded rand sequence is replayed so the assertion loop
+// visits the same files the generation loop produced.
+func TestHugoStaticFilesMultipleStaticAndManyFolders(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ files := `
+-- config.toml --
+baseURL = "https://example.org"
+theme = "mytheme"
+-- layouts/index.html --
+Home.
+
+`
+ const (
+ numDirs = 33
+ numFilesMax = 12
+ )
+
+ r := rand.New(rand.NewSource(32))
+
+ for i := 0; i < numDirs; i++ {
+ for j := 0; j < r.Intn(numFilesMax); j++ {
+ if j%3 == 0 {
+ files += fmt.Sprintf("-- themes/mytheme/static/d%d/f%d.txt --\nHellot%d-%d\n", i, j, i, j)
+ files += fmt.Sprintf("-- themes/mytheme/static/d%d/ft%d.txt --\nHellot%d-%d\n", i, j, i, j)
+ }
+ files += fmt.Sprintf("-- static/d%d/f%d.txt --\nHello%d-%d\n", i, j, i, j)
+ }
+ }
+
+ // Re-seed so the assertion loop draws the same r.Intn sequence.
+ r = rand.New(rand.NewSource(32))
+
+ // NOTE(review): the "-c mycontent" args look copied from another test;
+ // this site has no content dir, so they are harmless — confirm intent.
+ s := newTestHugoCmdBuilder(c, files, []string{"-c", "mycontent"}).Build()
+ for i := 0; i < numDirs; i++ {
+ for j := 0; j < r.Intn(numFilesMax); j++ {
+ if j%3 == 0 {
+ // The duplicated inner "if j%3 == 0" was redundant and has
+ // been removed; both asserts run for every third file.
+ s.AssertFileContent(fmt.Sprintf("public/d%d/ft%d.txt", i, j), fmt.Sprintf("Hellot%d-%d", i, j))
+ s.AssertFileContent(fmt.Sprintf("public/d%d/f%d.txt", i, j), fmt.Sprintf("Hello%d-%d", i, j))
+ }
+ }
+ }
+
+}
+
+// Issue #8787
+// TestHugoListCommandsWithClockFlag verifies that "hugo list future" honors
+// the --clock flag: with the clock set past the page date, nothing is listed.
+func TestHugoListCommandsWithClockFlag(t *testing.T) {
+ t.Cleanup(func() { htime.Clock = clock.System() })
+
+ c := qt.New(t)
+
+ files := `
+-- config.toml --
+baseURL = "https://example.org"
+title = "Hugo Commands"
+timeZone = "UTC"
+-- content/past.md --
+---
+title: "Past"
+date: 2000-11-06
+---
+-- content/future.md --
+---
+title: "Future"
+date: 2200-11-06
+---
+-- layouts/_default/single.html --
+Page: {{ .Title }}|
+
+`
+ s := newTestHugoCmdBuilder(c, files, []string{"list", "future"})
+ s.captureOut = true
+ s.Build()
+ p := filepath.Join("content", "future.md")
+ s.AssertStdout(p + ",2200-11-06T00:00:00Z")
+
+ // Capture stdout here too: without captureOut the previous version
+ // asserted against an always-empty s.out, making the check vacuous.
+ s = newTestHugoCmdBuilder(c, files, []string{"list", "future", "--clock", "2300-11-06"})
+ s.captureOut = true
+ s.Build()
+ s.AssertStdout("")
+}
+
+// testHugoCmdBuilder builds and runs a hugo command against a txtar-defined
+// site rooted in a temp directory, with helpers to assert on the result.
+type testHugoCmdBuilder struct {
+ *qt.C
+
+ fs afero.Fs // base-path fs rooted at dir
+ dir string // temp working directory for the site
+ files string // txtar description of the site files
+ args []string // command-line args passed to the hugo command
+
+ captureOut bool // when true, Build captures stdout into out
+ out string // captured stdout (only set when captureOut)
+}
+
+// newTestHugoCmdBuilder creates a builder rooted in a fresh temp directory.
+func newTestHugoCmdBuilder(c *qt.C, files string, args []string) *testHugoCmdBuilder {
+ b := &testHugoCmdBuilder{
+ C: c,
+ files: files,
+ args: args,
+ }
+ b.dir = b.TempDir()
+ b.fs = afero.NewBasePathFs(hugofs.Os, b.dir)
+
+ return b
+}
+
+// Build writes the txtar files to the temp dir, then executes the hugo
+// command with the configured args plus -s=<dir> and --quiet, optionally
+// capturing stdout into s.out. It asserts that the command succeeds.
+func (s *testHugoCmdBuilder) Build() *testHugoCmdBuilder {
+ data := txtar.Parse([]byte(s.files))
+
+ for _, f := range data.Files {
+ filename := filepath.Clean(f.Name)
+ // Trailing newline is an artifact of the txtar format.
+ data := bytes.TrimSuffix(f.Data, []byte("\n"))
+ s.Assert(s.fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
+ s.Assert(afero.WriteFile(s.fs, filename, data, 0666), qt.IsNil)
+ }
+
+ hugoCmd := newCommandsBuilder().addAll().build()
+ cmd := hugoCmd.getCommand()
+ args := append(s.args, "-s="+s.dir, "--quiet")
+ cmd.SetArgs(args)
+
+ if s.captureOut {
+ out, err := captureStdout(func() error {
+ _, err := cmd.ExecuteC()
+ return err
+ })
+ s.Assert(err, qt.IsNil)
+ s.out = out
+ } else {
+ _, err := cmd.ExecuteC()
+ s.Assert(err, qt.IsNil)
+ }
+
+ return s
+}
+
+// AssertFileContent asserts that the given file contains every non-empty,
+// non-comment ("#"-prefixed) line of each match string.
+func (s *testHugoCmdBuilder) AssertFileContent(filename string, matches ...string) {
+ s.Helper()
+ data, err := afero.ReadFile(s.fs, filename)
+ s.Assert(err, qt.IsNil)
+ content := strings.TrimSpace(string(data))
+ for _, m := range matches {
+ for _, line := range strings.Split(m, "\n") {
+ line = strings.TrimSpace(line)
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ s.Assert(content, qt.Contains, line, qt.Commentf(m))
+ }
+ }
+}
+
+// AssertStdout asserts that the captured stdout contains match, with leading
+// and trailing whitespace trimmed on both sides.
+func (s *testHugoCmdBuilder) AssertStdout(match string) {
+ s.Helper()
+ got := strings.TrimSpace(s.out)
+ want := strings.TrimSpace(match)
+ s.Assert(got, qt.Contains, want)
+}
diff --git a/commands/hugo_windows.go b/commands/hugo_windows.go
new file mode 100644
index 000000000..1724f12cd
--- /dev/null
+++ b/commands/hugo_windows.go
@@ -0,0 +1,33 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ // For time zone lookups on Windows without Go installed.
+ // See #8892
+ _ "time/tzdata"
+
+ "github.com/spf13/cobra"
+)
+
+// init installs the cobra "mousetrap" help text shown when Hugo is launched
+// by double-click from explorer.exe on Windows rather than from a console.
+// NOTE(review): the message reads "static website" where "static websites"
+// would be grammatical — left byte-identical here since it is a runtime string.
+func init() {
+ // This message to show to Windows users if Hugo is opened from explorer.exe
+ cobra.MousetrapHelpText = `
+
+ Hugo is a command-line tool for generating static website.
+
+ You need to open cmd.exe and run Hugo from there.
+
+ Visit https://gohugo.io/ for more information.`
+}
diff --git a/commands/import_jekyll.go b/commands/import_jekyll.go
new file mode 100644
index 000000000..91d5c69fe
--- /dev/null
+++ b/commands/import_jekyll.go
@@ -0,0 +1,604 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+
+ "github.com/gohugoio/hugo/parser/pageparser"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/spf13/afero"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+// Compile-time check that importCmd satisfies the cmder interface.
+var _ cmder = (*importCmd)(nil)
+
+// importCmd wraps the "hugo import" command and its subcommands.
+type importCmd struct {
+ *baseCmd
+}
+
+// newImportCmd builds the "hugo import" command and attaches the "jekyll"
+// subcommand, which converts a Jekyll site into a Hugo site.
+func newImportCmd() *importCmd {
+ cc := &importCmd{}
+
+ cc.baseCmd = newBaseCmd(&cobra.Command{
+ Use: "import",
+ Short: "Import your site from others.",
+ Long: `Import your site from other web site generators like Jekyll.
+
+Import requires a subcommand, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
+ RunE: nil,
+ })
+
+ importJekyllCmd := &cobra.Command{
+ Use: "jekyll",
+ Short: "hugo import from Jekyll",
+ Long: `hugo import from Jekyll.
+
+Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
+ RunE: cc.importFromJekyll,
+ }
+
+ importJekyllCmd.Flags().Bool("force", false, "allow import into non-empty target directory")
+
+ cc.cmd.AddCommand(importJekyllCmd)
+
+ return cc
+}
+
+// importFromJekyll implements "hugo import jekyll <jekyll_root> <target>".
+// It scaffolds a Hugo site at the target, then walks the Jekyll _posts and
+// _drafts directories converting each post into Hugo content.
+func (i *importCmd) importFromJekyll(cmd *cobra.Command, args []string) error {
+ if len(args) < 2 {
+ return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
+ }
+
+ jekyllRoot, err := filepath.Abs(filepath.Clean(args[0]))
+ if err != nil {
+ return newUserError("path error:", args[0])
+ }
+
+ targetDir, err := filepath.Abs(filepath.Clean(args[1]))
+ if err != nil {
+ return newUserError("path error:", args[1])
+ }
+
+ jww.INFO.Println("Import Jekyll from:", jekyllRoot, "to:", targetDir)
+
+ // Importing into a subdirectory of the source would copy files into
+ // themselves.
+ if strings.HasPrefix(filepath.Dir(targetDir), jekyllRoot) {
+ return newUserError("abort: target path should not be inside the Jekyll root")
+ }
+
+ forceImport, _ := cmd.Flags().GetBool("force")
+
+ fs := afero.NewOsFs()
+ jekyllPostDirs, hasAnyPost := i.getJekyllDirInfo(fs, jekyllRoot)
+ if !hasAnyPost {
+ return errors.New("abort: jekyll root contains neither posts nor drafts")
+ }
+
+ err = i.createSiteFromJekyll(jekyllRoot, targetDir, jekyllPostDirs, forceImport)
+
+ if err != nil {
+ return newUserError(err)
+ }
+
+ jww.FEEDBACK.Println("Importing...")
+
+ fileCount := 0
+ // callback converts each file under a post dir, mapping _posts/ to
+ // content/post and _drafts/ to content/draft.
+ callback := func(path string, fi hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if fi.IsDir() {
+ return nil
+ }
+
+ relPath, err := filepath.Rel(jekyllRoot, path)
+ if err != nil {
+ return newUserError("get rel path error:", path)
+ }
+
+ relPath = filepath.ToSlash(relPath)
+ draft := false
+
+ switch {
+ case strings.Contains(relPath, "_posts/"):
+ relPath = filepath.Join("content/post", strings.Replace(relPath, "_posts/", "", -1))
+ case strings.Contains(relPath, "_drafts/"):
+ relPath = filepath.Join("content/draft", strings.Replace(relPath, "_drafts/", "", -1))
+ draft = true
+ default:
+ return nil
+ }
+
+ fileCount++
+ return convertJekyllPost(path, relPath, targetDir, draft)
+ }
+
+ for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs {
+ if hasAnyPostInDir {
+ if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
+ return err
+ }
+ }
+ }
+
+ jww.FEEDBACK.Println("Congratulations!", fileCount, "post(s) imported!")
+ jww.FEEDBACK.Println("Now, start Hugo by yourself:\n" +
+ "$ git clone https://github.com/spf13/herring-cove.git " + args[1] + "/themes/herring-cove")
+ jww.FEEDBACK.Println("$ cd " + args[1] + "\n$ hugo server --theme=herring-cove")
+
+ return nil
+}
+
+// getJekyllDirInfo scans the immediate subdirectories of jekyllRoot for
+// Jekyll post/draft directories. It returns a map of directory name to
+// "contains any post", plus a flag that is true if any post was found at all.
+func (i *importCmd) getJekyllDirInfo(fs afero.Fs, jekyllRoot string) (map[string]bool, bool) {
+ postDirs := make(map[string]bool)
+ hasAnyPost := false
+
+ entries, err := ioutil.ReadDir(jekyllRoot)
+ if err != nil {
+ return postDirs, hasAnyPost
+ }
+
+ for _, entry := range entries {
+ if !entry.IsDir() {
+ continue
+ }
+ subDir := filepath.Join(jekyllRoot, entry.Name())
+ isPostDir, hasAnyPostInDir := i.retrieveJekyllPostDir(fs, subDir)
+ if !isPostDir {
+ continue
+ }
+ postDirs[entry.Name()] = hasAnyPostInDir
+ if hasAnyPostInDir {
+ hasAnyPost = true
+ }
+ }
+
+ return postDirs, hasAnyPost
+}
+
+// retrieveJekyllPostDir reports whether dir is (or contains, recursively) a
+// Jekyll _posts/_drafts directory, and whether that directory is non-empty.
+func (i *importCmd) retrieveJekyllPostDir(fs afero.Fs, dir string) (bool, bool) {
+ if strings.HasSuffix(dir, "_posts") || strings.HasSuffix(dir, "_drafts") {
+ isEmpty, _ := helpers.IsEmpty(dir, fs)
+ return true, !isEmpty
+ }
+
+ if entries, err := ioutil.ReadDir(dir); err == nil {
+ for _, entry := range entries {
+ if entry.IsDir() {
+ subDir := filepath.Join(dir, entry.Name())
+ if isPostDir, hasAnyPost := i.retrieveJekyllPostDir(fs, subDir); isPostDir {
+ return isPostDir, hasAnyPost
+ }
+ }
+ }
+ }
+
+ // Not a post dir: the second value is meaningless here (callers only
+ // consult it when the first value is true).
+ return false, true
+}
+
+// createSiteFromJekyll scaffolds a new Hugo site skeleton at targetDir,
+// writes a config derived from the Jekyll _config.yml, and copies the
+// non-post Jekyll files into static/. With force, a non-empty target is
+// allowed.
+func (i *importCmd) createSiteFromJekyll(jekyllRoot, targetDir string, jekyllPostDirs map[string]bool, force bool) error {
+ fs := &afero.OsFs{}
+ if exists, _ := helpers.Exists(targetDir, fs); exists {
+ if isDir, _ := helpers.IsDir(targetDir, fs); !isDir {
+ return errors.New("target path \"" + targetDir + "\" exists but is not a directory")
+ }
+
+ isEmpty, _ := helpers.IsEmpty(targetDir, fs)
+
+ if !isEmpty && !force {
+ return errors.New("target path \"" + targetDir + "\" exists and is not empty")
+ }
+ }
+
+ jekyllConfig := i.loadJekyllConfig(fs, jekyllRoot)
+
+ // Standard Hugo site skeleton.
+ mkdir(targetDir, "layouts")
+ mkdir(targetDir, "content")
+ mkdir(targetDir, "archetypes")
+ mkdir(targetDir, "static")
+ mkdir(targetDir, "data")
+ mkdir(targetDir, "themes")
+
+ i.createConfigFromJekyll(fs, targetDir, "yaml", jekyllConfig)
+
+ i.copyJekyllFilesAndFolders(jekyllRoot, filepath.Join(targetDir, "static"), jekyllPostDirs)
+
+ return nil
+}
+
+// loadJekyllConfig reads and parses <jekyllRoot>/_config.yml. It is
+// best-effort: any read or parse failure yields nil (only a missing file
+// is warned about).
+func (i *importCmd) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]any {
+ path := filepath.Join(jekyllRoot, "_config.yml")
+
+ exists, err := helpers.Exists(path, fs)
+
+ if err != nil || !exists {
+ // Message fixed to match the filename actually probed (_config.yml,
+ // not _config.yaml).
+ jww.WARN.Println("_config.yml not found: Is the specified Jekyll root correct?")
+ return nil
+ }
+
+ f, err := fs.Open(path)
+ if err != nil {
+ return nil
+ }
+
+ defer f.Close()
+
+ b, err := ioutil.ReadAll(f)
+ if err != nil {
+ return nil
+ }
+
+ c, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)
+ if err != nil {
+ return nil
+ }
+
+ return c
+}
+
+// createConfigFromJekyll writes a minimal Hugo config file (config.<kind>)
+// into inpath, carrying over the title and url (as baseURL) from the parsed
+// Jekyll config when present.
+func (i *importCmd) createConfigFromJekyll(fs afero.Fs, inpath string, kind metadecoders.Format, jekyllConfig map[string]any) (err error) {
+ title := "My New Hugo Site"
+ baseURL := "http://example.org/"
+
+ for key, value := range jekyllConfig {
+ lowerKey := strings.ToLower(key)
+
+ switch lowerKey {
+ case "title":
+ if str, ok := value.(string); ok {
+ title = str
+ }
+
+ case "url":
+ if str, ok := value.(string); ok {
+ baseURL = str
+ }
+ }
+ }
+
+ in := map[string]any{
+ "baseURL": baseURL,
+ "title": title,
+ "languageCode": "en-us",
+ "disablePathToLower": true,
+ }
+
+ var buf bytes.Buffer
+ err = parser.InterfaceToConfig(in, kind, &buf)
+ if err != nil {
+ return err
+ }
+
+ return helpers.WriteToDisk(filepath.Join(inpath, "config."+string(kind)), &buf, fs)
+}
+
+// copyJekyllFilesAndFolders copies non-content files from the Jekyll root to
+// dest (the Hugo static dir), skipping post dirs, dot/underscore-prefixed
+// entries, and known source-file suffixes. Copy failures are logged, not
+// returned.
+func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) {
+ fs := hugofs.Os
+
+ fi, err := fs.Stat(jekyllRoot)
+ if err != nil {
+ return err
+ }
+ if !fi.IsDir() {
+ return errors.New(jekyllRoot + " is not a directory")
+ }
+ err = os.MkdirAll(dest, fi.Mode())
+ if err != nil {
+ return err
+ }
+ entries, err := ioutil.ReadDir(jekyllRoot)
+ if err != nil {
+ return err
+ }
+
+ for _, entry := range entries {
+ sfp := filepath.Join(jekyllRoot, entry.Name())
+ dfp := filepath.Join(dest, entry.Name())
+ if entry.IsDir() {
+ if entry.Name()[0] != '_' && entry.Name()[0] != '.' {
+ if _, ok := jekyllPostDirs[entry.Name()]; !ok {
+ err = hugio.CopyDir(fs, sfp, dfp, nil)
+ if err != nil {
+ jww.ERROR.Println(err)
+ }
+ }
+ }
+ } else {
+ lowerEntryName := strings.ToLower(entry.Name())
+ // Content/source files handled elsewhere or not wanted in static/.
+ exceptSuffix := []string{
+ ".md", ".markdown", ".html", ".htm",
+ ".xml", ".textile", "rakefile", "gemfile", ".lock",
+ }
+ isExcept := false
+ for _, suffix := range exceptSuffix {
+ if strings.HasSuffix(lowerEntryName, suffix) {
+ isExcept = true
+ break
+ }
+ }
+
+ if !isExcept && entry.Name()[0] != '.' && entry.Name()[0] != '_' {
+ err = hugio.CopyFile(fs, sfp, dfp)
+ if err != nil {
+ jww.ERROR.Println(err)
+ }
+ }
+ }
+
+ }
+ return nil
+}
+
+func parseJekyllFilename(filename string) (time.Time, string, error) {
+ re := regexp.MustCompile(`(\d+-\d+-\d+)-(.+)\..*`)
+ r := re.FindAllStringSubmatch(filename, -1)
+ if len(r) == 0 {
+ return htime.Now(), "", errors.New("filename not match")
+ }
+
+ postDate, err := time.Parse("2006-1-2", r[0][1])
+ if err != nil {
+ return htime.Now(), "", err
+ }
+
+ postName := r[0][2]
+
+ return postDate, postName, nil
+}
+
+// convertJekyllPost converts one Jekyll post file to a Hugo content file at
+// targetDir/relPath: it parses the filename for date/name, converts the
+// front matter and body, and writes the result. Files whose names do not
+// match the Jekyll pattern are skipped without error.
+func convertJekyllPost(path, relPath, targetDir string, draft bool) error {
+ jww.TRACE.Println("Converting", path)
+
+ filename := filepath.Base(path)
+ postDate, postName, err := parseJekyllFilename(filename)
+ if err != nil {
+ jww.WARN.Printf("Failed to parse filename '%s': %s. Skipping.", filename, err)
+ return nil
+ }
+
+ jww.TRACE.Println(filename, postDate, postName)
+
+ targetFile := filepath.Join(targetDir, relPath)
+ targetParentDir := filepath.Dir(targetFile)
+ os.MkdirAll(targetParentDir, 0777)
+
+ contentBytes, err := ioutil.ReadFile(path)
+ if err != nil {
+ jww.ERROR.Println("Read file error:", path)
+ return err
+ }
+
+ pf, err := pageparser.ParseFrontMatterAndContent(bytes.NewReader(contentBytes))
+ if err != nil {
+ jww.ERROR.Println("Parse file error:", path)
+ return err
+ }
+
+ newmetadata, err := convertJekyllMetaData(pf.FrontMatter, postName, postDate, draft)
+ if err != nil {
+ jww.ERROR.Println("Convert metadata error:", path)
+ return err
+ }
+
+ content, err := convertJekyllContent(newmetadata, string(pf.Content))
+ if err != nil {
+ jww.ERROR.Println("Converting Jekyll error:", path)
+ return err
+ }
+
+ fs := hugofs.Os
+ if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
+ return fmt.Errorf("failed to save file %q: %s", filename, err)
+ }
+
+ return nil
+}
+
+// convertJekyllMetaData rewrites Jekyll front matter keys to their Hugo
+// equivalents (permalink->url, category->categories, layout dropped), marks
+// drafts, and sets "date" from the filename date — merged with any
+// time-of-day found in a Jekyll "date" field.
+func convertJekyllMetaData(m any, postName string, postDate time.Time, draft bool) (any, error) {
+ metadata, err := maps.ToStringMapE(m)
+ if err != nil {
+ return nil, err
+ }
+
+ if draft {
+ metadata["draft"] = true
+ }
+
+ for key, value := range metadata {
+ lowerKey := strings.ToLower(key)
+
+ switch lowerKey {
+ case "layout":
+ delete(metadata, key)
+ case "permalink":
+ if str, ok := value.(string); ok {
+ metadata["url"] = str
+ }
+ delete(metadata, key)
+ case "category":
+ if str, ok := value.(string); ok {
+ metadata["categories"] = []string{str}
+ }
+ delete(metadata, key)
+ case "excerpt_separator":
+ // Normalize the key casing only.
+ if key != lowerKey {
+ delete(metadata, key)
+ metadata[lowerKey] = value
+ }
+ case "date":
+ // Keep only the time-of-day; the date part comes from the filename.
+ if str, ok := value.(string); ok {
+ re := regexp.MustCompile(`(\d+):(\d+):(\d+)`)
+ r := re.FindAllStringSubmatch(str, -1)
+ if len(r) > 0 {
+ hour, _ := strconv.Atoi(r[0][1])
+ minute, _ := strconv.Atoi(r[0][2])
+ second, _ := strconv.Atoi(r[0][3])
+ postDate = time.Date(postDate.Year(), postDate.Month(), postDate.Day(), hour, minute, second, 0, time.UTC)
+ }
+ }
+ delete(metadata, key)
+ }
+
+ }
+
+ metadata["date"] = postDate.Format(time.RFC3339)
+
+ return metadata, nil
+}
+
+// convertJekyllContent rewrites a Jekyll post body for Hugo: normalizes line
+// endings, replaces the excerpt separator with <!--more-->, converts Liquid
+// raw/highlight/img tags to Hugo shortcodes, and prepends the metadata as
+// YAML front matter.
+func convertJekyllContent(m any, content string) (string, error) {
+ metadata, _ := maps.ToStringMapE(m)
+
+ lines := strings.Split(content, "\n")
+ var resultLines []string
+ for _, line := range lines {
+ resultLines = append(resultLines, strings.Trim(line, "\r\n"))
+ }
+
+ content = strings.Join(resultLines, "\n")
+
+ excerptSep := "<!--more-->"
+ if value, ok := metadata["excerpt_separator"]; ok {
+ if str, strOk := value.(string); strOk {
+ content = strings.Replace(content, strings.TrimSpace(str), excerptSep, -1)
+ }
+ }
+
+ // Plain string-to-string Liquid replacements.
+ replaceList := []struct {
+ re *regexp.Regexp
+ replace string
+ }{
+ {regexp.MustCompile("(?i)<!-- more -->"), "<!--more-->"},
+ {regexp.MustCompile(`\{%\s*raw\s*%\}\s*(.*?)\s*\{%\s*endraw\s*%\}`), "$1"},
+ {regexp.MustCompile(`{%\s*endhighlight\s*%}`), "{{< / highlight >}}"},
+ }
+
+ for _, replace := range replaceList {
+ content = replace.re.ReplaceAllString(content, replace.replace)
+ }
+
+ // Replacements that need to inspect the matched tag.
+ replaceListFunc := []struct {
+ re *regexp.Regexp
+ replace func(string) string
+ }{
+ // Octopress image tag: http://octopress.org/docs/plugins/image-tag/
+ {regexp.MustCompile(`{%\s+img\s*(.*?)\s*%}`), replaceImageTag},
+ {regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`), replaceHighlightTag},
+ }
+
+ for _, replace := range replaceListFunc {
+ content = replace.re.ReplaceAllStringFunc(content, replace.replace)
+ }
+
+ var buf bytes.Buffer
+ if len(metadata) != 0 {
+ err := parser.InterfaceToFrontMatter(m, metadecoders.YAML, &buf)
+ if err != nil {
+ return "", err
+ }
+ }
+ buf.WriteString(content)
+
+ return buf.String(), nil
+}
+
+// replaceHighlightTag converts a Liquid {% highlight lang opts %} tag into a
+// Hugo {{< highlight lang "opts" >}} shortcode, translating "linenos" to
+// "linenos=table" and respecting quoted option sections.
+// NOTE(review): the regexp is recompiled on every call, and items[0] would
+// panic on a bare "{% highlight %}" with no language — confirm inputs always
+// carry a language before hardening.
+func replaceHighlightTag(match string) string {
+ r := regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`)
+ parts := r.FindStringSubmatch(match)
+ lastQuote := rune(0)
+ // f reports rune c as a field separator only when outside quotes.
+ f := func(c rune) bool {
+ switch {
+ case c == lastQuote:
+ lastQuote = rune(0)
+ return false
+ case lastQuote != rune(0):
+ return false
+ case unicode.In(c, unicode.Quotation_Mark):
+ lastQuote = c
+ return false
+ default:
+ return unicode.IsSpace(c)
+ }
+ }
+ // splitting string by space but considering quoted section
+ items := strings.FieldsFunc(parts[1], f)
+
+ result := bytes.NewBufferString("{{< highlight ")
+ result.WriteString(items[0]) // language
+ options := items[1:]
+ for i, opt := range options {
+ opt = strings.Replace(opt, "\"", "", -1)
+ if opt == "linenos" {
+ opt = "linenos=table"
+ }
+ if i == 0 {
+ opt = " \"" + opt
+ }
+ if i < len(options)-1 {
+ opt += ","
+ } else if i == len(options)-1 {
+ opt += "\""
+ }
+ result.WriteString(opt)
+ }
+
+ result.WriteString(" >}}")
+ return result.String()
+}
+
+// replaceImageTag converts an Octopress {% img class src width height
+// "title" "alt" %} tag into a Hugo {{< figure ... >}} shortcode. The final
+// free-text part is split on single quotes to separate title from alt.
+func replaceImageTag(match string) string {
+ r := regexp.MustCompile(`{%\s+img\s*(\p{L}*)\s+([\S]*/[\S]+)\s+(\d*)\s*(\d*)\s*(.*?)\s*%}`)
+ result := bytes.NewBufferString("{{< figure ")
+ parts := r.FindStringSubmatch(match)
+ // Index 0 is the entire string, ignore
+ replaceOptionalPart(result, "class", parts[1])
+ replaceOptionalPart(result, "src", parts[2])
+ replaceOptionalPart(result, "width", parts[3])
+ replaceOptionalPart(result, "height", parts[4])
+ // title + alt
+ part := parts[5]
+ if len(part) > 0 {
+ splits := strings.Split(part, "'")
+ lenSplits := len(splits)
+ if lenSplits == 1 {
+ replaceOptionalPart(result, "title", splits[0])
+ } else if lenSplits == 3 {
+ replaceOptionalPart(result, "title", splits[1])
+ } else if lenSplits == 5 {
+ replaceOptionalPart(result, "title", splits[1])
+ replaceOptionalPart(result, "alt", splits[3])
+ }
+ }
+ result.WriteString(">}}")
+ return result.String()
+}
+
+// replaceOptionalPart appends `partName="part" ` (with a trailing space) to
+// buffer when part is non-empty; empty parts are skipped.
+func replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
+ if part == "" {
+ return
+ }
+ buffer.WriteString(partName)
+ buffer.WriteString("=\"")
+ buffer.WriteString(part)
+ buffer.WriteString("\" ")
+}
diff --git a/commands/import_jekyll_test.go b/commands/import_jekyll_test.go
new file mode 100644
index 000000000..dbe4e25d0
--- /dev/null
+++ b/commands/import_jekyll_test.go
@@ -0,0 +1,177 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "encoding/json"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestParseJekyllFilename(t *testing.T) {
+ c := qt.New(t)
+ filenameArray := []string{
+ "2015-01-02-test.md",
+ "2012-03-15-中文.markup",
+ }
+
+ expectResult := []struct {
+ postDate time.Time
+ postName string
+ }{
+ {time.Date(2015, time.January, 2, 0, 0, 0, 0, time.UTC), "test"},
+ {time.Date(2012, time.March, 15, 0, 0, 0, 0, time.UTC), "中文"},
+ }
+
+ for i, filename := range filenameArray {
+ postDate, postName, err := parseJekyllFilename(filename)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expectResult[i].postDate.Format("2006-01-02"), qt.Equals, postDate.Format("2006-01-02"))
+ c.Assert(expectResult[i].postName, qt.Equals, postName)
+ }
+}
+
+func TestConvertJekyllMetadata(t *testing.T) {
+ c := qt.New(t)
+ testDataList := []struct {
+ metadata any
+ postName string
+ postDate time.Time
+ draft bool
+ expect string
+ }{
+ {
+ map[any]any{},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+ `{"date":"2015-10-01T00:00:00Z"}`,
+ },
+ {
+ map[any]any{},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
+ `{"date":"2015-10-01T00:00:00Z","draft":true}`,
+ },
+ {
+ map[any]any{"Permalink": "/permalink.html", "layout": "post"},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+ `{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
+ },
+ {
+ map[any]any{"permalink": "/permalink.html"},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+ `{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
+ },
+ {
+ map[any]any{"category": nil, "permalink": 123},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+ `{"date":"2015-10-01T00:00:00Z"}`,
+ },
+ {
+ map[any]any{"Excerpt_Separator": "sep"},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+ `{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`,
+ },
+ {
+ map[any]any{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
+ "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+ `{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`,
+ },
+ }
+
+ for _, data := range testDataList {
+ result, err := convertJekyllMetaData(data.metadata, data.postName, data.postDate, data.draft)
+ c.Assert(err, qt.IsNil)
+ jsonResult, err := json.Marshal(result)
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(jsonResult), qt.Equals, data.expect)
+ }
+}
+
+func TestConvertJekyllContent(t *testing.T) {
+ c := qt.New(t)
+ testDataList := []struct {
+ metadata any
+ content string
+ expect string
+ }{
+ {
+ map[any]any{},
+ "Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content",
+ },
+ {
+ map[any]any{},
+ "Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content",
+ },
+ {
+ map[any]any{"excerpt_separator": "<!--sep-->"},
+ "Test content\n<!--sep-->\npart2 content",
+ "---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content",
+ },
+ {map[any]any{}, "{% raw %}text{% endraw %}", "text"},
+ {map[any]any{}, "{%raw%} text2 {%endraw %}", "text2"},
+ {
+ map[any]any{},
+ "{% highlight go %}\nvar s int\n{% endhighlight %}",
+ "{{< highlight go >}}\nvar s int\n{{< / highlight >}}",
+ },
+ {
+ map[any]any{},
+ "{% highlight go linenos hl_lines=\"1 2\" %}\nvar s string\nvar i int\n{% endhighlight %}",
+ "{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}",
+ },
+
+ // Octopress image tag
+ {
+ map[any]any{},
+ "{% img http://placekitten.com/890/280 %}",
+ "{{< figure src=\"http://placekitten.com/890/280\" >}}",
+ },
+ {
+ map[any]any{},
+ "{% img left http://placekitten.com/320/250 Place Kitten #2 %}",
+ "{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}",
+ },
+ {
+ map[any]any{},
+ "{% img right http://placekitten.com/300/500 150 250 'Place Kitten #3' %}",
+ "{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}",
+ },
+ {
+ map[any]any{},
+ "{% img right http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
+ "{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
+ },
+ {
+ map[any]any{},
+ "{% img http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
+ "{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
+ },
+ {
+ map[any]any{},
+ "{% img right /placekitten/300/500 'Place Kitten #4' 'An image of a very cute kitten' %}",
+ "{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
+ },
+ {
+ map[any]any{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
+ "somecontent",
+ "---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent",
+ },
+ }
+ for _, data := range testDataList {
+ result, err := convertJekyllContent(data.metadata, data.content)
+ c.Assert(result, qt.Equals, data.expect)
+ c.Assert(err, qt.IsNil)
+ }
+}
diff --git a/commands/limit_darwin.go b/commands/limit_darwin.go
new file mode 100644
index 000000000..6799f37b1
--- /dev/null
+++ b/commands/limit_darwin.go
@@ -0,0 +1,84 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "syscall"
+
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*limitCmd)(nil)
+
+type limitCmd struct {
+ *baseCmd
+}
+
+func newLimitCmd() *limitCmd {
+ ccmd := &cobra.Command{
+ Use: "ulimit",
+ Short: "Check system ulimit settings",
+ Long: `Hugo will inspect the current ulimit settings on the system.
+This is primarily to ensure that Hugo can watch enough files on some OSs`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ var rLimit syscall.Rlimit
+ err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
+ if err != nil {
+ return newSystemError("Error Getting rlimit ", err)
+ }
+
+ jww.FEEDBACK.Println("Current rLimit:", rLimit)
+
+ if rLimit.Cur >= newRlimit {
+ return nil
+ }
+
+ jww.FEEDBACK.Println("Attempting to increase limit")
+ rLimit.Cur = newRlimit
+ err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
+ if err != nil {
+ return newSystemError("Error Setting rLimit ", err)
+ }
+ err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
+ if err != nil {
+ return newSystemError("Error Getting rLimit ", err)
+ }
+ jww.FEEDBACK.Println("rLimit after change:", rLimit)
+
+ return nil
+ },
+ }
+
+ return &limitCmd{baseCmd: newBaseCmd(ccmd)}
+}
+
+const newRlimit = 10240
+
+func tweakLimit() {
+ var rLimit syscall.Rlimit
+ err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
+ if err != nil {
+ jww.WARN.Println("Unable to get rlimit:", err)
+ return
+ }
+ if rLimit.Cur < newRlimit {
+ rLimit.Cur = newRlimit
+ err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
+ if err != nil {
+ // This may not succeed, see https://github.com/golang/go/issues/30401
+ jww.INFO.Println("Unable to increase number of open files limit:", err)
+ }
+ }
+}
diff --git a/commands/limit_others.go b/commands/limit_others.go
new file mode 100644
index 000000000..b141b7004
--- /dev/null
+++ b/commands/limit_others.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !darwin
+// +build !darwin
+
+package commands
+
+func tweakLimit() {
+ // nothing to do
+}
diff --git a/commands/list.go b/commands/list.go
new file mode 100644
index 000000000..4b62c91c5
--- /dev/null
+++ b/commands/list.go
@@ -0,0 +1,210 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "encoding/csv"
+ "os"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*listCmd)(nil)
+
+type listCmd struct {
+ *baseBuilderCmd
+}
+
+func (lc *listCmd) buildSites(config map[string]any) (*hugolib.HugoSites, error) {
+ cfgInit := func(c *commandeer) error {
+ for key, value := range config {
+ c.Set(key, value)
+ }
+ return nil
+ }
+
+ c, err := initializeConfig(true, true, false, &lc.hugoBuilderCommon, lc, cfgInit)
+ if err != nil {
+ return nil, err
+ }
+
+ sites, err := hugolib.NewHugoSites(*c.DepsCfg)
+ if err != nil {
+ return nil, newSystemError("Error creating sites", err)
+ }
+
+ if err := sites.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
+ return nil, newSystemError("Error Processing Source Content", err)
+ }
+
+ return sites, nil
+}
+
+func (b *commandsBuilder) newListCmd() *listCmd {
+ cc := &listCmd{}
+
+ cmd := &cobra.Command{
+ Use: "list",
+ Short: "Listing out various types of content",
+ Long: `Listing out various types of content.
+
+List requires a subcommand, e.g. ` + "`hugo list drafts`.",
+ RunE: nil,
+ }
+
+ cmd.AddCommand(
+ &cobra.Command{
+ Use: "drafts",
+ Short: "List all drafts",
+ Long: `List all of the drafts in your content directory.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ sites, err := cc.buildSites(map[string]any{"buildDrafts": true})
+ if err != nil {
+ return newSystemError("Error building sites", err)
+ }
+
+ for _, p := range sites.Pages() {
+ if p.Draft() {
+ jww.FEEDBACK.Println(strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)))
+ }
+ }
+
+ return nil
+ },
+ },
+ &cobra.Command{
+ Use: "future",
+ Short: "List all posts dated in the future",
+ Long: `List all of the posts in your content directory which will be posted in the future.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ sites, err := cc.buildSites(map[string]any{"buildFuture": true})
+ if err != nil {
+ return newSystemError("Error building sites", err)
+ }
+
+ if err != nil {
+ return newSystemError("Error building sites", err)
+ }
+
+ writer := csv.NewWriter(os.Stdout)
+ defer writer.Flush()
+
+ for _, p := range sites.Pages() {
+ if resource.IsFuture(p) {
+ err := writer.Write([]string{
+ strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
+ p.PublishDate().Format(time.RFC3339),
+ })
+ if err != nil {
+ return newSystemError("Error writing future posts to stdout", err)
+ }
+ }
+ }
+
+ return nil
+ },
+ },
+ &cobra.Command{
+ Use: "expired",
+ Short: "List all posts already expired",
+ Long: `List all of the posts in your content directory which have already expired.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ sites, err := cc.buildSites(map[string]any{"buildExpired": true})
+ if err != nil {
+ return newSystemError("Error building sites", err)
+ }
+
+ if err != nil {
+ return newSystemError("Error building sites", err)
+ }
+
+ writer := csv.NewWriter(os.Stdout)
+ defer writer.Flush()
+
+ for _, p := range sites.Pages() {
+ if resource.IsExpired(p) {
+ err := writer.Write([]string{
+ strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
+ p.ExpiryDate().Format(time.RFC3339),
+ })
+ if err != nil {
+ return newSystemError("Error writing expired posts to stdout", err)
+ }
+ }
+ }
+
+ return nil
+ },
+ },
+ &cobra.Command{
+ Use: "all",
+ Short: "List all posts",
+ Long: `List all of the posts in your content directory, including drafts, future and expired pages.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ sites, err := cc.buildSites(map[string]any{
+ "buildExpired": true,
+ "buildDrafts": true,
+ "buildFuture": true,
+ })
+ if err != nil {
+ return newSystemError("Error building sites", err)
+ }
+
+ writer := csv.NewWriter(os.Stdout)
+ defer writer.Flush()
+
+ writer.Write([]string{
+ "path",
+ "slug",
+ "title",
+ "date",
+ "expiryDate",
+ "publishDate",
+ "draft",
+ "permalink",
+ })
+ for _, p := range sites.Pages() {
+ if !p.IsPage() {
+ continue
+ }
+ err := writer.Write([]string{
+ strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
+ p.Slug(),
+ p.Title(),
+ p.Date().Format(time.RFC3339),
+ p.ExpiryDate().Format(time.RFC3339),
+ p.PublishDate().Format(time.RFC3339),
+ strconv.FormatBool(p.Draft()),
+ p.Permalink(),
+ })
+ if err != nil {
+ return newSystemError("Error writing posts to stdout", err)
+ }
+ }
+
+ return nil
+ },
+ },
+ )
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
diff --git a/commands/list_test.go b/commands/list_test.go
new file mode 100644
index 000000000..8b2535571
--- /dev/null
+++ b/commands/list_test.go
@@ -0,0 +1,68 @@
+package commands
+
+import (
+ "bytes"
+ "encoding/csv"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func captureStdout(f func() error) (string, error) {
+ old := os.Stdout
+ r, w, _ := os.Pipe()
+ os.Stdout = w
+
+ err := f()
+
+ w.Close()
+ os.Stdout = old
+
+ var buf bytes.Buffer
+ io.Copy(&buf, r)
+ return buf.String(), err
+}
+
+func TestListAll(t *testing.T) {
+ c := qt.New(t)
+ dir := createSimpleTestSite(t, testSiteConfig{})
+
+ hugoCmd := newCommandsBuilder().addAll().build()
+ cmd := hugoCmd.getCommand()
+
+ t.Cleanup(func() {
+ os.RemoveAll(dir)
+ })
+
+ cmd.SetArgs([]string{"-s=" + dir, "list", "all"})
+
+ out, err := captureStdout(func() error {
+ _, err := cmd.ExecuteC()
+ return err
+ })
+ c.Assert(err, qt.IsNil)
+
+ r := csv.NewReader(strings.NewReader(out))
+
+ header, err := r.Read()
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(header, qt.DeepEquals, []string{
+ "path", "slug", "title",
+ "date", "expiryDate", "publishDate",
+ "draft", "permalink",
+ })
+
+ record, err := r.Read()
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(record, qt.DeepEquals, []string{
+ filepath.Join("content", "p1.md"), "", "P1",
+ "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z",
+ "false", "https://example.org/p1/",
+ })
+}
diff --git a/commands/mod.go b/commands/mod.go
new file mode 100644
index 000000000..44a48bf79
--- /dev/null
+++ b/commands/mod.go
@@ -0,0 +1,293 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+
+ "github.com/gohugoio/hugo/hugolib"
+
+ "github.com/gohugoio/hugo/modules"
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*modCmd)(nil)
+
+type modCmd struct {
+ *baseBuilderCmd
+}
+
+func (c *modCmd) newVerifyCmd() *cobra.Command {
+ var clean bool
+
+ verifyCmd := &cobra.Command{
+ Use: "verify",
+ Short: "Verify dependencies.",
+ Long: `Verify checks that the dependencies of the current module, which are stored in a local downloaded source cache, have not been modified since being downloaded.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return c.withModsClient(true, func(c *modules.Client) error {
+ return c.Verify(clean)
+ })
+ },
+ }
+
+ verifyCmd.Flags().BoolVarP(&clean, "clean", "", false, "delete module cache for dependencies that fail verification")
+
+ return verifyCmd
+}
+
+var moduleNotFoundRe = regexp.MustCompile("module.*not found")
+
+func (c *modCmd) newCleanCmd() *cobra.Command {
+ var pattern string
+ var all bool
+ cmd := &cobra.Command{
+ Use: "clean",
+ Short: "Delete the Hugo Module cache for the current project.",
+ Long: `Delete the Hugo Module cache for the current project.
+
+Note that after you run this command, all of your dependencies will be re-downloaded next time you run "hugo".
+
+Also note that if you configure a positive maxAge for the "modules" file cache, it will also be cleaned as part of "hugo --gc".
+
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ if all {
+ com, err := c.initConfig(false)
+
+ if err != nil && com == nil {
+ return err
+ }
+
+ count, err := com.hugo().FileCaches.ModulesCache().Prune(true)
+ com.logger.Printf("Deleted %d files from module cache.", count)
+ return err
+ }
+ return c.withModsClient(true, func(c *modules.Client) error {
+ return c.Clean(pattern)
+ })
+ },
+ }
+
+ cmd.Flags().StringVarP(&pattern, "pattern", "", "", `pattern matching module paths to clean (all if not set), e.g. "**hugo*"`)
+ cmd.Flags().BoolVarP(&all, "all", "", false, "clean entire module cache")
+
+ return cmd
+}
+
+func (b *commandsBuilder) newModCmd() *modCmd {
+ c := &modCmd{}
+
+ const commonUsage = `
+Note that Hugo will always start out by resolving the components defined in the site
+configuration, provided by a _vendor directory (if no --ignoreVendorPaths flag provided),
+Go Modules, or a folder inside the themes directory, in that order.
+
+See https://gohugo.io/hugo-modules/ for more information.
+
+`
+
+ cmd := &cobra.Command{
+ Use: "mod",
+ Short: "Various Hugo Modules helpers.",
+ Long: `Various helpers to help manage the modules in your project's dependency graph.
+
+Most operations here require a Go version installed on your system (>= Go 1.12) and the relevant VCS client (typically Git).
+This is not needed if you only operate on modules inside /themes or if you have vendored them via "hugo mod vendor".
+
+` + commonUsage,
+
+ RunE: nil,
+ }
+
+ cmd.AddCommand(newModNPMCmd(c))
+
+ cmd.AddCommand(
+ &cobra.Command{
+ Use: "get",
+ DisableFlagParsing: true,
+ Short: "Resolves dependencies in your current Hugo Project.",
+ Long: `
+Resolves dependencies in your current Hugo Project.
+
+Some examples:
+
+Install the latest version possible for a given module:
+
+ hugo mod get github.com/gohugoio/testshortcodes
+
+Install a specific version:
+
+ hugo mod get github.com/gohugoio/testshortcodes@v0.3.0
+
+Install the latest versions of all module dependencies:
+
+ hugo mod get -u
+ hugo mod get -u ./... (recursive)
+
+Run "go help get" for more information. All flags available for "go get" are also relevant here.
+` + commonUsage,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ // We currently just pass on the flags we get to Go and
+ // need to do the flag handling manually.
+ if len(args) == 1 && args[0] == "-h" {
+ return cmd.Help()
+ }
+
+ var lastArg string
+ if len(args) != 0 {
+ lastArg = args[len(args)-1]
+ }
+
+ if lastArg == "./..." {
+ args = args[:len(args)-1]
+ // Do a recursive update.
+ dirname, err := os.Getwd()
+ if err != nil {
+ return err
+ }
+
+ // Sanity check. We do recursive walking and want to avoid
+ // accidents.
+ if len(dirname) < 5 {
+ return errors.New("must not be run from the file system root")
+ }
+
+ filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error {
+ if info.IsDir() {
+ return nil
+ }
+
+ if info.Name() == "go.mod" {
+ // Found a module.
+ dir := filepath.Dir(path)
+ fmt.Println("Update module in", dir)
+ c.source = dir
+ err := c.withModsClient(false, func(c *modules.Client) error {
+ if len(args) == 1 && args[0] == "-h" {
+ return cmd.Help()
+ }
+ return c.Get(args...)
+ })
+ if err != nil {
+ return err
+ }
+
+ }
+
+ return nil
+ })
+
+ return nil
+ }
+
+ return c.withModsClient(false, func(c *modules.Client) error {
+ return c.Get(args...)
+ })
+ },
+ },
+ &cobra.Command{
+ Use: "graph",
+ Short: "Print a module dependency graph.",
+ Long: `Print a module dependency graph with information about module status (disabled, vendored).
+Note that for vendored modules, that is the version listed and not the one from go.mod.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return c.withModsClient(true, func(c *modules.Client) error {
+ return c.Graph(os.Stdout)
+ })
+ },
+ },
+ &cobra.Command{
+ Use: "init",
+ Short: "Initialize this project as a Hugo Module.",
+ Long: `Initialize this project as a Hugo Module.
+It will try to guess the module path, but you may help by passing it as an argument, e.g:
+
+ hugo mod init github.com/gohugoio/testshortcodes
+
+Note that Hugo Modules supports multi-module projects, so you can initialize a Hugo Module
+inside a subfolder on GitHub, as one example.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ var path string
+ if len(args) >= 1 {
+ path = args[0]
+ }
+ return c.withModsClient(false, func(c *modules.Client) error {
+ return c.Init(path)
+ })
+ },
+ },
+ &cobra.Command{
+ Use: "vendor",
+ Short: "Vendor all module dependencies into the _vendor directory.",
+ Long: `Vendor all module dependencies into the _vendor directory.
+
+If a module is vendored, that is where Hugo will look for its dependencies.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return c.withModsClient(true, func(c *modules.Client) error {
+ return c.Vendor()
+ })
+ },
+ },
+ c.newVerifyCmd(),
+ &cobra.Command{
+ Use: "tidy",
+ Short: "Remove unused entries in go.mod and go.sum.",
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return c.withModsClient(true, func(c *modules.Client) error {
+ return c.Tidy()
+ })
+ },
+ },
+ c.newCleanCmd(),
+ )
+
+ c.baseBuilderCmd = b.newBuilderCmd(cmd)
+
+ return c
+}
+
+func (c *modCmd) withModsClient(failOnMissingConfig bool, f func(*modules.Client) error) error {
+ com, err := c.initConfig(failOnMissingConfig)
+ if err != nil {
+ return err
+ }
+
+ return f(com.hugo().ModulesClient)
+}
+
+func (c *modCmd) withHugo(f func(*hugolib.HugoSites) error) error {
+ com, err := c.initConfig(true)
+ if err != nil {
+ return err
+ }
+
+ return f(com.hugo())
+}
+
+func (c *modCmd) initConfig(failOnNoConfig bool) (*commandeer, error) {
+ com, err := initializeConfig(failOnNoConfig, false, false, &c.hugoBuilderCommon, c, nil)
+ if err != nil {
+ return nil, err
+ }
+ return com, nil
+}
diff --git a/commands/mod_npm.go b/commands/mod_npm.go
new file mode 100644
index 000000000..852d98571
--- /dev/null
+++ b/commands/mod_npm.go
@@ -0,0 +1,56 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/modules/npm"
+ "github.com/spf13/cobra"
+)
+
+func newModNPMCmd(c *modCmd) *cobra.Command {
+ cmd := &cobra.Command{
+ Use: "npm",
+ Short: "Various npm helpers.",
+ Long: `Various npm (Node package manager) helpers.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return c.withHugo(func(h *hugolib.HugoSites) error {
+ return nil
+ })
+ },
+ }
+
+ cmd.AddCommand(&cobra.Command{
+ Use: "pack",
+ Short: "Experimental: Prepares and writes a composite package.json file for your project.",
+ Long: `Prepares and writes a composite package.json file for your project.
+
+On first run it creates a "package.hugo.json" in the project root if not already there. This file will be used as a template file
+with the base dependency set.
+
+This set will be merged with all "package.hugo.json" files found in the dependency tree, picking the version closest to the project.
+
+This command is marked as 'Experimental'. We think it's a great idea, so it's not likely to be
+removed from Hugo, but we need to test this out in "real life" to get a feel of it,
+so this may/will change in future versions of Hugo.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return c.withHugo(func(h *hugolib.HugoSites) error {
+ return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs)
+ })
+ },
+ })
+
+ return cmd
+}
diff --git a/commands/new.go b/commands/new.go
new file mode 100644
index 000000000..c5b5cd182
--- /dev/null
+++ b/commands/new.go
@@ -0,0 +1,126 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/create"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/spf13/afero"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*newCmd)(nil)
+
+type newCmd struct {
+ contentEditor string
+ contentType string
+
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newNewCmd() *newCmd {
+ cmd := &cobra.Command{
+ Use: "new [path]",
+ Short: "Create new content for your site",
+ Long: `Create a new content file and automatically set the date and title.
+It will guess which kind of file to create based on the path provided.
+
+You can also specify the kind with ` + "`-k KIND`" + `.
+
+If archetypes are provided in your theme or site, they will be used.
+
+Ensure you run this within the root directory of your site.`,
+ }
+
+ cc := &newCmd{baseBuilderCmd: b.newBuilderCmd(cmd)}
+
+ cmd.Flags().StringVarP(&cc.contentType, "kind", "k", "", "content type to create")
+ cmd.Flags().StringVar(&cc.contentEditor, "editor", "", "edit new content with this editor, if provided")
+
+ cmd.AddCommand(b.newNewSiteCmd().getCommand())
+ cmd.AddCommand(b.newNewThemeCmd().getCommand())
+
+ cmd.RunE = cc.newContent
+
+ return cc
+}
+
+func (n *newCmd) newContent(cmd *cobra.Command, args []string) error {
+ cfgInit := func(c *commandeer) error {
+ if cmd.Flags().Changed("editor") {
+ c.Set("newContentEditor", n.contentEditor)
+ }
+ return nil
+ }
+
+ c, err := initializeConfig(true, true, false, &n.hugoBuilderCommon, n, cfgInit)
+ if err != nil {
+ return err
+ }
+
+ if len(args) < 1 {
+ return newUserError("path needs to be provided")
+ }
+
+ return create.NewContent(c.hugo(), n.contentType, args[0])
+}
+
+func mkdir(x ...string) {
+ p := filepath.Join(x...)
+
+ err := os.MkdirAll(p, 0777) // before umask
+ if err != nil {
+ jww.FATAL.Fatalln(err)
+ }
+}
+
+func touchFile(fs afero.Fs, x ...string) {
+ inpath := filepath.Join(x...)
+ mkdir(filepath.Dir(inpath))
+ err := helpers.WriteToDisk(inpath, bytes.NewReader([]byte{}), fs)
+ if err != nil {
+ jww.FATAL.Fatalln(err)
+ }
+}
+
+func newContentPathSection(h *hugolib.HugoSites, path string) (string, string) {
+ // Forward slashes are used in all examples. Convert if needed.
+ // Issue #1133
+ createpath := filepath.FromSlash(path)
+
+ if h != nil {
+ for _, dir := range h.BaseFs.Content.Dirs {
+ createpath = strings.TrimPrefix(createpath, dir.Meta().Filename)
+ }
+ }
+
+ var section string
+ // assume the first directory is the section (kind)
+ if strings.Contains(createpath[1:], helpers.FilePathSeparator) {
+ parts := strings.Split(strings.TrimPrefix(createpath, helpers.FilePathSeparator), helpers.FilePathSeparator)
+ if len(parts) > 0 {
+ section = parts[0]
+ }
+
+ }
+
+ return createpath, section
+}
diff --git a/commands/new_content_test.go b/commands/new_content_test.go
new file mode 100644
index 000000000..42a7c968c
--- /dev/null
+++ b/commands/new_content_test.go
@@ -0,0 +1,29 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// Issue #1133
+func TestNewContentPathSectionWithForwardSlashes(t *testing.T) {
+ c := qt.New(t)
+ p, s := newContentPathSection(nil, "/post/new.md")
+ c.Assert(p, qt.Equals, filepath.FromSlash("/post/new.md"))
+ c.Assert(s, qt.Equals, "post")
+}
diff --git a/commands/new_site.go b/commands/new_site.go
new file mode 100644
index 000000000..384c6365b
--- /dev/null
+++ b/commands/new_site.go
@@ -0,0 +1,165 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/gohugoio/hugo/create"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*newSiteCmd)(nil)
+
+type newSiteCmd struct {
+ configFormat string
+
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newNewSiteCmd() *newSiteCmd {
+ cc := &newSiteCmd{}
+
+ cmd := &cobra.Command{
+ Use: "site [path]",
+ Short: "Create a new site (skeleton)",
+ Long: `Create a new site in the provided directory.
+The new site will have the correct structure, but no content or theme yet.
+Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
+ RunE: cc.newSite,
+ }
+
+ cmd.Flags().StringVarP(&cc.configFormat, "format", "f", "toml", "config file format")
+ cmd.Flags().Bool("force", false, "init inside non-empty directory")
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
+
+func (n *newSiteCmd) doNewSite(fs *hugofs.Fs, basepath string, force bool) error {
+ archeTypePath := filepath.Join(basepath, "archetypes")
+ dirs := []string{
+ filepath.Join(basepath, "layouts"),
+ filepath.Join(basepath, "content"),
+ archeTypePath,
+ filepath.Join(basepath, "static"),
+ filepath.Join(basepath, "data"),
+ filepath.Join(basepath, "themes"),
+ }
+
+ if exists, _ := helpers.Exists(basepath, fs.Source); exists {
+ if isDir, _ := helpers.IsDir(basepath, fs.Source); !isDir {
+ return errors.New(basepath + " already exists but not a directory")
+ }
+
+ isEmpty, _ := helpers.IsEmpty(basepath, fs.Source)
+
+ switch {
+ case !isEmpty && !force:
+ return errors.New(basepath + " already exists and is not empty. See --force.")
+
+ case !isEmpty && force:
+ all := append(dirs, filepath.Join(basepath, "config."+n.configFormat))
+ for _, path := range all {
+ if exists, _ := helpers.Exists(path, fs.Source); exists {
+ return errors.New(path + " already exists")
+ }
+ }
+ }
+ }
+
+ for _, dir := range dirs {
+ if err := fs.Source.MkdirAll(dir, 0777); err != nil {
+ return fmt.Errorf("Failed to create dir: %w", err)
+ }
+ }
+
+ createConfig(fs, basepath, n.configFormat)
+
+ // Create a default archetype file.
+ helpers.SafeWriteToDisk(filepath.Join(archeTypePath, "default.md"),
+ strings.NewReader(create.DefaultArchetypeTemplateTemplate), fs.Source)
+
+ jww.FEEDBACK.Printf("Congratulations! Your new Hugo site is created in %s.\n\n", basepath)
+ jww.FEEDBACK.Println(nextStepsText())
+
+ return nil
+}
+
+// newSite creates a new Hugo site and initializes a structured Hugo directory.
+func (n *newSiteCmd) newSite(cmd *cobra.Command, args []string) error {
+ if len(args) < 1 {
+ return newUserError("path needs to be provided")
+ }
+
+ createpath, err := filepath.Abs(filepath.Clean(args[0]))
+ if err != nil {
+ return newUserError(err)
+ }
+
+ forceNew, _ := cmd.Flags().GetBool("force")
+ cfg := config.New()
+ cfg.Set("workingDir", createpath)
+ cfg.Set("publishDir", "public")
+ return n.doNewSite(hugofs.NewDefault(cfg), createpath, forceNew)
+}
+
+func createConfig(fs *hugofs.Fs, inpath string, kind string) (err error) {
+ in := map[string]string{
+ "baseURL": "http://example.org/",
+ "title": "My New Hugo Site",
+ "languageCode": "en-us",
+ }
+
+ var buf bytes.Buffer
+ err = parser.InterfaceToConfig(in, metadecoders.FormatFromString(kind), &buf)
+ if err != nil {
+ return err
+ }
+
+ return helpers.WriteToDisk(filepath.Join(inpath, "config."+kind), &buf, fs.Source)
+}
+
+func nextStepsText() string {
+ var nextStepsText bytes.Buffer
+
+ nextStepsText.WriteString(`Just a few more steps and you're ready to go:
+
+1. Download a theme into the same-named folder.
+ Choose a theme from https://themes.gohugo.io/ or
+ create your own with the "hugo new theme <THEMENAME>" command.
+2. Perhaps you want to add some content. You can add single files
+ with "hugo new `)
+
+ nextStepsText.WriteString(filepath.Join("<SECTIONNAME>", "<FILENAME>.<FORMAT>"))
+
+ nextStepsText.WriteString(`".
+3. Start the built-in live server via "hugo server".
+
+Visit https://gohugo.io/ for quickstart guide and full documentation.`)
+
+ return nextStepsText.String()
+}
diff --git a/commands/new_theme.go b/commands/new_theme.go
new file mode 100644
index 000000000..4e2357b55
--- /dev/null
+++ b/commands/new_theme.go
@@ -0,0 +1,176 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "errors"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*newThemeCmd)(nil)
+
+type newThemeCmd struct {
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newNewThemeCmd() *newThemeCmd {
+ cc := &newThemeCmd{}
+
+ cmd := &cobra.Command{
+ Use: "theme [name]",
+ Short: "Create a new theme",
+ Long: `Create a new theme (skeleton) called [name] in ./themes.
+New theme is a skeleton. Please add content to the touched files. Add your
+name to the copyright line in the license and adjust the theme.toml file
+as you see fit.`,
+ RunE: cc.newTheme,
+ }
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
+
+// newTheme creates a new Hugo theme template
+func (n *newThemeCmd) newTheme(cmd *cobra.Command, args []string) error {
+ c, err := initializeConfig(false, false, false, &n.hugoBuilderCommon, n, nil)
+ if err != nil {
+ return err
+ }
+
+ if len(args) < 1 {
+ return newUserError("theme name needs to be provided")
+ }
+
+ createpath := c.hugo().PathSpec.AbsPathify(filepath.Join(c.Cfg.GetString("themesDir"), args[0]))
+ jww.FEEDBACK.Println("Creating theme at", createpath)
+
+ cfg := c.DepsCfg
+
+ if x, _ := helpers.Exists(createpath, cfg.Fs.Source); x {
+ return errors.New(createpath + " already exists")
+ }
+
+ mkdir(createpath, "layouts", "_default")
+ mkdir(createpath, "layouts", "partials")
+
+ touchFile(cfg.Fs.Source, createpath, "layouts", "index.html")
+ touchFile(cfg.Fs.Source, createpath, "layouts", "404.html")
+ touchFile(cfg.Fs.Source, createpath, "layouts", "_default", "list.html")
+ touchFile(cfg.Fs.Source, createpath, "layouts", "_default", "single.html")
+
+ baseofDefault := []byte(`<!DOCTYPE html>
+<html>
+ {{- partial "head.html" . -}}
+ <body>
+ {{- partial "header.html" . -}}
+ <div id="content">
+ {{- block "main" . }}{{- end }}
+ </div>
+ {{- partial "footer.html" . -}}
+ </body>
+</html>
+`)
+ err = helpers.WriteToDisk(filepath.Join(createpath, "layouts", "_default", "baseof.html"), bytes.NewReader(baseofDefault), cfg.Fs.Source)
+ if err != nil {
+ return err
+ }
+
+ touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "head.html")
+ touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "header.html")
+ touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "footer.html")
+
+ mkdir(createpath, "archetypes")
+
+ archDefault := []byte("+++\n+++\n")
+
+ err = helpers.WriteToDisk(filepath.Join(createpath, "archetypes", "default.md"), bytes.NewReader(archDefault), cfg.Fs.Source)
+ if err != nil {
+ return err
+ }
+
+ mkdir(createpath, "static", "js")
+ mkdir(createpath, "static", "css")
+
+ by := []byte(`The MIT License (MIT)
+
+Copyright (c) ` + htime.Now().Format("2006") + ` YOUR_NAME_HERE
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+`)
+
+ err = helpers.WriteToDisk(filepath.Join(createpath, "LICENSE"), bytes.NewReader(by), cfg.Fs.Source)
+ if err != nil {
+ return err
+ }
+
+ n.createThemeMD(cfg.Fs, createpath)
+
+ return nil
+}
+
+func (n *newThemeCmd) createThemeMD(fs *hugofs.Fs, inpath string) (err error) {
+ by := []byte(`# theme.toml template for a Hugo theme
+# See https://github.com/gohugoio/hugoThemes#themetoml for an example
+
+name = "` + strings.Title(helpers.MakeTitle(filepath.Base(inpath))) + `"
+license = "MIT"
+licenselink = "https://github.com/yourname/yourtheme/blob/master/LICENSE"
+description = ""
+homepage = "http://example.com/"
+tags = []
+features = []
+min_version = "0.41.0"
+
+[author]
+ name = ""
+ homepage = ""
+
+# If porting an existing theme
+[original]
+ name = ""
+ homepage = ""
+ repo = ""
+`)
+
+ err = helpers.WriteToDisk(filepath.Join(inpath, "theme.toml"), bytes.NewReader(by), fs.Source)
+ if err != nil {
+ return
+ }
+
+ return nil
+}
diff --git a/commands/nodeploy.go b/commands/nodeploy.go
new file mode 100644
index 000000000..061ea503e
--- /dev/null
+++ b/commands/nodeploy.go
@@ -0,0 +1,51 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build nodeploy
+// +build nodeploy
+
+package commands
+
+import (
+ "errors"
+
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*deployCmd)(nil)
+
+// deployCmd supports deploying sites to Cloud providers.
+type deployCmd struct {
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newDeployCmd() *deployCmd {
+ cc := &deployCmd{}
+
+ cmd := &cobra.Command{
+ Use: "deploy",
+ Short: "Deploy your site to a Cloud provider.",
+ Long: `Deploy your site to a Cloud provider.
+
+See https://gohugo.io/hosting-and-deployment/hugo-deploy/ for detailed
+documentation.
+`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return errors.New("build without HUGO_BUILD_TAGS=nodeploy to use this command")
+ },
+ }
+
+ cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
+
+ return cc
+}
diff --git a/commands/release.go b/commands/release.go
new file mode 100644
index 000000000..6decda9ea
--- /dev/null
+++ b/commands/release.go
@@ -0,0 +1,72 @@
+//go:build release
+// +build release
+
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "errors"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/releaser"
+ "github.com/spf13/cobra"
+)
+
+var _ cmder = (*releaseCommandeer)(nil)
+
+type releaseCommandeer struct {
+ cmd *cobra.Command
+
+ version string
+
+ skipPublish bool
+ try bool
+}
+
+func createReleaser() cmder {
+ // Note: This is a command only meant for internal use and must be run
+ // via "go run -tags release main.go release" on the actual code base that is in the release.
+ r := &releaseCommandeer{
+ cmd: &cobra.Command{
+ Use: "release",
+ Short: "Release a new version of Hugo.",
+ Hidden: true,
+ },
+ }
+
+ r.cmd.RunE = func(cmd *cobra.Command, args []string) error {
+ return r.release()
+ }
+
+ r.cmd.PersistentFlags().StringVarP(&r.version, "rel", "r", "", "new release version, i.e. 0.25.1")
+ r.cmd.PersistentFlags().BoolVarP(&r.skipPublish, "skip-publish", "", false, "skip all publishing pipes of the release")
+ r.cmd.PersistentFlags().BoolVarP(&r.try, "try", "", false, "simulate a release, i.e. no changes")
+
+ return r
+}
+
+func (c *releaseCommandeer) getCommand() *cobra.Command {
+ return c.cmd
+}
+
+func (c *releaseCommandeer) flagsToConfig(cfg config.Provider) {
+}
+
+func (r *releaseCommandeer) release() error {
+ if r.version == "" {
+ return errors.New("must set the --rel flag to the relevant version number")
+ }
+ return releaser.New(r.version, r.skipPublish, r.try).Run()
+}
diff --git a/commands/release_noop.go b/commands/release_noop.go
new file mode 100644
index 000000000..176dc9794
--- /dev/null
+++ b/commands/release_noop.go
@@ -0,0 +1,21 @@
+//go:build !release
+// +build !release
+
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+func createReleaser() cmder {
+ return &nilCommand{}
+}
diff --git a/commands/server.go b/commands/server.go
new file mode 100644
index 000000000..f082164ce
--- /dev/null
+++ b/commands/server.go
@@ -0,0 +1,741 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "net"
+ "net/http"
+ "net/url"
+ "os"
+ "os/signal"
+ "path"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "syscall"
+ "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/tpl"
+ "golang.org/x/sync/errgroup"
+
+ "github.com/gohugoio/hugo/livereload"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/afero"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+type serverCmd struct {
+ // Can be used to stop the server. Useful in tests
+ stop chan bool
+
+ disableLiveReload bool
+ navigateToChanged bool
+ renderToDisk bool
+ renderStaticToDisk bool
+ serverAppend bool
+ serverInterface string
+ serverPort int
+ liveReloadPort int
+ serverWatch bool
+ noHTTPCache bool
+
+ disableFastRender bool
+ disableBrowserError bool
+
+ *baseBuilderCmd
+}
+
+func (b *commandsBuilder) newServerCmd() *serverCmd {
+ return b.newServerCmdSignaled(nil)
+}
+
+func (b *commandsBuilder) newServerCmdSignaled(stop chan bool) *serverCmd {
+ cc := &serverCmd{stop: stop}
+
+ cc.baseBuilderCmd = b.newBuilderCmd(&cobra.Command{
+ Use: "server",
+ Aliases: []string{"serve"},
+ Short: "A high performance webserver",
+ Long: `Hugo provides its own webserver which builds and serves the site.
+While hugo server is high performance, it is a webserver with limited options.
+Many run it in production, but the standard behavior is for people to use it
+in development and use a more full featured server such as Nginx or Caddy.
+
+'hugo server' will avoid writing the rendered and served content to disk,
+preferring to store it in memory.
+
+By default hugo will also watch your files for any changes you make and
+automatically rebuild the site. It will then live reload any open browser pages
+and push the latest content to them. As most Hugo sites are built in a fraction
+of a second, you will be able to save and see your changes nearly instantly.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ err := cc.server(cmd, args)
+ if err != nil && cc.stop != nil {
+ cc.stop <- true
+ }
+ return err
+ },
+ })
+
+ cc.cmd.Flags().IntVarP(&cc.serverPort, "port", "p", 1313, "port on which the server will listen")
+ cc.cmd.Flags().IntVar(&cc.liveReloadPort, "liveReloadPort", -1, "port for live reloading (i.e. 443 in HTTPS proxy situations)")
+ cc.cmd.Flags().StringVarP(&cc.serverInterface, "bind", "", "127.0.0.1", "interface to which the server will bind")
+ cc.cmd.Flags().BoolVarP(&cc.serverWatch, "watch", "w", true, "watch filesystem for changes and recreate as needed")
+ cc.cmd.Flags().BoolVar(&cc.noHTTPCache, "noHTTPCache", false, "prevent HTTP caching")
+ cc.cmd.Flags().BoolVarP(&cc.serverAppend, "appendPort", "", true, "append port to baseURL")
+ cc.cmd.Flags().BoolVar(&cc.disableLiveReload, "disableLiveReload", false, "watch without enabling live browser reload on rebuild")
+ cc.cmd.Flags().BoolVar(&cc.navigateToChanged, "navigateToChanged", false, "navigate to changed content file on live browser reload")
+ cc.cmd.Flags().BoolVar(&cc.renderToDisk, "renderToDisk", false, "serve all files from disk (default is from memory)")
+ cc.cmd.Flags().BoolVar(&cc.renderStaticToDisk, "renderStaticToDisk", false, "serve static files from disk and dynamic files from memory")
+ cc.cmd.Flags().BoolVar(&cc.disableFastRender, "disableFastRender", false, "enables full re-renders on changes")
+ cc.cmd.Flags().BoolVar(&cc.disableBrowserError, "disableBrowserError", false, "do not show build errors in the browser")
+
+ cc.cmd.Flags().String("memstats", "", "log memory usage to this file")
+ cc.cmd.Flags().String("meminterval", "100ms", "interval to poll memory usage (requires --memstats), valid time units are \"ns\", \"us\" (or \"µs\"), \"ms\", \"s\", \"m\", \"h\".")
+
+ return cc
+}
+
+type filesOnlyFs struct {
+ fs http.FileSystem
+}
+
+type noDirFile struct {
+ http.File
+}
+
+func (fs filesOnlyFs) Open(name string) (http.File, error) {
+ f, err := fs.fs.Open(name)
+ if err != nil {
+ return nil, err
+ }
+ return noDirFile{f}, nil
+}
+
+func (f noDirFile) Readdir(count int) ([]os.FileInfo, error) {
+ return nil, nil
+}
+
+func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
+ // If a Destination is provided via flag write to disk
+ destination, _ := cmd.Flags().GetString("destination")
+ if destination != "" {
+ sc.renderToDisk = true
+ }
+
+ var serverCfgInit sync.Once
+
+ cfgInit := func(c *commandeer) (rerr error) {
+ c.Set("renderToMemory", !(sc.renderToDisk || sc.renderStaticToDisk))
+ c.Set("renderStaticToDisk", sc.renderStaticToDisk)
+ if cmd.Flags().Changed("navigateToChanged") {
+ c.Set("navigateToChanged", sc.navigateToChanged)
+ }
+ if cmd.Flags().Changed("disableLiveReload") {
+ c.Set("disableLiveReload", sc.disableLiveReload)
+ }
+ if cmd.Flags().Changed("disableFastRender") {
+ c.Set("disableFastRender", sc.disableFastRender)
+ }
+ if cmd.Flags().Changed("disableBrowserError") {
+ c.Set("disableBrowserError", sc.disableBrowserError)
+ }
+ if sc.serverWatch {
+ c.Set("watch", true)
+ }
+
+ // TODO(bep) see issue 9901
+ // cfgInit is called twice, before and after the languages have been initialized.
+ // The servers (below) can not be initialized before we
+ // know if we're configured in a multihost setup.
+ if len(c.languages) == 0 {
+ return nil
+ }
+
+ // We can only do this once.
+ serverCfgInit.Do(func() {
+ c.serverPorts = make([]serverPortListener, 1)
+
+ if c.languages.IsMultihost() {
+ if !sc.serverAppend {
+ rerr = newSystemError("--appendPort=false not supported when in multihost mode")
+ }
+ c.serverPorts = make([]serverPortListener, len(c.languages))
+ }
+
+ currentServerPort := sc.serverPort
+
+ for i := 0; i < len(c.serverPorts); i++ {
+ l, err := net.Listen("tcp", net.JoinHostPort(sc.serverInterface, strconv.Itoa(currentServerPort)))
+ if err == nil {
+ c.serverPorts[i] = serverPortListener{ln: l, p: currentServerPort}
+ } else {
+ if i == 0 && sc.cmd.Flags().Changed("port") {
+ // port set explicitly by user -- he/she probably meant it!
+ rerr = newSystemErrorF("Server startup failed: %s", err)
+ return
+ }
+ c.logger.Println("port", sc.serverPort, "already in use, attempting to use an available port")
+ l, sp, err := helpers.TCPListen()
+ if err != nil {
+ rerr = newSystemError("Unable to find alternative port to use:", err)
+ return
+ }
+ c.serverPorts[i] = serverPortListener{ln: l, p: sp.Port}
+ }
+
+ currentServerPort = c.serverPorts[i].p + 1
+ }
+ })
+
+ if rerr != nil {
+ return
+ }
+
+ c.Set("port", sc.serverPort)
+ if sc.liveReloadPort != -1 {
+ c.Set("liveReloadPort", sc.liveReloadPort)
+ } else {
+ c.Set("liveReloadPort", c.serverPorts[0].p)
+ }
+
+ isMultiHost := c.languages.IsMultihost()
+ for i, language := range c.languages {
+ var serverPort int
+ if isMultiHost {
+ serverPort = c.serverPorts[i].p
+ } else {
+ serverPort = c.serverPorts[0].p
+ }
+
+ baseURL, err := sc.fixURL(language, sc.baseURL, serverPort)
+ if err != nil {
+ return nil
+ }
+ if isMultiHost {
+ language.Set("baseURL", baseURL)
+ }
+ if i == 0 {
+ c.Set("baseURL", baseURL)
+ }
+ }
+
+ return
+ }
+
+ if err := memStats(); err != nil {
+ jww.WARN.Println("memstats error:", err)
+ }
+
+ // silence errors in cobra so we can handle them here
+ cmd.SilenceErrors = true
+
+ c, err := initializeConfig(true, true, true, &sc.hugoBuilderCommon, sc, cfgInit)
+ if err != nil {
+ cmd.PrintErrln("Error:", err.Error())
+ return err
+ }
+
+ err = func() error {
+ defer c.timeTrack(time.Now(), "Built")
+ err := c.serverBuild()
+ if err != nil {
+ cmd.PrintErrln("Error:", err.Error())
+ }
+ return err
+ }()
+ if err != nil {
+ return err
+ }
+
+ // Watch runs its own server as part of the routine
+ if sc.serverWatch {
+
+ watchDirs, err := c.getDirList()
+ if err != nil {
+ return err
+ }
+
+ watchGroups := helpers.ExtractAndGroupRootPaths(watchDirs)
+
+ for _, group := range watchGroups {
+ jww.FEEDBACK.Printf("Watching for changes in %s\n", group)
+ }
+ watcher, err := c.newWatcher(sc.poll, watchDirs...)
+ if err != nil {
+ return err
+ }
+
+ defer watcher.Close()
+
+ }
+
+ return c.serve(sc)
+}
+
+func getRootWatchDirsStr(baseDir string, watchDirs []string) string {
+ relWatchDirs := make([]string, len(watchDirs))
+ for i, dir := range watchDirs {
+ relWatchDirs[i], _ = paths.GetRelativePath(dir, baseDir)
+ }
+
+ return strings.Join(helpers.UniqueStringsSorted(helpers.ExtractRootPaths(relWatchDirs)), ",")
+}
+
+type fileServer struct {
+ baseURLs []string
+ roots []string
+ errorTemplate func(err any) (io.Reader, error)
+ c *commandeer
+ s *serverCmd
+}
+
+func (f *fileServer) rewriteRequest(r *http.Request, toPath string) *http.Request {
+ r2 := new(http.Request)
+ *r2 = *r
+ r2.URL = new(url.URL)
+ *r2.URL = *r.URL
+ r2.URL.Path = toPath
+ r2.Header.Set("X-Rewrite-Original-URI", r.URL.RequestURI())
+
+ return r2
+}
+
+func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string, string, error) {
+ baseURL := f.baseURLs[i]
+ root := f.roots[i]
+ port := f.c.serverPorts[i].p
+ listener := f.c.serverPorts[i].ln
+
+ // For logging only.
+ // TODO(bep) consolidate.
+ publishDir := f.c.Cfg.GetString("publishDir")
+ publishDirStatic := f.c.Cfg.GetString("publishDirStatic")
+ workingDir := f.c.Cfg.GetString("workingDir")
+
+ if root != "" {
+ publishDir = filepath.Join(publishDir, root)
+ publishDirStatic = filepath.Join(publishDirStatic, root)
+ }
+ absPublishDir := paths.AbsPathify(workingDir, publishDir)
+ absPublishDirStatic := paths.AbsPathify(workingDir, publishDirStatic)
+
+ jww.FEEDBACK.Printf("Environment: %q", f.c.hugo().Deps.Site.Hugo().Environment)
+
+ if i == 0 {
+ if f.s.renderToDisk {
+ jww.FEEDBACK.Println("Serving pages from " + absPublishDir)
+ } else if f.s.renderStaticToDisk {
+ jww.FEEDBACK.Println("Serving pages from memory and static files from " + absPublishDirStatic)
+ } else {
+ jww.FEEDBACK.Println("Serving pages from memory")
+ }
+ }
+
+ httpFs := afero.NewHttpFs(f.c.publishDirServerFs)
+ fs := filesOnlyFs{httpFs.Dir(path.Join("/", root))}
+
+ if i == 0 && f.c.fastRenderMode {
+ jww.FEEDBACK.Println("Running in Fast Render Mode. For full rebuilds on change: hugo server --disableFastRender")
+ }
+
+ // We're only interested in the path
+ u, err := url.Parse(baseURL)
+ if err != nil {
+ return nil, nil, "", "", fmt.Errorf("Invalid baseURL: %w", err)
+ }
+
+ decorate := func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if f.c.showErrorInBrowser {
+ // First check the error state
+ err := f.c.getErrorWithContext()
+ if err != nil {
+ f.c.wasError = true
+ w.WriteHeader(500)
+ r, err := f.errorTemplate(err)
+ if err != nil {
+ f.c.logger.Errorln(err)
+ }
+
+ port = 1313
+ if !f.c.paused {
+ port = f.c.Cfg.GetInt("liveReloadPort")
+ }
+ lr := *u
+ lr.Host = fmt.Sprintf("%s:%d", lr.Hostname(), port)
+ fmt.Fprint(w, injectLiveReloadScript(r, lr))
+
+ return
+ }
+ }
+
+ if f.s.noHTTPCache {
+ w.Header().Set("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0")
+ w.Header().Set("Pragma", "no-cache")
+ }
+
+ // Ignore any query params for the operations below.
+ requestURI := strings.TrimSuffix(r.RequestURI, "?"+r.URL.RawQuery)
+
+ for _, header := range f.c.serverConfig.MatchHeaders(requestURI) {
+ w.Header().Set(header.Key, header.Value)
+ }
+
+ if redirect := f.c.serverConfig.MatchRedirect(requestURI); !redirect.IsZero() {
+ doRedirect := true
+ // This matches Netlify's behaviour and is needed for SPA behaviour.
+ // See https://docs.netlify.com/routing/redirects/rewrites-proxies/
+ if !redirect.Force {
+ path := filepath.Clean(strings.TrimPrefix(requestURI, u.Path))
+ fi, err := f.c.hugo().BaseFs.PublishFs.Stat(path)
+ if err == nil {
+ if fi.IsDir() {
+ // There will be overlapping directories, so we
+ // need to check for a file.
+ _, err = f.c.hugo().BaseFs.PublishFs.Stat(filepath.Join(path, "index.html"))
+ doRedirect = err != nil
+ } else {
+ doRedirect = false
+ }
+ }
+ }
+
+ if doRedirect {
+ if redirect.Status == 200 {
+ if r2 := f.rewriteRequest(r, strings.TrimPrefix(redirect.To, u.Path)); r2 != nil {
+ requestURI = redirect.To
+ r = r2
+ }
+ } else {
+ w.Header().Set("Content-Type", "")
+ http.Redirect(w, r, redirect.To, redirect.Status)
+ return
+ }
+ }
+
+ }
+
+ if f.c.fastRenderMode && f.c.buildErr == nil {
+ if strings.HasSuffix(requestURI, "/") || strings.HasSuffix(requestURI, "html") || strings.HasSuffix(requestURI, "htm") {
+ if !f.c.visitedURLs.Contains(requestURI) {
+ // If not already on stack, re-render that single page.
+ if err := f.c.partialReRender(requestURI); err != nil {
+ f.c.handleBuildErr(err, fmt.Sprintf("Failed to render %q", requestURI))
+ if f.c.showErrorInBrowser {
+ http.Redirect(w, r, requestURI, http.StatusMovedPermanently)
+ return
+ }
+ }
+ }
+
+ f.c.visitedURLs.Add(requestURI)
+
+ }
+ }
+
+ h.ServeHTTP(w, r)
+ })
+ }
+
+ fileserver := decorate(http.FileServer(fs))
+ mu := http.NewServeMux()
+ if u.Path == "" || u.Path == "/" {
+ mu.Handle("/", fileserver)
+ } else {
+ mu.Handle(u.Path, http.StripPrefix(u.Path, fileserver))
+ }
+
+ endpoint := net.JoinHostPort(f.s.serverInterface, strconv.Itoa(port))
+
+ return mu, listener, u.String(), endpoint, nil
+}
+
+var (
+ logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} `)
+ logDuplicateTemplateExecuteRe = regexp.MustCompile(`: template: .*?:\d+:\d+: executing ".*?"`)
+ logDuplicateTemplateParseRe = regexp.MustCompile(`: template: .*?:\d+:\d*`)
+)
+
+func removeErrorPrefixFromLog(content string) string {
+ return logErrorRe.ReplaceAllLiteralString(content, "")
+}
+
+var logReplacer = strings.NewReplacer(
+ "can't", "can’t", // Chroma lexer doesn't do well with "can't"
+ "*hugolib.pageState", "page.Page", // Page is the public interface.
+ "Rebuild failed:", "",
+)
+
+func cleanErrorLog(content string) string {
+ content = strings.ReplaceAll(content, "\n", " ")
+ content = logReplacer.Replace(content)
+ content = logDuplicateTemplateExecuteRe.ReplaceAllString(content, "")
+ content = logDuplicateTemplateParseRe.ReplaceAllString(content, "")
+ seen := make(map[string]bool)
+ parts := strings.Split(content, ": ")
+ keep := make([]string, 0, len(parts))
+ for _, part := range parts {
+ if seen[part] {
+ continue
+ }
+ seen[part] = true
+ keep = append(keep, part)
+ }
+ return strings.Join(keep, ": ")
+}
+
+func (c *commandeer) serve(s *serverCmd) error {
+ isMultiHost := c.hugo().IsMultihost()
+
+ var (
+ baseURLs []string
+ roots []string
+ )
+
+ if isMultiHost {
+ for _, s := range c.hugo().Sites {
+ baseURLs = append(baseURLs, s.BaseURL.String())
+ roots = append(roots, s.Language().Lang)
+ }
+ } else {
+ s := c.hugo().Sites[0]
+ baseURLs = []string{s.BaseURL.String()}
+ roots = []string{""}
+ }
+
+ // Cache it here. The HugoSites object may be unavailable later on due to intermittent configuration errors.
+ // To allow the end user to change the error template while the server is running, we use
+ // the freshest template we can provide.
+ var (
+ errTempl tpl.Template
+ templHandler tpl.TemplateHandler
+ )
+ getErrorTemplateAndHandler := func(h *hugolib.HugoSites) (tpl.Template, tpl.TemplateHandler) {
+ if h == nil {
+ return errTempl, templHandler
+ }
+ templHandler := h.Tmpl()
+ errTempl, found := templHandler.Lookup("_server/error.html")
+ if !found {
+ panic("template server/error.html not found")
+ }
+ return errTempl, templHandler
+ }
+ errTempl, templHandler = getErrorTemplateAndHandler(c.hugo())
+
+ srv := &fileServer{
+ baseURLs: baseURLs,
+ roots: roots,
+ c: c,
+ s: s,
+ errorTemplate: func(ctx any) (io.Reader, error) {
+ // hugoTry does not block, getErrorTemplateAndHandler will fall back
+ // to cached values if nil.
+ templ, handler := getErrorTemplateAndHandler(c.hugoTry())
+ b := &bytes.Buffer{}
+ err := handler.Execute(templ, b, ctx)
+ return b, err
+ },
+ }
+
+ doLiveReload := !c.Cfg.GetBool("disableLiveReload")
+
+ if doLiveReload {
+ livereload.Initialize()
+ }
+
+ sigs := make(chan os.Signal, 1)
+ signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
+ var servers []*http.Server
+
+ wg1, ctx := errgroup.WithContext(context.Background())
+
+ for i := range baseURLs {
+ mu, listener, serverURL, endpoint, err := srv.createEndpoint(i)
+ srv := &http.Server{
+ Addr: endpoint,
+ Handler: mu,
+ }
+ servers = append(servers, srv)
+
+ if doLiveReload {
+ u, err := url.Parse(helpers.SanitizeURL(baseURLs[i]))
+ if err != nil {
+ return err
+ }
+
+ mu.HandleFunc(u.Path+"/livereload.js", livereload.ServeJS)
+ mu.HandleFunc(u.Path+"/livereload", livereload.Handler)
+ }
+ jww.FEEDBACK.Printf("Web Server is available at %s (bind address %s)\n", serverURL, s.serverInterface)
+ wg1.Go(func() error {
+ err = srv.Serve(listener)
+ if err != nil && err != http.ErrServerClosed {
+ return err
+ }
+ return nil
+ })
+ }
+
+ jww.FEEDBACK.Println("Press Ctrl+C to stop")
+
+ err := func() error {
+ if s.stop != nil {
+ for {
+ select {
+ case <-sigs:
+ return nil
+ case <-s.stop:
+ return nil
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+ }
+ } else {
+ for {
+ select {
+ case <-sigs:
+ return nil
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+ }
+ }
+ }()
+
+ if err != nil {
+ jww.ERROR.Println("Error:", err)
+ }
+
+ if h := c.hugoTry(); h != nil {
+ h.Close()
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ defer cancel()
+ wg2, ctx := errgroup.WithContext(ctx)
+ for _, srv := range servers {
+ srv := srv
+ wg2.Go(func() error {
+ return srv.Shutdown(ctx)
+ })
+ }
+
+ err1, err2 := wg1.Wait(), wg2.Wait()
+ if err1 != nil {
+ return err1
+ }
+ return err2
+}
+
+// fixURL massages the baseURL into a form needed for serving
+// all pages correctly.
+func (sc *serverCmd) fixURL(cfg config.Provider, s string, port int) (string, error) {
+ useLocalhost := false
+ if s == "" {
+ s = cfg.GetString("baseURL")
+ useLocalhost = true
+ }
+
+ if !strings.HasSuffix(s, "/") {
+ s = s + "/"
+ }
+
+ // do an initial parse of the input string
+ u, err := url.Parse(s)
+ if err != nil {
+ return "", err
+ }
+
+ // if no Host is defined, then assume that no scheme or double-slash were
+ // present in the URL. Add a double-slash and make a best effort attempt.
+ if u.Host == "" && s != "/" {
+ s = "//" + s
+
+ u, err = url.Parse(s)
+ if err != nil {
+ return "", err
+ }
+ }
+
+ if useLocalhost {
+ if u.Scheme == "https" {
+ u.Scheme = "http"
+ }
+ u.Host = "localhost"
+ }
+
+ if sc.serverAppend {
+ if strings.Contains(u.Host, ":") {
+ u.Host, _, err = net.SplitHostPort(u.Host)
+ if err != nil {
+ return "", fmt.Errorf("Failed to split baseURL hostpost: %w", err)
+ }
+ }
+ u.Host += fmt.Sprintf(":%d", port)
+ }
+
+ return u.String(), nil
+}
+
+func memStats() error {
+ b := newCommandsBuilder()
+ sc := b.newServerCmd().getCommand()
+ memstats := sc.Flags().Lookup("memstats").Value.String()
+ if memstats != "" {
+ interval, err := time.ParseDuration(sc.Flags().Lookup("meminterval").Value.String())
+ if err != nil {
+ interval, _ = time.ParseDuration("100ms")
+ }
+
+ fileMemStats, err := os.Create(memstats)
+ if err != nil {
+ return err
+ }
+
+ fileMemStats.WriteString("# Time\tHeapSys\tHeapAlloc\tHeapIdle\tHeapReleased\n")
+
+ go func() {
+ var stats runtime.MemStats
+
+ start := htime.Now().UnixNano()
+
+ for {
+ runtime.ReadMemStats(&stats)
+ if fileMemStats != nil {
+ fileMemStats.WriteString(fmt.Sprintf("%d\t%d\t%d\t%d\t%d\n",
+ (htime.Now().UnixNano()-start)/1000000, stats.HeapSys, stats.HeapAlloc, stats.HeapIdle, stats.HeapReleased))
+ time.Sleep(interval)
+ } else {
+ break
+ }
+ }
+ }()
+ }
+ return nil
+}
diff --git a/commands/server_errors.go b/commands/server_errors.go
new file mode 100644
index 000000000..edf658156
--- /dev/null
+++ b/commands/server_errors.go
@@ -0,0 +1,31 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "bytes"
+ "io"
+ "net/url"
+
+ "github.com/gohugoio/hugo/transform"
+ "github.com/gohugoio/hugo/transform/livereloadinject"
+)
+
+func injectLiveReloadScript(src io.Reader, baseURL url.URL) string {
+ var b bytes.Buffer
+ chain := transform.Chain{livereloadinject.New(baseURL)}
+ chain.Apply(&b, src)
+
+ return b.String()
+}
diff --git a/commands/server_test.go b/commands/server_test.go
new file mode 100644
index 000000000..56d3949ee
--- /dev/null
+++ b/commands/server_test.go
@@ -0,0 +1,280 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "fmt"
+ "net/http"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+ "golang.org/x/net/context"
+ "golang.org/x/sync/errgroup"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// Issue 9518
+func TestServerPanicOnConfigError(t *testing.T) {
+ c := qt.New(t)
+
+ config := `
+[markup]
+[markup.highlight]
+linenos='table'
+`
+
+ r := runServerTest(c, 0, config)
+
+ c.Assert(r.err, qt.IsNotNil)
+ c.Assert(r.err.Error(), qt.Contains, "cannot parse 'Highlight.LineNos' as bool:")
+}
+
+func TestServerFlags(t *testing.T) {
+ c := qt.New(t)
+
+ assertPublic := func(c *qt.C, r serverTestResult, renderStaticToDisk bool) {
+ c.Assert(r.err, qt.IsNil)
+ c.Assert(r.homesContent[0], qt.Contains, "Environment: development")
+ c.Assert(r.publicDirnames["myfile.txt"], qt.Equals, renderStaticToDisk)
+
+ }
+
+ for _, test := range []struct {
+ flag string
+ assert func(c *qt.C, r serverTestResult)
+ }{
+ {"", func(c *qt.C, r serverTestResult) {
+ assertPublic(c, r, false)
+ }},
+ {"--renderToDisk", func(c *qt.C, r serverTestResult) {
+ assertPublic(c, r, true)
+ }},
+ {"--renderStaticToDisk", func(c *qt.C, r serverTestResult) {
+ assertPublic(c, r, true)
+ }},
+ } {
+ c.Run(test.flag, func(c *qt.C) {
+ config := `
+baseURL="https://example.org"
+`
+
+ var args []string
+ if test.flag != "" {
+ args = strings.Split(test.flag, "=")
+ }
+
+ r := runServerTest(c, 1, config, args...)
+
+ test.assert(c, r)
+
+ })
+
+ }
+
+}
+
+func TestServerBugs(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ name string
+ config string
+ flag string
+ numservers int
+ assert func(c *qt.C, r serverTestResult)
+ }{
+ // Issue 9788
+ {"PostProcess, memory", "", "", 1, func(c *qt.C, r serverTestResult) {
+ c.Assert(r.err, qt.IsNil)
+ c.Assert(r.homesContent[0], qt.Contains, "PostProcess: /foo.min.css")
+ }},
+ {"PostProcess, disk", "", "--renderToDisk", 1, func(c *qt.C, r serverTestResult) {
+ c.Assert(r.err, qt.IsNil)
+ c.Assert(r.homesContent[0], qt.Contains, "PostProcess: /foo.min.css")
+ }},
+ // Issue 9901
+ {"Multihost", `
+defaultContentLanguage = 'en'
+[languages]
+[languages.en]
+baseURL = 'https://example.com'
+title = 'My blog'
+weight = 1
+[languages.fr]
+baseURL = 'https://example.fr'
+title = 'Mon blogue'
+weight = 2
+`, "", 2, func(c *qt.C, r serverTestResult) {
+ c.Assert(r.err, qt.IsNil)
+ for i, s := range []string{"My blog", "Mon blogue"} {
+ c.Assert(r.homesContent[i], qt.Contains, s)
+ }
+ }},
+ } {
+ c.Run(test.name, func(c *qt.C) {
+ if test.config == "" {
+ test.config = `
+baseURL="https://example.org"
+`
+ }
+
+ var args []string
+ if test.flag != "" {
+ args = strings.Split(test.flag, "=")
+ }
+ r := runServerTest(c, test.numservers, test.config, args...)
+ test.assert(c, r)
+
+ })
+
+ }
+
+}
+
+type serverTestResult struct {
+ err error
+ homesContent []string
+ publicDirnames map[string]bool
+}
+
+func runServerTest(c *qt.C, getNumHomes int, config string, args ...string) (result serverTestResult) {
+ dir := createSimpleTestSite(c, testSiteConfig{configTOML: config})
+
+ sp, err := helpers.FindAvailablePort()
+ c.Assert(err, qt.IsNil)
+ port := sp.Port
+
+ defer func() {
+ os.RemoveAll(dir)
+ }()
+
+ stop := make(chan bool)
+
+ b := newCommandsBuilder()
+ scmd := b.newServerCmdSignaled(stop)
+
+ cmd := scmd.getCommand()
+ args = append([]string{"-s=" + dir, fmt.Sprintf("-p=%d", port)}, args...)
+ cmd.SetArgs(args)
+
+ ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+ defer cancel()
+ wg, ctx := errgroup.WithContext(ctx)
+
+ wg.Go(func() error {
+ _, err := cmd.ExecuteC()
+ return err
+ })
+
+ if getNumHomes > 0 {
+ // Esp. on slow CI machines, we need to wait a little before the web
+ // server is ready.
+ time.Sleep(567 * time.Millisecond)
+ result.homesContent = make([]string, getNumHomes)
+ for i := 0; i < getNumHomes; i++ {
+ func() {
+ resp, err := http.Get(fmt.Sprintf("http://localhost:%d/", port+i))
+ c.Check(err, qt.IsNil)
+ c.Check(resp.StatusCode, qt.Equals, http.StatusOK)
+ if err == nil {
+ defer resp.Body.Close()
+ result.homesContent[i] = helpers.ReaderToString(resp.Body)
+ }
+ }()
+ }
+ }
+
+ time.Sleep(1 * time.Second)
+
+ select {
+ case <-stop:
+ case stop <- true:
+ }
+
+ pubFiles, err := os.ReadDir(filepath.Join(dir, "public"))
+ c.Check(err, qt.IsNil)
+ result.publicDirnames = make(map[string]bool)
+ for _, f := range pubFiles {
+ result.publicDirnames[f.Name()] = true
+ }
+
+ result.err = wg.Wait()
+
+ return
+
+}
+
+func TestFixURL(t *testing.T) {
+ type data struct {
+ TestName string
+ CLIBaseURL string
+ CfgBaseURL string
+ AppendPort bool
+ Port int
+ Result string
+ }
+ tests := []data{
+ {"Basic http localhost", "", "http://foo.com", true, 1313, "http://localhost:1313/"},
+ {"Basic https production, http localhost", "", "https://foo.com", true, 1313, "http://localhost:1313/"},
+ {"Basic subdir", "", "http://foo.com/bar", true, 1313, "http://localhost:1313/bar/"},
+ {"Basic production", "http://foo.com", "http://foo.com", false, 80, "http://foo.com/"},
+ {"Production subdir", "http://foo.com/bar", "http://foo.com/bar", false, 80, "http://foo.com/bar/"},
+ {"No http", "", "foo.com", true, 1313, "//localhost:1313/"},
+ {"Override configured port", "", "foo.com:2020", true, 1313, "//localhost:1313/"},
+ {"No http production", "foo.com", "foo.com", false, 80, "//foo.com/"},
+ {"No http production with port", "foo.com", "foo.com", true, 2020, "//foo.com:2020/"},
+ {"No config", "", "", true, 1313, "//localhost:1313/"},
+ }
+
+ for _, test := range tests {
+ t.Run(test.TestName, func(t *testing.T) {
+ b := newCommandsBuilder()
+ s := b.newServerCmd()
+ v := config.NewWithTestDefaults()
+ baseURL := test.CLIBaseURL
+ v.Set("baseURL", test.CfgBaseURL)
+ s.serverAppend = test.AppendPort
+ s.serverPort = test.Port
+ result, err := s.fixURL(v, baseURL, s.serverPort)
+ if err != nil {
+ t.Errorf("Unexpected error %s", err)
+ }
+ if result != test.Result {
+ t.Errorf("Expected %q, got %q", test.Result, result)
+ }
+ })
+ }
+}
+
+func TestRemoveErrorPrefixFromLog(t *testing.T) {
+ c := qt.New(t)
+ content := `ERROR 2018/10/07 13:11:12 Error while rendering "home": template: _default/baseof.html:4:3: executing "main" at <partial "logo" .>: error calling partial: template: partials/logo.html:5:84: executing "partials/logo.html" at <$resized.AHeight>: can't evaluate field AHeight in type *resource.Image
+ERROR 2018/10/07 13:11:12 Rebuild failed: logged 1 error(s)
+`
+
+ withoutError := removeErrorPrefixFromLog(content)
+
+ c.Assert(strings.Contains(withoutError, "ERROR"), qt.Equals, false)
+}
+
+func isWindowsCI() bool {
+ return runtime.GOOS == "windows" && os.Getenv("CI") != ""
+}
diff --git a/commands/static_syncer.go b/commands/static_syncer.go
new file mode 100644
index 000000000..b97c4df7a
--- /dev/null
+++ b/commands/static_syncer.go
@@ -0,0 +1,129 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "os"
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/fsync"
+)
+
+type staticSyncer struct {
+ c *commandeer
+}
+
+func newStaticSyncer(c *commandeer) (*staticSyncer, error) {
+ return &staticSyncer{c: c}, nil
+}
+
+func (s *staticSyncer) isStatic(filename string) bool {
+ return s.c.hugo().BaseFs.SourceFilesystems.IsStatic(filename)
+}
+
+func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
+ c := s.c
+
+ syncFn := func(sourceFs *filesystems.SourceFilesystem) (uint64, error) {
+ publishDir := helpers.FilePathSeparator
+
+ if sourceFs.PublishFolder != "" {
+ publishDir = filepath.Join(publishDir, sourceFs.PublishFolder)
+ }
+
+ syncer := fsync.NewSyncer()
+ syncer.NoTimes = c.Cfg.GetBool("noTimes")
+ syncer.NoChmod = c.Cfg.GetBool("noChmod")
+ syncer.ChmodFilter = chmodFilter
+ syncer.SrcFs = sourceFs.Fs
+ syncer.DestFs = c.Fs.PublishDir
+ if c.renderStaticToDisk {
+ syncer.DestFs = c.Fs.PublishDirStatic
+ }
+
+ // prevent spamming the log on changes
+ logger := helpers.NewDistinctErrorLogger()
+
+ for _, ev := range staticEvents {
+ // Due to our approach of layering both directories and the content's rendered output
+ // into one we can't accurately remove a file not in one of the source directories.
+ // If a file is in the local static dir and also in the theme static dir and we remove
+ // it from one of those locations we expect it to still exist in the destination
+ //
+ // If Hugo generates a file (from the content dir) over a static file
+ // the content generated file should take precedence.
+ //
+ // Because we are now watching and handling individual events it is possible that a static
+ // event that occupies the same path as a content generated file will take precedence
+ // until a regeneration of the content takes place.
+ //
+ // Hugo assumes that these cases are very rare and will permit this bad behavior
+ // The alternative is to track every single file and which pipeline rendered it
+ // and then to handle conflict resolution on every event.
+
+ fromPath := ev.Name
+
+ relPath, found := sourceFs.MakePathRelative(fromPath)
+
+ if !found {
+ // Not member of this virtual host.
+ continue
+ }
+
+ // Remove || rename is harder and will require an assumption.
+ // Hugo takes the following approach:
+ // If the static file exists in any of the static source directories after this event
+ // Hugo will re-sync it.
+ // If it does not exist in all of the static directories Hugo will remove it.
+ //
+ // This assumes that Hugo has not generated content on top of a static file and then removed
+ // the source of that static file. In this case Hugo will incorrectly remove that file
+ // from the published directory.
+ if ev.Op&fsnotify.Rename == fsnotify.Rename || ev.Op&fsnotify.Remove == fsnotify.Remove {
+ if _, err := sourceFs.Fs.Stat(relPath); os.IsNotExist(err) {
+ // If file doesn't exist in any static dir, remove it
+ logger.Println("File no longer exists in static dir, removing", relPath)
+ _ = c.Fs.PublishDirStatic.RemoveAll(relPath)
+
+ } else if err == nil {
+ // If file still exists, sync it
+ logger.Println("Syncing", relPath, "to", publishDir)
+
+ if err := syncer.Sync(relPath, relPath); err != nil {
+ c.logger.Errorln(err)
+ }
+ } else {
+ c.logger.Errorln(err)
+ }
+
+ continue
+ }
+
+ // For all other event operations Hugo will sync static.
+ logger.Println("Syncing", relPath, "to", publishDir)
+ if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil {
+ c.logger.Errorln(err)
+ }
+ }
+
+ return 0, nil
+ }
+
+ _, err := c.doWithPublishDirs(syncFn)
+ return err
+}
diff --git a/commands/version.go b/commands/version.go
new file mode 100644
index 000000000..287950a2d
--- /dev/null
+++ b/commands/version.go
@@ -0,0 +1,44 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/spf13/cobra"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var _ cmder = (*versionCmd)(nil)
+
+type versionCmd struct {
+ *baseCmd
+}
+
+func newVersionCmd() *versionCmd {
+ return &versionCmd{
+ newBaseCmd(&cobra.Command{
+ Use: "version",
+ Short: "Print the version number of Hugo",
+ Long: `All software has versions. This is Hugo's.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ printHugoVersion()
+ return nil
+ },
+ }),
+ }
+}
+
+func printHugoVersion() {
+ jww.FEEDBACK.Println(hugo.BuildVersionString())
+}
diff --git a/common/collections/append.go b/common/collections/append.go
new file mode 100644
index 000000000..a9c14c1aa
--- /dev/null
+++ b/common/collections/append.go
@@ -0,0 +1,112 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// Append appends the values in from to the slice to and returns the resulting slice.
+// If from has exactly one element and that element is a slice of the same type as to,
+// its elements are appended.
+func Append(to any, from ...any) (any, error) {
+ tov, toIsNil := indirect(reflect.ValueOf(to))
+
+ toIsNil = toIsNil || to == nil
+ var tot reflect.Type
+
+ if !toIsNil {
+ if tov.Kind() != reflect.Slice {
+ return nil, fmt.Errorf("expected a slice, got %T", to)
+ }
+
+ tot = tov.Type().Elem()
+ toIsNil = tov.Len() == 0
+
+ if len(from) == 1 {
+ fromv := reflect.ValueOf(from[0])
+ if fromv.Kind() == reflect.Slice {
+ if toIsNil {
+ // If we get nil []string, we just return the []string
+ return from[0], nil
+ }
+
+ fromt := reflect.TypeOf(from[0]).Elem()
+
+ // If we get []string []string, we append the from slice to to
+ if tot == fromt {
+ return reflect.AppendSlice(tov, fromv).Interface(), nil
+ } else if !fromt.AssignableTo(tot) {
+ // Fall back to a []interface{} slice.
+ return appendToInterfaceSliceFromValues(tov, fromv)
+ }
+ }
+ }
+ }
+
+ if toIsNil {
+ return Slice(from...), nil
+ }
+
+ for _, f := range from {
+ fv := reflect.ValueOf(f)
+ if !fv.Type().AssignableTo(tot) {
+ // Fall back to a []interface{} slice.
+ tov, _ := indirect(reflect.ValueOf(to))
+ return appendToInterfaceSlice(tov, from...)
+ }
+ tov = reflect.Append(tov, fv)
+ }
+
+ return tov.Interface(), nil
+}
+
+func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, error) {
+ var tos []any
+
+ for _, slice := range []reflect.Value{slice1, slice2} {
+ for i := 0; i < slice.Len(); i++ {
+ tos = append(tos, slice.Index(i).Interface())
+ }
+ }
+
+ return tos, nil
+}
+
+func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) {
+ var tos []any
+
+ for i := 0; i < tov.Len(); i++ {
+ tos = append(tos, tov.Index(i).Interface())
+ }
+
+ tos = append(tos, from...)
+
+ return tos, nil
+}
+
+// indirect is borrowed from the Go stdlib: 'text/template/exec.go'
+// TODO(bep) consolidate
+func indirect(v reflect.Value) (rv reflect.Value, isNil bool) {
+ for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() {
+ if v.IsNil() {
+ return v, true
+ }
+ if v.Kind() == reflect.Interface && v.NumMethod() > 0 {
+ break
+ }
+ }
+ return v, false
+}
diff --git a/common/collections/append_test.go b/common/collections/append_test.go
new file mode 100644
index 000000000..6df32fee6
--- /dev/null
+++ b/common/collections/append_test.go
@@ -0,0 +1,90 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "html/template"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestAppend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ start any
+ addend []any
+ expected any
+ }{
+ {[]string{"a", "b"}, []any{"c"}, []string{"a", "b", "c"}},
+ {[]string{"a", "b"}, []any{"c", "d", "e"}, []string{"a", "b", "c", "d", "e"}},
+ {[]string{"a", "b"}, []any{[]string{"c", "d", "e"}}, []string{"a", "b", "c", "d", "e"}},
+ {[]string{"a"}, []any{"b", template.HTML("c")}, []any{"a", "b", template.HTML("c")}},
+ {nil, []any{"a", "b"}, []string{"a", "b"}},
+ {nil, []any{nil}, []any{nil}},
+ {[]any{}, []any{[]string{"c", "d", "e"}}, []string{"c", "d", "e"}},
+ {
+ tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
+ []any{&tstSlicer{"c"}},
+ tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}},
+ },
+ {
+ &tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
+ []any{&tstSlicer{"c"}},
+ tstSlicers{
+ &tstSlicer{"a"},
+ &tstSlicer{"b"},
+ &tstSlicer{"c"},
+ },
+ },
+ {
+ testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
+ []any{&tstSlicerIn1{"c"}},
+ testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}},
+ },
+ // https://github.com/gohugoio/hugo/issues/5361
+ {
+ []string{"a", "b"},
+ []any{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
+ []any{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}},
+ },
+ {
+ []string{"a", "b"},
+ []any{&tstSlicer{"a"}},
+ []any{"a", "b", &tstSlicer{"a"}},
+ },
+ // Errors
+ {"", []any{[]string{"a", "b"}}, false},
+ // No string concatenation.
+ {
+ "ab",
+ []any{"c"},
+ false,
+ },
+ } {
+
+ result, err := Append(test.start, test.addend...)
+
+ if b, ok := test.expected.(bool); ok && !b {
+
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.DeepEquals, test.expected)
+ }
+}
diff --git a/common/collections/collections.go b/common/collections/collections.go
new file mode 100644
index 000000000..0b46abee9
--- /dev/null
+++ b/common/collections/collections.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package collections contains common Hugo functionality related to collection
+// handling.
+package collections
+
+// Grouper defines a very generic way to group items by a given key.
+type Grouper interface {
+ Group(key any, items any) (any, error)
+}
diff --git a/common/collections/order.go b/common/collections/order.go
new file mode 100644
index 000000000..4bdc3b4ac
--- /dev/null
+++ b/common/collections/order.go
@@ -0,0 +1,20 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+type Order interface {
+ // Ordinal is a zero-based ordinal that represents the order of an object
+ // in a collection.
+ Ordinal() int
+}
diff --git a/common/collections/slice.go b/common/collections/slice.go
new file mode 100644
index 000000000..51cb6ec1f
--- /dev/null
+++ b/common/collections/slice.go
@@ -0,0 +1,76 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "reflect"
+)
+
+// Slicer defines a very generic way to create a typed slice. This is used
+// in collections.Slice template func to get types such as Pages, PageGroups etc.
+// instead of the less useful []interface{}.
+type Slicer interface {
+ Slice(items any) (any, error)
+}
+
+// Slice returns a slice of all passed arguments.
+func Slice(args ...any) any {
+ if len(args) == 0 {
+ return args
+ }
+
+ first := args[0]
+ firstType := reflect.TypeOf(first)
+
+ if firstType == nil {
+ return args
+ }
+
+ if g, ok := first.(Slicer); ok {
+ v, err := g.Slice(args)
+ if err == nil {
+ return v
+ }
+
+ // If Slice fails, the items are not of the same type and
+ // []interface{} is the best we can do.
+ return args
+ }
+
+ if len(args) > 1 {
+ // This can be a mix of types.
+ for i := 1; i < len(args); i++ {
+ if firstType != reflect.TypeOf(args[i]) {
+ // []interface{} is the best we can do
+ return args
+ }
+ }
+ }
+
+ slice := reflect.MakeSlice(reflect.SliceOf(firstType), len(args), len(args))
+ for i, arg := range args {
+ slice.Index(i).Set(reflect.ValueOf(arg))
+ }
+ return slice.Interface()
+}
+
+// StringSliceToInterfaceSlice converts ss to []interface{}.
+func StringSliceToInterfaceSlice(ss []string) []any {
+ result := make([]any, len(ss))
+ for i, s := range ss {
+ result[i] = s
+ }
+ return result
+
+}
diff --git a/common/collections/slice_test.go b/common/collections/slice_test.go
new file mode 100644
index 000000000..8e6553994
--- /dev/null
+++ b/common/collections/slice_test.go
@@ -0,0 +1,124 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var (
+ _ Slicer = (*tstSlicer)(nil)
+ _ Slicer = (*tstSlicerIn1)(nil)
+ _ Slicer = (*tstSlicerIn2)(nil)
+ _ testSlicerInterface = (*tstSlicerIn1)(nil)
+ _ testSlicerInterface = (*tstSlicerIn1)(nil)
+)
+
+type testSlicerInterface interface {
+ Name() string
+}
+
+type testSlicerInterfaces []testSlicerInterface
+
+type tstSlicerIn1 struct {
+ TheName string
+}
+
+type tstSlicerIn2 struct {
+ TheName string
+}
+
+type tstSlicer struct {
+ TheName string
+}
+
+func (p *tstSlicerIn1) Slice(in any) (any, error) {
+ items := in.([]any)
+ result := make(testSlicerInterfaces, len(items))
+ for i, v := range items {
+ switch vv := v.(type) {
+ case testSlicerInterface:
+ result[i] = vv
+ default:
+ return nil, errors.New("invalid type")
+ }
+ }
+ return result, nil
+}
+
+func (p *tstSlicerIn2) Slice(in any) (any, error) {
+ items := in.([]any)
+ result := make(testSlicerInterfaces, len(items))
+ for i, v := range items {
+ switch vv := v.(type) {
+ case testSlicerInterface:
+ result[i] = vv
+ default:
+ return nil, errors.New("invalid type")
+ }
+ }
+ return result, nil
+}
+
+func (p *tstSlicerIn1) Name() string {
+ return p.TheName
+}
+
+func (p *tstSlicerIn2) Name() string {
+ return p.TheName
+}
+
+func (p *tstSlicer) Slice(in any) (any, error) {
+ items := in.([]any)
+ result := make(tstSlicers, len(items))
+ for i, v := range items {
+ switch vv := v.(type) {
+ case *tstSlicer:
+ result[i] = vv
+ default:
+ return nil, errors.New("invalid type")
+ }
+ }
+ return result, nil
+}
+
+type tstSlicers []*tstSlicer
+
+func TestSlice(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for i, test := range []struct {
+ args []any
+ expected any
+ }{
+ {[]any{"a", "b"}, []string{"a", "b"}},
+ {[]any{&tstSlicer{"a"}, &tstSlicer{"b"}}, tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
+ {[]any{&tstSlicer{"a"}, "b"}, []any{&tstSlicer{"a"}, "b"}},
+ {[]any{}, []any{}},
+ {[]any{nil}, []any{nil}},
+ {[]any{5, "b"}, []any{5, "b"}},
+ {[]any{&tstSlicerIn1{"a"}, &tstSlicerIn2{"b"}}, testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn2{"b"}}},
+ {[]any{&tstSlicerIn1{"a"}, &tstSlicer{"b"}}, []any{&tstSlicerIn1{"a"}, &tstSlicer{"b"}}},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test.args)
+
+ result := Slice(test.args...)
+
+ c.Assert(test.expected, qt.DeepEquals, result, errMsg)
+ }
+}
diff --git a/common/constants/constants.go b/common/constants/constants.go
new file mode 100644
index 000000000..e416d4ad3
--- /dev/null
+++ b/common/constants/constants.go
@@ -0,0 +1,25 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package constants
+
+// Error IDs.
+// Do not change these values.
+const (
+ ErrIDAmbigousDisableKindTaxonomy = "error-disable-taxonomy"
+ ErrIDAmbigousOutputKindTaxonomy = "error-output-taxonomy"
+
+ // IDs for remote errors in tpl/data.
+ ErrRemoteGetJSON = "error-remote-getjson"
+ ErrRemoteGetCSV = "error-remote-getcsv"
+)
diff --git a/common/herrors/error_locator.go b/common/herrors/error_locator.go
new file mode 100644
index 000000000..18c21e51b
--- /dev/null
+++ b/common/herrors/error_locator.go
@@ -0,0 +1,168 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package herrors contains common Hugo errors and error related utilities.
+package herrors
+
+import (
+ "io"
+ "io/ioutil"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/text"
+)
+
+// LineMatcher contains the elements used to match an error to a line
+type LineMatcher struct {
+ Position text.Position
+ Error error
+
+ LineNumber int
+ Offset int
+ Line string
+}
+
+// LineMatcherFn is used to match a line with an error.
+// It returns the column number, or 0 if the line was found but the column could not be determined. Returns -1 if no line matched.
+type LineMatcherFn func(m LineMatcher) int
+
+// SimpleLineMatcher simply matches by line number.
+var SimpleLineMatcher = func(m LineMatcher) int {
+ if m.Position.LineNumber == m.LineNumber {
+ // We found the line, but don't know the column.
+ return 0
+ }
+ return -1
+}
+
+// NopLineMatcher is a matcher that always returns 1.
+// This will effectively give line 1, column 1.
+var NopLineMatcher = func(m LineMatcher) int {
+ return 1
+}
+
+// OffsetMatcher is a line matcher that matches by offset.
+var OffsetMatcher = func(m LineMatcher) int {
+ if m.Offset+len(m.Line) >= m.Position.Offset {
+ // We found the line, but return 0 to signal that we want to determine
+ // the column from the error.
+ return 0
+ }
+ return -1
+}
+
+// ErrorContext contains contextual information about an error. This will
+// typically be the lines surrounding some problem in a file.
+type ErrorContext struct {
+
+ // If there is a match, this will contain the matched line plus up to 2 lines before and after.
+ // Will be empty if no match.
+ Lines []string
+
+ // The position of the error in the Lines above. 0 based.
+ LinesPos int
+
+ // The position of the content in the file. Note that this may be different from the error's position set
+ // in FileError.
+ Position text.Position
+
+ // The lexer to use for syntax highlighting.
+ // https://gohugo.io/content-management/syntax-highlighting/#list-of-chroma-highlighting-languages
+ ChromaLexer string
+}
+
+func chromaLexerFromType(fileType string) string {
+ switch fileType {
+ case "html", "htm":
+ return "go-html-template"
+ }
+ return fileType
+}
+
+func extNoDelimiter(filename string) string {
+ return strings.TrimPrefix(filepath.Ext(filename), ".")
+}
+
+func chromaLexerFromFilename(filename string) string {
+ if strings.Contains(filename, "layouts") {
+ return "go-html-template"
+ }
+
+ ext := extNoDelimiter(filename)
+ return chromaLexerFromType(ext)
+}
+
+func locateErrorInString(src string, matcher LineMatcherFn) *ErrorContext {
+ return locateError(strings.NewReader(src), &fileError{}, matcher)
+}
+
+func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext {
+ if le == nil {
+ panic("must provide an error")
+ }
+
+ ectx := &ErrorContext{LinesPos: -1, Position: text.Position{Offset: -1}}
+
+ b, err := ioutil.ReadAll(r)
+ if err != nil {
+ return ectx
+ }
+
+ lines := strings.Split(string(b), "\n")
+
+ lineNo := 0
+ posBytes := 0
+
+ for li, line := range lines {
+ lineNo = li + 1
+ m := LineMatcher{
+ Position: le.Position(),
+ Error: le,
+ LineNumber: lineNo,
+ Offset: posBytes,
+ Line: line,
+ }
+ v := matches(m)
+ if ectx.LinesPos == -1 && v != -1 {
+ ectx.Position.LineNumber = lineNo
+ ectx.Position.ColumnNumber = v
+ break
+ }
+
+ posBytes += len(line)
+ }
+
+ if ectx.Position.LineNumber > 0 {
+ low := ectx.Position.LineNumber - 3
+ if low < 0 {
+ low = 0
+ }
+
+ if ectx.Position.LineNumber > 2 {
+ ectx.LinesPos = 2
+ } else {
+ ectx.LinesPos = ectx.Position.LineNumber - 1
+ }
+
+ high := ectx.Position.LineNumber + 2
+ if high > len(lines) {
+ high = len(lines)
+ }
+
+ ectx.Lines = lines[low:high]
+
+ }
+
+ return ectx
+}
diff --git a/common/herrors/error_locator_test.go b/common/herrors/error_locator_test.go
new file mode 100644
index 000000000..6135657d8
--- /dev/null
+++ b/common/herrors/error_locator_test.go
@@ -0,0 +1,152 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package herrors contains common Hugo errors and error related utilities.
+package herrors
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestErrorLocator(t *testing.T) {
+ c := qt.New(t)
+
+ lineMatcher := func(m LineMatcher) int {
+ if strings.Contains(m.Line, "THEONE") {
+ return 1
+ }
+ return -1
+ }
+
+ lines := `LINE 1
+LINE 2
+LINE 3
+LINE 4
+This is THEONE
+LINE 6
+LINE 7
+LINE 8
+`
+
+ location := locateErrorInString(lines, lineMatcher)
+ pos := location.Position
+ c.Assert(location.Lines, qt.DeepEquals, []string{"LINE 3", "LINE 4", "This is THEONE", "LINE 6", "LINE 7"})
+
+ c.Assert(pos.LineNumber, qt.Equals, 5)
+ c.Assert(location.LinesPos, qt.Equals, 2)
+
+ locate := func(s string, m LineMatcherFn) *ErrorContext {
+ ctx := locateErrorInString(s, m)
+ return ctx
+ }
+
+ c.Assert(locate(`This is THEONE`, lineMatcher).Lines, qt.DeepEquals, []string{"This is THEONE"})
+
+ location = locateErrorInString(`L1
+This is THEONE
+L2
+`, lineMatcher)
+ pos = location.Position
+ c.Assert(pos.LineNumber, qt.Equals, 2)
+ c.Assert(location.LinesPos, qt.Equals, 1)
+ c.Assert(location.Lines, qt.DeepEquals, []string{"L1", "This is THEONE", "L2", ""})
+
+ location = locate(`This is THEONE
+L2
+`, lineMatcher)
+ c.Assert(location.LinesPos, qt.Equals, 0)
+ c.Assert(location.Lines, qt.DeepEquals, []string{"This is THEONE", "L2", ""})
+
+ location = locate(`L1
+This THEONE
+`, lineMatcher)
+ c.Assert(location.Lines, qt.DeepEquals, []string{"L1", "This THEONE", ""})
+ c.Assert(location.LinesPos, qt.Equals, 1)
+
+ location = locate(`L1
+L2
+This THEONE
+`, lineMatcher)
+ c.Assert(location.Lines, qt.DeepEquals, []string{"L1", "L2", "This THEONE", ""})
+ c.Assert(location.LinesPos, qt.Equals, 2)
+
+ location = locateErrorInString("NO MATCH", lineMatcher)
+ pos = location.Position
+ c.Assert(pos.LineNumber, qt.Equals, 0)
+ c.Assert(location.LinesPos, qt.Equals, -1)
+ c.Assert(len(location.Lines), qt.Equals, 0)
+
+ lineMatcher = func(m LineMatcher) int {
+ if m.LineNumber == 6 {
+ return 1
+ }
+ return -1
+ }
+
+ location = locateErrorInString(`A
+B
+C
+D
+E
+F
+G
+H
+I
+J`, lineMatcher)
+ pos = location.Position
+
+ c.Assert(location.Lines, qt.DeepEquals, []string{"D", "E", "F", "G", "H"})
+ c.Assert(pos.LineNumber, qt.Equals, 6)
+ c.Assert(location.LinesPos, qt.Equals, 2)
+
+ // Test match EOF
+ lineMatcher = func(m LineMatcher) int {
+ if m.LineNumber == 4 {
+ return 1
+ }
+ return -1
+ }
+
+ location = locateErrorInString(`A
+B
+C
+`, lineMatcher)
+
+ pos = location.Position
+
+ c.Assert(location.Lines, qt.DeepEquals, []string{"B", "C", ""})
+ c.Assert(pos.LineNumber, qt.Equals, 4)
+ c.Assert(location.LinesPos, qt.Equals, 2)
+
+ offsetMatcher := func(m LineMatcher) int {
+ if m.Offset == 1 {
+ return 1
+ }
+ return -1
+ }
+
+ location = locateErrorInString(`A
+B
+C
+D
+E`, offsetMatcher)
+
+ pos = location.Position
+
+ c.Assert(location.Lines, qt.DeepEquals, []string{"A", "B", "C", "D"})
+ c.Assert(pos.LineNumber, qt.Equals, 2)
+ c.Assert(location.LinesPos, qt.Equals, 1)
+}
diff --git a/common/herrors/errors.go b/common/herrors/errors.go
new file mode 100644
index 000000000..6ce908853
--- /dev/null
+++ b/common/herrors/errors.go
@@ -0,0 +1,71 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package herrors contains common Hugo errors and error related utilities.
+package herrors
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "runtime"
+ "runtime/debug"
+ "strconv"
+)
+
+// PrintStackTrace prints the current stacktrace to w.
+func PrintStackTrace(w io.Writer) {
+ buf := make([]byte, 1<<16)
+ runtime.Stack(buf, true)
+ fmt.Fprintf(w, "%s", buf)
+}
+
+// ErrorSender is a (typically non-blocking) error handler.
+type ErrorSender interface {
+ SendError(err error)
+}
+
+// Recover is a helper function that can be used to capture panics.
+// Put this at the top of a method/function that crashes in a template:
+// defer herrors.Recover()
+func Recover(args ...any) {
+ if r := recover(); r != nil {
+ fmt.Println("ERR:", r)
+ args = append(args, "stacktrace from panic: \n"+string(debug.Stack()), "\n")
+ fmt.Println(args...)
+ }
+}
+
+// Get the current goroutine id. Used only for debugging.
+func GetGID() uint64 {
+ b := make([]byte, 64)
+ b = b[:runtime.Stack(b, false)]
+ b = bytes.TrimPrefix(b, []byte("goroutine "))
+ b = b[:bytes.IndexByte(b, ' ')]
+ n, _ := strconv.ParseUint(string(b), 10, 64)
+ return n
+}
+
+// ErrFeatureNotAvailable denotes that a feature is unavailable.
+//
+// We will, at least to begin with, make some Hugo features (SCSS with libsass) optional,
+// and this error is used to signal those situations.
+var ErrFeatureNotAvailable = errors.New("this feature is not available in your current Hugo version, see https://goo.gl/YMrWcn for more information")
+
+// Must panics if err != nil.
+func Must(err error) {
+ if err != nil {
+ panic(err)
+ }
+}
diff --git a/common/herrors/file_error.go b/common/herrors/file_error.go
new file mode 100644
index 000000000..e6baaf6e3
--- /dev/null
+++ b/common/herrors/file_error.go
@@ -0,0 +1,394 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package herrors
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "path/filepath"
+
+ "github.com/bep/godartsass"
+ "github.com/bep/golibsass/libsass/libsasserrors"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/pelletier/go-toml/v2"
+ "github.com/spf13/afero"
+ "github.com/tdewolff/parse/v2"
+
+ "errors"
+)
+
+// FileError represents an error when handling a file: Parsing a config file,
+// execute a template etc.
+type FileError interface {
+ error
+
+ // ErrorContext holds some context information about the error.
+ ErrorContext() *ErrorContext
+
+ text.Positioner
+
+ // UpdatePosition updates the position of the error.
+ UpdatePosition(pos text.Position) FileError
+
+ // UpdateContent updates the error with a new ErrorContext from the content of the file.
+ UpdateContent(r io.Reader, linematcher LineMatcherFn) FileError
+}
+
+// Unwrapper can unwrap errors created with fmt.Errorf.
+type Unwrapper interface {
+ Unwrap() error
+}
+
+var (
+ _ FileError = (*fileError)(nil)
+ _ Unwrapper = (*fileError)(nil)
+)
+
+func (fe *fileError) UpdatePosition(pos text.Position) FileError {
+ oldFilename := fe.Position().Filename
+ if pos.Filename != "" && fe.fileType == "" {
+ _, fe.fileType = paths.FileAndExtNoDelimiter(filepath.Clean(pos.Filename))
+ }
+ if pos.Filename == "" {
+ pos.Filename = oldFilename
+ }
+ fe.position = pos
+ return fe
+}
+
+func (fe *fileError) UpdateContent(r io.Reader, linematcher LineMatcherFn) FileError {
+ if linematcher == nil {
+ linematcher = SimpleLineMatcher
+ }
+
+ var (
+ posle = fe.position
+ ectx *ErrorContext
+ )
+
+ if posle.LineNumber <= 1 && posle.Offset > 0 {
+ // Try to locate the line number from the content if offset is set.
+ ectx = locateError(r, fe, func(m LineMatcher) int {
+ if posle.Offset >= m.Offset && posle.Offset < m.Offset+len(m.Line) {
+ lno := posle.LineNumber - m.Position.LineNumber + m.LineNumber
+ m.Position = text.Position{LineNumber: lno}
+ return linematcher(m)
+ }
+ return -1
+ })
+ } else {
+ ectx = locateError(r, fe, linematcher)
+ }
+
+ if ectx.ChromaLexer == "" {
+ if fe.fileType != "" {
+ ectx.ChromaLexer = chromaLexerFromType(fe.fileType)
+ } else {
+ ectx.ChromaLexer = chromaLexerFromFilename(fe.Position().Filename)
+ }
+ }
+
+ fe.errorContext = ectx
+
+ if ectx.Position.LineNumber > 0 {
+ fe.position.LineNumber = ectx.Position.LineNumber
+ }
+
+ if ectx.Position.ColumnNumber > 0 {
+ fe.position.ColumnNumber = ectx.Position.ColumnNumber
+ }
+
+ return fe
+
+}
+
+type fileError struct {
+ position text.Position
+ errorContext *ErrorContext
+
+ fileType string
+
+ cause error
+}
+
+func (e *fileError) ErrorContext() *ErrorContext {
+ return e.errorContext
+}
+
+// Position returns the text position of this error.
+func (e fileError) Position() text.Position {
+ return e.position
+}
+
+func (e *fileError) Error() string {
+ return fmt.Sprintf("%s: %s", e.position, e.causeString())
+}
+
+func (e *fileError) causeString() string {
+ if e.cause == nil {
+ return ""
+ }
+ switch v := e.cause.(type) {
+ // Avoid repeating the file info in the error message.
+ case godartsass.SassError:
+ return v.Message
+ case libsasserrors.Error:
+ return v.Message
+ default:
+ return v.Error()
+ }
+}
+
+func (e *fileError) Unwrap() error {
+ return e.cause
+}
+
+// NewFileError creates a new FileError that wraps err.
+// It will try to extract the filename and line number from err.
+func NewFileError(err error) FileError {
+ // Filetype is used to determine the Chroma lexer to use.
+ fileType, pos := extractFileTypePos(err)
+ return &fileError{cause: err, fileType: fileType, position: pos}
+}
+
+// NewFileErrorFromName creates a new FileError that wraps err.
+// The value for name should identify the file, the best
+// being the full filename to the file on disk.
+func NewFileErrorFromName(err error, name string) FileError {
+ // Filetype is used to determine the Chroma lexer to use.
+ fileType, pos := extractFileTypePos(err)
+ pos.Filename = name
+ if fileType == "" {
+ _, fileType = paths.FileAndExtNoDelimiter(filepath.Clean(name))
+ }
+
+ return &fileError{cause: err, fileType: fileType, position: pos}
+
+}
+
+// NewFileErrorFromPos will use the filename and line number from pos to create a new FileError, wrapping err.
+func NewFileErrorFromPos(err error, pos text.Position) FileError {
+ // Filetype is used to determine the Chroma lexer to use.
+ fileType, _ := extractFileTypePos(err)
+ if fileType == "" {
+ _, fileType = paths.FileAndExtNoDelimiter(filepath.Clean(pos.Filename))
+ }
+ return &fileError{cause: err, fileType: fileType, position: pos}
+
+}
+
+func NewFileErrorFromFileInErr(err error, fs afero.Fs, linematcher LineMatcherFn) FileError {
+ fe := NewFileError(err)
+ pos := fe.Position()
+ if pos.Filename == "" {
+ return fe
+ }
+
+ f, realFilename, err2 := openFile(pos.Filename, fs)
+ if err2 != nil {
+ return fe
+ }
+
+ pos.Filename = realFilename
+ defer f.Close()
+ return fe.UpdateContent(f, linematcher)
+}
+
+func NewFileErrorFromFileInPos(err error, pos text.Position, fs afero.Fs, linematcher LineMatcherFn) FileError {
+ if err == nil {
+ panic("err is nil")
+ }
+ f, realFilename, err2 := openFile(pos.Filename, fs)
+ if err2 != nil {
+ return NewFileErrorFromPos(err, pos)
+ }
+ pos.Filename = realFilename
+ defer f.Close()
+ return NewFileErrorFromPos(err, pos).UpdateContent(f, linematcher)
+}
+
+// NewFileErrorFromFile is a convenience method to create a new FileError from a file.
+func NewFileErrorFromFile(err error, filename string, fs afero.Fs, linematcher LineMatcherFn) FileError {
+ if err == nil {
+ panic("err is nil")
+ }
+ f, realFilename, err2 := openFile(filename, fs)
+ if err2 != nil {
+ return NewFileErrorFromName(err, realFilename)
+ }
+ defer f.Close()
+ return NewFileErrorFromName(err, realFilename).UpdateContent(f, linematcher)
+}
+
+func openFile(filename string, fs afero.Fs) (afero.File, string, error) {
+ realFilename := filename
+
+ // We want the most specific filename possible in the error message.
+ fi, err2 := fs.Stat(filename)
+ if err2 == nil {
+ if s, ok := fi.(interface {
+ Filename() string
+ }); ok {
+ realFilename = s.Filename()
+ }
+
+ }
+
+ f, err2 := fs.Open(filename)
+ if err2 != nil {
+ return nil, realFilename, err2
+ }
+
+ return f, realFilename, nil
+}
+
+// Cause returns the underlying error or itself if it does not implement Unwrap.
+func Cause(err error) error {
+ if u := errors.Unwrap(err); u != nil {
+ return u
+ }
+ return err
+}
+
+func extractFileTypePos(err error) (string, text.Position) {
+ err = Cause(err)
+
+ var fileType string
+
+ // LibSass, DartSass
+ if pos := extractPosition(err); pos.LineNumber > 0 || pos.Offset > 0 {
+ _, fileType = paths.FileAndExtNoDelimiter(pos.Filename)
+ return fileType, pos
+ }
+
+ // Default to line 1 col 1 if we don't find any better.
+ pos := text.Position{
+ Offset: -1,
+ LineNumber: 1,
+ ColumnNumber: 1,
+ }
+
+ // JSON errors.
+ offset, typ := extractOffsetAndType(err)
+ if fileType == "" {
+ fileType = typ
+ }
+
+ if offset >= 0 {
+ pos.Offset = offset
+ }
+
+ // The error type from the minifier contains line number and column number.
+ if line, col := exctractLineNumberAndColumnNumber(err); line >= 0 {
+ pos.LineNumber = line
+ pos.ColumnNumber = col
+ return fileType, pos
+ }
+
+ // Look in the error message for the line number.
+ for _, handle := range lineNumberExtractors {
+ lno, col := handle(err)
+ if lno > 0 {
+ pos.ColumnNumber = col
+ pos.LineNumber = lno
+ break
+ }
+ }
+
+ if fileType == "" && pos.Filename != "" {
+ _, fileType = paths.FileAndExtNoDelimiter(pos.Filename)
+ }
+
+ return fileType, pos
+}
+
+// UnwrapFileError tries to unwrap a FileError from err.
+// It returns nil if this is not possible.
+func UnwrapFileError(err error) FileError {
+ for err != nil {
+ switch v := err.(type) {
+ case FileError:
+ return v
+ default:
+ err = errors.Unwrap(err)
+ }
+ }
+ return nil
+}
+
+// UnwrapFileErrors tries to unwrap all FileError.
+func UnwrapFileErrors(err error) []FileError {
+ var errs []FileError
+ for err != nil {
+ if v, ok := err.(FileError); ok {
+ errs = append(errs, v)
+ }
+ err = errors.Unwrap(err)
+ }
+ return errs
+}
+
+// UnwrapFileErrorsWithErrorContext tries to unwrap all FileError in err that has an ErrorContext.
+func UnwrapFileErrorsWithErrorContext(err error) []FileError {
+ var errs []FileError
+ for err != nil {
+ if v, ok := err.(FileError); ok && v.ErrorContext() != nil {
+ errs = append(errs, v)
+ }
+ err = errors.Unwrap(err)
+ }
+ return errs
+}
+
+func extractOffsetAndType(e error) (int, string) {
+ switch v := e.(type) {
+ case *json.UnmarshalTypeError:
+ return int(v.Offset), "json"
+ case *json.SyntaxError:
+ return int(v.Offset), "json"
+ default:
+ return -1, ""
+ }
+}
+
+func exctractLineNumberAndColumnNumber(e error) (int, int) {
+ switch v := e.(type) {
+ case *parse.Error:
+ return v.Line, v.Column
+ case *toml.DecodeError:
+ return v.Position()
+
+ }
+
+ return -1, -1
+}
+
+func extractPosition(e error) (pos text.Position) {
+ switch v := e.(type) {
+ case godartsass.SassError:
+ span := v.Span
+ start := span.Start
+ filename, _ := paths.UrlToFilename(span.Url)
+ pos.Filename = filename
+ pos.Offset = start.Offset
+ pos.ColumnNumber = start.Column
+ case libsasserrors.Error:
+ pos.Filename = v.File
+ pos.LineNumber = v.Line
+ pos.ColumnNumber = v.Column
+ }
+ return
+}
diff --git a/common/herrors/file_error_test.go b/common/herrors/file_error_test.go
new file mode 100644
index 000000000..0b260a255
--- /dev/null
+++ b/common/herrors/file_error_test.go
@@ -0,0 +1,82 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package herrors
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/text"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestNewFileError(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ fe := NewFileErrorFromName(errors.New("bar"), "foo.html")
+ c.Assert(fe.Error(), qt.Equals, `"foo.html:1:1": bar`)
+
+ lines := ""
+ for i := 1; i <= 100; i++ {
+ lines += fmt.Sprintf("line %d\n", i)
+ }
+
+ fe.UpdatePosition(text.Position{LineNumber: 32, ColumnNumber: 2})
+ c.Assert(fe.Error(), qt.Equals, `"foo.html:32:2": bar`)
+ fe.UpdatePosition(text.Position{LineNumber: 0, ColumnNumber: 0, Offset: 212})
+ fe.UpdateContent(strings.NewReader(lines), nil)
+ c.Assert(fe.Error(), qt.Equals, `"foo.html:32:0": bar`)
+ errorContext := fe.ErrorContext()
+ c.Assert(errorContext, qt.IsNotNil)
+ c.Assert(errorContext.Lines, qt.DeepEquals, []string{"line 30", "line 31", "line 32", "line 33", "line 34"})
+ c.Assert(errorContext.LinesPos, qt.Equals, 2)
+ c.Assert(errorContext.ChromaLexer, qt.Equals, "go-html-template")
+
+}
+
+func TestNewFileErrorExtractFromMessage(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ for i, test := range []struct {
+ in error
+ offset int
+ lineNumber int
+ columnNumber int
+ }{
+ {errors.New("no line number for you"), 0, 1, 1},
+ {errors.New(`template: _default/single.html:4:15: executing "_default/single.html" at <.Titles>: can't evaluate field Titles in type *hugolib.PageOutput`), 0, 4, 15},
+ {errors.New("parse failed: template: _default/bundle-resource-meta.html:11: unexpected in operand"), 0, 11, 1},
+ {errors.New(`failed:: template: _default/bundle-resource-meta.html:2:7: executing "main" at <.Titles>`), 0, 2, 7},
+ {errors.New(`failed to load translations: (6, 7): was expecting token =, but got "g" instead`), 0, 6, 7},
+ {errors.New(`execute of template failed: template: index.html:2:5: executing "index.html" at <partial "foo.html" .>: error calling partial: "/layouts/partials/foo.html:3:6": execute of template failed: template: partials/foo.html:3:6: executing "partials/foo.html" at <.ThisDoesNotExist>: can't evaluate field ThisDoesNotExist in type *hugolib.pageStat`), 0, 2, 5},
+ } {
+
+ got := NewFileErrorFromName(test.in, "test.txt")
+
+ errMsg := qt.Commentf("[%d][%T]", i, got)
+
+ pos := got.Position()
+ c.Assert(pos.LineNumber, qt.Equals, test.lineNumber, errMsg)
+ c.Assert(pos.ColumnNumber, qt.Equals, test.columnNumber, errMsg)
+ c.Assert(errors.Unwrap(got), qt.Not(qt.IsNil))
+ }
+}
diff --git a/common/herrors/line_number_extractors.go b/common/herrors/line_number_extractors.go
new file mode 100644
index 000000000..f70a2691f
--- /dev/null
+++ b/common/herrors/line_number_extractors.go
@@ -0,0 +1,63 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package herrors
+
+import (
+ "regexp"
+ "strconv"
+)
+
+var lineNumberExtractors = []lineNumberExtractor{
+ // Template/shortcode parse errors
+ newLineNumberErrHandlerFromRegexp(`:(\d+):(\d*):`),
+ newLineNumberErrHandlerFromRegexp(`:(\d+):`),
+
+ // YAML parse errors
+ newLineNumberErrHandlerFromRegexp(`line (\d+):`),
+
+ // i18n bundle errors
+ newLineNumberErrHandlerFromRegexp(`\((\d+),\s(\d*)`),
+}
+
+type lineNumberExtractor func(e error) (int, int)
+
+func newLineNumberErrHandlerFromRegexp(expression string) lineNumberExtractor {
+ re := regexp.MustCompile(expression)
+ return extractLineNo(re)
+}
+
+func extractLineNo(re *regexp.Regexp) lineNumberExtractor {
+ return func(e error) (int, int) {
+ if e == nil {
+ panic("no error")
+ }
+ col := 1
+ s := e.Error()
+ m := re.FindStringSubmatch(s)
+ if len(m) >= 2 {
+ lno, _ := strconv.Atoi(m[1])
+ if len(m) > 2 {
+ col, _ = strconv.Atoi(m[2])
+ }
+
+ if col <= 0 {
+ col = 1
+ }
+
+ return lno, col
+ }
+
+ return 0, col
+ }
+}
diff --git a/common/hexec/exec.go b/common/hexec/exec.go
new file mode 100644
index 000000000..7a9fdd938
--- /dev/null
+++ b/common/hexec/exec.go
@@ -0,0 +1,276 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hexec
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "regexp"
+ "strings"
+
+ "os"
+ "os/exec"
+
+ "github.com/cli/safeexec"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/config/security"
+)
+
+var WithDir = func(dir string) func(c *commandeer) {
+ return func(c *commandeer) {
+ c.dir = dir
+ }
+}
+
+var WithContext = func(ctx context.Context) func(c *commandeer) {
+ return func(c *commandeer) {
+ c.ctx = ctx
+ }
+}
+
+var WithStdout = func(w io.Writer) func(c *commandeer) {
+ return func(c *commandeer) {
+ c.stdout = w
+ }
+}
+
+var WithStderr = func(w io.Writer) func(c *commandeer) {
+ return func(c *commandeer) {
+ c.stderr = w
+ }
+}
+
+var WithStdin = func(r io.Reader) func(c *commandeer) {
+ return func(c *commandeer) {
+ c.stdin = r
+ }
+}
+
+var WithEnviron = func(env []string) func(c *commandeer) {
+ return func(c *commandeer) {
+ setOrAppend := func(s string) {
+ k1, _ := config.SplitEnvVar(s)
+ var found bool
+ for i, v := range c.env {
+ k2, _ := config.SplitEnvVar(v)
+ if k1 == k2 {
+ found = true
+ c.env[i] = s
+ }
+ }
+
+ if !found {
+ c.env = append(c.env, s)
+ }
+ }
+
+ for _, s := range env {
+ setOrAppend(s)
+ }
+ }
+}
+
+// New creates a new Exec using the provided security config.
+func New(cfg security.Config) *Exec {
+ var baseEnviron []string
+ for _, v := range os.Environ() {
+ k, _ := config.SplitEnvVar(v)
+ if cfg.Exec.OsEnv.Accept(k) {
+ baseEnviron = append(baseEnviron, v)
+ }
+ }
+
+ return &Exec{
+ sc: cfg,
+ baseEnviron: baseEnviron,
+ }
+}
+
+// IsNotFound reports whether this is an error about a binary not found.
+func IsNotFound(err error) bool {
+ var notFoundErr *NotFoundError
+ return errors.As(err, &notFoundErr)
+}
+
+// SafeCommand is a wrapper around os/exec Command which uses a LookPath
+// implementation that does not search in current directory before looking in PATH.
+// See https://github.com/cli/safeexec and the linked issues.
+func SafeCommand(name string, arg ...string) (*exec.Cmd, error) {
+ bin, err := safeexec.LookPath(name)
+ if err != nil {
+ return nil, err
+ }
+
+ return exec.Command(bin, arg...), nil
+}
+
+// Exec enforces a security policy for commands run via os/exec.
+type Exec struct {
+ sc security.Config
+
+ // os.Environ filtered by the Exec.OsEnv allow list.
+ baseEnviron []string
+}
+
+// New will fail if name is not allowed according to the configured security policy.
+// Else a configured Runner will be returned ready to be Run.
+func (e *Exec) New(name string, arg ...any) (Runner, error) {
+ if err := e.sc.CheckAllowedExec(name); err != nil {
+ return nil, err
+ }
+
+ env := make([]string, len(e.baseEnviron))
+ copy(env, e.baseEnviron)
+
+ cm := &commandeer{
+ name: name,
+ env: env,
+ }
+
+ return cm.command(arg...)
+
+}
+
+// Npx is a convenience method to create a Runner running npx --no-install <name> <args>.
+func (e *Exec) Npx(name string, arg ...any) (Runner, error) {
+ arg = append(arg[:0], append([]any{"--no-install", name}, arg[0:]...)...)
+ return e.New("npx", arg...)
+}
+
+// Sec returns the security policies this Exec is configured with.
+func (e *Exec) Sec() security.Config {
+ return e.sc
+}
+
+type NotFoundError struct {
+ name string
+}
+
+func (e *NotFoundError) Error() string {
+ return fmt.Sprintf("binary with name %q not found", e.name)
+}
+
+// Runner wraps a *os.Cmd.
+type Runner interface {
+ Run() error
+ StdinPipe() (io.WriteCloser, error)
+}
+
+type cmdWrapper struct {
+ name string
+ c *exec.Cmd
+
+ outerr *bytes.Buffer
+}
+
+var notFoundRe = regexp.MustCompile(`(?s)not found:|could not determine executable`)
+
+func (c *cmdWrapper) Run() error {
+ err := c.c.Run()
+ if err == nil {
+ return nil
+ }
+ if notFoundRe.MatchString(c.outerr.String()) {
+ return &NotFoundError{name: c.name}
+ }
+ return fmt.Errorf("failed to execute binary %q with args %v: %s", c.name, c.c.Args[1:], c.outerr.String())
+}
+
+func (c *cmdWrapper) StdinPipe() (io.WriteCloser, error) {
+ return c.c.StdinPipe()
+}
+
+type commandeer struct {
+ stdout io.Writer
+ stderr io.Writer
+ stdin io.Reader
+ dir string
+ ctx context.Context
+
+ name string
+ env []string
+}
+
+func (c *commandeer) command(arg ...any) (*cmdWrapper, error) {
+ if c == nil {
+ return nil, nil
+ }
+
+ var args []string
+ for _, a := range arg {
+ switch v := a.(type) {
+ case string:
+ args = append(args, v)
+ case func(*commandeer):
+ v(c)
+ default:
+ return nil, fmt.Errorf("invalid argument to command: %T", a)
+ }
+ }
+
+ bin, err := safeexec.LookPath(c.name)
+ if err != nil {
+ return nil, &NotFoundError{
+ name: c.name,
+ }
+ }
+
+ outerr := &bytes.Buffer{}
+ if c.stderr == nil {
+ c.stderr = outerr
+ } else {
+ c.stderr = io.MultiWriter(c.stderr, outerr)
+ }
+
+ var cmd *exec.Cmd
+
+ if c.ctx != nil {
+ cmd = exec.CommandContext(c.ctx, bin, args...)
+ } else {
+ cmd = exec.Command(bin, args...)
+ }
+
+ cmd.Stdin = c.stdin
+ cmd.Stderr = c.stderr
+ cmd.Stdout = c.stdout
+ cmd.Env = c.env
+ cmd.Dir = c.dir
+
+ return &cmdWrapper{outerr: outerr, c: cmd, name: c.name}, nil
+}
+
+// InPath reports whether binaryName is in $PATH.
+func InPath(binaryName string) bool {
+ if strings.Contains(binaryName, "/") {
+ panic("binary name should not contain any slash")
+ }
+ _, err := safeexec.LookPath(binaryName)
+ return err == nil
+}
+
+// LookPath finds the path to binaryName in $PATH.
+// Returns "" if not found.
+func LookPath(binaryName string) string {
+ if strings.Contains(binaryName, "/") {
+ panic("binary name should not contain any slash")
+ }
+ s, err := safeexec.LookPath(binaryName)
+ if err != nil {
+ return ""
+ }
+ return s
+}
diff --git a/common/hreflect/helpers.go b/common/hreflect/helpers.go
new file mode 100644
index 000000000..1b7e5acf7
--- /dev/null
+++ b/common/hreflect/helpers.go
@@ -0,0 +1,222 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+// Some functions in this file (see comments) is based on the Go source code,
+// copyright The Go Authors and governed by a BSD-style license.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package hreflect contains reflect helpers.
+package hreflect
+
+import (
+ "context"
+ "reflect"
+ "sync"
+ "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/types"
+)
+
+// TODO(bep) replace the private versions in /tpl with these.
+// IsNumber returns whether the given kind is a number.
+func IsNumber(kind reflect.Kind) bool {
+ return IsInt(kind) || IsUint(kind) || IsFloat(kind)
+}
+
// IsInt returns whether the given kind is an int.
func IsInt(kind reflect.Kind) bool {
	return kind == reflect.Int ||
		kind == reflect.Int8 ||
		kind == reflect.Int16 ||
		kind == reflect.Int32 ||
		kind == reflect.Int64
}
+
// IsUint returns whether the given kind is an uint.
// Note that reflect.Uintptr is deliberately not included.
func IsUint(kind reflect.Kind) bool {
	return kind == reflect.Uint ||
		kind == reflect.Uint8 ||
		kind == reflect.Uint16 ||
		kind == reflect.Uint32 ||
		kind == reflect.Uint64
}
+
// IsFloat returns whether the given kind is a float.
func IsFloat(kind reflect.Kind) bool {
	return kind == reflect.Float32 || kind == reflect.Float64
}
+
+// IsTruthful returns whether in represents a truthful value.
+// See IsTruthfulValue
+func IsTruthful(in any) bool {
+ switch v := in.(type) {
+ case reflect.Value:
+ return IsTruthfulValue(v)
+ default:
+ return IsTruthfulValue(reflect.ValueOf(in))
+ }
+}
+
+var zeroType = reflect.TypeOf((*types.Zeroer)(nil)).Elem()
+
+// IsTruthfulValue returns whether the given value has a meaningful truth value.
+// This is based on template.IsTrue in Go's stdlib, but also considers
+// IsZero and any interface value will be unwrapped before it's considered
+// for truthfulness.
+//
+// Based on:
+// https://github.com/golang/go/blob/178a2c42254166cffed1b25fb1d3c7a5727cada6/src/text/template/exec.go#L306
+func IsTruthfulValue(val reflect.Value) (truth bool) {
+ val = indirectInterface(val)
+
+ if !val.IsValid() {
+ // Something like var x interface{}, never set. It's a form of nil.
+ return
+ }
+
+ if val.Type().Implements(zeroType) {
+ return !val.Interface().(types.Zeroer).IsZero()
+ }
+
+ switch val.Kind() {
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ truth = val.Len() > 0
+ case reflect.Bool:
+ truth = val.Bool()
+ case reflect.Complex64, reflect.Complex128:
+ truth = val.Complex() != 0
+ case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface:
+ truth = !val.IsNil()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ truth = val.Int() != 0
+ case reflect.Float32, reflect.Float64:
+ truth = val.Float() != 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ truth = val.Uint() != 0
+ case reflect.Struct:
+ truth = true // Struct values are always true.
+ default:
+ return
+ }
+
+ return
+}
+
// methodKey identifies a (type, method name) pair in the method cache.
type methodKey struct {
	typ  reflect.Type
	name string
}

// methods caches the result of (reflect.Type).MethodByName lookups.
type methods struct {
	sync.RWMutex
	cache map[methodKey]int
}

var methodCache = &methods{cache: make(map[methodKey]int)}

// GetMethodByName is the same as reflect.Value.MethodByName, but it caches the
// type lookup.
func GetMethodByName(v reflect.Value, name string) reflect.Value {
	index := GetMethodIndexByName(v.Type(), name)
	if index == -1 {
		return reflect.Value{}
	}
	return v.Method(index)
}

// GetMethodIndexByName returns the index of the method with the given name, or
// -1 if no such method exists.
func GetMethodIndexByName(tp reflect.Type, name string) int {
	k := methodKey{tp, name}

	methodCache.RLock()
	index, found := methodCache.cache[k]
	methodCache.RUnlock()
	if found {
		return index
	}

	methodCache.Lock()
	defer methodCache.Unlock()

	// Re-check under the write lock in case another goroutine populated
	// the entry while we were waiting; this also avoids a redundant
	// MethodByName call.
	if index, found := methodCache.cache[k]; found {
		return index
	}

	index = -1
	if m, ok := tp.MethodByName(name); ok {
		index = m.Index
	}
	methodCache.cache[k] = index

	return index
}
+
+var (
+ timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
+ asTimeProviderType = reflect.TypeOf((*htime.AsTimeProvider)(nil)).Elem()
+)
+
+// IsTime returns whether tp is a time.Time type or if it can be converted into one
+// in ToTime.
+func IsTime(tp reflect.Type) bool {
+ if tp == timeType {
+ return true
+ }
+
+ if tp.Implements(asTimeProviderType) {
+ return true
+ }
+ return false
+}
+
+// AsTime returns v as a time.Time if possible.
+// The given location is only used if the value implements AsTimeProvider (e.g. go-toml local).
+// A zero Time and false is returned if this isn't possible.
+// Note that this function does not accept string dates.
+func AsTime(v reflect.Value, loc *time.Location) (time.Time, bool) {
+ if v.Kind() == reflect.Interface {
+ return AsTime(v.Elem(), loc)
+ }
+
+ if v.Type() == timeType {
+ return v.Interface().(time.Time), true
+ }
+
+ if v.Type().Implements(asTimeProviderType) {
+ return v.Interface().(htime.AsTimeProvider).AsTime(loc), true
+ }
+
+ return time.Time{}, false
+}
+
// indirectInterface unwraps one level of interface, returning the invalid
// reflect.Value for a nil interface and v itself for non-interface values.
// Based on: https://github.com/golang/go/blob/178a2c42254166cffed1b25fb1d3c7a5727cada6/src/text/template/exec.go#L931
func indirectInterface(v reflect.Value) reflect.Value {
	switch {
	case v.Kind() != reflect.Interface:
		return v
	case v.IsNil():
		return reflect.Value{}
	default:
		return v.Elem()
	}
}
+
+// ContextInterface is the reflect.Type of the context.Context interface.
+var ContextInterface = reflect.TypeOf((*context.Context)(nil)).Elem()
diff --git a/common/hreflect/helpers_test.go b/common/hreflect/helpers_test.go
new file mode 100644
index 000000000..d16b9b9b3
--- /dev/null
+++ b/common/hreflect/helpers_test.go
@@ -0,0 +1,86 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hreflect
+
+import (
+ "reflect"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestIsTruthful(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(IsTruthful(true), qt.Equals, true)
+ c.Assert(IsTruthful(false), qt.Equals, false)
+ c.Assert(IsTruthful(time.Now()), qt.Equals, true)
+ c.Assert(IsTruthful(time.Time{}), qt.Equals, false)
+}
+
+func TestGetMethodByName(t *testing.T) {
+ c := qt.New(t)
+ v := reflect.ValueOf(&testStruct{})
+ tp := v.Type()
+
+ c.Assert(GetMethodIndexByName(tp, "Method1"), qt.Equals, 0)
+ c.Assert(GetMethodIndexByName(tp, "Method3"), qt.Equals, 2)
+ c.Assert(GetMethodIndexByName(tp, "Foo"), qt.Equals, -1)
+}
+
+func BenchmarkIsTruthFul(b *testing.B) {
+ v := reflect.ValueOf("Hugo")
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ if !IsTruthfulValue(v) {
+ b.Fatal("not truthful")
+ }
+ }
+}
+
+type testStruct struct{}
+
+func (t *testStruct) Method1() string {
+ return "Hugo"
+}
+
+func (t *testStruct) Method2() string {
+ return "Hugo"
+}
+
+func (t *testStruct) Method3() string {
+ return "Hugo"
+}
+
+func (t *testStruct) Method4() string {
+ return "Hugo"
+}
+
+func (t *testStruct) Method5() string {
+ return "Hugo"
+}
+
+func BenchmarkGetMethodByName(b *testing.B) {
+ v := reflect.ValueOf(&testStruct{})
+ methods := []string{"Method1", "Method2", "Method3", "Method4", "Method5"}
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ for _, method := range methods {
+ _ = GetMethodByName(v, method)
+ }
+ }
+}
diff --git a/common/htime/time.go b/common/htime/time.go
new file mode 100644
index 000000000..d30ecf7e1
--- /dev/null
+++ b/common/htime/time.go
@@ -0,0 +1,165 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package htime
+
+import (
+ "strings"
+ "time"
+
+ "github.com/bep/clock"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/locales"
+)
+
+var (
+ longDayNames = []string{
+ "Sunday",
+ "Monday",
+ "Tuesday",
+ "Wednesday",
+ "Thursday",
+ "Friday",
+ "Saturday",
+ }
+
+ shortDayNames = []string{
+ "Sun",
+ "Mon",
+ "Tue",
+ "Wed",
+ "Thu",
+ "Fri",
+ "Sat",
+ }
+
+ shortMonthNames = []string{
+ "Jan",
+ "Feb",
+ "Mar",
+ "Apr",
+ "May",
+ "Jun",
+ "Jul",
+ "Aug",
+ "Sep",
+ "Oct",
+ "Nov",
+ "Dec",
+ }
+
+ longMonthNames = []string{
+ "January",
+ "February",
+ "March",
+ "April",
+ "May",
+ "June",
+ "July",
+ "August",
+ "September",
+ "October",
+ "November",
+ "December",
+ }
+
+ Clock = clock.System()
+)
+
+// NewTimeFormatter creates a new TimeFormatter for the given
+// locales.Translator. It panics if ltr is nil.
+func NewTimeFormatter(ltr locales.Translator) TimeFormatter {
+ if ltr == nil {
+ panic("must provide a locales.Translator")
+ }
+ return TimeFormatter{
+ ltr: ltr,
+ }
+}
+
+// TimeFormatter is locale aware.
+type TimeFormatter struct {
+ // ltr translates month/weekday names and CLDR date/time formats.
+ ltr locales.Translator
+}
+
+func (f TimeFormatter) Format(t time.Time, layout string) string {
+ if layout == "" {
+ return ""
+ }
+
+ if layout[0] == ':' {
+ // It may be one of Hugo's custom layouts.
+ switch strings.ToLower(layout[1:]) {
+ case "date_full":
+ return f.ltr.FmtDateFull(t)
+ case "date_long":
+ return f.ltr.FmtDateLong(t)
+ case "date_medium":
+ return f.ltr.FmtDateMedium(t)
+ case "date_short":
+ return f.ltr.FmtDateShort(t)
+ case "time_full":
+ return f.ltr.FmtTimeFull(t)
+ case "time_long":
+ return f.ltr.FmtTimeLong(t)
+ case "time_medium":
+ return f.ltr.FmtTimeMedium(t)
+ case "time_short":
+ return f.ltr.FmtTimeShort(t)
+ }
+ }
+
+ s := t.Format(layout)
+
+ monthIdx := t.Month() - 1 // Month() starts at 1.
+ dayIdx := t.Weekday()
+
+ s = strings.ReplaceAll(s, longMonthNames[monthIdx], f.ltr.MonthWide(t.Month()))
+ if !strings.Contains(s, f.ltr.MonthWide(t.Month())) {
+ s = strings.ReplaceAll(s, shortMonthNames[monthIdx], f.ltr.MonthAbbreviated(t.Month()))
+ }
+ s = strings.ReplaceAll(s, longDayNames[dayIdx], f.ltr.WeekdayWide(t.Weekday()))
+ if !strings.Contains(s, f.ltr.WeekdayWide(t.Weekday())) {
+ s = strings.ReplaceAll(s, shortDayNames[dayIdx], f.ltr.WeekdayAbbreviated(t.Weekday()))
+ }
+
+ return s
+}
+
+// ToTimeInDefaultLocationE converts i to a time.Time in the given location.
+// AsTimeProvider values (go-toml local dates/datetimes) resolve directly;
+// everything else is delegated to cast.ToTimeInDefaultLocationE.
+func ToTimeInDefaultLocationE(i any, location *time.Location) (tim time.Time, err error) {
+ switch vv := i.(type) {
+ case AsTimeProvider:
+ return vv.AsTime(location), nil
+ // issue #8895
+ // datetimes parsed by `go-toml` have empty zone name
+ // convert back them into string and use `cast`
+ // TODO(bep) add tests, make sure we really need this.
+ case time.Time:
+ i = vv.Format(time.RFC3339)
+ }
+ return cast.ToTimeInDefaultLocationE(i, location)
+}
+
+// Now returns time.Now() or time value based on the `clock` flag.
+// Use this function to fake time inside hugo.
+func Now() time.Time {
+ return Clock.Now()
+}
+
+// Since returns the time elapsed since t, using Clock so time can be
+// faked inside Hugo (see Now).
+func Since(t time.Time) time.Duration {
+ return Clock.Since(t)
+}
+
+// AsTimeProvider is implemented by go-toml's LocalDate and LocalDateTime.
+type AsTimeProvider interface {
+ AsTime(zone *time.Location) time.Time
+}
diff --git a/common/htime/time_test.go b/common/htime/time_test.go
new file mode 100644
index 000000000..6cb323409
--- /dev/null
+++ b/common/htime/time_test.go
@@ -0,0 +1,148 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package htime
+
+import (
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+ translators "github.com/gohugoio/localescompressed"
+)
+
+func TestTimeFormatter(t *testing.T) {
+ c := qt.New(t)
+
+ june06, _ := time.Parse("2006-Jan-02", "2018-Jun-06")
+ june06 = june06.Add(7777 * time.Second)
+
+ jan06, _ := time.Parse("2006-Jan-02", "2018-Jan-06")
+ jan06 = jan06.Add(32 * time.Second)
+
+ mondayNovemberFirst, _ := time.Parse("2006-Jan-02", "2021-11-01")
+ mondayNovemberFirst = mondayNovemberFirst.Add(33 * time.Second)
+
+ c.Run("Norsk nynorsk", func(c *qt.C) {
+ f := NewTimeFormatter(translators.GetTranslator("nn"))
+
+ c.Assert(f.Format(june06, "Monday Jan 2 2006"), qt.Equals, "onsdag juni 6 2018")
+ c.Assert(f.Format(june06, "Mon January 2 2006"), qt.Equals, "on. juni 6 2018")
+ c.Assert(f.Format(june06, "Mon Mon"), qt.Equals, "on. on.")
+ })
+
+ c.Run("Custom layouts Norsk nynorsk", func(c *qt.C) {
+ f := NewTimeFormatter(translators.GetTranslator("nn"))
+
+ c.Assert(f.Format(june06, ":date_full"), qt.Equals, "onsdag 6. juni 2018")
+ c.Assert(f.Format(june06, ":date_long"), qt.Equals, "6. juni 2018")
+ c.Assert(f.Format(june06, ":date_medium"), qt.Equals, "6. juni 2018")
+ c.Assert(f.Format(june06, ":date_short"), qt.Equals, "06.06.2018")
+
+ c.Assert(f.Format(june06, ":time_full"), qt.Equals, "kl. 02:09:37 UTC")
+ c.Assert(f.Format(june06, ":time_long"), qt.Equals, "02:09:37 UTC")
+ c.Assert(f.Format(june06, ":time_medium"), qt.Equals, "02:09:37")
+ c.Assert(f.Format(june06, ":time_short"), qt.Equals, "02:09")
+
+ })
+
+ c.Run("Custom layouts English", func(c *qt.C) {
+ f := NewTimeFormatter(translators.GetTranslator("en"))
+
+ c.Assert(f.Format(june06, ":date_full"), qt.Equals, "Wednesday, June 6, 2018")
+ c.Assert(f.Format(june06, ":date_long"), qt.Equals, "June 6, 2018")
+ c.Assert(f.Format(june06, ":date_medium"), qt.Equals, "Jun 6, 2018")
+ c.Assert(f.Format(june06, ":date_short"), qt.Equals, "6/6/18")
+
+ c.Assert(f.Format(june06, ":time_full"), qt.Equals, "2:09:37 am UTC")
+ c.Assert(f.Format(june06, ":time_long"), qt.Equals, "2:09:37 am UTC")
+ c.Assert(f.Format(june06, ":time_medium"), qt.Equals, "2:09:37 am")
+ c.Assert(f.Format(june06, ":time_short"), qt.Equals, "2:09 am")
+
+ })
+
+ c.Run("English", func(c *qt.C) {
+ f := NewTimeFormatter(translators.GetTranslator("en"))
+
+ c.Assert(f.Format(june06, "Monday Jan 2 2006"), qt.Equals, "Wednesday Jun 6 2018")
+ c.Assert(f.Format(june06, "Mon January 2 2006"), qt.Equals, "Wed June 6 2018")
+ c.Assert(f.Format(june06, "Mon Mon"), qt.Equals, "Wed Wed")
+ })
+
+ c.Run("Weekdays German", func(c *qt.C) {
+ tr := translators.GetTranslator("de")
+ f := NewTimeFormatter(tr)
+
+ // Issue #9107
+ for i, weekDayWideGerman := range []string{"Montag", "Dienstag", "Mittwoch", "Donnerstag", "Freitag", "Samstag", "Sonntag"} {
+ date := mondayNovemberFirst.Add(time.Duration(i*24) * time.Hour)
+ c.Assert(tr.WeekdayWide(date.Weekday()), qt.Equals, weekDayWideGerman)
+ c.Assert(f.Format(date, "Monday"), qt.Equals, weekDayWideGerman)
+ }
+
+ for i, weekDayAbbreviatedGerman := range []string{"Mo.", "Di.", "Mi.", "Do.", "Fr.", "Sa.", "So."} {
+ date := mondayNovemberFirst.Add(time.Duration(i*24) * time.Hour)
+ c.Assert(tr.WeekdayAbbreviated(date.Weekday()), qt.Equals, weekDayAbbreviatedGerman)
+ c.Assert(f.Format(date, "Mon"), qt.Equals, weekDayAbbreviatedGerman)
+ }
+ })
+
+ c.Run("Months German", func(c *qt.C) {
+ tr := translators.GetTranslator("de")
+ f := NewTimeFormatter(tr)
+
+ // Issue #9107
+ for i, monthWideNorway := range []string{"Januar", "Februar", "März", "April", "Mai", "Juni", "Juli"} {
+ date := jan06.Add(time.Duration(i*24*31) * time.Hour)
+ c.Assert(tr.MonthWide(date.Month()), qt.Equals, monthWideNorway)
+ c.Assert(f.Format(date, "January"), qt.Equals, monthWideNorway)
+ }
+
+ })
+
+}
+
+func BenchmarkTimeFormatter(b *testing.B) {
+ june06, _ := time.Parse("2006-Jan-02", "2018-Jun-06")
+
+ b.Run("Native", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ got := june06.Format("Monday Jan 2 2006")
+ if got != "Wednesday Jun 6 2018" {
+ b.Fatalf("invalid format, got %q", got)
+ }
+ }
+ })
+
+ b.Run("Localized", func(b *testing.B) {
+ f := NewTimeFormatter(translators.GetTranslator("nn"))
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ got := f.Format(june06, "Monday Jan 2 2006")
+ if got != "onsdag juni 6 2018" {
+ b.Fatalf("invalid format, got %q", got)
+ }
+ }
+ })
+
+ b.Run("Localized Custom", func(b *testing.B) {
+ f := NewTimeFormatter(translators.GetTranslator("nn"))
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ got := f.Format(june06, ":date_medium")
+ if got != "6. juni 2018" {
+ b.Fatalf("invalid format, got %q", got)
+ }
+ }
+ })
+}
diff --git a/common/hugio/copy.go b/common/hugio/copy.go
new file mode 100644
index 000000000..7c52f8723
--- /dev/null
+++ b/common/hugio/copy.go
@@ -0,0 +1,90 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugio
+
+import (
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+
+ "github.com/spf13/afero"
+)
+
+// CopyFile copies a file.
+func CopyFile(fs afero.Fs, from, to string) error {
+ sf, err := fs.Open(from)
+ if err != nil {
+ return err
+ }
+ defer sf.Close()
+ df, err := fs.Create(to)
+ if err != nil {
+ return err
+ }
+ defer df.Close()
+ _, err = io.Copy(df, sf)
+ if err != nil {
+ return err
+ }
+ si, err := fs.Stat(from)
+ if err != nil {
+ err = fs.Chmod(to, si.Mode())
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// CopyDir copies a directory.
+func CopyDir(fs afero.Fs, from, to string, shouldCopy func(filename string) bool) error {
+ fi, err := os.Stat(from)
+ if err != nil {
+ return err
+ }
+
+ if !fi.IsDir() {
+ return fmt.Errorf("%q is not a directory", from)
+ }
+
+ err = fs.MkdirAll(to, 0777) // before umask
+ if err != nil {
+ return err
+ }
+
+ entries, _ := ioutil.ReadDir(from)
+ for _, entry := range entries {
+ fromFilename := filepath.Join(from, entry.Name())
+ toFilename := filepath.Join(to, entry.Name())
+ if entry.IsDir() {
+ if shouldCopy != nil && !shouldCopy(fromFilename) {
+ continue
+ }
+ if err := CopyDir(fs, fromFilename, toFilename, shouldCopy); err != nil {
+ return err
+ }
+ } else {
+ if err := CopyFile(fs, fromFilename, toFilename); err != nil {
+ return err
+ }
+ }
+
+ }
+
+ return nil
+}
diff --git a/common/hugio/readers.go b/common/hugio/readers.go
new file mode 100644
index 000000000..60bd97992
--- /dev/null
+++ b/common/hugio/readers.go
@@ -0,0 +1,59 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugio
+
+import (
+ "io"
+ "strings"
+)
+
+// ReadSeeker wraps io.Reader and io.Seeker.
+type ReadSeeker interface {
+ io.Reader
+ io.Seeker
+}
+
+// ReadSeekCloser is implemented by afero.File. We use this as the common type for
+// content in Resource objects, even for strings.
+type ReadSeekCloser interface {
+ ReadSeeker
+ io.Closer
+}
+
+// ReadSeekCloserProvider provides a ReadSeekCloser.
+type ReadSeekCloserProvider interface {
+ ReadSeekCloser() (ReadSeekCloser, error)
+}
+
+// ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close.
+// TODO(bep) rename this and similar to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense.
+type ReadSeekerNoOpCloser struct {
+ ReadSeeker
+}
+
+// Close does nothing.
+func (r ReadSeekerNoOpCloser) Close() error {
+ return nil
+}
+
+// NewReadSeekerNoOpCloser creates a new ReadSeekerNoOpCloser with the given ReadSeeker.
+func NewReadSeekerNoOpCloser(r ReadSeeker) ReadSeekerNoOpCloser {
+ return ReadSeekerNoOpCloser{r}
+}
+
+// NewReadSeekerNoOpCloserFromString uses strings.NewReader to create a new ReadSeekerNoOpCloser
+// from the given string.
+func NewReadSeekerNoOpCloserFromString(content string) ReadSeekerNoOpCloser {
+ return ReadSeekerNoOpCloser{strings.NewReader(content)}
+}
diff --git a/common/hugio/writers.go b/common/hugio/writers.go
new file mode 100644
index 000000000..d8be83a40
--- /dev/null
+++ b/common/hugio/writers.go
@@ -0,0 +1,84 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugio
+
+import (
+ "io"
+ "io/ioutil"
+)
+
+// As implemented by strings.Builder.
+type FlexiWriter interface {
+ io.Writer
+ io.ByteWriter
+ WriteString(s string) (int, error)
+ WriteRune(r rune) (int, error)
+}
+
// multiWriteCloser fans writes out to all writers and closes them all on
// Close.
type multiWriteCloser struct {
	io.Writer
	closers []io.WriteCloser
}

// Close closes every underlying writer and returns the last close error
// encountered, if any.
func (m multiWriteCloser) Close() error {
	var err error
	for _, c := range m.closers {
		// Keep closing the remaining writers even after a failure,
		// remembering the last error. (The condition previously tested
		// err — always nil here — instead of closeErr, so close errors
		// were silently dropped.)
		if closeErr := c.Close(); closeErr != nil {
			err = closeErr
		}
	}
	return err
}

// NewMultiWriteCloser creates a new io.WriteCloser that duplicates its writes to all the
// provided writers.
func NewMultiWriteCloser(writeClosers ...io.WriteCloser) io.WriteCloser {
	writers := make([]io.Writer, len(writeClosers))
	for i, w := range writeClosers {
		writers[i] = w
	}
	return multiWriteCloser{Writer: io.MultiWriter(writers...), closers: writeClosers}
}
+
// ToWriteCloser creates an io.WriteCloser from the given io.Writer.
// If it's not already, one will be created with a Close method that does nothing.
func ToWriteCloser(w io.Writer) io.WriteCloser {
	if wc, ok := w.(io.WriteCloser); ok {
		return wc
	}

	type nopWriteCloser struct {
		io.Writer
		io.Closer
	}
	return nopWriteCloser{w, ioutil.NopCloser(nil)}
}
+
// ToReadCloser creates an io.ReadCloser from the given io.Reader.
// If it's not already, one will be created with a Close method that does nothing.
func ToReadCloser(r io.Reader) io.ReadCloser {
	if rc, ok := r.(io.ReadCloser); ok {
		return rc
	}

	type nopReadCloser struct {
		io.Reader
		io.Closer
	}
	return nopReadCloser{r, ioutil.NopCloser(nil)}
}
diff --git a/common/hugo/hugo.go b/common/hugo/hugo.go
new file mode 100644
index 000000000..54fbd65a3
--- /dev/null
+++ b/common/hugo/hugo.go
@@ -0,0 +1,251 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugo
+
+import (
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "runtime/debug"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+const (
+ EnvironmentDevelopment = "development"
+ EnvironmentProduction = "production"
+)
+
+var (
+ // buildDate allows vendor-specified build date when .git/ is unavailable.
+ buildDate string
+ // vendorInfo contains vendor notes about the current build.
+ vendorInfo string
+)
+
+// Info contains information about the current Hugo environment
+type Info struct {
+ CommitHash string
+ BuildDate string
+
+ // The build environment.
+ // Defaults are "production" (hugo) and "development" (hugo server).
+ // This can also be set by the user.
+ // It can be any string, but it will be all lower case.
+ Environment string
+
+ // version of go that the Hugo binary was built with
+ GoVersion string
+
+ deps []*Dependency
+}
+
+// Version returns the current version as a comparable version string.
+// Note that it is derived from the package-level CurrentVersion, not from
+// any state on the receiver.
+func (i Info) Version() VersionString {
+ return CurrentVersion.Version()
+}
+
+// Generator returns a Hugo meta generator HTML tag.
+func (i Info) Generator() template.HTML {
+ return template.HTML(fmt.Sprintf(`<meta name="generator" content="Hugo %s" />`, CurrentVersion.String()))
+}
+
+// IsProduction reports whether the build environment is "production".
+func (i Info) IsProduction() bool {
+ return i.Environment == EnvironmentProduction
+}
+
+// IsExtended reports whether this binary was built with the "extended"
+// build tag (see vars_extended.go/vars_regular.go).
+func (i Info) IsExtended() bool {
+ return IsExtended
+}
+
+// Deps gets a list of dependencies for this Hugo build.
+func (i Info) Deps() []*Dependency {
+ return i.deps
+}
+
+// NewInfo creates a new Hugo Info object.
+func NewInfo(environment string, deps []*Dependency) Info {
+ if environment == "" {
+ environment = EnvironmentProduction
+ }
+ var (
+ commitHash string
+ buildDate string
+ goVersion string
+ )
+
+ bi := getBuildInfo()
+ if bi != nil {
+ commitHash = bi.Revision
+ buildDate = bi.RevisionTime
+ goVersion = bi.GoVersion
+ }
+
+ return Info{
+ CommitHash: commitHash,
+ BuildDate: buildDate,
+ Environment: environment,
+ deps: deps,
+ GoVersion: goVersion,
+ }
+}
+
+// GetExecEnviron creates and gets the common os/exec environment used in the
+// external programs we interact with via os/exec, e.g. postcss.
+// It sets NODE_PATH, PWD, HUGO_ENVIRONMENT/HUGO_ENV and one HUGO_FILE_<NAME>
+// variable per file found in the JS config folder of fs (when non-nil).
+func GetExecEnviron(workDir string, cfg config.Provider, fs afero.Fs) []string {
+ var env []string
+ nodepath := filepath.Join(workDir, "node_modules")
+ if np := os.Getenv("NODE_PATH"); np != "" {
+ // NOTE(review): this prepends workDir (not workDir/node_modules) to the
+ // inherited NODE_PATH, discarding the Join above — confirm intended.
+ nodepath = workDir + string(os.PathListSeparator) + np
+ }
+ config.SetEnvVars(&env, "NODE_PATH", nodepath)
+ config.SetEnvVars(&env, "PWD", workDir)
+ config.SetEnvVars(&env, "HUGO_ENVIRONMENT", cfg.GetString("environment"))
+ config.SetEnvVars(&env, "HUGO_ENV", cfg.GetString("environment"))
+
+ if fs != nil {
+ fis, err := afero.ReadDir(fs, files.FolderJSConfig)
+ if err == nil {
+ for _, fi := range fis {
+ key := fmt.Sprintf("HUGO_FILE_%s", strings.ReplaceAll(strings.ToUpper(fi.Name()), ".", "_"))
+ value := fi.(hugofs.FileMetaInfo).Meta().Filename
+ config.SetEnvVars(&env, key, value)
+ }
+ }
+ }
+
+ return env
+}
+
+type buildInfo struct {
+ VersionControlSystem string
+ Revision string
+ RevisionTime string
+ Modified bool
+
+ GoOS string
+ GoArch string
+
+ *debug.BuildInfo
+}
+
+var bInfo *buildInfo
+var bInfoInit sync.Once
+
+func getBuildInfo() *buildInfo {
+ bInfoInit.Do(func() {
+ bi, ok := debug.ReadBuildInfo()
+ if !ok {
+ return
+ }
+
+ bInfo = &buildInfo{BuildInfo: bi}
+
+ for _, s := range bInfo.Settings {
+ switch s.Key {
+ case "vcs":
+ bInfo.VersionControlSystem = s.Value
+ case "vcs.revision":
+ bInfo.Revision = s.Value
+ case "vcs.time":
+ bInfo.RevisionTime = s.Value
+ case "vcs.modified":
+ bInfo.Modified = s.Value == "true"
+ case "GOOS":
+ bInfo.GoOS = s.Value
+ case "GOARCH":
+ bInfo.GoArch = s.Value
+ }
+ }
+
+ })
+
+ return bInfo
+}
+
+// GetDependencyList returns a sorted dependency list on the format package="version".
+// It includes both Go dependencies and (a manually maintained) list of C(++) dependencies.
+func GetDependencyList() []string {
+ var deps []string
+
+ formatDep := func(path, version string) string {
+ return fmt.Sprintf("%s=%q", path, version)
+ }
+
+ if IsExtended {
+ deps = append(
+ deps,
+ // TODO(bep) consider adding a DepsNonGo() method to these upstream projects.
+ formatDep("github.com/sass/libsass", "3.6.5"),
+ formatDep("github.com/webmproject/libwebp", "v1.2.0"),
+ )
+ }
+
+ bi := getBuildInfo()
+ if bi == nil {
+ return deps
+ }
+
+ for _, dep := range bi.Deps {
+ deps = append(deps, formatDep(dep.Path, dep.Version))
+ }
+
+ sort.Strings(deps)
+
+ return deps
+}
+
// IsRunningAsTest reports whether we are running as a test, based on the
// presence of a "-test"-prefixed argument in os.Args.
func IsRunningAsTest() bool {
	args := os.Args
	for i := 0; i < len(args); i++ {
		if strings.HasPrefix(args[i], "-test") {
			return true
		}
	}
	return false
}
+
+// Dependency is a single dependency, which can be either a Hugo Module or a local theme.
+type Dependency struct {
+ // Returns the path to this module.
+ // This will either be the module path, e.g. "github.com/gohugoio/myshortcodes",
+ // or the path below your /theme folder, e.g. "mytheme".
+ Path string
+
+ // The module version.
+ Version string
+
+ // Whether this dependency is vendored.
+ Vendor bool
+
+ // Time version was created.
+ Time time.Time
+
+ // In the dependency tree, this is the first module that defines this module
+ // as a dependency.
+ Owner *Dependency
+
+ // Replaced by this dependency.
+ Replace *Dependency
+}
diff --git a/common/hugo/hugo_test.go b/common/hugo/hugo_test.go
new file mode 100644
index 000000000..f2ad0f5c1
--- /dev/null
+++ b/common/hugo/hugo_test.go
@@ -0,0 +1,44 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugo
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestHugoInfo(t *testing.T) {
+ c := qt.New(t)
+
+ hugoInfo := NewInfo("", nil)
+
+ c.Assert(hugoInfo.Version(), qt.Equals, CurrentVersion.Version())
+ c.Assert(fmt.Sprintf("%T", VersionString("")), qt.Equals, fmt.Sprintf("%T", hugoInfo.Version()))
+
+ bi := getBuildInfo()
+ if bi != nil {
+ c.Assert(hugoInfo.CommitHash, qt.Equals, bi.Revision)
+ c.Assert(hugoInfo.BuildDate, qt.Equals, bi.RevisionTime)
+ c.Assert(hugoInfo.GoVersion, qt.Equals, bi.GoVersion)
+ }
+ c.Assert(hugoInfo.Environment, qt.Equals, "production")
+ c.Assert(string(hugoInfo.Generator()), qt.Contains, fmt.Sprintf("Hugo %s", hugoInfo.Version()))
+ c.Assert(hugoInfo.IsProduction(), qt.Equals, true)
+ c.Assert(hugoInfo.IsExtended(), qt.Equals, IsExtended)
+
+ devHugoInfo := NewInfo("development", nil)
+ c.Assert(devHugoInfo.IsProduction(), qt.Equals, false)
+}
diff --git a/common/hugo/vars_extended.go b/common/hugo/vars_extended.go
new file mode 100644
index 000000000..edbaff243
--- /dev/null
+++ b/common/hugo/vars_extended.go
@@ -0,0 +1,19 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build extended
+// +build extended
+
+package hugo
+
+// IsExtended reports whether this binary was built with the "extended"
+// build tag (see the build constraint above).
+var IsExtended = true
diff --git a/common/hugo/vars_regular.go b/common/hugo/vars_regular.go
new file mode 100644
index 000000000..223df4b6c
--- /dev/null
+++ b/common/hugo/vars_regular.go
@@ -0,0 +1,19 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !extended
+// +build !extended
+
+package hugo
+
+// IsExtended reports whether this binary was built with the "extended"
+// build tag; false for this regular (non-extended) build.
+var IsExtended = false
diff --git a/common/hugo/version.go b/common/hugo/version.go
new file mode 100644
index 000000000..3bb6472e2
--- /dev/null
+++ b/common/hugo/version.go
@@ -0,0 +1,301 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugo
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "runtime"
+ "strconv"
+ "strings"
+
+ "github.com/gohugoio/hugo/compare"
+ "github.com/spf13/cast"
+)
+
+// Version represents the Hugo build version.
+// It is rendered via String/Version and ordered via Compare.
+type Version struct {
+ Major int
+
+ Minor int
+
+ // Increment this for bug releases
+ PatchLevel int
+
+ // Suffix is the suffix used in the Hugo version string.
+ // It will be blank for release versions.
+ Suffix string
+}
+
+// Compile-time interface checks.
+var (
+ _ compare.Eqer = (*VersionString)(nil)
+ _ compare.Comparer = (*VersionString)(nil)
+)
+
+// String renders the version, e.g. "0.54.0-DEV" (see the version helper
+// for when the patch level is omitted).
+func (v Version) String() string {
+ return version(v.Major, v.Minor, v.PatchLevel, v.Suffix)
+}
+
+// Version returns the Hugo version.
+func (v Version) Version() VersionString {
+ return VersionString(v.String())
+}
+
+// Compare implements the compare.Comparer interface.
+// It returns 1 if other is greater than this version, 0 if equal and -1 if less.
+func (h Version) Compare(other any) int {
+ return compareVersions(h, other)
+}
+
+// VersionString represents a Hugo version string.
+type VersionString string
+
+func (h VersionString) String() string {
+ return string(h)
+}
+
+// Compare implements the compare.Comparer interface.
+func (h VersionString) Compare(other any) int {
+ v := MustParseVersion(h.String())
+ return compareVersions(v, other)
+}
+
+// Eq implements the compare.Eqer interface.
+// Equality is an exact string comparison after converting other to string;
+// non-stringable values compare as not equal.
+func (h VersionString) Eq(other any) bool {
+ s, err := cast.ToStringE(other)
+ if err != nil {
+ return false
+ }
+ return s == h.String()
+}
+
+// versionSuffixes are the recognized pre-release suffixes, stripped before
+// the numeric part is parsed.
+var versionSuffixes = []string{"-test", "-DEV"}
+
+// ParseVersion parses a version string.
+// Note that it currently never returns a non-nil error: unknown suffixes
+// and non-numeric components simply parse to zero values.
+func ParseVersion(s string) (Version, error) {
+ var vv Version
+ for _, suffix := range versionSuffixes {
+ if strings.HasSuffix(s, suffix) {
+ vv.Suffix = suffix
+ s = strings.TrimSuffix(s, suffix)
+ }
+ }
+
+ vv.Major, vv.Minor, vv.PatchLevel = parseVersion(s)
+
+ return vv, nil
+}
+
+// MustParseVersion parses a version string
+// and panics if any error occurs.
+func MustParseVersion(s string) Version {
+ vv, err := ParseVersion(s)
+ if err != nil {
+ panic(err)
+ }
+ return vv
+}
+
+// ReleaseVersion represents the release version, i.e. the version with any
+// pre-release suffix stripped. The receiver is a value, so v is not mutated.
+func (v Version) ReleaseVersion() Version {
+ v.Suffix = ""
+ return v
+}
+
+// Next returns the next Hugo release version.
+// PatchLevel and Suffix are reset to their zero values.
+func (v Version) Next() Version {
+ return Version{Major: v.Major, Minor: v.Minor + 1}
+}
+
+// Prev returns the previous Hugo release version.
+// PatchLevel and Suffix are reset to their zero values.
+func (v Version) Prev() Version {
+ return Version{Major: v.Major, Minor: v.Minor - 1}
+}
+
+// NextPatchLevel returns the next patch/bugfix Hugo version.
+// This will be a patch increment on the previous Hugo version.
+func (v Version) NextPatchLevel(level int) Version {
+ prev := v.Prev()
+ prev.PatchLevel = level
+ return prev
+}
+
+// BuildVersionString creates a version string. This is what you see when
+// running "hugo version".
+func BuildVersionString() string {
+ // program := "Hugo Static Site Generator"
+ program := "hugo"
+
+ version := "v" + CurrentVersion.String()
+
+ // Without embedded build info we can only report the bare version.
+ bi := getBuildInfo()
+ if bi == nil {
+ return version
+ }
+ if bi.Revision != "" {
+ version += "-" + bi.Revision
+ }
+ if IsExtended {
+ version += "+extended"
+ }
+
+ osArch := bi.GoOS + "/" + bi.GoArch
+
+ date := bi.RevisionTime
+ if date == "" {
+ // Accept vendor-specified build date if .git/ is unavailable.
+ // buildDate is a package-level var, presumably set via ldflags — TODO confirm.
+ date = buildDate
+ }
+ if date == "" {
+ date = "unknown"
+ }
+
+ versionString := fmt.Sprintf("%s %s %s BuildDate=%s",
+ program, version, osArch, date)
+
+ // vendorInfo is a package-level var set by downstream packagers.
+ if vendorInfo != "" {
+ versionString += " VendorInfo=" + vendorInfo
+ }
+
+ return versionString
+}
+
// version formats the given version parts into a string. Up to and
// including minor version 53 a zero patch level was omitted ("0.53");
// later versions always use the full semver-style triple ("0.54.0").
func version(major, minor, patch int, suffix string) string {
	useFullTriple := patch > 0 || minor > 53
	if useFullTriple {
		return fmt.Sprintf("%d.%d.%d%s", major, minor, patch, suffix)
	}
	return fmt.Sprintf("%d.%d%s", major, minor, suffix)
}
+
+// CompareVersion compares the given version string or number against the
+// running Hugo version.
+// It returns -1 if the given version is less than, 0 if equal and 1 if greater than
+// the running version.
+func CompareVersion(version any) int {
+ return compareVersions(CurrentVersion, version)
+}
+
+// compareVersions compares in against inVersion. The result is oriented on
+// the in argument: 1 if in is greater than inVersion, 0 if equal, -1 if
+// less. Values that cannot be converted or parsed also yield -1.
+func compareVersions(inVersion Version, in any) int {
+ var c int
+ switch d := in.(type) {
+ // Numeric inputs can only express major.minor, e.g. 0.20.
+ case float64:
+ c = compareFloatWithVersion(d, inVersion)
+ case float32:
+ c = compareFloatWithVersion(float64(d), inVersion)
+ case int:
+ c = compareFloatWithVersion(float64(d), inVersion)
+ case int32:
+ c = compareFloatWithVersion(float64(d), inVersion)
+ case int64:
+ c = compareFloatWithVersion(float64(d), inVersion)
+ case Version:
+ if d.Major == inVersion.Major && d.Minor == inVersion.Minor && d.PatchLevel == inVersion.PatchLevel {
+ // Same numeric version: order by suffix, compared lexically
+ // (a release "" sorts before a "-DEV" pre-release here).
+ return strings.Compare(inVersion.Suffix, d.Suffix)
+ }
+ if d.Major > inVersion.Major {
+ return 1
+ } else if d.Major < inVersion.Major {
+ return -1
+ }
+ if d.Minor > inVersion.Minor {
+ return 1
+ } else if d.Minor < inVersion.Minor {
+ return -1
+ }
+ if d.PatchLevel > inVersion.PatchLevel {
+ return 1
+ } else if d.PatchLevel < inVersion.PatchLevel {
+ return -1
+ }
+ default:
+ // Anything else: convert to string, parse, and recurse into the
+ // Version case above via Compare.
+ s, err := cast.ToStringE(in)
+ if err != nil {
+ return -1
+ }
+
+ v, err := ParseVersion(s)
+ if err != nil {
+ return -1
+ }
+ return inVersion.Compare(v)
+
+ }
+
+ return c
+}
+
// parseVersion extracts the numeric major, minor and patch components
// from a dotted version string. Missing or non-numeric components default
// to 0; components beyond the third are ignored.
func parseVersion(s string) (int, int, int) {
	var nums [3]int
	for i, part := range strings.Split(s, ".") {
		if i >= len(nums) {
			break
		}
		// Atoi errors are deliberately ignored; bad input parses as 0.
		nums[i], _ = strconv.Atoi(part)
	}
	return nums[0], nums[1], nums[2]
}
+
+// compareFloatWithVersion compares v1 with v2.
+// It returns -1 if v1 is less than v2, 0 if v1 is equal to v2 and 1 if v1 is greater than v2.
+func compareFloatWithVersion(v1 float64, v2 Version) int {
+ mf, minf := math.Modf(v1)
+ v1maj := int(mf)
+ v1min := int(minf * 100)
+
+ if v2.Major == v1maj && v2.Minor == v1min {
+ return 0
+ }
+
+ if v1maj > v2.Major {
+ return 1
+
+ }
+
+ if v1maj < v2.Major {
+ return -1
+ }
+
+ if v1min > v2.Minor {
+ return 1
+ }
+
+ return -1
+
+}
+
// GoMinorVersion returns the minor version of the Go toolchain in use,
// e.g. 12 for go1.12.5.
func GoMinorVersion() int {
	return goMinorVersion(runtime.Version())
}

// goMinorVersion extracts the minor version from a Go version string as
// reported by runtime.Version. Development builds ("devel ...") map to a
// large magic number; unparsable strings map to 0.
func goMinorVersion(version string) int {
	if strings.HasPrefix(version, "devel") {
		return 9999 // magic
	}

	var (
		major, minor int
		trailing     string
	)

	n, err := fmt.Sscanf(version, "go%d.%d%s", &major, &minor, &trailing)
	if n == 2 && err == io.EOF {
		// No trailing characters, i.e. not an alpha/beta/rc release.
		err = nil
	}
	if err != nil {
		return 0
	}

	return minor
}
diff --git a/common/hugo/version_current.go b/common/hugo/version_current.go
new file mode 100644
index 000000000..0be78e95f
--- /dev/null
+++ b/common/hugo/version_current.go
@@ -0,0 +1,23 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugo
+
+// CurrentVersion represents the current build version.
+// This should be the only one.
+// The "-DEV" suffix marks an in-development (unreleased) build.
+var CurrentVersion = Version{
+ Major: 0,
+ Minor: 102,
+ PatchLevel: 0,
+ Suffix: "-DEV",
+}
diff --git a/common/hugo/version_test.go b/common/hugo/version_test.go
new file mode 100644
index 000000000..33e50ebf5
--- /dev/null
+++ b/common/hugo/version_test.go
@@ -0,0 +1,88 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugo
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestHugoVersion checks version string formatting and the
+// Next/Prev/ReleaseVersion/NextPatchLevel helpers.
+func TestHugoVersion(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(version(0, 15, 0, "-DEV"), qt.Equals, "0.15-DEV")
+ c.Assert(version(0, 15, 2, "-DEV"), qt.Equals, "0.15.2-DEV")
+
+ v := Version{Minor: 21, Suffix: "-DEV"}
+
+ c.Assert(v.ReleaseVersion().String(), qt.Equals, "0.21")
+ c.Assert(v.String(), qt.Equals, "0.21-DEV")
+ c.Assert(v.Next().String(), qt.Equals, "0.22")
+ nextVersionString := v.Next().Version()
+ c.Assert(nextVersionString.String(), qt.Equals, "0.22")
+ c.Assert(nextVersionString.Eq("0.22"), qt.Equals, true)
+ c.Assert(nextVersionString.Eq("0.21"), qt.Equals, false)
+ c.Assert(nextVersionString.Eq(nextVersionString), qt.Equals, true)
+ c.Assert(v.NextPatchLevel(3).String(), qt.Equals, "0.20.3")
+
+ // We started to use full semver versions even for main
+ // releases in v0.54.0
+ v = Version{Minor: 53, PatchLevel: 0}
+ c.Assert(v.String(), qt.Equals, "0.53")
+ c.Assert(v.Next().String(), qt.Equals, "0.54.0")
+ c.Assert(v.Next().Next().String(), qt.Equals, "0.55.0")
+ v = Version{Minor: 54, PatchLevel: 0, Suffix: "-DEV"}
+ c.Assert(v.String(), qt.Equals, "0.54.0-DEV")
+}
+
+// TestCompareVersions pins the orientation of compareVersions: the result
+// reflects the second (given) argument relative to the first (running)
+// version — 1 means the given version is greater.
+func TestCompareVersions(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(compareVersions(MustParseVersion("0.20.0"), 0.20), qt.Equals, 0)
+ c.Assert(compareVersions(MustParseVersion("0.20.0"), float32(0.20)), qt.Equals, 0)
+ c.Assert(compareVersions(MustParseVersion("0.20.0"), float64(0.20)), qt.Equals, 0)
+ c.Assert(compareVersions(MustParseVersion("0.19.1"), 0.20), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.19.3"), "0.20.2"), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.1"), 3), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.1"), int32(3)), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.1"), int64(3)), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.20"), "0.20"), qt.Equals, 0)
+ c.Assert(compareVersions(MustParseVersion("0.20.1"), "0.20.1"), qt.Equals, 0)
+ c.Assert(compareVersions(MustParseVersion("0.20.1"), "0.20"), qt.Equals, -1)
+ c.Assert(compareVersions(MustParseVersion("0.20.0"), "0.20.1"), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.20.1"), "0.20.2"), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.21.1"), "0.22.1"), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.22.0"), "0.22-DEV"), qt.Equals, -1)
+ c.Assert(compareVersions(MustParseVersion("0.22.0"), "0.22.1-DEV"), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.22.0-DEV"), "0.22"), qt.Equals, 1)
+ c.Assert(compareVersions(MustParseVersion("0.22.1-DEV"), "0.22"), qt.Equals, -1)
+ c.Assert(compareVersions(MustParseVersion("0.22.1-DEV"), "0.22.1-DEV"), qt.Equals, 0)
+}
+
+// TestParseHugoVersion round-trips version strings, including the
+// recognized "-test" and "-DEV" suffixes.
+func TestParseHugoVersion(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(MustParseVersion("0.25").String(), qt.Equals, "0.25")
+ c.Assert(MustParseVersion("0.25.2").String(), qt.Equals, "0.25.2")
+ c.Assert(MustParseVersion("0.25-test").String(), qt.Equals, "0.25-test")
+ c.Assert(MustParseVersion("0.25-DEV").String(), qt.Equals, "0.25-DEV")
+}
+
+// TestGoMinorVersion covers release, rc and runtime-reported Go versions.
+func TestGoMinorVersion(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(goMinorVersion("go1.12.5"), qt.Equals, 12)
+ c.Assert(goMinorVersion("go1.14rc1"), qt.Equals, 14)
+ c.Assert(GoMinorVersion() >= 11, qt.Equals, true)
+}
diff --git a/common/loggers/ignorableLogger.go b/common/loggers/ignorableLogger.go
new file mode 100644
index 000000000..5040d1036
--- /dev/null
+++ b/common/loggers/ignorableLogger.go
@@ -0,0 +1,65 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package loggers
+
+import (
+ "fmt"
+ "strings"
+)
+
+// IgnorableLogger is a logger that ignores certain log statements.
+type IgnorableLogger interface {
+ Logger
+ Errorsf(statementID, format string, v ...any)
+ Apply(logger Logger) IgnorableLogger
+}
+
+// ignorableLogger wraps a Logger with a set of statement IDs to ignore.
+type ignorableLogger struct {
+ Logger
+ // statements holds the ignored statement IDs, stored lower-cased.
+ statements map[string]bool
+}
+
+// NewIgnorableLogger wraps the given logger and ignores the log statement IDs given.
+// The IDs are lower-cased before being stored.
+func NewIgnorableLogger(logger Logger, statements ...string) IgnorableLogger {
+ statementsSet := make(map[string]bool)
+ for _, s := range statements {
+ statementsSet[strings.ToLower(s)] = true
+ }
+ return ignorableLogger{
+ Logger: logger,
+ statements: statementsSet,
+ }
+}
+
+// Errorsf logs statementID as an ERROR if not configured as ignoreable.
+func (l ignorableLogger) Errorsf(statementID, format string, v ...any) {
+ if l.statements[statementID] {
+ // Ignore.
+ return
+ }
+ ignoreMsg := fmt.Sprintf(`
+If you feel that this should not be logged as an ERROR, you can ignore it by adding this to your site config:
+ignoreErrors = [%q]`, statementID)
+
+ format += ignoreMsg
+
+ l.Errorf(format, v...)
+}
+
+// Apply returns a copy of l that writes through the given logger while
+// keeping the current set of ignored statement IDs (shared, not copied).
+func (l ignorableLogger) Apply(logger Logger) IgnorableLogger {
+ return ignorableLogger{
+ Logger: logger,
+ statements: l.statements,
+ }
+}
diff --git a/common/loggers/loggers.go b/common/loggers/loggers.go
new file mode 100644
index 000000000..308635fe9
--- /dev/null
+++ b/common/loggers/loggers.go
@@ -0,0 +1,355 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package loggers
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "regexp"
+ "runtime"
+ "time"
+
+ "github.com/gohugoio/hugo/common/terminal"
+
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var (
+ // Counts ERROR logs to the global jww logger.
+ GlobalErrorCounter *jww.Counter
+ // PanicOnWarning, when set, makes Warnf/Warnln panic after logging.
+ PanicOnWarning bool
+)
+
+// init wires the global error counter into the global jww logger.
+func init() {
+ GlobalErrorCounter = &jww.Counter{}
+ jww.SetLogListeners(jww.LogCounter(GlobalErrorCounter, jww.LevelError))
+}
+
+func LoggerToWriterWithPrefix(logger *log.Logger, prefix string) io.Writer {
+ return prefixWriter{
+ logger: logger,
+ prefix: prefix,
+ }
+}
+
+type prefixWriter struct {
+ logger *log.Logger
+ prefix string
+}
+
+func (w prefixWriter) Write(p []byte) (n int, err error) {
+ w.logger.Printf("%s: %s", w.prefix, p)
+ return len(p), nil
+}
+
+// Logger is the common logging interface used throughout Hugo, with
+// leveled Debug/Info/Warn/Error output plus FEEDBACK-style Printf/Println.
+type Logger interface {
+ Printf(format string, v ...any)
+ Println(v ...any)
+ PrintTimerIfDelayed(start time.Time, name string)
+ Debug() *log.Logger
+ Debugf(format string, v ...any)
+ Debugln(v ...any)
+ Info() *log.Logger
+ Infof(format string, v ...any)
+ Infoln(v ...any)
+ Warn() *log.Logger
+ Warnf(format string, v ...any)
+ Warnln(v ...any)
+ Error() *log.Logger
+ Errorf(format string, v ...any)
+ Errorln(v ...any)
+ Errors() string
+
+ Out() io.Writer
+
+ Reset()
+
+ // Used in tests.
+ LogCounters() *LogCounters
+}
+
+// LogCounters bundles the ERROR and WARN counters attached to a logger.
+type LogCounters struct {
+ ErrorCounter *jww.Counter
+ WarnCounter *jww.Counter
+}
+
+// logger is the concrete Logger built on top of a jww.Notepad.
+type logger struct {
+ *jww.Notepad
+
+ // The writer that represents stdout.
+ // Will be ioutil.Discard when in quiet mode.
+ out io.Writer
+
+ logCounters *LogCounters
+
+ // This is only set in server mode.
+ errors *bytes.Buffer
+}
+
+// Printf writes to the FEEDBACK logger (always-visible output).
+func (l *logger) Printf(format string, v ...any) {
+ l.FEEDBACK.Printf(format, v...)
+}
+
+// Println writes to the FEEDBACK logger (always-visible output).
+func (l *logger) Println(v ...any) {
+ l.FEEDBACK.Println(v...)
+}
+
+func (l *logger) Debug() *log.Logger {
+ return l.DEBUG
+}
+
+func (l *logger) Debugf(format string, v ...any) {
+ l.DEBUG.Printf(format, v...)
+}
+
+func (l *logger) Debugln(v ...any) {
+ l.DEBUG.Println(v...)
+}
+
+func (l *logger) Infof(format string, v ...any) {
+ l.INFO.Printf(format, v...)
+}
+
+func (l *logger) Infoln(v ...any) {
+ l.INFO.Println(v...)
+}
+
+func (l *logger) Info() *log.Logger {
+ return l.INFO
+}
+
+const panicOnWarningMessage = "Warning trapped. Remove the --panicOnWarning flag to continue."
+
+// Warnf logs a warning; if the package-level PanicOnWarning flag is set it
+// then panics so the warning site can be located.
+func (l *logger) Warnf(format string, v ...any) {
+ l.WARN.Printf(format, v...)
+ if PanicOnWarning {
+ panic(panicOnWarningMessage)
+ }
+}
+
+// Warnln logs a warning; see Warnf for the PanicOnWarning behavior.
+func (l *logger) Warnln(v ...any) {
+ l.WARN.Println(v...)
+ if PanicOnWarning {
+ panic(panicOnWarningMessage)
+ }
+}
+
+func (l *logger) Warn() *log.Logger {
+ return l.WARN
+}
+
+func (l *logger) Errorf(format string, v ...any) {
+ l.ERROR.Printf(format, v...)
+}
+
+func (l *logger) Errorln(v ...any) {
+ l.ERROR.Println(v...)
+}
+
+func (l *logger) Error() *log.Logger {
+ return l.ERROR
+}
+
+// LogCounters exposes the error/warning counters; used in tests.
+func (l *logger) LogCounters() *LogCounters {
+ return l.logCounters
+}
+
+// Out returns the writer representing stdout for this logger.
+func (l *logger) Out() io.Writer {
+ return l.out
+}
+
+// PrintTimerIfDelayed prints a time statement to the FEEDBACK logger
+// if considerable time is spent.
+func (l *logger) PrintTimerIfDelayed(start time.Time, name string) {
+ elapsed := time.Since(start)
+ milli := int(1000 * elapsed.Seconds())
+ // Anything under half a second is not worth reporting.
+ if milli < 500 {
+ return
+ }
+ l.Printf("%s in %v ms", name, milli)
+}
+
+// PrintTimer unconditionally prints the elapsed time since start.
+func (l *logger) PrintTimer(start time.Time, name string) {
+ elapsed := time.Since(start)
+ milli := int(1000 * elapsed.Seconds())
+ l.Printf("%s in %v ms", name, milli)
+}
+
+// Errors returns the captured ERROR output with ANSI colours stripped.
+// It is empty unless error capture was enabled (see newLogger/saveErrors).
+func (l *logger) Errors() string {
+ if l.errors == nil {
+ return ""
+ }
+ return ansiColorRe.ReplaceAllString(l.errors.String(), "")
+}
+
+// Reset resets the logger's internal state.
+// NOTE(review): only the ErrorCounter and the error buffer are reset; the
+// WarnCounter is left untouched — confirm this is intentional.
+func (l *logger) Reset() {
+ l.logCounters.ErrorCounter.Reset()
+ if l.errors != nil {
+ l.errors.Reset()
+ }
+}
+
+// NewLogger creates a new Logger for the given thresholds
+func NewLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, logHandle io.Writer, saveErrors bool) Logger {
+ return newLogger(stdoutThreshold, logThreshold, outHandle, logHandle, saveErrors)
+}
+
+// NewDebugLogger is a convenience function to create a debug logger.
+func NewDebugLogger() Logger {
+ return NewBasicLogger(jww.LevelDebug)
+}
+
+// NewWarningLogger is a convenience function to create a warning logger.
+func NewWarningLogger() Logger {
+ return NewBasicLogger(jww.LevelWarn)
+}
+
+// NewInfoLogger is a convenience function to create a info logger.
+func NewInfoLogger() Logger {
+ return NewBasicLogger(jww.LevelInfo)
+}
+
+// NewErrorLogger is a convenience function to create an error logger.
+func NewErrorLogger() Logger {
+ return NewBasicLogger(jww.LevelError)
+}
+
+// NewBasicLogger creates a new basic logger writing to Stdout.
+func NewBasicLogger(t jww.Threshold) Logger {
+ return newLogger(t, jww.LevelError, os.Stdout, ioutil.Discard, false)
+}
+
+// NewBasicLoggerForWriter creates a new basic logger writing to w.
+func NewBasicLoggerForWriter(t jww.Threshold, w io.Writer) Logger {
+ return newLogger(t, jww.LevelError, w, ioutil.Discard, false)
+}
+
// RemoveANSIColours removes all ANSI colours from the given string.
func RemoveANSIColours(s string) string {
	return ansiColorRe.ReplaceAllString(s, "")
}

var (
	// ansiColorRe matches ANSI SGR colour escape sequences, e.g. "\033[31m".
	ansiColorRe = regexp.MustCompile("(?s)\\033\\[\\d*(;\\d*)*m")
	// errorRe matches the log-level labels that get colorized.
	errorRe = regexp.MustCompile("^(ERROR|FATAL|WARN)")
)

// ansiCleaner is an io.Writer that strips ANSI colour codes from
// everything written through it before forwarding to the wrapped writer.
type ansiCleaner struct {
	w io.Writer
}

func (a ansiCleaner) Write(p []byte) (n int, err error) {
	stripped := ansiColorRe.ReplaceAll(p, []byte(""))
	return a.w.Write(stripped)
}
+
+// labelColorizer is an io.Writer that colorizes leading ERROR/FATAL/WARN
+// labels via the terminal package before forwarding to the wrapped writer.
+type labelColorizer struct {
+ w io.Writer
+}
+
+func (a labelColorizer) Write(p []byte) (n int, err error) {
+ replaced := errorRe.ReplaceAllStringFunc(string(p), func(m string) string {
+ switch m {
+ case "ERROR", "FATAL":
+ return terminal.Error(m)
+ case "WARN":
+ return terminal.Warning(m)
+ default:
+ return m
+ }
+ })
+ // io.MultiWriter will abort if we return a bigger write count than input
+ // bytes, so we lie a little.
+ _, err = a.w.Write([]byte(replaced))
+ return len(p), err
+}
+
+// InitGlobalLogger initializes the global logger, used in some rare cases.
+func InitGlobalLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, logHandle io.Writer) {
+ outHandle, logHandle = getLogWriters(outHandle, logHandle)
+
+ jww.SetStdoutOutput(outHandle)
+ jww.SetLogOutput(logHandle)
+ jww.SetLogThreshold(logThreshold)
+ jww.SetStdoutThreshold(stdoutThreshold)
+}
+
+// getLogWriters wraps the given stdout/log writers for terminal use:
+// stdout output gets colorized labels, file-log output gets ANSI codes
+// stripped. Both are returned unchanged when stdout is not a colour terminal.
+func getLogWriters(outHandle, logHandle io.Writer) (io.Writer, io.Writer) {
+ isTerm := terminal.PrintANSIColors(os.Stdout)
+ if logHandle != ioutil.Discard && isTerm {
+ // Remove any Ansi coloring from log output
+ logHandle = ansiCleaner{w: logHandle}
+ }
+
+ if isTerm {
+ outHandle = labelColorizer{w: outHandle}
+ }
+
+ return outHandle, logHandle
+}
+
+type fatalLogWriter int
+
+func (s fatalLogWriter) Write(p []byte) (n int, err error) {
+ trace := make([]byte, 1500)
+ runtime.Stack(trace, true)
+ fmt.Printf("\n===========\n\n%s\n", trace)
+ os.Exit(-1)
+
+ return 0, nil
+}
+
+var fatalLogListener = func(t jww.Threshold) io.Writer {
+ if t != jww.LevelError {
+ // Only interested in ERROR
+ return nil
+ }
+
+ return new(fatalLogWriter)
+}
+
+// newLogger builds the concrete logger: it wires error/warning counters as
+// jww log listeners and, when saveErrors is set, captures ERROR output in
+// a buffer so it can be retrieved via Errors().
+func newLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, logHandle io.Writer, saveErrors bool) *logger {
+ errorCounter := &jww.Counter{}
+ warnCounter := &jww.Counter{}
+ outHandle, logHandle = getLogWriters(outHandle, logHandle)
+
+ listeners := []jww.LogListener{jww.LogCounter(errorCounter, jww.LevelError), jww.LogCounter(warnCounter, jww.LevelWarn)}
+ var errorBuff *bytes.Buffer
+ if saveErrors {
+ errorBuff = new(bytes.Buffer)
+ errorCapture := func(t jww.Threshold) io.Writer {
+ if t != jww.LevelError {
+ // Only interested in ERROR
+ return nil
+ }
+ return errorBuff
+ }
+
+ listeners = append(listeners, errorCapture)
+ }
+
+ return &logger{
+ Notepad: jww.NewNotepad(stdoutThreshold, logThreshold, outHandle, logHandle, "", log.Ldate|log.Ltime, listeners...),
+ out: outHandle,
+ logCounters: &LogCounters{
+ ErrorCounter: errorCounter,
+ WarnCounter: warnCounter,
+ },
+ errors: errorBuff,
+ }
+}
diff --git a/common/loggers/loggers_test.go b/common/loggers/loggers_test.go
new file mode 100644
index 000000000..a7bd1ae12
--- /dev/null
+++ b/common/loggers/loggers_test.go
@@ -0,0 +1,60 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package loggers
+
+import (
+ "bytes"
+ "fmt"
+ "log"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestLogger checks that the ERROR counter tracks Errorln calls.
+func TestLogger(t *testing.T) {
+ c := qt.New(t)
+ l := NewWarningLogger()
+
+ l.Errorln("One error")
+ l.Errorln("Two error")
+ l.Warnln("A warning")
+
+ c.Assert(l.LogCounters().ErrorCounter.Count(), qt.Equals, uint64(2))
+}
+
+// TestLoggerToWriterWithPrefix checks the prefix and the trailing newline
+// added by the underlying *log.Logger.
+func TestLoggerToWriterWithPrefix(t *testing.T) {
+ c := qt.New(t)
+
+ var b bytes.Buffer
+
+ logger := log.New(&b, "", 0)
+
+ w := LoggerToWriterWithPrefix(logger, "myprefix")
+
+ fmt.Fprint(w, "Hello Hugo!")
+
+ c.Assert(b.String(), qt.Equals, "myprefix: Hello Hugo!\n")
+}
+
+// TestRemoveANSIColours covers empty input, bare escape sequences and
+// sequences embedded in text.
+func TestRemoveANSIColours(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(RemoveANSIColours(""), qt.Equals, "")
+ c.Assert(RemoveANSIColours("\033[31m"), qt.Equals, "")
+ c.Assert(RemoveANSIColours("\033[31mHello"), qt.Equals, "Hello")
+ c.Assert(RemoveANSIColours("\033[31mHello\033[0m"), qt.Equals, "Hello")
+ c.Assert(RemoveANSIColours("\033[31mHello\033[0m World"), qt.Equals, "Hello World")
+ c.Assert(RemoveANSIColours("\033[31mHello\033[0m World\033[31m!"), qt.Equals, "Hello World!")
+ c.Assert(RemoveANSIColours("\x1b[90m 5 |"), qt.Equals, " 5 |")
+}
diff --git a/common/maps/maps.go b/common/maps/maps.go
new file mode 100644
index 000000000..2d8a122ca
--- /dev/null
+++ b/common/maps/maps.go
@@ -0,0 +1,193 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gobwas/glob"
+ "github.com/spf13/cast"
+)
+
+// ToStringMapE converts in to map[string]interface{}.
+// A Params value is returned as-is (no copy); map[string]string is widened
+// element by element; everything else is delegated to cast.ToStringMapE.
+func ToStringMapE(in any) (map[string]any, error) {
+ switch vv := in.(type) {
+ case Params:
+ return vv, nil
+ case map[string]string:
+ var m = map[string]any{}
+ for k, v := range vv {
+ m[k] = v
+ }
+ return m, nil
+
+ default:
+ return cast.ToStringMapE(in)
+ }
+}
+
+// ToParamsAndPrepare converts in to Params and prepares it for use.
+// If in is nil, an empty map is returned.
+// The second return value is false if the conversion fails.
+// See PrepareParams.
+func ToParamsAndPrepare(in any) (Params, bool) {
+ if types.IsNil(in) {
+ return Params{}, true
+ }
+ m, err := ToStringMapE(in)
+ if err != nil {
+ return nil, false
+ }
+ PrepareParams(m)
+ return m, true
+}
+
+// MustToParamsAndPrepare calls ToParamsAndPrepare and panics if it fails.
+func MustToParamsAndPrepare(in any) Params {
+ if p, ok := ToParamsAndPrepare(in); ok {
+ return p
+ } else {
+ panic(fmt.Sprintf("cannot convert %T to maps.Params", in))
+ }
+}
+
+// ToStringMap converts in to map[string]interface{}.
+// Conversion errors are silently discarded; a nil map may be returned.
+func ToStringMap(in any) map[string]any {
+ m, _ := ToStringMapE(in)
+ return m
+}
+
+// ToStringMapStringE converts in to map[string]string.
+func ToStringMapStringE(in any) (map[string]string, error) {
+ m, err := ToStringMapE(in)
+ if err != nil {
+ return nil, err
+ }
+ return cast.ToStringMapStringE(m)
+}
+
+// ToStringMapString converts in to map[string]string.
+// Conversion errors are silently discarded; a nil map may be returned.
+func ToStringMapString(in any) map[string]string {
+ m, _ := ToStringMapStringE(in)
+ return m
+}
+
+// ToStringMapBool converts in to map[string]bool.
+func ToStringMapBool(in any) map[string]bool {
+ m, _ := ToStringMapE(in)
+ return cast.ToStringMapBool(m)
+}
+
// ToSliceStringMap converts in to []map[string]interface{}.
// For a []any input, only entries that are themselves map[string]any are
// kept; other entry types are silently dropped. Any other input type
// yields an error.
func ToSliceStringMap(in any) ([]map[string]any, error) {
	switch v := in.(type) {
	case []map[string]any:
		return v, nil
	case []any:
		var out []map[string]any
		for _, item := range v {
			m, ok := item.(map[string]any)
			if !ok {
				continue
			}
			out = append(out, m)
		}
		return out, nil
	default:
		return nil, fmt.Errorf("unable to cast %#v of type %T to []map[string]interface{}", in, in)
	}
}
+
// LookupEqualFold finds key in m with case insensitive equality checks.
// An exact match is tried first; otherwise the first case-insensitive
// match found during map iteration wins (iteration order is random if
// several keys fold to the same value).
func LookupEqualFold[T any | string](m map[string]T, key string) (T, bool) {
	if v, exact := m[key]; exact {
		return v, true
	}
	for candidate, v := range m {
		if strings.EqualFold(candidate, key) {
			return v, true
		}
	}
	var zero T
	return zero, false
}
+
+// keyRename pairs a compiled key-path glob with the replacement key.
+type keyRename struct {
+ pattern glob.Glob
+ newKey string
+}
+
+// KeyRenamer supports renaming of keys in a map.
+type KeyRenamer struct {
+ renames []keyRename
+}
+
+// NewKeyRenamer creates a new KeyRenamer given a list of pattern and new key
+// value pairs.
+func NewKeyRenamer(patternKeys ...string) (KeyRenamer, error) {
+ var renames []keyRename
+ for i := 0; i < len(patternKeys); i += 2 {
+ g, err := glob.Compile(strings.ToLower(patternKeys[i]), '/')
+ if err != nil {
+ return KeyRenamer{}, err
+ }
+ renames = append(renames, keyRename{pattern: g, newKey: patternKeys[i+1]})
+ }
+
+ return KeyRenamer{renames: renames}, nil
+}
+
+// getNewKey returns the replacement key for keyPath, or "" when no
+// configured pattern matches. The first matching pattern wins.
+func (r KeyRenamer) getNewKey(keyPath string) string {
+ for _, matcher := range r.renames {
+ if matcher.pattern.Match(keyPath) {
+ return matcher.newKey
+ }
+ }
+
+ return ""
+}
+
+// Rename renames the keys in the given map according
+// to the patterns in the current KeyRenamer.
+func (r KeyRenamer) Rename(m map[string]any) {
+ r.renamePath("", m)
+}
+
+// keyPath joins two key segments into a lower-cased, '/'-separated path.
+func (KeyRenamer) keyPath(k1, k2 string) string {
+ k1, k2 = strings.ToLower(k1), strings.ToLower(k2)
+ if k1 == "" {
+ return k2
+ }
+ return k1 + "/" + k2
+}
+
+// renamePath recursively walks m, renaming keys whose full path matches a
+// configured pattern. Nested map[any]any values are converted to
+// map[string]any in place.
+// NOTE(review): new keys are inserted into m while ranging over it;
+// deleting during range is safe, but an inserted key may or may not be
+// visited in the same pass — confirm renamed keys are never themselves
+// expected to match a pattern.
+func (r KeyRenamer) renamePath(parentKeyPath string, m map[string]any) {
+ for key, val := range m {
+ keyPath := r.keyPath(parentKeyPath, key)
+ switch val.(type) {
+ case map[any]any:
+ val = cast.ToStringMap(val)
+ r.renamePath(keyPath, val.(map[string]any))
+ case map[string]any:
+ r.renamePath(keyPath, val.(map[string]any))
+ }
+
+ newKey := r.getNewKey(keyPath)
+
+ if newKey != "" {
+ delete(m, key)
+ m[newKey] = val
+ }
+ }
+}
diff --git a/common/maps/maps_test.go b/common/maps/maps_test.go
new file mode 100644
index 000000000..0b84d2dd7
--- /dev/null
+++ b/common/maps/maps_test.go
@@ -0,0 +1,196 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPrepareParams(t *testing.T) {
+ tests := []struct {
+ input Params
+ expected Params
+ }{
+ {
+ map[string]any{
+ "abC": 32,
+ },
+ Params{
+ "abc": 32,
+ },
+ },
+ {
+ map[string]any{
+ "abC": 32,
+ "deF": map[any]any{
+ 23: "A value",
+ 24: map[string]any{
+ "AbCDe": "A value",
+ "eFgHi": "Another value",
+ },
+ },
+ "gHi": map[string]any{
+ "J": 25,
+ },
+ "jKl": map[string]string{
+ "M": "26",
+ },
+ },
+ Params{
+ "abc": 32,
+ "def": Params{
+ "23": "A value",
+ "24": Params{
+ "abcde": "A value",
+ "efghi": "Another value",
+ },
+ },
+ "ghi": Params{
+ "j": 25,
+ },
+ "jkl": Params{
+ "m": "26",
+ },
+ },
+ },
+ }
+
+ for i, test := range tests {
+ t.Run(fmt.Sprint(i), func(t *testing.T) {
+ // PrepareParams modifies input.
+ PrepareParams(test.input)
+ if !reflect.DeepEqual(test.expected, test.input) {
+ t.Errorf("[%d] Expected\n%#v, got\n%#v\n", i, test.expected, test.input)
+ }
+ })
+ }
+}
+
+func TestToSliceStringMap(t *testing.T) {
+ c := qt.New(t)
+
+ tests := []struct {
+ input any
+ expected []map[string]any
+ }{
+ {
+ input: []map[string]any{
+ {"abc": 123},
+ },
+ expected: []map[string]any{
+ {"abc": 123},
+ },
+ }, {
+ input: []any{
+ map[string]any{
+ "def": 456,
+ },
+ },
+ expected: []map[string]any{
+ {"def": 456},
+ },
+ },
+ }
+
+ for _, test := range tests {
+ v, err := ToSliceStringMap(test.input)
+ c.Assert(err, qt.IsNil)
+ c.Assert(v, qt.DeepEquals, test.expected)
+ }
+}
+
+func TestToParamsAndPrepare(t *testing.T) {
+ c := qt.New(t)
+ _, ok := ToParamsAndPrepare(map[string]any{"A": "av"})
+ c.Assert(ok, qt.IsTrue)
+
+ params, ok := ToParamsAndPrepare(nil)
+ c.Assert(ok, qt.IsTrue)
+ c.Assert(params, qt.DeepEquals, Params{})
+}
+
+func TestRenameKeys(t *testing.T) {
+ c := qt.New(t)
+
+ m := map[string]any{
+ "a": 32,
+ "ren1": "m1",
+ "ren2": "m1_2",
+ "sub": map[string]any{
+ "subsub": map[string]any{
+ "REN1": "m2",
+ "ren2": "m2_2",
+ },
+ },
+ "no": map[string]any{
+ "ren1": "m2",
+ "ren2": "m2_2",
+ },
+ }
+
+ expected := map[string]any{
+ "a": 32,
+ "new1": "m1",
+ "new2": "m1_2",
+ "sub": map[string]any{
+ "subsub": map[string]any{
+ "new1": "m2",
+ "ren2": "m2_2",
+ },
+ },
+ "no": map[string]any{
+ "ren1": "m2",
+ "ren2": "m2_2",
+ },
+ }
+
+ renamer, err := NewKeyRenamer(
+ "{ren1,sub/*/ren1}", "new1",
+ "{Ren2,sub/ren2}", "new2",
+ )
+ c.Assert(err, qt.IsNil)
+
+ renamer.Rename(m)
+
+ if !reflect.DeepEqual(expected, m) {
+ t.Errorf("Expected\n%#v, got\n%#v\n", expected, m)
+ }
+}
+
+func TestLookupEqualFold(t *testing.T) {
+ c := qt.New(t)
+
+ m1 := map[string]any{
+ "a": "av",
+ "B": "bv",
+ }
+
+ v, found := LookupEqualFold(m1, "b")
+ c.Assert(found, qt.IsTrue)
+ c.Assert(v, qt.Equals, "bv")
+
+ m2 := map[string]string{
+ "a": "av",
+ "B": "bv",
+ }
+
+ v, found = LookupEqualFold(m2, "b")
+ c.Assert(found, qt.IsTrue)
+ c.Assert(v, qt.Equals, "bv")
+
+}
diff --git a/common/maps/params.go b/common/maps/params.go
new file mode 100644
index 000000000..60fe56668
--- /dev/null
+++ b/common/maps/params.go
@@ -0,0 +1,289 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/spf13/cast"
+)
+
+// Params is a map where all keys are lower case.
+type Params map[string]any
+
+// Get does a lower case and nested search in this map.
+// It will return nil if none found.
+func (p Params) Get(indices ...string) any {
+ v, _, _ := getNested(p, indices)
+ return v
+}
+
+// Set overwrites values in p with values in pp for common or new keys.
+// This is done recursively.
+func (p Params) Set(pp Params) {
+ for k, v := range pp {
+ vv, found := p[k]
+ if !found {
+ p[k] = v
+ } else {
+ switch vvv := vv.(type) {
+ case Params:
+ if pv, ok := v.(Params); ok {
+ vvv.Set(pv)
+ } else {
+ p[k] = v
+ }
+ default:
+ p[k] = v
+ }
+ }
+ }
+}
+
+// IsZero returns true if p is considered empty.
+func (p Params) IsZero() bool {
+ if p == nil || len(p) == 0 {
+ return true
+ }
+
+ if len(p) > 1 {
+ return false
+ }
+
+ for k, _ := range p {
+ return k == mergeStrategyKey
+ }
+
+ return false
+
+}
+
+// Merge transfers values from pp to p for new keys.
+// This is done recursively.
+func (p Params) Merge(pp Params) {
+ p.merge("", pp)
+}
+
+// MergeRoot transfers values from pp to p for new keys where p is the
+// root of the tree.
+// This is done recursively.
+func (p Params) MergeRoot(pp Params) {
+ ms, _ := p.GetMergeStrategy()
+ p.merge(ms, pp)
+}
+
+func (p Params) merge(ps ParamsMergeStrategy, pp Params) {
+ ns, found := p.GetMergeStrategy()
+
+ var ms = ns
+ if !found && ps != "" {
+ ms = ps
+ }
+
+ noUpdate := ms == ParamsMergeStrategyNone
+ noUpdate = noUpdate || (ps != "" && ps == ParamsMergeStrategyShallow)
+
+ for k, v := range pp {
+
+ if k == mergeStrategyKey {
+ continue
+ }
+ vv, found := p[k]
+
+ if found {
+ // Key matches, if both sides are Params, we try to merge.
+ if vvv, ok := vv.(Params); ok {
+ if pv, ok := v.(Params); ok {
+ vvv.merge(ms, pv)
+ }
+ }
+ } else if !noUpdate {
+ p[k] = v
+ }
+
+ }
+}
+
+func (p Params) GetMergeStrategy() (ParamsMergeStrategy, bool) {
+ if v, found := p[mergeStrategyKey]; found {
+ if s, ok := v.(ParamsMergeStrategy); ok {
+ return s, true
+ }
+ }
+ return ParamsMergeStrategyShallow, false
+}
+
+func (p Params) DeleteMergeStrategy() bool {
+ if _, found := p[mergeStrategyKey]; found {
+ delete(p, mergeStrategyKey)
+ return true
+ }
+ return false
+}
+
+func (p Params) SetDefaultMergeStrategy(s ParamsMergeStrategy) {
+ switch s {
+ case ParamsMergeStrategyDeep, ParamsMergeStrategyNone, ParamsMergeStrategyShallow:
+ default:
+ panic(fmt.Sprintf("invalid merge strategy %q", s))
+ }
+ p[mergeStrategyKey] = s
+}
+
+func getNested(m map[string]any, indices []string) (any, string, map[string]any) {
+ if len(indices) == 0 {
+ return nil, "", nil
+ }
+
+ first := indices[0]
+ v, found := m[strings.ToLower(cast.ToString(first))]
+ if !found {
+ if len(indices) == 1 {
+ return nil, first, m
+ }
+ return nil, "", nil
+
+ }
+
+ if len(indices) == 1 {
+ return v, first, m
+ }
+
+ switch m2 := v.(type) {
+ case Params:
+ return getNested(m2, indices[1:])
+ case map[string]any:
+ return getNested(m2, indices[1:])
+ default:
+ return nil, "", nil
+ }
+}
+
+// GetNestedParam gets the first match of the keyStr in the candidates given.
+// It will first try the exact match and then try to find it as a nested map value,
+// using the given separator, e.g. "mymap.name".
+// It assumes that all the maps given have lower cased keys.
+func GetNestedParam(keyStr, separator string, candidates ...Params) (any, error) {
+ keyStr = strings.ToLower(keyStr)
+
+ // Try exact match first
+ for _, m := range candidates {
+ if v, ok := m[keyStr]; ok {
+ return v, nil
+ }
+ }
+
+ keySegments := strings.Split(keyStr, separator)
+ for _, m := range candidates {
+ if v := m.Get(keySegments...); v != nil {
+ return v, nil
+ }
+ }
+
+ return nil, nil
+}
+
+func GetNestedParamFn(keyStr, separator string, lookupFn func(key string) any) (any, string, map[string]any, error) {
+ keySegments := strings.Split(keyStr, separator)
+ if len(keySegments) == 0 {
+ return nil, "", nil, nil
+ }
+
+ first := lookupFn(keySegments[0])
+ if first == nil {
+ return nil, "", nil, nil
+ }
+
+ if len(keySegments) == 1 {
+ return first, keySegments[0], nil, nil
+ }
+
+ switch m := first.(type) {
+ case map[string]any:
+ v, key, owner := getNested(m, keySegments[1:])
+ return v, key, owner, nil
+ case Params:
+ v, key, owner := getNested(m, keySegments[1:])
+ return v, key, owner, nil
+ }
+
+ return nil, "", nil, nil
+}
+
+// ParamsMergeStrategy tells what strategy to use in Params.Merge.
+type ParamsMergeStrategy string
+
+const (
+ // Do not merge.
+ ParamsMergeStrategyNone ParamsMergeStrategy = "none"
+ // Only add new keys.
+ ParamsMergeStrategyShallow ParamsMergeStrategy = "shallow"
+ // Add new keys, merge existing.
+ ParamsMergeStrategyDeep ParamsMergeStrategy = "deep"
+
+ mergeStrategyKey = "_merge"
+)
+
+func toMergeStrategy(v any) ParamsMergeStrategy {
+ s := ParamsMergeStrategy(cast.ToString(v))
+ switch s {
+ case ParamsMergeStrategyDeep, ParamsMergeStrategyNone, ParamsMergeStrategyShallow:
+ return s
+ default:
+ return ParamsMergeStrategyDeep
+ }
+}
+
+// PrepareParams
+// * makes all the keys in the given map lower cased and will do so recursively.
+// * This will modify the map given.
+// * Any nested map[interface{}]interface{}, map[string]interface{}, map[string]string will be converted to Params.
+// * Any _merge value will be converted to proper type and value.
+func PrepareParams(m Params) {
+ for k, v := range m {
+ var retyped bool
+ lKey := strings.ToLower(k)
+ if lKey == mergeStrategyKey {
+ v = toMergeStrategy(v)
+ retyped = true
+ } else {
+ switch vv := v.(type) {
+ case map[any]any:
+ var p Params = cast.ToStringMap(v)
+ v = p
+ PrepareParams(p)
+ retyped = true
+ case map[string]any:
+ var p Params = v.(map[string]any)
+ v = p
+ PrepareParams(p)
+ retyped = true
+ case map[string]string:
+ p := make(Params)
+ for k, v := range vv {
+ p[k] = v
+ }
+ v = p
+ PrepareParams(p)
+ retyped = true
+ }
+ }
+
+ if retyped || k != lKey {
+ delete(m, k)
+ m[lKey] = v
+ }
+ }
+}
diff --git a/common/maps/params_test.go b/common/maps/params_test.go
new file mode 100644
index 000000000..a070e6f60
--- /dev/null
+++ b/common/maps/params_test.go
@@ -0,0 +1,170 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestGetNestedParam(t *testing.T) {
+ m := map[string]any{
+ "string": "value",
+ "first": 1,
+ "with_underscore": 2,
+ "nested": map[string]any{
+ "color": "blue",
+ "nestednested": map[string]any{
+ "color": "green",
+ },
+ },
+ }
+
+ c := qt.New(t)
+
+ must := func(keyStr, separator string, candidates ...Params) any {
+ v, err := GetNestedParam(keyStr, separator, candidates...)
+ c.Assert(err, qt.IsNil)
+ return v
+ }
+
+ c.Assert(must("first", "_", m), qt.Equals, 1)
+ c.Assert(must("First", "_", m), qt.Equals, 1)
+ c.Assert(must("with_underscore", "_", m), qt.Equals, 2)
+ c.Assert(must("nested_color", "_", m), qt.Equals, "blue")
+ c.Assert(must("nested.nestednested.color", ".", m), qt.Equals, "green")
+ c.Assert(must("string.name", ".", m), qt.IsNil)
+ c.Assert(must("nested.foo", ".", m), qt.IsNil)
+}
+
+// https://github.com/gohugoio/hugo/issues/7903
+func TestGetNestedParamFnNestedNewKey(t *testing.T) {
+ c := qt.New(t)
+
+ nested := map[string]any{
+ "color": "blue",
+ }
+ m := map[string]any{
+ "nested": nested,
+ }
+
+ existing, nestedKey, owner, err := GetNestedParamFn("nested.new", ".", func(key string) any {
+ return m[key]
+ })
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(existing, qt.IsNil)
+ c.Assert(nestedKey, qt.Equals, "new")
+ c.Assert(owner, qt.DeepEquals, nested)
+}
+
+func TestParamsSetAndMerge(t *testing.T) {
+ c := qt.New(t)
+
+ createParamsPair := func() (Params, Params) {
+ p1 := Params{"a": "av", "c": "cv", "nested": Params{"al2": "al2v", "cl2": "cl2v"}}
+ p2 := Params{"b": "bv", "a": "abv", "nested": Params{"bl2": "bl2v", "al2": "al2bv"}, mergeStrategyKey: ParamsMergeStrategyDeep}
+ return p1, p2
+ }
+
+ p1, p2 := createParamsPair()
+
+ p1.Set(p2)
+
+ c.Assert(p1, qt.DeepEquals, Params{
+ "a": "abv",
+ "c": "cv",
+ "nested": Params{
+ "al2": "al2bv",
+ "cl2": "cl2v",
+ "bl2": "bl2v",
+ },
+ "b": "bv",
+ mergeStrategyKey: ParamsMergeStrategyDeep,
+ })
+
+ p1, p2 = createParamsPair()
+
+ p1.Merge(p2)
+
+ // Default is to do a shallow merge.
+ c.Assert(p1, qt.DeepEquals, Params{
+ "c": "cv",
+ "nested": Params{
+ "al2": "al2v",
+ "cl2": "cl2v",
+ },
+ "b": "bv",
+ "a": "av",
+ })
+
+ p1, p2 = createParamsPair()
+ p1.SetDefaultMergeStrategy(ParamsMergeStrategyNone)
+ p1.Merge(p2)
+ p1.DeleteMergeStrategy()
+
+ c.Assert(p1, qt.DeepEquals, Params{
+ "a": "av",
+ "c": "cv",
+ "nested": Params{
+ "al2": "al2v",
+ "cl2": "cl2v",
+ },
+ })
+
+ p1, p2 = createParamsPair()
+ p1.SetDefaultMergeStrategy(ParamsMergeStrategyShallow)
+ p1.Merge(p2)
+ p1.DeleteMergeStrategy()
+
+ c.Assert(p1, qt.DeepEquals, Params{
+ "a": "av",
+ "c": "cv",
+ "nested": Params{
+ "al2": "al2v",
+ "cl2": "cl2v",
+ },
+ "b": "bv",
+ })
+
+ p1, p2 = createParamsPair()
+ p1.SetDefaultMergeStrategy(ParamsMergeStrategyDeep)
+ p1.Merge(p2)
+ p1.DeleteMergeStrategy()
+
+ c.Assert(p1, qt.DeepEquals, Params{
+ "nested": Params{
+ "al2": "al2v",
+ "cl2": "cl2v",
+ "bl2": "bl2v",
+ },
+ "b": "bv",
+ "a": "av",
+ "c": "cv",
+ })
+
+}
+
+func TestParamsIsZero(t *testing.T) {
+ c := qt.New(t)
+
+ var nilParams Params
+
+ c.Assert(Params{}.IsZero(), qt.IsTrue)
+ c.Assert(nilParams.IsZero(), qt.IsTrue)
+ c.Assert(Params{"foo": "bar"}.IsZero(), qt.IsFalse)
+ c.Assert(Params{"_merge": "foo", "foo": "bar"}.IsZero(), qt.IsFalse)
+ c.Assert(Params{"_merge": "foo"}.IsZero(), qt.IsTrue)
+}
diff --git a/common/maps/scratch.go b/common/maps/scratch.go
new file mode 100644
index 000000000..d4745d27c
--- /dev/null
+++ b/common/maps/scratch.go
@@ -0,0 +1,172 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+ "reflect"
+ "sort"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/math"
+)
+
+// Scratch is a writable context used for stateful operations in Page/Node rendering.
+type Scratch struct {
+ values map[string]any
+ mu sync.RWMutex
+}
+
+// Scratcher provides a scratching service.
+type Scratcher interface {
+ Scratch() *Scratch
+}
+
+type scratcher struct {
+ s *Scratch
+}
+
+func (s scratcher) Scratch() *Scratch {
+ return s.s
+}
+
+// NewScratcher creates a new Scratcher.
+func NewScratcher() Scratcher {
+ return scratcher{s: NewScratch()}
+}
+
+// Add will, for single values, add (using the + operator) the addend to the existing addend (if found).
+// Supports numeric values and strings.
+//
+// If the first add for a key is an array or slice, then the next value(s) will be appended.
+func (c *Scratch) Add(key string, newAddend any) (string, error) {
+ var newVal any
+ c.mu.RLock()
+ existingAddend, found := c.values[key]
+ c.mu.RUnlock()
+ if found {
+ var err error
+
+ addendV := reflect.TypeOf(existingAddend)
+
+ if addendV.Kind() == reflect.Slice || addendV.Kind() == reflect.Array {
+ newVal, err = collections.Append(existingAddend, newAddend)
+ if err != nil {
+ return "", err
+ }
+ } else {
+ newVal, err = math.DoArithmetic(existingAddend, newAddend, '+')
+ if err != nil {
+ return "", err
+ }
+ }
+ } else {
+ newVal = newAddend
+ }
+ c.mu.Lock()
+ c.values[key] = newVal
+ c.mu.Unlock()
+ return "", nil // have to return something to make it work with the Go templates
+}
+
+// Set stores a value with the given key in the Node context.
+// This value can later be retrieved with Get.
+func (c *Scratch) Set(key string, value any) string {
+ c.mu.Lock()
+ c.values[key] = value
+ c.mu.Unlock()
+ return ""
+}
+
+// Delete deletes the given key.
+func (c *Scratch) Delete(key string) string {
+ c.mu.Lock()
+ delete(c.values, key)
+ c.mu.Unlock()
+ return ""
+}
+
+// Get returns a value previously set by Add or Set.
+func (c *Scratch) Get(key string) any {
+ c.mu.RLock()
+ val := c.values[key]
+ c.mu.RUnlock()
+
+ return val
+}
+
+// Values returns the raw backing map. Note that you should just use
+// this method on the locally scoped Scratch instances you obtain via newScratch, not
+// .Page.Scratch etc., as that will lead to concurrency issues.
+func (c *Scratch) Values() map[string]any {
+ c.mu.RLock()
+ defer c.mu.RUnlock()
+ return c.values
+}
+
+// SetInMap stores a value to a map with the given key in the Node context.
+// This map can later be retrieved with GetSortedMapValues.
+func (c *Scratch) SetInMap(key string, mapKey string, value any) string {
+ c.mu.Lock()
+ _, found := c.values[key]
+ if !found {
+ c.values[key] = make(map[string]any)
+ }
+
+ c.values[key].(map[string]any)[mapKey] = value
+ c.mu.Unlock()
+ return ""
+}
+
+// DeleteInMap deletes a value to a map with the given key in the Node context.
+func (c *Scratch) DeleteInMap(key string, mapKey string) string {
+ c.mu.Lock()
+ _, found := c.values[key]
+ if found {
+ delete(c.values[key].(map[string]any), mapKey)
+ }
+ c.mu.Unlock()
+ return ""
+}
+
+// GetSortedMapValues returns a sorted map previously filled with SetInMap.
+func (c *Scratch) GetSortedMapValues(key string) any {
+ c.mu.RLock()
+
+ if c.values[key] == nil {
+ c.mu.RUnlock()
+ return nil
+ }
+
+ unsortedMap := c.values[key].(map[string]any)
+ c.mu.RUnlock()
+ var keys []string
+ for mapKey := range unsortedMap {
+ keys = append(keys, mapKey)
+ }
+
+ sort.Strings(keys)
+
+ sortedArray := make([]any, len(unsortedMap))
+ for i, mapKey := range keys {
+ sortedArray[i] = unsortedMap[mapKey]
+ }
+
+ return sortedArray
+}
+
+// NewScratch returns a new instance of Scratch.
+func NewScratch() *Scratch {
+ return &Scratch{values: make(map[string]any)}
+}
diff --git a/common/maps/scratch_test.go b/common/maps/scratch_test.go
new file mode 100644
index 000000000..b515adb1d
--- /dev/null
+++ b/common/maps/scratch_test.go
@@ -0,0 +1,221 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+ "reflect"
+ "sync"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestScratchAdd(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.Add("int1", 10)
+ scratch.Add("int1", 20)
+ scratch.Add("int2", 20)
+
+ c.Assert(scratch.Get("int1"), qt.Equals, int64(30))
+ c.Assert(scratch.Get("int2"), qt.Equals, 20)
+
+ scratch.Add("float1", float64(10.5))
+ scratch.Add("float1", float64(20.1))
+
+ c.Assert(scratch.Get("float1"), qt.Equals, float64(30.6))
+
+ scratch.Add("string1", "Hello ")
+ scratch.Add("string1", "big ")
+ scratch.Add("string1", "World!")
+
+ c.Assert(scratch.Get("string1"), qt.Equals, "Hello big World!")
+
+ scratch.Add("scratch", scratch)
+ _, err := scratch.Add("scratch", scratch)
+
+ m := scratch.Values()
+ c.Assert(m, qt.HasLen, 5)
+
+ if err == nil {
+ t.Errorf("Expected error from invalid arithmetic")
+ }
+}
+
+func TestScratchAddSlice(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+
+ _, err := scratch.Add("intSlice", []int{1, 2})
+ c.Assert(err, qt.IsNil)
+ _, err = scratch.Add("intSlice", 3)
+ c.Assert(err, qt.IsNil)
+
+ sl := scratch.Get("intSlice")
+ expected := []int{1, 2, 3}
+
+ if !reflect.DeepEqual(expected, sl) {
+ t.Errorf("Slice difference, go %q expected %q", sl, expected)
+ }
+ _, err = scratch.Add("intSlice", []int{4, 5})
+
+ c.Assert(err, qt.IsNil)
+
+ sl = scratch.Get("intSlice")
+ expected = []int{1, 2, 3, 4, 5}
+
+ if !reflect.DeepEqual(expected, sl) {
+ t.Errorf("Slice difference, go %q expected %q", sl, expected)
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/5275
+func TestScratchAddTypedSliceToInterfaceSlice(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.Set("slice", []any{})
+
+ _, err := scratch.Add("slice", []int{1, 2})
+ c.Assert(err, qt.IsNil)
+ c.Assert(scratch.Get("slice"), qt.DeepEquals, []int{1, 2})
+}
+
+// https://github.com/gohugoio/hugo/issues/5361
+func TestScratchAddDifferentTypedSliceToInterfaceSlice(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.Set("slice", []string{"foo"})
+
+ _, err := scratch.Add("slice", []int{1, 2})
+ c.Assert(err, qt.IsNil)
+ c.Assert(scratch.Get("slice"), qt.DeepEquals, []any{"foo", 1, 2})
+}
+
+func TestScratchSet(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.Set("key", "val")
+ c.Assert(scratch.Get("key"), qt.Equals, "val")
+}
+
+func TestScratchDelete(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.Set("key", "val")
+ scratch.Delete("key")
+ scratch.Add("key", "Lucy Parsons")
+ c.Assert(scratch.Get("key"), qt.Equals, "Lucy Parsons")
+}
+
+// Issue #2005
+func TestScratchInParallel(t *testing.T) {
+ var wg sync.WaitGroup
+ scratch := NewScratch()
+
+ key := "counter"
+ scratch.Set(key, int64(1))
+ for i := 1; i <= 10; i++ {
+ wg.Add(1)
+ go func(j int) {
+ for k := 0; k < 10; k++ {
+ newVal := int64(k + j)
+
+ _, err := scratch.Add(key, newVal)
+ if err != nil {
+ t.Errorf("Got err %s", err)
+ }
+
+ scratch.Set(key, newVal)
+
+ val := scratch.Get(key)
+
+ if counter, ok := val.(int64); ok {
+ if counter < 1 {
+ t.Errorf("Got %d", counter)
+ }
+ } else {
+ t.Errorf("Got %T", val)
+ }
+ }
+ wg.Done()
+ }(i)
+ }
+ wg.Wait()
+}
+
+func TestScratchGet(t *testing.T) {
+ t.Parallel()
+ scratch := NewScratch()
+ nothing := scratch.Get("nothing")
+ if nothing != nil {
+ t.Errorf("Should not return anything, but got %v", nothing)
+ }
+}
+
+func TestScratchSetInMap(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.SetInMap("key", "lux", "Lux")
+ scratch.SetInMap("key", "abc", "Abc")
+ scratch.SetInMap("key", "zyx", "Zyx")
+ scratch.SetInMap("key", "abc", "Abc (updated)")
+ scratch.SetInMap("key", "def", "Def")
+ c.Assert(scratch.GetSortedMapValues("key"), qt.DeepEquals, []any{0: "Abc (updated)", 1: "Def", 2: "Lux", 3: "Zyx"})
+}
+
+func TestScratchDeleteInMap(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ scratch := NewScratch()
+ scratch.SetInMap("key", "lux", "Lux")
+ scratch.SetInMap("key", "abc", "Abc")
+ scratch.SetInMap("key", "zyx", "Zyx")
+ scratch.DeleteInMap("key", "abc")
+ scratch.SetInMap("key", "def", "Def")
+ scratch.DeleteInMap("key", "lmn") // Do nothing
+ c.Assert(scratch.GetSortedMapValues("key"), qt.DeepEquals, []any{0: "Def", 1: "Lux", 2: "Zyx"})
+}
+
+func TestScratchGetSortedMapValues(t *testing.T) {
+ t.Parallel()
+ scratch := NewScratch()
+ nothing := scratch.GetSortedMapValues("nothing")
+ if nothing != nil {
+ t.Errorf("Should not return anything, but got %v", nothing)
+ }
+}
+
+func BenchmarkScratchGet(b *testing.B) {
+ scratch := NewScratch()
+ scratch.Add("A", 1)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ scratch.Get("A")
+ }
+}
diff --git a/common/math/math.go b/common/math/math.go
new file mode 100644
index 000000000..d4e2c1148
--- /dev/null
+++ b/common/math/math.go
@@ -0,0 +1,135 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package math
+
+import (
+ "errors"
+ "reflect"
+)
+
+// DoArithmetic performs arithmetic operations (+,-,*,/) using reflection to
+// determine the type of the two terms.
+func DoArithmetic(a, b any, op rune) (any, error) {
+ av := reflect.ValueOf(a)
+ bv := reflect.ValueOf(b)
+ var ai, bi int64
+ var af, bf float64
+ var au, bu uint64
+ switch av.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ ai = av.Int()
+ switch bv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ bi = bv.Int()
+ case reflect.Float32, reflect.Float64:
+ af = float64(ai) // may overflow
+ ai = 0
+ bf = bv.Float()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ bu = bv.Uint()
+ if ai >= 0 {
+ au = uint64(ai)
+ ai = 0
+ } else {
+ bi = int64(bu) // may overflow
+ bu = 0
+ }
+ default:
+ return nil, errors.New("can't apply the operator to the values")
+ }
+ case reflect.Float32, reflect.Float64:
+ af = av.Float()
+ switch bv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ bf = float64(bv.Int()) // may overflow
+ case reflect.Float32, reflect.Float64:
+ bf = bv.Float()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ bf = float64(bv.Uint()) // may overflow
+ default:
+ return nil, errors.New("can't apply the operator to the values")
+ }
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ au = av.Uint()
+ switch bv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ bi = bv.Int()
+ if bi >= 0 {
+ bu = uint64(bi)
+ bi = 0
+ } else {
+ ai = int64(au) // may overflow
+ au = 0
+ }
+ case reflect.Float32, reflect.Float64:
+ af = float64(au) // may overflow
+ au = 0
+ bf = bv.Float()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ bu = bv.Uint()
+ default:
+ return nil, errors.New("can't apply the operator to the values")
+ }
+ case reflect.String:
+ as := av.String()
+ if bv.Kind() == reflect.String && op == '+' {
+ bs := bv.String()
+ return as + bs, nil
+ }
+ return nil, errors.New("can't apply the operator to the values")
+ default:
+ return nil, errors.New("can't apply the operator to the values")
+ }
+
+ switch op {
+ case '+':
+ if ai != 0 || bi != 0 {
+ return ai + bi, nil
+ } else if af != 0 || bf != 0 {
+ return af + bf, nil
+ } else if au != 0 || bu != 0 {
+ return au + bu, nil
+ }
+ return 0, nil
+ case '-':
+ if ai != 0 || bi != 0 {
+ return ai - bi, nil
+ } else if af != 0 || bf != 0 {
+ return af - bf, nil
+ } else if au != 0 || bu != 0 {
+ return au - bu, nil
+ }
+ return 0, nil
+ case '*':
+ if ai != 0 || bi != 0 {
+ return ai * bi, nil
+ } else if af != 0 || bf != 0 {
+ return af * bf, nil
+ } else if au != 0 || bu != 0 {
+ return au * bu, nil
+ }
+ return 0, nil
+ case '/':
+ if bi != 0 {
+ return ai / bi, nil
+ } else if bf != 0 {
+ return af / bf, nil
+ } else if bu != 0 {
+ return au / bu, nil
+ }
+ return nil, errors.New("can't divide the value by 0")
+ default:
+ return nil, errors.New("there is no such an operation")
+ }
+}
diff --git a/common/math/math_test.go b/common/math/math_test.go
new file mode 100644
index 000000000..89e391ce0
--- /dev/null
+++ b/common/math/math_test.go
@@ -0,0 +1,106 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package math
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDoArithmetic(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ a any
+ b any
+ op rune
+ expect any
+ }{
+ {3, 2, '+', int64(5)},
+ {3, 2, '-', int64(1)},
+ {3, 2, '*', int64(6)},
+ {3, 2, '/', int64(1)},
+ {3.0, 2, '+', float64(5)},
+ {3.0, 2, '-', float64(1)},
+ {3.0, 2, '*', float64(6)},
+ {3.0, 2, '/', float64(1.5)},
+ {3, 2.0, '+', float64(5)},
+ {3, 2.0, '-', float64(1)},
+ {3, 2.0, '*', float64(6)},
+ {3, 2.0, '/', float64(1.5)},
+ {3.0, 2.0, '+', float64(5)},
+ {3.0, 2.0, '-', float64(1)},
+ {3.0, 2.0, '*', float64(6)},
+ {3.0, 2.0, '/', float64(1.5)},
+ {uint(3), uint(2), '+', uint64(5)},
+ {uint(3), uint(2), '-', uint64(1)},
+ {uint(3), uint(2), '*', uint64(6)},
+ {uint(3), uint(2), '/', uint64(1)},
+ {uint(3), 2, '+', uint64(5)},
+ {uint(3), 2, '-', uint64(1)},
+ {uint(3), 2, '*', uint64(6)},
+ {uint(3), 2, '/', uint64(1)},
+ {3, uint(2), '+', uint64(5)},
+ {3, uint(2), '-', uint64(1)},
+ {3, uint(2), '*', uint64(6)},
+ {3, uint(2), '/', uint64(1)},
+ {uint(3), -2, '+', int64(1)},
+ {uint(3), -2, '-', int64(5)},
+ {uint(3), -2, '*', int64(-6)},
+ {uint(3), -2, '/', int64(-1)},
+ {-3, uint(2), '+', int64(-1)},
+ {-3, uint(2), '-', int64(-5)},
+ {-3, uint(2), '*', int64(-6)},
+ {-3, uint(2), '/', int64(-1)},
+ {uint(3), 2.0, '+', float64(5)},
+ {uint(3), 2.0, '-', float64(1)},
+ {uint(3), 2.0, '*', float64(6)},
+ {uint(3), 2.0, '/', float64(1.5)},
+ {3.0, uint(2), '+', float64(5)},
+ {3.0, uint(2), '-', float64(1)},
+ {3.0, uint(2), '*', float64(6)},
+ {3.0, uint(2), '/', float64(1.5)},
+ {0, 0, '+', 0},
+ {0, 0, '-', 0},
+ {0, 0, '*', 0},
+ {"foo", "bar", '+', "foobar"},
+ {3, 0, '/', false},
+ {3.0, 0, '/', false},
+ {3, 0.0, '/', false},
+ {uint(3), uint(0), '/', false},
+ {3, uint(0), '/', false},
+ {-3, uint(0), '/', false},
+ {uint(3), 0, '/', false},
+ {3.0, uint(0), '/', false},
+ {uint(3), 0.0, '/', false},
+ {3, "foo", '+', false},
+ {3.0, "foo", '+', false},
+ {uint(3), "foo", '+', false},
+ {"foo", 3, '+', false},
+ {"foo", "bar", '-', false},
+ {3, 2, '%', false},
+ } {
+ result, err := DoArithmetic(test.a, test.b, test.op)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(test.expect, qt.Equals, result)
+ }
+}
diff --git a/common/para/para.go b/common/para/para.go
new file mode 100644
index 000000000..69bfc205b
--- /dev/null
+++ b/common/para/para.go
@@ -0,0 +1,73 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package para implements parallel execution helpers.
+package para
+
+import (
+ "context"
+
+ "golang.org/x/sync/errgroup"
+)
+
+// Workers configures a task executor with the maximum number of tasks to be executed in parallel.
+type Workers struct {
+ sem chan struct{}
+}
+
+// Runner wraps the lifecycle methods of a new task set.
+//
+// Run will block until a worker is available or the context is cancelled,
+// and then run the given func in a new goroutine.
+// Wait will wait for all the running goroutines to finish.
+type Runner interface {
+ Run(func() error)
+ Wait() error
+}
+
+type errGroupRunner struct {
+ *errgroup.Group
+ w *Workers
+ ctx context.Context
+}
+
+func (g *errGroupRunner) Run(fn func() error) {
+ select {
+ case g.w.sem <- struct{}{}:
+ case <-g.ctx.Done():
+ return
+ }
+
+ g.Go(func() error {
+ err := fn()
+ <-g.w.sem
+ return err
+ })
+}
+
+// New creates a new Workers with the given number of workers.
+func New(numWorkers int) *Workers {
+ return &Workers{
+ sem: make(chan struct{}, numWorkers),
+ }
+}
+
+// Start starts a new Runner.
+func (w *Workers) Start(ctx context.Context) (Runner, context.Context) {
+ g, ctx := errgroup.WithContext(ctx)
+ return &errGroupRunner{
+ Group: g,
+ ctx: ctx,
+ w: w,
+ }, ctx
+}
diff --git a/common/para/para_test.go b/common/para/para_test.go
new file mode 100644
index 000000000..646b7b36b
--- /dev/null
+++ b/common/para/para_test.go
@@ -0,0 +1,95 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package para
+
+import (
+ "context"
+ "runtime"
+ "sort"
+ "sync"
+ "sync/atomic"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPara(t *testing.T) {
+ if runtime.NumCPU() < 4 {
+ t.Skipf("skip para test, CPU count is %d", runtime.NumCPU())
+ }
+
+ if !htesting.IsCI() {
+ t.Skip("skip para test when not running on CI")
+ }
+
+ c := qt.New(t)
+
+ c.Run("Order", func(c *qt.C) {
+ n := 500
+ ints := make([]int, n)
+ for i := 0; i < n; i++ {
+ ints[i] = i
+ }
+
+ p := New(4)
+ r, _ := p.Start(context.Background())
+
+ var result []int
+ var mu sync.Mutex
+ for i := 0; i < n; i++ {
+ i := i
+ r.Run(func() error {
+ mu.Lock()
+ defer mu.Unlock()
+ result = append(result, i)
+ return nil
+ })
+ }
+
+ c.Assert(r.Wait(), qt.IsNil)
+ c.Assert(result, qt.HasLen, len(ints))
+ c.Assert(sort.IntsAreSorted(result), qt.Equals, false, qt.Commentf("Para does not seem to be parallel"))
+ sort.Ints(result)
+ c.Assert(result, qt.DeepEquals, ints)
+ })
+
+ c.Run("Time", func(c *qt.C) {
+ const n = 100
+
+ p := New(5)
+ r, _ := p.Start(context.Background())
+
+ start := time.Now()
+
+ var counter int64
+
+ for i := 0; i < n; i++ {
+ r.Run(func() error {
+ atomic.AddInt64(&counter, 1)
+ time.Sleep(1 * time.Millisecond)
+ return nil
+ })
+ }
+
+ c.Assert(r.Wait(), qt.IsNil)
+ c.Assert(counter, qt.Equals, int64(n))
+
+ since := time.Since(start)
+ limit := n / 2 * time.Millisecond
+ c.Assert(since < limit, qt.Equals, true, qt.Commentf("%s >= %s", since, limit))
+ })
+}
diff --git a/common/paths/path.go b/common/paths/path.go
new file mode 100644
index 000000000..3a7f3e790
--- /dev/null
+++ b/common/paths/path.go
@@ -0,0 +1,265 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "errors"
+ "fmt"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// FilePathSeparator as defined by os.Separator.
+const FilePathSeparator = string(filepath.Separator)
+
+// filepathPathBridge is a bridge for common functionality in filepath vs path
+type filepathPathBridge interface {
+ Base(in string) string
+ Clean(in string) string
+ Dir(in string) string
+ Ext(in string) string
+ Join(elem ...string) string
+ Separator() string
+}
+
+type filepathBridge struct{}
+
+func (filepathBridge) Base(in string) string {
+ return filepath.Base(in)
+}
+
+func (filepathBridge) Clean(in string) string {
+ return filepath.Clean(in)
+}
+
+func (filepathBridge) Dir(in string) string {
+ return filepath.Dir(in)
+}
+
+func (filepathBridge) Ext(in string) string {
+ return filepath.Ext(in)
+}
+
+func (filepathBridge) Join(elem ...string) string {
+ return filepath.Join(elem...)
+}
+
+func (filepathBridge) Separator() string {
+ return FilePathSeparator
+}
+
+var fpb filepathBridge
+
+// AbsPathify creates an absolute path if given a working dir and a relative path.
+// If already absolute, the path is just cleaned.
+func AbsPathify(workingDir, inPath string) string {
+ if filepath.IsAbs(inPath) {
+ return filepath.Clean(inPath)
+ }
+ return filepath.Join(workingDir, inPath)
+}
+
+// MakeTitle converts the path given to a suitable title, trimming whitespace
+// and replacing hyphens with whitespace.
+func MakeTitle(inpath string) string {
+ return strings.Replace(strings.TrimSpace(inpath), "-", " ", -1)
+}
+
+// ReplaceExtension takes a path and an extension, strips the old extension
+// and returns the path with the new extension.
+func ReplaceExtension(path string, newExt string) string {
+ f, _ := fileAndExt(path, fpb)
+ return f + "." + newExt
+}
+
+func makePathRelative(inPath string, possibleDirectories ...string) (string, error) {
+ for _, currentPath := range possibleDirectories {
+ if strings.HasPrefix(inPath, currentPath) {
+ return strings.TrimPrefix(inPath, currentPath), nil
+ }
+ }
+ return inPath, errors.New("can't extract relative path, unknown prefix")
+}
+
+// Should be good enough for Hugo.
+var isFileRe = regexp.MustCompile(`.*\..{1,6}$`)
+
+// GetDottedRelativePath expects a relative path starting after the content directory.
+// It returns a relative path with dots ("..") navigating up the path structure.
+func GetDottedRelativePath(inPath string) string {
+ inPath = filepath.Clean(filepath.FromSlash(inPath))
+
+ if inPath == "." {
+ return "./"
+ }
+
+ if !isFileRe.MatchString(inPath) && !strings.HasSuffix(inPath, FilePathSeparator) {
+ inPath += FilePathSeparator
+ }
+
+ if !strings.HasPrefix(inPath, FilePathSeparator) {
+ inPath = FilePathSeparator + inPath
+ }
+
+ dir, _ := filepath.Split(inPath)
+
+ sectionCount := strings.Count(dir, FilePathSeparator)
+
+ if sectionCount == 0 || dir == FilePathSeparator {
+ return "./"
+ }
+
+ var dottedPath string
+
+ for i := 1; i < sectionCount; i++ {
+ dottedPath += "../"
+ }
+
+ return dottedPath
+}
+
+// ExtNoDelimiter takes a path and returns the extension, excluding the delimiter, i.e. "md".
+func ExtNoDelimiter(in string) string {
+ return strings.TrimPrefix(Ext(in), ".")
+}
+
+// Ext takes a path and returns the extension, including the delimiter, i.e. ".md".
+func Ext(in string) string {
+ _, ext := fileAndExt(in, fpb)
+ return ext
+}
+
+// PathAndExt is the same as FileAndExt, but it uses the path package.
+func PathAndExt(in string) (string, string) {
+ return fileAndExt(in, pb)
+}
+
+// FileAndExt takes a path and returns the file and extension separated,
+// the extension including the delimiter, i.e. ".md".
+func FileAndExt(in string) (string, string) {
+ return fileAndExt(in, fpb)
+}
+
+// FileAndExtNoDelimiter takes a path and returns the file and extension separated,
+// the extension excluding the delimiter, e.g "md".
+func FileAndExtNoDelimiter(in string) (string, string) {
+ file, ext := fileAndExt(in, fpb)
+ return file, strings.TrimPrefix(ext, ".")
+}
+
+// Filename takes a file path, strips out the extension,
+// and returns the name of the file.
+func Filename(in string) (name string) {
+ name, _ = fileAndExt(in, fpb)
+ return
+}
+
+// PathNoExt takes a path, strips out the extension,
+// and returns the name of the file.
+func PathNoExt(in string) string {
+ return strings.TrimSuffix(in, path.Ext(in))
+}
+
+// fileAndExt returns the filename and any extension of a file path as
+// two separate strings.
+//
+// If the path, in, contains a directory name ending in a slash,
+// then both name and ext will be empty strings.
+//
+// If the path, in, is either the current directory, the parent
+// directory or the root directory, or an empty string,
+// then both name and ext will be empty strings.
+//
+// If the path, in, represents the path of a file without an extension,
+// then name will be the name of the file and ext will be an empty string.
+//
+// If the path, in, represents a filename with an extension,
+// then name will be the filename minus any extension - including the dot
+// and ext will contain the extension - including the dot.
+func fileAndExt(in string, b filepathPathBridge) (name string, ext string) {
+ ext = b.Ext(in)
+ base := b.Base(in)
+
+ return extractFilename(in, ext, base, b.Separator()), ext
+}
+
+func extractFilename(in, ext, base, pathSeparator string) (name string) {
+ // No file name cases. These are defined as:
+ // 1. any "in" path that ends in a pathSeparator
+	// 2. any "base" consisting of just a pathSeparator
+ // 3. any "base" consisting of just an empty string
+ // 4. any "base" consisting of just the current directory i.e. "."
+ // 5. any "base" consisting of just the parent directory i.e. ".."
+ if (strings.LastIndex(in, pathSeparator) == len(in)-1) || base == "" || base == "." || base == ".." || base == pathSeparator {
+ name = "" // there is NO filename
+ } else if ext != "" { // there was an Extension
+ // return the filename minus the extension (and the ".")
+ name = base[:strings.LastIndex(base, ".")]
+ } else {
+		// no extension case so just return base, which will
+		// be the filename
+ name = base
+ }
+ return
+}
+
+// GetRelativePath returns the relative path of a given path.
+func GetRelativePath(path, base string) (final string, err error) {
+ if filepath.IsAbs(path) && base == "" {
+ return "", errors.New("source: missing base directory")
+ }
+ name := filepath.Clean(path)
+ base = filepath.Clean(base)
+
+ name, err = filepath.Rel(base, name)
+ if err != nil {
+ return "", err
+ }
+
+ if strings.HasSuffix(filepath.FromSlash(path), FilePathSeparator) && !strings.HasSuffix(name, FilePathSeparator) {
+ name += FilePathSeparator
+ }
+ return name, nil
+}
+
+func prettifyPath(in string, b filepathPathBridge) string {
+ if filepath.Ext(in) == "" {
+ // /section/name/ -> /section/name/index.html
+ if len(in) < 2 {
+ return b.Separator()
+ }
+ return b.Join(in, "index.html")
+ }
+ name, ext := fileAndExt(in, b)
+ if name == "index" {
+ // /section/name/index.html -> /section/name/index.html
+ return b.Clean(in)
+ }
+ // /section/name.html -> /section/name/index.html
+ return b.Join(b.Dir(in), name, "index"+ext)
+}
+
+type NamedSlice struct {
+ Name string
+ Slice []string
+}
+
+func (n NamedSlice) String() string {
+ if len(n.Slice) == 0 {
+ return n.Name
+ }
+ return fmt.Sprintf("%s%s{%s}", n.Name, FilePathSeparator, strings.Join(n.Slice, ","))
+}
diff --git a/common/paths/path_test.go b/common/paths/path_test.go
new file mode 100644
index 000000000..2400f16ab
--- /dev/null
+++ b/common/paths/path_test.go
@@ -0,0 +1,228 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestGetRelativePath(t *testing.T) {
+ tests := []struct {
+ path string
+ base string
+ expect any
+ }{
+ {filepath.FromSlash("/a/b"), filepath.FromSlash("/a"), filepath.FromSlash("b")},
+ {filepath.FromSlash("/a/b/c/"), filepath.FromSlash("/a"), filepath.FromSlash("b/c/")},
+ {filepath.FromSlash("/c"), filepath.FromSlash("/a/b"), filepath.FromSlash("../../c")},
+ {filepath.FromSlash("/c"), "", false},
+ }
+ for i, this := range tests {
+ // ultimately a fancy wrapper around filepath.Rel
+ result, err := GetRelativePath(this.path, this.base)
+
+ if b, ok := this.expect.(bool); ok && !b {
+ if err == nil {
+ t.Errorf("[%d] GetRelativePath didn't return an expected error", i)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] GetRelativePath failed: %s", i, err)
+ continue
+ }
+ if result != this.expect {
+ t.Errorf("[%d] GetRelativePath got %v but expected %v", i, result, this.expect)
+ }
+ }
+
+ }
+}
+
+func TestMakePathRelative(t *testing.T) {
+ type test struct {
+ inPath, path1, path2, output string
+ }
+
+ data := []test{
+ {"/abc/bcd/ab.css", "/abc/bcd", "/bbc/bcd", "/ab.css"},
+ {"/abc/bcd/ab.css", "/abcd/bcd", "/abc/bcd", "/ab.css"},
+ }
+
+ for i, d := range data {
+ output, _ := makePathRelative(d.inPath, d.path1, d.path2)
+ if d.output != output {
+ t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
+ }
+ }
+ _, error := makePathRelative("a/b/c.ss", "/a/c", "/d/c", "/e/f")
+
+ if error == nil {
+ t.Errorf("Test failed, expected error")
+ }
+}
+
+func TestGetDottedRelativePath(t *testing.T) {
+ // on Windows this will receive both kinds, both country and western ...
+ for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
+ doTestGetDottedRelativePath(f, t)
+ }
+}
+
+func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
+ type test struct {
+ input, expected string
+ }
+ data := []test{
+ {"", "./"},
+ {urlFixer("/"), "./"},
+ {urlFixer("post"), "../"},
+ {urlFixer("/post"), "../"},
+ {urlFixer("post/"), "../"},
+ {urlFixer("tags/foo.html"), "../"},
+ {urlFixer("/tags/foo.html"), "../"},
+ {urlFixer("/post/"), "../"},
+ {urlFixer("////post/////"), "../"},
+ {urlFixer("/foo/bar/index.html"), "../../"},
+ {urlFixer("/foo/bar/foo/"), "../../../"},
+ {urlFixer("/foo/bar/foo"), "../../../"},
+ {urlFixer("foo/bar/foo/"), "../../../"},
+ {urlFixer("foo/bar/foo/bar"), "../../../../"},
+ {"404.html", "./"},
+ {"404.xml", "./"},
+ {"/404.html", "./"},
+ }
+ for i, d := range data {
+ output := GetDottedRelativePath(d.input)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+func TestMakeTitle(t *testing.T) {
+ type test struct {
+ input, expected string
+ }
+ data := []test{
+ {"Make-Title", "Make Title"},
+ {"MakeTitle", "MakeTitle"},
+ {"make_title", "make_title"},
+ }
+ for i, d := range data {
+ output := MakeTitle(d.input)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+// Replace Extension is probably poorly named, but the intent of the
+// function is to accept a path and return only the file name with a
+// new extension. It's intentionally designed to strip out the path
+// and only provide the name. We should probably rename the function to
+// be more explicit at some point.
+func TestReplaceExtension(t *testing.T) {
+ type test struct {
+ input, newext, expected string
+ }
+ data := []test{
+ // These work according to the above definition
+ {"/some/random/path/file.xml", "html", "file.html"},
+ {"/banana.html", "xml", "banana.xml"},
+ {"./banana.html", "xml", "banana.xml"},
+ {"banana/pie/index.html", "xml", "index.xml"},
+ {"../pies/fish/index.html", "xml", "index.xml"},
+ // but these all fail
+ {"filename-without-an-ext", "ext", "filename-without-an-ext.ext"},
+ {"/filename-without-an-ext", "ext", "filename-without-an-ext.ext"},
+ {"/directory/mydir/", "ext", ".ext"},
+ {"mydir/", "ext", ".ext"},
+ }
+
+ for i, d := range data {
+ output := ReplaceExtension(filepath.FromSlash(d.input), d.newext)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+func TestExtNoDelimiter(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(ExtNoDelimiter(filepath.FromSlash("/my/data.json")), qt.Equals, "json")
+}
+
+func TestFilename(t *testing.T) {
+ type test struct {
+ input, expected string
+ }
+ data := []test{
+ {"index.html", "index"},
+ {"./index.html", "index"},
+ {"/index.html", "index"},
+ {"index", "index"},
+ {"/tmp/index.html", "index"},
+ {"./filename-no-ext", "filename-no-ext"},
+ {"/filename-no-ext", "filename-no-ext"},
+ {"filename-no-ext", "filename-no-ext"},
+ {"directory/", ""}, // no filename case??
+ {"directory/.hidden.ext", ".hidden"},
+ {"./directory/../~/banana/gold.fish", "gold"},
+ {"../directory/banana.man", "banana"},
+ {"~/mydir/filename.ext", "filename"},
+ {"./directory//tmp/filename.ext", "filename"},
+ }
+
+ for i, d := range data {
+ output := Filename(filepath.FromSlash(d.input))
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+func TestFileAndExt(t *testing.T) {
+ type test struct {
+ input, expectedFile, expectedExt string
+ }
+ data := []test{
+ {"index.html", "index", ".html"},
+ {"./index.html", "index", ".html"},
+ {"/index.html", "index", ".html"},
+ {"index", "index", ""},
+ {"/tmp/index.html", "index", ".html"},
+ {"./filename-no-ext", "filename-no-ext", ""},
+ {"/filename-no-ext", "filename-no-ext", ""},
+ {"filename-no-ext", "filename-no-ext", ""},
+ {"directory/", "", ""}, // no filename case??
+ {"directory/.hidden.ext", ".hidden", ".ext"},
+ {"./directory/../~/banana/gold.fish", "gold", ".fish"},
+ {"../directory/banana.man", "banana", ".man"},
+ {"~/mydir/filename.ext", "filename", ".ext"},
+ {"./directory//tmp/filename.ext", "filename", ".ext"},
+ }
+
+ for i, d := range data {
+ file, ext := fileAndExt(filepath.FromSlash(d.input), fpb)
+ if d.expectedFile != file {
+ t.Errorf("Test %d failed. Expected filename %q got %q.", i, d.expectedFile, file)
+ }
+ if d.expectedExt != ext {
+ t.Errorf("Test %d failed. Expected extension %q got %q.", i, d.expectedExt, ext)
+ }
+ }
+}
diff --git a/common/paths/url.go b/common/paths/url.go
new file mode 100644
index 000000000..c538d8f2c
--- /dev/null
+++ b/common/paths/url.go
@@ -0,0 +1,181 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "fmt"
+ "net/url"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+type pathBridge struct{}
+
+func (pathBridge) Base(in string) string {
+ return path.Base(in)
+}
+
+func (pathBridge) Clean(in string) string {
+ return path.Clean(in)
+}
+
+func (pathBridge) Dir(in string) string {
+ return path.Dir(in)
+}
+
+func (pathBridge) Ext(in string) string {
+ return path.Ext(in)
+}
+
+func (pathBridge) Join(elem ...string) string {
+ return path.Join(elem...)
+}
+
+func (pathBridge) Separator() string {
+ return "/"
+}
+
+var pb pathBridge
+
+// MakePermalink combines base URL with content path to create full URL paths.
+// Example
+// base: http://spf13.com/
+// path: post/how-i-blog
+// result: http://spf13.com/post/how-i-blog
+func MakePermalink(host, plink string) *url.URL {
+ base, err := url.Parse(host)
+ if err != nil {
+ panic(err)
+ }
+
+ p, err := url.Parse(plink)
+ if err != nil {
+ panic(err)
+ }
+
+ if p.Host != "" {
+ panic(fmt.Errorf("can't make permalink from absolute link %q", plink))
+ }
+
+ base.Path = path.Join(base.Path, p.Path)
+
+ // path.Join will strip off the last /, so put it back if it was there.
+ hadTrailingSlash := (plink == "" && strings.HasSuffix(host, "/")) || strings.HasSuffix(p.Path, "/")
+ if hadTrailingSlash && !strings.HasSuffix(base.Path, "/") {
+ base.Path = base.Path + "/"
+ }
+
+ return base
+}
+
+// AddContextRoot adds the context root to a URL if it's not already set.
+// For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
+// relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set.
+func AddContextRoot(baseURL, relativePath string) string {
+ url, err := url.Parse(baseURL)
+ if err != nil {
+ panic(err)
+ }
+
+ newPath := path.Join(url.Path, relativePath)
+
+ // path strips trailing slash, ignore root path.
+ if newPath != "/" && strings.HasSuffix(relativePath, "/") {
+ newPath += "/"
+ }
+ return newPath
+}
+
+// URLizeAn
+
+// PrettifyURL takes a URL string and returns a semantic, clean URL.
+func PrettifyURL(in string) string {
+ x := PrettifyURLPath(in)
+
+ if path.Base(x) == "index.html" {
+ return path.Dir(x)
+ }
+
+ if in == "" {
+ return "/"
+ }
+
+ return x
+}
+
+// PrettifyURLPath takes a URL path to a content and converts it
+// to enable pretty URLs.
+// /section/name.html becomes /section/name/index.html
+// /section/name/ becomes /section/name/index.html
+// /section/name/index.html becomes /section/name/index.html
+func PrettifyURLPath(in string) string {
+ return prettifyPath(in, pb)
+}
+
+// Uglify does the opposite of PrettifyURLPath().
+// /section/name/index.html becomes /section/name.html
+// /section/name/ becomes /section/name.html
+// /section/name.html becomes /section/name.html
+func Uglify(in string) string {
+ if path.Ext(in) == "" {
+ if len(in) < 2 {
+ return "/"
+ }
+ // /section/name/ -> /section/name.html
+ return path.Clean(in) + ".html"
+ }
+
+ name, ext := fileAndExt(in, pb)
+ if name == "index" {
+ // /section/name/index.html -> /section/name.html
+ d := path.Dir(in)
+ if len(d) > 1 {
+ return d + ext
+ }
+ return in
+ }
+ // /.xml -> /index.xml
+ if name == "" {
+ return path.Dir(in) + "index" + ext
+ }
+ // /section/name.html -> /section/name.html
+ return path.Clean(in)
+}
+
+// UrlToFilename converts the URL s to a filename.
+// If ParseRequestURI fails, the input is just converted to OS specific slashes and returned.
+func UrlToFilename(s string) (string, bool) {
+ u, err := url.ParseRequestURI(s)
+
+ if err != nil {
+ return filepath.FromSlash(s), false
+ }
+
+ p := u.Path
+
+ if p == "" {
+ p, _ = url.QueryUnescape(u.Opaque)
+ return filepath.FromSlash(p), true
+ }
+
+ p = filepath.FromSlash(p)
+
+ if u.Host != "" {
+ // C:\data\file.txt
+ p = strings.ToUpper(u.Host) + ":" + p
+ }
+
+ return p, true
+}
diff --git a/common/paths/url_test.go b/common/paths/url_test.go
new file mode 100644
index 000000000..4e5f73053
--- /dev/null
+++ b/common/paths/url_test.go
@@ -0,0 +1,99 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestMakePermalink(t *testing.T) {
+ type test struct {
+ host, link, output string
+ }
+
+ data := []test{
+ {"http://abc.com/foo", "post/bar", "http://abc.com/foo/post/bar"},
+ {"http://abc.com/foo/", "post/bar", "http://abc.com/foo/post/bar"},
+ {"http://abc.com", "post/bar", "http://abc.com/post/bar"},
+ {"http://abc.com", "bar", "http://abc.com/bar"},
+ {"http://abc.com/foo/bar", "post/bar", "http://abc.com/foo/bar/post/bar"},
+ {"http://abc.com/foo/bar", "post/bar/", "http://abc.com/foo/bar/post/bar/"},
+ }
+
+ for i, d := range data {
+ output := MakePermalink(d.host, d.link).String()
+ if d.output != output {
+ t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
+ }
+ }
+}
+
+func TestAddContextRoot(t *testing.T) {
+ tests := []struct {
+ baseURL string
+ url string
+ expected string
+ }{
+ {"http://example.com/sub/", "/foo", "/sub/foo"},
+ {"http://example.com/sub/", "/foo/index.html", "/sub/foo/index.html"},
+ {"http://example.com/sub1/sub2", "/foo", "/sub1/sub2/foo"},
+ {"http://example.com", "/foo", "/foo"},
+		// cannot guess that the context root is already added in the example below
+ {"http://example.com/sub/", "/sub/foo", "/sub/sub/foo"},
+ {"http://example.com/тря", "/трям/", "/тря/трям/"},
+ {"http://example.com", "/", "/"},
+ {"http://example.com/bar", "//", "/bar/"},
+ }
+
+ for _, test := range tests {
+ output := AddContextRoot(test.baseURL, test.url)
+ if output != test.expected {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
+
+func TestPretty(t *testing.T) {
+ c := qt.New(t)
+ c.Assert("/section/name/index.html", qt.Equals, PrettifyURLPath("/section/name.html"))
+ c.Assert("/section/sub/name/index.html", qt.Equals, PrettifyURLPath("/section/sub/name.html"))
+ c.Assert("/section/name/index.html", qt.Equals, PrettifyURLPath("/section/name/"))
+ c.Assert("/section/name/index.html", qt.Equals, PrettifyURLPath("/section/name/index.html"))
+ c.Assert("/index.html", qt.Equals, PrettifyURLPath("/index.html"))
+ c.Assert("/name/index.xml", qt.Equals, PrettifyURLPath("/name.xml"))
+ c.Assert("/", qt.Equals, PrettifyURLPath("/"))
+ c.Assert("/", qt.Equals, PrettifyURLPath(""))
+ c.Assert("/section/name", qt.Equals, PrettifyURL("/section/name.html"))
+ c.Assert("/section/sub/name", qt.Equals, PrettifyURL("/section/sub/name.html"))
+ c.Assert("/section/name", qt.Equals, PrettifyURL("/section/name/"))
+ c.Assert("/section/name", qt.Equals, PrettifyURL("/section/name/index.html"))
+ c.Assert("/", qt.Equals, PrettifyURL("/index.html"))
+ c.Assert("/name/index.xml", qt.Equals, PrettifyURL("/name.xml"))
+ c.Assert("/", qt.Equals, PrettifyURL("/"))
+ c.Assert("/", qt.Equals, PrettifyURL(""))
+}
+
+func TestUgly(t *testing.T) {
+ c := qt.New(t)
+ c.Assert("/section/name.html", qt.Equals, Uglify("/section/name.html"))
+ c.Assert("/section/sub/name.html", qt.Equals, Uglify("/section/sub/name.html"))
+ c.Assert("/section/name.html", qt.Equals, Uglify("/section/name/"))
+ c.Assert("/section/name.html", qt.Equals, Uglify("/section/name/index.html"))
+ c.Assert("/index.html", qt.Equals, Uglify("/index.html"))
+ c.Assert("/name.xml", qt.Equals, Uglify("/name.xml"))
+ c.Assert("/", qt.Equals, Uglify("/"))
+ c.Assert("/", qt.Equals, Uglify(""))
+}
diff --git a/common/terminal/colors.go b/common/terminal/colors.go
new file mode 100644
index 000000000..c4a78291e
--- /dev/null
+++ b/common/terminal/colors.go
@@ -0,0 +1,79 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package terminal contains helper for the terminal, such as coloring output.
+package terminal
+
+import (
+ "fmt"
+ "os"
+ "runtime"
+ "strings"
+
+ isatty "github.com/mattn/go-isatty"
+)
+
+const (
+ errorColor = "\033[1;31m%s\033[0m"
+ warningColor = "\033[0;33m%s\033[0m"
+ noticeColor = "\033[1;36m%s\033[0m"
+)
+
+// PrintANSIColors returns false if NO_COLOR env variable is set,
+// else IsTerminal(f).
+func PrintANSIColors(f *os.File) bool {
+ if os.Getenv("NO_COLOR") != "" {
+ return false
+ }
+ return IsTerminal(f)
+}
+
+// IsTerminal returns true if the file descriptor is terminal and the TERM
+// environment variable isn't a dumb one.
+func IsTerminal(f *os.File) bool {
+ if runtime.GOOS == "windows" {
+ return false
+ }
+
+ fd := f.Fd()
+ return os.Getenv("TERM") != "dumb" && (isatty.IsTerminal(fd) || isatty.IsCygwinTerminal(fd))
+}
+
+// Notice colorizes the string in a noticeable color.
+func Notice(s string) string {
+ return colorize(s, noticeColor)
+}
+
+// Error colorizes the string in a colour that grabs attention.
+func Error(s string) string {
+ return colorize(s, errorColor)
+}
+
+// Warning colorizes the string in a colour that warns.
+func Warning(s string) string {
+ return colorize(s, warningColor)
+}
+
+// colorize s in color.
+func colorize(s, color string) string {
+ s = fmt.Sprintf(color, doublePercent(s))
+ return singlePercent(s)
+}
+
+func doublePercent(str string) string {
+ return strings.Replace(str, "%", "%%", -1)
+}
+
+func singlePercent(str string) string {
+ return strings.Replace(str, "%%", "%", -1)
+}
diff --git a/common/text/position.go b/common/text/position.go
new file mode 100644
index 000000000..cc1eda354
--- /dev/null
+++ b/common/text/position.go
@@ -0,0 +1,98 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package text
+
+import (
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/terminal"
+)
+
+// Positioner represents a thing that knows its position in a text file or stream,
+// typically an error.
+type Positioner interface {
+ Position() Position
+}
+
+// Position holds a source position in a text file or stream.
+type Position struct {
+ Filename string // filename, if any
+ Offset int // byte offset, starting at 0. It's set to -1 if not provided.
+ LineNumber int // line number, starting at 1
+ ColumnNumber int // column number, starting at 1 (character count per line)
+}
+
+func (pos Position) String() string {
+ if pos.Filename == "" {
+ pos.Filename = "<stream>"
+ }
+ return positionStringFormatfunc(pos)
+}
+
+// IsValid returns true if line number is > 0.
+func (pos Position) IsValid() bool {
+ return pos.LineNumber > 0
+}
+
+var positionStringFormatfunc func(p Position) string
+
+func createPositionStringFormatter(formatStr string) func(p Position) string {
+ if formatStr == "" {
+ formatStr = "\":file::line::col\""
+ }
+
+ identifiers := []string{":file", ":line", ":col"}
+ var identifiersFound []string
+
+ for i := range formatStr {
+ for _, id := range identifiers {
+ if strings.HasPrefix(formatStr[i:], id) {
+ identifiersFound = append(identifiersFound, id)
+ }
+ }
+ }
+
+ replacer := strings.NewReplacer(":file", "%s", ":line", "%d", ":col", "%d")
+ format := replacer.Replace(formatStr)
+
+ f := func(pos Position) string {
+ args := make([]any, len(identifiersFound))
+ for i, id := range identifiersFound {
+ switch id {
+ case ":file":
+ args[i] = pos.Filename
+ case ":line":
+ args[i] = pos.LineNumber
+ case ":col":
+ args[i] = pos.ColumnNumber
+ }
+ }
+
+ msg := fmt.Sprintf(format, args...)
+
+ if terminal.PrintANSIColors(os.Stdout) {
+ return terminal.Notice(msg)
+ }
+
+ return msg
+ }
+
+ return f
+}
+
+func init() {
+ positionStringFormatfunc = createPositionStringFormatter(os.Getenv("HUGO_FILE_LOG_FORMAT"))
+}
diff --git a/common/text/position_test.go b/common/text/position_test.go
new file mode 100644
index 000000000..a1f43c5d4
--- /dev/null
+++ b/common/text/position_test.go
@@ -0,0 +1,32 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package text
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPositionStringFormatter(t *testing.T) {
+ c := qt.New(t)
+
+ pos := Position{Filename: "/my/file.txt", LineNumber: 12, ColumnNumber: 13, Offset: 14}
+
+ c.Assert(createPositionStringFormatter(":file|:col|:line")(pos), qt.Equals, "/my/file.txt|13|12")
+ c.Assert(createPositionStringFormatter(":col|:file|:line")(pos), qt.Equals, "13|/my/file.txt|12")
+ c.Assert(createPositionStringFormatter("好::col")(pos), qt.Equals, "好:13")
+ c.Assert(createPositionStringFormatter("")(pos), qt.Equals, "\"/my/file.txt:12:13\"")
+ c.Assert(pos.String(), qt.Equals, "\"/my/file.txt:12:13\"")
+}
diff --git a/common/text/transform.go b/common/text/transform.go
new file mode 100644
index 000000000..de093af0d
--- /dev/null
+++ b/common/text/transform.go
@@ -0,0 +1,78 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package text
+
+import (
+ "strings"
+ "sync"
+ "unicode"
+
+ "golang.org/x/text/runes"
+ "golang.org/x/text/transform"
+ "golang.org/x/text/unicode/norm"
+)
+
+var accentTransformerPool = &sync.Pool{
+ New: func() any {
+ return transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
+ },
+}
+
+// RemoveAccents removes all accents from b.
+func RemoveAccents(b []byte) []byte {
+ t := accentTransformerPool.Get().(transform.Transformer)
+ b, _, _ = transform.Bytes(t, b)
+ t.Reset()
+ accentTransformerPool.Put(t)
+ return b
+}
+
+// RemoveAccentsString removes all accents from s.
+func RemoveAccentsString(s string) string {
+ t := accentTransformerPool.Get().(transform.Transformer)
+ s, _, _ = transform.String(t, s)
+ t.Reset()
+ accentTransformerPool.Put(t)
+ return s
+}
+
+// Chomp removes trailing newline characters from s.
+func Chomp(s string) string {
+ return strings.TrimRightFunc(s, func(r rune) bool {
+ return r == '\n' || r == '\r'
+ })
+}
+
+// Puts adds a trailing \n if none found.
+func Puts(s string) string {
+ if s == "" || s[len(s)-1] == '\n' {
+ return s
+ }
+ return s + "\n"
+}
+
+// VisitLinesAfter calls the given function for each line, including newlines, in the given string.
+func VisitLinesAfter(s string, fn func(line string)) {
+ high := strings.IndexRune(s, '\n')
+ for high != -1 {
+ fn(s[:high+1])
+ s = s[high+1:]
+
+ high = strings.IndexRune(s, '\n')
+ }
+
+ if s != "" {
+ fn(s)
+ }
+}
diff --git a/common/text/transform_test.go b/common/text/transform_test.go
new file mode 100644
index 000000000..41447715f
--- /dev/null
+++ b/common/text/transform_test.go
@@ -0,0 +1,76 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package text
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestRemoveAccents(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(string(RemoveAccents([]byte("Resumé"))), qt.Equals, "Resume")
+ c.Assert(string(RemoveAccents([]byte("Hugo Rocks!"))), qt.Equals, "Hugo Rocks!")
+ c.Assert(string(RemoveAccentsString("Resumé")), qt.Equals, "Resume")
+}
+
+func TestChomp(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(Chomp("\nA\n"), qt.Equals, "\nA")
+ c.Assert(Chomp("A\r\n"), qt.Equals, "A")
+}
+
+func TestPuts(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(Puts("A"), qt.Equals, "A\n")
+ c.Assert(Puts("\nA\n"), qt.Equals, "\nA\n")
+ c.Assert(Puts(""), qt.Equals, "")
+}
+
+func TestVisitLinesAfter(t *testing.T) {
+ const lines = `line 1
+line 2
+
+line 3`
+
+ var collected []string
+
+ VisitLinesAfter(lines, func(s string) {
+ collected = append(collected, s)
+ })
+
+ c := qt.New(t)
+
+ c.Assert(collected, qt.DeepEquals, []string{"line 1\n", "line 2\n", "\n", "line 3"})
+
+}
+
+func BenchmarkVisitLinesAfter(b *testing.B) {
+ const lines = `line 1
+ line 2
+
+ line 3`
+
+ for i := 0; i < b.N; i++ {
+ VisitLinesAfter(lines, func(s string) {
+
+ })
+
+ }
+
+}
diff --git a/common/types/convert.go b/common/types/convert.go
new file mode 100644
index 000000000..fbeab5b91
--- /dev/null
+++ b/common/types/convert.go
@@ -0,0 +1,130 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+import (
+ "encoding/json"
+ "fmt"
+ "html/template"
+ "reflect"
+ "time"
+
+ "github.com/spf13/cast"
+)
+
+// ToDuration converts v to time.Duration.
+// See ToDurationE if you need to handle errors.
+func ToDuration(v any) time.Duration {
+ d, _ := ToDurationE(v)
+ return d
+}
+
+// ToDurationE converts v to time.Duration.
+func ToDurationE(v any) (time.Duration, error) {
+ if n := cast.ToInt(v); n > 0 {
+ return time.Duration(n) * time.Millisecond, nil
+ }
+ d, err := time.ParseDuration(cast.ToString(v))
+ if err != nil {
+ return 0, fmt.Errorf("cannot convert %v to time.Duration", v)
+ }
+ return d, nil
+}
+
+// ToStringSlicePreserveString is the same as ToStringSlicePreserveStringE,
+// but it never fails.
+func ToStringSlicePreserveString(v any) []string {
+ vv, _ := ToStringSlicePreserveStringE(v)
+ return vv
+}
+
+// ToStringSlicePreserveStringE converts v to a string slice.
+// If v is a string, it will be wrapped in a string slice.
+func ToStringSlicePreserveStringE(v any) ([]string, error) {
+ if v == nil {
+ return nil, nil
+ }
+ if sds, ok := v.(string); ok {
+ return []string{sds}, nil
+ }
+ result, err := cast.ToStringSliceE(v)
+ if err == nil {
+ return result, nil
+ }
+
+ // Probably []int or similar. Fall back to reflect.
+ vv := reflect.ValueOf(v)
+
+ switch vv.Kind() {
+ case reflect.Slice, reflect.Array:
+ result = make([]string, vv.Len())
+ for i := 0; i < vv.Len(); i++ {
+ s, err := cast.ToStringE(vv.Index(i).Interface())
+ if err != nil {
+ return nil, err
+ }
+ result[i] = s
+ }
+ return result, nil
+ default:
+ return nil, fmt.Errorf("failed to convert %T to a string slice", v)
+ }
+
+}
+
+// TypeToString converts v to a string if it's a valid string type.
+// Note that this will not try to convert numeric values etc.,
+// use ToString for that.
+func TypeToString(v any) (string, bool) {
+ switch s := v.(type) {
+ case string:
+ return s, true
+ case template.HTML:
+ return string(s), true
+ case template.CSS:
+ return string(s), true
+ case template.HTMLAttr:
+ return string(s), true
+ case template.JS:
+ return string(s), true
+ case template.JSStr:
+ return string(s), true
+ case template.URL:
+ return string(s), true
+ case template.Srcset:
+ return string(s), true
+ }
+
+ return "", false
+}
+
+// ToString converts v to a string.
+func ToString(v any) string {
+ s, _ := ToStringE(v)
+ return s
+}
+
+// ToStringE converts v to a string.
+func ToStringE(v any) (string, error) {
+ if s, ok := TypeToString(v); ok {
+ return s, nil
+ }
+
+ switch s := v.(type) {
+ case json.RawMessage:
+ return string(s), nil
+ default:
+ return cast.ToStringE(v)
+ }
+}
diff --git a/common/types/convert_test.go b/common/types/convert_test.go
new file mode 100644
index 000000000..215117441
--- /dev/null
+++ b/common/types/convert_test.go
@@ -0,0 +1,49 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+import (
+ "encoding/json"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestToStringSlicePreserveString(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(ToStringSlicePreserveString("Hugo"), qt.DeepEquals, []string{"Hugo"})
+ c.Assert(ToStringSlicePreserveString(qt.Commentf("Hugo")), qt.DeepEquals, []string{"Hugo"})
+ c.Assert(ToStringSlicePreserveString([]any{"A", "B"}), qt.DeepEquals, []string{"A", "B"})
+ c.Assert(ToStringSlicePreserveString([]int{1, 3}), qt.DeepEquals, []string{"1", "3"})
+ c.Assert(ToStringSlicePreserveString(nil), qt.IsNil)
+}
+
+func TestToString(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(ToString([]byte("Hugo")), qt.Equals, "Hugo")
+ c.Assert(ToString(json.RawMessage("Hugo")), qt.Equals, "Hugo")
+}
+
+func TestToDuration(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(ToDuration("200ms"), qt.Equals, 200*time.Millisecond)
+ c.Assert(ToDuration("200"), qt.Equals, 200*time.Millisecond)
+ c.Assert(ToDuration("4m"), qt.Equals, 4*time.Minute)
+ c.Assert(ToDuration("asdfadf"), qt.Equals, time.Duration(0))
+
+}
diff --git a/common/types/evictingqueue.go b/common/types/evictingqueue.go
new file mode 100644
index 000000000..884762426
--- /dev/null
+++ b/common/types/evictingqueue.go
@@ -0,0 +1,96 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package types contains types shared between packages in Hugo.
+package types
+
+import (
+ "sync"
+)
+
+// EvictingStringQueue is a queue which automatically evicts elements from the head of
+// the queue when attempting to add new elements onto the queue and it is full.
+// This queue orders elements LIFO (last-in-first-out). It throws away duplicates.
+// Note: This queue currently does not contain any remove (poll etc.) methods.
+type EvictingStringQueue struct {
+ size int
+ vals []string
+ set map[string]bool
+ mu sync.Mutex
+}
+
+// NewEvictingStringQueue creates a new queue with the given size.
+func NewEvictingStringQueue(size int) *EvictingStringQueue {
+ return &EvictingStringQueue{size: size, set: make(map[string]bool)}
+}
+
+// Add adds a new string to the tail of the queue if it's not already there.
+func (q *EvictingStringQueue) Add(v string) {
+ q.mu.Lock()
+ if q.set[v] {
+ q.mu.Unlock()
+ return
+ }
+
+ if len(q.set) == q.size {
+ // Full
+ delete(q.set, q.vals[0])
+ q.vals = append(q.vals[:0], q.vals[1:]...)
+ }
+ q.set[v] = true
+ q.vals = append(q.vals, v)
+ q.mu.Unlock()
+}
+
+// Contains returns whether the queue contains v.
+func (q *EvictingStringQueue) Contains(v string) bool {
+ q.mu.Lock()
+ defer q.mu.Unlock()
+ return q.set[v]
+}
+
+// Peek looks at the last element added to the queue.
+func (q *EvictingStringQueue) Peek() string {
+ q.mu.Lock()
+ l := len(q.vals)
+ if l == 0 {
+ q.mu.Unlock()
+ return ""
+ }
+ elem := q.vals[l-1]
+ q.mu.Unlock()
+ return elem
+}
+
+// PeekAll looks at all the elements in the queue, with the newest first.
+func (q *EvictingStringQueue) PeekAll() []string {
+ q.mu.Lock()
+ vals := make([]string, len(q.vals))
+ copy(vals, q.vals)
+ q.mu.Unlock()
+ for i, j := 0, len(vals)-1; i < j; i, j = i+1, j-1 {
+ vals[i], vals[j] = vals[j], vals[i]
+ }
+ return vals
+}
+
+// PeekAllSet returns PeekAll as a set.
+func (q *EvictingStringQueue) PeekAllSet() map[string]bool {
+ all := q.PeekAll()
+ set := make(map[string]bool)
+ for _, v := range all {
+ set[v] = true
+ }
+
+ return set
+}
diff --git a/common/types/evictingqueue_test.go b/common/types/evictingqueue_test.go
new file mode 100644
index 000000000..7489ba88d
--- /dev/null
+++ b/common/types/evictingqueue_test.go
@@ -0,0 +1,74 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+import (
+ "sync"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestEvictingStringQueue(t *testing.T) {
+ c := qt.New(t)
+
+ queue := NewEvictingStringQueue(3)
+
+ c.Assert(queue.Peek(), qt.Equals, "")
+ queue.Add("a")
+ queue.Add("b")
+ queue.Add("a")
+ c.Assert(queue.Peek(), qt.Equals, "b")
+ queue.Add("b")
+ c.Assert(queue.Peek(), qt.Equals, "b")
+
+ queue.Add("a")
+ queue.Add("b")
+
+ c.Assert(queue.Contains("a"), qt.Equals, true)
+ c.Assert(queue.Contains("foo"), qt.Equals, false)
+
+ c.Assert(queue.PeekAll(), qt.DeepEquals, []string{"b", "a"})
+ c.Assert(queue.Peek(), qt.Equals, "b")
+ queue.Add("c")
+ queue.Add("d")
+ // Overflowed, a should now be removed.
+ c.Assert(queue.PeekAll(), qt.DeepEquals, []string{"d", "c", "b"})
+ c.Assert(len(queue.PeekAllSet()), qt.Equals, 3)
+ c.Assert(queue.PeekAllSet()["c"], qt.Equals, true)
+}
+
+func TestEvictingStringQueueConcurrent(t *testing.T) {
+ var wg sync.WaitGroup
+ val := "someval"
+
+ queue := NewEvictingStringQueue(3)
+
+ for j := 0; j < 100; j++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ queue.Add(val)
+ v := queue.Peek()
+ if v != val {
+ t.Error("wrong val")
+ }
+ vals := queue.PeekAll()
+ if len(vals) != 1 || vals[0] != val {
+ t.Error("wrong val")
+ }
+ }()
+ }
+ wg.Wait()
+}
diff --git a/common/types/hstring/stringtypes.go b/common/types/hstring/stringtypes.go
new file mode 100644
index 000000000..601218e0e
--- /dev/null
+++ b/common/types/hstring/stringtypes.go
@@ -0,0 +1,20 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hstring
+
+type RenderedString string
+
+func (s RenderedString) String() string {
+ return string(s)
+}
diff --git a/common/types/hstring/stringtypes_test.go b/common/types/hstring/stringtypes_test.go
new file mode 100644
index 000000000..8fa1c9760
--- /dev/null
+++ b/common/types/hstring/stringtypes_test.go
@@ -0,0 +1,30 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hstring
+
+import (
+ "html/template"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/cast"
+)
+
+func TestRenderedString(t *testing.T) {
+ c := qt.New(t)
+
+ // Validate that it will behave like a string in Hugo settings.
+ c.Assert(cast.ToString(RenderedString("Hugo")), qt.Equals, "Hugo")
+ c.Assert(template.HTML(RenderedString("Hugo")), qt.Equals, template.HTML("Hugo"))
+}
diff --git a/common/types/types.go b/common/types/types.go
new file mode 100644
index 000000000..cee497bf4
--- /dev/null
+++ b/common/types/types.go
@@ -0,0 +1,92 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package types contains types shared between packages in Hugo.
+package types
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/spf13/cast"
+)
+
+// RLocker represents the read locks in sync.RWMutex.
+type RLocker interface {
+ RLock()
+ RUnlock()
+}
+
+// KeyValue is an interface{} tuple.
+type KeyValue struct {
+ Key any
+ Value any
+}
+
+// KeyValueStr is a string tuple.
+type KeyValueStr struct {
+ Key string
+ Value string
+}
+
+// KeyValues holds a key and a slice of values.
+type KeyValues struct {
+ Key any
+ Values []any
+}
+
+// KeyString returns the key as a string, an empty string if conversion fails.
+func (k KeyValues) KeyString() string {
+ return cast.ToString(k.Key)
+}
+
+func (k KeyValues) String() string {
+ return fmt.Sprintf("%v: %v", k.Key, k.Values)
+}
+
+// NewKeyValuesStrings takes a given key and slice of values and returns a new
+// KeyValues struct.
+func NewKeyValuesStrings(key string, values ...string) KeyValues {
+ iv := make([]any, len(values))
+ for i := 0; i < len(values); i++ {
+ iv[i] = values[i]
+ }
+ return KeyValues{Key: key, Values: iv}
+}
+
+// Zeroer, as implemented by time.Time, will be used by the truth template
+// funcs in Hugo (if, with, not, and, or).
+type Zeroer interface {
+ IsZero() bool
+}
+
+// IsNil reports whether v is nil.
+func IsNil(v any) bool {
+ if v == nil {
+ return true
+ }
+
+ value := reflect.ValueOf(v)
+ switch value.Kind() {
+ case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+ return value.IsNil()
+ }
+
+ return false
+}
+
+// DevMarker is a marker interface for types that should only be used during
+// development.
+type DevMarker interface {
+ DevOnly()
+}
diff --git a/common/types/types_test.go b/common/types/types_test.go
new file mode 100644
index 000000000..6f13ae834
--- /dev/null
+++ b/common/types/types_test.go
@@ -0,0 +1,29 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package types
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestKeyValues(t *testing.T) {
+ c := qt.New(t)
+
+ kv := NewKeyValuesStrings("key", "a1", "a2")
+
+ c.Assert(kv.KeyString(), qt.Equals, "key")
+ c.Assert(kv.Values, qt.DeepEquals, []any{"a1", "a2"})
+}
diff --git a/common/urls/ref.go b/common/urls/ref.go
new file mode 100644
index 000000000..e5804a279
--- /dev/null
+++ b/common/urls/ref.go
@@ -0,0 +1,22 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package urls
+
+// RefLinker is implemented by those who support reference linking.
+// args must contain a path, but can also point to the target
+// language or output format.
+type RefLinker interface {
+ Ref(args map[string]any) (string, error)
+ RelRef(args map[string]any) (string, error)
+}
diff --git a/compare/compare.go b/compare/compare.go
new file mode 100644
index 000000000..de97690c7
--- /dev/null
+++ b/compare/compare.go
@@ -0,0 +1,38 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package compare
+
+// Eqer can be used to determine if this value is equal to the other.
+// The semantics of equals is that the two value are interchangeable
+// in the Hugo templates.
+type Eqer interface {
+ // Eq returns whether this value is equal to the other.
+ // This is for internal use.
+ Eq(other any) bool
+}
+
+// ProbablyEqer is an equal check that may return false positives, but never
+// a false negative.
+type ProbablyEqer interface {
+ // For internal use.
+ ProbablyEq(other any) bool
+}
+
+// Comparer can be used to compare two values.
+// This will be used when using the le, ge etc. operators in the templates.
+// Compare returns -1 if this value is less than, 0 if equal to, and 1 if
+// greater than the other value.
+type Comparer interface {
+ Compare(other any) int
+}
diff --git a/compare/compare_strings.go b/compare/compare_strings.go
new file mode 100644
index 000000000..1fd954081
--- /dev/null
+++ b/compare/compare_strings.go
@@ -0,0 +1,113 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package compare
+
+import (
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Strings returns an integer comparing two strings lexicographically.
+func Strings(s, t string) int {
+ c := compareFold(s, t)
+
+ if c == 0 {
+ // "B" and "b" would be the same so we need a tiebreaker.
+ return strings.Compare(s, t)
+ }
+
+ return c
+}
+
+// This function is derived from strings.EqualFold in Go's stdlib.
+// https://github.com/golang/go/blob/ad4a58e31501bce5de2aad90a620eaecdc1eecb8/src/strings/strings.go#L893
+func compareFold(s, t string) int {
+ for s != "" && t != "" {
+ var sr, tr rune
+ if s[0] < utf8.RuneSelf {
+ sr, s = rune(s[0]), s[1:]
+ } else {
+ r, size := utf8.DecodeRuneInString(s)
+ sr, s = r, s[size:]
+ }
+ if t[0] < utf8.RuneSelf {
+ tr, t = rune(t[0]), t[1:]
+ } else {
+ r, size := utf8.DecodeRuneInString(t)
+ tr, t = r, t[size:]
+ }
+
+ if tr == sr {
+ continue
+ }
+
+ c := 1
+ if tr < sr {
+ tr, sr = sr, tr
+ c = -c
+ }
+
+ // ASCII only.
+ if tr < utf8.RuneSelf {
+ if sr >= 'A' && sr <= 'Z' {
+ if tr <= 'Z' {
+ // Same case.
+ return -c
+ }
+
+ diff := tr - (sr + 'a' - 'A')
+
+ if diff == 0 {
+ continue
+ }
+
+ if diff < 0 {
+ return c
+ }
+
+ if diff > 0 {
+ return -c
+ }
+ }
+ }
+
+ // Unicode.
+ r := unicode.SimpleFold(sr)
+ for r != sr && r < tr {
+ r = unicode.SimpleFold(r)
+ }
+
+ if r == tr {
+ continue
+ }
+
+ return -c
+ }
+
+ if s == "" && t == "" {
+ return 0
+ }
+
+ if s == "" {
+ return -1
+ }
+
+ return 1
+}
+
+// LessStrings returns whether s is less than t lexicographically.
+func LessStrings(s, t string) bool {
+ return Strings(s, t) < 0
+}
diff --git a/compare/compare_strings_test.go b/compare/compare_strings_test.go
new file mode 100644
index 000000000..a73091fc6
--- /dev/null
+++ b/compare/compare_strings_test.go
@@ -0,0 +1,83 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package compare
+
+import (
+ "sort"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestCompare(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ a string
+ b string
+ }{
+ {"a", "a"},
+ {"A", "a"},
+ {"Ab", "Ac"},
+ {"az", "Za"},
+ {"C", "D"},
+ {"B", "a"},
+ {"C", ""},
+ {"", ""},
+ {"αβδC", "ΑΒΔD"},
+ {"αβδC", "ΑΒΔ"},
+ {"αβδ", "ΑΒΔD"},
+ {"αβδ", "ΑΒΔ"},
+ {"β", "δ"},
+ {"好", strings.ToLower("好")},
+ } {
+
+ expect := strings.Compare(strings.ToLower(test.a), strings.ToLower(test.b))
+ got := compareFold(test.a, test.b)
+
+ c.Assert(got, qt.Equals, expect)
+
+ }
+}
+
+func TestLexicographicSort(t *testing.T) {
+ c := qt.New(t)
+
+ s := []string{"b", "Bz", "ba", "A", "Ba", "ba"}
+
+ sort.Slice(s, func(i, j int) bool {
+ return LessStrings(s[i], s[j])
+ })
+
+ c.Assert(s, qt.DeepEquals, []string{"A", "b", "Ba", "ba", "ba", "Bz"})
+}
+
+func BenchmarkStringSort(b *testing.B) {
+ prototype := []string{"b", "Bz", "zz", "ba", "αβδ αβδ αβδ", "A", "Ba", "ba", "nnnnasdfnnn", "AAgæåz", "αβδC"}
+ b.Run("LessStrings", func(b *testing.B) {
+ ss := make([][]string, b.N)
+ for i := 0; i < b.N; i++ {
+ ss[i] = make([]string, len(prototype))
+ copy(ss[i], prototype)
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ sss := ss[i]
+ sort.Slice(sss, func(i, j int) bool {
+ return LessStrings(sss[i], sss[j])
+ })
+ }
+ })
+
+}
diff --git a/config/commonConfig.go b/config/commonConfig.go
new file mode 100644
index 000000000..efaa589d1
--- /dev/null
+++ b/config/commonConfig.go
@@ -0,0 +1,215 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gobwas/glob"
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/mitchellh/mapstructure"
+ "github.com/spf13/cast"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+var DefaultBuild = Build{
+ UseResourceCacheWhen: "fallback",
+ WriteStats: false,
+}
+
+// Build holds some build related configuration.
+type Build struct {
+ UseResourceCacheWhen string // never, fallback, always. Default is fallback
+
+ // When enabled, will collect and write a hugo_stats.json with some build
+ // related aggregated data (e.g. CSS class names).
+ WriteStats bool
+
+	// Can be used to toggle off writing of the intellisense /assets/jsconfig.js
+ // file.
+ NoJSConfigInAssets bool
+}
+
+func (b Build) UseResourceCache(err error) bool {
+ if b.UseResourceCacheWhen == "never" {
+ return false
+ }
+
+ if b.UseResourceCacheWhen == "fallback" {
+ return err == herrors.ErrFeatureNotAvailable
+ }
+
+ return true
+}
+
+func DecodeBuild(cfg Provider) Build {
+ m := cfg.GetStringMap("build")
+ b := DefaultBuild
+ if m == nil {
+ return b
+ }
+
+ err := mapstructure.WeakDecode(m, &b)
+ if err != nil {
+ return DefaultBuild
+ }
+
+ b.UseResourceCacheWhen = strings.ToLower(b.UseResourceCacheWhen)
+ when := b.UseResourceCacheWhen
+ if when != "never" && when != "always" && when != "fallback" {
+ b.UseResourceCacheWhen = "fallback"
+ }
+
+ return b
+}
+
+// Sitemap configures the sitemap to be generated.
+type Sitemap struct {
+ ChangeFreq string
+ Priority float64
+ Filename string
+}
+
+func DecodeSitemap(prototype Sitemap, input map[string]any) Sitemap {
+ for key, value := range input {
+ switch key {
+ case "changefreq":
+ prototype.ChangeFreq = cast.ToString(value)
+ case "priority":
+ prototype.Priority = cast.ToFloat64(value)
+ case "filename":
+ prototype.Filename = cast.ToString(value)
+ default:
+ jww.WARN.Printf("Unknown Sitemap field: %s\n", key)
+ }
+ }
+
+ return prototype
+}
+
+// Server is the config for the dev server.
+type Server struct {
+ Headers []Headers
+ Redirects []Redirect
+
+ compiledInit sync.Once
+ compiledHeaders []glob.Glob
+ compiledRedirects []glob.Glob
+}
+
+func (s *Server) init() {
+ s.compiledInit.Do(func() {
+ for _, h := range s.Headers {
+ s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For))
+ }
+ for _, r := range s.Redirects {
+ s.compiledRedirects = append(s.compiledRedirects, glob.MustCompile(r.From))
+ }
+ })
+}
+
+func (s *Server) MatchHeaders(pattern string) []types.KeyValueStr {
+ s.init()
+
+ if s.compiledHeaders == nil {
+ return nil
+ }
+
+ var matches []types.KeyValueStr
+
+ for i, g := range s.compiledHeaders {
+ if g.Match(pattern) {
+ h := s.Headers[i]
+ for k, v := range h.Values {
+ matches = append(matches, types.KeyValueStr{Key: k, Value: cast.ToString(v)})
+ }
+ }
+ }
+
+ sort.Slice(matches, func(i, j int) bool {
+ return matches[i].Key < matches[j].Key
+ })
+
+ return matches
+}
+
+func (s *Server) MatchRedirect(pattern string) Redirect {
+ s.init()
+
+ if s.compiledRedirects == nil {
+ return Redirect{}
+ }
+
+ pattern = strings.TrimSuffix(pattern, "index.html")
+
+ for i, g := range s.compiledRedirects {
+ redir := s.Redirects[i]
+
+ // No redirect to self.
+ if redir.To == pattern {
+ return Redirect{}
+ }
+
+ if g.Match(pattern) {
+ return redir
+ }
+ }
+
+ return Redirect{}
+}
+
+type Headers struct {
+ For string
+ Values map[string]any
+}
+
+type Redirect struct {
+ From string
+ To string
+ Status int
+ Force bool
+}
+
+func (r Redirect) IsZero() bool {
+ return r.From == ""
+}
+
+func DecodeServer(cfg Provider) (*Server, error) {
+ m := cfg.GetStringMap("server")
+ s := &Server{}
+ if m == nil {
+ return s, nil
+ }
+
+ _ = mapstructure.WeakDecode(m, s)
+
+ for i, redir := range s.Redirects {
+ // Get it in line with the Hugo server.
+ redir.To = strings.TrimSuffix(redir.To, "index.html")
+ if !strings.HasPrefix(redir.To, "https") && !strings.HasSuffix(redir.To, "/") {
+			// There are some tricky infinite loop situations when dealing
+			// with targets that do not have a trailing slash.
+			// This can certainly be handled better, but not time for that now.
+ return nil, fmt.Errorf("unsupported redirect to value %q in server config; currently this must be either a remote destination or a local folder, e.g. \"/blog/\" or \"/blog/index.html\"", redir.To)
+ }
+ s.Redirects[i] = redir
+ }
+
+ return s, nil
+}
diff --git a/config/commonConfig_test.go b/config/commonConfig_test.go
new file mode 100644
index 000000000..4ff2e8ed5
--- /dev/null
+++ b/config/commonConfig_test.go
@@ -0,0 +1,140 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "errors"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/types"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestBuild(t *testing.T) {
+ c := qt.New(t)
+
+ v := New()
+ v.Set("build", map[string]any{
+ "useResourceCacheWhen": "always",
+ })
+
+ b := DecodeBuild(v)
+
+ c.Assert(b.UseResourceCacheWhen, qt.Equals, "always")
+
+ v.Set("build", map[string]any{
+ "useResourceCacheWhen": "foo",
+ })
+
+ b = DecodeBuild(v)
+
+ c.Assert(b.UseResourceCacheWhen, qt.Equals, "fallback")
+
+ c.Assert(b.UseResourceCache(herrors.ErrFeatureNotAvailable), qt.Equals, true)
+ c.Assert(b.UseResourceCache(errors.New("err")), qt.Equals, false)
+
+ b.UseResourceCacheWhen = "always"
+ c.Assert(b.UseResourceCache(herrors.ErrFeatureNotAvailable), qt.Equals, true)
+ c.Assert(b.UseResourceCache(errors.New("err")), qt.Equals, true)
+ c.Assert(b.UseResourceCache(nil), qt.Equals, true)
+
+ b.UseResourceCacheWhen = "never"
+ c.Assert(b.UseResourceCache(herrors.ErrFeatureNotAvailable), qt.Equals, false)
+ c.Assert(b.UseResourceCache(errors.New("err")), qt.Equals, false)
+ c.Assert(b.UseResourceCache(nil), qt.Equals, false)
+}
+
+func TestServer(t *testing.T) {
+ c := qt.New(t)
+
+ cfg, err := FromConfigString(`[[server.headers]]
+for = "/*.jpg"
+
+[server.headers.values]
+X-Frame-Options = "DENY"
+X-XSS-Protection = "1; mode=block"
+X-Content-Type-Options = "nosniff"
+
+[[server.redirects]]
+from = "/foo/**"
+to = "/foo/index.html"
+status = 200
+
+[[server.redirects]]
+from = "/google/**"
+to = "https://google.com/"
+status = 301
+
+[[server.redirects]]
+from = "/**"
+to = "/default/index.html"
+status = 301
+
+
+
+`, "toml")
+
+ c.Assert(err, qt.IsNil)
+
+ s, err := DecodeServer(cfg)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(s.MatchHeaders("/foo.jpg"), qt.DeepEquals, []types.KeyValueStr{
+ {Key: "X-Content-Type-Options", Value: "nosniff"},
+ {Key: "X-Frame-Options", Value: "DENY"},
+ {Key: "X-XSS-Protection", Value: "1; mode=block"},
+ })
+
+ c.Assert(s.MatchRedirect("/foo/bar/baz"), qt.DeepEquals, Redirect{
+ From: "/foo/**",
+ To: "/foo/",
+ Status: 200,
+ })
+
+ c.Assert(s.MatchRedirect("/someother"), qt.DeepEquals, Redirect{
+ From: "/**",
+ To: "/default/",
+ Status: 301,
+ })
+
+ c.Assert(s.MatchRedirect("/google/foo"), qt.DeepEquals, Redirect{
+ From: "/google/**",
+ To: "https://google.com/",
+ Status: 301,
+ })
+
+ // No redirect loop, please.
+ c.Assert(s.MatchRedirect("/default/index.html"), qt.DeepEquals, Redirect{})
+ c.Assert(s.MatchRedirect("/default/"), qt.DeepEquals, Redirect{})
+
+ for _, errorCase := range []string{
+ `[[server.redirects]]
+from = "/**"
+to = "/file"
+status = 301`,
+ `[[server.redirects]]
+from = "/**"
+to = "/foo/file.html"
+status = 301`,
+ } {
+
+ cfg, err := FromConfigString(errorCase, "toml")
+ c.Assert(err, qt.IsNil)
+ _, err = DecodeServer(cfg)
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ }
+}
diff --git a/config/compositeConfig.go b/config/compositeConfig.go
new file mode 100644
index 000000000..395b2d585
--- /dev/null
+++ b/config/compositeConfig.go
@@ -0,0 +1,117 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+)
+
+// NewCompositeConfig creates a new composite Provider with a read-only base
+// and a writeable layer.
+func NewCompositeConfig(base, layer Provider) Provider {
+ return &compositeConfig{
+ base: base,
+ layer: layer,
+ }
+}
+
+// compositeConfig contains a read only config base with
+// a possibly writeable config layer on top.
+type compositeConfig struct {
+ base Provider
+ layer Provider
+}
+
+func (c *compositeConfig) GetBool(key string) bool {
+ if c.layer.IsSet(key) {
+ return c.layer.GetBool(key)
+ }
+ return c.base.GetBool(key)
+}
+
+func (c *compositeConfig) GetInt(key string) int {
+ if c.layer.IsSet(key) {
+ return c.layer.GetInt(key)
+ }
+ return c.base.GetInt(key)
+}
+
+func (c *compositeConfig) Merge(key string, value any) {
+ c.layer.Merge(key, value)
+}
+
+func (c *compositeConfig) GetParams(key string) maps.Params {
+ if c.layer.IsSet(key) {
+ return c.layer.GetParams(key)
+ }
+ return c.base.GetParams(key)
+}
+
+func (c *compositeConfig) GetStringMap(key string) map[string]any {
+ if c.layer.IsSet(key) {
+ return c.layer.GetStringMap(key)
+ }
+ return c.base.GetStringMap(key)
+}
+
+func (c *compositeConfig) GetStringMapString(key string) map[string]string {
+ if c.layer.IsSet(key) {
+ return c.layer.GetStringMapString(key)
+ }
+ return c.base.GetStringMapString(key)
+}
+
+func (c *compositeConfig) GetStringSlice(key string) []string {
+ if c.layer.IsSet(key) {
+ return c.layer.GetStringSlice(key)
+ }
+ return c.base.GetStringSlice(key)
+}
+
+func (c *compositeConfig) Get(key string) any {
+ if c.layer.IsSet(key) {
+ return c.layer.Get(key)
+ }
+ return c.base.Get(key)
+}
+
+func (c *compositeConfig) IsSet(key string) bool {
+ if c.layer.IsSet(key) {
+ return true
+ }
+ return c.base.IsSet(key)
+}
+
+func (c *compositeConfig) GetString(key string) string {
+ if c.layer.IsSet(key) {
+ return c.layer.GetString(key)
+ }
+ return c.base.GetString(key)
+}
+
+func (c *compositeConfig) Set(key string, value any) {
+ c.layer.Set(key, value)
+}
+
+func (c *compositeConfig) SetDefaults(params maps.Params) {
+ c.layer.SetDefaults(params)
+}
+
+func (c *compositeConfig) WalkParams(walkFn func(params ...KeyParams) bool) {
+ panic("not supported")
+}
+
+func (c *compositeConfig) SetDefaultMergeStrategy() {
+ panic("not supported")
+}
diff --git a/config/compositeConfig_test.go b/config/compositeConfig_test.go
new file mode 100644
index 000000000..60644102f
--- /dev/null
+++ b/config/compositeConfig_test.go
@@ -0,0 +1,40 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestCompositeConfig(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("Set and get", func(c *qt.C) {
+ base, layer := New(), New()
+ cfg := NewCompositeConfig(base, layer)
+
+ layer.Set("a1", "av")
+ base.Set("b1", "bv")
+ cfg.Set("c1", "cv")
+
+ c.Assert(cfg.Get("a1"), qt.Equals, "av")
+ c.Assert(cfg.Get("b1"), qt.Equals, "bv")
+ c.Assert(cfg.Get("c1"), qt.Equals, "cv")
+ c.Assert(cfg.IsSet("c1"), qt.IsTrue)
+ c.Assert(layer.IsSet("c1"), qt.IsTrue)
+ c.Assert(base.IsSet("c1"), qt.IsFalse)
+ })
+}
diff --git a/config/configLoader.go b/config/configLoader.go
new file mode 100644
index 000000000..6722c12fd
--- /dev/null
+++ b/config/configLoader.go
@@ -0,0 +1,214 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/herrors"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+ "github.com/spf13/afero"
+)
+
+var (
+ ValidConfigFileExtensions = []string{"toml", "yaml", "yml", "json"}
+ validConfigFileExtensionsMap map[string]bool = make(map[string]bool)
+)
+
+func init() {
+ for _, ext := range ValidConfigFileExtensions {
+ validConfigFileExtensionsMap[ext] = true
+ }
+}
+
+// IsValidConfigFilename returns whether filename is one of the supported
+// config formats in Hugo.
+func IsValidConfigFilename(filename string) bool {
+ ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(filename), "."))
+ return validConfigFileExtensionsMap[ext]
+}
+
+// FromConfigString creates a config from the given YAML, JSON or TOML config. This is useful in tests.
+func FromConfigString(config, configType string) (Provider, error) {
+ m, err := readConfig(metadecoders.FormatFromString(configType), []byte(config))
+ if err != nil {
+ return nil, err
+ }
+ return NewFrom(m), nil
+}
+
+// FromFile loads the configuration from the given filename.
+func FromFile(fs afero.Fs, filename string) (Provider, error) {
+ m, err := loadConfigFromFile(fs, filename)
+ if err != nil {
+ fe := herrors.UnwrapFileError(err)
+ if fe != nil {
+ pos := fe.Position()
+ pos.Filename = filename
+ fe.UpdatePosition(pos)
+ return nil, err
+ }
+ return nil, herrors.NewFileErrorFromFile(err, filename, fs, nil)
+ }
+ return NewFrom(m), nil
+}
+
+// FromFileToMap is the same as FromFile, but it returns the config values
+// as a simple map.
+func FromFileToMap(fs afero.Fs, filename string) (map[string]any, error) {
+ return loadConfigFromFile(fs, filename)
+}
+
+func readConfig(format metadecoders.Format, data []byte) (map[string]any, error) {
+ m, err := metadecoders.Default.UnmarshalToMap(data, format)
+ if err != nil {
+ return nil, err
+ }
+
+ RenameKeys(m)
+
+ return m, nil
+}
+
+func loadConfigFromFile(fs afero.Fs, filename string) (map[string]any, error) {
+ m, err := metadecoders.Default.UnmarshalFileToMap(fs, filename)
+ if err != nil {
+ return nil, err
+ }
+ RenameKeys(m)
+ return m, nil
+}
+
+func LoadConfigFromDir(sourceFs afero.Fs, configDir, environment string) (Provider, []string, error) {
+ defaultConfigDir := filepath.Join(configDir, "_default")
+ environmentConfigDir := filepath.Join(configDir, environment)
+ cfg := New()
+
+ var configDirs []string
+ // Merge from least to most specific.
+ for _, dir := range []string{defaultConfigDir, environmentConfigDir} {
+ if _, err := sourceFs.Stat(dir); err == nil {
+ configDirs = append(configDirs, dir)
+ }
+ }
+
+ if len(configDirs) == 0 {
+ return nil, nil, nil
+ }
+
+ // Keep track of these so we can watch them for changes.
+ var dirnames []string
+
+ for _, configDir := range configDirs {
+ err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error {
+ if fi == nil || err != nil {
+ return nil
+ }
+
+ if fi.IsDir() {
+ dirnames = append(dirnames, path)
+ return nil
+ }
+
+ if !IsValidConfigFilename(path) {
+ return nil
+ }
+
+ name := paths.Filename(filepath.Base(path))
+
+ item, err := metadecoders.Default.UnmarshalFileToMap(sourceFs, path)
+ if err != nil {
+ // This will be used in error reporting, use the most specific value.
+ dirnames = []string{path}
+				return fmt.Errorf("failed to unmarshal config for path %q: %w", path, err)
+ }
+
+ var keyPath []string
+
+ if name != "config" {
+ // Can be params.jp, menus.en etc.
+ name, lang := paths.FileAndExtNoDelimiter(name)
+
+ keyPath = []string{name}
+
+ if lang != "" {
+ keyPath = []string{"languages", lang}
+ switch name {
+ case "menu", "menus":
+ keyPath = append(keyPath, "menus")
+ case "params":
+ keyPath = append(keyPath, "params")
+ }
+ }
+ }
+
+ root := item
+ if len(keyPath) > 0 {
+ root = make(map[string]any)
+ m := root
+ for i, key := range keyPath {
+ if i >= len(keyPath)-1 {
+ m[key] = item
+ } else {
+ nm := make(map[string]any)
+ m[key] = nm
+ m = nm
+ }
+ }
+ }
+
+ // Migrate menu => menus etc.
+ RenameKeys(root)
+
+ // Set will overwrite keys with the same name, recursively.
+ cfg.Set("", root)
+
+ return nil
+ })
+ if err != nil {
+ return nil, dirnames, err
+ }
+
+ }
+
+ return cfg, dirnames, nil
+
+}
+
+var keyAliases maps.KeyRenamer
+
+func init() {
+ var err error
+ keyAliases, err = maps.NewKeyRenamer(
+ // Before 0.53 we used singular for "menu".
+ "{menu,languages/*/menu}", "menus",
+ )
+
+ if err != nil {
+ panic(err)
+ }
+}
+
+// RenameKeys renames config keys in m recursively according to a global Hugo
+// alias definition.
+func RenameKeys(m map[string]any) {
+ keyAliases.Rename(m)
+}
diff --git a/config/configLoader_test.go b/config/configLoader_test.go
new file mode 100644
index 000000000..546031334
--- /dev/null
+++ b/config/configLoader_test.go
@@ -0,0 +1,34 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestIsValidConfigFileName(t *testing.T) {
+ c := qt.New(t)
+
+ for _, ext := range ValidConfigFileExtensions {
+ filename := "config." + ext
+ c.Assert(IsValidConfigFilename(filename), qt.Equals, true)
+ c.Assert(IsValidConfigFilename(strings.ToUpper(filename)), qt.Equals, true)
+ }
+
+ c.Assert(IsValidConfigFilename(""), qt.Equals, false)
+ c.Assert(IsValidConfigFilename("config.toml.swp"), qt.Equals, false)
+}
diff --git a/config/configProvider.go b/config/configProvider.go
new file mode 100644
index 000000000..01a2e8c54
--- /dev/null
+++ b/config/configProvider.go
@@ -0,0 +1,67 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/types"
+)
+
+// Provider provides the configuration settings for Hugo.
+type Provider interface {
+ GetString(key string) string
+ GetInt(key string) int
+ GetBool(key string) bool
+ GetParams(key string) maps.Params
+ GetStringMap(key string) map[string]any
+ GetStringMapString(key string) map[string]string
+ GetStringSlice(key string) []string
+ Get(key string) any
+ Set(key string, value any)
+ Merge(key string, value any)
+ SetDefaults(params maps.Params)
+ SetDefaultMergeStrategy()
+ WalkParams(walkFn func(params ...KeyParams) bool)
+ IsSet(key string) bool
+}
+
+// GetStringSlicePreserveString returns a string slice from the given config and key.
+// It differs from the GetStringSlice method in that if the config value is a string,
+// we do not attempt to split it into fields.
+func GetStringSlicePreserveString(cfg Provider, key string) []string {
+ sd := cfg.Get(key)
+ return types.ToStringSlicePreserveString(sd)
+}
+
+// SetBaseTestDefaults provides some common config defaults used in tests.
+func SetBaseTestDefaults(cfg Provider) Provider {
+ setIfNotSet(cfg, "baseURL", "https://example.org")
+ setIfNotSet(cfg, "resourceDir", "resources")
+ setIfNotSet(cfg, "contentDir", "content")
+ setIfNotSet(cfg, "dataDir", "data")
+ setIfNotSet(cfg, "i18nDir", "i18n")
+ setIfNotSet(cfg, "layoutDir", "layouts")
+ setIfNotSet(cfg, "assetDir", "assets")
+ setIfNotSet(cfg, "archetypeDir", "archetypes")
+ setIfNotSet(cfg, "publishDir", "public")
+ setIfNotSet(cfg, "workingDir", "")
+ setIfNotSet(cfg, "defaultContentLanguage", "en")
+ return cfg
+}
+
+func setIfNotSet(cfg Provider, key string, value any) {
+ if !cfg.IsSet(key) {
+ cfg.Set(key, value)
+ }
+}
diff --git a/config/configProvider_test.go b/config/configProvider_test.go
new file mode 100644
index 000000000..0afba1e58
--- /dev/null
+++ b/config/configProvider_test.go
@@ -0,0 +1,35 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestGetStringSlicePreserveString(t *testing.T) {
+ c := qt.New(t)
+ cfg := New()
+
+ s := "This is a string"
+ sSlice := []string{"This", "is", "a", "slice"}
+
+ cfg.Set("s1", s)
+ cfg.Set("s2", sSlice)
+
+ c.Assert(GetStringSlicePreserveString(cfg, "s1"), qt.DeepEquals, []string{s})
+ c.Assert(GetStringSlicePreserveString(cfg, "s2"), qt.DeepEquals, sSlice)
+ c.Assert(GetStringSlicePreserveString(cfg, "s3"), qt.IsNil)
+}
diff --git a/config/defaultConfigProvider.go b/config/defaultConfigProvider.go
new file mode 100644
index 000000000..822f421fa
--- /dev/null
+++ b/config/defaultConfigProvider.go
@@ -0,0 +1,447 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
+)
+
+var (
+
+ // ConfigRootKeysSet contains all of the config map root keys.
+ ConfigRootKeysSet = map[string]bool{
+ "build": true,
+ "caches": true,
+ "cascade": true,
+ "frontmatter": true,
+ "languages": true,
+ "imaging": true,
+ "markup": true,
+ "mediatypes": true,
+ "menus": true,
+ "minify": true,
+ "module": true,
+ "outputformats": true,
+ "params": true,
+ "permalinks": true,
+ "related": true,
+ "sitemap": true,
+ "privacy": true,
+ "security": true,
+ "taxonomies": true,
+ }
+
+ // ConfigRootKeys is a sorted version of ConfigRootKeysSet.
+ ConfigRootKeys []string
+)
+
+func init() {
+ for k := range ConfigRootKeysSet {
+ ConfigRootKeys = append(ConfigRootKeys, k)
+ }
+ sort.Strings(ConfigRootKeys)
+}
+
+// New creates a Provider backed by an empty maps.Params.
+func New() Provider {
+ return &defaultConfigProvider{
+ root: make(maps.Params),
+ }
+}
+
+// NewFrom creates a Provider backed by params.
+func NewFrom(params maps.Params) Provider {
+ maps.PrepareParams(params)
+ return &defaultConfigProvider{
+ root: params,
+ }
+}
+
+// NewWithTestDefaults is used in tests only.
+func NewWithTestDefaults() Provider {
+ return SetBaseTestDefaults(New())
+}
+
+// defaultConfigProvider is a Provider backed by a map where all keys are lower case.
+// All methods are thread safe.
+type defaultConfigProvider struct {
+ mu sync.RWMutex
+ root maps.Params
+
+ keyCache sync.Map
+}
+
+func (c *defaultConfigProvider) Get(k string) any {
+ if k == "" {
+ return c.root
+ }
+ c.mu.RLock()
+ key, m := c.getNestedKeyAndMap(strings.ToLower(k), false)
+ if m == nil {
+ c.mu.RUnlock()
+ return nil
+ }
+ v := m[key]
+ c.mu.RUnlock()
+ return v
+}
+
+func (c *defaultConfigProvider) GetBool(k string) bool {
+ v := c.Get(k)
+ return cast.ToBool(v)
+}
+
+func (c *defaultConfigProvider) GetInt(k string) int {
+ v := c.Get(k)
+ return cast.ToInt(v)
+}
+
+func (c *defaultConfigProvider) IsSet(k string) bool {
+ var found bool
+ c.mu.RLock()
+ key, m := c.getNestedKeyAndMap(strings.ToLower(k), false)
+ if m != nil {
+ _, found = m[key]
+ }
+ c.mu.RUnlock()
+ return found
+}
+
+func (c *defaultConfigProvider) GetString(k string) string {
+ v := c.Get(k)
+ return cast.ToString(v)
+}
+
+func (c *defaultConfigProvider) GetParams(k string) maps.Params {
+ v := c.Get(k)
+ if v == nil {
+ return nil
+ }
+ return v.(maps.Params)
+}
+
+func (c *defaultConfigProvider) GetStringMap(k string) map[string]any {
+ v := c.Get(k)
+ return maps.ToStringMap(v)
+}
+
+func (c *defaultConfigProvider) GetStringMapString(k string) map[string]string {
+ v := c.Get(k)
+ return maps.ToStringMapString(v)
+}
+
+func (c *defaultConfigProvider) GetStringSlice(k string) []string {
+ v := c.Get(k)
+ return cast.ToStringSlice(v)
+}
+
+func (c *defaultConfigProvider) Set(k string, v any) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ k = strings.ToLower(k)
+
+ if k == "" {
+ if p, ok := maps.ToParamsAndPrepare(v); ok {
+ // Set the values directly in root.
+ c.root.Set(p)
+ } else {
+ c.root[k] = v
+ }
+
+ return
+ }
+
+ switch vv := v.(type) {
+ case map[string]any, map[any]any, map[string]string:
+ p := maps.MustToParamsAndPrepare(vv)
+ v = p
+ }
+
+ key, m := c.getNestedKeyAndMap(k, true)
+ if m == nil {
+ return
+ }
+
+ if existing, found := m[key]; found {
+ if p1, ok := existing.(maps.Params); ok {
+ if p2, ok := v.(maps.Params); ok {
+ p1.Set(p2)
+ return
+ }
+ }
+ }
+
+ m[key] = v
+}
+
+// SetDefaults will set values from params if not already set.
+func (c *defaultConfigProvider) SetDefaults(params maps.Params) {
+ maps.PrepareParams(params)
+ for k, v := range params {
+ if _, found := c.root[k]; !found {
+ c.root[k] = v
+ }
+ }
+}
+
+func (c *defaultConfigProvider) Merge(k string, v any) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ k = strings.ToLower(k)
+
+ const (
+ languagesKey = "languages"
+ paramsKey = "params"
+ menusKey = "menus"
+ )
+
+ if k == "" {
+ rs, f := c.root.GetMergeStrategy()
+ if f && rs == maps.ParamsMergeStrategyNone {
+ // The user has set a "no merge" strategy on this,
+ // nothing more to do.
+ return
+ }
+
+ if p, ok := maps.ToParamsAndPrepare(v); ok {
+ // As there may be keys in p not in root, we need to handle
+ // those as a special case.
+ var keysToDelete []string
+ for kk, vv := range p {
+ if pp, ok := vv.(maps.Params); ok {
+ if pppi, ok := c.root[kk]; ok {
+ ppp := pppi.(maps.Params)
+ if kk == languagesKey {
+ // Languages is currently a special case.
+ // We may have languages with menus or params in the
+ // right map that is not present in the left map.
+ // With the default merge strategy those items will not
+ // be passed over.
+ var hasParams, hasMenus bool
+ for _, rv := range pp {
+ if lkp, ok := rv.(maps.Params); ok {
+ _, hasMenus = lkp[menusKey]
+ _, hasParams = lkp[paramsKey]
+ }
+ }
+
+ if hasMenus || hasParams {
+ for _, lv := range ppp {
+ if lkp, ok := lv.(maps.Params); ok {
+ if hasMenus {
+ if _, ok := lkp[menusKey]; !ok {
+ p := maps.Params{}
+ p.SetDefaultMergeStrategy(maps.ParamsMergeStrategyShallow)
+ lkp[menusKey] = p
+ }
+ }
+ if hasParams {
+ if _, ok := lkp[paramsKey]; !ok {
+ p := maps.Params{}
+ p.SetDefaultMergeStrategy(maps.ParamsMergeStrategyShallow)
+ lkp[paramsKey] = p
+ }
+ }
+ }
+ }
+ }
+ }
+ ppp.Merge(pp)
+ } else {
+ // We need to use the default merge strategy for
+ // this key.
+ np := make(maps.Params)
+ strategy := c.determineMergeStrategy(KeyParams{Key: "", Params: c.root}, KeyParams{Key: kk, Params: np})
+ np.SetDefaultMergeStrategy(strategy)
+ np.Merge(pp)
+ c.root[kk] = np
+ if np.IsZero() {
+ // Just keep it until merge is done.
+ keysToDelete = append(keysToDelete, kk)
+ }
+ }
+ }
+ }
+ // Merge the rest.
+ c.root.MergeRoot(p)
+ for _, k := range keysToDelete {
+ delete(c.root, k)
+ }
+ } else {
+ panic(fmt.Sprintf("unsupported type %T received in Merge", v))
+ }
+
+ return
+ }
+
+ switch vv := v.(type) {
+ case map[string]any, map[any]any, map[string]string:
+ p := maps.MustToParamsAndPrepare(vv)
+ v = p
+ }
+
+ key, m := c.getNestedKeyAndMap(k, true)
+ if m == nil {
+ return
+ }
+
+ if existing, found := m[key]; found {
+ if p1, ok := existing.(maps.Params); ok {
+ if p2, ok := v.(maps.Params); ok {
+ p1.Merge(p2)
+ }
+ }
+ } else {
+ m[key] = v
+ }
+}
+
+func (c *defaultConfigProvider) WalkParams(walkFn func(params ...KeyParams) bool) {
+ var walk func(params ...KeyParams)
+ walk = func(params ...KeyParams) {
+ if walkFn(params...) {
+ return
+ }
+ p1 := params[len(params)-1]
+ i := len(params)
+ for k, v := range p1.Params {
+ if p2, ok := v.(maps.Params); ok {
+ paramsplus1 := make([]KeyParams, i+1)
+ copy(paramsplus1, params)
+ paramsplus1[i] = KeyParams{Key: k, Params: p2}
+ walk(paramsplus1...)
+ }
+ }
+ }
+ walk(KeyParams{Key: "", Params: c.root})
+}
+
+func (c *defaultConfigProvider) determineMergeStrategy(params ...KeyParams) maps.ParamsMergeStrategy {
+ if len(params) == 0 {
+ return maps.ParamsMergeStrategyNone
+ }
+
+ var (
+ strategy maps.ParamsMergeStrategy
+ prevIsRoot bool
+ curr = params[len(params)-1]
+ )
+
+ if len(params) > 1 {
+ prev := params[len(params)-2]
+ prevIsRoot = prev.Key == ""
+
+ // Inherit from parent (but not from the root unless it's set by user).
+ s, found := prev.Params.GetMergeStrategy()
+ if !prevIsRoot && !found {
+ panic("invalid state, merge strategy not set on parent")
+ }
+ if found || !prevIsRoot {
+ strategy = s
+ }
+ }
+
+ switch curr.Key {
+ case "":
+ // Don't set a merge strategy on the root unless set by user.
+ // This will be handled as a special case.
+ case "params":
+ strategy = maps.ParamsMergeStrategyDeep
+ case "outputformats", "mediatypes":
+ if prevIsRoot {
+ strategy = maps.ParamsMergeStrategyShallow
+ }
+ case "menus":
+ isMenuKey := prevIsRoot
+ if !isMenuKey {
+ // Can also be set below languages.
+ // root > languages > en > menus
+ if len(params) == 4 && params[1].Key == "languages" {
+ isMenuKey = true
+ }
+ }
+ if isMenuKey {
+ strategy = maps.ParamsMergeStrategyShallow
+ }
+ default:
+ if strategy == "" {
+ strategy = maps.ParamsMergeStrategyNone
+ }
+ }
+
+ return strategy
+}
+
+type KeyParams struct {
+ Key string
+ Params maps.Params
+}
+
+func (c *defaultConfigProvider) SetDefaultMergeStrategy() {
+ c.WalkParams(func(params ...KeyParams) bool {
+ if len(params) == 0 {
+ return false
+ }
+ p := params[len(params)-1].Params
+ var found bool
+ if _, found = p.GetMergeStrategy(); found {
+ // Set by user.
+ return false
+ }
+ strategy := c.determineMergeStrategy(params...)
+ if strategy != "" {
+ p.SetDefaultMergeStrategy(strategy)
+ }
+ return false
+ })
+
+}
+
+func (c *defaultConfigProvider) getNestedKeyAndMap(key string, create bool) (string, maps.Params) {
+ var parts []string
+ v, ok := c.keyCache.Load(key)
+ if ok {
+ parts = v.([]string)
+ } else {
+ parts = strings.Split(key, ".")
+ c.keyCache.Store(key, parts)
+ }
+ current := c.root
+ for i := 0; i < len(parts)-1; i++ {
+ next, found := current[parts[i]]
+ if !found {
+ if create {
+ next = make(maps.Params)
+ current[parts[i]] = next
+ } else {
+ return "", nil
+ }
+ }
+ var ok bool
+ current, ok = next.(maps.Params)
+ if !ok {
+ // E.g. a string, not a map that we can store values in.
+ return "", nil
+ }
+ }
+ return parts[len(parts)-1], current
+}
diff --git a/config/defaultConfigProvider_test.go b/config/defaultConfigProvider_test.go
new file mode 100644
index 000000000..65f10ec6a
--- /dev/null
+++ b/config/defaultConfigProvider_test.go
@@ -0,0 +1,400 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/para"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDefaultConfigProvider(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("Set and get", func(c *qt.C) {
+ cfg := New()
+ var k string
+ var v any
+
+ k, v = "foo", "bar"
+ cfg.Set(k, v)
+ c.Assert(cfg.Get(k), qt.Equals, v)
+ c.Assert(cfg.Get(strings.ToUpper(k)), qt.Equals, v)
+ c.Assert(cfg.GetString(k), qt.Equals, v)
+
+ k, v = "foo", 42
+ cfg.Set(k, v)
+ c.Assert(cfg.Get(k), qt.Equals, v)
+ c.Assert(cfg.GetInt(k), qt.Equals, v)
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "foo": 42,
+ })
+ })
+
+ c.Run("Set and get map", func(c *qt.C) {
+ cfg := New()
+
+ cfg.Set("foo", map[string]any{
+ "bar": "baz",
+ })
+
+ c.Assert(cfg.Get("foo"), qt.DeepEquals, maps.Params{
+ "bar": "baz",
+ })
+
+ c.Assert(cfg.GetStringMap("foo"), qt.DeepEquals, map[string]any{"bar": string("baz")})
+ c.Assert(cfg.GetStringMapString("foo"), qt.DeepEquals, map[string]string{"bar": string("baz")})
+ })
+
+ c.Run("Set and get nested", func(c *qt.C) {
+ cfg := New()
+
+ cfg.Set("a", map[string]any{
+ "B": "bv",
+ })
+ cfg.Set("a.c", "cv")
+
+ c.Assert(cfg.Get("a"), qt.DeepEquals, maps.Params{
+ "b": "bv",
+ "c": "cv",
+ })
+ c.Assert(cfg.Get("a.c"), qt.Equals, "cv")
+
+ cfg.Set("b.a", "av")
+ c.Assert(cfg.Get("b"), qt.DeepEquals, maps.Params{
+ "a": "av",
+ })
+
+ cfg.Set("b", map[string]any{
+ "b": "bv",
+ })
+
+ c.Assert(cfg.Get("b"), qt.DeepEquals, maps.Params{
+ "a": "av",
+ "b": "bv",
+ })
+
+ cfg = New()
+
+ cfg.Set("a", "av")
+
+ cfg.Set("", map[string]any{
+ "a": "av2",
+ "b": "bv2",
+ })
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "a": "av2",
+ "b": "bv2",
+ })
+
+ cfg = New()
+
+ cfg.Set("a", "av")
+
+ cfg.Set("", map[string]any{
+ "b": "bv2",
+ })
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "a": "av",
+ "b": "bv2",
+ })
+
+ cfg = New()
+
+ cfg.Set("", map[string]any{
+ "foo": map[string]any{
+ "a": "av",
+ },
+ })
+
+ cfg.Set("", map[string]any{
+ "foo": map[string]any{
+ "b": "bv2",
+ },
+ })
+
+ c.Assert(cfg.Get("foo"), qt.DeepEquals, maps.Params{
+ "a": "av",
+ "b": "bv2",
+ })
+ })
+
+ c.Run("Merge default strategy", func(c *qt.C) {
+ cfg := New()
+
+ cfg.Set("a", map[string]any{
+ "B": "bv",
+ })
+
+ cfg.Merge("a", map[string]any{
+ "B": "bv2",
+ "c": "cv2",
+ })
+
+ c.Assert(cfg.Get("a"), qt.DeepEquals, maps.Params{
+ "b": "bv",
+ "c": "cv2",
+ })
+
+ cfg = New()
+
+ cfg.Set("a", "av")
+
+ cfg.Merge("", map[string]any{
+ "a": "av2",
+ "b": "bv2",
+ })
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "a": "av",
+ })
+ })
+
+ c.Run("Merge shallow", func(c *qt.C) {
+ cfg := New()
+
+ cfg.Set("a", map[string]any{
+ "_merge": "shallow",
+ "B": "bv",
+ "c": map[string]any{
+ "b": "bv",
+ },
+ })
+
+ cfg.Merge("a", map[string]any{
+ "c": map[string]any{
+ "d": "dv2",
+ },
+ "e": "ev2",
+ })
+
+ c.Assert(cfg.Get("a"), qt.DeepEquals, maps.Params{
+ "e": "ev2",
+ "_merge": maps.ParamsMergeStrategyShallow,
+ "b": "bv",
+ "c": maps.Params{
+ "b": "bv",
+ },
+ })
+ })
+
+ // Issue #8679
+ c.Run("Merge typed maps", func(c *qt.C) {
+ for _, left := range []any{
+ map[string]string{
+ "c": "cv1",
+ },
+ map[string]any{
+ "c": "cv1",
+ },
+ map[any]any{
+ "c": "cv1",
+ },
+ } {
+ cfg := New()
+
+ cfg.Set("", map[string]any{
+ "b": left,
+ })
+
+ cfg.Merge("", maps.Params{
+ "b": maps.Params{
+ "c": "cv2",
+ "d": "dv2",
+ },
+ })
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "b": maps.Params{
+ "c": "cv1",
+ "d": "dv2",
+ },
+ })
+ }
+
+ for _, left := range []any{
+ map[string]string{
+ "b": "bv1",
+ },
+ map[string]any{
+ "b": "bv1",
+ },
+ map[any]any{
+ "b": "bv1",
+ },
+ } {
+ for _, right := range []any{
+ map[string]string{
+ "b": "bv2",
+ "c": "cv2",
+ },
+ map[string]any{
+ "b": "bv2",
+ "c": "cv2",
+ },
+ map[any]any{
+ "b": "bv2",
+ "c": "cv2",
+ },
+ } {
+ cfg := New()
+
+ cfg.Set("a", left)
+
+ cfg.Merge("a", right)
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "a": maps.Params{
+ "b": "bv1",
+ "c": "cv2",
+ },
+ })
+ }
+ }
+ })
+
+ // Issue #8701
+ c.Run("Prevent _merge only maps", func(c *qt.C) {
+ cfg := New()
+
+ cfg.Set("", map[string]any{
+ "B": "bv",
+ })
+
+ cfg.Merge("", map[string]any{
+ "c": map[string]any{
+ "_merge": "shallow",
+ "d": "dv2",
+ },
+ })
+
+ c.Assert(cfg.Get(""), qt.DeepEquals, maps.Params{
+ "b": "bv",
+ })
+ })
+
+ c.Run("IsSet", func(c *qt.C) {
+ cfg := New()
+
+ cfg.Set("a", map[string]any{
+ "B": "bv",
+ })
+
+ c.Assert(cfg.IsSet("A"), qt.IsTrue)
+ c.Assert(cfg.IsSet("a.b"), qt.IsTrue)
+ c.Assert(cfg.IsSet("z"), qt.IsFalse)
+ })
+
+ c.Run("Para", func(c *qt.C) {
+ cfg := New()
+ p := para.New(4)
+ r, _ := p.Start(context.Background())
+
+ setAndGet := func(k string, v int) error {
+ vs := strconv.Itoa(v)
+ cfg.Set(k, v)
+ err := errors.New("get failed")
+ if cfg.Get(k) != v {
+ return err
+ }
+ if cfg.GetInt(k) != v {
+ return err
+ }
+ if cfg.GetString(k) != vs {
+ return err
+ }
+ if !cfg.IsSet(k) {
+ return err
+ }
+ return nil
+ }
+
+ for i := 0; i < 20; i++ {
+ i := i
+ r.Run(func() error {
+ const v = 42
+ k := fmt.Sprintf("k%d", i)
+ if err := setAndGet(k, v); err != nil {
+ return err
+ }
+
+ m := maps.Params{
+ "new": 42,
+ }
+
+ cfg.Merge("", m)
+
+ return nil
+ })
+ }
+
+ c.Assert(r.Wait(), qt.IsNil)
+ })
+}
+
+func BenchmarkDefaultConfigProvider(b *testing.B) {
+ type cfger interface {
+ Get(key string) any
+ Set(key string, value any)
+ IsSet(key string) bool
+ }
+
+ newMap := func() map[string]any {
+ return map[string]any{
+ "a": map[string]any{
+ "b": map[string]any{
+ "c": 32,
+ "d": 43,
+ },
+ },
+ "b": 62,
+ }
+ }
+
+ runMethods := func(b *testing.B, cfg cfger) {
+ m := newMap()
+ cfg.Set("mymap", m)
+ cfg.Set("num", 32)
+ if !(cfg.IsSet("mymap") && cfg.IsSet("mymap.a") && cfg.IsSet("mymap.a.b") && cfg.IsSet("mymap.a.b.c")) {
+ b.Fatal("IsSet failed")
+ }
+
+ if cfg.Get("num") != 32 {
+ b.Fatal("Get failed")
+ }
+
+ if cfg.Get("mymap.a.b.c") != 32 {
+ b.Fatal("Get failed")
+ }
+ }
+
+ b.Run("Custom", func(b *testing.B) {
+ cfg := New()
+ for i := 0; i < b.N; i++ {
+ runMethods(b, cfg)
+ }
+ })
+}
diff --git a/config/docshelper.go b/config/docshelper.go
new file mode 100644
index 000000000..e34c53c2b
--- /dev/null
+++ b/config/docshelper.go
@@ -0,0 +1,45 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/docshelper"
+)
+
+// This is just some helpers used to create some JSON used in the Hugo docs.
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+
+ cfg := New()
+ for _, configRoot := range ConfigRootKeys {
+ cfg.Set(configRoot, make(maps.Params))
+ }
+ lang := maps.Params{
+ "en": maps.Params{
+ "menus": maps.Params{},
+ "params": maps.Params{},
+ },
+ }
+ cfg.Set("languages", lang)
+ cfg.SetDefaultMergeStrategy()
+
+ configHelpers := map[string]any{
+ "mergeStrategy": cfg.Get(""),
+ }
+ return docshelper.DocProvider{"config": configHelpers}
+ }
+
+ docshelper.AddDocProviderFunc(docsProvider)
+}
diff --git a/config/env.go b/config/env.go
new file mode 100644
index 000000000..1e9266b17
--- /dev/null
+++ b/config/env.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "os"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+// GetNumWorkerMultiplier returns the base value used to calculate the number
+// of workers to use for Hugo's parallel execution.
+// It returns the value in HUGO_NUMWORKERMULTIPLIER OS env variable if set to a
+// positive integer, else the number of logical CPUs.
+func GetNumWorkerMultiplier() int {
+ if gmp := os.Getenv("HUGO_NUMWORKERMULTIPLIER"); gmp != "" {
+ if p, err := strconv.Atoi(gmp); err == nil && p > 0 {
+ return p
+ }
+ }
+ return runtime.NumCPU()
+}
+
+// SetEnvVars sets vars on the form key=value in the oldVars slice.
+func SetEnvVars(oldVars *[]string, keyValues ...string) {
+ for i := 0; i < len(keyValues); i += 2 {
+ setEnvVar(oldVars, keyValues[i], keyValues[i+1])
+ }
+}
+
+func SplitEnvVar(v string) (string, string) {
+ name, value, _ := strings.Cut(v, "=")
+ return name, value
+}
+
+func setEnvVar(vars *[]string, key, value string) {
+ for i := range *vars {
+ if strings.HasPrefix((*vars)[i], key+"=") {
+ (*vars)[i] = key + "=" + value
+ return
+ }
+ }
+ // New var.
+ *vars = append(*vars, key+"="+value)
+}
diff --git a/config/env_test.go b/config/env_test.go
new file mode 100644
index 000000000..3c402b9ef
--- /dev/null
+++ b/config/env_test.go
@@ -0,0 +1,32 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestSetEnvVars(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ vars := []string{"FOO=bar", "HUGO=cool", "BAR=foo"}
+ SetEnvVars(&vars, "HUGO", "rocking!", "NEW", "bar")
+ c.Assert(vars, qt.DeepEquals, []string{"FOO=bar", "HUGO=rocking!", "BAR=foo", "NEW=bar"})
+
+ key, val := SplitEnvVar("HUGO=rocks")
+ c.Assert(key, qt.Equals, "HUGO")
+ c.Assert(val, qt.Equals, "rocks")
+}
diff --git a/config/privacy/privacyConfig.go b/config/privacy/privacyConfig.go
new file mode 100644
index 000000000..a36046364
--- /dev/null
+++ b/config/privacy/privacyConfig.go
@@ -0,0 +1,114 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package privacy
+
+import (
+ "github.com/gohugoio/hugo/config"
+ "github.com/mitchellh/mapstructure"
+)
+
+const privacyConfigKey = "privacy"
+
+// Service is the common values for a service in a policy definition.
+type Service struct {
+ Disable bool
+}
+
+// Config is a privacy configuration for all the relevant services in Hugo.
+type Config struct {
+ Disqus Disqus
+ GoogleAnalytics GoogleAnalytics
+ Instagram Instagram
+ Twitter Twitter
+ Vimeo Vimeo
+ YouTube YouTube
+}
+
+// Disqus holds the privacy configuration settings related to the Disqus template.
+type Disqus struct {
+ Service `mapstructure:",squash"`
+}
+
+// GoogleAnalytics holds the privacy configuration settings related to the Google Analytics template.
+type GoogleAnalytics struct {
+ Service `mapstructure:",squash"`
+
+ // Enabling this will disable the use of Cookies and use Session Storage to Store the GA Client ID.
+ UseSessionStorage bool
+
+ // Enabling this will make the GA templates respect the
+ // "Do Not Track" HTTP header. See https://www.paulfurley.com/google-analytics-dnt/.
+ RespectDoNotTrack bool
+
+ // Enabling this will make it so the users' IP addresses are anonymized within Google Analytics.
+ AnonymizeIP bool
+}
+
+// Instagram holds the privacy configuration settings related to the Instagram shortcode.
+type Instagram struct {
+ Service `mapstructure:",squash"`
+
+ // If simple mode is enabled, a static and no-JS version of the Instagram
+ // image card will be built.
+ Simple bool
+}
+
+// Twitter holds the privacy configuration settings related to the Twitter shortcode.
+type Twitter struct {
+ Service `mapstructure:",squash"`
+
+ // When set to true, the Tweet and its embedded page on your site are not used
+ // for purposes that include personalized suggestions and personalized ads.
+ EnableDNT bool
+
+ // If simple mode is enabled, a static and no-JS version of the Tweet will be built.
+ Simple bool
+}
+
+// Vimeo holds the privacy configuration settings related to the Vimeo shortcode.
+type Vimeo struct {
+ Service `mapstructure:",squash"`
+
+ // When set to true, the Vimeo player will be blocked from tracking any session data,
+ // including all cookies and stats.
+ EnableDNT bool
+
+ // If simple mode is enabled, only a thumbnail is fetched from i.vimeocdn.com and
+ // shown with a play button overlaid. If a user clicks the button, he/she will
+ // be taken to the video page on vimeo.com in a new browser tab.
+ Simple bool
+}
+
+// YouTube holds the privacy configuration settings related to the YouTube shortcode.
+type YouTube struct {
+ Service `mapstructure:",squash"`
+
+ // When you turn on privacy-enhanced mode,
+ // YouTube won’t store information about visitors on your website
+ // unless the user plays the embedded video.
+ PrivacyEnhanced bool
+}
+
+// DecodeConfig creates a privacy Config from a given Hugo configuration.
+func DecodeConfig(cfg config.Provider) (pc Config, err error) {
+ if !cfg.IsSet(privacyConfigKey) {
+ return
+ }
+
+ m := cfg.GetStringMap(privacyConfigKey)
+
+ err = mapstructure.WeakDecode(m, &pc)
+
+ return
+}
diff --git a/config/privacy/privacyConfig_test.go b/config/privacy/privacyConfig_test.go
new file mode 100644
index 000000000..c17ce713d
--- /dev/null
+++ b/config/privacy/privacyConfig_test.go
@@ -0,0 +1,100 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package privacy
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+)
+
+func TestDecodeConfigFromTOML(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[privacy]
+[privacy.disqus]
+disable = true
+[privacy.googleAnalytics]
+disable = true
+respectDoNotTrack = true
+anonymizeIP = true
+useSessionStorage = true
+[privacy.instagram]
+disable = true
+simple = true
+[privacy.twitter]
+disable = true
+enableDNT = true
+simple = true
+[privacy.vimeo]
+disable = true
+enableDNT = true
+simple = true
+[privacy.youtube]
+disable = true
+privacyEnhanced = true
+simple = true
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ pc, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+
+ got := []bool{
+ pc.Disqus.Disable, pc.GoogleAnalytics.Disable,
+ pc.GoogleAnalytics.RespectDoNotTrack, pc.GoogleAnalytics.AnonymizeIP,
+ pc.GoogleAnalytics.UseSessionStorage, pc.Instagram.Disable,
+ pc.Instagram.Simple, pc.Twitter.Disable, pc.Twitter.EnableDNT,
+ pc.Twitter.Simple, pc.Vimeo.Disable, pc.Vimeo.EnableDNT, pc.Vimeo.Simple,
+ pc.YouTube.PrivacyEnhanced, pc.YouTube.Disable,
+ }
+
+ c.Assert(got, qt.All(qt.Equals), true)
+}
+
+func TestDecodeConfigFromTOMLCaseInsensitive(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[Privacy]
+[Privacy.YouTube]
+PrivacyENhanced = true
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ pc, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+ c.Assert(pc.YouTube.PrivacyEnhanced, qt.Equals, true)
+}
+
+func TestDecodeConfigDefault(t *testing.T) {
+ c := qt.New(t)
+
+ pc, err := DecodeConfig(config.New())
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+ c.Assert(pc.YouTube.PrivacyEnhanced, qt.Equals, false)
+}
diff --git a/config/security/docshelper.go b/config/security/docshelper.go
new file mode 100644
index 000000000..ade03560e
--- /dev/null
+++ b/config/security/docshelper.go
@@ -0,0 +1,26 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "github.com/gohugoio/hugo/docshelper"
+)
+
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+
+ return docshelper.DocProvider{"config": DefaultConfig.ToSecurityMap()}
+ }
+ docshelper.AddDocProviderFunc(docsProvider)
+}
diff --git a/config/security/securityConfig.go b/config/security/securityConfig.go
new file mode 100644
index 000000000..b27481230
--- /dev/null
+++ b/config/security/securityConfig.go
@@ -0,0 +1,227 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+ "github.com/mitchellh/mapstructure"
+)
+
+const securityConfigKey = "security"
+
+// DefaultConfig holds the default security policy.
+var DefaultConfig = Config{
+ Exec: Exec{
+ Allow: NewWhitelist(
+ "^dart-sass-embedded$",
+ "^go$", // for Go Modules
+ "^npx$", // used by all Node tools (Babel, PostCSS).
+ "^postcss$",
+ ),
+ // These have been tested to work with Hugo's external programs
+ // on Windows, Linux and MacOS.
+ OsEnv: NewWhitelist("(?i)^(PATH|PATHEXT|APPDATA|TMP|TEMP|TERM)$"),
+ },
+ Funcs: Funcs{
+ Getenv: NewWhitelist("^HUGO_"),
+ },
+ HTTP: HTTP{
+ URLs: NewWhitelist(".*"),
+ Methods: NewWhitelist("(?i)GET|POST"),
+ },
+}
+
+// Config is the top level security config.
+type Config struct {
+ // Restricts access to os.Exec.
+ Exec Exec `json:"exec"`
+
+ // Restricts access to certain template funcs.
+ Funcs Funcs `json:"funcs"`
+
+ // Restricts access to resources.Get, getJSON, getCSV.
+ HTTP HTTP `json:"http"`
+
+ // Allow inline shortcodes
+ EnableInlineShortcodes bool `json:"enableInlineShortcodes"`
+}
+
+// Exec holds os/exec policies.
+type Exec struct {
+ Allow Whitelist `json:"allow"`
+ OsEnv Whitelist `json:"osEnv"`
+}
+
+// Funcs holds template funcs policies.
+type Funcs struct {
+ // OS env keys allowed to query in os.Getenv.
+ Getenv Whitelist `json:"getenv"`
+}
+
+type HTTP struct {
+ // URLs to allow in remote HTTP (resources.Get, getJSON, getCSV).
+ URLs Whitelist `json:"urls"`
+
+ // HTTP methods to allow.
+ Methods Whitelist `json:"methods"`
+}
+
+// ToTOML converts c to TOML with [security] as the root.
+func (c Config) ToTOML() string {
+ sec := c.ToSecurityMap()
+
+ var b bytes.Buffer
+
+ if err := parser.InterfaceToConfig(sec, metadecoders.TOML, &b); err != nil {
+ panic(err)
+ }
+
+ return strings.TrimSpace(b.String())
+}
+
+func (c Config) CheckAllowedExec(name string) error {
+ if !c.Exec.Allow.Accept(name) {
+ return &AccessDeniedError{
+ name: name,
+ path: "security.exec.allow",
+ policies: c.ToTOML(),
+ }
+ }
+ return nil
+
+}
+
+func (c Config) CheckAllowedGetEnv(name string) error {
+ if !c.Funcs.Getenv.Accept(name) {
+ return &AccessDeniedError{
+ name: name,
+ path: "security.funcs.getenv",
+ policies: c.ToTOML(),
+ }
+ }
+ return nil
+}
+
+func (c Config) CheckAllowedHTTPURL(url string) error {
+ if !c.HTTP.URLs.Accept(url) {
+ return &AccessDeniedError{
+ name: url,
+ path: "security.http.urls",
+ policies: c.ToTOML(),
+ }
+ }
+ return nil
+}
+
+func (c Config) CheckAllowedHTTPMethod(method string) error {
+ if !c.HTTP.Methods.Accept(method) {
+ return &AccessDeniedError{
+ name: method,
+ path: "security.http.method",
+ policies: c.ToTOML(),
+ }
+ }
+ return nil
+}
+
+// ToSecurityMap converts c to a map with 'security' as the root key.
+func (c Config) ToSecurityMap() map[string]any {
+ // Take it to JSON and back to get proper casing etc.
+ asJson, err := json.Marshal(c)
+ herrors.Must(err)
+ m := make(map[string]any)
+ herrors.Must(json.Unmarshal(asJson, &m))
+
+ // Add the root
+ sec := map[string]any{
+ "security": m,
+ }
+ return sec
+
+}
+
+// DecodeConfig creates a security Config from a given Hugo configuration.
+func DecodeConfig(cfg config.Provider) (Config, error) {
+ sc := DefaultConfig
+ if cfg.IsSet(securityConfigKey) {
+ m := cfg.GetStringMap(securityConfigKey)
+ dec, err := mapstructure.NewDecoder(
+ &mapstructure.DecoderConfig{
+ WeaklyTypedInput: true,
+ Result: &sc,
+ DecodeHook: stringSliceToWhitelistHook(),
+ },
+ )
+ if err != nil {
+ return sc, err
+ }
+
+ if err = dec.Decode(m); err != nil {
+ return sc, err
+ }
+ }
+
+ if !sc.EnableInlineShortcodes {
+ // Legacy
+ sc.EnableInlineShortcodes = cfg.GetBool("enableInlineShortcodes")
+ }
+
+ return sc, nil
+
+}
+
+func stringSliceToWhitelistHook() mapstructure.DecodeHookFuncType {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data any) (any, error) {
+
+ if t != reflect.TypeOf(Whitelist{}) {
+ return data, nil
+ }
+
+ wl := types.ToStringSlicePreserveString(data)
+
+ return NewWhitelist(wl...), nil
+
+ }
+}
+
+// AccessDeniedError represents a security policy conflict.
+type AccessDeniedError struct {
+ path string
+ name string
+ policies string
+}
+
+func (e *AccessDeniedError) Error() string {
+ return fmt.Sprintf("access denied: %q is not whitelisted in policy %q; the current security configuration is:\n\n%s\n\n", e.name, e.path, e.policies)
+}
+
+// IsAccessDenied reports whether err is an AccessDeniedError
+func IsAccessDenied(err error) bool {
+ var notFoundErr *AccessDeniedError
+ return errors.As(err, &notFoundErr)
+}
diff --git a/config/security/securityonfig_test.go b/config/security/securityonfig_test.go
new file mode 100644
index 000000000..d0416a20d
--- /dev/null
+++ b/config/security/securityonfig_test.go
@@ -0,0 +1,166 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+)
+
+func TestDecodeConfigFromTOML(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("Slice whitelist", func(c *qt.C) {
+ c.Parallel()
+ tomlConfig := `
+
+
+someOtherValue = "bar"
+
+[security]
+enableInlineShortcodes=true
+[security.exec]
+allow=["a", "b"]
+osEnv=["a", "b", "c"]
+[security.funcs]
+getEnv=["a", "b"]
+
+`
+
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ pc, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+ c.Assert(pc.EnableInlineShortcodes, qt.IsTrue)
+ c.Assert(pc.Exec.Allow.Accept("a"), qt.IsTrue)
+ c.Assert(pc.Exec.Allow.Accept("d"), qt.IsFalse)
+ c.Assert(pc.Exec.OsEnv.Accept("a"), qt.IsTrue)
+ c.Assert(pc.Exec.OsEnv.Accept("e"), qt.IsFalse)
+ c.Assert(pc.Funcs.Getenv.Accept("a"), qt.IsTrue)
+ c.Assert(pc.Funcs.Getenv.Accept("c"), qt.IsFalse)
+
+ })
+
+ c.Run("String whitelist", func(c *qt.C) {
+ c.Parallel()
+ tomlConfig := `
+
+
+someOtherValue = "bar"
+
+[security]
+[security.exec]
+allow="a"
+osEnv="b"
+
+`
+
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ pc, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+ c.Assert(pc.Exec.Allow.Accept("a"), qt.IsTrue)
+ c.Assert(pc.Exec.Allow.Accept("d"), qt.IsFalse)
+ c.Assert(pc.Exec.OsEnv.Accept("b"), qt.IsTrue)
+ c.Assert(pc.Exec.OsEnv.Accept("e"), qt.IsFalse)
+
+ })
+
+ c.Run("Default exec.osEnv", func(c *qt.C) {
+ c.Parallel()
+ tomlConfig := `
+
+
+someOtherValue = "bar"
+
+[security]
+[security.exec]
+allow="a"
+
+`
+
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ pc, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+ c.Assert(pc.Exec.Allow.Accept("a"), qt.IsTrue)
+ c.Assert(pc.Exec.OsEnv.Accept("PATH"), qt.IsTrue)
+ c.Assert(pc.Exec.OsEnv.Accept("e"), qt.IsFalse)
+
+ })
+
+ c.Run("Enable inline shortcodes, legacy", func(c *qt.C) {
+ c.Parallel()
+ tomlConfig := `
+
+
+someOtherValue = "bar"
+enableInlineShortcodes=true
+
+[security]
+[security.exec]
+allow="a"
+osEnv="b"
+
+`
+
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ pc, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc.EnableInlineShortcodes, qt.IsTrue)
+
+ })
+
+}
+
+func TestToTOML(t *testing.T) {
+ c := qt.New(t)
+
+ got := DefaultConfig.ToTOML()
+
+ c.Assert(got, qt.Equals,
+ "[security]\n enableInlineShortcodes = false\n [security.exec]\n allow = ['^dart-sass-embedded$', '^go$', '^npx$', '^postcss$']\n osEnv = ['(?i)^(PATH|PATHEXT|APPDATA|TMP|TEMP|TERM)$']\n\n [security.funcs]\n getenv = ['^HUGO_']\n\n [security.http]\n methods = ['(?i)GET|POST']\n urls = ['.*']",
+ )
+}
+
+func TestDecodeConfigDefault(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ pc, err := DecodeConfig(config.New())
+ c.Assert(err, qt.IsNil)
+ c.Assert(pc, qt.Not(qt.IsNil))
+ c.Assert(pc.Exec.Allow.Accept("a"), qt.IsFalse)
+ c.Assert(pc.Exec.Allow.Accept("npx"), qt.IsTrue)
+ c.Assert(pc.Exec.Allow.Accept("Npx"), qt.IsFalse)
+ c.Assert(pc.Exec.OsEnv.Accept("a"), qt.IsFalse)
+ c.Assert(pc.Exec.OsEnv.Accept("PATH"), qt.IsTrue)
+ c.Assert(pc.Exec.OsEnv.Accept("e"), qt.IsFalse)
+
+ c.Assert(pc.HTTP.URLs.Accept("https://example.org"), qt.IsTrue)
+ c.Assert(pc.HTTP.Methods.Accept("POST"), qt.IsTrue)
+ c.Assert(pc.HTTP.Methods.Accept("GET"), qt.IsTrue)
+ c.Assert(pc.HTTP.Methods.Accept("get"), qt.IsTrue)
+ c.Assert(pc.HTTP.Methods.Accept("DELETE"), qt.IsFalse)
+}
diff --git a/config/security/whitelist.go b/config/security/whitelist.go
new file mode 100644
index 000000000..0d2c187c6
--- /dev/null
+++ b/config/security/whitelist.go
@@ -0,0 +1,102 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "strings"
+)
+
+const (
+ acceptNoneKeyword = "none"
+)
+
+// Whitelist holds a whitelist.
+type Whitelist struct {
+ acceptNone bool
+ patterns []*regexp.Regexp
+
+ // Store this for debugging/error reporting
+ patternsStrings []string
+}
+
+func (w Whitelist) MarshalJSON() ([]byte, error) {
+ if w.acceptNone {
+ return json.Marshal(acceptNoneKeyword)
+ }
+
+ return json.Marshal(w.patternsStrings)
+}
+
+// NewWhitelist creates a new Whitelist from zero or more patterns.
+// An empty patterns list or a pattern with the value 'none' will create
+// a whitelist that will Accept no one.
+func NewWhitelist(patterns ...string) Whitelist {
+ if len(patterns) == 0 {
+ return Whitelist{acceptNone: true}
+ }
+
+ var acceptSome bool
+ var patternsStrings []string
+
+ for _, p := range patterns {
+ if p == acceptNoneKeyword {
+ acceptSome = false
+ break
+ }
+
+ if ps := strings.TrimSpace(p); ps != "" {
+ acceptSome = true
+ patternsStrings = append(patternsStrings, ps)
+ }
+ }
+
+ if !acceptSome {
+ return Whitelist{
+ acceptNone: true,
+ }
+ }
+
+ var patternsr []*regexp.Regexp
+
+ for i := 0; i < len(patterns); i++ {
+ p := strings.TrimSpace(patterns[i])
+ if p == "" {
+ continue
+ }
+ patternsr = append(patternsr, regexp.MustCompile(p))
+ }
+
+ return Whitelist{patterns: patternsr, patternsStrings: patternsStrings}
+}
+
+// Accept reports whether name is whitelisted.
+func (w Whitelist) Accept(name string) bool {
+ if w.acceptNone {
+ return false
+ }
+
+ for _, p := range w.patterns {
+ if p.MatchString(name) {
+ return true
+ }
+ }
+ return false
+}
+
+func (w Whitelist) String() string {
+ return fmt.Sprint(w.patternsStrings)
+}
diff --git a/config/security/whitelist_test.go b/config/security/whitelist_test.go
new file mode 100644
index 000000000..5c4196dff
--- /dev/null
+++ b/config/security/whitelist_test.go
@@ -0,0 +1,47 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestWhitelist exercises NewWhitelist and Accept for the 'none' keyword,
// empty/blank input, the zero value, and single and multiple patterns.
func TestWhitelist(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	// The 'none' keyword, no patterns, blank patterns and the zero value
	// must all reject everything.
	c.Run("none", func(c *qt.C) {
		c.Assert(NewWhitelist("none", "foo").Accept("foo"), qt.IsFalse)
		c.Assert(NewWhitelist().Accept("foo"), qt.IsFalse)
		c.Assert(NewWhitelist("").Accept("foo"), qt.IsFalse)
		c.Assert(NewWhitelist(" ", " ").Accept("foo"), qt.IsFalse)
		c.Assert(Whitelist{}.Accept("foo"), qt.IsFalse)
	})

	// Patterns are regular expressions, so anchors must be respected.
	c.Run("One", func(c *qt.C) {
		w := NewWhitelist("^foo.*")
		c.Assert(w.Accept("foo"), qt.IsTrue)
		c.Assert(w.Accept("mfoo"), qt.IsFalse)
	})

	// Any one of multiple patterns matching is enough.
	c.Run("Multiple", func(c *qt.C) {
		w := NewWhitelist("^foo.*", "^bar.*")
		c.Assert(w.Accept("foo"), qt.IsTrue)
		c.Assert(w.Accept("bar"), qt.IsTrue)
		c.Assert(w.Accept("mbar"), qt.IsFalse)
	})

}
diff --git a/config/services/servicesConfig.go b/config/services/servicesConfig.go
new file mode 100644
index 000000000..1b4317e92
--- /dev/null
+++ b/config/services/servicesConfig.go
@@ -0,0 +1,97 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package services
+
+import (
+ "github.com/gohugoio/hugo/config"
+ "github.com/mitchellh/mapstructure"
+)
+
const (
	// servicesConfigKey is the top-level configuration key for this package.
	servicesConfigKey = "services"

	// Legacy root-level configuration keys, read as fallbacks by
	// DecodeConfig for backwards compatibility.
	disqusShortnameKey = "disqusshortname"
	googleAnalyticsKey = "googleanalytics"
	rssLimitKey = "rssLimit"
)
+
// Config is a services configuration for all the relevant services in Hugo.
type Config struct {
	Disqus          Disqus
	GoogleAnalytics GoogleAnalytics
	Instagram       Instagram
	Twitter         Twitter
	RSS             RSS
}
+
// Disqus holds the functional configuration settings related to the Disqus template.
// Configured under [services.disqus] (see DecodeConfig for the legacy root-level fallback).
type Disqus struct {
	// A Shortname is the unique identifier assigned to a Disqus site.
	Shortname string
}
+
// GoogleAnalytics holds the functional configuration settings related to the Google Analytics template.
// Configured under [services.googleAnalytics] (see DecodeConfig for the legacy root-level fallback).
type GoogleAnalytics struct {
	// The GA tracking ID.
	ID string
}
+
// Instagram holds the functional configuration settings related to the Instagram shortcodes.
type Instagram struct {
	// The Simple variant of the Instagram is decorated with Bootstrap 4 card classes.
	// This means that if you use Bootstrap 4 or want to provide your own CSS, you want
	// to disable the inline CSS provided by Hugo.
	DisableInlineCSS bool

	// App or Client Access Token.
	// If you are using a Client Access Token, remember that you must combine it with your App ID
	// using a pipe symbol (<APPID>|<CLIENTTOKEN>) otherwise the request will fail.
	AccessToken string
}
+
// Twitter holds the functional configuration settings related to the Twitter shortcodes.
type Twitter struct {
	// The Simple variant of Twitter is decorated with a basic set of inline styles.
	// This means that if you want to provide your own CSS, you want
	// to disable the inline CSS provided by Hugo.
	DisableInlineCSS bool
}
+
// RSS holds the functional configuration settings related to the RSS feeds.
type RSS struct {
	// Limit the number of pages.
	// A zero value falls back to the legacy root-level rssLimit key in DecodeConfig.
	Limit int
}
+
+// DecodeConfig creates a services Config from a given Hugo configuration.
+func DecodeConfig(cfg config.Provider) (c Config, err error) {
+ m := cfg.GetStringMap(servicesConfigKey)
+
+ err = mapstructure.WeakDecode(m, &c)
+
+ // Keep backwards compatibility.
+ if c.GoogleAnalytics.ID == "" {
+ // Try the global config
+ c.GoogleAnalytics.ID = cfg.GetString(googleAnalyticsKey)
+ }
+ if c.Disqus.Shortname == "" {
+ c.Disqus.Shortname = cfg.GetString(disqusShortnameKey)
+ }
+
+ if c.RSS.Limit == 0 {
+ c.RSS.Limit = cfg.GetInt(rssLimitKey)
+ }
+
+ return
+}
diff --git a/config/services/servicesConfig_test.go b/config/services/servicesConfig_test.go
new file mode 100644
index 000000000..826255e73
--- /dev/null
+++ b/config/services/servicesConfig_test.go
@@ -0,0 +1,67 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package services
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+)
+
+func TestDecodeConfigFromTOML(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[services]
+[services.disqus]
+shortname = "DS"
+[services.googleAnalytics]
+id = "ga_id"
+[services.instagram]
+disableInlineCSS = true
+[services.twitter]
+disableInlineCSS = true
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ config, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(config, qt.Not(qt.IsNil))
+
+ c.Assert(config.Disqus.Shortname, qt.Equals, "DS")
+ c.Assert(config.GoogleAnalytics.ID, qt.Equals, "ga_id")
+
+ c.Assert(config.Instagram.DisableInlineCSS, qt.Equals, true)
+}
+
+// Support old root-level GA settings etc.
+func TestUseSettingsFromRootIfSet(t *testing.T) {
+ c := qt.New(t)
+
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("disqusShortname", "root_short")
+ cfg.Set("googleAnalytics", "ga_root")
+
+ config, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(config, qt.Not(qt.IsNil))
+
+ c.Assert(config.Disqus.Shortname, qt.Equals, "root_short")
+ c.Assert(config.GoogleAnalytics.ID, qt.Equals, "ga_root")
+}
diff --git a/create/content.go b/create/content.go
new file mode 100644
index 000000000..5f05e25f9
--- /dev/null
+++ b/create/content.go
@@ -0,0 +1,392 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package create provides functions to create new content.
+package create
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/paths"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/spf13/afero"
+)
+
const (
	// DefaultArchetypeTemplateTemplate is the template used in 'hugo new site'
	// and the template we use as a fall back.
	// NOTE(review): .Name and .Date are presumably provided by the archetype
	// execution context (see hugolib.ContentFactory) — confirm there.
	DefaultArchetypeTemplateTemplate = `---
title: "{{ replace .Name "-" " " | title }}"
date: {{ .Date }}
draft: true
---

`
)
+
// NewContent creates a new content file in h (or a full bundle if the archetype is a directory)
// in targetPath.
func NewContent(h *hugolib.HugoSites, kind, targetPath string) error {
	if h.BaseFs.Content.Dirs == nil {
		return errors.New("no existing content directory configured for this project")
	}

	cf := hugolib.NewContentFactory(h)

	// Derive the archetype kind from the target's section if not given.
	if kind == "" {
		var err error
		kind, err = cf.SectionFromFilename(targetPath)
		if err != nil {
			return err
		}
	}

	b := &contentBuilder{
		archeTypeFs: h.PathSpec.BaseFs.Archetypes.Fs,
		sourceFs:    h.PathSpec.Fs.Source,
		ps:          h.PathSpec,
		h:           h,
		cf:          cf,

		kind:       kind,
		targetPath: targetPath,
	}

	ext := paths.Ext(targetPath)

	// Resolves b.archetypeFilename/b.isDir. They may stay unset if no
	// archetype matches; applyArcheType then falls back to the default template.
	b.setArcheTypeFilenameToUse(ext)

	// The builder runs a (partial) site build, so it needs the build lock.
	withBuildLock := func() (string, error) {
		unlock, err := h.BaseFs.LockBuild()
		if err != nil {
			return "", fmt.Errorf("failed to acquire a build lock: %s", err)
		}
		defer unlock()

		if b.isDir {
			// Directory archetypes create a whole bundle; no single file to
			// open in an editor, hence the empty filename.
			return "", b.buildDir()
		}

		if ext == "" {
			return "", fmt.Errorf("failed to resolve %q to a archetype template", targetPath)
		}

		if !files.IsContentFile(b.targetPath) {
			return "", fmt.Errorf("target path %q is not a known content format", b.targetPath)
		}

		return b.buildFile()

	}

	filename, err := withBuildLock()
	if err != nil {
		return err
	}

	// Single-file content may be opened in the configured editor.
	if filename != "" {
		return b.openInEditorIfConfigured(filename)
	}

	return nil

}
+
// contentBuilder builds new content from an archetype, either as a single
// file or as a directory bundle.
type contentBuilder struct {
	archeTypeFs afero.Fs // filesystem holding the archetype templates
	sourceFs    afero.Fs // filesystem the new content is written to

	ps *helpers.PathSpec
	h  *hugolib.HugoSites
	cf hugolib.ContentFactory

	// Builder state
	archetypeFilename string       // resolved archetype file/dir; empty means use the default template
	targetPath        string       // target content path as passed to NewContent
	kind              string       // archetype kind (normally the content section)
	isDir             bool         // whether archetypeFilename is a directory (bundle archetype)
	dirMap            archetypeMap // set by mapArcheTypeDir for directory archetypes
}
+
+func (b *contentBuilder) buildDir() error {
+ // Split the dir into content files and the rest.
+ if err := b.mapArcheTypeDir(); err != nil {
+ return err
+ }
+
+ var contentTargetFilenames []string
+ var baseDir string
+
+ for _, fi := range b.dirMap.contentFiles {
+ targetFilename := filepath.Join(b.targetPath, strings.TrimPrefix(fi.Meta().Path, b.archetypeFilename))
+ abs, err := b.cf.CreateContentPlaceHolder(targetFilename)
+ if err != nil {
+ return err
+ }
+ if baseDir == "" {
+ baseDir = strings.TrimSuffix(abs, targetFilename)
+ }
+
+ contentTargetFilenames = append(contentTargetFilenames, abs)
+ }
+
+ var contentInclusionFilter *glob.FilenameFilter
+ if !b.dirMap.siteUsed {
+ // We don't need to build everything.
+ contentInclusionFilter = glob.NewFilenameFilterForInclusionFunc(func(filename string) bool {
+ filename = strings.TrimPrefix(filename, string(os.PathSeparator))
+ for _, cn := range contentTargetFilenames {
+ if strings.Contains(cn, filename) {
+ return true
+ }
+ }
+ return false
+ })
+
+ }
+
+ if err := b.h.Build(hugolib.BuildCfg{NoBuildLock: true, SkipRender: true, ContentInclusionFilter: contentInclusionFilter}); err != nil {
+ return err
+ }
+
+ for i, filename := range contentTargetFilenames {
+ if err := b.applyArcheType(filename, b.dirMap.contentFiles[i].Meta().Path); err != nil {
+ return err
+ }
+ }
+
+ // Copy the rest as is.
+ for _, f := range b.dirMap.otherFiles {
+ meta := f.Meta()
+ filename := meta.Path
+
+ in, err := meta.Open()
+ if err != nil {
+ return fmt.Errorf("failed to open non-content file: %w", err)
+ }
+
+ targetFilename := filepath.Join(baseDir, b.targetPath, strings.TrimPrefix(filename, b.archetypeFilename))
+ targetDir := filepath.Dir(targetFilename)
+
+ if err := b.sourceFs.MkdirAll(targetDir, 0o777); err != nil && !os.IsExist(err) {
+ return fmt.Errorf("failed to create target directory for %q: %w", targetDir, err)
+ }
+
+ out, err := b.sourceFs.Create(targetFilename)
+ if err != nil {
+ return err
+ }
+
+ _, err = io.Copy(out, in)
+ if err != nil {
+ return err
+ }
+
+ in.Close()
+ out.Close()
+ }
+
+ b.h.Log.Printf("Content dir %q created", filepath.Join(baseDir, b.targetPath))
+
+ return nil
+}
+
// buildFile creates a single content file from the resolved archetype and
// returns its absolute filename.
func (b *contentBuilder) buildFile() (string, error) {
	// Create an empty placeholder so the page exists in the site build below.
	contentPlaceholderAbsFilename, err := b.cf.CreateContentPlaceHolder(b.targetPath)
	if err != nil {
		return "", err
	}

	usesSite, err := b.usesSiteVar(b.archetypeFilename)
	if err != nil {
		return "", err
	}

	var contentInclusionFilter *glob.FilenameFilter
	if !usesSite {
		// We don't need to build everything.
		contentInclusionFilter = glob.NewFilenameFilterForInclusionFunc(func(filename string) bool {
			filename = strings.TrimPrefix(filename, string(os.PathSeparator))
			return strings.Contains(contentPlaceholderAbsFilename, filename)
		})
	}

	if err := b.h.Build(hugolib.BuildCfg{NoBuildLock: true, SkipRender: true, ContentInclusionFilter: contentInclusionFilter}); err != nil {
		return "", err
	}

	// Execute the archetype against the built page, replacing the placeholder.
	if err := b.applyArcheType(contentPlaceholderAbsFilename, b.archetypeFilename); err != nil {
		return "", err
	}

	b.h.Log.Printf("Content %q created", contentPlaceholderAbsFilename)

	return contentPlaceholderAbsFilename, nil
}
+
+func (b *contentBuilder) setArcheTypeFilenameToUse(ext string) {
+ var pathsToCheck []string
+
+ if b.kind != "" {
+ pathsToCheck = append(pathsToCheck, b.kind+ext)
+ }
+
+ pathsToCheck = append(pathsToCheck, "default"+ext)
+
+ for _, p := range pathsToCheck {
+ fi, err := b.archeTypeFs.Stat(p)
+ if err == nil {
+ b.archetypeFilename = p
+ b.isDir = fi.IsDir()
+ return
+ }
+ }
+
+}
+
// applyArcheType executes the archetype template against the page created
// for contentFilename and writes the result to that file. An empty
// archetypeFilename selects DefaultArchetypeTemplateTemplate.
func (b *contentBuilder) applyArcheType(contentFilename, archetypeFilename string) error {
	p := b.h.GetContentPage(contentFilename)
	if p == nil {
		// The placeholder is created and the site built before this is
		// called, so a missing page is a programming error.
		panic(fmt.Sprintf("[BUG] no Page found for %q", contentFilename))
	}

	f, err := b.sourceFs.Create(contentFilename)
	if err != nil {
		return err
	}
	defer f.Close()

	if archetypeFilename == "" {
		return b.cf.ApplyArchetypeTemplate(f, p, b.kind, DefaultArchetypeTemplateTemplate)
	}

	return b.cf.ApplyArchetypeFilename(f, p, b.kind, archetypeFilename)

}
+
+func (b *contentBuilder) mapArcheTypeDir() error {
+ var m archetypeMap
+
+ walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if fi.IsDir() {
+ return nil
+ }
+
+ fil := fi.(hugofs.FileMetaInfo)
+
+ if files.IsContentFile(path) {
+ m.contentFiles = append(m.contentFiles, fil)
+ if !m.siteUsed {
+ m.siteUsed, err = b.usesSiteVar(path)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ }
+
+ m.otherFiles = append(m.otherFiles, fil)
+
+ return nil
+ }
+
+ walkCfg := hugofs.WalkwayConfig{
+ WalkFn: walkFn,
+ Fs: b.archeTypeFs,
+ Root: b.archetypeFilename,
+ }
+
+ w := hugofs.NewWalkway(walkCfg)
+
+ if err := w.Walk(); err != nil {
+ return fmt.Errorf("failed to walk archetype dir %q: %w", b.archetypeFilename, err)
+ }
+
+ b.dirMap = m
+
+ return nil
+}
+
+func (b *contentBuilder) openInEditorIfConfigured(filename string) error {
+ editor := b.h.Cfg.GetString("newContentEditor")
+ if editor == "" {
+ return nil
+ }
+
+ editorExec := strings.Fields(editor)[0]
+ editorFlags := strings.Fields(editor)[1:]
+
+ var args []any
+ for _, editorFlag := range editorFlags {
+ args = append(args, editorFlag)
+ }
+ args = append(
+ args,
+ filename,
+ hexec.WithStdin(os.Stdin),
+ hexec.WithStderr(os.Stderr),
+ hexec.WithStdout(os.Stdout),
+ )
+
+ b.h.Log.Printf("Editing %q with %q ...\n", filename, editorExec)
+
+ cmd, err := b.h.Deps.ExecHelper.New(editorExec, args...)
+ if err != nil {
+ return err
+ }
+
+ return cmd.Run()
+}
+
+func (b *contentBuilder) usesSiteVar(filename string) (bool, error) {
+ if filename == "" {
+ return false, nil
+ }
+ bb, err := afero.ReadFile(b.archeTypeFs, filename)
+ if err != nil {
+ return false, fmt.Errorf("failed to open archetype file: %w", err)
+ }
+
+ return bytes.Contains(bb, []byte(".Site")) || bytes.Contains(bb, []byte("site.")), nil
+
+}
+
// archetypeMap partitions the files found in a directory archetype.
type archetypeMap struct {
	// These need to be parsed and executed as Go templates.
	contentFiles []hugofs.FileMetaInfo
	// These are just copied to destination.
	otherFiles []hugofs.FileMetaInfo
	// If the templates need a fully built site. This can potentially be
	// expensive, so only do when needed.
	siteUsed bool
}
diff --git a/create/content_test.go b/create/content_test.go
new file mode 100644
index 000000000..80a666093
--- /dev/null
+++ b/create/content_test.go
@@ -0,0 +1,441 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package create_test
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/deps"
+
+ "github.com/gohugoio/hugo/hugolib"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/create"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/afero"
+)
+
+// TODO(bep) clean this up. Export the test site builder in Hugolib or something.
+func TestNewContentFromFile(t *testing.T) {
+ cases := []struct {
+ name string
+ kind string
+ path string
+ expected any
+ }{
+ {"Post", "post", "post/sample-1.md", []string{`title = "Post Arch title"`, `test = "test1"`, "date = \"2015-01-12T19:20:04-07:00\""}},
+ {"Post org-mode", "post", "post/org-1.org", []string{`#+title: ORG-1`}},
+ {"Post, unknown content filetype", "post", "post/sample-1.pdoc", false},
+ {"Empty date", "emptydate", "post/sample-ed.md", []string{`title = "Empty Date Arch title"`, `test = "test1"`}},
+ {"Archetype file not found", "stump", "stump/sample-2.md", []string{`title: "Sample 2"`}}, // no archetype file
+ {"No archetype", "", "sample-3.md", []string{`title: "Sample 3"`}}, // no archetype
+ {"Empty archetype", "product", "product/sample-4.md", []string{`title = "SAMPLE-4"`}}, // empty archetype front matter
+ {"Filenames", "filenames", "content/mypage/index.md", []string{"title = \"INDEX\"\n+++\n\n\nContentBaseName: mypage"}},
+ {"Branch Name", "name", "content/tags/tag-a/_index.md", []string{"+++\ntitle = 'Tag A'\n+++"}},
+
+ {"Lang 1", "lang", "post/lang-1.md", []string{`Site Lang: en|Name: Lang 1|i18n: Hugo Rocks!`}},
+ {"Lang 2", "lang", "post/lang-2.en.md", []string{`Site Lang: en|Name: Lang 2|i18n: Hugo Rocks!`}},
+ {"Lang nn file", "lang", "content/post/lang-3.nn.md", []string{`Site Lang: nn|Name: Lang 3|i18n: Hugo Rokkar!`}},
+ {"Lang nn dir", "lang", "content_nn/post/lang-4.md", []string{`Site Lang: nn|Name: Lang 4|i18n: Hugo Rokkar!`}},
+ {"Lang en in nn dir", "lang", "content_nn/post/lang-5.en.md", []string{`Site Lang: en|Name: Lang 5|i18n: Hugo Rocks!`}},
+ {"Lang en default", "lang", "post/my-bundle/index.md", []string{`Site Lang: en|Name: My Bundle|i18n: Hugo Rocks!`}},
+ {"Lang en file", "lang", "post/my-bundle/index.en.md", []string{`Site Lang: en|Name: My Bundle|i18n: Hugo Rocks!`}},
+ {"Lang nn bundle", "lang", "content/post/my-bundle/index.nn.md", []string{`Site Lang: nn|Name: My Bundle|i18n: Hugo Rokkar!`}},
+ {"Site", "site", "content/mypage/index.md", []string{"RegularPages .Site: 10", "RegularPages site: 10"}},
+ {"Shortcodes", "shortcodes", "shortcodes/go.md", []string{
+ `title = "GO"`,
+ "{{< myshortcode >}}",
+ "{{% myshortcode %}}",
+ "{{</* comment */>}}\n{{%/* comment */%}}",
+ }}, // shortcodes
+ }
+
+ c := qt.New(t)
+
+ for i, cas := range cases {
+ cas := cas
+
+ c.Run(cas.name, func(c *qt.C) {
+ c.Parallel()
+
+ mm := afero.NewMemMapFs()
+ c.Assert(initFs(mm), qt.IsNil)
+ cfg, fs := newTestCfg(c, mm)
+ h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
+ c.Assert(err, qt.IsNil)
+ err = create.NewContent(h, cas.kind, cas.path)
+
+ if b, ok := cas.expected.(bool); ok && !b {
+ if !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ }
+ return
+ }
+
+ c.Assert(err, qt.IsNil)
+
+ fname := filepath.FromSlash(cas.path)
+ if !strings.HasPrefix(fname, "content") {
+ fname = filepath.Join("content", fname)
+ }
+
+ content := readFileFromFs(c, fs.Source, fname)
+
+ for _, v := range cas.expected.([]string) {
+ found := strings.Contains(content, v)
+ if !found {
+ c.Fatalf("[%d] %q missing from output:\n%q", i, v, content)
+ }
+ }
+ })
+
+ }
+}
+
// TestNewContentFromDir verifies that a directory (bundle) archetype, from
// both the project and a theme, is executed for content files and copied
// verbatim for all other files.
func TestNewContentFromDir(t *testing.T) {
	mm := afero.NewMemMapFs()
	c := qt.New(t)

	archetypeDir := filepath.Join("archetypes", "my-bundle")
	c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil)

	archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle")
	c.Assert(mm.MkdirAll(archetypeThemeDir, 0o755), qt.IsNil)

	contentFile := `
File: %s
Site Lang: {{ .Site.Language.Lang }}
Name: {{ replace .Name "-" " " | title }}
i18n: {{ T "hugo" }}
`

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0o755), qt.IsNil)

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil)

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil)

	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)

	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

	c.Assert(create.NewContent(h, "my-bundle", "post/my-post"), qt.IsNil)

	// Non-content resources must be copied without template execution.
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo2.xml")), `hugo2: {{ printf "no template handling in here" }}`)

	// Content files should get the correct site context.
	// TODO(bep) archetype check i18n
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Post`, `i18n: Hugo Rocks!`)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.nn.md")), `File: index.nn.md`, `Site Lang: nn`, `Name: My Post`, `i18n: Hugo Rokkar!`)

	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/pages/bio.md")), `File: bio.md`, `Site Lang: en`, `Name: Bio`)

	// Archetypes provided by a theme work, too.
	c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post"), qt.IsNil)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Theme Post`, `i18n: Hugo Rocks!`)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`)
}
+
// TestNewContentFromDirSiteFunction verifies archetypes that reference the
// site via the site. function, plus fallback to the default bundle archetype
// and to the default single-file template.
func TestNewContentFromDirSiteFunction(t *testing.T) {
	mm := afero.NewMemMapFs()
	c := qt.New(t)

	archetypeDir := filepath.Join("archetypes", "my-bundle")
	defaultArchetypeDir := filepath.Join("archetypes", "default")
	c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil)
	c.Assert(mm.MkdirAll(defaultArchetypeDir, 0o755), qt.IsNil)

	contentFile := `
File: %s
site RegularPages: {{ len site.RegularPages }}

`

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(defaultArchetypeDir, "index.md"), []byte("default archetype index.md"), 0o755), qt.IsNil)

	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)

	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

	// site.RegularPages requires a full site build before execution.
	c.Assert(create.NewContent(h, "my-bundle", "post/my-post"), qt.IsNil)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `site RegularPages: 10`)

	// Default bundle archetype
	c.Assert(create.NewContent(h, "", "post/my-post2"), qt.IsNil)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post2/index.md")), `default archetype index.md`)

	// Regular file with bundle kind.
	c.Assert(create.NewContent(h, "my-bundle", "post/foo.md"), qt.IsNil)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/foo.md")), `draft: true`)

	// Regular files should fall back to the default archetype (we have no regular file archetype).
	c.Assert(create.NewContent(h, "my-bundle", "mypage.md"), qt.IsNil)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "mypage.md")), `draft: true`)

}
+
// TestNewContentFromDirNoSite mirrors TestNewContentFromDir with archetypes
// that do not reference the site, exercising the partial-build fast path.
func TestNewContentFromDirNoSite(t *testing.T) {
	mm := afero.NewMemMapFs()
	c := qt.New(t)

	archetypeDir := filepath.Join("archetypes", "my-bundle")
	c.Assert(mm.MkdirAll(archetypeDir, 0o755), qt.IsNil)

	archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle")
	c.Assert(mm.MkdirAll(archetypeThemeDir, 0o755), qt.IsNil)

	contentFile := `
File: %s
Name: {{ replace .Name "-" " " | title }}
i18n: {{ T "hugo" }}
`

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0o755), qt.IsNil)

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil)

	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0o755), qt.IsNil)

	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)

	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

	c.Assert(create.NewContent(h, "my-bundle", "post/my-post"), qt.IsNil)

	// Non-content resources are copied verbatim.
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/resources/hugo2.xml")), `hugo2: {{ printf "no template handling in here" }}`)

	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.md")), `File: index.md`, `Name: My Post`, `i18n: Hugo Rocks!`)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/index.nn.md")), `File: index.nn.md`, `Name: My Post`, `i18n: Hugo Rokkar!`)

	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-post/pages/bio.md")), `File: bio.md`, `Name: Bio`)

	// Theme-provided archetypes work, too.
	c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post"), qt.IsNil)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Name: My Theme Post`, `i18n: Hugo Rocks!`)
	cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`)
}
+
+func initFs(fs afero.Fs) error {
+ perm := os.FileMode(0o755)
+ var err error
+
+ // create directories
+ dirs := []string{
+ "archetypes",
+ "content",
+ filepath.Join("themes", "sample", "archetypes"),
+ }
+ for _, dir := range dirs {
+ err = fs.Mkdir(dir, perm)
+ if err != nil && !os.IsExist(err) {
+ return err
+ }
+ }
+
+ // create some dummy content
+ for i := 1; i <= 10; i++ {
+ filename := filepath.Join("content", fmt.Sprintf("page%d.md", i))
+ afero.WriteFile(fs, filename, []byte(`---
+title: Test
+---
+`), 0666)
+ }
+
+ // create archetype files
+ for _, v := range []struct {
+ path string
+ content string
+ }{
+ {
+ path: filepath.Join("archetypes", "post.md"),
+ content: "+++\ndate = \"2015-01-12T19:20:04-07:00\"\ntitle = \"Post Arch title\"\ntest = \"test1\"\n+++\n",
+ },
+ {
+ path: filepath.Join("archetypes", "post.org"),
+ content: "#+title: {{ .BaseFileName | upper }}",
+ },
+ {
+ path: filepath.Join("archetypes", "name.md"),
+ content: `+++
+title = '{{ replace .Name "-" " " | title }}'
++++`,
+ },
+ {
+ path: filepath.Join("archetypes", "product.md"),
+ content: `+++
+title = "{{ .BaseFileName | upper }}"
++++`,
+ },
+ {
+ path: filepath.Join("archetypes", "filenames.md"),
+ content: `...
+title = "{{ .BaseFileName | upper }}"
++++
+
+
+ContentBaseName: {{ .File.ContentBaseName }}
+
+`,
+ },
+ {
+ path: filepath.Join("archetypes", "site.md"),
+ content: `...
+title = "{{ .BaseFileName | upper }}"
++++
+
+Len RegularPages .Site: {{ len .Site.RegularPages }}
+Len RegularPages site: {{ len site.RegularPages }}
+
+
+`,
+ },
+ {
+ path: filepath.Join("archetypes", "emptydate.md"),
+ content: "+++\ndate =\"\"\ntitle = \"Empty Date Arch title\"\ntest = \"test1\"\n+++\n",
+ },
+ {
+ path: filepath.Join("archetypes", "lang.md"),
+ content: `Site Lang: {{ site.Language.Lang }}|Name: {{ replace .Name "-" " " | title }}|i18n: {{ T "hugo" }}`,
+ },
+ // #3623x
+ {
+ path: filepath.Join("archetypes", "shortcodes.md"),
+ content: `+++
+title = "{{ .BaseFileName | upper }}"
++++
+
+{{< myshortcode >}}
+
+Some text.
+
+{{% myshortcode %}}
+{{</* comment */>}}
+{{%/* comment */%}}
+
+
+`,
+ },
+ } {
+ f, err := fs.Create(v.path)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ _, err = f.Write([]byte(v.content))
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
// cContains asserts that v contains every one of the given match strings.
func cContains(c *qt.C, v any, matches ...string) {
	for _, m := range matches {
		c.Assert(v, qt.Contains, m)
	}
}
+
// TODO(bep) extract common testing package with this and some others

// readFileFromFs reads filename from fs and fails the test if it cannot be
// read, first printing the files that do exist below the same root to aid
// debugging.
func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
	t.Helper()
	filename = filepath.FromSlash(filename)
	b, err := afero.ReadFile(fs, filename)
	if err != nil {
		// Print some debug info
		root := strings.Split(filename, helpers.FilePathSeparator)[0]
		afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error {
			if info != nil && !info.IsDir() {
				fmt.Println("    ", path)
			}

			return nil
		})
		t.Fatalf("Failed to read file: %s", err)
	}
	return string(b)
}
+
// newTestCfg builds a two-language (en/nn) Hugo configuration on top of mm
// (a fresh MemMapFs if nil), including i18n files and a theme dir, and
// returns the loaded config plus the Hugo filesystem wrapping mm.
func newTestCfg(c *qt.C, mm afero.Fs) (config.Provider, *hugofs.Fs) {
	cfg := `

theme = "mytheme"
[languages]
[languages.en]
weight = 1
languageName = "English"
[languages.nn]
weight = 2
languageName = "Nynorsk"

[module]
[[module.mounts]]
  source = 'archetypes'
  target = 'archetypes'
[[module.mounts]]
  source = 'content'
  target = 'content'
  lang = 'en'
[[module.mounts]]
  source = 'content_nn'
  target = 'content'
  lang = 'nn'
`
	if mm == nil {
		mm = afero.NewMemMapFs()
	}

	mm.MkdirAll(filepath.FromSlash("content_nn"), 0o777)

	mm.MkdirAll(filepath.FromSlash("themes/mytheme"), 0o777)

	// Translations used by the {{ T "hugo" }} archetype calls above.
	c.Assert(afero.WriteFile(mm, filepath.Join("i18n", "en.toml"), []byte(`[hugo]
other = "Hugo Rocks!"`), 0o755), qt.IsNil)
	c.Assert(afero.WriteFile(mm, filepath.Join("i18n", "nn.toml"), []byte(`[hugo]
other = "Hugo Rokkar!"`), 0o755), qt.IsNil)

	c.Assert(afero.WriteFile(mm, "config.toml", []byte(cfg), 0o755), qt.IsNil)

	v, _, err := hugolib.LoadConfig(hugolib.ConfigSourceDescriptor{Fs: mm, Filename: "config.toml"})
	c.Assert(err, qt.IsNil)

	return v, hugofs.NewFrom(mm, v)
}
diff --git a/deploy/cloudfront.go b/deploy/cloudfront.go
new file mode 100644
index 000000000..2f6d94b18
--- /dev/null
+++ b/deploy/cloudfront.go
@@ -0,0 +1,54 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package deploy
+
+import (
+ "context"
+ "time"
+
+ "github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/session"
+ "github.com/aws/aws-sdk-go/service/cloudfront"
+)
+
+// InvalidateCloudFront invalidates the CloudFront cache for distributionID.
+// It uses the default AWS credentials from the environment.
+func InvalidateCloudFront(ctx context.Context, distributionID string) error {
+ // SharedConfigEnable enables loading "shared config (~/.aws/config) and
+ // shared credentials (~/.aws/credentials) files".
+ // See https://docs.aws.amazon.com/sdk-for-go/api/aws/session/ for more
+ // details.
+ // This is the same codepath used by Go CDK when creating an s3 URL.
+ // TODO: Update this to a Go CDK helper once available
+ // (https://github.com/google/go-cloud/issues/2003).
+ sess, err := session.NewSessionWithOptions(session.Options{SharedConfigState: session.SharedConfigEnable})
+ if err != nil {
+ return err
+ }
+ req := &cloudfront.CreateInvalidationInput{
+ DistributionId: aws.String(distributionID),
+ InvalidationBatch: &cloudfront.InvalidationBatch{
+ CallerReference: aws.String(time.Now().Format("20060102150405")),
+ Paths: &cloudfront.Paths{
+ Items: []*string{aws.String("/*")},
+ Quantity: aws.Int64(1),
+ },
+ },
+ }
+ _, err = cloudfront.New(sess).CreateInvalidationWithContext(ctx, req)
+ return err
+}
diff --git a/deploy/deploy.go b/deploy/deploy.go
new file mode 100644
index 000000000..f0a4e0178
--- /dev/null
+++ b/deploy/deploy.go
@@ -0,0 +1,741 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package deploy
+
+import (
+ "bytes"
+ "compress/gzip"
+ "context"
+ "crypto/md5"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "mime"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strings"
+ "sync"
+
+ "errors"
+
+ "github.com/dustin/go-humanize"
+ "github.com/gobwas/glob"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/media"
+ "github.com/spf13/afero"
+ jww "github.com/spf13/jwalterweatherman"
+ "golang.org/x/text/unicode/norm"
+
+ "gocloud.dev/blob"
+ _ "gocloud.dev/blob/fileblob" // import
+ _ "gocloud.dev/blob/gcsblob" // import
+ _ "gocloud.dev/blob/s3blob" // import
+ "gocloud.dev/gcerrors"
+)
+
+// Deployer supports deploying the site to target cloud providers.
+type Deployer struct {
+ localFs afero.Fs
+ bucket *blob.Bucket
+
+ target *target // the target to deploy to
+ matchers []*matcher // matchers to apply to uploaded files
+ mediaTypes media.Types // Hugo's MediaType to guess ContentType
+ ordering []*regexp.Regexp // orders uploads
+ quiet bool // true reduces STDOUT
+ confirm bool // true enables confirmation before making changes
+	dryRun        bool             // true skips confirmations and prints changes instead of applying them
+ force bool // true forces upload of all files
+ invalidateCDN bool // true enables invalidate CDN cache (if possible)
+ maxDeletes int // caps the # of files to delete; -1 to disable
+
+ // For tests...
+ summary deploySummary // summary of latest Deploy results
+}
+
+type deploySummary struct {
+ NumLocal, NumRemote, NumUploads, NumDeletes int
+}
+
+const metaMD5Hash = "md5chksum" // the meta key to store md5hash in
+
+// New constructs a new *Deployer.
+func New(cfg config.Provider, localFs afero.Fs) (*Deployer, error) {
+ targetName := cfg.GetString("target")
+
+ // Load the [deployment] section of the config.
+ dcfg, err := decodeConfig(cfg)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(dcfg.Targets) == 0 {
+ return nil, errors.New("no deployment targets found")
+ }
+
+ // Find the target to deploy to.
+ var tgt *target
+ if targetName == "" {
+ // Default to the first target.
+ tgt = dcfg.Targets[0]
+ } else {
+ for _, t := range dcfg.Targets {
+ if t.Name == targetName {
+ tgt = t
+ }
+ }
+ if tgt == nil {
+ return nil, fmt.Errorf("deployment target %q not found", targetName)
+ }
+ }
+
+ return &Deployer{
+ localFs: localFs,
+ target: tgt,
+ matchers: dcfg.Matchers,
+ ordering: dcfg.ordering,
+ mediaTypes: dcfg.mediaTypes,
+ quiet: cfg.GetBool("quiet"),
+ confirm: cfg.GetBool("confirm"),
+ dryRun: cfg.GetBool("dryRun"),
+ force: cfg.GetBool("force"),
+ invalidateCDN: cfg.GetBool("invalidateCDN"),
+ maxDeletes: cfg.GetInt("maxDeletes"),
+ }, nil
+}
+
+func (d *Deployer) openBucket(ctx context.Context) (*blob.Bucket, error) {
+ if d.bucket != nil {
+ return d.bucket, nil
+ }
+ jww.FEEDBACK.Printf("Deploying to target %q (%s)\n", d.target.Name, d.target.URL)
+ return blob.OpenBucket(ctx, d.target.URL)
+}
+
+// Deploy deploys the site to a target.
+func (d *Deployer) Deploy(ctx context.Context) error {
+ bucket, err := d.openBucket(ctx)
+ if err != nil {
+ return err
+ }
+
+ // Load local files from the source directory.
+ var include, exclude glob.Glob
+ if d.target != nil {
+ include, exclude = d.target.includeGlob, d.target.excludeGlob
+ }
+ local, err := walkLocal(d.localFs, d.matchers, include, exclude, d.mediaTypes)
+ if err != nil {
+ return err
+ }
+ jww.INFO.Printf("Found %d local files.\n", len(local))
+ d.summary.NumLocal = len(local)
+
+ // Load remote files from the target.
+ remote, err := walkRemote(ctx, bucket, include, exclude)
+ if err != nil {
+ return err
+ }
+ jww.INFO.Printf("Found %d remote files.\n", len(remote))
+ d.summary.NumRemote = len(remote)
+
+ // Diff local vs remote to see what changes need to be applied.
+ uploads, deletes := findDiffs(local, remote, d.force)
+ d.summary.NumUploads = len(uploads)
+ d.summary.NumDeletes = len(deletes)
+ if len(uploads)+len(deletes) == 0 {
+ if !d.quiet {
+ jww.FEEDBACK.Println("No changes required.")
+ }
+ return nil
+ }
+ if !d.quiet {
+ jww.FEEDBACK.Println(summarizeChanges(uploads, deletes))
+ }
+
+ // Ask for confirmation before proceeding.
+ if d.confirm && !d.dryRun {
+ fmt.Printf("Continue? (Y/n) ")
+ var confirm string
+ if _, err := fmt.Scanln(&confirm); err != nil {
+ return err
+ }
+ if confirm != "" && confirm[0] != 'y' && confirm[0] != 'Y' {
+ return errors.New("aborted")
+ }
+ }
+
+ // Order the uploads. They are organized in groups; all uploads in a group
+ // must be complete before moving on to the next group.
+ uploadGroups := applyOrdering(d.ordering, uploads)
+
+ // Apply the changes in parallel, using an inverted worker
+ // pool (https://www.youtube.com/watch?v=5zXAHh5tJqQ&t=26m58s).
+ // sem prevents more than nParallel concurrent goroutines.
+ const nParallel = 10
+ var errs []error
+ var errMu sync.Mutex // protects errs
+
+ for _, uploads := range uploadGroups {
+ // Short-circuit for an empty group.
+ if len(uploads) == 0 {
+ continue
+ }
+
+ // Within the group, apply uploads in parallel.
+ sem := make(chan struct{}, nParallel)
+ for _, upload := range uploads {
+ if d.dryRun {
+ if !d.quiet {
+ jww.FEEDBACK.Printf("[DRY RUN] Would upload: %v\n", upload)
+ }
+ continue
+ }
+
+ sem <- struct{}{}
+ go func(upload *fileToUpload) {
+ if err := doSingleUpload(ctx, bucket, upload); err != nil {
+ errMu.Lock()
+ defer errMu.Unlock()
+ errs = append(errs, err)
+ }
+ <-sem
+ }(upload)
+ }
+ // Wait for all uploads in the group to finish.
+ for n := nParallel; n > 0; n-- {
+ sem <- struct{}{}
+ }
+ }
+
+ if d.maxDeletes != -1 && len(deletes) > d.maxDeletes {
+ jww.WARN.Printf("Skipping %d deletes because it is more than --maxDeletes (%d). If this is expected, set --maxDeletes to a larger number, or -1 to disable this check.\n", len(deletes), d.maxDeletes)
+ d.summary.NumDeletes = 0
+ } else {
+ // Apply deletes in parallel.
+ sort.Slice(deletes, func(i, j int) bool { return deletes[i] < deletes[j] })
+ sem := make(chan struct{}, nParallel)
+ for _, del := range deletes {
+ if d.dryRun {
+ if !d.quiet {
+ jww.FEEDBACK.Printf("[DRY RUN] Would delete %s\n", del)
+ }
+ continue
+ }
+ sem <- struct{}{}
+ go func(del string) {
+ jww.INFO.Printf("Deleting %s...\n", del)
+ if err := bucket.Delete(ctx, del); err != nil {
+ if gcerrors.Code(err) == gcerrors.NotFound {
+ jww.WARN.Printf("Failed to delete %q because it wasn't found: %v", del, err)
+ } else {
+ errMu.Lock()
+ defer errMu.Unlock()
+ errs = append(errs, err)
+ }
+ }
+ <-sem
+ }(del)
+ }
+ // Wait for all deletes to finish.
+ for n := nParallel; n > 0; n-- {
+ sem <- struct{}{}
+ }
+ }
+ if len(errs) > 0 {
+ if !d.quiet {
+ jww.FEEDBACK.Printf("Encountered %d errors.\n", len(errs))
+ }
+ return errs[0]
+ }
+ if !d.quiet {
+ jww.FEEDBACK.Println("Success!")
+ }
+
+ if d.invalidateCDN {
+ if d.target.CloudFrontDistributionID != "" {
+ if d.dryRun {
+ if !d.quiet {
+ jww.FEEDBACK.Printf("[DRY RUN] Would invalidate CloudFront CDN with ID %s\n", d.target.CloudFrontDistributionID)
+ }
+ } else {
+ jww.FEEDBACK.Println("Invalidating CloudFront CDN...")
+ if err := InvalidateCloudFront(ctx, d.target.CloudFrontDistributionID); err != nil {
+ jww.FEEDBACK.Printf("Failed to invalidate CloudFront CDN: %v\n", err)
+ return err
+ }
+ }
+ }
+ if d.target.GoogleCloudCDNOrigin != "" {
+ if d.dryRun {
+ if !d.quiet {
+ jww.FEEDBACK.Printf("[DRY RUN] Would invalidate Google Cloud CDN with origin %s\n", d.target.GoogleCloudCDNOrigin)
+ }
+ } else {
+ jww.FEEDBACK.Println("Invalidating Google Cloud CDN...")
+ if err := InvalidateGoogleCloudCDN(ctx, d.target.GoogleCloudCDNOrigin); err != nil {
+ jww.FEEDBACK.Printf("Failed to invalidate Google Cloud CDN: %v\n", err)
+ return err
+ }
+ }
+ }
+ jww.FEEDBACK.Println("Success!")
+ }
+ return nil
+}
+
+// summarizeChanges creates a text description of the proposed changes.
+func summarizeChanges(uploads []*fileToUpload, deletes []string) string {
+ uploadSize := int64(0)
+ for _, u := range uploads {
+ uploadSize += u.Local.UploadSize
+ }
+ return fmt.Sprintf("Identified %d file(s) to upload, totaling %s, and %d file(s) to delete.", len(uploads), humanize.Bytes(uint64(uploadSize)), len(deletes))
+}
+
+// doSingleUpload executes a single file upload.
+func doSingleUpload(ctx context.Context, bucket *blob.Bucket, upload *fileToUpload) error {
+ jww.INFO.Printf("Uploading %v...\n", upload)
+ opts := &blob.WriterOptions{
+ CacheControl: upload.Local.CacheControl(),
+ ContentEncoding: upload.Local.ContentEncoding(),
+ ContentType: upload.Local.ContentType(),
+ Metadata: map[string]string{metaMD5Hash: hex.EncodeToString(upload.Local.MD5())},
+ }
+ w, err := bucket.NewWriter(ctx, upload.Local.SlashPath, opts)
+ if err != nil {
+ return err
+ }
+ r, err := upload.Local.Reader()
+ if err != nil {
+ return err
+ }
+ defer r.Close()
+ _, err = io.Copy(w, r)
+ if err != nil {
+ return err
+ }
+ if err := w.Close(); err != nil {
+ return err
+ }
+ return nil
+}
+
+// localFile represents a local file from the source. Use newLocalFile to
+// construct one.
+type localFile struct {
+ // NativePath is the native path to the file (using file.Separator).
+ NativePath string
+ // SlashPath is NativePath converted to use /.
+ SlashPath string
+ // UploadSize is the size of the content to be uploaded. It may not
+ // be the same as the local file size if the content will be
+ // gzipped before upload.
+ UploadSize int64
+
+ fs afero.Fs
+ matcher *matcher
+ md5 []byte // cache
+	gzipped    bytes.Buffer // cache of gzipped contents if gzipping
+ mediaTypes media.Types
+}
+
+// newLocalFile initializes a *localFile.
+func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *matcher, mt media.Types) (*localFile, error) {
+ f, err := fs.Open(nativePath)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+ lf := &localFile{
+ NativePath: nativePath,
+ SlashPath: slashpath,
+ fs: fs,
+ matcher: m,
+ mediaTypes: mt,
+ }
+ if m != nil && m.Gzip {
+ // We're going to gzip the content. Do it once now, and cache the result
+ // in gzipped. The UploadSize is the size of the gzipped content.
+ gz := gzip.NewWriter(&lf.gzipped)
+ if _, err := io.Copy(gz, f); err != nil {
+ return nil, err
+ }
+ if err := gz.Close(); err != nil {
+ return nil, err
+ }
+ lf.UploadSize = int64(lf.gzipped.Len())
+ } else {
+ // Raw content. Just get the UploadSize.
+ info, err := f.Stat()
+ if err != nil {
+ return nil, err
+ }
+ lf.UploadSize = info.Size()
+ }
+ return lf, nil
+}
+
+// Reader returns an io.ReadCloser for reading the content to be uploaded.
+// The caller must call Close on the returned ReaderCloser.
+// The reader content may not be the same as the local file content due to
+// gzipping.
+func (lf *localFile) Reader() (io.ReadCloser, error) {
+ if lf.matcher != nil && lf.matcher.Gzip {
+ // We've got the gzipped contents cached in gzipped.
+	// Note: we can't use lf.gzipped directly as a Reader, since it discards
+ // data after it is read, and we may read it more than once.
+ return ioutil.NopCloser(bytes.NewReader(lf.gzipped.Bytes())), nil
+ }
+ // Not expected to fail since we did it successfully earlier in newLocalFile,
+ // but could happen due to changes in the underlying filesystem.
+ return lf.fs.Open(lf.NativePath)
+}
+
+// CacheControl returns the Cache-Control header to use for lf, based on the
+// first matching matcher (if any).
+func (lf *localFile) CacheControl() string {
+ if lf.matcher == nil {
+ return ""
+ }
+ return lf.matcher.CacheControl
+}
+
+// ContentEncoding returns the Content-Encoding header to use for lf, based
+// on the matcher's Content-Encoding and Gzip fields.
+func (lf *localFile) ContentEncoding() string {
+ if lf.matcher == nil {
+ return ""
+ }
+ if lf.matcher.Gzip {
+ return "gzip"
+ }
+ return lf.matcher.ContentEncoding
+}
+
+// ContentType returns the Content-Type header to use for lf.
+// It first checks if there's a Content-Type header configured via a matching
+// matcher; if not, it tries to generate one based on the filename extension.
+// If this fails, the Content-Type will be the empty string. In this case, Go
+// Cloud will automatically try to infer a Content-Type based on the file
+// content.
+func (lf *localFile) ContentType() string {
+ if lf.matcher != nil && lf.matcher.ContentType != "" {
+ return lf.matcher.ContentType
+ }
+
+ ext := filepath.Ext(lf.NativePath)
+ if mimeType, _, found := lf.mediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, ".")); found {
+ return mimeType.Type()
+ }
+
+ return mime.TypeByExtension(ext)
+}
+
+// Force returns true if the file should be forced to re-upload based on the
+// matching matcher.
+func (lf *localFile) Force() bool {
+ return lf.matcher != nil && lf.matcher.Force
+}
+
+// MD5 returns an MD5 hash of the content to be uploaded.
+func (lf *localFile) MD5() []byte {
+ if len(lf.md5) > 0 {
+ return lf.md5
+ }
+ h := md5.New()
+ r, err := lf.Reader()
+ if err != nil {
+ return nil
+ }
+ defer r.Close()
+ if _, err := io.Copy(h, r); err != nil {
+ return nil
+ }
+ lf.md5 = h.Sum(nil)
+ return lf.md5
+}
+
+// knownHiddenDirectory checks if the specified name is a well known
+// hidden directory.
+func knownHiddenDirectory(name string) bool {
+ knownDirectories := []string{
+ ".well-known",
+ }
+
+ for _, dir := range knownDirectories {
+ if name == dir {
+ return true
+ }
+ }
+ return false
+}
+
+// walkLocal walks the source directory and returns a flat list of files,
+// using localFile.SlashPath as the map keys.
+func walkLocal(fs afero.Fs, matchers []*matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
+ retval := map[string]*localFile{}
+ err := afero.Walk(fs, "", func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ // Skip hidden directories.
+ if path != "" && strings.HasPrefix(info.Name(), ".") {
+ // Except for specific hidden directories
+ if !knownHiddenDirectory(info.Name()) {
+ return filepath.SkipDir
+ }
+ }
+ return nil
+ }
+
+ // .DS_Store is an internal MacOS attribute file; skip it.
+ if info.Name() == ".DS_Store" {
+ return nil
+ }
+
+ // When a file system is HFS+, its filepath is in NFD form.
+ if runtime.GOOS == "darwin" {
+ path = norm.NFC.String(path)
+ }
+
+ // Check include/exclude matchers.
+ slashpath := filepath.ToSlash(path)
+ if include != nil && !include.Match(slashpath) {
+ jww.INFO.Printf(" dropping %q due to include\n", slashpath)
+ return nil
+ }
+ if exclude != nil && exclude.Match(slashpath) {
+ jww.INFO.Printf(" dropping %q due to exclude\n", slashpath)
+ return nil
+ }
+
+ // Find the first matching matcher (if any).
+ var m *matcher
+ for _, cur := range matchers {
+ if cur.Matches(slashpath) {
+ m = cur
+ break
+ }
+ }
+ lf, err := newLocalFile(fs, path, slashpath, m, mediaTypes)
+ if err != nil {
+ return err
+ }
+ retval[lf.SlashPath] = lf
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ return retval, nil
+}
+
+// walkRemote walks the target bucket and returns a flat list.
+func walkRemote(ctx context.Context, bucket *blob.Bucket, include, exclude glob.Glob) (map[string]*blob.ListObject, error) {
+ retval := map[string]*blob.ListObject{}
+ iter := bucket.List(nil)
+ for {
+ obj, err := iter.Next(ctx)
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return nil, err
+ }
+ // Check include/exclude matchers.
+ if include != nil && !include.Match(obj.Key) {
+ jww.INFO.Printf(" remote dropping %q due to include\n", obj.Key)
+ continue
+ }
+ if exclude != nil && exclude.Match(obj.Key) {
+ jww.INFO.Printf(" remote dropping %q due to exclude\n", obj.Key)
+ continue
+ }
+		// If the remote didn't give us an MD5, use the remote attributes' MD5; if that doesn't exist, compute one.
+ // This can happen for some providers (e.g., fileblob, which uses the
+ // local filesystem), but not for the most common Cloud providers
+ // (S3, GCS, Azure). Although, it can happen for S3 if the blob was uploaded
+ // via a multi-part upload.
+ // Although it's unfortunate to have to read the file, it's likely better
+ // than assuming a delta and re-uploading it.
+ if len(obj.MD5) == 0 {
+ var attrMD5 []byte
+ attrs, err := bucket.Attributes(ctx, obj.Key)
+ if err == nil {
+ md5String, exists := attrs.Metadata[metaMD5Hash]
+ if exists {
+ attrMD5, _ = hex.DecodeString(md5String)
+ }
+ }
+ if len(attrMD5) == 0 {
+ r, err := bucket.NewReader(ctx, obj.Key, nil)
+ if err == nil {
+ h := md5.New()
+ if _, err := io.Copy(h, r); err == nil {
+ obj.MD5 = h.Sum(nil)
+ }
+ r.Close()
+ }
+ } else {
+ obj.MD5 = attrMD5
+ }
+ }
+ retval[obj.Key] = obj
+ }
+ return retval, nil
+}
+
+// uploadReason is an enum of reasons why a file must be uploaded.
+type uploadReason string
+
+const (
+ reasonUnknown uploadReason = "unknown"
+ reasonNotFound uploadReason = "not found at target"
+ reasonForce uploadReason = "--force"
+ reasonSize uploadReason = "size differs"
+ reasonMD5Differs uploadReason = "md5 differs"
+ reasonMD5Missing uploadReason = "remote md5 missing"
+)
+
+// fileToUpload represents a single local file that should be uploaded to
+// the target.
+type fileToUpload struct {
+ Local *localFile
+ Reason uploadReason
+}
+
+func (u *fileToUpload) String() string {
+ details := []string{humanize.Bytes(uint64(u.Local.UploadSize))}
+ if s := u.Local.CacheControl(); s != "" {
+ details = append(details, fmt.Sprintf("Cache-Control: %q", s))
+ }
+ if s := u.Local.ContentEncoding(); s != "" {
+ details = append(details, fmt.Sprintf("Content-Encoding: %q", s))
+ }
+ if s := u.Local.ContentType(); s != "" {
+ details = append(details, fmt.Sprintf("Content-Type: %q", s))
+ }
+ return fmt.Sprintf("%s (%s): %v", u.Local.SlashPath, strings.Join(details, ", "), u.Reason)
+}
+
+// findDiffs diffs localFiles vs remoteFiles to see what changes should be
+// applied to the remote target. It returns a slice of *fileToUpload and a
+// slice of paths for files to delete.
+func findDiffs(localFiles map[string]*localFile, remoteFiles map[string]*blob.ListObject, force bool) ([]*fileToUpload, []string) {
+ var uploads []*fileToUpload
+ var deletes []string
+
+ found := map[string]bool{}
+ for path, lf := range localFiles {
+ upload := false
+ reason := reasonUnknown
+
+ if remoteFile, ok := remoteFiles[path]; ok {
+ // The file exists in remote. Let's see if we need to upload it anyway.
+
+ // TODO: We don't register a diff if the metadata (e.g., Content-Type
+ // header) has changed. This would be difficult/expensive to detect; some
+ // providers return metadata along with their "List" result, but others
+ // (notably AWS S3) do not, so gocloud.dev's blob.Bucket doesn't expose
+ // it in the list result. It would require a separate request per blob
+ // to fetch. At least for now, we work around this by documenting it and
+ // providing a "force" flag (to re-upload everything) and a "force" bool
+ // per matcher (to re-upload all files in a matcher whose headers may have
+ // changed).
+ // Idea: extract a sample set of 1 file per extension + 1 file per matcher
+ // and check those files?
+ if force {
+ upload = true
+ reason = reasonForce
+ } else if lf.Force() {
+ upload = true
+ reason = reasonForce
+ } else if lf.UploadSize != remoteFile.Size {
+ upload = true
+ reason = reasonSize
+ } else if len(remoteFile.MD5) == 0 {
+ // This shouldn't happen unless the remote didn't give us an MD5 hash
+ // from List, AND we failed to compute one by reading the remote file.
+ // Default to considering the files different.
+ upload = true
+ reason = reasonMD5Missing
+ } else if !bytes.Equal(lf.MD5(), remoteFile.MD5) {
+ upload = true
+ reason = reasonMD5Differs
+ } else {
+ // Nope! Leave uploaded = false.
+ }
+ found[path] = true
+ } else {
+ // The file doesn't exist in remote.
+ upload = true
+ reason = reasonNotFound
+ }
+ if upload {
+ jww.DEBUG.Printf("%s needs to be uploaded: %v\n", path, reason)
+ uploads = append(uploads, &fileToUpload{lf, reason})
+ } else {
+ jww.DEBUG.Printf("%s exists at target and does not need to be uploaded", path)
+ }
+ }
+
+ // Remote files that weren't found locally should be deleted.
+ for path := range remoteFiles {
+ if !found[path] {
+ deletes = append(deletes, path)
+ }
+ }
+ return uploads, deletes
+}
+
+// applyOrdering returns an ordered slice of slices of uploads.
+//
+// The returned slice will have length len(ordering)+1.
+//
+// The subslice at index i, for i = 0 ... len(ordering)-1, will have all of the
+// uploads whose Local.SlashPath matched the regex at ordering[i] (but not any
+// previous ordering regex).
+// The subslice at index len(ordering) will have the remaining uploads that
+// didn't match any ordering regex.
+//
+// The subslices are sorted by Local.SlashPath.
+func applyOrdering(ordering []*regexp.Regexp, uploads []*fileToUpload) [][]*fileToUpload {
+ // Sort the whole slice by Local.SlashPath first.
+ sort.Slice(uploads, func(i, j int) bool { return uploads[i].Local.SlashPath < uploads[j].Local.SlashPath })
+
+ retval := make([][]*fileToUpload, len(ordering)+1)
+ for _, u := range uploads {
+ matched := false
+ for i, re := range ordering {
+ if re.MatchString(u.Local.SlashPath) {
+ retval[i] = append(retval[i], u)
+ matched = true
+ break
+ }
+ }
+ if !matched {
+ retval[len(ordering)] = append(retval[len(ordering)], u)
+ }
+ }
+ return retval
+}
diff --git a/deploy/deployConfig.go b/deploy/deployConfig.go
new file mode 100644
index 000000000..477751d33
--- /dev/null
+++ b/deploy/deployConfig.go
@@ -0,0 +1,164 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package deploy
+
+import (
+ "fmt"
+ "regexp"
+
+ "errors"
+
+ "github.com/gobwas/glob"
+ "github.com/gohugoio/hugo/config"
+ hglob "github.com/gohugoio/hugo/hugofs/glob"
+ "github.com/gohugoio/hugo/media"
+ "github.com/mitchellh/mapstructure"
+)
+
+const deploymentConfigKey = "deployment"
+
+// deployConfig is the complete configuration for deployment.
+type deployConfig struct {
+ Targets []*target
+ Matchers []*matcher
+ Order []string
+
+ ordering []*regexp.Regexp // compiled Order
+ mediaTypes media.Types
+}
+
+type target struct {
+ Name string
+ URL string
+
+ CloudFrontDistributionID string
+
+ // GoogleCloudCDNOrigin specifies the Google Cloud project and CDN origin to
+ // invalidate when deploying this target. It is specified as <project>/<origin>.
+ GoogleCloudCDNOrigin string
+
+ // Optional patterns of files to include/exclude for this target.
+ // Parsed using github.com/gobwas/glob.
+ Include string
+ Exclude string
+
+ // Parsed versions of Include/Exclude.
+ includeGlob glob.Glob
+ excludeGlob glob.Glob
+}
+
+func (tgt *target) parseIncludeExclude() error {
+ var err error
+ if tgt.Include != "" {
+ tgt.includeGlob, err = hglob.GetGlob(tgt.Include)
+ if err != nil {
+ return fmt.Errorf("invalid deployment.target.include %q: %v", tgt.Include, err)
+ }
+ }
+ if tgt.Exclude != "" {
+ tgt.excludeGlob, err = hglob.GetGlob(tgt.Exclude)
+ if err != nil {
+ return fmt.Errorf("invalid deployment.target.exclude %q: %v", tgt.Exclude, err)
+ }
+ }
+ return nil
+}
+
+// matcher represents configuration to be applied to files whose paths match
+// a specified pattern.
+type matcher struct {
+ // Pattern is the string pattern to match against paths.
+ // Matching is done against paths converted to use / as the path separator.
+ Pattern string
+
+ // CacheControl specifies caching attributes to use when serving the blob.
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
+ CacheControl string
+
+ // ContentEncoding specifies the encoding used for the blob's content, if any.
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
+ ContentEncoding string
+
+ // ContentType specifies the MIME type of the blob being written.
+ // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
+ ContentType string
+
+ // Gzip determines whether the file should be gzipped before upload.
+ // If so, the ContentEncoding field will automatically be set to "gzip".
+ Gzip bool
+
+ // Force indicates that matching files should be re-uploaded. Useful when
+ // other route-determined metadata (e.g., ContentType) has changed.
+ Force bool
+
+ // re is Pattern compiled.
+ re *regexp.Regexp
+}
+
+func (m *matcher) Matches(path string) bool {
+ return m.re.MatchString(path)
+}
+
+// decode creates a config from a given Hugo configuration.
+func decodeConfig(cfg config.Provider) (deployConfig, error) {
+ var (
+ mediaTypesConfig []map[string]any
+ dcfg deployConfig
+ )
+
+ if !cfg.IsSet(deploymentConfigKey) {
+ return dcfg, nil
+ }
+ if err := mapstructure.WeakDecode(cfg.GetStringMap(deploymentConfigKey), &dcfg); err != nil {
+ return dcfg, err
+ }
+ for _, tgt := range dcfg.Targets {
+ if *tgt == (target{}) {
+ return dcfg, errors.New("empty deployment target")
+ }
+ if err := tgt.parseIncludeExclude(); err != nil {
+ return dcfg, err
+ }
+ }
+ var err error
+ for _, m := range dcfg.Matchers {
+ if *m == (matcher{}) {
+ return dcfg, errors.New("empty deployment matcher")
+ }
+ m.re, err = regexp.Compile(m.Pattern)
+ if err != nil {
+ return dcfg, fmt.Errorf("invalid deployment.matchers.pattern: %v", err)
+ }
+ }
+ for _, o := range dcfg.Order {
+ re, err := regexp.Compile(o)
+ if err != nil {
+ return dcfg, fmt.Errorf("invalid deployment.orderings.pattern: %v", err)
+ }
+ dcfg.ordering = append(dcfg.ordering, re)
+ }
+
+ if cfg.IsSet("mediaTypes") {
+ mediaTypesConfig = append(mediaTypesConfig, cfg.GetStringMap("mediaTypes"))
+ }
+
+ dcfg.mediaTypes, err = media.DecodeTypes(mediaTypesConfig...)
+ if err != nil {
+ return dcfg, err
+ }
+ return dcfg, nil
+}
diff --git a/deploy/deployConfig_test.go b/deploy/deployConfig_test.go
new file mode 100644
index 000000000..ed03d57db
--- /dev/null
+++ b/deploy/deployConfig_test.go
@@ -0,0 +1,199 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package deploy
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+)
+
+func TestDecodeConfigFromTOML(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[deployment]
+
+order = ["o1", "o2"]
+
+# All lowercase.
+[[deployment.targets]]
+name = "name0"
+url = "url0"
+cloudfrontdistributionid = "cdn0"
+include = "*.html"
+
+# All uppercase.
+[[deployment.targets]]
+NAME = "name1"
+URL = "url1"
+CLOUDFRONTDISTRIBUTIONID = "cdn1"
+INCLUDE = "*.jpg"
+
+# Camelcase.
+[[deployment.targets]]
+name = "name2"
+url = "url2"
+cloudFrontDistributionID = "cdn2"
+exclude = "*.png"
+
+# All lowercase.
+[[deployment.matchers]]
+pattern = "^pattern0$"
+cachecontrol = "cachecontrol0"
+contentencoding = "contentencoding0"
+contenttype = "contenttype0"
+
+# All uppercase.
+[[deployment.matchers]]
+PATTERN = "^pattern1$"
+CACHECONTROL = "cachecontrol1"
+CONTENTENCODING = "contentencoding1"
+CONTENTTYPE = "contenttype1"
+GZIP = true
+FORCE = true
+
+# Camelcase.
+[[deployment.matchers]]
+pattern = "^pattern2$"
+cacheControl = "cachecontrol2"
+contentEncoding = "contentencoding2"
+contentType = "contenttype2"
+gzip = true
+force = true
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ dcfg, err := decodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+
+ // Order.
+ c.Assert(len(dcfg.Order), qt.Equals, 2)
+ c.Assert(dcfg.Order[0], qt.Equals, "o1")
+ c.Assert(dcfg.Order[1], qt.Equals, "o2")
+ c.Assert(len(dcfg.ordering), qt.Equals, 2)
+
+ // Targets.
+ c.Assert(len(dcfg.Targets), qt.Equals, 3)
+ wantInclude := []string{"*.html", "*.jpg", ""}
+ wantExclude := []string{"", "", "*.png"}
+ for i := 0; i < 3; i++ {
+ tgt := dcfg.Targets[i]
+ c.Assert(tgt.Name, qt.Equals, fmt.Sprintf("name%d", i))
+ c.Assert(tgt.URL, qt.Equals, fmt.Sprintf("url%d", i))
+ c.Assert(tgt.CloudFrontDistributionID, qt.Equals, fmt.Sprintf("cdn%d", i))
+ c.Assert(tgt.Include, qt.Equals, wantInclude[i])
+ if wantInclude[i] != "" {
+ c.Assert(tgt.includeGlob, qt.Not(qt.IsNil))
+ }
+ c.Assert(tgt.Exclude, qt.Equals, wantExclude[i])
+ if wantExclude[i] != "" {
+ c.Assert(tgt.excludeGlob, qt.Not(qt.IsNil))
+ }
+ }
+
+ // Matchers.
+ c.Assert(len(dcfg.Matchers), qt.Equals, 3)
+ for i := 0; i < 3; i++ {
+ m := dcfg.Matchers[i]
+ c.Assert(m.Pattern, qt.Equals, fmt.Sprintf("^pattern%d$", i))
+ c.Assert(m.re, qt.Not(qt.IsNil))
+ c.Assert(m.CacheControl, qt.Equals, fmt.Sprintf("cachecontrol%d", i))
+ c.Assert(m.ContentEncoding, qt.Equals, fmt.Sprintf("contentencoding%d", i))
+ c.Assert(m.ContentType, qt.Equals, fmt.Sprintf("contenttype%d", i))
+ c.Assert(m.Gzip, qt.Equals, i != 0)
+ c.Assert(m.Force, qt.Equals, i != 0)
+ }
+}
+
+func TestInvalidOrderingPattern(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[deployment]
+order = ["["] # invalid regular expression
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ _, err = decodeConfig(cfg)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+func TestInvalidMatcherPattern(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[deployment]
+[[deployment.matchers]]
+Pattern = "[" # invalid regular expression
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ _, err = decodeConfig(cfg)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+func TestDecodeConfigDefault(t *testing.T) {
+ c := qt.New(t)
+
+ dcfg, err := decodeConfig(config.New())
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(dcfg.Targets), qt.Equals, 0)
+ c.Assert(len(dcfg.Matchers), qt.Equals, 0)
+}
+
+func TestEmptyTarget(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+[deployment]
+[[deployment.targets]]
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ _, err = decodeConfig(cfg)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+func TestEmptyMatcher(t *testing.T) {
+ c := qt.New(t)
+
+ tomlConfig := `
+[deployment]
+[[deployment.matchers]]
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ _, err = decodeConfig(cfg)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
diff --git a/deploy/deploy_azure.go b/deploy/deploy_azure.go
new file mode 100644
index 000000000..fc7daca3b
--- /dev/null
+++ b/deploy/deploy_azure.go
@@ -0,0 +1,22 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !solaris && !nodeploy
+// +build !solaris,!nodeploy
+
+package deploy
+
+import (
+ _ "gocloud.dev/blob"
+ _ "gocloud.dev/blob/azureblob" // import
+)
diff --git a/deploy/deploy_test.go b/deploy/deploy_test.go
new file mode 100644
index 000000000..a92649b74
--- /dev/null
+++ b/deploy/deploy_test.go
@@ -0,0 +1,1065 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package deploy
+
+import (
+ "bytes"
+ "compress/gzip"
+ "context"
+ "crypto/md5"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/google/go-cmp/cmp"
+ "github.com/google/go-cmp/cmp/cmpopts"
+ "github.com/spf13/afero"
+ "gocloud.dev/blob"
+ "gocloud.dev/blob/fileblob"
+ "gocloud.dev/blob/memblob"
+)
+
+func TestFindDiffs(t *testing.T) {
+ hash1 := []byte("hash 1")
+ hash2 := []byte("hash 2")
+ makeLocal := func(path string, size int64, hash []byte) *localFile {
+ return &localFile{NativePath: path, SlashPath: filepath.ToSlash(path), UploadSize: size, md5: hash}
+ }
+ makeRemote := func(path string, size int64, hash []byte) *blob.ListObject {
+ return &blob.ListObject{Key: path, Size: size, MD5: hash}
+ }
+
+ tests := []struct {
+ Description string
+ Local []*localFile
+ Remote []*blob.ListObject
+ Force bool
+ WantUpdates []*fileToUpload
+ WantDeletes []string
+ }{
+ {
+ Description: "empty -> no diffs",
+ },
+ {
+ Description: "local == remote -> no diffs",
+ Local: []*localFile{
+ makeLocal("aaa", 1, hash1),
+ makeLocal("bbb", 2, hash1),
+ makeLocal("ccc", 3, hash2),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa", 1, hash1),
+ makeRemote("bbb", 2, hash1),
+ makeRemote("ccc", 3, hash2),
+ },
+ },
+ {
+ Description: "local w/ separators == remote -> no diffs",
+ Local: []*localFile{
+ makeLocal(filepath.Join("aaa", "aaa"), 1, hash1),
+ makeLocal(filepath.Join("bbb", "bbb"), 2, hash1),
+ makeLocal(filepath.Join("ccc", "ccc"), 3, hash2),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa/aaa", 1, hash1),
+ makeRemote("bbb/bbb", 2, hash1),
+ makeRemote("ccc/ccc", 3, hash2),
+ },
+ },
+ {
+ Description: "local == remote with force flag true -> diffs",
+ Local: []*localFile{
+ makeLocal("aaa", 1, hash1),
+ makeLocal("bbb", 2, hash1),
+ makeLocal("ccc", 3, hash2),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa", 1, hash1),
+ makeRemote("bbb", 2, hash1),
+ makeRemote("ccc", 3, hash2),
+ },
+ Force: true,
+ WantUpdates: []*fileToUpload{
+ {makeLocal("aaa", 1, nil), reasonForce},
+ {makeLocal("bbb", 2, nil), reasonForce},
+ {makeLocal("ccc", 3, nil), reasonForce},
+ },
+ },
+ {
+ Description: "local == remote with route.Force true -> diffs",
+ Local: []*localFile{
+ {NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &matcher{Force: true}, md5: hash1},
+ makeLocal("bbb", 2, hash1),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa", 1, hash1),
+ makeRemote("bbb", 2, hash1),
+ },
+ WantUpdates: []*fileToUpload{
+ {makeLocal("aaa", 1, nil), reasonForce},
+ },
+ },
+ {
+ Description: "extra local file -> upload",
+ Local: []*localFile{
+ makeLocal("aaa", 1, hash1),
+ makeLocal("bbb", 2, hash2),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa", 1, hash1),
+ },
+ WantUpdates: []*fileToUpload{
+ {makeLocal("bbb", 2, nil), reasonNotFound},
+ },
+ },
+ {
+ Description: "extra remote file -> delete",
+ Local: []*localFile{
+ makeLocal("aaa", 1, hash1),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa", 1, hash1),
+ makeRemote("bbb", 2, hash2),
+ },
+ WantDeletes: []string{"bbb"},
+ },
+ {
+ Description: "diffs in size or md5 -> upload",
+ Local: []*localFile{
+ makeLocal("aaa", 1, hash1),
+ makeLocal("bbb", 2, hash1),
+ makeLocal("ccc", 1, hash2),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("aaa", 1, nil),
+ makeRemote("bbb", 1, hash1),
+ makeRemote("ccc", 1, hash1),
+ },
+ WantUpdates: []*fileToUpload{
+ {makeLocal("aaa", 1, nil), reasonMD5Missing},
+ {makeLocal("bbb", 2, nil), reasonSize},
+ {makeLocal("ccc", 1, nil), reasonMD5Differs},
+ },
+ },
+ {
+ Description: "mix of updates and deletes",
+ Local: []*localFile{
+ makeLocal("same", 1, hash1),
+ makeLocal("updated", 2, hash1),
+ makeLocal("updated2", 1, hash2),
+ makeLocal("new", 1, hash1),
+ makeLocal("new2", 2, hash2),
+ },
+ Remote: []*blob.ListObject{
+ makeRemote("same", 1, hash1),
+ makeRemote("updated", 1, hash1),
+ makeRemote("updated2", 1, hash1),
+ makeRemote("stale", 1, hash1),
+ makeRemote("stale2", 1, hash1),
+ },
+ WantUpdates: []*fileToUpload{
+ {makeLocal("new", 1, nil), reasonNotFound},
+ {makeLocal("new2", 2, nil), reasonNotFound},
+ {makeLocal("updated", 2, nil), reasonSize},
+ {makeLocal("updated2", 1, nil), reasonMD5Differs},
+ },
+ WantDeletes: []string{"stale", "stale2"},
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.Description, func(t *testing.T) {
+ local := map[string]*localFile{}
+ for _, l := range tc.Local {
+ local[l.SlashPath] = l
+ }
+ remote := map[string]*blob.ListObject{}
+ for _, r := range tc.Remote {
+ remote[r.Key] = r
+ }
+ gotUpdates, gotDeletes := findDiffs(local, remote, tc.Force)
+ gotUpdates = applyOrdering(nil, gotUpdates)[0]
+ sort.Slice(gotDeletes, func(i, j int) bool { return gotDeletes[i] < gotDeletes[j] })
+ if diff := cmp.Diff(gotUpdates, tc.WantUpdates, cmpopts.IgnoreUnexported(localFile{})); diff != "" {
+ t.Errorf("updates differ:\n%s", diff)
+ }
+ if diff := cmp.Diff(gotDeletes, tc.WantDeletes); diff != "" {
+ t.Errorf("deletes differ:\n%s", diff)
+ }
+ })
+ }
+}
+
+func TestWalkLocal(t *testing.T) {
+ tests := map[string]struct {
+ Given []string
+ Expect []string
+ }{
+ "Empty": {
+ Given: []string{},
+ Expect: []string{},
+ },
+ "Normal": {
+ Given: []string{"file.txt", "normal_dir/file.txt"},
+ Expect: []string{"file.txt", "normal_dir/file.txt"},
+ },
+ "Hidden": {
+ Given: []string{"file.txt", ".hidden_dir/file.txt", "normal_dir/file.txt"},
+ Expect: []string{"file.txt", "normal_dir/file.txt"},
+ },
+ "Well Known": {
+ Given: []string{"file.txt", ".hidden_dir/file.txt", ".well-known/file.txt"},
+ Expect: []string{"file.txt", ".well-known/file.txt"},
+ },
+ }
+
+ for desc, tc := range tests {
+ t.Run(desc, func(t *testing.T) {
+ fs := afero.NewMemMapFs()
+ for _, name := range tc.Given {
+ dir, _ := path.Split(name)
+ if dir != "" {
+ if err := fs.MkdirAll(dir, 0755); err != nil {
+ t.Fatal(err)
+ }
+ }
+ if fd, err := fs.Create(name); err != nil {
+ t.Fatal(err)
+ } else {
+ fd.Close()
+ }
+ }
+ if got, err := walkLocal(fs, nil, nil, nil, media.DefaultTypes); err != nil {
+ t.Fatal(err)
+ } else {
+ expect := map[string]any{}
+ for _, path := range tc.Expect {
+ if _, ok := got[path]; !ok {
+ t.Errorf("expected %q in results, but was not found", path)
+ }
+ expect[path] = nil
+ }
+ for path := range got {
+ if _, ok := expect[path]; !ok {
+ t.Errorf("got %q in results unexpectedly", path)
+ }
+ }
+ }
+ })
+ }
+}
+
+func TestLocalFile(t *testing.T) {
+ const (
+ content = "hello world!"
+ )
+ contentBytes := []byte(content)
+ contentLen := int64(len(contentBytes))
+ contentMD5 := md5.Sum(contentBytes)
+ var buf bytes.Buffer
+ gz := gzip.NewWriter(&buf)
+ if _, err := gz.Write(contentBytes); err != nil {
+ t.Fatal(err)
+ }
+ gz.Close()
+ gzBytes := buf.Bytes()
+ gzLen := int64(len(gzBytes))
+ gzMD5 := md5.Sum(gzBytes)
+
+ tests := []struct {
+ Description string
+ Path string
+ Matcher *matcher
+ MediaTypesConfig []map[string]any
+ WantContent []byte
+ WantSize int64
+ WantMD5 []byte
+ WantContentType string // empty string is always OK, since content type detection is OS-specific
+ WantCacheControl string
+ WantContentEncoding string
+ }{
+ {
+ Description: "file with no suffix",
+ Path: "foo",
+ WantContent: contentBytes,
+ WantSize: contentLen,
+ WantMD5: contentMD5[:],
+ },
+ {
+ Description: "file with .txt suffix",
+ Path: "foo.txt",
+ WantContent: contentBytes,
+ WantSize: contentLen,
+ WantMD5: contentMD5[:],
+ },
+ {
+ Description: "CacheControl from matcher",
+ Path: "foo.txt",
+ Matcher: &matcher{CacheControl: "max-age=630720000"},
+ WantContent: contentBytes,
+ WantSize: contentLen,
+ WantMD5: contentMD5[:],
+ WantCacheControl: "max-age=630720000",
+ },
+ {
+ Description: "ContentEncoding from matcher",
+ Path: "foo.txt",
+ Matcher: &matcher{ContentEncoding: "foobar"},
+ WantContent: contentBytes,
+ WantSize: contentLen,
+ WantMD5: contentMD5[:],
+ WantContentEncoding: "foobar",
+ },
+ {
+ Description: "ContentType from matcher",
+ Path: "foo.txt",
+ Matcher: &matcher{ContentType: "foo/bar"},
+ WantContent: contentBytes,
+ WantSize: contentLen,
+ WantMD5: contentMD5[:],
+ WantContentType: "foo/bar",
+ },
+ {
+ Description: "gzipped content",
+ Path: "foo.txt",
+ Matcher: &matcher{Gzip: true},
+ WantContent: gzBytes,
+ WantSize: gzLen,
+ WantMD5: gzMD5[:],
+ WantContentEncoding: "gzip",
+ },
+ {
+ Description: "Custom MediaType",
+ Path: "foo.hugo",
+ MediaTypesConfig: []map[string]any{
+ {
+ "hugo/custom": map[string]any{
+ "suffixes": []string{"hugo"},
+ },
+ },
+ },
+ WantContent: contentBytes,
+ WantSize: contentLen,
+ WantMD5: contentMD5[:],
+ WantContentType: "hugo/custom",
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.Description, func(t *testing.T) {
+ fs := new(afero.MemMapFs)
+ if err := afero.WriteFile(fs, tc.Path, []byte(content), os.ModePerm); err != nil {
+ t.Fatal(err)
+ }
+ mediaTypes := media.DefaultTypes
+ if len(tc.MediaTypesConfig) > 0 {
+ mt, err := media.DecodeTypes(tc.MediaTypesConfig...)
+ if err != nil {
+ t.Fatal(err)
+ }
+ mediaTypes = mt
+ }
+ lf, err := newLocalFile(fs, tc.Path, filepath.ToSlash(tc.Path), tc.Matcher, mediaTypes)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if got := lf.UploadSize; got != tc.WantSize {
+ t.Errorf("got size %d want %d", got, tc.WantSize)
+ }
+ if got := lf.MD5(); !bytes.Equal(got, tc.WantMD5) {
+ t.Errorf("got MD5 %x want %x", got, tc.WantMD5)
+ }
+ if got := lf.CacheControl(); got != tc.WantCacheControl {
+ t.Errorf("got CacheControl %q want %q", got, tc.WantCacheControl)
+ }
+ if got := lf.ContentEncoding(); got != tc.WantContentEncoding {
+ t.Errorf("got ContentEncoding %q want %q", got, tc.WantContentEncoding)
+ }
+ if tc.WantContentType != "" {
+ if got := lf.ContentType(); got != tc.WantContentType {
+ t.Errorf("got ContentType %q want %q", got, tc.WantContentType)
+ }
+ }
+ // Verify the reader last to ensure the previous operations don't
+ // interfere with it.
+ r, err := lf.Reader()
+ if err != nil {
+ t.Fatal(err)
+ }
+ gotContent, err := ioutil.ReadAll(r)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(gotContent, tc.WantContent) {
+ t.Errorf("got content %q want %q", string(gotContent), string(tc.WantContent))
+ }
+ r.Close()
+ // Verify we can read again.
+ r, err = lf.Reader()
+ if err != nil {
+ t.Fatal(err)
+ }
+ gotContent, err = ioutil.ReadAll(r)
+ if err != nil {
+ t.Fatal(err)
+ }
+ r.Close()
+ if !bytes.Equal(gotContent, tc.WantContent) {
+ t.Errorf("got content %q want %q", string(gotContent), string(tc.WantContent))
+ }
+ })
+ }
+}
+
+func TestOrdering(t *testing.T) {
+ tests := []struct {
+ Description string
+ Uploads []string
+ Ordering []*regexp.Regexp
+ Want [][]string
+ }{
+ {
+ Description: "empty",
+ Want: [][]string{nil},
+ },
+ {
+ Description: "no ordering",
+ Uploads: []string{"c", "b", "a", "d"},
+ Want: [][]string{{"a", "b", "c", "d"}},
+ },
+ {
+ Description: "one ordering",
+ Uploads: []string{"db", "c", "b", "a", "da"},
+ Ordering: []*regexp.Regexp{regexp.MustCompile("^d")},
+ Want: [][]string{{"da", "db"}, {"a", "b", "c"}},
+ },
+ {
+ Description: "two orderings",
+ Uploads: []string{"db", "c", "b", "a", "da"},
+ Ordering: []*regexp.Regexp{
+ regexp.MustCompile("^d"),
+ regexp.MustCompile("^b"),
+ },
+ Want: [][]string{{"da", "db"}, {"b"}, {"a", "c"}},
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.Description, func(t *testing.T) {
+ uploads := make([]*fileToUpload, len(tc.Uploads))
+ for i, u := range tc.Uploads {
+ uploads[i] = &fileToUpload{Local: &localFile{SlashPath: u}}
+ }
+ gotUploads := applyOrdering(tc.Ordering, uploads)
+ var got [][]string
+ for _, subslice := range gotUploads {
+ var gotsubslice []string
+ for _, u := range subslice {
+ gotsubslice = append(gotsubslice, u.Local.SlashPath)
+ }
+ got = append(got, gotsubslice)
+ }
+ if diff := cmp.Diff(got, tc.Want); diff != "" {
+ t.Error(diff)
+ }
+ })
+ }
+}
+
+type fileData struct {
+ Name string // name of the file
+ Contents string // contents of the file
+}
+
+// initLocalFs initializes fs with some test files.
+func initLocalFs(ctx context.Context, fs afero.Fs) ([]*fileData, error) {
+ // The initial local filesystem.
+ local := []*fileData{
+ {"aaa", "aaa"},
+ {"bbb", "bbb"},
+ {"subdir/aaa", "subdir-aaa"},
+ {"subdir/nested/aaa", "subdir-nested-aaa"},
+ {"subdir2/bbb", "subdir2-bbb"},
+ }
+ if err := writeFiles(fs, local); err != nil {
+ return nil, err
+ }
+ return local, nil
+}
+
+// fsTest represents an (afero.FS, Go CDK blob.Bucket) against which end-to-end
+// tests can be run.
+type fsTest struct {
+ name string
+ fs afero.Fs
+ bucket *blob.Bucket
+}
+
+// initFsTests initializes a pair of tests for end-to-end test:
+// 1. An in-memory afero.Fs paired with an in-memory Go CDK bucket.
+// 2. A filesystem-based afero.Fs paired with an filesystem-based Go CDK bucket.
+// It returns the pair of tests and a cleanup function.
+func initFsTests() ([]*fsTest, func(), error) {
+ tmpfsdir, err := ioutil.TempDir("", "fs")
+ if err != nil {
+ return nil, nil, err
+ }
+ tmpbucketdir, err := ioutil.TempDir("", "bucket")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ memfs := afero.NewMemMapFs()
+ membucket := memblob.OpenBucket(nil)
+
+ filefs := afero.NewBasePathFs(afero.NewOsFs(), tmpfsdir)
+ filebucket, err := fileblob.OpenBucket(tmpbucketdir, nil)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ tests := []*fsTest{
+ {"mem", memfs, membucket},
+ {"file", filefs, filebucket},
+ }
+ cleanup := func() {
+ membucket.Close()
+ filebucket.Close()
+ os.RemoveAll(tmpfsdir)
+ os.RemoveAll(tmpbucketdir)
+ }
+ return tests, cleanup, nil
+}
+
+// TestEndToEndSync verifies that basic adds, updates, and deletes are working
+// correctly.
+func TestEndToEndSync(t *testing.T) {
+ ctx := context.Background()
+ tests, cleanup, err := initFsTests()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ local, err := initLocalFs(ctx, test.fs)
+ if err != nil {
+ t.Fatal(err)
+ }
+ deployer := &Deployer{
+ localFs: test.fs,
+ maxDeletes: -1,
+ bucket: test.bucket,
+ mediaTypes: media.DefaultTypes,
+ }
+
+ // Initial deployment should sync remote with local.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("initial deploy: failed: %v", err)
+ }
+ wantSummary := deploySummary{NumLocal: 5, NumRemote: 0, NumUploads: 5, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("initial deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+ if diff, err := verifyRemote(ctx, deployer.bucket, local); err != nil {
+ t.Errorf("initial deploy: failed to verify remote: %v", err)
+ } else if diff != "" {
+ t.Errorf("initial deploy: remote snapshot doesn't match expected:\n%v", diff)
+ }
+
+ // A repeat deployment shouldn't change anything.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("no-op deploy: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 0, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("no-op deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // Make some changes to the local filesystem:
+ // 1. Modify file [0].
+ // 2. Delete file [1].
+ // 3. Add a new file (sorted last).
+ updatefd := local[0]
+ updatefd.Contents = "new contents"
+ deletefd := local[1]
+ local = append(local[:1], local[2:]...) // removing deleted [1]
+ newfd := &fileData{"zzz", "zzz"}
+ local = append(local, newfd)
+ if err := writeFiles(test.fs, []*fileData{updatefd, newfd}); err != nil {
+ t.Fatal(err)
+ }
+ if err := test.fs.Remove(deletefd.Name); err != nil {
+ t.Fatal(err)
+ }
+
+ // A deployment should apply those 3 changes.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy after changes: failed: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 2, NumDeletes: 1}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("deploy after changes: got %v, want %v", deployer.summary, wantSummary)
+ }
+ if diff, err := verifyRemote(ctx, deployer.bucket, local); err != nil {
+ t.Errorf("deploy after changes: failed to verify remote: %v", err)
+ } else if diff != "" {
+ t.Errorf("deploy after changes: remote snapshot doesn't match expected:\n%v", diff)
+ }
+
+ // Again, a repeat deployment shouldn't change anything.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("no-op deploy: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 0, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("no-op deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+ })
+ }
+}
+
+// TestMaxDeletes verifies that the "maxDeletes" flag is working correctly.
+func TestMaxDeletes(t *testing.T) {
+ ctx := context.Background()
+ tests, cleanup, err := initFsTests()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ local, err := initLocalFs(ctx, test.fs)
+ if err != nil {
+ t.Fatal(err)
+ }
+ deployer := &Deployer{
+ localFs: test.fs,
+ maxDeletes: -1,
+ bucket: test.bucket,
+ mediaTypes: media.DefaultTypes,
+ }
+
+ // Sync remote with local.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("initial deploy: failed: %v", err)
+ }
+ wantSummary := deploySummary{NumLocal: 5, NumRemote: 0, NumUploads: 5, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("initial deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // Delete two files, [1] and [2].
+ if err := test.fs.Remove(local[1].Name); err != nil {
+ t.Fatal(err)
+ }
+ if err := test.fs.Remove(local[2].Name); err != nil {
+ t.Fatal(err)
+ }
+
+ // A deployment with maxDeletes=0 shouldn't change anything.
+ deployer.maxDeletes = 0
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy failed: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 3, NumRemote: 5, NumUploads: 0, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // A deployment with maxDeletes=1 shouldn't change anything either.
+ deployer.maxDeletes = 1
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy failed: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 3, NumRemote: 5, NumUploads: 0, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // A deployment with maxDeletes=2 should make the changes.
+ deployer.maxDeletes = 2
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy failed: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 3, NumRemote: 5, NumUploads: 0, NumDeletes: 2}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // Delete two more files, [0] and [3].
+ if err := test.fs.Remove(local[0].Name); err != nil {
+ t.Fatal(err)
+ }
+ if err := test.fs.Remove(local[3].Name); err != nil {
+ t.Fatal(err)
+ }
+
+ // A deployment with maxDeletes=-1 should make the changes.
+ deployer.maxDeletes = -1
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy failed: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 1, NumRemote: 3, NumUploads: 0, NumDeletes: 2}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+ })
+ }
+}
+
+// TestIncludeExclude verifies that the include/exclude options for targets work.
+func TestIncludeExclude(t *testing.T) {
+ ctx := context.Background()
+ tests := []struct {
+ Include string
+ Exclude string
+ Want deploySummary
+ }{
+ {
+ Want: deploySummary{NumLocal: 5, NumUploads: 5},
+ },
+ {
+ Include: "**aaa",
+ Want: deploySummary{NumLocal: 3, NumUploads: 3},
+ },
+ {
+ Include: "**bbb",
+ Want: deploySummary{NumLocal: 2, NumUploads: 2},
+ },
+ {
+ Include: "aaa",
+ Want: deploySummary{NumLocal: 1, NumUploads: 1},
+ },
+ {
+ Exclude: "**aaa",
+ Want: deploySummary{NumLocal: 2, NumUploads: 2},
+ },
+ {
+ Exclude: "**bbb",
+ Want: deploySummary{NumLocal: 3, NumUploads: 3},
+ },
+ {
+ Exclude: "aaa",
+ Want: deploySummary{NumLocal: 4, NumUploads: 4},
+ },
+ {
+ Include: "**aaa",
+ Exclude: "**nested**",
+ Want: deploySummary{NumLocal: 2, NumUploads: 2},
+ },
+ }
+ for _, test := range tests {
+ t.Run(fmt.Sprintf("include %q exclude %q", test.Include, test.Exclude), func(t *testing.T) {
+ fsTests, cleanup, err := initFsTests()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ fsTest := fsTests[1] // just do file-based test
+
+ _, err = initLocalFs(ctx, fsTest.fs)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tgt := &target{
+ Include: test.Include,
+ Exclude: test.Exclude,
+ }
+ if err := tgt.parseIncludeExclude(); err != nil {
+ t.Error(err)
+ }
+ deployer := &Deployer{
+ localFs: fsTest.fs,
+ maxDeletes: -1,
+ bucket: fsTest.bucket,
+ target: tgt,
+ mediaTypes: media.DefaultTypes,
+ }
+
+ // Sync remote with local.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy: failed: %v", err)
+ }
+ if !cmp.Equal(deployer.summary, test.Want) {
+ t.Errorf("deploy: got %v, want %v", deployer.summary, test.Want)
+ }
+ })
+ }
+}
+
+// TestIncludeExcludeRemoteDelete verifies deleted local files that don't match include/exclude patterns
+// are not deleted on the remote.
+func TestIncludeExcludeRemoteDelete(t *testing.T) {
+ ctx := context.Background()
+
+ tests := []struct {
+ Include string
+ Exclude string
+ Want deploySummary
+ }{
+ {
+ Want: deploySummary{NumLocal: 3, NumRemote: 5, NumUploads: 0, NumDeletes: 2},
+ },
+ {
+ Include: "**aaa",
+ Want: deploySummary{NumLocal: 2, NumRemote: 3, NumUploads: 0, NumDeletes: 1},
+ },
+ {
+ Include: "subdir/**",
+ Want: deploySummary{NumLocal: 1, NumRemote: 2, NumUploads: 0, NumDeletes: 1},
+ },
+ {
+ Exclude: "**bbb",
+ Want: deploySummary{NumLocal: 2, NumRemote: 3, NumUploads: 0, NumDeletes: 1},
+ },
+ {
+ Exclude: "bbb",
+ Want: deploySummary{NumLocal: 3, NumRemote: 4, NumUploads: 0, NumDeletes: 1},
+ },
+ }
+ for _, test := range tests {
+ t.Run(fmt.Sprintf("include %q exclude %q", test.Include, test.Exclude), func(t *testing.T) {
+ fsTests, cleanup, err := initFsTests()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ fsTest := fsTests[1] // just do file-based test
+
+ local, err := initLocalFs(ctx, fsTest.fs)
+ if err != nil {
+ t.Fatal(err)
+ }
+ deployer := &Deployer{
+ localFs: fsTest.fs,
+ maxDeletes: -1,
+ bucket: fsTest.bucket,
+ mediaTypes: media.DefaultTypes,
+ }
+
+ // Initial sync to get the files on the remote
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy: failed: %v", err)
+ }
+
+ // Delete two files, [1] and [2].
+ if err := fsTest.fs.Remove(local[1].Name); err != nil {
+ t.Fatal(err)
+ }
+ if err := fsTest.fs.Remove(local[2].Name); err != nil {
+ t.Fatal(err)
+ }
+
+ // Second sync
+ tgt := &target{
+ Include: test.Include,
+ Exclude: test.Exclude,
+ }
+ if err := tgt.parseIncludeExclude(); err != nil {
+ t.Error(err)
+ }
+ deployer.target = tgt
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy: failed: %v", err)
+ }
+
+ if !cmp.Equal(deployer.summary, test.Want) {
+ t.Errorf("deploy: got %v, want %v", deployer.summary, test.Want)
+ }
+ })
+ }
+}
+
+// TestCompression verifies that gzip compression works correctly.
+// In particular, MD5 hashes must be of the compressed content.
+func TestCompression(t *testing.T) {
+ ctx := context.Background()
+
+ tests, cleanup, err := initFsTests()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ local, err := initLocalFs(ctx, test.fs)
+ if err != nil {
+ t.Fatal(err)
+ }
+ deployer := &Deployer{
+ localFs: test.fs,
+ bucket: test.bucket,
+ matchers: []*matcher{{Pattern: ".*", Gzip: true, re: regexp.MustCompile(".*")}},
+ mediaTypes: media.DefaultTypes,
+ }
+
+ // Initial deployment should sync remote with local.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("initial deploy: failed: %v", err)
+ }
+ wantSummary := deploySummary{NumLocal: 5, NumRemote: 0, NumUploads: 5, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("initial deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // A repeat deployment shouldn't change anything.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("no-op deploy: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 0, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("no-op deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // Make an update to the local filesystem, on [1].
+ updatefd := local[1]
+ updatefd.Contents = "new contents"
+ if err := writeFiles(test.fs, []*fileData{updatefd}); err != nil {
+ t.Fatal(err)
+ }
+
+ // A deployment should apply the changes.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("deploy after changes: failed: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 1, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("deploy after changes: got %v, want %v", deployer.summary, wantSummary)
+ }
+ })
+ }
+}
+
+// TestMatching verifies that matchers match correctly, and that the Force
+// attribute for matcher works.
+func TestMatching(t *testing.T) {
+ ctx := context.Background()
+ tests, cleanup, err := initFsTests()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ _, err := initLocalFs(ctx, test.fs)
+ if err != nil {
+ t.Fatal(err)
+ }
+ deployer := &Deployer{
+ localFs: test.fs,
+ bucket: test.bucket,
+ matchers: []*matcher{{Pattern: "^subdir/aaa$", Force: true, re: regexp.MustCompile("^subdir/aaa$")}},
+ mediaTypes: media.DefaultTypes,
+ }
+
+ // Initial deployment to sync remote with local.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("initial deploy: failed: %v", err)
+ }
+ wantSummary := deploySummary{NumLocal: 5, NumRemote: 0, NumUploads: 5, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("initial deploy: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // A repeat deployment should upload a single file, the one that matched the Force matcher.
+ // Note that matching happens based on the ToSlash form, so this matches
+ // even on Windows.
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("no-op deploy with single force matcher: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 1, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("no-op deploy with single force matcher: got %v, want %v", deployer.summary, wantSummary)
+ }
+
+ // Repeat with a matcher that should now match 3 files.
+ deployer.matchers = []*matcher{{Pattern: "aaa", Force: true, re: regexp.MustCompile("aaa")}}
+ if err := deployer.Deploy(ctx); err != nil {
+ t.Errorf("no-op deploy with triple force matcher: %v", err)
+ }
+ wantSummary = deploySummary{NumLocal: 5, NumRemote: 5, NumUploads: 3, NumDeletes: 0}
+ if !cmp.Equal(deployer.summary, wantSummary) {
+ t.Errorf("no-op deploy with triple force matcher: got %v, want %v", deployer.summary, wantSummary)
+ }
+ })
+ }
+}
+
+// writeFiles writes the files in fds to fd.
+func writeFiles(fs afero.Fs, fds []*fileData) error {
+ for _, fd := range fds {
+ dir := path.Dir(fd.Name)
+ if dir != "." {
+ err := fs.MkdirAll(dir, os.ModePerm)
+ if err != nil {
+ return err
+ }
+ }
+ f, err := fs.Create(fd.Name)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ _, err = f.WriteString(fd.Contents)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// verifyRemote that the current contents of bucket matches local.
+// It returns an empty string if the contents matched, and a non-empty string
+// capturing the diff if they didn't.
+func verifyRemote(ctx context.Context, bucket *blob.Bucket, local []*fileData) (string, error) {
+ var cur []*fileData
+ iter := bucket.List(nil)
+ for {
+ obj, err := iter.Next(ctx)
+ if err == io.EOF {
+ break
+ }
+ if err != nil {
+ return "", err
+ }
+ contents, err := bucket.ReadAll(ctx, obj.Key)
+ if err != nil {
+ return "", err
+ }
+ cur = append(cur, &fileData{obj.Key, string(contents)})
+ }
+ if cmp.Equal(cur, local) {
+ return "", nil
+ }
+ diff := "got: \n"
+ for _, f := range cur {
+ diff += fmt.Sprintf(" %s: %s\n", f.Name, f.Contents)
+ }
+ diff += "want: \n"
+ for _, f := range local {
+ diff += fmt.Sprintf(" %s: %s\n", f.Name, f.Contents)
+ }
+ return diff, nil
+}
diff --git a/deploy/google.go b/deploy/google.go
new file mode 100644
index 000000000..6e492bc01
--- /dev/null
+++ b/deploy/google.go
@@ -0,0 +1,40 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !nodeploy
+// +build !nodeploy
+
+package deploy
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "google.golang.org/api/compute/v1"
+)
+
+// Invalidate all of the content in a Google Cloud CDN distribution.
+func InvalidateGoogleCloudCDN(ctx context.Context, origin string) error {
+ parts := strings.Split(origin, "/")
+ if len(parts) != 2 {
+ return fmt.Errorf("origin must be <project>/<origin>")
+ }
+ service, err := compute.NewService(ctx)
+ if err != nil {
+ return err
+ }
+ rule := &compute.CacheInvalidationRule{Path: "/*"}
+ _, err = service.UrlMaps.InvalidateCache(parts[0], parts[1], rule).Context(ctx).Do()
+ return err
+}
diff --git a/deps/deps.go b/deps/deps.go
new file mode 100644
index 000000000..ece420302
--- /dev/null
+++ b/deps/deps.go
@@ -0,0 +1,441 @@
+package deps
+
+import (
+ "fmt"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/metrics"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/source"
+ "github.com/gohugoio/hugo/tpl"
+ "github.com/spf13/cast"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+// Deps holds dependencies used by many.
+// There will be normally only one instance of deps in play
+// at a given time, i.e. one per Site built.
+type Deps struct {
+
+ // The logger to use.
+ Log loggers.Logger `json:"-"`
+
+	// Used to log errors that may repeat themselves many times.
+ LogDistinct loggers.Logger
+
+ ExecHelper *hexec.Exec
+
+ // The templates to use. This will usually implement the full tpl.TemplateManager.
+ tmpl tpl.TemplateHandler
+
+ // We use this to parse and execute ad-hoc text templates.
+ textTmpl tpl.TemplateParseFinder
+
+ // The file systems to use.
+ Fs *hugofs.Fs `json:"-"`
+
+ // The PathSpec to use
+ *helpers.PathSpec `json:"-"`
+
+ // The ContentSpec to use
+ *helpers.ContentSpec `json:"-"`
+
+ // The SourceSpec to use
+ SourceSpec *source.SourceSpec `json:"-"`
+
+ // The Resource Spec to use
+ ResourceSpec *resources.Spec
+
+ // The configuration to use
+ Cfg config.Provider `json:"-"`
+
+ // The file cache to use.
+ FileCaches filecache.Caches
+
+ // The translation func to use
+ Translate func(translationID string, templateData any) string `json:"-"`
+
+ // The language in use. TODO(bep) consolidate with site
+ Language *langs.Language
+
+ // The site building.
+ Site page.Site
+
+ // All the output formats available for the current site.
+ OutputFormatsConfig output.Formats
+
+ templateProvider ResourceProvider
+ WithTemplate func(templ tpl.TemplateManager) error `json:"-"`
+
+ // Used in tests
+ OverloadedTemplateFuncs map[string]any
+
+ translationProvider ResourceProvider
+
+ Metrics metrics.Provider
+
+ // Timeout is configurable in site config.
+ Timeout time.Duration
+
+ // BuildStartListeners will be notified before a build starts.
+ BuildStartListeners *Listeners
+
+ // Resources that gets closed when the build is done or the server shuts down.
+ BuildClosers *Closers
+
+ // Atomic values set during a build.
+ // This is common/global for all sites.
+ BuildState *BuildState
+
+ // Whether we are in running (server) mode
+ Running bool
+
+ *globalErrHandler
+}
+
+type globalErrHandler struct {
+ // Channel for some "hard to get to" build errors
+ buildErrors chan error
+}
+
+// SendError sends the error on a channel to be handled later.
+// This can be used in situations where returning and aborting the current
+// operation isn't practical.
+func (e *globalErrHandler) SendError(err error) {
+ if e.buildErrors != nil {
+ select {
+ case e.buildErrors <- err:
+ default:
+ }
+ return
+ }
+
+ jww.ERROR.Println(err)
+}
+
+func (e *globalErrHandler) StartErrorCollector() chan error {
+ e.buildErrors = make(chan error, 10)
+ return e.buildErrors
+}
+
+// Listeners holds a list of event listeners.
+type Listeners struct {
+ sync.Mutex
+
+ // A list of funcs to be notified about an event.
+ listeners []func()
+}
+
+// Add adds a function to a Listeners instance.
+func (b *Listeners) Add(f func()) {
+ if b == nil {
+ return
+ }
+ b.Lock()
+ defer b.Unlock()
+ b.listeners = append(b.listeners, f)
+}
+
+// Notify executes all listener functions.
+func (b *Listeners) Notify() {
+ b.Lock()
+ defer b.Unlock()
+ for _, notify := range b.listeners {
+ notify()
+ }
+}
+
+// ResourceProvider is used to create, refresh, and clone the resources needed.
+type ResourceProvider interface {
+ Update(deps *Deps) error
+ Clone(deps *Deps) error
+}
+
+func (d *Deps) Tmpl() tpl.TemplateHandler {
+ return d.tmpl
+}
+
+func (d *Deps) TextTmpl() tpl.TemplateParseFinder {
+ return d.textTmpl
+}
+
+func (d *Deps) SetTmpl(tmpl tpl.TemplateHandler) {
+ d.tmpl = tmpl
+}
+
+func (d *Deps) SetTextTmpl(tmpl tpl.TemplateParseFinder) {
+ d.textTmpl = tmpl
+}
+
+// LoadResources loads translations and templates.
+func (d *Deps) LoadResources() error {
+ // Note that the translations need to be loaded before the templates.
+ if err := d.translationProvider.Update(d); err != nil {
+ return fmt.Errorf("loading translations: %w", err)
+ }
+
+ if err := d.templateProvider.Update(d); err != nil {
+ return fmt.Errorf("loading templates: %w", err)
+ }
+
+ return nil
+}
+
+// New initializes a Deps struct.
+// Defaults are set for nil values,
+// but TemplateProvider, TranslationProvider and Language are always required.
+func New(cfg DepsCfg) (*Deps, error) {
+ var (
+ logger = cfg.Logger
+ fs = cfg.Fs
+ )
+
+ if cfg.TemplateProvider == nil {
+ panic("Must have a TemplateProvider")
+ }
+
+ if cfg.TranslationProvider == nil {
+ panic("Must have a TranslationProvider")
+ }
+
+ if cfg.Language == nil {
+ panic("Must have a Language")
+ }
+
+ if logger == nil {
+ logger = loggers.NewErrorLogger()
+ }
+
+ if fs == nil {
+ // Default to the production file system.
+ fs = hugofs.NewDefault(cfg.Language)
+ }
+
+ if cfg.MediaTypes == nil {
+ cfg.MediaTypes = media.DefaultTypes
+ }
+
+ if cfg.OutputFormats == nil {
+ cfg.OutputFormats = output.DefaultFormats
+ }
+
+ securityConfig, err := security.DecodeConfig(cfg.Cfg)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create security config from configuration: %w", err)
+ }
+ execHelper := hexec.New(securityConfig)
+
+ ps, err := helpers.NewPathSpec(fs, cfg.Language, logger)
+ if err != nil {
+ return nil, fmt.Errorf("create PathSpec: %w", err)
+ }
+
+ fileCaches, err := filecache.NewCaches(ps)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create file caches from configuration: %w", err)
+ }
+
+ errorHandler := &globalErrHandler{}
+ buildState := &BuildState{}
+
+ resourceSpec, err := resources.NewSpec(ps, fileCaches, buildState, logger, errorHandler, execHelper, cfg.OutputFormats, cfg.MediaTypes)
+ if err != nil {
+ return nil, err
+ }
+
+ contentSpec, err := helpers.NewContentSpec(cfg.Language, logger, ps.BaseFs.Content.Fs, execHelper)
+ if err != nil {
+ return nil, err
+ }
+
+ sp := source.NewSourceSpec(ps, nil, fs.Source)
+
+ timeoutms := cfg.Language.GetInt("timeout")
+ if timeoutms <= 0 {
+ timeoutms = 3000
+ }
+
+ ignoreErrors := cast.ToStringSlice(cfg.Cfg.Get("ignoreErrors"))
+ ignorableLogger := loggers.NewIgnorableLogger(logger, ignoreErrors...)
+
+ logDistinct := helpers.NewDistinctLogger(logger)
+
+ d := &Deps{
+ Fs: fs,
+ Log: ignorableLogger,
+ LogDistinct: logDistinct,
+ ExecHelper: execHelper,
+ templateProvider: cfg.TemplateProvider,
+ translationProvider: cfg.TranslationProvider,
+ WithTemplate: cfg.WithTemplate,
+ OverloadedTemplateFuncs: cfg.OverloadedTemplateFuncs,
+ PathSpec: ps,
+ ContentSpec: contentSpec,
+ SourceSpec: sp,
+ ResourceSpec: resourceSpec,
+ Cfg: cfg.Language,
+ Language: cfg.Language,
+ Site: cfg.Site,
+ FileCaches: fileCaches,
+ BuildStartListeners: &Listeners{},
+ BuildClosers: &Closers{},
+ BuildState: buildState,
+ Running: cfg.Running,
+ Timeout: time.Duration(timeoutms) * time.Millisecond,
+ globalErrHandler: errorHandler,
+ }
+
+ if cfg.Cfg.GetBool("templateMetrics") {
+ d.Metrics = metrics.NewProvider(cfg.Cfg.GetBool("templateMetricsHints"))
+ }
+
+ return d, nil
+}
+
+func (d *Deps) Close() error {
+ return d.BuildClosers.Close()
+}
+
+// ForLanguage creates a copy of the Deps with the language dependent
+// parts switched out.
+func (d Deps) ForLanguage(cfg DepsCfg, onCreated func(d *Deps) error) (*Deps, error) {
+ l := cfg.Language
+ var err error
+
+ d.PathSpec, err = helpers.NewPathSpecWithBaseBaseFsProvided(d.Fs, l, d.Log, d.BaseFs)
+ if err != nil {
+ return nil, err
+ }
+
+ d.ContentSpec, err = helpers.NewContentSpec(l, d.Log, d.BaseFs.Content.Fs, d.ExecHelper)
+ if err != nil {
+ return nil, err
+ }
+
+ d.Site = cfg.Site
+
+ // These are common for all sites, so reuse.
+ // TODO(bep) clean up these inits.
+ resourceCache := d.ResourceSpec.ResourceCache
+ postBuildAssets := d.ResourceSpec.PostBuildAssets
+ d.ResourceSpec, err = resources.NewSpec(d.PathSpec, d.ResourceSpec.FileCaches, d.BuildState, d.Log, d.globalErrHandler, d.ExecHelper, cfg.OutputFormats, cfg.MediaTypes)
+ if err != nil {
+ return nil, err
+ }
+ d.ResourceSpec.ResourceCache = resourceCache
+ d.ResourceSpec.PostBuildAssets = postBuildAssets
+
+ d.Cfg = l
+ d.Language = l
+
+ if onCreated != nil {
+ if err = onCreated(&d); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := d.translationProvider.Clone(&d); err != nil {
+ return nil, err
+ }
+
+ if err := d.templateProvider.Clone(&d); err != nil {
+ return nil, err
+ }
+
+ d.BuildStartListeners = &Listeners{}
+
+ return &d, nil
+}
+
+// DepsCfg contains configuration options that can be used to configure Hugo
+// on a global level, i.e. logging etc.
+// Nil values will be given default values.
+type DepsCfg struct {
+
+ // The Logger to use.
+ Logger loggers.Logger
+
+ // The file systems to use
+ Fs *hugofs.Fs
+
+ // The language to use.
+ Language *langs.Language
+
+ // The Site in use
+ Site page.Site
+
+ // The configuration to use.
+ Cfg config.Provider
+
+ // The media types configured.
+ MediaTypes media.Types
+
+ // The output formats configured.
+ OutputFormats output.Formats
+
+ // Template handling.
+ TemplateProvider ResourceProvider
+ WithTemplate func(templ tpl.TemplateManager) error
+ // Used in tests
+ OverloadedTemplateFuncs map[string]any
+
+ // i18n handling.
+ TranslationProvider ResourceProvider
+
+ // Whether we are in running (server) mode
+ Running bool
+}
+
+// BuildState holds flags that may be turned on during a build.
+type BuildState struct {
+ counter uint64
+}
+
+func (b *BuildState) Incr() int {
+ return int(atomic.AddUint64(&b.counter, uint64(1)))
+}
+
+func NewBuildState() BuildState {
+ return BuildState{}
+}
+
+type Closer interface {
+ Close() error
+}
+
+type Closers struct {
+ mu sync.Mutex
+ cs []Closer
+}
+
+func (cs *Closers) Add(c Closer) {
+ cs.mu.Lock()
+ defer cs.mu.Unlock()
+ cs.cs = append(cs.cs, c)
+}
+
+func (cs *Closers) Close() error {
+ cs.mu.Lock()
+ defer cs.mu.Unlock()
+ for _, c := range cs.cs {
+ c.Close()
+ }
+
+ cs.cs = cs.cs[:0]
+
+ return nil
+}
diff --git a/deps/deps_test.go b/deps/deps_test.go
new file mode 100644
index 000000000..d68276732
--- /dev/null
+++ b/deps/deps_test.go
@@ -0,0 +1,30 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package deps
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestBuildFlags(t *testing.T) {
+ c := qt.New(t)
+ var bf BuildState
+ bf.Incr()
+ bf.Incr()
+ bf.Incr()
+
+ c.Assert(bf.Incr(), qt.Equals, 4)
+}
diff --git a/.cspell.json b/docs/.cspell.json
index 1a197ea7b..1a197ea7b 100644
--- a/.cspell.json
+++ b/docs/.cspell.json
diff --git a/.editorconfig b/docs/.editorconfig
index dd2a0096f..dd2a0096f 100644
--- a/.editorconfig
+++ b/docs/.editorconfig
diff --git a/docs/.github/SUPPORT.md b/docs/.github/SUPPORT.md
new file mode 100644
index 000000000..cc9de09ff
--- /dev/null
+++ b/docs/.github/SUPPORT.md
@@ -0,0 +1,3 @@
+### Asking Support Questions
+
+We have an active [discussion forum](https://discourse.gohugo.io) where users and developers can ask questions. Please don't use the GitHub issue tracker to ask questions.
diff --git a/.github/stale.yml b/docs/.github/stale.yml
index 389205294..389205294 100644
--- a/.github/stale.yml
+++ b/docs/.github/stale.yml
diff --git a/.github/workflows/spellcheck.yml b/docs/.github/workflows/spellcheck.yml
index 6f68fee18..6f68fee18 100644
--- a/.github/workflows/spellcheck.yml
+++ b/docs/.github/workflows/spellcheck.yml
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 000000000..4164d21f8
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1,9 @@
+/.idea
+/.vscode
+/public
+node_modules
+nohup.out
+.DS_Store
+trace.out
+.hugo_build.lock
+resources/_gen/images/ \ No newline at end of file
diff --git a/.markdownlint.yaml b/docs/.markdownlint.yaml
index 1b76d280e..1b76d280e 100644
--- a/.markdownlint.yaml
+++ b/docs/.markdownlint.yaml
diff --git a/.vscode/extensions.json b/docs/.vscode/extensions.json
index 4ca64b165..4ca64b165 100644
--- a/.vscode/extensions.json
+++ b/docs/.vscode/extensions.json
diff --git a/LICENSE.md b/docs/LICENSE.md
index b62a9b5ff..b62a9b5ff 100644
--- a/LICENSE.md
+++ b/docs/LICENSE.md
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 000000000..a2c767b7b
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,48 @@
+[![Netlify Status](https://api.netlify.com/api/v1/badges/e0dbbfc7-34f1-4393-a679-c16e80162705/deploy-status)](https://app.netlify.com/sites/gohugoio/deploys)
+[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](https://gohugo.io/contribute/documentation/)
+
+# Hugo Docs
+
+Documentation site for [Hugo](https://github.com/gohugoio/hugo), the very fast and flexible static site generator built with love in Go.
+
+## Contributing
+
+We welcome contributions to Hugo of any kind including documentation, suggestions, bug reports, pull requests etc. Also check out our [contribution guide](https://gohugo.io/contribute/documentation/). We would love to hear from you.
+
+Note that this repository contains solely the documentation for Hugo. For contributions that aren't documentation-related please refer to the [hugo](https://github.com/gohugoio/hugo) repository.
+
+*Pull requests shall **only** contain changes to the actual documentation. However, changes on the code base of Hugo **and** the documentation shall be a single, atomic pull request in the [hugo](https://github.com/gohugoio/hugo) repository.*
+
+Spelling fixes are most welcomed, and if you want to contribute longer sections to the documentation, it would be great if you had the following criteria in mind when writing:
+
+* Short is good. People go to the library to read novels. If there is more than one way to _do a thing_ in Hugo, describe the current _best practice_ (avoid "… but you can also do …" and "… in older versions of Hugo you had to …").
+* For example, try to find short snippets that teach people about the concept. If the example is also useful as-is (copy and paste), then great. Don't list long and similar examples just so people can use them on their sites.
+* Hugo has users from all over the world, so easy to understand and [simple English](https://simple.wikipedia.org/wiki/Basic_English) is good.
+
+## Branches
+
+* The `master` branch is where the site is automatically built from, and is the place to put changes relevant to the current Hugo version.
+* The `next` branch is where we store changes that are related to the next Hugo release. This can be previewed here: https://next--gohugoio.netlify.com/
+
+## Build
+
+To view the documentation site locally, you need to clone this repository:
+
+```bash
+git clone https://github.com/gohugoio/hugoDocs.git
+```
+
+Also note that the documentation version for a given version of Hugo can also be found in the `/docs` sub-folder of the [Hugo source repository](https://github.com/gohugoio/hugo).
+
+Then to view the docs in your browser, run Hugo and open up the link:
+
+```bash
+▶ hugo server
+
+Started building sites ...
+.
+.
+Serving pages from memory
+Web Server is available at http://localhost:1313/ (bind address 127.0.0.1)
+Press Ctrl+C to stop
+```
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_algolia.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_algolia.css
index 0122f9758..0122f9758 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_algolia.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_algolia.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_animation.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_animation.css
index 997931ac4..997931ac4 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_animation.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_animation.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_carousel.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_carousel.css
index 11fae8702..11fae8702 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_carousel.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_carousel.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_chroma.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_chroma.css
index d00ea65e6..d00ea65e6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_chroma.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_chroma.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_code.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_code.css
index 66a2fc246..66a2fc246 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_code.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_code.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_color-scheme.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_color-scheme.css
index 1d61a7725..1d61a7725 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_color-scheme.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_color-scheme.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_columns.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_columns.css
index e1e938c74..e1e938c74 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_columns.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_columns.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content-tables.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content-tables.css
index 4e092e8bf..4e092e8bf 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content-tables.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content-tables.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content.css
index 9c8a8a14d..9c8a8a14d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_content.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_definition-lists.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_definition-lists.css
index e28f67d4b..e28f67d4b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_definition-lists.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_definition-lists.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_documentation-styles.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_documentation-styles.css
index 0ea8e9b72..0ea8e9b72 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_documentation-styles.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_documentation-styles.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_fluid-type.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_fluid-type.css
index da9f04c81..da9f04c81 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_fluid-type.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_fluid-type.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_font-family.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_font-family.css
index 9b451cf1c..9b451cf1c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_font-family.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_font-family.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_header-link.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_header-link.css
index 56a16be6d..56a16be6d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_header-link.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_header-link.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hljs.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hljs.css
index c49107655..c49107655 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hljs.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hljs.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hugo-internal-template-styling.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hugo-internal-template-styling.css
index 0b1df9610..0b1df9610 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hugo-internal-template-styling.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_hugo-internal-template-styling.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_no-js.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_no-js.css
index 7991450fe..7991450fe 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_no-js.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_no-js.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_social-icons.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_social-icons.css
index 04ea11ec5..04ea11ec5 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_social-icons.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_social-icons.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_stickyheader.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_stickyheader.css
index 7759bed96..7759bed96 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_stickyheader.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_stickyheader.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_svg.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_svg.css
index 299a4a963..299a4a963 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_svg.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_svg.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tabs.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tabs.css
index 6e0022cc9..6e0022cc9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tabs.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tabs.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tachyons.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tachyons.css
index d697c4d85..d697c4d85 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tachyons.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_tachyons.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_variables.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_variables.css
index 8701b1530..8701b1530 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_variables.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/_variables.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/main.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/main.css
index c71f69dd1..c71f69dd1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/main.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/css/main.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/index.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/index.js
index e309bdb99..e309bdb99 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/index.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/index.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/clipboardjs.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/clipboardjs.js
index ffae31c7f..ffae31c7f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/clipboardjs.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/clipboardjs.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/codeblocks.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/codeblocks.js
index d8039c5d6..d8039c5d6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/codeblocks.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/codeblocks.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/docsearch.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/docsearch.js
index 0074da8cd..0074da8cd 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/docsearch.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/docsearch.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/filesaver.js
index e69de29bb..e69de29bb 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/filesaver.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/hljs.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/hljs.js
index c2252e783..c2252e783 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/hljs.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/hljs.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/lazysizes.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/lazysizes.js
index 4eb3950af..4eb3950af 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/lazysizes.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/lazysizes.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/main.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/main.js
index f6d3eac9f..f6d3eac9f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/main.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/main.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/menutoggle.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/menutoggle.js
index d0e645385..d0e645385 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/menutoggle.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/menutoggle.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/nojs.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/nojs.js
index 50b5126a9..50b5126a9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/nojs.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/nojs.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/scrolldir.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/scrolldir.js
index 0b69978cd..0b69978cd 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/scrolldir.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/scrolldir.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/smoothscroll.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/smoothscroll.js
index 4bb2d99b8..4bb2d99b8 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/smoothscroll.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/smoothscroll.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/tabs.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/tabs.js
index a689d474e..a689d474e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/tabs.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/js/tabs.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/gohugoio-card-base-1.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/gohugoio-card-base-1.png
index 65555845b..65555845b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/gohugoio-card-base-1.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/gohugoio-card-base-1.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/mulish-black.ttf b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/mulish-black.ttf
index db680a088..db680a088 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/mulish-black.ttf
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/opengraph/mulish-black.ttf
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/css/app.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/css/app.css
index b5e5faa84..b5e5faa84 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/css/app.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/css/app.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/js/app.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/js/app.js
index 8d871af7b..8d871af7b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/js/app.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/assets/output/js/app.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/config.toml b/docs/_vendor/github.com/gohugoio/gohugoioTheme/config.toml
index 8ce64833a..8ce64833a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/config.toml
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/config.toml
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/data/sponsors.toml b/docs/_vendor/github.com/gohugoio/gohugoioTheme/data/sponsors.toml
index 0940da6d7..0940da6d7 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/data/sponsors.toml
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/data/sponsors.toml
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/404.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/404.html
index 9b0866d18..9b0866d18 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/404.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/404.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/_markup/render-heading.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/_markup/render-heading.html
index 6f944aee3..6f944aee3 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/_markup/render-heading.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/_markup/render-heading.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/baseof.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/baseof.html
index 527547428..527547428 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/baseof.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/baseof.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/documentation-home.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/documentation-home.html
index 91f744c30..91f744c30 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/documentation-home.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/documentation-home.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/list.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/list.html
index 3b7a2307e..3b7a2307e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/list.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/list.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/page.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/page.html
index 4d4394d1b..4d4394d1b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/page.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/page.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/single.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/single.html
index 8cd289624..8cd289624 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/single.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/single.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/taxonomy.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/taxonomy.html
index 77d1812d9..77d1812d9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/taxonomy.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/taxonomy.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/terms.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/terms.html
index 499eec598..499eec598 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/terms.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/_default/terms.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.headers b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.headers
index fedd73525..fedd73525 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.headers
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.headers
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.html
index 93dfdd6c6..93dfdd6c6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.redir b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.redir
index 2dfd2bc0f..2dfd2bc0f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.redir
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/index.redir
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/list.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/list.html
index eeb8cb2d9..eeb8cb2d9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/list.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/list.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/single.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/single.html
index 200daa70a..200daa70a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/single.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/news/single.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-section-summaries.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-section-summaries.html
index b7e37c47c..b7e37c47c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-section-summaries.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-section-summaries.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-small-news.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-small-news.html
index 0b0125740..0b0125740 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-small-news.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/boxes-small-news.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data-card.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data-card.html
index 622df7953..622df7953 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data-card.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data-card.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data.html
index 25baea80a..25baea80a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/components/author-github-data.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/functions-signature.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/functions-signature.html
index 090b9243b..090b9243b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/functions-signature.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/functions-signature.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/page-meta-data.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/page-meta-data.html
index dc8ddd01e..dc8ddd01e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/page-meta-data.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/docs/page-meta-data.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/entry-summary.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/entry-summary.html
index d9cd9c68f..d9cd9c68f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/entry-summary.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/entry-summary.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/gtag.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/gtag.html
index fe9f4d2aa..fe9f4d2aa 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/gtag.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/gtag.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/head-additions.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/head-additions.html
index af615ee7c..af615ee7c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/head-additions.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/head-additions.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hero.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hero.html
index 9e7240433..9e7240433 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hero.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hero.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-icons.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-icons.html
index a7733acdc..a7733acdc 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-icons.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-icons.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-single.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-single.html
index f36b3d674..f36b3d674 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-single.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/features-single.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/installation.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/installation.html
index a23cae11d..a23cae11d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/installation.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/installation.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/open-source-involvement.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/open-source-involvement.html
index 5300fb7a8..5300fb7a8 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/open-source-involvement.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/open-source-involvement.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/showcase.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/showcase.html
index c73cfa5e9..c73cfa5e9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/showcase.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/showcase.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/sponsors.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/sponsors.html
index a808655a6..a808655a6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/sponsors.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/sponsors.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/tweets.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/tweets.html
index 5aebf6737..5aebf6737 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/tweets.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/home-page-sections/tweets.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/after-body-start.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/after-body-start.html
index 426abd018..426abd018 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/after-body-start.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/after-body-start.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/before-body-end.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/before-body-end.html
index 426abd018..426abd018 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/before-body-end.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/hooks/before-body-end.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/icon-link.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/icon-link.html
index dec9ae48b..dec9ae48b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/icon-link.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/icon-link.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs-mobile.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs-mobile.html
index a8fc27e21..a8fc27e21 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs-mobile.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs-mobile.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs.html
index 61aa11dde..61aa11dde 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-docs.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-global-mobile.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-global-mobile.html
index 6ad98923e..6ad98923e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-global-mobile.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links-global-mobile.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links.html
index af3790b16..af3790b16 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-links.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-mobile.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-mobile.html
index b04866e52..b04866e52 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-mobile.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-mobile.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-top.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-top.html
index d8e87eb63..d8e87eb63 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-top.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/nav-top.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/get-featured-image.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/get-featured-image.html
index 79b315a44..79b315a44 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/get-featured-image.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/get-featured-image.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/opengraph.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/opengraph.html
index c8ff64889..c8ff64889 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/opengraph.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/opengraph.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/twitter_cards.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/twitter_cards.html
index 9d25d0315..9d25d0315 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/twitter_cards.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/opengraph/twitter_cards.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-edit.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-edit.html
index edf84669e..edf84669e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-edit.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-edit.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-header.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-header.html
index dcc96242f..dcc96242f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-header.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/page-header.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/pagelayout.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/pagelayout.html
index dd048223e..dd048223e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/pagelayout.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/pagelayout.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section-with-title.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section-with-title.html
index 71a14c0ef..71a14c0ef 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section-with-title.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section-with-title.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section.html
index af9f4aac1..af9f4aac1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links-in-section.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links.html
index cd43dd840..cd43dd840 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/previous-next-links.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/related.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/related.html
index fb11699af..fb11699af 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/related.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/related.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-footer.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-footer.html
index e54184996..e54184996 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-footer.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-footer.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-manifest.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-manifest.html
index 54472ba16..54472ba16 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-manifest.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-manifest.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-nav.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-nav.html
index 749c699e6..749c699e6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-nav.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-nav.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-scripts.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-scripts.html
index 7dec9de18..7dec9de18 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-scripts.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-scripts.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-search.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-search.html
index 8c97ac454..8c97ac454 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-search.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/site-search.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/social-follow.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/social-follow.html
index 7b517dbb4..7b517dbb4 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/social-follow.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/social-follow.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/summary.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/summary.html
index 0f140cf70..0f140cf70 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/summary.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/summary.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/Twitter_Logo_Blue.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/Twitter_Logo_Blue.svg
index da9438414..da9438414 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/Twitter_Logo_Blue.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/Twitter_Logo_Blue.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/apple.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/apple.svg
index 6f3c20f76..6f3c20f76 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/apple.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/apple.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clipboard.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clipboard.svg
index e1b170359..e1b170359 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clipboard.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clipboard.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clippy.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clippy.svg
index e1b170359..e1b170359 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clippy.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/clippy.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/cloud.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/cloud.svg
index 2ea15de87..2ea15de87 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/cloud.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/cloud.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/content.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/content.svg
index bc696b90b..bc696b90b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/content.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/content.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/design.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/design.svg
index 9f9d71769..9f9d71769 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/design.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/design.svg
diff --git a/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/exclamation.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/facebook.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/facebook.svg
index 6e6af44a2..6e6af44a2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/facebook.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/facebook.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/focus.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/focus.svg
index ed2c929b4..ed2c929b4 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/focus.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/focus.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/freebsd.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/freebsd.svg
index 842be09a1..842be09a1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/freebsd.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/freebsd.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/functions.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/functions.svg
index 717a35686..717a35686 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/functions.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/functions.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-corner.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-corner.svg
index 29bc57ad3..29bc57ad3 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-corner.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-corner.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-squared.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-squared.svg
index dabc741e0..dabc741e0 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-squared.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/github-squared.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gitter.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gitter.svg
index 9c2de7da2..9c2de7da2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gitter.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gitter.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gme.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gme.svg
index 9ab114aa3..9ab114aa3 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gme.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gme.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/godoc-icon.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/godoc-icon.html
index 1a6b82159..1a6b82159 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/godoc-icon.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/godoc-icon.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-2.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-2.svg
index 961221f18..961221f18 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-2.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-2.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-front.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-front.svg
index 0f8fbe0d9..0f8fbe0d9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-front.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-front.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-homepage.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-homepage.svg
index 36d9f1c41..36d9f1c41 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-homepage.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-homepage.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-side_path.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-side_path.svg
index 05cfb84d1..05cfb84d1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-side_path.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-side_path.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-small.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-small.svg
index bc1e5010c..bc1e5010c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-small.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher-small.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher.svg
index 7f6ec255c..7f6ec255c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/gopher.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo-h-only.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo-h-only.svg
index ea72a6f51..ea72a6f51 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo-h-only.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo-h-only.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo.svg
index 58d025596..58d025596 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/hugo.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_down.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_down.svg
index 3ba28c3f5..3ba28c3f5 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_down.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_down.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_up.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_up.svg
index 8ec2eb766..8ec2eb766 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_up.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_arrow_drop_up.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_left_black_24px.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_left_black_24px.svg
index da37757cf..da37757cf 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_left_black_24px.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_left_black_24px.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_right_black_24px.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_right_black_24px.svg
index 47689a91e..47689a91e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_right_black_24px.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/ic_chevron_right_black_24px.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/idea.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/idea.svg
index 5c2ccc2f4..5c2ccc2f4 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/idea.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/idea.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/instagram.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/instagram.svg
index ae915113b..ae915113b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/instagram.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/instagram.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/javascript.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/javascript.svg
index b0e2f5b0d..b0e2f5b0d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/javascript.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/javascript.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/json.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/json.svg
index d2ba6d0fc..d2ba6d0fc 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/json.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/json.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-ext.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-ext.svg
index ba9400b7f..ba9400b7f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-ext.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-ext.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-permalink.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-permalink.svg
index f5de52d02..f5de52d02 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-permalink.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/link-permalink.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/md.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/md.svg
index f1a794565..f1a794565 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/md.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/md.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/mdsolid.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/mdsolid.svg
index d0d9ae938..d0d9ae938 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/mdsolid.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/mdsolid.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/newlogo.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/newlogo.svg
index 83b706383..83b706383 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/newlogo.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/newlogo.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/sass.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/sass.svg
index da3d9cfcf..da3d9cfcf 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/sass.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/sass.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/search.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/search.svg
index 181789b54..181789b54 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/search.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/search.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/twitter.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/twitter.svg
index 247ca9062..247ca9062 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/twitter.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/twitter.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/website.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/website.svg
index 2bdcf5f94..2bdcf5f94 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/website.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/website.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/windows.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/windows.svg
index fe3bf0296..fe3bf0296 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/windows.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/windows.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/yaml.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/yaml.svg
index 59eeb71c2..59eeb71c2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/yaml.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/svg/yaml.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/tags.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/tags.html
index 59e3e51a0..59e3e51a0 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/tags.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/tags.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/toc.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/toc.html
index 583feec4f..583feec4f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/toc.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/partials/toc.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/robots.txt b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/robots.txt
index 25b9e9a0d..25b9e9a0d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/robots.txt
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/robots.txt
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/articlelist.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/articlelist.html
index 2755b1e2d..2755b1e2d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/articlelist.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/articlelist.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code-toggle.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code-toggle.html
index c695a7aae..c695a7aae 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code-toggle.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code-toggle.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code.html
index 6df49956a..6df49956a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/code.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/datatable.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/datatable.html
index 7ddda86d0..7ddda86d0 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/datatable.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/datatable.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/directoryindex.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/directoryindex.html
index 37e7d3ad1..37e7d3ad1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/directoryindex.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/directoryindex.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/docfile.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/docfile.html
index 2f982aae8..2f982aae8 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/docfile.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/docfile.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfile.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfile.html
index 226782957..226782957 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfile.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfile.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfm.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfm.html
index c0429bbe1..c0429bbe1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfm.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/exfm.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/gh.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/gh.html
index e027dc0f0..e027dc0f0 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/gh.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/gh.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/ghrepo.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/ghrepo.html
index e9df40d6a..e9df40d6a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/ghrepo.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/ghrepo.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/nohighlight.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/nohighlight.html
index 0f254b4ca..0f254b4ca 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/nohighlight.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/nohighlight.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/note.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/note.html
index 24d2cd0b2..24d2cd0b2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/note.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/note.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/output.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/output.html
index df1a8ae89..df1a8ae89 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/output.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/output.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/readfile.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/readfile.html
index f777abe26..f777abe26 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/readfile.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/readfile.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/tip.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/tip.html
index 139e3376b..139e3376b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/tip.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/tip.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/warning.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/warning.html
index c9147be64..c9147be64 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/warning.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/warning.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/yt.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/yt.html
index 6915cec5f..6915cec5f 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/yt.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/shortcodes/yt.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/list.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/list.html
index bff52ad8d..bff52ad8d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/list.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/list.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/single.html b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/single.html
index 5ae1e07a7..5ae1e07a7 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/single.html
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/layouts/showcase/single.html
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/package.json b/docs/_vendor/github.com/gohugoio/gohugoioTheme/package.json
index 9222aad76..9222aad76 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/package.json
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/package.json
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-144x144.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-144x144.png
index 975cb33ba..975cb33ba 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-144x144.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-144x144.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-192x192.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-192x192.png
index 7ab6c3849..7ab6c3849 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-192x192.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-192x192.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-256x256.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-256x256.png
index ed88a2224..ed88a2224 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-256x256.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-256x256.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-36x36.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-36x36.png
index 3695eb088..3695eb088 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-36x36.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-36x36.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-48x48.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-48x48.png
index ca275dad6..ca275dad6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-48x48.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-48x48.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-72x72.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-72x72.png
index 966891f25..966891f25 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-72x72.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-72x72.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-96x96.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-96x96.png
index feb1d3ebf..feb1d3ebf 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-96x96.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/android-chrome-96x96.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/apple-touch-icon.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/apple-touch-icon.png
index ecf1fc020..ecf1fc020 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/apple-touch-icon.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/apple-touch-icon.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/browserconfig.xml b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/browserconfig.xml
index 62400c5f2..62400c5f2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/browserconfig.xml
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/browserconfig.xml
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/app.bundle.js b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/app.bundle.js
index 6391e71e9..6391e71e9 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/app.bundle.js
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/app.bundle.js
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/main.css b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/main.css
index 51107f438..51107f438 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/main.css
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/dist/main.css
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-16x16.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-16x16.png
index c62ce6fb2..c62ce6fb2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-16x16.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-16x16.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-32x32.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-32x32.png
index 57a018e35..57a018e35 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-32x32.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon-32x32.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon.ico b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon.ico
index dc007a99e..dc007a99e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon.ico
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/favicon.ico
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff
index 97602c761..97602c761 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff2
index 858a4e9af..858a4e9af 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff
index 472e5740a..472e5740a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff2
index 449772391..449772391 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-200italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff
index 4579c75d7..4579c75d7 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff2
index 6c211a7ed..6c211a7ed 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff
index c739550ce..c739550ce 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff2
index db9e434c5..db9e434c5 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-300italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff
index 342b3aad2..342b3aad2 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff2
index f3e9d31af..f3e9d31af 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff
index 89bdcbd90..89bdcbd90 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff2
index b78e3bd39..b78e3bd39 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-400italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff
index e31fd2c52..e31fd2c52 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff2
index 6f1f8026b..6f1f8026b 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff
index e2b4a0154..e2b4a0154 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff2
index fafd8076a..fafd8076a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-600italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff
index d2152c4ea..d2152c4ea 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff2
index 1cedfcd14..1cedfcd14 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff
index 016fa059c..016fa059c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff2
index fa9697232..fa9697232 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-700italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff
index 9fd9939dc..9fd9939dc 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff2
index 4cdf7bc78..4cdf7bc78 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff
index 2d0c0d2ff..2d0c0d2ff 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff2
index ee51dd38a..ee51dd38a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-800italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff
index 1b343ad2c..1b343ad2c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff2
index 1252216a0..1252216a0 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff
index 0aad09765..0aad09765 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff2 b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff2
index fd4e66bfb..fd4e66bfb 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff2
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/fonts/muli-latin-900italic.woff2
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/GitHub-Mark-64px.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/GitHub-Mark-64px.png
index 9965f8d33..9965f8d33 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/GitHub-Mark-64px.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/GitHub-Mark-64px.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gohugoio-card.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gohugoio-card.png
index 93dedacfa..93dedacfa 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gohugoio-card.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gohugoio-card.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-hero.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-hero.svg
index 36d9f1c41..36d9f1c41 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-hero.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-hero.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-side_color.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-side_color.svg
index 85f949783..85f949783 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-side_color.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/gopher-side_color.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/home-page-templating-example.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/home-page-templating-example.png
index 771da8d88..771da8d88 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/home-page-templating-example.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/home-page-templating-example.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes.jpg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes.jpg
index 9ac768bb0..9ac768bb0 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes.jpg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes.jpg
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes_not-optimized-according-to-google.jpg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes_not-optimized-according-to-google.jpg
index 64424b24e..64424b24e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes_not-optimized-according-to-google.jpg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/homepage-screenshot-hugo-themes_not-optimized-according-to-google.jpg
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/hugo-logo-wide.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/hugo-logo-wide.svg
index 1f6a79ea6..1f6a79ea6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/hugo-logo-wide.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/hugo-logo-wide.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-built-in-templates.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-built-in-templates.svg
index 40cb249c6..40cb249c6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-built-in-templates.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-built-in-templates.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-content-management.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-content-management.svg
index e6df93b9d..e6df93b9d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-content-management.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-content-management.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-fast.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-fast.svg
index 0db21fce1..0db21fce1 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-fast.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-fast.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual.svg
index 2ac859285..2ac859285 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual2.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual2.svg
index a65c77208..a65c77208 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual2.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-multilingual2.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-search.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-search.png
index 2eb9c504e..2eb9c504e 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-search.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-search.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-shortcodes.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-shortcodes.svg
index b5cc252d6..b5cc252d6 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-shortcodes.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/icon-shortcodes.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/netlify-dark.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/netlify-dark.svg
index 2d2724070..2d2724070 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/netlify-dark.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/netlify-dark.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/site-hierarchy.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/site-hierarchy.svg
index 7d1a043e8..7d1a043e8 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/site-hierarchy.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/site-hierarchy.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/brave-logo.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/brave-logo.svg
index 27cdd974d..27cdd974d 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/brave-logo.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/brave-logo.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/esolia-logo.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/esolia-logo.svg
index 3f5344c61..3f5344c61 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/esolia-logo.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/esolia-logo.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/forestry-logotype.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/forestry-logotype.svg
index ac95cd444..ac95cd444 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/forestry-logotype.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/forestry-logotype.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/linode-logo_standard_light_medium.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/linode-logo_standard_light_medium.png
index 269e6af84..269e6af84 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/linode-logo_standard_light_medium.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/images/sponsors/linode-logo_standard_light_medium.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/manifest.json b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/manifest.json
index e671ac45a..e671ac45a 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/manifest.json
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/manifest.json
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-144x144.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-144x144.png
index e54b4bd75..e54b4bd75 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-144x144.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-144x144.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-150x150.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-150x150.png
index c7b84c690..c7b84c690 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-150x150.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-150x150.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-310x310.png b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-310x310.png
index 2cde5c08c..2cde5c08c 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-310x310.png
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/mstile-310x310.png
Binary files differ
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/static/safari-pinned-tab.svg b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/safari-pinned-tab.svg
index 80ff2dae3..80ff2dae3 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/static/safari-pinned-tab.svg
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/static/safari-pinned-tab.svg
diff --git a/_vendor/github.com/gohugoio/gohugoioTheme/theme.toml b/docs/_vendor/github.com/gohugoio/gohugoioTheme/theme.toml
index 8d678e7b8..8d678e7b8 100644
--- a/_vendor/github.com/gohugoio/gohugoioTheme/theme.toml
+++ b/docs/_vendor/github.com/gohugoio/gohugoioTheme/theme.toml
diff --git a/_vendor/modules.txt b/docs/_vendor/modules.txt
index b755ed7f6..b755ed7f6 100644
--- a/_vendor/modules.txt
+++ b/docs/_vendor/modules.txt
diff --git a/archetypes/functions.md b/docs/archetypes/functions.md
index cc9b02b36..cc9b02b36 100644
--- a/archetypes/functions.md
+++ b/docs/archetypes/functions.md
diff --git a/archetypes/news/index.md b/docs/archetypes/news/index.md
index 42e207e6a..42e207e6a 100644
--- a/archetypes/news/index.md
+++ b/docs/archetypes/news/index.md
diff --git a/archetypes/showcase/bio.md b/docs/archetypes/showcase/bio.md
index 2443c2f35..2443c2f35 100644
--- a/archetypes/showcase/bio.md
+++ b/docs/archetypes/showcase/bio.md
diff --git a/archetypes/showcase/featured.png b/docs/archetypes/showcase/featured.png
index 4f390132e..4f390132e 100644
--- a/archetypes/showcase/featured.png
+++ b/docs/archetypes/showcase/featured.png
Binary files differ
diff --git a/archetypes/showcase/index.md b/docs/archetypes/showcase/index.md
index a21bb9726..a21bb9726 100644
--- a/archetypes/showcase/index.md
+++ b/docs/archetypes/showcase/index.md
diff --git a/config.toml b/docs/config.toml
index ef8d99d6d..ef8d99d6d 100644
--- a/config.toml
+++ b/docs/config.toml
diff --git a/config/_default/config.toml b/docs/config/_default/config.toml
index 4d3bb1f60..4d3bb1f60 100644
--- a/config/_default/config.toml
+++ b/docs/config/_default/config.toml
diff --git a/config/_default/languages.toml b/docs/config/_default/languages.toml
index c9914d84d..c9914d84d 100644
--- a/config/_default/languages.toml
+++ b/docs/config/_default/languages.toml
diff --git a/config/_default/markup.toml b/docs/config/_default/markup.toml
index fb68fae23..fb68fae23 100644
--- a/config/_default/markup.toml
+++ b/docs/config/_default/markup.toml
diff --git a/config/_default/menus/menus.en.toml b/docs/config/_default/menus/menus.en.toml
index a31218a4f..a31218a4f 100644
--- a/config/_default/menus/menus.en.toml
+++ b/docs/config/_default/menus/menus.en.toml
diff --git a/config/_default/menus/menus.zh.toml b/docs/config/_default/menus/menus.zh.toml
index 2f68be67b..2f68be67b 100644
--- a/config/_default/menus/menus.zh.toml
+++ b/docs/config/_default/menus/menus.zh.toml
diff --git a/config/_default/params.toml b/docs/config/_default/params.toml
index f123287b2..f123287b2 100644
--- a/config/_default/params.toml
+++ b/docs/config/_default/params.toml
diff --git a/config/_default/security.toml b/docs/config/_default/security.toml
index 2be3f1ba8..2be3f1ba8 100644
--- a/config/_default/security.toml
+++ b/docs/config/_default/security.toml
diff --git a/config/development/params.toml b/docs/config/development/params.toml
index 4cd7314ab..4cd7314ab 100644
--- a/config/development/params.toml
+++ b/docs/config/development/params.toml
diff --git a/config/production/config.toml b/docs/config/production/config.toml
index 961f04d35..961f04d35 100644
--- a/config/production/config.toml
+++ b/docs/config/production/config.toml
diff --git a/config/production/params.toml b/docs/config/production/params.toml
index d0071fe65..d0071fe65 100644
--- a/config/production/params.toml
+++ b/docs/config/production/params.toml
diff --git a/content/en/_index.md b/docs/content/en/_index.md
index b4e602438..b4e602438 100644
--- a/content/en/_index.md
+++ b/docs/content/en/_index.md
diff --git a/content/en/about/_index.md b/docs/content/en/about/_index.md
index 8ed441b61..8ed441b61 100644
--- a/content/en/about/_index.md
+++ b/docs/content/en/about/_index.md
diff --git a/content/en/about/benefits.md b/docs/content/en/about/benefits.md
index d5c13a9c3..d5c13a9c3 100644
--- a/content/en/about/benefits.md
+++ b/docs/content/en/about/benefits.md
diff --git a/content/en/about/features.md b/docs/content/en/about/features.md
index ed8c25821..ed8c25821 100644
--- a/content/en/about/features.md
+++ b/docs/content/en/about/features.md
diff --git a/content/en/about/hugo-and-gdpr.md b/docs/content/en/about/hugo-and-gdpr.md
index 269c5d432..269c5d432 100644
--- a/content/en/about/hugo-and-gdpr.md
+++ b/docs/content/en/about/hugo-and-gdpr.md
diff --git a/content/en/about/license.md b/docs/content/en/about/license.md
index ae74b6047..ae74b6047 100644
--- a/content/en/about/license.md
+++ b/docs/content/en/about/license.md
diff --git a/content/en/about/security-model/hugo-security-model-featured.png b/docs/content/en/about/security-model/hugo-security-model-featured.png
index 5592d104b..5592d104b 100644
--- a/content/en/about/security-model/hugo-security-model-featured.png
+++ b/docs/content/en/about/security-model/hugo-security-model-featured.png
Binary files differ
diff --git a/content/en/about/security-model/index.md b/docs/content/en/about/security-model/index.md
index 461c7fe77..461c7fe77 100644
--- a/content/en/about/security-model/index.md
+++ b/docs/content/en/about/security-model/index.md
diff --git a/content/en/about/what-is-hugo.md b/docs/content/en/about/what-is-hugo.md
index 69ca1fddb..69ca1fddb 100644
--- a/content/en/about/what-is-hugo.md
+++ b/docs/content/en/about/what-is-hugo.md
diff --git a/content/en/commands/hugo.md b/docs/content/en/commands/hugo.md
index e51a79f48..e51a79f48 100644
--- a/content/en/commands/hugo.md
+++ b/docs/content/en/commands/hugo.md
diff --git a/content/en/commands/hugo_completion.md b/docs/content/en/commands/hugo_completion.md
index 309af4fe9..309af4fe9 100644
--- a/content/en/commands/hugo_completion.md
+++ b/docs/content/en/commands/hugo_completion.md
diff --git a/content/en/commands/hugo_completion_bash.md b/docs/content/en/commands/hugo_completion_bash.md
index ccdd979ae..ccdd979ae 100644
--- a/content/en/commands/hugo_completion_bash.md
+++ b/docs/content/en/commands/hugo_completion_bash.md
diff --git a/content/en/commands/hugo_completion_fish.md b/docs/content/en/commands/hugo_completion_fish.md
index 26cdb9b5f..26cdb9b5f 100644
--- a/content/en/commands/hugo_completion_fish.md
+++ b/docs/content/en/commands/hugo_completion_fish.md
diff --git a/content/en/commands/hugo_completion_powershell.md b/docs/content/en/commands/hugo_completion_powershell.md
index 85bf87836..85bf87836 100644
--- a/content/en/commands/hugo_completion_powershell.md
+++ b/docs/content/en/commands/hugo_completion_powershell.md
diff --git a/content/en/commands/hugo_completion_zsh.md b/docs/content/en/commands/hugo_completion_zsh.md
index 7dfeab233..7dfeab233 100644
--- a/content/en/commands/hugo_completion_zsh.md
+++ b/docs/content/en/commands/hugo_completion_zsh.md
diff --git a/content/en/commands/hugo_config.md b/docs/content/en/commands/hugo_config.md
index e751416cc..e751416cc 100644
--- a/content/en/commands/hugo_config.md
+++ b/docs/content/en/commands/hugo_config.md
diff --git a/content/en/commands/hugo_config_mounts.md b/docs/content/en/commands/hugo_config_mounts.md
index 0bcc3ac78..0bcc3ac78 100644
--- a/content/en/commands/hugo_config_mounts.md
+++ b/docs/content/en/commands/hugo_config_mounts.md
diff --git a/content/en/commands/hugo_convert.md b/docs/content/en/commands/hugo_convert.md
index 2e5e9aa6a..2e5e9aa6a 100644
--- a/content/en/commands/hugo_convert.md
+++ b/docs/content/en/commands/hugo_convert.md
diff --git a/content/en/commands/hugo_convert_toJSON.md b/docs/content/en/commands/hugo_convert_toJSON.md
index 2c437d13a..2c437d13a 100644
--- a/content/en/commands/hugo_convert_toJSON.md
+++ b/docs/content/en/commands/hugo_convert_toJSON.md
diff --git a/content/en/commands/hugo_convert_toTOML.md b/docs/content/en/commands/hugo_convert_toTOML.md
index d45935beb..d45935beb 100644
--- a/content/en/commands/hugo_convert_toTOML.md
+++ b/docs/content/en/commands/hugo_convert_toTOML.md
diff --git a/content/en/commands/hugo_convert_toYAML.md b/docs/content/en/commands/hugo_convert_toYAML.md
index 113efcd6b..113efcd6b 100644
--- a/content/en/commands/hugo_convert_toYAML.md
+++ b/docs/content/en/commands/hugo_convert_toYAML.md
diff --git a/content/en/commands/hugo_deploy.md b/docs/content/en/commands/hugo_deploy.md
index b8638c674..b8638c674 100644
--- a/content/en/commands/hugo_deploy.md
+++ b/docs/content/en/commands/hugo_deploy.md
diff --git a/content/en/commands/hugo_env.md b/docs/content/en/commands/hugo_env.md
index c1d7b67b7..c1d7b67b7 100644
--- a/content/en/commands/hugo_env.md
+++ b/docs/content/en/commands/hugo_env.md
diff --git a/content/en/commands/hugo_gen.md b/docs/content/en/commands/hugo_gen.md
index aeb5a66ca..aeb5a66ca 100644
--- a/content/en/commands/hugo_gen.md
+++ b/docs/content/en/commands/hugo_gen.md
diff --git a/content/en/commands/hugo_gen_chromastyles.md b/docs/content/en/commands/hugo_gen_chromastyles.md
index 6c39faf7d..6c39faf7d 100644
--- a/content/en/commands/hugo_gen_chromastyles.md
+++ b/docs/content/en/commands/hugo_gen_chromastyles.md
diff --git a/content/en/commands/hugo_gen_doc.md b/docs/content/en/commands/hugo_gen_doc.md
index f8d1ab4e5..f8d1ab4e5 100644
--- a/content/en/commands/hugo_gen_doc.md
+++ b/docs/content/en/commands/hugo_gen_doc.md
diff --git a/content/en/commands/hugo_gen_man.md b/docs/content/en/commands/hugo_gen_man.md
index a01b7ccd3..a01b7ccd3 100644
--- a/content/en/commands/hugo_gen_man.md
+++ b/docs/content/en/commands/hugo_gen_man.md
diff --git a/content/en/commands/hugo_import.md b/docs/content/en/commands/hugo_import.md
index e297ff927..e297ff927 100644
--- a/content/en/commands/hugo_import.md
+++ b/docs/content/en/commands/hugo_import.md
diff --git a/content/en/commands/hugo_import_jekyll.md b/docs/content/en/commands/hugo_import_jekyll.md
index 5686f511e..5686f511e 100644
--- a/content/en/commands/hugo_import_jekyll.md
+++ b/docs/content/en/commands/hugo_import_jekyll.md
diff --git a/content/en/commands/hugo_list.md b/docs/content/en/commands/hugo_list.md
index 4099d01ee..4099d01ee 100644
--- a/content/en/commands/hugo_list.md
+++ b/docs/content/en/commands/hugo_list.md
diff --git a/content/en/commands/hugo_list_all.md b/docs/content/en/commands/hugo_list_all.md
index a179df343..a179df343 100644
--- a/content/en/commands/hugo_list_all.md
+++ b/docs/content/en/commands/hugo_list_all.md
diff --git a/content/en/commands/hugo_list_drafts.md b/docs/content/en/commands/hugo_list_drafts.md
index 0ef3525dd..0ef3525dd 100644
--- a/content/en/commands/hugo_list_drafts.md
+++ b/docs/content/en/commands/hugo_list_drafts.md
diff --git a/content/en/commands/hugo_list_expired.md b/docs/content/en/commands/hugo_list_expired.md
index 8e43a6c14..8e43a6c14 100644
--- a/content/en/commands/hugo_list_expired.md
+++ b/docs/content/en/commands/hugo_list_expired.md
diff --git a/content/en/commands/hugo_list_future.md b/docs/content/en/commands/hugo_list_future.md
index db756392a..db756392a 100644
--- a/content/en/commands/hugo_list_future.md
+++ b/docs/content/en/commands/hugo_list_future.md
diff --git a/content/en/commands/hugo_mod.md b/docs/content/en/commands/hugo_mod.md
index 91cd7c743..91cd7c743 100644
--- a/content/en/commands/hugo_mod.md
+++ b/docs/content/en/commands/hugo_mod.md
diff --git a/content/en/commands/hugo_mod_clean.md b/docs/content/en/commands/hugo_mod_clean.md
index d99db11b2..d99db11b2 100644
--- a/content/en/commands/hugo_mod_clean.md
+++ b/docs/content/en/commands/hugo_mod_clean.md
diff --git a/content/en/commands/hugo_mod_get.md b/docs/content/en/commands/hugo_mod_get.md
index 298a5808b..298a5808b 100644
--- a/content/en/commands/hugo_mod_get.md
+++ b/docs/content/en/commands/hugo_mod_get.md
diff --git a/content/en/commands/hugo_mod_graph.md b/docs/content/en/commands/hugo_mod_graph.md
index bff68e217..bff68e217 100644
--- a/content/en/commands/hugo_mod_graph.md
+++ b/docs/content/en/commands/hugo_mod_graph.md
diff --git a/content/en/commands/hugo_mod_init.md b/docs/content/en/commands/hugo_mod_init.md
index 5cd25be1a..5cd25be1a 100644
--- a/content/en/commands/hugo_mod_init.md
+++ b/docs/content/en/commands/hugo_mod_init.md
diff --git a/content/en/commands/hugo_mod_npm.md b/docs/content/en/commands/hugo_mod_npm.md
index c9dc16d7b..c9dc16d7b 100644
--- a/content/en/commands/hugo_mod_npm.md
+++ b/docs/content/en/commands/hugo_mod_npm.md
diff --git a/content/en/commands/hugo_mod_npm_pack.md b/docs/content/en/commands/hugo_mod_npm_pack.md
index 134cb66c4..134cb66c4 100644
--- a/content/en/commands/hugo_mod_npm_pack.md
+++ b/docs/content/en/commands/hugo_mod_npm_pack.md
diff --git a/content/en/commands/hugo_mod_tidy.md b/docs/content/en/commands/hugo_mod_tidy.md
index 448697b1c..448697b1c 100644
--- a/content/en/commands/hugo_mod_tidy.md
+++ b/docs/content/en/commands/hugo_mod_tidy.md
diff --git a/content/en/commands/hugo_mod_vendor.md b/docs/content/en/commands/hugo_mod_vendor.md
index 5758a3b49..5758a3b49 100644
--- a/content/en/commands/hugo_mod_vendor.md
+++ b/docs/content/en/commands/hugo_mod_vendor.md
diff --git a/content/en/commands/hugo_mod_verify.md b/docs/content/en/commands/hugo_mod_verify.md
index 6a21c2076..6a21c2076 100644
--- a/content/en/commands/hugo_mod_verify.md
+++ b/docs/content/en/commands/hugo_mod_verify.md
diff --git a/content/en/commands/hugo_new.md b/docs/content/en/commands/hugo_new.md
index d294b65b3..d294b65b3 100644
--- a/content/en/commands/hugo_new.md
+++ b/docs/content/en/commands/hugo_new.md
diff --git a/content/en/commands/hugo_new_site.md b/docs/content/en/commands/hugo_new_site.md
index 10ef7c7ae..10ef7c7ae 100644
--- a/content/en/commands/hugo_new_site.md
+++ b/docs/content/en/commands/hugo_new_site.md
diff --git a/content/en/commands/hugo_new_theme.md b/docs/content/en/commands/hugo_new_theme.md
index 5d2d40216..5d2d40216 100644
--- a/content/en/commands/hugo_new_theme.md
+++ b/docs/content/en/commands/hugo_new_theme.md
diff --git a/content/en/commands/hugo_server.md b/docs/content/en/commands/hugo_server.md
index f79a8d5e9..f79a8d5e9 100644
--- a/content/en/commands/hugo_server.md
+++ b/docs/content/en/commands/hugo_server.md
diff --git a/content/en/commands/hugo_version.md b/docs/content/en/commands/hugo_version.md
index e6e8fcaa1..e6e8fcaa1 100644
--- a/content/en/commands/hugo_version.md
+++ b/docs/content/en/commands/hugo_version.md
diff --git a/content/en/content-management/_index.md b/docs/content/en/content-management/_index.md
index 28f2ecf82..28f2ecf82 100644
--- a/content/en/content-management/_index.md
+++ b/docs/content/en/content-management/_index.md
diff --git a/content/en/content-management/archetypes.md b/docs/content/en/content-management/archetypes.md
index 354ef0fef..354ef0fef 100644
--- a/content/en/content-management/archetypes.md
+++ b/docs/content/en/content-management/archetypes.md
diff --git a/content/en/content-management/build-options.md b/docs/content/en/content-management/build-options.md
index 7bbb772df..7bbb772df 100644
--- a/content/en/content-management/build-options.md
+++ b/docs/content/en/content-management/build-options.md
diff --git a/content/en/content-management/comments.md b/docs/content/en/content-management/comments.md
index ad3a1b55d..ad3a1b55d 100644
--- a/content/en/content-management/comments.md
+++ b/docs/content/en/content-management/comments.md
diff --git a/content/en/content-management/cross-references.md b/docs/content/en/content-management/cross-references.md
index b5dd8a4c3..b5dd8a4c3 100644
--- a/content/en/content-management/cross-references.md
+++ b/docs/content/en/content-management/cross-references.md
diff --git a/content/en/content-management/diagrams.md b/docs/content/en/content-management/diagrams.md
index 243a70fd4..243a70fd4 100644
--- a/content/en/content-management/diagrams.md
+++ b/docs/content/en/content-management/diagrams.md
diff --git a/content/en/content-management/formats.md b/docs/content/en/content-management/formats.md
index 303bb4596..303bb4596 100644
--- a/content/en/content-management/formats.md
+++ b/docs/content/en/content-management/formats.md
diff --git a/content/en/content-management/front-matter.md b/docs/content/en/content-management/front-matter.md
index 0d8c2e3a6..0d8c2e3a6 100644
--- a/content/en/content-management/front-matter.md
+++ b/docs/content/en/content-management/front-matter.md
diff --git a/content/en/content-management/image-processing/index.md b/docs/content/en/content-management/image-processing/index.md
index 710c260ca..710c260ca 100644
--- a/content/en/content-management/image-processing/index.md
+++ b/docs/content/en/content-management/image-processing/index.md
diff --git a/content/en/content-management/image-processing/sunset.jpg b/docs/content/en/content-management/image-processing/sunset.jpg
index 4dbcc0836..4dbcc0836 100644
--- a/content/en/content-management/image-processing/sunset.jpg
+++ b/docs/content/en/content-management/image-processing/sunset.jpg
Binary files differ
diff --git a/content/en/content-management/menus.md b/docs/content/en/content-management/menus.md
index aefc99e00..aefc99e00 100644
--- a/content/en/content-management/menus.md
+++ b/docs/content/en/content-management/menus.md
diff --git a/content/en/content-management/multilingual.md b/docs/content/en/content-management/multilingual.md
index d1e7965b2..d1e7965b2 100644
--- a/content/en/content-management/multilingual.md
+++ b/docs/content/en/content-management/multilingual.md
diff --git a/content/en/content-management/organization/1-featured-content-bundles.png b/docs/content/en/content-management/organization/1-featured-content-bundles.png
index 501e671e2..501e671e2 100644
--- a/content/en/content-management/organization/1-featured-content-bundles.png
+++ b/docs/content/en/content-management/organization/1-featured-content-bundles.png
Binary files differ
diff --git a/content/en/content-management/organization/index.md b/docs/content/en/content-management/organization/index.md
index 10e22c2d3..10e22c2d3 100644
--- a/content/en/content-management/organization/index.md
+++ b/docs/content/en/content-management/organization/index.md
diff --git a/content/en/content-management/page-bundles.md b/docs/content/en/content-management/page-bundles.md
index 9561ea2e9..9561ea2e9 100644
--- a/content/en/content-management/page-bundles.md
+++ b/docs/content/en/content-management/page-bundles.md
diff --git a/content/en/content-management/page-resources.md b/docs/content/en/content-management/page-resources.md
index 9f2c0cfab..9f2c0cfab 100644
--- a/content/en/content-management/page-resources.md
+++ b/docs/content/en/content-management/page-resources.md
diff --git a/content/en/content-management/related.md b/docs/content/en/content-management/related.md
index 9ede15252..9ede15252 100644
--- a/content/en/content-management/related.md
+++ b/docs/content/en/content-management/related.md
diff --git a/content/en/content-management/sections.md b/docs/content/en/content-management/sections.md
index 6806e342c..6806e342c 100644
--- a/content/en/content-management/sections.md
+++ b/docs/content/en/content-management/sections.md
diff --git a/content/en/content-management/shortcodes.md b/docs/content/en/content-management/shortcodes.md
index 017267ec7..017267ec7 100644
--- a/content/en/content-management/shortcodes.md
+++ b/docs/content/en/content-management/shortcodes.md
diff --git a/content/en/content-management/static-files.md b/docs/content/en/content-management/static-files.md
index e42ee9088..e42ee9088 100644
--- a/content/en/content-management/static-files.md
+++ b/docs/content/en/content-management/static-files.md
diff --git a/content/en/content-management/summaries.md b/docs/content/en/content-management/summaries.md
index 3c67a67dc..3c67a67dc 100644
--- a/content/en/content-management/summaries.md
+++ b/docs/content/en/content-management/summaries.md
diff --git a/content/en/content-management/syntax-highlighting.md b/docs/content/en/content-management/syntax-highlighting.md
index 8ff270c54..8ff270c54 100644
--- a/content/en/content-management/syntax-highlighting.md
+++ b/docs/content/en/content-management/syntax-highlighting.md
diff --git a/content/en/content-management/taxonomies.md b/docs/content/en/content-management/taxonomies.md
index 32227d52c..32227d52c 100644
--- a/content/en/content-management/taxonomies.md
+++ b/docs/content/en/content-management/taxonomies.md
diff --git a/content/en/content-management/toc.md b/docs/content/en/content-management/toc.md
index 57228ba50..57228ba50 100644
--- a/content/en/content-management/toc.md
+++ b/docs/content/en/content-management/toc.md
diff --git a/content/en/content-management/types.md b/docs/content/en/content-management/types.md
index da45302a7..da45302a7 100644
--- a/content/en/content-management/types.md
+++ b/docs/content/en/content-management/types.md
diff --git a/content/en/content-management/urls.md b/docs/content/en/content-management/urls.md
index 774bd9a58..774bd9a58 100644
--- a/content/en/content-management/urls.md
+++ b/docs/content/en/content-management/urls.md
diff --git a/content/en/contribute/_index.md b/docs/content/en/contribute/_index.md
index 5e46ae287..5e46ae287 100644
--- a/content/en/contribute/_index.md
+++ b/docs/content/en/contribute/_index.md
diff --git a/content/en/contribute/development.md b/docs/content/en/contribute/development.md
index 16489772e..16489772e 100644
--- a/content/en/contribute/development.md
+++ b/docs/content/en/contribute/development.md
diff --git a/content/en/contribute/documentation.md b/docs/content/en/contribute/documentation.md
index df937f1e1..df937f1e1 100644
--- a/content/en/contribute/documentation.md
+++ b/docs/content/en/contribute/documentation.md
diff --git a/content/en/contribute/themes.md b/docs/content/en/contribute/themes.md
index 403bac825..403bac825 100644
--- a/content/en/contribute/themes.md
+++ b/docs/content/en/contribute/themes.md
diff --git a/content/en/documentation.md b/docs/content/en/documentation.md
index 77cf283fa..77cf283fa 100644
--- a/content/en/documentation.md
+++ b/docs/content/en/documentation.md
diff --git a/content/en/featured.png b/docs/content/en/featured.png
index 09953aed9..09953aed9 100644
--- a/content/en/featured.png
+++ b/docs/content/en/featured.png
Binary files differ
diff --git a/content/en/functions/GetPage.md b/docs/content/en/functions/GetPage.md
index aa5e9323f..aa5e9323f 100644
--- a/content/en/functions/GetPage.md
+++ b/docs/content/en/functions/GetPage.md
diff --git a/content/en/functions/RenderString.md b/docs/content/en/functions/RenderString.md
index 1b77f6a38..1b77f6a38 100644
--- a/content/en/functions/RenderString.md
+++ b/docs/content/en/functions/RenderString.md
diff --git a/content/en/functions/_index.md b/docs/content/en/functions/_index.md
index fa7974685..fa7974685 100644
--- a/content/en/functions/_index.md
+++ b/docs/content/en/functions/_index.md
diff --git a/content/en/functions/abslangurl.md b/docs/content/en/functions/abslangurl.md
index d9818fcaf..d9818fcaf 100644
--- a/content/en/functions/abslangurl.md
+++ b/docs/content/en/functions/abslangurl.md
diff --git a/content/en/functions/absurl.md b/docs/content/en/functions/absurl.md
index 13524d2d6..13524d2d6 100644
--- a/content/en/functions/absurl.md
+++ b/docs/content/en/functions/absurl.md
diff --git a/content/en/functions/adddate.md b/docs/content/en/functions/adddate.md
index 116ffa8de..116ffa8de 100644
--- a/content/en/functions/adddate.md
+++ b/docs/content/en/functions/adddate.md
diff --git a/content/en/functions/after.md b/docs/content/en/functions/after.md
index da93b3e44..da93b3e44 100644
--- a/content/en/functions/after.md
+++ b/docs/content/en/functions/after.md
diff --git a/content/en/functions/anchorize.md b/docs/content/en/functions/anchorize.md
index 5530e193b..5530e193b 100644
--- a/content/en/functions/anchorize.md
+++ b/docs/content/en/functions/anchorize.md
diff --git a/content/en/functions/append.md b/docs/content/en/functions/append.md
index 3e2fc13cc..3e2fc13cc 100644
--- a/content/en/functions/append.md
+++ b/docs/content/en/functions/append.md
diff --git a/content/en/functions/apply.md b/docs/content/en/functions/apply.md
index 7550069a5..7550069a5 100644
--- a/content/en/functions/apply.md
+++ b/docs/content/en/functions/apply.md
diff --git a/content/en/functions/base64.md b/docs/content/en/functions/base64.md
index 54ca0440c..54ca0440c 100644
--- a/content/en/functions/base64.md
+++ b/docs/content/en/functions/base64.md
diff --git a/content/en/functions/chomp.md b/docs/content/en/functions/chomp.md
index c56505c2c..c56505c2c 100644
--- a/content/en/functions/chomp.md
+++ b/docs/content/en/functions/chomp.md
diff --git a/content/en/functions/complement.md b/docs/content/en/functions/complement.md
index a41faf524..a41faf524 100644
--- a/content/en/functions/complement.md
+++ b/docs/content/en/functions/complement.md
diff --git a/content/en/functions/cond.md b/docs/content/en/functions/cond.md
index 30b853251..30b853251 100644
--- a/content/en/functions/cond.md
+++ b/docs/content/en/functions/cond.md
diff --git a/content/en/functions/countrunes.md b/docs/content/en/functions/countrunes.md
index 0acef095b..0acef095b 100644
--- a/content/en/functions/countrunes.md
+++ b/docs/content/en/functions/countrunes.md
diff --git a/content/en/functions/countwords.md b/docs/content/en/functions/countwords.md
index 17f0fa5c2..17f0fa5c2 100644
--- a/content/en/functions/countwords.md
+++ b/docs/content/en/functions/countwords.md
diff --git a/content/en/functions/dateformat.md b/docs/content/en/functions/dateformat.md
index 362efabd3..362efabd3 100644
--- a/content/en/functions/dateformat.md
+++ b/docs/content/en/functions/dateformat.md
diff --git a/content/en/functions/default.md b/docs/content/en/functions/default.md
index c4cc166a7..c4cc166a7 100644
--- a/content/en/functions/default.md
+++ b/docs/content/en/functions/default.md
diff --git a/content/en/functions/delimit.md b/docs/content/en/functions/delimit.md
index 533af0523..533af0523 100644
--- a/content/en/functions/delimit.md
+++ b/docs/content/en/functions/delimit.md
diff --git a/content/en/functions/dict.md b/docs/content/en/functions/dict.md
index 966b6af3e..966b6af3e 100644
--- a/content/en/functions/dict.md
+++ b/docs/content/en/functions/dict.md
diff --git a/content/en/functions/echoparam.md b/docs/content/en/functions/echoparam.md
index 515b5c449..515b5c449 100644
--- a/content/en/functions/echoparam.md
+++ b/docs/content/en/functions/echoparam.md
diff --git a/content/en/functions/emojify.md b/docs/content/en/functions/emojify.md
index 20e30fafa..20e30fafa 100644
--- a/content/en/functions/emojify.md
+++ b/docs/content/en/functions/emojify.md
diff --git a/content/en/functions/eq.md b/docs/content/en/functions/eq.md
index d342a628a..d342a628a 100644
--- a/content/en/functions/eq.md
+++ b/docs/content/en/functions/eq.md
diff --git a/content/en/functions/errorf.md b/docs/content/en/functions/errorf.md
index 41ea2f192..41ea2f192 100644
--- a/content/en/functions/errorf.md
+++ b/docs/content/en/functions/errorf.md
diff --git a/content/en/functions/fileExists.md b/docs/content/en/functions/fileExists.md
index 2175453de..2175453de 100644
--- a/content/en/functions/fileExists.md
+++ b/docs/content/en/functions/fileExists.md
diff --git a/content/en/functions/findRe.md b/docs/content/en/functions/findRe.md
index b6375be9c..b6375be9c 100644
--- a/content/en/functions/findRe.md
+++ b/docs/content/en/functions/findRe.md
diff --git a/content/en/functions/first.md b/docs/content/en/functions/first.md
index 98d162617..98d162617 100644
--- a/content/en/functions/first.md
+++ b/docs/content/en/functions/first.md
diff --git a/content/en/functions/float.md b/docs/content/en/functions/float.md
index 1c589e317..1c589e317 100644
--- a/content/en/functions/float.md
+++ b/docs/content/en/functions/float.md
diff --git a/content/en/functions/format.md b/docs/content/en/functions/format.md
index 199030782..199030782 100644
--- a/content/en/functions/format.md
+++ b/docs/content/en/functions/format.md
diff --git a/content/en/functions/ge.md b/docs/content/en/functions/ge.md
index c7256c0e3..c7256c0e3 100644
--- a/content/en/functions/ge.md
+++ b/docs/content/en/functions/ge.md
diff --git a/content/en/functions/get.md b/docs/content/en/functions/get.md
index c5b75c7fd..c5b75c7fd 100644
--- a/content/en/functions/get.md
+++ b/docs/content/en/functions/get.md
diff --git a/content/en/functions/getenv.md b/docs/content/en/functions/getenv.md
index f7f71d35c..f7f71d35c 100644
--- a/content/en/functions/getenv.md
+++ b/docs/content/en/functions/getenv.md
diff --git a/content/en/functions/group.md b/docs/content/en/functions/group.md
index 203498cb1..203498cb1 100644
--- a/content/en/functions/group.md
+++ b/docs/content/en/functions/group.md
diff --git a/content/en/functions/gt.md b/docs/content/en/functions/gt.md
index 91203f890..91203f890 100644
--- a/content/en/functions/gt.md
+++ b/docs/content/en/functions/gt.md
diff --git a/content/en/functions/hasPrefix.md b/docs/content/en/functions/hasPrefix.md
index 99d5ba819..99d5ba819 100644
--- a/content/en/functions/hasPrefix.md
+++ b/docs/content/en/functions/hasPrefix.md
diff --git a/content/en/functions/haschildren.md b/docs/content/en/functions/haschildren.md
index 1deeb7277..1deeb7277 100644
--- a/content/en/functions/haschildren.md
+++ b/docs/content/en/functions/haschildren.md
diff --git a/content/en/functions/hasmenucurrent.md b/docs/content/en/functions/hasmenucurrent.md
index dbd4aa418..dbd4aa418 100644
--- a/content/en/functions/hasmenucurrent.md
+++ b/docs/content/en/functions/hasmenucurrent.md
diff --git a/content/en/functions/highlight.md b/docs/content/en/functions/highlight.md
index 40f3a78de..40f3a78de 100644
--- a/content/en/functions/highlight.md
+++ b/docs/content/en/functions/highlight.md
diff --git a/content/en/functions/hmac.md b/docs/content/en/functions/hmac.md
index b906e5abb..b906e5abb 100644
--- a/content/en/functions/hmac.md
+++ b/docs/content/en/functions/hmac.md
diff --git a/content/en/functions/htmlEscape.md b/docs/content/en/functions/htmlEscape.md
index 652492c72..652492c72 100644
--- a/content/en/functions/htmlEscape.md
+++ b/docs/content/en/functions/htmlEscape.md
diff --git a/content/en/functions/htmlUnescape.md b/docs/content/en/functions/htmlUnescape.md
index 9533fde90..9533fde90 100644
--- a/content/en/functions/htmlUnescape.md
+++ b/docs/content/en/functions/htmlUnescape.md
diff --git a/content/en/functions/hugo.md b/docs/content/en/functions/hugo.md
index 1792f5a8d..1792f5a8d 100644
--- a/content/en/functions/hugo.md
+++ b/docs/content/en/functions/hugo.md
diff --git a/content/en/functions/humanize.md b/docs/content/en/functions/humanize.md
index 18a087591..18a087591 100644
--- a/content/en/functions/humanize.md
+++ b/docs/content/en/functions/humanize.md
diff --git a/content/en/functions/i18n.md b/docs/content/en/functions/i18n.md
index 7d88292b9..7d88292b9 100644
--- a/content/en/functions/i18n.md
+++ b/docs/content/en/functions/i18n.md
diff --git a/content/en/functions/images/index.md b/docs/content/en/functions/images/index.md
index 92c6ff0da..92c6ff0da 100644
--- a/content/en/functions/images/index.md
+++ b/docs/content/en/functions/images/index.md
diff --git a/content/en/functions/in.md b/docs/content/en/functions/in.md
index 7a8cb33eb..7a8cb33eb 100644
--- a/content/en/functions/in.md
+++ b/docs/content/en/functions/in.md
diff --git a/content/en/functions/index-function.md b/docs/content/en/functions/index-function.md
index 6ee46b4a8..6ee46b4a8 100644
--- a/content/en/functions/index-function.md
+++ b/docs/content/en/functions/index-function.md
diff --git a/content/en/functions/int.md b/docs/content/en/functions/int.md
index 24818cef6..24818cef6 100644
--- a/content/en/functions/int.md
+++ b/docs/content/en/functions/int.md
diff --git a/content/en/functions/intersect.md b/docs/content/en/functions/intersect.md
index 2fe73ded8..2fe73ded8 100644
--- a/content/en/functions/intersect.md
+++ b/docs/content/en/functions/intersect.md
diff --git a/content/en/functions/ismenucurrent.md b/docs/content/en/functions/ismenucurrent.md
index 51c21721b..51c21721b 100644
--- a/content/en/functions/ismenucurrent.md
+++ b/docs/content/en/functions/ismenucurrent.md
diff --git a/content/en/functions/isset.md b/docs/content/en/functions/isset.md
index aa36f6dac..aa36f6dac 100644
--- a/content/en/functions/isset.md
+++ b/docs/content/en/functions/isset.md
diff --git a/content/en/functions/jsonify.md b/docs/content/en/functions/jsonify.md
index 28b90534c..28b90534c 100644
--- a/content/en/functions/jsonify.md
+++ b/docs/content/en/functions/jsonify.md
diff --git a/content/en/functions/lang.Merge.md b/docs/content/en/functions/lang.Merge.md
index 78d658442..78d658442 100644
--- a/content/en/functions/lang.Merge.md
+++ b/docs/content/en/functions/lang.Merge.md
diff --git a/content/en/functions/lang.md b/docs/content/en/functions/lang.md
index 1a4da807a..1a4da807a 100644
--- a/content/en/functions/lang.md
+++ b/docs/content/en/functions/lang.md
diff --git a/content/en/functions/last.md b/docs/content/en/functions/last.md
index 4a752cb82..4a752cb82 100644
--- a/content/en/functions/last.md
+++ b/docs/content/en/functions/last.md
diff --git a/content/en/functions/le.md b/docs/content/en/functions/le.md
index 1ff0ac582..1ff0ac582 100644
--- a/content/en/functions/le.md
+++ b/docs/content/en/functions/le.md
diff --git a/content/en/functions/len.md b/docs/content/en/functions/len.md
index 0681583db..0681583db 100644
--- a/content/en/functions/len.md
+++ b/docs/content/en/functions/len.md
diff --git a/content/en/functions/lower.md b/docs/content/en/functions/lower.md
index 0e8ba6c6d..0e8ba6c6d 100644
--- a/content/en/functions/lower.md
+++ b/docs/content/en/functions/lower.md
diff --git a/content/en/functions/lt.md b/docs/content/en/functions/lt.md
index d2a234986..d2a234986 100644
--- a/content/en/functions/lt.md
+++ b/docs/content/en/functions/lt.md
diff --git a/content/en/functions/markdownify.md b/docs/content/en/functions/markdownify.md
index 171c3bf10..171c3bf10 100644
--- a/content/en/functions/markdownify.md
+++ b/docs/content/en/functions/markdownify.md
diff --git a/content/en/functions/math.md b/docs/content/en/functions/math.md
index 2d98c4deb..2d98c4deb 100644
--- a/content/en/functions/math.md
+++ b/docs/content/en/functions/math.md
diff --git a/content/en/functions/md5.md b/docs/content/en/functions/md5.md
index 7db856a75..7db856a75 100644
--- a/content/en/functions/md5.md
+++ b/docs/content/en/functions/md5.md
diff --git a/content/en/functions/merge.md b/docs/content/en/functions/merge.md
index 0944815a1..0944815a1 100644
--- a/content/en/functions/merge.md
+++ b/docs/content/en/functions/merge.md
diff --git a/content/en/functions/ne.md b/docs/content/en/functions/ne.md
index e072993d2..e072993d2 100644
--- a/content/en/functions/ne.md
+++ b/docs/content/en/functions/ne.md
diff --git a/content/en/functions/now.md b/docs/content/en/functions/now.md
index 24e1ab3f7..24e1ab3f7 100644
--- a/content/en/functions/now.md
+++ b/docs/content/en/functions/now.md
diff --git a/content/en/functions/os.Stat.md b/docs/content/en/functions/os.Stat.md
index a56f79735..a56f79735 100644
--- a/content/en/functions/os.Stat.md
+++ b/docs/content/en/functions/os.Stat.md
diff --git a/content/en/functions/param.md b/docs/content/en/functions/param.md
index eb0a87279..eb0a87279 100644
--- a/content/en/functions/param.md
+++ b/docs/content/en/functions/param.md
diff --git a/content/en/functions/partialCached.md b/docs/content/en/functions/partialCached.md
index d7a70ac64..d7a70ac64 100644
--- a/content/en/functions/partialCached.md
+++ b/docs/content/en/functions/partialCached.md
diff --git a/content/en/functions/path.Base.md b/docs/content/en/functions/path.Base.md
index a6bfc2bd5..a6bfc2bd5 100644
--- a/content/en/functions/path.Base.md
+++ b/docs/content/en/functions/path.Base.md
diff --git a/content/en/functions/path.BaseName.md b/docs/content/en/functions/path.BaseName.md
index 6a5b9f0a0..6a5b9f0a0 100644
--- a/content/en/functions/path.BaseName.md
+++ b/docs/content/en/functions/path.BaseName.md
diff --git a/content/en/functions/path.Clean.md b/docs/content/en/functions/path.Clean.md
index 852de65fd..852de65fd 100644
--- a/content/en/functions/path.Clean.md
+++ b/docs/content/en/functions/path.Clean.md
diff --git a/content/en/functions/path.Dir.md b/docs/content/en/functions/path.Dir.md
index 161a0daa8..161a0daa8 100644
--- a/content/en/functions/path.Dir.md
+++ b/docs/content/en/functions/path.Dir.md
diff --git a/content/en/functions/path.Ext.md b/docs/content/en/functions/path.Ext.md
index 8c6fe907c..8c6fe907c 100644
--- a/content/en/functions/path.Ext.md
+++ b/docs/content/en/functions/path.Ext.md
diff --git a/content/en/functions/path.Join.md b/docs/content/en/functions/path.Join.md
index 579e8667e..579e8667e 100644
--- a/content/en/functions/path.Join.md
+++ b/docs/content/en/functions/path.Join.md
diff --git a/content/en/functions/path.Split.md b/docs/content/en/functions/path.Split.md
index 7737b77d3..7737b77d3 100644
--- a/content/en/functions/path.Split.md
+++ b/docs/content/en/functions/path.Split.md
diff --git a/content/en/functions/plainify.md b/docs/content/en/functions/plainify.md
index 1258c139a..1258c139a 100644
--- a/content/en/functions/plainify.md
+++ b/docs/content/en/functions/plainify.md
diff --git a/content/en/functions/pluralize.md b/docs/content/en/functions/pluralize.md
index 9c5040934..9c5040934 100644
--- a/content/en/functions/pluralize.md
+++ b/docs/content/en/functions/pluralize.md
diff --git a/content/en/functions/print.md b/docs/content/en/functions/print.md
index 42b8dcb75..42b8dcb75 100644
--- a/content/en/functions/print.md
+++ b/docs/content/en/functions/print.md
diff --git a/content/en/functions/printf.md b/docs/content/en/functions/printf.md
index 8b12b9883..8b12b9883 100644
--- a/content/en/functions/printf.md
+++ b/docs/content/en/functions/printf.md
diff --git a/content/en/functions/println.md b/docs/content/en/functions/println.md
index 1ea70cbad..1ea70cbad 100644
--- a/content/en/functions/println.md
+++ b/docs/content/en/functions/println.md
diff --git a/content/en/functions/querify.md b/docs/content/en/functions/querify.md
index 9e8c4b83f..9e8c4b83f 100644
--- a/content/en/functions/querify.md
+++ b/docs/content/en/functions/querify.md
diff --git a/content/en/functions/range.md b/docs/content/en/functions/range.md
index 599e2ad25..599e2ad25 100644
--- a/content/en/functions/range.md
+++ b/docs/content/en/functions/range.md
diff --git a/content/en/functions/readdir.md b/docs/content/en/functions/readdir.md
index 70fe7b66c..70fe7b66c 100644
--- a/content/en/functions/readdir.md
+++ b/docs/content/en/functions/readdir.md
diff --git a/content/en/functions/readfile.md b/docs/content/en/functions/readfile.md
index b0a88458b..b0a88458b 100644
--- a/content/en/functions/readfile.md
+++ b/docs/content/en/functions/readfile.md
diff --git a/content/en/functions/ref.md b/docs/content/en/functions/ref.md
index ade380420..ade380420 100644
--- a/content/en/functions/ref.md
+++ b/docs/content/en/functions/ref.md
diff --git a/content/en/functions/reflect.IsMap.md b/docs/content/en/functions/reflect.IsMap.md
index 9ad50b599..9ad50b599 100644
--- a/content/en/functions/reflect.IsMap.md
+++ b/docs/content/en/functions/reflect.IsMap.md
diff --git a/content/en/functions/reflect.IsSlice.md b/docs/content/en/functions/reflect.IsSlice.md
index 38640918a..38640918a 100644
--- a/content/en/functions/reflect.IsSlice.md
+++ b/docs/content/en/functions/reflect.IsSlice.md
diff --git a/content/en/functions/relLangURL.md b/docs/content/en/functions/relLangURL.md
index 5cca28c69..5cca28c69 100644
--- a/content/en/functions/relLangURL.md
+++ b/docs/content/en/functions/relLangURL.md
diff --git a/content/en/functions/relref.md b/docs/content/en/functions/relref.md
index dbac2f3db..dbac2f3db 100644
--- a/content/en/functions/relref.md
+++ b/docs/content/en/functions/relref.md
diff --git a/content/en/functions/relurl.md b/docs/content/en/functions/relurl.md
index af98c44f4..af98c44f4 100644
--- a/content/en/functions/relurl.md
+++ b/docs/content/en/functions/relurl.md
diff --git a/content/en/functions/render.md b/docs/content/en/functions/render.md
index c2347091f..c2347091f 100644
--- a/content/en/functions/render.md
+++ b/docs/content/en/functions/render.md
diff --git a/content/en/functions/replace.md b/docs/content/en/functions/replace.md
index 07b83d035..07b83d035 100644
--- a/content/en/functions/replace.md
+++ b/docs/content/en/functions/replace.md
diff --git a/content/en/functions/replacere.md b/docs/content/en/functions/replacere.md
index a2149b6d3..a2149b6d3 100644
--- a/content/en/functions/replacere.md
+++ b/docs/content/en/functions/replacere.md
diff --git a/content/en/functions/safeCSS.md b/docs/content/en/functions/safeCSS.md
index 835ad3065..835ad3065 100644
--- a/content/en/functions/safeCSS.md
+++ b/docs/content/en/functions/safeCSS.md
diff --git a/content/en/functions/safeHTML.md b/docs/content/en/functions/safeHTML.md
index 3d5197a4f..3d5197a4f 100644
--- a/content/en/functions/safeHTML.md
+++ b/docs/content/en/functions/safeHTML.md
diff --git a/content/en/functions/safeHTMLAttr.md b/docs/content/en/functions/safeHTMLAttr.md
index 19de2e985..19de2e985 100644
--- a/content/en/functions/safeHTMLAttr.md
+++ b/docs/content/en/functions/safeHTMLAttr.md
diff --git a/content/en/functions/safeJS.md b/docs/content/en/functions/safeJS.md
index c67bfa61c..c67bfa61c 100644
--- a/content/en/functions/safeJS.md
+++ b/docs/content/en/functions/safeJS.md
diff --git a/content/en/functions/safeURL.md b/docs/content/en/functions/safeURL.md
index 959076246..959076246 100644
--- a/content/en/functions/safeURL.md
+++ b/docs/content/en/functions/safeURL.md
diff --git a/content/en/functions/scratch.md b/docs/content/en/functions/scratch.md
index c64aa1119..c64aa1119 100644
--- a/content/en/functions/scratch.md
+++ b/docs/content/en/functions/scratch.md
diff --git a/content/en/functions/seq.md b/docs/content/en/functions/seq.md
index c0750b4a1..c0750b4a1 100644
--- a/content/en/functions/seq.md
+++ b/docs/content/en/functions/seq.md
diff --git a/content/en/functions/sha.md b/docs/content/en/functions/sha.md
index 24ad165a8..24ad165a8 100644
--- a/content/en/functions/sha.md
+++ b/docs/content/en/functions/sha.md
diff --git a/content/en/functions/shuffle.md b/docs/content/en/functions/shuffle.md
index 95a586e44..95a586e44 100644
--- a/content/en/functions/shuffle.md
+++ b/docs/content/en/functions/shuffle.md
diff --git a/content/en/functions/singularize.md b/docs/content/en/functions/singularize.md
index 8ca46d433..8ca46d433 100644
--- a/content/en/functions/singularize.md
+++ b/docs/content/en/functions/singularize.md
diff --git a/content/en/functions/site.md b/docs/content/en/functions/site.md
index d4b8e417c..d4b8e417c 100644
--- a/content/en/functions/site.md
+++ b/docs/content/en/functions/site.md
diff --git a/content/en/functions/slice.md b/docs/content/en/functions/slice.md
index 0710d5e40..0710d5e40 100644
--- a/content/en/functions/slice.md
+++ b/docs/content/en/functions/slice.md
diff --git a/content/en/functions/slicestr.md b/docs/content/en/functions/slicestr.md
index 254ad9a61..254ad9a61 100644
--- a/content/en/functions/slicestr.md
+++ b/docs/content/en/functions/slicestr.md
diff --git a/content/en/functions/sort.md b/docs/content/en/functions/sort.md
index b48aad1aa..b48aad1aa 100644
--- a/content/en/functions/sort.md
+++ b/docs/content/en/functions/sort.md
diff --git a/content/en/functions/split.md b/docs/content/en/functions/split.md
index a66a7cf0c..a66a7cf0c 100644
--- a/content/en/functions/split.md
+++ b/docs/content/en/functions/split.md
diff --git a/content/en/functions/string.md b/docs/content/en/functions/string.md
index 33a781e45..33a781e45 100644
--- a/content/en/functions/string.md
+++ b/docs/content/en/functions/string.md
diff --git a/content/en/functions/strings.Count.md b/docs/content/en/functions/strings.Count.md
index f666a7843..f666a7843 100644
--- a/content/en/functions/strings.Count.md
+++ b/docs/content/en/functions/strings.Count.md
diff --git a/content/en/functions/strings.HasSuffix.md b/docs/content/en/functions/strings.HasSuffix.md
index 3b82ea055..3b82ea055 100644
--- a/content/en/functions/strings.HasSuffix.md
+++ b/docs/content/en/functions/strings.HasSuffix.md
diff --git a/content/en/functions/strings.Repeat.md b/docs/content/en/functions/strings.Repeat.md
index 7cba6d075..7cba6d075 100644
--- a/content/en/functions/strings.Repeat.md
+++ b/docs/content/en/functions/strings.Repeat.md
diff --git a/content/en/functions/strings.RuneCount.md b/docs/content/en/functions/strings.RuneCount.md
index de335f862..de335f862 100644
--- a/content/en/functions/strings.RuneCount.md
+++ b/docs/content/en/functions/strings.RuneCount.md
diff --git a/content/en/functions/strings.TrimLeft.md b/docs/content/en/functions/strings.TrimLeft.md
index 2d71a1367..2d71a1367 100644
--- a/content/en/functions/strings.TrimLeft.md
+++ b/docs/content/en/functions/strings.TrimLeft.md
diff --git a/content/en/functions/strings.TrimPrefix.md b/docs/content/en/functions/strings.TrimPrefix.md
index a66bbe3db..a66bbe3db 100644
--- a/content/en/functions/strings.TrimPrefix.md
+++ b/docs/content/en/functions/strings.TrimPrefix.md
diff --git a/content/en/functions/strings.TrimRight.md b/docs/content/en/functions/strings.TrimRight.md
index 05084a4c0..05084a4c0 100644
--- a/content/en/functions/strings.TrimRight.md
+++ b/docs/content/en/functions/strings.TrimRight.md
diff --git a/content/en/functions/strings.TrimSuffix.md b/docs/content/en/functions/strings.TrimSuffix.md
index 916038054..916038054 100644
--- a/content/en/functions/strings.TrimSuffix.md
+++ b/docs/content/en/functions/strings.TrimSuffix.md
diff --git a/content/en/functions/substr.md b/docs/content/en/functions/substr.md
index 31f733e0b..31f733e0b 100644
--- a/content/en/functions/substr.md
+++ b/docs/content/en/functions/substr.md
diff --git a/content/en/functions/symdiff.md b/docs/content/en/functions/symdiff.md
index da269a422..da269a422 100644
--- a/content/en/functions/symdiff.md
+++ b/docs/content/en/functions/symdiff.md
diff --git a/content/en/functions/templates.Exists.md b/docs/content/en/functions/templates.Exists.md
index 36fa0ea60..36fa0ea60 100644
--- a/content/en/functions/templates.Exists.md
+++ b/docs/content/en/functions/templates.Exists.md
diff --git a/content/en/functions/time.md b/docs/content/en/functions/time.md
index 681c85fd9..681c85fd9 100644
--- a/content/en/functions/time.md
+++ b/docs/content/en/functions/time.md
diff --git a/content/en/functions/title.md b/docs/content/en/functions/title.md
index 201e4c140..201e4c140 100644
--- a/content/en/functions/title.md
+++ b/docs/content/en/functions/title.md
diff --git a/content/en/functions/transform.Unmarshal.md b/docs/content/en/functions/transform.Unmarshal.md
index 9b380dc57..9b380dc57 100644
--- a/content/en/functions/transform.Unmarshal.md
+++ b/docs/content/en/functions/transform.Unmarshal.md
diff --git a/content/en/functions/trim.md b/docs/content/en/functions/trim.md
index 7945ffef9..7945ffef9 100644
--- a/content/en/functions/trim.md
+++ b/docs/content/en/functions/trim.md
diff --git a/content/en/functions/truncate.md b/docs/content/en/functions/truncate.md
index 8d0dd36b1..8d0dd36b1 100644
--- a/content/en/functions/truncate.md
+++ b/docs/content/en/functions/truncate.md
diff --git a/content/en/functions/union.md b/docs/content/en/functions/union.md
index 459e3620d..459e3620d 100644
--- a/content/en/functions/union.md
+++ b/docs/content/en/functions/union.md
diff --git a/content/en/functions/uniq.md b/docs/content/en/functions/uniq.md
index eec55e5c3..eec55e5c3 100644
--- a/content/en/functions/uniq.md
+++ b/docs/content/en/functions/uniq.md
diff --git a/content/en/functions/unix.md b/docs/content/en/functions/unix.md
index 98fcc74a4..98fcc74a4 100644
--- a/content/en/functions/unix.md
+++ b/docs/content/en/functions/unix.md
diff --git a/content/en/functions/upper.md b/docs/content/en/functions/upper.md
index 0762541dd..0762541dd 100644
--- a/content/en/functions/upper.md
+++ b/docs/content/en/functions/upper.md
diff --git a/content/en/functions/urlize.md b/docs/content/en/functions/urlize.md
index 82a88b9d7..82a88b9d7 100644
--- a/content/en/functions/urlize.md
+++ b/docs/content/en/functions/urlize.md
diff --git a/content/en/functions/urls.Parse.md b/docs/content/en/functions/urls.Parse.md
index 7709ad8c2..7709ad8c2 100644
--- a/content/en/functions/urls.Parse.md
+++ b/docs/content/en/functions/urls.Parse.md
diff --git a/content/en/functions/where.md b/docs/content/en/functions/where.md
index f712cd984..f712cd984 100644
--- a/content/en/functions/where.md
+++ b/docs/content/en/functions/where.md
diff --git a/content/en/functions/with.md b/docs/content/en/functions/with.md
index ae643caf7..ae643caf7 100644
--- a/content/en/functions/with.md
+++ b/docs/content/en/functions/with.md
diff --git a/content/en/getting-started/_index.md b/docs/content/en/getting-started/_index.md
index 1615bdd91..1615bdd91 100644
--- a/content/en/getting-started/_index.md
+++ b/docs/content/en/getting-started/_index.md
diff --git a/content/en/getting-started/configuration-markup.md b/docs/content/en/getting-started/configuration-markup.md
index 2a38c0ce4..2a38c0ce4 100644
--- a/content/en/getting-started/configuration-markup.md
+++ b/docs/content/en/getting-started/configuration-markup.md
diff --git a/content/en/getting-started/configuration.md b/docs/content/en/getting-started/configuration.md
index 9393e4534..9393e4534 100644
--- a/content/en/getting-started/configuration.md
+++ b/docs/content/en/getting-started/configuration.md
diff --git a/content/en/getting-started/directory-structure.md b/docs/content/en/getting-started/directory-structure.md
index 3fa66d4c5..3fa66d4c5 100644
--- a/content/en/getting-started/directory-structure.md
+++ b/docs/content/en/getting-started/directory-structure.md
diff --git a/content/en/getting-started/external-learning-resources/hia.jpg b/docs/content/en/getting-started/external-learning-resources/hia.jpg
index 601947a70..601947a70 100644
--- a/content/en/getting-started/external-learning-resources/hia.jpg
+++ b/docs/content/en/getting-started/external-learning-resources/hia.jpg
Binary files differ
diff --git a/content/en/getting-started/external-learning-resources/index.md b/docs/content/en/getting-started/external-learning-resources/index.md
index 349d7e29d..349d7e29d 100644
--- a/content/en/getting-started/external-learning-resources/index.md
+++ b/docs/content/en/getting-started/external-learning-resources/index.md
diff --git a/content/en/getting-started/installing.md b/docs/content/en/getting-started/installing.md
index 32e9df18e..32e9df18e 100644
--- a/content/en/getting-started/installing.md
+++ b/docs/content/en/getting-started/installing.md
diff --git a/content/en/getting-started/quick-start.md b/docs/content/en/getting-started/quick-start.md
index d30ba3cfe..d30ba3cfe 100644
--- a/content/en/getting-started/quick-start.md
+++ b/docs/content/en/getting-started/quick-start.md
diff --git a/content/en/getting-started/usage.md b/docs/content/en/getting-started/usage.md
index fc5ae2501..fc5ae2501 100644
--- a/content/en/getting-started/usage.md
+++ b/docs/content/en/getting-started/usage.md
diff --git a/content/en/hosting-and-deployment/_index.md b/docs/content/en/hosting-and-deployment/_index.md
index ea9f60f17..ea9f60f17 100644
--- a/content/en/hosting-and-deployment/_index.md
+++ b/docs/content/en/hosting-and-deployment/_index.md
diff --git a/content/en/hosting-and-deployment/deployment-with-rclone.md b/docs/content/en/hosting-and-deployment/deployment-with-rclone.md
index e9feb765a..e9feb765a 100644
--- a/content/en/hosting-and-deployment/deployment-with-rclone.md
+++ b/docs/content/en/hosting-and-deployment/deployment-with-rclone.md
diff --git a/content/en/hosting-and-deployment/deployment-with-rsync.md b/docs/content/en/hosting-and-deployment/deployment-with-rsync.md
index ad59b71f1..ad59b71f1 100644
--- a/content/en/hosting-and-deployment/deployment-with-rsync.md
+++ b/docs/content/en/hosting-and-deployment/deployment-with-rsync.md
diff --git a/content/en/hosting-and-deployment/hosting-on-aws-amplify.md b/docs/content/en/hosting-and-deployment/hosting-on-aws-amplify.md
index 34a21e9e9..34a21e9e9 100644
--- a/content/en/hosting-and-deployment/hosting-on-aws-amplify.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-aws-amplify.md
diff --git a/content/en/hosting-and-deployment/hosting-on-cloudflare-pages.md b/docs/content/en/hosting-and-deployment/hosting-on-cloudflare-pages.md
index d951ec670..d951ec670 100644
--- a/content/en/hosting-and-deployment/hosting-on-cloudflare-pages.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-cloudflare-pages.md
diff --git a/content/en/hosting-and-deployment/hosting-on-firebase.md b/docs/content/en/hosting-and-deployment/hosting-on-firebase.md
index 3a4039f90..3a4039f90 100644
--- a/content/en/hosting-and-deployment/hosting-on-firebase.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-firebase.md
diff --git a/content/en/hosting-and-deployment/hosting-on-github.md b/docs/content/en/hosting-and-deployment/hosting-on-github.md
index ae30ce44a..ae30ce44a 100644
--- a/content/en/hosting-and-deployment/hosting-on-github.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-github.md
diff --git a/content/en/hosting-and-deployment/hosting-on-gitlab.md b/docs/content/en/hosting-and-deployment/hosting-on-gitlab.md
index 03c7776a7..03c7776a7 100644
--- a/content/en/hosting-and-deployment/hosting-on-gitlab.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-gitlab.md
diff --git a/content/en/hosting-and-deployment/hosting-on-keycdn.md b/docs/content/en/hosting-and-deployment/hosting-on-keycdn.md
index bec8b36e4..bec8b36e4 100644
--- a/content/en/hosting-and-deployment/hosting-on-keycdn.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-keycdn.md
diff --git a/content/en/hosting-and-deployment/hosting-on-netlify.md b/docs/content/en/hosting-and-deployment/hosting-on-netlify.md
index 1e2da8466..1e2da8466 100644
--- a/content/en/hosting-and-deployment/hosting-on-netlify.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-netlify.md
diff --git a/content/en/hosting-and-deployment/hosting-on-render.md b/docs/content/en/hosting-and-deployment/hosting-on-render.md
index 73b20bcb2..73b20bcb2 100644
--- a/content/en/hosting-and-deployment/hosting-on-render.md
+++ b/docs/content/en/hosting-and-deployment/hosting-on-render.md
diff --git a/content/en/hosting-and-deployment/hugo-deploy.md b/docs/content/en/hosting-and-deployment/hugo-deploy.md
index da7f77c61..da7f77c61 100644
--- a/content/en/hosting-and-deployment/hugo-deploy.md
+++ b/docs/content/en/hosting-and-deployment/hugo-deploy.md
diff --git a/content/en/hugo-modules/_index.md b/docs/content/en/hugo-modules/_index.md
index 2b02a559e..2b02a559e 100644
--- a/content/en/hugo-modules/_index.md
+++ b/docs/content/en/hugo-modules/_index.md
diff --git a/content/en/hugo-modules/configuration.md b/docs/content/en/hugo-modules/configuration.md
index 70de492ab..70de492ab 100644
--- a/content/en/hugo-modules/configuration.md
+++ b/docs/content/en/hugo-modules/configuration.md
diff --git a/content/en/hugo-modules/theme-components.md b/docs/content/en/hugo-modules/theme-components.md
index f1feb636a..f1feb636a 100644
--- a/content/en/hugo-modules/theme-components.md
+++ b/docs/content/en/hugo-modules/theme-components.md
diff --git a/content/en/hugo-modules/use-modules.md b/docs/content/en/hugo-modules/use-modules.md
index 038e0bf89..038e0bf89 100644
--- a/content/en/hugo-modules/use-modules.md
+++ b/docs/content/en/hugo-modules/use-modules.md
diff --git a/content/en/hugo-pipes/_index.md b/docs/content/en/hugo-pipes/_index.md
index 47411072a..47411072a 100755
--- a/content/en/hugo-pipes/_index.md
+++ b/docs/content/en/hugo-pipes/_index.md
diff --git a/content/en/hugo-pipes/babel.md b/docs/content/en/hugo-pipes/babel.md
index 76a1d441d..76a1d441d 100755
--- a/content/en/hugo-pipes/babel.md
+++ b/docs/content/en/hugo-pipes/babel.md
diff --git a/content/en/hugo-pipes/bundling.md b/docs/content/en/hugo-pipes/bundling.md
index 79b866c93..79b866c93 100755
--- a/content/en/hugo-pipes/bundling.md
+++ b/docs/content/en/hugo-pipes/bundling.md
diff --git a/content/en/hugo-pipes/fingerprint.md b/docs/content/en/hugo-pipes/fingerprint.md
index b58b577db..b58b577db 100755
--- a/content/en/hugo-pipes/fingerprint.md
+++ b/docs/content/en/hugo-pipes/fingerprint.md
diff --git a/content/en/hugo-pipes/introduction.md b/docs/content/en/hugo-pipes/introduction.md
index a3c956885..a3c956885 100755
--- a/content/en/hugo-pipes/introduction.md
+++ b/docs/content/en/hugo-pipes/introduction.md
diff --git a/content/en/hugo-pipes/js.md b/docs/content/en/hugo-pipes/js.md
index 63bd8bdd9..63bd8bdd9 100644
--- a/content/en/hugo-pipes/js.md
+++ b/docs/content/en/hugo-pipes/js.md
diff --git a/content/en/hugo-pipes/minification.md b/docs/content/en/hugo-pipes/minification.md
index d11ee58a9..d11ee58a9 100755
--- a/content/en/hugo-pipes/minification.md
+++ b/docs/content/en/hugo-pipes/minification.md
diff --git a/content/en/hugo-pipes/postcss.md b/docs/content/en/hugo-pipes/postcss.md
index 154f97f0b..154f97f0b 100755
--- a/content/en/hugo-pipes/postcss.md
+++ b/docs/content/en/hugo-pipes/postcss.md
diff --git a/content/en/hugo-pipes/postprocess.md b/docs/content/en/hugo-pipes/postprocess.md
index aafb786a4..aafb786a4 100755
--- a/content/en/hugo-pipes/postprocess.md
+++ b/docs/content/en/hugo-pipes/postprocess.md
diff --git a/content/en/hugo-pipes/resource-from-string.md b/docs/content/en/hugo-pipes/resource-from-string.md
index 8b942d2f3..8b942d2f3 100755
--- a/content/en/hugo-pipes/resource-from-string.md
+++ b/docs/content/en/hugo-pipes/resource-from-string.md
diff --git a/content/en/hugo-pipes/resource-from-template.md b/docs/content/en/hugo-pipes/resource-from-template.md
index 9bc8c7e9d..9bc8c7e9d 100755
--- a/content/en/hugo-pipes/resource-from-template.md
+++ b/docs/content/en/hugo-pipes/resource-from-template.md
diff --git a/content/en/hugo-pipes/scss-sass.md b/docs/content/en/hugo-pipes/scss-sass.md
index c4a0a5520..c4a0a5520 100755
--- a/content/en/hugo-pipes/scss-sass.md
+++ b/docs/content/en/hugo-pipes/scss-sass.md
diff --git a/content/en/maintenance/_index.md b/docs/content/en/maintenance/_index.md
index 691a5d47c..691a5d47c 100644
--- a/content/en/maintenance/_index.md
+++ b/docs/content/en/maintenance/_index.md
diff --git a/content/en/myshowcase/bio.md b/docs/content/en/myshowcase/bio.md
index 7d1b30895..7d1b30895 100644
--- a/content/en/myshowcase/bio.md
+++ b/docs/content/en/myshowcase/bio.md
diff --git a/content/en/myshowcase/featured.png b/docs/content/en/myshowcase/featured.png
index 4f390132e..4f390132e 100644
--- a/content/en/myshowcase/featured.png
+++ b/docs/content/en/myshowcase/featured.png
Binary files differ
diff --git a/content/en/myshowcase/index.md b/docs/content/en/myshowcase/index.md
index 08ea1e55f..08ea1e55f 100644
--- a/content/en/myshowcase/index.md
+++ b/docs/content/en/myshowcase/index.md
diff --git a/content/en/news/0.10-relnotes/index.md b/docs/content/en/news/0.10-relnotes/index.md
index 060998ba0..060998ba0 100644
--- a/content/en/news/0.10-relnotes/index.md
+++ b/docs/content/en/news/0.10-relnotes/index.md
diff --git a/content/en/news/0.11-relnotes/index.md b/docs/content/en/news/0.11-relnotes/index.md
index dc4115fe0..dc4115fe0 100644
--- a/content/en/news/0.11-relnotes/index.md
+++ b/docs/content/en/news/0.11-relnotes/index.md
diff --git a/content/en/news/0.12-relnotes/index.md b/docs/content/en/news/0.12-relnotes/index.md
index 0c62ea5b1..0c62ea5b1 100644
--- a/content/en/news/0.12-relnotes/index.md
+++ b/docs/content/en/news/0.12-relnotes/index.md
diff --git a/content/en/news/0.13-relnotes/index.md b/docs/content/en/news/0.13-relnotes/index.md
index 198f5fe7b..198f5fe7b 100644
--- a/content/en/news/0.13-relnotes/index.md
+++ b/docs/content/en/news/0.13-relnotes/index.md
diff --git a/content/en/news/0.14-relnotes/index.md b/docs/content/en/news/0.14-relnotes/index.md
index 9b7928b47..9b7928b47 100644
--- a/content/en/news/0.14-relnotes/index.md
+++ b/docs/content/en/news/0.14-relnotes/index.md
diff --git a/content/en/news/0.15-relnotes/index.md b/docs/content/en/news/0.15-relnotes/index.md
index cd268eaa6..cd268eaa6 100644
--- a/content/en/news/0.15-relnotes/index.md
+++ b/docs/content/en/news/0.15-relnotes/index.md
diff --git a/content/en/news/0.16-relnotes/index.md b/docs/content/en/news/0.16-relnotes/index.md
index 92f6e54a0..92f6e54a0 100644
--- a/content/en/news/0.16-relnotes/index.md
+++ b/docs/content/en/news/0.16-relnotes/index.md
diff --git a/content/en/news/0.17-relnotes/index.md b/docs/content/en/news/0.17-relnotes/index.md
index 034e8e891..034e8e891 100644
--- a/content/en/news/0.17-relnotes/index.md
+++ b/docs/content/en/news/0.17-relnotes/index.md
diff --git a/content/en/news/0.18-relnotes/index.md b/docs/content/en/news/0.18-relnotes/index.md
index 5aaab9ffe..5aaab9ffe 100644
--- a/content/en/news/0.18-relnotes/index.md
+++ b/docs/content/en/news/0.18-relnotes/index.md
diff --git a/content/en/news/0.19-relnotes/index.md b/docs/content/en/news/0.19-relnotes/index.md
index 073b47495..073b47495 100644
--- a/content/en/news/0.19-relnotes/index.md
+++ b/docs/content/en/news/0.19-relnotes/index.md
diff --git a/content/en/news/0.20-relnotes/index.md b/docs/content/en/news/0.20-relnotes/index.md
index 27e4d48f7..27e4d48f7 100644
--- a/content/en/news/0.20-relnotes/index.md
+++ b/docs/content/en/news/0.20-relnotes/index.md
diff --git a/content/en/news/0.20.1-relnotes/index.md b/docs/content/en/news/0.20.1-relnotes/index.md
index 109737bb3..109737bb3 100644
--- a/content/en/news/0.20.1-relnotes/index.md
+++ b/docs/content/en/news/0.20.1-relnotes/index.md
diff --git a/content/en/news/0.20.2-relnotes/index.md b/docs/content/en/news/0.20.2-relnotes/index.md
index 2e67d20ea..2e67d20ea 100644
--- a/content/en/news/0.20.2-relnotes/index.md
+++ b/docs/content/en/news/0.20.2-relnotes/index.md
diff --git a/content/en/news/0.20.3-relnotes/index.md b/docs/content/en/news/0.20.3-relnotes/index.md
index c79d9b202..c79d9b202 100644
--- a/content/en/news/0.20.3-relnotes/index.md
+++ b/docs/content/en/news/0.20.3-relnotes/index.md
diff --git a/content/en/news/0.20.4-relnotes/index.md b/docs/content/en/news/0.20.4-relnotes/index.md
index 2fde30e14..2fde30e14 100644
--- a/content/en/news/0.20.4-relnotes/index.md
+++ b/docs/content/en/news/0.20.4-relnotes/index.md
diff --git a/content/en/news/0.20.5-relnotes/index.md b/docs/content/en/news/0.20.5-relnotes/index.md
index eaed27832..eaed27832 100644
--- a/content/en/news/0.20.5-relnotes/index.md
+++ b/docs/content/en/news/0.20.5-relnotes/index.md
diff --git a/content/en/news/0.20.6-relnotes/index.md b/docs/content/en/news/0.20.6-relnotes/index.md
index 52189092a..52189092a 100644
--- a/content/en/news/0.20.6-relnotes/index.md
+++ b/docs/content/en/news/0.20.6-relnotes/index.md
diff --git a/content/en/news/0.20.7-relnotes/index.md b/docs/content/en/news/0.20.7-relnotes/index.md
index 50ac365d5..50ac365d5 100644
--- a/content/en/news/0.20.7-relnotes/index.md
+++ b/docs/content/en/news/0.20.7-relnotes/index.md
diff --git a/content/en/news/0.21-relnotes/index.md b/docs/content/en/news/0.21-relnotes/index.md
index aae1fd0b4..aae1fd0b4 100644
--- a/content/en/news/0.21-relnotes/index.md
+++ b/docs/content/en/news/0.21-relnotes/index.md
diff --git a/content/en/news/0.22-relnotes/index.md b/docs/content/en/news/0.22-relnotes/index.md
index fefb501f6..fefb501f6 100644
--- a/content/en/news/0.22-relnotes/index.md
+++ b/docs/content/en/news/0.22-relnotes/index.md
diff --git a/content/en/news/0.22.1-relnotes/index.md b/docs/content/en/news/0.22.1-relnotes/index.md
index ceb207d70..ceb207d70 100644
--- a/content/en/news/0.22.1-relnotes/index.md
+++ b/docs/content/en/news/0.22.1-relnotes/index.md
diff --git a/content/en/news/0.23-relnotes/index.md b/docs/content/en/news/0.23-relnotes/index.md
index fdf6e9e73..fdf6e9e73 100644
--- a/content/en/news/0.23-relnotes/index.md
+++ b/docs/content/en/news/0.23-relnotes/index.md
diff --git a/content/en/news/0.24-relnotes/index.md b/docs/content/en/news/0.24-relnotes/index.md
index ec71e246f..ec71e246f 100644
--- a/content/en/news/0.24-relnotes/index.md
+++ b/docs/content/en/news/0.24-relnotes/index.md
diff --git a/content/en/news/0.24.1-relnotes/index.md b/docs/content/en/news/0.24.1-relnotes/index.md
index 2ec2cef55..2ec2cef55 100644
--- a/content/en/news/0.24.1-relnotes/index.md
+++ b/docs/content/en/news/0.24.1-relnotes/index.md
diff --git a/content/en/news/0.25-relnotes/index.md b/docs/content/en/news/0.25-relnotes/index.md
index 9527c911a..9527c911a 100644
--- a/content/en/news/0.25-relnotes/index.md
+++ b/docs/content/en/news/0.25-relnotes/index.md
diff --git a/content/en/news/0.25.1-relnotes/index.md b/docs/content/en/news/0.25.1-relnotes/index.md
index 7d70d87a5..7d70d87a5 100644
--- a/content/en/news/0.25.1-relnotes/index.md
+++ b/docs/content/en/news/0.25.1-relnotes/index.md
diff --git a/content/en/news/0.26-relnotes/index.md b/docs/content/en/news/0.26-relnotes/index.md
index 7ebbf8185..7ebbf8185 100644
--- a/content/en/news/0.26-relnotes/index.md
+++ b/docs/content/en/news/0.26-relnotes/index.md
diff --git a/content/en/news/0.27-relnotes/index.md b/docs/content/en/news/0.27-relnotes/index.md
index 92fc3a7b0..92fc3a7b0 100644
--- a/content/en/news/0.27-relnotes/index.md
+++ b/docs/content/en/news/0.27-relnotes/index.md
diff --git a/content/en/news/0.27.1-relnotes/index.md b/docs/content/en/news/0.27.1-relnotes/index.md
index 1184cc175..1184cc175 100644
--- a/content/en/news/0.27.1-relnotes/index.md
+++ b/docs/content/en/news/0.27.1-relnotes/index.md
diff --git a/content/en/news/0.28-relnotes/index.md b/docs/content/en/news/0.28-relnotes/index.md
index 91128e48e..91128e48e 100644
--- a/content/en/news/0.28-relnotes/index.md
+++ b/docs/content/en/news/0.28-relnotes/index.md
diff --git a/content/en/news/0.29-relnotes/index.md b/docs/content/en/news/0.29-relnotes/index.md
index 810781dda..810781dda 100644
--- a/content/en/news/0.29-relnotes/index.md
+++ b/docs/content/en/news/0.29-relnotes/index.md
diff --git a/content/en/news/0.30-relnotes/index.md b/docs/content/en/news/0.30-relnotes/index.md
index 9281a5c20..9281a5c20 100644
--- a/content/en/news/0.30-relnotes/index.md
+++ b/docs/content/en/news/0.30-relnotes/index.md
diff --git a/content/en/news/0.30.1-relnotes/index.md b/docs/content/en/news/0.30.1-relnotes/index.md
index 68165e877..68165e877 100644
--- a/content/en/news/0.30.1-relnotes/index.md
+++ b/docs/content/en/news/0.30.1-relnotes/index.md
diff --git a/content/en/news/0.30.2-relnotes/index.md b/docs/content/en/news/0.30.2-relnotes/index.md
index 1d4bcd946..1d4bcd946 100644
--- a/content/en/news/0.30.2-relnotes/index.md
+++ b/docs/content/en/news/0.30.2-relnotes/index.md
diff --git a/content/en/news/0.31-relnotes/index.md b/docs/content/en/news/0.31-relnotes/index.md
index ba16dfacb..ba16dfacb 100644
--- a/content/en/news/0.31-relnotes/index.md
+++ b/docs/content/en/news/0.31-relnotes/index.md
diff --git a/content/en/news/0.31.1-relnotes/index.md b/docs/content/en/news/0.31.1-relnotes/index.md
index a74470d64..a74470d64 100644
--- a/content/en/news/0.31.1-relnotes/index.md
+++ b/docs/content/en/news/0.31.1-relnotes/index.md
diff --git a/content/en/news/0.32-relnotes/index.md b/docs/content/en/news/0.32-relnotes/index.md
index c3f36fe64..c3f36fe64 100644
--- a/content/en/news/0.32-relnotes/index.md
+++ b/docs/content/en/news/0.32-relnotes/index.md
diff --git a/content/en/news/0.32.1-relnotes/index.md b/docs/content/en/news/0.32.1-relnotes/index.md
index 867e3413e..867e3413e 100644
--- a/content/en/news/0.32.1-relnotes/index.md
+++ b/docs/content/en/news/0.32.1-relnotes/index.md
diff --git a/content/en/news/0.32.2-relnotes/index.md b/docs/content/en/news/0.32.2-relnotes/index.md
index 7453a2678..7453a2678 100644
--- a/content/en/news/0.32.2-relnotes/index.md
+++ b/docs/content/en/news/0.32.2-relnotes/index.md
diff --git a/content/en/news/0.32.3-relnotes/index.md b/docs/content/en/news/0.32.3-relnotes/index.md
index ad795a183..ad795a183 100644
--- a/content/en/news/0.32.3-relnotes/index.md
+++ b/docs/content/en/news/0.32.3-relnotes/index.md
diff --git a/content/en/news/0.32.4-relnotes/index.md b/docs/content/en/news/0.32.4-relnotes/index.md
index bd8163e0d..bd8163e0d 100644
--- a/content/en/news/0.32.4-relnotes/index.md
+++ b/docs/content/en/news/0.32.4-relnotes/index.md
diff --git a/content/en/news/0.33-relnotes/featured-hugo-33-poster.png b/docs/content/en/news/0.33-relnotes/featured-hugo-33-poster.png
index c30caafcc..c30caafcc 100644
--- a/content/en/news/0.33-relnotes/featured-hugo-33-poster.png
+++ b/docs/content/en/news/0.33-relnotes/featured-hugo-33-poster.png
Binary files differ
diff --git a/content/en/news/0.33-relnotes/index.md b/docs/content/en/news/0.33-relnotes/index.md
index 74cd50dc4..74cd50dc4 100644
--- a/content/en/news/0.33-relnotes/index.md
+++ b/docs/content/en/news/0.33-relnotes/index.md
diff --git a/content/en/news/0.34-relnotes/featured-34-poster.png b/docs/content/en/news/0.34-relnotes/featured-34-poster.png
index a5c81b8c8..a5c81b8c8 100644
--- a/content/en/news/0.34-relnotes/featured-34-poster.png
+++ b/docs/content/en/news/0.34-relnotes/featured-34-poster.png
Binary files differ
diff --git a/content/en/news/0.34-relnotes/index.md b/docs/content/en/news/0.34-relnotes/index.md
index dd5418a77..dd5418a77 100644
--- a/content/en/news/0.34-relnotes/index.md
+++ b/docs/content/en/news/0.34-relnotes/index.md
diff --git a/content/en/news/0.35-relnotes/featured-hugo-35-poster.png b/docs/content/en/news/0.35-relnotes/featured-hugo-35-poster.png
index a97e3b901..a97e3b901 100644
--- a/content/en/news/0.35-relnotes/featured-hugo-35-poster.png
+++ b/docs/content/en/news/0.35-relnotes/featured-hugo-35-poster.png
Binary files differ
diff --git a/content/en/news/0.35-relnotes/index.md b/docs/content/en/news/0.35-relnotes/index.md
index 104cbd222..104cbd222 100644
--- a/content/en/news/0.35-relnotes/index.md
+++ b/docs/content/en/news/0.35-relnotes/index.md
diff --git a/content/en/news/0.36-relnotes/featured-hugo-36-poster.png b/docs/content/en/news/0.36-relnotes/featured-hugo-36-poster.png
index 12dec42fc..12dec42fc 100644
--- a/content/en/news/0.36-relnotes/featured-hugo-36-poster.png
+++ b/docs/content/en/news/0.36-relnotes/featured-hugo-36-poster.png
Binary files differ
diff --git a/content/en/news/0.36-relnotes/index.md b/docs/content/en/news/0.36-relnotes/index.md
index a81892458..a81892458 100644
--- a/content/en/news/0.36-relnotes/index.md
+++ b/docs/content/en/news/0.36-relnotes/index.md
diff --git a/content/en/news/0.36.1-relnotes/index.md b/docs/content/en/news/0.36.1-relnotes/index.md
index 00a5b346c..00a5b346c 100644
--- a/content/en/news/0.36.1-relnotes/index.md
+++ b/docs/content/en/news/0.36.1-relnotes/index.md
diff --git a/content/en/news/0.37-relnotes/featured-hugo-37-poster.png b/docs/content/en/news/0.37-relnotes/featured-hugo-37-poster.png
index 9f369ba25..9f369ba25 100644
--- a/content/en/news/0.37-relnotes/featured-hugo-37-poster.png
+++ b/docs/content/en/news/0.37-relnotes/featured-hugo-37-poster.png
Binary files differ
diff --git a/content/en/news/0.37-relnotes/index.md b/docs/content/en/news/0.37-relnotes/index.md
index a9b6b4cef..a9b6b4cef 100644
--- a/content/en/news/0.37-relnotes/index.md
+++ b/docs/content/en/news/0.37-relnotes/index.md
diff --git a/content/en/news/0.37.1-relnotes/index.md b/docs/content/en/news/0.37.1-relnotes/index.md
index 754ed4240..754ed4240 100644
--- a/content/en/news/0.37.1-relnotes/index.md
+++ b/docs/content/en/news/0.37.1-relnotes/index.md
diff --git a/content/en/news/0.38-relnotes/featured-poster.png b/docs/content/en/news/0.38-relnotes/featured-poster.png
index 1e7988c8f..1e7988c8f 100644
--- a/content/en/news/0.38-relnotes/featured-poster.png
+++ b/docs/content/en/news/0.38-relnotes/featured-poster.png
Binary files differ
diff --git a/content/en/news/0.38-relnotes/index.md b/docs/content/en/news/0.38-relnotes/index.md
index 71d167cd5..71d167cd5 100644
--- a/content/en/news/0.38-relnotes/index.md
+++ b/docs/content/en/news/0.38-relnotes/index.md
diff --git a/content/en/news/0.38.1-relnotes/index.md b/docs/content/en/news/0.38.1-relnotes/index.md
index a025b5415..a025b5415 100644
--- a/content/en/news/0.38.1-relnotes/index.md
+++ b/docs/content/en/news/0.38.1-relnotes/index.md
diff --git a/content/en/news/0.38.2-relnotes/index.md b/docs/content/en/news/0.38.2-relnotes/index.md
index 0a045eee8..0a045eee8 100644
--- a/content/en/news/0.38.2-relnotes/index.md
+++ b/docs/content/en/news/0.38.2-relnotes/index.md
diff --git a/content/en/news/0.39-relnotes/featured-hugo-39-poster.png b/docs/content/en/news/0.39-relnotes/featured-hugo-39-poster.png
index e3fa6400a..e3fa6400a 100644
--- a/content/en/news/0.39-relnotes/featured-hugo-39-poster.png
+++ b/docs/content/en/news/0.39-relnotes/featured-hugo-39-poster.png
Binary files differ
diff --git a/content/en/news/0.39-relnotes/index.md b/docs/content/en/news/0.39-relnotes/index.md
index d1c28252a..d1c28252a 100644
--- a/content/en/news/0.39-relnotes/index.md
+++ b/docs/content/en/news/0.39-relnotes/index.md
diff --git a/content/en/news/0.40-relnotes/featured-hugo-40-poster.png b/docs/content/en/news/0.40-relnotes/featured-hugo-40-poster.png
index 9a7f36d1f..9a7f36d1f 100644
--- a/content/en/news/0.40-relnotes/featured-hugo-40-poster.png
+++ b/docs/content/en/news/0.40-relnotes/featured-hugo-40-poster.png
Binary files differ
diff --git a/content/en/news/0.40-relnotes/index.md b/docs/content/en/news/0.40-relnotes/index.md
index 9a45c1c09..9a45c1c09 100644
--- a/content/en/news/0.40-relnotes/index.md
+++ b/docs/content/en/news/0.40-relnotes/index.md
diff --git a/content/en/news/0.40.1-relnotes/index.md b/docs/content/en/news/0.40.1-relnotes/index.md
index 3352f164b..3352f164b 100644
--- a/content/en/news/0.40.1-relnotes/index.md
+++ b/docs/content/en/news/0.40.1-relnotes/index.md
diff --git a/content/en/news/0.40.2-relnotes/index.md b/docs/content/en/news/0.40.2-relnotes/index.md
index 50b9c3842..50b9c3842 100644
--- a/content/en/news/0.40.2-relnotes/index.md
+++ b/docs/content/en/news/0.40.2-relnotes/index.md
diff --git a/content/en/news/0.40.3-relnotes/index.md b/docs/content/en/news/0.40.3-relnotes/index.md
index 6f822809d..6f822809d 100644
--- a/content/en/news/0.40.3-relnotes/index.md
+++ b/docs/content/en/news/0.40.3-relnotes/index.md
diff --git a/content/en/news/0.41-relnotes/featured-hugo-41-poster.png b/docs/content/en/news/0.41-relnotes/featured-hugo-41-poster.png
index 8f752f665..8f752f665 100644
--- a/content/en/news/0.41-relnotes/featured-hugo-41-poster.png
+++ b/docs/content/en/news/0.41-relnotes/featured-hugo-41-poster.png
Binary files differ
diff --git a/content/en/news/0.41-relnotes/index.md b/docs/content/en/news/0.41-relnotes/index.md
index 411e373e5..411e373e5 100644
--- a/content/en/news/0.41-relnotes/index.md
+++ b/docs/content/en/news/0.41-relnotes/index.md
diff --git a/content/en/news/0.42-relnotes/featured-hugo-42-poster.png b/docs/content/en/news/0.42-relnotes/featured-hugo-42-poster.png
index 1f1cab1f1..1f1cab1f1 100644
--- a/content/en/news/0.42-relnotes/featured-hugo-42-poster.png
+++ b/docs/content/en/news/0.42-relnotes/featured-hugo-42-poster.png
Binary files differ
diff --git a/content/en/news/0.42-relnotes/index.md b/docs/content/en/news/0.42-relnotes/index.md
index 9356eb675..9356eb675 100644
--- a/content/en/news/0.42-relnotes/index.md
+++ b/docs/content/en/news/0.42-relnotes/index.md
diff --git a/content/en/news/0.42.1-relnotes/index.md b/docs/content/en/news/0.42.1-relnotes/index.md
index 6b5b3c775..6b5b3c775 100644
--- a/content/en/news/0.42.1-relnotes/index.md
+++ b/docs/content/en/news/0.42.1-relnotes/index.md
diff --git a/content/en/news/0.42.2-relnotes/index.md b/docs/content/en/news/0.42.2-relnotes/index.md
index c9bf6c469..c9bf6c469 100644
--- a/content/en/news/0.42.2-relnotes/index.md
+++ b/docs/content/en/news/0.42.2-relnotes/index.md
diff --git a/content/en/news/0.43-relnotes/featured-hugo-43-poster.png b/docs/content/en/news/0.43-relnotes/featured-hugo-43-poster.png
index b221ca7f1..b221ca7f1 100644
--- a/content/en/news/0.43-relnotes/featured-hugo-43-poster.png
+++ b/docs/content/en/news/0.43-relnotes/featured-hugo-43-poster.png
Binary files differ
diff --git a/content/en/news/0.43-relnotes/index.md b/docs/content/en/news/0.43-relnotes/index.md
index afa23329b..afa23329b 100644
--- a/content/en/news/0.43-relnotes/index.md
+++ b/docs/content/en/news/0.43-relnotes/index.md
diff --git a/content/en/news/0.44-relnotes/featured-hugo-44-poster.png b/docs/content/en/news/0.44-relnotes/featured-hugo-44-poster.png
index 330b235fb..330b235fb 100644
--- a/content/en/news/0.44-relnotes/featured-hugo-44-poster.png
+++ b/docs/content/en/news/0.44-relnotes/featured-hugo-44-poster.png
Binary files differ
diff --git a/content/en/news/0.44-relnotes/index.md b/docs/content/en/news/0.44-relnotes/index.md
index aa8396898..aa8396898 100644
--- a/content/en/news/0.44-relnotes/index.md
+++ b/docs/content/en/news/0.44-relnotes/index.md
diff --git a/content/en/news/0.45-relnotes/featured-hugo-45-poster.png b/docs/content/en/news/0.45-relnotes/featured-hugo-45-poster.png
index 40f71daca..40f71daca 100644
--- a/content/en/news/0.45-relnotes/featured-hugo-45-poster.png
+++ b/docs/content/en/news/0.45-relnotes/featured-hugo-45-poster.png
Binary files differ
diff --git a/content/en/news/0.45-relnotes/index.md b/docs/content/en/news/0.45-relnotes/index.md
index 83051c058..83051c058 100644
--- a/content/en/news/0.45-relnotes/index.md
+++ b/docs/content/en/news/0.45-relnotes/index.md
diff --git a/content/en/news/0.45.1-relnotes/index.md b/docs/content/en/news/0.45.1-relnotes/index.md
index 84e0416c7..84e0416c7 100644
--- a/content/en/news/0.45.1-relnotes/index.md
+++ b/docs/content/en/news/0.45.1-relnotes/index.md
diff --git a/content/en/news/0.46-relnotes/featured-hugo-46-poster.png b/docs/content/en/news/0.46-relnotes/featured-hugo-46-poster.png
index c00622e04..c00622e04 100644
--- a/content/en/news/0.46-relnotes/featured-hugo-46-poster.png
+++ b/docs/content/en/news/0.46-relnotes/featured-hugo-46-poster.png
Binary files differ
diff --git a/content/en/news/0.46-relnotes/index.md b/docs/content/en/news/0.46-relnotes/index.md
index 65c2f1036..65c2f1036 100644
--- a/content/en/news/0.46-relnotes/index.md
+++ b/docs/content/en/news/0.46-relnotes/index.md
diff --git a/content/en/news/0.47-relnotes/featured-hugo-47-poster.png b/docs/content/en/news/0.47-relnotes/featured-hugo-47-poster.png
index 601922961..601922961 100644
--- a/content/en/news/0.47-relnotes/featured-hugo-47-poster.png
+++ b/docs/content/en/news/0.47-relnotes/featured-hugo-47-poster.png
Binary files differ
diff --git a/content/en/news/0.47-relnotes/index.md b/docs/content/en/news/0.47-relnotes/index.md
index 79d15ec62..79d15ec62 100644
--- a/content/en/news/0.47-relnotes/index.md
+++ b/docs/content/en/news/0.47-relnotes/index.md
diff --git a/content/en/news/0.47.1-relnotes/index.md b/docs/content/en/news/0.47.1-relnotes/index.md
index d35b0fad2..d35b0fad2 100644
--- a/content/en/news/0.47.1-relnotes/index.md
+++ b/docs/content/en/news/0.47.1-relnotes/index.md
diff --git a/content/en/news/0.48-relnotes/featured-hugo-48-poster.png b/docs/content/en/news/0.48-relnotes/featured-hugo-48-poster.png
index 7adb0d22e..7adb0d22e 100644
--- a/content/en/news/0.48-relnotes/featured-hugo-48-poster.png
+++ b/docs/content/en/news/0.48-relnotes/featured-hugo-48-poster.png
Binary files differ
diff --git a/content/en/news/0.48-relnotes/index.md b/docs/content/en/news/0.48-relnotes/index.md
index 92c765f23..92c765f23 100644
--- a/content/en/news/0.48-relnotes/index.md
+++ b/docs/content/en/news/0.48-relnotes/index.md
diff --git a/content/en/news/0.49-relnotes/featured-hugo-49-poster.png b/docs/content/en/news/0.49-relnotes/featured-hugo-49-poster.png
index 6f0f42ed4..6f0f42ed4 100644
--- a/content/en/news/0.49-relnotes/featured-hugo-49-poster.png
+++ b/docs/content/en/news/0.49-relnotes/featured-hugo-49-poster.png
Binary files differ
diff --git a/content/en/news/0.49-relnotes/index.md b/docs/content/en/news/0.49-relnotes/index.md
index 6bb272c33..6bb272c33 100644
--- a/content/en/news/0.49-relnotes/index.md
+++ b/docs/content/en/news/0.49-relnotes/index.md
diff --git a/content/en/news/0.49.1-relnotes/index.md b/docs/content/en/news/0.49.1-relnotes/index.md
index a3858a9e1..a3858a9e1 100644
--- a/content/en/news/0.49.1-relnotes/index.md
+++ b/docs/content/en/news/0.49.1-relnotes/index.md
diff --git a/content/en/news/0.49.2-relnotes/index.md b/docs/content/en/news/0.49.2-relnotes/index.md
index 1d24cd624..1d24cd624 100644
--- a/content/en/news/0.49.2-relnotes/index.md
+++ b/docs/content/en/news/0.49.2-relnotes/index.md
diff --git a/content/en/news/0.50-relnotes/featured-hugo-50-poster.png b/docs/content/en/news/0.50-relnotes/featured-hugo-50-poster.png
index de5b76d79..de5b76d79 100644
--- a/content/en/news/0.50-relnotes/featured-hugo-50-poster.png
+++ b/docs/content/en/news/0.50-relnotes/featured-hugo-50-poster.png
Binary files differ
diff --git a/content/en/news/0.50-relnotes/index.md b/docs/content/en/news/0.50-relnotes/index.md
index 46ab61cd0..46ab61cd0 100644
--- a/content/en/news/0.50-relnotes/index.md
+++ b/docs/content/en/news/0.50-relnotes/index.md
diff --git a/content/en/news/0.51-relnotes/featured-hugo-51-poster.png b/docs/content/en/news/0.51-relnotes/featured-hugo-51-poster.png
index 07755a1ab..07755a1ab 100644
--- a/content/en/news/0.51-relnotes/featured-hugo-51-poster.png
+++ b/docs/content/en/news/0.51-relnotes/featured-hugo-51-poster.png
Binary files differ
diff --git a/content/en/news/0.51-relnotes/index.md b/docs/content/en/news/0.51-relnotes/index.md
index 8590a422c..8590a422c 100644
--- a/content/en/news/0.51-relnotes/index.md
+++ b/docs/content/en/news/0.51-relnotes/index.md
diff --git a/content/en/news/0.52-relnotes/featured-hugo-52-poster.png b/docs/content/en/news/0.52-relnotes/featured-hugo-52-poster.png
index 190f5758a..190f5758a 100644
--- a/content/en/news/0.52-relnotes/featured-hugo-52-poster.png
+++ b/docs/content/en/news/0.52-relnotes/featured-hugo-52-poster.png
Binary files differ
diff --git a/content/en/news/0.52-relnotes/index.md b/docs/content/en/news/0.52-relnotes/index.md
index 849a0028c..849a0028c 100644
--- a/content/en/news/0.52-relnotes/index.md
+++ b/docs/content/en/news/0.52-relnotes/index.md
diff --git a/content/en/news/0.53-relnotes/featured-hugo-53-poster.png b/docs/content/en/news/0.53-relnotes/featured-hugo-53-poster.png
index c3cee3adc..c3cee3adc 100644
--- a/content/en/news/0.53-relnotes/featured-hugo-53-poster.png
+++ b/docs/content/en/news/0.53-relnotes/featured-hugo-53-poster.png
Binary files differ
diff --git a/content/en/news/0.53-relnotes/index.md b/docs/content/en/news/0.53-relnotes/index.md
index b61ab9074..b61ab9074 100644
--- a/content/en/news/0.53-relnotes/index.md
+++ b/docs/content/en/news/0.53-relnotes/index.md
diff --git a/content/en/news/0.54.0-relnotes/featured-hugo-54.0-poster.png b/docs/content/en/news/0.54.0-relnotes/featured-hugo-54.0-poster.png
index 10fe563c3..10fe563c3 100644
--- a/content/en/news/0.54.0-relnotes/featured-hugo-54.0-poster.png
+++ b/docs/content/en/news/0.54.0-relnotes/featured-hugo-54.0-poster.png
Binary files differ
diff --git a/content/en/news/0.54.0-relnotes/index.md b/docs/content/en/news/0.54.0-relnotes/index.md
index 8fc56620b..8fc56620b 100644
--- a/content/en/news/0.54.0-relnotes/index.md
+++ b/docs/content/en/news/0.54.0-relnotes/index.md
diff --git a/content/en/news/0.55.0-relnotes/featured.png b/docs/content/en/news/0.55.0-relnotes/featured.png
index 0d3180579..0d3180579 100644
--- a/content/en/news/0.55.0-relnotes/featured.png
+++ b/docs/content/en/news/0.55.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.55.0-relnotes/index.md b/docs/content/en/news/0.55.0-relnotes/index.md
index c22eaf366..c22eaf366 100644
--- a/content/en/news/0.55.0-relnotes/index.md
+++ b/docs/content/en/news/0.55.0-relnotes/index.md
diff --git a/content/en/news/0.55.1-relnotes/index.md b/docs/content/en/news/0.55.1-relnotes/index.md
index 4e9880dc5..4e9880dc5 100644
--- a/content/en/news/0.55.1-relnotes/index.md
+++ b/docs/content/en/news/0.55.1-relnotes/index.md
diff --git a/content/en/news/0.55.2-relnotes/index.md b/docs/content/en/news/0.55.2-relnotes/index.md
index 0b6f49b11..0b6f49b11 100644
--- a/content/en/news/0.55.2-relnotes/index.md
+++ b/docs/content/en/news/0.55.2-relnotes/index.md
diff --git a/content/en/news/0.55.3-relnotes/index.md b/docs/content/en/news/0.55.3-relnotes/index.md
index d00c47d54..d00c47d54 100644
--- a/content/en/news/0.55.3-relnotes/index.md
+++ b/docs/content/en/news/0.55.3-relnotes/index.md
diff --git a/content/en/news/0.55.4-relnotes/index.md b/docs/content/en/news/0.55.4-relnotes/index.md
index 292b39244..292b39244 100644
--- a/content/en/news/0.55.4-relnotes/index.md
+++ b/docs/content/en/news/0.55.4-relnotes/index.md
diff --git a/content/en/news/0.55.5-relnotes/index.md b/docs/content/en/news/0.55.5-relnotes/index.md
index 45a3eda54..45a3eda54 100644
--- a/content/en/news/0.55.5-relnotes/index.md
+++ b/docs/content/en/news/0.55.5-relnotes/index.md
diff --git a/content/en/news/0.55.6-relnotes/index.md b/docs/content/en/news/0.55.6-relnotes/index.md
index c447aa061..c447aa061 100644
--- a/content/en/news/0.55.6-relnotes/index.md
+++ b/docs/content/en/news/0.55.6-relnotes/index.md
diff --git a/content/en/news/0.56.0-relnotes/featured.png b/docs/content/en/news/0.56.0-relnotes/featured.png
index bd6410ead..bd6410ead 100644
--- a/content/en/news/0.56.0-relnotes/featured.png
+++ b/docs/content/en/news/0.56.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.56.0-relnotes/index.md b/docs/content/en/news/0.56.0-relnotes/index.md
index 631c1c6a5..631c1c6a5 100644
--- a/content/en/news/0.56.0-relnotes/index.md
+++ b/docs/content/en/news/0.56.0-relnotes/index.md
diff --git a/content/en/news/0.56.1-relnotes/index.md b/docs/content/en/news/0.56.1-relnotes/index.md
index c83250fe5..c83250fe5 100644
--- a/content/en/news/0.56.1-relnotes/index.md
+++ b/docs/content/en/news/0.56.1-relnotes/index.md
diff --git a/content/en/news/0.56.2-relnotes/index.md b/docs/content/en/news/0.56.2-relnotes/index.md
index 67c5f8b6d..67c5f8b6d 100644
--- a/content/en/news/0.56.2-relnotes/index.md
+++ b/docs/content/en/news/0.56.2-relnotes/index.md
diff --git a/content/en/news/0.56.3-relnotes/index.md b/docs/content/en/news/0.56.3-relnotes/index.md
index c505e520d..c505e520d 100644
--- a/content/en/news/0.56.3-relnotes/index.md
+++ b/docs/content/en/news/0.56.3-relnotes/index.md
diff --git a/content/en/news/0.57.0-relnotes/hugo-57-poster-featured.png b/docs/content/en/news/0.57.0-relnotes/hugo-57-poster-featured.png
index aeb5561c8..aeb5561c8 100644
--- a/content/en/news/0.57.0-relnotes/hugo-57-poster-featured.png
+++ b/docs/content/en/news/0.57.0-relnotes/hugo-57-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.57.0-relnotes/index.md b/docs/content/en/news/0.57.0-relnotes/index.md
index d99150e08..d99150e08 100644
--- a/content/en/news/0.57.0-relnotes/index.md
+++ b/docs/content/en/news/0.57.0-relnotes/index.md
diff --git a/content/en/news/0.57.1-relnotes/index.md b/docs/content/en/news/0.57.1-relnotes/index.md
index fea7833ff..fea7833ff 100644
--- a/content/en/news/0.57.1-relnotes/index.md
+++ b/docs/content/en/news/0.57.1-relnotes/index.md
diff --git a/content/en/news/0.57.2-relnotes/index.md b/docs/content/en/news/0.57.2-relnotes/index.md
index 83c349401..83c349401 100644
--- a/content/en/news/0.57.2-relnotes/index.md
+++ b/docs/content/en/news/0.57.2-relnotes/index.md
diff --git a/content/en/news/0.58.0-relnotes/hugo58-featured.png b/docs/content/en/news/0.58.0-relnotes/hugo58-featured.png
index 52962050d..52962050d 100644
--- a/content/en/news/0.58.0-relnotes/hugo58-featured.png
+++ b/docs/content/en/news/0.58.0-relnotes/hugo58-featured.png
Binary files differ
diff --git a/content/en/news/0.58.0-relnotes/index.md b/docs/content/en/news/0.58.0-relnotes/index.md
index 38b2143e0..38b2143e0 100644
--- a/content/en/news/0.58.0-relnotes/index.md
+++ b/docs/content/en/news/0.58.0-relnotes/index.md
diff --git a/content/en/news/0.58.1-relnotes/index.md b/docs/content/en/news/0.58.1-relnotes/index.md
index 1350c0a73..1350c0a73 100644
--- a/content/en/news/0.58.1-relnotes/index.md
+++ b/docs/content/en/news/0.58.1-relnotes/index.md
diff --git a/content/en/news/0.58.2-relnotes/index.md b/docs/content/en/news/0.58.2-relnotes/index.md
index e498aea59..e498aea59 100644
--- a/content/en/news/0.58.2-relnotes/index.md
+++ b/docs/content/en/news/0.58.2-relnotes/index.md
diff --git a/content/en/news/0.58.3-relnotes/index.md b/docs/content/en/news/0.58.3-relnotes/index.md
index 86bc4b88f..86bc4b88f 100644
--- a/content/en/news/0.58.3-relnotes/index.md
+++ b/docs/content/en/news/0.58.3-relnotes/index.md
diff --git a/content/en/news/0.59.0-relnotes/hugo-59-poster-featured.png b/docs/content/en/news/0.59.0-relnotes/hugo-59-poster-featured.png
index 67dc65872..67dc65872 100644
--- a/content/en/news/0.59.0-relnotes/hugo-59-poster-featured.png
+++ b/docs/content/en/news/0.59.0-relnotes/hugo-59-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.59.0-relnotes/index.md b/docs/content/en/news/0.59.0-relnotes/index.md
index 1a7552d09..1a7552d09 100644
--- a/content/en/news/0.59.0-relnotes/index.md
+++ b/docs/content/en/news/0.59.0-relnotes/index.md
diff --git a/content/en/news/0.59.1-relnotes/index.md b/docs/content/en/news/0.59.1-relnotes/index.md
index 830ccaca1..830ccaca1 100644
--- a/content/en/news/0.59.1-relnotes/index.md
+++ b/docs/content/en/news/0.59.1-relnotes/index.md
diff --git a/content/en/news/0.60.0-relnotes/index.md b/docs/content/en/news/0.60.0-relnotes/index.md
index 8cdebb35e..8cdebb35e 100644
--- a/content/en/news/0.60.0-relnotes/index.md
+++ b/docs/content/en/news/0.60.0-relnotes/index.md
diff --git a/content/en/news/0.60.0-relnotes/poster-featured.png b/docs/content/en/news/0.60.0-relnotes/poster-featured.png
index 9bd99be59..9bd99be59 100644
--- a/content/en/news/0.60.0-relnotes/poster-featured.png
+++ b/docs/content/en/news/0.60.0-relnotes/poster-featured.png
Binary files differ
diff --git a/content/en/news/0.60.1-relnotes/featured-061.png b/docs/content/en/news/0.60.1-relnotes/featured-061.png
index 8ff4d4af9..8ff4d4af9 100644
--- a/content/en/news/0.60.1-relnotes/featured-061.png
+++ b/docs/content/en/news/0.60.1-relnotes/featured-061.png
Binary files differ
diff --git a/content/en/news/0.60.1-relnotes/index.md b/docs/content/en/news/0.60.1-relnotes/index.md
index 2709c7b6f..2709c7b6f 100644
--- a/content/en/news/0.60.1-relnotes/index.md
+++ b/docs/content/en/news/0.60.1-relnotes/index.md
diff --git a/content/en/news/0.61.0-relnotes/hugo-61-featured.png b/docs/content/en/news/0.61.0-relnotes/hugo-61-featured.png
index 8691f30e2..8691f30e2 100644
--- a/content/en/news/0.61.0-relnotes/hugo-61-featured.png
+++ b/docs/content/en/news/0.61.0-relnotes/hugo-61-featured.png
Binary files differ
diff --git a/content/en/news/0.61.0-relnotes/index.md b/docs/content/en/news/0.61.0-relnotes/index.md
index 2922506df..2922506df 100644
--- a/content/en/news/0.61.0-relnotes/index.md
+++ b/docs/content/en/news/0.61.0-relnotes/index.md
diff --git a/content/en/news/0.62.0-relnotes/hugo-62-poster-featured.png b/docs/content/en/news/0.62.0-relnotes/hugo-62-poster-featured.png
index 9a024c023..9a024c023 100644
--- a/content/en/news/0.62.0-relnotes/hugo-62-poster-featured.png
+++ b/docs/content/en/news/0.62.0-relnotes/hugo-62-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.62.0-relnotes/index.md b/docs/content/en/news/0.62.0-relnotes/index.md
index 71f01145d..71f01145d 100644
--- a/content/en/news/0.62.0-relnotes/index.md
+++ b/docs/content/en/news/0.62.0-relnotes/index.md
diff --git a/content/en/news/0.62.1-relnotes/index.md b/docs/content/en/news/0.62.1-relnotes/index.md
index 98fe5eb5b..98fe5eb5b 100644
--- a/content/en/news/0.62.1-relnotes/index.md
+++ b/docs/content/en/news/0.62.1-relnotes/index.md
diff --git a/content/en/news/0.62.2-relnotes/index.md b/docs/content/en/news/0.62.2-relnotes/index.md
index 0d116e5a2..0d116e5a2 100644
--- a/content/en/news/0.62.2-relnotes/index.md
+++ b/docs/content/en/news/0.62.2-relnotes/index.md
diff --git a/content/en/news/0.63.0-relnotes/featured-063.png b/docs/content/en/news/0.63.0-relnotes/featured-063.png
index 3944d52cc..3944d52cc 100644
--- a/content/en/news/0.63.0-relnotes/featured-063.png
+++ b/docs/content/en/news/0.63.0-relnotes/featured-063.png
Binary files differ
diff --git a/content/en/news/0.63.0-relnotes/index.md b/docs/content/en/news/0.63.0-relnotes/index.md
index 899dfdb39..899dfdb39 100644
--- a/content/en/news/0.63.0-relnotes/index.md
+++ b/docs/content/en/news/0.63.0-relnotes/index.md
diff --git a/content/en/news/0.63.1-relnotes/index.md b/docs/content/en/news/0.63.1-relnotes/index.md
index e6ae8b906..e6ae8b906 100644
--- a/content/en/news/0.63.1-relnotes/index.md
+++ b/docs/content/en/news/0.63.1-relnotes/index.md
diff --git a/content/en/news/0.63.2-relnotes/index.md b/docs/content/en/news/0.63.2-relnotes/index.md
index 8477ef02c..8477ef02c 100644
--- a/content/en/news/0.63.2-relnotes/index.md
+++ b/docs/content/en/news/0.63.2-relnotes/index.md
diff --git a/content/en/news/0.64.0-relnotes/hugo-64-poster-featured.png b/docs/content/en/news/0.64.0-relnotes/hugo-64-poster-featured.png
index 71861bad8..71861bad8 100644
--- a/content/en/news/0.64.0-relnotes/hugo-64-poster-featured.png
+++ b/docs/content/en/news/0.64.0-relnotes/hugo-64-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.64.0-relnotes/index.md b/docs/content/en/news/0.64.0-relnotes/index.md
index e03dc8f54..e03dc8f54 100644
--- a/content/en/news/0.64.0-relnotes/index.md
+++ b/docs/content/en/news/0.64.0-relnotes/index.md
diff --git a/content/en/news/0.64.1-relnotes/index.md b/docs/content/en/news/0.64.1-relnotes/index.md
index 4dbcab670..4dbcab670 100644
--- a/content/en/news/0.64.1-relnotes/index.md
+++ b/docs/content/en/news/0.64.1-relnotes/index.md
diff --git a/content/en/news/0.65.0-relnotes/hugo-65-poster-featured.png b/docs/content/en/news/0.65.0-relnotes/hugo-65-poster-featured.png
index a311df0cb..a311df0cb 100644
--- a/content/en/news/0.65.0-relnotes/hugo-65-poster-featured.png
+++ b/docs/content/en/news/0.65.0-relnotes/hugo-65-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.65.0-relnotes/index.md b/docs/content/en/news/0.65.0-relnotes/index.md
index 1a2edb907..1a2edb907 100644
--- a/content/en/news/0.65.0-relnotes/index.md
+++ b/docs/content/en/news/0.65.0-relnotes/index.md
diff --git a/content/en/news/0.65.0-relnotes/pg-admin-tos.png b/docs/content/en/news/0.65.0-relnotes/pg-admin-tos.png
index fc2f4e34d..fc2f4e34d 100644
--- a/content/en/news/0.65.0-relnotes/pg-admin-tos.png
+++ b/docs/content/en/news/0.65.0-relnotes/pg-admin-tos.png
Binary files differ
diff --git a/content/en/news/0.65.1-relnotes/index.md b/docs/content/en/news/0.65.1-relnotes/index.md
index 07ee66569..07ee66569 100644
--- a/content/en/news/0.65.1-relnotes/index.md
+++ b/docs/content/en/news/0.65.1-relnotes/index.md
diff --git a/content/en/news/0.65.2-relnotes/index.md b/docs/content/en/news/0.65.2-relnotes/index.md
index ee9280976..ee9280976 100644
--- a/content/en/news/0.65.2-relnotes/index.md
+++ b/docs/content/en/news/0.65.2-relnotes/index.md
diff --git a/content/en/news/0.65.3-relnotes/index.md b/docs/content/en/news/0.65.3-relnotes/index.md
index 1d47362bb..1d47362bb 100644
--- a/content/en/news/0.65.3-relnotes/index.md
+++ b/docs/content/en/news/0.65.3-relnotes/index.md
diff --git a/content/en/news/0.66.0-relnotes/hugo-66-poster-featured.png b/docs/content/en/news/0.66.0-relnotes/hugo-66-poster-featured.png
index fcdc707ce..fcdc707ce 100644
--- a/content/en/news/0.66.0-relnotes/hugo-66-poster-featured.png
+++ b/docs/content/en/news/0.66.0-relnotes/hugo-66-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.66.0-relnotes/index.md b/docs/content/en/news/0.66.0-relnotes/index.md
index 850a8fa71..850a8fa71 100644
--- a/content/en/news/0.66.0-relnotes/index.md
+++ b/docs/content/en/news/0.66.0-relnotes/index.md
diff --git a/content/en/news/0.67.0-relnotes/hugo-67-poster-featured.png b/docs/content/en/news/0.67.0-relnotes/hugo-67-poster-featured.png
index 059d8c07a..059d8c07a 100644
--- a/content/en/news/0.67.0-relnotes/hugo-67-poster-featured.png
+++ b/docs/content/en/news/0.67.0-relnotes/hugo-67-poster-featured.png
Binary files differ
diff --git a/content/en/news/0.67.0-relnotes/index.md b/docs/content/en/news/0.67.0-relnotes/index.md
index 881fe0367..881fe0367 100644
--- a/content/en/news/0.67.0-relnotes/index.md
+++ b/docs/content/en/news/0.67.0-relnotes/index.md
diff --git a/content/en/news/0.67.1-relnotes/index.md b/docs/content/en/news/0.67.1-relnotes/index.md
index 7962ccca3..7962ccca3 100644
--- a/content/en/news/0.67.1-relnotes/index.md
+++ b/docs/content/en/news/0.67.1-relnotes/index.md
diff --git a/content/en/news/0.68.0-relnotes/hugo-68-featured.png b/docs/content/en/news/0.68.0-relnotes/hugo-68-featured.png
index 0696d990d..0696d990d 100644
--- a/content/en/news/0.68.0-relnotes/hugo-68-featured.png
+++ b/docs/content/en/news/0.68.0-relnotes/hugo-68-featured.png
Binary files differ
diff --git a/content/en/news/0.68.0-relnotes/index.md b/docs/content/en/news/0.68.0-relnotes/index.md
index 507249a21..507249a21 100644
--- a/content/en/news/0.68.0-relnotes/index.md
+++ b/docs/content/en/news/0.68.0-relnotes/index.md
diff --git a/content/en/news/0.68.1-relnotes/index.md b/docs/content/en/news/0.68.1-relnotes/index.md
index ab9946b8e..ab9946b8e 100644
--- a/content/en/news/0.68.1-relnotes/index.md
+++ b/docs/content/en/news/0.68.1-relnotes/index.md
diff --git a/content/en/news/0.68.2-relnotes/index.md b/docs/content/en/news/0.68.2-relnotes/index.md
index c61d1012a..c61d1012a 100644
--- a/content/en/news/0.68.2-relnotes/index.md
+++ b/docs/content/en/news/0.68.2-relnotes/index.md
diff --git a/content/en/news/0.68.3-relnotes/index.md b/docs/content/en/news/0.68.3-relnotes/index.md
index 3855aadf3..3855aadf3 100644
--- a/content/en/news/0.68.3-relnotes/index.md
+++ b/docs/content/en/news/0.68.3-relnotes/index.md
diff --git a/content/en/news/0.69.0-relnotes/hugo-69-easter-featured.png b/docs/content/en/news/0.69.0-relnotes/hugo-69-easter-featured.png
index d1b413142..d1b413142 100644
--- a/content/en/news/0.69.0-relnotes/hugo-69-easter-featured.png
+++ b/docs/content/en/news/0.69.0-relnotes/hugo-69-easter-featured.png
Binary files differ
diff --git a/content/en/news/0.69.0-relnotes/index.md b/docs/content/en/news/0.69.0-relnotes/index.md
index 13bb1b76b..13bb1b76b 100644
--- a/content/en/news/0.69.0-relnotes/index.md
+++ b/docs/content/en/news/0.69.0-relnotes/index.md
diff --git a/content/en/news/0.69.1-relnotes/index.md b/docs/content/en/news/0.69.1-relnotes/index.md
index d80e3f26d..d80e3f26d 100644
--- a/content/en/news/0.69.1-relnotes/index.md
+++ b/docs/content/en/news/0.69.1-relnotes/index.md
diff --git a/content/en/news/0.69.2-relnotes/index.md b/docs/content/en/news/0.69.2-relnotes/index.md
index 048a58817..048a58817 100644
--- a/content/en/news/0.69.2-relnotes/index.md
+++ b/docs/content/en/news/0.69.2-relnotes/index.md
diff --git a/content/en/news/0.7-relnotes/index.md b/docs/content/en/news/0.7-relnotes/index.md
index e140304c0..e140304c0 100644
--- a/content/en/news/0.7-relnotes/index.md
+++ b/docs/content/en/news/0.7-relnotes/index.md
diff --git a/content/en/news/0.70.0-relnotes/hugo-70-featured.png b/docs/content/en/news/0.70.0-relnotes/hugo-70-featured.png
index 3b9c67d5f..3b9c67d5f 100644
--- a/content/en/news/0.70.0-relnotes/hugo-70-featured.png
+++ b/docs/content/en/news/0.70.0-relnotes/hugo-70-featured.png
Binary files differ
diff --git a/content/en/news/0.70.0-relnotes/index.md b/docs/content/en/news/0.70.0-relnotes/index.md
index 8a6c25b00..8a6c25b00 100644
--- a/content/en/news/0.70.0-relnotes/index.md
+++ b/docs/content/en/news/0.70.0-relnotes/index.md
diff --git a/content/en/news/0.71.0-relnotes/hugo-71-featured.png b/docs/content/en/news/0.71.0-relnotes/hugo-71-featured.png
index 081581df8..081581df8 100644
--- a/content/en/news/0.71.0-relnotes/hugo-71-featured.png
+++ b/docs/content/en/news/0.71.0-relnotes/hugo-71-featured.png
Binary files differ
diff --git a/content/en/news/0.71.0-relnotes/index.md b/docs/content/en/news/0.71.0-relnotes/index.md
index 07d951bf3..07d951bf3 100644
--- a/content/en/news/0.71.0-relnotes/index.md
+++ b/docs/content/en/news/0.71.0-relnotes/index.md
diff --git a/content/en/news/0.71.1-relnotes/index.md b/docs/content/en/news/0.71.1-relnotes/index.md
index 7fbd01dd3..7fbd01dd3 100644
--- a/content/en/news/0.71.1-relnotes/index.md
+++ b/docs/content/en/news/0.71.1-relnotes/index.md
diff --git a/content/en/news/0.72.0-relnotes/hugo-72-featured.png b/docs/content/en/news/0.72.0-relnotes/hugo-72-featured.png
index 673ab28c3..673ab28c3 100644
--- a/content/en/news/0.72.0-relnotes/hugo-72-featured.png
+++ b/docs/content/en/news/0.72.0-relnotes/hugo-72-featured.png
Binary files differ
diff --git a/content/en/news/0.72.0-relnotes/index.md b/docs/content/en/news/0.72.0-relnotes/index.md
index 8e413f02a..8e413f02a 100644
--- a/content/en/news/0.72.0-relnotes/index.md
+++ b/docs/content/en/news/0.72.0-relnotes/index.md
diff --git a/content/en/news/0.73.0-relnotes/hugo-73-featured.png b/docs/content/en/news/0.73.0-relnotes/hugo-73-featured.png
index 92685b15c..92685b15c 100644
--- a/content/en/news/0.73.0-relnotes/hugo-73-featured.png
+++ b/docs/content/en/news/0.73.0-relnotes/hugo-73-featured.png
Binary files differ
diff --git a/content/en/news/0.73.0-relnotes/index.md b/docs/content/en/news/0.73.0-relnotes/index.md
index 398869e92..398869e92 100644
--- a/content/en/news/0.73.0-relnotes/index.md
+++ b/docs/content/en/news/0.73.0-relnotes/index.md
diff --git a/content/en/news/0.74.0-relnotes/featured.png b/docs/content/en/news/0.74.0-relnotes/featured.png
index 34f288495..34f288495 100644
--- a/content/en/news/0.74.0-relnotes/featured.png
+++ b/docs/content/en/news/0.74.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.74.0-relnotes/index.md b/docs/content/en/news/0.74.0-relnotes/index.md
index d793f4888..d793f4888 100644
--- a/content/en/news/0.74.0-relnotes/index.md
+++ b/docs/content/en/news/0.74.0-relnotes/index.md
diff --git a/content/en/news/0.74.1-relnotes/index.md b/docs/content/en/news/0.74.1-relnotes/index.md
index 7419296af..7419296af 100644
--- a/content/en/news/0.74.1-relnotes/index.md
+++ b/docs/content/en/news/0.74.1-relnotes/index.md
diff --git a/content/en/news/0.74.2-relnotes/index.md b/docs/content/en/news/0.74.2-relnotes/index.md
index e78d8564c..e78d8564c 100644
--- a/content/en/news/0.74.2-relnotes/index.md
+++ b/docs/content/en/news/0.74.2-relnotes/index.md
diff --git a/content/en/news/0.74.3-relnotes/index.md b/docs/content/en/news/0.74.3-relnotes/index.md
index b5503982c..b5503982c 100644
--- a/content/en/news/0.74.3-relnotes/index.md
+++ b/docs/content/en/news/0.74.3-relnotes/index.md
diff --git a/content/en/news/0.75.0-relnotes/featured.png b/docs/content/en/news/0.75.0-relnotes/featured.png
index 0ee2a713f..0ee2a713f 100644
--- a/content/en/news/0.75.0-relnotes/featured.png
+++ b/docs/content/en/news/0.75.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.75.0-relnotes/index.md b/docs/content/en/news/0.75.0-relnotes/index.md
index 3758be13b..3758be13b 100644
--- a/content/en/news/0.75.0-relnotes/index.md
+++ b/docs/content/en/news/0.75.0-relnotes/index.md
diff --git a/content/en/news/0.75.1-relnotes/index.md b/docs/content/en/news/0.75.1-relnotes/index.md
index 6da3d6c65..6da3d6c65 100644
--- a/content/en/news/0.75.1-relnotes/index.md
+++ b/docs/content/en/news/0.75.1-relnotes/index.md
diff --git a/content/en/news/0.76.0-relnotes/featured.png b/docs/content/en/news/0.76.0-relnotes/featured.png
index a2130ce81..a2130ce81 100644
--- a/content/en/news/0.76.0-relnotes/featured.png
+++ b/docs/content/en/news/0.76.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.76.0-relnotes/index.md b/docs/content/en/news/0.76.0-relnotes/index.md
index 65f3ebb9d..65f3ebb9d 100644
--- a/content/en/news/0.76.0-relnotes/index.md
+++ b/docs/content/en/news/0.76.0-relnotes/index.md
diff --git a/content/en/news/0.76.1-relnotes/index.md b/docs/content/en/news/0.76.1-relnotes/index.md
index b28b17960..b28b17960 100644
--- a/content/en/news/0.76.1-relnotes/index.md
+++ b/docs/content/en/news/0.76.1-relnotes/index.md
diff --git a/content/en/news/0.76.2-relnotes/index.md b/docs/content/en/news/0.76.2-relnotes/index.md
index c071fdcac..c071fdcac 100644
--- a/content/en/news/0.76.2-relnotes/index.md
+++ b/docs/content/en/news/0.76.2-relnotes/index.md
diff --git a/content/en/news/0.76.3-relnotes/index.md b/docs/content/en/news/0.76.3-relnotes/index.md
index 1dc2d8b09..1dc2d8b09 100644
--- a/content/en/news/0.76.3-relnotes/index.md
+++ b/docs/content/en/news/0.76.3-relnotes/index.md
diff --git a/content/en/news/0.76.4-relnotes/index.md b/docs/content/en/news/0.76.4-relnotes/index.md
index cb7e8670c..cb7e8670c 100644
--- a/content/en/news/0.76.4-relnotes/index.md
+++ b/docs/content/en/news/0.76.4-relnotes/index.md
diff --git a/content/en/news/0.76.5-relnotes/index.md b/docs/content/en/news/0.76.5-relnotes/index.md
index 063cbbaa9..063cbbaa9 100644
--- a/content/en/news/0.76.5-relnotes/index.md
+++ b/docs/content/en/news/0.76.5-relnotes/index.md
diff --git a/content/en/news/0.77.0-relnotes/featured.png b/docs/content/en/news/0.77.0-relnotes/featured.png
index 4245479a5..4245479a5 100644
--- a/content/en/news/0.77.0-relnotes/featured.png
+++ b/docs/content/en/news/0.77.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.77.0-relnotes/index.md b/docs/content/en/news/0.77.0-relnotes/index.md
index c9db7ef99..c9db7ef99 100644
--- a/content/en/news/0.77.0-relnotes/index.md
+++ b/docs/content/en/news/0.77.0-relnotes/index.md
diff --git a/content/en/news/0.78.0-relnotes/featured.png b/docs/content/en/news/0.78.0-relnotes/featured.png
index 36ae0ac95..36ae0ac95 100644
--- a/content/en/news/0.78.0-relnotes/featured.png
+++ b/docs/content/en/news/0.78.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.78.0-relnotes/index.md b/docs/content/en/news/0.78.0-relnotes/index.md
index 25b0fd4d8..25b0fd4d8 100644
--- a/content/en/news/0.78.0-relnotes/index.md
+++ b/docs/content/en/news/0.78.0-relnotes/index.md
diff --git a/content/en/news/0.78.1-relnotes/index.md b/docs/content/en/news/0.78.1-relnotes/index.md
index 168c1bbcd..168c1bbcd 100644
--- a/content/en/news/0.78.1-relnotes/index.md
+++ b/docs/content/en/news/0.78.1-relnotes/index.md
diff --git a/content/en/news/0.78.2-relnotes/index.md b/docs/content/en/news/0.78.2-relnotes/index.md
index 4714373e9..4714373e9 100644
--- a/content/en/news/0.78.2-relnotes/index.md
+++ b/docs/content/en/news/0.78.2-relnotes/index.md
diff --git a/content/en/news/0.79.0-relnotes/featured.png b/docs/content/en/news/0.79.0-relnotes/featured.png
index f1b7686da..f1b7686da 100644
--- a/content/en/news/0.79.0-relnotes/featured.png
+++ b/docs/content/en/news/0.79.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.79.0-relnotes/index.md b/docs/content/en/news/0.79.0-relnotes/index.md
index 23ed1ef2e..23ed1ef2e 100644
--- a/content/en/news/0.79.0-relnotes/index.md
+++ b/docs/content/en/news/0.79.0-relnotes/index.md
diff --git a/content/en/news/0.79.1-relnotes/index.md b/docs/content/en/news/0.79.1-relnotes/index.md
index 2a3f32765..2a3f32765 100644
--- a/content/en/news/0.79.1-relnotes/index.md
+++ b/docs/content/en/news/0.79.1-relnotes/index.md
diff --git a/content/en/news/0.8-relnotes/index.md b/docs/content/en/news/0.8-relnotes/index.md
index 6da6b9671..6da6b9671 100644
--- a/content/en/news/0.8-relnotes/index.md
+++ b/docs/content/en/news/0.8-relnotes/index.md
diff --git a/content/en/news/0.80.0-relnotes/featured.png b/docs/content/en/news/0.80.0-relnotes/featured.png
index 09308b04c..09308b04c 100644
--- a/content/en/news/0.80.0-relnotes/featured.png
+++ b/docs/content/en/news/0.80.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.80.0-relnotes/index.md b/docs/content/en/news/0.80.0-relnotes/index.md
index 1c390b685..1c390b685 100644
--- a/content/en/news/0.80.0-relnotes/index.md
+++ b/docs/content/en/news/0.80.0-relnotes/index.md
diff --git a/content/en/news/0.81.0-relnotes/featured.jpg b/docs/content/en/news/0.81.0-relnotes/featured.jpg
index e1488c5c2..e1488c5c2 100644
--- a/content/en/news/0.81.0-relnotes/featured.jpg
+++ b/docs/content/en/news/0.81.0-relnotes/featured.jpg
Binary files differ
diff --git a/content/en/news/0.81.0-relnotes/index.md b/docs/content/en/news/0.81.0-relnotes/index.md
index ebcbb5ac7..ebcbb5ac7 100644
--- a/content/en/news/0.81.0-relnotes/index.md
+++ b/docs/content/en/news/0.81.0-relnotes/index.md
diff --git a/content/en/news/0.82.0-relnotes/featured.png b/docs/content/en/news/0.82.0-relnotes/featured.png
index d09243f61..d09243f61 100644
--- a/content/en/news/0.82.0-relnotes/featured.png
+++ b/docs/content/en/news/0.82.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.82.0-relnotes/index.md b/docs/content/en/news/0.82.0-relnotes/index.md
index 0297ea041..0297ea041 100644
--- a/content/en/news/0.82.0-relnotes/index.md
+++ b/docs/content/en/news/0.82.0-relnotes/index.md
diff --git a/content/en/news/0.82.1-relnotes/index.md b/docs/content/en/news/0.82.1-relnotes/index.md
index 54462b8ad..54462b8ad 100644
--- a/content/en/news/0.82.1-relnotes/index.md
+++ b/docs/content/en/news/0.82.1-relnotes/index.md
diff --git a/content/en/news/0.83.0-relnotes/featured.png b/docs/content/en/news/0.83.0-relnotes/featured.png
index 473170efd..473170efd 100644
--- a/content/en/news/0.83.0-relnotes/featured.png
+++ b/docs/content/en/news/0.83.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.83.0-relnotes/index.md b/docs/content/en/news/0.83.0-relnotes/index.md
index 987110c3f..987110c3f 100644
--- a/content/en/news/0.83.0-relnotes/index.md
+++ b/docs/content/en/news/0.83.0-relnotes/index.md
diff --git a/content/en/news/0.83.1-relnotes/index.md b/docs/content/en/news/0.83.1-relnotes/index.md
index e896b04e1..e896b04e1 100644
--- a/content/en/news/0.83.1-relnotes/index.md
+++ b/docs/content/en/news/0.83.1-relnotes/index.md
diff --git a/content/en/news/0.84.0-relnotes/featured.png b/docs/content/en/news/0.84.0-relnotes/featured.png
index e733669e3..e733669e3 100644
--- a/content/en/news/0.84.0-relnotes/featured.png
+++ b/docs/content/en/news/0.84.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.84.0-relnotes/index.md b/docs/content/en/news/0.84.0-relnotes/index.md
index 8d3dfac24..8d3dfac24 100644
--- a/content/en/news/0.84.0-relnotes/index.md
+++ b/docs/content/en/news/0.84.0-relnotes/index.md
diff --git a/content/en/news/0.84.1-relnotes/index.md b/docs/content/en/news/0.84.1-relnotes/index.md
index 3cee1e290..3cee1e290 100644
--- a/content/en/news/0.84.1-relnotes/index.md
+++ b/docs/content/en/news/0.84.1-relnotes/index.md
diff --git a/content/en/news/0.84.2-relnotes/index.md b/docs/content/en/news/0.84.2-relnotes/index.md
index d2469ff0c..d2469ff0c 100644
--- a/content/en/news/0.84.2-relnotes/index.md
+++ b/docs/content/en/news/0.84.2-relnotes/index.md
diff --git a/content/en/news/0.84.3-relnotes/index.md b/docs/content/en/news/0.84.3-relnotes/index.md
index c805efb46..c805efb46 100644
--- a/content/en/news/0.84.3-relnotes/index.md
+++ b/docs/content/en/news/0.84.3-relnotes/index.md
diff --git a/content/en/news/0.84.4-relnotes/index.md b/docs/content/en/news/0.84.4-relnotes/index.md
index a04e4251d..a04e4251d 100644
--- a/content/en/news/0.84.4-relnotes/index.md
+++ b/docs/content/en/news/0.84.4-relnotes/index.md
diff --git a/content/en/news/0.85.0-relnotes/featured.png b/docs/content/en/news/0.85.0-relnotes/featured.png
index eb68d3171..eb68d3171 100644
--- a/content/en/news/0.85.0-relnotes/featured.png
+++ b/docs/content/en/news/0.85.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.85.0-relnotes/index.md b/docs/content/en/news/0.85.0-relnotes/index.md
index e0c25e537..e0c25e537 100644
--- a/content/en/news/0.85.0-relnotes/index.md
+++ b/docs/content/en/news/0.85.0-relnotes/index.md
diff --git a/content/en/news/0.86.0-relnotes/featured.png b/docs/content/en/news/0.86.0-relnotes/featured.png
index e9967e864..e9967e864 100644
--- a/content/en/news/0.86.0-relnotes/featured.png
+++ b/docs/content/en/news/0.86.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.86.0-relnotes/index.md b/docs/content/en/news/0.86.0-relnotes/index.md
index 4b717ed7c..4b717ed7c 100644
--- a/content/en/news/0.86.0-relnotes/index.md
+++ b/docs/content/en/news/0.86.0-relnotes/index.md
diff --git a/content/en/news/0.86.1-relnotes/index.md b/docs/content/en/news/0.86.1-relnotes/index.md
index 68a893bfb..68a893bfb 100644
--- a/content/en/news/0.86.1-relnotes/index.md
+++ b/docs/content/en/news/0.86.1-relnotes/index.md
diff --git a/content/en/news/0.87.0-relnotes/featured.png b/docs/content/en/news/0.87.0-relnotes/featured.png
index 21d209857..21d209857 100644
--- a/content/en/news/0.87.0-relnotes/featured.png
+++ b/docs/content/en/news/0.87.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.87.0-relnotes/index.md b/docs/content/en/news/0.87.0-relnotes/index.md
index e033aaa82..e033aaa82 100644
--- a/content/en/news/0.87.0-relnotes/index.md
+++ b/docs/content/en/news/0.87.0-relnotes/index.md
diff --git a/content/en/news/0.88.0-relnotes/featured.png b/docs/content/en/news/0.88.0-relnotes/featured.png
index 94af243dc..94af243dc 100644
--- a/content/en/news/0.88.0-relnotes/featured.png
+++ b/docs/content/en/news/0.88.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.88.0-relnotes/index.md b/docs/content/en/news/0.88.0-relnotes/index.md
index 9dfe9e681..9dfe9e681 100644
--- a/content/en/news/0.88.0-relnotes/index.md
+++ b/docs/content/en/news/0.88.0-relnotes/index.md
diff --git a/content/en/news/0.88.1-relnotes/index.md b/docs/content/en/news/0.88.1-relnotes/index.md
index d94e72fe9..d94e72fe9 100644
--- a/content/en/news/0.88.1-relnotes/index.md
+++ b/docs/content/en/news/0.88.1-relnotes/index.md
diff --git a/content/en/news/0.89.0-relnotes/featured.png b/docs/content/en/news/0.89.0-relnotes/featured.png
index 474e1f792..474e1f792 100644
--- a/content/en/news/0.89.0-relnotes/featured.png
+++ b/docs/content/en/news/0.89.0-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/0.89.0-relnotes/index.md b/docs/content/en/news/0.89.0-relnotes/index.md
index 6e32087e6..6e32087e6 100644
--- a/content/en/news/0.89.0-relnotes/index.md
+++ b/docs/content/en/news/0.89.0-relnotes/index.md
diff --git a/content/en/news/0.89.1-relnotes/index.md b/docs/content/en/news/0.89.1-relnotes/index.md
index 383cd2ee8..383cd2ee8 100644
--- a/content/en/news/0.89.1-relnotes/index.md
+++ b/docs/content/en/news/0.89.1-relnotes/index.md
diff --git a/content/en/news/0.89.2-relnotes/index.md b/docs/content/en/news/0.89.2-relnotes/index.md
index 0b886effa..0b886effa 100644
--- a/content/en/news/0.89.2-relnotes/index.md
+++ b/docs/content/en/news/0.89.2-relnotes/index.md
diff --git a/content/en/news/0.89.3-relnotes/index.md b/docs/content/en/news/0.89.3-relnotes/index.md
index ef6887d51..ef6887d51 100644
--- a/content/en/news/0.89.3-relnotes/index.md
+++ b/docs/content/en/news/0.89.3-relnotes/index.md
diff --git a/content/en/news/0.89.4-relnotes/index.md b/docs/content/en/news/0.89.4-relnotes/index.md
index 1f5385f33..1f5385f33 100644
--- a/content/en/news/0.89.4-relnotes/index.md
+++ b/docs/content/en/news/0.89.4-relnotes/index.md
diff --git a/content/en/news/0.9-relnotes/index.md b/docs/content/en/news/0.9-relnotes/index.md
index 5b9bf2c0d..5b9bf2c0d 100644
--- a/content/en/news/0.9-relnotes/index.md
+++ b/docs/content/en/news/0.9-relnotes/index.md
diff --git a/content/en/news/2021-12-17-no-more-releasenotes.md b/docs/content/en/news/2021-12-17-no-more-releasenotes.md
index 38bd75357..38bd75357 100644
--- a/content/en/news/2021-12-17-no-more-releasenotes.md
+++ b/docs/content/en/news/2021-12-17-no-more-releasenotes.md
diff --git a/content/en/news/2021/0.91.2-relnotes/featured.png b/docs/content/en/news/2021/0.91.2-relnotes/featured.png
index fddcb22eb..fddcb22eb 100644
--- a/content/en/news/2021/0.91.2-relnotes/featured.png
+++ b/docs/content/en/news/2021/0.91.2-relnotes/featured.png
Binary files differ
diff --git a/content/en/news/2021/0.91.2-relnotes/index.md b/docs/content/en/news/2021/0.91.2-relnotes/index.md
index 03654bf28..03654bf28 100644
--- a/content/en/news/2021/0.91.2-relnotes/index.md
+++ b/docs/content/en/news/2021/0.91.2-relnotes/index.md
diff --git a/content/en/news/_index.md b/docs/content/en/news/_index.md
index 353accc3d..353accc3d 100644
--- a/content/en/news/_index.md
+++ b/docs/content/en/news/_index.md
diff --git a/content/en/news/hugo-macos-intel-vs-arm/featured.png b/docs/content/en/news/hugo-macos-intel-vs-arm/featured.png
index 30e73ad4e..30e73ad4e 100644
--- a/content/en/news/hugo-macos-intel-vs-arm/featured.png
+++ b/docs/content/en/news/hugo-macos-intel-vs-arm/featured.png
Binary files differ
diff --git a/content/en/news/hugo-macos-intel-vs-arm/index.html b/docs/content/en/news/hugo-macos-intel-vs-arm/index.html
index 9bc83df5d..9bc83df5d 100644
--- a/content/en/news/hugo-macos-intel-vs-arm/index.html
+++ b/docs/content/en/news/hugo-macos-intel-vs-arm/index.html
diff --git a/content/en/news/lets-celebrate-hugos-5th-birthday/featured.png b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/featured.png
index 4c31412fd..4c31412fd 100644
--- a/content/en/news/lets-celebrate-hugos-5th-birthday/featured.png
+++ b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/featured.png
Binary files differ
diff --git a/content/en/news/lets-celebrate-hugos-5th-birthday/graph-stars.png b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/graph-stars.png
index 00848fcf0..00848fcf0 100644
--- a/content/en/news/lets-celebrate-hugos-5th-birthday/graph-stars.png
+++ b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/graph-stars.png
Binary files differ
diff --git a/content/en/news/lets-celebrate-hugos-5th-birthday/graph-themes.png b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/graph-themes.png
index 0d4dfd599..0d4dfd599 100644
--- a/content/en/news/lets-celebrate-hugos-5th-birthday/graph-themes.png
+++ b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/graph-themes.png
Binary files differ
diff --git a/content/en/news/lets-celebrate-hugos-5th-birthday/index.md b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/index.md
index 9912027b5..9912027b5 100644
--- a/content/en/news/lets-celebrate-hugos-5th-birthday/index.md
+++ b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/index.md
diff --git a/content/en/news/lets-celebrate-hugos-5th-birthday/sunset-get.png b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/sunset-get.png
index 5b368b97a..5b368b97a 100644
--- a/content/en/news/lets-celebrate-hugos-5th-birthday/sunset-get.png
+++ b/docs/content/en/news/lets-celebrate-hugos-5th-birthday/sunset-get.png
Binary files differ
diff --git a/content/en/readfiles/README.md b/docs/content/en/readfiles/README.md
index 4b10f0e47..4b10f0e47 100644
--- a/content/en/readfiles/README.md
+++ b/docs/content/en/readfiles/README.md
diff --git a/content/en/readfiles/dateformatting.md b/docs/content/en/readfiles/dateformatting.md
index 42138dd8a..42138dd8a 100644
--- a/content/en/readfiles/dateformatting.md
+++ b/docs/content/en/readfiles/dateformatting.md
diff --git a/content/en/readfiles/index.md b/docs/content/en/readfiles/index.md
index 3d65eaa0f..3d65eaa0f 100644
--- a/content/en/readfiles/index.md
+++ b/docs/content/en/readfiles/index.md
diff --git a/content/en/readfiles/pages-vs-site-pages.md b/docs/content/en/readfiles/pages-vs-site-pages.md
index df5c7d26e..df5c7d26e 100644
--- a/content/en/readfiles/pages-vs-site-pages.md
+++ b/docs/content/en/readfiles/pages-vs-site-pages.md
diff --git a/content/en/readfiles/sectionvars.md b/docs/content/en/readfiles/sectionvars.md
index 45aaff1f3..45aaff1f3 100644
--- a/content/en/readfiles/sectionvars.md
+++ b/docs/content/en/readfiles/sectionvars.md
diff --git a/content/en/readfiles/testing.txt b/docs/content/en/readfiles/testing.txt
index 6428710e3..6428710e3 100644
--- a/content/en/readfiles/testing.txt
+++ b/docs/content/en/readfiles/testing.txt
diff --git a/content/en/showcase/1password-support/bio.md b/docs/content/en/showcase/1password-support/bio.md
index 9187908d9..9187908d9 100644
--- a/content/en/showcase/1password-support/bio.md
+++ b/docs/content/en/showcase/1password-support/bio.md
diff --git a/content/en/showcase/1password-support/featured.png b/docs/content/en/showcase/1password-support/featured.png
index 8e46495e6..8e46495e6 100644
--- a/content/en/showcase/1password-support/featured.png
+++ b/docs/content/en/showcase/1password-support/featured.png
Binary files differ
diff --git a/content/en/showcase/1password-support/index.md b/docs/content/en/showcase/1password-support/index.md
index 2bcbff3fd..2bcbff3fd 100644
--- a/content/en/showcase/1password-support/index.md
+++ b/docs/content/en/showcase/1password-support/index.md
diff --git a/content/en/showcase/bypasscensorship/bio.md b/docs/content/en/showcase/bypasscensorship/bio.md
index 6563e13ca..6563e13ca 100644
--- a/content/en/showcase/bypasscensorship/bio.md
+++ b/docs/content/en/showcase/bypasscensorship/bio.md
diff --git a/content/en/showcase/bypasscensorship/featured.png b/docs/content/en/showcase/bypasscensorship/featured.png
index d6f429112..d6f429112 100644
--- a/content/en/showcase/bypasscensorship/featured.png
+++ b/docs/content/en/showcase/bypasscensorship/featured.png
Binary files differ
diff --git a/content/en/showcase/bypasscensorship/index.md b/docs/content/en/showcase/bypasscensorship/index.md
index a266797ea..a266797ea 100644
--- a/content/en/showcase/bypasscensorship/index.md
+++ b/docs/content/en/showcase/bypasscensorship/index.md
diff --git a/content/en/showcase/digitalgov/bio.md b/docs/content/en/showcase/digitalgov/bio.md
index db3ffafaf..db3ffafaf 100644
--- a/content/en/showcase/digitalgov/bio.md
+++ b/docs/content/en/showcase/digitalgov/bio.md
diff --git a/content/en/showcase/digitalgov/featured.png b/docs/content/en/showcase/digitalgov/featured.png
index 5663180f9..5663180f9 100644
--- a/content/en/showcase/digitalgov/featured.png
+++ b/docs/content/en/showcase/digitalgov/featured.png
Binary files differ
diff --git a/content/en/showcase/digitalgov/index.md b/docs/content/en/showcase/digitalgov/index.md
index 63f44b645..63f44b645 100644
--- a/content/en/showcase/digitalgov/index.md
+++ b/docs/content/en/showcase/digitalgov/index.md
diff --git a/content/en/showcase/fireship/bio.md b/docs/content/en/showcase/fireship/bio.md
index faf739bfa..faf739bfa 100644
--- a/content/en/showcase/fireship/bio.md
+++ b/docs/content/en/showcase/fireship/bio.md
diff --git a/content/en/showcase/fireship/featured.png b/docs/content/en/showcase/fireship/featured.png
index 33d1a47c5..33d1a47c5 100644
--- a/content/en/showcase/fireship/featured.png
+++ b/docs/content/en/showcase/fireship/featured.png
Binary files differ
diff --git a/content/en/showcase/fireship/index.md b/docs/content/en/showcase/fireship/index.md
index e9338a625..e9338a625 100644
--- a/content/en/showcase/fireship/index.md
+++ b/docs/content/en/showcase/fireship/index.md
diff --git a/content/en/showcase/flesland-flis/bio.md b/docs/content/en/showcase/flesland-flis/bio.md
index 2fa6a7964..2fa6a7964 100644
--- a/content/en/showcase/flesland-flis/bio.md
+++ b/docs/content/en/showcase/flesland-flis/bio.md
diff --git a/content/en/showcase/flesland-flis/featured.png b/docs/content/en/showcase/flesland-flis/featured.png
index a6dae684e..a6dae684e 100644
--- a/content/en/showcase/flesland-flis/featured.png
+++ b/docs/content/en/showcase/flesland-flis/featured.png
Binary files differ
diff --git a/content/en/showcase/flesland-flis/index.md b/docs/content/en/showcase/flesland-flis/index.md
index 935bb4661..935bb4661 100644
--- a/content/en/showcase/flesland-flis/index.md
+++ b/docs/content/en/showcase/flesland-flis/index.md
diff --git a/content/en/showcase/forestry/bio.md b/docs/content/en/showcase/forestry/bio.md
index 767365cc0..767365cc0 100644
--- a/content/en/showcase/forestry/bio.md
+++ b/docs/content/en/showcase/forestry/bio.md
diff --git a/content/en/showcase/forestry/featured.png b/docs/content/en/showcase/forestry/featured.png
index 1ee315e78..1ee315e78 100644
--- a/content/en/showcase/forestry/featured.png
+++ b/docs/content/en/showcase/forestry/featured.png
Binary files differ
diff --git a/content/en/showcase/forestry/index.md b/docs/content/en/showcase/forestry/index.md
index 1a9c0faaa..1a9c0faaa 100644
--- a/content/en/showcase/forestry/index.md
+++ b/docs/content/en/showcase/forestry/index.md
diff --git a/content/en/showcase/godot-tutorials/bio.md b/docs/content/en/showcase/godot-tutorials/bio.md
index 92fccadf6..92fccadf6 100644
--- a/content/en/showcase/godot-tutorials/bio.md
+++ b/docs/content/en/showcase/godot-tutorials/bio.md
diff --git a/content/en/showcase/godot-tutorials/featured.png b/docs/content/en/showcase/godot-tutorials/featured.png
index fef13b996..fef13b996 100644
--- a/content/en/showcase/godot-tutorials/featured.png
+++ b/docs/content/en/showcase/godot-tutorials/featured.png
Binary files differ
diff --git a/content/en/showcase/godot-tutorials/index.md b/docs/content/en/showcase/godot-tutorials/index.md
index e33e413e1..e33e413e1 100644
--- a/content/en/showcase/godot-tutorials/index.md
+++ b/docs/content/en/showcase/godot-tutorials/index.md
diff --git a/content/en/showcase/hapticmedia/bio.md b/docs/content/en/showcase/hapticmedia/bio.md
index 4423edb70..4423edb70 100644
--- a/content/en/showcase/hapticmedia/bio.md
+++ b/docs/content/en/showcase/hapticmedia/bio.md
diff --git a/content/en/showcase/hapticmedia/featured.png b/docs/content/en/showcase/hapticmedia/featured.png
index a47ea9c2c..a47ea9c2c 100644
--- a/content/en/showcase/hapticmedia/featured.png
+++ b/docs/content/en/showcase/hapticmedia/featured.png
Binary files differ
diff --git a/content/en/showcase/hapticmedia/index.md b/docs/content/en/showcase/hapticmedia/index.md
index 85ec17a8b..85ec17a8b 100644
--- a/content/en/showcase/hapticmedia/index.md
+++ b/docs/content/en/showcase/hapticmedia/index.md
diff --git a/content/en/showcase/hartwell-insurance/bio.md b/docs/content/en/showcase/hartwell-insurance/bio.md
index 7fab74292..7fab74292 100644
--- a/content/en/showcase/hartwell-insurance/bio.md
+++ b/docs/content/en/showcase/hartwell-insurance/bio.md
diff --git a/content/en/showcase/hartwell-insurance/featured.png b/docs/content/en/showcase/hartwell-insurance/featured.png
index ced251f98..ced251f98 100644
--- a/content/en/showcase/hartwell-insurance/featured.png
+++ b/docs/content/en/showcase/hartwell-insurance/featured.png
Binary files differ
diff --git a/content/en/showcase/hartwell-insurance/hartwell-columns.png b/docs/content/en/showcase/hartwell-insurance/hartwell-columns.png
index c9d36b67d..c9d36b67d 100644
--- a/content/en/showcase/hartwell-insurance/hartwell-columns.png
+++ b/docs/content/en/showcase/hartwell-insurance/hartwell-columns.png
Binary files differ
diff --git a/content/en/showcase/hartwell-insurance/hartwell-lighthouse.png b/docs/content/en/showcase/hartwell-insurance/hartwell-lighthouse.png
index a882f01fd..a882f01fd 100644
--- a/content/en/showcase/hartwell-insurance/hartwell-lighthouse.png
+++ b/docs/content/en/showcase/hartwell-insurance/hartwell-lighthouse.png
Binary files differ
diff --git a/content/en/showcase/hartwell-insurance/hartwell-webpagetest.png b/docs/content/en/showcase/hartwell-insurance/hartwell-webpagetest.png
index f60994ea1..f60994ea1 100644
--- a/content/en/showcase/hartwell-insurance/hartwell-webpagetest.png
+++ b/docs/content/en/showcase/hartwell-insurance/hartwell-webpagetest.png
Binary files differ
diff --git a/content/en/showcase/hartwell-insurance/index.md b/docs/content/en/showcase/hartwell-insurance/index.md
index 925497949..925497949 100644
--- a/content/en/showcase/hartwell-insurance/index.md
+++ b/docs/content/en/showcase/hartwell-insurance/index.md
diff --git a/content/en/showcase/keycdn/bio.md b/docs/content/en/showcase/keycdn/bio.md
index 90f623dca..90f623dca 100644
--- a/content/en/showcase/keycdn/bio.md
+++ b/docs/content/en/showcase/keycdn/bio.md
diff --git a/content/en/showcase/keycdn/featured.png b/docs/content/en/showcase/keycdn/featured.png
index 46018a8f9..46018a8f9 100644
--- a/content/en/showcase/keycdn/featured.png
+++ b/docs/content/en/showcase/keycdn/featured.png
Binary files differ
diff --git a/content/en/showcase/keycdn/index.md b/docs/content/en/showcase/keycdn/index.md
index d092aa07d..d092aa07d 100644
--- a/content/en/showcase/keycdn/index.md
+++ b/docs/content/en/showcase/keycdn/index.md
diff --git a/content/en/showcase/letsencrypt/bio.md b/docs/content/en/showcase/letsencrypt/bio.md
index 92551dc47..92551dc47 100644
--- a/content/en/showcase/letsencrypt/bio.md
+++ b/docs/content/en/showcase/letsencrypt/bio.md
diff --git a/content/en/showcase/letsencrypt/featured.png b/docs/content/en/showcase/letsencrypt/featured.png
index 9535d91bd..9535d91bd 100644
--- a/content/en/showcase/letsencrypt/featured.png
+++ b/docs/content/en/showcase/letsencrypt/featured.png
Binary files differ
diff --git a/content/en/showcase/letsencrypt/index.md b/docs/content/en/showcase/letsencrypt/index.md
index fc57a26b8..fc57a26b8 100644
--- a/content/en/showcase/letsencrypt/index.md
+++ b/docs/content/en/showcase/letsencrypt/index.md
diff --git a/content/en/showcase/linode/bio.md b/docs/content/en/showcase/linode/bio.md
index 42fa92229..42fa92229 100644
--- a/content/en/showcase/linode/bio.md
+++ b/docs/content/en/showcase/linode/bio.md
diff --git a/content/en/showcase/linode/featured.png b/docs/content/en/showcase/linode/featured.png
index 8e517eacb..8e517eacb 100644
--- a/content/en/showcase/linode/featured.png
+++ b/docs/content/en/showcase/linode/featured.png
Binary files differ
diff --git a/content/en/showcase/linode/index.md b/docs/content/en/showcase/linode/index.md
index 5a341be8a..5a341be8a 100644
--- a/content/en/showcase/linode/index.md
+++ b/docs/content/en/showcase/linode/index.md
diff --git a/content/en/showcase/over/bio.md b/docs/content/en/showcase/over/bio.md
index 415668f9e..415668f9e 100644
--- a/content/en/showcase/over/bio.md
+++ b/docs/content/en/showcase/over/bio.md
diff --git a/content/en/showcase/over/featured-over.png b/docs/content/en/showcase/over/featured-over.png
index 7d1ba6060..7d1ba6060 100644
--- a/content/en/showcase/over/featured-over.png
+++ b/docs/content/en/showcase/over/featured-over.png
Binary files differ
diff --git a/content/en/showcase/over/index.md b/docs/content/en/showcase/over/index.md
index 137bb2a55..137bb2a55 100644
--- a/content/en/showcase/over/index.md
+++ b/docs/content/en/showcase/over/index.md
diff --git a/content/en/showcase/pharmaseal/bio.md b/docs/content/en/showcase/pharmaseal/bio.md
index 7477f1c32..7477f1c32 100644
--- a/content/en/showcase/pharmaseal/bio.md
+++ b/docs/content/en/showcase/pharmaseal/bio.md
diff --git a/content/en/showcase/pharmaseal/featured-pharmaseal.png b/docs/content/en/showcase/pharmaseal/featured-pharmaseal.png
index 4a64325b7..4a64325b7 100644
--- a/content/en/showcase/pharmaseal/featured-pharmaseal.png
+++ b/docs/content/en/showcase/pharmaseal/featured-pharmaseal.png
Binary files differ
diff --git a/content/en/showcase/pharmaseal/index.md b/docs/content/en/showcase/pharmaseal/index.md
index 64e9960a3..64e9960a3 100644
--- a/content/en/showcase/pharmaseal/index.md
+++ b/docs/content/en/showcase/pharmaseal/index.md
diff --git a/content/en/showcase/quiply-employee-communications-app/bio.md b/docs/content/en/showcase/quiply-employee-communications-app/bio.md
index f72a62554..f72a62554 100644
--- a/content/en/showcase/quiply-employee-communications-app/bio.md
+++ b/docs/content/en/showcase/quiply-employee-communications-app/bio.md
diff --git a/content/en/showcase/quiply-employee-communications-app/featured.png b/docs/content/en/showcase/quiply-employee-communications-app/featured.png
index a4e9f046e..a4e9f046e 100644
--- a/content/en/showcase/quiply-employee-communications-app/featured.png
+++ b/docs/content/en/showcase/quiply-employee-communications-app/featured.png
Binary files differ
diff --git a/content/en/showcase/quiply-employee-communications-app/index.md b/docs/content/en/showcase/quiply-employee-communications-app/index.md
index a8c31cc33..a8c31cc33 100644
--- a/content/en/showcase/quiply-employee-communications-app/index.md
+++ b/docs/content/en/showcase/quiply-employee-communications-app/index.md
diff --git a/content/en/showcase/small-multiples/bio.md b/docs/content/en/showcase/small-multiples/bio.md
index 3e0c1f14a..3e0c1f14a 100644
--- a/content/en/showcase/small-multiples/bio.md
+++ b/docs/content/en/showcase/small-multiples/bio.md
diff --git a/content/en/showcase/small-multiples/featured-small-multiples.png b/docs/content/en/showcase/small-multiples/featured-small-multiples.png
index a278f464d..a278f464d 100644
--- a/content/en/showcase/small-multiples/featured-small-multiples.png
+++ b/docs/content/en/showcase/small-multiples/featured-small-multiples.png
Binary files differ
diff --git a/content/en/showcase/small-multiples/index.md b/docs/content/en/showcase/small-multiples/index.md
index e2b80ea9a..e2b80ea9a 100644
--- a/content/en/showcase/small-multiples/index.md
+++ b/docs/content/en/showcase/small-multiples/index.md
diff --git a/content/en/showcase/template/bio.md b/docs/content/en/showcase/template/bio.md
index 597163340..597163340 100644
--- a/content/en/showcase/template/bio.md
+++ b/docs/content/en/showcase/template/bio.md
diff --git a/content/en/showcase/template/featured-template.png b/docs/content/en/showcase/template/featured-template.png
index 4f390132e..4f390132e 100644
--- a/content/en/showcase/template/featured-template.png
+++ b/docs/content/en/showcase/template/featured-template.png
Binary files differ
diff --git a/content/en/showcase/template/index.md b/docs/content/en/showcase/template/index.md
index 06e4a6548..06e4a6548 100644
--- a/content/en/showcase/template/index.md
+++ b/docs/content/en/showcase/template/index.md
diff --git a/content/en/showcase/tomango/bio.md b/docs/content/en/showcase/tomango/bio.md
index 052bd93cd..052bd93cd 100644
--- a/content/en/showcase/tomango/bio.md
+++ b/docs/content/en/showcase/tomango/bio.md
diff --git a/content/en/showcase/tomango/featured.png b/docs/content/en/showcase/tomango/featured.png
index d4b037e0f..d4b037e0f 100644
--- a/content/en/showcase/tomango/featured.png
+++ b/docs/content/en/showcase/tomango/featured.png
Binary files differ
diff --git a/content/en/showcase/tomango/index.md b/docs/content/en/showcase/tomango/index.md
index 6dc1a5c1f..6dc1a5c1f 100644
--- a/content/en/showcase/tomango/index.md
+++ b/docs/content/en/showcase/tomango/index.md
diff --git a/content/en/templates/404.md b/docs/content/en/templates/404.md
index 87bbe1d36..87bbe1d36 100644
--- a/content/en/templates/404.md
+++ b/docs/content/en/templates/404.md
diff --git a/content/en/templates/_index.md b/docs/content/en/templates/_index.md
index 18ae40eac..18ae40eac 100644
--- a/content/en/templates/_index.md
+++ b/docs/content/en/templates/_index.md
diff --git a/content/en/templates/alternatives.md b/docs/content/en/templates/alternatives.md
index c5fde5b51..c5fde5b51 100644
--- a/content/en/templates/alternatives.md
+++ b/docs/content/en/templates/alternatives.md
diff --git a/content/en/templates/base.md b/docs/content/en/templates/base.md
index f2648bd52..f2648bd52 100644
--- a/content/en/templates/base.md
+++ b/docs/content/en/templates/base.md
diff --git a/content/en/templates/data-templates.md b/docs/content/en/templates/data-templates.md
index bd1ed6d76..bd1ed6d76 100644
--- a/content/en/templates/data-templates.md
+++ b/docs/content/en/templates/data-templates.md
diff --git a/content/en/templates/files.md b/docs/content/en/templates/files.md
index c2de11292..c2de11292 100644
--- a/content/en/templates/files.md
+++ b/docs/content/en/templates/files.md
diff --git a/content/en/templates/homepage.md b/docs/content/en/templates/homepage.md
index b6ce87f8e..b6ce87f8e 100644
--- a/content/en/templates/homepage.md
+++ b/docs/content/en/templates/homepage.md
diff --git a/content/en/templates/internal.md b/docs/content/en/templates/internal.md
index ace21f548..ace21f548 100644
--- a/content/en/templates/internal.md
+++ b/docs/content/en/templates/internal.md
diff --git a/content/en/templates/introduction.md b/docs/content/en/templates/introduction.md
index ba66afa86..ba66afa86 100644
--- a/content/en/templates/introduction.md
+++ b/docs/content/en/templates/introduction.md
diff --git a/content/en/templates/lists.md b/docs/content/en/templates/lists.md
index 97dc0f40d..97dc0f40d 100644
--- a/content/en/templates/lists.md
+++ b/docs/content/en/templates/lists.md
diff --git a/content/en/templates/lookup-order.md b/docs/content/en/templates/lookup-order.md
index e021c8112..e021c8112 100644
--- a/content/en/templates/lookup-order.md
+++ b/docs/content/en/templates/lookup-order.md
diff --git a/content/en/templates/menu-templates.md b/docs/content/en/templates/menu-templates.md
index 8893d7b5a..8893d7b5a 100644
--- a/content/en/templates/menu-templates.md
+++ b/docs/content/en/templates/menu-templates.md
diff --git a/content/en/templates/ordering-and-grouping.md b/docs/content/en/templates/ordering-and-grouping.md
index f1634c7d8..f1634c7d8 100644
--- a/content/en/templates/ordering-and-grouping.md
+++ b/docs/content/en/templates/ordering-and-grouping.md
diff --git a/content/en/templates/output-formats.md b/docs/content/en/templates/output-formats.md
index df48d55dc..df48d55dc 100644
--- a/content/en/templates/output-formats.md
+++ b/docs/content/en/templates/output-formats.md
diff --git a/content/en/templates/pagination.md b/docs/content/en/templates/pagination.md
index d89735f7c..d89735f7c 100644
--- a/content/en/templates/pagination.md
+++ b/docs/content/en/templates/pagination.md
diff --git a/content/en/templates/partials.md b/docs/content/en/templates/partials.md
index 56a676d7e..56a676d7e 100644
--- a/content/en/templates/partials.md
+++ b/docs/content/en/templates/partials.md
diff --git a/content/en/templates/render-hooks.md b/docs/content/en/templates/render-hooks.md
index 6987819ca..6987819ca 100644
--- a/content/en/templates/render-hooks.md
+++ b/docs/content/en/templates/render-hooks.md
diff --git a/content/en/templates/robots.md b/docs/content/en/templates/robots.md
index 52a77314b..52a77314b 100644
--- a/content/en/templates/robots.md
+++ b/docs/content/en/templates/robots.md
diff --git a/content/en/templates/rss.md b/docs/content/en/templates/rss.md
index 009ba241a..009ba241a 100644
--- a/content/en/templates/rss.md
+++ b/docs/content/en/templates/rss.md
diff --git a/content/en/templates/section-templates.md b/docs/content/en/templates/section-templates.md
index a40e2a2d7..a40e2a2d7 100644
--- a/content/en/templates/section-templates.md
+++ b/docs/content/en/templates/section-templates.md
diff --git a/content/en/templates/shortcode-templates.md b/docs/content/en/templates/shortcode-templates.md
index 487037bf0..487037bf0 100644
--- a/content/en/templates/shortcode-templates.md
+++ b/docs/content/en/templates/shortcode-templates.md
diff --git a/content/en/templates/single-page-templates.md b/docs/content/en/templates/single-page-templates.md
index b6d8241c2..b6d8241c2 100644
--- a/content/en/templates/single-page-templates.md
+++ b/docs/content/en/templates/single-page-templates.md
diff --git a/content/en/templates/sitemap-template.md b/docs/content/en/templates/sitemap-template.md
index 9fc817020..9fc817020 100644
--- a/content/en/templates/sitemap-template.md
+++ b/docs/content/en/templates/sitemap-template.md
diff --git a/content/en/templates/taxonomy-templates.md b/docs/content/en/templates/taxonomy-templates.md
index 27fadb85b..27fadb85b 100644
--- a/content/en/templates/taxonomy-templates.md
+++ b/docs/content/en/templates/taxonomy-templates.md
diff --git a/content/en/templates/template-debugging.md b/docs/content/en/templates/template-debugging.md
index 0a5150a8a..0a5150a8a 100644
--- a/content/en/templates/template-debugging.md
+++ b/docs/content/en/templates/template-debugging.md
diff --git a/content/en/templates/views.md b/docs/content/en/templates/views.md
index 87f66afe0..87f66afe0 100644
--- a/content/en/templates/views.md
+++ b/docs/content/en/templates/views.md
diff --git a/content/en/tools/_index.md b/docs/content/en/tools/_index.md
index a186ffb06..a186ffb06 100644
--- a/content/en/tools/_index.md
+++ b/docs/content/en/tools/_index.md
diff --git a/content/en/tools/editors.md b/docs/content/en/tools/editors.md
index 8da5fc2d7..8da5fc2d7 100644
--- a/content/en/tools/editors.md
+++ b/docs/content/en/tools/editors.md
diff --git a/content/en/tools/frontends.md b/docs/content/en/tools/frontends.md
index 1d1d7fae6..1d1d7fae6 100644
--- a/content/en/tools/frontends.md
+++ b/docs/content/en/tools/frontends.md
diff --git a/content/en/tools/migrations.md b/docs/content/en/tools/migrations.md
index 7009164eb..7009164eb 100644
--- a/content/en/tools/migrations.md
+++ b/docs/content/en/tools/migrations.md
diff --git a/content/en/tools/other.md b/docs/content/en/tools/other.md
index 3afd7b96b..3afd7b96b 100644
--- a/content/en/tools/other.md
+++ b/docs/content/en/tools/other.md
diff --git a/content/en/tools/search.md b/docs/content/en/tools/search.md
index dec87d72c..dec87d72c 100644
--- a/content/en/tools/search.md
+++ b/docs/content/en/tools/search.md
diff --git a/content/en/tools/starter-kits.md b/docs/content/en/tools/starter-kits.md
index 9e10a813e..9e10a813e 100644
--- a/content/en/tools/starter-kits.md
+++ b/docs/content/en/tools/starter-kits.md
diff --git a/content/en/troubleshooting/_index.md b/docs/content/en/troubleshooting/_index.md
index 3170dc7d8..3170dc7d8 100644
--- a/content/en/troubleshooting/_index.md
+++ b/docs/content/en/troubleshooting/_index.md
diff --git a/content/en/troubleshooting/build-performance.md b/docs/content/en/troubleshooting/build-performance.md
index e0700f381..e0700f381 100644
--- a/content/en/troubleshooting/build-performance.md
+++ b/docs/content/en/troubleshooting/build-performance.md
diff --git a/content/en/troubleshooting/faq.md b/docs/content/en/troubleshooting/faq.md
index 67d9a3998..67d9a3998 100644
--- a/content/en/troubleshooting/faq.md
+++ b/docs/content/en/troubleshooting/faq.md
diff --git a/content/en/variables/_index.md b/docs/content/en/variables/_index.md
index 382ee25d4..382ee25d4 100644
--- a/content/en/variables/_index.md
+++ b/docs/content/en/variables/_index.md
diff --git a/content/en/variables/files.md b/docs/content/en/variables/files.md
index d8f3daece..d8f3daece 100644
--- a/content/en/variables/files.md
+++ b/docs/content/en/variables/files.md
diff --git a/content/en/variables/git.md b/docs/content/en/variables/git.md
index 58a285fd9..58a285fd9 100644
--- a/content/en/variables/git.md
+++ b/docs/content/en/variables/git.md
diff --git a/content/en/variables/menus.md b/docs/content/en/variables/menus.md
index 9b8fe4d49..9b8fe4d49 100644
--- a/content/en/variables/menus.md
+++ b/docs/content/en/variables/menus.md
diff --git a/content/en/variables/page.md b/docs/content/en/variables/page.md
index df7e3cb71..df7e3cb71 100644
--- a/content/en/variables/page.md
+++ b/docs/content/en/variables/page.md
diff --git a/content/en/variables/pages.md b/docs/content/en/variables/pages.md
index 79d39a158..79d39a158 100644
--- a/content/en/variables/pages.md
+++ b/docs/content/en/variables/pages.md
diff --git a/content/en/variables/shortcodes.md b/docs/content/en/variables/shortcodes.md
index 37ae30629..37ae30629 100644
--- a/content/en/variables/shortcodes.md
+++ b/docs/content/en/variables/shortcodes.md
diff --git a/content/en/variables/site.md b/docs/content/en/variables/site.md
index bdfeb6527..bdfeb6527 100644
--- a/content/en/variables/site.md
+++ b/docs/content/en/variables/site.md
diff --git a/content/en/variables/sitemap.md b/docs/content/en/variables/sitemap.md
index dd926f2b3..dd926f2b3 100644
--- a/content/en/variables/sitemap.md
+++ b/docs/content/en/variables/sitemap.md
diff --git a/content/en/variables/taxonomy.md b/docs/content/en/variables/taxonomy.md
index 5bcdffee5..5bcdffee5 100644
--- a/content/en/variables/taxonomy.md
+++ b/docs/content/en/variables/taxonomy.md
diff --git a/content/zh/_index.md b/docs/content/zh/_index.md
index d54cb3436..d54cb3436 100644
--- a/content/zh/_index.md
+++ b/docs/content/zh/_index.md
diff --git a/content/zh/about/_index.md b/docs/content/zh/about/_index.md
index bf19807d9..bf19807d9 100644
--- a/content/zh/about/_index.md
+++ b/docs/content/zh/about/_index.md
diff --git a/content/zh/content-management/_index.md b/docs/content/zh/content-management/_index.md
index 8c088dc57..8c088dc57 100644
--- a/content/zh/content-management/_index.md
+++ b/docs/content/zh/content-management/_index.md
diff --git a/content/zh/documentation.md b/docs/content/zh/documentation.md
index 1575fd375..1575fd375 100644
--- a/content/zh/documentation.md
+++ b/docs/content/zh/documentation.md
diff --git a/content/zh/news/_index.md b/docs/content/zh/news/_index.md
index 286d32e19..286d32e19 100644
--- a/content/zh/news/_index.md
+++ b/docs/content/zh/news/_index.md
diff --git a/content/zh/templates/_index.md b/docs/content/zh/templates/_index.md
index 3cd8df436..3cd8df436 100644
--- a/content/zh/templates/_index.md
+++ b/docs/content/zh/templates/_index.md
diff --git a/content/zh/templates/base.md b/docs/content/zh/templates/base.md
index 3a1aa479c..3a1aa479c 100644
--- a/content/zh/templates/base.md
+++ b/docs/content/zh/templates/base.md
diff --git a/content/zh/tools/_index.md b/docs/content/zh/tools/_index.md
index a3de7dc76..a3de7dc76 100644
--- a/content/zh/tools/_index.md
+++ b/docs/content/zh/tools/_index.md
diff --git a/content/zh/tools/search.md b/docs/content/zh/tools/search.md
index 26b25ea2a..26b25ea2a 100644
--- a/content/zh/tools/search.md
+++ b/docs/content/zh/tools/search.md
diff --git a/data/articles.toml b/docs/data/articles.toml
index eac45d20a..eac45d20a 100644
--- a/data/articles.toml
+++ b/docs/data/articles.toml
diff --git a/data/docs.json b/docs/data/docs.json
index fe4efeb22..fe4efeb22 100644
--- a/data/docs.json
+++ b/docs/data/docs.json
diff --git a/data/homepagetweets.toml b/docs/data/homepagetweets.toml
index cde241f01..cde241f01 100644
--- a/data/homepagetweets.toml
+++ b/docs/data/homepagetweets.toml
diff --git a/data/titles.toml b/docs/data/titles.toml
index 2348c8561..2348c8561 100644
--- a/data/titles.toml
+++ b/docs/data/titles.toml
diff --git a/docs/go.mod b/docs/go.mod
new file mode 100644
index 000000000..75ec6290f
--- /dev/null
+++ b/docs/go.mod
@@ -0,0 +1,5 @@
+module github.com/gohugoio/hugoDocs
+
+go 1.16
+
+require github.com/gohugoio/gohugoioTheme v0.0.0-20220228085601-7cfbda06d135 // indirect
diff --git a/docs/go.sum b/docs/go.sum
new file mode 100644
index 000000000..dad4ba67c
--- /dev/null
+++ b/docs/go.sum
@@ -0,0 +1,35 @@
+github.com/gohugoio/gohugoioTheme v0.0.0-20190808163145-07b3c0f73b02/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20191014144142-1f3a01deed7b h1:PWNjl46fvtz54PKO0BdiXOF6/4L/uCP0F3gtcCxGrJs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20191014144142-1f3a01deed7b/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20191021162625-2e7250ca437d h1:D3DcaYkuJbotdWNNAQpQl37txX4HQ6R5uMHoxVmTw0w=
+github.com/gohugoio/gohugoioTheme v0.0.0-20191021162625-2e7250ca437d/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200123151337-9475fd449324 h1:UZwHDYtGY0uOKIvcm2LWd+xfFxD3X5L222LIJdI5RE4=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200123151337-9475fd449324/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200123204146-589b4c309025 h1:ScYFARz+bHX1rEr1donVknhRdxGY/cwqK1hHvWEfrlc=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200123204146-589b4c309025/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200123205007-5d6620a0db26 h1:acXfduibbWxji9tW0WkLHbjcXFsnd5uIwXe0WfwOazg=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200123205007-5d6620a0db26/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200128164921-1d0bc5482051 h1:cS14MnUGS6xwWYfPNshimm8HdMCZiYBxWkCD0VnvgVw=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200128164921-1d0bc5482051/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200327225449-368f4cbef8d7 h1:cZ+ahAjSetbFv3aDJ9ipDbKyqaVlmkbSZ5cULgBTh+w=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200327225449-368f4cbef8d7/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200327231942-7f80b3d02bfa h1:kG+O/wT9UXomzp5eQiUuFVZ0l7YylAW6EVPLyjMxi/c=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200327231942-7f80b3d02bfa/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200328100657-2bfd5f8c6aee h1:PJZhCwnuVLyafDWNPSHk9iJvk6gEIvPRnycy7Pq3peA=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200328100657-2bfd5f8c6aee/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200518164958-62cbad03c40f h1:Ge3JACszSUyJW2Az9cJzWdo4PUqdijJA1RxoQSVMBSI=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200518164958-62cbad03c40f/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200518165806-0095b7b902a7 h1:Sy0hlWyZmFtdSY0Cobvw1ZYm3G1aR5+4DuFNRbMkh48=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200518165806-0095b7b902a7/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200711112515-b0dfe471654f h1:BWJyycs4HD7tUbaU8RIGeMay84bIBWRVVLE3yajPas4=
+github.com/gohugoio/gohugoioTheme v0.0.0-20200711112515-b0dfe471654f/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20210301124928-2c15837dfec3 h1:ShqzOFeeg54FPSuS6q8HSeTVgj2xNZRe/YS0jNbi21g=
+github.com/gohugoio/gohugoioTheme v0.0.0-20210301124928-2c15837dfec3/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20210409065807-6383d8cbaf65 h1:EJzierSWKqwsrUXU6MaFe0J97c0e5pzl5dBNRRrV2Nc=
+github.com/gohugoio/gohugoioTheme v0.0.0-20210409065807-6383d8cbaf65/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20210409071416-c88da48134b7 h1:uRCgPslaBgLYy4ANXBoPbBQVM8aNiHoxIZTKUXpkuUA=
+github.com/gohugoio/gohugoioTheme v0.0.0-20210409071416-c88da48134b7/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20211211125852-b85e21c1f3d6 h1:lAgdWrn8VEg0PrNCPX4DflCg2msDKpSYV6E8RTNV3N0=
+github.com/gohugoio/gohugoioTheme v0.0.0-20211211125852-b85e21c1f3d6/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+github.com/gohugoio/gohugoioTheme v0.0.0-20220228085601-7cfbda06d135 h1:6hVzfE9YhSsZP5t6jWjvVp7MoPm7Y5fEhH/ls4ahhKk=
+github.com/gohugoio/gohugoioTheme v0.0.0-20220228085601-7cfbda06d135/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
diff --git a/hugo_stats.json b/docs/hugo_stats.json
index ca27d8991..ca27d8991 100644
--- a/hugo_stats.json
+++ b/docs/hugo_stats.json
diff --git a/layouts/_default/_markup/render-codeblock-mermaid.html b/docs/layouts/_default/_markup/render-codeblock-mermaid.html
index 59641551c..59641551c 100644
--- a/layouts/_default/_markup/render-codeblock-mermaid.html
+++ b/docs/layouts/_default/_markup/render-codeblock-mermaid.html
diff --git a/layouts/index.rss.xml b/docs/layouts/index.rss.xml
index 1d3498a1e..1d3498a1e 100644
--- a/layouts/index.rss.xml
+++ b/docs/layouts/index.rss.xml
diff --git a/layouts/maintenance/list.html b/docs/layouts/maintenance/list.html
index 2035c058b..2035c058b 100644
--- a/layouts/maintenance/list.html
+++ b/docs/layouts/maintenance/list.html
diff --git a/layouts/partials/hooks/before-body-end.html b/docs/layouts/partials/hooks/before-body-end.html
index fb7ae20ba..fb7ae20ba 100644
--- a/layouts/partials/hooks/before-body-end.html
+++ b/docs/layouts/partials/hooks/before-body-end.html
diff --git a/layouts/partials/maintenance-pages-table.html b/docs/layouts/partials/maintenance-pages-table.html
index a2429a335..a2429a335 100644
--- a/layouts/partials/maintenance-pages-table.html
+++ b/docs/layouts/partials/maintenance-pages-table.html
diff --git a/layouts/shortcodes/asciicast.html b/docs/layouts/shortcodes/asciicast.html
index ee23adc2d..ee23adc2d 100644
--- a/layouts/shortcodes/asciicast.html
+++ b/docs/layouts/shortcodes/asciicast.html
diff --git a/layouts/shortcodes/chroma-lexers.html b/docs/layouts/shortcodes/chroma-lexers.html
index 2e10c3dee..2e10c3dee 100644
--- a/layouts/shortcodes/chroma-lexers.html
+++ b/docs/layouts/shortcodes/chroma-lexers.html
diff --git a/layouts/shortcodes/code-toggle.html b/docs/layouts/shortcodes/code-toggle.html
index 113d85a1f..113d85a1f 100644
--- a/layouts/shortcodes/code-toggle.html
+++ b/docs/layouts/shortcodes/code-toggle.html
diff --git a/layouts/shortcodes/code.html b/docs/layouts/shortcodes/code.html
index 0ee25149d..0ee25149d 100644
--- a/layouts/shortcodes/code.html
+++ b/docs/layouts/shortcodes/code.html
diff --git a/layouts/shortcodes/content-tree.html b/docs/layouts/shortcodes/content-tree.html
index 0cb527cb5..0cb527cb5 100644
--- a/layouts/shortcodes/content-tree.html
+++ b/docs/layouts/shortcodes/content-tree.html
diff --git a/layouts/shortcodes/datatable-filtered.html b/docs/layouts/shortcodes/datatable-filtered.html
index ff3f299bd..ff3f299bd 100644
--- a/layouts/shortcodes/datatable-filtered.html
+++ b/docs/layouts/shortcodes/datatable-filtered.html
diff --git a/layouts/shortcodes/datatable.html b/docs/layouts/shortcodes/datatable.html
index 4e2814f5a..4e2814f5a 100644
--- a/layouts/shortcodes/datatable.html
+++ b/docs/layouts/shortcodes/datatable.html
diff --git a/layouts/shortcodes/directoryindex.html b/docs/layouts/shortcodes/directoryindex.html
index 37e7d3ad1..37e7d3ad1 100644
--- a/layouts/shortcodes/directoryindex.html
+++ b/docs/layouts/shortcodes/directoryindex.html
diff --git a/layouts/shortcodes/docfile.html b/docs/layouts/shortcodes/docfile.html
index 2f982aae8..2f982aae8 100644
--- a/layouts/shortcodes/docfile.html
+++ b/docs/layouts/shortcodes/docfile.html
diff --git a/layouts/shortcodes/exfile.html b/docs/layouts/shortcodes/exfile.html
index 226782957..226782957 100644
--- a/layouts/shortcodes/exfile.html
+++ b/docs/layouts/shortcodes/exfile.html
diff --git a/layouts/shortcodes/exfm.html b/docs/layouts/shortcodes/exfm.html
index c0429bbe1..c0429bbe1 100644
--- a/layouts/shortcodes/exfm.html
+++ b/docs/layouts/shortcodes/exfm.html
diff --git a/layouts/shortcodes/funcsig.html b/docs/layouts/shortcodes/funcsig.html
index 1709c60b0..1709c60b0 100644
--- a/layouts/shortcodes/funcsig.html
+++ b/docs/layouts/shortcodes/funcsig.html
diff --git a/layouts/shortcodes/getcontent.html b/docs/layouts/shortcodes/getcontent.html
index 6ae35dd6d..6ae35dd6d 100644
--- a/layouts/shortcodes/getcontent.html
+++ b/docs/layouts/shortcodes/getcontent.html
diff --git a/layouts/shortcodes/gh.html b/docs/layouts/shortcodes/gh.html
index 981f4b838..981f4b838 100644
--- a/layouts/shortcodes/gh.html
+++ b/docs/layouts/shortcodes/gh.html
diff --git a/layouts/shortcodes/ghrepo.html b/docs/layouts/shortcodes/ghrepo.html
index e9df40d6a..e9df40d6a 100644
--- a/layouts/shortcodes/ghrepo.html
+++ b/docs/layouts/shortcodes/ghrepo.html
diff --git a/layouts/shortcodes/gomodules-info.html b/docs/layouts/shortcodes/gomodules-info.html
index b56758ac3..b56758ac3 100644
--- a/layouts/shortcodes/gomodules-info.html
+++ b/docs/layouts/shortcodes/gomodules-info.html
diff --git a/layouts/shortcodes/imgproc.html b/docs/layouts/shortcodes/imgproc.html
index f792702ce..f792702ce 100644
--- a/layouts/shortcodes/imgproc.html
+++ b/docs/layouts/shortcodes/imgproc.html
diff --git a/layouts/shortcodes/module-mounts-note.html b/docs/layouts/shortcodes/module-mounts-note.html
index 654aafef4..654aafef4 100644
--- a/layouts/shortcodes/module-mounts-note.html
+++ b/docs/layouts/shortcodes/module-mounts-note.html
diff --git a/layouts/shortcodes/new-in.html b/docs/layouts/shortcodes/new-in.html
index e81fda3c5..e81fda3c5 100644
--- a/layouts/shortcodes/new-in.html
+++ b/docs/layouts/shortcodes/new-in.html
diff --git a/layouts/shortcodes/nohighlight.html b/docs/layouts/shortcodes/nohighlight.html
index 238234f17..238234f17 100644
--- a/layouts/shortcodes/nohighlight.html
+++ b/docs/layouts/shortcodes/nohighlight.html
diff --git a/layouts/shortcodes/note.html b/docs/layouts/shortcodes/note.html
index 24d2cd0b2..24d2cd0b2 100644
--- a/layouts/shortcodes/note.html
+++ b/docs/layouts/shortcodes/note.html
diff --git a/layouts/shortcodes/output.html b/docs/layouts/shortcodes/output.html
index e51d284bb..e51d284bb 100644
--- a/layouts/shortcodes/output.html
+++ b/docs/layouts/shortcodes/output.html
diff --git a/layouts/shortcodes/page-kinds.html b/docs/layouts/shortcodes/page-kinds.html
index 968a7a5fb..968a7a5fb 100644
--- a/layouts/shortcodes/page-kinds.html
+++ b/docs/layouts/shortcodes/page-kinds.html
diff --git a/layouts/shortcodes/readfile.html b/docs/layouts/shortcodes/readfile.html
index 36400ac55..36400ac55 100644
--- a/layouts/shortcodes/readfile.html
+++ b/docs/layouts/shortcodes/readfile.html
diff --git a/layouts/shortcodes/tip.html b/docs/layouts/shortcodes/tip.html
index 139e3376b..139e3376b 100644
--- a/layouts/shortcodes/tip.html
+++ b/docs/layouts/shortcodes/tip.html
diff --git a/layouts/shortcodes/todo.html b/docs/layouts/shortcodes/todo.html
index 50a099267..50a099267 100644
--- a/layouts/shortcodes/todo.html
+++ b/docs/layouts/shortcodes/todo.html
diff --git a/layouts/shortcodes/warning.html b/docs/layouts/shortcodes/warning.html
index c9147be64..c9147be64 100644
--- a/layouts/shortcodes/warning.html
+++ b/docs/layouts/shortcodes/warning.html
diff --git a/layouts/shortcodes/yt.html b/docs/layouts/shortcodes/yt.html
index 6915cec5f..6915cec5f 100644
--- a/layouts/shortcodes/yt.html
+++ b/docs/layouts/shortcodes/yt.html
diff --git a/layouts/template-func/page.html b/docs/layouts/template-func/page.html
index 8b5f0da85..8b5f0da85 100644
--- a/layouts/template-func/page.html
+++ b/docs/layouts/template-func/page.html
diff --git a/netlify.toml b/docs/netlify.toml
index ade4b9a86..ade4b9a86 100644
--- a/netlify.toml
+++ b/docs/netlify.toml
diff --git a/pull-theme.sh b/docs/pull-theme.sh
index 828b6cfb4..828b6cfb4 100755
--- a/pull-theme.sh
+++ b/docs/pull-theme.sh
diff --git a/resources/.gitattributes b/docs/resources/.gitattributes
index a205a8e9d..a205a8e9d 100644
--- a/resources/.gitattributes
+++ b/docs/resources/.gitattributes
diff --git a/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.content b/docs/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.content
index 42d7140c5..42d7140c5 100644
--- a/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.content
+++ b/docs/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.content
diff --git a/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.json b/docs/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.json
index 91f089a79..91f089a79 100644
--- a/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.json
+++ b/docs/resources/_gen/assets/css/output/css/app.css_d11fe7b62c27961c87ecd0f2490357b9.json
diff --git a/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.content b/docs/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.content
index 3097ec5a6..3097ec5a6 100644
--- a/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.content
+++ b/docs/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.content
diff --git a/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.json b/docs/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.json
index 06787c13f..06787c13f 100644
--- a/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.json
+++ b/docs/resources/_gen/assets/js/output/js/app.js_8848f55d07695b7ff7188138f23d69e3.json
diff --git a/src/css/_chroma.css b/docs/src/css/_chroma.css
index 1ad06604b..1ad06604b 100644
--- a/src/css/_chroma.css
+++ b/docs/src/css/_chroma.css
diff --git a/src/package-lock.json b/docs/src/package-lock.json
index 48e341a09..48e341a09 100644
--- a/src/package-lock.json
+++ b/docs/src/package-lock.json
diff --git a/static/apple-touch-icon.png b/docs/static/apple-touch-icon.png
index 50e23ce1d..50e23ce1d 100644
--- a/static/apple-touch-icon.png
+++ b/docs/static/apple-touch-icon.png
Binary files differ
diff --git a/static/css/hugofont.css b/docs/static/css/hugofont.css
index 09d6ce070..09d6ce070 100644
--- a/static/css/hugofont.css
+++ b/docs/static/css/hugofont.css
diff --git a/static/css/style.css b/docs/static/css/style.css
index 312c247c9..312c247c9 100644
--- a/static/css/style.css
+++ b/docs/static/css/style.css
diff --git a/static/favicon.ico b/docs/static/favicon.ico
index 36693330b..36693330b 100644
--- a/static/favicon.ico
+++ b/docs/static/favicon.ico
Binary files differ
diff --git a/static/fonts/hugo.eot b/docs/static/fonts/hugo.eot
index b92f00f93..b92f00f93 100644
--- a/static/fonts/hugo.eot
+++ b/docs/static/fonts/hugo.eot
Binary files differ
diff --git a/static/fonts/hugo.svg b/docs/static/fonts/hugo.svg
index 7913f7c1f..7913f7c1f 100644
--- a/static/fonts/hugo.svg
+++ b/docs/static/fonts/hugo.svg
diff --git a/static/fonts/hugo.ttf b/docs/static/fonts/hugo.ttf
index 962914d33..962914d33 100644
--- a/static/fonts/hugo.ttf
+++ b/docs/static/fonts/hugo.ttf
Binary files differ
diff --git a/static/fonts/hugo.woff b/docs/static/fonts/hugo.woff
index 4693fbe7f..4693fbe7f 100644
--- a/static/fonts/hugo.woff
+++ b/docs/static/fonts/hugo.woff
Binary files differ
diff --git a/static/images/blog/hugo-26-poster.png b/docs/static/images/blog/hugo-26-poster.png
index 827f1f7bb..827f1f7bb 100644
--- a/static/images/blog/hugo-26-poster.png
+++ b/docs/static/images/blog/hugo-26-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-27-poster.png b/docs/static/images/blog/hugo-27-poster.png
index 69efa36bc..69efa36bc 100644
--- a/static/images/blog/hugo-27-poster.png
+++ b/docs/static/images/blog/hugo-27-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-28-poster.png b/docs/static/images/blog/hugo-28-poster.png
index ae3d6ac16..ae3d6ac16 100644
--- a/static/images/blog/hugo-28-poster.png
+++ b/docs/static/images/blog/hugo-28-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-29-poster.png b/docs/static/images/blog/hugo-29-poster.png
index dbe2d434f..dbe2d434f 100644
--- a/static/images/blog/hugo-29-poster.png
+++ b/docs/static/images/blog/hugo-29-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-30-poster.png b/docs/static/images/blog/hugo-30-poster.png
index 214369e89..214369e89 100644
--- a/static/images/blog/hugo-30-poster.png
+++ b/docs/static/images/blog/hugo-30-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-31-poster.png b/docs/static/images/blog/hugo-31-poster.png
index e11e53aa7..e11e53aa7 100644
--- a/static/images/blog/hugo-31-poster.png
+++ b/docs/static/images/blog/hugo-31-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-32-poster.png b/docs/static/images/blog/hugo-32-poster.png
index f915247ad..f915247ad 100644
--- a/static/images/blog/hugo-32-poster.png
+++ b/docs/static/images/blog/hugo-32-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-bug-poster.png b/docs/static/images/blog/hugo-bug-poster.png
index cd236682d..cd236682d 100644
--- a/static/images/blog/hugo-bug-poster.png
+++ b/docs/static/images/blog/hugo-bug-poster.png
Binary files differ
diff --git a/static/images/blog/hugo-http2-push.png b/docs/static/images/blog/hugo-http2-push.png
index 1ddfd4653..1ddfd4653 100644
--- a/static/images/blog/hugo-http2-push.png
+++ b/docs/static/images/blog/hugo-http2-push.png
Binary files differ
diff --git a/static/images/blog/sunset.jpg b/docs/static/images/blog/sunset.jpg
index 4dbcc0836..4dbcc0836 100644
--- a/static/images/blog/sunset.jpg
+++ b/docs/static/images/blog/sunset.jpg
Binary files differ
diff --git a/static/images/contribute/development/accept-cla.png b/docs/static/images/contribute/development/accept-cla.png
index 272de935e..272de935e 100644
--- a/static/images/contribute/development/accept-cla.png
+++ b/docs/static/images/contribute/development/accept-cla.png
Binary files differ
diff --git a/static/images/contribute/development/copy-remote-url.png b/docs/static/images/contribute/development/copy-remote-url.png
index a97a8f48f..a97a8f48f 100644
--- a/static/images/contribute/development/copy-remote-url.png
+++ b/docs/static/images/contribute/development/copy-remote-url.png
Binary files differ
diff --git a/static/images/contribute/development/forking-a-repository.png b/docs/static/images/contribute/development/forking-a-repository.png
index b2566b841..b2566b841 100644
--- a/static/images/contribute/development/forking-a-repository.png
+++ b/docs/static/images/contribute/development/forking-a-repository.png
Binary files differ
diff --git a/static/images/contribute/development/open-pull-request.png b/docs/static/images/contribute/development/open-pull-request.png
index 3f8328964..3f8328964 100644
--- a/static/images/contribute/development/open-pull-request.png
+++ b/docs/static/images/contribute/development/open-pull-request.png
Binary files differ
diff --git a/static/images/gohugoio-card-1.png b/docs/static/images/gohugoio-card-1.png
index 09953aed9..09953aed9 100644
--- a/static/images/gohugoio-card-1.png
+++ b/docs/static/images/gohugoio-card-1.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-server.png b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-server.png
index cc909af2c..cc909af2c 100644
--- a/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-server.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-server.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-with-nanobox.png b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-with-nanobox.png
index 2cbc45e7e..2cbc45e7e 100644
--- a/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-with-nanobox.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/hugo-with-nanobox.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-deploy-dry-run.png b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-deploy-dry-run.png
index 4dd7ebc9d..4dd7ebc9d 100644
--- a/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-deploy-dry-run.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-deploy-dry-run.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-run.png b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-run.png
index 29a31e2c2..29a31e2c2 100644
--- a/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-run.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-nanobox/nanobox-run.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/adding-a-github-pages-step.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/adding-a-github-pages-step.png
index 19ec945cd..19ec945cd 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/adding-a-github-pages-step.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/adding-a-github-pages-step.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/adding-the-project-to-github.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/adding-the-project-to-github.png
index 785fc1290..785fc1290 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/adding-the-project-to-github.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/adding-the-project-to-github.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/and-we-ve-got-an-app.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/and-we-ve-got-an-app.png
index 98eecb299..98eecb299 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/and-we-ve-got-an-app.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/and-we-ve-got-an-app.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/configure-the-deploy-step.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/configure-the-deploy-step.png
index 26ec22370..26ec22370 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/configure-the-deploy-step.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/configure-the-deploy-step.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/creating-a-basic-hugo-site.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/creating-a-basic-hugo-site.png
index b9e53d0bc..b9e53d0bc 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/creating-a-basic-hugo-site.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/creating-a-basic-hugo-site.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/public-or-not.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/public-or-not.png
index 439b224e8..439b224e8 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/public-or-not.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/public-or-not.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/using-hugo-build.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/using-hugo-build.png
index 754eab984..754eab984 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/using-hugo-build.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/using-hugo-build.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-access.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-access.png
index 170488456..170488456 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-access.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-access.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-account-settings.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-account-settings.png
index d93505af7..d93505af7 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-account-settings.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-account-settings.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-add-app.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-add-app.png
index dc854b4da..dc854b4da 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-add-app.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-add-app.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-git-connections.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-git-connections.png
index 2359fb3b3..2359fb3b3 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-git-connections.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-git-connections.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-search.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-search.png
index 40abf82ad..40abf82ad 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-search.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-search.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-owner.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-owner.png
index d44a70de3..d44a70de3 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-owner.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-owner.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-repository.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-repository.png
index 45c395f8d..45c395f8d 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-repository.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-select-repository.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up-page.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up-page.png
index 41b82036f..41b82036f 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up-page.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up-page.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up.png
index c2de857a3..c2de857a3 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/wercker-sign-up.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/deployment-with-wercker/werckeryml.png b/docs/static/images/hosting-and-deployment/deployment-with-wercker/werckeryml.png
index ee6054dda..ee6054dda 100644
--- a/static/images/hosting-and-deployment/deployment-with-wercker/werckeryml.png
+++ b/docs/static/images/hosting-and-deployment/deployment-with-wercker/werckeryml.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-build-settings.png b/docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-build-settings.png
index 1ec752428..1ec752428 100644
--- a/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-build-settings.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-build-settings.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-connect-repo.gif b/docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-connect-repo.gif
index 6c57cf3b2..6c57cf3b2 100644
--- a/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-connect-repo.gif
+++ b/docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-connect-repo.gif
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-gettingstarted.png b/docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-gettingstarted.png
index 3b17e2b01..3b17e2b01 100644
--- a/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-gettingstarted.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-aws-amplify/amplify-gettingstarted.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-blog-post.png b/docs/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-blog-post.png
index b78f6fd15..b78f6fd15 100644
--- a/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-blog-post.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-blog-post.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-create-repo.png b/docs/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-create-repo.png
index e97f13465..e97f13465 100644
--- a/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-create-repo.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-bitbucket/bitbucket-create-repo.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-keycdn/keycdn-pull-zone.png b/docs/static/images/hosting-and-deployment/hosting-on-keycdn/keycdn-pull-zone.png
index 7cde4a6a2..7cde4a6a2 100644
--- a/static/images/hosting-and-deployment/hosting-on-keycdn/keycdn-pull-zone.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-keycdn/keycdn-pull-zone.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-keycdn/secret-api-key.png b/docs/static/images/hosting-and-deployment/hosting-on-keycdn/secret-api-key.png
index ad99341d5..ad99341d5 100644
--- a/static/images/hosting-and-deployment/hosting-on-keycdn/secret-api-key.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-keycdn/secret-api-key.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-keycdn/secret-zone-id.png b/docs/static/images/hosting-and-deployment/hosting-on-keycdn/secret-zone-id.png
index 2e5cf5f41..2e5cf5f41 100644
--- a/static/images/hosting-and-deployment/hosting-on-keycdn/secret-zone-id.png
+++ b/docs/static/images/hosting-and-deployment/hosting-on-keycdn/secret-zone-id.png
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-add-new-site.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-add-new-site.jpg
index 17698d34a..17698d34a 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-add-new-site.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-add-new-site.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-authorize-added-permissions.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-authorize-added-permissions.jpg
index eaae924e4..eaae924e4 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-authorize-added-permissions.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-authorize-added-permissions.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-1.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-1.jpg
index 347477dd2..347477dd2 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-1.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-1.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-2.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-2.jpg
index 18bfd6fed..18bfd6fed 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-2.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-2.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-3.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-3.jpg
index 6f9b6477c..6f9b6477c 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-3.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-create-new-site-step-3.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploy-published.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploy-published.jpg
index ed5eaf3c8..ed5eaf3c8 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploy-published.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploy-published.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploying-site.gif b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploying-site.gif
index c1f27c236..c1f27c236 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploying-site.gif
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-deploying-site.gif
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-first-authorize.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-first-authorize.jpg
index 748122e89..748122e89 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-first-authorize.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-first-authorize.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-live-site.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-live-site.jpg
index 3edc49c43..3edc49c43 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-live-site.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-live-site.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-push-to-deploy.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-push-to-deploy.jpg
index f23626218..f23626218 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-push-to-deploy.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-push-to-deploy.jpg
Binary files differ
diff --git a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-signup.jpg b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-signup.jpg
index cd9a218b4..cd9a218b4 100644
--- a/static/images/hosting-and-deployment/hosting-on-netlify/netlify-signup.jpg
+++ b/docs/static/images/hosting-and-deployment/hosting-on-netlify/netlify-signup.jpg
Binary files differ
diff --git a/static/images/hugo-content-bundles.png b/docs/static/images/hugo-content-bundles.png
index 501e671e2..501e671e2 100644
--- a/static/images/hugo-content-bundles.png
+++ b/docs/static/images/hugo-content-bundles.png
Binary files differ
diff --git a/static/images/icon-custom-outputs.svg b/docs/static/images/icon-custom-outputs.svg
index ccf581f31..ccf581f31 100644
--- a/static/images/icon-custom-outputs.svg
+++ b/docs/static/images/icon-custom-outputs.svg
diff --git a/static/images/site-hierarchy.svg b/docs/static/images/site-hierarchy.svg
index 3c744871b..3c744871b 100644
--- a/static/images/site-hierarchy.svg
+++ b/docs/static/images/site-hierarchy.svg
diff --git a/static/img/examples/trees.svg b/docs/static/img/examples/trees.svg
index 0aaccfcff..0aaccfcff 100644
--- a/static/img/examples/trees.svg
+++ b/docs/static/img/examples/trees.svg
diff --git a/static/img/hugo-logo-med.png b/docs/static/img/hugo-logo-med.png
index 11d91b320..11d91b320 100644
--- a/static/img/hugo-logo-med.png
+++ b/docs/static/img/hugo-logo-med.png
Binary files differ
diff --git a/static/img/hugo-logo.png b/docs/static/img/hugo-logo.png
index 0a78f8eaa..0a78f8eaa 100644
--- a/static/img/hugo-logo.png
+++ b/docs/static/img/hugo-logo.png
Binary files differ
diff --git a/static/img/hugo.png b/docs/static/img/hugo.png
index 48acf346c..48acf346c 100644
--- a/static/img/hugo.png
+++ b/docs/static/img/hugo.png
Binary files differ
diff --git a/static/img/hugoSM.png b/docs/static/img/hugoSM.png
index f64f43088..f64f43088 100644
--- a/static/img/hugoSM.png
+++ b/docs/static/img/hugoSM.png
Binary files differ
diff --git a/static/npmjs/index.html b/docs/static/npmjs/index.html
index 88dd510af..88dd510af 100644
--- a/static/npmjs/index.html
+++ b/docs/static/npmjs/index.html
diff --git a/static/share/hugo-tall.png b/docs/static/share/hugo-tall.png
index 001ce5eb3..001ce5eb3 100644
--- a/static/share/hugo-tall.png
+++ b/docs/static/share/hugo-tall.png
Binary files differ
diff --git a/static/share/made-with-hugo-dark.png b/docs/static/share/made-with-hugo-dark.png
index c6cadf283..c6cadf283 100644
--- a/static/share/made-with-hugo-dark.png
+++ b/docs/static/share/made-with-hugo-dark.png
Binary files differ
diff --git a/static/share/made-with-hugo-long-dark.png b/docs/static/share/made-with-hugo-long-dark.png
index 1e49995fb..1e49995fb 100644
--- a/static/share/made-with-hugo-long-dark.png
+++ b/docs/static/share/made-with-hugo-long-dark.png
Binary files differ
diff --git a/static/share/made-with-hugo-long.png b/docs/static/share/made-with-hugo-long.png
index c5df534cf..c5df534cf 100644
--- a/static/share/made-with-hugo-long.png
+++ b/docs/static/share/made-with-hugo-long.png
Binary files differ
diff --git a/static/share/made-with-hugo.png b/docs/static/share/made-with-hugo.png
index 52dfd19e5..52dfd19e5 100644
--- a/static/share/made-with-hugo.png
+++ b/docs/static/share/made-with-hugo.png
Binary files differ
diff --git a/static/share/powered-by-hugo-dark.png b/docs/static/share/powered-by-hugo-dark.png
index a8e2ebc80..a8e2ebc80 100644
--- a/static/share/powered-by-hugo-dark.png
+++ b/docs/static/share/powered-by-hugo-dark.png
Binary files differ
diff --git a/static/share/powered-by-hugo-long-dark.png b/docs/static/share/powered-by-hugo-long-dark.png
index 1b760b1bf..1b760b1bf 100644
--- a/static/share/powered-by-hugo-long-dark.png
+++ b/docs/static/share/powered-by-hugo-long-dark.png
Binary files differ
diff --git a/static/share/powered-by-hugo-long.png b/docs/static/share/powered-by-hugo-long.png
index 37131359d..37131359d 100644
--- a/static/share/powered-by-hugo-long.png
+++ b/docs/static/share/powered-by-hugo-long.png
Binary files differ
diff --git a/static/share/powered-by-hugo.png b/docs/static/share/powered-by-hugo.png
index 27ff099d5..27ff099d5 100644
--- a/static/share/powered-by-hugo.png
+++ b/docs/static/share/powered-by-hugo.png
Binary files differ
diff --git a/docshelper/docs.go b/docshelper/docs.go
new file mode 100644
index 000000000..1e1594120
--- /dev/null
+++ b/docshelper/docs.go
@@ -0,0 +1,51 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package docshelper provides some helpers for the Hugo documentation, and
+// is of limited interest for the general Hugo user.
+package docshelper
+
+type (
+ DocProviderFunc = func() DocProvider
+ DocProvider map[string]map[string]any
+)
+
+var docProviderFuncs []DocProviderFunc
+
+func AddDocProviderFunc(fn DocProviderFunc) {
+ docProviderFuncs = append(docProviderFuncs, fn)
+}
+
+func GetDocProvider() DocProvider {
+ provider := make(DocProvider)
+
+ for _, fn := range docProviderFuncs {
+ p := fn()
+ for k, v := range p {
+ if prev, found := provider[k]; !found {
+ provider[k] = v
+ } else {
+ merge(prev, v)
+ }
+ }
+ }
+
+ return provider
+}
+
+// Shallow merge
+func merge(dst, src map[string]any) {
+ for k, v := range src {
+ dst[k] = v
+ }
+}
diff --git a/go.mod b/go.mod
index 75ec6290f..bb2e05363 100644
--- a/go.mod
+++ b/go.mod
@@ -1,5 +1,127 @@
-module github.com/gohugoio/hugoDocs
+module github.com/gohugoio/hugo
-go 1.16
+require (
+ github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69
+ github.com/PuerkitoBio/purell v1.1.1
+ github.com/alecthomas/chroma/v2 v2.2.0
+ github.com/armon/go-radix v1.0.0
+ github.com/aws/aws-sdk-go v1.43.5
+ github.com/bep/clock v0.3.0
+ github.com/bep/debounce v1.2.0
+ github.com/bep/gitmap v1.1.2
+ github.com/bep/goat v0.5.0
+ github.com/bep/godartsass v0.14.0
+ github.com/bep/golibsass v1.1.0
+ github.com/bep/gowebp v0.1.0
+ github.com/bep/overlayfs v0.6.0
+ github.com/bep/tmc v0.5.1
+ github.com/clbanning/mxj/v2 v2.5.6
+ github.com/cli/safeexec v1.0.0
+ github.com/disintegration/gift v1.2.1
+ github.com/dustin/go-humanize v1.0.0
+ github.com/evanw/esbuild v0.14.43
+ github.com/fortytw2/leaktest v1.3.0
+ github.com/frankban/quicktest v1.14.3
+ github.com/fsnotify/fsnotify v1.5.4
+ github.com/getkin/kin-openapi v0.97.0
+ github.com/ghodss/yaml v1.0.0
+ github.com/gobuffalo/flect v0.2.5
+ github.com/gobwas/glob v0.2.3
+ github.com/gohugoio/go-i18n/v2 v2.1.3-0.20210430103248-4c28c89f8013
+ github.com/gohugoio/locales v0.14.0
+ github.com/gohugoio/localescompressed v1.0.1
+ github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95
+ github.com/google/go-cmp v0.5.8
+ github.com/gorilla/websocket v1.5.0
+ github.com/hairyhenderson/go-codeowners v0.2.3-0.20201026200250-cdc7c0759690
+ github.com/jdkato/prose v1.2.1
+ github.com/kylelemons/godebug v1.1.0
+ github.com/kyokomi/emoji/v2 v2.2.9
+ github.com/magefile/mage v1.13.0
+ github.com/mattn/go-isatty v0.0.14
+ github.com/mitchellh/hashstructure v1.1.0
+ github.com/mitchellh/mapstructure v1.5.0
+ github.com/muesli/smartcrop v0.3.0
+ github.com/niklasfasching/go-org v1.6.5
+ github.com/olekukonko/tablewriter v0.0.5
+ github.com/pelletier/go-toml/v2 v2.0.2
+ github.com/rogpeppe/go-internal v1.8.1
+ github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
+ github.com/sanity-io/litter v1.5.5
+ github.com/spf13/afero v1.8.2
+ github.com/spf13/cast v1.5.0
+ github.com/spf13/cobra v1.4.0
+ github.com/spf13/fsync v0.9.0
+ github.com/spf13/jwalterweatherman v1.1.0
+ github.com/spf13/pflag v1.0.5
+ github.com/tdewolff/minify/v2 v2.11.10
+ github.com/tdewolff/parse/v2 v2.6.0
+ github.com/yuin/goldmark v1.4.12
+ go.uber.org/atomic v1.9.0
+ gocloud.dev v0.24.0
+ golang.org/x/image v0.0.0-20211028202545-6944b10bf410
+ golang.org/x/net v0.0.0-20220607020251-c690dde0001d
+ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
+ golang.org/x/text v0.3.7
+ golang.org/x/tools v0.1.11
+ google.golang.org/api v0.76.0
+ gopkg.in/yaml.v2 v2.4.0
+)
-require github.com/gohugoio/gohugoioTheme v0.0.0-20220228085601-7cfbda06d135 // indirect
+require (
+ cloud.google.com/go v0.101.0 // indirect
+ cloud.google.com/go/compute v1.6.1 // indirect
+ cloud.google.com/go/iam v0.3.0 // indirect
+ cloud.google.com/go/storage v1.22.0 // indirect
+ github.com/Azure/azure-pipeline-go v0.2.3 // indirect
+ github.com/Azure/azure-storage-blob-go v0.14.0 // indirect
+ github.com/Azure/go-autorest v14.2.0+incompatible // indirect
+ github.com/Azure/go-autorest/autorest v0.11.20 // indirect
+ github.com/Azure/go-autorest/autorest/adal v0.9.15 // indirect
+ github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
+ github.com/Azure/go-autorest/logger v0.2.1 // indirect
+ github.com/Azure/go-autorest/tracing v0.6.0 // indirect
+ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
+ github.com/aws/aws-sdk-go-v2 v1.9.0 // indirect
+ github.com/aws/aws-sdk-go-v2/config v1.7.0 // indirect
+ github.com/aws/aws-sdk-go-v2/credentials v1.4.0 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.0 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/ini v1.2.2 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.0 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.4.0 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sts v1.7.0 // indirect
+ github.com/aws/smithy-go v1.8.0 // indirect
+ github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
+ github.com/dlclark/regexp2 v1.4.0 // indirect
+ github.com/go-openapi/jsonpointer v0.19.5 // indirect
+ github.com/go-openapi/swag v0.19.5 // indirect
+ github.com/golang-jwt/jwt/v4 v4.0.0 // indirect
+ github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
+ github.com/golang/protobuf v1.5.2 // indirect
+ github.com/google/uuid v1.3.0 // indirect
+ github.com/google/wire v0.5.0 // indirect
+ github.com/googleapis/gax-go/v2 v2.3.0 // indirect
+ github.com/googleapis/go-type-adapters v1.0.0 // indirect
+ github.com/inconshreveable/mousetrap v1.0.0 // indirect
+ github.com/invopop/yaml v0.1.0 // indirect
+ github.com/jmespath/go-jmespath v0.4.0 // indirect
+ github.com/kr/pretty v0.3.0 // indirect
+ github.com/kr/text v0.2.0 // indirect
+ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e // indirect
+ github.com/mattn/go-ieproxy v0.0.1 // indirect
+ github.com/mattn/go-runewidth v0.0.9 // indirect
+ github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect
+ github.com/russross/blackfriday/v2 v2.1.0 // indirect
+ go.opencensus.io v0.23.0 // indirect
+ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa // indirect
+ golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect
+ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect
+ golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect
+ google.golang.org/appengine v1.6.7 // indirect
+ google.golang.org/genproto v0.0.0-20220426171045-31bebdecfb46 // indirect
+ google.golang.org/grpc v1.46.0 // indirect
+ google.golang.org/protobuf v1.28.0 // indirect
+ gopkg.in/yaml.v3 v3.0.1 // indirect
+)
+
+go 1.18
diff --git a/go.sum b/go.sum
index dad4ba67c..fcd0fe681 100644
--- a/go.sum
+++ b/go.sum
@@ -1,35 +1,1035 @@
-github.com/gohugoio/gohugoioTheme v0.0.0-20190808163145-07b3c0f73b02/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20191014144142-1f3a01deed7b h1:PWNjl46fvtz54PKO0BdiXOF6/4L/uCP0F3gtcCxGrJs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20191014144142-1f3a01deed7b/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20191021162625-2e7250ca437d h1:D3DcaYkuJbotdWNNAQpQl37txX4HQ6R5uMHoxVmTw0w=
-github.com/gohugoio/gohugoioTheme v0.0.0-20191021162625-2e7250ca437d/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200123151337-9475fd449324 h1:UZwHDYtGY0uOKIvcm2LWd+xfFxD3X5L222LIJdI5RE4=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200123151337-9475fd449324/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200123204146-589b4c309025 h1:ScYFARz+bHX1rEr1donVknhRdxGY/cwqK1hHvWEfrlc=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200123204146-589b4c309025/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200123205007-5d6620a0db26 h1:acXfduibbWxji9tW0WkLHbjcXFsnd5uIwXe0WfwOazg=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200123205007-5d6620a0db26/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200128164921-1d0bc5482051 h1:cS14MnUGS6xwWYfPNshimm8HdMCZiYBxWkCD0VnvgVw=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200128164921-1d0bc5482051/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200327225449-368f4cbef8d7 h1:cZ+ahAjSetbFv3aDJ9ipDbKyqaVlmkbSZ5cULgBTh+w=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200327225449-368f4cbef8d7/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200327231942-7f80b3d02bfa h1:kG+O/wT9UXomzp5eQiUuFVZ0l7YylAW6EVPLyjMxi/c=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200327231942-7f80b3d02bfa/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200328100657-2bfd5f8c6aee h1:PJZhCwnuVLyafDWNPSHk9iJvk6gEIvPRnycy7Pq3peA=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200328100657-2bfd5f8c6aee/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200518164958-62cbad03c40f h1:Ge3JACszSUyJW2Az9cJzWdo4PUqdijJA1RxoQSVMBSI=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200518164958-62cbad03c40f/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200518165806-0095b7b902a7 h1:Sy0hlWyZmFtdSY0Cobvw1ZYm3G1aR5+4DuFNRbMkh48=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200518165806-0095b7b902a7/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200711112515-b0dfe471654f h1:BWJyycs4HD7tUbaU8RIGeMay84bIBWRVVLE3yajPas4=
-github.com/gohugoio/gohugoioTheme v0.0.0-20200711112515-b0dfe471654f/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20210301124928-2c15837dfec3 h1:ShqzOFeeg54FPSuS6q8HSeTVgj2xNZRe/YS0jNbi21g=
-github.com/gohugoio/gohugoioTheme v0.0.0-20210301124928-2c15837dfec3/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20210409065807-6383d8cbaf65 h1:EJzierSWKqwsrUXU6MaFe0J97c0e5pzl5dBNRRrV2Nc=
-github.com/gohugoio/gohugoioTheme v0.0.0-20210409065807-6383d8cbaf65/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20210409071416-c88da48134b7 h1:uRCgPslaBgLYy4ANXBoPbBQVM8aNiHoxIZTKUXpkuUA=
-github.com/gohugoio/gohugoioTheme v0.0.0-20210409071416-c88da48134b7/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20211211125852-b85e21c1f3d6 h1:lAgdWrn8VEg0PrNCPX4DflCg2msDKpSYV6E8RTNV3N0=
-github.com/gohugoio/gohugoioTheme v0.0.0-20211211125852-b85e21c1f3d6/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
-github.com/gohugoio/gohugoioTheme v0.0.0-20220228085601-7cfbda06d135 h1:6hVzfE9YhSsZP5t6jWjvVp7MoPm7Y5fEhH/ls4ahhKk=
-github.com/gohugoio/gohugoioTheme v0.0.0-20220228085601-7cfbda06d135/go.mod h1:kpw3SS48xZvLQGEXKu8u5XHgXkPvL8DX3oGa07+z8Bs=
+bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
+cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
+cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
+cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
+cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
+cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
+cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
+cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
+cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
+cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
+cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
+cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
+cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
+cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
+cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY=
+cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
+cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
+cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
+cloud.google.com/go v0.82.0/go.mod h1:vlKccHJGuFBFufnAnuB08dfEH9Y3H7dzDzRECFdC2TA=
+cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY=
+cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM=
+cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY=
+cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvEY=
+cloud.google.com/go v0.89.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ=
+cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ=
+cloud.google.com/go v0.92.2/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=
+cloud.google.com/go v0.92.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=
+cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI=
+cloud.google.com/go v0.94.0/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4=
+cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4=
+cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc=
+cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA=
+cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A=
+cloud.google.com/go v0.101.0 h1:g+LL+JvpvdyGtcaD2xw2mSByE/6F9s471eJSoaysM84=
+cloud.google.com/go v0.101.0/go.mod h1:hEiddgDb77jDQ+I80tURYNJEnuwPzFU8awCFFRLKjW0=
+cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
+cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
+cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
+cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
+cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
+cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
+cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow=
+cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM=
+cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M=
+cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s=
+cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc=
+cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU=
+cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
+cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
+cloud.google.com/go/firestore v1.5.0/go.mod h1:c4nNYR1qdq7eaZ+jSc5fonrQN2k3M7sWATcYTiakjEo=
+cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc=
+cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY=
+cloud.google.com/go/kms v0.1.0/go.mod h1:8Qp8PCAypHg4FdmlyW1QRAv09BGQ9Uzh7JnmIZxPk+c=
+cloud.google.com/go/monitoring v0.1.0/go.mod h1:Hpm3XfzJv+UTiXzCG5Ffp0wijzHTC7Cv4eR7o3x/fEE=
+cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
+cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
+cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
+cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
+cloud.google.com/go/pubsub v1.16.0/go.mod h1:6A8EfoWZ/lUvCWStKGwAWauJZSiuV0Mkmu6WilK/TxQ=
+cloud.google.com/go/secretmanager v0.1.0/go.mod h1:3nGKHvnzDUVit7U0S9KAKJ4aOsO1xtwRG+7ey5LK1bM=
+cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
+cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
+cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
+cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
+cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
+cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
+cloud.google.com/go/storage v1.16.1/go.mod h1:LaNorbty3ehnU3rEjXSNV/NRgQA0O8Y+uh6bPe5UOk4=
+cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8=
+cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE=
+cloud.google.com/go/trace v0.1.0/go.mod h1:wxEwsoeRVPbeSkt7ZC9nWCgmoKQRAoySN7XHW2AmI7g=
+contrib.go.opencensus.io/exporter/aws v0.0.0-20200617204711-c478e41e60e9/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA=
+contrib.go.opencensus.io/exporter/stackdriver v0.13.8/go.mod h1:huNtlWx75MwO7qMs0KrMxPZXzNNWebav1Sq/pm02JdQ=
+contrib.go.opencensus.io/integrations/ocsql v0.1.7/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+github.com/Azure/azure-amqp-common-go/v3 v3.1.0/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0=
+github.com/Azure/azure-amqp-common-go/v3 v3.1.1/go.mod h1:YsDaPfaO9Ub2XeSKdIy2DfwuiQlHQCauHJwSqtrkECI=
+github.com/Azure/azure-pipeline-go v0.2.3 h1:7U9HBg1JFK3jHl5qmo4CTZKFTVgMwdFHMVtCdfBE21U=
+github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k=
+github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
+github.com/Azure/azure-sdk-for-go v57.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
+github.com/Azure/azure-service-bus-go v0.10.16/go.mod h1:MlkLwGGf1ewcx5jZadn0gUEty+tTg0RaElr6bPf+QhI=
+github.com/Azure/azure-storage-blob-go v0.14.0 h1:1BCg74AmVdYwO3dlKwtFU1V0wU2PZdREkXvAmZJRUlM=
+github.com/Azure/azure-storage-blob-go v0.14.0/go.mod h1:SMqIBi+SuiQH32bvyjngEewEeXoPfKMgWlBDaYf6fck=
+github.com/Azure/go-amqp v0.13.0/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs=
+github.com/Azure/go-amqp v0.13.11/go.mod h1:D5ZrjQqB1dyp1A+G73xeL/kNn7D5qHJIIsNNps7YNmk=
+github.com/Azure/go-amqp v0.13.12/go.mod h1:D5ZrjQqB1dyp1A+G73xeL/kNn7D5qHJIIsNNps7YNmk=
+github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=
+github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
+github.com/Azure/go-autorest/autorest v0.11.3/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw=
+github.com/Azure/go-autorest/autorest v0.11.17/go.mod h1:eipySxLmqSyC5s5k1CLupqet0PSENBEDP93LQ9a8QYw=
+github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA=
+github.com/Azure/go-autorest/autorest v0.11.20 h1:s8H1PbCZSqg/DH7JMlOz6YMig6htWLNPsjDdlLqCx3M=
+github.com/Azure/go-autorest/autorest v0.11.20/go.mod h1:o3tqFY+QR40VOlk+pV4d77mORO64jOXSgEnPQgLK6JY=
+github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg=
+github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A=
+github.com/Azure/go-autorest/autorest/adal v0.9.11/go.mod h1:nBKAnTomx8gDtl+3ZCJv2v0KACFHWTB2drffI1B68Pk=
+github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=
+github.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=
+github.com/Azure/go-autorest/autorest/adal v0.9.15 h1:X+p2GF0GWyOiSmqohIaEeuNFNDY4I4EOlVuUQvFdWMk=
+github.com/Azure/go-autorest/autorest/adal v0.9.15/go.mod h1:tGMin8I49Yij6AQ+rvV+Xa/zwxYQB5hmsd6DkfAx2+A=
+github.com/Azure/go-autorest/autorest/azure/auth v0.5.8 h1:TzPg6B6fTZ0G1zBf3T54aI7p3cAT6u//TOXGPmFMOXg=
+github.com/Azure/go-autorest/autorest/azure/auth v0.5.8/go.mod h1:kxyKZTSfKh8OVFWPAgOgQ/frrJgeYQJPyR5fLFmXko4=
+github.com/Azure/go-autorest/autorest/azure/cli v0.4.2/go.mod h1:7qkJkT+j6b+hIpzMOwPChJhTqS8VbsqqgULzMNRugoM=
+github.com/Azure/go-autorest/autorest/azure/cli v0.4.3 h1:DOhB+nXkF7LN0JfBGB5YtCF6QLK8mLe4psaHF7ZQEKM=
+github.com/Azure/go-autorest/autorest/azure/cli v0.4.3/go.mod h1:yAQ2b6eP/CmLPnmLvxtT1ALIY3OR1oFcCqVBi8vHiTc=
+github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=
+github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74=
+github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
+github.com/Azure/go-autorest/autorest/mocks v0.4.1 h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=
+github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
+github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE=
+github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E=
+github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=
+github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg=
+github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=
+github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=
+github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=
+github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69 h1:+tu3HOoMXB7RXEINRVIpxJCT+KdYiI7LAEAUrOw3dIU=
+github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69/go.mod h1:L1AbZdiDllfyYH5l5OkAaZtk7VkWe89bPJFmnDBNHxg=
+github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/GoogleCloudPlatform/cloudsql-proxy v1.24.0/go.mod h1:3tx938GhY4FC+E1KT/jNjDw7Z5qxAEtIiERJ2sXjnII=
+github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
+github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
+github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
+github.com/alecthomas/chroma/v2 v2.1.0 h1:ZG9L5/RsxO/xIONrBy8Cgo+5si3d9x3osweXc4VHl0o=
+github.com/alecthomas/chroma/v2 v2.1.0/go.mod h1:vf4zrexSH54oEjJ7EdB65tGNHmH3pGZmVkgTP5RHvAs=
+github.com/alecthomas/chroma/v2 v2.2.0 h1:Aten8jfQwUqEdadVFFjNyjx7HTexhKP0XuqBG67mRDY=
+github.com/alecthomas/chroma/v2 v2.2.0/go.mod h1:vf4zrexSH54oEjJ7EdB65tGNHmH3pGZmVkgTP5RHvAs=
+github.com/alecthomas/repr v0.0.0-20220113201626-b1b626ac65ae h1:zzGwJfFlFGD94CyyYwCJeSuD32Gj9GTaSi5y9hoVzdY=
+github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
+github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
+github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
+github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
+github.com/aws/aws-sdk-go v1.37.0/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
+github.com/aws/aws-sdk-go v1.40.34/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
+github.com/aws/aws-sdk-go v1.43.5 h1:N7arnx54E4QyW69c45UW5o8j2DCSjzpoxzJW3yU6OSo=
+github.com/aws/aws-sdk-go v1.43.5/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
+github.com/aws/aws-sdk-go-v2 v1.9.0 h1:+S+dSqQCN3MSU5vJRu1HqHrq00cJn6heIMU7X9hcsoo=
+github.com/aws/aws-sdk-go-v2 v1.9.0/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
+github.com/aws/aws-sdk-go-v2/config v1.7.0 h1:J2cZ7qe+3IpqBEXnHUrFrOjoB9BlsXg7j53vxcl5IVg=
+github.com/aws/aws-sdk-go-v2/config v1.7.0/go.mod h1:w9+nMZ7soXCe5nT46Ri354SNhXDQ6v+V5wqDjnZE+GY=
+github.com/aws/aws-sdk-go-v2/credentials v1.4.0 h1:kmvesfjY861FzlCU9mvAfe01D9aeXcG2ZuC+k9F2YLM=
+github.com/aws/aws-sdk-go-v2/credentials v1.4.0/go.mod h1:dgGR+Qq7Wjcd4AOAW5Rf5Tnv3+x7ed6kETXyS9WCuAY=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.0 h1:OxTAgH8Y4BXHD6PGCJ8DHx2kaZPCQfSTqmDsdRZFezE=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.0/go.mod h1:CpNzHK9VEFUCknu50kkB8z58AH2B5DvPP7ea1LHve/Y=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.2.2 h1:d95cddM3yTm4qffj3P6EnP+TzX1SSkWaQypXSgT/hpA=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.2.2/go.mod h1:BQV0agm+JEhqR+2RT5e1XTFIDcAAV0eW6z2trp+iduw=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.0 h1:VNJ5NLBteVXEwE2F1zEXVmyIH58mZ6kIQGJoC7C+vkg=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.0/go.mod h1:R1KK+vY8AfalhG1AOu5e35pOD2SdoPKQCFLTvnxiohk=
+github.com/aws/aws-sdk-go-v2/service/kms v1.5.0/go.mod h1:w7JuP9Oq1IKMFQPkNe3V6s9rOssXzOVEMNEqK1L1bao=
+github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.6.0/go.mod h1:B+7C5UKdVq1ylkI/A6O8wcurFtaux0R1njePNPtKwoA=
+github.com/aws/aws-sdk-go-v2/service/ssm v1.10.0/go.mod h1:4dXS5YNqI3SNbetQ7X7vfsMlX6ZnboJA2dulBwJx7+g=
+github.com/aws/aws-sdk-go-v2/service/sso v1.4.0 h1:sHXMIKYS6YiLPzmKSvDpPmOpJDHxmAUgbiF49YNVztg=
+github.com/aws/aws-sdk-go-v2/service/sso v1.4.0/go.mod h1:+1fpWnL96DL23aXPpMGbsmKe8jLTEfbjuQoA4WS1VaA=
+github.com/aws/aws-sdk-go-v2/service/sts v1.7.0 h1:1at4e5P+lvHNl2nUktdM2/v+rpICg/QSEr9TO/uW9vU=
+github.com/aws/aws-sdk-go-v2/service/sts v1.7.0/go.mod h1:0qcSMCyASQPN2sk/1KQLQ2Fh6yq8wm0HSDAimPhzCoM=
+github.com/aws/smithy-go v1.8.0 h1:AEwwwXQZtUwP5Mz506FeXXrKBe0jA8gVM+1gEcSRooc=
+github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=
+github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
+github.com/bep/clock v0.3.0 h1:vfOA6+wVb6pPQEiXow9f/too92vNTLe9MuwO13PfI0M=
+github.com/bep/clock v0.3.0/go.mod h1:6Gz2lapnJ9vxpvPxQ2u6FcXFRoj4kkiqQ6pm0ERZlwk=
+github.com/bep/debounce v1.2.0 h1:wXds8Kq8qRfwAOpAxHrJDbCXgC5aHSzgQb/0gKsHQqo=
+github.com/bep/debounce v1.2.0/go.mod h1:H8yggRPQKLUhUoqrJC1bO2xNya7vanpDl7xR3ISbCJ0=
+github.com/bep/gitmap v1.1.2 h1:zk04w1qc1COTZPPYWDQHvns3y1afOsdRfraFQ3qI840=
+github.com/bep/gitmap v1.1.2/go.mod h1:g9VRETxFUXNWzMiuxOwcudo6DfZkW9jOsOW0Ft4kYaY=
+github.com/bep/goat v0.5.0 h1:S8jLXHCVy/EHIoCY+btKkmcxcXFd34a0Q63/0D4TKeA=
+github.com/bep/goat v0.5.0/go.mod h1:Md9x7gRxiWKs85yHlVTvHQw9rg86Bm+Y4SuYE8CTH7c=
+github.com/bep/godartsass v0.14.0 h1:pPb6XkpyDEppS+wK0veh7OXDQc4xzOJI9Qcjb743UeQ=
+github.com/bep/godartsass v0.14.0/go.mod h1:6LvK9RftsXMxGfsA0LDV12AGc4Jylnu6NgHL+Q5/pE8=
+github.com/bep/golibsass v1.1.0 h1:pjtXr00IJZZaOdfryNa9wARTB3Q0BmxC3/V1KNcgyTw=
+github.com/bep/golibsass v1.1.0/go.mod h1:DL87K8Un/+pWUS75ggYv41bliGiolxzDKWJAq3eJ1MA=
+github.com/bep/gowebp v0.1.0 h1:4/iQpfnxHyXs3x/aTxMMdOpLEQQhFmF6G7EieWPTQyo=
+github.com/bep/gowebp v0.1.0/go.mod h1:ZhFodwdiFp8ehGJpF4LdPl6unxZm9lLFjxD3z2h2AgI=
+github.com/bep/overlayfs v0.6.0 h1:sgLcq/qtIzbaQNl2TldGXOkHvqeZB025sPvHOQL+DYo=
+github.com/bep/overlayfs v0.6.0/go.mod h1:NFjSmn3kCqG7KX2Lmz8qT8VhPPCwZap3UNogXawoQHM=
+github.com/bep/tmc v0.5.1 h1:CsQnSC6MsomH64gw0cT5f+EwQDcvZz4AazKunFwTpuI=
+github.com/bep/tmc v0.5.1/go.mod h1:tGYHN8fS85aJPhDLgXETVKp+PR382OvFi2+q2GkGsq0=
+github.com/bep/workers v1.0.0 h1:U+H8YmEaBCEaFZBst7GcRVEoqeRC9dzH2dWOwGmOchg=
+github.com/bep/workers v1.0.0/go.mod h1:7kIESOB86HfR2379pwoMWNy8B50D7r99fRLUyPSNyCs=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
+github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U=
+github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
+github.com/clbanning/mxj/v2 v2.5.6 h1:Jm4VaCI/+Ug5Q57IzEoZbwx4iQFA6wkXv72juUSeK+g=
+github.com/clbanning/mxj/v2 v2.5.6/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s=
+github.com/cli/safeexec v1.0.0 h1:0VngyaIyqACHdcMNWfo6+KdUYnqEr2Sg+bSP1pdF+dI=
+github.com/cli/safeexec v1.0.0/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
+github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
+github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU=
+github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/denisenkom/go-mssqldb v0.9.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
+github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
+github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
+github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
+github.com/disintegration/gift v1.2.1 h1:Y005a1X4Z7Uc+0gLpSAsKhWi4qLtsdEcMIbbdvdZ6pc=
+github.com/disintegration/gift v1.2.1/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI=
+github.com/djherbis/atime v1.1.0/go.mod h1:28OF6Y8s3NQWwacXc5eZTsEsiMzp7LF8MbXE+XJPdBE=
+github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
+github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
+github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
+github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
+github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
+github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/evanw/esbuild v0.14.43 h1:4WIYkAf2fLmru3KYtKUB6mdBl4dgpoVcq7hqXhXGVG0=
+github.com/evanw/esbuild v0.14.43/go.mod h1:GG+zjdi59yh3ehDn4ZWfPcATxjPDUH53iU4ZJbp7dkY=
+github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
+github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
+github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
+github.com/frankban/quicktest v1.4.1/go.mod h1:36zfPVQyHxymz4cH7wlDmVwDrJuljRB60qkgn7rorfQ=
+github.com/frankban/quicktest v1.7.2/go.mod h1:jaStnuzAqU1AJdCO0l53JDCJrVDKcS03DbaAcR7Ks/o=
+github.com/frankban/quicktest v1.13.0/go.mod h1:qLE0fzW0VuyUAJgPU19zByoIr0HtCHN/r/VLSOOIySU=
+github.com/frankban/quicktest v1.14.2/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
+github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
+github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
+github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
+github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI=
+github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
+github.com/getkin/kin-openapi v0.97.0 h1:bsvXZeuGiCW43ZKy6xOY5qfT5fCRYmnJwierblSrHCU=
+github.com/getkin/kin-openapi v0.97.0/go.mod h1:w4lRPHiyOdwGbOkLIyk+P0qCwlu7TXPCHD/64nSXzgE=
+github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M=
+github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
+github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY=
+github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
+github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
+github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI=
+github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/gobuffalo/flect v0.2.5 h1:H6vvsv2an0lalEaCDRThvtBfmg44W/QHXBCYUXf/6S4=
+github.com/gobuffalo/flect v0.2.5/go.mod h1:1ZyCLIbg0YD7sDkzvFdPoOydPtD8y9JQnrOROolUcM8=
+github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
+github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
+github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
+github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw=
+github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM=
+github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
+github.com/gohugoio/go-i18n/v2 v2.1.3-0.20210430103248-4c28c89f8013 h1:Nj29Qbkt0bZ/bJl8eccfxQp3NlU/0IW1v9eyYtQ53XQ=
+github.com/gohugoio/go-i18n/v2 v2.1.3-0.20210430103248-4c28c89f8013/go.mod h1:3Ltoo9Banwq0gOtcOwxuHG6omk+AwsQPADyw2vQYOJQ=
+github.com/gohugoio/locales v0.14.0 h1:Q0gpsZwfv7ATHMbcTNepFd59H7GoykzWJIxi113XGDc=
+github.com/gohugoio/locales v0.14.0/go.mod h1:ip8cCAv/cnmVLzzXtiTpPwgJ4xhKZranqNqtoIu0b/4=
+github.com/gohugoio/localescompressed v1.0.1 h1:KTYMi8fCWYLswFyJAeOtuk/EkXR/KPTHHNN9OS+RTxo=
+github.com/gohugoio/localescompressed v1.0.1/go.mod h1:jBF6q8D7a0vaEmcWPNcAjUZLJaIVNiwvM3WlmTvooB0=
+github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95 h1:sgew0XCnZwnzpWxTt3V8LLiCO7OQi3C6dycaE67wfkU=
+github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95/go.mod h1:bOlVlCa1/RajcHpXkrUXPSHB/Re1UnlXxD1Qp8SKOd8=
+github.com/golang-jwt/jwt/v4 v4.0.0 h1:RAqyYixv1p7uEnocuy8P1nru5wprCh/MH2BIlW5z5/o=
+github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
+github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
+github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
+github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
+github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
+github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
+github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
+github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
+github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-replayers/grpcreplay v1.1.0 h1:S5+I3zYyZ+GQz68OfbURDdt/+cSMqCK1wrvNx7WBzTE=
+github.com/google/go-replayers/grpcreplay v1.1.0/go.mod h1:qzAvJ8/wi57zq7gWqaE6AwLM6miiXUQwP1S+I9icmhk=
+github.com/google/go-replayers/httpreplay v1.0.0 h1:8SmT8fUYM4nueF+UnXIX8LJxNTb1vpPuknXz+yTWzL4=
+github.com/google/go-replayers/httpreplay v1.0.0/go.mod h1:LJhKoTwS5Wy5Ld/peq8dFFG5OfJyHEz7ft+DsTUv25M=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
+github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=
+github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
+github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
+github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw=
+github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
+github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210506205249-923b5ab0fc1a/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
+github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
+github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/wire v0.5.0 h1:I7ELFeVBr3yfPIcc8+MWvrjk+3VjbcSzoXm3JVa+jD8=
+github.com/google/wire v0.5.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU=
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
+github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
+github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0=
+github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM=
+github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM=
+github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI=
+github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM=
+github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA=
+github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4=
+github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
+github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
+github.com/hairyhenderson/go-codeowners v0.2.3-0.20201026200250-cdc7c0759690 h1:XWjCrg/HJRLZCbvsUxS5R/9JhwiiwNctEsRvZ1Vjz5k=
+github.com/hairyhenderson/go-codeowners v0.2.3-0.20201026200250-cdc7c0759690/go.mod h1:8Qu9UmnhCRunfRv365Z3w+mT/WfLGKJiK+vugY9qNCU=
+github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/invopop/yaml v0.1.0 h1:YW3WGUoJEXYfzWBjn00zIlrw7brGVD0fUKRYDPAPhrc=
+github.com/invopop/yaml v0.1.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q=
+github.com/jdkato/prose v1.2.1 h1:Fp3UnJmLVISmlc57BgKUzdjr0lOtjqTZicL3PaYy6cU=
+github.com/jdkato/prose v1.2.1/go.mod h1:AiRHgVagnEx2JbQRQowVBKjG0bcs/vtkGCH1dYAL1rA=
+github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
+github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
+github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
+github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
+github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
+github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
+github.com/klauspost/compress v1.13.5/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
+github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
+github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
+github.com/kyokomi/emoji/v2 v2.2.9 h1:UWYkjplPZ4rMPvLxc+/e12/xTqoRcn55oUySkpZ554g=
+github.com/kyokomi/emoji/v2 v2.2.9/go.mod h1:JUcn42DTdsXJo1SWanHh4HKDEyPaR5CqkmoirZZP9qE=
+github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
+github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/magefile/mage v1.13.0 h1:XtLJl8bcCM7EFoO8FyH8XK3t7G5hQAeK+i4tq+veT9M=
+github.com/magefile/mage v1.13.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
+github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq0y3QnmWAArdw9PqbmotexnWx/FU8=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O4hW7qJOT7vyQPKrWmj26uf5wMc/IiIs=
+github.com/mattn/go-ieproxy v0.0.1 h1:qiyop7gCflfhwCzGyeT0gro3sF9AIg9HU98JORTkqfI=
+github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
+github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
+github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
+github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
+github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/mitchellh/hashstructure v1.1.0 h1:P6P1hdjqAAknpY/M1CGipelZgp+4y9ja9kmUZPXP+H0=
+github.com/mitchellh/hashstructure v1.1.0/go.mod h1:xUDAozZz0Wmdiufv0uyhnHkUTN6/6d8ulp4AwfLKrmA=
+github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/montanaflynn/stats v0.6.3/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
+github.com/muesli/smartcrop v0.3.0 h1:JTlSkmxWg/oQ1TcLDoypuirdE8Y/jzNirQeLkxpA6Oc=
+github.com/muesli/smartcrop v0.3.0/go.mod h1:i2fCI/UorTfgEpPPLWiFBv4pye+YAG78RwcQLUkocpI=
+github.com/neurosnap/sentences v1.0.6/go.mod h1:pg1IapvYpWCJJm/Etxeh0+gtMf1rI1STY9S7eUCPbDc=
+github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
+github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
+github.com/niklasfasching/go-org v1.6.5 h1:5YAIqNTdl6lAOb7lD2AyQ1RuFGPVrAKvUexphk8PGbo=
+github.com/niklasfasching/go-org v1.6.5/go.mod h1:ybv0eGDnxylFUfFE+ySaQc734j/L3+/ChKZ/h63a2wM=
+github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
+github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
+github.com/pelletier/go-toml/v2 v2.0.2 h1:+jQXlF3scKIcSEKkdHzXhCTDLPFi5r1wnK6yPS+49Gw=
+github.com/pelletier/go-toml/v2 v2.0.2/go.mod h1:MovirKjgVRESsAvNZlAjtFwV867yGuwRkXbG66OzopI=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg=
+github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
+github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg=
+github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
+github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
+github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo=
+github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U=
+github.com/shogo82148/go-shuffle v0.0.0-20180218125048-27e6095f230d/go.mod h1:2htx6lmL0NGLHlO8ZCf+lQBGBHIbEujyywxJArf+2Yc=
+github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
+github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
+github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo=
+github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo=
+github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
+github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
+github.com/spf13/cobra v1.4.0 h1:y+wJpx64xcgO1V+RcnwW0LEHxTKRi2ZDPSBjWnrg88Q=
+github.com/spf13/cobra v1.4.0/go.mod h1:Wo4iy3BUC+X2Fybo0PDqwJIv3dNRiZLHQymsfxlB84g=
+github.com/spf13/fsync v0.9.0 h1:f9CEt3DOB2mnHxZaftmEOFWjABEvKM/xpf3cUwJrGOY=
+github.com/spf13/fsync v0.9.0/go.mod h1:fNtJEfG3HiltN3y4cPOz6MLjos9+2pIEqLIgszqhp/0=
+github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
+github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
+github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.2 h1:4jaiDzPyXQvSd7D0EjG45355tLlV3VOECpq10pLC+8s=
+github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
+github.com/tdewolff/minify/v2 v2.11.10 h1:2tk9nuKfc8YOTD8glZ7JF/VtE8W5HOgmepWdjcPtRro=
+github.com/tdewolff/minify/v2 v2.11.10/go.mod h1:dHOS3dk+nJ0M3q3uM3VlNzTb70cou+ov0ki7C4PAFgM=
+github.com/tdewolff/parse/v2 v2.6.0 h1:f2D7w32JtqjCv6SczWkfwK+m15et42qEtDnZXHoNY70=
+github.com/tdewolff/parse/v2 v2.6.0/go.mod h1:WzaJpRSbwq++EIQHYIRTpbYKNA3gn9it1Ik++q4zyho=
+github.com/tdewolff/test v1.0.6 h1:76mzYJQ83Op284kMT+63iCNCI7NEERsIN8dLM+RiKr4=
+github.com/tdewolff/test v1.0.6/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE=
+github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
+github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
+github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.12 h1:6hffw6vALvEDqJ19dOJvJKOoAOKe4NDaTqvd2sktGN0=
+github.com/yuin/goldmark v1.4.12/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=
+go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
+go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
+go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
+go.opencensus.io v0.22.6/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
+go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
+go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
+go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
+go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
+go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
+go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
+go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
+go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
+go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak=
+go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
+go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
+gocloud.dev v0.24.0 h1:cNtHD07zQQiv02OiwwDyVMuHmR7iQt2RLkzoAgz7wBs=
+gocloud.dev v0.24.0/go.mod h1:uA+als++iBX5ShuG4upQo/3Zoz49iIPlYUWHV5mM8w8=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
+golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
+golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa h1:idItI2DDfCokpg0N51B2VtiLdJ4vAuXC9fnCb2gACo4=
+golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
+golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
+golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
+golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
+golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
+golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
+golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20210220032944-ac19c3e999fb/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20211028202545-6944b10bf410 h1:hTftEOvwiOq2+O8k2D5/Q7COC7k5Qcrgc2TFURJYnvQ=
+golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
+golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
+golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
+golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210825183410-e898025ed96a/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
+golang.org/x/net v0.0.0-20220607020251-c690dde0001d h1:4SFsTMi4UahlKoloni7L4eYzhFRifURQLw+yv0QDCx8=
+golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210126194326-f9ce19ea3013/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
+golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
+golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE=
+golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210223095934-7937bea0104d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
+golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
+golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
+golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.1.11 h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY=
+golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U=
+golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
+google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
+google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
+google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
+google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
+google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
+google.golang.org/api v0.37.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
+google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
+google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
+google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
+google.golang.org/api v0.46.0/go.mod h1:ceL4oozhkAiTID8XMmJBsIxID/9wMXJVVFXPg4ylg3I=
+google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo=
+google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4=
+google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw=
+google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU=
+google.golang.org/api v0.52.0/go.mod h1:Him/adpjt0sxtkWViy0b6xyKW/SD71CwdJ7HqJo7SrU=
+google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k=
+google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=
+google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE=
+google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI=
+google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I=
+google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo=
+google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g=
+google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA=
+google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8=
+google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs=
+google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
+google.golang.org/api v0.76.0 h1:UkZl25bR1FHNqtK/EKs3vCdpZtUO6gea3YElTwc8pQg=
+google.golang.org/api v0.76.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
+google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
+google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
+google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
+google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
+google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210126160654-44e461bb6506/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
+google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
+google.golang.org/genproto v0.0.0-20210429181445-86c259c2b4ab/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
+google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
+google.golang.org/genproto v0.0.0-20210517163617-5e0236093d7a/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
+google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=
+google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
+google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
+google.golang.org/genproto v0.0.0-20210721163202-f1cecdd8b78a/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
+google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
+google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
+google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
+google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w=
+google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210825212027-de86158e7fda/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
+google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
+google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
+google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E=
+google.golang.org/genproto v0.0.0-20220405205423-9d709892a2bf/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/genproto v0.0.0-20220426171045-31bebdecfb46 h1:G1IeWbjrqEq9ChWxEuRPJu6laA67+XgTFHVSAvepr38=
+google.golang.org/genproto v0.0.0-20220426171045-31bebdecfb46/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
+google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
+google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
+google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
+google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
+google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
+google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
+google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
+google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
+google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
+google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
+google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
+google.golang.org/grpc v1.46.0 h1:oCjezcn6g6A75TGoKYBPgKmVBLexhYLM6MebdrPApP8=
+google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
+google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=
+google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/neurosnap/sentences.v1 v1.0.6/go.mod h1:YlK+SN+fLQZj+kY3r8DkGDhDr91+S3JmTb5LSxFRQo0=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
+honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=
+nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=
+rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
+rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
+rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
diff --git a/goreleaser-hook-post-linux.sh b/goreleaser-hook-post-linux.sh
new file mode 100755
index 000000000..e97e274a8
--- /dev/null
+++ b/goreleaser-hook-post-linux.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# See https://github.com/gohugoio/hugo/issues/8955
+objdump -T dist/hugo_extended_linux_linux_amd64/hugo | grep -E -q 'GLIBC_2.2[0-9]'
+RESULT=$?
+if [ $RESULT -eq 0 ]; then
+  echo "Found GLIBC_2.2x in Linux binary, this will not work in older Vercel/Netlify images.";
+  exit 1;
+fi
diff --git a/goreleaser.yml b/goreleaser.yml
new file mode 100644
index 000000000..0d1389b6a
--- /dev/null
+++ b/goreleaser.yml
@@ -0,0 +1,197 @@
+project_name: hugo
+env:
+ - GO111MODULE=on
+ - GOPROXY=https://proxy.golang.org
+before:
+ hooks:
+ - go mod download
+builds:
+ -
+ binary: hugo
+ id: hugo
+ ldflags: -s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=gohugoio
+ env:
+ - CGO_ENABLED=0
+ flags:
+ - -buildmode
+ - exe
+ goos:
+ - darwin
+ - linux
+ - windows
+ goarch:
+ - amd64
+ - 386
+ - arm
+ - arm64
+ goarm:
+ - 7
+ ignore:
+ - goos: darwin
+ goarch: 386
+ -
+ binary: hugo
+ id: hugo_unix
+ ldflags: -s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=gohugoio
+ env:
+ - CGO_ENABLED=0
+ flags:
+ - -buildmode
+ - exe
+ goos:
+ - freebsd
+ - netbsd
+ - openbsd
+ - dragonfly
+ goarch:
+ - amd64
+ -
+ binary: hugo
+ id: hugo_extended_windows
+ ldflags:
+ - -s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=gohugoio
+ - "-extldflags '-static'"
+ env:
+ - CGO_ENABLED=1
+ - CC=x86_64-w64-mingw32-gcc
+ - CXX=x86_64-w64-mingw32-g++
+ flags:
+ - -buildmode
+ - exe
+ - -tags
+ - extended
+ goos:
+ - windows
+ goarch:
+ - amd64
+ - binary: hugo
+ id: hugo_extended_darwin
+ ldflags: -s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=gohugoio
+ env:
+ - CGO_ENABLED=1
+ - CC=o64-clang
+ - CXX=o64-clang++
+ flags:
+ - -buildmode
+ - exe
+ - -tags
+ - extended
+ goos:
+ - darwin
+ goarch:
+ - amd64
+ - arm64
+ - binary: hugo
+ id: hugo_extended_linux
+ ldflags: -s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=gohugoio
+ env:
+ - CGO_ENABLED=1
+ flags:
+ - -buildmode
+ - exe
+ - -tags
+ - extended
+ goos:
+ - linux
+ goarch:
+ - amd64
+ hooks:
+ post: ./goreleaser-hook-post-linux.sh
+release:
+ draft: true
+
+archives:
+ -
+ id: "hugo"
+ builds: ['hugo', 'hugo_unix']
+ format: tar.gz
+ format_overrides:
+ - goos: windows
+ format: zip
+ name_template: "{{.ProjectName}}_{{.Version}}_{{.Os}}-{{.Arch}}"
+ replacements:
+ amd64: 64bit
+ 386: 32bit
+ arm: ARM
+ arm64: ARM64
+ darwin: macOS
+ linux: Linux
+ windows: Windows
+ openbsd: OpenBSD
+ netbsd: NetBSD
+ freebsd: FreeBSD
+ dragonfly: DragonFlyBSD
+ files:
+ - README.md
+ - LICENSE
+ -
+ id: "hugo_extended"
+ builds: ['hugo_extended_windows', 'hugo_extended_linux', 'hugo_extended_darwin']
+ format: tar.gz
+ format_overrides:
+ - goos: windows
+ format: zip
+ name_template: "{{.ProjectName}}_extended_{{.Version}}_{{.Os}}-{{.Arch}}"
+ replacements:
+ amd64: 64bit
+ 386: 32bit
+ arm: ARM
+ arm64: ARM64
+ darwin: macOS
+ linux: Linux
+ windows: Windows
+ openbsd: OpenBSD
+ netbsd: NetBSD
+ freebsd: FreeBSD
+ dragonfly: DragonFlyBSD
+ files:
+ - README.md
+ - LICENSE
+
+nfpms:
+ -
+ id: "hugo"
+ builds: ['hugo']
+ formats:
+ - deb
+ vendor: "gohugo.io"
+ homepage: "https://gohugo.io/"
+ maintainer: "Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>"
+ description: "A Fast and Flexible Static Site Generator built with love in GoLang."
+ license: "Apache-2.0"
+ file_name_template: "{{.ProjectName}}_{{.Version}}_{{.Os}}-{{.Arch}}"
+ replacements:
+ amd64: 64bit
+ 386: 32bit
+ arm: ARM
+ arm64: ARM64
+ darwin: macOS
+ linux: Linux
+ windows: Windows
+ openbsd: OpenBSD
+ netbsd: NetBSD
+ freebsd: FreeBSD
+ dragonfly: DragonFlyBSD
+ -
+ id: "hugo_extended"
+ builds: ['hugo_extended_linux']
+ formats:
+ - deb
+ vendor: "gohugo.io"
+ homepage: "https://gohugo.io/"
+ maintainer: "Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>"
+ description: "A Fast and Flexible Static Site Generator built with love in GoLang."
+ license: "Apache-2.0"
+ file_name_template: "{{.ProjectName}}_extended_{{.Version}}_{{.Os}}-{{.Arch}}"
+ replacements:
+ amd64: 64bit
+ 386: 32bit
+ arm: ARM
+ arm64: ARM64
+ darwin: macOS
+ linux: Linux
+ windows: Windows
+ openbsd: OpenBSD
+ netbsd: NetBSD
+ freebsd: FreeBSD
+ dragonfly: DragonFlyBSD
diff --git a/helpers/content.go b/helpers/content.go
new file mode 100644
index 000000000..d04e34a07
--- /dev/null
+++ b/helpers/content.go
@@ -0,0 +1,301 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package helpers implements general utility functions that work with
+// and on content. The helper functions defined here lay down the
+// foundation of how Hugo works with files and filepaths, and perform
+// string operations on content.
+package helpers
+
+import (
+ "bytes"
+ "html/template"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/markup/converter"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+
+ "github.com/gohugoio/hugo/markup"
+
+ "github.com/gohugoio/hugo/config"
+)
+
+var (
+ openingPTag = []byte("<p>")
+ closingPTag = []byte("</p>")
+ paragraphIndicator = []byte("<p")
+ closingIndicator = []byte("</")
+)
+
+// ContentSpec provides functionality to render markdown content.
+type ContentSpec struct {
+ Converters markup.ConverterProvider
+ anchorNameSanitizer converter.AnchorNameSanitizer
+ getRenderer func(t hooks.RendererType, id any) any
+
+ // SummaryLength is the length of the summary that Hugo extracts from a content.
+ summaryLength int
+
+ BuildFuture bool
+ BuildExpired bool
+ BuildDrafts bool
+
+ Cfg config.Provider
+}
+
+// NewContentSpec returns a ContentSpec initialized
+// with the appropriate fields from the given config.Provider.
+func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.Fs, ex *hexec.Exec) (*ContentSpec, error) {
+ spec := &ContentSpec{
+ summaryLength: cfg.GetInt("summaryLength"),
+ BuildFuture: cfg.GetBool("buildFuture"),
+ BuildExpired: cfg.GetBool("buildExpired"),
+ BuildDrafts: cfg.GetBool("buildDrafts"),
+
+ Cfg: cfg,
+ }
+
+ converterProvider, err := markup.NewConverterProvider(converter.ProviderConfig{
+ Cfg: cfg,
+ ContentFs: contentFs,
+ Logger: logger,
+ Exec: ex,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ spec.Converters = converterProvider
+ p := converterProvider.Get("markdown")
+ conv, err := p.New(converter.DocumentContext{})
+ if err != nil {
+ return nil, err
+ }
+ if as, ok := conv.(converter.AnchorNameSanitizer); ok {
+ spec.anchorNameSanitizer = as
+ } else {
+ // Use Goldmark's sanitizer
+ p := converterProvider.Get("goldmark")
+ conv, err := p.New(converter.DocumentContext{})
+ if err != nil {
+ return nil, err
+ }
+ spec.anchorNameSanitizer = conv.(converter.AnchorNameSanitizer)
+ }
+
+ return spec, nil
+}
+
+// stripEmptyNav strips out empty <nav> tags from content.
+func stripEmptyNav(in []byte) []byte {
+ return bytes.Replace(in, []byte("<nav>\n</nav>\n\n"), []byte(``), -1)
+}
+
+// BytesToHTML converts bytes to type template.HTML.
+func BytesToHTML(b []byte) template.HTML {
+ return template.HTML(string(b))
+}
+
+// ExtractTOC extracts Table of Contents from content.
+func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
+ if !bytes.Contains(content, []byte("<nav>")) {
+ return content, nil
+ }
+ origContent := make([]byte, len(content))
+ copy(origContent, content)
+ first := []byte(`<nav>
+<ul>`)
+
+ last := []byte(`</ul>
+</nav>`)
+
+ replacement := []byte(`<nav id="TableOfContents">
+<ul>`)
+
+ startOfTOC := bytes.Index(content, first)
+
+ peekEnd := len(content)
+ if peekEnd > 70+startOfTOC {
+ peekEnd = 70 + startOfTOC
+ }
+
+ if startOfTOC < 0 {
+ return stripEmptyNav(content), toc
+ }
+ // Need to peek ahead to see if this nav element is actually the right one.
+ correctNav := bytes.Index(content[startOfTOC:peekEnd], []byte(`<li><a href="#`))
+ if correctNav < 0 { // no match found
+ return content, toc
+ }
+ lengthOfTOC := bytes.Index(content[startOfTOC:], last) + len(last)
+ endOfTOC := startOfTOC + lengthOfTOC
+
+ newcontent = append(content[:startOfTOC], content[endOfTOC:]...)
+ toc = append(replacement, origContent[startOfTOC+len(first):endOfTOC]...)
+ return
+}
+
+func (c *ContentSpec) SanitizeAnchorName(s string) string {
+ return c.anchorNameSanitizer.SanitizeAnchorName(s)
+}
+
+func (c *ContentSpec) ResolveMarkup(in string) string {
+ in = strings.ToLower(in)
+ switch in {
+ case "md", "markdown", "mdown":
+ return "markdown"
+ case "html", "htm":
+ return "html"
+ default:
+ if conv := c.Converters.Get(in); conv != nil {
+ return conv.Name()
+ }
+ }
+ return ""
+}
+
+// TotalWords counts the words in s, where a word is a run of one or more
+// consecutive non-space characters, as defined by unicode.IsSpace.
+// This is a cheaper way of word counting than the obvious len(strings.Fields(s)).
+func TotalWords(s string) int {
+ n := 0
+ inWord := false
+ for _, r := range s {
+ wasInWord := inWord
+ inWord = !unicode.IsSpace(r)
+ if inWord && !wasInWord {
+ n++
+ }
+ }
+ return n
+}
+
+// TruncateWordsByRune truncates words by runes.
+func (c *ContentSpec) TruncateWordsByRune(in []string) (string, bool) {
+ words := make([]string, len(in))
+ copy(words, in)
+
+ count := 0
+ for index, word := range words {
+ if count >= c.summaryLength {
+ return strings.Join(words[:index], " "), true
+ }
+ runeCount := utf8.RuneCountInString(word)
+ if len(word) == runeCount {
+ count++
+ } else if count+runeCount < c.summaryLength {
+ count += runeCount
+ } else {
+ for ri := range word {
+ if count >= c.summaryLength {
+ truncatedWords := append(words[:index], word[:ri])
+ return strings.Join(truncatedWords, " "), true
+ }
+ count++
+ }
+ }
+ }
+
+ return strings.Join(words, " "), false
+}
+
+// TruncateWordsToWholeSentence takes content and truncates to whole sentence
+// limited by max number of words. It also returns whether it is truncated.
+func (c *ContentSpec) TruncateWordsToWholeSentence(s string) (string, bool) {
+ var (
+ wordCount = 0
+ lastWordIndex = -1
+ )
+
+ for i, r := range s {
+ if unicode.IsSpace(r) {
+ wordCount++
+ lastWordIndex = i
+
+ if wordCount >= c.summaryLength {
+ break
+ }
+
+ }
+ }
+
+ if lastWordIndex == -1 {
+ return s, false
+ }
+
+ endIndex := -1
+
+ for j, r := range s[lastWordIndex:] {
+ if isEndOfSentence(r) {
+ endIndex = j + lastWordIndex + utf8.RuneLen(r)
+ break
+ }
+ }
+
+ if endIndex == -1 {
+ return s, false
+ }
+
+ return strings.TrimSpace(s[:endIndex]), endIndex < len(s)
+}
+
+// TrimShortHTML removes the <p>/</p> tags from HTML input in the situation
+// where said tags are the only <p> tags in the input and enclose the content
+// of the input (whitespace excluded).
+func (c *ContentSpec) TrimShortHTML(input []byte) []byte {
+ firstOpeningP := bytes.Index(input, paragraphIndicator)
+ lastOpeningP := bytes.LastIndex(input, paragraphIndicator)
+
+ lastClosingP := bytes.LastIndex(input, closingPTag)
+ lastClosing := bytes.LastIndex(input, closingIndicator)
+
+ if firstOpeningP == lastOpeningP && lastClosingP == lastClosing {
+ input = bytes.TrimSpace(input)
+ input = bytes.TrimPrefix(input, openingPTag)
+ input = bytes.TrimSuffix(input, closingPTag)
+ input = bytes.TrimSpace(input)
+ }
+ return input
+}
+
+func isEndOfSentence(r rune) bool {
+ return r == '.' || r == '?' || r == '!' || r == '"' || r == '\n'
+}
+
+// Kept only for benchmark.
+func (c *ContentSpec) truncateWordsToWholeSentenceOld(content string) (string, bool) {
+ words := strings.Fields(content)
+
+ if c.summaryLength >= len(words) {
+ return strings.Join(words, " "), false
+ }
+
+ for counter, word := range words[c.summaryLength:] {
+ if strings.HasSuffix(word, ".") ||
+ strings.HasSuffix(word, "?") ||
+ strings.HasSuffix(word, ".\"") ||
+ strings.HasSuffix(word, "!") {
+ upper := c.summaryLength + counter + 1
+ return strings.Join(words[:upper], " "), (upper < len(words))
+ }
+ }
+
+ return strings.Join(words[:c.summaryLength], " "), true
+}
diff --git a/helpers/content_test.go b/helpers/content_test.go
new file mode 100644
index 000000000..54b7ef3f9
--- /dev/null
+++ b/helpers/content_test.go
@@ -0,0 +1,244 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "bytes"
+ "html/template"
+ "strings"
+ "testing"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"
+
+func TestTrimShortHTML(t *testing.T) {
+ tests := []struct {
+ input, output []byte
+ }{
+ {[]byte(""), []byte("")},
+ {[]byte("Plain text"), []byte("Plain text")},
+ {[]byte(" \t\n Whitespace text\n\n"), []byte("Whitespace text")},
+ {[]byte("<p>Simple paragraph</p>"), []byte("Simple paragraph")},
+ {[]byte("\n \n \t <p> \t Whitespace\nHTML \n\t </p>\n\t"), []byte("Whitespace\nHTML")},
+ {[]byte("<p>Multiple</p><p>paragraphs</p>"), []byte("<p>Multiple</p><p>paragraphs</p>")},
+ {[]byte("<p>Nested<p>paragraphs</p></p>"), []byte("<p>Nested<p>paragraphs</p></p>")},
+ {[]byte("<p>Hello</p>\n<ul>\n<li>list1</li>\n<li>list2</li>\n</ul>"), []byte("<p>Hello</p>\n<ul>\n<li>list1</li>\n<li>list2</li>\n</ul>")},
+ }
+
+ c := newTestContentSpec()
+ for i, test := range tests {
+ output := c.TrimShortHTML(test.input)
+ if !bytes.Equal(test.output, output) {
+ t.Errorf("Test %d failed. Expected %q got %q", i, test.output, output)
+ }
+ }
+}
+
+func TestStripEmptyNav(t *testing.T) {
+ c := qt.New(t)
+ cleaned := stripEmptyNav([]byte("do<nav>\n</nav>\n\nbedobedo"))
+ c.Assert(cleaned, qt.DeepEquals, []byte("dobedobedo"))
+}
+
+func TestBytesToHTML(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(BytesToHTML([]byte("dobedobedo")), qt.Equals, template.HTML("dobedobedo"))
+}
+
+func TestNewContentSpec(t *testing.T) {
+ cfg := config.NewWithTestDefaults()
+ c := qt.New(t)
+
+ cfg.Set("summaryLength", 32)
+ cfg.Set("buildFuture", true)
+ cfg.Set("buildExpired", true)
+ cfg.Set("buildDrafts", true)
+
+ spec, err := NewContentSpec(cfg, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(spec.summaryLength, qt.Equals, 32)
+ c.Assert(spec.BuildFuture, qt.Equals, true)
+ c.Assert(spec.BuildExpired, qt.Equals, true)
+ c.Assert(spec.BuildDrafts, qt.Equals, true)
+}
+
+var benchmarkTruncateString = strings.Repeat("This is a sentence about nothing.", 20)
+
+func BenchmarkTestTruncateWordsToWholeSentence(b *testing.B) {
+ c := newTestContentSpec()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ c.TruncateWordsToWholeSentence(benchmarkTruncateString)
+ }
+}
+
+func BenchmarkTestTruncateWordsToWholeSentenceOld(b *testing.B) {
+ c := newTestContentSpec()
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ c.truncateWordsToWholeSentenceOld(benchmarkTruncateString)
+ }
+}
+
+func TestTruncateWordsToWholeSentence(t *testing.T) {
+ c := newTestContentSpec()
+ type test struct {
+ input, expected string
+ max int
+ truncated bool
+ }
+ data := []test{
+ {"a b c", "a b c", 12, false},
+ {"a b c", "a b c", 3, false},
+ {"a", "a", 1, false},
+ {"This is a sentence.", "This is a sentence.", 5, false},
+ {"This is also a sentence!", "This is also a sentence!", 1, false},
+ {"To be. Or not to be. That's the question.", "To be.", 1, true},
+ {" \nThis is not a sentence\nAnd this is another", "This is not a sentence", 4, true},
+ {"", "", 10, false},
+ {"This... is a more difficult test?", "This... is a more difficult test?", 1, false},
+ }
+ for i, d := range data {
+ c.summaryLength = d.max
+ output, truncated := c.TruncateWordsToWholeSentence(d.input)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+
+ if d.truncated != truncated {
+ t.Errorf("Test %d failed. Expected truncated=%t got %t", i, d.truncated, truncated)
+ }
+ }
+}
+
+func TestTruncateWordsByRune(t *testing.T) {
+ c := newTestContentSpec()
+ type test struct {
+ input, expected string
+ max int
+ truncated bool
+ }
+ data := []test{
+ {"", "", 1, false},
+ {"a b c", "a b c", 12, false},
+ {"a b c", "a b c", 3, false},
+ {"a", "a", 1, false},
+ {"Hello 中国", "", 0, true},
+ {"这是中文,全中文。", "这是中文,", 5, true},
+ {"Hello 中国", "Hello 中", 2, true},
+ {"Hello 中国", "Hello 中国", 3, false},
+ {"Hello中国 Good 好的", "Hello中国 Good 好", 9, true},
+ {"This is a sentence.", "This is", 2, true},
+ {"This is also a sentence!", "This", 1, true},
+ {"To be. Or not to be. That's the question.", "To be. Or not", 4, true},
+ {" \nThis is not a sentence\n ", "This is not", 3, true},
+ }
+ for i, d := range data {
+ c.summaryLength = d.max
+ output, truncated := c.TruncateWordsByRune(strings.Fields(d.input))
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+
+ if d.truncated != truncated {
+ t.Errorf("Test %d failed. Expected truncated=%t got %t", i, d.truncated, truncated)
+ }
+ }
+}
+
+func TestExtractTOCNormalContent(t *testing.T) {
+ content := []byte("<nav>\n<ul>\nTOC<li><a href=\"#")
+
+ actualTocLessContent, actualToc := ExtractTOC(content)
+ expectedTocLess := []byte("TOC<li><a href=\"#")
+ expectedToc := []byte("<nav id=\"TableOfContents\">\n<ul>\n")
+
+ if !bytes.Equal(actualTocLessContent, expectedTocLess) {
+ t.Errorf("Actual tocless (%s) did not equal expected (%s) tocless content", actualTocLessContent, expectedTocLess)
+ }
+
+ if !bytes.Equal(actualToc, expectedToc) {
+ t.Errorf("Actual toc (%s) did not equal expected (%s) toc content", actualToc, expectedToc)
+ }
+}
+
+func TestExtractTOCGreaterThanSeventy(t *testing.T) {
+ content := []byte("<nav>\n<ul>\nTOC This is a very long content which will definitely be greater than seventy, I promise you that.<li><a href=\"#")
+
+ actualTocLessContent, actualToc := ExtractTOC(content)
+ // Because the start of Toc is greater than 70+startpoint of <li> content and empty TOC will be returned
+ expectedToc := []byte("")
+
+ if !bytes.Equal(actualTocLessContent, content) {
+ t.Errorf("Actual tocless (%s) did not equal expected (%s) tocless content", actualTocLessContent, content)
+ }
+
+ if !bytes.Equal(actualToc, expectedToc) {
+ t.Errorf("Actual toc (%s) did not equal expected (%s) toc content", actualToc, expectedToc)
+ }
+}
+
+func TestExtractNoTOC(t *testing.T) {
+ content := []byte("TOC")
+
+ actualTocLessContent, actualToc := ExtractTOC(content)
+ expectedToc := []byte("")
+
+ if !bytes.Equal(actualTocLessContent, content) {
+ t.Errorf("Actual tocless (%s) did not equal expected (%s) tocless content", actualTocLessContent, content)
+ }
+
+ if !bytes.Equal(actualToc, expectedToc) {
+ t.Errorf("Actual toc (%s) did not equal expected (%s) toc content", actualToc, expectedToc)
+ }
+}
+
+var totalWordsBenchmarkString = strings.Repeat("Hugo Rocks ", 200)
+
+func TestTotalWords(t *testing.T) {
+ for i, this := range []struct {
+ s string
+ words int
+ }{
+ {"Two, Words!", 2},
+ {"Word", 1},
+ {"", 0},
+ {"One, Two, Three", 3},
+ {totalWordsBenchmarkString, 400},
+ } {
+ actualWordCount := TotalWords(this.s)
+
+ if actualWordCount != this.words {
+ t.Errorf("[%d] Actual word count (%d) for test string (%s) did not match %d", i, actualWordCount, this.s, this.words)
+ }
+ }
+}
+
+func BenchmarkTotalWords(b *testing.B) {
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ wordCount := TotalWords(totalWordsBenchmarkString)
+ if wordCount != 400 {
+ b.Fatal("Wordcount error")
+ }
+ }
+}
diff --git a/helpers/docshelper.go b/helpers/docshelper.go
new file mode 100644
index 000000000..35d07d366
--- /dev/null
+++ b/helpers/docshelper.go
@@ -0,0 +1,37 @@
+package helpers
+
+import (
+ "sort"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/gohugoio/hugo/docshelper"
+)
+
+// This is just a set of helpers used to create some JSON used in the Hugo docs.
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+ var chromaLexers []any
+
+ sort.Sort(lexers.GlobalLexerRegistry.Lexers)
+
+ for _, l := range lexers.GlobalLexerRegistry.Lexers {
+
+ config := l.Config()
+
+ lexerEntry := struct {
+ Name string
+ Aliases []string
+ }{
+ config.Name,
+ config.Aliases,
+ }
+
+ chromaLexers = append(chromaLexers, lexerEntry)
+
+ }
+
+ return docshelper.DocProvider{"chroma": map[string]any{"lexers": chromaLexers}}
+ }
+
+ docshelper.AddDocProviderFunc(docsProvider)
+}
diff --git a/helpers/emoji.go b/helpers/emoji.go
new file mode 100644
index 000000000..eb47ff448
--- /dev/null
+++ b/helpers/emoji.go
@@ -0,0 +1,96 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "bytes"
+ "sync"
+
+ "github.com/kyokomi/emoji/v2"
+)
+
+var (
+ emojiInit sync.Once
+
+ emojis = make(map[string][]byte)
+
+ emojiDelim = []byte(":")
+ emojiWordDelim = []byte(" ")
+ emojiMaxSize int
+)
+
+// Emoji returns the emoji given a key, e.g. ":smile:", nil if not found.
+func Emoji(key string) []byte {
+ emojiInit.Do(initEmoji)
+ return emojis[key]
+}
+
+// Emojify "emojifies" the input source.
+// Note that the input byte slice will be modified if needed.
+// See http://www.emoji-cheat-sheet.com/
+func Emojify(source []byte) []byte {
+ emojiInit.Do(initEmoji)
+
+ start := 0
+ k := bytes.Index(source[start:], emojiDelim)
+
+ for k != -1 {
+
+ j := start + k
+
+ upper := j + emojiMaxSize
+
+ if upper > len(source) {
+ upper = len(source)
+ }
+
+ endEmoji := bytes.Index(source[j+1:upper], emojiDelim)
+ nextWordDelim := bytes.Index(source[j:upper], emojiWordDelim)
+
+ if endEmoji < 0 {
+ start++
+ } else if endEmoji == 0 || (nextWordDelim != -1 && nextWordDelim < endEmoji) {
+ start += endEmoji + 1
+ } else {
+ endKey := endEmoji + j + 2
+ emojiKey := source[j:endKey]
+
+ if emoji, ok := emojis[string(emojiKey)]; ok {
+ source = append(source[:j], append(emoji, source[endKey:]...)...)
+ }
+
+ start += endEmoji
+ }
+
+ if start >= len(source) {
+ break
+ }
+
+ k = bytes.Index(source[start:], emojiDelim)
+ }
+
+ return source
+}
+
+func initEmoji() {
+ emojiMap := emoji.CodeMap()
+
+ for k, v := range emojiMap {
+ emojis[k] = []byte(v)
+
+ if len(k) > emojiMaxSize {
+ emojiMaxSize = len(k)
+ }
+ }
+}
diff --git a/helpers/emoji_test.go b/helpers/emoji_test.go
new file mode 100644
index 000000000..6485bb5fe
--- /dev/null
+++ b/helpers/emoji_test.go
@@ -0,0 +1,143 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package helpers
+
+import (
+ "math"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/bufferpool"
+ "github.com/kyokomi/emoji/v2"
+)
+
+func TestEmojiCustom(t *testing.T) {
+ for i, this := range []struct {
+ input string
+ expect []byte
+ }{
+ {"A :smile: a day", []byte("A 😄 a day")},
+ {"A few :smile:s a day", []byte("A few 😄s a day")},
+ {"A :smile: and a :beer: makes the day for sure.", []byte("A 😄 and a 🍺 makes the day for sure.")},
+ {"A :smile: and: a :beer:", []byte("A 😄 and: a 🍺")},
+ {"A :diamond_shape_with_a_dot_inside: and then some.", []byte("A 💠 and then some.")},
+ {":smile:", []byte("😄")},
+ {":smi", []byte(":smi")},
+ {"A :smile:", []byte("A 😄")},
+ {":beer:!", []byte("🍺!")},
+ {"::smile:", []byte(":😄")},
+ {":beer::", []byte("🍺:")},
+ {" :beer: :", []byte(" 🍺 :")},
+ {":beer: and :smile: and another :beer:!", []byte("🍺 and 😄 and another 🍺!")},
+ {" :beer: : ", []byte(" 🍺 : ")},
+ {"No smilies for you!", []byte("No smilies for you!")},
+ {" The motto: no smiles! ", []byte(" The motto: no smiles! ")},
+ {":hugo_is_the_best_static_gen:", []byte(":hugo_is_the_best_static_gen:")},
+ {"은행 :smile: 은행", []byte("은행 😄 은행")},
+ // #2198
+ {"See: A :beer:!", []byte("See: A 🍺!")},
+ {`Aaaaaaaaaa: aaaaaaaaaa aaaaaaaaaa aaaaaaaaaa.
+
+:beer:`, []byte(`Aaaaaaaaaa: aaaaaaaaaa aaaaaaaaaa aaaaaaaaaa.
+
+🍺`)},
+ {"test :\n```bash\nthis is a test\n```\n\ntest\n\n:cool::blush:::pizza:\\:blush : : blush: :pizza:", []byte("test :\n```bash\nthis is a test\n```\n\ntest\n\n🆒😊:🍕\\:blush : : blush: 🍕")},
+ {
+ // 2391
+ "[a](http://gohugo.io) :smile: [r](http://gohugo.io/introduction/overview/) :beer:",
+ []byte(`[a](http://gohugo.io) 😄 [r](http://gohugo.io/introduction/overview/) 🍺`),
+ },
+ } {
+
+ result := Emojify([]byte(this.input))
+
+ if !reflect.DeepEqual(result, this.expect) {
+ t.Errorf("[%d] got %q but expected %q", i, result, this.expect)
+ }
+
+ }
+}
+
+// The Emoji benchmarks below are heavily skewed in Hugo's direction:
+//
+// Hugo has a byte slice, wants a byte slice and doesn't mind if the original is modified.
+
+func BenchmarkEmojiKyokomiFprint(b *testing.B) {
+ f := func(in []byte) []byte {
+ buff := bufferpool.GetBuffer()
+ defer bufferpool.PutBuffer(buff)
+ emoji.Fprint(buff, string(in))
+
+ bc := make([]byte, buff.Len())
+ copy(bc, buff.Bytes())
+ return bc
+ }
+
+ doBenchmarkEmoji(b, f)
+}
+
+func BenchmarkEmojiKyokomiSprint(b *testing.B) {
+ f := func(in []byte) []byte {
+ return []byte(emoji.Sprint(string(in)))
+ }
+
+ doBenchmarkEmoji(b, f)
+}
+
+func BenchmarkHugoEmoji(b *testing.B) {
+ doBenchmarkEmoji(b, Emojify)
+}
+
+func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
+ type input struct {
+ in []byte
+ expect []byte
+ }
+
+ data := []struct {
+ input string
+ expect string
+ }{
+ {"A :smile: a day", emoji.Sprint("A :smile: a day")},
+ {"A :smile: and a :beer: day keeps the doctor away", emoji.Sprint("A :smile: and a :beer: day keeps the doctor away")},
+ {"A :smile: a day and 10 " + strings.Repeat(":beer: ", 10), emoji.Sprint("A :smile: a day and 10 " + strings.Repeat(":beer: ", 10))},
+ {"No smiles today.", "No smiles today."},
+ {"No smiles for you or " + strings.Repeat("you ", 1000), "No smiles for you or " + strings.Repeat("you ", 1000)},
+ }
+
+ in := make([]input, b.N*len(data))
+ cnt := 0
+ for i := 0; i < b.N; i++ {
+ for _, this := range data {
+ in[cnt] = input{[]byte(this.input), []byte(this.expect)}
+ cnt++
+ }
+ }
+
+ b.ResetTimer()
+ cnt = 0
+ for i := 0; i < b.N; i++ {
+ for j := range data {
+ currIn := in[cnt]
+ cnt++
+ result := f(currIn.in)
+ // The Emoji implementations gives slightly different output.
+ diffLen := len(result) - len(currIn.expect)
+ diffLen = int(math.Abs(float64(diffLen)))
+ if diffLen > 30 {
+ b.Fatalf("[%d] emoji std, got \n%q but expected \n%q", j, result, currIn.expect)
+ }
+ }
+ }
+}
diff --git a/helpers/general.go b/helpers/general.go
new file mode 100644
index 000000000..462ec773d
--- /dev/null
+++ b/helpers/general.go
@@ -0,0 +1,542 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "bytes"
+ "crypto/md5"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "net"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/mitchellh/hashstructure"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/spf13/afero"
+
+ "github.com/jdkato/prose/transform"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/spf13/pflag"
+)
+
+// FilePathSeparator as defined by os.Separator.
+const FilePathSeparator = string(filepath.Separator)
+
+// FindAvailablePort returns an available and valid TCP port.
+func FindAvailablePort() (*net.TCPAddr, error) {
+ l, err := net.Listen("tcp", ":0")
+ if err == nil {
+ defer l.Close()
+ addr := l.Addr()
+ if a, ok := addr.(*net.TCPAddr); ok {
+ return a, nil
+ }
+ return nil, fmt.Errorf("unable to obtain a valid tcp port: %v", addr)
+ }
+ return nil, err
+}
+
+// TCPListen starts listening on a valid TCP port.
+func TCPListen() (net.Listener, *net.TCPAddr, error) {
+ l, err := net.Listen("tcp", ":0")
+ if err != nil {
+ return nil, nil, err
+ }
+ addr := l.Addr()
+ if a, ok := addr.(*net.TCPAddr); ok {
+ return l, a, nil
+ }
+ l.Close()
+ return nil, nil, fmt.Errorf("unable to obtain a valid tcp port: %v", addr)
+
+}
+
+// InStringArray checks if a string is an element of a slice of strings
+// and returns a boolean value.
+func InStringArray(arr []string, el string) bool {
+ for _, v := range arr {
+ if v == el {
+ return true
+ }
+ }
+ return false
+}
+
+// FirstUpper returns a string with the first character as upper case.
+func FirstUpper(s string) string {
+ if s == "" {
+ return ""
+ }
+ r, n := utf8.DecodeRuneInString(s)
+ return string(unicode.ToUpper(r)) + s[n:]
+}
+
+// UniqueStrings returns a new slice with any duplicates removed.
+func UniqueStrings(s []string) []string {
+ unique := make([]string, 0, len(s))
+ for i, val := range s {
+ var seen bool
+ for j := 0; j < i; j++ {
+ if s[j] == val {
+ seen = true
+ break
+ }
+ }
+ if !seen {
+ unique = append(unique, val)
+ }
+ }
+ return unique
+}
+
+// UniqueStringsReuse returns a slice with any duplicates removed.
+// It will modify the input slice.
+func UniqueStringsReuse(s []string) []string {
+ result := s[:0]
+ for i, val := range s {
+ var seen bool
+
+ for j := 0; j < i; j++ {
+ if s[j] == val {
+ seen = true
+ break
+ }
+ }
+
+ if !seen {
+ result = append(result, val)
+ }
+ }
+ return result
+}
+
+// UniqueStringsSorted returns a sorted slice with any duplicates removed.
+// It will modify the input slice.
+func UniqueStringsSorted(s []string) []string {
+ if len(s) == 0 {
+ return nil
+ }
+ ss := sort.StringSlice(s)
+ ss.Sort()
+ i := 0
+ for j := 1; j < len(s); j++ {
+ if !ss.Less(i, j) {
+ continue
+ }
+ i++
+ s[i] = s[j]
+ }
+
+ return s[:i+1]
+}
+
+// ReaderToBytes takes an io.Reader argument, reads from it
+// and returns bytes.
+func ReaderToBytes(lines io.Reader) []byte {
+ if lines == nil {
+ return []byte{}
+ }
+ b := bp.GetBuffer()
+ defer bp.PutBuffer(b)
+
+ b.ReadFrom(lines)
+
+ bc := make([]byte, b.Len())
+ copy(bc, b.Bytes())
+ return bc
+}
+
+// ReaderToString is the same as ReaderToBytes, but returns a string.
+func ReaderToString(lines io.Reader) string {
+ if lines == nil {
+ return ""
+ }
+ b := bp.GetBuffer()
+ defer bp.PutBuffer(b)
+ b.ReadFrom(lines)
+ return b.String()
+}
+
+// ReaderContains reports whether subslice is within r.
+func ReaderContains(r io.Reader, subslice []byte) bool {
+ if r == nil || len(subslice) == 0 {
+ return false
+ }
+
+ bufflen := len(subslice) * 4
+ halflen := bufflen / 2
+ buff := make([]byte, bufflen)
+ var err error
+ var n, i int
+
+ for {
+ i++
+ if i == 1 {
+ n, err = io.ReadAtLeast(r, buff[:halflen], halflen)
+ } else {
+ if i != 2 {
+ // shift left to catch overlapping matches
+ copy(buff[:], buff[halflen:])
+ }
+ n, err = io.ReadAtLeast(r, buff[halflen:], halflen)
+ }
+
+ if n > 0 && bytes.Contains(buff, subslice) {
+ return true
+ }
+
+ if err != nil {
+ break
+ }
+ }
+ return false
+}
+
+// GetTitleFunc returns a func that can be used to transform a string to
+// title case.
+//
+// The supported styles are
+//
+// - "Go" (strings.Title)
+// - "AP" (see https://www.apstylebook.com/)
+// - "Chicago" (see http://www.chicagomanualofstyle.org/home.html)
+//
+// If an unknown or empty style is provided, AP style is what you get.
+func GetTitleFunc(style string) func(s string) string {
+ switch strings.ToLower(style) {
+ case "go":
+ return strings.Title
+ case "chicago":
+ tc := transform.NewTitleConverter(transform.ChicagoStyle)
+ return tc.Title
+ default:
+ tc := transform.NewTitleConverter(transform.APStyle)
+ return tc.Title
+ }
+}
+
+// HasStringsPrefix tests whether the string slice s begins with prefix slice s.
+func HasStringsPrefix(s, prefix []string) bool {
+ return len(s) >= len(prefix) && compareStringSlices(s[0:len(prefix)], prefix)
+}
+
+// HasStringsSuffix tests whether the string slice s ends with suffix slice s.
+func HasStringsSuffix(s, suffix []string) bool {
+ return len(s) >= len(suffix) && compareStringSlices(s[len(s)-len(suffix):], suffix)
+}
+
+func compareStringSlices(a, b []string) bool {
+ if a == nil && b == nil {
+ return true
+ }
+
+ if a == nil || b == nil {
+ return false
+ }
+
+ if len(a) != len(b) {
+ return false
+ }
+
+ for i := range a {
+ if a[i] != b[i] {
+ return false
+ }
+ }
+
+ return true
+}
+
+// DistinctLogger ignores duplicate log statements.
+type DistinctLogger struct {
+ loggers.Logger
+ sync.RWMutex
+ m map[string]bool
+}
+
+func (l *DistinctLogger) Reset() {
+ l.Lock()
+ defer l.Unlock()
+
+ l.m = make(map[string]bool)
+}
+
+// Println will log the string returned from fmt.Sprintln given the arguments,
+// but not if it has been logged before.
+func (l *DistinctLogger) Println(v ...any) {
+ // fmt.Sprint doesn't add space between string arguments
+ logStatement := strings.TrimSpace(fmt.Sprintln(v...))
+ l.printIfNotPrinted("println", logStatement, func() {
+ l.Logger.Println(logStatement)
+ })
+}
+
+// Printf will log the string returned from fmt.Sprintf given the arguments,
+// but not if it has been logged before.
+func (l *DistinctLogger) Printf(format string, v ...any) {
+ logStatement := fmt.Sprintf(format, v...)
+ l.printIfNotPrinted("printf", logStatement, func() {
+ l.Logger.Printf(format, v...)
+ })
+}
+
+func (l *DistinctLogger) Debugf(format string, v ...any) {
+ logStatement := fmt.Sprintf(format, v...)
+ l.printIfNotPrinted("debugf", logStatement, func() {
+ l.Logger.Debugf(format, v...)
+ })
+}
+
+func (l *DistinctLogger) Debugln(v ...any) {
+ logStatement := fmt.Sprint(v...)
+ l.printIfNotPrinted("debugln", logStatement, func() {
+ l.Logger.Debugln(v...)
+ })
+}
+
+func (l *DistinctLogger) Infof(format string, v ...any) {
+ logStatement := fmt.Sprintf(format, v...)
+ l.printIfNotPrinted("info", logStatement, func() {
+ l.Logger.Infof(format, v...)
+ })
+}
+
+func (l *DistinctLogger) Infoln(v ...any) {
+ logStatement := fmt.Sprint(v...)
+ l.printIfNotPrinted("infoln", logStatement, func() {
+ l.Logger.Infoln(v...)
+ })
+}
+
+func (l *DistinctLogger) Warnf(format string, v ...any) {
+ logStatement := fmt.Sprintf(format, v...)
+ l.printIfNotPrinted("warnf", logStatement, func() {
+ l.Logger.Warnf(format, v...)
+ })
+}
+
+func (l *DistinctLogger) Warnln(v ...any) {
+ logStatement := fmt.Sprint(v...)
+ l.printIfNotPrinted("warnln", logStatement, func() {
+ l.Logger.Warnln(v...)
+ })
+}
+
+func (l *DistinctLogger) Errorf(format string, v ...any) {
+	logStatement := fmt.Sprintf(format, v...)
+	l.printIfNotPrinted("errorf", logStatement, func() {
+		l.Logger.Errorf(format, v...)
+	})
+}
+
+func (l *DistinctLogger) Errorln(v ...any) {
+ logStatement := fmt.Sprint(v...)
+ l.printIfNotPrinted("errorln", logStatement, func() {
+ l.Logger.Errorln(v...)
+ })
+}
+
+func (l *DistinctLogger) hasPrinted(key string) bool {
+ l.RLock()
+ defer l.RUnlock()
+ _, found := l.m[key]
+ return found
+}
+
+func (l *DistinctLogger) printIfNotPrinted(level, logStatement string, print func()) {
+ key := level + logStatement
+ if l.hasPrinted(key) {
+ return
+ }
+ l.Lock()
+ defer l.Unlock()
+ l.m[key] = true // Placing this after print() can cause duplicate warning entries to be logged when --panicOnWarning is true.
+ print()
+
+}
+
+// NewDistinctErrorLogger creates a new DistinctLogger that logs ERRORs
+func NewDistinctErrorLogger() loggers.Logger {
+ return &DistinctLogger{m: make(map[string]bool), Logger: loggers.NewErrorLogger()}
+}
+
+// NewDistinctLogger creates a new DistinctLogger that logs to the provided logger.
+func NewDistinctLogger(logger loggers.Logger) loggers.Logger {
+ return &DistinctLogger{m: make(map[string]bool), Logger: logger}
+}
+
+// NewDistinctWarnLogger creates a new DistinctLogger that logs WARNs
+func NewDistinctWarnLogger() loggers.Logger {
+ return &DistinctLogger{m: make(map[string]bool), Logger: loggers.NewWarningLogger()}
+}
+
+var (
+ // DistinctErrorLog can be used to avoid spamming the logs with errors.
+ DistinctErrorLog = NewDistinctErrorLogger()
+
+ // DistinctWarnLog can be used to avoid spamming the logs with warnings.
+ DistinctWarnLog = NewDistinctWarnLogger()
+)
+
+// InitLoggers resets the global distinct loggers.
+func InitLoggers() {
+ DistinctErrorLog.Reset()
+ DistinctWarnLog.Reset()
+}
+
+// Deprecated informs about a deprecation, but only once for a given set of arguments' values.
+// If the err flag is enabled, it logs as an ERROR (will exit with -1) and the text will
+// point at the next Hugo release.
+// The idea is to remove an item in two Hugo releases to give users and theme authors
+// plenty of time to fix their templates.
+func Deprecated(item, alternative string, err bool) {
+ if err {
+ DistinctErrorLog.Errorf("%s is deprecated and will be removed in Hugo %s. %s", item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative)
+ } else {
+ var warnPanicMessage string
+ if !loggers.PanicOnWarning {
+ warnPanicMessage = "\n\nRe-run Hugo with the flag --panicOnWarning to get a better error message."
+ }
+ DistinctWarnLog.Warnf("%s is deprecated and will be removed in a future release. %s%s", item, alternative, warnPanicMessage)
+ }
+}
+
+// SliceToLower goes through the source slice and lowers all values.
+func SliceToLower(s []string) []string {
+ if s == nil {
+ return nil
+ }
+
+ l := make([]string, len(s))
+ for i, v := range s {
+ l[i] = strings.ToLower(v)
+ }
+
+ return l
+}
+
+// MD5String takes a string and returns its MD5 hash.
+func MD5String(f string) string {
+ h := md5.New()
+ h.Write([]byte(f))
+ return hex.EncodeToString(h.Sum([]byte{}))
+}
+
+// MD5FromFileFast creates a MD5 hash from the given file. It only reads parts of
+// the file for speed, so don't use it if the files are very subtly different.
+// It will not close the file.
+func MD5FromFileFast(r io.ReadSeeker) (string, error) {
+ const (
+ // Do not change once set in stone!
+ maxChunks = 8
+ peekSize = 64
+ seek = 2048
+ )
+
+ h := md5.New()
+ buff := make([]byte, peekSize)
+
+ for i := 0; i < maxChunks; i++ {
+ if i > 0 {
+ _, err := r.Seek(seek, 0)
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ return "", err
+ }
+ }
+
+ _, err := io.ReadAtLeast(r, buff, peekSize)
+ if err != nil {
+ if err == io.EOF || err == io.ErrUnexpectedEOF {
+ h.Write(buff)
+ break
+ }
+ return "", err
+ }
+ h.Write(buff)
+ }
+
+ return hex.EncodeToString(h.Sum(nil)), nil
+}
+
+// MD5FromReader creates a MD5 hash from the given reader.
+func MD5FromReader(r io.Reader) (string, error) {
+	h := md5.New()
+	if _, err := io.Copy(h, r); err != nil {
+		return "", err
+	}
+	return hex.EncodeToString(h.Sum(nil)), nil
+}
+
+// IsWhitespace determines if the given rune is whitespace.
+func IsWhitespace(r rune) bool {
+ return r == ' ' || r == '\t' || r == '\n' || r == '\r'
+}
+
+// NormalizeHugoFlags facilitates transitions of Hugo command-line flags,
+// e.g. --baseUrl to --baseURL, --uglyUrls to --uglyURLs
+func NormalizeHugoFlags(f *pflag.FlagSet, name string) pflag.NormalizedName {
+ switch name {
+ case "baseUrl":
+ name = "baseURL"
+ case "uglyUrls":
+ name = "uglyURLs"
+ }
+ return pflag.NormalizedName(name)
+}
+
+// PrintFs prints the given filesystem to the given writer starting from the given path.
+// This is useful for debugging.
+func PrintFs(fs afero.Fs, path string, w io.Writer) {
+ if fs == nil {
+ return
+ }
+
+ afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+ fmt.Println(path)
+ return nil
+ })
+}
+
+// HashString returns a hash from the given elements.
+// It will panic if the hash cannot be calculated.
+func HashString(elements ...any) string {
+ var o any
+ if len(elements) == 1 {
+ o = elements[0]
+ } else {
+ o = elements
+ }
+
+ hash, err := hashstructure.Hash(o, nil)
+ if err != nil {
+ panic(err)
+ }
+ return strconv.FormatUint(hash, 10)
+}
diff --git a/helpers/general_test.go b/helpers/general_test.go
new file mode 100644
index 000000000..75119f01d
--- /dev/null
+++ b/helpers/general_test.go
@@ -0,0 +1,467 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/afero"
+)
+
+func TestResolveMarkup(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.NewWithTestDefaults()
+ spec, err := NewContentSpec(cfg, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
+ c.Assert(err, qt.IsNil)
+
+ for i, this := range []struct {
+ in string
+ expect string
+ }{
+ {"md", "markdown"},
+ {"markdown", "markdown"},
+ {"mdown", "markdown"},
+ {"asciidocext", "asciidocext"},
+ {"adoc", "asciidocext"},
+ {"ad", "asciidocext"},
+ {"rst", "rst"},
+ {"pandoc", "pandoc"},
+ {"pdc", "pandoc"},
+ {"html", "html"},
+ {"htm", "html"},
+ {"org", "org"},
+ {"excel", ""},
+ } {
+ result := spec.ResolveMarkup(this.in)
+ if result != this.expect {
+ t.Errorf("[%d] got %s but expected %s", i, result, this.expect)
+ }
+ }
+}
+
+func TestDistinctLoggerDoesNotLockOnWarningPanic(t *testing.T) {
+ // Testing to make sure logger mutex doesn't lock if warnings cause panics.
+ // func Warnf() of DistinctLogger is defined in general.go
+ l := NewDistinctLogger(loggers.NewWarningLogger())
+
+ // Set PanicOnWarning to true to reproduce issue 9380
+ // Ensure global variable loggers.PanicOnWarning is reset to old value after test
+ if loggers.PanicOnWarning == false {
+ loggers.PanicOnWarning = true
+ defer func() {
+ loggers.PanicOnWarning = false
+ }()
+ }
+
+ // Establish timeout in case a lock occurs:
+ timeIsUp := make(chan bool)
+ timeOutSeconds := 1
+ go func() {
+ time.Sleep(time.Second * time.Duration(timeOutSeconds))
+ timeIsUp <- true
+ }()
+
+ // Attempt to run multiple logging threads in parallel
+ counterC := make(chan int)
+ goroutines := 5
+
+ for i := 0; i < goroutines; i++ {
+ go func() {
+ defer func() {
+ // Intentional panic successfully recovered - notify counter channel
+ recover()
+ counterC <- 1
+ }()
+
+ l.Warnf("Placeholder template message: %v", "In this test, logging a warning causes a panic.")
+ }()
+ }
+
+ // All goroutines should complete before timeout
+ var counter int
+ for {
+ select {
+ case <-counterC:
+ counter++
+ if counter == goroutines {
+ return
+ }
+ case <-timeIsUp:
+ t.Errorf("Unable to log warnings with --panicOnWarning within alloted time of: %v seconds. Investigate possible mutex locking on panic in distinct warning logger.", timeOutSeconds)
+ return
+ }
+ }
+}
+
+func TestFirstUpper(t *testing.T) {
+ for i, this := range []struct {
+ in string
+ expect string
+ }{
+ {"foo", "Foo"},
+ {"foo bar", "Foo bar"},
+ {"Foo Bar", "Foo Bar"},
+ {"", ""},
+ {"å", "Å"},
+ } {
+ result := FirstUpper(this.in)
+ if result != this.expect {
+ t.Errorf("[%d] got %s but expected %s", i, result, this.expect)
+ }
+ }
+}
+
+func TestHasStringsPrefix(t *testing.T) {
+ for i, this := range []struct {
+ s []string
+ prefix []string
+ expect bool
+ }{
+ {[]string{"a"}, []string{"a"}, true},
+ {[]string{}, []string{}, true},
+ {[]string{"a", "b", "c"}, []string{"a", "b"}, true},
+ {[]string{"d", "a", "b", "c"}, []string{"a", "b"}, false},
+ {[]string{"abra", "ca", "dabra"}, []string{"abra", "ca"}, true},
+ {[]string{"abra", "ca"}, []string{"abra", "ca", "dabra"}, false},
+ } {
+ result := HasStringsPrefix(this.s, this.prefix)
+ if result != this.expect {
+ t.Fatalf("[%d] got %t but expected %t", i, result, this.expect)
+ }
+ }
+}
+
+func TestHasStringsSuffix(t *testing.T) {
+ for i, this := range []struct {
+ s []string
+ suffix []string
+ expect bool
+ }{
+ {[]string{"a"}, []string{"a"}, true},
+ {[]string{}, []string{}, true},
+ {[]string{"a", "b", "c"}, []string{"b", "c"}, true},
+ {[]string{"abra", "ca", "dabra"}, []string{"abra", "ca"}, false},
+ {[]string{"abra", "ca", "dabra"}, []string{"ca", "dabra"}, true},
+ } {
+ result := HasStringsSuffix(this.s, this.suffix)
+ if result != this.expect {
+ t.Fatalf("[%d] got %t but expected %t", i, result, this.expect)
+ }
+ }
+}
+
+var containsTestText = (`На берегу пустынных волн
+Стоял он, дум великих полн,
+И вдаль глядел. Пред ним широко
+Река неслася; бедный чёлн
+По ней стремился одиноко.
+По мшистым, топким берегам
+Чернели избы здесь и там,
+Приют убогого чухонца;
+И лес, неведомый лучам
+В тумане спрятанного солнца,
+Кругом шумел.
+
+Τη γλώσσα μου έδωσαν ελληνική
+το σπίτι φτωχικό στις αμμουδιές του Ομήρου.
+Μονάχη έγνοια η γλώσσα μου στις αμμουδιές του Ομήρου.
+
+από το Άξιον Εστί
+του Οδυσσέα Ελύτη
+
+Sîne klâwen durh die wolken sint geslagen,
+er stîget ûf mit grôzer kraft,
+ich sih in grâwen tägelîch als er wil tagen,
+den tac, der im geselleschaft
+erwenden wil, dem werden man,
+den ich mit sorgen în verliez.
+ich bringe in hinnen, ob ich kan.
+sîn vil manegiu tugent michz leisten hiez.
+`)
+
+var containsBenchTestData = []struct {
+ v1 string
+ v2 []byte
+ expect bool
+}{
+ {"abc", []byte("a"), true},
+ {"abc", []byte("b"), true},
+ {"abcdefg", []byte("efg"), true},
+ {"abc", []byte("d"), false},
+ {containsTestText, []byte("стремился"), true},
+ {containsTestText, []byte(containsTestText[10:80]), true},
+ {containsTestText, []byte(containsTestText[100:111]), true},
+ {containsTestText, []byte(containsTestText[len(containsTestText)-100 : len(containsTestText)-10]), true},
+ {containsTestText, []byte(containsTestText[len(containsTestText)-20:]), true},
+ {containsTestText, []byte("notfound"), false},
+}
+
+// some corner cases
+var containsAdditionalTestData = []struct {
+ v1 string
+ v2 []byte
+ expect bool
+}{
+ {"", nil, false},
+ {"", []byte("a"), false},
+ {"a", []byte(""), false},
+ {"", []byte(""), false},
+}
+
+func TestSliceToLower(t *testing.T) {
+ t.Parallel()
+ tests := []struct {
+ value []string
+ expected []string
+ }{
+ {[]string{"a", "b", "c"}, []string{"a", "b", "c"}},
+ {[]string{"a", "B", "c"}, []string{"a", "b", "c"}},
+ {[]string{"A", "B", "C"}, []string{"a", "b", "c"}},
+ }
+
+ for _, test := range tests {
+ res := SliceToLower(test.value)
+ for i, val := range res {
+ if val != test.expected[i] {
+ t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
+ }
+ }
+ }
+}
+
+func TestReaderContains(t *testing.T) {
+ c := qt.New(t)
+ for i, this := range append(containsBenchTestData, containsAdditionalTestData...) {
+ result := ReaderContains(strings.NewReader(this.v1), this.v2)
+ if result != this.expect {
+ t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
+ }
+ }
+
+ c.Assert(ReaderContains(nil, []byte("a")), qt.Equals, false)
+ c.Assert(ReaderContains(nil, nil), qt.Equals, false)
+}
+
+func TestGetTitleFunc(t *testing.T) {
+ title := "somewhere over the rainbow"
+ c := qt.New(t)
+
+ c.Assert(GetTitleFunc("go")(title), qt.Equals, "Somewhere Over The Rainbow")
+ c.Assert(GetTitleFunc("chicago")(title), qt.Equals, "Somewhere over the Rainbow")
+ c.Assert(GetTitleFunc("Chicago")(title), qt.Equals, "Somewhere over the Rainbow")
+ c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
+ c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
+ c.Assert(GetTitleFunc("")(title), qt.Equals, "Somewhere Over the Rainbow")
+ c.Assert(GetTitleFunc("unknown")(title), qt.Equals, "Somewhere Over the Rainbow")
+}
+
+func BenchmarkReaderContains(b *testing.B) {
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ for i, this := range containsBenchTestData {
+ result := ReaderContains(strings.NewReader(this.v1), this.v2)
+ if result != this.expect {
+ b.Errorf("[%d] got %t but expected %t", i, result, this.expect)
+ }
+ }
+ }
+}
+
+func TestUniqueStrings(t *testing.T) {
+ in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
+ output := UniqueStrings(in)
+ expected := []string{"a", "b", "c", "", "d"}
+ if !reflect.DeepEqual(output, expected) {
+ t.Errorf("Expected %#v, got %#v\n", expected, output)
+ }
+}
+
+func TestUniqueStringsReuse(t *testing.T) {
+ in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
+ output := UniqueStringsReuse(in)
+ expected := []string{"a", "b", "c", "", "d"}
+ if !reflect.DeepEqual(output, expected) {
+ t.Errorf("Expected %#v, got %#v\n", expected, output)
+ }
+}
+
+func TestUniqueStringsSorted(t *testing.T) {
+ c := qt.New(t)
+ in := []string{"a", "a", "b", "c", "b", "", "a", "", "d"}
+ output := UniqueStringsSorted(in)
+ expected := []string{"", "a", "b", "c", "d"}
+ c.Assert(output, qt.DeepEquals, expected)
+ c.Assert(UniqueStringsSorted(nil), qt.IsNil)
+}
+
+func TestFindAvailablePort(t *testing.T) {
+ c := qt.New(t)
+ addr, err := FindAvailablePort()
+ c.Assert(err, qt.IsNil)
+ c.Assert(addr, qt.Not(qt.IsNil))
+ c.Assert(addr.Port > 0, qt.Equals, true)
+}
+
+func TestFastMD5FromFile(t *testing.T) {
+ fs := afero.NewMemMapFs()
+
+ if err := afero.WriteFile(fs, "small.txt", []byte("abc"), 0777); err != nil {
+ t.Fatal(err)
+ }
+
+ if err := afero.WriteFile(fs, "small2.txt", []byte("abd"), 0777); err != nil {
+ t.Fatal(err)
+ }
+
+ if err := afero.WriteFile(fs, "bigger.txt", []byte(strings.Repeat("a bc d e", 100)), 0777); err != nil {
+ t.Fatal(err)
+ }
+
+ if err := afero.WriteFile(fs, "bigger2.txt", []byte(strings.Repeat("c d e f g", 100)), 0777); err != nil {
+ t.Fatal(err)
+ }
+
+ c := qt.New(t)
+
+ sf1, err := fs.Open("small.txt")
+ c.Assert(err, qt.IsNil)
+ sf2, err := fs.Open("small2.txt")
+ c.Assert(err, qt.IsNil)
+
+ bf1, err := fs.Open("bigger.txt")
+ c.Assert(err, qt.IsNil)
+ bf2, err := fs.Open("bigger2.txt")
+ c.Assert(err, qt.IsNil)
+
+ defer sf1.Close()
+ defer sf2.Close()
+ defer bf1.Close()
+ defer bf2.Close()
+
+ m1, err := MD5FromFileFast(sf1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(m1, qt.Equals, "e9c8989b64b71a88b4efb66ad05eea96")
+
+ m2, err := MD5FromFileFast(sf2)
+ c.Assert(err, qt.IsNil)
+ c.Assert(m2, qt.Not(qt.Equals), m1)
+
+ m3, err := MD5FromFileFast(bf1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(m3, qt.Not(qt.Equals), m2)
+
+ m4, err := MD5FromFileFast(bf2)
+ c.Assert(err, qt.IsNil)
+ c.Assert(m4, qt.Not(qt.Equals), m3)
+
+ m5, err := MD5FromReader(bf2)
+ c.Assert(err, qt.IsNil)
+ c.Assert(m5, qt.Not(qt.Equals), m4)
+}
+
+func BenchmarkMD5FromFileFast(b *testing.B) {
+ fs := afero.NewMemMapFs()
+
+ for _, full := range []bool{false, true} {
+ b.Run(fmt.Sprintf("full=%t", full), func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ b.StopTimer()
+ if err := afero.WriteFile(fs, "file.txt", []byte(strings.Repeat("1234567890", 2000)), 0777); err != nil {
+ b.Fatal(err)
+ }
+ f, err := fs.Open("file.txt")
+ if err != nil {
+ b.Fatal(err)
+ }
+ b.StartTimer()
+ if full {
+ if _, err := MD5FromReader(f); err != nil {
+ b.Fatal(err)
+ }
+ } else {
+ if _, err := MD5FromFileFast(f); err != nil {
+ b.Fatal(err)
+ }
+ }
+ f.Close()
+ }
+ })
+ }
+}
+
+func BenchmarkUniqueStrings(b *testing.B) {
+ input := []string{"a", "b", "d", "e", "d", "h", "a", "i"}
+
+ b.Run("Safe", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ result := UniqueStrings(input)
+ if len(result) != 6 {
+ b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
+ }
+ }
+ })
+
+ b.Run("Reuse slice", func(b *testing.B) {
+ b.StopTimer()
+ inputs := make([][]string, b.N)
+ for i := 0; i < b.N; i++ {
+ inputc := make([]string, len(input))
+ copy(inputc, input)
+ inputs[i] = inputc
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ inputc := inputs[i]
+
+ result := UniqueStringsReuse(inputc)
+ if len(result) != 6 {
+ b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
+ }
+ }
+ })
+
+ b.Run("Reuse slice sorted", func(b *testing.B) {
+ b.StopTimer()
+ inputs := make([][]string, b.N)
+ for i := 0; i < b.N; i++ {
+ inputc := make([]string, len(input))
+ copy(inputc, input)
+ inputs[i] = inputc
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ inputc := inputs[i]
+
+ result := UniqueStringsSorted(inputc)
+ if len(result) != 6 {
+ b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
+ }
+ }
+ })
+}
+
+func TestHashString(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(HashString("a", "b"), qt.Equals, "2712570657419664240")
+ c.Assert(HashString("ab"), qt.Equals, "590647783936702392")
+}
diff --git a/helpers/path.go b/helpers/path.go
new file mode 100644
index 000000000..0fb365f43
--- /dev/null
+++ b/helpers/path.go
@@ -0,0 +1,486 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+ "unicode"
+
+ "github.com/gohugoio/hugo/common/text"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/spf13/afero"
+)
+
+// MakePath takes a string with any characters and replaces it
+// so the string can be used in a path.
+// It does so by creating a Unicode-sanitized string, with the spaces replaced,
+// whilst preserving the original casing of the string.
+// E.g. Social Media -> Social-Media
+func (p *PathSpec) MakePath(s string) string {
+ return p.UnicodeSanitize(s)
+}
+
+// MakePathsSanitized applies MakePathSanitized on every item in the slice
+func (p *PathSpec) MakePathsSanitized(paths []string) {
+ for i, path := range paths {
+ paths[i] = p.MakePathSanitized(path)
+ }
+}
+
+// MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced
+func (p *PathSpec) MakePathSanitized(s string) string {
+ if p.DisablePathToLower {
+ return p.MakePath(s)
+ }
+ return strings.ToLower(p.MakePath(s))
+}
+
+// ToSlashTrimLeading is just a filepath.ToSlash with an added / prefix trimmer.
+func ToSlashTrimLeading(s string) string {
+ return strings.TrimPrefix(filepath.ToSlash(s), "/")
+}
+
+// MakeTitle converts the path given to a suitable title, trimming whitespace
+// and replacing hyphens with whitespace.
+func MakeTitle(inpath string) string {
+ return strings.Replace(strings.TrimSpace(inpath), "-", " ", -1)
+}
+
+// From https://golang.org/src/net/url/url.go
+func ishex(c rune) bool {
+ switch {
+ case '0' <= c && c <= '9':
+ return true
+ case 'a' <= c && c <= 'f':
+ return true
+ case 'A' <= c && c <= 'F':
+ return true
+ }
+ return false
+}
+
+// UnicodeSanitize sanitizes a string to be used in Hugo URLs, allowing only
+// a predefined set of special Unicode characters.
+// If RemovePathAccents configuration flag is enabled, Unicode accents
+// are also removed.
+// Hyphens in the original input are maintained.
+// Spaces will be replaced with a single hyphen, and sequential replacement hyphens will be reduced to one.
+func (p *PathSpec) UnicodeSanitize(s string) string {
+ if p.RemovePathAccents {
+ s = text.RemoveAccentsString(s)
+ }
+
+ source := []rune(s)
+ target := make([]rune, 0, len(source))
+ var (
+ prependHyphen bool
+ wasHyphen bool
+ )
+
+ for i, r := range source {
+ isAllowed := r == '.' || r == '/' || r == '\\' || r == '_' || r == '#' || r == '+' || r == '~' || r == '-'
+ isAllowed = isAllowed || unicode.IsLetter(r) || unicode.IsDigit(r) || unicode.IsMark(r)
+ isAllowed = isAllowed || (r == '%' && i+2 < len(source) && ishex(source[i+1]) && ishex(source[i+2]))
+
+ if isAllowed {
+ // track explicit hyphen in input; no need to add a new hyphen if
+ // we just saw one.
+ wasHyphen = r == '-'
+
+ if prependHyphen {
+ // if currently have a hyphen, don't prepend an extra one
+ if !wasHyphen {
+ target = append(target, '-')
+ }
+ prependHyphen = false
+ }
+ target = append(target, r)
+ } else if len(target) > 0 && !wasHyphen && unicode.IsSpace(r) {
+ prependHyphen = true
+ }
+ }
+
+ return string(target)
+}
+
+func makePathRelative(inPath string, possibleDirectories ...string) (string, error) {
+ for _, currentPath := range possibleDirectories {
+ if strings.HasPrefix(inPath, currentPath) {
+ return strings.TrimPrefix(inPath, currentPath), nil
+ }
+ }
+ return inPath, errors.New("can't extract relative path, unknown prefix")
+}
+
+// Should be good enough for Hugo.
+var isFileRe = regexp.MustCompile(`.*\..{1,6}$`)
+
+// GetDottedRelativePath expects a relative path starting after the content directory.
+// It returns a relative path with dots ("..") navigating up the path structure.
+func GetDottedRelativePath(inPath string) string {
+ inPath = filepath.Clean(filepath.FromSlash(inPath))
+
+ if inPath == "." {
+ return "./"
+ }
+
+ if !isFileRe.MatchString(inPath) && !strings.HasSuffix(inPath, FilePathSeparator) {
+ inPath += FilePathSeparator
+ }
+
+ if !strings.HasPrefix(inPath, FilePathSeparator) {
+ inPath = FilePathSeparator + inPath
+ }
+
+ dir, _ := filepath.Split(inPath)
+
+ sectionCount := strings.Count(dir, FilePathSeparator)
+
+ if sectionCount == 0 || dir == FilePathSeparator {
+ return "./"
+ }
+
+ var dottedPath string
+
+ for i := 1; i < sectionCount; i++ {
+ dottedPath += "../"
+ }
+
+ return dottedPath
+}
+
+type NamedSlice struct {
+ Name string
+ Slice []string
+}
+
+func (n NamedSlice) String() string {
+ if len(n.Slice) == 0 {
+ return n.Name
+ }
+ return fmt.Sprintf("%s%s{%s}", n.Name, FilePathSeparator, strings.Join(n.Slice, ","))
+}
+
+func ExtractAndGroupRootPaths(paths []string) []NamedSlice {
+ if len(paths) == 0 {
+ return nil
+ }
+
+ pathsCopy := make([]string, len(paths))
+ hadSlashPrefix := strings.HasPrefix(paths[0], FilePathSeparator)
+
+ for i, p := range paths {
+ pathsCopy[i] = strings.Trim(filepath.ToSlash(p), "/")
+ }
+
+ sort.Strings(pathsCopy)
+
+ pathsParts := make([][]string, len(pathsCopy))
+
+ for i, p := range pathsCopy {
+ pathsParts[i] = strings.Split(p, "/")
+ }
+
+ var groups [][]string
+
+ for i, p1 := range pathsParts {
+ c1 := -1
+
+ for j, p2 := range pathsParts {
+ if i == j {
+ continue
+ }
+
+ c2 := -1
+
+ for i, v := range p1 {
+ if i >= len(p2) {
+ break
+ }
+ if v != p2[i] {
+ break
+ }
+
+ c2 = i
+ }
+
+ if c1 == -1 || (c2 != -1 && c2 < c1) {
+ c1 = c2
+ }
+ }
+
+ if c1 != -1 {
+ groups = append(groups, p1[:c1+1])
+ } else {
+ groups = append(groups, p1)
+ }
+ }
+
+ groupsStr := make([]string, len(groups))
+ for i, g := range groups {
+ groupsStr[i] = strings.Join(g, "/")
+ }
+
+ groupsStr = UniqueStringsSorted(groupsStr)
+
+ var result []NamedSlice
+
+ for _, g := range groupsStr {
+ name := filepath.FromSlash(g)
+ if hadSlashPrefix {
+ name = FilePathSeparator + name
+ }
+ ns := NamedSlice{Name: name}
+ for _, p := range pathsCopy {
+ if !strings.HasPrefix(p, g) {
+ continue
+ }
+
+ p = strings.TrimPrefix(p, g)
+ if p != "" {
+ ns.Slice = append(ns.Slice, p)
+ }
+ }
+
+ ns.Slice = UniqueStrings(ExtractRootPaths(ns.Slice))
+
+ result = append(result, ns)
+ }
+
+ return result
+}
+
+// ExtractRootPaths extracts the root paths from the supplied list of paths.
+// The resulting root path will not contain any file separators, but there
+// may be duplicates.
+// So "/content/section/" becomes "content"
+func ExtractRootPaths(paths []string) []string {
+ r := make([]string, len(paths))
+ for i, p := range paths {
+ root := filepath.ToSlash(p)
+ sections := strings.Split(root, "/")
+ for _, section := range sections {
+ if section != "" {
+ root = section
+ break
+ }
+ }
+ r[i] = root
+ }
+ return r
+}
+
+// FindCWD returns the current working directory from where the Hugo
+// executable is run.
+func FindCWD() (string, error) {
+ serverFile, err := filepath.Abs(os.Args[0])
+ if err != nil {
+ return "", fmt.Errorf("can't get absolute path for executable: %v", err)
+ }
+
+ path := filepath.Dir(serverFile)
+ realFile, err := filepath.EvalSymlinks(serverFile)
+ if err != nil {
+ if _, err = os.Stat(serverFile + ".exe"); err == nil {
+ realFile = filepath.Clean(serverFile + ".exe")
+ }
+ }
+
+ if err == nil && realFile != serverFile {
+ path = filepath.Dir(realFile)
+ }
+
+ return path, nil
+}
+
+// SymbolicWalk is like filepath.Walk, but it follows symbolic links.
+func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error {
+ if _, isOs := fs.(*afero.OsFs); isOs {
+ // Mainly to track symlinks.
+ fs = hugofs.NewBaseFileDecorator(fs)
+ }
+
+ w := hugofs.NewWalkway(hugofs.WalkwayConfig{
+ Fs: fs,
+ Root: root,
+ WalkFn: walker,
+ })
+
+ return w.Walk()
+}
+
+// LstatIfPossible can be used to call Lstat if possible, else Stat.
+func LstatIfPossible(fs afero.Fs, path string) (os.FileInfo, error) {
+ if lstater, ok := fs.(afero.Lstater); ok {
+ fi, _, err := lstater.LstatIfPossible(path)
+ return fi, err
+ }
+
+ return fs.Stat(path)
+}
+
+// SafeWriteToDisk is the same as WriteToDisk,
+// but it also checks to see if the file or directory already exists.
+func SafeWriteToDisk(inpath string, r io.Reader, fs afero.Fs) (err error) {
+ return afero.SafeWriteReader(fs, inpath, r)
+}
+
+// WriteToDisk writes content to disk.
+func WriteToDisk(inpath string, r io.Reader, fs afero.Fs) (err error) {
+ return afero.WriteReader(fs, inpath, r)
+}
+
+// OpenFilesForWriting opens all the given filenames for writing.
+func OpenFilesForWriting(fs afero.Fs, filenames ...string) (io.WriteCloser, error) {
+ var writeClosers []io.WriteCloser
+ for _, filename := range filenames {
+ f, err := OpenFileForWriting(fs, filename)
+ if err != nil {
+ for _, wc := range writeClosers {
+ wc.Close()
+ }
+ return nil, err
+ }
+ writeClosers = append(writeClosers, f)
+ }
+
+ return hugio.NewMultiWriteCloser(writeClosers...), nil
+}
+
+// OpenFileForWriting opens or creates the given file. If the target directory
+// does not exist, it gets created.
+func OpenFileForWriting(fs afero.Fs, filename string) (afero.File, error) {
+ filename = filepath.Clean(filename)
+ // Create will truncate if file already exists.
+ // os.Create will create any new files with mode 0666 (before umask).
+ f, err := fs.Create(filename)
+ if err != nil {
+ if !os.IsNotExist(err) {
+ return nil, err
+ }
+ if err = fs.MkdirAll(filepath.Dir(filename), 0777); err != nil { // before umask
+ return nil, err
+ }
+ f, err = fs.Create(filename)
+ }
+
+ return f, err
+}
+
+// GetCacheDir returns a cache dir from the given filesystem and config.
+// The dir will be created if it does not exist.
+func GetCacheDir(fs afero.Fs, cfg config.Provider) (string, error) {
+ cacheDir := getCacheDir(cfg)
+ if cacheDir != "" {
+ exists, err := DirExists(cacheDir, fs)
+ if err != nil {
+ return "", err
+ }
+ if !exists {
+ err := fs.MkdirAll(cacheDir, 0777) // Before umask
+ if err != nil {
+ return "", fmt.Errorf("failed to create cache dir: %w", err)
+ }
+ }
+ return cacheDir, nil
+ }
+
+ // Fall back to a cache in /tmp.
+ return GetTempDir("hugo_cache", fs), nil
+}
+
+func getCacheDir(cfg config.Provider) string {
+ // Always use the cacheDir config if set.
+ cacheDir := cfg.GetString("cacheDir")
+ if len(cacheDir) > 1 {
+ return addTrailingFileSeparator(cacheDir)
+ }
+
+ // See Issue #8714.
+ // Turns out that Cloudflare also sets NETLIFY=true in its build environment,
+ // but all of these 3 should not give any false positives.
+ if os.Getenv("NETLIFY") == "true" && os.Getenv("PULL_REQUEST") != "" && os.Getenv("DEPLOY_PRIME_URL") != "" {
+ // Netlify's cache behaviour is not documented, the currently best example
+ // is this project:
+ // https://github.com/philhawksworth/content-shards/blob/master/gulpfile.js
+ return "/opt/build/cache/hugo_cache/"
+ }
+
+ // This will fall back to a hugo_cache folder in the tmp dir, which should work fine for most CI
+ // providers. See this for a working CircleCI setup:
+ // https://github.com/bep/hugo-sass-test/blob/6c3960a8f4b90e8938228688bc49bdcdd6b2d99e/.circleci/config.yml
+ // If not, they can set the HUGO_CACHEDIR environment variable or cacheDir config key.
+ return ""
+}
+
+func addTrailingFileSeparator(s string) string {
+ if !strings.HasSuffix(s, FilePathSeparator) {
+ s = s + FilePathSeparator
+ }
+ return s
+}
+
+// GetTempDir returns a temporary directory with the given sub path.
+func GetTempDir(subPath string, fs afero.Fs) string {
+ return afero.GetTempDir(fs, subPath)
+}
+
+// DirExists checks if a path exists and is a directory.
+func DirExists(path string, fs afero.Fs) (bool, error) {
+ return afero.DirExists(fs, path)
+}
+
+// IsDir checks if a given path is a directory.
+func IsDir(path string, fs afero.Fs) (bool, error) {
+ return afero.IsDir(fs, path)
+}
+
+// IsEmpty checks if a given path is empty, meaning it doesn't contain any regular files.
+func IsEmpty(path string, fs afero.Fs) (bool, error) {
+ var hasFile bool
+ err := afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+ if info.IsDir() {
+ return nil
+ }
+ hasFile = true
+ return filepath.SkipDir
+ })
+ return !hasFile, err
+}
+
+// Exists checks if a file or directory exists.
+func Exists(path string, fs afero.Fs) (bool, error) {
+ return afero.Exists(fs, path)
+}
+
+// AddTrailingSlash adds a trailing Unix styled slash (/) if not already
+// there.
+func AddTrailingSlash(path string) string {
+ if !strings.HasSuffix(path, "/") {
+ path += "/"
+ }
+ return path
+}
diff --git a/helpers/path_test.go b/helpers/path_test.go
new file mode 100644
index 000000000..3d0617f54
--- /dev/null
+++ b/helpers/path_test.go
@@ -0,0 +1,560 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strconv"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/langs"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+)
+
+func TestMakePath(t *testing.T) {
+ c := qt.New(t)
+ tests := []struct {
+ input string
+ expected string
+ removeAccents bool
+ }{
+ {"dot.slash/backslash\\underscore_pound#plus+hyphen-", "dot.slash/backslash\\underscore_pound#plus+hyphen-", true},
+ {"abcXYZ0123456789", "abcXYZ0123456789", true},
+ {"%20 %2", "%20-2", true},
+ {"foo- bar", "foo-bar", true},
+ {" Foo bar ", "Foo-bar", true},
+ {"Foo.Bar/foo_Bar-Foo", "Foo.Bar/foo_Bar-Foo", true},
+ {"fOO,bar:foobAR", "fOObarfoobAR", true},
+ {"FOo/BaR.html", "FOo/BaR.html", true},
+ {"трям/трям", "трям/трям", true},
+ {"은행", "은행", true},
+ {"Банковский кассир", "Банковскии-кассир", true},
+ // Issue #1488
+ {"संस्कृत", "संस्कृत", false},
+ {"a%C3%B1ame", "a%C3%B1ame", false}, // Issue #1292
+ {"this+is+a+test", "this+is+a+test", false}, // Issue #1290
+ {"~foo", "~foo", false}, // Issue #2177
+ {"foo--bar", "foo--bar", true}, // Issue #7288
+ }
+
+ for _, test := range tests {
+ v := newTestCfg()
+ v.Set("removePathAccents", test.removeAccents)
+
+ l := langs.NewDefaultLanguage(v)
+ p, err := NewPathSpec(hugofs.NewMem(v), l, nil)
+ c.Assert(err, qt.IsNil)
+
+ output := p.MakePath(test.input)
+ if output != test.expected {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
+
+func TestMakePathSanitized(t *testing.T) {
+ v := newTestCfg()
+
+ p, _ := NewPathSpec(hugofs.NewMem(v), v, nil)
+
+ tests := []struct {
+ input string
+ expected string
+ }{
+ {" FOO bar ", "foo-bar"},
+ {"Foo.Bar/fOO_bAr-Foo", "foo.bar/foo_bar-foo"},
+ {"FOO,bar:FooBar", "foobarfoobar"},
+ {"foo/BAR.HTML", "foo/bar.html"},
+ {"трям/трям", "трям/трям"},
+ {"은행", "은행"},
+ }
+
+ for _, test := range tests {
+ output := p.MakePathSanitized(test.input)
+ if output != test.expected {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
+
+func TestMakePathSanitizedDisablePathToLower(t *testing.T) {
+ v := newTestCfg()
+
+ v.Set("disablePathToLower", true)
+
+ l := langs.NewDefaultLanguage(v)
+ p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
+
+ tests := []struct {
+ input string
+ expected string
+ }{
+ {" FOO bar ", "FOO-bar"},
+ {"Foo.Bar/fOO_bAr-Foo", "Foo.Bar/fOO_bAr-Foo"},
+ {"FOO,bar:FooBar", "FOObarFooBar"},
+ {"foo/BAR.HTML", "foo/BAR.HTML"},
+ {"трям/трям", "трям/трям"},
+ {"은행", "은행"},
+ }
+
+ for _, test := range tests {
+ output := p.MakePathSanitized(test.input)
+ if output != test.expected {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
+
+func TestMakePathRelative(t *testing.T) {
+ type test struct {
+ inPath, path1, path2, output string
+ }
+
+ data := []test{
+ {"/abc/bcd/ab.css", "/abc/bcd", "/bbc/bcd", "/ab.css"},
+ {"/abc/bcd/ab.css", "/abcd/bcd", "/abc/bcd", "/ab.css"},
+ }
+
+ for i, d := range data {
+ output, _ := makePathRelative(d.inPath, d.path1, d.path2)
+ if d.output != output {
+ t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
+ }
+ }
+ _, error := makePathRelative("a/b/c.ss", "/a/c", "/d/c", "/e/f")
+
+ if error == nil {
+ t.Errorf("Test failed, expected error")
+ }
+}
+
+func TestGetDottedRelativePath(t *testing.T) {
+ // on Windows this will receive both kinds, both country and western ...
+ for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
+ doTestGetDottedRelativePath(f, t)
+ }
+}
+
+func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
+ type test struct {
+ input, expected string
+ }
+ data := []test{
+ {"", "./"},
+ {urlFixer("/"), "./"},
+ {urlFixer("post"), "../"},
+ {urlFixer("/post"), "../"},
+ {urlFixer("post/"), "../"},
+ {urlFixer("tags/foo.html"), "../"},
+ {urlFixer("/tags/foo.html"), "../"},
+ {urlFixer("/post/"), "../"},
+ {urlFixer("////post/////"), "../"},
+ {urlFixer("/foo/bar/index.html"), "../../"},
+ {urlFixer("/foo/bar/foo/"), "../../../"},
+ {urlFixer("/foo/bar/foo"), "../../../"},
+ {urlFixer("foo/bar/foo/"), "../../../"},
+ {urlFixer("foo/bar/foo/bar"), "../../../../"},
+ {"404.html", "./"},
+ {"404.xml", "./"},
+ {"/404.html", "./"},
+ }
+ for i, d := range data {
+ output := GetDottedRelativePath(d.input)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+func TestMakeTitle(t *testing.T) {
+ type test struct {
+ input, expected string
+ }
+ data := []test{
+ {"Make-Title", "Make Title"},
+ {"MakeTitle", "MakeTitle"},
+ {"make_title", "make_title"},
+ }
+ for i, d := range data {
+ output := MakeTitle(d.input)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+func TestDirExists(t *testing.T) {
+ type test struct {
+ input string
+ expected bool
+ }
+
+ data := []test{
+ {".", true},
+ {"./", true},
+ {"..", true},
+ {"../", true},
+ {"./..", true},
+ {"./../", true},
+ {os.TempDir(), true},
+ {os.TempDir() + FilePathSeparator, true},
+ {"/", true},
+ {"/some-really-random-directory-name", false},
+ {"/some/really/random/directory/name", false},
+ {"./some-really-random-local-directory-name", false},
+ {"./some/really/random/local/directory/name", false},
+ }
+
+ for i, d := range data {
+ exists, _ := DirExists(filepath.FromSlash(d.input), new(afero.OsFs))
+ if d.expected != exists {
+ t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
+ }
+ }
+}
+
+func TestIsDir(t *testing.T) {
+ type test struct {
+ input string
+ expected bool
+ }
+ data := []test{
+ {"./", true},
+ {"/", true},
+ {"./this-directory-does-not-existi", false},
+ {"/this-absolute-directory/does-not-exist", false},
+ }
+
+ for i, d := range data {
+
+ exists, _ := IsDir(d.input, new(afero.OsFs))
+ if d.expected != exists {
+ t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
+ }
+ }
+}
+
+func createZeroSizedFileInTempDir() (*os.File, error) {
+ filePrefix := "_path_test_"
+ f, e := ioutil.TempFile("", filePrefix) // dir is os.TempDir()
+ if e != nil {
+ // if there was an error no file was created.
+ // => no requirement to delete the file
+ return nil, e
+ }
+ return f, nil
+}
+
+func createNonZeroSizedFileInTempDir() (*os.File, error) {
+ f, err := createZeroSizedFileInTempDir()
+ if err != nil {
+ // no file ??
+ return nil, err
+ }
+ byteString := []byte("byteString")
+ err = ioutil.WriteFile(f.Name(), byteString, 0644)
+ if err != nil {
+ // delete the file
+ deleteFileInTempDir(f)
+ return nil, err
+ }
+ return f, nil
+}
+
+func deleteFileInTempDir(f *os.File) {
+ _ = os.Remove(f.Name())
+}
+
+func createEmptyTempDir() (string, error) {
+ dirPrefix := "_dir_prefix_"
+ d, e := ioutil.TempDir("", dirPrefix) // will be in os.TempDir()
+ if e != nil {
+ // no directory to delete - it was never created
+ return "", e
+ }
+ return d, nil
+}
+
+func deleteTempDir(d string) {
+ _ = os.RemoveAll(d)
+}
+
+func TestExists(t *testing.T) {
+ zeroSizedFile, _ := createZeroSizedFileInTempDir()
+ defer deleteFileInTempDir(zeroSizedFile)
+ nonZeroSizedFile, _ := createNonZeroSizedFileInTempDir()
+ defer deleteFileInTempDir(nonZeroSizedFile)
+ emptyDirectory, _ := createEmptyTempDir()
+ defer deleteTempDir(emptyDirectory)
+ nonExistentFile := os.TempDir() + "/this-file-does-not-exist.txt"
+ nonExistentDir := os.TempDir() + "/this/directory/does/not/exist/"
+
+ type test struct {
+ input string
+ expectedResult bool
+ expectedErr error
+ }
+
+ data := []test{
+ {zeroSizedFile.Name(), true, nil},
+ {nonZeroSizedFile.Name(), true, nil},
+ {emptyDirectory, true, nil},
+ {nonExistentFile, false, nil},
+ {nonExistentDir, false, nil},
+ }
+ for i, d := range data {
+ exists, err := Exists(d.input, new(afero.OsFs))
+ if d.expectedResult != exists {
+ t.Errorf("Test %d failed. Expected result %t got %t", i, d.expectedResult, exists)
+ }
+ if d.expectedErr != err {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expectedErr, err)
+ }
+ }
+}
+
+func TestAbsPathify(t *testing.T) {
+ type test struct {
+ inPath, workingDir, expected string
+ }
+ data := []test{
+ {os.TempDir(), filepath.FromSlash("/work"), filepath.Clean(os.TempDir())}, // TempDir has trailing slash
+ {"dir", filepath.FromSlash("/work"), filepath.FromSlash("/work/dir")},
+ }
+
+ windowsData := []test{
+ {"c:\\banana\\..\\dir", "c:\\foo", "c:\\dir"},
+ {"\\dir", "c:\\foo", "c:\\foo\\dir"},
+ {"c:\\", "c:\\foo", "c:\\"},
+ }
+
+ unixData := []test{
+ {"/banana/../dir/", "/work", "/dir"},
+ }
+
+ for i, d := range data {
+ // todo see comment in AbsPathify
+ ps := newTestDefaultPathSpec("workingDir", d.workingDir)
+
+ expected := ps.AbsPathify(d.inPath)
+ if d.expected != expected {
+ t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
+ }
+ }
+ t.Logf("Running platform specific path tests for %s", runtime.GOOS)
+ if runtime.GOOS == "windows" {
+ for i, d := range windowsData {
+ ps := newTestDefaultPathSpec("workingDir", d.workingDir)
+
+ expected := ps.AbsPathify(d.inPath)
+ if d.expected != expected {
+ t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
+ }
+ }
+ } else {
+ for i, d := range unixData {
+ ps := newTestDefaultPathSpec("workingDir", d.workingDir)
+
+ expected := ps.AbsPathify(d.inPath)
+ if d.expected != expected {
+ t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
+ }
+ }
+ }
+}
+
+func TestExtractAndGroupRootPaths(t *testing.T) {
+ in := []string{
+ filepath.FromSlash("/a/b/c/d"),
+ filepath.FromSlash("/a/b/c/e"),
+ filepath.FromSlash("/a/b/e/f"),
+ filepath.FromSlash("/a/b"),
+ filepath.FromSlash("/a/b/c/b/g"),
+ filepath.FromSlash("/c/d/e"),
+ }
+
+ inCopy := make([]string, len(in))
+ copy(inCopy, in)
+
+ result := ExtractAndGroupRootPaths(in)
+
+ c := qt.New(t)
+ c.Assert(fmt.Sprint(result), qt.Equals, filepath.FromSlash("[/a/b/{c,e} /c/d/e]"))
+
+ // Make sure the original is preserved
+ c.Assert(in, qt.DeepEquals, inCopy)
+}
+
+func TestExtractRootPaths(t *testing.T) {
+ tests := []struct {
+ input []string
+ expected []string
+ }{{
+ []string{
+ filepath.FromSlash("a/b"), filepath.FromSlash("a/b/c/"), "b",
+ filepath.FromSlash("/c/d"), filepath.FromSlash("d/"), filepath.FromSlash("//e//"),
+ },
+ []string{"a", "a", "b", "c", "d", "e"},
+ }}
+
+ for _, test := range tests {
+ output := ExtractRootPaths(test.input)
+ if !reflect.DeepEqual(output, test.expected) {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
+
+func TestFindCWD(t *testing.T) {
+ type test struct {
+ expectedDir string
+ expectedErr error
+ }
+
+ // cwd, _ := os.Getwd()
+ data := []test{
+ //{cwd, nil},
+ // Commenting this out. It doesn't work properly.
+ // There's a good reason why we don't use os.Getwd(), it doesn't actually work the way we want it to.
+ // I really don't know a better way to test this function. - SPF 2014.11.04
+ }
+ for i, d := range data {
+ dir, err := FindCWD()
+ if d.expectedDir != dir {
+ t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedDir, dir)
+ }
+ if d.expectedErr != err {
+ t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedErr, err)
+ }
+ }
+}
+
+func TestSafeWriteToDisk(t *testing.T) {
+ emptyFile, _ := createZeroSizedFileInTempDir()
+ defer deleteFileInTempDir(emptyFile)
+ tmpDir, _ := createEmptyTempDir()
+ defer deleteTempDir(tmpDir)
+
+ randomString := "This is a random string!"
+ reader := strings.NewReader(randomString)
+
+ fileExists := fmt.Errorf("%v already exists", emptyFile.Name())
+
+ type test struct {
+ filename string
+ expectedErr error
+ }
+
+ now := time.Now().Unix()
+ nowStr := strconv.FormatInt(now, 10)
+ data := []test{
+ {emptyFile.Name(), fileExists},
+ {tmpDir + "/" + nowStr, nil},
+ }
+
+ for i, d := range data {
+ e := SafeWriteToDisk(d.filename, reader, new(afero.OsFs))
+ if d.expectedErr != nil {
+ if d.expectedErr.Error() != e.Error() {
+ t.Errorf("Test %d failed. Expected error %q but got %q", i, d.expectedErr.Error(), e.Error())
+ }
+ } else {
+ if d.expectedErr != e {
+ t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedErr, e)
+ }
+ contents, _ := ioutil.ReadFile(d.filename)
+ if randomString != string(contents) {
+ t.Errorf("Test %d failed. Expected contents %q but got %q", i, randomString, string(contents))
+ }
+ }
+ reader.Seek(0, 0)
+ }
+}
+
+func TestWriteToDisk(t *testing.T) {
+ emptyFile, _ := createZeroSizedFileInTempDir()
+ defer deleteFileInTempDir(emptyFile)
+ tmpDir, _ := createEmptyTempDir()
+ defer deleteTempDir(tmpDir)
+
+ randomString := "This is a random string!"
+ reader := strings.NewReader(randomString)
+
+ type test struct {
+ filename string
+ expectedErr error
+ }
+
+ now := time.Now().Unix()
+ nowStr := strconv.FormatInt(now, 10)
+ data := []test{
+ {emptyFile.Name(), nil},
+ {tmpDir + "/" + nowStr, nil},
+ }
+
+ for i, d := range data {
+ e := WriteToDisk(d.filename, reader, new(afero.OsFs))
+ if d.expectedErr != e {
+ t.Errorf("Test %d failed. WriteToDisk Error Expected %q but got %q", i, d.expectedErr, e)
+ }
+ contents, e := ioutil.ReadFile(d.filename)
+ if e != nil {
+ t.Errorf("Test %d failed. Could not read file %s. Reason: %s\n", i, d.filename, e)
+ }
+ if randomString != string(contents) {
+ t.Errorf("Test %d failed. Expected contents %q but got %q", i, randomString, string(contents))
+ }
+ reader.Seek(0, 0)
+ }
+}
+
+func TestGetTempDir(t *testing.T) {
+ dir := os.TempDir()
+ if FilePathSeparator != dir[len(dir)-1:] {
+ dir = dir + FilePathSeparator
+ }
+ testDir := "hugoTestFolder" + FilePathSeparator
+ tests := []struct {
+ input string
+ expected string
+ }{
+ {"", dir},
+ {testDir + " Foo bar ", dir + testDir + " Foo bar " + FilePathSeparator},
+ {testDir + "Foo.Bar/foo_Bar-Foo", dir + testDir + "Foo.Bar/foo_Bar-Foo" + FilePathSeparator},
+ {testDir + "fOO,bar:foo%bAR", dir + testDir + "fOObarfoo%bAR" + FilePathSeparator},
+ {testDir + "fOO,bar:foobAR", dir + testDir + "fOObarfoobAR" + FilePathSeparator},
+ {testDir + "FOo/BaR.html", dir + testDir + "FOo/BaR.html" + FilePathSeparator},
+ {testDir + "трям/трям", dir + testDir + "трям/трям" + FilePathSeparator},
+ {testDir + "은행", dir + testDir + "은행" + FilePathSeparator},
+ {testDir + "Банковский кассир", dir + testDir + "Банковский кассир" + FilePathSeparator},
+ }
+
+ for _, test := range tests {
+ output := GetTempDir(test.input, new(afero.MemMapFs))
+ if output != test.expected {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
diff --git a/helpers/pathspec.go b/helpers/pathspec.go
new file mode 100644
index 000000000..49fd9a71c
--- /dev/null
+++ b/helpers/pathspec.go
@@ -0,0 +1,87 @@
+// Copyright 2016-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "strings"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+ "github.com/gohugoio/hugo/hugolib/paths"
+)
+
+// PathSpec holds methods that decides how paths in URLs and files in Hugo should look like.
+type PathSpec struct {
+ *paths.Paths
+ *filesystems.BaseFs
+
+ ProcessingStats *ProcessingStats
+
+ // The file systems to use
+ Fs *hugofs.Fs
+
+ // The config provider to use
+ Cfg config.Provider
+}
+
+// NewPathSpec creates a new PathSpec from the given filesystems and language.
+func NewPathSpec(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger) (*PathSpec, error) {
+ return NewPathSpecWithBaseBaseFsProvided(fs, cfg, logger, nil)
+}
+
+// NewPathSpecWithBaseBaseFsProvided creates a new PathSpec from the given filesystems and language.
+// If an existing BaseFs is provided, parts of it are reused.
+func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger, baseBaseFs *filesystems.BaseFs) (*PathSpec, error) {
+ p, err := paths.New(fs, cfg)
+ if err != nil {
+ return nil, err
+ }
+
+ var options []func(*filesystems.BaseFs) error
+ if baseBaseFs != nil {
+ options = []func(*filesystems.BaseFs) error{
+ filesystems.WithBaseFs(baseBaseFs),
+ }
+ }
+ bfs, err := filesystems.NewBase(p, logger, options...)
+ if err != nil {
+ return nil, err
+ }
+
+ ps := &PathSpec{
+ Paths: p,
+ BaseFs: bfs,
+ Fs: fs,
+ Cfg: cfg,
+ ProcessingStats: NewProcessingStats(p.Lang()),
+ }
+
+ basePath := ps.BaseURL.Path()
+ if basePath != "" && basePath != "/" {
+ ps.BasePath = basePath
+ }
+
+ return ps, nil
+}
+
+// PermalinkForBaseURL creates a permalink from the given link and baseURL.
+func (p *PathSpec) PermalinkForBaseURL(link, baseURL string) string {
+ link = strings.TrimPrefix(link, "/")
+ if !strings.HasSuffix(baseURL, "/") {
+ baseURL += "/"
+ }
+ return baseURL + link
+}
diff --git a/helpers/pathspec_test.go b/helpers/pathspec_test.go
new file mode 100644
index 000000000..84448050d
--- /dev/null
+++ b/helpers/pathspec_test.go
@@ -0,0 +1,62 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/langs"
+)
+
+func TestNewPathSpecFromConfig(t *testing.T) {
+ c := qt.New(t)
+ v := newTestCfg()
+ l := langs.NewLanguage("no", v)
+ v.Set("disablePathToLower", true)
+ v.Set("removePathAccents", true)
+ v.Set("uglyURLs", true)
+ v.Set("canonifyURLs", true)
+ v.Set("paginatePath", "side")
+ v.Set("baseURL", "http://base.com/foo")
+ v.Set("themesDir", "thethemes")
+ v.Set("layoutDir", "thelayouts")
+ v.Set("workingDir", "thework")
+ v.Set("staticDir", "thestatic")
+ v.Set("theme", "thetheme")
+ langs.LoadLanguageSettings(v, nil)
+
+ fs := hugofs.NewMem(v)
+ fs.Source.MkdirAll(filepath.FromSlash("thework/thethemes/thetheme"), 0777)
+
+ p, err := NewPathSpec(fs, l, nil)
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(p.CanonifyURLs, qt.Equals, true)
+ c.Assert(p.DisablePathToLower, qt.Equals, true)
+ c.Assert(p.RemovePathAccents, qt.Equals, true)
+ c.Assert(p.UglyURLs, qt.Equals, true)
+ c.Assert(p.Language.Lang, qt.Equals, "no")
+ c.Assert(p.PaginatePath, qt.Equals, "side")
+
+ c.Assert(p.BaseURL.String(), qt.Equals, "http://base.com/foo")
+ c.Assert(p.BaseURLString, qt.Equals, "http://base.com/foo")
+ c.Assert(p.BaseURLNoPathString, qt.Equals, "http://base.com")
+
+ c.Assert(p.ThemesDir, qt.Equals, "thethemes")
+ c.Assert(p.WorkingDir, qt.Equals, "thework")
+}
diff --git a/helpers/processing_stats.go b/helpers/processing_stats.go
new file mode 100644
index 000000000..3e3e9a3ca
--- /dev/null
+++ b/helpers/processing_stats.go
@@ -0,0 +1,120 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "io"
+ "strconv"
+ "sync/atomic"
+
+ "github.com/olekukonko/tablewriter"
+)
+
+// ProcessingStats represents statistics about a site build.
+type ProcessingStats struct {
+ Name string
+
+ Pages uint64
+ PaginatorPages uint64
+ Static uint64
+ ProcessedImages uint64
+ Files uint64
+ Aliases uint64
+ Sitemaps uint64
+ Cleaned uint64
+}
+
+type processingStatsTitleVal struct {
+ name string
+ val uint64
+}
+
+func (s *ProcessingStats) toVals() []processingStatsTitleVal {
+ return []processingStatsTitleVal{
+ {"Pages", s.Pages},
+ {"Paginator pages", s.PaginatorPages},
+ {"Non-page files", s.Files},
+ {"Static files", s.Static},
+ {"Processed images", s.ProcessedImages},
+ {"Aliases", s.Aliases},
+ {"Sitemaps", s.Sitemaps},
+ {"Cleaned", s.Cleaned},
+ }
+}
+
+// NewProcessingStats returns a new ProcessingStats instance.
+func NewProcessingStats(name string) *ProcessingStats {
+ return &ProcessingStats{Name: name}
+}
+
+// Incr increments a given counter.
+func (s *ProcessingStats) Incr(counter *uint64) {
+ atomic.AddUint64(counter, 1)
+}
+
+// Add adds an amount to a given counter.
+func (s *ProcessingStats) Add(counter *uint64, amount int) {
+ atomic.AddUint64(counter, uint64(amount))
+}
+
+// Table writes a table-formatted representation of the stats in a
+// ProcessingStats instance to w.
+func (s *ProcessingStats) Table(w io.Writer) {
+ titleVals := s.toVals()
+ data := make([][]string, len(titleVals))
+ for i, tv := range titleVals {
+ data[i] = []string{tv.name, strconv.Itoa(int(tv.val))}
+ }
+
+ table := tablewriter.NewWriter(w)
+
+ table.AppendBulk(data)
+ table.SetHeader([]string{"", s.Name})
+ table.SetBorder(false)
+ table.Render()
+}
+
+// ProcessingStatsTable writes a table-formatted representation of stats to w.
+func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
+ names := make([]string, len(stats)+1)
+
+ var data [][]string
+
+ for i := 0; i < len(stats); i++ {
+ stat := stats[i]
+ names[i+1] = stat.Name
+
+ titleVals := stat.toVals()
+
+ if i == 0 {
+ data = make([][]string, len(titleVals))
+ }
+
+ for j, tv := range titleVals {
+ if i == 0 {
+ data[j] = []string{tv.name, strconv.Itoa(int(tv.val))}
+ } else {
+ data[j] = append(data[j], strconv.Itoa(int(tv.val)))
+ }
+ }
+
+ }
+
+ table := tablewriter.NewWriter(w)
+
+ table.AppendBulk(data)
+ table.SetHeader(names)
+ table.SetBorder(false)
+ table.Render()
+}
diff --git a/helpers/testhelpers_test.go b/helpers/testhelpers_test.go
new file mode 100644
index 000000000..00be3db25
--- /dev/null
+++ b/helpers/testhelpers_test.go
@@ -0,0 +1,49 @@
+package helpers
+
+import (
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/modules"
+)
+
+func newTestPathSpec(fs *hugofs.Fs, v config.Provider) *PathSpec {
+ l := langs.NewDefaultLanguage(v)
+ ps, _ := NewPathSpec(fs, l, nil)
+ return ps
+}
+
+func newTestDefaultPathSpec(configKeyValues ...any) *PathSpec {
+ cfg := newTestCfg()
+ fs := hugofs.NewMem(cfg)
+
+ for i := 0; i < len(configKeyValues); i += 2 {
+ cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
+ }
+ return newTestPathSpec(fs, cfg)
+}
+
+func newTestCfg() config.Provider {
+ v := config.NewWithTestDefaults()
+ langs.LoadLanguageSettings(v, nil)
+ langs.LoadLanguageSettings(v, nil)
+ mod, err := modules.CreateProjectModule(v)
+ if err != nil {
+ panic(err)
+ }
+ v.Set("allModules", modules.Modules{mod})
+
+ return v
+}
+
+func newTestContentSpec() *ContentSpec {
+ v := config.NewWithTestDefaults()
+ spec, err := NewContentSpec(v, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
+ if err != nil {
+ panic(err)
+ }
+ return spec
+}
diff --git a/helpers/url.go b/helpers/url.go
new file mode 100644
index 000000000..7cb998ca2
--- /dev/null
+++ b/helpers/url.go
@@ -0,0 +1,241 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "net/url"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/PuerkitoBio/purell"
+)
+
+func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string {
+ s, err := purell.NormalizeURLString(in, f)
+ if err != nil {
+ return in
+ }
+
+ // Temporary workaround for the bug fix and resulting
+ // behavioral change in purell.NormalizeURLString():
+ // a leading '/' was inadvertently added to relative links,
+ // but no longer, see #878.
+ //
+ // I think the real solution is to allow Hugo to
+ // make relative URL with relative path,
+ // e.g. "../../post/hello-again/", as wished by users
+ // in issues #157, #622, etc., without forcing
+ // relative URLs to begin with '/'.
+ // Once the fixes are in, let's remove this kludge
+ // and restore SanitizeURL() to the way it was.
+ // -- @anthonyfok, 2015-02-16
+ //
+ // Begin temporary kludge
+ u, err := url.Parse(s)
+ if err != nil {
+ panic(err)
+ }
+ if len(u.Path) > 0 && !strings.HasPrefix(u.Path, "/") {
+ u.Path = "/" + u.Path
+ }
+ return u.String()
+ // End temporary kludge
+
+ // return s
+
+}
+
+// SanitizeURL sanitizes the input URL string.
+func SanitizeURL(in string) string {
+ return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
+}
+
+// SanitizeURLKeepTrailingSlash is the same as SanitizeURL, but will keep any trailing slash.
+func SanitizeURLKeepTrailingSlash(in string) string {
+ return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
+}
+
+// URLize is similar to MakePath, but with Unicode handling
+// Example:
+// uri: Vim (text editor)
+// urlize: vim-text-editor
+func (p *PathSpec) URLize(uri string) string {
+ return p.URLEscape(p.MakePathSanitized(uri))
+}
+
+// URLizeFilename creates a URL from a filename by escaping unicode letters
+// and turning any filepath separator into forward slashes.
+func (p *PathSpec) URLizeFilename(filename string) string {
+ return p.URLEscape(filepath.ToSlash(filename))
+}
+
+// URLEscape escapes unicode letters.
+func (p *PathSpec) URLEscape(uri string) string {
+ // escape unicode letters
+ parsedURI, err := url.Parse(uri)
+ if err != nil {
+ // if net/url can not parse URL it means Sanitize works incorrectly
+ panic(err)
+ }
+ x := parsedURI.String()
+ return x
+}
+
+// AbsURL creates an absolute URL from the relative path given and the BaseURL set in config.
+func (p *PathSpec) AbsURL(in string, addLanguage bool) string {
+ url, err := url.Parse(in)
+ if err != nil {
+ return in
+ }
+
+ if url.IsAbs() || strings.HasPrefix(in, "//") {
+ // It is already absolute, return it as is.
+ return in
+ }
+
+ baseURL := p.getBaseURLRoot(in)
+
+ if addLanguage {
+ prefix := p.GetLanguagePrefix()
+ if prefix != "" {
+ hasPrefix := false
+ // avoid adding language prefix if already present
+ in2 := in
+ if strings.HasPrefix(in, "/") {
+ in2 = in[1:]
+ }
+ if in2 == prefix {
+ hasPrefix = true
+ } else {
+ hasPrefix = strings.HasPrefix(in2, prefix+"/")
+ }
+
+ if !hasPrefix {
+ addSlash := in == "" || strings.HasSuffix(in, "/")
+ in = path.Join(prefix, in)
+
+ if addSlash {
+ in += "/"
+ }
+ }
+ }
+ }
+
+ return paths.MakePermalink(baseURL, in).String()
+}
+
+func (p *PathSpec) getBaseURLRoot(path string) string {
+ if strings.HasPrefix(path, "/") {
+ // Treat it as relative to the server root.
+ return p.BaseURLNoPathString
+ } else {
+ // Treat it as relative to the baseURL.
+ return p.BaseURLString
+ }
+}
+
+func (p *PathSpec) RelURL(in string, addLanguage bool) string {
+ baseURL := p.getBaseURLRoot(in)
+ canonifyURLs := p.CanonifyURLs
+ if (!strings.HasPrefix(in, baseURL) && strings.HasPrefix(in, "http")) || strings.HasPrefix(in, "//") {
+ return in
+ }
+
+ u := in
+
+ if strings.HasPrefix(in, baseURL) {
+ u = strings.TrimPrefix(u, baseURL)
+ }
+
+ if addLanguage {
+ prefix := p.GetLanguagePrefix()
+ if prefix != "" {
+ hasPrefix := false
+ // avoid adding language prefix if already present
+ in2 := in
+ if strings.HasPrefix(in, "/") {
+ in2 = in[1:]
+ }
+ if in2 == prefix {
+ hasPrefix = true
+ } else {
+ hasPrefix = strings.HasPrefix(in2, prefix+"/")
+ }
+
+ if !hasPrefix {
+ hadSlash := strings.HasSuffix(u, "/")
+
+ u = path.Join(prefix, u)
+
+ if hadSlash {
+ u += "/"
+ }
+ }
+ }
+ }
+
+ if !canonifyURLs {
+ u = paths.AddContextRoot(baseURL, u)
+ }
+
+ if in == "" && !strings.HasSuffix(u, "/") && strings.HasSuffix(baseURL, "/") {
+ u += "/"
+ }
+
+ if !strings.HasPrefix(u, "/") {
+ u = "/" + u
+ }
+
+ return u
+}
+
+// PrependBasePath prepends any baseURL sub-folder to the given resource
+func (p *PathSpec) PrependBasePath(rel string, isAbs bool) string {
+ basePath := p.GetBasePath(!isAbs)
+ if basePath != "" {
+ rel = filepath.ToSlash(rel)
+ // Need to prepend any path from the baseURL
+ hadSlash := strings.HasSuffix(rel, "/")
+ rel = path.Join(basePath, rel)
+ if hadSlash {
+ rel += "/"
+ }
+ }
+ return rel
+}
+
+// URLizeAndPrep applies misc sanitation to the given URL to get it in line
+// with the Hugo standard.
+func (p *PathSpec) URLizeAndPrep(in string) string {
+ return p.URLPrep(p.URLize(in))
+}
+
+// URLPrep applies misc sanitation to the given URL.
+func (p *PathSpec) URLPrep(in string) string {
+ if p.UglyURLs {
+ return paths.Uglify(SanitizeURL(in))
+ }
+ pretty := paths.PrettifyURL(SanitizeURL(in))
+ if path.Ext(pretty) == ".xml" {
+ return pretty
+ }
+ url, err := purell.NormalizeURLString(pretty, purell.FlagAddTrailingSlash)
+ if err != nil {
+ return pretty
+ }
+ return url
+}
diff --git a/helpers/url_test.go b/helpers/url_test.go
new file mode 100644
index 000000000..e248036ae
--- /dev/null
+++ b/helpers/url_test.go
@@ -0,0 +1,260 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package helpers
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+)
+
+func TestURLize(t *testing.T) {
+ v := newTestCfg()
+ l := langs.NewDefaultLanguage(v)
+ p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
+
+ tests := []struct {
+ input string
+ expected string
+ }{
+ {" foo bar ", "foo-bar"},
+ {"foo.bar/foo_bar-foo", "foo.bar/foo_bar-foo"},
+ {"foo,bar:foobar", "foobarfoobar"},
+ {"foo/bar.html", "foo/bar.html"},
+ {"трям/трям", "%D1%82%D1%80%D1%8F%D0%BC/%D1%82%D1%80%D1%8F%D0%BC"},
+ {"100%-google", "100-google"},
+ }
+
+ for _, test := range tests {
+ output := p.URLize(test.input)
+ if output != test.expected {
+ t.Errorf("Expected %#v, got %#v\n", test.expected, output)
+ }
+ }
+}
+
+func TestAbsURL(t *testing.T) {
+ for _, defaultInSubDir := range []bool{true, false} {
+ for _, addLanguage := range []bool{true, false} {
+ for _, m := range []bool{true, false} {
+ for _, l := range []string{"en", "fr"} {
+ doTestAbsURL(t, defaultInSubDir, addLanguage, m, l)
+ }
+ }
+ }
+ }
+}
+
+func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
+ c := qt.New(t)
+ v := newTestCfg()
+ v.Set("multilingual", multilingual)
+ v.Set("defaultContentLanguage", "en")
+ v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
+
+ tests := []struct {
+ input string
+ baseURL string
+ expected string
+ }{
+ // Issue 9994
+ {"foo/bar", "https://example.org/foo/", "https://example.org/foo/MULTIfoo/bar"},
+ {"/foo/bar", "https://example.org/foo/", "https://example.org/MULTIfoo/bar"},
+
+ {"/test/foo", "http://base/", "http://base/MULTItest/foo"},
+ {"/" + lang + "/test/foo", "http://base/", "http://base/" + lang + "/test/foo"},
+ {"", "http://base/ace/", "http://base/ace/MULTI"},
+ {"/test/2/foo/", "http://base", "http://base/MULTItest/2/foo/"},
+ {"http://abs", "http://base/", "http://abs"},
+ {"schema://abs", "http://base/", "schema://abs"},
+ {"//schemaless", "http://base/", "//schemaless"},
+ {"test/2/foo/", "http://base/path", "http://base/path/MULTItest/2/foo/"},
+ {lang + "/test/2/foo/", "http://base/path", "http://base/path/" + lang + "/test/2/foo/"},
+ {"/test/2/foo/", "http://base/path", "http://base/MULTItest/2/foo/"},
+ {"http//foo", "http://base/path", "http://base/path/MULTIhttp/foo"},
+ }
+
+ if multilingual && addLanguage && defaultInSubDir {
+ newTests := []struct {
+ input string
+ baseURL string
+ expected string
+ }{
+ {lang + "test", "http://base/", "http://base/" + lang + "/" + lang + "test"},
+ {"/" + lang + "test", "http://base/", "http://base/" + lang + "/" + lang + "test"},
+ }
+
+ tests = append(tests, newTests...)
+
+ }
+
+ for _, test := range tests {
+ v.Set("baseURL", test.baseURL)
+ v.Set("contentDir", "content")
+ l := langs.NewLanguage(lang, v)
+ p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
+
+ output := p.AbsURL(test.input, addLanguage)
+ expected := test.expected
+ if multilingual && addLanguage {
+ if !defaultInSubDir && lang == "en" {
+ expected = strings.Replace(expected, "MULTI", "", 1)
+ } else {
+ expected = strings.Replace(expected, "MULTI", lang+"/", 1)
+ }
+ } else {
+ expected = strings.Replace(expected, "MULTI", "", 1)
+ }
+
+ c.Assert(output, qt.Equals, expected)
+ }
+}
+
+func TestRelURL(t *testing.T) {
+ for _, defaultInSubDir := range []bool{true, false} {
+ for _, addLanguage := range []bool{true, false} {
+ for _, m := range []bool{true, false} {
+ for _, l := range []string{"en", "fr"} {
+ doTestRelURL(t, defaultInSubDir, addLanguage, m, l)
+ }
+ }
+ }
+ }
+}
+
+func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
+ c := qt.New(t)
+ v := newTestCfg()
+ v.Set("multilingual", multilingual)
+ v.Set("defaultContentLanguage", "en")
+ v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
+
+ tests := []struct {
+ input string
+ baseURL string
+ canonify bool
+ expected string
+ }{
+
+ // Issue 9994
+ {"/foo/bar", "https://example.org/foo/", false, "MULTI/foo/bar"},
+ {"foo/bar", "https://example.org/foo/", false, "/fooMULTI/foo/bar"},
+
+ {"/test/foo", "http://base/", false, "MULTI/test/foo"},
+ {"/" + lang + "/test/foo", "http://base/", false, "/" + lang + "/test/foo"},
+ {lang + "/test/foo", "http://base/", false, "/" + lang + "/test/foo"},
+ {"test.css", "http://base/sub", false, "/subMULTI/test.css"},
+ {"test.css", "http://base/sub", true, "MULTI/test.css"},
+ {"/test/", "http://base/", false, "MULTI/test/"},
+ {"test/", "http://base/sub/", false, "/subMULTI/test/"},
+ {"/test/", "http://base/sub/", true, "MULTI/test/"},
+ {"", "http://base/ace/", false, "/aceMULTI/"},
+ {"", "http://base/ace", false, "/aceMULTI"},
+ {"http://abs", "http://base/", false, "http://abs"},
+ {"//schemaless", "http://base/", false, "//schemaless"},
+ }
+
+ if multilingual && addLanguage && defaultInSubDir {
+ newTests := []struct {
+ input string
+ baseURL string
+ canonify bool
+ expected string
+ }{
+ {lang + "test", "http://base/", false, "/" + lang + "/" + lang + "test"},
+ {"/" + lang + "test", "http://base/", false, "/" + lang + "/" + lang + "test"},
+ }
+ tests = append(tests, newTests...)
+ }
+
+ for i, test := range tests {
+ v.Set("baseURL", test.baseURL)
+ v.Set("canonifyURLs", test.canonify)
+ l := langs.NewLanguage(lang, v)
+ p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
+
+ output := p.RelURL(test.input, addLanguage)
+
+ expected := test.expected
+ if multilingual && addLanguage {
+ if !defaultInSubDir && lang == "en" {
+ expected = strings.Replace(expected, "MULTI", "", 1)
+ } else {
+ expected = strings.Replace(expected, "MULTI", "/"+lang, 1)
+ }
+ } else {
+ expected = strings.Replace(expected, "MULTI", "", 1)
+ }
+
+ c.Assert(output, qt.Equals, expected, qt.Commentf("[%d] %s", i, test.input))
+
+ }
+}
+
+func TestSanitizeURL(t *testing.T) {
+ tests := []struct {
+ input string
+ expected string
+ }{
+ {"http://foo.bar/", "http://foo.bar"},
+ {"http://foo.bar", "http://foo.bar"}, // issue #1105
+ {"http://foo.bar/zoo/", "http://foo.bar/zoo"}, // issue #931
+ }
+
+ for i, test := range tests {
+ o1 := SanitizeURL(test.input)
+ o2 := SanitizeURLKeepTrailingSlash(test.input)
+
+ expected2 := test.expected
+
+ if strings.HasSuffix(test.input, "/") && !strings.HasSuffix(expected2, "/") {
+ expected2 += "/"
+ }
+
+ if o1 != test.expected {
+ t.Errorf("[%d] 1: Expected %#v, got %#v\n", i, test.expected, o1)
+ }
+ if o2 != expected2 {
+ t.Errorf("[%d] 2: Expected %#v, got %#v\n", i, expected2, o2)
+ }
+ }
+}
+
+func TestURLPrep(t *testing.T) {
+ type test struct {
+ ugly bool
+ input string
+ output string
+ }
+
+ data := []test{
+ {false, "/section/name.html", "/section/name/"},
+ {true, "/section/name/index.html", "/section/name.html"},
+ }
+
+ for i, d := range data {
+ v := newTestCfg()
+ v.Set("uglyURLs", d.ugly)
+ l := langs.NewDefaultLanguage(v)
+ p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
+
+ output := p.URLPrep(d.input)
+ if d.output != output {
+ t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
+ }
+ }
+}
diff --git a/htesting/hqt/checkers.go b/htesting/hqt/checkers.go
new file mode 100644
index 000000000..7655d6a63
--- /dev/null
+++ b/htesting/hqt/checkers.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hqt
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/google/go-cmp/cmp"
+ "github.com/spf13/cast"
+)
+
+// IsSameString asserts that two strings are equal. The two strings
+// are normalized (whitespace removed) before doing a ==.
+// Also note that two strings can be the same even if they're of different
+// types.
+var IsSameString qt.Checker = &stringChecker{
+ argNames: []string{"got", "want"},
+}
+
+// IsSameType asserts that got is the same type as want.
+var IsSameType qt.Checker = &typeChecker{
+ argNames: []string{"got", "want"},
+}
+
+type argNames []string
+
+func (a argNames) ArgNames() []string {
+ return a
+}
+
+type typeChecker struct {
+ argNames
+}
+
+// Check implements Checker.Check by checking that got and args[0] are of the same type.
+func (c *typeChecker) Check(got any, args []any, note func(key string, value any)) (err error) {
+ if want := args[0]; reflect.TypeOf(got) != reflect.TypeOf(want) {
+ if _, ok := got.(error); ok && want == nil {
+ return errors.New("got non-nil error")
+ }
+ return errors.New("values are not of same type")
+ }
+ return nil
+}
+
+type stringChecker struct {
+ argNames
+}
+
+// Check implements Checker.Check by checking that got and args[0] represents the same normalized text (whitespace etc. removed).
+func (c *stringChecker) Check(got any, args []any, note func(key string, value any)) (err error) {
+ s1, s2 := cast.ToString(got), cast.ToString(args[0])
+
+ if s1 == s2 {
+ return nil
+ }
+
+ s1, s2 = normalizeString(s1), normalizeString(s2)
+
+ if s1 == s2 {
+ return nil
+ }
+
+ return fmt.Errorf("values are not the same text: %s", strings.Join(htesting.DiffStrings(s1, s2), " | "))
+}
+
+func normalizeString(s string) string {
+ s = strings.ReplaceAll(s, "\r\n", "\n")
+
+ lines := strings.Split(strings.TrimSpace(s), "\n")
+ for i, line := range lines {
+ lines[i] = strings.Join(strings.Fields(strings.TrimSpace(line)), "")
+ }
+ return strings.Join(lines, "\n")
+}
+
+// DeepAllowUnexported creates an option to allow compare of unexported types
+// in the given list of types.
+// see https://github.com/google/go-cmp/issues/40#issuecomment-328615283
+func DeepAllowUnexported(vs ...any) cmp.Option {
+ m := make(map[reflect.Type]struct{})
+ for _, v := range vs {
+ structTypes(reflect.ValueOf(v), m)
+ }
+ var typs []any
+ for t := range m {
+ typs = append(typs, reflect.New(t).Elem().Interface())
+ }
+ return cmp.AllowUnexported(typs...)
+}
+
+func structTypes(v reflect.Value, m map[reflect.Type]struct{}) {
+ if !v.IsValid() {
+ return
+ }
+ switch v.Kind() {
+ case reflect.Ptr:
+ if !v.IsNil() {
+ structTypes(v.Elem(), m)
+ }
+ case reflect.Interface:
+ if !v.IsNil() {
+ structTypes(v.Elem(), m)
+ }
+ case reflect.Slice, reflect.Array:
+ for i := 0; i < v.Len(); i++ {
+ structTypes(v.Index(i), m)
+ }
+ case reflect.Map:
+ for _, k := range v.MapKeys() {
+ structTypes(v.MapIndex(k), m)
+ }
+ case reflect.Struct:
+ m[v.Type()] = struct{}{}
+ for i := 0; i < v.NumField(); i++ {
+ structTypes(v.Field(i), m)
+ }
+ }
+}
diff --git a/htesting/test_helpers.go b/htesting/test_helpers.go
new file mode 100644
index 000000000..fa3f29c44
--- /dev/null
+++ b/htesting/test_helpers.go
@@ -0,0 +1,144 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package htesting
+
+import (
+ "math/rand"
+ "os"
+ "regexp"
+ "runtime"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/spf13/afero"
+)
+
+// IsTest reports whether we're running as a test.
+var IsTest bool
+
+func init() {
+ for _, arg := range os.Args {
+ if strings.HasPrefix(arg, "-test.") {
+ IsTest = true
+ break
+ }
+ }
+}
+
+// CreateTempDir creates a temp dir in the given filesystem and
+// returns the dirname and a func that removes it when done.
+func CreateTempDir(fs afero.Fs, prefix string) (string, func(), error) {
+ tempDir, err := afero.TempDir(fs, "", prefix)
+ if err != nil {
+ return "", nil, err
+ }
+
+ _, isOsFs := fs.(*afero.OsFs)
+
+ if isOsFs && runtime.GOOS == "darwin" && !strings.HasPrefix(tempDir, "/private") {
+ // To get the entry folder in line with the rest. This is a little bit
+ // mysterious, but so be it.
+ tempDir = "/private" + tempDir
+ }
+ return tempDir, func() { fs.RemoveAll(tempDir) }, nil
+}
+
+// BailOut panics with a stack trace after the given duration. Useful for
+// hanging tests.
+func BailOut(after time.Duration) {
+ time.AfterFunc(after, func() {
+ buf := make([]byte, 1<<16)
+ runtime.Stack(buf, true)
+ panic(string(buf))
+ })
+}
+
+// Rnd is used only for testing.
+var Rnd = rand.New(rand.NewSource(time.Now().UnixNano()))
+
+func RandBool() bool {
+ return Rnd.Intn(2) != 0
+}
+
+// DiffStringSlices returns the difference between two string slices.
+// Useful in tests.
+// See:
+// http://stackoverflow.com/questions/19374219/how-to-find-the-difference-between-two-slices-of-strings-in-golang
+func DiffStringSlices(slice1 []string, slice2 []string) []string {
+ diffStr := []string{}
+ m := map[string]int{}
+
+ for _, s1Val := range slice1 {
+ m[s1Val] = 1
+ }
+ for _, s2Val := range slice2 {
+ m[s2Val] = m[s2Val] + 1
+ }
+
+ for mKey, mVal := range m {
+ if mVal == 1 {
+ diffStr = append(diffStr, mKey)
+ }
+ }
+
+ return diffStr
+}
+
+// DiffStrings splits the strings into fields and runs them through DiffStringSlices.
+// Useful for tests.
+func DiffStrings(s1, s2 string) []string {
+ return DiffStringSlices(strings.Fields(s1), strings.Fields(s2))
+}
+
+// IsCI reports whether we're running in a CI server.
+func IsCI() bool {
+ return (os.Getenv("CI") != "" || os.Getenv("CI_LOCAL") != "") && os.Getenv("CIRCLE_BRANCH") == ""
+}
+
+// IsGitHubAction reports whether we're running in a GitHub Action.
+func IsGitHubAction() bool {
+ return os.Getenv("GITHUB_ACTION") != ""
+}
+
+// SupportsAll reports whether the running system supports all Hugo features,
+// e.g. Asciidoc, Pandoc etc.
+func SupportsAll() bool {
+ return IsGitHubAction() || os.Getenv("CI_LOCAL") != ""
+}
+
+// GoMinorVersion returns the minor version of the current Go version,
+// e.g. 16 for Go 1.16.
+func GoMinorVersion() int {
+ return extractMinorVersionFromGoTag(runtime.Version())
+}
+
+var goMinorVersionRe = regexp.MustCompile(`go1.(\d*)`)
+
+func extractMinorVersionFromGoTag(tag string) int {
+ // The tag may be on the form go1.17, go1.17.5 go1.17rc2 -- or just a commit hash.
+ match := goMinorVersionRe.FindStringSubmatch(tag)
+
+ if len(match) == 2 {
+ i, err := strconv.Atoi(match[1])
+ if err != nil {
+ return -1
+ }
+ return i
+ }
+
+ // a commit hash, not useful.
+ return -1
+
+}
diff --git a/htesting/test_helpers_test.go b/htesting/test_helpers_test.go
new file mode 100644
index 000000000..3e767ac9d
--- /dev/null
+++ b/htesting/test_helpers_test.go
@@ -0,0 +1,31 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package htesting
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestExtractMinorVersionFromGoTag(t *testing.T) {
+
+ c := qt.New(t)
+
+ c.Assert(extractMinorVersionFromGoTag("go1.17"), qt.Equals, 17)
+ c.Assert(extractMinorVersionFromGoTag("go1.16.7"), qt.Equals, 16)
+ c.Assert(extractMinorVersionFromGoTag("go1.17beta1"), qt.Equals, 17)
+ c.Assert(extractMinorVersionFromGoTag("asdfadf"), qt.Equals, -1)
+
+}
diff --git a/htesting/testdata_builder.go b/htesting/testdata_builder.go
new file mode 100644
index 000000000..d7ba18521
--- /dev/null
+++ b/htesting/testdata_builder.go
@@ -0,0 +1,59 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package htesting
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/spf13/afero"
+)
+
+type testFile struct {
+ name string
+ content string
+}
+
+type testdataBuilder struct {
+ t testing.TB
+ fs afero.Fs
+ workingDir string
+
+ files []testFile
+}
+
+func NewTestdataBuilder(fs afero.Fs, workingDir string, t testing.TB) *testdataBuilder {
+ workingDir = filepath.Clean(workingDir)
+ return &testdataBuilder{fs: fs, workingDir: workingDir, t: t}
+}
+
+func (b *testdataBuilder) Add(filename, content string) *testdataBuilder {
+ b.files = append(b.files, testFile{name: filename, content: content})
+ return b
+}
+
+func (b *testdataBuilder) Build() *testdataBuilder {
+ for _, f := range b.files {
+ if err := afero.WriteFile(b.fs, filepath.Join(b.workingDir, f.name), []byte(f.content), 0666); err != nil {
+ b.t.Fatalf("failed to add %q: %s", f.name, err)
+ }
+ }
+ return b
+}
+
+func (b testdataBuilder) WithWorkingDir(dir string) *testdataBuilder {
+ b.workingDir = filepath.Clean(dir)
+ b.files = make([]testFile, 0)
+ return &b
+}
diff --git a/hugofs/createcounting_fs.go b/hugofs/createcounting_fs.go
new file mode 100644
index 000000000..1737ad5ce
--- /dev/null
+++ b/hugofs/createcounting_fs.go
@@ -0,0 +1,107 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/spf13/afero"
+)
+
+// Reseter is implemented by some of the stateful filesystems.
+type Reseter interface {
+ Reset()
+}
+
+// DuplicatesReporter reports about duplicate filenames.
+type DuplicatesReporter interface {
+ ReportDuplicates() string
+}
+
+var (
+ _ FilesystemUnwrapper = (*createCountingFs)(nil)
+)
+
+func NewCreateCountingFs(fs afero.Fs) afero.Fs {
+ return &createCountingFs{Fs: fs, fileCount: make(map[string]int)}
+}
+
+func (fs *createCountingFs) UnwrapFilesystem() afero.Fs {
+ return fs.Fs
+}
+
+// ReportDuplicates reports filenames written more than once.
+func (c *createCountingFs) ReportDuplicates() string {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ var dupes []string
+
+ for k, v := range c.fileCount {
+ if v > 1 {
+ dupes = append(dupes, fmt.Sprintf("%s (%d)", k, v))
+ }
+ }
+
+ if len(dupes) == 0 {
+ return ""
+ }
+
+ sort.Strings(dupes)
+
+ return strings.Join(dupes, ", ")
+}
+
+// createCountingFs counts filenames of created files or files opened
+// for writing.
+type createCountingFs struct {
+ afero.Fs
+
+ mu sync.Mutex
+ fileCount map[string]int
+}
+
+func (c *createCountingFs) Reset() {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+
+ c.fileCount = make(map[string]int)
+}
+
+func (fs *createCountingFs) onCreate(filename string) {
+ fs.mu.Lock()
+ defer fs.mu.Unlock()
+
+ fs.fileCount[filename] = fs.fileCount[filename] + 1
+}
+
+func (fs *createCountingFs) Create(name string) (afero.File, error) {
+ f, err := fs.Fs.Create(name)
+ if err == nil {
+ fs.onCreate(name)
+ }
+ return f, err
+}
+
+func (fs *createCountingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+ f, err := fs.Fs.OpenFile(name, flag, perm)
+ if err == nil && isWrite(flag) {
+ fs.onCreate(name)
+ }
+ return f, err
+}
diff --git a/hugofs/decorators.go b/hugofs/decorators.go
new file mode 100644
index 000000000..3762d753b
--- /dev/null
+++ b/hugofs/decorators.go
@@ -0,0 +1,240 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/spf13/afero"
+)
+
+var (
+ _ FilesystemUnwrapper = (*baseFileDecoratorFs)(nil)
+)
+
+func decorateDirs(fs afero.Fs, meta *FileMeta) afero.Fs {
+ ffs := &baseFileDecoratorFs{Fs: fs}
+
+ decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
+ if !fi.IsDir() {
+ // Leave regular files as they are.
+ return fi, nil
+ }
+
+ return decorateFileInfo(fi, fs, nil, "", "", meta), nil
+ }
+
+ ffs.decorate = decorator
+
+ return ffs
+}
+
+func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs {
+ ffs := &baseFileDecoratorFs{Fs: fs}
+
+ decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
+ path := createPath(name)
+
+ return decorateFileInfo(fi, fs, nil, "", path, nil), nil
+ }
+
+ ffs.decorate = decorator
+
+ return ffs
+}
+
+// DecorateBasePathFs adds Path info to files and directories in the
+// provided BasePathFs, using the base as base.
+func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs {
+ basePath, _ := base.RealPath("")
+ if !strings.HasSuffix(basePath, filepathSeparator) {
+ basePath += filepathSeparator
+ }
+
+ ffs := &baseFileDecoratorFs{Fs: base}
+
+ decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
+ path := strings.TrimPrefix(name, basePath)
+
+ return decorateFileInfo(fi, base, nil, "", path, nil), nil
+ }
+
+ ffs.decorate = decorator
+
+ return ffs
+}
+
+// NewBaseFileDecorator decorates the given Fs to provide the real filename
+// and an Opener func.
+func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs {
+ ffs := &baseFileDecoratorFs{Fs: fs}
+
+ decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) {
+ // Store away the original in case it's a symlink.
+ meta := NewFileMeta()
+ meta.Name = fi.Name()
+
+ if fi.IsDir() {
+ meta.JoinStatFunc = func(name string) (FileMetaInfo, error) {
+ joinedFilename := filepath.Join(filename, name)
+ fi, _, err := lstatIfPossible(fs, joinedFilename)
+ if err != nil {
+ return nil, err
+ }
+
+ fi, err = ffs.decorate(fi, joinedFilename)
+ if err != nil {
+ return nil, err
+ }
+
+ return fi.(FileMetaInfo), nil
+ }
+ }
+
+ isSymlink := isSymlink(fi)
+ if isSymlink {
+ meta.OriginalFilename = filename
+ var link string
+ var err error
+ link, fi, err = evalSymlinks(fs, filename)
+ if err != nil {
+ return nil, err
+ }
+ filename = link
+ meta.IsSymlink = true
+ }
+
+ opener := func() (afero.File, error) {
+ return ffs.open(filename)
+ }
+
+ fim := decorateFileInfo(fi, ffs, opener, filename, "", meta)
+
+ for _, cb := range callbacks {
+ cb(fim)
+ }
+
+ return fim, nil
+ }
+
+ ffs.decorate = decorator
+ return ffs
+}
+
+func evalSymlinks(fs afero.Fs, filename string) (string, os.FileInfo, error) {
+ link, err := filepath.EvalSymlinks(filename)
+ if err != nil {
+ return "", nil, err
+ }
+
+ fi, err := fs.Stat(link)
+ if err != nil {
+ return "", nil, err
+ }
+
+ return link, fi, nil
+}
+
+type baseFileDecoratorFs struct {
+ afero.Fs
+ decorate func(fi os.FileInfo, filename string) (os.FileInfo, error)
+}
+
+func (fs *baseFileDecoratorFs) UnwrapFilesystem() afero.Fs {
+ return fs.Fs
+}
+
+func (fs *baseFileDecoratorFs) Stat(name string) (os.FileInfo, error) {
+ fi, err := fs.Fs.Stat(name)
+ if err != nil {
+ return nil, err
+ }
+
+ return fs.decorate(fi, name)
+}
+
+func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
+ var (
+ fi os.FileInfo
+ err error
+ ok bool
+ )
+
+ if lstater, isLstater := fs.Fs.(afero.Lstater); isLstater {
+ fi, ok, err = lstater.LstatIfPossible(name)
+ } else {
+ fi, err = fs.Fs.Stat(name)
+ }
+
+ if err != nil {
+ return nil, false, err
+ }
+
+ fi, err = fs.decorate(fi, name)
+
+ return fi, ok, err
+}
+
+func (fs *baseFileDecoratorFs) Open(name string) (afero.File, error) {
+ return fs.open(name)
+}
+
+func (fs *baseFileDecoratorFs) open(name string) (afero.File, error) {
+ f, err := fs.Fs.Open(name)
+ if err != nil {
+ return nil, err
+ }
+ return &baseFileDecoratorFile{File: f, fs: fs}, nil
+}
+
+type baseFileDecoratorFile struct {
+ afero.File
+ fs *baseFileDecoratorFs
+}
+
+func (l *baseFileDecoratorFile) Readdir(c int) (ofi []os.FileInfo, err error) {
+ dirnames, err := l.File.Readdirnames(c)
+ if err != nil {
+ return nil, err
+ }
+
+ fisp := make([]os.FileInfo, 0, len(dirnames))
+
+ for _, dirname := range dirnames {
+ filename := dirname
+
+ if l.Name() != "" && l.Name() != filepathSeparator {
+ filename = filepath.Join(l.Name(), dirname)
+ }
+
+ // We need to resolve any symlink info.
+ fi, _, err := lstatIfPossible(l.fs.Fs, filename)
+ if err != nil {
+ if os.IsNotExist(err) {
+ continue
+ }
+ return nil, err
+ }
+ fi, err = l.fs.decorate(fi, filename)
+ if err != nil {
+ return nil, fmt.Errorf("decorate: %w", err)
+ }
+ fisp = append(fisp, fi)
+ }
+
+ return fisp, err
+}
diff --git a/hugofs/fileinfo.go b/hugofs/fileinfo.go
new file mode 100644
index 000000000..1d46a7464
--- /dev/null
+++ b/hugofs/fileinfo.go
@@ -0,0 +1,303 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package hugofs provides the file systems used by Hugo.
+package hugofs
+
+import (
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+ "golang.org/x/text/unicode/norm"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/htime"
+
+ "github.com/spf13/afero"
+)
+
+func NewFileMeta() *FileMeta {
+ return &FileMeta{}
+}
+
+// PathFile returns the relative file path for the file source.
+func (f *FileMeta) PathFile() string {
+ if f.BaseDir == "" {
+ return ""
+ }
+ return strings.TrimPrefix(strings.TrimPrefix(f.Filename, f.BaseDir), filepathSeparator)
+}
+
+type FileMeta struct {
+ Name string
+ Filename string
+ Path string
+ PathWalk string
+ OriginalFilename string
+ BaseDir string
+
+ SourceRoot string
+ MountRoot string
+ Module string
+
+ Weight int
+ IsOrdered bool
+ IsSymlink bool
+ IsRootFile bool
+ IsProject bool
+ Watch bool
+
+ Classifier files.ContentClass
+
+ SkipDir bool
+
+ Lang string
+ TranslationBaseName string
+ TranslationBaseNameWithExt string
+ Translations []string
+
+ Fs afero.Fs
+ OpenFunc func() (afero.File, error)
+ JoinStatFunc func(name string) (FileMetaInfo, error)
+
+ // Include only files or directories that match.
+ InclusionFilter *glob.FilenameFilter
+}
+
+func (m *FileMeta) Copy() *FileMeta {
+ if m == nil {
+ return NewFileMeta()
+ }
+ c := *m
+ return &c
+}
+
+func (m *FileMeta) Merge(from *FileMeta) {
+ if m == nil || from == nil {
+ return
+ }
+ dstv := reflect.Indirect(reflect.ValueOf(m))
+ srcv := reflect.Indirect(reflect.ValueOf(from))
+
+ for i := 0; i < dstv.NumField(); i++ {
+ v := dstv.Field(i)
+ if !v.CanSet() {
+ continue
+ }
+ if !hreflect.IsTruthfulValue(v) {
+ v.Set(srcv.Field(i))
+ }
+ }
+
+ if m.InclusionFilter == nil {
+ m.InclusionFilter = from.InclusionFilter
+ }
+}
+
+func (f *FileMeta) Open() (afero.File, error) {
+ if f.OpenFunc == nil {
+ return nil, errors.New("OpenFunc not set")
+ }
+ return f.OpenFunc()
+}
+
+func (f *FileMeta) JoinStat(name string) (FileMetaInfo, error) {
+ if f.JoinStatFunc == nil {
+ return nil, os.ErrNotExist
+ }
+ return f.JoinStatFunc(name)
+}
+
+type FileMetaInfo interface {
+ os.FileInfo
+ Meta() *FileMeta
+}
+
+type fileInfoMeta struct {
+ os.FileInfo
+
+ m *FileMeta
+}
+
+type filenameProvider interface {
+ Filename() string
+}
+
+var _ filenameProvider = (*fileInfoMeta)(nil)
+
+// Filename returns the full filename.
+func (fi *fileInfoMeta) Filename() string {
+ return fi.m.Filename
+}
+
+// Name returns the file's name. Note that we follow symlinks,
+// if supported by the file system, and the Name given here will be the
+// name of the symlink, which is what Hugo needs in all situations.
+func (fi *fileInfoMeta) Name() string {
+ if name := fi.m.Name; name != "" {
+ return name
+ }
+ return fi.FileInfo.Name()
+}
+
+func (fi *fileInfoMeta) Meta() *FileMeta {
+ return fi.m
+}
+
+func NewFileMetaInfo(fi os.FileInfo, m *FileMeta) FileMetaInfo {
+ if m == nil {
+ panic("FileMeta must be set")
+ }
+ if fim, ok := fi.(FileMetaInfo); ok {
+ m.Merge(fim.Meta())
+ }
+ return &fileInfoMeta{FileInfo: fi, m: m}
+}
+
+type dirNameOnlyFileInfo struct {
+ name string
+ modTime time.Time
+}
+
+func (fi *dirNameOnlyFileInfo) Name() string {
+ return fi.name
+}
+
+func (fi *dirNameOnlyFileInfo) Size() int64 {
+ panic("not implemented")
+}
+
+func (fi *dirNameOnlyFileInfo) Mode() os.FileMode {
+ return os.ModeDir
+}
+
+func (fi *dirNameOnlyFileInfo) ModTime() time.Time {
+ return fi.modTime
+}
+
+func (fi *dirNameOnlyFileInfo) IsDir() bool {
+ return true
+}
+
+func (fi *dirNameOnlyFileInfo) Sys() any {
+ return nil
+}
+
+func newDirNameOnlyFileInfo(name string, meta *FileMeta, fileOpener func() (afero.File, error)) FileMetaInfo {
+ name = normalizeFilename(name)
+ _, base := filepath.Split(name)
+
+ m := meta.Copy()
+ if m.Filename == "" {
+ m.Filename = name
+ }
+ m.OpenFunc = fileOpener
+ m.IsOrdered = false
+
+ return NewFileMetaInfo(
+ &dirNameOnlyFileInfo{name: base, modTime: htime.Now()},
+ m,
+ )
+}
+
+func decorateFileInfo(
+ fi os.FileInfo,
+ fs afero.Fs, opener func() (afero.File, error),
+ filename, filepath string, inMeta *FileMeta) FileMetaInfo {
+ var meta *FileMeta
+ var fim FileMetaInfo
+
+ filepath = strings.TrimPrefix(filepath, filepathSeparator)
+
+ var ok bool
+ if fim, ok = fi.(FileMetaInfo); ok {
+ meta = fim.Meta()
+ } else {
+ meta = NewFileMeta()
+ fim = NewFileMetaInfo(fi, meta)
+ }
+
+ if opener != nil {
+ meta.OpenFunc = opener
+ }
+ if fs != nil {
+ meta.Fs = fs
+ }
+ nfilepath := normalizeFilename(filepath)
+ nfilename := normalizeFilename(filename)
+ if nfilepath != "" {
+ meta.Path = nfilepath
+ }
+ if nfilename != "" {
+ meta.Filename = nfilename
+ }
+
+ meta.Merge(inMeta)
+
+ return fim
+}
+
+func isSymlink(fi os.FileInfo) bool {
+ return fi != nil && fi.Mode()&os.ModeSymlink == os.ModeSymlink
+}
+
+func fileInfosToFileMetaInfos(fis []os.FileInfo) []FileMetaInfo {
+ fims := make([]FileMetaInfo, len(fis))
+ for i, v := range fis {
+ fims[i] = v.(FileMetaInfo)
+ }
+ return fims
+}
+
+func normalizeFilename(filename string) string {
+ if filename == "" {
+ return ""
+ }
+ if runtime.GOOS == "darwin" {
+ // When a file system is HFS+, its filepath is in NFD form.
+ return norm.NFC.String(filename)
+ }
+ return filename
+}
+
+func fileInfosToNames(fis []os.FileInfo) []string {
+ names := make([]string, len(fis))
+ for i, d := range fis {
+ names[i] = d.Name()
+ }
+ return names
+}
+
+func fromSlash(filenames []string) []string {
+ for i, name := range filenames {
+ filenames[i] = filepath.FromSlash(name)
+ }
+ return filenames
+}
+
+func sortFileInfos(fis []os.FileInfo) {
+ sort.Slice(fis, func(i, j int) bool {
+ fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo)
+ return fimi.Meta().Filename < fimj.Meta().Filename
+ })
+}
diff --git a/hugofs/fileinfo_test.go b/hugofs/fileinfo_test.go
new file mode 100644
index 000000000..8d6a2ff7a
--- /dev/null
+++ b/hugofs/fileinfo_test.go
@@ -0,0 +1,51 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestFileMeta(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("Merge", func(c *qt.C) {
+ src := &FileMeta{
+ Filename: "fs1",
+ Path: "ps1",
+ }
+ dst := &FileMeta{
+ Filename: "fd1",
+ }
+
+ dst.Merge(src)
+
+ c.Assert(dst.Path, qt.Equals, "ps1")
+ c.Assert(dst.Filename, qt.Equals, "fd1")
+ })
+
+ c.Run("Copy", func(c *qt.C) {
+ src := &FileMeta{
+ Filename: "fs1",
+ Path: "ps1",
+ }
+ dst := src.Copy()
+
+ c.Assert(dst, qt.Not(qt.Equals), src)
+ c.Assert(dst, qt.DeepEquals, src)
+ })
+
+}
diff --git a/hugofs/filename_filter_fs.go b/hugofs/filename_filter_fs.go
new file mode 100644
index 000000000..4ecd1f55a
--- /dev/null
+++ b/hugofs/filename_filter_fs.go
@@ -0,0 +1,178 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "os"
+ "strings"
+ "syscall"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+ "github.com/spf13/afero"
+)
+
+var (
+ _ FilesystemUnwrapper = (*filenameFilterFs)(nil)
+)
+
+func newFilenameFilterFs(fs afero.Fs, base string, filter *glob.FilenameFilter) afero.Fs {
+ return &filenameFilterFs{
+ fs: fs,
+ base: base,
+ filter: filter,
+ }
+}
+
+// filenameFilterFs is a filesystem that filters by filename.
+type filenameFilterFs struct {
+ base string
+ fs afero.Fs
+
+ filter *glob.FilenameFilter
+}
+
+func (fs *filenameFilterFs) UnwrapFilesystem() afero.Fs {
+ return fs.fs
+}
+
+func (fs *filenameFilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
+ fi, b, err := fs.fs.(afero.Lstater).LstatIfPossible(name)
+ if err != nil {
+ return nil, false, err
+ }
+ if !fs.filter.Match(name, fi.IsDir()) {
+ return nil, false, os.ErrNotExist
+ }
+ return fi, b, nil
+}
+
+func (fs *filenameFilterFs) Open(name string) (afero.File, error) {
+ fi, err := fs.fs.Stat(name)
+ if err != nil {
+ return nil, err
+ }
+
+ if !fs.filter.Match(name, fi.IsDir()) {
+ return nil, os.ErrNotExist
+ }
+
+ f, err := fs.fs.Open(name)
+ if err != nil {
+ return nil, err
+ }
+
+ if !fi.IsDir() {
+ return f, nil
+ }
+
+ return &filenameFilterDir{
+ File: f,
+ base: fs.base,
+ filter: fs.filter,
+ }, nil
+}
+
+func (fs *filenameFilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+ return fs.Open(name)
+}
+
+func (fs *filenameFilterFs) Stat(name string) (os.FileInfo, error) {
+ fi, _, err := fs.LstatIfPossible(name)
+ return fi, err
+}
+
+func (fs *filenameFilterFs) getOpener(name string) func() (afero.File, error) {
+ return func() (afero.File, error) {
+ return fs.Open(name)
+ }
+}
+
+type filenameFilterDir struct {
+ afero.File
+ base string
+ filter *glob.FilenameFilter
+}
+
+func (f *filenameFilterDir) Readdir(count int) ([]os.FileInfo, error) {
+ fis, err := f.File.Readdir(-1)
+ if err != nil {
+ return nil, err
+ }
+
+ var result []os.FileInfo
+ for _, fi := range fis {
+ fim := fi.(FileMetaInfo)
+ if f.filter.Match(strings.TrimPrefix(fim.Meta().Filename, f.base), fim.IsDir()) {
+ result = append(result, fi)
+ }
+ }
+
+ return result, nil
+}
+
+func (f *filenameFilterDir) Readdirnames(count int) ([]string, error) {
+ dirsi, err := f.Readdir(count)
+ if err != nil {
+ return nil, err
+ }
+
+ dirs := make([]string, len(dirsi))
+ for i, d := range dirsi {
+ dirs[i] = d.Name()
+ }
+ return dirs, nil
+}
+
+func (fs *filenameFilterFs) Chmod(n string, m os.FileMode) error {
+ return syscall.EPERM
+}
+
+func (fs *filenameFilterFs) Chtimes(n string, a, m time.Time) error {
+ return syscall.EPERM
+}
+
+func (fs *filenameFilterFs) Chown(n string, uid, gid int) error {
+ return syscall.EPERM
+}
+
+func (fs *filenameFilterFs) ReadDir(name string) ([]os.FileInfo, error) {
+ panic("not implemented")
+}
+
+func (fs *filenameFilterFs) Remove(n string) error {
+ return syscall.EPERM
+}
+
+func (fs *filenameFilterFs) RemoveAll(p string) error {
+ return syscall.EPERM
+}
+
+func (fs *filenameFilterFs) Rename(o, n string) error {
+ return syscall.EPERM
+}
+func (fs *filenameFilterFs) Create(n string) (afero.File, error) {
+ return nil, syscall.EPERM
+}
+func (fs *filenameFilterFs) Name() string {
+ return "FinameFilterFS"
+}
+
+func (fs *filenameFilterFs) Mkdir(n string, p os.FileMode) error {
+ return syscall.EPERM
+}
+
+func (fs *filenameFilterFs) MkdirAll(n string, p os.FileMode) error {
+ return syscall.EPERM
+}
diff --git a/hugofs/filename_filter_fs_test.go b/hugofs/filename_filter_fs_test.go
new file mode 100644
index 000000000..b3e97a6a6
--- /dev/null
+++ b/hugofs/filename_filter_fs_test.go
@@ -0,0 +1,83 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestFilenameFilterFs(t *testing.T) {
+ c := qt.New(t)
+
+ base := filepath.FromSlash("/mybase")
+
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ for _, letter := range []string{"a", "b", "c"} {
+ for i := 1; i <= 3; i++ {
+ c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.txt", i)), []byte("some text file for"+letter), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.json", i)), []byte("some json file for"+letter), 0755), qt.IsNil)
+ }
+ }
+
+ fs = afero.NewBasePathFs(fs, base)
+
+ filter, err := glob.NewFilenameFilter(nil, []string{"/b/**.txt"})
+ c.Assert(err, qt.IsNil)
+
+ fs = newFilenameFilterFs(fs, base, filter)
+
+ assertExists := func(filename string, shouldExist bool) {
+ filename = filepath.Clean(filename)
+ _, err1 := fs.Stat(filename)
+ f, err2 := fs.Open(filename)
+ if shouldExist {
+ c.Assert(err1, qt.IsNil)
+ c.Assert(err2, qt.IsNil)
+ defer f.Close()
+
+ } else {
+ for _, err := range []error{err1, err2} {
+ c.Assert(err, qt.Not(qt.IsNil))
+ c.Assert(errors.Is(err, os.ErrNotExist), qt.IsTrue)
+ }
+ }
+ }
+
+ assertExists("/a/my1.txt", true)
+ assertExists("/b/my1.txt", false)
+
+ dirB, err := fs.Open("/b")
+ defer dirB.Close()
+ c.Assert(err, qt.IsNil)
+ dirBEntries, err := dirB.Readdirnames(-1)
+ c.Assert(dirBEntries, qt.DeepEquals, []string{"my1.json", "my2.json", "my3.json"})
+
+ dirC, err := fs.Open("/c")
+ defer dirC.Close()
+ c.Assert(err, qt.IsNil)
+ dirCEntries, err := dirC.Readdirnames(-1)
+ c.Assert(dirCEntries, qt.DeepEquals, []string{"my1.json", "my1.txt", "my2.json", "my2.txt", "my3.json", "my3.txt"})
+
+}
diff --git a/hugofs/files/classifier.go b/hugofs/files/classifier.go
new file mode 100644
index 000000000..09b239c21
--- /dev/null
+++ b/hugofs/files/classifier.go
@@ -0,0 +1,224 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package files
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "unicode"
+
+ "github.com/spf13/afero"
+)
+
+const (
+ // The NPM package.json "template" file.
+ FilenamePackageHugoJSON = "package.hugo.json"
+ // The NPM package file.
+ FilenamePackageJSON = "package.json"
+)
+
+var (
+ // This should be the only list of valid extensions for content files.
+ contentFileExtensions = []string{
+ "html", "htm",
+ "mdown", "markdown", "md",
+ "asciidoc", "adoc", "ad",
+ "rest", "rst",
+ "org",
+ "pandoc", "pdc",
+ }
+
+ contentFileExtensionsSet map[string]bool
+
+ htmlFileExtensions = []string{
+ "html", "htm",
+ }
+
+ htmlFileExtensionsSet map[string]bool
+)
+
+func init() {
+ contentFileExtensionsSet = make(map[string]bool)
+ for _, ext := range contentFileExtensions {
+ contentFileExtensionsSet[ext] = true
+ }
+ htmlFileExtensionsSet = make(map[string]bool)
+ for _, ext := range htmlFileExtensions {
+ htmlFileExtensionsSet[ext] = true
+ }
+}
+
+func IsContentFile(filename string) bool {
+ return contentFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")]
+}
+
+func IsIndexContentFile(filename string) bool {
+ if !IsContentFile(filename) {
+ return false
+ }
+
+ base := filepath.Base(filename)
+
+ return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.")
+}
+
+func IsHTMLFile(filename string) bool {
+ return htmlFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")]
+}
+
+func IsContentExt(ext string) bool {
+ return contentFileExtensionsSet[ext]
+}
+
+type ContentClass string
+
+const (
+ ContentClassLeaf ContentClass = "leaf"
+ ContentClassBranch ContentClass = "branch"
+ ContentClassFile ContentClass = "zfile" // Sort below
+ ContentClassContent ContentClass = "zcontent"
+)
+
+func (c ContentClass) IsBundle() bool {
+ return c == ContentClassLeaf || c == ContentClassBranch
+}
+
+func ClassifyContentFile(filename string, open func() (afero.File, error)) ContentClass {
+ if !IsContentFile(filename) {
+ return ContentClassFile
+ }
+
+ if IsHTMLFile(filename) {
+ // We need to look inside the file. If the first non-whitespace
+ // character is a "<", then we treat it as a regular file.
+ // Eearlier we created pages for these files, but that had all sorts
+ // of troubles, and isn't what it says in the documentation.
+ // See https://github.com/gohugoio/hugo/issues/7030
+ if open == nil {
+ panic(fmt.Sprintf("no file opener provided for %q", filename))
+ }
+
+ f, err := open()
+ if err != nil {
+ return ContentClassFile
+ }
+ ishtml := isHTMLContent(f)
+ f.Close()
+ if ishtml {
+ return ContentClassFile
+ }
+
+ }
+
+ if strings.HasPrefix(filename, "_index.") {
+ return ContentClassBranch
+ }
+
+ if strings.HasPrefix(filename, "index.") {
+ return ContentClassLeaf
+ }
+
+ return ContentClassContent
+}
+
+var htmlComment = []rune{'<', '!', '-', '-'}
+
+func isHTMLContent(r io.Reader) bool {
+ br := bufio.NewReader(r)
+ i := 0
+ for {
+ c, _, err := br.ReadRune()
+ if err != nil {
+ break
+ }
+
+ if i > 0 {
+ if i >= len(htmlComment) {
+ return false
+ }
+
+ if c != htmlComment[i] {
+ return true
+ }
+
+ i++
+ continue
+ }
+
+ if !unicode.IsSpace(c) {
+ if i == 0 && c != '<' {
+ return false
+ }
+ i++
+ }
+ }
+ return true
+}
+
+const (
+ ComponentFolderArchetypes = "archetypes"
+ ComponentFolderStatic = "static"
+ ComponentFolderLayouts = "layouts"
+ ComponentFolderContent = "content"
+ ComponentFolderData = "data"
+ ComponentFolderAssets = "assets"
+ ComponentFolderI18n = "i18n"
+
+ FolderResources = "resources"
+ FolderJSConfig = "_jsconfig" // Mounted below /assets with postcss.config.js etc.
+)
+
+var (
+ JsConfigFolderMountPrefix = filepath.Join(ComponentFolderAssets, FolderJSConfig)
+
+ ComponentFolders = []string{
+ ComponentFolderArchetypes,
+ ComponentFolderStatic,
+ ComponentFolderLayouts,
+ ComponentFolderContent,
+ ComponentFolderData,
+ ComponentFolderAssets,
+ ComponentFolderI18n,
+ }
+
+ componentFoldersSet = make(map[string]bool)
+)
+
+func init() {
+ sort.Strings(ComponentFolders)
+ for _, f := range ComponentFolders {
+ componentFoldersSet[f] = true
+ }
+}
+
+// ResolveComponentFolder returns "content" from "content/blog/foo.md" etc.
+func ResolveComponentFolder(filename string) string {
+ filename = strings.TrimPrefix(filename, string(os.PathSeparator))
+ for _, cf := range ComponentFolders {
+ if strings.HasPrefix(filename, cf) {
+ return cf
+ }
+ }
+
+ return ""
+}
+
+func IsComponentFolder(name string) bool {
+ return componentFoldersSet[name]
+}
diff --git a/hugofs/files/classifier_test.go b/hugofs/files/classifier_test.go
new file mode 100644
index 000000000..84036b870
--- /dev/null
+++ b/hugofs/files/classifier_test.go
@@ -0,0 +1,59 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package files
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestIsContentFile(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(IsContentFile(filepath.FromSlash("my/file.md")), qt.Equals, true)
+ c.Assert(IsContentFile(filepath.FromSlash("my/file.ad")), qt.Equals, true)
+ c.Assert(IsContentFile(filepath.FromSlash("textfile.txt")), qt.Equals, false)
+ c.Assert(IsContentExt("md"), qt.Equals, true)
+ c.Assert(IsContentExt("json"), qt.Equals, false)
+}
+
+func TestIsHTMLContent(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(isHTMLContent(strings.NewReader(" <html>")), qt.Equals, true)
+ c.Assert(isHTMLContent(strings.NewReader(" <!--\n---")), qt.Equals, false)
+ c.Assert(isHTMLContent(strings.NewReader(" <!--")), qt.Equals, true)
+ c.Assert(isHTMLContent(strings.NewReader(" ---<")), qt.Equals, false)
+ c.Assert(isHTMLContent(strings.NewReader(" foo <")), qt.Equals, false)
+}
+
+func TestComponentFolders(t *testing.T) {
+ c := qt.New(t)
+
+ // It's important that these are absolutely right and not changed.
+ c.Assert(len(componentFoldersSet), qt.Equals, len(ComponentFolders))
+ c.Assert(IsComponentFolder("archetypes"), qt.Equals, true)
+ c.Assert(IsComponentFolder("layouts"), qt.Equals, true)
+ c.Assert(IsComponentFolder("data"), qt.Equals, true)
+ c.Assert(IsComponentFolder("i18n"), qt.Equals, true)
+ c.Assert(IsComponentFolder("assets"), qt.Equals, true)
+ c.Assert(IsComponentFolder("resources"), qt.Equals, false)
+ c.Assert(IsComponentFolder("static"), qt.Equals, true)
+ c.Assert(IsComponentFolder("content"), qt.Equals, true)
+ c.Assert(IsComponentFolder("foo"), qt.Equals, false)
+ c.Assert(IsComponentFolder(""), qt.Equals, false)
+}
diff --git a/hugofs/filter_fs.go b/hugofs/filter_fs.go
new file mode 100644
index 000000000..351b4d0f7
--- /dev/null
+++ b/hugofs/filter_fs.go
@@ -0,0 +1,344 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "syscall"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/spf13/afero"
+)
+
// Compile-time interface checks.
var (
	_ afero.Fs      = (*FilterFs)(nil)
	_ afero.Lstater = (*FilterFs)(nil)
	_ afero.File    = (*filterDir)(nil)
)
+
// NewLanguageFs returns a filesystem that decorates the file infos served
// by fs with language metadata. langs maps a language code (e.g. "sv") to
// its configured weight.
func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
	// applyMeta decorates the entries of a single directory read.
	applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
		for i, fi := range fis {
			if fi.IsDir() {
				filename := filepath.Join(name, fi.Name())
				fis[i] = decorateFileInfo(fi, fs, fs.getOpener(filename), "", "", nil)
				continue
			}

			meta := fi.(FileMetaInfo).Meta()
			lang := meta.Lang

			// A language suffix in the filename (e.g. "myfile.sv.txt")
			// overrides the language set on the filesystem.
			fileLang, translationBaseName, translationBaseNameWithExt := langInfoFrom(langs, fi.Name())
			weight := meta.Weight

			if fileLang != "" {
				if fileLang == lang {
					// Give priority to myfile.sv.txt inside the sv filesystem.
					weight++
				}
				lang = fileLang
			}

			fim := NewFileMetaInfo(
				fi,
				&FileMeta{
					Lang:                       lang,
					Weight:                     weight,
					TranslationBaseName:        translationBaseName,
					TranslationBaseNameWithExt: translationBaseNameWithExt,
					Classifier:                 files.ClassifyContentFile(fi.Name(), meta.OpenFunc),
				})

			fis[i] = fim
		}
	}

	// all runs on the merged result and attaches the list of translations
	// to every file that shares a translation base name.
	all := func(fis []os.FileInfo) {
		// Maps translation base name to a list of language codes.
		translations := make(map[string][]string)
		trackTranslation := func(meta *FileMeta) {
			name := meta.TranslationBaseNameWithExt
			translations[name] = append(translations[name], meta.Lang)
		}
		for _, fi := range fis {
			if fi.IsDir() {
				continue
			}
			meta := fi.(FileMetaInfo).Meta()

			trackTranslation(meta)

		}

		// Second pass: attach the deduplicated language list.
		for _, fi := range fis {
			fim := fi.(FileMetaInfo)
			langs := translations[fim.Meta().TranslationBaseNameWithExt]
			if len(langs) > 0 {
				fim.Meta().Translations = sortAndremoveStringDuplicates(langs)
			}
		}
	}

	return &FilterFs{
		fs:             fs,
		applyPerSource: applyMeta,
		applyAll:       all,
	}, nil
}
+
+func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
+ applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
+ for i, fi := range fis {
+ if fi.IsDir() {
+ fis[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename), "", "", nil)
+ }
+ }
+ }
+
+ ffs := &FilterFs{
+ fs: fs,
+ applyPerSource: applyMeta,
+ }
+
+ return ffs, nil
+}
+
var (
	_ FilesystemUnwrapper = (*FilterFs)(nil)
)

// FilterFs is an ordered composite filesystem.
type FilterFs struct {
	// fs is the wrapped filesystem.
	fs afero.Fs

	// applyPerSource is invoked on the entries of each directory read;
	// name is the directory being read.
	applyPerSource func(fs *FilterFs, name string, fis []os.FileInfo)
	// applyAll, if set, is invoked on the merged result.
	applyAll func(fis []os.FileInfo)
}
+
// Chmod is not supported; FilterFs is read-only.
func (fs *FilterFs) Chmod(n string, m os.FileMode) error {
	return syscall.EPERM
}

// Chtimes is not supported; FilterFs is read-only.
func (fs *FilterFs) Chtimes(n string, a, m time.Time) error {
	return syscall.EPERM
}

// Chown is not supported; FilterFs is read-only.
func (fs *FilterFs) Chown(n string, uid, gid int) error {
	return syscall.EPERM
}

// UnwrapFilesystem returns the wrapped filesystem.
func (fs *FilterFs) UnwrapFilesystem() afero.Fs {
	return fs.fs
}
+
// LstatIfPossible stats name via the wrapped filesystem. Directories are
// decorated with an opener; regular files get the parent directory's
// filters applied (in place) before being returned.
func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
	fi, b, err := lstatIfPossible(fs.fs, name)
	if err != nil {
		return nil, false, err
	}

	if fi.IsDir() {
		return decorateFileInfo(fi, fs, fs.getOpener(name), "", "", nil), false, nil
	}

	parent := filepath.Dir(name)
	fs.applyFilters(parent, -1, fi)

	return fi, b, nil
}
+
// Mkdir is not supported; FilterFs is read-only.
func (fs *FilterFs) Mkdir(n string, p os.FileMode) error {
	return syscall.EPERM
}

// MkdirAll is not supported; FilterFs is read-only.
func (fs *FilterFs) MkdirAll(n string, p os.FileMode) error {
	return syscall.EPERM
}

// Name returns the name of this filesystem.
func (fs *FilterFs) Name() string {
	return "WeightedFileSystem"
}
+
// Open opens name in the wrapped filesystem, wrapping the result so that
// directory reads pass through this filesystem's filters.
func (fs *FilterFs) Open(name string) (afero.File, error) {
	f, err := fs.fs.Open(name)
	if err != nil {
		return nil, err
	}

	return &filterDir{
		File: f,
		ffs:  fs,
	}, nil
}
+
// OpenFile delegates to the wrapped filesystem's Open; flag and perm are
// ignored.
// NOTE(review): write flags are silently dropped here — confirm callers
// never open for writing through this filesystem.
func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
	return fs.fs.Open(name)
}

// ReadDir is not implemented; use Open + Readdir on the returned file.
func (fs *FilterFs) ReadDir(name string) ([]os.FileInfo, error) {
	panic("not implemented")
}
+
// Remove is not supported; FilterFs is read-only.
func (fs *FilterFs) Remove(n string) error {
	return syscall.EPERM
}

// RemoveAll is not supported; FilterFs is read-only.
func (fs *FilterFs) RemoveAll(p string) error {
	return syscall.EPERM
}

// Rename is not supported; FilterFs is read-only.
func (fs *FilterFs) Rename(o, n string) error {
	return syscall.EPERM
}

// Stat delegates to LstatIfPossible, discarding the lstat flag.
func (fs *FilterFs) Stat(name string) (os.FileInfo, error) {
	fi, _, err := fs.LstatIfPossible(name)
	return fi, err
}

// Create is not supported; FilterFs is read-only.
func (fs *FilterFs) Create(n string) (afero.File, error) {
	return nil, syscall.EPERM
}

// getOpener returns a func that opens name through this filter filesystem.
func (fs *FilterFs) getOpener(name string) func() (afero.File, error) {
	return func() (afero.File, error) {
		return fs.Open(name)
	}
}
+
// applyFilters runs the configured per-source hook on fis, removes
// duplicate directories (keeping the first occurrence), runs the merge
// hook, and truncates the result to count entries when count > 0.
// fis is mutated in place.
func (fs *FilterFs) applyFilters(name string, count int, fis ...os.FileInfo) ([]os.FileInfo, error) {
	if fs.applyPerSource != nil {
		fs.applyPerSource(fs, name, fis)
	}

	seen := make(map[string]bool)
	var duplicates []int
	for i, dir := range fis {
		if !dir.IsDir() {
			continue
		}
		if seen[dir.Name()] {
			duplicates = append(duplicates, i)
		} else {
			seen[dir.Name()] = true
		}
	}

	// Remove duplicate directories, keep first.
	if len(duplicates) > 0 {
		// Delete in reverse so earlier recorded indexes stay valid.
		for i := len(duplicates) - 1; i >= 0; i-- {
			idx := duplicates[i]
			fis = append(fis[:idx], fis[idx+1:]...)
		}
	}

	if fs.applyAll != nil {
		fs.applyAll(fis)
	}

	if count > 0 && len(fis) >= count {
		return fis[:count], nil
	}

	return fis, nil
}
+
// filterDir is an afero.File that routes directory listings through the
// owning FilterFs's filters.
type filterDir struct {
	afero.File
	ffs *FilterFs
}

// Readdir reads the whole directory and applies the filters, letting
// applyFilters do any count-based truncation.
func (f *filterDir) Readdir(count int) ([]os.FileInfo, error) {
	fis, err := f.File.Readdir(-1)
	if err != nil {
		return nil, err
	}
	return f.ffs.applyFilters(f.Name(), count, fis...)
}

// Readdirnames returns the names of the filtered directory entries.
func (f *filterDir) Readdirnames(count int) ([]string, error) {
	dirsi, err := f.Readdir(count)
	if err != nil {
		return nil, err
	}

	dirs := make([]string, len(dirsi))
	for i, d := range dirsi {
		dirs[i] = d.Name()
	}
	return dirs, nil
}
+
// langInfoFrom tries to extract the language from the given filename.
// Any valid language identifier in the name wins over the language set
// on the file system, e.g. "mypost.en.md".
// It returns the language (or ""), the translation base name without
// extension, and the translation base name with the extension restored.
func langInfoFrom(languages map[string]int, name string) (string, string, string) {
	base := filepath.Base(name)
	ext := filepath.Ext(base)
	// TrimSuffix with an empty ext is a no-op, so no guard is needed.
	stem := strings.TrimSuffix(base, ext)

	langExt := filepath.Ext(stem)
	candidate := strings.TrimPrefix(langExt, ".")

	lang := ""
	if _, ok := languages[candidate]; ok {
		lang = candidate
		stem = strings.TrimSuffix(stem, langExt)
	}

	return lang, stem, stem + ext
}
+
+func printFs(fs afero.Fs, path string, w io.Writer) {
+ if fs == nil {
+ return
+ }
+ afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+ fmt.Println("p:::", path)
+ return nil
+ })
+}
+
// sortAndremoveStringDuplicates sorts s in place and removes adjacent
// duplicates, returning the (possibly shorter) deduplicated slice. The
// input's backing array is reused.
func sortAndremoveStringDuplicates(s []string) []string {
	// Bug fix: the trailing s[:i+1] would panic on an empty or nil slice.
	if len(s) == 0 {
		return s
	}
	sort.Strings(s)
	i := 0
	for j := 1; j < len(s); j++ {
		// After sorting, "not less" means equal, i.e. a duplicate.
		if s[i] != s[j] {
			i++
			s[i] = s[j]
		}
	}

	return s[:i+1]
}
diff --git a/hugofs/filter_fs_test.go b/hugofs/filter_fs_test.go
new file mode 100644
index 000000000..524d957d6
--- /dev/null
+++ b/hugofs/filter_fs_test.go
@@ -0,0 +1,46 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestLangInfoFrom covers language extraction from filenames, including
// dotted names whose inner "extension" is not a configured language.
func TestLangInfoFrom(t *testing.T) {
	langs := map[string]int{
		"sv": 10,
		"en": 20,
	}

	c := qt.New(t)

	tests := []struct {
		input    string
		expected []string
	}{
		{"page.sv.md", []string{"sv", "page", "page.md"}},
		{"page.en.md", []string{"en", "page", "page.md"}},
		{"page.no.md", []string{"", "page.no", "page.no.md"}},
		{filepath.FromSlash("tc-lib-color/class-Com.Tecnick.Color.Css"), []string{"", "class-Com.Tecnick.Color", "class-Com.Tecnick.Color.Css"}},
		{filepath.FromSlash("class-Com.Tecnick.Color.sv.Css"), []string{"sv", "class-Com.Tecnick.Color", "class-Com.Tecnick.Color.Css"}},
	}

	for _, test := range tests {
		v1, v2, v3 := langInfoFrom(langs, test.input)
		c.Assert([]string{v1, v2, v3}, qt.DeepEquals, test.expected)
	}
}
diff --git a/hugofs/fs.go b/hugofs/fs.go
new file mode 100644
index 000000000..63c25a4c0
--- /dev/null
+++ b/hugofs/fs.go
@@ -0,0 +1,225 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package hugofs provides the file systems used by Hugo.
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/bep/overlayfs"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/config"
+ "github.com/spf13/afero"
+)
+
// Os points to the (real) Os filesystem.
var Os = &afero.OsFs{}

// Fs holds the core filesystems used by Hugo.
type Fs struct {
	// Source is Hugo's source file system.
	// Note that this will always be a "plain" Afero filesystem:
	// * afero.OsFs when running in production
	// * afero.MemMapFs for many of the tests.
	Source afero.Fs

	// PublishDir is where Hugo publishes its rendered content.
	// It's mounted inside publishDir (default /public).
	PublishDir afero.Fs

	// PublishDirStatic is the file system used for static files when --renderStaticToDisk is set.
	// When this is set, PublishDir is set to write to memory.
	PublishDirStatic afero.Fs

	// PublishDirServer is the file system used for serving the public directory with Hugo's development server.
	// This will typically be the same as PublishDir, but not if --renderStaticToDisk is set.
	PublishDirServer afero.Fs

	// Os is an OS file system.
	// NOTE: Field is currently unused.
	Os afero.Fs

	// WorkingDirReadOnly is a read-only file system
	// restricted to the project working dir.
	WorkingDirReadOnly afero.Fs

	// WorkingDirWritable is a writable file system
	// restricted to the project working dir.
	WorkingDirWritable afero.Fs
}
+
// NewDefault creates a new Fs with the OS file system
// as source and destination file systems.
func NewDefault(cfg config.Provider) *Fs {
	fs := Os
	return newFs(fs, fs, cfg)
}

// NewMem creates a new Fs with the MemMapFs
// as source and destination file systems.
// Useful for testing.
func NewMem(cfg config.Provider) *Fs {
	fs := &afero.MemMapFs{}
	return newFs(fs, fs, cfg)
}

// NewFrom creates a new Fs based on the provided Afero Fs
// as source and destination file systems.
// Useful for testing.
func NewFrom(fs afero.Fs, cfg config.Provider) *Fs {
	return newFs(fs, fs, cfg)
}

// NewFromSourceAndDestination creates a new Fs based on the provided Afero Fss
// as the source and destination file systems.
func NewFromSourceAndDestination(source, destination afero.Fs, cfg config.Provider) *Fs {
	return newFs(source, destination, cfg)
}
+
// newFs builds the Fs: it resolves and creates the absolute publish dir
// and wires up the working-dir restricted views. It panics on missing or
// obviously invalid configuration (fail fast at startup).
func newFs(source, destination afero.Fs, cfg config.Provider) *Fs {
	workingDir := cfg.GetString("workingDir")
	publishDir := cfg.GetString("publishDir")
	if publishDir == "" {
		panic("publishDir is empty")
	}

	// Sanity check
	if IsOsFs(source) && len(workingDir) < 2 {
		panic("workingDir is too short")
	}

	absPublishDir := paths.AbsPathify(workingDir, publishDir)

	// Make sure we always have the /public folder ready to use.
	if err := source.MkdirAll(absPublishDir, 0777); err != nil && !os.IsExist(err) {
		panic(err)
	}

	pubFs := afero.NewBasePathFs(destination, absPublishDir)

	return &Fs{
		Source:             source,
		PublishDir:         pubFs,
		PublishDirServer:   pubFs,
		PublishDirStatic:   pubFs,
		Os:                 &afero.OsFs{},
		WorkingDirReadOnly: getWorkingDirFsReadOnly(source, workingDir),
		WorkingDirWritable: getWorkingDirFsWritable(source, workingDir),
	}
}
+
+func getWorkingDirFsReadOnly(base afero.Fs, workingDir string) afero.Fs {
+ if workingDir == "" {
+ return afero.NewReadOnlyFs(base)
+ }
+ return afero.NewBasePathFs(afero.NewReadOnlyFs(base), workingDir)
+}
+
+func getWorkingDirFsWritable(base afero.Fs, workingDir string) afero.Fs {
+ if workingDir == "" {
+ return base
+ }
+ return afero.NewBasePathFs(base, workingDir)
+}
+
+func isWrite(flag int) bool {
+ return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
+}
+
// MakeReadableAndRemoveAllModulePkgDir makes any subdir in dir readable
// (chmod 0777) and then removes dir. It returns the number of directories
// visited.
// TODO(bep) move this to a more suitable place.
func MakeReadableAndRemoveAllModulePkgDir(fs afero.Fs, dir string) (int, error) {
	// Safe guard against removing an arbitrary directory tree.
	if !strings.Contains(dir, "pkg") {
		panic(fmt.Sprint("invalid dir:", dir))
	}

	counter := 0
	afero.Walk(fs, dir, func(path string, info os.FileInfo, err error) error {
		// Walk errors are deliberately ignored; this is best-effort cleanup.
		if err != nil {
			return nil
		}
		if info.IsDir() {
			counter++
			fs.Chmod(path, 0777)
		}
		return nil
	})
	return counter, fs.RemoveAll(dir)
}
+
// IsOsFs returns whether fs is an OsFs or wraps an OsFs.
// TODO(bep) make this more robust.
func IsOsFs(fs afero.Fs) bool {
	var isOsFs bool
	WalkFilesystems(fs, func(fs afero.Fs) bool {
		switch base := fs.(type) {
		case *afero.MemMapFs:
			isOsFs = false
		case *afero.OsFs:
			isOsFs = true
		case *afero.BasePathFs:
			// A BasePathFs that supports Lstat is taken to sit on top of
			// an OS filesystem.
			_, supportsLstat, _ := base.LstatIfPossible("asdfasdfasdf")
			isOsFs = supportsLstat
		}
		return isOsFs
	})
	return isOsFs
}
+
// FilesystemsUnwrapper is implemented by composite filesystems wrapping
// multiple underlying filesystems.
type FilesystemsUnwrapper interface {
	UnwrapFilesystems() []afero.Fs
}

// FilesystemUnwrapper is implemented by filesystems wrapping a single
// underlying filesystem.
type FilesystemUnwrapper interface {
	UnwrapFilesystem() afero.Fs
}

// WalkFn is the walk func for WalkFilesystems.
type WalkFn func(fs afero.Fs) bool
+
+// WalkFilesystems walks fs recursively and calls fn.
+// If fn returns true, walking is stopped.
+func WalkFilesystems(fs afero.Fs, fn WalkFn) bool {
+ if fn(fs) {
+ return true
+ }
+
+ if afs, ok := fs.(FilesystemUnwrapper); ok {
+ if WalkFilesystems(afs.UnwrapFilesystem(), fn) {
+ return true
+ }
+
+ } else if bfs, ok := fs.(FilesystemsUnwrapper); ok {
+ for _, sf := range bfs.UnwrapFilesystems() {
+ if WalkFilesystems(sf, fn) {
+ return true
+ }
+ }
+ } else if cfs, ok := fs.(overlayfs.FilesystemIterator); ok {
+ for i := 0; i < cfs.NumFilesystems(); i++ {
+ if WalkFilesystems(cfs.Filesystem(i), fn) {
+ return true
+ }
+ }
+ }
+
+ return false
+}
diff --git a/hugofs/fs_test.go b/hugofs/fs_test.go
new file mode 100644
index 000000000..f7203fac9
--- /dev/null
+++ b/hugofs/fs_test.go
@@ -0,0 +1,68 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting/hqt"
+ "github.com/spf13/afero"
+)
+
// TestIsOsFs verifies OS-filesystem detection through wrapping layers.
func TestIsOsFs(t *testing.T) {
	c := qt.New(t)

	c.Assert(IsOsFs(Os), qt.Equals, true)
	c.Assert(IsOsFs(&afero.MemMapFs{}), qt.Equals, false)
	c.Assert(IsOsFs(afero.NewBasePathFs(&afero.MemMapFs{}, "/public")), qt.Equals, false)
	c.Assert(IsOsFs(afero.NewBasePathFs(Os, t.TempDir())), qt.Equals, true)

}

// TestNewDefault checks the OS-backed Fs construction.
func TestNewDefault(t *testing.T) {
	c := qt.New(t)
	v := config.NewWithTestDefaults()
	v.Set("workingDir", t.TempDir())
	f := NewDefault(v)

	c.Assert(f.Source, qt.IsNotNil)
	c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs))
	c.Assert(f.Os, qt.IsNotNil)
	c.Assert(f.WorkingDirReadOnly, qt.IsNotNil)
	c.Assert(f.WorkingDirReadOnly, hqt.IsSameType, new(afero.BasePathFs))
	c.Assert(IsOsFs(f.Source), qt.IsTrue)
	c.Assert(IsOsFs(f.WorkingDirReadOnly), qt.IsTrue)
	c.Assert(IsOsFs(f.PublishDir), qt.IsTrue)
	c.Assert(IsOsFs(f.Os), qt.IsTrue)
}

// TestNewMem checks the memory-backed Fs construction.
func TestNewMem(t *testing.T) {
	c := qt.New(t)
	v := config.NewWithTestDefaults()
	f := NewMem(v)

	c.Assert(f.Source, qt.Not(qt.IsNil))
	c.Assert(f.Source, hqt.IsSameType, new(afero.MemMapFs))
	c.Assert(f.PublishDir, qt.Not(qt.IsNil))
	c.Assert(f.PublishDir, hqt.IsSameType, new(afero.BasePathFs))
	c.Assert(f.Os, hqt.IsSameType, new(afero.OsFs))
	c.Assert(f.WorkingDirReadOnly, qt.IsNotNil)
	c.Assert(IsOsFs(f.Source), qt.IsFalse)
	c.Assert(IsOsFs(f.WorkingDirReadOnly), qt.IsFalse)
	c.Assert(IsOsFs(f.PublishDir), qt.IsFalse)
	c.Assert(IsOsFs(f.Os), qt.IsTrue)
}
diff --git a/hugofs/glob.go b/hugofs/glob.go
new file mode 100644
index 000000000..147b6b9f1
--- /dev/null
+++ b/hugofs/glob.go
@@ -0,0 +1,84 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "errors"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/spf13/afero"
+)
+
+// Glob walks the fs and passes all matches to the handle func.
+// The handle func can return true to signal a stop.
+func Glob(fs afero.Fs, pattern string, handle func(fi FileMetaInfo) (bool, error)) error {
+ pattern = glob.NormalizePath(pattern)
+ if pattern == "" {
+ return nil
+ }
+
+ g, err := glob.GetGlob(pattern)
+ if err != nil {
+ return nil
+ }
+
+ hasSuperAsterisk := strings.Contains(pattern, "**")
+ levels := strings.Count(pattern, "/")
+ root := glob.ResolveRootDir(pattern)
+
+ // Signals that we're done.
+ done := errors.New("done")
+
+ wfn := func(p string, info FileMetaInfo, err error) error {
+ p = glob.NormalizePath(p)
+ if info.IsDir() {
+ if !hasSuperAsterisk {
+ // Avoid walking to the bottom if we can avoid it.
+ if p != "" && strings.Count(p, "/") >= levels {
+ return filepath.SkipDir
+ }
+ }
+ return nil
+ }
+
+ if g.Match(p) {
+ d, err := handle(info)
+ if err != nil {
+ return err
+ }
+ if d {
+ return done
+ }
+ }
+
+ return nil
+ }
+
+ w := NewWalkway(WalkwayConfig{
+ Root: root,
+ Fs: fs,
+ WalkFn: wfn,
+ })
+
+ err = w.Walk()
+
+ if err != done {
+ return err
+ }
+
+ return nil
+}
diff --git a/hugofs/glob/filename_filter.go b/hugofs/glob/filename_filter.go
new file mode 100644
index 000000000..c4b582bd5
--- /dev/null
+++ b/hugofs/glob/filename_filter.go
@@ -0,0 +1,159 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package glob
+
+import (
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gobwas/glob"
+)
+
// FilenameFilter decides whether a given filename should be included.
type FilenameFilter struct {
	// shouldInclude, if set, is consulted before the glob matchers.
	shouldInclude func(filename string) bool
	inclusions    []glob.Glob
	// dirInclusions holds globs for every parent dir of an inclusion so
	// that directory walks can descend towards included files.
	dirInclusions []glob.Glob
	exclusions    []glob.Glob
	isWindows     bool
}
+
// normalizeFilenameGlobPattern converts s to Unix separators (even on
// Windows) and ensures a single leading "/".
func normalizeFilenameGlobPattern(s string) string {
	s = filepath.ToSlash(s)
	if strings.HasPrefix(s, "/") {
		return s
	}
	return "/" + s
}
+
+// NewFilenameFilter creates a new Glob where the Match method will
+// return true if the file should be included.
+// Note that the inclusions will be checked first.
+func NewFilenameFilter(inclusions, exclusions []string) (*FilenameFilter, error) {
+ if inclusions == nil && exclusions == nil {
+ return nil, nil
+ }
+ filter := &FilenameFilter{isWindows: isWindows}
+
+ for _, include := range inclusions {
+ include = normalizeFilenameGlobPattern(include)
+ g, err := filenamesGlobCache.GetGlob(include)
+ if err != nil {
+ return nil, err
+ }
+ filter.inclusions = append(filter.inclusions, g)
+
+ // For mounts that do directory walking (e.g. content) we
+ // must make sure that all directories up to this inclusion also
+ // gets included.
+ dir := path.Dir(include)
+ parts := strings.Split(dir, "/")
+ for i, _ := range parts {
+ pattern := "/" + filepath.Join(parts[:i+1]...)
+ g, err := filenamesGlobCache.GetGlob(pattern)
+ if err != nil {
+ return nil, err
+ }
+ filter.dirInclusions = append(filter.dirInclusions, g)
+ }
+ }
+
+ for _, exclude := range exclusions {
+ exclude = normalizeFilenameGlobPattern(exclude)
+ g, err := filenamesGlobCache.GetGlob(exclude)
+ if err != nil {
+ return nil, err
+ }
+ filter.exclusions = append(filter.exclusions, g)
+ }
+
+ return filter, nil
+}
+
// MustNewFilenameFilter invokes NewFilenameFilter and panics on error.
func MustNewFilenameFilter(inclusions, exclusions []string) *FilenameFilter {
	filter, err := NewFilenameFilter(inclusions, exclusions)
	if err != nil {
		panic(err)
	}
	return filter
}

// NewFilenameFilterForInclusionFunc creates a new filter using the
// provided inclusion func.
func NewFilenameFilterForInclusionFunc(shouldInclude func(filename string) bool) *FilenameFilter {
	return &FilenameFilter{shouldInclude: shouldInclude, isWindows: isWindows}
}
+
+// Match returns whether filename should be included.
+func (f *FilenameFilter) Match(filename string, isDir bool) bool {
+ if f == nil {
+ return true
+ }
+ return f.doMatch(filename, isDir)
+ /*if f.shouldInclude == nil {
+ fmt.Printf("Match: %q (%t) => %t\n", filename, isDir, isMatch)
+ }
+ return isMatch*/
+}
+
// doMatch implements Match. Order matters: the shouldInclude func and the
// explicit inclusions are consulted before exclusions, and directories
// additionally match the derived parent-dir inclusions so that walkers
// can descend towards included files.
func (f *FilenameFilter) doMatch(filename string, isDir bool) bool {
	if f == nil {
		return true
	}

	if !strings.HasPrefix(filename, filepathSeparator) {
		filename = filepathSeparator + filename
	}

	if f.shouldInclude != nil {
		if f.shouldInclude(filename) {
			return true
		}
		if f.isWindows {
			// The Glob matchers below handles this by themselves,
			// for the shouldInclude we need to take some extra steps
			// to make this robust.
			winFilename := filepath.FromSlash(filename)
			if filename != winFilename {
				if f.shouldInclude(winFilename) {
					return true
				}
			}
		}

	}

	for _, inclusion := range f.inclusions {
		if inclusion.Match(filename) {
			return true
		}
	}

	if isDir && f.inclusions != nil {
		for _, inclusion := range f.dirInclusions {
			if inclusion.Match(filename) {
				return true
			}
		}
	}

	for _, exclusion := range f.exclusions {
		if exclusion.Match(filename) {
			return false
		}
	}

	// Nothing matched: include only if there were no positive constraints.
	return f.inclusions == nil && f.shouldInclude == nil
}
diff --git a/hugofs/glob/filename_filter_test.go b/hugofs/glob/filename_filter_test.go
new file mode 100644
index 000000000..1fce5b135
--- /dev/null
+++ b/hugofs/glob/filename_filter_test.go
@@ -0,0 +1,70 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package glob
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestFilenameFilter(t *testing.T) {
+ c := qt.New(t)
+
+ excludeAlmostAllJSON, err := NewFilenameFilter([]string{"/a/b/c/foo.json"}, []string{"**.json"})
+ c.Assert(err, qt.IsNil)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/data/my.json"), false), qt.Equals, false)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b/c/foo.json"), false), qt.Equals, true)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b/c/foo.bar"), false), qt.Equals, false)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b/c"), true), qt.Equals, true)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b"), true), qt.Equals, true)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a"), true), qt.Equals, true)
+ c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/"), true), qt.Equals, true)
+ c.Assert(excludeAlmostAllJSON.Match("", true), qt.Equals, true)
+
+ excludeAllButFooJSON, err := NewFilenameFilter([]string{"/a/**/foo.json"}, []string{"**.json"})
+ c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/data/my.json"), false), qt.Equals, false)
+ c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/c/d/e/foo.json"), false), qt.Equals, true)
+ c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/c"), true), qt.Equals, true)
+ c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/"), true), qt.Equals, true)
+ c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/"), true), qt.Equals, true)
+ c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/b"), true), qt.Equals, false)
+ c.Assert(err, qt.IsNil)
+
+ nopFilter, err := NewFilenameFilter(nil, nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(nopFilter.Match("ab.txt", false), qt.Equals, true)
+
+ includeOnlyFilter, err := NewFilenameFilter([]string{"**.json", "**.jpg"}, nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(includeOnlyFilter.Match("ab.json", false), qt.Equals, true)
+ c.Assert(includeOnlyFilter.Match("ab.jpg", false), qt.Equals, true)
+ c.Assert(includeOnlyFilter.Match("ab.gif", false), qt.Equals, false)
+
+ exlcudeOnlyFilter, err := NewFilenameFilter(nil, []string{"**.json", "**.jpg"})
+ c.Assert(err, qt.IsNil)
+ c.Assert(exlcudeOnlyFilter.Match("ab.json", false), qt.Equals, false)
+ c.Assert(exlcudeOnlyFilter.Match("ab.jpg", false), qt.Equals, false)
+ c.Assert(exlcudeOnlyFilter.Match("ab.gif", false), qt.Equals, true)
+
+ var nilFilter *FilenameFilter
+ c.Assert(nilFilter.Match("ab.gif", false), qt.Equals, true)
+
+ funcFilter := NewFilenameFilterForInclusionFunc(func(s string) bool { return strings.HasSuffix(s, ".json") })
+ c.Assert(funcFilter.Match("ab.json", false), qt.Equals, true)
+ c.Assert(funcFilter.Match("ab.bson", false), qt.Equals, false)
+
+}
diff --git a/hugofs/glob/glob.go b/hugofs/glob/glob.go
new file mode 100644
index 000000000..9e928ec32
--- /dev/null
+++ b/hugofs/glob/glob.go
@@ -0,0 +1,166 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package glob
+
+import (
+ "os"
+ "path"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+
+ "github.com/gobwas/glob"
+ "github.com/gobwas/glob/syntax"
+)
+
// filepathSeparator is the OS-specific path separator as a string.
const filepathSeparator = string(os.PathSeparator)

var (
	isWindows = runtime.GOOS == "windows"

	// defaultGlobCache backs the package-level GetGlob.
	defaultGlobCache = &globCache{
		isCaseSensitive: false,
		isWindows:       isWindows,
		cache:           make(map[string]globErr),
	}

	// filenamesGlobCache is used by the filename filters.
	filenamesGlobCache = &globCache{
		isCaseSensitive: false, // As long as the search strings are all lower case, this does not allocate.
		isWindows:       isWindows,
		cache:           make(map[string]globErr),
	}
)

// globErr is a cached compile result: the glob plus any compile error.
type globErr struct {
	glob glob.Glob
	err  error
}

// globCache memoizes compiled glob patterns, including failed compiles.
type globCache struct {
	// Config
	isCaseSensitive bool
	isWindows       bool

	// Cache
	sync.RWMutex
	cache map[string]globErr
}
+
// GetGlob returns the compiled glob for pattern, compiling and caching it
// on first use. Compile errors are cached as well, so a bad pattern fails
// fast on subsequent lookups.
func (gc *globCache) GetGlob(pattern string) (glob.Glob, error) {
	var eg globErr

	gc.RLock()
	var found bool
	eg, found = gc.cache[pattern]
	gc.RUnlock()
	if found {
		return eg.glob, eg.err
	}

	var g glob.Glob
	var err error

	pattern = filepath.ToSlash(pattern)

	if gc.isCaseSensitive {
		g, err = glob.Compile(pattern, '/')
	} else {
		g, err = glob.Compile(strings.ToLower(pattern), '/')

	}

	eg = globErr{
		globDecorator{
			g:               g,
			isCaseSensitive: gc.isCaseSensitive,
			isWindows:       gc.isWindows},
		err,
	}

	// Benign race: two goroutines may compile the same pattern
	// concurrently; both results are equivalent and the last write wins.
	gc.Lock()
	gc.cache[pattern] = eg
	gc.Unlock()

	return eg.glob, eg.err
}
+
// globDecorator normalizes the input string before delegating to the
// wrapped glob.
type globDecorator struct {
	// Whether both pattern and the strings to match will be matched
	// by their original case.
	isCaseSensitive bool

	// On Windows we may get filenames with Windows slashes to match,
	// which we need to normalize.
	isWindows bool

	g glob.Glob
}

// Match reports whether s matches, after slash and case normalization.
func (g globDecorator) Match(s string) bool {
	if g.isWindows {
		s = filepath.ToSlash(s)
	}
	if !g.isCaseSensitive {
		s = strings.ToLower(s)
	}
	return g.g.Match(s)
}
+
// GetGlob returns a compiled glob for pattern from the default
// (case-insensitive) cache.
func GetGlob(pattern string) (glob.Glob, error) {
	return defaultGlobCache.GetGlob(pattern)
}
+
// NormalizePath lower-cases p, converts it to slash separators, cleans
// it, and trims surrounding slashes and dots.
func NormalizePath(p string) string {
	p = strings.ToLower(filepath.ToSlash(p))
	return strings.Trim(path.Clean(p), "/.")
}
+
+// ResolveRootDir takes a normalized path on the form "assets/**.json" and
+// determines any root dir, i.e. any start path without any wildcards.
+func ResolveRootDir(p string) string {
+ parts := strings.Split(path.Dir(p), "/")
+ var roots []string
+ for _, part := range parts {
+ if HasGlobChar(part) {
+ break
+ }
+ roots = append(roots, part)
+ }
+
+ if len(roots) == 0 {
+ return ""
+ }
+
+ return strings.Join(roots, "/")
+}
+
+// FilterGlobParts removes any string with glob wildcard.
+func FilterGlobParts(a []string) []string {
+ b := a[:0]
+ for _, x := range a {
+ if !HasGlobChar(x) {
+ b = append(b, x)
+ }
+ }
+ return b
+}
+
+// HasGlobChar returns whether s contains any glob wildcards.
+func HasGlobChar(s string) bool {
+ for i := 0; i < len(s); i++ {
+ if syntax.Special(s[i]) {
+ return true
+ }
+ }
+ return false
+}
diff --git a/hugofs/glob/glob_test.go b/hugofs/glob/glob_test.go
new file mode 100644
index 000000000..66efe9e53
--- /dev/null
+++ b/hugofs/glob/glob_test.go
@@ -0,0 +1,103 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package glob
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestResolveRootDir checks root-dir extraction for plain, double-star,
+// single-char and character-class wildcard patterns.
+func TestResolveRootDir(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ input string
+ expected string
+ }{
+ {"data/foo.json", "data"},
+ {"a/b/**/foo.json", "a/b"},
+ {"dat?a/foo.json", ""},
+ {"a/b[a-c]/foo.json", "a"},
+ } {
+ c.Assert(ResolveRootDir(test.input), qt.Equals, test.expected)
+ }
+}
+
+// TestFilterGlobParts checks that wildcard segments are removed.
+func TestFilterGlobParts(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ input []string
+ expected []string
+ }{
+ {[]string{"a", "*", "c"}, []string{"a", "c"}},
+ } {
+ c.Assert(FilterGlobParts(test.input), qt.DeepEquals, test.expected)
+ }
+}
+
+// TestNormalizePath checks lower-casing, slash conversion and trimming.
+func TestNormalizePath(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ input string
+ expected string
+ }{
+ {filepath.FromSlash("data/FOO.json"), "data/foo.json"},
+ {filepath.FromSlash("/data/FOO.json"), "data/foo.json"},
+ {filepath.FromSlash("./FOO.json"), "foo.json"},
+ {"//", ""},
+ } {
+ c.Assert(NormalizePath(test.input), qt.Equals, test.expected)
+ }
+}
+
+// TestGetGlob verifies case-insensitive matching through both caches.
+func TestGetGlob(t *testing.T) {
+ for _, cache := range []*globCache{defaultGlobCache, filenamesGlobCache} {
+ c := qt.New(t)
+ g, err := cache.GetGlob("**.JSON")
+ c.Assert(err, qt.IsNil)
+ c.Assert(g.Match("data/my.jSon"), qt.Equals, true)
+ }
+}
+
+// BenchmarkGetGlob benchmarks Match against globs compiled by each cache,
+// plus the cached GetGlob lookup itself.
+func BenchmarkGetGlob(b *testing.B) {
+
+ runBench := func(name string, cache *globCache, search string) {
+ b.Run(name, func(b *testing.B) {
+ // Compile via the cache under test; the previous code called the
+ // package-level GetGlob, so the cache parameter was ignored and
+ // all three variants benchmarked the default cache.
+ g, err := cache.GetGlob("**/foo")
+ if err != nil {
+ b.Fatal(err)
+ }
+ for i := 0; i < b.N; i++ {
+ _ = g.Match(search)
+ }
+ })
+ }
+
+ runBench("Default cache", defaultGlobCache, "abcde")
+ runBench("Filenames cache, lowercase searchs", filenamesGlobCache, "abcde")
+ runBench("Filenames cache, mixed case searchs", filenamesGlobCache, "abCDe")
+
+ b.Run("GetGlob", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ _, err := GetGlob("**/foo")
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+}
diff --git a/hugofs/glob_test.go b/hugofs/glob_test.go
new file mode 100644
index 000000000..29cd1e0ca
--- /dev/null
+++ b/hugofs/glob_test.go
@@ -0,0 +1,60 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestGlob creates a small file tree in a memory fs and checks the number
+// of files collected for a range of glob patterns.
+func TestGlob(t *testing.T) {
+ c := qt.New(t)
+
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ create := func(filename string) {
+ err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte("content "+filename), 0777)
+ c.Assert(err, qt.IsNil)
+ }
+
+ // collect returns the paths of all files matching pattern.
+ collect := func(pattern string) []string {
+ var paths []string
+ h := func(fi FileMetaInfo) (bool, error) {
+ paths = append(paths, fi.Meta().Path)
+ return false, nil
+ }
+ err := Glob(fs, pattern, h)
+ c.Assert(err, qt.IsNil)
+ return paths
+ }
+
+ create("root.json")
+ create("jsonfiles/d1.json")
+ create("jsonfiles/d2.json")
+ create("jsonfiles/sub/d3.json")
+ create("jsonfiles/d1.xml")
+ create("a/b/c/e/f.json")
+
+ c.Assert(collect("**.json"), qt.HasLen, 5)
+ c.Assert(collect("**"), qt.HasLen, 6)
+ c.Assert(collect(""), qt.HasLen, 0)
+ c.Assert(collect("jsonfiles/*.json"), qt.HasLen, 2)
+ c.Assert(collect("*.json"), qt.HasLen, 1)
+ c.Assert(collect("**.xml"), qt.HasLen, 1)
+ c.Assert(collect(filepath.FromSlash("/jsonfiles/*.json")), qt.HasLen, 2)
+}
diff --git a/hugofs/hashing_fs.go b/hugofs/hashing_fs.go
new file mode 100644
index 000000000..d15ba5863
--- /dev/null
+++ b/hugofs/hashing_fs.go
@@ -0,0 +1,97 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "hash"
+ "os"
+
+ "github.com/spf13/afero"
+)
+
+// Interface guards.
+var (
+ _ afero.Fs = (*md5HashingFs)(nil)
+ _ FilesystemUnwrapper = (*md5HashingFs)(nil)
+)
+
+// FileHashReceiver will receive the filename and the content's MD5 sum on file close.
+type FileHashReceiver interface {
+ OnFileClose(name, md5sum string)
+}
+
+// md5HashingFs wraps an afero.Fs and hashes all content written through it.
+type md5HashingFs struct {
+ afero.Fs
+ hashReceiver FileHashReceiver
+}
+
+// NewHashingFs creates a new filesystem that will receive MD5 checksums of
+// any written file content on Close. Note that this is probably not a good
+// idea for "full build" situations, but when doing fast render mode, the amount
+// of files published is low, and it would be really nice to know exactly which
+// of these files were actually changed.
+// Note that this will only work for file operations that use the io.Writer
+// to write content to file, but that is fine for the "publish content" use case.
+func NewHashingFs(delegate afero.Fs, hashReceiver FileHashReceiver) afero.Fs {
+ return &md5HashingFs{Fs: delegate, hashReceiver: hashReceiver}
+}
+
+// UnwrapFilesystem returns the wrapped afero.Fs.
+func (fs *md5HashingFs) UnwrapFilesystem() afero.Fs {
+ return fs.Fs
+}
+
+// Create creates the file and wraps it so its content is hashed on write.
+func (fs *md5HashingFs) Create(name string) (afero.File, error) {
+ f, err := fs.Fs.Create(name)
+ if err == nil {
+ f = fs.wrapFile(f)
+ }
+ return f, err
+}
+
+// OpenFile wraps the opened file in a hashing file, but only when the file
+// is opened for writing.
+func (fs *md5HashingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+ f, err := fs.Fs.OpenFile(name, flag, perm)
+ if err == nil && isWrite(flag) {
+ f = fs.wrapFile(f)
+ }
+ return f, err
+}
+
+// wrapFile attaches a fresh MD5 hash to f.
+func (fs *md5HashingFs) wrapFile(f afero.File) afero.File {
+ return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver}
+}
+
+func (fs *md5HashingFs) Name() string {
+ return "md5HashingFs"
+}
+
+// hashingFile is an afero.File that feeds every successful write into an
+// MD5 hash and reports the final sum to hashReceiver on Close.
+type hashingFile struct {
+ hashReceiver FileHashReceiver
+ h hash.Hash
+ afero.File
+}
+
+// Write writes p to the underlying file and, on success, to the hash.
+// hash.Hash's Write never returns an error per its contract, so on the
+// success path the returned n is the hash write's count (len(p)).
+func (h *hashingFile) Write(p []byte) (n int, err error) {
+ n, err = h.File.Write(p)
+ if err != nil {
+ return
+ }
+ return h.h.Write(p)
+}
+
+// Close reports the accumulated MD5 sum, then closes the underlying file.
+func (h *hashingFile) Close() error {
+ sum := hex.EncodeToString(h.h.Sum(nil))
+ h.hashReceiver.OnFileClose(h.Name(), sum)
+ return h.File.Close()
+}
diff --git a/hugofs/hashing_fs_test.go b/hugofs/hashing_fs_test.go
new file mode 100644
index 000000000..3e1f6c41d
--- /dev/null
+++ b/hugofs/hashing_fs_test.go
@@ -0,0 +1,52 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/afero"
+)
+
+// testHashReceiver records the most recent filename and MD5 sum reported
+// via OnFileClose.
+type testHashReceiver struct {
+ sum string
+ name string
+}
+
+func (t *testHashReceiver) OnFileClose(name, md5hash string) {
+ t.name = name
+ t.sum = md5hash
+}
+
+// TestHashingFs verifies that the observer receives the MD5 sum of the
+// written content, and the empty-input MD5 when nothing is written.
+func TestHashingFs(t *testing.T) {
+ c := qt.New(t)
+
+ fs := afero.NewMemMapFs()
+ observer := &testHashReceiver{}
+ ofs := NewHashingFs(fs, observer)
+
+ f, err := ofs.Create("hashme")
+ c.Assert(err, qt.IsNil)
+ _, err = f.Write([]byte("content"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(f.Close(), qt.IsNil)
+ // MD5("content").
+ c.Assert(observer.sum, qt.Equals, "9a0364b9e99bb480dd25e1f0284c8555")
+ c.Assert(observer.name, qt.Equals, "hashme")
+
+ f, err = ofs.Create("nowrites")
+ c.Assert(err, qt.IsNil)
+ c.Assert(f.Close(), qt.IsNil)
+ // MD5 of the empty input.
+ c.Assert(observer.sum, qt.Equals, "d41d8cd98f00b204e9800998ecf8427e")
+}
diff --git a/hugofs/language_merge.go b/hugofs/language_merge.go
new file mode 100644
index 000000000..a2fa411a9
--- /dev/null
+++ b/hugofs/language_merge.go
@@ -0,0 +1,39 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "os"
+)
+
+// LanguageDirsMerger implements the overlayfs.DirsMerger func, which is used
+// to merge two directories.
+var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) []os.FileInfo {
+ for _, fi1 := range bofi {
+ fim1 := fi1.(FileMetaInfo)
+ var found bool
+ for _, fi2 := range lofi {
+ fim2 := fi2.(FileMetaInfo)
+ if fi1.Name() == fi2.Name() && fim1.Meta().Lang == fim2.Meta().Lang {
+ found = true
+ break
+ }
+ }
+ if !found {
+ lofi = append(lofi, fi1)
+ }
+ }
+
+ return lofi
+}
diff --git a/hugofs/noop_fs.go b/hugofs/noop_fs.go
new file mode 100644
index 000000000..8e4abbc6b
--- /dev/null
+++ b/hugofs/noop_fs.go
@@ -0,0 +1,86 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "errors"
+ "os"
+ "time"
+
+ "github.com/spf13/afero"
+)
+
+var (
+ errNoOp = errors.New("this is a filesystem that does nothing and this operation is not supported")
+ // Interface guard.
+ _ afero.Fs = (*noOpFs)(nil)
+
+ // NoOpFs provides a no-op filesystem that implements the afero.Fs
+ // interface.
+ NoOpFs = &noOpFs{}
+)
+
+// noOpFs is an afero.Fs where reads report os.ErrNotExist, file creation,
+// renames and permission changes fail with errNoOp, and directory
+// creation/removal silently succeed as no-ops.
+type noOpFs struct {
+}
+
+func (fs noOpFs) Create(name string) (afero.File, error) {
+ return nil, errNoOp
+}
+
+// Mkdir is a silent no-op.
+func (fs noOpFs) Mkdir(name string, perm os.FileMode) error {
+ return nil
+}
+
+// MkdirAll is a silent no-op.
+func (fs noOpFs) MkdirAll(path string, perm os.FileMode) error {
+ return nil
+}
+
+func (fs noOpFs) Open(name string) (afero.File, error) {
+ return nil, os.ErrNotExist
+}
+
+func (fs noOpFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+ return nil, os.ErrNotExist
+}
+
+// Remove is a silent no-op.
+func (fs noOpFs) Remove(name string) error {
+ return nil
+}
+
+// RemoveAll is a silent no-op.
+func (fs noOpFs) RemoveAll(path string) error {
+ return nil
+}
+
+func (fs noOpFs) Rename(oldname string, newname string) error {
+ return errNoOp
+}
+
+func (fs noOpFs) Stat(name string) (os.FileInfo, error) {
+ return nil, os.ErrNotExist
+}
+
+func (fs noOpFs) Name() string {
+ return "noOpFs"
+}
+
+func (fs noOpFs) Chmod(name string, mode os.FileMode) error {
+ return errNoOp
+}
+
+func (fs noOpFs) Chtimes(name string, atime time.Time, mtime time.Time) error {
+ return errNoOp
+}
+
+// NOTE(review): pointer receiver is inconsistent with the value receivers
+// above; harmless since NoOpFs is exported as a pointer, but worth unifying.
+func (fs *noOpFs) Chown(name string, uid int, gid int) error {
+ return errNoOp
+}
diff --git a/hugofs/nosymlink_fs.go b/hugofs/nosymlink_fs.go
new file mode 100644
index 000000000..d3cad5e74
--- /dev/null
+++ b/hugofs/nosymlink_fs.go
@@ -0,0 +1,161 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/spf13/afero"
+)
+
+// ErrPermissionSymlink is returned when a disallowed symlink is encountered.
+var ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem")
+
+// NewNoSymlinkFs creates a new filesystem that prevents symlinks.
+// When allowFiles is true, symlinked regular files are permitted and only
+// symlinked directories are blocked.
+func NewNoSymlinkFs(fs afero.Fs, logger loggers.Logger, allowFiles bool) afero.Fs {
+ return &noSymlinkFs{Fs: fs, logger: logger, allowFiles: allowFiles}
+}
+
+// Interface guard.
+var (
+ _ FilesystemUnwrapper = (*noSymlinkFs)(nil)
+)
+
+// noSymlinkFs is a filesystem that prevents symlinking.
+type noSymlinkFs struct {
+ allowFiles bool // block dirs only
+ logger loggers.Logger
+ afero.Fs
+}
+
+type noSymlinkFile struct {
+ fs *noSymlinkFs
+ afero.File
+}
+
+// Readdir reads directory entries and drops (with a logged warning) any
+// entry that is a disallowed symlink.
+func (f *noSymlinkFile) Readdir(count int) ([]os.FileInfo, error) {
+ fis, err := f.File.Readdir(count)
+
+ // Filter in place, reusing fis' backing array.
+ filtered := fis[:0]
+ for _, x := range fis {
+ filename := filepath.Join(f.Name(), x.Name())
+ if _, err := f.fs.checkSymlinkStatus(filename, x); err != nil {
+ // Log a warning and drop the file from the list
+ logUnsupportedSymlink(filename, f.fs.logger)
+ } else {
+ filtered = append(filtered, x)
+ }
+ }
+
+ return filtered, err
+}
+
+// Readdirnames returns the names of the filtered directory entries.
+func (f *noSymlinkFile) Readdirnames(count int) ([]string, error) {
+ dirs, err := f.Readdir(count)
+ if err != nil {
+ return nil, err
+ }
+ return fileInfosToNames(dirs), nil
+}
+
+// UnwrapFilesystem returns the wrapped afero.Fs.
+func (fs *noSymlinkFs) UnwrapFilesystem() afero.Fs {
+ return fs.Fs
+}
+
+// LstatIfPossible stats name, rejecting disallowed symlinks.
+func (fs *noSymlinkFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
+ return fs.stat(name)
+}
+
+// Stat stats name, rejecting disallowed symlinks.
+func (fs *noSymlinkFs) Stat(name string) (os.FileInfo, error) {
+ fi, _, err := fs.stat(name)
+ return fi, err
+}
+
+// stat stats name, preferring Lstat when the wrapped filesystem supports it,
+// then applies the symlink policy. The bool result reports whether Lstat
+// was actually used.
+func (fs *noSymlinkFs) stat(name string) (os.FileInfo, bool, error) {
+ var (
+ fi os.FileInfo
+ wasLstat bool
+ err error
+ )
+
+ if lstater, ok := fs.Fs.(afero.Lstater); ok {
+ fi, wasLstat, err = lstater.LstatIfPossible(name)
+ } else {
+ fi, err = fs.Fs.Stat(name)
+ }
+
+ if err != nil {
+ return nil, false, err
+ }
+
+ fi, err = fs.checkSymlinkStatus(name, fi)
+
+ return fi, wasLstat, err
+}
+
+// checkSymlinkStatus returns fi unchanged when name is not a symlink, or is
+// a symlinked regular file and files are allowed; otherwise it returns
+// ErrPermissionSymlink.
+func (fs *noSymlinkFs) checkSymlinkStatus(name string, fi os.FileInfo) (os.FileInfo, error) {
+ var metaIsSymlink bool
+
+ // Decorated filesystems record the symlink flag in the file meta.
+ if fim, ok := fi.(FileMetaInfo); ok {
+ meta := fim.Meta()
+ metaIsSymlink = meta.IsSymlink
+ }
+
+ if metaIsSymlink {
+ if fs.allowFiles && !fi.IsDir() {
+ return fi, nil
+ }
+ return nil, ErrPermissionSymlink
+ }
+
+ // Also support non-decorated filesystems, e.g. the Os fs.
+ if isSymlink(fi) {
+ // Need to determine if this is a directory or not.
+ _, sfi, err := evalSymlinks(fs.Fs, name)
+ if err != nil {
+ return nil, err
+ }
+ if fs.allowFiles && !sfi.IsDir() {
+ // Return the original FileInfo to get the expected Name.
+ return fi, nil
+ }
+ return nil, ErrPermissionSymlink
+ }
+
+ return fi, nil
+}
+
+// Open stats name first so disallowed symlinks are rejected before opening.
+func (fs *noSymlinkFs) Open(name string) (afero.File, error) {
+ if _, _, err := fs.stat(name); err != nil {
+ return nil, err
+ }
+ return fs.wrapFile(fs.Fs.Open(name))
+}
+
+// OpenFile stats name first so disallowed symlinks are rejected before opening.
+func (fs *noSymlinkFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+ if _, _, err := fs.stat(name); err != nil {
+ return nil, err
+ }
+ return fs.wrapFile(fs.Fs.OpenFile(name, flag, perm))
+}
+
+// wrapFile wraps f as a noSymlinkFile so directory reads are filtered.
+func (fs *noSymlinkFs) wrapFile(f afero.File, err error) (afero.File, error) {
+ if err != nil {
+ return nil, err
+ }
+
+ return &noSymlinkFile{File: f, fs: fs}, nil
+}
diff --git a/hugofs/nosymlink_test.go b/hugofs/nosymlink_test.go
new file mode 100644
index 000000000..e00dcf1a8
--- /dev/null
+++ b/hugofs/nosymlink_test.go
@@ -0,0 +1,146 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// prepareSymlinks builds a temp work dir containing a blog tree plus a
+// symlinked dir, subdir and file, chdirs into it, and returns the work dir
+// and a cleanup func that removes it and restores the working directory.
+// NOTE(review): errors from WriteFile/Chdir are ignored — test helper only.
+func prepareSymlinks(t *testing.T) (string, func()) {
+ c := qt.New(t)
+
+ workDir, clean, err := htesting.CreateTempDir(Os, "hugo-symlink-test")
+ c.Assert(err, qt.IsNil)
+ wd, _ := os.Getwd()
+
+ blogDir := filepath.Join(workDir, "blog")
+ blogSubDir := filepath.Join(blogDir, "sub")
+ c.Assert(os.MkdirAll(blogSubDir, 0777), qt.IsNil)
+ blogFile1 := filepath.Join(blogDir, "a.txt")
+ blogFile2 := filepath.Join(blogSubDir, "b.txt")
+ afero.WriteFile(Os, filepath.Join(blogFile1), []byte("content1"), 0777)
+ afero.WriteFile(Os, filepath.Join(blogFile2), []byte("content2"), 0777)
+ os.Chdir(workDir)
+ c.Assert(os.Symlink("blog", "symlinkdedir"), qt.IsNil)
+ os.Chdir(blogDir)
+ c.Assert(os.Symlink("sub", "symsub"), qt.IsNil)
+ c.Assert(os.Symlink("a.txt", "symlinkdedfile.txt"), qt.IsNil)
+
+ return workDir, func() {
+ clean()
+ os.Chdir(wd)
+ }
+}
+
+// TestNoSymlinkFs verifies that Stat/Lstat/Open/OpenFile always reject
+// symlinked dirs, reject symlinked files unless allowFiles is set, and that
+// Readdir filters symlinked entries while logging a warning.
+func TestNoSymlinkFs(t *testing.T) {
+ if skipSymlink() {
+ t.Skip("Skip; os.Symlink needs administrator rights on Windows")
+ }
+ c := qt.New(t)
+ workDir, clean := prepareSymlinks(t)
+ defer clean()
+
+ blogDir := filepath.Join(workDir, "blog")
+ blogFile1 := filepath.Join(blogDir, "a.txt")
+
+ logger := loggers.NewWarningLogger()
+
+ // Exercise both a decorated and a plain Os filesystem.
+ for _, bfs := range []afero.Fs{NewBaseFileDecorator(Os), Os} {
+ for _, allowFiles := range []bool{false, true} {
+ logger.LogCounters().WarnCounter.Reset()
+ fs := NewNoSymlinkFs(bfs, logger, allowFiles)
+ ls := fs.(afero.Lstater)
+ symlinkedDir := filepath.Join(workDir, "symlinkdedir")
+ symlinkedFilename := "symlinkdedfile.txt"
+ symlinkedFile := filepath.Join(blogDir, symlinkedFilename)
+
+ // File symlinks succeed only when allowFiles is set.
+ assertFileErr := func(err error) {
+ if allowFiles {
+ c.Assert(err, qt.IsNil)
+ } else {
+ c.Assert(err, qt.Equals, ErrPermissionSymlink)
+ }
+ }
+
+ assertFileStat := func(name string, fi os.FileInfo, err error) {
+ t.Helper()
+ assertFileErr(err)
+ if err == nil {
+ c.Assert(fi, qt.Not(qt.IsNil))
+ c.Assert(fi.Name(), qt.Equals, name)
+ }
+ }
+
+ // Check Stat and Lstat
+ for _, stat := range []func(name string) (os.FileInfo, error){
+ func(name string) (os.FileInfo, error) {
+ return fs.Stat(name)
+ },
+ func(name string) (os.FileInfo, error) {
+ fi, _, err := ls.LstatIfPossible(name)
+ return fi, err
+ },
+ } {
+ _, err := stat(symlinkedDir)
+ c.Assert(err, qt.Equals, ErrPermissionSymlink)
+ fi, err := stat(symlinkedFile)
+ assertFileStat(symlinkedFilename, fi, err)
+
+ fi, err = stat(filepath.Join(workDir, "blog"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(fi, qt.Not(qt.IsNil))
+
+ fi, err = stat(blogFile1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(fi, qt.Not(qt.IsNil))
+ }
+
+ // Check Open
+ _, err := fs.Open(symlinkedDir)
+ c.Assert(err, qt.Equals, ErrPermissionSymlink)
+ _, err = fs.OpenFile(symlinkedDir, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
+ c.Assert(err, qt.Equals, ErrPermissionSymlink)
+ _, err = fs.OpenFile(symlinkedFile, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
+ assertFileErr(err)
+ _, err = fs.Open(symlinkedFile)
+ assertFileErr(err)
+ f, err := fs.Open(blogDir)
+ c.Assert(err, qt.IsNil)
+ f.Close()
+ f, err = fs.Open(blogFile1)
+ c.Assert(err, qt.IsNil)
+ f.Close()
+
+ // Check readdir
+ f, err = fs.Open(workDir)
+ c.Assert(err, qt.IsNil)
+ // There is at least one unsupported symlink inside workDir
+ _, err = f.Readdir(-1)
+ c.Assert(err, qt.IsNil)
+ f.Close()
+ c.Assert(logger.LogCounters().WarnCounter.Count(), qt.Equals, uint64(1))
+
+ }
+ }
+}
diff --git a/hugofs/rootmapping_fs.go b/hugofs/rootmapping_fs.go
new file mode 100644
index 000000000..90df48f8c
--- /dev/null
+++ b/hugofs/rootmapping_fs.go
@@ -0,0 +1,652 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ radix "github.com/armon/go-radix"
+ "github.com/spf13/afero"
+)
+
+var filepathSeparator = string(filepath.Separator)
+
+// NewRootMappingFs creates a new RootMappingFs on top of the provided with
+// root mappings with some optional metadata about the root.
+// Note that From represents a virtual root that maps to the actual filename in To.
+func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
+ rootMapToReal := radix.New()
+ var virtualRoots []RootMapping
+
+ for _, rm := range rms {
+ (&rm).clean()
+
+ fromBase := files.ResolveComponentFolder(rm.From)
+
+ if len(rm.To) < 2 {
+ panic(fmt.Sprintf("invalid root mapping; from/to: %s/%s", rm.From, rm.To))
+ }
+
+ fi, err := fs.Stat(rm.To)
+ if err != nil {
+ if os.IsNotExist(err) {
+ // Mappings whose source does not exist are silently skipped.
+ continue
+ }
+ return nil, err
+ }
+ // Extract "blog" from "content/blog"
+ rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, fromBase), filepathSeparator)
+ if rm.Meta == nil {
+ rm.Meta = NewFileMeta()
+ }
+
+ rm.Meta.SourceRoot = rm.To
+ rm.Meta.BaseDir = rm.ToBasedir
+ rm.Meta.MountRoot = rm.path
+ rm.Meta.Module = rm.Module
+ rm.Meta.IsProject = rm.IsProject
+
+ meta := rm.Meta.Copy()
+
+ if !fi.IsDir() {
+ // A single-file mount; name it after the mount point's base name.
+ _, name := filepath.Split(rm.From)
+ meta.Name = name
+ }
+
+ rm.fi = NewFileMetaInfo(fi, meta)
+
+ key := filepathSeparator + rm.From
+ var mappings []RootMapping
+ v, found := rootMapToReal.Get(key)
+ if found {
+ // There may be more than one language pointing to the same root.
+ mappings = v.([]RootMapping)
+ }
+ mappings = append(mappings, rm)
+ rootMapToReal.Insert(key, mappings)
+
+ virtualRoots = append(virtualRoots, rm)
+ }
+
+ // The bare separator key holds every mapping.
+ rootMapToReal.Insert(filepathSeparator, virtualRoots)
+
+ rfs := &RootMappingFs{
+ Fs: fs,
+ rootMapToReal: rootMapToReal,
+ }
+
+ return rfs, nil
+}
+
+// newRootMappingFsFromFromTo builds a RootMappingFs from flat
+// (from, to) string pairs, all sharing the given base dir.
+func newRootMappingFsFromFromTo(
+ baseDir string,
+ fs afero.Fs,
+ fromTo ...string,
+) (*RootMappingFs, error) {
+ rms := make([]RootMapping, len(fromTo)/2)
+ for i, j := 0, 0; j < len(fromTo); i, j = i+1, j+2 {
+ rms[i] = RootMapping{
+ From: fromTo[j],
+ To: fromTo[j+1],
+ ToBasedir: baseDir,
+ }
+ }
+
+ return NewRootMappingFs(fs, rms...)
+}
+
+// RootMapping describes a virtual file or directory mount.
+type RootMapping struct {
+ From string // The virtual mount.
+ To string // The source directory or file.
+ ToBasedir string // The base of To. May be empty if an absolute path was provided.
+ Module string // The module path/ID.
+ IsProject bool // Whether this is a mount in the main project.
+ Meta *FileMeta // File metadata (lang etc.)
+
+ fi FileMetaInfo
+ path string // The virtual mount point, e.g. "blog".
+
+}
+
+// keyRootMappings pairs a radix tree key with its mappings.
+type keyRootMappings struct {
+ key string
+ roots []RootMapping
+}
+
+// clean normalizes From (trimmed of separators) and To (cleaned).
+func (rm *RootMapping) clean() {
+ rm.From = strings.Trim(filepath.Clean(rm.From), filepathSeparator)
+ rm.To = filepath.Clean(rm.To)
+}
+
+// filename translates the virtual name to its real filename below To.
+func (r RootMapping) filename(name string) string {
+ if name == "" {
+ return r.To
+ }
+ return filepath.Join(r.To, strings.TrimPrefix(name, r.From))
+}
+
+// trimFrom strips the virtual mount prefix from name.
+func (r RootMapping) trimFrom(name string) string {
+ if name == "" {
+ return ""
+ }
+ return strings.TrimPrefix(name, r.From)
+}
+
+// Interface guard.
+var (
+ _ FilesystemUnwrapper = (*RootMappingFs)(nil)
+)
+
+// A RootMappingFs maps several roots into one. Note that the root of this filesystem
+// is directories only, and they will be returned in Readdir and Readdirnames
+// in the order given.
+type RootMappingFs struct {
+ afero.Fs
+ rootMapToReal *radix.Tree
+}
+
+// Dirs returns one FileMetaInfo per root mapped at or below base, each
+// wrapping a base-path filesystem rooted at the mapping's To dir with
+// paths rewritten to the virtual mount point.
+func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) {
+ base = filepathSeparator + fs.cleanName(base)
+ roots := fs.getRootsWithPrefix(base)
+
+ if roots == nil {
+ return nil, nil
+ }
+
+ fss := make([]FileMetaInfo, len(roots))
+ for i, r := range roots {
+ bfs := afero.NewBasePathFs(fs.Fs, r.To)
+ bfs = decoratePath(bfs, func(name string) string {
+ p := strings.TrimPrefix(name, r.To)
+ if r.path != "" {
+ // Make sure it's mounted to any sub path, e.g. blog
+ p = filepath.Join(r.path, p)
+ }
+ p = strings.TrimLeft(p, filepathSeparator)
+ return p
+ })
+
+ fs := bfs
+ if r.Meta.InclusionFilter != nil {
+ fs = newFilenameFilterFs(fs, r.To, r.Meta.InclusionFilter)
+ }
+ fs = decorateDirs(fs, r.Meta)
+ fi, err := fs.Stat("")
+ if err != nil {
+ return nil, fmt.Errorf("RootMappingFs.Dirs: %w", err)
+ }
+
+ if !fi.IsDir() {
+ fi.(FileMetaInfo).Meta().Merge(r.Meta)
+ }
+
+ fss[i] = fi.(FileMetaInfo)
+ }
+
+ return fss, nil
+}
+
+// UnwrapFilesystem returns the wrapped afero.Fs.
+func (fs *RootMappingFs) UnwrapFilesystem() afero.Fs {
+ return fs.Fs
+}
+
+// Filter creates a copy of this filesystem with only mappings matching a filter.
+// Note the value receiver: the copy shares the underlying Fs but gets its
+// own radix tree.
+func (fs RootMappingFs) Filter(f func(m RootMapping) bool) *RootMappingFs {
+ rootMapToReal := radix.New()
+ fs.rootMapToReal.Walk(func(b string, v any) bool {
+ rms := v.([]RootMapping)
+ var nrms []RootMapping
+ for _, rm := range rms {
+ if f(rm) {
+ nrms = append(nrms, rm)
+ }
+ }
+ if len(nrms) != 0 {
+ rootMapToReal.Insert(b, nrms)
+ }
+ return false
+ })
+
+ fs.rootMapToReal = rootMapToReal
+
+ return &fs
+}
+
+// LstatIfPossible returns the os.FileInfo structure describing a given file.
+// Only the first matching mapping's FileInfo is returned; the bool result
+// is always false (Lstat is not used here).
+func (fs *RootMappingFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
+ fis, err := fs.doLstat(name)
+ if err != nil {
+ return nil, false, err
+ }
+ return fis[0], false, nil
+}
+
+// Open opens the named file for reading.
+// When several mappings match, the results are combined into a union file.
+func (fs *RootMappingFs) Open(name string) (afero.File, error) {
+ fis, err := fs.doLstat(name)
+ if err != nil {
+ return nil, err
+ }
+
+ return fs.newUnionFile(fis...)
+}
+
+// Stat returns the os.FileInfo structure describing a given file. If there is
+// an error, it will be of type *os.PathError.
+func (fs *RootMappingFs) Stat(name string) (os.FileInfo, error) {
+ fi, _, err := fs.LstatIfPossible(name)
+ return fi, err
+}
+
+// hasPrefix reports whether any mapping key starts with prefix.
+func (fs *RootMappingFs) hasPrefix(prefix string) bool {
+ hasPrefix := false
+ fs.rootMapToReal.WalkPrefix(prefix, func(b string, v any) bool {
+ hasPrefix = true
+ return true
+ })
+
+ return hasPrefix
+}
+
+// getRoot returns the mappings registered exactly at key, or nil.
+func (fs *RootMappingFs) getRoot(key string) []RootMapping {
+ v, found := fs.rootMapToReal.Get(key)
+ if !found {
+ return nil
+ }
+
+ return v.([]RootMapping)
+}
+
+// getRoots returns the longest-prefix mappings for key, excluding the
+// catch-all entry stored at the bare separator.
+func (fs *RootMappingFs) getRoots(key string) (string, []RootMapping) {
+ s, v, found := fs.rootMapToReal.LongestPrefix(key)
+ if !found || (s == filepathSeparator && key != filepathSeparator) {
+ return "", nil
+ }
+ return s, v.([]RootMapping)
+}
+
+// debug prints all mapping keys; development helper only.
+func (fs *RootMappingFs) debug() {
+ fmt.Println("debug():")
+ fs.rootMapToReal.Walk(func(s string, v any) bool {
+ fmt.Println("Key", s)
+ return false
+ })
+}
+
+// getRootsWithPrefix returns all mappings whose key starts with prefix.
+func (fs *RootMappingFs) getRootsWithPrefix(prefix string) []RootMapping {
+ var roots []RootMapping
+ fs.rootMapToReal.WalkPrefix(prefix, func(b string, v any) bool {
+ roots = append(roots, v.([]RootMapping)...)
+ return false
+ })
+
+ return roots
+}
+
+// getAncestors returns the mappings for every strict ancestor dir of prefix.
+func (fs *RootMappingFs) getAncestors(prefix string) []keyRootMappings {
+ var roots []keyRootMappings
+ fs.rootMapToReal.WalkPath(prefix, func(s string, v any) bool {
+ if strings.HasPrefix(prefix, s+filepathSeparator) {
+ roots = append(roots, keyRootMappings{
+ key: s,
+ roots: v.([]RootMapping),
+ })
+ }
+ return false
+ })
+
+ return roots
+}
+
+// newUnionFile opens fis[0] and, when more entries follow, recursively
+// stacks them into afero.UnionFile values whose Readdir merge drops
+// duplicate directory entries.
+func (fs *RootMappingFs) newUnionFile(fis ...FileMetaInfo) (afero.File, error) {
+ meta := fis[0].Meta()
+ f, err := meta.Open()
+ if err != nil {
+ return nil, err
+ }
+ if len(fis) == 1 {
+ return f, nil
+ }
+
+ rf := &rootMappingFile{File: f, fs: fs, name: meta.Name, meta: meta}
+ // NOTE(review): unreachable — the len(fis) == 1 case already returned above.
+ if len(fis) == 1 {
+ return rf, err
+ }
+
+ next, err := fs.newUnionFile(fis[1:]...)
+ if err != nil {
+ return nil, err
+ }
+
+ uf := &afero.UnionFile{Base: rf, Layer: next}
+
+ uf.Merger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
+ // Ignore duplicate directory entries
+ seen := make(map[string]bool)
+ var result []os.FileInfo
+
+ for _, fis := range [][]os.FileInfo{bofi, lofi} {
+ for _, fi := range fis {
+
+ if fi.IsDir() && seen[fi.Name()] {
+ continue
+ }
+
+ if fi.IsDir() {
+ seen[fi.Name()] = true
+ }
+
+ result = append(result, fi)
+ }
+ }
+
+ return result, nil
+ }
+
+ return uf, nil
+}
+
+// cleanName normalizes name and strips leading/trailing separators.
+func (fs *RootMappingFs) cleanName(name string) string {
+ return strings.Trim(filepath.Clean(name), filepathSeparator)
+}
+
+// collectDirEntries gathers the merged directory listing for prefix:
+// real entries from roots mounted exactly at prefix, synthetic directories
+// for mounts nested below it, and entries contributed by ancestor mounts
+// that map into it. Duplicate directories are listed once.
+func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error) {
+ prefix = filepathSeparator + fs.cleanName(prefix)
+
+ var fis []os.FileInfo
+
+ seen := make(map[string]bool) // Prevent duplicate directories
+ level := strings.Count(prefix, filepathSeparator)
+
+ // collectDir appends rm's entries below fi, applying the mapping's
+ // inclusion filter and replacing subdirectories with lazy openers.
+ collectDir := func(rm RootMapping, fi FileMetaInfo) error {
+ f, err := fi.Meta().Open()
+ if err != nil {
+ return err
+ }
+ direntries, err := f.Readdir(-1)
+ if err != nil {
+ f.Close()
+ return err
+ }
+
+ for _, fi := range direntries {
+ meta := fi.(FileMetaInfo).Meta()
+ meta.Merge(rm.Meta)
+ if !rm.Meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), fi.IsDir()) {
+ continue
+ }
+
+ if fi.IsDir() {
+ name := fi.Name()
+ if seen[name] {
+ continue
+ }
+ seen[name] = true
+ opener := func() (afero.File, error) {
+ return fs.Open(filepath.Join(rm.From, name))
+ }
+ fi = newDirNameOnlyFileInfo(name, meta, opener)
+ }
+
+ fis = append(fis, fi)
+ }
+
+ f.Close()
+
+ return nil
+ }
+
+ // First add any real files/directories.
+ rms := fs.getRoot(prefix)
+ for _, rm := range rms {
+ if err := collectDir(rm, rm.fi); err != nil {
+ return nil, err
+ }
+ }
+
+ // Next add any file mounts inside the given directory.
+ prefixInside := prefix + filepathSeparator
+ fs.rootMapToReal.WalkPrefix(prefixInside, func(s string, v any) bool {
+ if (strings.Count(s, filepathSeparator) - level) != 1 {
+ // This directory is not part of the current, but we
+ // need to include the first name part to make it
+ // navigable.
+ path := strings.TrimPrefix(s, prefixInside)
+ parts := strings.Split(path, filepathSeparator)
+ name := parts[0]
+
+ if seen[name] {
+ return false
+ }
+ seen[name] = true
+ opener := func() (afero.File, error) {
+ return fs.Open(path)
+ }
+
+ fi := newDirNameOnlyFileInfo(name, nil, opener)
+ fis = append(fis, fi)
+
+ return false
+ }
+
+ rms := v.([]RootMapping)
+ for _, rm := range rms {
+ if !rm.fi.IsDir() {
+ // A single file mount
+ fis = append(fis, rm.fi)
+ continue
+ }
+ name := filepath.Base(rm.From)
+ if seen[name] {
+ continue
+ }
+ seen[name] = true
+
+ opener := func() (afero.File, error) {
+ return fs.Open(rm.From)
+ }
+
+ fi := newDirNameOnlyFileInfo(name, rm.Meta, opener)
+
+ fis = append(fis, fi)
+
+ }
+
+ return false
+ })
+
+ // Finally add any ancestor dirs with files in this directory.
+ ancestors := fs.getAncestors(prefix)
+ for _, root := range ancestors {
+ subdir := strings.TrimPrefix(prefix, root.key)
+ for _, rm := range root.roots {
+ if rm.fi.IsDir() {
+ fi, err := rm.fi.Meta().JoinStat(subdir)
+ if err == nil {
+ if err := collectDir(rm, fi); err != nil {
+ return nil, err
+ }
+ }
+ }
+ }
+ }
+
+ return fis, nil
+}
+
+func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) {
+ name = fs.cleanName(name)
+ key := filepathSeparator + name
+
+ roots := fs.getRoot(key)
+
+ if roots == nil {
+ if fs.hasPrefix(key) {
+ // We have directories mounted below this.
+ // Make it look like a directory.
+ return []FileMetaInfo{newDirNameOnlyFileInfo(name, nil, fs.virtualDirOpener(name))}, nil
+ }
+
+ // Find any real files or directories with this key.
+ _, roots := fs.getRoots(key)
+ if roots == nil {
+ return nil, &os.PathError{Op: "LStat", Path: name, Err: os.ErrNotExist}
+ }
+
+ var err error
+ var fis []FileMetaInfo
+
+ for _, rm := range roots {
+ var fi FileMetaInfo
+ fi, _, err = fs.statRoot(rm, name)
+ if err == nil {
+ fis = append(fis, fi)
+ }
+ }
+
+ if fis != nil {
+ return fis, nil
+ }
+
+ if err == nil {
+ err = &os.PathError{Op: "LStat", Path: name, Err: err}
+ }
+
+ return nil, err
+ }
+
+ fileCount := 0
+ var wasFiltered bool
+ for _, root := range roots {
+ meta := root.fi.Meta()
+ if !meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), root.fi.IsDir()) {
+ wasFiltered = true
+ continue
+ }
+
+ if !root.fi.IsDir() {
+ fileCount++
+ }
+ if fileCount > 1 {
+ break
+ }
+ }
+
+ if fileCount == 0 {
+ if wasFiltered {
+ return nil, os.ErrNotExist
+ }
+ // Dir only.
+ return []FileMetaInfo{newDirNameOnlyFileInfo(name, roots[0].Meta, fs.virtualDirOpener(name))}, nil
+ }
+
+ if fileCount > 1 {
+ // Not supported by this filesystem.
+ return nil, fmt.Errorf("found multiple files with name %q, use .Readdir or the source filesystem directly", name)
+ }
+
+ return []FileMetaInfo{roots[0].fi}, nil
+}
+
// statRoot stats the virtual name inside one specific root mapping. It
// returns os.ErrNotExist when the mount's inclusion filter excludes the
// name. The bool reports whether a real lstat was performed (see
// lstatIfPossible). The returned FileInfo is decorated with the mount's
// metadata and an opener appropriate for its kind.
func (fs *RootMappingFs) statRoot(root RootMapping, name string) (FileMetaInfo, bool, error) {
	if !root.Meta.InclusionFilter.Match(root.trimFrom(name), root.fi.IsDir()) {
		return nil, false, os.ErrNotExist
	}
	filename := root.filename(name)

	fi, b, err := lstatIfPossible(fs.Fs, filename)
	if err != nil {
		return nil, b, err
	}

	var opener func() (afero.File, error)
	if fi.IsDir() {
		// Make sure metadata gets applied in Readdir.
		opener = fs.realDirOpener(filename, root.Meta)
	} else {
		// Opens the real file directly.
		opener = func() (afero.File, error) {
			return fs.Fs.Open(filename)
		}
	}

	return decorateFileInfo(fi, fs.Fs, opener, "", "", root.Meta), b, nil
}
+
+func (fs *RootMappingFs) virtualDirOpener(name string) func() (afero.File, error) {
+ return func() (afero.File, error) { return &rootMappingFile{name: name, fs: fs}, nil }
+}
+
+func (fs *RootMappingFs) realDirOpener(name string, meta *FileMeta) func() (afero.File, error) {
+ return func() (afero.File, error) {
+ f, err := fs.Fs.Open(name)
+ if err != nil {
+ return nil, err
+ }
+ return &rootMappingFile{name: name, meta: meta, fs: fs, File: f}, nil
+ }
+}
+
// rootMappingFile is an afero.File representing a (possibly virtual)
// directory or file inside a RootMappingFs.
type rootMappingFile struct {
	afero.File                // backing file; nil for purely virtual directories
	fs         *RootMappingFs // the owning filesystem
	name       string         // the virtual (mounted) name
	meta       *FileMeta      // mount metadata applied to directory entries
}
+
+func (f *rootMappingFile) Close() error {
+ if f.File == nil {
+ return nil
+ }
+ return f.File.Close()
+}
+
// Name returns the virtual name this file was opened with, not the name of
// the real backing file.
func (f *rootMappingFile) Name() string {
	return f.name
}
+
// Readdir lists the directory. When a real backing directory exists, its
// entries are decorated with this mount's metadata and passed through the
// mount's inclusion filter; for a purely virtual directory the entries are
// merged from all mounts via collectDirEntries.
func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) {
	if f.File != nil {

		fis, err := f.File.Readdir(count)
		if err != nil {
			return nil, err
		}

		var result []os.FileInfo
		for _, fi := range fis {
			fim := decorateFileInfo(fi, f.fs, nil, "", "", f.meta)
			meta := fim.Meta()
			// Drop entries excluded by the mount's inclusion filter.
			if f.meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), fim.IsDir()) {
				result = append(result, fim)
			}
		}
		return result, nil
	}

	return f.fs.collectDirEntries(f.name)
}
+
+func (f *rootMappingFile) Readdirnames(count int) ([]string, error) {
+ dirs, err := f.Readdir(count)
+ if err != nil {
+ return nil, err
+ }
+ return fileInfosToNames(dirs), nil
+}
diff --git a/hugofs/rootmapping_fs_test.go b/hugofs/rootmapping_fs_test.go
new file mode 100644
index 000000000..c843866fc
--- /dev/null
+++ b/hugofs/rootmapping_fs_test.go
@@ -0,0 +1,553 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "io/ioutil"
+ "path/filepath"
+ "sort"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/spf13/afero"
+)
+
+func TestLanguageRootMapping(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ v.Set("contentDir", "content")
+
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ c.Assert(afero.WriteFile(fs, filepath.Join("content/sv/svdir", "main.txt"), []byte("main sv"), 0755), qt.IsNil)
+
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "sv-f.txt"), []byte("some sv blog content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", "en-f.txt"), []byte("some en blog content in a"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent/d1", "sv-d1-f.txt"), []byte("some sv blog content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent/d1", "en-d1-f.txt"), []byte("some en blog content in a"), 0755), qt.IsNil)
+
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myotherenblogcontent", "en-f2.txt"), []byte("some en content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvdocs", "sv-docs.txt"), []byte("some sv docs content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("themes/b/myenblogcontent", "en-b-f.txt"), []byte("some en content"), 0755), qt.IsNil)
+
+ rfs, err := NewRootMappingFs(fs,
+ RootMapping{
+ From: "content/blog", // Virtual path, first element is one of content, static, layouts etc.
+ To: "themes/a/mysvblogcontent", // Real path
+ Meta: &FileMeta{Lang: "sv"},
+ },
+ RootMapping{
+ From: "content/blog",
+ To: "themes/a/myenblogcontent",
+ Meta: &FileMeta{Lang: "en"},
+ },
+ RootMapping{
+ From: "content/blog",
+ To: "content/sv",
+ Meta: &FileMeta{Lang: "sv"},
+ },
+ RootMapping{
+ From: "content/blog",
+ To: "themes/a/myotherenblogcontent",
+ Meta: &FileMeta{Lang: "en"},
+ },
+ RootMapping{
+ From: "content/docs",
+ To: "themes/a/mysvdocs",
+ Meta: &FileMeta{Lang: "sv"},
+ },
+ )
+
+ c.Assert(err, qt.IsNil)
+
+ collected, err := collectFilenames(rfs, "content", "content")
+ c.Assert(err, qt.IsNil)
+ c.Assert(collected, qt.DeepEquals,
+ []string{"blog/d1/en-d1-f.txt", "blog/d1/sv-d1-f.txt", "blog/en-f.txt", "blog/en-f2.txt", "blog/sv-f.txt", "blog/svdir/main.txt", "docs/sv-docs.txt"}, qt.Commentf("%#v", collected))
+
+ dirs, err := rfs.Dirs(filepath.FromSlash("content/blog"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(dirs), qt.Equals, 4)
+ for _, dir := range dirs {
+ f, err := dir.Meta().Open()
+ c.Assert(err, qt.IsNil)
+ f.Close()
+ }
+
+ blog, err := rfs.Open(filepath.FromSlash("content/blog"))
+ c.Assert(err, qt.IsNil)
+ fis, err := blog.Readdir(-1)
+ for _, fi := range fis {
+ f, err := fi.(FileMetaInfo).Meta().Open()
+ c.Assert(err, qt.IsNil)
+ f.Close()
+ }
+ blog.Close()
+
+ getDirnames := func(name string, rfs *RootMappingFs) []string {
+ c.Helper()
+ filename := filepath.FromSlash(name)
+ f, err := rfs.Open(filename)
+ c.Assert(err, qt.IsNil)
+ names, err := f.Readdirnames(-1)
+
+ f.Close()
+ c.Assert(err, qt.IsNil)
+
+ info, err := rfs.Stat(filename)
+ c.Assert(err, qt.IsNil)
+ f2, err := info.(FileMetaInfo).Meta().Open()
+ c.Assert(err, qt.IsNil)
+ names2, err := f2.Readdirnames(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(names2, qt.DeepEquals, names)
+ f2.Close()
+
+ return names
+ }
+
+ rfsEn := rfs.Filter(func(rm RootMapping) bool {
+ return rm.Meta.Lang == "en"
+ })
+
+ c.Assert(getDirnames("content/blog", rfsEn), qt.DeepEquals, []string{"d1", "en-f.txt", "en-f2.txt"})
+
+ rfsSv := rfs.Filter(func(rm RootMapping) bool {
+ return rm.Meta.Lang == "sv"
+ })
+
+ c.Assert(getDirnames("content/blog", rfsSv), qt.DeepEquals, []string{"d1", "sv-f.txt", "svdir"})
+
+ // Make sure we have not messed with the original
+ c.Assert(getDirnames("content/blog", rfs), qt.DeepEquals, []string{"d1", "sv-f.txt", "en-f.txt", "svdir", "en-f2.txt"})
+
+ c.Assert(getDirnames("content", rfsSv), qt.DeepEquals, []string{"blog", "docs"})
+ c.Assert(getDirnames("content", rfs), qt.DeepEquals, []string{"blog", "docs"})
+}
+
+func TestRootMappingFsDirnames(t *testing.T) {
+ c := qt.New(t)
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ testfile := "myfile.txt"
+ c.Assert(fs.Mkdir("f1t", 0755), qt.IsNil)
+ c.Assert(fs.Mkdir("f2t", 0755), qt.IsNil)
+ c.Assert(fs.Mkdir("f3t", 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("f2t", testfile), []byte("some content"), 0755), qt.IsNil)
+
+ rfs, err := newRootMappingFsFromFromTo("", fs, "static/bf1", "f1t", "static/cf2", "f2t", "static/af3", "f3t")
+ c.Assert(err, qt.IsNil)
+
+ fif, err := rfs.Stat(filepath.Join("static/cf2", testfile))
+ c.Assert(err, qt.IsNil)
+ c.Assert(fif.Name(), qt.Equals, "myfile.txt")
+ fifm := fif.(FileMetaInfo).Meta()
+ c.Assert(fifm.Filename, qt.Equals, filepath.FromSlash("f2t/myfile.txt"))
+
+ root, err := rfs.Open("static")
+ c.Assert(err, qt.IsNil)
+
+ dirnames, err := root.Readdirnames(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(dirnames, qt.DeepEquals, []string{"af3", "bf1", "cf2"})
+}
+
+func TestRootMappingFsFilename(t *testing.T) {
+ c := qt.New(t)
+ workDir, clean, err := htesting.CreateTempDir(Os, "hugo-root-filename")
+ c.Assert(err, qt.IsNil)
+ defer clean()
+ fs := NewBaseFileDecorator(Os)
+
+ testfilename := filepath.Join(workDir, "f1t/foo/file.txt")
+
+ c.Assert(fs.MkdirAll(filepath.Join(workDir, "f1t/foo"), 0777), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, testfilename, []byte("content"), 0666), qt.IsNil)
+
+ rfs, err := newRootMappingFsFromFromTo(workDir, fs, "static/f1", filepath.Join(workDir, "f1t"), "static/f2", filepath.Join(workDir, "f2t"))
+ c.Assert(err, qt.IsNil)
+
+ fi, err := rfs.Stat(filepath.FromSlash("static/f1/foo/file.txt"))
+ c.Assert(err, qt.IsNil)
+ fim := fi.(FileMetaInfo)
+ c.Assert(fim.Meta().Filename, qt.Equals, testfilename)
+ _, err = rfs.Stat(filepath.FromSlash("static/f1"))
+ c.Assert(err, qt.IsNil)
+}
+
// TestRootMappingFsMount exercises directory mounts (several languages onto
// one virtual dir) and single-file mounts: merged listings, metadata
// resolution, ambiguity on Stat for multi-mounted files, and per-language
// entries in a directory listing.
func TestRootMappingFsMount(t *testing.T) {
	c := qt.New(t)
	fs := NewBaseFileDecorator(afero.NewMemMapFs())

	testfile := "test.txt"

	c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mynoblogcontent", testfile), []byte("some no content"), 0755), qt.IsNil)
	c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0755), qt.IsNil)
	c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0755), qt.IsNil)
	c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0755), qt.IsNil)
	c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "no.txt"), []byte("no text"), 0755), qt.IsNil)
	c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "sv.txt"), []byte("sv text"), 0755), qt.IsNil)

	bfs := afero.NewBasePathFs(fs, "themes/a").(*afero.BasePathFs)
	rm := []RootMapping{
		// Directories
		{
			From: "content/blog",
			To:   "mynoblogcontent",
			Meta: &FileMeta{Lang: "no"},
		},
		{
			From: "content/blog",
			To:   "myenblogcontent",
			Meta: &FileMeta{Lang: "en"},
		},
		{
			From: "content/blog",
			To:   "mysvblogcontent",
			Meta: &FileMeta{Lang: "sv"},
		},
		// Files
		{
			From:      "content/singles/p1.md",
			To:        "singlefiles/no.txt",
			ToBasedir: "singlefiles",
			Meta:      &FileMeta{Lang: "no"},
		},
		{
			From:      "content/singles/p1.md",
			To:        "singlefiles/sv.txt",
			ToBasedir: "singlefiles",
			Meta:      &FileMeta{Lang: "sv"},
		},
	}

	rfs, err := NewRootMappingFs(bfs, rm...)
	c.Assert(err, qt.IsNil)

	blog, err := rfs.Stat(filepath.FromSlash("content/blog"))
	c.Assert(err, qt.IsNil)
	c.Assert(blog.IsDir(), qt.Equals, true)
	blogm := blog.(FileMetaInfo).Meta()
	c.Assert(blogm.Lang, qt.Equals, "no") // First match

	f, err := blogm.Open()
	c.Assert(err, qt.IsNil)
	defer f.Close()
	dirs1, err := f.Readdirnames(-1)
	c.Assert(err, qt.IsNil)
	// Union with duplicate directory names filtered; files with the same
	// name are kept, one per mount.
	c.Assert(dirs1, qt.DeepEquals, []string{"test.txt", "test.txt", "other.txt", "test.txt"})

	files, err := afero.ReadDir(rfs, filepath.FromSlash("content/blog"))
	c.Assert(err, qt.IsNil)
	c.Assert(len(files), qt.Equals, 4)

	testfilefi := files[1]
	c.Assert(testfilefi.Name(), qt.Equals, testfile)

	testfilem := testfilefi.(FileMetaInfo).Meta()
	c.Assert(testfilem.Filename, qt.Equals, filepath.FromSlash("themes/a/mynoblogcontent/test.txt"))

	tf, err := testfilem.Open()
	c.Assert(err, qt.IsNil)
	defer tf.Close()
	b, err := ioutil.ReadAll(tf)
	c.Assert(err, qt.IsNil)
	c.Assert(string(b), qt.Equals, "some no content")

	// Ambiguous: two file mounts provide the same virtual name, so a flat
	// Stat cannot pick one.
	_, err = rfs.Stat(filepath.FromSlash("content/singles/p1.md"))
	c.Assert(err, qt.Not(qt.IsNil))

	singlesDir, err := rfs.Open(filepath.FromSlash("content/singles"))
	c.Assert(err, qt.IsNil)
	defer singlesDir.Close()
	singles, err := singlesDir.Readdir(-1)
	c.Assert(err, qt.IsNil)
	c.Assert(singles, qt.HasLen, 2)
	for i, lang := range []string{"no", "sv"} {
		fi := singles[i].(FileMetaInfo)
		c.Assert(fi.Meta().PathFile(), qt.Equals, filepath.FromSlash("themes/a/singlefiles/"+lang+".txt"))
		c.Assert(fi.Meta().Lang, qt.Equals, lang)
		c.Assert(fi.Name(), qt.Equals, "p1.md")
	}
}
+
+func TestRootMappingFsMountOverlap(t *testing.T) {
+ c := qt.New(t)
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ c.Assert(afero.WriteFile(fs, filepath.FromSlash("da/a.txt"), []byte("some no content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.FromSlash("db/b.txt"), []byte("some no content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.FromSlash("dc/c.txt"), []byte("some no content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.FromSlash("de/e.txt"), []byte("some no content"), 0755), qt.IsNil)
+
+ rm := []RootMapping{
+ {
+ From: "static",
+ To: "da",
+ },
+ {
+ From: "static/b",
+ To: "db",
+ },
+ {
+ From: "static/b/c",
+ To: "dc",
+ },
+ {
+ From: "/static/e/",
+ To: "de",
+ },
+ }
+
+ rfs, err := NewRootMappingFs(fs, rm...)
+ c.Assert(err, qt.IsNil)
+
+ checkDirnames := func(name string, expect []string) {
+ c.Helper()
+ name = filepath.FromSlash(name)
+ f, err := rfs.Open(name)
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+ names, err := f.Readdirnames(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(names, qt.DeepEquals, expect, qt.Commentf(fmt.Sprintf("%#v", names)))
+ }
+
+ checkDirnames("static", []string{"a.txt", "b", "e"})
+ checkDirnames("static/b", []string{"b.txt", "c"})
+ checkDirnames("static/b/c", []string{"c.txt"})
+
+ fi, err := rfs.Stat(filepath.FromSlash("static/b/b.txt"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(fi.Name(), qt.Equals, "b.txt")
+}
+
// TestRootMappingFsOs runs the mount tests against a real OS filesystem:
// overlapping mounts, deep directory structures, collected filenames and
// per-entry metadata (see https://github.com/gohugoio/hugo/issues/6854).
func TestRootMappingFsOs(t *testing.T) {
	c := qt.New(t)
	fs := NewBaseFileDecorator(afero.NewOsFs())

	d, clean, err := htesting.CreateTempDir(fs, "hugo-root-mapping-os")
	c.Assert(err, qt.IsNil)
	defer clean()

	testfile := "myfile.txt"
	c.Assert(fs.Mkdir(filepath.Join(d, "f1t"), 0755), qt.IsNil)
	c.Assert(fs.Mkdir(filepath.Join(d, "f2t"), 0755), qt.IsNil)
	c.Assert(fs.Mkdir(filepath.Join(d, "f3t"), 0755), qt.IsNil)

	// Deep structure
	deepDir := filepath.Join(d, "d1", "d2", "d3", "d4", "d5")
	c.Assert(fs.MkdirAll(deepDir, 0755), qt.IsNil)
	for i := 1; i <= 3; i++ {
		c.Assert(fs.MkdirAll(filepath.Join(d, "d1", "d2", "d3", "d4", fmt.Sprintf("d4-%d", i)), 0755), qt.IsNil)
		c.Assert(afero.WriteFile(fs, filepath.Join(d, "d1", "d2", "d3", fmt.Sprintf("f-%d.txt", i)), []byte("some content"), 0755), qt.IsNil)
	}

	c.Assert(afero.WriteFile(fs, filepath.Join(d, "f2t", testfile), []byte("some content"), 0755), qt.IsNil)

	// https://github.com/gohugoio/hugo/issues/6854
	mystaticDir := filepath.Join(d, "mystatic", "a", "b", "c")
	c.Assert(fs.MkdirAll(mystaticDir, 0755), qt.IsNil)
	c.Assert(afero.WriteFile(fs, filepath.Join(mystaticDir, "ms-1.txt"), []byte("some content"), 0755), qt.IsNil)

	rfs, err := newRootMappingFsFromFromTo(
		d,
		fs,
		"static/bf1", filepath.Join(d, "f1t"),
		"static/cf2", filepath.Join(d, "f2t"),
		"static/af3", filepath.Join(d, "f3t"),
		"static", filepath.Join(d, "mystatic"),
		"static/a/b/c", filepath.Join(d, "d1", "d2", "d3"),
		"layouts", filepath.Join(d, "d1"),
	)

	c.Assert(err, qt.IsNil)

	fif, err := rfs.Stat(filepath.Join("static/cf2", testfile))
	c.Assert(err, qt.IsNil)
	c.Assert(fif.Name(), qt.Equals, "myfile.txt")

	root, err := rfs.Open("static")
	c.Assert(err, qt.IsNil)

	dirnames, err := root.Readdirnames(-1)
	c.Assert(err, qt.IsNil)
	c.Assert(dirnames, qt.DeepEquals, []string{"a", "af3", "bf1", "cf2"}, qt.Commentf(fmt.Sprintf("%#v", dirnames)))

	// getDirnames returns the sorted entry names of a virtual directory.
	getDirnames := func(dirname string) []string {
		dirname = filepath.FromSlash(dirname)
		f, err := rfs.Open(dirname)
		c.Assert(err, qt.IsNil)
		defer f.Close()
		dirnames, err := f.Readdirnames(-1)
		c.Assert(err, qt.IsNil)
		sort.Strings(dirnames)
		return dirnames
	}

	c.Assert(getDirnames("static/a/b"), qt.DeepEquals, []string{"c"})
	c.Assert(getDirnames("static/a/b/c"), qt.DeepEquals, []string{"d4", "f-1.txt", "f-2.txt", "f-3.txt", "ms-1.txt"})
	c.Assert(getDirnames("static/a/b/c/d4"), qt.DeepEquals, []string{"d4-1", "d4-2", "d4-3", "d5"})

	all, err := collectFilenames(rfs, "static", "static")
	c.Assert(err, qt.IsNil)

	c.Assert(all, qt.DeepEquals, []string{"a/b/c/f-1.txt", "a/b/c/f-2.txt", "a/b/c/f-3.txt", "a/b/c/ms-1.txt", "cf2/myfile.txt"})

	fis, err := collectFileinfos(rfs, "static", "static")
	c.Assert(err, qt.IsNil)

	c.Assert(fis[9].Meta().PathFile(), qt.Equals, filepath.FromSlash("d1/d2/d3/f-1.txt"))

	dirc := fis[3].Meta()

	f, err := dirc.Open()
	c.Assert(err, qt.IsNil)
	defer f.Close()
	fileInfos, err := f.Readdir(-1)
	c.Assert(err, qt.IsNil)
	sortFileInfos(fileInfos)
	i := 0
	for _, fi := range fileInfos {
		if fi.IsDir() || fi.Name() == "ms-1.txt" {
			continue
		}
		i++
		meta := fi.(FileMetaInfo).Meta()
		c.Assert(meta.Filename, qt.Equals, filepath.Join(d, fmt.Sprintf("/d1/d2/d3/f-%d.txt", i)))
		c.Assert(meta.PathFile(), qt.Equals, filepath.FromSlash(fmt.Sprintf("d1/d2/d3/f-%d.txt", i)))
	}

	_, err = rfs.Stat(filepath.FromSlash("layouts/d2/d3/f-1.txt"))
	c.Assert(err, qt.IsNil)
	_, err = rfs.Stat(filepath.FromSlash("layouts/d2/d3"))
	c.Assert(err, qt.IsNil)
}
+
+func TestRootMappingFsOsBase(t *testing.T) {
+ c := qt.New(t)
+ fs := NewBaseFileDecorator(afero.NewOsFs())
+
+ d, clean, err := htesting.CreateTempDir(fs, "hugo-root-mapping-os-base")
+ c.Assert(err, qt.IsNil)
+ defer clean()
+
+ // Deep structure
+ deepDir := filepath.Join(d, "d1", "d2", "d3", "d4", "d5")
+ c.Assert(fs.MkdirAll(deepDir, 0755), qt.IsNil)
+ for i := 1; i <= 3; i++ {
+ c.Assert(fs.MkdirAll(filepath.Join(d, "d1", "d2", "d3", "d4", fmt.Sprintf("d4-%d", i)), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join(d, "d1", "d2", "d3", fmt.Sprintf("f-%d.txt", i)), []byte("some content"), 0755), qt.IsNil)
+ }
+
+ mystaticDir := filepath.Join(d, "mystatic", "a", "b", "c")
+ c.Assert(fs.MkdirAll(mystaticDir, 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join(mystaticDir, "ms-1.txt"), []byte("some content"), 0755), qt.IsNil)
+
+ bfs := afero.NewBasePathFs(fs, d)
+
+ rfs, err := newRootMappingFsFromFromTo(
+ "",
+ bfs,
+ "static", "mystatic",
+ "static/a/b/c", filepath.Join("d1", "d2", "d3"),
+ )
+
+ getDirnames := func(dirname string) []string {
+ dirname = filepath.FromSlash(dirname)
+ f, err := rfs.Open(dirname)
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+ dirnames, err := f.Readdirnames(-1)
+ c.Assert(err, qt.IsNil)
+ sort.Strings(dirnames)
+ return dirnames
+ }
+
+ c.Assert(getDirnames("static/a/b/c"), qt.DeepEquals, []string{"d4", "f-1.txt", "f-2.txt", "f-3.txt", "ms-1.txt"})
+}
+
+func TestRootMappingFileFilter(t *testing.T) {
+ c := qt.New(t)
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ for _, lang := range []string{"no", "en", "fr"} {
+ for i := 1; i <= 3; i++ {
+ c.Assert(afero.WriteFile(fs, filepath.Join(lang, fmt.Sprintf("my%s%d.txt", lang, i)), []byte("some text file for"+lang), 0755), qt.IsNil)
+ }
+ }
+
+ for _, lang := range []string{"no", "en", "fr"} {
+ for i := 1; i <= 3; i++ {
+ c.Assert(afero.WriteFile(fs, filepath.Join(lang, "sub", fmt.Sprintf("mysub%s%d.txt", lang, i)), []byte("some text file for"+lang), 0755), qt.IsNil)
+ }
+ }
+
+ rm := []RootMapping{
+ {
+ From: "content",
+ To: "no",
+ Meta: &FileMeta{Lang: "no", InclusionFilter: glob.MustNewFilenameFilter(nil, []string{"**.txt"})},
+ },
+ {
+ From: "content",
+ To: "en",
+ Meta: &FileMeta{Lang: "en"},
+ },
+ {
+ From: "content",
+ To: "fr",
+ Meta: &FileMeta{Lang: "fr", InclusionFilter: glob.MustNewFilenameFilter(nil, []string{"**.txt"})},
+ },
+ }
+
+ rfs, err := NewRootMappingFs(fs, rm...)
+ c.Assert(err, qt.IsNil)
+
+ assertExists := func(filename string, shouldExist bool) {
+ c.Helper()
+ filename = filepath.Clean(filename)
+ _, err1 := rfs.Stat(filename)
+ f, err2 := rfs.Open(filename)
+ if shouldExist {
+ c.Assert(err1, qt.IsNil)
+ c.Assert(err2, qt.IsNil)
+ c.Assert(f.Close(), qt.IsNil)
+ } else {
+ c.Assert(err1, qt.Not(qt.IsNil))
+ c.Assert(err2, qt.Not(qt.IsNil))
+ }
+ }
+
+ assertExists("content/myno1.txt", false)
+ assertExists("content/myen1.txt", true)
+ assertExists("content/myfr1.txt", false)
+
+ dirEntriesSub, err := afero.ReadDir(rfs, filepath.Join("content", "sub"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(dirEntriesSub), qt.Equals, 3)
+
+ dirEntries, err := afero.ReadDir(rfs, "content")
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(dirEntries), qt.Equals, 4)
+
+}
diff --git a/hugofs/slice_fs.go b/hugofs/slice_fs.go
new file mode 100644
index 000000000..7edaf7513
--- /dev/null
+++ b/hugofs/slice_fs.go
@@ -0,0 +1,302 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+ "time"
+
+ "errors"
+
+ "github.com/spf13/afero"
+)
+
// Compile-time interface assertions for the types in this file.
var (
	_ afero.Fs = (*SliceFs)(nil)
	_ afero.Lstater = (*SliceFs)(nil)
	_ FilesystemsUnwrapper = (*SliceFs)(nil)
	_ afero.File = (*sliceDir)(nil)
)
+
+func NewSliceFs(dirs ...FileMetaInfo) (afero.Fs, error) {
+ if len(dirs) == 0 {
+ return NoOpFs, nil
+ }
+
+ for _, dir := range dirs {
+ if !dir.IsDir() {
+ return nil, errors.New("this fs supports directories only")
+ }
+ }
+
+ fs := &SliceFs{
+ dirs: dirs,
+ }
+
+ return fs, nil
+}
+
// SliceFs is an ordered composite filesystem.
type SliceFs struct {
	dirs []FileMetaInfo // the directories to merge, in priority order
}
+
+func (fs *SliceFs) UnwrapFilesystems() []afero.Fs {
+ var fss []afero.Fs
+ for _, dir := range fs.dirs {
+ fss = append(fss, dir.Meta().Fs)
+ }
+ return fss
+}
+
// Chmod is not supported: SliceFs is read-only.
func (fs *SliceFs) Chmod(n string, m os.FileMode) error {
	return syscall.EPERM
}

// Chtimes is not supported: SliceFs is read-only.
func (fs *SliceFs) Chtimes(n string, a, m time.Time) error {
	return syscall.EPERM
}

// Chown is not supported: SliceFs is read-only.
func (fs *SliceFs) Chown(n string, uid, gid int) error {
	return syscall.EPERM
}
+
// LstatIfPossible returns the FileInfo of the first filesystem in the slice
// that contains name. Only directories are supported; resolving to a regular
// file is an error. The returned bool is always false (no real lstat done).
func (fs *SliceFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
	fi, _, err := fs.pickFirst(name)
	if err != nil {
		return nil, false, err
	}

	if fi.IsDir() {
		return decorateFileInfo(fi, fs, fs.getOpener(name), "", "", nil), false, nil
	}

	return nil, false, fmt.Errorf("lstat: files not supported: %q", name)
}
+
// Mkdir is not supported: SliceFs is read-only.
func (fs *SliceFs) Mkdir(n string, p os.FileMode) error {
	return syscall.EPERM
}

// MkdirAll is not supported: SliceFs is read-only.
func (fs *SliceFs) MkdirAll(n string, p os.FileMode) error {
	return syscall.EPERM
}

// Name returns the name of this filesystem.
func (fs *SliceFs) Name() string {
	return "SliceFs"
}
+
// Open opens name, which must resolve to a directory in at least one of the
// filesystems in the slice. The returned sliceDir merges the entries of the
// matching and all later filesystems.
func (fs *SliceFs) Open(name string) (afero.File, error) {
	fi, idx, err := fs.pickFirst(name)
	if err != nil {
		return nil, err
	}

	if !fi.IsDir() {
		// Regular files are deliberately unsupported; hitting this is a
		// programmer error, not a user error.
		panic("currently only dirs in here")
	}

	return &sliceDir{
		lfs:     fs,
		idx:     idx,
		dirname: name,
	}, nil
}
+
// OpenFile is not implemented; use Open instead.
func (fs *SliceFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
	panic("not implemented")
}

// ReadDir is not implemented; open the directory and use Readdir instead.
func (fs *SliceFs) ReadDir(name string) ([]os.FileInfo, error) {
	panic("not implemented")
}
+
// Remove is not supported: SliceFs is read-only.
func (fs *SliceFs) Remove(n string) error {
	return syscall.EPERM
}

// RemoveAll is not supported: SliceFs is read-only.
func (fs *SliceFs) RemoveAll(p string) error {
	return syscall.EPERM
}

// Rename is not supported: SliceFs is read-only.
func (fs *SliceFs) Rename(o, n string) error {
	return syscall.EPERM
}

// Stat delegates to LstatIfPossible.
func (fs *SliceFs) Stat(name string) (os.FileInfo, error) {
	fi, _, err := fs.LstatIfPossible(name)
	return fi, err
}

// Create is not supported: SliceFs is read-only.
func (fs *SliceFs) Create(n string) (afero.File, error) {
	return nil, syscall.EPERM
}
+
+func (fs *SliceFs) getOpener(name string) func() (afero.File, error) {
+ return func() (afero.File, error) {
+ return fs.Open(name)
+ }
+}
+
+func (fs *SliceFs) pickFirst(name string) (os.FileInfo, int, error) {
+ for i, mfs := range fs.dirs {
+ meta := mfs.Meta()
+ fs := meta.Fs
+ fi, _, err := lstatIfPossible(fs, name)
+ if err == nil {
+ // Gotta match!
+ return fi, i, nil
+ }
+
+ if !os.IsNotExist(err) {
+ // Real error
+ return nil, -1, err
+ }
+ }
+
+ // Not found
+ return nil, -1, os.ErrNotExist
+}
+
// readDirs merges the entries of name across the filesystems starting at
// startIdx. Duplicate directory names are removed (the first occurrence
// wins and is re-decorated so nested opens go through this filesystem);
// duplicate file names are kept. When count > 0 the result is truncated
// to at most count entries.
func (fs *SliceFs) readDirs(name string, startIdx, count int) ([]os.FileInfo, error) {
	// collect reads one filesystem's entries; a missing directory in a
	// given filesystem is not an error, just an empty contribution.
	collect := func(lfs *FileMeta) ([]os.FileInfo, error) {
		d, err := lfs.Fs.Open(name)
		if err != nil {
			if !os.IsNotExist(err) {
				return nil, err
			}
			return nil, nil
		} else {
			defer d.Close()
			dirs, err := d.Readdir(-1)
			if err != nil {
				return nil, err
			}
			return dirs, nil
		}
	}

	var dirs []os.FileInfo

	for i := startIdx; i < len(fs.dirs); i++ {
		mfs := fs.dirs[i]

		fis, err := collect(mfs.Meta())
		if err != nil {
			return nil, err
		}

		dirs = append(dirs, fis...)

	}

	// Find duplicate directory names (later occurrences) so they can be
	// removed below; files with the same name are all kept.
	seen := make(map[string]bool)
	var duplicates []int
	for i, fi := range dirs {
		if !fi.IsDir() {
			continue
		}

		if seen[fi.Name()] {
			duplicates = append(duplicates, i)
		} else {
			// Make sure it's opened by this filesystem.
			dirs[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename), "", "", nil)
			seen[fi.Name()] = true
		}
	}

	// Remove duplicate directories, keep first.
	if len(duplicates) > 0 {
		// Iterate backwards so earlier indexes stay valid while deleting.
		for i := len(duplicates) - 1; i >= 0; i-- {
			idx := duplicates[i]
			dirs = append(dirs[:idx], dirs[idx+1:]...)
		}
	}

	if count > 0 && len(dirs) >= count {
		return dirs[:count], nil
	}

	return dirs, nil
}
+
// sliceDir is the afero.File returned by SliceFs.Open. It represents a
// directory merged from the filesystems at index idx and later.
type sliceDir struct {
	lfs     *SliceFs // owning filesystem
	idx     int      // index of the first filesystem containing this dir
	dirname string   // the directory name this was opened with
}
+
// Close is a no-op; sliceDir holds no OS resources.
func (f *sliceDir) Close() error {
	return nil
}

// Name returns the directory name this sliceDir was opened with.
func (f *sliceDir) Name() string {
	return f.dirname
}

// Read is not supported: sliceDir is a directory.
func (f *sliceDir) Read(p []byte) (n int, err error) {
	panic("not implemented")
}

// ReadAt is not supported: sliceDir is a directory.
func (f *sliceDir) ReadAt(p []byte, off int64) (n int, err error) {
	panic("not implemented")
}

// Readdir returns the merged directory listing, starting at the filesystem
// index this sliceDir was opened at.
func (f *sliceDir) Readdir(count int) ([]os.FileInfo, error) {
	return f.lfs.readDirs(f.dirname, f.idx, count)
}
+
+func (f *sliceDir) Readdirnames(count int) ([]string, error) {
+ dirsi, err := f.Readdir(count)
+ if err != nil {
+ return nil, err
+ }
+
+ dirs := make([]string, len(dirsi))
+ for i, d := range dirsi {
+ dirs[i] = d.Name()
+ }
+ return dirs, nil
+}
+
// Seek is not supported: sliceDir is a read-only directory handle.
func (f *sliceDir) Seek(offset int64, whence int) (int64, error) {
	panic("not implemented")
}

// Stat is not supported on the directory handle; use SliceFs.Stat.
func (f *sliceDir) Stat() (os.FileInfo, error) {
	panic("not implemented")
}

// Sync is not supported: sliceDir is a read-only directory handle.
func (f *sliceDir) Sync() error {
	panic("not implemented")
}

// Truncate is not supported: sliceDir is a read-only directory handle.
func (f *sliceDir) Truncate(size int64) error {
	panic("not implemented")
}

// Write is not supported: sliceDir is a read-only directory handle.
func (f *sliceDir) Write(p []byte) (n int, err error) {
	panic("not implemented")
}

// WriteAt is not supported: sliceDir is a read-only directory handle.
func (f *sliceDir) WriteAt(p []byte, off int64) (n int, err error) {
	panic("not implemented")
}

// WriteString is not supported: sliceDir is a read-only directory handle.
func (f *sliceDir) WriteString(s string) (ret int, err error) {
	panic("not implemented")
}
diff --git a/hugofs/stacktracer_fs.go b/hugofs/stacktracer_fs.go
new file mode 100644
index 000000000..4411dbfde
--- /dev/null
+++ b/hugofs/stacktracer_fs.go
@@ -0,0 +1,77 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "regexp"
+ "runtime"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/spf13/afero"
+)
+
var (
	// Make sure we don't accidentally use this in the real Hugo.
	_ types.DevMarker = (*stacktracerFs)(nil)
	// stacktracerFs wraps exactly one filesystem and must expose it.
	_ FilesystemUnwrapper = (*stacktracerFs)(nil)
)
+
+// NewStacktracerFs wraps the given fs printing stack traces for file creates
+// matching the given regexp pattern.
+func NewStacktracerFs(fs afero.Fs, pattern string) afero.Fs {
+ return &stacktracerFs{Fs: fs, re: regexp.MustCompile(pattern)}
+}
+
// stacktracerFs can be used in hard-to-debug development situations where
// you get some input you don't understand where comes from.
type stacktracerFs struct {
	afero.Fs // the wrapped filesystem

	// Will print stacktrace for every file creates matching this pattern.
	re *regexp.Regexp
}
+
// DevOnly marks this filesystem as a development-only helper (see
// types.DevMarker); it must not be wired into production code paths.
func (fs *stacktracerFs) DevOnly() {
}

// UnwrapFilesystem returns the wrapped filesystem.
func (fs *stacktracerFs) UnwrapFilesystem() afero.Fs {
	return fs.Fs
}
+
+func (fs *stacktracerFs) onCreate(filename string) {
+ if fs.re.MatchString(filename) {
+ trace := make([]byte, 1500)
+ runtime.Stack(trace, true)
+ fmt.Printf("\n===========\n%q:\n%s\n", filename, trace)
+ }
+}
+
+func (fs *stacktracerFs) Create(name string) (afero.File, error) {
+ f, err := fs.Fs.Create(name)
+ if err == nil {
+ fs.onCreate(name)
+ }
+ return f, err
+}
+
+func (fs *stacktracerFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
+ f, err := fs.Fs.OpenFile(name, flag, perm)
+ if err == nil && isWrite(flag) {
+ fs.onCreate(name)
+ }
+ return f, err
+}
diff --git a/hugofs/walk.go b/hugofs/walk.go
new file mode 100644
index 000000000..22a99402f
--- /dev/null
+++ b/hugofs/walk.go
@@ -0,0 +1,324 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "errors"
+
+ "github.com/spf13/afero"
+)
+
+type (
+ WalkFunc func(path string, info FileMetaInfo, err error) error
+ WalkHook func(dir FileMetaInfo, path string, readdir []FileMetaInfo) ([]FileMetaInfo, error)
+)
+
+type Walkway struct {
+ fs afero.Fs
+ root string
+ basePath string
+
+ logger loggers.Logger
+
+ // May be pre-set
+ fi FileMetaInfo
+ dirEntries []FileMetaInfo
+
+ walkFn WalkFunc
+ walked bool
+
+ // We may traverse symbolic links and bite ourself.
+ seen map[string]bool
+
+ // Optional hooks
+ hookPre WalkHook
+ hookPost WalkHook
+}
+
+type WalkwayConfig struct {
+ Fs afero.Fs
+ Root string
+ BasePath string
+
+ Logger loggers.Logger
+
+ // One or both of these may be pre-set.
+ Info FileMetaInfo
+ DirEntries []FileMetaInfo
+
+ WalkFn WalkFunc
+ HookPre WalkHook
+ HookPost WalkHook
+}
+
+func NewWalkway(cfg WalkwayConfig) *Walkway {
+ var fs afero.Fs
+ if cfg.Info != nil {
+ fs = cfg.Info.Meta().Fs
+ } else {
+ fs = cfg.Fs
+ }
+
+ basePath := cfg.BasePath
+ if basePath != "" && !strings.HasSuffix(basePath, filepathSeparator) {
+ basePath += filepathSeparator
+ }
+
+ logger := cfg.Logger
+ if logger == nil {
+ logger = loggers.NewWarningLogger()
+ }
+
+ return &Walkway{
+ fs: fs,
+ root: cfg.Root,
+ basePath: basePath,
+ fi: cfg.Info,
+ dirEntries: cfg.DirEntries,
+ walkFn: cfg.WalkFn,
+ hookPre: cfg.HookPre,
+ hookPost: cfg.HookPost,
+ logger: logger,
+ seen: make(map[string]bool),
+ }
+}
+
+func (w *Walkway) Walk() error {
+ if w.walked {
+ panic("this walkway is already walked")
+ }
+ w.walked = true
+
+ if w.fs == NoOpFs {
+ return nil
+ }
+
+ var fi FileMetaInfo
+ if w.fi != nil {
+ fi = w.fi
+ } else {
+ info, _, err := lstatIfPossible(w.fs, w.root)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil
+ }
+
+ if w.checkErr(w.root, err) {
+ return nil
+ }
+ return w.walkFn(w.root, nil, fmt.Errorf("walk: %q: %w", w.root, err))
+ }
+ fi = info.(FileMetaInfo)
+ }
+
+ if !fi.IsDir() {
+ return w.walkFn(w.root, nil, errors.New("file to walk must be a directory"))
+ }
+
+ return w.walk(w.root, fi, w.dirEntries, w.walkFn)
+}
+
+// if the filesystem supports it, use Lstat, else use fs.Stat
+func lstatIfPossible(fs afero.Fs, path string) (os.FileInfo, bool, error) {
+ if lfs, ok := fs.(afero.Lstater); ok {
+ fi, b, err := lfs.LstatIfPossible(path)
+ return fi, b, err
+ }
+ fi, err := fs.Stat(path)
+ return fi, false, err
+}
+
+// checkErr returns true if the error is handled.
+func (w *Walkway) checkErr(filename string, err error) bool {
+ if err == ErrPermissionSymlink {
+ logUnsupportedSymlink(filename, w.logger)
+ return true
+ }
+
+ if os.IsNotExist(err) {
+ // The file may be removed in process.
+ // This may be a ERROR situation, but it is not possible
+ // to determine as a general case.
+ w.logger.Warnf("File %q not found, skipping.", filename)
+ return true
+ }
+
+ return false
+}
+
+func logUnsupportedSymlink(filename string, logger loggers.Logger) {
+ logger.Warnf("Unsupported symlink found in %q, skipping.", filename)
+}
+
+// walk recursively descends path, calling walkFn.
+// It follow symlinks if supported by the filesystem, but only the same path once.
+func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo, walkFn WalkFunc) error {
+ err := walkFn(path, info, nil)
+ if err != nil {
+ if info.IsDir() && err == filepath.SkipDir {
+ return nil
+ }
+ return err
+ }
+ if !info.IsDir() {
+ return nil
+ }
+
+ meta := info.Meta()
+ filename := meta.Filename
+
+ if dirEntries == nil {
+ f, err := w.fs.Open(path)
+ if err != nil {
+ if w.checkErr(path, err) {
+ return nil
+ }
+ return walkFn(path, info, fmt.Errorf("walk: open %q (%q): %w", path, w.root, err))
+ }
+
+ fis, err := f.Readdir(-1)
+ f.Close()
+ if err != nil {
+ if w.checkErr(filename, err) {
+ return nil
+ }
+ return walkFn(path, info, fmt.Errorf("walk: Readdir: %w", err))
+ }
+
+ dirEntries = fileInfosToFileMetaInfos(fis)
+
+ if !meta.IsOrdered {
+ sort.Slice(dirEntries, func(i, j int) bool {
+ fii := dirEntries[i]
+ fij := dirEntries[j]
+
+ fim, fjm := fii.Meta(), fij.Meta()
+
+ // Pull bundle headers to the top.
+ ficlass, fjclass := fim.Classifier, fjm.Classifier
+ if ficlass != fjclass {
+ return ficlass < fjclass
+ }
+
+ // With multiple content dirs with different languages,
+ // there can be duplicate files, and a weight will be added
+ // to the closest one.
+ fiw, fjw := fim.Weight, fjm.Weight
+ if fiw != fjw {
+
+ return fiw > fjw
+ }
+
+ // When we walk into a symlink, we keep the reference to
+ // the original name.
+ fin, fjn := fim.Name, fjm.Name
+ if fin != "" && fjn != "" {
+ return fin < fjn
+ }
+
+ return fii.Name() < fij.Name()
+ })
+ }
+ }
+
+ // First add some metadata to the dir entries
+ for _, fi := range dirEntries {
+ fim := fi.(FileMetaInfo)
+
+ meta := fim.Meta()
+
+ // Note that we use the original Name even if it's a symlink.
+ name := meta.Name
+ if name == "" {
+ name = fim.Name()
+ }
+
+ if name == "" {
+ panic(fmt.Sprintf("[%s] no name set in %v", path, meta))
+ }
+ pathn := filepath.Join(path, name)
+
+ pathMeta := pathn
+ if w.basePath != "" {
+ pathMeta = strings.TrimPrefix(pathn, w.basePath)
+ }
+
+ meta.Path = normalizeFilename(pathMeta)
+ meta.PathWalk = pathn
+
+ if fim.IsDir() && meta.IsSymlink && w.isSeen(meta.Filename) {
+ // Prevent infinite recursion
+ // Possible cyclic reference
+ meta.SkipDir = true
+ }
+ }
+
+ if w.hookPre != nil {
+ dirEntries, err = w.hookPre(info, path, dirEntries)
+ if err != nil {
+ if err == filepath.SkipDir {
+ return nil
+ }
+ return err
+ }
+ }
+
+ for _, fi := range dirEntries {
+ fim := fi.(FileMetaInfo)
+ meta := fim.Meta()
+
+ if meta.SkipDir {
+ continue
+ }
+
+ err := w.walk(meta.PathWalk, fim, nil, walkFn)
+ if err != nil {
+ if !fi.IsDir() || err != filepath.SkipDir {
+ return err
+ }
+ }
+ }
+
+ if w.hookPost != nil {
+ dirEntries, err = w.hookPost(info, path, dirEntries)
+ if err != nil {
+ if err == filepath.SkipDir {
+ return nil
+ }
+ return err
+ }
+ }
+ return nil
+}
+
+func (w *Walkway) isSeen(filename string) bool {
+ if filename == "" {
+ return false
+ }
+
+ if w.seen[filename] {
+ return true
+ }
+
+ w.seen[filename] = true
+ return false
+}
diff --git a/hugofs/walk_test.go b/hugofs/walk_test.go
new file mode 100644
index 000000000..2e162fa72
--- /dev/null
+++ b/hugofs/walk_test.go
@@ -0,0 +1,276 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/para"
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestWalk(t *testing.T) {
+ c := qt.New(t)
+
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ afero.WriteFile(fs, "b.txt", []byte("content"), 0777)
+ afero.WriteFile(fs, "c.txt", []byte("content"), 0777)
+ afero.WriteFile(fs, "a.txt", []byte("content"), 0777)
+
+ names, err := collectFilenames(fs, "", "")
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(names, qt.DeepEquals, []string{"a.txt", "b.txt", "c.txt"})
+}
+
+func TestWalkRootMappingFs(t *testing.T) {
+ c := qt.New(t)
+
+ prepare := func(c *qt.C) afero.Fs {
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ testfile := "test.txt"
+
+ c.Assert(afero.WriteFile(fs, filepath.Join("a/b", testfile), []byte("some content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("c/d", testfile), []byte("some content"), 0755), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filepath.Join("e/f", testfile), []byte("some content"), 0755), qt.IsNil)
+
+ rm := []RootMapping{
+ {
+ From: "static/b",
+ To: "e/f",
+ },
+ {
+ From: "static/a",
+ To: "c/d",
+ },
+
+ {
+ From: "static/c",
+ To: "a/b",
+ },
+ }
+
+ rfs, err := NewRootMappingFs(fs, rm...)
+ c.Assert(err, qt.IsNil)
+ return afero.NewBasePathFs(rfs, "static")
+ }
+
+ c.Run("Basic", func(c *qt.C) {
+ bfs := prepare(c)
+
+ names, err := collectFilenames(bfs, "", "")
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(names, qt.DeepEquals, []string{"a/test.txt", "b/test.txt", "c/test.txt"})
+ })
+
+ c.Run("Para", func(c *qt.C) {
+ bfs := prepare(c)
+
+ p := para.New(4)
+ r, _ := p.Start(context.Background())
+
+ for i := 0; i < 8; i++ {
+ r.Run(func() error {
+ _, err := collectFilenames(bfs, "", "")
+ if err != nil {
+ return err
+ }
+ fi, err := bfs.Stat("b/test.txt")
+ if err != nil {
+ return err
+ }
+ meta := fi.(FileMetaInfo).Meta()
+ if meta.Filename == "" {
+ return errors.New("fail")
+ }
+ return nil
+ })
+ }
+
+ c.Assert(r.Wait(), qt.IsNil)
+ })
+}
+
+func skipSymlink() bool {
+ if runtime.GOOS != "windows" {
+ return false
+ }
+ if os.Getenv("GITHUB_ACTION") != "" {
+ // TODO(bep) figure out why this fails on GitHub Actions.
+ return true
+ }
+ return os.Getenv("CI") == ""
+}
+
+func TestWalkSymbolicLink(t *testing.T) {
+ if skipSymlink() {
+ t.Skip("Skip; os.Symlink needs administrator rights on Windows")
+ }
+ c := qt.New(t)
+ workDir, clean, err := htesting.CreateTempDir(Os, "hugo-walk-sym")
+ c.Assert(err, qt.IsNil)
+ defer clean()
+ wd, _ := os.Getwd()
+ defer func() {
+ os.Chdir(wd)
+ }()
+
+ fs := NewBaseFileDecorator(Os)
+
+ blogDir := filepath.Join(workDir, "blog")
+ docsDir := filepath.Join(workDir, "docs")
+ blogReal := filepath.Join(blogDir, "real")
+ blogRealSub := filepath.Join(blogReal, "sub")
+ c.Assert(os.MkdirAll(blogRealSub, 0777), qt.IsNil)
+ c.Assert(os.MkdirAll(docsDir, 0777), qt.IsNil)
+ afero.WriteFile(fs, filepath.Join(blogRealSub, "a.txt"), []byte("content"), 0777)
+ afero.WriteFile(fs, filepath.Join(docsDir, "b.txt"), []byte("content"), 0777)
+
+ os.Chdir(blogDir)
+ c.Assert(os.Symlink("real", "symlinked"), qt.IsNil)
+ os.Chdir(blogReal)
+ c.Assert(os.Symlink("../real", "cyclic"), qt.IsNil)
+ os.Chdir(docsDir)
+ c.Assert(os.Symlink("../blog/real/cyclic", "docsreal"), qt.IsNil)
+
+ t.Run("OS Fs", func(t *testing.T) {
+ c := qt.New(t)
+
+ names, err := collectFilenames(fs, workDir, workDir)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(names, qt.DeepEquals, []string{"blog/real/sub/a.txt", "blog/symlinked/sub/a.txt", "docs/b.txt"})
+ })
+
+ t.Run("BasePath Fs", func(t *testing.T) {
+ c := qt.New(t)
+
+ docsFs := afero.NewBasePathFs(fs, docsDir)
+
+ names, err := collectFilenames(docsFs, "", "")
+ c.Assert(err, qt.IsNil)
+
+ // Note: the docsreal folder is considered cyclic when walking from the root, but this works.
+ c.Assert(names, qt.DeepEquals, []string{"b.txt", "docsreal/sub/a.txt"})
+ })
+}
+
+func collectFilenames(fs afero.Fs, base, root string) ([]string, error) {
+ var names []string
+
+ walkFn := func(path string, info FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if info.IsDir() {
+ return nil
+ }
+
+ filename := info.Meta().Path
+ filename = filepath.ToSlash(filename)
+
+ names = append(names, filename)
+
+ return nil
+ }
+
+ w := NewWalkway(WalkwayConfig{Fs: fs, BasePath: base, Root: root, WalkFn: walkFn})
+
+ err := w.Walk()
+
+ return names, err
+}
+
+func collectFileinfos(fs afero.Fs, base, root string) ([]FileMetaInfo, error) {
+ var fis []FileMetaInfo
+
+ walkFn := func(path string, info FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ fis = append(fis, info)
+
+ return nil
+ }
+
+ w := NewWalkway(WalkwayConfig{Fs: fs, BasePath: base, Root: root, WalkFn: walkFn})
+
+ err := w.Walk()
+
+ return fis, err
+}
+
+func BenchmarkWalk(b *testing.B) {
+ c := qt.New(b)
+ fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+ writeFiles := func(dir string, numfiles int) {
+ for i := 0; i < numfiles; i++ {
+ filename := filepath.Join(dir, fmt.Sprintf("file%d.txt", i))
+ c.Assert(afero.WriteFile(fs, filename, []byte("content"), 0777), qt.IsNil)
+ }
+ }
+
+ const numFilesPerDir = 20
+
+ writeFiles("root", numFilesPerDir)
+ writeFiles("root/l1_1", numFilesPerDir)
+ writeFiles("root/l1_1/l2_1", numFilesPerDir)
+ writeFiles("root/l1_1/l2_2", numFilesPerDir)
+ writeFiles("root/l1_2", numFilesPerDir)
+ writeFiles("root/l1_2/l2_1", numFilesPerDir)
+ writeFiles("root/l1_3", numFilesPerDir)
+
+ walkFn := func(path string, info FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ return nil
+ }
+
+ filename := info.Meta().Filename
+ if !strings.HasPrefix(filename, "root") {
+ return errors.New(filename)
+ }
+
+ return nil
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ w := NewWalkway(WalkwayConfig{Fs: fs, Root: "root", WalkFn: walkFn})
+
+ if err := w.Walk(); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
diff --git a/hugolib/404_test.go b/hugolib/404_test.go
new file mode 100644
index 000000000..383302e0b
--- /dev/null
+++ b/hugolib/404_test.go
@@ -0,0 +1,79 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+)
+
+func Test404(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded(
+ "404.html",
+ `
+{{ $home := site.Home }}
+404:
+Parent: {{ .Parent.Kind }}
+IsAncestor: {{ .IsAncestor $home }}/{{ $home.IsAncestor . }}
+IsDescendant: {{ .IsDescendant $home }}/{{ $home.IsDescendant . }}
+CurrentSection: {{ .CurrentSection.Kind }}|
+FirstSection: {{ .FirstSection.Kind }}|
+InSection: {{ .InSection $home.Section }}|{{ $home.InSection . }}
+Sections: {{ len .Sections }}|
+Page: {{ .Page.RelPermalink }}|
+Data: {{ len .Data }}|
+
+`,
+ )
+ b.Build(BuildCfg{})
+
+ // Note: We currently have only 1 404 page. One might think that we should have
+ // multiple, to follow the Custom Output scheme, but I don't see how that would work
+ // right now.
+ b.AssertFileContent("public/404.html", `
+
+ 404:
+Parent: home
+IsAncestor: false/true
+IsDescendant: true/false
+CurrentSection: home|
+FirstSection: home|
+InSection: false|true
+Sections: 0|
+Page: /404.html|
+Data: 1|
+
+`)
+}
+
+func Test404WithBase(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplates("404.html", `{{ define "main" }}
+Page not found
+{{ end }}`,
+ "baseof.html", `Base: {{ block "main" . }}{{ end }}`).WithContent("page.md", ``)
+
+ b.Build(BuildCfg{})
+
+ // Note: We currently have only 1 404 page. One might think that we should have
+ // multiple, to follow the Custom Output scheme, but I don't see how that would work
+ // right now.
+ b.AssertFileContent("public/404.html", `
+Base:
+Page not found`)
+}
diff --git a/hugolib/alias.go b/hugolib/alias.go
new file mode 100644
index 000000000..2609cd6bb
--- /dev/null
+++ b/hugolib/alias.go
@@ -0,0 +1,173 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "path"
+ "path/filepath"
+ "runtime"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/publisher"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+type aliasHandler struct {
+ t tpl.TemplateHandler
+ log loggers.Logger
+ allowRoot bool
+}
+
+func newAliasHandler(t tpl.TemplateHandler, l loggers.Logger, allowRoot bool) aliasHandler {
+ return aliasHandler{t, l, allowRoot}
+}
+
+type aliasPage struct {
+ Permalink string
+ page.Page
+}
+
+func (a aliasHandler) renderAlias(permalink string, p page.Page) (io.Reader, error) {
+ var templ tpl.Template
+ var found bool
+
+ templ, found = a.t.Lookup("alias.html")
+ if !found {
+ // TODO(bep) consolidate
+ templ, found = a.t.Lookup("_internal/alias.html")
+ if !found {
+ return nil, errors.New("no alias template found")
+ }
+ }
+
+ data := aliasPage{
+ permalink,
+ p,
+ }
+
+ buffer := new(bytes.Buffer)
+ err := a.t.Execute(templ, buffer, data)
+ if err != nil {
+ return nil, err
+ }
+ return buffer, nil
+}
+
+func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) {
+ return s.publishDestAlias(false, path, permalink, outputFormat, p)
+}
+
+func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) {
+ handler := newAliasHandler(s.Tmpl(), s.Log, allowRoot)
+
+ s.Log.Debugln("creating alias:", path, "redirecting to", permalink)
+
+ targetPath, err := handler.targetPathAlias(path)
+ if err != nil {
+ return err
+ }
+
+ aliasContent, err := handler.renderAlias(permalink, p)
+ if err != nil {
+ return err
+ }
+
+ pd := publisher.Descriptor{
+ Src: aliasContent,
+ TargetPath: targetPath,
+ StatCounter: &s.PathSpec.ProcessingStats.Aliases,
+ OutputFormat: outputFormat,
+ }
+
+ if s.Info.relativeURLs || s.Info.canonifyURLs {
+ pd.AbsURLPath = s.absURLPath(targetPath)
+ }
+
+ return s.publisher.Publish(pd)
+}
+
+func (a aliasHandler) targetPathAlias(src string) (string, error) {
+ originalAlias := src
+ if len(src) <= 0 {
+ return "", fmt.Errorf("alias \"\" is an empty string")
+ }
+
+ alias := path.Clean(filepath.ToSlash(src))
+
+ if !a.allowRoot && alias == "/" {
+ return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias)
+ }
+
+ components := strings.Split(alias, "/")
+
+ // Validate against directory traversal
+ if components[0] == ".." {
+ return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias)
+ }
+
+ // Handle Windows file and directory naming restrictions
+ // See "Naming Files, Paths, and Namespaces" on MSDN
+ // https://msdn.microsoft.com/en-us/library/aa365247%28v=VS.85%29.aspx?f=255&MSPPError=-2147217396
+ msgs := []string{}
+ reservedNames := []string{"CON", "PRN", "AUX", "NUL", "COM0", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT0", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"}
+
+ if strings.ContainsAny(alias, ":*?\"<>|") {
+ msgs = append(msgs, fmt.Sprintf("Alias \"%s\" contains invalid characters on Windows: : * ? \" < > |", originalAlias))
+ }
+ for _, ch := range alias {
+ if ch < ' ' {
+ msgs = append(msgs, fmt.Sprintf("Alias \"%s\" contains ASCII control code (0x00 to 0x1F), invalid on Windows: : * ? \" < > |", originalAlias))
+ continue
+ }
+ }
+ for _, comp := range components {
+ if strings.HasSuffix(comp, " ") || strings.HasSuffix(comp, ".") {
+ msgs = append(msgs, fmt.Sprintf("Alias \"%s\" contains component with a trailing space or period, problematic on Windows", originalAlias))
+ }
+ for _, r := range reservedNames {
+ if comp == r {
+ msgs = append(msgs, fmt.Sprintf("Alias \"%s\" contains component with reserved name \"%s\" on Windows", originalAlias, r))
+ }
+ }
+ }
+ if len(msgs) > 0 {
+ if runtime.GOOS == "windows" {
+ for _, m := range msgs {
+ a.log.Errorln(m)
+ }
+ return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias)
+ }
+ for _, m := range msgs {
+ a.log.Infoln(m)
+ }
+ }
+
+ // Add the final touch
+ alias = strings.TrimPrefix(alias, "/")
+ if strings.HasSuffix(alias, "/") {
+ alias = alias + "index.html"
+ } else if !strings.HasSuffix(alias, ".html") {
+ alias = alias + "/" + "index.html"
+ }
+
+ return filepath.FromSlash(alias), nil
+}
diff --git a/hugolib/alias_test.go b/hugolib/alias_test.go
new file mode 100644
index 000000000..124c9f4ca
--- /dev/null
+++ b/hugolib/alias_test.go
@@ -0,0 +1,159 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "path/filepath"
+ "runtime"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const pageWithAlias = `---
+title: Has Alias
+aliases: ["/foo/bar/", "rel"]
+---
+For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.
+`
+
+const pageWithAliasMultipleOutputs = `---
+title: Has Alias for HTML and AMP
+aliases: ["/foo/bar/"]
+outputs: ["HTML", "AMP", "JSON"]
+---
+For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.
+`
+
+const (
+ basicTemplate = "<html><body>{{.Content}}</body></html>"
+ aliasTemplate = "<html><body>ALIASTEMPLATE</body></html>"
+)
+
+func TestAlias(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ tests := []struct {
+ fileSuffix string
+ urlPrefix string
+ urlSuffix string
+ settings map[string]any
+ }{
+ {"/index.html", "http://example.com", "/", map[string]any{"baseURL": "http://example.com"}},
+ {"/index.html", "http://example.com/some/path", "/", map[string]any{"baseURL": "http://example.com/some/path"}},
+ {"/index.html", "http://example.com", "/", map[string]any{"baseURL": "http://example.com", "canonifyURLs": true}},
+ {"/index.html", "../..", "/", map[string]any{"relativeURLs": true}},
+ {".html", "", ".html", map[string]any{"uglyURLs": true}},
+ }
+
+ for _, test := range tests {
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFileAndSettings(test.settings).WithContent("blog/page.md", pageWithAlias)
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 1)
+
+ // the real page
+ b.AssertFileContent("public/blog/page"+test.fileSuffix, "For some moments the old man")
+ // the alias redirectors
+ b.AssertFileContent("public/foo/bar"+test.fileSuffix, "<meta http-equiv=\"refresh\" content=\"0; url="+test.urlPrefix+"/blog/page"+test.urlSuffix+"\">")
+ b.AssertFileContent("public/blog/rel"+test.fileSuffix, "<meta http-equiv=\"refresh\" content=\"0; url="+test.urlPrefix+"/blog/page"+test.urlSuffix+"\">")
+ }
+}
+
+func TestAliasMultipleOutputFormats(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithContent("blog/page.md", pageWithAliasMultipleOutputs)
+
+ b.WithTemplates(
+ "_default/single.html", basicTemplate,
+ "_default/single.amp.html", basicTemplate,
+ "_default/single.json", basicTemplate)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ // the real pages
+ b.AssertFileContent("public/blog/page/index.html", "For some moments the old man")
+ b.AssertFileContent("public/amp/blog/page/index.html", "For some moments the old man")
+ b.AssertFileContent("public/blog/page/index.json", "For some moments the old man")
+
+ // the alias redirectors
+ b.AssertFileContent("public/foo/bar/index.html", "<meta http-equiv=\"refresh\" content=\"0; ")
+ b.AssertFileContent("public/amp/foo/bar/index.html", "<meta http-equiv=\"refresh\" content=\"0; ")
+ c.Assert(b.CheckExists("public/foo/bar/index.json"), qt.Equals, false)
+}
+
+func TestAliasTemplate(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithContent("page.md", pageWithAlias).WithTemplatesAdded("alias.html", aliasTemplate)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ // the real page
+ b.AssertFileContent("public/page/index.html", "For some moments the old man")
+ // the alias redirector
+ b.AssertFileContent("public/foo/bar/index.html", "ALIASTEMPLATE")
+}
+
+func TestTargetPathHTMLRedirectAlias(t *testing.T) {
+ h := newAliasHandler(nil, loggers.NewErrorLogger(), false)
+
+ errIsNilForThisOS := runtime.GOOS != "windows"
+
+ tests := []struct {
+ value string
+ expected string
+ errIsNil bool
+ }{
+ {"", "", false},
+ {"s", filepath.FromSlash("s/index.html"), true},
+ {"/", "", false},
+ {"alias 1", filepath.FromSlash("alias 1/index.html"), true},
+ {"alias 2/", filepath.FromSlash("alias 2/index.html"), true},
+ {"alias 3.html", "alias 3.html", true},
+ {"alias4.html", "alias4.html", true},
+ {"/alias 5.html", "alias 5.html", true},
+ {"/трям.html", "трям.html", true},
+ {"../../../../tmp/passwd", "", false},
+ {"/foo/../../../../tmp/passwd", filepath.FromSlash("tmp/passwd/index.html"), true},
+ {"foo/../../../../tmp/passwd", "", false},
+ {"C:\\Windows", filepath.FromSlash("C:\\Windows/index.html"), errIsNilForThisOS},
+ {"/trailing-space /", filepath.FromSlash("trailing-space /index.html"), errIsNilForThisOS},
+ {"/trailing-period./", filepath.FromSlash("trailing-period./index.html"), errIsNilForThisOS},
+ {"/tab\tseparated/", filepath.FromSlash("tab\tseparated/index.html"), errIsNilForThisOS},
+ {"/chrome/?p=help&ctx=keyboard#topic=3227046", filepath.FromSlash("chrome/?p=help&ctx=keyboard#topic=3227046/index.html"), errIsNilForThisOS},
+ {"/LPT1/Printer/", filepath.FromSlash("LPT1/Printer/index.html"), errIsNilForThisOS},
+ }
+
+ for _, test := range tests {
+ path, err := h.targetPathAlias(test.value)
+ if (err == nil) != test.errIsNil {
+ t.Errorf("Expected err == nil => %t, got: %t. err: %s", test.errIsNil, err == nil, err)
+ continue
+ }
+ if err == nil && path != test.expected {
+ t.Errorf("Expected: %q, got: %q", test.expected, path)
+ }
+ }
+}
diff --git a/hugolib/assets/images/sunset.jpg b/hugolib/assets/images/sunset.jpg
new file mode 100644
index 000000000..7d7307bed
--- /dev/null
+++ b/hugolib/assets/images/sunset.jpg
Binary files differ
diff --git a/hugolib/breaking_changes_test.go b/hugolib/breaking_changes_test.go
new file mode 100644
index 000000000..495baff3e
--- /dev/null
+++ b/hugolib/breaking_changes_test.go
@@ -0,0 +1,130 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func Test073(t *testing.T) {
+ assertDisabledTaxonomyAndTerm := func(b *sitesBuilder, taxonomy, term bool) {
+ b.Assert(b.CheckExists("public/tags/index.html"), qt.Equals, taxonomy)
+ b.Assert(b.CheckExists("public/tags/tag1/index.html"), qt.Equals, term)
+ }
+
+ assertOutputTaxonomyAndTerm := func(b *sitesBuilder, taxonomy, term bool) {
+ b.Assert(b.CheckExists("public/tags/index.json"), qt.Equals, taxonomy)
+ b.Assert(b.CheckExists("public/tags/tag1/index.json"), qt.Equals, term)
+ }
+
+ for _, this := range []struct {
+ name string
+ config string
+ assert func(err error, out string, b *sitesBuilder)
+ }{
+ {
+ "Outputs for both taxonomy and taxonomyTerm",
+ `[outputs]
+ taxonomy = ["JSON"]
+ taxonomyTerm = ["JSON"]
+
+`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.IsNil)
+ assertOutputTaxonomyAndTerm(b, true, true)
+ },
+ },
+ {
+ "Outputs for taxonomyTerm",
+ `[outputs]
+taxonomyTerm = ["JSON"]
+
+`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.IsNil)
+ assertOutputTaxonomyAndTerm(b, true, false)
+ },
+ },
+ {
+ "Outputs for taxonomy only",
+ `[outputs]
+taxonomy = ["JSON"]
+
+`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.Not(qt.IsNil))
+ b.Assert(out, qt.Contains, `ignoreErrors = ["error-output-taxonomy"]`)
+ },
+ },
+ {
+ "Outputs for taxonomy only, ignore error",
+ `
+ignoreErrors = ["error-output-taxonomy"]
+[outputs]
+taxonomy = ["JSON"]
+
+`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.IsNil)
+ assertOutputTaxonomyAndTerm(b, true, false)
+ },
+ },
+ {
+ "Disable both taxonomy and taxonomyTerm",
+ `disableKinds = ["taxonomy", "taxonomyTerm"]`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.IsNil)
+ assertDisabledTaxonomyAndTerm(b, false, false)
+ },
+ },
+ {
+ "Disable only taxonomyTerm",
+ `disableKinds = ["taxonomyTerm"]`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.IsNil)
+ assertDisabledTaxonomyAndTerm(b, false, true)
+ },
+ },
+ {
+ "Disable only taxonomy",
+ `disableKinds = ["taxonomy"]`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.Not(qt.IsNil))
+ b.Assert(out, qt.Contains, `ignoreErrors = ["error-disable-taxonomy"]`)
+ },
+ },
+ {
+ "Disable only taxonomy, ignore error",
+ `disableKinds = ["taxonomy"]
+ ignoreErrors = ["error-disable-taxonomy"]`,
+ func(err error, out string, b *sitesBuilder) {
+ b.Assert(err, qt.IsNil)
+ assertDisabledTaxonomyAndTerm(b, false, true)
+ },
+ },
+ } {
+ t.Run(this.name, func(t *testing.T) {
+ b := newTestSitesBuilder(t).WithConfigFile("toml", this.config)
+ b.WithTemplatesAdded("_default/list.json", "JSON")
+ out, err := captureStdout(func() error {
+ return b.BuildE(BuildCfg{})
+ })
+ fmt.Println(out)
+ this.assert(err, out, b)
+ })
+ }
+}
diff --git a/hugolib/cascade_test.go b/hugolib/cascade_test.go
new file mode 100644
index 000000000..dff2082b6
--- /dev/null
+++ b/hugolib/cascade_test.go
@@ -0,0 +1,630 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "path"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+)
+
+func BenchmarkCascade(b *testing.B) {
+ allLangs := []string{"en", "nn", "nb", "sv", "ab", "aa", "af", "sq", "kw", "da"}
+
+ for i := 1; i <= len(allLangs); i += 2 {
+ langs := allLangs[0:i]
+ b.Run(fmt.Sprintf("langs-%d", len(langs)), func(b *testing.B) {
+ c := qt.New(b)
+ b.StopTimer()
+ builders := make([]*sitesBuilder, b.N)
+ for i := 0; i < b.N; i++ {
+ builders[i] = newCascadeTestBuilder(b, langs)
+ }
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ builder := builders[i]
+ err := builder.BuildE(BuildCfg{})
+ c.Assert(err, qt.IsNil)
+ first := builder.H.Sites[0]
+ c.Assert(first, qt.Not(qt.IsNil))
+ }
+ })
+ }
+}
+
+func BenchmarkCascadeTarget(b *testing.B) {
+ files := `
+-- content/_index.md --
+background = 'yosemite.jpg'
+[cascade._target]
+kind = '{section,term}'
+-- content/posts/_index.md --
+-- content/posts/funny/_index.md --
+`
+
+ for i := 1; i < 100; i++ {
+ files += fmt.Sprintf("\n-- content/posts/p%d.md --\n", i+1)
+ }
+
+ for i := 1; i < 100; i++ {
+ files += fmt.Sprintf("\n-- content/posts/funny/pf%d.md --\n", i+1)
+ }
+
+ b.Run("Kind", func(b *testing.B) {
+ cfg := IntegrationTestConfig{
+ T: b,
+ TxtarString: files,
+ }
+ builders := make([]*IntegrationTestBuilder, b.N)
+
+ for i := range builders {
+ builders[i] = NewIntegrationTestBuilder(cfg)
+ }
+
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ builders[i].Build()
+ }
+ })
+}
+
+func TestCascadeConfig(t *testing.T) {
+ c := qt.New(t)
+
+ // Make sure the cascade from config gets applied even if we're not
+ // having a content file for the home page.
+ for _, withHomeContent := range []bool{true, false} {
+ testName := "Home content file"
+ if !withHomeContent {
+ testName = "No home content file"
+ }
+ c.Run(testName, func(c *qt.C) {
+ b := newTestSitesBuilder(c)
+
+ b.WithConfigFile("toml", `
+baseURL="https://example.org"
+
+[cascade]
+img1 = "img1-config.jpg"
+imgconfig = "img-config.jpg"
+
+`)
+
+ if withHomeContent {
+ b.WithContent("_index.md", `
+---
+title: "Home"
+cascade:
+ img1: "img1-home.jpg"
+ img2: "img2-home.jpg"
+---
+`)
+ }
+
+ b.WithContent("p1.md", ``)
+
+ b.Build(BuildCfg{})
+
+ p1 := b.H.Sites[0].getPage("p1")
+
+ if withHomeContent {
+ b.Assert(p1.Params(), qt.DeepEquals, maps.Params{
+ "imgconfig": "img-config.jpg",
+ "draft": bool(false),
+ "iscjklanguage": bool(false),
+ "img1": "img1-home.jpg",
+ "img2": "img2-home.jpg",
+ })
+ } else {
+ b.Assert(p1.Params(), qt.DeepEquals, maps.Params{
+ "img1": "img1-config.jpg",
+ "imgconfig": "img-config.jpg",
+ "draft": bool(false),
+ "iscjklanguage": bool(false),
+ })
+ }
+ })
+
+ }
+}
+
+func TestCascade(t *testing.T) {
+ allLangs := []string{"en", "nn", "nb", "sv"}
+
+ langs := allLangs[:3]
+
+ t.Run(fmt.Sprintf("langs-%d", len(langs)), func(t *testing.T) {
+ b := newCascadeTestBuilder(t, langs)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+12|term|categories/cool/_index.md|Cascade Category|cat.png|categories|HTML-|
+12|term|categories/catsect1|catsect1|cat.png|categories|HTML-|
+12|term|categories/funny|funny|cat.png|categories|HTML-|
+12|taxonomy|categories/_index.md|My Categories|cat.png|categories|HTML-|
+32|term|categories/sad/_index.md|Cascade Category|sad.png|categories|HTML-|
+42|term|tags/blue|blue|home.png|tags|HTML-|
+42|taxonomy|tags|Cascade Home|home.png|tags|HTML-|
+42|section|sectnocontent|Cascade Home|home.png|sectnocontent|HTML-|
+42|section|sect3|Cascade Home|home.png|sect3|HTML-|
+42|page|bundle1/index.md|Cascade Home|home.png|page|HTML-|
+42|page|p2.md|Cascade Home|home.png|page|HTML-|
+42|page|sect2/p2.md|Cascade Home|home.png|sect2|HTML-|
+42|page|sect3/nofrontmatter.md|Cascade Home|home.png|sect3|HTML-|
+42|page|sect3/p1.md|Cascade Home|home.png|sect3|HTML-|
+42|page|sectnocontent/p1.md|Cascade Home|home.png|sectnocontent|HTML-|
+42|section|sectnofrontmatter/_index.md|Cascade Home|home.png|sectnofrontmatter|HTML-|
+42|term|tags/green|green|home.png|tags|HTML-|
+42|home|_index.md|Home|home.png|page|HTML-|
+42|page|p1.md|p1|home.png|page|HTML-|
+42|section|sect1/_index.md|Sect1|sect1.png|stype|HTML-|
+42|section|sect1/s1_2/_index.md|Sect1_2|sect1.png|stype|HTML-|
+42|page|sect1/s1_2/p1.md|Sect1_2_p1|sect1.png|stype|HTML-|
+42|page|sect1/s1_2/p2.md|Sect1_2_p2|sect1.png|stype|HTML-|
+42|section|sect2/_index.md|Sect2|home.png|sect2|HTML-|
+42|page|sect2/p1.md|Sect2_p1|home.png|sect2|HTML-|
+52|page|sect4/p1.md|Cascade Home|home.png|sect4|RSS-|
+52|section|sect4/_index.md|Sect4|home.png|sect4|RSS-|
+`)
+
+ // Check that type set in cascade gets the correct layout.
+ b.AssertFileContent("public/sect1/index.html", `stype list: Sect1`)
+ b.AssertFileContent("public/sect1/s1_2/p2/index.html", `stype single: Sect1_2_p2`)
+
+ // Check output formats set in cascade
+ b.AssertFileContent("public/sect4/index.xml", `<link>https://example.org/sect4/index.xml</link>`)
+ b.AssertFileContent("public/sect4/p1/index.xml", `<link>https://example.org/sect4/p1/index.xml</link>`)
+ b.C.Assert(b.CheckExists("public/sect2/index.xml"), qt.Equals, false)
+
+ // Check cascade into bundled page
+ b.AssertFileContent("public/bundle1/index.html", `Resources: bp1.md|home.png|`)
+ })
+}
+
+func TestCascadeEdit(t *testing.T) {
+ p1Content := `---
+title: P1
+---
+`
+
+ indexContentNoCascade := `
+---
+title: Home
+---
+`
+
+ indexContentCascade := `
+---
+title: Section
+cascade:
+ banner: post.jpg
+ layout: postlayout
+ type: posttype
+---
+`
+
+ layout := `Banner: {{ .Params.banner }}|Layout: {{ .Layout }}|Type: {{ .Type }}|Content: {{ .Content }}`
+
+ newSite := func(t *testing.T, cascade bool) *sitesBuilder {
+ b := newTestSitesBuilder(t).Running()
+ b.WithTemplates("_default/single.html", layout)
+ b.WithTemplates("_default/list.html", layout)
+ if cascade {
+ b.WithContent("post/_index.md", indexContentCascade)
+ } else {
+ b.WithContent("post/_index.md", indexContentNoCascade)
+ }
+ b.WithContent("post/dir/p1.md", p1Content)
+
+ return b
+ }
+
+ t.Run("Edit descendant", func(t *testing.T) {
+ t.Parallel()
+
+ b := newSite(t, true)
+ b.Build(BuildCfg{})
+
+ assert := func() {
+ b.Helper()
+ b.AssertFileContent("public/post/dir/p1/index.html",
+ `Banner: post.jpg|`,
+ `Layout: postlayout`,
+ `Type: posttype`,
+ )
+ }
+
+ assert()
+
+ b.EditFiles("content/post/dir/p1.md", p1Content+"\ncontent edit")
+ b.Build(BuildCfg{})
+
+ assert()
+ b.AssertFileContent("public/post/dir/p1/index.html",
+ `content edit
+Banner: post.jpg`,
+ )
+ })
+
+ t.Run("Edit ancestor", func(t *testing.T) {
+ t.Parallel()
+
+ b := newSite(t, true)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|Content:`)
+
+ b.EditFiles("content/post/_index.md", strings.Replace(indexContentCascade, "post.jpg", "edit.jpg", 1))
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/index.html", `Banner: edit.jpg|Layout: postlayout|Type: posttype|`)
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: edit.jpg|Layout: postlayout|Type: posttype|`)
+ })
+
+ t.Run("Edit ancestor, add cascade", func(t *testing.T) {
+ t.Parallel()
+
+ b := newSite(t, true)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg`)
+
+ b.EditFiles("content/post/_index.md", indexContentCascade)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|`)
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|`)
+ })
+
+ t.Run("Edit ancestor, remove cascade", func(t *testing.T) {
+ t.Parallel()
+
+ b := newSite(t, false)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: |Layout: |`)
+
+ b.EditFiles("content/post/_index.md", indexContentNoCascade)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/index.html", `Banner: |Layout: |Type: post|`)
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: |Layout: |`)
+ })
+
+ t.Run("Edit ancestor, content only", func(t *testing.T) {
+ t.Parallel()
+
+ b := newSite(t, true)
+ b.Build(BuildCfg{})
+
+ b.EditFiles("content/post/_index.md", indexContentCascade+"\ncontent edit")
+
+ counters := &testCounters{}
+ b.Build(BuildCfg{testCounters: counters})
+ // As we only changed the content, not the cascade front matter,
+ // only the home page is re-rendered.
+ b.Assert(int(counters.contentRenderCounter), qt.Equals, 1)
+
+ b.AssertFileContent("public/post/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|Content: <p>content edit</p>`)
+ b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|`)
+ })
+}
+
+func newCascadeTestBuilder(t testing.TB, langs []string) *sitesBuilder {
+ p := func(m map[string]any) string {
+ var yamlStr string
+
+ if len(m) > 0 {
+ var b bytes.Buffer
+
+ parser.InterfaceToConfig(m, metadecoders.YAML, &b)
+ yamlStr = b.String()
+ }
+
+ metaStr := "---\n" + yamlStr + "\n---"
+
+ return metaStr
+ }
+
+ createLangConfig := func(lang string) string {
+ const langEntry = `
+[languages.%s]
+`
+ return fmt.Sprintf(langEntry, lang)
+ }
+
+ createMount := func(lang string) string {
+ const mountsTempl = `
+[[module.mounts]]
+source="content/%s"
+target="content"
+lang="%s"
+`
+ return fmt.Sprintf(mountsTempl, lang, lang)
+ }
+
+ config := `
+baseURL = "https://example.org"
+defaultContentLanguage = "en"
+defaultContentLanguageInSubDir = false
+
+[languages]`
+ for _, lang := range langs {
+ config += createLangConfig(lang)
+ }
+
+ config += "\n\n[module]\n"
+ for _, lang := range langs {
+ config += createMount(lang)
+ }
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", config)
+
+ createContentFiles := func(lang string) {
+ withContent := func(filenameContent ...string) {
+ for i := 0; i < len(filenameContent); i += 2 {
+ b.WithContent(path.Join(lang, filenameContent[i]), filenameContent[i+1])
+ }
+ }
+
+ withContent(
+ "_index.md", p(map[string]any{
+ "title": "Home",
+ "cascade": map[string]any{
+ "title": "Cascade Home",
+ "ICoN": "home.png",
+ "outputs": []string{"HTML"},
+ "weight": 42,
+ },
+ }),
+ "p1.md", p(map[string]any{
+ "title": "p1",
+ }),
+ "p2.md", p(map[string]any{}),
+ "sect1/_index.md", p(map[string]any{
+ "title": "Sect1",
+ "type": "stype",
+ "cascade": map[string]any{
+ "title": "Cascade Sect1",
+ "icon": "sect1.png",
+ "type": "stype",
+ "categories": []string{"catsect1"},
+ },
+ }),
+ "sect1/s1_2/_index.md", p(map[string]any{
+ "title": "Sect1_2",
+ }),
+ "sect1/s1_2/p1.md", p(map[string]any{
+ "title": "Sect1_2_p1",
+ }),
+ "sect1/s1_2/p2.md", p(map[string]any{
+ "title": "Sect1_2_p2",
+ }),
+ "sect2/_index.md", p(map[string]any{
+ "title": "Sect2",
+ }),
+ "sect2/p1.md", p(map[string]any{
+ "title": "Sect2_p1",
+ "categories": []string{"cool", "funny", "sad"},
+ "tags": []string{"blue", "green"},
+ }),
+ "sect2/p2.md", p(map[string]any{}),
+ "sect3/p1.md", p(map[string]any{}),
+
+ // No front matter, see #6855
+ "sect3/nofrontmatter.md", `**Hello**`,
+ "sectnocontent/p1.md", `**Hello**`,
+ "sectnofrontmatter/_index.md", `**Hello**`,
+
+ "sect4/_index.md", p(map[string]any{
+ "title": "Sect4",
+ "cascade": map[string]any{
+ "weight": 52,
+ "outputs": []string{"RSS"},
+ },
+ }),
+ "sect4/p1.md", p(map[string]any{}),
+ "p2.md", p(map[string]any{}),
+ "bundle1/index.md", p(map[string]any{}),
+ "bundle1/bp1.md", p(map[string]any{}),
+ "categories/_index.md", p(map[string]any{
+ "title": "My Categories",
+ "cascade": map[string]any{
+ "title": "Cascade Category",
+ "icoN": "cat.png",
+ "weight": 12,
+ },
+ }),
+ "categories/cool/_index.md", p(map[string]any{}),
+ "categories/sad/_index.md", p(map[string]any{
+ "cascade": map[string]any{
+ "icon": "sad.png",
+ "weight": 32,
+ },
+ }),
+ )
+ }
+
+ createContentFiles("en")
+
+ b.WithTemplates("index.html", `
+
+{{ range .Site.Pages }}
+{{- .Weight }}|{{ .Kind }}|{{ path.Join .Path }}|{{ .Title }}|{{ .Params.icon }}|{{ .Type }}|{{ range .OutputFormats }}{{ .Name }}-{{ end }}|
+{{ end }}
+`,
+
+ "_default/single.html", "default single: {{ .Title }}|{{ .RelPermalink }}|{{ .Content }}|Resources: {{ range .Resources }}{{ .Name }}|{{ .Params.icon }}|{{ .Content }}{{ end }}",
+ "_default/list.html", "default list: {{ .Title }}",
+ "stype/single.html", "stype single: {{ .Title }}|{{ .RelPermalink }}|{{ .Content }}",
+ "stype/list.html", "stype list: {{ .Title }}",
+ )
+
+ return b
+}
+
+func TestCascadeTarget(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ newBuilder := func(c *qt.C) *sitesBuilder {
+ b := newTestSitesBuilder(c)
+
+ b.WithTemplates("index.html", `
+{{ $p1 := site.GetPage "s1/p1" }}
+{{ $s1 := site.GetPage "s1" }}
+
+P1|p1:{{ $p1.Params.p1 }}|p2:{{ $p1.Params.p2 }}|
+S1|p1:{{ $s1.Params.p1 }}|p2:{{ $s1.Params.p2 }}|
+`)
+ b.WithContent("s1/_index.md", "---\ntitle: s1 section\n---")
+ b.WithContent("s1/p1/index.md", "---\ntitle: p1\n---")
+ b.WithContent("s1/p2/index.md", "---\ntitle: p2\n---")
+ b.WithContent("s2/p1/index.md", "---\ntitle: p1_2\n---")
+
+ return b
+ }
+
+ c.Run("slice", func(c *qt.C) {
+ b := newBuilder(c)
+ b.WithContent("_index.md", `+++
+title = "Home"
+[[cascade]]
+p1 = "p1"
+[[cascade]]
+p2 = "p2"
++++
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "P1|p1:p1|p2:p2")
+ })
+
+ c.Run("slice with _target", func(c *qt.C) {
+ b := newBuilder(c)
+
+ b.WithContent("_index.md", `+++
+title = "Home"
+[[cascade]]
+p1 = "p1"
+[cascade._target]
+path="**p1**"
+[[cascade]]
+p2 = "p2"
+[cascade._target]
+kind="section"
++++
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+P1|p1:p1|p2:|
+S1|p1:|p2:p2|
+`)
+ })
+
+ c.Run("slice with environment _target", func(c *qt.C) {
+ b := newBuilder(c)
+
+ b.WithContent("_index.md", `+++
+title = "Home"
+[[cascade]]
+p1 = "p1"
+[cascade._target]
+path="**p1**"
+environment="testing"
+[[cascade]]
+p2 = "p2"
+[cascade._target]
+kind="section"
+environment="production"
++++
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+P1|p1:|p2:|
+S1|p1:|p2:p2|
+`)
+ })
+
+ c.Run("slice with yaml _target", func(c *qt.C) {
+ b := newBuilder(c)
+
+ b.WithContent("_index.md", `---
+title: "Home"
+cascade:
+- p1: p1
+ _target:
+ path: "**p1**"
+- p2: p2
+ _target:
+ kind: "section"
+---
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+P1|p1:p1|p2:|
+S1|p1:|p2:p2|
+`)
+ })
+
+ c.Run("slice with json _target", func(c *qt.C) {
+ b := newBuilder(c)
+
+ b.WithContent("_index.md", `{
+"title": "Home",
+"cascade": [
+ {
+ "p1": "p1",
+ "_target": {
+ "path": "**p1**"
+ }
+ },{
+ "p2": "p2",
+ "_target": {
+ "kind": "section"
+ }
+ }
+]
+}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+ P1|p1:p1|p2:|
+ S1|p1:|p2:p2|
+ `)
+ })
+}
diff --git a/hugolib/codeowners.go b/hugolib/codeowners.go
new file mode 100644
index 000000000..17e956981
--- /dev/null
+++ b/hugolib/codeowners.go
@@ -0,0 +1,69 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "io"
+ "os"
+ "path"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/hairyhenderson/go-codeowners"
+ "github.com/spf13/afero"
+)
+
+var afs = afero.NewOsFs()
+
+func findCodeOwnersFile(dir string) (io.Reader, error) {
+ for _, p := range []string{".", "docs", ".github", ".gitlab"} {
+ f := path.Join(dir, p, "CODEOWNERS")
+
+ _, err := afs.Stat(f)
+ if err != nil {
+ if os.IsNotExist(err) {
+ continue
+ }
+ return nil, err
+ }
+
+ return afs.Open(f)
+ }
+
+ return nil, nil
+}
+
+type codeownerInfo struct {
+ owners *codeowners.Codeowners
+}
+
+func (c *codeownerInfo) forPage(p page.Page) []string {
+ return c.owners.Owners(p.File().Filename())
+}
+
+func newCodeOwners(cfg config.Provider) (*codeownerInfo, error) {
+ workingDir := cfg.GetString("workingDir")
+
+ r, err := findCodeOwnersFile(workingDir)
+ if err != nil || r == nil {
+ return nil, err
+ }
+
+ owners, err := codeowners.FromReader(r, workingDir)
+ if err != nil {
+ return nil, err
+ }
+
+ return &codeownerInfo{owners: owners}, nil
+}
diff --git a/hugolib/collections.go b/hugolib/collections.go
new file mode 100644
index 000000000..898d2ba12
--- /dev/null
+++ b/hugolib/collections.go
@@ -0,0 +1,46 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+var (
+ _ collections.Grouper = (*pageState)(nil)
+ _ collections.Slicer = (*pageState)(nil)
+)
+
+// collections.Slicer implementations below. We keep these bridge implementations
+// here as it makes it easier to get an idea of "type coverage". These
+// implementations have no value on their own.
+
+// Slice is for internal use.
+func (p *pageState) Slice(items any) (any, error) {
+ return page.ToPages(items)
+}
+
+// collections.Grouper implementations below
+
+// Group creates a PageGroup from a key and a Pages object
+// This method is not meant for external use. It got its non-typed arguments to satisfy
+// a very generic interface in the tpl package.
+func (p *pageState) Group(key any, in any) (any, error) {
+ pages, err := page.ToPages(in)
+ if err != nil {
+ return nil, err
+ }
+ return page.PageGroup{Key: key, Pages: pages}, nil
+}
diff --git a/hugolib/collections_test.go b/hugolib/collections_test.go
new file mode 100644
index 000000000..6925d41cd
--- /dev/null
+++ b/hugolib/collections_test.go
@@ -0,0 +1,217 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestGroupFunc(t *testing.T) {
+ c := qt.New(t)
+
+ pageContent := `
+---
+title: "Page"
+---
+
+`
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().
+ WithContent("page1.md", pageContent, "page2.md", pageContent).
+ WithTemplatesAdded("index.html", `
+{{ $cool := .Site.RegularPages | group "cool" }}
+{{ $cool.Key }}: {{ len $cool.Pages }}
+
+`)
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 2)
+
+ b.AssertFileContent("public/index.html", "cool: 2")
+}
+
+func TestSliceFunc(t *testing.T) {
+ c := qt.New(t)
+
+ pageContent := `
+---
+title: "Page"
+tags: ["blue", "green"]
+tags_weight: %d
+---
+
+`
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().
+ WithContent("page1.md", fmt.Sprintf(pageContent, 10), "page2.md", fmt.Sprintf(pageContent, 20)).
+ WithTemplatesAdded("index.html", `
+{{ $cool := first 1 .Site.RegularPages | group "cool" }}
+{{ $blue := after 1 .Site.RegularPages | group "blue" }}
+{{ $weightedPages := index (index .Site.Taxonomies "tags") "blue" }}
+
+{{ $p1 := index .Site.RegularPages 0 }}{{ $p2 := index .Site.RegularPages 1 }}
+{{ $wp1 := index $weightedPages 0 }}{{ $wp2 := index $weightedPages 1 }}
+
+{{ $pages := slice $p1 $p2 }}
+{{ $pageGroups := slice $cool $blue }}
+{{ $weighted := slice $wp1 $wp2 }}
+
+{{ printf "pages:%d:%T:%v/%v" (len $pages) $pages (index $pages 0) (index $pages 1) }}
+{{ printf "pageGroups:%d:%T:%v/%v" (len $pageGroups) $pageGroups (index (index $pageGroups 0).Pages 0) (index (index $pageGroups 1).Pages 0)}}
+{{ printf "weightedPages:%d::%T:%v" (len $weighted) $weighted $weighted | safeHTML }}
+
+`)
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 2)
+
+ b.AssertFileContent("public/index.html",
+ "pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)",
+ "pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)",
+ `weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`)
+}
+
+func TestUnionFunc(t *testing.T) {
+ c := qt.New(t)
+
+ pageContent := `
+---
+title: "Page"
+tags: ["blue", "green"]
+tags_weight: %d
+---
+
+`
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().
+ WithContent("page1.md", fmt.Sprintf(pageContent, 10), "page2.md", fmt.Sprintf(pageContent, 20),
+ "page3.md", fmt.Sprintf(pageContent, 30)).
+ WithTemplatesAdded("index.html", `
+{{ $unionPages := first 2 .Site.RegularPages | union .Site.RegularPages }}
+{{ $unionWeightedPages := .Site.Taxonomies.tags.blue | union .Site.Taxonomies.tags.green }}
+{{ printf "unionPages: %T %d" $unionPages (len $unionPages) }}
+{{ printf "unionWeightedPages: %T %d" $unionWeightedPages (len $unionWeightedPages) }}
+`)
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 3)
+
+ b.AssertFileContent("public/index.html",
+ "unionPages: page.Pages 3",
+ "unionWeightedPages: page.WeightedPages 6")
+}
+
+func TestCollectionsFuncs(t *testing.T) {
+ c := qt.New(t)
+
+ pageContent := `
+---
+title: "Page %d"
+tags: ["blue", "green"]
+tags_weight: %d
+---
+
+`
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().
+ WithContent("page1.md", fmt.Sprintf(pageContent, 10, 10), "page2.md", fmt.Sprintf(pageContent, 20, 20),
+ "page3.md", fmt.Sprintf(pageContent, 30, 30)).
+ WithTemplatesAdded("index.html", `
+{{ $uniqPages := first 2 .Site.RegularPages | append .Site.RegularPages | uniq }}
+{{ $inTrue := in .Site.RegularPages (index .Site.RegularPages 1) }}
+{{ $inFalse := in .Site.RegularPages (.Site.Home) }}
+
+{{ printf "uniqPages: %T %d" $uniqPages (len $uniqPages) }}
+{{ printf "inTrue: %t" $inTrue }}
+{{ printf "inFalse: %t" $inFalse }}
+`)
+
+ b.WithTemplatesAdded("_default/single.html", `
+{{ $related := .Site.RegularPages.Related . }}
+{{ $symdiff := $related | symdiff .Site.RegularPages }}
+Related: {{ range $related }}{{ .RelPermalink }}|{{ end }}
+Symdiff: {{ range $symdiff }}{{ .RelPermalink }}|{{ end }}
+`)
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 3)
+
+ b.AssertFileContent("public/index.html",
+ "uniqPages: page.Pages 3",
+ "inTrue: true",
+ "inFalse: false",
+ )
+
+ b.AssertFileContent("public/page1/index.html", `Related: /page2/|/page3/|`, `Symdiff: /page1/|`)
+}
+
+func TestAppendFunc(t *testing.T) {
+ c := qt.New(t)
+
+ pageContent := `
+---
+title: "Page"
+tags: ["blue", "green"]
+tags_weight: %d
+---
+
+`
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().
+ WithContent("page1.md", fmt.Sprintf(pageContent, 10), "page2.md", fmt.Sprintf(pageContent, 20)).
+ WithTemplatesAdded("index.html", `
+{{ $p1 := index .Site.RegularPages 0 }}{{ $p2 := index .Site.RegularPages 1 }}
+
+{{ $pages := slice }}
+
+{{ if true }}
+ {{ $pages = $pages | append $p2 $p1 }}
+{{ end }}
+{{ $appendPages := .Site.Pages | append .Site.RegularPages }}
+{{ $appendStrings := slice "a" "b" | append "c" "d" "e" }}
+{{ $appendStringsSlice := slice "a" "b" "c" | append (slice "c" "d") }}
+
+{{ printf "pages:%d:%T:%v/%v" (len $pages) $pages (index $pages 0) (index $pages 1) }}
+{{ printf "appendPages:%d:%T:%v/%v" (len $appendPages) $appendPages (index $appendPages 0).Kind (index $appendPages 8).Kind }}
+{{ printf "appendStrings:%T:%v" $appendStrings $appendStrings }}
+{{ printf "appendStringsSlice:%T:%v" $appendStringsSlice $appendStringsSlice }}
+
+{{/* add some slightly related funcs to check what types we get */}}
+{{ $u := $appendStrings | union $appendStringsSlice }}
+{{ $i := $appendStrings | intersect $appendStringsSlice }}
+{{ printf "union:%T:%v" $u $u }}
+{{ printf "intersect:%T:%v" $i $i }}
+
+`)
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 2)
+
+ b.AssertFileContent("public/index.html",
+ "pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)",
+ "appendPages:9:page.Pages:home/page",
+ "appendStrings:[]string:[a b c d e]",
+ "appendStringsSlice:[]string:[a b c c d]",
+ "union:[]string:[a b c d e]",
+ "intersect:[]string:[a b c d]",
+ )
+}
diff --git a/hugolib/config.go b/hugolib/config.go
new file mode 100644
index 000000000..8444d15d9
--- /dev/null
+++ b/hugolib/config.go
@@ -0,0 +1,517 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gohugoio/hugo/common/maps"
+ cpaths "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gobwas/glob"
+ hglob "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/config/privacy"
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/config/services"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/afero"
+)
+
+// ErrNoConfigFile is the sentinel returned when neither a config file nor a
+// config directory can be located for the site.
+// NOTE(review): the message is capitalized and multi-line, contrary to Go
+// error-string convention; kept as-is since it is printed verbatim to users.
+var ErrNoConfigFile = errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\n Run `hugo help new` for details.\n")
+
+// LoadConfig loads the Hugo configuration — config file(s), an optional
+// config directory, theme/module configuration and OS environment overrides —
+// into a new config.Provider and then adds a set of defaults.
+// It returns the provider, the config filenames that were read (so callers
+// can watch them for changes), and any error. Note that a partial provider is
+// returned even on most errors, for commands that can work with one.
+func LoadConfig(d ConfigSourceDescriptor, doWithConfig ...func(cfg config.Provider) error) (config.Provider, []string, error) {
+ if d.Environment == "" {
+ d.Environment = hugo.EnvironmentProduction
+ }
+
+ // Tests set Environ explicitly; outside tests fall back to the process env.
+ if len(d.Environ) == 0 && !hugo.IsRunningAsTest() {
+ d.Environ = os.Environ()
+ }
+
+ var configFiles []string
+
+ l := configLoader{ConfigSourceDescriptor: d, cfg: config.New()}
+ // Make sure we always do this, even in error situations,
+ // as we have commands (e.g. "hugo mod init") that will
+ // use a partial configuration to do its job.
+ defer l.deleteMergeStrategies()
+
+ for _, name := range d.configFilenames() {
+ var filename string
+ filename, err := l.loadConfig(name)
+ if err == nil {
+ configFiles = append(configFiles, filename)
+ } else if err != ErrNoConfigFile {
+ // NOTE(review): direct sentinel comparison; errors.Is would be
+ // more robust if loadConfig ever starts wrapping this error.
+ return nil, nil, l.wrapFileError(err, filename)
+ }
+ }
+
+ if d.AbsConfigDir != "" {
+ dcfg, dirnames, err := config.LoadConfigFromDir(l.Fs, d.AbsConfigDir, l.Environment)
+ if err == nil {
+ if len(dirnames) > 0 {
+ // Merge the config-dir settings on top of what we have so far.
+ l.cfg.Set("", dcfg.Get(""))
+ configFiles = append(configFiles, dirnames...)
+ }
+ } else if err != ErrNoConfigFile {
+ if len(dirnames) > 0 {
+ return nil, nil, l.wrapFileError(err, dirnames[0])
+ }
+ return nil, nil, err
+ }
+ }
+
+ if err := l.applyConfigDefaults(); err != nil {
+ return l.cfg, configFiles, err
+ }
+
+ l.cfg.SetDefaultMergeStrategy()
+
+ // We create languages based on the settings, so we need to make sure that
+ // all configuration is loaded/set before doing that.
+ for _, d := range doWithConfig {
+ if err := d(l.cfg); err != nil {
+ return l.cfg, configFiles, err
+ }
+ }
+
+ // Some settings are used before we're done collecting all settings,
+ // so apply OS environment both before and after.
+ if err := l.applyOsEnvOverrides(d.Environ); err != nil {
+ return l.cfg, configFiles, err
+ }
+
+ modulesConfig, err := l.loadModulesConfig()
+ if err != nil {
+ return l.cfg, configFiles, err
+ }
+
+ // Need to run these after the modules are loaded, but before
+ // they are finalized.
+ collectHook := func(m *modules.ModulesConfig) error {
+ // We don't need the merge strategy configuration anymore,
+ // remove it so it doesn't accidentally show up in other settings.
+ l.deleteMergeStrategies()
+
+ if err := l.loadLanguageSettings(nil); err != nil {
+ return err
+ }
+
+ mods := m.ActiveModules
+
+ // Apply default project mounts.
+ if err := modules.ApplyProjectConfigDefaults(l.cfg, mods[0]); err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ _, modulesConfigFiles, modulesCollectErr := l.collectModules(modulesConfig, l.cfg, collectHook)
+ // NOTE(review): err is necessarily nil here (it was checked right after
+ // loadModulesConfig above), so this branch is dead code. The collect
+ // error itself is deliberately deferred to the end of the function so
+ // the partial configuration can still be returned to the caller.
+ if err != nil {
+ return l.cfg, configFiles, err
+ }
+
+ configFiles = append(configFiles, modulesConfigFiles...)
+
+ // Second pass of the env overrides, now that module config is merged in.
+ if err := l.applyOsEnvOverrides(d.Environ); err != nil {
+ return l.cfg, configFiles, err
+ }
+
+ if err = l.applyConfigAliases(); err != nil {
+ return l.cfg, configFiles, err
+ }
+
+ if err == nil {
+ err = modulesCollectErr
+ }
+
+ return l.cfg, configFiles, err
+}
+
+// LoadConfigDefault is a convenience method to load the default "config.toml" config.
+// The list of loaded config filenames is discarded.
+func LoadConfigDefault(fs afero.Fs) (config.Provider, error) {
+ v, _, err := LoadConfig(ConfigSourceDescriptor{Fs: fs, Filename: "config.toml"})
+ return v, err
+}
+
+// ConfigSourceDescriptor describes where to find the config (e.g. config.toml etc.).
+type ConfigSourceDescriptor struct {
+ // Fs is the filesystem the configuration is read from.
+ Fs afero.Fs
+ // Logger to use while loading.
+ Logger loggers.Logger
+
+ // Path to the config file to use, e.g. /my/project/config.toml
+ // May also hold a comma-separated list of files (see configFilenames).
+ Filename string
+
+ // The path to the directory to look for configuration. Is used if Filename is not
+ // set or if it is set to a relative filename.
+ Path string
+
+ // The project's working dir. Is used to look for additional theme config.
+ WorkingDir string
+
+ // The (optional) directory for additional configuration files.
+ AbsConfigDir string
+
+ // production, development
+ Environment string
+
+ // Defaults to os.Environ if not set.
+ Environ []string
+}
+
+// configFileDir returns the directory that relative config filenames are
+// resolved against: the explicit Path if set, otherwise the working dir.
+func (d ConfigSourceDescriptor) configFileDir() string {
+ if d.Path != "" {
+ return d.Path
+ }
+ return d.WorkingDir
+}
+
+// configFilenames returns the config names to load. Filename may hold a
+// comma-separated list; when unset, the extensionless "config" is used and
+// loadConfig will probe it against all supported extensions.
+func (d ConfigSourceDescriptor) configFilenames() []string {
+ if d.Filename == "" {
+ return []string{"config"}
+ }
+ return strings.Split(d.Filename, ",")
+}
+
+// SiteConfig represents the strongly typed config exposed in templates as
+// .Site.Config. It is decoded by configLoader.loadSiteConfig.
+type SiteConfig struct {
+ // This contains all privacy related settings that can be used to
+ // make the YouTube template etc. GDPR compliant.
+ Privacy privacy.Config
+
+ // Services contains config for services such as Google Analytics etc.
+ Services services.Config
+}
+
+// configLoader holds the state used while loading and merging configuration.
+type configLoader struct {
+ // cfg accumulates all settings as they are loaded and merged.
+ cfg config.Provider
+ ConfigSourceDescriptor
+}
+
+// applyConfigAliases handles some legacy values: each setting present under
+// its modern key is mirrored to its legacy alias so old code paths keep
+// working (currently "taxonomies" -> "indexes").
+func (l configLoader) applyConfigAliases() error {
+ aliases := []types.KeyValueStr{{Key: "taxonomies", Value: "indexes"}}
+
+ for _, alias := range aliases {
+ if l.cfg.IsSet(alias.Key) {
+ vv := l.cfg.Get(alias.Key)
+ l.cfg.Set(alias.Value, vv)
+ }
+ }
+
+ return nil
+}
+
+// applyConfigDefaults registers the built-in default value for every core
+// setting that has one. They are applied via SetDefaults, so values already
+// set from config files or the environment are expected to take precedence.
+func (l configLoader) applyConfigDefaults() error {
+ defaultSettings := maps.Params{
+ "cleanDestinationDir": false,
+ "watch": false,
+ "resourceDir": "resources",
+ "publishDir": "public",
+ "themesDir": "themes",
+ "buildDrafts": false,
+ "buildFuture": false,
+ "buildExpired": false,
+ "environment": hugo.EnvironmentProduction,
+ "uglyURLs": false,
+ "verbose": false,
+ "ignoreCache": false,
+ "canonifyURLs": false,
+ "relativeURLs": false,
+ "removePathAccents": false,
+ "titleCaseStyle": "AP",
+ "taxonomies": maps.Params{"tag": "tags", "category": "categories"},
+ "permalinks": maps.Params{},
+ "sitemap": maps.Params{"priority": -1, "filename": "sitemap.xml"},
+ "disableLiveReload": false,
+ "pluralizeListTitles": true,
+ "forceSyncStatic": false,
+ "footnoteAnchorPrefix": "",
+ "footnoteReturnLinkContents": "",
+ "newContentEditor": "",
+ "paginate": 10,
+ "paginatePath": "page",
+ "summaryLength": 70,
+ "rssLimit": -1,
+ "sectionPagesMenu": "",
+ "disablePathToLower": false,
+ "hasCJKLanguage": false,
+ "enableEmoji": false,
+ "defaultContentLanguage": "en",
+ "defaultContentLanguageInSubdir": false,
+ "enableMissingTranslationPlaceholders": false,
+ "enableGitInfo": false,
+ "ignoreFiles": make([]string, 0),
+ "disableAliases": false,
+ "debug": false,
+ "disableFastRender": false,
+ "timeout": "30s",
+ "enableInlineShortcodes": false,
+ }
+
+ l.cfg.SetDefaults(defaultSettings)
+
+ return nil
+}
+
+// applyOsEnvOverrides merges HUGO-prefixed OS environment variables into the
+// configuration. The rune immediately following the "HUGO" prefix is treated
+// as the key delimiter (usually "_", but e.g. HUGOxPARAMSxFOO works too),
+// which allows nested keys whose names themselves contain underscores.
+func (l configLoader) applyOsEnvOverrides(environ []string) error {
+ if len(environ) == 0 {
+ return nil
+ }
+
+ const delim = "__env__delim"
+
+ // Extract all that start with the HUGO prefix.
+ // The delimiter is the following rune, usually "_".
+ const hugoEnvPrefix = "HUGO"
+ var hugoEnv []types.KeyValueStr
+ for _, v := range environ {
+ key, val := config.SplitEnvVar(v)
+ if strings.HasPrefix(key, hugoEnvPrefix) {
+ delimiterAndKey := strings.TrimPrefix(key, hugoEnvPrefix)
+ if len(delimiterAndKey) < 2 {
+ continue
+ }
+ // Allow delimiters to be case sensitive.
+ // It turns out there isn't that many allowed special
+ // chars in environment variables when used in Bash and similar,
+ // so variables on the form HUGOxPARAMSxFOO=bar is one option.
+ key := strings.ReplaceAll(delimiterAndKey[1:], delimiterAndKey[:1], delim)
+ key = strings.ToLower(key)
+ hugoEnv = append(hugoEnv, types.KeyValueStr{
+ Key: key,
+ Value: val,
+ })
+
+ }
+ }
+
+ for _, env := range hugoEnv {
+ existing, nestedKey, owner, err := maps.GetNestedParamFn(env.Key, delim, l.cfg.Get)
+ if err != nil {
+ return err
+ }
+
+ if existing != nil {
+ // The key already exists: convert the env string to the same type.
+ val, err := metadecoders.Default.UnmarshalStringTo(env.Value, existing)
+ if err != nil {
+ // Deliberate best-effort: silently skip values that cannot be
+ // converted to the existing type.
+ continue
+ }
+
+ if owner != nil {
+ owner[nestedKey] = val
+ } else {
+ l.cfg.Set(env.Key, val)
+ }
+ } else if nestedKey != "" {
+ // The owner map exists, but the key is new; set it as a string.
+ owner[nestedKey] = env.Value
+ } else {
+ // The container does not exist yet.
+ l.cfg.Set(strings.ReplaceAll(env.Key, delim, "."), env.Value)
+ }
+ }
+
+ return nil
+}
+
+// collectModules resolves all modules/themes, merges their configuration into
+// v1 (honoring each module's merge strategy) and returns the active modules
+// plus the config filenames that should be watched. hookBeforeFinalize runs
+// after the theme configs have been merged but before the module set is
+// finalized.
+func (l configLoader) collectModules(modConfig modules.Config, v1 config.Provider, hookBeforeFinalize func(m *modules.ModulesConfig) error) (modules.Modules, []string, error) {
+ workingDir := l.WorkingDir
+ if workingDir == "" {
+ workingDir = v1.GetString("workingDir")
+ }
+
+ // NOTE(review): this uses l.WorkingDir, not the workingDir fallback
+ // computed just above — confirm whether that is intentional.
+ themesDir := cpaths.AbsPathify(l.WorkingDir, v1.GetString("themesDir"))
+
+ var ignoreVendor glob.Glob
+ if s := v1.GetString("ignoreVendorPaths"); s != "" {
+ ignoreVendor, _ = hglob.GetGlob(hglob.NormalizePath(s))
+ }
+
+ filecacheConfigs, err := filecache.DecodeConfig(l.Fs, v1)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ secConfig, err := security.DecodeConfig(v1)
+ if err != nil {
+ return nil, nil, err
+ }
+ ex := hexec.New(secConfig)
+
+ // Stash the decoded cache config so it isn't recreated later.
+ v1.Set("filecacheConfigs", filecacheConfigs)
+
+ var configFilenames []string
+
+ hook := func(m *modules.ModulesConfig) error {
+ for _, tc := range m.ActiveModules {
+ if len(tc.ConfigFilenames()) > 0 {
+ if tc.Watch() {
+ configFilenames = append(configFilenames, tc.ConfigFilenames()...)
+ }
+
+ // Merge from theme config into v1 based on configured
+ // merge strategy.
+ v1.Merge("", tc.Cfg().Get(""))
+
+ }
+ }
+
+ if hookBeforeFinalize != nil {
+ return hookBeforeFinalize(m)
+ }
+
+ return nil
+ }
+
+ modulesClient := modules.NewClient(modules.ClientConfig{
+ Fs: l.Fs,
+ Logger: l.Logger,
+ Exec: ex,
+ HookBeforeFinalize: hook,
+ WorkingDir: workingDir,
+ ThemesDir: themesDir,
+ Environment: l.Environment,
+ CacheDir: filecacheConfigs.CacheDirModules(),
+ ModuleConfig: modConfig,
+ IgnoreVendor: ignoreVendor,
+ })
+
+ v1.Set("modulesClient", modulesClient)
+
+ // The error from Collect is deliberately returned last, so the partially
+ // collected modules below can still be recorded and used by callers.
+ moduleConfig, err := modulesClient.Collect()
+
+ // Avoid recreating these later.
+ v1.Set("allModules", moduleConfig.ActiveModules)
+
+ if moduleConfig.GoModulesFilename != "" {
+ // We want to watch this for changes and trigger rebuild on version
+ // changes etc.
+ configFilenames = append(configFilenames, moduleConfig.GoModulesFilename)
+ }
+
+ return moduleConfig.ActiveModules, configFilenames, err
+}
+
+// loadConfig reads a single config file into l.cfg. configName may be
+// absolute or relative to configFileDir(), and may omit the extension, in
+// which case every supported config extension is probed in order. It returns
+// the resolved filename, or ErrNoConfigFile if nothing matched.
+func (l configLoader) loadConfig(configName string) (string, error) {
+ baseDir := l.configFileDir()
+ var baseFilename string
+ if filepath.IsAbs(configName) {
+ baseFilename = configName
+ } else {
+ baseFilename = filepath.Join(baseDir, configName)
+ }
+
+ var filename string
+ if cpaths.ExtNoDelimiter(configName) != "" {
+ // Explicit extension: use the file as given, if it exists.
+ exists, _ := helpers.Exists(baseFilename, l.Fs)
+ if exists {
+ filename = baseFilename
+ }
+ } else {
+ // No extension: probe the valid config extensions, first hit wins.
+ for _, ext := range config.ValidConfigFileExtensions {
+ filenameToCheck := baseFilename + "." + ext
+ exists, _ := helpers.Exists(filenameToCheck, l.Fs)
+ if exists {
+ filename = filenameToCheck
+ break
+ }
+ }
+ }
+
+ if filename == "" {
+ return "", ErrNoConfigFile
+ }
+
+ m, err := config.FromFileToMap(l.Fs, filename)
+ if err != nil {
+ return filename, err
+ }
+
+ // Set overwrites keys of the same name, recursively.
+ l.cfg.Set("", m)
+
+ return filename, nil
+}
+
+// deleteMergeStrategies walks all params and removes the merge strategy
+// configuration so it does not leak into user-visible settings.
+func (l configLoader) deleteMergeStrategies() {
+ l.cfg.WalkParams(func(params ...config.KeyParams) bool {
+ params[len(params)-1].Params.DeleteMergeStrategy()
+ // Returning false continues the walk over all params.
+ return false
+ })
+}
+
+// loadLanguageSettings creates the language configuration from l.cfg,
+// carrying over state from oldLangs when rebuilding (nil on first load).
+func (l configLoader) loadLanguageSettings(oldLangs langs.Languages) error {
+ _, err := langs.LoadLanguageSettings(l.cfg, oldLangs)
+ return err
+}
+
+// loadModulesConfig decodes the module/theme configuration from l.cfg.
+func (l configLoader) loadModulesConfig() (modules.Config, error) {
+ modConfig, err := modules.DecodeConfig(l.cfg)
+ if err != nil {
+ return modules.Config{}, err
+ }
+
+ return modConfig, nil
+}
+
+// loadSiteConfig decodes the privacy and services sections of cfg into the
+// strongly typed SiteConfig exposed as .Site.Config.
+func (configLoader) loadSiteConfig(cfg config.Provider) (scfg SiteConfig, err error) {
+ privacyConfig, err := privacy.DecodeConfig(cfg)
+ if err != nil {
+ return
+ }
+
+ servicesConfig, err := services.DecodeConfig(cfg)
+ if err != nil {
+ return
+ }
+
+ scfg.Privacy = privacyConfig
+ scfg.Services = servicesConfig
+
+ return
+}
+
+// wrapFileError decorates err with position information for filename.
+// If err already carries a file error, only its filename is corrected;
+// otherwise a new file error is created for filename.
+func (l configLoader) wrapFileError(err error, filename string) error {
+ fe := herrors.UnwrapFileError(err)
+ if fe != nil {
+ pos := fe.Position()
+ pos.Filename = filename
+ fe.UpdatePosition(pos)
+ return err
+ }
+ return herrors.NewFileErrorFromFile(err, filename, l.Fs, nil)
+}
diff --git a/hugolib/config_test.go b/hugolib/config_test.go
new file mode 100644
index 000000000..882d83c8d
--- /dev/null
+++ b/hugolib/config_test.go
@@ -0,0 +1,784 @@
+// Copyright 2016-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/google/go-cmp/cmp"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/spf13/afero"
+)
+
+// TestLoadConfig exercises basic config loading, including taxonomy overrides
+// from both a single file and a config directory (issue #8763).
+func TestLoadConfig(t *testing.T) {
+
+ c := qt.New(t)
+
+ loadConfig := func(c *qt.C, configContent string, fromDir bool) config.Provider {
+ mm := afero.NewMemMapFs()
+ filename := "config.toml"
+ descriptor := ConfigSourceDescriptor{Fs: mm}
+ if fromDir {
+ filename = filepath.Join("config", "_default", filename)
+ descriptor.AbsConfigDir = "config"
+ }
+ writeToFs(t, mm, filename, configContent)
+ cfg, _, err := LoadConfig(descriptor)
+ c.Assert(err, qt.IsNil)
+ return cfg
+ }
+
+ c.Run("Basic", func(c *qt.C) {
+ c.Parallel()
+ // Add a random config variable for testing.
+ // side = page in Norwegian.
+ cfg := loadConfig(c, `PaginatePath = "side"`, false)
+ c.Assert(cfg.GetString("paginatePath"), qt.Equals, "side")
+ })
+
+ // Issue #8763
+ for _, fromDir := range []bool{false, true} {
+ testName := "Taxonomy overrides"
+ if fromDir {
+ testName += " from dir"
+ }
+ c.Run(testName, func(c *qt.C) {
+ c.Parallel()
+ cfg := loadConfig(c, `[taxonomies]
+appellation = "appellations"
+vigneron = "vignerons"`, fromDir)
+
+ c.Assert(cfg.Get("taxonomies"), qt.DeepEquals, maps.Params{
+ "appellation": "appellations",
+ "vigneron": "vignerons",
+ })
+ })
+ }
+}
+
+// TestLoadMultiConfig verifies that a comma-separated Filename list is loaded
+// in order, with later files overriding earlier ones.
+func TestLoadMultiConfig(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ // Add a random config variable for testing.
+ // side = page in Norwegian.
+ configContentBase := `
+ DontChange = "same"
+ PaginatePath = "side"
+ `
+ configContentSub := `
+ PaginatePath = "top"
+ `
+ mm := afero.NewMemMapFs()
+
+ writeToFs(t, mm, "base.toml", configContentBase)
+
+ writeToFs(t, mm, "override.toml", configContentSub)
+
+ cfg, _, err := LoadConfig(ConfigSourceDescriptor{Fs: mm, Filename: "base.toml,override.toml"})
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(cfg.GetString("paginatePath"), qt.Equals, "top")
+ c.Assert(cfg.GetString("DontChange"), qt.Equals, "same")
+}
+
+// TestLoadConfigFromThemes verifies how project and theme configuration are
+// merged under the default, "shallow" and "none" merge strategies, covering
+// params, media types, output formats, languages, menus and sitemap config
+// (issues #8724, #8866).
+func TestLoadConfigFromThemes(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ mainConfigTemplate := `
+theme = "test-theme"
+baseURL = "https://example.com/"
+
+[frontmatter]
+date = ["date","publishDate"]
+
+[params]
+MERGE_PARAMS
+p1 = "p1 main"
+[params.b]
+b1 = "b1 main"
+[params.b.c]
+bc1 = "bc1 main"
+
+[mediaTypes]
+[mediaTypes."text/m1"]
+suffixes = ["m1main"]
+
+[outputFormats.o1]
+mediaType = "text/m1"
+baseName = "o1main"
+
+[languages]
+[languages.en]
+languageName = "English"
+[languages.en.params]
+pl1 = "p1-en-main"
+[languages.nb]
+languageName = "Norsk"
+[languages.nb.params]
+pl1 = "p1-nb-main"
+
+[[menu.main]]
+name = "menu-main-main"
+
+[[menu.top]]
+name = "menu-top-main"
+
+`
+
+ themeConfig := `
+baseURL = "http://bep.is/"
+
+# Can not be set in theme.
+disableKinds = ["taxonomy", "term"]
+
+# Can not be set in theme.
+[frontmatter]
+expiryDate = ["date"]
+
+[params]
+p1 = "p1 theme"
+p2 = "p2 theme"
+[params.b]
+b1 = "b1 theme"
+b2 = "b2 theme"
+[params.b.c]
+bc1 = "bc1 theme"
+bc2 = "bc2 theme"
+[params.b.c.d]
+bcd1 = "bcd1 theme"
+
+[mediaTypes]
+[mediaTypes."text/m1"]
+suffixes = ["m1theme"]
+[mediaTypes."text/m2"]
+suffixes = ["m2theme"]
+
+[outputFormats.o1]
+mediaType = "text/m1"
+baseName = "o1theme"
+[outputFormats.o2]
+mediaType = "text/m2"
+baseName = "o2theme"
+
+[languages]
+[languages.en]
+languageName = "English2"
+[languages.en.params]
+pl1 = "p1-en-theme"
+pl2 = "p2-en-theme"
+[[languages.en.menu.main]]
+name = "menu-lang-en-main"
+[[languages.en.menu.theme]]
+name = "menu-lang-en-theme"
+[languages.nb]
+languageName = "Norsk2"
+[languages.nb.params]
+pl1 = "p1-nb-theme"
+pl2 = "p2-nb-theme"
+top = "top-nb-theme"
+[[languages.nb.menu.main]]
+name = "menu-lang-nb-main"
+[[languages.nb.menu.theme]]
+name = "menu-lang-nb-theme"
+[[languages.nb.menu.top]]
+name = "menu-lang-nb-top"
+
+[[menu.main]]
+name = "menu-main-theme"
+
+[[menu.thememenu]]
+name = "menu-theme"
+
+`
+
+ buildForConfig := func(t testing.TB, mainConfig, themeConfig string) *sitesBuilder {
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", mainConfig).WithThemeConfigFile("toml", themeConfig)
+ return b.Build(BuildCfg{})
+ }
+
+ buildForStrategy := func(t testing.TB, s string) *sitesBuilder {
+ mainConfig := strings.ReplaceAll(mainConfigTemplate, "MERGE_PARAMS", s)
+ return buildForConfig(t, mainConfig, themeConfig)
+ }
+
+ c.Run("Merge default", func(c *qt.C) {
+ b := buildForStrategy(c, "")
+
+ got := b.Cfg.Get("").(maps.Params)
+
+ // Issue #8866
+ b.Assert(b.Cfg.Get("disableKinds"), qt.IsNil)
+
+ b.Assert(got["params"], qt.DeepEquals, maps.Params{
+ "b": maps.Params{
+ "b1": "b1 main",
+ "c": maps.Params{
+ "bc1": "bc1 main",
+ "bc2": "bc2 theme",
+ "d": maps.Params{"bcd1": string("bcd1 theme")},
+ },
+ "b2": "b2 theme",
+ },
+ "p2": "p2 theme",
+ "p1": "p1 main",
+ })
+
+ b.Assert(got["mediatypes"], qt.DeepEquals, maps.Params{
+ "text/m2": maps.Params{
+ "suffixes": []any{
+ "m2theme",
+ },
+ },
+ "text/m1": maps.Params{
+ "suffixes": []any{
+ "m1main",
+ },
+ },
+ })
+
+ var eq = qt.CmpEquals(
+ cmp.Comparer(func(m1, m2 media.Type) bool {
+ if m1.SubType != m2.SubType {
+ return false
+ }
+ return m1.FirstSuffix == m2.FirstSuffix
+ }),
+ )
+
+ mediaTypes := b.H.Sites[0].mediaTypesConfig
+ m1, _ := mediaTypes.GetByType("text/m1")
+ m2, _ := mediaTypes.GetByType("text/m2")
+
+ b.Assert(got["outputformats"], eq, maps.Params{
+ "o1": maps.Params{
+ "mediatype": m1,
+ "basename": "o1main",
+ },
+ "o2": maps.Params{
+ "basename": "o2theme",
+ "mediatype": m2,
+ },
+ })
+
+ b.Assert(got["languages"], qt.DeepEquals, maps.Params{
+ "en": maps.Params{
+ "languagename": "English",
+ "params": maps.Params{
+ "pl2": "p2-en-theme",
+ "pl1": "p1-en-main",
+ },
+ "menus": maps.Params{
+ "main": []any{
+ map[string]any{
+ "name": "menu-lang-en-main",
+ },
+ },
+ "theme": []any{
+ map[string]any{
+ "name": "menu-lang-en-theme",
+ },
+ },
+ },
+ },
+ "nb": maps.Params{
+ "languagename": "Norsk",
+ "params": maps.Params{
+ "top": "top-nb-theme",
+ "pl1": "p1-nb-main",
+ "pl2": "p2-nb-theme",
+ },
+ "menus": maps.Params{
+ "main": []any{
+ map[string]any{
+ "name": "menu-lang-nb-main",
+ },
+ },
+ "theme": []any{
+ map[string]any{
+ "name": "menu-lang-nb-theme",
+ },
+ },
+ "top": []any{
+ map[string]any{
+ "name": "menu-lang-nb-top",
+ },
+ },
+ },
+ },
+ })
+
+ c.Assert(got["baseurl"], qt.Equals, "https://example.com/")
+ })
+
+ c.Run("Merge shallow", func(c *qt.C) {
+ b := buildForStrategy(c, fmt.Sprintf("_merge=%q", "shallow"))
+
+ got := b.Cfg.Get("").(maps.Params)
+
+ // Shallow merge, only add new keys to params.
+ b.Assert(got["params"], qt.DeepEquals, maps.Params{
+ "p1": "p1 main",
+ "b": maps.Params{
+ "b1": "b1 main",
+ "c": maps.Params{
+ "bc1": "bc1 main",
+ },
+ },
+ "p2": "p2 theme",
+ })
+ })
+
+ c.Run("Merge no params in project", func(c *qt.C) {
+ b := buildForConfig(
+ c,
+ "baseURL=\"https://example.org\"\ntheme = \"test-theme\"\n",
+ "[params]\np1 = \"p1 theme\"\n",
+ )
+
+ got := b.Cfg.Get("").(maps.Params)
+
+ b.Assert(got["params"], qt.DeepEquals, maps.Params{
+ "p1": "p1 theme",
+ })
+ })
+
+ c.Run("Merge language no menus or params in project", func(c *qt.C) {
+ b := buildForConfig(
+ c,
+ `
+theme = "test-theme"
+baseURL = "https://example.com/"
+
+[languages]
+[languages.en]
+languageName = "English"
+
+`,
+ `
+[languages]
+[languages.en]
+languageName = "EnglishTheme"
+
+[languages.en.params]
+p1="themep1"
+
+[[languages.en.menus.main]]
+name = "menu-theme"
+`,
+ )
+
+ got := b.Cfg.Get("").(maps.Params)
+
+ b.Assert(got["languages"], qt.DeepEquals,
+ maps.Params{
+ "en": maps.Params{
+ "languagename": "English",
+ "menus": maps.Params{
+ "main": []any{
+ map[string]any{
+ "name": "menu-theme",
+ },
+ },
+ },
+ "params": maps.Params{
+ "p1": "themep1",
+ },
+ },
+ },
+ )
+ })
+
+ // Issue #8724
+ for _, mergeStrategy := range []string{"none", "shallow"} {
+ c.Run(fmt.Sprintf("Merge with sitemap config in theme, mergestrategy %s", mergeStrategy), func(c *qt.C) {
+
+ smapConfigTempl := `[sitemap]
+ changefreq = %q
+ filename = "sitemap.xml"
+ priority = 0.5`
+
+ b := buildForConfig(
+ c,
+ fmt.Sprintf("_merge=%q\nbaseURL=\"https://example.org\"\ntheme = \"test-theme\"\n", mergeStrategy),
+ "baseURL=\"http://example.com\"\n"+fmt.Sprintf(smapConfigTempl, "monthly"),
+ )
+
+ got := b.Cfg.Get("").(maps.Params)
+
+ if mergeStrategy == "none" {
+ b.Assert(got["sitemap"], qt.DeepEquals, maps.Params{
+ "priority": int(-1),
+ "filename": "sitemap.xml",
+ })
+
+ b.AssertFileContent("public/sitemap.xml", "schemas/sitemap")
+ } else {
+ b.Assert(got["sitemap"], qt.DeepEquals, maps.Params{
+ "priority": int(-1),
+ "filename": "sitemap.xml",
+ "changefreq": "monthly",
+ })
+
+ b.AssertFileContent("public/sitemap.xml", "<changefreq>monthly</changefreq>")
+ }
+
+ })
+ }
+
+}
+
+// TestLoadConfigFromThemeDir verifies precedence when both the project and a
+// theme provide config directories, including environment-specific overrides
+// (production beats _default, project beats theme).
+func TestLoadConfigFromThemeDir(t *testing.T) {
+ t.Parallel()
+
+ mainConfig := `
+theme = "test-theme"
+
+[params]
+m1 = "mv1"
+`
+
+ themeConfig := `
+[params]
+t1 = "tv1"
+t2 = "tv2"
+`
+
+ themeConfigDir := filepath.Join("themes", "test-theme", "config")
+ themeConfigDirDefault := filepath.Join(themeConfigDir, "_default")
+ themeConfigDirProduction := filepath.Join(themeConfigDir, "production")
+
+ projectConfigDir := "config"
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", mainConfig).WithThemeConfigFile("toml", themeConfig)
+ b.Assert(b.Fs.Source.MkdirAll(themeConfigDirDefault, 0777), qt.IsNil)
+ b.Assert(b.Fs.Source.MkdirAll(themeConfigDirProduction, 0777), qt.IsNil)
+ b.Assert(b.Fs.Source.MkdirAll(projectConfigDir, 0777), qt.IsNil)
+
+ b.WithSourceFile(filepath.Join(projectConfigDir, "config.toml"), `[params]
+m2 = "mv2"
+`)
+ b.WithSourceFile(filepath.Join(themeConfigDirDefault, "config.toml"), `[params]
+t2 = "tv2d"
+t3 = "tv3d"
+`)
+
+ b.WithSourceFile(filepath.Join(themeConfigDirProduction, "config.toml"), `[params]
+t3 = "tv3p"
+`)
+
+ b.Build(BuildCfg{})
+
+ got := b.Cfg.Get("params").(maps.Params)
+
+ // NOTE(review): "m2" from the project config dir is absent from the
+ // expected result — confirm whether that is the intended behavior
+ // under test here.
+ b.Assert(got, qt.DeepEquals, maps.Params{
+ "t3": "tv3p",
+ "m1": "mv1",
+ "t1": "tv1",
+ "t2": "tv2d",
+ })
+
+}
+
+// TestPrivacyConfig verifies that the [privacy] section is decoded into
+// .Site.Config (here: YouTube privacy-enhanced mode).
+func TestPrivacyConfig(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ tomlConfig := `
+
+someOtherValue = "foo"
+
+[privacy]
+[privacy.youtube]
+privacyEnhanced = true
+`
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", tomlConfig)
+ b.Build(BuildCfg{SkipRender: true})
+
+ c.Assert(b.H.Sites[0].Info.Config().Privacy.YouTube.PrivacyEnhanced, qt.Equals, true)
+}
+
+// TestLoadConfigModules verifies module dependency resolution, including the
+// legacy theme.toml/`theme =` syntax, by asserting the resulting module graph.
+func TestLoadConfigModules(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ // https://github.com/gohugoio/hugoThemes#themetoml
+
+ const (
+ // Before Hugo 0.56 each theme/component could have its own theme.toml
+ // with some settings, mostly used on the Hugo themes site.
+ // To preserve combability we read these files into the new "modules"
+ // section in config.toml.
+ o1t = `
+name = "Component o1"
+license = "MIT"
+min_version = 0.38
+`
+ // This is the component's config.toml, using the old theme syntax.
+ o1c = `
+theme = ["n2"]
+`
+
+ n1 = `
+title = "Component n1"
+
+[module]
+description = "Component n1 description"
+[module.hugoVersion]
+min = "0.40.0"
+max = "0.50.0"
+extended = true
+[[module.imports]]
+path="o1"
+[[module.imports]]
+path="n3"
+
+
+`
+
+ n2 = `
+title = "Component n2"
+`
+
+ n3 = `
+title = "Component n3"
+`
+
+ n4 = `
+title = "Component n4"
+`
+ )
+
+ b := newTestSitesBuilder(t)
+
+ writeThemeFiles := func(name, configTOML, themeTOML string) {
+ b.WithSourceFile(filepath.Join("themes", name, "data", "module.toml"), fmt.Sprintf("name=%q", name))
+ if configTOML != "" {
+ b.WithSourceFile(filepath.Join("themes", name, "config.toml"), configTOML)
+ }
+ if themeTOML != "" {
+ b.WithSourceFile(filepath.Join("themes", name, "theme.toml"), themeTOML)
+ }
+ }
+
+ writeThemeFiles("n1", n1, "")
+ writeThemeFiles("n2", n2, "")
+ writeThemeFiles("n3", n3, "")
+ writeThemeFiles("n4", n4, "")
+ writeThemeFiles("o1", o1c, o1t)
+
+ b.WithConfigFile("toml", `
+[module]
+[[module.imports]]
+path="n1"
+[[module.imports]]
+path="n4"
+
+`)
+
+ b.Build(BuildCfg{})
+
+ modulesClient := b.H.Paths.ModulesClient
+ var graphb bytes.Buffer
+ modulesClient.Graph(&graphb)
+
+ // Each line is "owner dependency" in resolution order.
+ expected := `project n1
+n1 o1
+o1 n2
+n1 n3
+project n4
+`
+
+ c.Assert(graphb.String(), qt.Equals, expected)
+}
+
+// TestLoadConfigWithOsEnvOverrides verifies HUGO* environment variable
+// overrides: typed conversion, custom delimiters (HUGOx...), theme params,
+// slices, and that child keys cannot be set under a plain string value
+// (issues #7829, #8346, #8618, #8709).
+func TestLoadConfigWithOsEnvOverrides(t *testing.T) {
+ c := qt.New(t)
+
+ baseConfig := `
+
+theme = "mytheme"
+environment = "production"
+enableGitInfo = true
+intSlice = [5,7,9]
+floatSlice = [3.14, 5.19]
+stringSlice = ["a", "b"]
+
+[outputFormats]
+[outputFormats.ofbase]
+mediaType = "text/plain"
+
+[params]
+paramWithNoEnvOverride="nooverride"
+[params.api_config]
+api_key="default_key"
+another_key="default another_key"
+
+[imaging]
+anchor = "smart"
+quality = 75
+`
+
+ newB := func(t testing.TB) *sitesBuilder {
+ b := newTestSitesBuilder(t).WithConfigFile("toml", baseConfig)
+
+ b.WithSourceFile("themes/mytheme/config.toml", `
+
+[outputFormats]
+[outputFormats.oftheme]
+mediaType = "text/plain"
+[outputFormats.ofbase]
+mediaType = "application/xml"
+
+[params]
+[params.mytheme_section]
+theme_param="themevalue"
+theme_param_nooverride="nooverride"
+[params.mytheme_section2]
+theme_param="themevalue2"
+
+`)
+
+ return b
+ }
+
+ c.Run("Variations", func(c *qt.C) {
+
+ b := newB(c)
+
+ b.WithEnviron(
+ "HUGO_ENVIRONMENT", "test",
+ "HUGO_NEW", "new", // key not in config.toml
+ "HUGO_ENABLEGITINFO", "false",
+ "HUGO_IMAGING_ANCHOR", "top",
+ "HUGO_IMAGING_RESAMPLEFILTER", "CatmullRom",
+ "HUGO_STRINGSLICE", `["c", "d"]`,
+ "HUGO_INTSLICE", `[5, 8, 9]`,
+ "HUGO_FLOATSLICE", `[5.32]`,
+ // Issue #7829
+ "HUGOxPARAMSxAPI_CONFIGxAPI_KEY", "new_key",
+ // Delimiters are case sensitive.
+ "HUGOxPARAMSxAPI_CONFIGXANOTHER_KEY", "another_key",
+ // Issue #8346
+ "HUGOxPARAMSxMYTHEME_SECTIONxTHEME_PARAM", "themevalue_changed",
+ "HUGOxPARAMSxMYTHEME_SECTION2xTHEME_PARAM", "themevalue2_changed",
+ "HUGO_PARAMS_EMPTY", ``,
+ "HUGO_PARAMS_HTML", `<a target="_blank" />`,
+ // Issue #8618
+ "HUGO_SERVICES_GOOGLEANALYTICS_ID", `gaid`,
+ "HUGO_PARAMS_A_B_C", "abc",
+ )
+
+ b.Build(BuildCfg{})
+
+ cfg := b.H.Cfg
+ s := b.H.Sites[0]
+ scfg := s.siteConfigConfig.Services
+
+ c.Assert(cfg.Get("environment"), qt.Equals, "test")
+ c.Assert(cfg.GetBool("enablegitinfo"), qt.Equals, false)
+ c.Assert(cfg.Get("new"), qt.Equals, "new")
+ c.Assert(cfg.Get("imaging.anchor"), qt.Equals, "top")
+ c.Assert(cfg.Get("imaging.quality"), qt.Equals, int64(75))
+ c.Assert(cfg.Get("imaging.resamplefilter"), qt.Equals, "CatmullRom")
+ c.Assert(cfg.Get("stringSlice"), qt.DeepEquals, []any{"c", "d"})
+ c.Assert(cfg.Get("floatSlice"), qt.DeepEquals, []any{5.32})
+ c.Assert(cfg.Get("intSlice"), qt.DeepEquals, []any{5, 8, 9})
+ c.Assert(cfg.Get("params.api_config.api_key"), qt.Equals, "new_key")
+ c.Assert(cfg.Get("params.api_config.another_key"), qt.Equals, "default another_key")
+ c.Assert(cfg.Get("params.mytheme_section.theme_param"), qt.Equals, "themevalue_changed")
+ c.Assert(cfg.Get("params.mytheme_section.theme_param_nooverride"), qt.Equals, "nooverride")
+ c.Assert(cfg.Get("params.mytheme_section2.theme_param"), qt.Equals, "themevalue2_changed")
+ c.Assert(cfg.Get("params.empty"), qt.Equals, ``)
+ c.Assert(cfg.Get("params.html"), qt.Equals, `<a target="_blank" />`)
+
+ params := cfg.Get("params").(maps.Params)
+ c.Assert(params["paramwithnoenvoverride"], qt.Equals, "nooverride")
+ c.Assert(cfg.Get("params.paramwithnoenvoverride"), qt.Equals, "nooverride")
+ c.Assert(scfg.GoogleAnalytics.ID, qt.Equals, "gaid")
+ c.Assert(cfg.Get("params.a.b"), qt.DeepEquals, maps.Params{
+ "c": "abc",
+ })
+
+ ofBase, _ := s.outputFormatsConfig.GetByName("ofbase")
+ ofTheme, _ := s.outputFormatsConfig.GetByName("oftheme")
+
+ c.Assert(ofBase.MediaType, qt.Equals, media.TextType)
+ c.Assert(ofTheme.MediaType, qt.Equals, media.TextType)
+
+ })
+
+ // Issue #8709
+ c.Run("Set in string", func(c *qt.C) {
+ b := newB(c)
+
+ b.WithEnviron(
+ "HUGO_ENABLEGITINFO", "false",
+ // imaging.anchor is a string, and it's not possible
+ // to set a child attribute.
+ "HUGO_IMAGING_ANCHOR_FOO", "top",
+ )
+
+ b.Build(BuildCfg{})
+
+ cfg := b.H.Cfg
+ c.Assert(cfg.Get("imaging.anchor"), qt.Equals, "smart")
+
+ })
+
+}
+
+// TestInvalidDefaultMarkdownHandler verifies that configuring the removed
+// "blackfriday" markdown handler fails the build with a helpful message.
+func TestInvalidDefaultMarkdownHandler(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup]
+defaultMarkdownHandler = 'blackfriday'
+-- content/_index.md --
+## Foo
+-- layouts/index.html --
+{{ .Content }}
+
+`
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, "Configured defaultMarkdownHandler \"blackfriday\" not found. Did you mean to use goldmark? Blackfriday was removed in Hugo v0.100.0.")
+
+}
diff --git a/hugolib/configdir_test.go b/hugolib/configdir_test.go
new file mode 100644
index 000000000..7ac3f969d
--- /dev/null
+++ b/hugolib/configdir_test.go
@@ -0,0 +1,152 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/herrors"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/spf13/afero"
+)
+
// TestLoadConfigDir verifies the layering of configuration from the root
// config file, config/_default, and an environment-specific directory
// (config/development here), including per-language params and menus.
func TestLoadConfigDir(t *testing.T) {
	t.Parallel()

	c := qt.New(t)

	// Root project config. NOTE(review): "paginagePath" looks like a typo for
	// "paginatePath"; the assertions below never read it — confirm whether it
	// is intentional.
	configContent := `
baseURL = "https://example.org"
paginagePath = "pag_root"

[languages.en]
weight = 0
languageName = "English"

[languages.no]
weight = 10
languageName = "FOO"

[params]
p1 = "p1_base"

`

	mm := afero.NewMemMapFs()

	writeToFs(t, mm, "hugo.toml", configContent)

	// config/_default: applies to all environments.
	fb := htesting.NewTestdataBuilder(mm, "config/_default", t)

	fb.Add("config.toml", `paginatePath = "pag_default"`)

	fb.Add("params.yaml", `
p2: "p2params_default"
p3: "p3params_default"
p4: "p4params_default"
`)
	fb.Add("menus.toml", `
[[docs]]
name = "About Hugo"
weight = 1
[[docs]]
name = "Home"
weight = 2
	`)

	fb.Add("menus.no.toml", `
	[[docs]]
	name = "Om Hugo"
	weight = 1
	`)

	fb.Add("params.no.toml",
		`
p3 = "p3params_no_default"
p4 = "p4params_no_default"`,
	)
	fb.Add("languages.no.toml", `languageName = "Norsk_no_default"`)

	fb.Build()

	// config/production: should NOT apply when Environment is "development".
	fb = fb.WithWorkingDir("config/production")

	fb.Add("config.toml", `paginatePath = "pag_production"`)

	fb.Add("params.no.toml", `
p2 = "p2params_no_production"
p3 = "p3params_no_production"
`)

	fb.Build()

	// config/development: highest precedence for this test run.
	fb = fb.WithWorkingDir("config/development")

	// This is set in all the config.toml variants above, but this will win.
	fb.Add("config.TOML", `paginatePath = "pag_development"`)
	// Issue #5646
	fb.Add("config.toml.swp", `p3 = "paginatePath = "nono"`)

	fb.Add("params.no.toml", `p3 = "p3params_no_development"`)
	fb.Add("params.toml", `p3 = "p3params_development"`)

	fb.Build()

	cfg, _, err := LoadConfig(ConfigSourceDescriptor{Fs: mm, Environment: "development", Filename: "hugo.toml", AbsConfigDir: "config"})
	c.Assert(err, qt.IsNil)

	c.Assert(cfg.GetString("paginatePath"), qt.Equals, "pag_development") // /config/development/config.toml

	c.Assert(cfg.GetInt("languages.no.weight"), qt.Equals, 10)                          // /config.toml
	c.Assert(cfg.GetString("languages.no.languageName"), qt.Equals, "Norsk_no_default") // /config/_default/languages.no.toml

	c.Assert(cfg.GetString("params.p1"), qt.Equals, "p1_base")
	c.Assert(cfg.GetString("params.p2"), qt.Equals, "p2params_default") // Is in both _default and production
	c.Assert(cfg.GetString("params.p3"), qt.Equals, "p3params_development")
	c.Assert(cfg.GetString("languages.no.params.p3"), qt.Equals, "p3params_no_development")

	// Menus: two entries from _default, one Norwegian override.
	c.Assert(len(cfg.Get("menus.docs").([]any)), qt.Equals, 2)
	noMenus := cfg.Get("languages.no.menus.docs")
	c.Assert(noMenus, qt.Not(qt.IsNil))
	c.Assert(len(noMenus.([]any)), qt.Equals, 1)
}
+
+func TestLoadConfigDirError(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configContent := `
+baseURL = "https://example.org"
+
+`
+
+ mm := afero.NewMemMapFs()
+
+ writeToFs(t, mm, "hugo.toml", configContent)
+
+ fb := htesting.NewTestdataBuilder(mm, "config/development", t)
+
+ fb.Add("config.toml", `invalid & syntax`).Build()
+
+ _, _, err := LoadConfig(ConfigSourceDescriptor{Fs: mm, Environment: "development", Filename: "hugo.toml", AbsConfigDir: "config"})
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ fe := herrors.UnwrapFileError(err)
+ c.Assert(fe, qt.Not(qt.IsNil))
+ c.Assert(fe.Position().Filename, qt.Equals, filepath.FromSlash("config/development/config.toml"))
+}
diff --git a/hugolib/content_factory.go b/hugolib/content_factory.go
new file mode 100644
index 000000000..0a4d0aa0a
--- /dev/null
+++ b/hugolib/content_factory.go
@@ -0,0 +1,177 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "io"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/spf13/afero"
+)
+
// ContentFactory creates content files from archetype templates.
type ContentFactory struct {
	// The sites this factory creates content for.
	h *HugoSites

	// We parse the archetype templates as Go templates, so we need
	// to replace any shortcode with a temporary placeholder.
	// Pre is applied before parsing, Post restores the shortcodes afterwards.
	shortcodeReplacerPre  *strings.Replacer
	shortcodeReplacerPost *strings.Replacer
}
+
+// ApplyArchetypeFilename archetypeFilename to w as a template using the given Page p as the foundation for the data context.
+func (f ContentFactory) ApplyArchetypeFilename(w io.Writer, p page.Page, archetypeKind, archetypeFilename string) error {
+
+ fi, err := f.h.SourceFilesystems.Archetypes.Fs.Stat(archetypeFilename)
+ if err != nil {
+ return err
+ }
+
+ if fi.IsDir() {
+ return fmt.Errorf("archetype directory (%q) not supported", archetypeFilename)
+ }
+
+ templateSource, err := afero.ReadFile(f.h.SourceFilesystems.Archetypes.Fs, archetypeFilename)
+ if err != nil {
+ return fmt.Errorf("failed to read archetype file %q: %s: %w", archetypeFilename, err, err)
+
+ }
+
+ return f.ApplyArchetypeTemplate(w, p, archetypeKind, string(templateSource))
+
+}
+
+// ApplyArchetypeTemplate templateSource to w as a template using the given Page p as the foundation for the data context.
+func (f ContentFactory) ApplyArchetypeTemplate(w io.Writer, p page.Page, archetypeKind, templateSource string) error {
+ ps := p.(*pageState)
+ if archetypeKind == "" {
+ archetypeKind = p.Type()
+ }
+
+ d := &archetypeFileData{
+ Type: archetypeKind,
+ Date: htime.Now().Format(time.RFC3339),
+ Page: p,
+ File: p.File(),
+ }
+
+ templateSource = f.shortcodeReplacerPre.Replace(templateSource)
+
+ templ, err := ps.s.TextTmpl().Parse("archetype.md", string(templateSource))
+ if err != nil {
+ return fmt.Errorf("failed to parse archetype template: %s: %w", err, err)
+ }
+
+ result, err := executeToString(ps.s.Tmpl(), templ, d)
+ if err != nil {
+ return fmt.Errorf("failed to execute archetype template: %s: %w", err, err)
+ }
+
+ _, err = io.WriteString(w, f.shortcodeReplacerPost.Replace(result))
+
+ return err
+
+}
+
+func (f ContentFactory) SectionFromFilename(filename string) (string, error) {
+ filename = filepath.Clean(filename)
+ rel, _, err := f.h.AbsProjectContentDir(filename)
+ if err != nil {
+ return "", err
+ }
+
+ parts := strings.Split(helpers.ToSlashTrimLeading(rel), "/")
+ if len(parts) < 2 {
+ return "", nil
+ }
+ return parts[0], nil
+}
+
// CreateContentPlaceHolder creates a content placeholder file inside the
// best matching content directory.
// It returns the absolute filename of the file it wrote.
func (f ContentFactory) CreateContentPlaceHolder(filename string) (string, error) {
	filename = filepath.Clean(filename)
	_, abs, err := f.h.AbsProjectContentDir(filename)

	if err != nil {
		return "", err
	}

	// This will be overwritten later, just write a placeholder to get
	// the paths correct.
	// The front matter disables rendering, listing and resource publishing
	// for the placeholder.
	placeholder := `---
title: "Content Placeholder"
_build:
  render: never
  list: never
  publishResources: false
---

`

	// NOTE(review): afero.SafeWriteReader is presumably chosen so an existing
	// file is not silently overwritten — confirm against afero docs.
	return abs, afero.SafeWriteReader(f.h.Fs.Source, abs, strings.NewReader(placeholder))
}
+
+// NewContentFactory creates a new ContentFactory for h.
+func NewContentFactory(h *HugoSites) ContentFactory {
+ return ContentFactory{
+ h: h,
+ shortcodeReplacerPre: strings.NewReplacer(
+ "{{<", "{x{<",
+ "{{%", "{x{%",
+ ">}}", ">}x}",
+ "%}}", "%}x}"),
+ shortcodeReplacerPost: strings.NewReplacer(
+ "{x{<", "{{<",
+ "{x{%", "{{%",
+ ">}x}", ">}}",
+ "%}x}", "%}}"),
+ }
+}
+
// archetypeFileData represents the data available to an archetype template.
type archetypeFileData struct {
	// The archetype content type, either given as --kind option or extracted
	// from the target path's section, i.e. "blog/mypost.md" will resolve to
	// "blog".
	Type string

	// The current date and time as a RFC3339 formatted string, suitable for use in front matter.
	Date string

	// The temporary page. Note that only the file path information is relevant at this stage.
	Page page.Page

	// File is the same as Page.File, embedded here for historic reasons.
	// TODO(bep) make this a method.
	source.File
}

// Site returns the site the temporary page belongs to.
func (f *archetypeFileData) Site() page.Site {
	return f.Page.Site()
}

// Name returns the content base name of the page's source file.
func (f *archetypeFileData) Name() string {
	return f.Page.File().ContentBaseName()
}
diff --git a/hugolib/content_factory_test.go b/hugolib/content_factory_test.go
new file mode 100644
index 000000000..23dcd660a
--- /dev/null
+++ b/hugolib/content_factory_test.go
@@ -0,0 +1,78 @@
+package hugolib
+
+import (
+ "bytes"
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestContentFactory exercises placeholder creation and archetype application,
// both with a mounted content dir and with content in project and theme.
func TestContentFactory(t *testing.T) {
	t.Parallel()

	c := qt.New(t)

	c.Run("Simple", func(c *qt.C) {
		workingDir := "/my/work"
		b := newTestSitesBuilder(c)
		b.WithWorkingDir(workingDir).WithConfigFile("toml", `

workingDir="/my/work"

[module]
[[module.mounts]]
source = 'mcontent/en'
target = 'content'
lang = 'en'
[[module.mounts]]
source = 'archetypes'
target = 'archetypes'

`)

		b.WithSourceFile(filepath.Join("mcontent/en/bundle", "index.md"), "")

		// Archetype template; .Name and .Date come from archetypeFileData.
		b.WithSourceFile(filepath.Join("archetypes", "post.md"), `---
title: "{{ replace .Name "-" " " | title }}"
date: {{ .Date }}
draft: true
---

Hello World.
`)
		b.CreateSites()
		cf := NewContentFactory(b.H)
		// The placeholder must land inside the mounted content dir.
		abs, err := cf.CreateContentPlaceHolder(filepath.FromSlash("mcontent/en/blog/mypage.md"))
		b.Assert(err, qt.IsNil)
		b.Assert(abs, qt.Equals, filepath.FromSlash("/my/work/mcontent/en/blog/mypage.md"))
		b.Build(BuildCfg{SkipRender: true})

		p := b.H.GetContentPage(abs)
		b.Assert(p, qt.Not(qt.IsNil))

		var buf bytes.Buffer
		b.Assert(cf.ApplyArchetypeFilename(&buf, p, "", "post.md"), qt.IsNil)

		// "mypage" title-cased by the archetype template.
		b.Assert(buf.String(), qt.Contains, `title: "Mypage"`)
	})

	// Issue #9129
	c.Run("Content in both project and theme", func(c *qt.C) {
		b := newTestSitesBuilder(c)
		b.WithConfigFile("toml", `
theme = 'ipsum'
`)

		themeDir := filepath.Join("themes", "ipsum")
		b.WithSourceFile("content/posts/foo.txt", `Hello.`)
		b.WithSourceFile(filepath.Join(themeDir, "content/posts/foo.txt"), `Hello.`)
		b.CreateSites()
		cf := NewContentFactory(b.H)
		// The project content dir must win over the theme's.
		abs, err := cf.CreateContentPlaceHolder(filepath.FromSlash("posts/test.md"))
		b.Assert(err, qt.IsNil)
		b.Assert(abs, qt.Equals, filepath.FromSlash("content/posts/test.md"))

	})

}
diff --git a/hugolib/content_map.go b/hugolib/content_map.go
new file mode 100644
index 000000000..6849998b6
--- /dev/null
+++ b/hugolib/content_map.go
@@ -0,0 +1,1061 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ radix "github.com/armon/go-radix"
+)
+
// We store the branch nodes in either the `sections` or `taxonomies` tree
// with their path as a key; Unix style slashes, a leading and trailing slash.
//
// E.g. "/blog/" or "/categories/funny/"
//
// Pages that belong to a section are stored in the `pages` tree below
// the section name and a branch separator, e.g. "/blog/__hb_". A page is
// given a key using the path below the section and the base filename with no extension
// with a leaf separator added.
//
// For bundled pages (/mybundle/index.md), we use the folder name.
//
// An example of a full page key would be "/blog/__hb_page1__hl_"
//
// Bundled resources are stored in the `resources` tree, having their path prefixed
// with the bundle they belong to, e.g.
// "/blog/__hb_bundle__hl_data.json".
//
// The weighted taxonomy entries extracted from page front matter are stored in
// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
// "/categories/funny/blog/__hb_bundle__hl_".
const (
	cmBranchSeparator = "__hb_"
	cmLeafSeparator   = "__hl_"
)

// Used to mark ambiguous keys in reverse index lookups.
var ambiguousContentNode = &contentNode{}
+
// newContentMap creates a contentMap with all of its radix trees initialized
// and the page reverse index configured (lazily populated via initFn).
func newContentMap(cfg contentMapConfig) *contentMap {
	m := &contentMap{
		cfg:             &cfg,
		pages:           &contentTree{Name: "pages", Tree: radix.New()},
		sections:        &contentTree{Name: "sections", Tree: radix.New()},
		taxonomies:      &contentTree{Name: "taxonomies", Tree: radix.New()},
		taxonomyEntries: &contentTree{Name: "taxonomyEntries", Tree: radix.New()},
		resources:       &contentTree{Name: "resources", Tree: radix.New()},
	}

	// Tree groupings used by the bulk walk/delete helpers below.
	m.pageTrees = []*contentTree{
		m.pages, m.sections, m.taxonomies,
	}

	m.bundleTrees = []*contentTree{
		m.pages, m.sections, m.taxonomies, m.resources,
	}

	m.branchTrees = []*contentTree{
		m.sections, m.taxonomies,
	}

	// Keys seen more than once are collapsed into ambiguousContentNode so
	// lookups can report the ambiguity instead of picking arbitrarily.
	addToReverseMap := func(k string, n *contentNode, m map[any]*contentNode) {
		k = strings.ToLower(k)
		existing, found := m[k]
		if found && existing != ambiguousContentNode {
			m[k] = ambiguousContentNode
		} else if !found {
			m[k] = n
		}
	}

	m.pageReverseIndex = &contentTreeReverseIndex{
		t: []*contentTree{m.pages, m.sections, m.taxonomies},
		contentTreeReverseIndexMap: &contentTreeReverseIndexMap{
			initFn: func(t *contentTree, m map[any]*contentNode) {
				t.Walk(func(s string, v any) bool {
					n := v.(*contentNode)
					if n.p != nil && !n.p.File().IsZero() {
						meta := n.p.File().FileInfo().Meta()
						if meta.Path != meta.PathFile() {
							// Keep track of the original mount source.
							mountKey := filepath.ToSlash(filepath.Join(meta.Module, meta.PathFile()))
							addToReverseMap(mountKey, n, m)
						}
					}
					// Also index by the bare base name (separators stripped).
					k := strings.TrimPrefix(strings.TrimSuffix(path.Base(s), cmLeafSeparator), cmBranchSeparator)
					addToReverseMap(k, n, m)
					return false
				})
			},
		},
	}

	return m
}

// cmInsertKeyBuilder incrementally builds tree keys for inserts into a
// contentMap. Its chainable methods pass the builder by value, so each step
// can fork the state without affecting the caller.
type cmInsertKeyBuilder struct {
	m *contentMap

	err error

	// Builder state
	tree    *contentTree
	baseKey string // Section or page key
	key     string
}
+
// ForPage positions the builder at the page with path s below the current
// section (or taxonomy). Value receiver: returns a pointer to a modified copy.
func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
	// fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
	baseKey := b.baseKey
	b.baseKey = s

	if baseKey != "/" {
		// Don't repeat the section path in the key.
		s = strings.TrimPrefix(s, baseKey)
	}
	s = strings.TrimPrefix(s, "/")

	switch b.tree {
	case b.m.sections:
		// Pages below a section live in the pages tree with branch/leaf markers.
		b.tree = b.m.pages
		b.key = baseKey + cmBranchSeparator + s + cmLeafSeparator
	case b.m.taxonomies:
		b.key = path.Join(baseKey, s)
	default:
		panic("invalid state")
	}

	return &b
}

// ForResource positions the builder at the bundle resource with path s,
// relative to the current page/section key. Value receiver, like ForPage.
func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
	// fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)

	baseKey := helpers.AddTrailingSlash(b.baseKey)
	s = strings.TrimPrefix(s, baseKey)

	switch b.tree {
	case b.m.pages:
		b.key = b.key + s
	case b.m.sections, b.m.taxonomies:
		// Branch bundles need the leaf separator before the resource path.
		b.key = b.key + cmLeafSeparator + s
	default:
		panic(fmt.Sprintf("invalid state: %#v", b.tree))
	}
	b.tree = b.m.resources
	return &b
}

// Insert stores n at the current key, unless a previous step failed.
func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
	if b.err == nil {
		b.tree.Insert(b.Key(), n)
	}
	return b
}

// Key returns the current key, cleaned according to the target tree
// (section-style keys keep a trailing slash).
func (b *cmInsertKeyBuilder) Key() string {
	switch b.tree {
	case b.m.sections, b.m.taxonomies:
		return cleanSectionTreeKey(b.key)
	default:
		return cleanTreeKey(b.key)
	}
}

// DeleteAll removes everything below the current key, unless a previous
// step failed.
func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
	if b.err == nil {
		b.tree.DeletePrefix(b.Key())
	}
	return b
}
+
// WithFile positions the builder at the resource described by fi, placing it
// inside the closest enclosing bundle (see getBundle). Sets b.err if no
// bundle header is found.
func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilder {
	b.newTopLevel()
	m := b.m
	meta := fi.Meta()
	p := cleanTreeKey(meta.Path)
	bundlePath := m.getBundleDir(meta)
	isBundle := meta.Classifier.IsBundle()
	if isBundle {
		panic("not implemented")
	}

	p, k := b.getBundle(p)
	if k == "" {
		b.err = fmt.Errorf("no bundle header found for %q", bundlePath)
		return b
	}

	// Key = owning bundle key + path relative to the bundle dir.
	id := k + m.reduceKeyPart(p, fi.Meta().Path)
	b.tree = b.m.resources
	b.key = id
	b.baseKey = p

	return b
}

// WithSection resets the builder to the section with key s.
func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
	s = cleanSectionTreeKey(s)
	b.newTopLevel()
	b.tree = b.m.sections
	b.baseKey = s
	b.key = s
	return b
}

// WithTaxonomy resets the builder to the taxonomy node with key s.
func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
	s = cleanSectionTreeKey(s)
	b.newTopLevel()
	b.tree = b.m.taxonomies
	b.baseKey = s
	b.key = s
	return b
}

// getBundle gets both the key to the section and the prefix to where to store
// this page bundle and its resources.
func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) {
	m := b.m
	section, _ := m.getSection(s)

	p := strings.TrimPrefix(s, section)

	bundlePathParts := strings.Split(p, "/")
	basePath := section + cmBranchSeparator

	// Put it into an existing bundle if found.
	// Walk the path from the deepest directory upwards.
	for i := len(bundlePathParts) - 2; i >= 0; i-- {
		bundlePath := path.Join(bundlePathParts[:i]...)
		searchKey := basePath + bundlePath + cmLeafSeparator
		if _, found := m.pages.Get(searchKey); found {
			return section + bundlePath, searchKey
		}
	}

	// Put it into the section bundle.
	return section, section + cmLeafSeparator
}

// newTopLevel clears the accumulated key so the next With* starts fresh.
func (b *cmInsertKeyBuilder) newTopLevel() {
	b.key = ""
}
+
// contentBundleViewInfo carries taxonomy metadata for a taxonomy or term node.
type contentBundleViewInfo struct {
	ordinal    int
	name       viewName
	termKey    string
	termOrigin string
	weight     int
	ref        *contentNode
}

// kind reports whether this node is a term page (has a term key) or the
// taxonomy page itself.
func (c *contentBundleViewInfo) kind() string {
	if c.termKey != "" {
		return page.KindTerm
	}
	return page.KindTaxonomy
}

// sections returns the section path for this node: the plural name, plus the
// term for term pages.
func (c *contentBundleViewInfo) sections() []string {
	if c.kind() == page.KindTaxonomy {
		return []string{c.name.plural}
	}

	return []string{c.name.plural, c.termKey}
}

// term returns the original (un-normalized) term if known, falling back to
// the term key.
func (c *contentBundleViewInfo) term() string {
	if c.termOrigin != "" {
		return c.termOrigin
	}

	return c.termKey
}
+
// contentMap holds the site's content organized in a set of radix trees;
// see the package comment above the cm*Separator constants for the key scheme.
type contentMap struct {
	cfg *contentMapConfig

	// View of regular pages, sections, and taxonomies.
	pageTrees contentTrees

	// View of pages, sections, taxonomies, and resources.
	bundleTrees contentTrees

	// View of sections and taxonomies.
	branchTrees contentTrees

	// Stores page bundles keyed by its path's directory or the base filename,
	// e.g. "blog/post.md" => "/blog/post", "blog/post/index.md" => "/blog/post"
	// These are the "regular pages" and all of them are bundles.
	pages *contentTree

	// A reverse index used as a fallback in GetPage.
	// There are currently two cases where this is used:
	// 1. Short name lookups in ref/relRef, e.g. using only "mypage.md" without a path.
	// 2. Links resolved from a remounted content directory. These are restricted to the same module.
	// Both of the above cases can result in ambiguous lookup errors.
	pageReverseIndex *contentTreeReverseIndex

	// Section nodes.
	sections *contentTree

	// Taxonomy nodes.
	taxonomies *contentTree

	// Pages in a taxonomy.
	taxonomyEntries *contentTree

	// Resources stored per bundle below a common prefix, e.g. "/blog/post__hb_".
	resources *contentTree
}
+
+func (m *contentMap) AddFiles(fis ...hugofs.FileMetaInfo) error {
+ for _, fi := range fis {
+ if err := m.addFile(fi); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
// AddFilesBundle adds a page bundle (its header file plus any resources) to
// the map. Branch bundles (_index.*) become section or taxonomy nodes;
// leaf bundles become regular pages attached to their section.
func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hugofs.FileMetaInfo) error {
	var (
		meta       = header.Meta()
		classifier = meta.Classifier
		isBranch   = classifier == files.ContentClassBranch
		bundlePath = m.getBundleDir(meta)

		n = m.newContentNodeFromFi(header)
		b = m.newKeyBuilder()

		section string
	)

	if isBranch {
		// Either a section or a taxonomy node.
		section = bundlePath
		if tc := m.cfg.getTaxonomyConfig(section); !tc.IsZero() {
			term := strings.TrimPrefix(strings.TrimPrefix(section, "/"+tc.plural), "/")

			n.viewInfo = &contentBundleViewInfo{
				name:       tc,
				termKey:    term,
				termOrigin: term,
			}

			n.viewInfo.ref = n
			b.WithTaxonomy(section).Insert(n)
		} else {
			b.WithSection(section).Insert(n)
		}
	} else {
		// A regular page. Attach it to its section.
		section, _ = m.getOrCreateSection(n, bundlePath)
		b = b.WithSection(section).ForPage(bundlePath).Insert(n)
	}

	if m.cfg.isRebuild {
		// The resource owner will be either deleted or overwritten on rebuilds,
		// but make sure we handle deletion of resources (images etc.) as well.
		b.ForResource("").DeleteAll()
	}

	for _, r := range resources {
		rb := b.ForResource(cleanTreeKey(r.Meta().Path))
		rb.Insert(&contentNode{fi: r})
	}

	return nil
}
+
// CreateMissingNodes creates the home section, any root sections that only
// exist implicitly (via pages below them), and the configured taxonomy nodes,
// for those not already backed by a content file.
func (m *contentMap) CreateMissingNodes() error {
	// Create missing home and root sections
	rootSections := make(map[string]any)
	trackRootSection := func(s string, b *contentNode) {
		parts := strings.Split(s, "/")
		if len(parts) > 2 {
			root := strings.TrimSuffix(parts[1], cmBranchSeparator)
			if root != "" {
				if _, found := rootSections[root]; !found {
					rootSections[root] = b
				}
			}
		}
	}

	m.sections.Walk(func(s string, v any) bool {
		n := v.(*contentNode)

		if s == "/" {
			return false
		}

		trackRootSection(s, n)
		return false
	})

	m.pages.Walk(func(s string, v any) bool {
		trackRootSection(s, v.(*contentNode))
		return false
	})

	// Always make sure the home section exists.
	if _, found := rootSections["/"]; !found {
		rootSections["/"] = true
	}

	for sect, v := range rootSections {
		var sectionPath string
		if n, ok := v.(*contentNode); ok && n.path != "" {
			// Use the first path segment of the tracked node as the source path.
			sectionPath = n.path
			firstSlash := strings.Index(sectionPath, "/")
			if firstSlash != -1 {
				sectionPath = sectionPath[:firstSlash]
			}
		}
		sect = cleanSectionTreeKey(sect)
		_, found := m.sections.Get(sect)
		if !found {
			m.sections.Insert(sect, &contentNode{path: sectionPath})
		}
	}

	// Create nodes for configured taxonomies that have no content file.
	for _, view := range m.cfg.taxonomyConfig {
		s := cleanSectionTreeKey(view.plural)
		_, found := m.taxonomies.Get(s)
		if !found {
			b := &contentNode{
				viewInfo: &contentBundleViewInfo{
					name: view,
				},
			}
			b.viewInfo.ref = b
			m.taxonomies.Insert(s, b)
		}
	}

	return nil
}
+
// getBundleDir returns the bundle directory key for meta: for plain content
// files the dir plus the translation base name, otherwise just the dir.
func (m *contentMap) getBundleDir(meta *hugofs.FileMeta) string {
	dir := cleanTreeKey(filepath.Dir(meta.Path))

	switch meta.Classifier {
	case files.ContentClassContent:
		return path.Join(dir, meta.TranslationBaseName)
	default:
		return dir
	}
}

// newContentNodeFromFi creates a contentNode for the given source file,
// normalizing its path to Unix slashes without a leading slash.
func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
	return &contentNode{
		fi:   fi,
		path: strings.TrimPrefix(filepath.ToSlash(fi.Meta().Path), "/"),
	}
}

// getFirstSection walks up from s until it finds a root-level section
// (at most one path segment deep, i.e. key with <= 2 slashes).
func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
	s = helpers.AddTrailingSlash(s)
	for {
		k, v, found := m.sections.LongestPrefix(s)

		if !found {
			return "", nil
		}

		if strings.Count(k, "/") <= 2 {
			return k, v.(*contentNode)
		}

		// Too deep; retry from the parent directory.
		s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))

	}
}

// newKeyBuilder returns a fresh insert-key builder bound to this map.
func (m *contentMap) newKeyBuilder() *cmInsertKeyBuilder {
	return &cmInsertKeyBuilder{m: m}
}
+
// getOrCreateSection returns the section for s, creating the missing root
// section when the page needs one and only the home section exists.
func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *contentNode) {
	level := strings.Count(s, "/")
	k, b := m.getSection(s)

	mustCreate := false

	if k == "" {
		mustCreate = true
	} else if level > 1 && k == "/" {
		// We found the home section, but this page needs to be placed in
		// the root, e.g. "/blog", section.
		mustCreate = true
	}

	if mustCreate {
		// Use the first path segment of s as the new root section key.
		k = cleanSectionTreeKey(s[:strings.Index(s[1:], "/")+1])

		b = &contentNode{
			path: n.rootSection(),
		}

		m.sections.Insert(k, b)
	}

	return k, b
}

// getPage looks up a regular page by its section and page name.
func (m *contentMap) getPage(section, name string) *contentNode {
	section = helpers.AddTrailingSlash(section)
	key := section + cmBranchSeparator + name + cmLeafSeparator

	v, found := m.pages.Get(key)
	if found {
		return v.(*contentNode)
	}
	return nil
}

// getSection returns the closest enclosing section of s (longest prefix of
// s's parent directory), or ("", nil) if none exists.
func (m *contentMap) getSection(s string) (string, *contentNode) {
	s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))

	k, v, found := m.sections.LongestPrefix(s)

	if found {
		return k, v.(*contentNode)
	}
	return "", nil
}

// getTaxonomyParent returns the closest enclosing taxonomy node of s,
// falling back to the home section if there is no taxonomy match.
func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
	s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
	k, v, found := m.taxonomies.LongestPrefix(s)

	if found {
		return k, v.(*contentNode)
	}

	v, found = m.sections.Get("/")
	if found {
		return s, v.(*contentNode)
	}

	return "", nil
}

// addFile inserts a single (non-bundle) file as a resource of its bundle.
func (m *contentMap) addFile(fi hugofs.FileMetaInfo) error {
	b := m.newKeyBuilder()
	return b.WithFile(fi).Insert(m.newContentNodeFromFi(fi)).err
}
+
// cleanTreeKey normalizes k into a tree key: Unix slashes, cleaned,
// lower-cased, stripped of leading/trailing "." and "/", with exactly one
// leading slash.
func cleanTreeKey(k string) string {
	cleaned := path.Clean(filepath.ToSlash(k))
	cleaned = strings.Trim(cleaned, "./")
	return "/" + strings.ToLower(cleaned)
}
+
+func cleanSectionTreeKey(k string) string {
+ k = cleanTreeKey(k)
+ if k != "/" {
+ k += "/"
+ }
+
+ return k
+}
+
// onSameLevel reports whether the two keys sit at the same depth in the tree
// (same number of slashes).
func (m *contentMap) onSameLevel(s1, s2 string) bool {
	return strings.Count(s1, "/") == strings.Count(s2, "/")
}

// deleteBundleMatching deletes the first bundle whose node satisfies matches,
// checking sections first, then pages, then bare resources.
func (m *contentMap) deleteBundleMatching(matches func(b *contentNode) bool) {
	// Check sections first
	s := m.sections.getMatch(matches)
	if s != "" {
		m.deleteSectionByPath(s)
		return
	}

	s = m.pages.getMatch(matches)
	if s != "" {
		m.deletePage(s)
		return
	}

	s = m.resources.getMatch(matches)
	if s != "" {
		m.resources.Delete(s)
	}
}
+
// Deletes any empty root section that's not backed by a content file.
// A section is kept if it has a source file, is the home section, is nested
// deeper than root level, or still has descendants in any tree.
func (m *contentMap) deleteOrphanSections() {
	var sectionsToDelete []string

	m.sections.Walk(func(s string, v any) bool {
		n := v.(*contentNode)

		if n.fi != nil {
			// Section may be empty, but is backed by a content file.
			return false
		}

		if s == "/" || strings.Count(s, "/") > 2 {
			return false
		}

		prefixBundle := s + cmBranchSeparator

		// Orphan only if nothing lives below it in sections, pages or resources.
		if !(m.sections.hasBelow(s) || m.pages.hasBelow(prefixBundle) || m.resources.hasBelow(prefixBundle)) {
			sectionsToDelete = append(sectionsToDelete, s)
		}

		return false
	})

	// Delete after the walk to avoid mutating the tree while iterating.
	for _, s := range sectionsToDelete {
		m.sections.Delete(s)
	}
}
+
// deletePage removes the page with key prefix s and all of its resources.
func (m *contentMap) deletePage(s string) {
	m.pages.DeletePrefix(s)
	m.resources.DeletePrefix(s)
}

// deleteSectionByPath removes the section key s (which must be a full
// section key, "/.../") along with all pages and resources below it.
func (m *contentMap) deleteSectionByPath(s string) {
	if !strings.HasSuffix(s, "/") {
		panic("section must end with a slash")
	}
	if !strings.HasPrefix(s, "/") {
		panic("section must start with a slash")
	}
	m.sections.DeletePrefix(s)
	m.pages.DeletePrefix(s)
	m.resources.DeletePrefix(s)
}

// NOTE(review): this looks like an unfinished debug stub — it prints every
// page key (shadowing the parameter s) and deletes nothing. Confirm intent.
func (m *contentMap) deletePageByPath(s string) {
	m.pages.Walk(func(s string, v any) bool {
		fmt.Println("S", s)

		return false
	})
}

// deleteTaxonomy removes the taxonomy subtree rooted at s.
func (m *contentMap) deleteTaxonomy(s string) {
	m.taxonomies.DeletePrefix(s)
}
+
// reduceKeyPart returns filename relative to dir, with slashes normalized
// and no leading slash.
func (m *contentMap) reduceKeyPart(dir, filename string) string {
	dir, filename = filepath.ToSlash(dir), filepath.ToSlash(filename)
	dir, filename = strings.TrimPrefix(dir, "/"), strings.TrimPrefix(filename, "/")

	return strings.TrimPrefix(strings.TrimPrefix(filename, dir), "/")
}

// splitKey splits a tree key into its path segments; the root ("" or "/")
// yields nil.
func (m *contentMap) splitKey(k string) []string {
	if k == "" || k == "/" {
		return nil
	}

	return strings.Split(k, "/")[1:]
}
+
// testDump renders a human-readable dump of the pages, sections and
// resources trees. Used from tests only.
func (m *contentMap) testDump() string {
	var sb strings.Builder

	// First pass: raw keys per tree.
	for i, r := range []*contentTree{m.pages, m.sections, m.resources} {
		sb.WriteString(fmt.Sprintf("Tree %d:\n", i))
		r.Walk(func(s string, v any) bool {
			sb.WriteString("\t" + s + "\n")
			return false
		})
	}

	// Second pass: nodes with their page/file info plus attached pages and
	// resources (sections additionally list their member pages).
	for i, r := range []*contentTree{m.pages, m.sections} {
		r.Walk(func(s string, v any) bool {
			c := v.(*contentNode)
			cpToString := func(c *contentNode) string {
				var sb strings.Builder
				if c.p != nil {
					sb.WriteString("|p:" + c.p.Title())
				}
				if c.fi != nil {
					sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path))
				}
				return sb.String()
			}
			sb.WriteString(path.Join(m.cfg.lang, r.Name) + s + cpToString(c) + "\n")

			resourcesPrefix := s

			if i == 1 {
				// Sections: resources hang below the leaf separator, and
				// member pages below the branch separator.
				resourcesPrefix += cmLeafSeparator

				m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v any) bool {
					sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n")
					return false
				})
			}

			m.resources.WalkPrefix(resourcesPrefix, func(s string, v any) bool {
				sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n")
				return false
			})

			return false
		})
	}

	return sb.String()
}
+
// contentMapConfig holds the per-language settings a contentMap is built with.
type contentMapConfig struct {
	lang                 string
	taxonomyConfig       []viewName
	taxonomyDisabled     bool
	taxonomyTermDisabled bool
	pageDisabled         bool
	isRebuild            bool
}

// getTaxonomyConfig returns the taxonomy view whose plural name prefixes s,
// or the zero viewName if none matches.
// NOTE(review): HasPrefix means e.g. plural "tags" would also match a
// section named "tagsomething" — presumably acceptable upstream; confirm.
func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) {
	s = strings.TrimPrefix(s, "/")
	if s == "" {
		return
	}
	for _, n := range cfg.taxonomyConfig {
		if strings.HasPrefix(s, n.plural) {
			return n
		}
	}

	return
}
+
// contentNode is a node in one of the content trees: a page, section,
// taxonomy node or bundle resource.
type contentNode struct {
	// The built page, if any.
	p *pageState

	// Set for taxonomy nodes.
	viewInfo *contentBundleViewInfo

	// Set if source is a file.
	// We will soon get other sources.
	fi hugofs.FileMetaInfo

	// The source path. Unix slashes. No leading slash.
	path string
}
+
+func (b *contentNode) rootSection() string {
+ if b.path == "" {
+ return ""
+ }
+ firstSlash := strings.Index(b.path, "/")
+ if firstSlash == -1 {
+ return b.path
+ }
+ return b.path[:firstSlash]
+}
+
// contentTree is a named radix tree of contentNodes.
type contentTree struct {
	Name string
	*radix.Tree
}

// contentTrees is a group of trees that can be walked or pruned together.
type contentTrees []*contentTree
+
+func (t contentTrees) DeletePrefix(prefix string) int {
+ var count int
+ for _, tree := range t {
+ tree.Walk(func(s string, v any) bool {
+ return false
+ })
+ count += tree.DeletePrefix(prefix)
+ }
+ return count
+}
+
// contentTreeNodeCallback is the walk callback; returning true stops the walk
// (or, when used as a filter in WalkQuery, excludes the node).
type contentTreeNodeCallback func(s string, n *contentNode) bool

// newContentTreeFilter adapts a node-only predicate to a tree callback.
func newContentTreeFilter(fn func(n *contentNode) bool) contentTreeNodeCallback {
	return func(s string, n *contentNode) bool {
		return fn(n)
	}
}

// Standard WalkQuery filters. A true return excludes the node from the walk;
// nodes without a built page (n.p == nil) are always excluded.
var (
	contentTreeNoListAlwaysFilter = func(s string, n *contentNode) bool {
		if n.p == nil {
			return true
		}
		return n.p.m.noListAlways()
	}

	contentTreeNoRenderFilter = func(s string, n *contentNode) bool {
		if n.p == nil {
			return true
		}
		return n.p.m.noRender()
	}

	contentTreeNoLinkFilter = func(s string, n *contentNode) bool {
		if n.p == nil {
			return true
		}
		return n.p.m.noLink()
	}
)
+
+// WalkQuery walks the tree, restricted to below query.Prefix when set,
+// invoking walkFn for every node that passes the filter. A nil
+// query.Filter defaults to contentTreeNoListAlwaysFilter.
+//
+// After defaulting, filter is never nil, so the redundant nil checks in
+// the original have been dropped and the shared walk closure hoisted.
+func (c *contentTree) WalkQuery(query pageMapQuery, walkFn contentTreeNodeCallback) {
+	filter := query.Filter
+	if filter == nil {
+		filter = contentTreeNoListAlwaysFilter
+	}
+
+	fn := func(s string, v any) bool {
+		n := v.(*contentNode)
+		if filter(s, n) {
+			return false
+		}
+		return walkFn(s, n)
+	}
+
+	if query.Prefix != "" {
+		c.WalkBelow(query.Prefix, fn)
+		return
+	}
+
+	c.Walk(fn)
+}
+
+// WalkRenderable visits every node across all trees whose page will be
+// rendered.
+func (c contentTrees) WalkRenderable(fn contentTreeNodeCallback) {
+	q := pageMapQuery{Filter: contentTreeNoRenderFilter}
+	for _, tree := range c {
+		tree.WalkQuery(q, fn)
+	}
+}
+
+// WalkLinkable visits every node across all trees whose page can be
+// linked to.
+func (c contentTrees) WalkLinkable(fn contentTreeNodeCallback) {
+	q := pageMapQuery{Filter: contentTreeNoLinkFilter}
+	for _, tree := range c {
+		tree.WalkQuery(q, fn)
+	}
+}
+
+// Walk visits every node in every tree, unfiltered.
+func (c contentTrees) Walk(fn contentTreeNodeCallback) {
+	for _, tree := range c {
+		tree.Walk(func(s string, v any) bool {
+			return fn(s, v.(*contentNode))
+		})
+	}
+}
+
+// WalkPrefix visits every node in every tree whose key starts with prefix.
+func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) {
+	for _, tree := range c {
+		tree.WalkPrefix(prefix, func(s string, v any) bool {
+			return fn(s, v.(*contentNode))
+		})
+	}
+}
+
+// WalkBelow walks the tree strictly below the given prefix, i.e. the
+// node whose key equals the prefix itself is skipped.
+func (c *contentTree) WalkBelow(prefix string, fn radix.WalkFn) {
+	c.Tree.WalkPrefix(prefix, func(s string, v any) bool {
+		if s != prefix {
+			return fn(s, v)
+		}
+		return false
+	})
+}
+
+// getMatch returns the key of the first node satisfying matches, or ""
+// when no node does.
+func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
+	var key string
+	c.Walk(func(s string, v any) bool {
+		n, ok := v.(*contentNode)
+		if ok && matches(n) {
+			key = s
+			return true // stop the walk
+		}
+		return false
+	})
+	return key
+}
+
+func (c *contentTree) hasBelow(s1 string) bool {
+ var t bool
+ c.WalkBelow(s1, func(s2 string, v any) bool {
+ t = true
+ return true
+ })
+ return t
+}
+
+// printKeys prints every key in the tree. Debug helper.
+func (c *contentTree) printKeys() {
+	c.Walk(func(s string, _ any) bool {
+		fmt.Println(s)
+		return false
+	})
+}
+
+// printKeysPrefix prints every key under prefix. Debug helper.
+func (c *contentTree) printKeysPrefix(prefix string) {
+	c.WalkPrefix(prefix, func(s string, _ any) bool {
+		fmt.Println(s)
+		return false
+	})
+}
+
+// contentTreeRef points to a node in the given tree.
+type contentTreeRef struct {
+	m   *pageMap     // owning page map
+	t   *contentTree // the tree that holds n
+	n   *contentNode // the node itself
+	key string       // n's key in t
+}
+
+// getCurrentSection returns this node itself when it is a section,
+// otherwise the section that owns it.
+func (c *contentTreeRef) getCurrentSection() (string, *contentNode) {
+	if !c.isSection() {
+		return c.getSection()
+	}
+	return c.key, c.n
+}
+
+// isSection reports whether the ref points into the sections tree.
+func (c *contentTreeRef) isSection() bool {
+	return c.t == c.m.sections
+}
+
+// getSection returns the key and node of the section owning this ref.
+// Taxonomy nodes resolve their parent through the taxonomy tree.
+func (c *contentTreeRef) getSection() (string, *contentNode) {
+	if c.t == c.m.taxonomies {
+		return c.m.getTaxonomyParent(c.key)
+	}
+	return c.m.getSection(c.key)
+}
+
+// getPages returns the regular pages directly below this node (branch
+// children only), sorted by the default page order.
+func (c *contentTreeRef) getPages() page.Pages {
+	var pas page.Pages
+	c.m.collectPages(
+		pageMapQuery{
+			// The branch separator limits the walk to direct children.
+			Prefix: c.key + cmBranchSeparator,
+			Filter: c.n.p.m.getListFilter(true),
+		},
+		func(c *contentNode) {
+			pas = append(pas, c.p)
+		},
+	)
+	page.SortByDefault(pas)
+
+	return pas
+}
+
+// getPagesRecursive returns all regular pages below this node at any
+// depth, sorted by the default page order.
+func (c *contentTreeRef) getPagesRecursive() page.Pages {
+	var pas page.Pages
+	c.m.collectPages(
+		pageMapQuery{
+			// No branch separator: the whole subtree is collected.
+			Prefix: c.key,
+			Filter: c.n.p.m.getListFilter(true),
+		},
+		func(n *contentNode) {
+			pas = append(pas, n.p)
+		},
+	)
+	page.SortByDefault(pas)
+	return pas
+}
+
+// getPagesAndSections returns the pages and immediate child sections
+// below this node, sorted by the default page order.
+func (c *contentTreeRef) getPagesAndSections() page.Pages {
+	var pas page.Pages
+	q := pageMapQuery{
+		Filter: c.n.p.m.getListFilter(true),
+		Prefix: c.key,
+	}
+	c.m.collectPagesAndSections(q, func(n *contentNode) {
+		pas = append(pas, n.p)
+	})
+	page.SortByDefault(pas)
+	return pas
+}
+
+// getSections returns the immediate child sections of this node, sorted
+// by the default page order.
+func (c *contentTreeRef) getSections() page.Pages {
+	var pas page.Pages
+	q := pageMapQuery{
+		Filter: c.n.p.m.getListFilter(true),
+		Prefix: c.key,
+	}
+	c.m.collectSections(q, func(n *contentNode) {
+		pas = append(pas, n.p)
+	})
+	page.SortByDefault(pas)
+	return pas
+}
+
+// contentTreeReverseIndex provides a lazily built reverse lookup from
+// some key (defined by initFn) to a content node across several trees.
+type contentTreeReverseIndex struct {
+	t []*contentTree
+	*contentTreeReverseIndexMap
+}
+
+// contentTreeReverseIndexMap holds the lazily initialized index map.
+type contentTreeReverseIndexMap struct {
+	m      map[any]*contentNode
+	init   sync.Once
+	initFn func(*contentTree, map[any]*contentNode)
+}
+
+// Reset discards the built index so the next Get rebuilds it.
+// NOTE(review): swaps the embedded pointer without synchronization —
+// presumably never called concurrently with Get; confirm before relying
+// on that.
+func (c *contentTreeReverseIndex) Reset() {
+	c.contentTreeReverseIndexMap = &contentTreeReverseIndexMap{
+		initFn: c.initFn,
+	}
+}
+
+// Get returns the node indexed under key, building the index from all
+// trees on first use (guarded by sync.Once).
+func (c *contentTreeReverseIndex) Get(key any) *contentNode {
+	c.init.Do(func() {
+		c.m = make(map[any]*contentNode)
+		for _, tree := range c.t {
+			c.initFn(tree, c.m)
+		}
+	})
+	return c.m[key]
+}
diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go
new file mode 100644
index 000000000..7e6b6e670
--- /dev/null
+++ b/hugolib/content_map_page.go
@@ -0,0 +1,1039 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "context"
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/resources"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/parser/pageparser"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/para"
+)
+
+// newPageMaps collects the page map of every site into a pageMaps with a
+// worker pool sized to the configured number of workers.
+func newPageMaps(h *HugoSites) *pageMaps {
+	pms := make([]*pageMap, len(h.Sites))
+	for i, s := range h.Sites {
+		pms[i] = s.pageMap
+	}
+	return &pageMaps{
+		workers: para.New(h.numWorkers),
+		pmaps:   pms,
+	}
+}
+
+// pageMap is a single site's view of the content map.
+type pageMap struct {
+	s *Site
+	*contentMap
+}
+
+func (m *pageMap) Len() int {
+ l := 0
+ for _, t := range m.contentMap.pageTrees {
+ l += t.Len()
+ }
+ return l
+}
+
+// createMissingTaxonomyNodes inserts a taxonomy node for every term that
+// has entries but no explicit node yet (terms referenced only from page
+// front matter). No-op when taxonomies are disabled.
+func (m *pageMap) createMissingTaxonomyNodes() error {
+	if m.cfg.taxonomyDisabled {
+		return nil
+	}
+	m.taxonomyEntries.Walk(func(s string, v any) bool {
+		n := v.(*contentNode)
+		vi := n.viewInfo
+		// Key of the term page, e.g. "/tags/hugo/".
+		k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey)
+
+		if _, found := m.taxonomies.Get(k); !found {
+			vic := &contentBundleViewInfo{
+				name:       vi.name,
+				termKey:    vi.termKey,
+				termOrigin: vi.termOrigin,
+			}
+			m.taxonomies.Insert(k, &contentNode{viewInfo: vic})
+		}
+		return false
+	})
+
+	return nil
+}
+
+// newPageFromContentNode creates a pageState from a file-backed content
+// node. parentBucket is the owning branch bucket; owner, when non-nil,
+// is the bundle owner and marks the new page as bundled. The node must
+// currently have a file source (n.fi).
+func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapBucket, owner *pageState) (*pageState, error) {
+	if n.fi == nil {
+		panic("FileInfo must (currently) be set")
+	}
+
+	f, err := newFileInfo(m.s.SourceSpec, n.fi)
+	if err != nil {
+		return nil, err
+	}
+
+	meta := n.fi.Meta()
+	// Content is opened lazily; the file is read further below.
+	content := func() (hugio.ReadSeekCloser, error) {
+		return meta.Open()
+	}
+
+	bundled := owner != nil
+	s := m.s
+
+	sections := s.sectionsFromFile(f)
+
+	kind := s.kindFromFileInfoOrSections(f, sections)
+	if kind == page.KindTerm {
+		// Term keys are path-sanitized; keep sections in sync.
+		s.PathSpec.MakePathsSanitized(sections)
+	}
+
+	metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f}
+
+	ps, err := newPageBase(metaProvider)
+	if err != nil {
+		return nil, err
+	}
+
+	if n.fi.Meta().IsRootFile {
+		// Make sure that the bundle/section we start walking from is always
+		// rendered.
+		// This is only relevant in server fast render mode.
+		ps.forceRender = true
+	}
+
+	n.p = ps
+	if ps.IsNode() {
+		ps.bucket = newPageBucket(ps)
+	}
+
+	// Attach Git and CODEOWNERS metadata when configured.
+	gi, err := s.h.gitInfoForPage(ps)
+	if err != nil {
+		return nil, fmt.Errorf("failed to load Git data: %w", err)
+	}
+	ps.gitInfo = gi
+
+	owners, err := s.h.codeownersForPage(ps)
+	if err != nil {
+		return nil, fmt.Errorf("failed to load CODEOWNERS: %w", err)
+	}
+	ps.codeowners = owners
+
+	r, err := content()
+	if err != nil {
+		return nil, err
+	}
+	defer r.Close()
+
+	parseResult, err := pageparser.Parse(
+		r,
+		pageparser.Config{EnableEmoji: s.siteCfg.enableEmoji},
+	)
+	if err != nil {
+		return nil, err
+	}
+
+	// Positions are resolved later; -1 means "not yet known".
+	ps.pageContent = pageContent{
+		source: rawPageContent{
+			parsed:         parseResult,
+			posMainContent: -1,
+			posSummaryEnd:  -1,
+			posBodyStart:   -1,
+		},
+	}
+
+	if err := ps.mapContent(parentBucket, metaProvider); err != nil {
+		return nil, ps.wrapError(err)
+	}
+
+	if err := metaProvider.applyDefaultValues(n); err != nil {
+		return nil, err
+	}
+
+	// Deferred init: output formats and paths are expensive and only
+	// needed once the page is actually used.
+	ps.init.Add(func() (any, error) {
+		pp, err := newPagePaths(s, ps, metaProvider)
+		if err != nil {
+			return nil, err
+		}
+
+		outputFormatsForPage := ps.m.outputFormats()
+
+		// Prepare output formats for all sites.
+		// We do this even if this page does not get rendered on
+		// its own. It may be referenced via .Site.GetPage and
+		// it will then need an output format.
+		ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
+		created := make(map[string]*pageOutput)
+		shouldRenderPage := !ps.m.noRender()
+
+		for i, f := range ps.s.h.renderFormats {
+			if po, found := created[f.Name]; found {
+				// Same format used by multiple sites: share the output.
+				ps.pageOutputs[i] = po
+				continue
+			}
+
+			render := shouldRenderPage
+			if render {
+				_, render = outputFormatsForPage.GetByName(f.Name)
+			}
+
+			po := newPageOutput(ps, pp, f, render)
+
+			// Create a content provider for the first,
+			// we may be able to reuse it.
+			if i == 0 {
+				contentProvider, err := newPageContentOutput(ps, po)
+				if err != nil {
+					return nil, err
+				}
+				po.initContentProvider(contentProvider)
+			}
+
+			ps.pageOutputs[i] = po
+			created[f.Name] = po
+
+		}
+
+		if err := ps.initCommonProviders(pp); err != nil {
+			return nil, err
+		}
+
+		return nil, nil
+	})
+
+	ps.parent = owner
+
+	return ps, nil
+}
+
+// newResource creates a Resource for a bundled (non-content) file owned
+// by the given page.
+func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) {
+	if owner == nil {
+		panic("owner is nil")
+	}
+	// TODO(bep) consolidate with multihost logic + clean up
+	outputFormats := owner.m.outputFormats()
+	seen := make(map[string]bool)
+	var targetBasePaths []string
+	// Make sure bundled resources are published to all of the output formats'
+	// sub paths.
+	for _, f := range outputFormats {
+		p := f.Path
+		if seen[p] {
+			continue
+		}
+		seen[p] = true
+		targetBasePaths = append(targetBasePaths, p)
+
+	}
+
+	meta := fim.Meta()
+	r := func() (hugio.ReadSeekCloser, error) {
+		return meta.Open()
+	}
+
+	// Target path relative to the owning bundle's directory.
+	target := strings.TrimPrefix(meta.Path, owner.File().Dir())
+
+	return owner.s.ResourceSpec.New(
+		resources.ResourceSourceDescriptor{
+			TargetPaths:        owner.getTargetPaths,
+			OpenReadSeekCloser: r,
+			FileInfo:           fim,
+			RelTargetFilename:  target,
+			TargetBasePaths:    targetBasePaths,
+			LazyPublish:        !owner.m.buildConfig.PublishResources,
+		})
+}
+
+// createSiteTaxonomies builds the site's TaxonomyList from the taxonomy
+// trees. Plural-only nodes (termKey == "") create the taxonomy map;
+// term nodes add their weighted entries to it. Relies on the radix walk
+// visiting the plural node before its terms (lexicographic prefix order).
+func (m *pageMap) createSiteTaxonomies() error {
+	m.s.taxonomies = make(TaxonomyList)
+	var walkErr error
+	m.taxonomies.Walk(func(s string, v any) bool {
+		n := v.(*contentNode)
+		t := n.viewInfo
+
+		viewName := t.name
+
+		if t.termKey == "" {
+			m.s.taxonomies[viewName.plural] = make(Taxonomy)
+		} else {
+			taxonomy := m.s.taxonomies[viewName.plural]
+			if taxonomy == nil {
+				walkErr = fmt.Errorf("missing taxonomy: %s", viewName.plural)
+				return true
+			}
+			m.taxonomyEntries.WalkPrefix(s, func(ss string, v any) bool {
+				b2 := v.(*contentNode)
+				info := b2.viewInfo
+				taxonomy.add(info.termKey, page.NewWeightedPage(info.weight, info.ref.p, n.p))
+
+				return false
+			})
+		}
+
+		return false
+	})
+
+	// Sort each term's weighted pages.
+	for _, taxonomy := range m.s.taxonomies {
+		for _, v := range taxonomy {
+			v.Sort()
+		}
+	}
+
+	return walkErr
+}
+
+// createListAllPages collects every listable page from the page trees,
+// sorted by the default page order. A node with no page assigned is a
+// bug and panics.
+func (m *pageMap) createListAllPages() page.Pages {
+	pages := make(page.Pages, 0)
+
+	m.contentMap.pageTrees.Walk(func(s string, n *contentNode) bool {
+		if n.p == nil {
+			panic(fmt.Sprintf("BUG: page not set for %q", s))
+		}
+		if !contentTreeNoListAlwaysFilter(s, n) {
+			pages = append(pages, n.p)
+		}
+		return false
+	})
+
+	page.SortByDefault(pages)
+	return pages
+}
+
+// assemblePages creates page states for all nodes in the pages tree,
+// wires them to their parent sections, attaches them to taxonomy views
+// and assembles their bundled resources. Sections are assembled first so
+// parents exist. Finally, orphaned sections are removed.
+//
+// The original had a dead `if err != nil { return err }` immediately
+// after `var err error` (err is always nil there); it has been removed.
+func (m *pageMap) assemblePages() error {
+	m.taxonomyEntries.DeletePrefix("/")
+
+	if err := m.assembleSections(); err != nil {
+		return err
+	}
+
+	// err carries the first failure out of the walk below.
+	var err error
+
+	m.pages.Walk(func(s string, v any) bool {
+		n := v.(*contentNode)
+
+		var shouldBuild bool
+
+		defer func() {
+			// Make sure we always rebuild the view cache.
+			if shouldBuild && err == nil && n.p != nil {
+				m.attachPageToViews(s, n)
+			}
+		}()
+
+		if n.p != nil {
+			// A rebuild
+			shouldBuild = true
+			return false
+		}
+
+		var parent *contentNode
+		var parentBucket *pagesMapBucket
+
+		_, parent = m.getSection(s)
+		if parent == nil {
+			panic(fmt.Sprintf("BUG: parent not set for %q", s))
+		}
+		parentBucket = parent.p.bucket
+
+		n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
+		if err != nil {
+			return true
+		}
+
+		shouldBuild = !(n.p.Kind() == page.KindPage && m.cfg.pageDisabled) && m.s.shouldBuild(n.p)
+		if !shouldBuild {
+			m.deletePage(s)
+			return false
+		}
+
+		n.p.treeRef = &contentTreeRef{
+			m:   m,
+			t:   m.pages,
+			n:   n,
+			key: s,
+		}
+
+		if err = m.assembleResources(s, n.p, parentBucket); err != nil {
+			return true
+		}
+
+		return false
+	})
+
+	m.deleteOrphanSections()
+
+	return err
+}
+
+// assembleResources attaches all resources found under key s to page p:
+// content files become bundled pages, everything else becomes a plain
+// Resource. The walk stops at the first error.
+func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesMapBucket) error {
+	var err error
+
+	m.resources.WalkPrefix(s, func(s string, v any) bool {
+		n := v.(*contentNode)
+		meta := n.fi.Meta()
+		classifier := meta.Classifier
+		var r resource.Resource
+		switch classifier {
+		case files.ContentClassContent:
+			// A bundled content page (e.g. a Markdown file in a bundle).
+			var rp *pageState
+			rp, err = m.newPageFromContentNode(n, parentBucket, p)
+			if err != nil {
+				return true
+			}
+			rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.File().Path(), p.File().Dir()))
+			r = rp
+
+		case files.ContentClassFile:
+			r, err = m.newResource(n.fi, p)
+			if err != nil {
+				return true
+			}
+		default:
+			panic(fmt.Sprintf("invalid classifier: %q", classifier))
+		}
+
+		p.resources = append(p.resources, r)
+		return false
+	})
+
+	return err
+}
+
+// assembleSections creates page states for all section nodes (including
+// home), wires parent buckets and tree refs, assembles their resources,
+// and deletes sections that should not be built.
+func (m *pageMap) assembleSections() error {
+	var sectionsToDelete []string
+	var err error
+
+	m.sections.Walk(func(s string, v any) bool {
+		n := v.(*contentNode)
+		var shouldBuild bool
+
+		defer func() {
+			// Make sure we always rebuild the view cache.
+			if shouldBuild && err == nil && n.p != nil {
+				m.attachPageToViews(s, n)
+				if n.p.IsHome() {
+					m.s.home = n.p
+				}
+			}
+		}()
+
+		sections := m.splitKey(s)
+
+		if n.p != nil {
+			// A rebuild: page already exists.
+			if n.p.IsHome() {
+				m.s.home = n.p
+			}
+			shouldBuild = true
+			return false
+		}
+
+		var parent *contentNode
+		var parentBucket *pagesMapBucket
+
+		// Home ("/") has no parent section.
+		if s != "/" {
+			_, parent = m.getSection(s)
+			if parent == nil || parent.p == nil {
+				panic(fmt.Sprintf("BUG: parent not set for %q", s))
+			}
+		}
+
+		if parent != nil {
+			parentBucket = parent.p.bucket
+		} else if s == "/" {
+			parentBucket = m.s.siteBucket
+		}
+
+		kind := page.KindSection
+		if s == "/" {
+			kind = page.KindHome
+		}
+
+		// File-backed sections (_index.md) get a full page; the rest get
+		// a synthetic one.
+		if n.fi != nil {
+			n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
+			if err != nil {
+				return true
+			}
+		} else {
+			n.p = m.s.newPage(n, parentBucket, kind, "", sections...)
+		}
+
+		shouldBuild = m.s.shouldBuild(n.p)
+		if !shouldBuild {
+			// Deferred: deleting while walking would mutate the tree.
+			sectionsToDelete = append(sectionsToDelete, s)
+			return false
+		}
+
+		n.p.treeRef = &contentTreeRef{
+			m:   m,
+			t:   m.sections,
+			n:   n,
+			key: s,
+		}
+
+		if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil {
+			return true
+		}
+
+		return false
+	})
+
+	for _, s := range sectionsToDelete {
+		m.deleteSectionByPath(s)
+	}
+
+	return err
+}
+
+// assembleTaxonomies creates page states for all taxonomy (plural and
+// term) nodes, wires tree refs and resources, and deletes taxonomy
+// nodes that should not be built.
+func (m *pageMap) assembleTaxonomies() error {
+	var taxonomiesToDelete []string
+	var err error
+
+	m.taxonomies.Walk(func(s string, v any) bool {
+		n := v.(*contentNode)
+
+		if n.p != nil {
+			// A rebuild: page already exists.
+			return false
+		}
+
+		kind := n.viewInfo.kind()
+		sections := n.viewInfo.sections()
+
+		_, parent := m.getTaxonomyParent(s)
+		if parent == nil || parent.p == nil {
+			panic(fmt.Sprintf("BUG: parent not set for %q", s))
+		}
+		parentBucket := parent.p.bucket
+
+		// File-backed nodes (_index.md for a term) get a full page;
+		// the rest get a synthetic one.
+		if n.fi != nil {
+			n.p, err = m.newPageFromContentNode(n, parent.p.bucket, nil)
+			if err != nil {
+				return true
+			}
+		} else {
+			title := ""
+			if kind == page.KindTerm {
+				title = n.viewInfo.term()
+			}
+			n.p = m.s.newPage(n, parent.p.bucket, kind, title, sections...)
+		}
+
+		if !m.s.shouldBuild(n.p) {
+			// Deferred: deleting while walking would mutate the tree.
+			taxonomiesToDelete = append(taxonomiesToDelete, s)
+			return false
+		}
+
+		n.p.treeRef = &contentTreeRef{
+			m:   m,
+			t:   m.taxonomies,
+			n:   n,
+			key: s,
+		}
+
+		if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil {
+			return true
+		}
+
+		return false
+	})
+
+	for _, s := range taxonomiesToDelete {
+		m.deleteTaxonomy(s)
+	}
+
+	return err
+}
+
+// attachPageToViews inserts an entry into the taxonomyEntries tree for
+// every taxonomy term the page (at key s) declares in its front matter.
+// No-op when taxonomies are disabled.
+func (m *pageMap) attachPageToViews(s string, b *contentNode) {
+	if m.cfg.taxonomyDisabled {
+		return
+	}
+
+	for _, viewName := range m.cfg.taxonomyConfig {
+		vals := types.ToStringSlicePreserveString(getParam(b.p, viewName.plural, false))
+		if vals == nil {
+			continue
+		}
+		// Optional per-taxonomy weight, e.g. "tags_weight".
+		w := getParamToLower(b.p, viewName.plural+"_weight")
+		weight, err := cast.ToIntE(w)
+		if err != nil {
+			m.s.Log.Errorf("Unable to convert taxonomy weight %#v to int for %q", w, b.p.Pathc())
+			// weight will equal zero, so let the flow continue
+		}
+
+		for i, v := range vals {
+			termKey := m.s.getTaxonomyKey(v)
+
+			bv := &contentNode{
+				viewInfo: &contentBundleViewInfo{
+					ordinal:    i,
+					name:       viewName,
+					termKey:    termKey,
+					termOrigin: v,
+					weight:     weight,
+					ref:        b,
+				},
+			}
+
+			// Branch (section) keys end in "/" and use the section key
+			// cleaner; leaf keys use the plain cleaner.
+			var key string
+			if strings.HasSuffix(s, "/") {
+				key = cleanSectionTreeKey(path.Join(viewName.plural, termKey, s))
+			} else {
+				key = cleanTreeKey(path.Join(viewName.plural, termKey, s))
+			}
+			m.taxonomyEntries.Insert(key, bv)
+		}
+	}
+}
+
+// pageMapQuery describes a tree walk: an optional key prefix and an
+// optional node filter (a filter returning true skips the node).
+type pageMapQuery struct {
+	Prefix string
+	Filter contentTreeNodeCallback
+}
+
+// collectPages invokes fn for every page matching query. A nil
+// query.Filter defaults to the "no list always" filter.
+func (m *pageMap) collectPages(query pageMapQuery, fn func(c *contentNode)) error {
+	if query.Filter == nil {
+		query.Filter = contentTreeNoListAlwaysFilter
+	}
+
+	m.pages.WalkQuery(query, func(_ string, n *contentNode) bool {
+		fn(n)
+		return false
+	})
+
+	return nil
+}
+
+// collectPagesAndSections invokes fn for the immediate child sections of
+// query.Prefix, then for the pages directly below it.
+func (m *pageMap) collectPagesAndSections(query pageMapQuery, fn func(c *contentNode)) error {
+	if err := m.collectSections(query, fn); err != nil {
+		return err
+	}
+
+	query.Prefix += cmBranchSeparator
+	return m.collectPages(query, fn)
+}
+
+// collectSections invokes fn for the immediate child sections of
+// query.Prefix (exactly one level below).
+// NOTE(review): level is counted before collectSectionsFn appends a
+// trailing slash, so callers are expected to pass a "/"-terminated
+// prefix for the depth check to line up — confirm at call sites.
+func (m *pageMap) collectSections(query pageMapQuery, fn func(c *contentNode)) error {
+	level := strings.Count(query.Prefix, "/")
+
+	return m.collectSectionsFn(query, func(s string, c *contentNode) bool {
+		if strings.Count(s, "/") != level+1 {
+			return false
+		}
+
+		fn(c)
+
+		return false
+	})
+}
+
+// collectSectionsFn walks the sections tree for query, normalizing the
+// prefix to end with a slash, and invokes fn for each section visited.
+func (m *pageMap) collectSectionsFn(query pageMapQuery, fn func(s string, c *contentNode) bool) error {
+	if !strings.HasSuffix(query.Prefix, "/") {
+		query.Prefix += "/"
+	}
+
+	// fn already matches the walk callback signature.
+	m.sections.WalkQuery(query, fn)
+
+	return nil
+}
+
+// collectSectionsRecursiveIncludingSelf invokes fn for every section in
+// the subtree rooted at query.Prefix, with no depth restriction.
+func (m *pageMap) collectSectionsRecursiveIncludingSelf(query pageMapQuery, fn func(c *contentNode)) error {
+	return m.collectSectionsFn(query, func(_ string, c *contentNode) bool {
+		fn(c)
+		return false
+	})
+}
+
+// collectTaxonomies invokes fn for every taxonomy node under prefix.
+func (m *pageMap) collectTaxonomies(prefix string, fn func(c *contentNode)) error {
+	q := pageMapQuery{Prefix: prefix}
+	m.taxonomies.WalkQuery(q, func(_ string, n *contentNode) bool {
+		fn(n)
+		return false
+	})
+	return nil
+}
+
+// withEveryBundlePage applies fn to every Page, including those bundled
+// inside leaf bundles. Nodes without a page are skipped.
+func (m *pageMap) withEveryBundlePage(fn func(p *pageState) bool) {
+	m.bundleTrees.Walk(func(s string, n *contentNode) bool {
+		if n.p == nil {
+			return false
+		}
+		return fn(n.p)
+	})
+}
+
+// pageMaps holds the page maps of all sites plus a worker pool used to
+// operate on them in parallel.
+type pageMaps struct {
+	workers *para.Workers
+	pmaps   []*pageMap
+}
+
+// deleteSection deletes the entire section from s.
+// The withMaps error is ignored: the callback below never fails.
+func (m *pageMaps) deleteSection(s string) {
+	m.withMaps(func(pm *pageMap) error {
+		pm.deleteSectionByPath(s)
+		return nil
+	})
+}
+
+// AssemblePages runs the full page-assembly pipeline for every site's
+// page map, in parallel across sites. The step order matters: missing
+// nodes, pages, missing taxonomy nodes, (re-)sections, taxonomies, then
+// the site taxonomy list and date/main-section aggregation.
+func (m *pageMaps) AssemblePages() error {
+	return m.withMaps(func(pm *pageMap) error {
+		if err := pm.CreateMissingNodes(); err != nil {
+			return err
+		}
+
+		if err := pm.assemblePages(); err != nil {
+			return err
+		}
+
+		if err := pm.createMissingTaxonomyNodes(); err != nil {
+			return err
+		}
+
+		// Handle any new sections created in the step above.
+		if err := pm.assembleSections(); err != nil {
+			return err
+		}
+
+		if pm.s.home == nil {
+			// Home is disabled, everything is.
+			pm.bundleTrees.DeletePrefix("")
+			return nil
+		}
+
+		if err := pm.assembleTaxonomies(); err != nil {
+			return err
+		}
+
+		if err := pm.createSiteTaxonomies(); err != nil {
+			return err
+		}
+
+		// Aggregate dates and page counts, and derive mainSections
+		// unless the user configured it explicitly.
+		sw := &sectionWalker{m: pm.contentMap}
+		a := sw.applyAggregates()
+		_, mainSectionsSet := pm.s.s.Info.Params()["mainsections"]
+		if !mainSectionsSet && a.mainSection != "" {
+			mainSections := []string{strings.TrimRight(a.mainSection, "/")}
+			pm.s.s.Info.Params()["mainSections"] = mainSections
+			pm.s.s.Info.Params()["mainsections"] = mainSections
+		}
+
+		pm.s.lastmod = a.datesAll.Lastmod()
+		if resource.IsZeroDates(pm.s.home) {
+			pm.s.home.m.Dates = a.datesAll
+		}
+
+		return nil
+	})
+}
+
+// walkBundles visits every bundle node across all sites' bundle trees.
+func (m *pageMaps) walkBundles(fn func(n *contentNode) bool) {
+	_ = m.withMaps(func(pm *pageMap) error {
+		pm.bundleTrees.Walk(func(_ string, n *contentNode) bool {
+			return fn(n)
+		})
+		return nil
+	})
+}
+
+// walkBranchesPrefix visits every branch node under prefix across all
+// sites' branch trees.
+func (m *pageMaps) walkBranchesPrefix(prefix string, fn func(s string, n *contentNode) bool) {
+	_ = m.withMaps(func(pm *pageMap) error {
+		// fn already matches the callback signature; pass it straight through.
+		pm.branchTrees.WalkPrefix(prefix, fn)
+		return nil
+	})
+}
+
+// withMaps runs fn for every page map in parallel on the worker pool and
+// returns the first error encountered.
+func (m *pageMaps) withMaps(fn func(pm *pageMap) error) error {
+	g, _ := m.workers.Start(context.Background())
+	for _, pm := range m.pmaps {
+		pm := pm // capture the loop variable for the goroutine
+		g.Run(func() error {
+			return fn(pm)
+		})
+	}
+	return g.Wait()
+}
+
+// pagesMapBucket is attached to branch (node) pages and lazily caches
+// their child page collections.
+type pagesMapBucket struct {
+	// Cascading front matter.
+	cascade map[page.PageMatcher]maps.Params
+
+	owner *pageState // The branch node
+
+	*pagesMapBucketPages
+}
+
+// pagesMapBucketPages holds the once-initialized page collections.
+type pagesMapBucketPages struct {
+	pagesInit sync.Once
+	pages     page.Pages
+
+	pagesAndSectionsInit sync.Once
+	pagesAndSections     page.Pages
+
+	sectionsInit sync.Once
+	sections     page.Pages
+}
+
+// getPages returns (and lazily caches) the regular pages directly in
+// this bucket's section, sorted by the default page order.
+func (b *pagesMapBucket) getPages() page.Pages {
+	b.pagesInit.Do(func() {
+		pas := b.owner.treeRef.getPages()
+		page.SortByDefault(pas)
+		b.pages = pas
+	})
+	return b.pages
+}
+
+// getPagesRecursive returns all pages below this bucket's section at any
+// depth, sorted by the default page order. Not cached.
+func (b *pagesMapBucket) getPagesRecursive() page.Pages {
+	pas := b.owner.treeRef.getPagesRecursive()
+	page.SortByDefault(pas)
+	return pas
+}
+
+// getPagesAndSections returns (and lazily caches) the pages and child
+// sections directly below this bucket's owner.
+func (b *pagesMapBucket) getPagesAndSections() page.Pages {
+	b.pagesAndSectionsInit.Do(func() {
+		b.pagesAndSections = b.owner.treeRef.getPagesAndSections()
+	})
+	return b.pagesAndSections
+}
+
+// getSections returns (and lazily caches) the immediate child sections
+// of this bucket's owner. A missing treeRef yields an empty result.
+func (b *pagesMapBucket) getSections() page.Pages {
+	b.sectionsInit.Do(func() {
+		if ref := b.owner.treeRef; ref != nil {
+			b.sections = ref.getSections()
+		}
+	})
+	return b.sections
+}
+
+// getTaxonomies returns (and lazily caches) the taxonomy nodes below
+// this bucket's owner.
+// NOTE(review): shares sectionsInit/sections with getSections — a bucket
+// is presumably either a section or a taxonomy node, never both; confirm
+// before changing.
+func (b *pagesMapBucket) getTaxonomies() page.Pages {
+	b.sectionsInit.Do(func() {
+		var pas page.Pages
+		ref := b.owner.treeRef
+		ref.m.collectTaxonomies(ref.key, func(c *contentNode) {
+			pas = append(pas, c.p)
+		})
+		page.SortByDefault(pas)
+		b.sections = pas
+	})
+
+	return b.sections
+}
+
+// getTaxonomyEntries returns the pages classified under this bucket's
+// taxonomy term, sorted by the default page order. Not cached.
+func (b *pagesMapBucket) getTaxonomyEntries() page.Pages {
+	var pas page.Pages
+	ref := b.owner.treeRef
+	viewInfo := ref.n.viewInfo
+	// Entry keys are "/<plural>/<term>/<page key>".
+	prefix := strings.ToLower("/" + viewInfo.name.plural + "/" + viewInfo.termKey + "/")
+	ref.m.taxonomyEntries.WalkPrefix(prefix, func(s string, v any) bool {
+		n := v.(*contentNode)
+		pas = append(pas, n.viewInfo.ref.p)
+		return false
+	})
+	page.SortByDefault(pas)
+	return pas
+}
+
+// sectionAggregate accumulates dates and page counts while walking the
+// section tree; used to derive the site lastmod and the main section.
+type sectionAggregate struct {
+	datesAll             resource.Dates
+	datesSection         resource.Dates
+	pageCount            int
+	mainSection          string
+	mainSectionPageCount int
+}
+
+// sectionAggregateHandler implements sectionWalkHandler and carries the
+// per-section state of the walk.
+type sectionAggregateHandler struct {
+	sectionAggregate
+	sectionPageCount int
+
+	// Section
+	b *contentNode
+	s string
+}
+
+// String returns a debug representation of the handler's current state.
+func (h *sectionAggregateHandler) String() string {
+	return fmt.Sprintf("%s/%s - %d - %s",
+		h.sectionAggregate.datesAll,
+		h.sectionAggregate.datesSection,
+		h.sectionPageCount,
+		h.s,
+	)
+}
+
+// isRootSection reports whether the current section is a top-level
+// section (keys carry leading and trailing slashes, e.g. "/blog/",
+// hence a slash count of 2), excluding home.
+func (h *sectionAggregateHandler) isRootSection() bool {
+	return h.s != "/" && strings.Count(h.s, "/") == 2
+}
+
+// handleNested folds the results of a completed nested section walk into
+// this handler.
+func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error {
+	nested := v.(*sectionAggregateHandler)
+	h.sectionPageCount += nested.pageCount
+	// NOTE(review): adds the running section count (which now includes
+	// nested.pageCount) rather than nested.pageCount directly — verify
+	// this accumulation is intended before changing.
+	h.pageCount += h.sectionPageCount
+	h.datesAll.UpdateDateAndLastmodIfAfter(nested.datesAll)
+	h.datesSection.UpdateDateAndLastmodIfAfter(nested.datesAll)
+	return nil
+}
+
+// handlePage counts the page and folds its dates into the aggregates.
+// The dated source is the page itself or, for view entries, the
+// referenced page; nodes with neither are counted but contribute no dates.
+func (h *sectionAggregateHandler) handlePage(s string, n *contentNode) error {
+	h.sectionPageCount++
+
+	var d resource.Dated
+	if n.p != nil {
+		d = n.p
+	} else if n.viewInfo != nil && n.viewInfo.ref != nil {
+		d = n.viewInfo.ref.p
+	} else {
+		return nil
+	}
+
+	h.datesAll.UpdateDateAndLastmodIfAfter(d)
+	h.datesSection.UpdateDateAndLastmodIfAfter(d)
+	return nil
+}
+
+// handleSectionPost finishes a section: updates the main-section
+// candidate, backfills the section page's dates when unset, and resets
+// the per-section date accumulator.
+func (h *sectionAggregateHandler) handleSectionPost() error {
+	if h.sectionPageCount > h.mainSectionPageCount && h.isRootSection() {
+		h.mainSectionPageCount = h.sectionPageCount
+		h.mainSection = strings.TrimPrefix(h.s, "/")
+	}
+
+	if resource.IsZeroDates(h.b.p) {
+		h.b.p.m.Dates = h.datesSection
+	}
+
+	h.datesSection = resource.Dates{}
+
+	return nil
+}
+
+// handleSectionPre starts a new section: records its key and node,
+// resets the page counter and folds the section page's own dates in.
+func (h *sectionAggregateHandler) handleSectionPre(s string, b *contentNode) error {
+	h.s = s
+	h.b = b
+	h.sectionPageCount = 0
+	h.datesAll.UpdateDateAndLastmodIfAfter(b.p)
+	return nil
+}
+
+// sectionWalkHandler receives events while walking the section tree:
+// section start/end, each page, and completed nested walks.
+type sectionWalkHandler interface {
+	handleNested(v sectionWalkHandler) error
+	handlePage(s string, b *contentNode) error
+	handleSectionPost() error
+	handleSectionPre(s string, b *contentNode) error
+}
+
+// sectionWalker walks the section and taxonomy trees level by level,
+// recording the first error raised by a handler.
+type sectionWalker struct {
+	err error
+	m   *contentMap
+}
+
+// applyAggregates walks the whole tree from the root and returns the
+// aggregated dates and page counts.
+func (w *sectionWalker) applyAggregates() *sectionAggregateHandler {
+	return w.walkLevel("/", func() sectionWalkHandler {
+		return &sectionAggregateHandler{}
+	}).(*sectionAggregateHandler)
+}
+
+// walkLevel walks one level of the taxonomy and section trees below
+// prefix, recursing into deeper levels with a fresh visitor created by
+// createVisitor, and returns the visitor for this level. Errors from the
+// handlers are recorded in w.err and stop the walk.
+func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler {
+	level := strings.Count(prefix, "/")
+
+	visitor := createVisitor()
+
+	w.m.taxonomies.WalkBelow(prefix, func(s string, v any) bool {
+		currentLevel := strings.Count(s, "/")
+
+		// Skip nodes deeper than one level below the prefix; they are
+		// handled by the recursive calls.
+		if currentLevel > level+1 {
+			return false
+		}
+
+		n := v.(*contentNode)
+
+		if w.err = visitor.handleSectionPre(s, n); w.err != nil {
+			return true
+		}
+
+		// Level 2 is the taxonomy plural (e.g. "/tags/"): recurse into
+		// its terms. Deeper nodes are terms: walk their entries as pages.
+		if currentLevel == 2 {
+			nested := w.walkLevel(s, createVisitor)
+			if w.err = visitor.handleNested(nested); w.err != nil {
+				return true
+			}
+		} else {
+			w.m.taxonomyEntries.WalkPrefix(s, func(ss string, v any) bool {
+				n := v.(*contentNode)
+				w.err = visitor.handlePage(ss, n)
+				return w.err != nil
+			})
+		}
+
+		w.err = visitor.handleSectionPost()
+
+		return w.err != nil
+	})
+
+	w.m.sections.WalkBelow(prefix, func(s string, v any) bool {
+		currentLevel := strings.Count(s, "/")
+		if currentLevel > level+1 {
+			return false
+		}
+
+		n := v.(*contentNode)
+
+		if w.err = visitor.handleSectionPre(s, n); w.err != nil {
+			return true
+		}
+
+		// Direct pages of this section live under the branch separator.
+		w.m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v any) bool {
+			w.err = visitor.handlePage(s, v.(*contentNode))
+			return w.err != nil
+		})
+
+		if w.err != nil {
+			return true
+		}
+
+		// Recurse into child sections.
+		nested := w.walkLevel(s, createVisitor)
+		if w.err = visitor.handleNested(nested); w.err != nil {
+			return true
+		}
+
+		w.err = visitor.handleSectionPost()
+
+		return w.err != nil
+	})
+
+	return visitor
+}
+
+// viewName holds the singular and plural names of a taxonomy.
+type viewName struct {
+	singular string // e.g. "category"
+	plural   string // e.g. "categories"
+}
+
+// IsZero reports whether the view name is unset.
+func (v viewName) IsZero() bool {
+	return v.singular == ""
+}
diff --git a/hugolib/content_map_test.go b/hugolib/content_map_test.go
new file mode 100644
index 000000000..883587a01
--- /dev/null
+++ b/hugolib/content_map_test.go
@@ -0,0 +1,456 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func BenchmarkContentMap(b *testing.B) {
+ writeFile := func(c *qt.C, fs afero.Fs, filename, content string) hugofs.FileMetaInfo {
+ c.Helper()
+ filename = filepath.FromSlash(filename)
+ c.Assert(fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filename, []byte(content), 0777), qt.IsNil)
+
+ fi, err := fs.Stat(filename)
+ c.Assert(err, qt.IsNil)
+
+ mfi := fi.(hugofs.FileMetaInfo)
+ return mfi
+ }
+
+ createFs := func(fs afero.Fs, lang string) afero.Fs {
+ return hugofs.NewBaseFileDecorator(fs,
+ func(fi hugofs.FileMetaInfo) {
+ meta := fi.Meta()
+ // We have a more elaborate filesystem setup in the
+ // real flow, so simulate this here.
+ meta.Lang = lang
+ meta.Path = meta.Filename
+ meta.Classifier = files.ClassifyContentFile(fi.Name(), meta.OpenFunc)
+ })
+ }
+
+ b.Run("CreateMissingNodes", func(b *testing.B) {
+ c := qt.New(b)
+ b.StopTimer()
+ mps := make([]*contentMap, b.N)
+ for i := 0; i < b.N; i++ {
+ m := newContentMap(contentMapConfig{lang: "en"})
+ mps[i] = m
+ memfs := afero.NewMemMapFs()
+ fs := createFs(memfs, "en")
+ for i := 1; i <= 20; i++ {
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, fmt.Sprintf("sect%d/a/index.md", i), "page")), qt.IsNil)
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, fmt.Sprintf("sect2%d/%sindex.md", i, strings.Repeat("b/", i)), "page")), qt.IsNil)
+ }
+
+ }
+
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ m := mps[i]
+ c.Assert(m.CreateMissingNodes(), qt.IsNil)
+
+ b.StopTimer()
+ m.pages.DeletePrefix("/")
+ m.sections.DeletePrefix("/")
+ b.StartTimer()
+ }
+ })
+}
+
+func TestContentMap(t *testing.T) {
+ c := qt.New(t)
+
+ writeFile := func(c *qt.C, fs afero.Fs, filename, content string) hugofs.FileMetaInfo {
+ c.Helper()
+ filename = filepath.FromSlash(filename)
+ c.Assert(fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
+ c.Assert(afero.WriteFile(fs, filename, []byte(content), 0777), qt.IsNil)
+
+ fi, err := fs.Stat(filename)
+ c.Assert(err, qt.IsNil)
+
+ mfi := fi.(hugofs.FileMetaInfo)
+ return mfi
+ }
+
+ createFs := func(fs afero.Fs, lang string) afero.Fs {
+ return hugofs.NewBaseFileDecorator(fs,
+ func(fi hugofs.FileMetaInfo) {
+ meta := fi.Meta()
+ // We have a more elaborate filesystem setup in the
+ // real flow, so simulate this here.
+ meta.Lang = lang
+ meta.Path = meta.Filename
+ meta.TranslationBaseName = paths.Filename(fi.Name())
+ meta.Classifier = files.ClassifyContentFile(fi.Name(), meta.OpenFunc)
+ })
+ }
+
+ c.Run("AddFiles", func(c *qt.C) {
+ memfs := afero.NewMemMapFs()
+
+ fsl := func(lang string) afero.Fs {
+ return createFs(memfs, lang)
+ }
+
+ fs := fsl("en")
+
+ header := writeFile(c, fs, "blog/a/index.md", "page")
+
+ c.Assert(header.Meta().Lang, qt.Equals, "en")
+
+ resources := []hugofs.FileMetaInfo{
+ writeFile(c, fs, "blog/a/b/data.json", "data"),
+ writeFile(c, fs, "blog/a/logo.png", "image"),
+ }
+
+ m := newContentMap(contentMapConfig{lang: "en"})
+
+ c.Assert(m.AddFilesBundle(header, resources...), qt.IsNil)
+
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/b/c/index.md", "page")), qt.IsNil)
+
+ c.Assert(m.AddFilesBundle(
+ writeFile(c, fs, "blog/_index.md", "section page"),
+ writeFile(c, fs, "blog/sectiondata.json", "section resource"),
+ ), qt.IsNil)
+
+ got := m.testDump()
+
+ expect := `
+ Tree 0:
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
+ Tree 1:
+ /blog/
+ Tree 2:
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hl_sectiondata.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
+ - R: blog/a/b/data.json
+ - R: blog/a/logo.png
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
+ en/sections/blog/|f:blog/_index.md
+ - P: blog/a/index.md
+ - P: blog/b/c/index.md
+ - R: blog/sectiondata.json
+
+`
+
+ c.Assert(got, hqt.IsSameString, expect, qt.Commentf(got))
+
+ // Add a data file to the section bundle
+ c.Assert(m.AddFiles(
+ writeFile(c, fs, "blog/sectiondata2.json", "section resource"),
+ ), qt.IsNil)
+
+ // And then one to the leaf bundles
+ c.Assert(m.AddFiles(
+ writeFile(c, fs, "blog/a/b/data2.json", "data2"),
+ ), qt.IsNil)
+
+ c.Assert(m.AddFiles(
+ writeFile(c, fs, "blog/b/c/d/data3.json", "data3"),
+ ), qt.IsNil)
+
+ got = m.testDump()
+
+ expect = `
+ Tree 0:
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
+ Tree 1:
+ /blog/
+ Tree 2:
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_b/data2.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hb_b/c__hl_d/data3.json
+ /blog/__hl_sectiondata.json
+ /blog/__hl_sectiondata2.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
+ - R: blog/a/b/data.json
+ - R: blog/a/b/data2.json
+ - R: blog/a/logo.png
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
+ - R: blog/b/c/d/data3.json
+ en/sections/blog/|f:blog/_index.md
+ - P: blog/a/index.md
+ - P: blog/b/c/index.md
+ - R: blog/sectiondata.json
+ - R: blog/sectiondata2.json
+
+`
+
+ c.Assert(got, hqt.IsSameString, expect, qt.Commentf(got))
+
+ // Add a regular page (i.e. not a bundle)
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/b.md", "page")), qt.IsNil)
+
+ c.Assert(m.testDump(), hqt.IsSameString, `
+ Tree 0:
+ /blog/__hb_a__hl_
+ /blog/__hb_b/c__hl_
+ /blog/__hb_b__hl_
+ Tree 1:
+ /blog/
+ Tree 2:
+ /blog/__hb_a__hl_b/data.json
+ /blog/__hb_a__hl_b/data2.json
+ /blog/__hb_a__hl_logo.png
+ /blog/__hb_b/c__hl_d/data3.json
+ /blog/__hl_sectiondata.json
+ /blog/__hl_sectiondata2.json
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
+ - R: blog/a/b/data.json
+ - R: blog/a/b/data2.json
+ - R: blog/a/logo.png
+ en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
+ - R: blog/b/c/d/data3.json
+ en/pages/blog/__hb_b__hl_|f:blog/b.md
+ en/sections/blog/|f:blog/_index.md
+ - P: blog/a/index.md
+ - P: blog/b/c/index.md
+ - P: blog/b.md
+ - R: blog/sectiondata.json
+ - R: blog/sectiondata2.json
+
+
+ `, qt.Commentf(m.testDump()))
+ })
+
+ c.Run("CreateMissingNodes", func(c *qt.C) {
+ memfs := afero.NewMemMapFs()
+
+ fsl := func(lang string) afero.Fs {
+ return createFs(memfs, lang)
+ }
+
+ fs := fsl("en")
+
+ m := newContentMap(contentMapConfig{lang: "en"})
+
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/page.md", "page")), qt.IsNil)
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/a/index.md", "page")), qt.IsNil)
+ c.Assert(m.AddFilesBundle(writeFile(c, fs, "bundle/index.md", "page")), qt.IsNil)
+
+ c.Assert(m.CreateMissingNodes(), qt.IsNil)
+
+ got := m.testDump()
+
+ c.Assert(got, hqt.IsSameString, `
+
+ Tree 0:
+ /__hb_bundle__hl_
+ /blog/__hb_a__hl_
+ /blog/__hb_page__hl_
+ Tree 1:
+ /
+ /blog/
+ Tree 2:
+ en/pages/__hb_bundle__hl_|f:bundle/index.md
+ en/pages/blog/__hb_a__hl_|f:blog/a/index.md
+ en/pages/blog/__hb_page__hl_|f:blog/page.md
+ en/sections/
+ - P: bundle/index.md
+ en/sections/blog/
+ - P: blog/a/index.md
+ - P: blog/page.md
+
+ `, qt.Commentf(got))
+ })
+
+ c.Run("cleanKey", func(c *qt.C) {
+ for _, test := range []struct {
+ in string
+ expected string
+ }{
+ {"/a/b/", "/a/b"},
+ {filepath.FromSlash("/a/b/"), "/a/b"},
+ {"/a//b/", "/a/b"},
+ } {
+ c.Assert(cleanTreeKey(test.in), qt.Equals, test.expected)
+ }
+ })
+}
+
+func TestContentMapSite(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ pageTempl := `
+---
+title: "Page %d"
+date: "2019-06-0%d"
+lastMod: "2019-06-0%d"
+categories: ["funny"]
+---
+
+Page content.
+`
+ createPage := func(i int) string {
+ return fmt.Sprintf(pageTempl, i, i, i+1)
+ }
+
+ draftTemplate := `---
+title: "Draft"
+draft: true
+---
+
+`
+
+ b.WithContent("_index.md", `
+---
+title: "Hugo Home"
+cascade:
+ description: "Common Description"
+
+---
+
+Home Content.
+`)
+
+ b.WithContent("blog/page1.md", createPage(1))
+ b.WithContent("blog/page2.md", createPage(2))
+ b.WithContent("blog/page3.md", createPage(3))
+ b.WithContent("blog/bundle/index.md", createPage(12))
+ b.WithContent("blog/bundle/data.json", "data")
+ b.WithContent("blog/bundle/page.md", createPage(99))
+ b.WithContent("blog/subsection/_index.md", createPage(3))
+ b.WithContent("blog/subsection/subdata.json", "data")
+ b.WithContent("blog/subsection/page4.md", createPage(8))
+ b.WithContent("blog/subsection/page5.md", createPage(10))
+ b.WithContent("blog/subsection/draft/index.md", draftTemplate)
+ b.WithContent("blog/subsection/draft/data.json", "data")
+ b.WithContent("blog/draftsection/_index.md", draftTemplate)
+ b.WithContent("blog/draftsection/page/index.md", createPage(12))
+ b.WithContent("blog/draftsection/page/folder/data.json", "data")
+ b.WithContent("blog/draftsection/sub/_index.md", createPage(12))
+ b.WithContent("blog/draftsection/sub/page.md", createPage(13))
+ b.WithContent("docs/page6.md", createPage(11))
+ b.WithContent("tags/_index.md", createPage(32))
+ b.WithContent("overlap/_index.md", createPage(33))
+ b.WithContent("overlap2/_index.md", createPage(34))
+
+ b.WithTemplatesAdded("layouts/index.html", `
+Num Regular: {{ len .Site.RegularPages }}
+Main Sections: {{ .Site.Params.mainSections }}
+Pag Num Pages: {{ len .Paginator.Pages }}
+{{ $home := .Site.Home }}
+{{ $blog := .Site.GetPage "blog" }}
+{{ $categories := .Site.GetPage "categories" }}
+{{ $funny := .Site.GetPage "categories/funny" }}
+{{ $blogSub := .Site.GetPage "blog/subsection" }}
+{{ $page := .Site.GetPage "blog/page1" }}
+{{ $page2 := .Site.GetPage "blog/page2" }}
+{{ $page4 := .Site.GetPage "blog/subsection/page4" }}
+{{ $bundle := .Site.GetPage "blog/bundle" }}
+{{ $overlap1 := .Site.GetPage "overlap" }}
+{{ $overlap2 := .Site.GetPage "overlap2" }}
+
+Home: {{ template "print-page" $home }}
+Blog Section: {{ template "print-page" $blog }}
+Blog Sub Section: {{ template "print-page" $blogSub }}
+Page: {{ template "print-page" $page }}
+Bundle: {{ template "print-page" $bundle }}
+IsDescendant: true: {{ $page.IsDescendant $blog }} true: {{ $blogSub.IsDescendant $blog }} true: {{ $bundle.IsDescendant $blog }} true: {{ $page4.IsDescendant $blog }} true: {{ $blog.IsDescendant $home }} false: {{ $blog.IsDescendant $blog }} false: {{ $home.IsDescendant $blog }}
+IsAncestor: true: {{ $blog.IsAncestor $page }} true: {{ $home.IsAncestor $blog }} true: {{ $blog.IsAncestor $blogSub }} true: {{ $blog.IsAncestor $bundle }} true: {{ $blog.IsAncestor $page4 }} true: {{ $home.IsAncestor $page }} false: {{ $blog.IsAncestor $blog }} false: {{ $page.IsAncestor $blog }} false: {{ $blog.IsAncestor $home }} false: {{ $blogSub.IsAncestor $blog }}
+IsDescendant overlap1: false: {{ $overlap1.IsDescendant $overlap2 }}
+IsDescendant overlap2: false: {{ $overlap2.IsDescendant $overlap1 }}
+IsAncestor overlap1: false: {{ $overlap1.IsAncestor $overlap2 }}
+IsAncestor overlap2: false: {{ $overlap2.IsAncestor $overlap1 }}
+FirstSection: {{ $blogSub.FirstSection.RelPermalink }} {{ $blog.FirstSection.RelPermalink }} {{ $home.FirstSection.RelPermalink }} {{ $page.FirstSection.RelPermalink }}
+InSection: true: {{ $page.InSection $blog }} false: {{ $page.InSection $blogSub }}
+Next: {{ $page2.Next.RelPermalink }}
+NextInSection: {{ $page2.NextInSection.RelPermalink }}
+Pages: {{ range $blog.Pages }}{{ .RelPermalink }}|{{ end }}
+Sections: {{ range $home.Sections }}{{ .RelPermalink }}|{{ end }}
+Categories: {{ range .Site.Taxonomies.categories }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }}
+Category Terms: {{ $categories.Kind}}: {{ range $categories.Data.Terms.Alphabetical }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }}
+Category Funny: {{ $funny.Kind}}; {{ $funny.Data.Term }}: {{ range $funny.Pages }}{{ .RelPermalink }};|{{ end }}
+Pag Num Pages: {{ len .Paginator.Pages }}
+Pag Blog Num Pages: {{ len $blog.Paginator.Pages }}
+Blog Num RegularPages: {{ len $blog.RegularPages }}
+Blog Num Pages: {{ len $blog.Pages }}
+
+Draft1: {{ if (.Site.GetPage "blog/subsection/draft") }}FOUND{{ end }}|
+Draft2: {{ if (.Site.GetPage "blog/draftsection") }}FOUND{{ end }}|
+Draft3: {{ if (.Site.GetPage "blog/draftsection/page") }}FOUND{{ end }}|
+Draft4: {{ if (.Site.GetPage "blog/draftsection/sub") }}FOUND{{ end }}|
+Draft5: {{ if (.Site.GetPage "blog/draftsection/sub/page") }}FOUND{{ end }}|
+
+{{ define "print-page" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Date.Format "2006-01-02" }}|Current Section: {{ .CurrentSection.SectionsPath }}|Resources: {{ range .Resources }}{{ .ResourceType }}: {{ .RelPermalink }}|{{ end }}{{ end }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html",
+
+ `
+ Num Regular: 7
+ Main Sections: [blog]
+ Pag Num Pages: 7
+
+ Home: Hugo Home|/|2019-06-08|Current Section: |Resources:
+ Blog Section: Blogs|/blog/|2019-06-08|Current Section: blog|Resources:
+ Blog Sub Section: Page 3|/blog/subsection/|2019-06-03|Current Section: blog/subsection|Resources: application: /blog/subsection/subdata.json|
+ Page: Page 1|/blog/page1/|2019-06-01|Current Section: blog|Resources:
+ Bundle: Page 12|/blog/bundle/|0001-01-01|Current Section: blog|Resources: application: /blog/bundle/data.json|page: |
+ IsDescendant: true: true true: true true: true true: true true: true false: false false: false
+ IsAncestor: true: true true: true true: true true: true true: true true: true false: false false: false false: false false: false
+ IsDescendant overlap1: false: false
+ IsDescendant overlap2: false: false
+ IsAncestor overlap1: false: false
+ IsAncestor overlap2: false: false
+ FirstSection: /blog/ /blog/ / /blog/
+ InSection: true: true false: false
+ Next: /blog/page3/
+ NextInSection: /blog/page3/
+ Pages: /blog/page3/|/blog/subsection/|/blog/page2/|/blog/page1/|/blog/bundle/|
+ Sections: /blog/|/docs/|
+ Categories: /categories/funny/; funny; 11|
+ Category Terms: taxonomy: /categories/funny/; funny; 11|
+ Category Funny: term; funny: /blog/subsection/page4/;|/blog/page3/;|/blog/subsection/;|/blog/page2/;|/blog/page1/;|/blog/subsection/page5/;|/docs/page6/;|/blog/bundle/;|;|
+ Pag Num Pages: 7
+ Pag Blog Num Pages: 4
+ Blog Num RegularPages: 4
+ Blog Num Pages: 5
+
+ Draft1: |
+ Draft2: |
+ Draft3: |
+ Draft4: |
+ Draft5: |
+
+`)
+}
diff --git a/hugolib/content_render_hooks_test.go b/hugolib/content_render_hooks_test.go
new file mode 100644
index 000000000..dbfd46459
--- /dev/null
+++ b/hugolib/content_render_hooks_test.go
@@ -0,0 +1,429 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestRenderHookEditNestedPartial(t *testing.T) {
+ config := `
+baseURL="https://example.org"
+workingDir="/mywork"
+`
+ b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running()
+
+ b.WithTemplates("_default/single.html", "{{ .Content }}")
+ b.WithTemplates("partials/mypartial1.html", `PARTIAL1 {{ partial "mypartial2.html" }}`)
+ b.WithTemplates("partials/mypartial2.html", `PARTIAL2`)
+ b.WithTemplates("_default/_markup/render-link.html", `Link {{ .Text | safeHTML }}|{{ partial "mypartial1.html" . }}END`)
+
+ b.WithContent("p1.md", `---
+title: P1
+---
+
+[First Link](https://www.google.com "Google's Homepage")
+
+`)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `Link First Link|PARTIAL1 PARTIAL2END`)
+
+ b.EditFiles("layouts/partials/mypartial1.html", `PARTIAL1_EDITED {{ partial "mypartial2.html" }}`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `Link First Link|PARTIAL1_EDITED PARTIAL2END`)
+
+ b.EditFiles("layouts/partials/mypartial2.html", `PARTIAL2_EDITED`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `Link First Link|PARTIAL1_EDITED PARTIAL2_EDITEDEND`)
+}
+
+func TestRenderHooks(t *testing.T) {
+ config := `
+baseURL="https://example.org"
+workingDir="/mywork"
+
+[markup]
+[markup.goldmark]
+[markup.goldmark.parser]
+autoHeadingID = true
+autoHeadingIDType = "github"
+[markup.goldmark.parser.attribute]
+block = true
+title = true
+
+`
+ b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running()
+ b.WithTemplatesAdded("_default/single.html", `{{ .Content }}`)
+ b.WithTemplatesAdded("shortcodes/myshortcode1.html", `{{ partial "mypartial1" }}`)
+ b.WithTemplatesAdded("shortcodes/myshortcode2.html", `{{ partial "mypartial2" }}`)
+ b.WithTemplatesAdded("shortcodes/myshortcode3.html", `SHORT3|`)
+ b.WithTemplatesAdded("shortcodes/myshortcode4.html", `
+<div class="foo">
+{{ .Inner | markdownify }}
+</div>
+`)
+ b.WithTemplatesAdded("shortcodes/myshortcode5.html", `
+Inner Inline: {{ .Inner | .Page.RenderString }}
+Inner Block: {{ .Inner | .Page.RenderString (dict "display" "block" ) }}
+`)
+
+ b.WithTemplatesAdded("shortcodes/myshortcode6.html", `.Render: {{ .Page.Render "myrender" }}`)
+ b.WithTemplatesAdded("partials/mypartial1.html", `PARTIAL1`)
+ b.WithTemplatesAdded("partials/mypartial2.html", `PARTIAL2 {{ partial "mypartial3.html" }}`)
+ b.WithTemplatesAdded("partials/mypartial3.html", `PARTIAL3`)
+ b.WithTemplatesAdded("partials/mypartial4.html", `PARTIAL4`)
+ b.WithTemplatesAdded("customview/myrender.html", `myrender: {{ .Title }}|P4: {{ partial "mypartial4" }}`)
+ b.WithTemplatesAdded("_default/_markup/render-link.html", `{{ with .Page }}{{ .Title }}{{ end }}|{{ .Destination | safeURL }}|Title: {{ .Title | safeHTML }}|Text: {{ .Text | safeHTML }}|END`)
+ b.WithTemplatesAdded("docs/_markup/render-link.html", `Link docs section: {{ .Text | safeHTML }}|END`)
+ b.WithTemplatesAdded("_default/_markup/render-image.html", `IMAGE: {{ .Page.Title }}||{{ .Destination | safeURL }}|Title: {{ .Title | safeHTML }}|Text: {{ .Text | safeHTML }}|END`)
+ b.WithTemplatesAdded("_default/_markup/render-heading.html", `HEADING: {{ .Page.Title }}||Level: {{ .Level }}|Anchor: {{ .Anchor | safeURL }}|Text: {{ .Text | safeHTML }}|Attributes: {{ .Attributes }}|END`)
+ b.WithTemplatesAdded("docs/_markup/render-heading.html", `Docs Level: {{ .Level }}|END`)
+
+ b.WithContent("customview/p1.md", `---
+title: Custom View
+---
+
+{{< myshortcode6 >}}
+
+ `, "blog/p1.md", `---
+title: Cool Page
+---
+
+[First Link](https://www.google.com "Google's Homepage")
+<https://foo.bar/>
+https://bar.baz/
+<fake@example.com>
+<mailto:fake2@example.com>
+
+{{< myshortcode3 >}}
+
+[Second Link](https://www.google.com "Google's Homepage")
+
+Image:
+
+![Drag Racing](/images/Dragster.jpg "image title")
+
+Attributes:
+
+## Some Heading {.text-serif #a-heading title="Hovered"}
+
+
+`, "blog/p2.md", `---
+title: Cool Page2
+layout: mylayout
+---
+
+{{< myshortcode1 >}}
+
+[Some Text](https://www.google.com "Google's Homepage")
+
+,[No Whitespace Please](https://gohugo.io),
+
+
+
+`, "blog/p3.md", `---
+title: Cool Page3
+---
+
+{{< myshortcode2 >}}
+
+
+`, "docs/docs1.md", `---
+title: Docs 1
+---
+
+
+[Docs 1](https://www.google.com "Google's Homepage")
+
+
+`, "blog/p4.md", `---
+title: Cool Page With Image
+---
+
+Image:
+
+![Drag Racing](/images/Dragster.jpg "image title")
+
+
+`, "blog/p5.md", `---
+title: Cool Page With Markdownify
+---
+
+{{< myshortcode4 >}}
+Inner Link: [Inner Link](https://www.google.com "Google's Homepage")
+{{< /myshortcode4 >}}
+
+`, "blog/p6.md", `---
+title: With RenderString
+---
+
+{{< myshortcode5 >}}Inner Link: [Inner Link](https://www.gohugo.io "Hugo's Homepage"){{< /myshortcode5 >}}
+
+`, "blog/p7.md", `---
+title: With Headings
+---
+
+# Heading Level 1
+some text
+
+## Heading Level 2
+
+### Heading Level 3
+`,
+ "docs/p8.md", `---
+title: Doc With Heading
+---
+
+# Docs lvl 1
+
+`,
+ )
+
+ for i := 1; i <= 30; i++ {
+ // Add some content with no shortcodes or links, i.e no templates needed.
+ b.WithContent(fmt.Sprintf("blog/notempl%d.md", i), `---
+title: No Template
+---
+
+## Content
+`)
+ }
+ counters := &testCounters{}
+ b.Build(BuildCfg{testCounters: counters})
+ b.Assert(int(counters.contentRenderCounter), qt.Equals, 45)
+
+ b.AssertFileContent("public/blog/p1/index.html", `
+Cool Page|https://www.google.com|Title: Google's Homepage|Text: First Link|END
+Cool Page|https://foo.bar/|Title: |Text: https://foo.bar/|END
+Cool Page|https://bar.baz/|Title: |Text: https://bar.baz/|END
+Cool Page|mailto:fake@example.com|Title: |Text: fake@example.com|END
+Cool Page|mailto:fake2@example.com|Title: |Text: mailto:fake2@example.com|END
+Text: Second
+SHORT3|
+<p>IMAGE: Cool Page||/images/Dragster.jpg|Title: image title|Text: Drag Racing|END</p>
+`)
+
+ b.AssertFileContent("public/customview/p1/index.html", `.Render: myrender: Custom View|P4: PARTIAL4`)
+ b.AssertFileContent("public/blog/p2/index.html",
+ `PARTIAL
+,Cool Page2|https://gohugo.io|Title: |Text: No Whitespace Please|END,`,
+ )
+ b.AssertFileContent("public/blog/p3/index.html", `PARTIAL3`)
+ // We may add type template support later, keep this for then. b.AssertFileContent("public/docs/docs1/index.html", `Link docs section: Docs 1|END`)
+ b.AssertFileContent("public/blog/p4/index.html", `<p>IMAGE: Cool Page With Image||/images/Dragster.jpg|Title: image title|Text: Drag Racing|END</p>`)
+ // markdownify
+ b.AssertFileContent("public/blog/p5/index.html", "Inner Link: |https://www.google.com|Title: Google's Homepage|Text: Inner Link|END")
+
+ b.AssertFileContent("public/blog/p6/index.html",
+ "Inner Inline: Inner Link: With RenderString|https://www.gohugo.io|Title: Hugo's Homepage|Text: Inner Link|END",
+ "Inner Block: <p>Inner Link: With RenderString|https://www.gohugo.io|Title: Hugo's Homepage|Text: Inner Link|END</p>",
+ )
+
+ b.EditFiles(
+ "layouts/_default/_markup/render-link.html", `EDITED: {{ .Destination | safeURL }}|`,
+ "layouts/_default/_markup/render-image.html", `IMAGE EDITED: {{ .Destination | safeURL }}|`,
+ "layouts/docs/_markup/render-link.html", `DOCS EDITED: {{ .Destination | safeURL }}|`,
+ "layouts/partials/mypartial1.html", `PARTIAL1_EDITED`,
+ "layouts/partials/mypartial3.html", `PARTIAL3_EDITED`,
+ "layouts/partials/mypartial4.html", `PARTIAL4_EDITED`,
+ "layouts/shortcodes/myshortcode3.html", `SHORT3_EDITED|`,
+ )
+
+ counters = &testCounters{}
+ b.Build(BuildCfg{testCounters: counters})
+ // Make sure that only content using the changed templates are re-rendered.
+ b.Assert(int(counters.contentRenderCounter), qt.Equals, 7)
+
+ b.AssertFileContent("public/customview/p1/index.html", `.Render: myrender: Custom View|P4: PARTIAL4_EDITED`)
+ b.AssertFileContent("public/blog/p1/index.html", `<p>EDITED: https://www.google.com|</p>`, "SHORT3_EDITED|")
+ b.AssertFileContent("public/blog/p2/index.html", `PARTIAL1_EDITED`)
+ b.AssertFileContent("public/blog/p3/index.html", `PARTIAL3_EDITED`)
+ // We may add type template support later, keep this for then. b.AssertFileContent("public/docs/docs1/index.html", `DOCS EDITED: https://www.google.com|</p>`)
+ b.AssertFileContent("public/blog/p4/index.html", `IMAGE EDITED: /images/Dragster.jpg|`)
+ b.AssertFileContent("public/blog/p6/index.html", "<p>Inner Link: EDITED: https://www.gohugo.io|</p>")
+ b.AssertFileContent("public/blog/p7/index.html", "HEADING: With Headings||Level: 1|Anchor: heading-level-1|Text: Heading Level 1|Attributes: map[id:heading-level-1]|END<p>some text</p>\nHEADING: With Headings||Level: 2|Anchor: heading-level-2|Text: Heading Level 2|Attributes: map[id:heading-level-2]|ENDHEADING: With Headings||Level: 3|Anchor: heading-level-3|Text: Heading Level 3|Attributes: map[id:heading-level-3]|END")
+
+ // https://github.com/gohugoio/hugo/issues/7349
+ b.AssertFileContent("public/docs/p8/index.html", "Docs Level: 1")
+}
+
+func TestRenderHooksDeleteTemplate(t *testing.T) {
+ config := `
+baseURL="https://example.org"
+workingDir="/mywork"
+`
+ b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running()
+ b.WithTemplatesAdded("_default/single.html", `{{ .Content }}`)
+ b.WithTemplatesAdded("_default/_markup/render-link.html", `html-render-link`)
+
+ b.WithContent("p1.md", `---
+title: P1
+---
+[First Link](https://www.google.com "Google's Homepage")
+
+`)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `<p>html-render-link</p>`)
+
+ b.RemoveFiles(
+ "layouts/_default/_markup/render-link.html",
+ )
+
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/p1/index.html", `<p><a href="https://www.google.com" title="Google's Homepage">First Link</a></p>`)
+}
+
+func TestRenderHookAddTemplate(t *testing.T) {
+ config := `
+baseURL="https://example.org"
+workingDir="/mywork"
+`
+ b := newTestSitesBuilder(t).WithWorkingDir("/mywork").WithConfigFile("toml", config).Running()
+ b.WithTemplatesAdded("_default/single.html", `{{ .Content }}`)
+
+ b.WithContent("p1.md", `---
+title: P1
+---
+[First Link](https://www.google.com "Google's Homepage")
+
+`)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `<p><a href="https://www.google.com" title="Google's Homepage">First Link</a></p>`)
+
+ b.EditFiles("layouts/_default/_markup/render-link.html", `html-render-link`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `<p>html-render-link</p>`)
+}
+
+func TestRenderHooksRSS(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithTemplates("index.html", `
+{{ $p := site.GetPage "p1.md" }}
+{{ $p2 := site.GetPage "p2.md" }}
+
+P1: {{ $p.Content }}
+P2: {{ $p2.Content }}
+
+ `, "index.xml", `
+
+{{ $p2 := site.GetPage "p2.md" }}
+{{ $p3 := site.GetPage "p3.md" }}
+
+P2: {{ $p2.Content }}
+P3: {{ $p3.Content }}
+
+
+ `,
+ "_default/_markup/render-link.html", `html-link: {{ .Destination | safeURL }}|`,
+ "_default/_markup/render-link.rss.xml", `xml-link: {{ .Destination | safeURL }}|`,
+ "_default/_markup/render-heading.html", `html-heading: {{ .Text }}|`,
+ "_default/_markup/render-heading.rss.xml", `xml-heading: {{ .Text }}|`,
+ )
+
+ b.WithContent("p1.md", `---
+title: "p1"
+---
+P1. [I'm an inline-style link](https://www.gohugo.io)
+
+# Heading in p1
+
+`, "p2.md", `---
+title: "p2"
+---
+P1. [I'm an inline-style link](https://www.bep.is)
+
+# Heading in p2
+
+`,
+ "p3.md", `---
+title: "p2"
+outputs: ["rss"]
+---
+P3. [I'm an inline-style link](https://www.example.org)
+
+`,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+P1: <p>P1. html-link: https://www.gohugo.io|</p>
+html-heading: Heading in p1|
+html-heading: Heading in p2|
+`)
+ b.AssertFileContent("public/index.xml", `
+P2: <p>P1. xml-link: https://www.bep.is|</p>
+P3: <p>P3. xml-link: https://www.example.org|</p>
+xml-heading: Heading in p2|
+`)
+}
+
+// https://github.com/gohugoio/hugo/issues/6629
+func TestRenderLinkWithMarkupInText(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+
+baseURL="https://example.org"
+
+[markup]
+ [markup.goldmark]
+ [markup.goldmark.renderer]
+ unsafe = true
+
+`)
+
+ b.WithTemplates("index.html", `
+{{ $p := site.GetPage "p1.md" }}
+P1: {{ $p.Content }}
+
+ `,
+ "_default/_markup/render-link.html", `html-link: {{ .Destination | safeURL }}|Text: {{ .Text | safeHTML }}|Plain: {{ .PlainText | safeHTML }}`,
+ "_default/_markup/render-image.html", `html-image: {{ .Destination | safeURL }}|Text: {{ .Text | safeHTML }}|Plain: {{ .PlainText | safeHTML }}`,
+ )
+
+ b.WithContent("p1.md", `---
+title: "p1"
+---
+
+START: [**should be bold**](https://gohugo.io)END
+
+Some regular **markup**.
+
+Image:
+
+![Hello<br> Goodbye](image.jpg)END
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+ P1: <p>START: html-link: https://gohugo.io|Text: <strong>should be bold</strong>|Plain: should be boldEND</p>
+<p>Some regular <strong>markup</strong>.</p>
+<p>html-image: image.jpg|Text: Hello<br> Goodbye|Plain: Hello GoodbyeEND</p>
+`)
+}
diff --git a/hugolib/datafiles_test.go b/hugolib/datafiles_test.go
new file mode 100644
index 000000000..a6bcae944
--- /dev/null
+++ b/hugolib/datafiles_test.go
@@ -0,0 +1,444 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/deps"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDataFromTheme(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[module]
+[[module.imports]]
+path = "mytheme"
+-- data/a.toml --
+d1 = "d1main"
+d2 = "d2main"
+-- themes/mytheme/data/a.toml --
+d1 = "d1theme"
+d2 = "d2theme"
+d3 = "d3theme"
+-- layouts/index.html --
+d1: {{ site.Data.a.d1 }}|d2: {{ site.Data.a.d2 }}|d3: {{ site.Data.a.d3 }}
+
+`
+
+ b := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+d1: d1main|d2: d2main|d3: d3theme
+ `)
+}
+
+func TestDataDir(t *testing.T) {
+ t.Parallel()
+ equivDataDirs := make([]dataDir, 3)
+ equivDataDirs[0].addSource("data/test/a.json", `{ "b" : { "c1": "red" , "c2": "blue" } }`)
+ equivDataDirs[1].addSource("data/test/a.yaml", "b:\n c1: red\n c2: blue")
+ equivDataDirs[2].addSource("data/test/a.toml", "[b]\nc1 = \"red\"\nc2 = \"blue\"\n")
+ expected := map[string]any{
+ "test": map[string]any{
+ "a": map[string]any{
+ "b": map[string]any{
+ "c1": "red",
+ "c2": "blue",
+ },
+ },
+ },
+ }
+ doTestEquivalentDataDirs(t, equivDataDirs, expected)
+}
+
+// Unable to enforce equivalency for int values as
+// the JSON, YAML and TOML parsers return
+// float64, int, int64 respectively. They all return
+// float64 for float values though:
+func TestDataDirNumeric(t *testing.T) {
+ t.Parallel()
+ equivDataDirs := make([]dataDir, 3)
+ equivDataDirs[0].addSource("data/test/a.json", `{ "b" : { "c1": 1.7 , "c2": 2.9 } }`)
+ equivDataDirs[1].addSource("data/test/a.yaml", "b:\n c1: 1.7\n c2: 2.9")
+ equivDataDirs[2].addSource("data/test/a.toml", "[b]\nc1 = 1.7\nc2 = 2.9\n")
+ expected := map[string]any{
+ "test": map[string]any{
+ "a": map[string]any{
+ "b": map[string]any{
+ "c1": 1.7,
+ "c2": 2.9,
+ },
+ },
+ },
+ }
+ doTestEquivalentDataDirs(t, equivDataDirs, expected)
+}
+
+func TestDataDirBoolean(t *testing.T) {
+ t.Parallel()
+ equivDataDirs := make([]dataDir, 3)
+ equivDataDirs[0].addSource("data/test/a.json", `{ "b" : { "c1": true , "c2": false } }`)
+ equivDataDirs[1].addSource("data/test/a.yaml", "b:\n c1: true\n c2: false")
+ equivDataDirs[2].addSource("data/test/a.toml", "[b]\nc1 = true\nc2 = false\n")
+ expected := map[string]any{
+ "test": map[string]any{
+ "a": map[string]any{
+ "b": map[string]any{
+ "c1": true,
+ "c2": false,
+ },
+ },
+ },
+ }
+ doTestEquivalentDataDirs(t, equivDataDirs, expected)
+}
+
+func TestDataDirTwoFiles(t *testing.T) {
+ t.Parallel()
+ equivDataDirs := make([]dataDir, 3)
+
+ equivDataDirs[0].addSource("data/test/foo.json", `{ "bar": "foofoo" }`)
+ equivDataDirs[0].addSource("data/test.json", `{ "hello": [ "world", "foo" ] }`)
+
+ equivDataDirs[1].addSource("data/test/foo.yaml", "bar: foofoo")
+ equivDataDirs[1].addSource("data/test.yaml", "hello:\n- world\n- foo")
+
+ equivDataDirs[2].addSource("data/test/foo.toml", "bar = \"foofoo\"")
+ equivDataDirs[2].addSource("data/test.toml", "hello = [\"world\", \"foo\"]")
+
+ expected :=
+ map[string]any{
+ "test": map[string]any{
+ "hello": []any{
+ "world",
+ "foo",
+ },
+ "foo": map[string]any{
+ "bar": "foofoo",
+ },
+ },
+ }
+
+ doTestEquivalentDataDirs(t, equivDataDirs, expected)
+}
+
+func TestDataDirOverriddenValue(t *testing.T) {
+ t.Parallel()
+ equivDataDirs := make([]dataDir, 3)
+
+ // filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this:
+ equivDataDirs[0].addSource("data/a.json", `{"a": "1"}`)
+ equivDataDirs[0].addSource("data/test/v1.json", `{"v1-2": "2"}`)
+ equivDataDirs[0].addSource("data/test/v2.json", `{"v2": ["2", "3"]}`)
+ equivDataDirs[0].addSource("data/test.json", `{"v1": "1"}`)
+
+ equivDataDirs[1].addSource("data/a.yaml", "a: \"1\"")
+ equivDataDirs[1].addSource("data/test/v1.yaml", "v1-2: \"2\"")
+ equivDataDirs[1].addSource("data/test/v2.yaml", "v2:\n- \"2\"\n- \"3\"")
+ equivDataDirs[1].addSource("data/test.yaml", "v1: \"1\"")
+
+ equivDataDirs[2].addSource("data/a.toml", "a = \"1\"")
+ equivDataDirs[2].addSource("data/test/v1.toml", "v1-2 = \"2\"")
+ equivDataDirs[2].addSource("data/test/v2.toml", "v2 = [\"2\", \"3\"]")
+ equivDataDirs[2].addSource("data/test.toml", "v1 = \"1\"")
+
+ expected :=
+ map[string]any{
+ "a": map[string]any{"a": "1"},
+ "test": map[string]any{
+ "v1": map[string]any{"v1-2": "2"},
+ "v2": map[string]any{"v2": []any{"2", "3"}},
+ },
+ }
+
+ doTestEquivalentDataDirs(t, equivDataDirs, expected)
+}
+
+// Issue #4361, #3890
+func TestDataDirArrayAtTopLevelOfFile(t *testing.T) {
+ t.Parallel()
+ equivDataDirs := make([]dataDir, 2)
+
+ equivDataDirs[0].addSource("data/test.json", `[ { "hello": "world" }, { "what": "time" }, { "is": "lunch?" } ]`)
+ equivDataDirs[1].addSource("data/test.yaml", `
+- hello: world
+- what: time
+- is: lunch?
+`)
+
+ expected :=
+ map[string]any{
+ "test": []any{
+ map[string]any{"hello": "world"},
+ map[string]any{"what": "time"},
+ map[string]any{"is": "lunch?"},
+ },
+ }
+
+ doTestEquivalentDataDirs(t, equivDataDirs, expected)
+}
+
+// Issue #892
+func TestDataDirMultipleSources(t *testing.T) {
+ t.Parallel()
+
+ var dd dataDir
+ dd.addSource("data/test/first.yaml", "bar: 1")
+ dd.addSource("themes/mytheme/data/test/first.yaml", "bar: 2")
+ dd.addSource("data/test/second.yaml", "tender: 2")
+
+ expected :=
+ map[string]any{
+ "test": map[string]any{
+ "first": map[string]any{
+ "bar": 1,
+ },
+ "second": map[string]any{
+ "tender": 2,
+ },
+ },
+ }
+
+ doTestDataDir(t, dd, expected,
+ "theme", "mytheme")
+}
+
+// test (and show) the way values from four different sources,
+// including theme data, commingle and override
+func TestDataDirMultipleSourcesCommingled(t *testing.T) {
+ t.Parallel()
+
+ var dd dataDir
+ dd.addSource("data/a.json", `{ "b1" : { "c1": "data/a" }, "b2": "data/a", "b3": ["x", "y", "z"] }`)
+ dd.addSource("themes/mytheme/data/a.json", `{ "b1": "mytheme/data/a", "b2": "mytheme/data/a", "b3": "mytheme/data/a" }`)
+ dd.addSource("themes/mytheme/data/a/b1.json", `{ "c1": "mytheme/data/a/b1", "c2": "mytheme/data/a/b1" }`)
+ dd.addSource("data/a/b1.json", `{ "c1": "data/a/b1" }`)
+
+ // Per handleDataFile() comment:
+ // 1. A theme uses the same key; the main data folder wins
+ // 2. A sub folder uses the same key: the sub folder wins
+ expected :=
+ map[string]any{
+ "a": map[string]any{
+ "b1": map[string]any{
+ "c1": "data/a/b1",
+ "c2": "mytheme/data/a/b1",
+ },
+ "b2": "data/a",
+ "b3": []any{"x", "y", "z"},
+ },
+ }
+
+ doTestDataDir(t, dd, expected, "theme", "mytheme")
+}
+
+func TestDataDirCollidingChildArrays(t *testing.T) {
+ t.Parallel()
+
+ var dd dataDir
+ dd.addSource("themes/mytheme/data/a/b2.json", `["Q", "R", "S"]`)
+ dd.addSource("data/a.json", `{ "b1" : "data/a", "b2" : ["x", "y", "z"] }`)
+ dd.addSource("data/a/b2.json", `["1", "2", "3"]`)
+
+ // Per handleDataFile() comment:
+ // 1. A theme uses the same key; the main data folder wins
+ // 2. A sub folder uses the same key: the sub folder wins
+ expected :=
+ map[string]any{
+ "a": map[string]any{
+ "b1": "data/a",
+ "b2": []any{"1", "2", "3"},
+ },
+ }
+
+ doTestDataDir(t, dd, expected, "theme", "mytheme")
+}
+
+func TestDataDirCollidingTopLevelArrays(t *testing.T) {
+ t.Parallel()
+
+ var dd dataDir
+ dd.addSource("themes/mytheme/data/a/b1.json", `["x", "y", "z"]`)
+ dd.addSource("data/a/b1.json", `["1", "2", "3"]`)
+
+ expected :=
+ map[string]any{
+ "a": map[string]any{
+ "b1": []any{"1", "2", "3"},
+ },
+ }
+
+ doTestDataDir(t, dd, expected, "theme", "mytheme")
+}
+
+func TestDataDirCollidingMapsAndArrays(t *testing.T) {
+ t.Parallel()
+
+ var dd dataDir
+ // on
+ dd.addSource("themes/mytheme/data/a.json", `["1", "2", "3"]`)
+ dd.addSource("themes/mytheme/data/b.json", `{ "film" : "Logan Lucky" }`)
+ dd.addSource("data/a.json", `{ "music" : "Queen's Rebuke" }`)
+ dd.addSource("data/b.json", `["x", "y", "z"]`)
+
+ expected :=
+ map[string]any{
+ "a": map[string]any{
+ "music": "Queen's Rebuke",
+ },
+ "b": []any{"x", "y", "z"},
+ }
+
+ doTestDataDir(t, dd, expected, "theme", "mytheme")
+}
+
+// https://discourse.gohugo.io/t/recursive-data-file-parsing/26192
+func TestDataDirNestedDirectories(t *testing.T) {
+ t.Parallel()
+
+ var dd dataDir
+ dd.addSource("themes/mytheme/data/a.json", `["1", "2", "3"]`)
+ dd.addSource("data/test1/20/06/a.json", `{ "artist" : "Michael Brecker" }`)
+ dd.addSource("data/test1/20/05/b.json", `{ "artist" : "Charlie Parker" }`)
+
+ expected :=
+ map[string]any{
+ "a": []any{"1", "2", "3"},
+ "test1": map[string]any{"20": map[string]any{"05": map[string]any{"b": map[string]any{"artist": "Charlie Parker"}}, "06": map[string]any{"a": map[string]any{"artist": "Michael Brecker"}}}},
+ }
+
+ doTestDataDir(t, dd, expected, "theme", "mytheme")
+}
+
+type dataDir struct {
+ sources [][2]string
+}
+
+func (d *dataDir) addSource(path, content string) {
+ d.sources = append(d.sources, [2]string{path, content})
+}
+
+func doTestEquivalentDataDirs(t *testing.T, equivDataDirs []dataDir, expected any, configKeyValues ...any) {
+ for i, dd := range equivDataDirs {
+ err := doTestDataDirImpl(t, dd, expected, configKeyValues...)
+ if err != "" {
+ t.Errorf("equivDataDirs[%d]: %s", i, err)
+ }
+ }
+}
+
+func doTestDataDir(t *testing.T, dd dataDir, expected any, configKeyValues ...any) {
+ err := doTestDataDirImpl(t, dd, expected, configKeyValues...)
+ if err != "" {
+ t.Error(err)
+ }
+}
+
+func doTestDataDirImpl(t *testing.T, dd dataDir, expected any, configKeyValues ...any) (err string) {
+ cfg, fs := newTestCfg()
+
+ for i := 0; i < len(configKeyValues); i += 2 {
+ cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
+ }
+
+ var (
+ logger = loggers.NewErrorLogger()
+ depsCfg = deps.DepsCfg{Fs: fs, Cfg: cfg, Logger: logger}
+ )
+
+ writeSource(t, fs, filepath.Join("content", "dummy.md"), "content")
+ writeSourcesToSource(t, "", fs, dd.sources...)
+
+ expectBuildError := false
+
+ if ok, shouldFail := expected.(bool); ok && shouldFail {
+ expectBuildError = true
+ }
+
+	// Trap and report panics as unmarshaling errors so that the test suite can complete.
+ defer func() {
+ if r := recover(); r != nil {
+ // Capture the stack trace
+ buf := make([]byte, 10000)
+ runtime.Stack(buf, false)
+ t.Errorf("PANIC: %s\n\nStack Trace : %s", r, string(buf))
+ }
+ }()
+
+ s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true})
+
+ if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) {
+ // This disabled code detects the situation described in the WARNING message below.
+ // The situation seems to only occur for TOML data with integer values.
+ // Perhaps the TOML parser returns ints in another type.
+ // Re-enable temporarily to debug fails that should be passing.
+ // Re-enable permanently if reflect.DeepEqual is simply too strict.
+ /*
+ exp := fmt.Sprintf("%#v", expected)
+ got := fmt.Sprintf("%#v", s.Data)
+ if exp == got {
+ t.Logf("WARNING: reflect.DeepEqual returned FALSE for values that appear equal.\n"+
+ "Treating as equal for the purpose of the test, but this maybe should be investigated.\n"+
+ "Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data)
+ return
+ }
+ */
+
+ return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data())
+ }
+
+ return
+}
+
+func TestDataFromShortcode(t *testing.T) {
+ t.Parallel()
+
+ var (
+ cfg, fs = newTestCfg()
+ c = qt.New(t)
+ )
+
+ writeSource(t, fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"")
+ writeSource(t, fs, "layouts/_default/single.html", `
+* Slogan from template: {{ .Site.Data.hugo.slogan }}
+* {{ .Content }}`)
+ writeSource(t, fs, "layouts/shortcodes/d.html", `{{ .Page.Site.Data.hugo.slogan }}`)
+ writeSource(t, fs, "content/c.md", `---
+---
+Slogan from shortcode: {{< d >}}
+`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ content := readSource(t, fs, "public/c/index.html")
+
+ c.Assert(content, qt.Contains, "Slogan from template: Hugo Rocks!")
+ c.Assert(content, qt.Contains, "Slogan from shortcode: Hugo Rocks!")
+}
diff --git a/hugolib/dates_test.go b/hugolib/dates_test.go
new file mode 100644
index 000000000..47629fb0a
--- /dev/null
+++ b/hugolib/dates_test.go
@@ -0,0 +1,275 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+
+ qt "github.com/frankban/quicktest"
+
+ "strings"
+ "testing"
+)
+
+func TestDateFormatMultilingual(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+baseURL = "https://example.org"
+
+defaultContentLanguage = "en"
+defaultContentLanguageInSubDir = true
+
+[languages]
+[languages.en]
+weight=10
+[languages.nn]
+weight=20
+
+`)
+
+ pageWithDate := `---
+title: Page
+date: 2021-07-18
+---
+`
+
+ b.WithContent(
+ "_index.en.md", pageWithDate,
+ "_index.nn.md", pageWithDate,
+ )
+
+ b.WithTemplatesAdded("index.html", `
+Date: {{ .Date | time.Format ":date_long" }}
+ `)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/en/index.html", `Date: July 18, 2021`)
+ b.AssertFileContent("public/nn/index.html", `Date: 18. juli 2021`)
+
+}
+
+func TestTimeZones(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+baseURL = "https://example.org"
+
+defaultContentLanguage = "en"
+defaultContentLanguageInSubDir = true
+
+[languages]
+[languages.en]
+timeZone="UTC"
+weight=10
+[languages.nn]
+timeZone="America/Antigua"
+weight=20
+
+`)
+
+ const (
+ pageTemplYaml = `---
+title: Page
+date: %s
+lastMod: %s
+publishDate: %s
+expiryDate: %s
+---
+`
+
+ pageTemplTOML = `+++
+title="Page"
+date=%s
+lastMod=%s
+publishDate=%s
+expiryDate=%s
++++
+`
+
+ shortDateTempl = `%d-07-%d`
+ longDateTempl = `%d-07-%d 15:28:01`
+ )
+
+ createPageContent := func(pageTempl, dateTempl string, quoted bool) string {
+ createDate := func(year, i int) string {
+ d := fmt.Sprintf(dateTempl, year, i)
+ if quoted {
+ return fmt.Sprintf("%q", d)
+ }
+
+ return d
+ }
+
+ return fmt.Sprintf(
+ pageTempl,
+ createDate(2021, 10),
+ createDate(2021, 11),
+ createDate(2021, 12),
+ createDate(2099, 13), // This test will fail in 2099 :-)
+ )
+ }
+
+ b.WithContent(
+ // YAML
+ "short-date-yaml-unqouted.en.md", createPageContent(pageTemplYaml, shortDateTempl, false),
+ "short-date-yaml-unqouted.nn.md", createPageContent(pageTemplYaml, shortDateTempl, false),
+ "short-date-yaml-qouted.en.md", createPageContent(pageTemplYaml, shortDateTempl, true),
+ "short-date-yaml-qouted.nn.md", createPageContent(pageTemplYaml, shortDateTempl, true),
+ "long-date-yaml-unqouted.en.md", createPageContent(pageTemplYaml, longDateTempl, false),
+ "long-date-yaml-unqouted.nn.md", createPageContent(pageTemplYaml, longDateTempl, false),
+ // TOML
+ "short-date-toml-unqouted.en.md", createPageContent(pageTemplTOML, shortDateTempl, false),
+ "short-date-toml-unqouted.nn.md", createPageContent(pageTemplTOML, shortDateTempl, false),
+ "short-date-toml-qouted.en.md", createPageContent(pageTemplTOML, shortDateTempl, true),
+ "short-date-toml-qouted.nn.md", createPageContent(pageTemplTOML, shortDateTempl, true),
+ )
+
+ const datesTempl = `
+Date: {{ .Date | safeHTML }}
+Lastmod: {{ .Lastmod | safeHTML }}
+PublishDate: {{ .PublishDate | safeHTML }}
+ExpiryDate: {{ .ExpiryDate | safeHTML }}
+
+ `
+
+ b.WithTemplatesAdded(
+ "_default/single.html", datesTempl,
+ )
+
+ b.Build(BuildCfg{})
+
+ expectShortDateEn := `
+Date: 2021-07-10 00:00:00 +0000 UTC
+Lastmod: 2021-07-11 00:00:00 +0000 UTC
+PublishDate: 2021-07-12 00:00:00 +0000 UTC
+ExpiryDate: 2099-07-13 00:00:00 +0000 UTC`
+
+ expectShortDateNn := strings.ReplaceAll(expectShortDateEn, "+0000 UTC", "-0400 AST")
+
+ expectLongDateEn := `
+Date: 2021-07-10 15:28:01 +0000 UTC
+Lastmod: 2021-07-11 15:28:01 +0000 UTC
+PublishDate: 2021-07-12 15:28:01 +0000 UTC
+ExpiryDate: 2099-07-13 15:28:01 +0000 UTC`
+
+ expectLongDateNn := strings.ReplaceAll(expectLongDateEn, "+0000 UTC", "-0400 AST")
+
+ // TODO(bep) create a common proposal for go-yaml, go-toml
+ // for a custom date parser hook to handle these time zones.
+	// JSON is omitted from this test as JSON does not (to my knowledge)
+ // have date literals.
+
+ // YAML
+	// Note: This is with go-yaml v2; I suspect v3 will fail with the unquoted values.
+ b.AssertFileContent("public/en/short-date-yaml-unqouted/index.html", expectShortDateEn)
+ b.AssertFileContent("public/nn/short-date-yaml-unqouted/index.html", expectShortDateNn)
+ b.AssertFileContent("public/en/short-date-yaml-qouted/index.html", expectShortDateEn)
+ b.AssertFileContent("public/nn/short-date-yaml-qouted/index.html", expectShortDateNn)
+
+ b.AssertFileContent("public/en/long-date-yaml-unqouted/index.html", expectLongDateEn)
+ b.AssertFileContent("public/nn/long-date-yaml-unqouted/index.html", expectLongDateNn)
+
+ // TOML
+ // These fails: TOML (Burnt Sushi) defaults to local timezone.
+ // TODO(bep) check go-toml
+ b.AssertFileContent("public/en/short-date-toml-unqouted/index.html", expectShortDateEn)
+ b.AssertFileContent("public/nn/short-date-toml-unqouted/index.html", expectShortDateNn)
+ b.AssertFileContent("public/en/short-date-toml-qouted/index.html", expectShortDateEn)
+ b.AssertFileContent("public/nn/short-date-toml-qouted/index.html", expectShortDateNn)
+
+}
+
+// Issue 8832
+func TestTimeZoneInvalid(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithConfigFile("toml", `
+
+timeZone = "America/LosAngeles" # Should be America/Los_Angeles
+`)
+
+ err := b.CreateSitesE()
+ b.Assert(err, qt.Not(qt.IsNil))
+ b.Assert(err.Error(), qt.Contains, `failed to load config: invalid timeZone for language "en": unknown time zone America/LosAngeles`)
+}
+
+// Issue 8835
+func TestTimeOnError(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithTemplates("index.html", `time: {{ time "2020-10-20" "invalid-timezone" }}`)
+ b.WithContent("p1.md", "")
+
+ b.Assert(b.BuildE(BuildCfg{}), qt.Not(qt.IsNil))
+
+}
+
+func TestTOMLDates(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+timeZone = "America/Los_Angeles"
+-- content/_index.md --
+---
+date: "2020-10-20"
+---
+-- content/p1.md --
++++
+title = "TOML Date with UTC offset"
+date = 2021-08-16T06:00:00+00:00
++++
+
+
+## Foo
+-- data/mydata.toml --
+date = 2020-10-20
+talks = [
+ { date = 2017-01-23, name = "Past talk 1" },
+ { date = 2017-01-24, name = "Past talk 2" },
+ { date = 2017-01-26, name = "Past talk 3" },
+ { date = 2050-02-12, name = "Future talk 1" },
+ { date = 2050-02-13, name = "Future talk 2" },
+]
+-- layouts/index.html --
+{{ $futureTalks := where site.Data.mydata.talks "date" ">" now }}
+{{ $pastTalks := where site.Data.mydata.talks "date" "<" now }}
+
+{{ $homeDate := site.Home.Date }}
+{{ $p1Date := (site.GetPage "p1").Date }}
+Future talks: {{ len $futureTalks }}
+Past talks: {{ len $pastTalks }}
+
+Home's Date should be greater than past: {{ gt $homeDate (index $pastTalks 0).date }}
+Home's Date should be less than future: {{ lt $homeDate (index $futureTalks 0).date }}
+Home's Date should be equal mydata date: {{ eq $homeDate site.Data.mydata.date }}
+Full time: {{ $p1Date | time.Format ":time_full" }}
+`
+
+ b := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+Future talks: 2
+Past talks: 3
+Home's Date should be greater than past: true
+Home's Date should be less than future: true
+Home's Date should be equal mydata date: true
+Full time: 6:00:00 am UTC
+`)
+}
diff --git a/hugolib/disableKinds_test.go b/hugolib/disableKinds_test.go
new file mode 100644
index 000000000..87a60d636
--- /dev/null
+++ b/hugolib/disableKinds_test.go
@@ -0,0 +1,417 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package hugolib
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func TestDisable(t *testing.T) {
+ c := qt.New(t)
+
+ newSitesBuilder := func(c *qt.C, disableKind string) *sitesBuilder {
+ config := fmt.Sprintf(`
+baseURL = "http://example.com/blog"
+enableRobotsTXT = true
+ignoreErrors = ["error-disable-taxonomy"]
+disableKinds = [%q]
+`, disableKind)
+
+ b := newTestSitesBuilder(c)
+ b.WithTemplatesAdded("_default/single.html", `single`)
+ b.WithConfigFile("toml", config).WithContent("sect/page.md", `
+---
+title: Page
+categories: ["mycat"]
+tags: ["mytag"]
+---
+
+`, "sect/no-list.md", `
+---
+title: No List
+_build:
+ list: false
+---
+
+`, "sect/no-render.md", `
+---
+title: No List
+_build:
+ render: false
+---
+`,
+ "sect/no-render-link.md", `
+---
+title: No Render Link
+aliases: ["/link-alias"]
+_build:
+ render: link
+---
+`,
+ "sect/no-publishresources/index.md", `
+---
+title: No Publish Resources
+_build:
+ publishResources: false
+---
+
+`, "sect/headlessbundle/index.md", `
+---
+title: Headless
+headless: true
+---
+
+
+`, "headless-local/_index.md", `
+---
+title: Headless Local Lists
+cascade:
+ _build:
+ render: false
+ list: local
+ publishResources: false
+---
+
+`, "headless-local/headless-local-page.md", "---\ntitle: Headless Local Page\n---",
+ "headless-local/sub/_index.md", `
+---
+title: Headless Local Lists Sub
+---
+
+`, "headless-local/sub/headless-local-sub-page.md", "---\ntitle: Headless Local Sub Page\n---",
+ )
+
+ b.WithSourceFile("content/sect/headlessbundle/data.json", "DATA")
+ b.WithSourceFile("content/sect/no-publishresources/data.json", "DATA")
+
+ return b
+ }
+
+ getPage := func(b *sitesBuilder, ref string) page.Page {
+ b.Helper()
+ p, err := b.H.Sites[0].getPageNew(nil, ref)
+ b.Assert(err, qt.IsNil)
+ return p
+ }
+
+ getPageInSitePages := func(b *sitesBuilder, ref string) page.Page {
+ b.Helper()
+ for _, pages := range []page.Pages{b.H.Sites[0].Pages(), b.H.Sites[0].RegularPages()} {
+ for _, p := range pages {
+ if ref == p.(*pageState).sourceRef() {
+ return p
+ }
+ }
+ }
+ return nil
+ }
+
+ getPageInPagePages := func(p page.Page, ref string, pageCollections ...page.Pages) page.Page {
+ if len(pageCollections) == 0 {
+ pageCollections = []page.Pages{p.Pages(), p.RegularPages(), p.RegularPagesRecursive(), p.Sections()}
+ }
+ for _, pages := range pageCollections {
+ for _, p := range pages {
+ if ref == p.(*pageState).sourceRef() {
+ return p
+ }
+ }
+ }
+ return nil
+ }
+
+ disableKind := page.KindPage
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ s := b.H.Sites[0]
+ b.Assert(getPage(b, "/sect/page.md"), qt.IsNil)
+ b.Assert(b.CheckExists("public/sect/page/index.html"), qt.Equals, false)
+ b.Assert(getPageInSitePages(b, "/sect/page.md"), qt.IsNil)
+ b.Assert(getPageInPagePages(getPage(b, "/"), "/sect/page.md"), qt.IsNil)
+
+ // Also check the side effects
+ b.Assert(b.CheckExists("public/categories/mycat/index.html"), qt.Equals, false)
+ b.Assert(len(s.Taxonomies()["categories"]), qt.Equals, 0)
+ })
+
+ disableKind = page.KindTerm
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ s := b.H.Sites[0]
+ b.Assert(b.CheckExists("public/categories/index.html"), qt.Equals, true)
+ b.Assert(b.CheckExists("public/categories/mycat/index.html"), qt.Equals, false)
+ b.Assert(len(s.Taxonomies()["categories"]), qt.Equals, 0)
+ b.Assert(getPage(b, "/categories"), qt.Not(qt.IsNil))
+ b.Assert(getPage(b, "/categories/mycat"), qt.IsNil)
+ })
+
+ disableKind = page.KindTaxonomy
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ s := b.H.Sites[0]
+ b.Assert(b.CheckExists("public/categories/mycat/index.html"), qt.Equals, true)
+ b.Assert(b.CheckExists("public/categories/index.html"), qt.Equals, false)
+ b.Assert(len(s.Taxonomies()["categories"]), qt.Equals, 1)
+ b.Assert(getPage(b, "/categories/mycat"), qt.Not(qt.IsNil))
+ categories := getPage(b, "/categories")
+ b.Assert(categories, qt.Not(qt.IsNil))
+ b.Assert(categories.RelPermalink(), qt.Equals, "")
+ b.Assert(getPageInSitePages(b, "/categories"), qt.IsNil)
+ b.Assert(getPageInPagePages(getPage(b, "/"), "/categories"), qt.IsNil)
+ })
+
+ disableKind = page.KindHome
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/index.html"), qt.Equals, false)
+ home := getPage(b, "/")
+ b.Assert(home, qt.Not(qt.IsNil))
+ b.Assert(home.RelPermalink(), qt.Equals, "")
+ b.Assert(getPageInSitePages(b, "/"), qt.IsNil)
+ b.Assert(getPageInPagePages(home, "/"), qt.IsNil)
+ b.Assert(getPage(b, "/sect/page.md"), qt.Not(qt.IsNil))
+ })
+
+ disableKind = page.KindSection
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/sect/index.html"), qt.Equals, false)
+ sect := getPage(b, "/sect")
+ b.Assert(sect, qt.Not(qt.IsNil))
+ b.Assert(sect.RelPermalink(), qt.Equals, "")
+ b.Assert(getPageInSitePages(b, "/sect"), qt.IsNil)
+ home := getPage(b, "/")
+ b.Assert(getPageInPagePages(home, "/sect"), qt.IsNil)
+ b.Assert(home.OutputFormats(), qt.HasLen, 2)
+ page := getPage(b, "/sect/page.md")
+ b.Assert(page, qt.Not(qt.IsNil))
+ b.Assert(page.CurrentSection(), qt.Equals, sect)
+ b.Assert(getPageInPagePages(sect, "/sect/page.md"), qt.Not(qt.IsNil))
+ b.AssertFileContent("public/sitemap.xml", "sitemap")
+ b.AssertFileContent("public/index.xml", "rss")
+ })
+
+ disableKind = kindRSS
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/index.xml"), qt.Equals, false)
+ home := getPage(b, "/")
+ b.Assert(home.OutputFormats(), qt.HasLen, 1)
+ })
+
+ disableKind = kindSitemap
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/sitemap.xml"), qt.Equals, false)
+ })
+
+ disableKind = kind404
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/404.html"), qt.Equals, false)
+ })
+
+ disableKind = kindRobotsTXT
+ c.Run("Disable "+disableKind, func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.WithTemplatesAdded("robots.txt", "myrobots")
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/robots.txt"), qt.Equals, false)
+ })
+
+ c.Run("Headless bundle", func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/sect/headlessbundle/index.html"), qt.Equals, false)
+ b.Assert(b.CheckExists("public/sect/headlessbundle/data.json"), qt.Equals, true)
+ bundle := getPage(b, "/sect/headlessbundle/index.md")
+ b.Assert(bundle, qt.Not(qt.IsNil))
+ b.Assert(bundle.RelPermalink(), qt.Equals, "")
+ resource := bundle.Resources()[0]
+ b.Assert(resource.RelPermalink(), qt.Equals, "/blog/sect/headlessbundle/data.json")
+ b.Assert(bundle.OutputFormats(), qt.HasLen, 0)
+ b.Assert(bundle.AlternativeOutputFormats(), qt.HasLen, 0)
+ })
+
+ c.Run("Build config, no list", func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ ref := "/sect/no-list.md"
+ b.Assert(b.CheckExists("public/sect/no-list/index.html"), qt.Equals, true)
+ p := getPage(b, ref)
+ b.Assert(p, qt.Not(qt.IsNil))
+ b.Assert(p.RelPermalink(), qt.Equals, "/blog/sect/no-list/")
+ b.Assert(getPageInSitePages(b, ref), qt.IsNil)
+ sect := getPage(b, "/sect")
+ b.Assert(getPageInPagePages(sect, ref), qt.IsNil)
+ })
+
+ c.Run("Build config, local list", func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ ref := "/headless-local"
+ sect := getPage(b, ref)
+ b.Assert(sect, qt.Not(qt.IsNil))
+ b.Assert(getPageInSitePages(b, ref), qt.IsNil)
+
+ b.Assert(getPageInSitePages(b, "/headless-local/_index.md"), qt.IsNil)
+ b.Assert(getPageInSitePages(b, "/headless-local/headless-local-page.md"), qt.IsNil)
+
+ localPageRef := ref + "/headless-local-page.md"
+
+ b.Assert(getPageInPagePages(sect, localPageRef, sect.RegularPages()), qt.Not(qt.IsNil))
+ b.Assert(getPageInPagePages(sect, localPageRef, sect.RegularPagesRecursive()), qt.Not(qt.IsNil))
+ b.Assert(getPageInPagePages(sect, localPageRef, sect.Pages()), qt.Not(qt.IsNil))
+
+ ref = "/headless-local/sub"
+
+ sect = getPage(b, ref)
+ b.Assert(sect, qt.Not(qt.IsNil))
+
+ localPageRef = ref + "/headless-local-sub-page.md"
+ b.Assert(getPageInPagePages(sect, localPageRef), qt.Not(qt.IsNil))
+ })
+
+ c.Run("Build config, no render", func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ ref := "/sect/no-render.md"
+ b.Assert(b.CheckExists("public/sect/no-render/index.html"), qt.Equals, false)
+ p := getPage(b, ref)
+ b.Assert(p, qt.Not(qt.IsNil))
+ b.Assert(p.RelPermalink(), qt.Equals, "")
+ b.Assert(p.OutputFormats(), qt.HasLen, 0)
+ b.Assert(getPageInSitePages(b, ref), qt.Not(qt.IsNil))
+ sect := getPage(b, "/sect")
+ b.Assert(getPageInPagePages(sect, ref), qt.Not(qt.IsNil))
+ })
+
+ c.Run("Build config, no render link", func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ ref := "/sect/no-render-link.md"
+ b.Assert(b.CheckExists("public/sect/no-render/index.html"), qt.Equals, false)
+ p := getPage(b, ref)
+ b.Assert(p, qt.Not(qt.IsNil))
+ b.Assert(p.RelPermalink(), qt.Equals, "/blog/sect/no-render-link/")
+ b.Assert(p.OutputFormats(), qt.HasLen, 1)
+ b.Assert(getPageInSitePages(b, ref), qt.Not(qt.IsNil))
+ sect := getPage(b, "/sect")
+ b.Assert(getPageInPagePages(sect, ref), qt.Not(qt.IsNil))
+
+ // https://github.com/gohugoio/hugo/issues/7832
+ // It should still render any aliases.
+ b.AssertFileContent("public/link-alias/index.html", "refresh")
+ })
+
+ c.Run("Build config, no publish resources", func(c *qt.C) {
+ b := newSitesBuilder(c, disableKind)
+ b.Build(BuildCfg{})
+ b.Assert(b.CheckExists("public/sect/no-publishresources/index.html"), qt.Equals, true)
+ b.Assert(b.CheckExists("public/sect/no-publishresources/data.json"), qt.Equals, false)
+ bundle := getPage(b, "/sect/no-publishresources/index.md")
+ b.Assert(bundle, qt.Not(qt.IsNil))
+ b.Assert(bundle.RelPermalink(), qt.Equals, "/blog/sect/no-publishresources/")
+ b.Assert(bundle.Resources(), qt.HasLen, 1)
+ resource := bundle.Resources()[0]
+ b.Assert(resource.RelPermalink(), qt.Equals, "/blog/sect/no-publishresources/data.json")
+ })
+}
+
+// https://github.com/gohugoio/hugo/issues/6897#issuecomment-587947078
+func TestDisableRSSWithRSSInCustomOutputs(t *testing.T) {
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+disableKinds = ["term", "taxonomy", "RSS"]
+[outputs]
+home = [ "HTML", "RSS" ]
+`).Build(BuildCfg{})
+
+ // The config above is a little conflicting, but it exists in the real world.
+ // In Hugo 0.65 we consolidated the code paths and made RSS a pure output format,
+ // but we should make sure to not break existing sites.
+ b.Assert(b.CheckExists("public/index.xml"), qt.Equals, false)
+}
+
+func TestBundleNoPublishResources(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithTemplates("index.html", `
+{{ $bundle := site.GetPage "section/bundle-false" }}
+{{ $data1 := $bundle.Resources.GetMatch "data1*" }}
+Data1: {{ $data1.RelPermalink }}
+
+`)
+
+ b.WithContent("section/bundle-false/index.md", `---\ntitle: BundleFalse
+_build:
+ publishResources: false
+---`,
+ "section/bundle-false/data1.json", "Some data1",
+ "section/bundle-false/data2.json", "Some data2",
+ )
+
+ b.WithContent("section/bundle-true/index.md", `---\ntitle: BundleTrue
+---`,
+ "section/bundle-true/data3.json", "Some data 3",
+ )
+
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", `Data1: /section/bundle-false/data1.json`)
+ b.AssertFileContent("public/section/bundle-false/data1.json", `Some data1`)
+ b.Assert(b.CheckExists("public/section/bundle-false/data2.json"), qt.Equals, false)
+ b.AssertFileContent("public/section/bundle-true/data3.json", `Some data 3`)
+}
+
+func TestNoRenderAndNoPublishResources(t *testing.T) {
+ noRenderPage := `
+---
+title: %s
+_build:
+ render: false
+ publishResources: false
+---
+`
+ b := newTestSitesBuilder(t)
+ b.WithTemplatesAdded("index.html", `
+{{ $page := site.GetPage "sect/no-render" }}
+{{ $sect := site.GetPage "sect-no-render" }}
+
+Page: {{ $page.Title }}|RelPermalink: {{ $page.RelPermalink }}|Outputs: {{ len $page.OutputFormats }}
+Section: {{ $sect.Title }}|RelPermalink: {{ $sect.RelPermalink }}|Outputs: {{ len $sect.OutputFormats }}
+
+
+`)
+ b.WithContent("sect-no-render/_index.md", fmt.Sprintf(noRenderPage, "MySection"))
+ b.WithContent("sect/no-render.md", fmt.Sprintf(noRenderPage, "MyPage"))
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Page: MyPage|RelPermalink: |Outputs: 0
+Section: MySection|RelPermalink: |Outputs: 0
+`)
+
+ b.Assert(b.CheckExists("public/sect/no-render/index.html"), qt.Equals, false)
+ b.Assert(b.CheckExists("public/sect-no-render/index.html"), qt.Equals, false)
+}
diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go
new file mode 100644
index 000000000..1707bcfa7
--- /dev/null
+++ b/hugolib/embedded_shortcodes_test.go
@@ -0,0 +1,418 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "encoding/json"
+ "fmt"
+ "html/template"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/deps"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const (
+ testBaseURL = "http://foo/bar"
+)
+
+func TestShortcodeCrossrefs(t *testing.T) {
+ t.Parallel()
+
+ for _, relative := range []bool{true, false} {
+ doTestShortcodeCrossrefs(t, relative)
+ }
+}
+
+func doTestShortcodeCrossrefs(t *testing.T, relative bool) {
+ var (
+ cfg, fs = newTestCfg()
+ c = qt.New(t)
+ )
+
+ cfg.Set("baseURL", testBaseURL)
+
+ var refShortcode string
+ var expectedBase string
+
+ if relative {
+ refShortcode = "relref"
+ expectedBase = "/bar"
+ } else {
+ refShortcode = "ref"
+ expectedBase = testBaseURL
+ }
+
+ path := filepath.FromSlash("blog/post.md")
+ in := fmt.Sprintf(`{{< %s "%s" >}}`, refShortcode, path)
+
+ writeSource(t, fs, "content/"+path, simplePageWithURL+": "+in)
+
+ expected := fmt.Sprintf(`%s/simple/url/`, expectedBase)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ content, err := s.RegularPages()[0].Content()
+ c.Assert(err, qt.IsNil)
+ output := cast.ToString(content)
+
+ if !strings.Contains(output, expected) {
+ t.Errorf("Got\n%q\nExpected\n%q", output, expected)
+ }
+}
+
+func TestShortcodeHighlight(t *testing.T) {
+ t.Parallel()
+
+ for _, this := range []struct {
+ in, expected string
+ }{
+ {
+ `{{< highlight java >}}
+void do();
+{{< /highlight >}}`,
+ `(?s)<div class="highlight"><pre tabindex="0" style="background-color:#fff;-moz-tab-size:4;-o-tab-size:4;tab-size:4;"><code class="language-java"`,
+ },
+ {
+ `{{< highlight java "style=friendly" >}}
+void do();
+{{< /highlight >}}`,
+ `(?s)<div class="highlight"><pre tabindex="0" style="background-color:#f0f0f0;-moz-tab-size:4;-o-tab-size:4;tab-size:4;"><code class="language-java" data-lang="java">`,
+ },
+ } {
+
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ cfg.Set("markup.highlight.style", "bw")
+ cfg.Set("markup.highlight.noClasses", true)
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+
+ }
+}
+
+func TestShortcodeFigure(t *testing.T) {
+ t.Parallel()
+
+ for _, this := range []struct {
+ in, expected string
+ }{
+ {
+ `{{< figure src="/img/hugo-logo.png" >}}`,
+ "(?s)<figure>.*?<img src=\"/img/hugo-logo.png\"/>.*?</figure>",
+ },
+ {
+ // set alt
+ `{{< figure src="/img/hugo-logo.png" alt="Hugo logo" >}}`,
+ "(?s)<figure>.*?<img src=\"/img/hugo-logo.png\".+?alt=\"Hugo logo\"/>.*?</figure>",
+ },
+ // set title
+ {
+ `{{< figure src="/img/hugo-logo.png" title="Hugo logo" >}}`,
+ "(?s)<figure>.*?<img src=\"/img/hugo-logo.png\"/>.*?<figcaption>.*?<h4>Hugo logo</h4>.*?</figcaption>.*?</figure>",
+ },
+ // set attr and attrlink
+ {
+ `{{< figure src="/img/hugo-logo.png" attr="Hugo logo" attrlink="/img/hugo-logo.png" >}}`,
+ "(?s)<figure>.*?<img src=\"/img/hugo-logo.png\"/>.*?<figcaption>.*?<p>.*?<a href=\"/img/hugo-logo.png\">.*?Hugo logo.*?</a>.*?</p>.*?</figcaption>.*?</figure>",
+ },
+ } {
+
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+
+ }
+}
+
+func TestShortcodeYoutube(t *testing.T) {
+ t.Parallel()
+
+ for _, this := range []struct {
+ in, expected string
+ }{
+ {
+ `{{< youtube w7Ft2ymGmfc >}}`,
+ "(?s)\n<div style=\".*?\">.*?<iframe src=\"https://www.youtube.com/embed/w7Ft2ymGmfc\" style=\".*?\" allowfullscreen title=\"YouTube Video\">.*?</iframe>.*?</div>\n",
+ },
+ // set class
+ {
+ `{{< youtube w7Ft2ymGmfc video>}}`,
+ "(?s)\n<div class=\"video\">.*?<iframe src=\"https://www.youtube.com/embed/w7Ft2ymGmfc\" allowfullscreen title=\"YouTube Video\">.*?</iframe>.*?</div>\n",
+ },
+ // set class and autoplay (using named params)
+ {
+ `{{< youtube id="w7Ft2ymGmfc" class="video" autoplay="true" >}}`,
+ "(?s)\n<div class=\"video\">.*?<iframe src=\"https://www.youtube.com/embed/w7Ft2ymGmfc\\?autoplay=1\".*?allowfullscreen title=\"YouTube Video\">.*?</iframe>.*?</div>",
+ },
+ // set custom title for accessibility
+ {
+ `{{< youtube id="w7Ft2ymGmfc" title="A New Hugo Site in Under Two Minutes" >}}`,
+ "(?s)\n<div style=\".*?\">.*?<iframe src=\"https://www.youtube.com/embed/w7Ft2ymGmfc\" style=\".*?\" allowfullscreen title=\"A New Hugo Site in Under Two Minutes\">.*?</iframe>.*?</div>",
+ },
+ } {
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+ }
+}
+
+func TestShortcodeVimeo(t *testing.T) {
+ t.Parallel()
+
+ for _, this := range []struct {
+ in, expected string
+ }{
+ {
+ `{{< vimeo 146022717 >}}`,
+ "(?s)\n<div style=\".*?\">.*?<iframe src=\"https://player.vimeo.com/video/146022717\" style=\".*?\" title=\"vimeo video\" webkitallowfullscreen mozallowfullscreen allowfullscreen>.*?</iframe>.*?</div>\n",
+ },
+ // set class
+ {
+ `{{< vimeo 146022717 video >}}`,
+ "(?s)\n<div class=\"video\">.*?<iframe src=\"https://player.vimeo.com/video/146022717\" title=\"vimeo video\" webkitallowfullscreen mozallowfullscreen allowfullscreen>.*?</iframe>.*?</div>\n",
+ },
+ // set vimeo title
+ {
+ `{{< vimeo 146022717 video my-title >}}`,
+ "(?s)\n<div class=\"video\">.*?<iframe src=\"https://player.vimeo.com/video/146022717\" title=\"my-title\" webkitallowfullscreen mozallowfullscreen allowfullscreen>.*?</iframe>.*?</div>\n",
+ },
+ // set class (using named params)
+ {
+ `{{< vimeo id="146022717" class="video" >}}`,
+ "(?s)^<div class=\"video\">.*?<iframe src=\"https://player.vimeo.com/video/146022717\" title=\"vimeo video\" webkitallowfullscreen mozallowfullscreen allowfullscreen>.*?</iframe>.*?</div>",
+ },
+ // set vimeo title (using named params)
+ {
+ `{{< vimeo id="146022717" class="video" title="my vimeo video" >}}`,
+ "(?s)^<div class=\"video\">.*?<iframe src=\"https://player.vimeo.com/video/146022717\" title=\"my vimeo video\" webkitallowfullscreen mozallowfullscreen allowfullscreen>.*?</iframe>.*?</div>",
+ },
+ } {
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+
+ }
+}
+
+func TestShortcodeGist(t *testing.T) {
+ t.Parallel()
+
+ for _, this := range []struct {
+ in, expected string
+ }{
+ {
+ `{{< gist spf13 7896402 >}}`,
+ "(?s)^<script type=\"application/javascript\" src=\"https://gist.github.com/spf13/7896402.js\"></script>",
+ },
+ {
+ `{{< gist spf13 7896402 "img.html" >}}`,
+ "(?s)^<script type=\"application/javascript\" src=\"https://gist.github.com/spf13/7896402.js\\?file=img.html\"></script>",
+ },
+ } {
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+
+ }
+}
+
+func TestShortcodeTweet(t *testing.T) {
+ t.Parallel()
+
+ for i, this := range []struct {
+ privacy map[string]any
+ in, resp, expected string
+ }{
+ {
+ map[string]any{
+ "twitter": map[string]any{
+ "simple": true,
+ },
+ },
+ `{{< tweet 666616452582129664 >}}`,
+ `{"author_name":"Steve Francia","author_url":"https://twitter.com/spf13","cache_age":"3153600000","height":null,"html":"\u003cblockquote class=\"twitter-tweet\"\u003e\u003cp lang=\"en\" dir=\"ltr\"\u003eHugo 0.15 will have 30%+ faster render times thanks to this commit \u003ca href=\"https://t.co/FfzhM8bNhT\"\u003ehttps://t.co/FfzhM8bNhT\u003c/a\u003e \u003ca href=\"https://twitter.com/hashtag/gohugo?src=hash\u0026amp;ref_src=twsrc%5Etfw\"\u003e#gohugo\u003c/a\u003e \u003ca href=\"https://twitter.com/hashtag/golang?src=hash\u0026amp;ref_src=twsrc%5Etfw\"\u003e#golang\u003c/a\u003e \u003ca href=\"https://t.co/ITbMNU2BUf\"\u003ehttps://t.co/ITbMNU2BUf\u003c/a\u003e\u003c/p\u003e\u0026mdash; Steve Francia (@spf13) \u003ca href=\"https://twitter.com/spf13/status/666616452582129664?ref_src=twsrc%5Etfw\"\u003eNovember 17, 2015\u003c/a\u003e\u003c/blockquote\u003e\n\u003cscript async src=\"https://platform.twitter.com/widgets.js\" charset=\"utf-8\"\u003e\u003c/script\u003e\n","provider_name":"Twitter","provider_url":"https://twitter.com","type":"rich","url":"https://twitter.com/spf13/status/666616452582129664","version":"1.0","width":550}`,
+ `.twitter-tweet a`,
+ },
+ {
+ map[string]any{
+ "twitter": map[string]any{
+ "simple": false,
+ },
+ },
+ `{{< tweet 666616452582129664 >}}`,
+ `{"author_name":"Steve Francia","author_url":"https://twitter.com/spf13","cache_age":"3153600000","height":null,"html":"\u003cblockquote class=\"twitter-tweet\"\u003e\u003cp lang=\"en\" dir=\"ltr\"\u003eHugo 0.15 will have 30%+ faster render times thanks to this commit \u003ca href=\"https://t.co/FfzhM8bNhT\"\u003ehttps://t.co/FfzhM8bNhT\u003c/a\u003e \u003ca href=\"https://twitter.com/hashtag/gohugo?src=hash\u0026amp;ref_src=twsrc%5Etfw\"\u003e#gohugo\u003c/a\u003e \u003ca href=\"https://twitter.com/hashtag/golang?src=hash\u0026amp;ref_src=twsrc%5Etfw\"\u003e#golang\u003c/a\u003e \u003ca href=\"https://t.co/ITbMNU2BUf\"\u003ehttps://t.co/ITbMNU2BUf\u003c/a\u003e\u003c/p\u003e\u0026mdash; Steve Francia (@spf13) \u003ca href=\"https://twitter.com/spf13/status/666616452582129664?ref_src=twsrc%5Etfw\"\u003eNovember 17, 2015\u003c/a\u003e\u003c/blockquote\u003e\n\u003cscript async src=\"https://platform.twitter.com/widgets.js\" charset=\"utf-8\"\u003e\u003c/script\u003e\n","provider_name":"Twitter","provider_url":"https://twitter.com","type":"rich","url":"https://twitter.com/spf13/status/666616452582129664","version":"1.0","width":550}`,
+ `(?s)<blockquote class="twitter-tweet"><p lang="en" dir="ltr">Hugo 0.15 will have 30%\+ faster render times thanks to this commit <a href="https://t.co/FfzhM8bNhT">https://t.co/FfzhM8bNhT</a> <a href="https://twitter.com/hashtag/gohugo\?src=hash&amp;ref_src=twsrc%5Etfw">#gohugo</a> <a href="https://twitter.com/hashtag/golang\?src=hash&amp;ref_src=twsrc%5Etfw">#golang</a> <a href="https://t.co/ITbMNU2BUf">https://t.co/ITbMNU2BUf</a></p>&mdash; Steve Francia \(@spf13\) <a href="https://twitter.com/spf13/status/666616452582129664\?ref_src=twsrc%5Etfw">November 17, 2015</a></blockquote>\s*<script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>`,
+ },
+ {
+ map[string]any{
+ "twitter": map[string]any{
+ "simple": false,
+ },
+ },
+ `{{< tweet user="SanDiegoZoo" id="1453110110599868418" >}}`,
+ `{"author_name":"San Diego Boo 👻 Wildlife Alliance","author_url":"https://twitter.com/sandiegozoo","cache_age":"3153600000","height":null,"html":"\u003cblockquote class=\"twitter-tweet\"\u003e\u003cp lang=\"en\" dir=\"ltr\"\u003eOwl bet you\u0026#39;ll lose this staring contest 🦉 \u003ca href=\"https://t.co/eJh4f2zncC\"\u003epic.twitter.com/eJh4f2zncC\u003c/a\u003e\u003c/p\u003e\u0026mdash; San Diego Boo 👻 Wildlife Alliance (@sandiegozoo) \u003ca href=\"https://twitter.com/sandiegozoo/status/1453110110599868418?ref_src=twsrc%5Etfw\"\u003eOctober 26, 2021\u003c/a\u003e\u003c/blockquote\u003e\n\u003cscript async src=\"https://platform.twitter.com/widgets.js\" charset=\"utf-8\"\u003e\u003c/script\u003e\n","provider_name":"Twitter","provider_url":"https://twitter.com","type":"rich","url":"https://twitter.com/sandiegozoo/status/1453110110599868418","version":"1.0","width":550}`,
+ `(?s)<blockquote class="twitter-tweet"><p lang="en" dir="ltr">Owl bet you&#39;ll lose this staring contest 🦉 <a href="https://t.co/eJh4f2zncC">pic.twitter.com/eJh4f2zncC</a></p>&mdash; San Diego Boo 👻 Wildlife Alliance \(@sandiegozoo\) <a href="https://twitter.com/sandiegozoo/status/1453110110599868418\?ref_src=twsrc%5Etfw">October 26, 2021</a></blockquote>\s*<script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>`,
+ },
+ } {
+ // overload getJSON to return mock API response from Twitter
+ tweetFuncMap := template.FuncMap{
+ "getJSON": func(urlParts ...any) any {
+ var v any
+ err := json.Unmarshal([]byte(this.resp), &v)
+ if err != nil {
+ t.Fatalf("[%d] unexpected error in json.Unmarshal: %s", i, err)
+ return err
+ }
+ return v
+ },
+ }
+
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ cfg.Set("privacy", this.privacy)
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, OverloadedTemplateFuncs: tweetFuncMap}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+
+ }
+}
+
+func TestShortcodeInstagram(t *testing.T) {
+ t.Parallel()
+
+ for i, this := range []struct {
+ in, hidecaption, resp, expected string
+ }{
+ {
+ `{{< instagram BMokmydjG-M >}}`,
+ `0`,
+ `{"provider_url": "https://www.instagram.com", "media_id": "1380514280986406796_25025320", "author_name": "instagram", "height": null, "thumbnail_url": "https://scontent-amt2-1.cdninstagram.com/t51.2885-15/s640x640/sh0.08/e35/15048135_1880160212214218_7827880881132929024_n.jpg?ig_cache_key=MTM4MDUxNDI4MDk4NjQwNjc5Ng%3D%3D.2", "thumbnail_width": 640, "thumbnail_height": 640, "provider_name": "Instagram", "title": "Today, we\u2019re introducing a few new tools to help you make your story even more fun: Boomerang and mentions. We\u2019re also starting to test links inside some stories.\nBoomerang lets you turn everyday moments into something fun and unexpected. Now you can easily take a Boomerang right inside Instagram. Swipe right from your feed to open the stories camera. A new format picker under the record button lets you select \u201cBoomerang\u201d mode.\nYou can also now share who you\u2019re with or who you\u2019re thinking of by mentioning them in your story. When you add text to your story, type \u201c@\u201d followed by a username and select the person you\u2019d like to mention. Their username will appear underlined in your story. And when someone taps the mention, they'll see a pop-up that takes them to that profile.\nYou may begin to spot \u201cSee More\u201d links at the bottom of some stories. This is a test that lets verified accounts add links so it\u2019s easy to learn more. 
From your favorite chefs\u2019 recipes to articles from top journalists or concert dates from the musicians you love, tap \u201cSee More\u201d or swipe up to view the link right inside the app.\nTo learn more about today\u2019s updates, check out help.instagram.com.\nThese updates for Instagram Stories are available as part of Instagram version 9.7 available for iOS in the Apple App Store, for Android in Google Play and for Windows 10 in the Windows Store.", "html": "\u003cblockquote class=\"instagram-media\" data-instgrm-captioned data-instgrm-version=\"7\" style=\" background:#FFF; border:0; border-radius:3px; box-shadow:0 0 1px 0 rgba(0,0,0,0.5),0 1px 10px 0 rgba(0,0,0,0.15); margin: 1px; max-width:658px; padding:0; width:99.375%; width:-webkit-calc(100% - 2px); width:calc(100% - 2px);\"\u003e\u003cdiv style=\"padding:8px;\"\u003e \u003cdiv style=\" background:#F8F8F8; line-height:0; margin-top:40px; padding:50.0% 0; text-align:center; width:100%;\"\u003e \u003cdiv style=\" background:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACwAAAAsCAMAAAApWqozAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAMUExURczMzPf399fX1+bm5mzY9AMAAADiSURBVDjLvZXbEsMgCES5/P8/t9FuRVCRmU73JWlzosgSIIZURCjo/ad+EQJJB4Hv8BFt+IDpQoCx1wjOSBFhh2XssxEIYn3ulI/6MNReE07UIWJEv8UEOWDS88LY97kqyTliJKKtuYBbruAyVh5wOHiXmpi5we58Ek028czwyuQdLKPG1Bkb4NnM+VeAnfHqn1k4+GPT6uGQcvu2h2OVuIf/gWUFyy8OWEpdyZSa3aVCqpVoVvzZZ2VTnn2wU8qzVjDDetO90GSy9mVLqtgYSy231MxrY6I2gGqjrTY0L8fxCxfCBbhWrsYYAAAAAElFTkSuQmCC); display:block; height:44px; margin:0 auto -44px; position:relative; top:-22px; width:44px;\"\u003e\u003c/div\u003e\u003c/div\u003e \u003cp style=\" margin:8px 0 0 0; padding:0 4px;\"\u003e \u003ca href=\"https://www.instagram.com/p/BMokmydjG-M/\" style=\" color:#000; font-family:Arial,sans-serif; font-size:14px; font-style:normal; font-weight:normal; line-height:17px; text-decoration:none; word-wrap:break-word;\" target=\"_blank\"\u003eToday, we\u2019re introducing a few new tools to help you make your story even 
more fun: Boomerang and mentions. We\u2019re also starting to test links inside some stories. Boomerang lets you turn everyday moments into something fun and unexpected. Now you can easily take a Boomerang right inside Instagram. Swipe right from your feed to open the stories camera. A new format picker under the record button lets you select \u201cBoomerang\u201d mode. You can also now share who you\u2019re with or who you\u2019re thinking of by mentioning them in your story. When you add text to your story, type \u201c@\u201d followed by a username and select the person you\u2019d like to mention. Their username will appear underlined in your story. And when someone taps the mention, they\u0026#39;ll see a pop-up that takes them to that profile. You may begin to spot \u201cSee More\u201d links at the bottom of some stories. This is a test that lets verified accounts add links so it\u2019s easy to learn more. From your favorite chefs\u2019 recipes to articles from top journalists or concert dates from the musicians you love, tap \u201cSee More\u201d or swipe up to view the link right inside the app. To learn more about today\u2019s updates, check out help.instagram.com. 
These updates for Instagram Stories are available as part of Instagram version 9.7 available for iOS in the Apple App Store, for Android in Google Play and for Windows 10 in the Windows Store.\u003c/a\u003e\u003c/p\u003e \u003cp style=\" color:#c9c8cd; font-family:Arial,sans-serif; font-size:14px; line-height:17px; margin-bottom:0; margin-top:8px; overflow:hidden; padding:8px 0 7px; text-align:center; text-overflow:ellipsis; white-space:nowrap;\"\u003eA photo posted by Instagram (@instagram) on \u003ctime style=\" font-family:Arial,sans-serif; font-size:14px; line-height:17px;\" datetime=\"2016-11-10T15:02:28+00:00\"\u003eNov 10, 2016 at 7:02am PST\u003c/time\u003e\u003c/p\u003e\u003c/div\u003e\u003c/blockquote\u003e\n\u003cscript async defer src=\"//platform.instagram.com/en_US/embeds.js\"\u003e\u003c/script\u003e", "width": 658, "version": "1.0", "author_url": "https://www.instagram.com/instagram", "author_id": 25025320, "type": "rich"}`,
+ `(?s)<blockquote class="instagram-media" data-instgrm-captioned data-instgrm-version="7" .*defer src="//platform.instagram.com/en_US/embeds.js"></script>`,
+ },
+ {
+ `{{< instagram BMokmydjG-M hidecaption >}}`,
+ `1`,
+ `{"provider_url": "https://www.instagram.com", "media_id": "1380514280986406796_25025320", "author_name": "instagram", "height": null, "thumbnail_url": "https://scontent-amt2-1.cdninstagram.com/t51.2885-15/s640x640/sh0.08/e35/15048135_1880160212214218_7827880881132929024_n.jpg?ig_cache_key=MTM4MDUxNDI4MDk4NjQwNjc5Ng%3D%3D.2", "thumbnail_width": 640, "thumbnail_height": 640, "provider_name": "Instagram", "title": "Today, we\u2019re introducing a few new tools to help you make your story even more fun: Boomerang and mentions. We\u2019re also starting to test links inside some stories.\nBoomerang lets you turn everyday moments into something fun and unexpected. Now you can easily take a Boomerang right inside Instagram. Swipe right from your feed to open the stories camera. A new format picker under the record button lets you select \u201cBoomerang\u201d mode.\nYou can also now share who you\u2019re with or who you\u2019re thinking of by mentioning them in your story. When you add text to your story, type \u201c@\u201d followed by a username and select the person you\u2019d like to mention. Their username will appear underlined in your story. And when someone taps the mention, they'll see a pop-up that takes them to that profile.\nYou may begin to spot \u201cSee More\u201d links at the bottom of some stories. This is a test that lets verified accounts add links so it\u2019s easy to learn more. 
From your favorite chefs\u2019 recipes to articles from top journalists or concert dates from the musicians you love, tap \u201cSee More\u201d or swipe up to view the link right inside the app.\nTo learn more about today\u2019s updates, check out help.instagram.com.\nThese updates for Instagram Stories are available as part of Instagram version 9.7 available for iOS in the Apple App Store, for Android in Google Play and for Windows 10 in the Windows Store.", "html": "\u003cblockquote class=\"instagram-media\" data-instgrm-version=\"7\" style=\" background:#FFF; border:0; border-radius:3px; box-shadow:0 0 1px 0 rgba(0,0,0,0.5),0 1px 10px 0 rgba(0,0,0,0.15); margin: 1px; max-width:658px; padding:0; width:99.375%; width:-webkit-calc(100% - 2px); width:calc(100% - 2px);\"\u003e\u003cdiv style=\"padding:8px;\"\u003e \u003cdiv style=\" background:#F8F8F8; line-height:0; margin-top:40px; padding:50.0% 0; text-align:center; width:100%;\"\u003e \u003cdiv style=\" background:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACwAAAAsCAMAAAApWqozAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAMUExURczMzPf399fX1+bm5mzY9AMAAADiSURBVDjLvZXbEsMgCES5/P8/t9FuRVCRmU73JWlzosgSIIZURCjo/ad+EQJJB4Hv8BFt+IDpQoCx1wjOSBFhh2XssxEIYn3ulI/6MNReE07UIWJEv8UEOWDS88LY97kqyTliJKKtuYBbruAyVh5wOHiXmpi5we58Ek028czwyuQdLKPG1Bkb4NnM+VeAnfHqn1k4+GPT6uGQcvu2h2OVuIf/gWUFyy8OWEpdyZSa3aVCqpVoVvzZZ2VTnn2wU8qzVjDDetO90GSy9mVLqtgYSy231MxrY6I2gGqjrTY0L8fxCxfCBbhWrsYYAAAAAElFTkSuQmCC); display:block; height:44px; margin:0 auto -44px; position:relative; top:-22px; width:44px;\"\u003e\u003c/div\u003e\u003c/div\u003e\u003cp style=\" color:#c9c8cd; font-family:Arial,sans-serif; font-size:14px; line-height:17px; margin-bottom:0; margin-top:8px; overflow:hidden; padding:8px 0 7px; text-align:center; text-overflow:ellipsis; white-space:nowrap;\"\u003e\u003ca href=\"https://www.instagram.com/p/BMokmydjG-M/\" style=\" color:#c9c8cd; font-family:Arial,sans-serif; font-size:14px; font-style:normal; font-weight:normal; 
line-height:17px; text-decoration:none;\" target=\"_blank\"\u003eA photo posted by Instagram (@instagram)\u003c/a\u003e on \u003ctime style=\" font-family:Arial,sans-serif; font-size:14px; line-height:17px;\" datetime=\"2016-11-10T15:02:28+00:00\"\u003eNov 10, 2016 at 7:02am PST\u003c/time\u003e\u003c/p\u003e\u003c/div\u003e\u003c/blockquote\u003e\n\u003cscript async defer src=\"//platform.instagram.com/en_US/embeds.js\"\u003e\u003c/script\u003e", "width": 658, "version": "1.0", "author_url": "https://www.instagram.com/instagram", "author_id": 25025320, "type": "rich"}`,
+ `(?s)<blockquote class="instagram-media" data-instgrm-version="7" style=" background:#FFF; border:0; .*<script async defer src="//platform.instagram.com/en_US/embeds.js"></script>`,
+ },
+ } {
+ // overload getJSON to return mock API response from Instagram
+ instagramFuncMap := template.FuncMap{
+ "getJSON": func(args ...any) any {
+ headers := args[len(args)-1].(map[string]any)
+ auth := headers["Authorization"]
+ if auth != "Bearer dummytoken" {
+ return fmt.Errorf("invalid access token: %q", auth)
+ }
+ var v any
+ err := json.Unmarshal([]byte(this.resp), &v)
+ if err != nil {
+ return fmt.Errorf("[%d] unexpected error in json.Unmarshal: %s", i, err)
+ }
+ return v
+ },
+ }
+
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ cfg.Set("services.instagram.accessToken", "dummytoken")
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), fmt.Sprintf(`---
+title: Shorty
+---
+%s`, this.in))
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .Content | safeHTML }}`)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, OverloadedTemplateFuncs: instagramFuncMap}, BuildCfg{})
+
+ th.assertFileContentRegexp(filepath.Join("public", "simple", "index.html"), this.expected)
+
+ }
+}
diff --git a/hugolib/embedded_templates_test.go b/hugolib/embedded_templates_test.go
new file mode 100644
index 000000000..b40d77dc4
--- /dev/null
+++ b/hugolib/embedded_templates_test.go
@@ -0,0 +1,169 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestInternalTemplatesImage(t *testing.T) {
+ config := `
+baseURL = "https://example.org"
+
+[params]
+images=["siteimg1.jpg", "siteimg2.jpg"]
+
+`
+ b := newTestSitesBuilder(t).WithConfigFile("toml", config)
+
+ b.WithContent("mybundle/index.md", `---
+title: My Bundle
+date: 2021-02-26T18:02:00-01:00
+lastmod: 2021-05-22T19:25:00-01:00
+---
+`)
+
+ b.WithContent("mypage.md", `---
+title: My Page
+images: ["pageimg1.jpg", "pageimg2.jpg"]
+date: 2021-02-26T18:02:00+01:00
+lastmod: 2021-05-22T19:25:00+01:00
+---
+`)
+
+ b.WithContent("mysite.md", `---
+title: My Site
+---
+`)
+
+ b.WithTemplatesAdded("_default/single.html", `
+
+{{ template "_internal/twitter_cards.html" . }}
+{{ template "_internal/opengraph.html" . }}
+{{ template "_internal/schema.html" . }}
+
+`)
+
+ b.WithSunset("content/mybundle/featured-sunset.jpg")
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/mybundle/index.html", `
+<meta name="twitter:image" content="https://example.org/mybundle/featured-sunset.jpg"/>
+<meta name="twitter:title" content="My Bundle"/>
+<meta property="og:title" content="My Bundle" />
+<meta property="og:url" content="https://example.org/mybundle/" />
+<meta property="og:image" content="https://example.org/mybundle/featured-sunset.jpg"/>
+<meta property="article:published_time" content="2021-02-26T18:02:00-01:00" />
+<meta property="article:modified_time" content="2021-05-22T19:25:00-01:00" />
+<meta itemprop="name" content="My Bundle">
+<meta itemprop="image" content="https://example.org/mybundle/featured-sunset.jpg">
+<meta itemprop="datePublished" content="2021-02-26T18:02:00-01:00" />
+<meta itemprop="dateModified" content="2021-05-22T19:25:00-01:00" />
+
+`)
+ b.AssertFileContent("public/mypage/index.html", `
+<meta name="twitter:image" content="https://example.org/pageimg1.jpg"/>
+<meta property="og:image" content="https://example.org/pageimg1.jpg" />
+<meta property="og:image" content="https://example.org/pageimg2.jpg" />
+<meta property="article:published_time" content="2021-02-26T18:02:00+01:00" />
+<meta property="article:modified_time" content="2021-05-22T19:25:00+01:00" />
+<meta itemprop="image" content="https://example.org/pageimg1.jpg">
+<meta itemprop="image" content="https://example.org/pageimg2.jpg">
+<meta itemprop="datePublished" content="2021-02-26T18:02:00+01:00" />
+<meta itemprop="dateModified" content="2021-05-22T19:25:00+01:00" />
+`)
+ b.AssertFileContent("public/mysite/index.html", `
+<meta name="twitter:image" content="https://example.org/siteimg1.jpg"/>
+<meta property="og:image" content="https://example.org/siteimg1.jpg"/>
+<meta itemprop="image" content="https://example.org/siteimg1.jpg"/>
+`)
+}
+
+// Just some simple test of the embedded templates to avoid
+// https://github.com/gohugoio/hugo/issues/4757 and similar.
+func TestEmbeddedTemplates(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+ c.Assert(true, qt.Equals, true)
+
+ home := []string{"index.html", `
+GA:
+{{ template "_internal/google_analytics.html" . }}
+
+GA async:
+
+{{ template "_internal/google_analytics_async.html" . }}
+
+Disqus:
+
+{{ template "_internal/disqus.html" . }}
+
+`}
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded(home...)
+
+ b.Build(BuildCfg{})
+
+ // Check GA regular and async
+ b.AssertFileContent("public/index.html",
+ "'anonymizeIp', true",
+ "'script','https://www.google-analytics.com/analytics.js','ga');\n\tga('create', 'UA-ga_id', 'auto')",
+ "<script async src='https://www.google-analytics.com/analytics.js'>")
+
+ // Disqus
+ b.AssertFileContent("public/index.html", "\"disqus_shortname\" + '.disqus.com/embed.js';")
+}
+
+func TestEmbeddedPaginationTemplate(t *testing.T) {
+ t.Parallel()
+
+ test := func(variant string, expectedOutput string) {
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `paginate = 1`)
+ b.WithContent(
+ "s1/p01.md", "---\ntitle: p01\n---",
+ "s1/p02.md", "---\ntitle: p02\n---",
+ "s1/p03.md", "---\ntitle: p03\n---",
+ "s1/p04.md", "---\ntitle: p04\n---",
+ "s1/p05.md", "---\ntitle: p05\n---",
+ "s1/p06.md", "---\ntitle: p06\n---",
+ "s1/p07.md", "---\ntitle: p07\n---",
+ "s1/p08.md", "---\ntitle: p08\n---",
+ "s1/p09.md", "---\ntitle: p09\n---",
+ "s1/p10.md", "---\ntitle: p10\n---",
+ )
+ b.WithTemplates("index.html", `{{ .Paginate (where site.RegularPages "Section" "s1") }}`+variant)
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", expectedOutput)
+ }
+
+ expectedOutputDefaultFormat := "Pager 1\n <ul class=\"pagination pagination-default\">\n <li class=\"page-item disabled\">\n <a aria-disabled=\"true\" aria-label=\"First\" class=\"page-link\" role=\"button\" tabindex=\"-1\"><span aria-hidden=\"true\">&laquo;&laquo;</span></a>\n </li>\n <li class=\"page-item disabled\">\n <a aria-disabled=\"true\" aria-label=\"Previous\" class=\"page-link\" role=\"button\" tabindex=\"-1\"><span aria-hidden=\"true\">&laquo;</span></a>\n </li>\n <li class=\"page-item active\">\n <a aria-current=\"page\" aria-label=\"Page 1\" class=\"page-link\" role=\"button\">1</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/2/\" aria-label=\"Page 2\" class=\"page-link\" role=\"button\">2</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/3/\" aria-label=\"Page 3\" class=\"page-link\" role=\"button\">3</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/4/\" aria-label=\"Page 4\" class=\"page-link\" role=\"button\">4</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/5/\" aria-label=\"Page 5\" class=\"page-link\" role=\"button\">5</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/2/\" aria-label=\"Next\" class=\"page-link\" role=\"button\"><span aria-hidden=\"true\">&raquo;</span></a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/10/\" aria-label=\"Last\" class=\"page-link\" role=\"button\"><span aria-hidden=\"true\">&raquo;&raquo;</span></a>\n </li>\n </ul>"
+ expectedOutputTerseFormat := "Pager 1\n <ul class=\"pagination pagination-terse\">\n <li class=\"page-item active\">\n <a aria-current=\"page\" aria-label=\"Page 1\" class=\"page-link\" role=\"button\">1</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/2/\" aria-label=\"Page 2\" class=\"page-link\" role=\"button\">2</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/3/\" aria-label=\"Page 3\" class=\"page-link\" role=\"button\">3</a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/2/\" aria-label=\"Next\" class=\"page-link\" role=\"button\"><span aria-hidden=\"true\">&raquo;</span></a>\n </li>\n <li class=\"page-item\">\n <a href=\"/page/10/\" aria-label=\"Last\" class=\"page-link\" role=\"button\"><span aria-hidden=\"true\">&raquo;&raquo;</span></a>\n </li>\n </ul>"
+
+ variant := `{{ template "_internal/pagination.html" . }}`
+ test(variant, expectedOutputDefaultFormat)
+
+ variant = `{{ template "_internal/pagination.html" (dict "page" .) }}`
+ test(variant, expectedOutputDefaultFormat)
+
+ variant = `{{ template "_internal/pagination.html" (dict "page" . "format" "default") }}`
+ test(variant, expectedOutputDefaultFormat)
+
+ variant = `{{ template "_internal/pagination.html" (dict "page" . "format" "terse") }}`
+ test(variant, expectedOutputTerseFormat)
+}
diff --git a/hugolib/fileInfo.go b/hugolib/fileInfo.go
new file mode 100644
index 000000000..1cdd7041d
--- /dev/null
+++ b/hugolib/fileInfo.go
@@ -0,0 +1,115 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/source"
+)
+
+// fileInfo implements the File and ReadableFile interface.
+var (
+ _ source.File = (*fileInfo)(nil)
+)
+
+type fileInfo struct {
+ source.File
+
+ overriddenLang string
+}
+
+func (fi *fileInfo) Open() (afero.File, error) {
+ f, err := fi.FileInfo().Meta().Open()
+ if err != nil {
+ err = fmt.Errorf("fileInfo: %w", err)
+ }
+
+ return f, err
+}
+
+func (fi *fileInfo) Lang() string {
+ if fi.overriddenLang != "" {
+ return fi.overriddenLang
+ }
+ return fi.File.Lang()
+}
+
+func (fi *fileInfo) String() string {
+ if fi == nil || fi.File == nil {
+ return ""
+ }
+ return fi.Path()
+}
+
+// TODO(bep) rename
+func newFileInfo(sp *source.SourceSpec, fi hugofs.FileMetaInfo) (*fileInfo, error) {
+ baseFi, err := sp.NewFileInfo(fi)
+ if err != nil {
+ return nil, err
+ }
+
+ f := &fileInfo{
+ File: baseFi,
+ }
+
+ return f, nil
+}
+
+type bundleDirType int
+
+const (
+ bundleNot bundleDirType = iota
+
+ // All from here are bundles in one form or another.
+ bundleLeaf
+ bundleBranch
+)
+
+// Returns the given file's name's bundle type and whether it is a content
+// file or not.
+func classifyBundledFile(name string) (bundleDirType, bool) {
+ if !files.IsContentFile(name) {
+ return bundleNot, false
+ }
+ if strings.HasPrefix(name, "_index.") {
+ return bundleBranch, true
+ }
+
+ if strings.HasPrefix(name, "index.") {
+ return bundleLeaf, true
+ }
+
+ return bundleNot, true
+}
+
+func (b bundleDirType) String() string {
+ switch b {
+ case bundleNot:
+ return "Not a bundle"
+ case bundleLeaf:
+ return "Regular bundle"
+ case bundleBranch:
+ return "Branch bundle"
+ }
+
+ return ""
+}
diff --git a/hugolib/fileInfo_test.go b/hugolib/fileInfo_test.go
new file mode 100644
index 000000000..d8a70e9d3
--- /dev/null
+++ b/hugolib/fileInfo_test.go
@@ -0,0 +1,31 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/cast"
+)
+
+func TestFileInfo(t *testing.T) {
+ t.Run("String", func(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ fi := &fileInfo{}
+ _, err := cast.ToStringE(fi)
+ c.Assert(err, qt.IsNil)
+ })
+}
diff --git a/hugolib/filesystems/basefs.go b/hugolib/filesystems/basefs.go
new file mode 100644
index 000000000..a380857cd
--- /dev/null
+++ b/hugolib/filesystems/basefs.go
@@ -0,0 +1,846 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package filesystems provides the fine grained file systems used by Hugo. These
+// are typically virtual filesystems that are composites of project and theme content.
+package filesystems
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/bep/overlayfs"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/rogpeppe/go-internal/lockedfile"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/modules"
+
+ hpaths "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugolib/paths"
+ "github.com/spf13/afero"
+)
+
+const (
+ // Used to control concurrency between multiple Hugo instances, e.g.
+ // a running server and building new content with 'hugo new'.
+ // It's placed in the project root.
+ lockFileBuild = ".hugo_build.lock"
+)
+
+var filePathSeparator = string(filepath.Separator)
+
+// BaseFs contains the core base filesystems used by Hugo. The name "base" is used
+// to underline that even if they can be composites, they all have a base path set to a specific
+// resource folder, e.g "/my-project/content". So, no absolute filenames needed.
+type BaseFs struct {
+
+ // SourceFilesystems contains the different source file systems.
+ *SourceFilesystems
+
+ // The project source.
+ SourceFs afero.Fs
+
+ // The filesystem used to publish the rendered site.
+ // This usually maps to /my-project/public.
+ PublishFs afero.Fs
+
+ // The filesystem used for renderStaticToDisk.
+ PublishFsStatic afero.Fs
+
+ // A read-only filesystem starting from the project workDir.
+ WorkDir afero.Fs
+
+ theBigFs *filesystemsCollector
+
+ // Locks.
+ buildMu Lockable // <project>/.hugo_build.lock
+}
+
+type Lockable interface {
+ Lock() (unlock func(), err error)
+}
+
+type fakeLockfileMutex struct {
+ mu sync.Mutex
+}
+
+func (f *fakeLockfileMutex) Lock() (func(), error) {
+ f.mu.Lock()
+ return func() { f.mu.Unlock() }, nil
+}
+
+// Tries to acquire a build lock.
+func (fs *BaseFs) LockBuild() (unlock func(), err error) {
+ return fs.buildMu.Lock()
+}
+
+// TODO(bep) we can get regular files in here and that is fine, but
+// we need to clean up the naming.
+func (fs *BaseFs) WatchDirs() []hugofs.FileMetaInfo {
+ var dirs []hugofs.FileMetaInfo
+ for _, dir := range fs.AllDirs() {
+ if dir.Meta().Watch {
+ dirs = append(dirs, dir)
+ }
+ }
+ return dirs
+}
+
+func (fs *BaseFs) AllDirs() []hugofs.FileMetaInfo {
+ var dirs []hugofs.FileMetaInfo
+ for _, dirSet := range [][]hugofs.FileMetaInfo{
+ fs.Archetypes.Dirs,
+ fs.I18n.Dirs,
+ fs.Data.Dirs,
+ fs.Content.Dirs,
+ fs.Assets.Dirs,
+ fs.Layouts.Dirs,
+ // fs.Resources.Dirs,
+ fs.StaticDirs,
+ } {
+ dirs = append(dirs, dirSet...)
+ }
+
+ return dirs
+}
+
+// RelContentDir tries to create a path relative to the content root from
+// the given filename. The return value is the path and language code.
+func (b *BaseFs) RelContentDir(filename string) string {
+ for _, dir := range b.SourceFilesystems.Content.Dirs {
+ dirname := dir.Meta().Filename
+ if strings.HasPrefix(filename, dirname) {
+ rel := path.Join(dir.Meta().Path, strings.TrimPrefix(filename, dirname))
+ return strings.TrimPrefix(rel, filePathSeparator)
+ }
+ }
+ // Either not a content dir or already relative.
+ return filename
+}
+
+// AbsProjectContentDir tries to construct a filename below the most
+// relevant content directory.
+func (b *BaseFs) AbsProjectContentDir(filename string) (string, string, error) {
+ isAbs := filepath.IsAbs(filename)
+ for _, dir := range b.SourceFilesystems.Content.Dirs {
+ meta := dir.Meta()
+ if !meta.IsProject {
+ continue
+ }
+
+ if isAbs {
+ if strings.HasPrefix(filename, meta.Filename) {
+ return strings.TrimPrefix(filename, meta.Filename), filename, nil
+ }
+ } else {
+ contentDir := strings.TrimPrefix(strings.TrimPrefix(meta.Filename, meta.BaseDir), filePathSeparator) + filePathSeparator
+
+ if strings.HasPrefix(filename, contentDir) {
+ relFilename := strings.TrimPrefix(filename, contentDir)
+ absFilename := filepath.Join(meta.Filename, relFilename)
+ return relFilename, absFilename, nil
+ }
+ }
+
+ }
+
+ if !isAbs {
+ // A filename on the form "posts/mypage.md", put it inside
+ // the first content folder, usually <workDir>/content.
+ // Pick the first project dir (which is probably the most important one).
+ for _, dir := range b.SourceFilesystems.Content.Dirs {
+ meta := dir.Meta()
+ if meta.IsProject {
+ return filename, filepath.Join(meta.Filename, filename), nil
+ }
+ }
+
+ }
+
+ return "", "", fmt.Errorf("could not determine content directory for %q", filename)
+}
+
+// ResolveJSConfigFile resolves the JS-related config file to an absolute
+// filename. One example of such would be postcss.config.js.
+func (fs *BaseFs) ResolveJSConfigFile(name string) string {
+ // First look in assets/_jsconfig
+ fi, err := fs.Assets.Fs.Stat(filepath.Join(files.FolderJSConfig, name))
+ if err == nil {
+ return fi.(hugofs.FileMetaInfo).Meta().Filename
+ }
+ // Fall back to the work dir.
+ fi, err = fs.Work.Stat(name)
+ if err == nil {
+ return fi.(hugofs.FileMetaInfo).Meta().Filename
+ }
+
+ return ""
+}
+
+// SourceFilesystems contains the different source file systems. These can be
+// composite file systems (theme and project etc.), and they all have their root
+// set to the source type they provide: data, i18n, static, layouts.
+type SourceFilesystems struct {
+ Content *SourceFilesystem
+ Data *SourceFilesystem
+ I18n *SourceFilesystem
+ Layouts *SourceFilesystem
+ Archetypes *SourceFilesystem
+ Assets *SourceFilesystem
+
+ // Writable filesystem on top the project's resources directory,
+ // with any sub module's resource fs layered below.
+ ResourcesCache afero.Fs
+
+ // The work folder (may be a composite of project and theme components).
+ Work afero.Fs
+
+ // When in multihost we have one static filesystem per language. Syncing
+ // static files is currently done outside of the Hugo build (where there is
+ // a concept of a site per language).
+ // When in non-multihost mode there will be one entry in this map with a blank key.
+ Static map[string]*SourceFilesystem
+
+ // All the /static dirs (including themes/modules).
+ StaticDirs []hugofs.FileMetaInfo
+}
+
+// FileSystems returns the FileSystems relevant for the change detection
+// in server mode.
+// Note: This does currently not return any static fs.
+func (s *SourceFilesystems) FileSystems() []*SourceFilesystem {
+ return []*SourceFilesystem{
+ s.Content,
+ s.Data,
+ s.I18n,
+ s.Layouts,
+ s.Archetypes,
+ // TODO(bep) static
+ }
+}
+
+// A SourceFilesystem holds the filesystem for a given source type in Hugo (data,
+// i18n, layouts, static) and additional metadata to be able to use that filesystem
+// in server mode.
+type SourceFilesystem struct {
+ // Name matches one in files.ComponentFolders
+ Name string
+
+ // This is a virtual composite filesystem. It expects path relative to a context.
+ Fs afero.Fs
+
+ // This filesystem has separate root directories, starting from project and down
+ // to the themes/modules.
+ Dirs []hugofs.FileMetaInfo
+
+ // When syncing a source folder to the target (e.g. /public), this may
+ // be set to publish into a subfolder. This is used for static syncing
+ // in multihost mode.
+ PublishFolder string
+}
+
+// ContentStaticAssetFs will create a new composite filesystem from the content,
+// static, and asset filesystems. The site language is needed to pick the correct static filesystem.
+// The order is content, static and then assets.
+// TODO(bep) check usage
+func (s SourceFilesystems) ContentStaticAssetFs(lang string) afero.Fs {
+ return overlayfs.New(
+ overlayfs.Options{
+ Fss: []afero.Fs{
+ s.Content.Fs,
+ s.StaticFs(lang),
+ s.Assets.Fs,
+ },
+ },
+ )
+
+}
+
+// StaticFs returns the static filesystem for the given language.
+// This can be a composite filesystem.
+func (s SourceFilesystems) StaticFs(lang string) afero.Fs {
+ var staticFs afero.Fs = hugofs.NoOpFs
+
+ if fs, ok := s.Static[lang]; ok {
+ staticFs = fs.Fs
+ } else if fs, ok := s.Static[""]; ok {
+ staticFs = fs.Fs
+ }
+
+ return staticFs
+}
+
+// StatResource looks for a resource in these filesystems in order: static, assets and finally content.
+// If found in any of them, it returns FileInfo and the relevant filesystem.
+// Any non os.IsNotExist error will be returned.
+// An os.IsNotExist error will be returned only if all filesystems return such an error.
+// Note that if we only wanted to find the file, we could create a composite Afero fs,
+// but we also need to know which filesystem root it lives in.
+func (s SourceFilesystems) StatResource(lang, filename string) (fi os.FileInfo, fs afero.Fs, err error) {
+ for _, fsToCheck := range []afero.Fs{s.StaticFs(lang), s.Assets.Fs, s.Content.Fs} {
+ fs = fsToCheck
+ fi, err = fs.Stat(filename)
+ if err == nil || !os.IsNotExist(err) {
+ return
+ }
+ }
+ // Not found.
+ return
+}
+
+// IsStatic returns true if the given filename is a member of one of the static
+// filesystems.
+func (s SourceFilesystems) IsStatic(filename string) bool {
+ for _, staticFs := range s.Static {
+ if staticFs.Contains(filename) {
+ return true
+ }
+ }
+ return false
+}
+
+// IsContent returns true if the given filename is a member of the content filesystem.
+func (s SourceFilesystems) IsContent(filename string) bool {
+ return s.Content.Contains(filename)
+}
+
+// IsLayout returns true if the given filename is a member of the layouts filesystem.
+func (s SourceFilesystems) IsLayout(filename string) bool {
+ return s.Layouts.Contains(filename)
+}
+
+// IsData returns true if the given filename is a member of the data filesystem.
+func (s SourceFilesystems) IsData(filename string) bool {
+ return s.Data.Contains(filename)
+}
+
+// IsAsset returns true if the given filename is a member of the asset filesystem.
+func (s SourceFilesystems) IsAsset(filename string) bool {
+ return s.Assets.Contains(filename)
+}
+
+// IsI18n returns true if the given filename is a member of the i18n filesystem.
+func (s SourceFilesystems) IsI18n(filename string) bool {
+ return s.I18n.Contains(filename)
+}
+
+// MakeStaticPathRelative makes an absolute static filename into a relative one.
+// It will return an empty string if the filename is not a member of a static filesystem.
+func (s SourceFilesystems) MakeStaticPathRelative(filename string) string {
+ for _, staticFs := range s.Static {
+ rel, _ := staticFs.MakePathRelative(filename)
+ if rel != "" {
+ return rel
+ }
+ }
+ return ""
+}
+
+// MakePathRelative creates a relative path from the given filename.
+func (d *SourceFilesystem) MakePathRelative(filename string) (string, bool) {
+ for _, dir := range d.Dirs {
+ meta := dir.(hugofs.FileMetaInfo).Meta()
+ currentPath := meta.Filename
+
+ if strings.HasPrefix(filename, currentPath) {
+ rel := strings.TrimPrefix(filename, currentPath)
+ if mp := meta.Path; mp != "" {
+ rel = filepath.Join(mp, rel)
+ }
+ return strings.TrimPrefix(rel, filePathSeparator), true
+ }
+ }
+ return "", false
+}
+
+func (d *SourceFilesystem) RealFilename(rel string) string {
+ fi, err := d.Fs.Stat(rel)
+ if err != nil {
+ return rel
+ }
+ if realfi, ok := fi.(hugofs.FileMetaInfo); ok {
+ return realfi.Meta().Filename
+ }
+
+ return rel
+}
+
+// Contains returns whether the given filename is a member of the current filesystem.
+func (d *SourceFilesystem) Contains(filename string) bool {
+ for _, dir := range d.Dirs {
+ if strings.HasPrefix(filename, dir.Meta().Filename) {
+ return true
+ }
+ }
+ return false
+}
+
+// Path returns the mount relative path to the given filename if it is a member
+// of the current filesystem, an empty string if not.
+func (d *SourceFilesystem) Path(filename string) string {
+ for _, dir := range d.Dirs {
+ meta := dir.Meta()
+ if strings.HasPrefix(filename, meta.Filename) {
+ p := strings.TrimPrefix(strings.TrimPrefix(filename, meta.Filename), filePathSeparator)
+ if mountRoot := meta.MountRoot; mountRoot != "" {
+ return filepath.Join(mountRoot, p)
+ }
+ return p
+ }
+ }
+ return ""
+}
+
+// RealDirs gets a list of absolute paths to directories starting from the given
+// path.
+func (d *SourceFilesystem) RealDirs(from string) []string {
+ var dirnames []string
+ for _, dir := range d.Dirs {
+ meta := dir.Meta()
+ dirname := filepath.Join(meta.Filename, from)
+ _, err := meta.Fs.Stat(from)
+
+ if err == nil {
+ dirnames = append(dirnames, dirname)
+ }
+ }
+ return dirnames
+}
+
+// WithBaseFs allows reuse of some potentially expensive to create parts that remain
+// the same across sites/languages.
+func WithBaseFs(b *BaseFs) func(*BaseFs) error {
+ return func(bb *BaseFs) error {
+ bb.theBigFs = b.theBigFs
+ bb.SourceFilesystems = b.SourceFilesystems
+ return nil
+ }
+}
+
+// NewBase builds the filesystems used by Hugo given the paths and options provided.
+func NewBase(p *paths.Paths, logger loggers.Logger, options ...func(*BaseFs) error) (*BaseFs, error) {
+ fs := p.Fs
+ if logger == nil {
+ logger = loggers.NewWarningLogger()
+ }
+
+ publishFs := hugofs.NewBaseFileDecorator(fs.PublishDir)
+ sourceFs := hugofs.NewBaseFileDecorator(afero.NewBasePathFs(fs.Source, p.WorkingDir))
+ publishFsStatic := fs.PublishDirStatic
+
+ var buildMu Lockable
+ if p.Cfg.GetBool("noBuildLock") || htesting.IsTest {
+ buildMu = &fakeLockfileMutex{}
+ } else {
+ buildMu = lockedfile.MutexAt(filepath.Join(p.WorkingDir, lockFileBuild))
+ }
+
+ b := &BaseFs{
+ SourceFs: sourceFs,
+ WorkDir: fs.WorkingDirReadOnly,
+ PublishFs: publishFs,
+ PublishFsStatic: publishFsStatic,
+ buildMu: buildMu,
+ }
+
+ for _, opt := range options {
+ if err := opt(b); err != nil {
+ return nil, err
+ }
+ }
+
+ if b.theBigFs != nil && b.SourceFilesystems != nil {
+ return b, nil
+ }
+
+ builder := newSourceFilesystemsBuilder(p, logger, b)
+ sourceFilesystems, err := builder.Build()
+ if err != nil {
+ return nil, fmt.Errorf("build filesystems: %w", err)
+ }
+
+ b.SourceFilesystems = sourceFilesystems
+ b.theBigFs = builder.theBigFs
+
+ return b, nil
+}
+
+type sourceFilesystemsBuilder struct {
+ logger loggers.Logger
+ p *paths.Paths
+ sourceFs afero.Fs
+ result *SourceFilesystems
+ theBigFs *filesystemsCollector
+}
+
+func newSourceFilesystemsBuilder(p *paths.Paths, logger loggers.Logger, b *BaseFs) *sourceFilesystemsBuilder {
+ sourceFs := hugofs.NewBaseFileDecorator(p.Fs.Source)
+ return &sourceFilesystemsBuilder{p: p, logger: logger, sourceFs: sourceFs, theBigFs: b.theBigFs, result: &SourceFilesystems{}}
+}
+
+func (b *sourceFilesystemsBuilder) newSourceFilesystem(name string, fs afero.Fs, dirs []hugofs.FileMetaInfo) *SourceFilesystem {
+ return &SourceFilesystem{
+ Name: name,
+ Fs: fs,
+ Dirs: dirs,
+ }
+}
+
+func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) {
+ if b.theBigFs == nil {
+ theBigFs, err := b.createMainOverlayFs(b.p)
+ if err != nil {
+ return nil, fmt.Errorf("create main fs: %w", err)
+ }
+
+ b.theBigFs = theBigFs
+ }
+
+ createView := func(componentID string) *SourceFilesystem {
+ if b.theBigFs == nil || b.theBigFs.overlayMounts == nil {
+ return b.newSourceFilesystem(componentID, hugofs.NoOpFs, nil)
+ }
+
+ dirs := b.theBigFs.overlayDirs[componentID]
+
+ return b.newSourceFilesystem(componentID, afero.NewBasePathFs(b.theBigFs.overlayMounts, componentID), dirs)
+ }
+
+ b.result.Archetypes = createView(files.ComponentFolderArchetypes)
+ b.result.Layouts = createView(files.ComponentFolderLayouts)
+ b.result.Assets = createView(files.ComponentFolderAssets)
+ b.result.ResourcesCache = b.theBigFs.overlayResources
+
+ // Data, i18n and content cannot use the overlay fs
+ dataDirs := b.theBigFs.overlayDirs[files.ComponentFolderData]
+ dataFs, err := hugofs.NewSliceFs(dataDirs...)
+ if err != nil {
+ return nil, err
+ }
+
+ b.result.Data = b.newSourceFilesystem(files.ComponentFolderData, dataFs, dataDirs)
+
+ i18nDirs := b.theBigFs.overlayDirs[files.ComponentFolderI18n]
+ i18nFs, err := hugofs.NewSliceFs(i18nDirs...)
+ if err != nil {
+ return nil, err
+ }
+ b.result.I18n = b.newSourceFilesystem(files.ComponentFolderI18n, i18nFs, i18nDirs)
+
+ contentDirs := b.theBigFs.overlayDirs[files.ComponentFolderContent]
+ contentBfs := afero.NewBasePathFs(b.theBigFs.overlayMountsContent, files.ComponentFolderContent)
+
+ contentFs, err := hugofs.NewLanguageFs(b.p.LanguagesDefaultFirst.AsOrdinalSet(), contentBfs)
+ if err != nil {
+ return nil, fmt.Errorf("create content filesystem: %w", err)
+ }
+
+ b.result.Content = b.newSourceFilesystem(files.ComponentFolderContent, contentFs, contentDirs)
+
+ b.result.Work = afero.NewReadOnlyFs(b.theBigFs.overlayFull)
+
+ // Create static filesystem(s)
+ ms := make(map[string]*SourceFilesystem)
+ b.result.Static = ms
+ b.result.StaticDirs = b.theBigFs.overlayDirs[files.ComponentFolderStatic]
+
+ if b.theBigFs.staticPerLanguage != nil {
+ // Multihost mode
+ for k, v := range b.theBigFs.staticPerLanguage {
+ sfs := b.newSourceFilesystem(files.ComponentFolderStatic, v, b.result.StaticDirs)
+ sfs.PublishFolder = k
+ ms[k] = sfs
+ }
+ } else {
+ bfs := afero.NewBasePathFs(b.theBigFs.overlayMountsStatic, files.ComponentFolderStatic)
+ ms[""] = b.newSourceFilesystem(files.ComponentFolderStatic, bfs, b.result.StaticDirs)
+ }
+
+ return b.result, nil
+}
+
+func (b *sourceFilesystemsBuilder) createMainOverlayFs(p *paths.Paths) (*filesystemsCollector, error) {
+ var staticFsMap map[string]*overlayfs.OverlayFs
+ if b.p.Cfg.GetBool("multihost") {
+ staticFsMap = make(map[string]*overlayfs.OverlayFs)
+ for _, l := range b.p.Languages {
+ staticFsMap[l.Lang] = overlayfs.New(overlayfs.Options{})
+ }
+ }
+
+ collector := &filesystemsCollector{
+ sourceProject: b.sourceFs,
+ sourceModules: hugofs.NewNoSymlinkFs(b.sourceFs, b.logger, false),
+ overlayDirs: make(map[string][]hugofs.FileMetaInfo),
+ staticPerLanguage: staticFsMap,
+
+ overlayMounts: overlayfs.New(overlayfs.Options{}),
+ overlayMountsContent: overlayfs.New(overlayfs.Options{DirsMerger: hugofs.LanguageDirsMerger}),
+ overlayMountsStatic: overlayfs.New(overlayfs.Options{DirsMerger: hugofs.LanguageDirsMerger}),
+ overlayFull: overlayfs.New(overlayfs.Options{}),
+ overlayResources: overlayfs.New(overlayfs.Options{FirstWritable: true}),
+ }
+
+ mods := p.AllModules
+
+ mounts := make([]mountsDescriptor, len(mods))
+
+ for i := 0; i < len(mods); i++ {
+ mod := mods[i]
+ dir := mod.Dir()
+
+ isMainProject := mod.Owner() == nil
+ mounts[i] = mountsDescriptor{
+ Module: mod,
+ dir: dir,
+ isMainProject: isMainProject,
+ ordinal: i,
+ }
+
+ }
+
+ err := b.createOverlayFs(collector, mounts)
+
+ return collector, err
+}
+
+func (b *sourceFilesystemsBuilder) isContentMount(mnt modules.Mount) bool {
+ return strings.HasPrefix(mnt.Target, files.ComponentFolderContent)
+}
+
+func (b *sourceFilesystemsBuilder) isStaticMount(mnt modules.Mount) bool {
+ return strings.HasPrefix(mnt.Target, files.ComponentFolderStatic)
+}
+
+func (b *sourceFilesystemsBuilder) createOverlayFs(
+ collector *filesystemsCollector,
+ mounts []mountsDescriptor) error {
+
+ if len(mounts) == 0 {
+ appendNopIfEmpty := func(ofs *overlayfs.OverlayFs) *overlayfs.OverlayFs {
+ if ofs.NumFilesystems() > 0 {
+ return ofs
+ }
+ return ofs.Append(hugofs.NoOpFs)
+ }
+ collector.overlayMounts = appendNopIfEmpty(collector.overlayMounts)
+ collector.overlayMountsContent = appendNopIfEmpty(collector.overlayMountsContent)
+ collector.overlayMountsStatic = appendNopIfEmpty(collector.overlayMountsStatic)
+ collector.overlayFull = appendNopIfEmpty(collector.overlayFull)
+ collector.overlayResources = appendNopIfEmpty(collector.overlayResources)
+
+ return nil
+ }
+
+ for _, md := range mounts {
+ var (
+ fromTo []hugofs.RootMapping
+ fromToContent []hugofs.RootMapping
+ fromToStatic []hugofs.RootMapping
+ )
+
+ absPathify := func(path string) (string, string) {
+ if filepath.IsAbs(path) {
+ return "", path
+ }
+ return md.dir, hpaths.AbsPathify(md.dir, path)
+ }
+
+ for i, mount := range md.Mounts() {
+
+ // Add more weight to early mounts.
+ // When two mounts contain the same filename,
+ // the first entry wins.
+ mountWeight := (10 + md.ordinal) * (len(md.Mounts()) - i)
+
+ inclusionFilter, err := glob.NewFilenameFilter(
+ types.ToStringSlicePreserveString(mount.IncludeFiles),
+ types.ToStringSlicePreserveString(mount.ExcludeFiles),
+ )
+ if err != nil {
+ return err
+ }
+
+ base, filename := absPathify(mount.Source)
+
+ rm := hugofs.RootMapping{
+ From: mount.Target,
+ To: filename,
+ ToBasedir: base,
+ Module: md.Module.Path(),
+ IsProject: md.isMainProject,
+ Meta: &hugofs.FileMeta{
+ Watch: md.Watch(),
+ Weight: mountWeight,
+ Classifier: files.ContentClassContent,
+ InclusionFilter: inclusionFilter,
+ },
+ }
+
+ isContentMount := b.isContentMount(mount)
+
+ lang := mount.Lang
+ if lang == "" && isContentMount {
+ lang = b.p.DefaultContentLanguage
+ }
+
+ rm.Meta.Lang = lang
+
+ if isContentMount {
+ fromToContent = append(fromToContent, rm)
+ } else if b.isStaticMount(mount) {
+ fromToStatic = append(fromToStatic, rm)
+ } else {
+ fromTo = append(fromTo, rm)
+ }
+ }
+
+ modBase := collector.sourceProject
+ if !md.isMainProject {
+ modBase = collector.sourceModules
+ }
+ sourceStatic := hugofs.NewNoSymlinkFs(modBase, b.logger, true)
+
+ rmfs, err := hugofs.NewRootMappingFs(modBase, fromTo...)
+ if err != nil {
+ return err
+ }
+ rmfsContent, err := hugofs.NewRootMappingFs(modBase, fromToContent...)
+ if err != nil {
+ return err
+ }
+ rmfsStatic, err := hugofs.NewRootMappingFs(sourceStatic, fromToStatic...)
+ if err != nil {
+ return err
+ }
+
+ // We need to keep the ordered list of directories for watching and
+ // some special merge operations (data, i18n).
+ collector.addDirs(rmfs)
+ collector.addDirs(rmfsContent)
+ collector.addDirs(rmfsStatic)
+
+ if collector.staticPerLanguage != nil {
+ for _, l := range b.p.Languages {
+ lang := l.Lang
+
+ lfs := rmfsStatic.Filter(func(rm hugofs.RootMapping) bool {
+ rlang := rm.Meta.Lang
+ return rlang == "" || rlang == lang
+ })
+
+ bfs := afero.NewBasePathFs(lfs, files.ComponentFolderStatic)
+ collector.staticPerLanguage[lang] = collector.staticPerLanguage[lang].Append(bfs)
+
+ }
+ }
+
+ getResourcesDir := func() string {
+ if md.isMainProject {
+ return b.p.AbsResourcesDir
+ }
+ _, filename := absPathify(files.FolderResources)
+ return filename
+ }
+
+ collector.overlayMounts = collector.overlayMounts.Append(rmfs)
+ collector.overlayMountsContent = collector.overlayMountsContent.Append(rmfsContent)
+ collector.overlayMountsStatic = collector.overlayMountsStatic.Append(rmfsStatic)
+ collector.overlayFull = collector.overlayFull.Append(afero.NewBasePathFs(modBase, md.dir))
+ collector.overlayResources = collector.overlayResources.Append(afero.NewBasePathFs(modBase, getResourcesDir()))
+
+ }
+
+ return nil
+}
+
+func printFs(fs afero.Fs, path string, w io.Writer) {
+ if fs == nil {
+ return
+ }
+ afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ return nil
+ }
+ var filename string
+ if fim, ok := info.(hugofs.FileMetaInfo); ok {
+ filename = fim.Meta().Filename
+ }
+ fmt.Fprintf(w, " %q %q\n", path, filename)
+ return nil
+ })
+}
+
+type filesystemsCollector struct {
+ sourceProject afero.Fs // Source for project folders
+ sourceModules afero.Fs // Source for modules/themes
+
+ overlayMounts *overlayfs.OverlayFs
+ overlayMountsContent *overlayfs.OverlayFs
+ overlayMountsStatic *overlayfs.OverlayFs
+ overlayFull *overlayfs.OverlayFs
+ overlayResources *overlayfs.OverlayFs
+
+ // Maps component type (layouts, static, content etc.) to an ordered list of
+ // directories representing the overlay filesystems above.
+ overlayDirs map[string][]hugofs.FileMetaInfo
+
+ // Set if in multihost mode
+ staticPerLanguage map[string]*overlayfs.OverlayFs
+
+ finalizerInit sync.Once
+}
+
+func (c *filesystemsCollector) addDirs(rfs *hugofs.RootMappingFs) {
+ for _, componentFolder := range files.ComponentFolders {
+ c.addDir(rfs, componentFolder)
+ }
+}
+
+func (c *filesystemsCollector) addDir(rfs *hugofs.RootMappingFs, componentFolder string) {
+ dirs, err := rfs.Dirs(componentFolder)
+
+ if err == nil {
+ c.overlayDirs[componentFolder] = append(c.overlayDirs[componentFolder], dirs...)
+ }
+}
+
+func (c *filesystemsCollector) reverseFis(fis []hugofs.FileMetaInfo) {
+ for i := len(fis)/2 - 1; i >= 0; i-- {
+ opp := len(fis) - 1 - i
+ fis[i], fis[opp] = fis[opp], fis[i]
+ }
+}
+
+type mountsDescriptor struct {
+ modules.Module
+ dir string
+ isMainProject bool
+ ordinal int
+}
diff --git a/hugolib/filesystems/basefs_test.go b/hugolib/filesystems/basefs_test.go
new file mode 100644
index 000000000..a729e63b1
--- /dev/null
+++ b/hugolib/filesystems/basefs_test.go
@@ -0,0 +1,460 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package filesystems
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gobwas/glob"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugolib/paths"
+ "github.com/gohugoio/hugo/modules"
+)
+
+func initConfig(fs afero.Fs, cfg config.Provider) error {
+ if _, err := langs.LoadLanguageSettings(cfg, nil); err != nil {
+ return err
+ }
+
+ modConfig, err := modules.DecodeConfig(cfg)
+ if err != nil {
+ return err
+ }
+
+ workingDir := cfg.GetString("workingDir")
+ themesDir := cfg.GetString("themesDir")
+ if !filepath.IsAbs(themesDir) {
+ themesDir = filepath.Join(workingDir, themesDir)
+ }
+ globAll := glob.MustCompile("**", '/')
+ modulesClient := modules.NewClient(modules.ClientConfig{
+ Fs: fs,
+ WorkingDir: workingDir,
+ ThemesDir: themesDir,
+ ModuleConfig: modConfig,
+ IgnoreVendor: globAll,
+ })
+
+ moduleConfig, err := modulesClient.Collect()
+ if err != nil {
+ return err
+ }
+
+ if err := modules.ApplyProjectConfigDefaults(cfg, moduleConfig.ActiveModules[0]); err != nil {
+ return err
+ }
+
+ cfg.Set("allModules", moduleConfig.ActiveModules)
+
+ return nil
+}
+
+func TestNewBaseFs(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+
+ fs := hugofs.NewMem(v)
+
+ themes := []string{"btheme", "atheme"}
+
+ workingDir := filepath.FromSlash("/my/work")
+ v.Set("workingDir", workingDir)
+ v.Set("contentDir", "content")
+ v.Set("themesDir", "themes")
+ v.Set("defaultContentLanguage", "en")
+ v.Set("theme", themes[:1])
+
+ // Write some data to the themes
+ for _, theme := range themes {
+ for _, dir := range []string{"i18n", "data", "archetypes", "layouts"} {
+ base := filepath.Join(workingDir, "themes", theme, dir)
+ filenameTheme := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme))
+ filenameOverlap := filepath.Join(base, "f3.txt")
+ fs.Source.Mkdir(base, 0755)
+ content := []byte(fmt.Sprintf("content:%s:%s", theme, dir))
+ afero.WriteFile(fs.Source, filenameTheme, content, 0755)
+ afero.WriteFile(fs.Source, filenameOverlap, content, 0755)
+ }
+ // Write some files to the root of the theme
+ base := filepath.Join(workingDir, "themes", theme)
+ afero.WriteFile(fs.Source, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), []byte(fmt.Sprintf("content:%s", theme)), 0755)
+ afero.WriteFile(fs.Source, filepath.Join(base, "file-theme-root.txt"), []byte(fmt.Sprintf("content:%s", theme)), 0755)
+ }
+
+ afero.WriteFile(fs.Source, filepath.Join(workingDir, "file-root.txt"), []byte("content-project"), 0755)
+
+ afero.WriteFile(fs.Source, filepath.Join(workingDir, "themes", "btheme", "config.toml"), []byte(`
+theme = ["atheme"]
+`), 0755)
+
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "contentDir", "mycontent", 3)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "i18nDir", "myi18n", 4)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "layoutDir", "mylayouts", 5)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "staticDir", "mystatic", 6)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "dataDir", "mydata", 7)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "archetypeDir", "myarchetypes", 8)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "assetDir", "myassets", 9)
+ setConfigAndWriteSomeFilesTo(fs.Source, v, "resourceDir", "myrsesource", 10)
+
+ v.Set("publishDir", "public")
+ c.Assert(initConfig(fs.Source, v), qt.IsNil)
+
+ p, err := paths.New(fs, v)
+ c.Assert(err, qt.IsNil)
+
+ bfs, err := NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(bfs, qt.Not(qt.IsNil))
+
+ root, err := bfs.I18n.Fs.Open("")
+ c.Assert(err, qt.IsNil)
+ dirnames, err := root.Readdirnames(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(dirnames, qt.DeepEquals, []string{"f1.txt", "f2.txt", "f3.txt", "f4.txt", "f3.txt", "theme-file-btheme.txt", "f3.txt", "theme-file-atheme.txt"})
+
+ root, err = bfs.Data.Fs.Open("")
+ c.Assert(err, qt.IsNil)
+ dirnames, err = root.Readdirnames(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(dirnames, qt.DeepEquals, []string{"f1.txt", "f2.txt", "f3.txt", "f4.txt", "f5.txt", "f6.txt", "f7.txt", "f3.txt", "theme-file-btheme.txt", "f3.txt", "theme-file-atheme.txt"})
+
+ checkFileCount(bfs.Layouts.Fs, "", c, 7)
+
+ checkFileCount(bfs.Content.Fs, "", c, 3)
+ checkFileCount(bfs.I18n.Fs, "", c, 8) // 4 + 4 themes
+
+ checkFileCount(bfs.Static[""].Fs, "", c, 6)
+ checkFileCount(bfs.Data.Fs, "", c, 11) // 7 + 4 themes
+ checkFileCount(bfs.Archetypes.Fs, "", c, 10) // 8 + 2 themes
+ checkFileCount(bfs.Assets.Fs, "", c, 9)
+ checkFileCount(bfs.Work, "", c, 90)
+
+ c.Assert(bfs.IsData(filepath.Join(workingDir, "mydata", "file1.txt")), qt.Equals, true)
+ c.Assert(bfs.IsI18n(filepath.Join(workingDir, "myi18n", "file1.txt")), qt.Equals, true)
+ c.Assert(bfs.IsLayout(filepath.Join(workingDir, "mylayouts", "file1.txt")), qt.Equals, true)
+ c.Assert(bfs.IsStatic(filepath.Join(workingDir, "mystatic", "file1.txt")), qt.Equals, true)
+ c.Assert(bfs.IsAsset(filepath.Join(workingDir, "myassets", "file1.txt")), qt.Equals, true)
+
+ contentFilename := filepath.Join(workingDir, "mycontent", "file1.txt")
+ c.Assert(bfs.IsContent(contentFilename), qt.Equals, true)
+ rel := bfs.RelContentDir(contentFilename)
+ c.Assert(rel, qt.Equals, "file1.txt")
+
+ // Check Work fs vs theme
+ checkFileContent(bfs.Work, "file-root.txt", c, "content-project")
+ checkFileContent(bfs.Work, "theme-root-atheme.txt", c, "content:atheme")
+
+ // https://github.com/gohugoio/hugo/issues/5318
+ // Check both project and theme.
+ for _, fs := range []afero.Fs{bfs.Archetypes.Fs, bfs.Layouts.Fs} {
+ for _, filename := range []string{"/f1.txt", "/theme-file-atheme.txt"} {
+ filename = filepath.FromSlash(filename)
+ f, err := fs.Open(filename)
+ c.Assert(err, qt.IsNil)
+ f.Close()
+ }
+ }
+}
+
+func createConfig() config.Provider {
+ v := config.NewWithTestDefaults()
+ v.Set("contentDir", "mycontent")
+ v.Set("i18nDir", "myi18n")
+ v.Set("staticDir", "mystatic")
+ v.Set("dataDir", "mydata")
+ v.Set("layoutDir", "mylayouts")
+ v.Set("archetypeDir", "myarchetypes")
+ v.Set("assetDir", "myassets")
+ v.Set("resourceDir", "resources")
+ v.Set("publishDir", "public")
+ v.Set("defaultContentLanguage", "en")
+
+ return v
+}
+
+func TestNewBaseFsEmpty(t *testing.T) {
+ c := qt.New(t)
+ v := createConfig()
+ fs := hugofs.NewMem(v)
+ c.Assert(initConfig(fs.Source, v), qt.IsNil)
+
+ p, err := paths.New(fs, v)
+ c.Assert(err, qt.IsNil)
+ bfs, err := NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(bfs, qt.Not(qt.IsNil))
+ c.Assert(bfs.Archetypes.Fs, qt.Not(qt.IsNil))
+ c.Assert(bfs.Layouts.Fs, qt.Not(qt.IsNil))
+ c.Assert(bfs.Data.Fs, qt.Not(qt.IsNil))
+ c.Assert(bfs.I18n.Fs, qt.Not(qt.IsNil))
+ c.Assert(bfs.Work, qt.Not(qt.IsNil))
+ c.Assert(bfs.Content.Fs, qt.Not(qt.IsNil))
+ c.Assert(bfs.Static, qt.Not(qt.IsNil))
+}
+
+func TestRealDirs(t *testing.T) {
+ c := qt.New(t)
+ v := createConfig()
+ root, themesDir := t.TempDir(), t.TempDir()
+ v.Set("workingDir", root)
+ v.Set("themesDir", themesDir)
+ v.Set("theme", "mytheme")
+
+ fs := hugofs.NewDefault(v)
+ sfs := fs.Source
+
+ defer func() {
+ os.RemoveAll(root)
+ os.RemoveAll(themesDir)
+ }()
+
+ c.Assert(sfs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf1"), 0755), qt.IsNil)
+ c.Assert(sfs.MkdirAll(filepath.Join(root, "myassets", "scss", "sf2"), 0755), qt.IsNil)
+ c.Assert(sfs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2"), 0755), qt.IsNil)
+ c.Assert(sfs.MkdirAll(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3"), 0755), qt.IsNil)
+ c.Assert(sfs.MkdirAll(filepath.Join(root, "resources"), 0755), qt.IsNil)
+ c.Assert(sfs.MkdirAll(filepath.Join(themesDir, "mytheme", "resources"), 0755), qt.IsNil)
+
+ c.Assert(sfs.MkdirAll(filepath.Join(root, "myassets", "js", "f2"), 0755), qt.IsNil)
+
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf1", "a1.scss")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "scss", "sf2", "a3.scss")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "scss", "a2.scss")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf2", "a3.scss")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(themesDir, "mytheme", "assets", "scss", "sf3", "a4.scss")), []byte("content"), 0755)
+
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(themesDir, "mytheme", "resources", "t1.txt")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "resources", "p1.txt")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "resources", "p2.txt")), []byte("content"), 0755)
+
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "f2", "a1.js")), []byte("content"), 0755)
+ afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "a2.js")), []byte("content"), 0755)
+
+ c.Assert(initConfig(fs.Source, v), qt.IsNil)
+
+ p, err := paths.New(fs, v)
+ c.Assert(err, qt.IsNil)
+ bfs, err := NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(bfs, qt.Not(qt.IsNil))
+
+ checkFileCount(bfs.Assets.Fs, "", c, 6)
+
+ realDirs := bfs.Assets.RealDirs("scss")
+ c.Assert(len(realDirs), qt.Equals, 2)
+ c.Assert(realDirs[0], qt.Equals, filepath.Join(root, "myassets/scss"))
+ c.Assert(realDirs[len(realDirs)-1], qt.Equals, filepath.Join(themesDir, "mytheme/assets/scss"))
+
+ c.Assert(bfs.theBigFs, qt.Not(qt.IsNil))
+}
+
+func TestStaticFs(t *testing.T) {
+ c := qt.New(t)
+ v := createConfig()
+ workDir := "mywork"
+ v.Set("workingDir", workDir)
+ v.Set("themesDir", "themes")
+ v.Set("theme", []string{"t1", "t2"})
+
+ fs := hugofs.NewMem(v)
+
+ themeStaticDir := filepath.Join(workDir, "themes", "t1", "static")
+ themeStaticDir2 := filepath.Join(workDir, "themes", "t2", "static")
+
+ afero.WriteFile(fs.Source, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0755)
+ afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755)
+ afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755)
+ afero.WriteFile(fs.Source, filepath.Join(themeStaticDir2, "f2.txt"), []byte("Hugo Themes Rocks in t2!"), 0755)
+
+ c.Assert(initConfig(fs.Source, v), qt.IsNil)
+
+ p, err := paths.New(fs, v)
+ c.Assert(err, qt.IsNil)
+ bfs, err := NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+
+ sfs := bfs.StaticFs("en")
+ checkFileContent(sfs, "f1.txt", c, "Hugo Rocks!")
+ checkFileContent(sfs, "f2.txt", c, "Hugo Themes Still Rocks!")
+}
+
+func TestStaticFsMultiHost(t *testing.T) {
+ c := qt.New(t)
+ v := createConfig()
+ workDir := "mywork"
+ v.Set("workingDir", workDir)
+ v.Set("themesDir", "themes")
+ v.Set("theme", "t1")
+ v.Set("defaultContentLanguage", "en")
+
+ langConfig := map[string]any{
+ "no": map[string]any{
+ "staticDir": "static_no",
+ "baseURL": "https://example.org/no/",
+ },
+ "en": map[string]any{
+ "baseURL": "https://example.org/en/",
+ },
+ }
+
+ v.Set("languages", langConfig)
+
+ fs := hugofs.NewMem(v)
+
+ themeStaticDir := filepath.Join(workDir, "themes", "t1", "static")
+
+ afero.WriteFile(fs.Source, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0755)
+ afero.WriteFile(fs.Source, filepath.Join(workDir, "static_no", "f1.txt"), []byte("Hugo Rocks in Norway!"), 0755)
+
+ afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755)
+ afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755)
+
+ c.Assert(initConfig(fs.Source, v), qt.IsNil)
+
+ p, err := paths.New(fs, v)
+ c.Assert(err, qt.IsNil)
+ bfs, err := NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+ enFs := bfs.StaticFs("en")
+ checkFileContent(enFs, "f1.txt", c, "Hugo Rocks!")
+ checkFileContent(enFs, "f2.txt", c, "Hugo Themes Still Rocks!")
+
+ noFs := bfs.StaticFs("no")
+ checkFileContent(noFs, "f1.txt", c, "Hugo Rocks in Norway!")
+ checkFileContent(noFs, "f2.txt", c, "Hugo Themes Still Rocks!")
+}
+
+func TestMakePathRelative(t *testing.T) {
+ c := qt.New(t)
+ v := createConfig()
+ fs := hugofs.NewMem(v)
+ workDir := "mywork"
+ v.Set("workingDir", workDir)
+
+ c.Assert(fs.Source.MkdirAll(filepath.Join(workDir, "dist", "d1"), 0777), qt.IsNil)
+ c.Assert(fs.Source.MkdirAll(filepath.Join(workDir, "static", "d2"), 0777), qt.IsNil)
+ c.Assert(fs.Source.MkdirAll(filepath.Join(workDir, "dust", "d2"), 0777), qt.IsNil)
+
+ moduleCfg := map[string]any{
+ "mounts": []any{
+ map[string]any{
+ "source": "dist",
+ "target": "static/mydist",
+ },
+ map[string]any{
+ "source": "dust",
+ "target": "static/foo/bar",
+ },
+ map[string]any{
+ "source": "static",
+ "target": "static",
+ },
+ },
+ }
+
+ v.Set("module", moduleCfg)
+
+ c.Assert(initConfig(fs.Source, v), qt.IsNil)
+
+ p, err := paths.New(fs, v)
+ c.Assert(err, qt.IsNil)
+ bfs, err := NewBase(p, nil)
+ c.Assert(err, qt.IsNil)
+
+ sfs := bfs.Static[""]
+ c.Assert(sfs, qt.Not(qt.IsNil))
+
+ makeRel := func(s string) string {
+ r, _ := sfs.MakePathRelative(s)
+ return r
+ }
+
+ c.Assert(makeRel(filepath.Join(workDir, "dist", "d1", "foo.txt")), qt.Equals, filepath.FromSlash("mydist/d1/foo.txt"))
+ c.Assert(makeRel(filepath.Join(workDir, "static", "d2", "foo.txt")), qt.Equals, filepath.FromSlash("d2/foo.txt"))
+ c.Assert(makeRel(filepath.Join(workDir, "dust", "d3", "foo.txt")), qt.Equals, filepath.FromSlash("foo/bar/d3/foo.txt"))
+}
+
+func checkFileCount(fs afero.Fs, dirname string, c *qt.C, expected int) {
+ count, _, err := countFilesAndGetFilenames(fs, dirname)
+ c.Assert(err, qt.IsNil)
+ c.Assert(count, qt.Equals, expected)
+}
+
+func checkFileContent(fs afero.Fs, filename string, c *qt.C, expected ...string) {
+ b, err := afero.ReadFile(fs, filename)
+ c.Assert(err, qt.IsNil)
+
+ content := string(b)
+
+ for _, e := range expected {
+ c.Assert(content, qt.Contains, e)
+ }
+}
+
+func countFilesAndGetFilenames(fs afero.Fs, dirname string) (int, []string, error) {
+ if fs == nil {
+ return 0, nil, errors.New("no fs")
+ }
+
+ counter := 0
+ var filenames []string
+
+ wf := func(path string, info hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if !info.IsDir() {
+ counter++
+ }
+
+ if info.Name() != "." {
+ name := info.Name()
+ name = strings.Replace(name, filepath.FromSlash("/my/work"), "WORK_DIR", 1)
+ filenames = append(filenames, name)
+ }
+
+ return nil
+ }
+
+ w := hugofs.NewWalkway(hugofs.WalkwayConfig{Fs: fs, Root: dirname, WalkFn: wf})
+
+ if err := w.Walk(); err != nil {
+ return -1, nil, err
+ }
+
+ return counter, filenames, nil
+}
+
+func setConfigAndWriteSomeFilesTo(fs afero.Fs, v config.Provider, key, val string, num int) {
+ workingDir := v.GetString("workingDir")
+ v.Set(key, val)
+ fs.Mkdir(val, 0755)
+ for i := 0; i < num; i++ {
+ filename := filepath.Join(workingDir, val, fmt.Sprintf("f%d.txt", i+1))
+ afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0755)
+ }
+}
diff --git a/hugolib/gitinfo.go b/hugolib/gitinfo.go
new file mode 100644
index 000000000..17717ed52
--- /dev/null
+++ b/hugolib/gitinfo.go
@@ -0,0 +1,46 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "path/filepath"
+ "strings"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+type gitInfo struct {
+ contentDir string
+ repo *gitmap.GitRepo
+}
+
+func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo {
+ name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir)
+ name = strings.TrimPrefix(name, "/")
+
+ return g.repo.Files[name]
+}
+
+func newGitInfo(cfg config.Provider) (*gitInfo, error) {
+ workingDir := cfg.GetString("workingDir")
+
+ gitRepo, err := gitmap.Map(workingDir, "")
+ if err != nil {
+ return nil, err
+ }
+
+ return &gitInfo{contentDir: gitRepo.TopLevelAbsPath, repo: gitRepo}, nil
+}
diff --git a/hugolib/hugo_modules_test.go b/hugolib/hugo_modules_test.go
new file mode 100644
index 000000000..aca3f157c
--- /dev/null
+++ b/hugolib/hugo_modules_test.go
@@ -0,0 +1,1173 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "math/rand"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/modules/npm"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugofs"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/testmodBuilder/mods"
+)
+
+func TestHugoModulesVariants(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("skip (relative) long running modules test when running locally")
+ }
+
+ tomlConfig := `
+baseURL="https://example.org"
+workingDir = %q
+
+[module]
+[[module.imports]]
+path="github.com/gohugoio/hugoTestModule2"
+%s
+`
+
+ createConfig := func(workingDir, moduleOpts string) string {
+ return fmt.Sprintf(tomlConfig, workingDir, moduleOpts)
+ }
+
+ newTestBuilder := func(t testing.TB, moduleOpts string) *sitesBuilder {
+ b := newTestSitesBuilder(t)
+ tempDir := t.TempDir()
+ workingDir := filepath.Join(tempDir, "myhugosite")
+ b.Assert(os.MkdirAll(workingDir, 0777), qt.IsNil)
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("workingDir", workingDir)
+ b.Fs = hugofs.NewDefault(cfg)
+ b.WithWorkingDir(workingDir).WithConfigFile("toml", createConfig(workingDir, moduleOpts))
+ b.WithTemplates(
+ "index.html", `
+Param from module: {{ site.Params.Hugo }}|
+{{ $js := resources.Get "jslibs/alpinejs/alpine.js" }}
+JS imported in module: {{ with $js }}{{ .RelPermalink }}{{ end }}|
+`,
+ "_default/single.html", `{{ .Content }}`)
+ b.WithContent("p1.md", `---
+title: "Page"
+---
+
+[A link](https://bep.is)
+
+`)
+ b.WithSourceFile("go.mod", `
+module github.com/gohugoio/tests/testHugoModules
+
+
+`)
+
+ b.WithSourceFile("go.sum", `
+github.com/gohugoio/hugoTestModule2 v0.0.0-20200131160637-9657d7697877 h1:WLM2bQCKIWo04T6NsIWsX/Vtirhf0TnpY66xyqGlgVY=
+github.com/gohugoio/hugoTestModule2 v0.0.0-20200131160637-9657d7697877/go.mod h1:CBFZS3khIAXKxReMwq0le8sEl/D8hcXmixlOHVv+Gd0=
+`)
+
+ return b
+ }
+
+ t.Run("Target in subfolder", func(t *testing.T) {
+ b := newTestBuilder(t, "ignoreImports=true")
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/p1/index.html", `<p>Page|https://bep.is|Title: |Text: A link|END</p>`)
+ })
+
+ t.Run("Ignore config", func(t *testing.T) {
+ b := newTestBuilder(t, "ignoreConfig=true")
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Param from module: |
+JS imported in module: |
+`)
+ })
+
+ t.Run("Ignore imports", func(t *testing.T) {
+ b := newTestBuilder(t, "ignoreImports=true")
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Param from module: Rocks|
+JS imported in module: |
+`)
+ })
+
+ t.Run("Create package.json", func(t *testing.T) {
+ b := newTestBuilder(t, "")
+
+ b.WithSourceFile("package.json", `{
+ "name": "mypack",
+ "version": "1.2.3",
+ "scripts": {
+ "client": "wait-on http://localhost:1313 && open http://localhost:1313",
+ "start": "run-p client server",
+ "test": "echo 'hoge' > hoge"
+ },
+ "dependencies": {
+ "nonon": "error"
+ }
+}`)
+
+ b.WithSourceFile("package.hugo.json", `{
+ "name": "mypack",
+ "version": "1.2.3",
+ "scripts": {
+ "client": "wait-on http://localhost:1313 && open http://localhost:1313",
+ "start": "run-p client server",
+ "test": "echo 'hoge' > hoge"
+ },
+ "dependencies": {
+ "foo": "1.2.3"
+ },
+ "devDependencies": {
+ "postcss-cli": "7.8.0",
+ "tailwindcss": "1.8.0"
+
+ }
+}`)
+
+ b.Build(BuildCfg{})
+ b.Assert(npm.Pack(b.H.BaseFs.SourceFs, b.H.BaseFs.Assets.Dirs), qt.IsNil)
+
+ b.AssertFileContentFn("package.json", func(s string) bool {
+ return s == `{
+ "comments": {
+ "dependencies": {
+ "foo": "project",
+ "react-dom": "github.com/gohugoio/hugoTestModule2"
+ },
+ "devDependencies": {
+ "@babel/cli": "github.com/gohugoio/hugoTestModule2",
+ "@babel/core": "github.com/gohugoio/hugoTestModule2",
+ "@babel/preset-env": "github.com/gohugoio/hugoTestModule2",
+ "postcss-cli": "project",
+ "tailwindcss": "project"
+ }
+ },
+ "dependencies": {
+ "foo": "1.2.3",
+ "react-dom": "^16.13.1"
+ },
+ "devDependencies": {
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5",
+ "postcss-cli": "7.8.0",
+ "tailwindcss": "1.8.0"
+ },
+ "name": "mypack",
+ "scripts": {
+ "client": "wait-on http://localhost:1313 && open http://localhost:1313",
+ "start": "run-p client server",
+ "test": "echo 'hoge' > hoge"
+ },
+ "version": "1.2.3"
+}
+`
+ })
+ })
+
+ t.Run("Create package.json, no default", func(t *testing.T) {
+ b := newTestBuilder(t, "")
+
+ const origPackageJSON = `{
+ "name": "mypack",
+ "version": "1.2.3",
+ "scripts": {
+ "client": "wait-on http://localhost:1313 && open http://localhost:1313",
+ "start": "run-p client server",
+ "test": "echo 'hoge' > hoge"
+ },
+ "dependencies": {
+ "moo": "1.2.3"
+ }
+}`
+
+ b.WithSourceFile("package.json", origPackageJSON)
+
+ b.Build(BuildCfg{})
+ b.Assert(npm.Pack(b.H.BaseFs.SourceFs, b.H.BaseFs.Assets.Dirs), qt.IsNil)
+
+ b.AssertFileContentFn("package.json", func(s string) bool {
+ return s == `{
+ "comments": {
+ "dependencies": {
+ "moo": "project",
+ "react-dom": "github.com/gohugoio/hugoTestModule2"
+ },
+ "devDependencies": {
+ "@babel/cli": "github.com/gohugoio/hugoTestModule2",
+ "@babel/core": "github.com/gohugoio/hugoTestModule2",
+ "@babel/preset-env": "github.com/gohugoio/hugoTestModule2",
+ "postcss-cli": "github.com/gohugoio/hugoTestModule2",
+ "tailwindcss": "github.com/gohugoio/hugoTestModule2"
+ }
+ },
+ "dependencies": {
+ "moo": "1.2.3",
+ "react-dom": "^16.13.1"
+ },
+ "devDependencies": {
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5",
+ "postcss-cli": "7.1.0",
+ "tailwindcss": "1.2.0"
+ },
+ "name": "mypack",
+ "scripts": {
+ "client": "wait-on http://localhost:1313 && open http://localhost:1313",
+ "start": "run-p client server",
+ "test": "echo 'hoge' > hoge"
+ },
+ "version": "1.2.3"
+}
+`
+ })
+
+ // https://github.com/gohugoio/hugo/issues/7690
+ b.AssertFileContent("package.hugo.json", origPackageJSON)
+ })
+
+ t.Run("Create package.json, no default, no package.json", func(t *testing.T) {
+ b := newTestBuilder(t, "")
+
+ b.Build(BuildCfg{})
+ b.Assert(npm.Pack(b.H.BaseFs.SourceFs, b.H.BaseFs.Assets.Dirs), qt.IsNil)
+
+ b.AssertFileContentFn("package.json", func(s string) bool {
+ return s == `{
+ "comments": {
+ "dependencies": {
+ "react-dom": "github.com/gohugoio/hugoTestModule2"
+ },
+ "devDependencies": {
+ "@babel/cli": "github.com/gohugoio/hugoTestModule2",
+ "@babel/core": "github.com/gohugoio/hugoTestModule2",
+ "@babel/preset-env": "github.com/gohugoio/hugoTestModule2",
+ "postcss-cli": "github.com/gohugoio/hugoTestModule2",
+ "tailwindcss": "github.com/gohugoio/hugoTestModule2"
+ }
+ },
+ "dependencies": {
+ "react-dom": "^16.13.1"
+ },
+ "devDependencies": {
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5",
+ "postcss-cli": "7.1.0",
+ "tailwindcss": "1.2.0"
+ },
+ "name": "myhugosite",
+ "version": "0.1.0"
+}
+`
+ })
+ })
+}
+
+// TODO(bep) this fails when testmodBuilder is also building ...
+func TestHugoModulesMatrix(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("skip (relative) long running modules test when running locally")
+ }
+ t.Parallel()
+
+ if !htesting.IsCI() || hugo.GoMinorVersion() < 12 {
+ // https://github.com/golang/go/issues/26794
+ // There were some concurrent issues with Go modules in Go < 1.12.
+ t.Skip("skip this on local host and for Go <= 1.11 due to a bug in Go's stdlib")
+ }
+
+ if testing.Short() {
+ t.Skip()
+ }
+
+ rnd := rand.New(rand.NewSource(time.Now().UnixNano()))
+ gooss := []string{"linux", "darwin", "windows"}
+ goos := gooss[rnd.Intn(len(gooss))]
+ ignoreVendor := rnd.Intn(2) == 0
+ testmods := mods.CreateModules(goos).Collect()
+ rnd.Shuffle(len(testmods), func(i, j int) { testmods[i], testmods[j] = testmods[j], testmods[i] })
+
+ for _, m := range testmods[:2] {
+ c := qt.New(t)
+
+ workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-modules-test")
+ c.Assert(err, qt.IsNil)
+ defer clean()
+
+ v := config.NewWithTestDefaults()
+ v.Set("workingDir", workingDir)
+
+ configTemplate := `
+baseURL = "https://example.com"
+title = "My Modular Site"
+workingDir = %q
+theme = %q
+ignoreVendorPaths = %q
+
+`
+
+ ignoreVendorPaths := ""
+ if ignoreVendor {
+ ignoreVendorPaths = "github.com/**"
+ }
+ config := fmt.Sprintf(configTemplate, workingDir, m.Path(), ignoreVendorPaths)
+
+ b := newTestSitesBuilder(t)
+
+ // Need to use OS fs for this.
+ b.Fs = hugofs.NewDefault(v)
+
+ b.WithWorkingDir(workingDir).WithConfigFile("toml", config)
+ b.WithContent("page.md", `
+---
+title: "Foo"
+---
+`)
+ b.WithTemplates("home.html", `
+
+{{ $mod := .Site.Data.modinfo.module }}
+Mod Name: {{ $mod.name }}
+Mod Version: {{ $mod.version }}
+----
+{{ range $k, $v := .Site.Data.modinfo }}
+- {{ $k }}: {{ range $kk, $vv := $v }}{{ $kk }}: {{ $vv }}|{{ end -}}
+{{ end }}
+
+
+`)
+ b.WithSourceFile("go.mod", `
+module github.com/gohugoio/tests/testHugoModules
+
+
+`)
+
+ b.Build(BuildCfg{})
+
+ // Verify that go.mod is autopopulated with all the modules in config.toml.
+ b.AssertFileContent("go.mod", m.Path())
+
+ b.AssertFileContent("public/index.html",
+ "Mod Name: "+m.Name(),
+ "Mod Version: v1.4.0")
+
+ b.AssertFileContent("public/index.html", createChildModMatchers(m, ignoreVendor, m.Vendor)...)
+
+ }
+}
+
+func createChildModMatchers(m *mods.Md, ignoreVendor, vendored bool) []string {
+ // Child dependencies are one behind.
+ expectMinorVersion := 3
+
+ if !ignoreVendor && vendored {
+ // Vendored modules are stuck at v1.1.0.
+ expectMinorVersion = 1
+ }
+
+ expectVersion := fmt.Sprintf("v1.%d.0", expectMinorVersion)
+
+ var matchers []string
+
+ for _, mm := range m.Children {
+ matchers = append(
+ matchers,
+ fmt.Sprintf("%s: name: %s|version: %s", mm.Name(), mm.Name(), expectVersion))
+ matchers = append(matchers, createChildModMatchers(mm, ignoreVendor, vendored || mm.Vendor)...)
+ }
+ return matchers
+}
+
+func TestModulesWithContent(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
+baseURL="https://example.org"
+
+workingDir="/site"
+
+defaultContentLanguage = "en"
+
+[module]
+[[module.imports]]
+path="a"
+[[module.imports.mounts]]
+source="myacontent"
+target="content/blog"
+lang="en"
+[[module.imports]]
+path="b"
+[[module.imports.mounts]]
+source="mybcontent"
+target="content/blog"
+lang="nn"
+[[module.imports]]
+path="c"
+[[module.imports]]
+path="d"
+
+[languages]
+
+[languages.en]
+title = "Title in English"
+languageName = "English"
+weight = 1
+[languages.nn]
+languageName = "Nynorsk"
+weight = 2
+title = "Tittel på nynorsk"
+[languages.nb]
+languageName = "Bokmål"
+weight = 3
+title = "Tittel på bokmål"
+[languages.fr]
+languageName = "French"
+weight = 4
+title = "French Title"
+
+
+`)
+
+ b.WithTemplatesAdded("index.html", `
+{{ range .Site.RegularPages }}
+|{{ .Title }}|{{ .RelPermalink }}|{{ .Plain }}
+{{ end }}
+{{ $data := .Site.Data }}
+Data Common: {{ $data.common.value }}
+Data C: {{ $data.c.value }}
+Data D: {{ $data.d.value }}
+All Data: {{ $data }}
+
+i18n hello1: {{ i18n "hello1" . }}
+i18n theme: {{ i18n "theme" . }}
+i18n theme2: {{ i18n "theme2" . }}
+`)
+
+ content := func(id string) string {
+ return fmt.Sprintf(`---
+title: Title %s
+---
+Content %s
+
+`, id, id)
+ }
+
+ i18nContent := func(id, value string) string {
+ return fmt.Sprintf(`
+[%s]
+other = %q
+`, id, value)
+ }
+
+ // Content files
+ b.WithSourceFile("themes/a/myacontent/page.md", content("theme-a-en"))
+ b.WithSourceFile("themes/b/mybcontent/page.md", content("theme-b-nn"))
+ b.WithSourceFile("themes/c/content/blog/c.md", content("theme-c-nn"))
+
+ // Data files
+ b.WithSourceFile("data/common.toml", `value="Project"`)
+ b.WithSourceFile("themes/c/data/common.toml", `value="Theme C"`)
+ b.WithSourceFile("themes/c/data/c.toml", `value="Hugo Rocks!"`)
+ b.WithSourceFile("themes/d/data/c.toml", `value="Hugo Rodcks!"`)
+ b.WithSourceFile("themes/d/data/d.toml", `value="Hugo Rodks!"`)
+
+ // i18n files
+ b.WithSourceFile("i18n/en.toml", i18nContent("hello1", "Project"))
+ b.WithSourceFile("themes/c/i18n/en.toml", `
+[hello1]
+other="Theme C Hello"
+[theme]
+other="Theme C"
+`)
+ b.WithSourceFile("themes/d/i18n/en.toml", i18nContent("theme", "Theme D"))
+ b.WithSourceFile("themes/d/i18n/en.toml", i18nContent("theme2", "Theme2 D"))
+
+ // Static files
+ b.WithSourceFile("themes/c/static/hello.txt", `Hugo Rocks!"`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "|Title theme-a-en|/blog/page/|Content theme-a-en")
+ b.AssertFileContent("public/nn/index.html", "|Title theme-b-nn|/nn/blog/page/|Content theme-b-nn")
+
+ // Data
+ b.AssertFileContent("public/index.html",
+ "Data Common: Project",
+ "Data C: Hugo Rocks!",
+ "Data D: Hugo Rodks!",
+ )
+
+ // i18n
+ b.AssertFileContent("public/index.html",
+ "i18n hello1: Project",
+ "i18n theme: Theme C",
+ "i18n theme2: Theme2 D",
+ )
+}
+
+func TestModulesIgnoreConfig(t *testing.T) {
+ b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
+baseURL="https://example.org"
+
+workingDir="/site"
+
+[module]
+[[module.imports]]
+path="a"
+ignoreConfig=true
+
+`)
+
+ b.WithSourceFile("themes/a/config.toml", `
+[params]
+a = "Should Be Ignored!"
+`)
+
+ b.WithTemplatesAdded("index.html", `Params: {{ .Site.Params }}`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContentFn("public/index.html", func(s string) bool {
+ return !strings.Contains(s, "Ignored")
+ })
+}
+
+// TestModulesDisabled verifies that a module import with disable=true is
+// excluded from the build while other imports remain active.
+func TestModulesDisabled(t *testing.T) {
+	b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
+baseURL="https://example.org"
+
+workingDir="/site"
+
+[module]
+[[module.imports]]
+path="a"
+[[module.imports]]
+path="b"
+disable=true
+
+
+`)
+
+	b.WithSourceFile("themes/a/config.toml", `
+[params]
+a = "A param"
+`)
+
+	b.WithSourceFile("themes/b/config.toml", `
+[params]
+b = "B param"
+`)
+
+	b.WithTemplatesAdded("index.html", `Params: {{ .Site.Params }}`)
+
+	b.Build(BuildCfg{})
+
+	// Only the enabled module's params should reach the site.
+	b.AssertFileContentFn("public/index.html", func(s string) bool {
+		return strings.Contains(s, "A param") && !strings.Contains(s, "B param")
+	})
+}
+
+// TestModulesIncompatible verifies that modules declaring Hugo version
+// constraints this Hugo does not satisfy produce warnings (one per module)
+// instead of failing the build. Covers the new [module.hugoVersion] config,
+// the legacy theme.toml min_version string, and an unquoted min_version.
+func TestModulesIncompatible(t *testing.T) {
+	t.Parallel()
+
+	b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
+baseURL="https://example.org"
+
+workingDir="/site"
+
+[module]
+[[module.imports]]
+path="ok"
+[[module.imports]]
+path="incompat1"
+[[module.imports]]
+path="incompat2"
+[[module.imports]]
+path="incompat3"
+
+`)
+
+	b.WithSourceFile("themes/ok/data/ok.toml", `title = "OK"`)
+
+	b.WithSourceFile("themes/incompat1/config.toml", `
+
+[module]
+[module.hugoVersion]
+min = "0.33.2"
+max = "0.45.0"
+
+`)
+
+	// Old setup.
+	b.WithSourceFile("themes/incompat2/theme.toml", `
+min_version = "5.0.0"
+
+`)
+
+	// Issue 6162: min_version given as a bare (unquoted) value must not crash.
+	b.WithSourceFile("themes/incompat3/theme.toml", `
+min_version = 0.55.0
+
+`)
+
+	logger := loggers.NewWarningLogger()
+	b.WithLogger(logger)
+
+	b.Build(BuildCfg{})
+
+	c := qt.New(t)
+
+	// One warning per incompatible module; the "ok" module warns nothing.
+	c.Assert(logger.LogCounters().WarnCounter.Count(), qt.Equals, uint64(3))
+}
+
+// TestModulesSymlinks verifies symlink handling across component folders:
+// symlinks are followed in the project, but not inside modules/themes.
+// Uses the OS filesystem (real symlinks), so it is skipped where symlinks
+// are unavailable (see skipSymlink).
+func TestModulesSymlinks(t *testing.T) {
+	skipSymlink(t)
+
+	// Restore the working directory on exit; the test chdirs around below.
+	wd, _ := os.Getwd()
+	defer func() {
+		os.Chdir(wd)
+	}()
+
+	c := qt.New(t)
+	workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mod-sym")
+	c.Assert(err, qt.IsNil)
+
+	// We need to use the OS fs for this.
+	cfg := config.NewWithTestDefaults()
+	cfg.Set("workingDir", workingDir)
+	fs := hugofs.NewFrom(hugofs.Os, cfg)
+
+	defer clean()
+
+	const homeTemplate = `
+Data: {{ .Site.Data }}
+`
+
+	// Creates a "real" dir with a data file in every component folder,
+	// plus a home template.
+	createDirsAndFiles := func(baseDir string) {
+		for _, dir := range files.ComponentFolders {
+			realDir := filepath.Join(baseDir, dir, "real")
+			c.Assert(os.MkdirAll(realDir, 0777), qt.IsNil)
+			c.Assert(afero.WriteFile(fs.Source, filepath.Join(realDir, "data.toml"), []byte("[hello]\nother = \"hello\""), 0777), qt.IsNil)
+		}
+
+		c.Assert(afero.WriteFile(fs.Source, filepath.Join(baseDir, "layouts", "index.html"), []byte(homeTemplate), 0777), qt.IsNil)
+	}
+
+	// Create project dirs and files.
+	createDirsAndFiles(workingDir)
+	// Create one module inside the default themes folder.
+	themeDir := filepath.Join(workingDir, "themes", "mymod")
+	createDirsAndFiles(themeDir)
+
+	// Adds a dir symlink ("realsym<id>") and a file symlink
+	// ("real/datasym<id>.toml") to each component folder.
+	createSymlinks := func(baseDir, id string) {
+		for _, dir := range files.ComponentFolders {
+			// Issue #9119: private use language tags cannot exceed 8 characters.
+			if dir != "i18n" {
+				c.Assert(os.Chdir(filepath.Join(baseDir, dir)), qt.IsNil)
+				c.Assert(os.Symlink("real", fmt.Sprintf("realsym%s", id)), qt.IsNil)
+				c.Assert(os.Chdir(filepath.Join(baseDir, dir, "real")), qt.IsNil)
+				c.Assert(os.Symlink("data.toml", fmt.Sprintf(filepath.FromSlash("datasym%s.toml"), id)), qt.IsNil)
+			}
+		}
+	}
+
+	createSymlinks(workingDir, "project")
+	createSymlinks(themeDir, "mod")
+
+	// NOTE(review): this local shadows the imported "config" package for the
+	// rest of the function.
+	config := `
+baseURL = "https://example.com"
+theme="mymod"
+defaultContentLanguage="nn"
+defaultContentLanguageInSubDir=true
+
+[languages]
+[languages.nn]
+weight = 1
+[languages.en]
+weight = 2
+
+
+`
+
+	b := newTestSitesBuilder(t).WithNothingAdded().WithWorkingDir(workingDir)
+	b.WithLogger(loggers.NewErrorLogger())
+	b.Fs = fs
+
+	b.WithConfigFile("toml", config)
+	c.Assert(os.Chdir(workingDir), qt.IsNil)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContentFn(filepath.Join("public", "en", "index.html"), func(s string) bool {
+		// Symbolic links only followed in project. There should be WARNING logs.
+		return !strings.Contains(s, "symmod") && strings.Contains(s, "symproject")
+	})
+
+	bfs := b.H.BaseFs
+
+	for i, componentFs := range []afero.Fs{
+		bfs.Static[""].Fs,
+		bfs.Archetypes.Fs,
+		bfs.Content.Fs,
+		bfs.Data.Fs,
+		bfs.Assets.Fs,
+		bfs.I18n.Fs,
+	} {
+
+		// NOTE(review): only the static filesystem (i == 0) is actually
+		// stat-checked; the other components are skipped here. Confirm
+		// this restriction is intentional.
+		if i != 0 {
+			continue
+		}
+
+		for j, id := range []string{"mod", "project"} {
+
+			statCheck := func(fs afero.Fs, filename string, isDir bool) {
+				// Module ("mod", j == 0) symlinks must be rejected;
+				// project symlinks are allowed, except dir symlinks in static.
+				shouldFail := j == 0
+				if !shouldFail && i == 0 {
+					// Static dirs only supports symlinks for files
+					shouldFail = isDir
+				}
+
+				_, err := fs.Stat(filepath.FromSlash(filename))
+				if err != nil {
+					if i > 0 && strings.HasSuffix(filename, "toml") && strings.Contains(err.Error(), "files not supported") {
+						// OK
+						return
+					}
+				}
+
+				if shouldFail {
+					c.Assert(err, qt.Not(qt.IsNil))
+					c.Assert(err, qt.Equals, hugofs.ErrPermissionSymlink)
+				} else {
+					c.Assert(err, qt.IsNil)
+				}
+			}
+
+			c.Logf("Checking %d:%d %q", i, j, id)
+
+			statCheck(componentFs, fmt.Sprintf("realsym%s", id), true)
+			statCheck(componentFs, fmt.Sprintf("real/datasym%s.toml", id), false)
+
+		}
+	}
+}
+
+// TestMountsProject verifies a basic project-level module mount: a custom
+// source directory ("mycontent") mounted as the content root.
+func TestMountsProject(t *testing.T) {
+	t.Parallel()
+
+	config := `
+
+baseURL="https://example.org"
+
+[module]
+[[module.mounts]]
+source="mycontent"
+target="content"
+
+`
+	b := newTestSitesBuilder(t).
+		WithConfigFile("toml", config).
+		WithSourceFile(filepath.Join("mycontent", "mypage.md"), `
+---
+title: "My Page"
+---
+
+`)
+
+	b.Build(BuildCfg{})
+
+	// helpers.PrintFs(b.H.Fs.Source, "public", os.Stdout)
+
+	// The page from the mounted dir renders at the content root.
+	b.AssertFileContent("public/mypage/index.html", "Permalink: https://example.org/mypage/")
+}
+
+// TestMountsContentFile tests mounting a single file (README.md) as
+// content/_index.md alongside a directory mount, checking page lookup,
+// file metadata paths, relative-link resolution, and a server-mode rebuild.
+// https://github.com/gohugoio/hugo/issues/6684
+func TestMountsContentFile(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-modules-content-file")
+	c.Assert(err, qt.IsNil)
+	defer clean()
+
+	configTemplate := `
+baseURL = "https://example.com"
+title = "My Modular Site"
+workingDir = %q
+
+[module]
+  [[module.mounts]]
+  source = "README.md"
+  target = "content/_index.md"
+  [[module.mounts]]
+  source = "mycontent"
+  target = "content/blog"
+
+`
+
+	tomlConfig := fmt.Sprintf(configTemplate, workingDir)
+
+	// Running() puts the builder in server mode so EditFiles below triggers
+	// a rebuild.
+	b := newTestSitesBuilder(t).Running()
+
+	cfg := config.NewWithTestDefaults()
+	cfg.Set("workingDir", workingDir)
+
+	b.Fs = hugofs.NewDefault(cfg)
+
+	b.WithWorkingDir(workingDir).WithConfigFile("toml", tomlConfig)
+	b.WithTemplatesAdded("index.html", `
+{{ .Title }}
+{{ .Content }}
+
+{{ $readme := .Site.GetPage "/README.md" }}
+{{ with $readme }}README: {{ .Title }}|Filename: {{ path.Join .File.Filename }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }}
+
+
+{{ $mypage := .Site.GetPage "/blog/mypage.md" }}
+{{ with $mypage }}MYPAGE: {{ .Title }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }}
+{{ $mybundle := .Site.GetPage "/blog/mybundle" }}
+{{ with $mybundle }}MYBUNDLE: {{ .Title }}|Path: {{ path.Join .File.Path }}|FilePath: {{ path.Join .File.FileInfo.Meta.PathFile }}|{{ end }}
+
+
+`, "_default/_markup/render-link.html", `
+{{ $link := .Destination }}
+{{ $isRemote := strings.HasPrefix $link "http" }}
+{{- if not $isRemote -}}
+{{ $url := urls.Parse .Destination }}
+{{ $fragment := "" }}
+{{- with $url.Fragment }}{{ $fragment = printf "#%s" . }}{{ end -}}
+{{- with .Page.GetPage $url.Path }}{{ $link = printf "%s%s" .Permalink $fragment }}{{ end }}{{ end -}}
+<a href="{{ $link | safeURL }}"{{ with .Title}} title="{{ . }}"{{ end }}{{ if $isRemote }} target="_blank"{{ end }}>{{ .Text | safeHTML }}</a>
+`)
+
+	// Directories must exist on the OS fs before writing the source files.
+	os.Mkdir(filepath.Join(workingDir, "mycontent"), 0777)
+	os.Mkdir(filepath.Join(workingDir, "mycontent", "mybundle"), 0777)
+
+	b.WithSourceFile("README.md", `---
+title: "Readme Title"
+---
+
+Readme Content.
+`,
+		filepath.Join("mycontent", "mypage.md"), `
+---
+title: "My Page"
+---
+
+
+* [Relative Link From Page](mybundle)
+* [Relative Link From Page, filename](mybundle/index.md)
+* [Link using original path](/mycontent/mybundle/index.md)
+
+
+`, filepath.Join("mycontent", "mybundle", "index.md"), `
+---
+title: "My Bundle"
+---
+
+* [Dot Relative Link From Bundle](../mypage.md)
+* [Link using original path](/mycontent/mypage.md)
+* [Link to Home](/)
+* [Link to Home, README.md](/README.md)
+* [Link to Home, _index.md](/_index.md)
+
+`)
+
+	b.Build(BuildCfg{})
+
+	// Path is the mounted (virtual) path; FilePath is the original source path.
+	b.AssertFileContent("public/index.html", `
+README: Readme Title
+/README.md|Path: _index.md|FilePath: README.md
+Readme Content.
+MYPAGE: My Page|Path: blog/mypage.md|FilePath: mycontent/mypage.md|
+MYBUNDLE: My Bundle|Path: blog/mybundle/index.md|FilePath: mycontent/mybundle/index.md|
+`)
+	b.AssertFileContent("public/blog/mypage/index.html", `
+<a href="https://example.com/blog/mybundle/">Relative Link From Page</a>
+<a href="https://example.com/blog/mybundle/">Relative Link From Page, filename</a>
+<a href="https://example.com/blog/mybundle/">Link using original path</a>
+
+`)
+	b.AssertFileContent("public/blog/mybundle/index.html", `
+<a href="https://example.com/blog/mypage/">Dot Relative Link From Bundle</a>
+<a href="https://example.com/blog/mypage/">Link using original path</a>
+<a href="https://example.com/">Link to Home</a>
+<a href="https://example.com/">Link to Home, README.md</a>
+<a href="https://example.com/">Link to Home, _index.md</a>
+`)
+
+	// Edit the single-file mount and verify the rebuild picks it up.
+	b.EditFiles("README.md", `---
+title: "Readme Edit"
+---
+`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html", `
+Readme Edit
+`)
+}
+
+// TestMountsPaths checks the real-path <-> mounted-path mappings exposed by
+// BaseFs, both for a default (no mounts) setup and for explicit mounts
+// including a relative subdir mount and an absolute-path mount.
+func TestMountsPaths(t *testing.T) {
+	c := qt.New(t)
+
+	type test struct {
+		b          *sitesBuilder
+		clean      func()
+		workingDir string
+	}
+
+	// prepare builds a test site in a temp working dir with the given
+	// [module] mounts TOML fragment injected into the config.
+	prepare := func(c *qt.C, mounts string) test {
+		workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mounts-paths")
+		c.Assert(err, qt.IsNil)
+
+		configTemplate := `
+baseURL = "https://example.com"
+title = "My Modular Site"
+workingDir = %q
+
+%s
+
+`
+		tomlConfig := fmt.Sprintf(configTemplate, workingDir, mounts)
+		tomlConfig = strings.Replace(tomlConfig, "WORKING_DIR", workingDir, -1)
+
+		b := newTestSitesBuilder(c).Running()
+
+		cfg := config.NewWithTestDefaults()
+		cfg.Set("workingDir", workingDir)
+		b.Fs = hugofs.NewDefault(cfg)
+
+		os.MkdirAll(filepath.Join(workingDir, "content", "blog"), 0777)
+
+		b.WithWorkingDir(workingDir).WithConfigFile("toml", tomlConfig)
+
+		return test{
+			b:          b,
+			clean:      clean,
+			workingDir: workingDir,
+		}
+	}
+
+	c.Run("Default", func(c *qt.C) {
+		mounts := ``
+
+		test := prepare(c, mounts)
+		b := test.b
+		defer test.clean()
+
+		b.WithContent("blog/p1.md", `---
+title: P1
+---`)
+
+		b.Build(BuildCfg{})
+
+		// With no mounts, Path and PathFile differ only by the component dir.
+		p := b.GetPage("blog/p1.md")
+		f := p.File().FileInfo().Meta()
+		b.Assert(filepath.ToSlash(f.Path), qt.Equals, "blog/p1.md")
+		b.Assert(filepath.ToSlash(f.PathFile()), qt.Equals, "content/blog/p1.md")
+
+		b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(test.workingDir, "layouts", "_default", "single.html")), qt.Equals, filepath.FromSlash("_default/single.html"))
+	})
+
+	c.Run("Mounts", func(c *qt.C) {
+		// Separate temp dir for the absolute-path shortcodes mount.
+		absDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mounts-paths-abs")
+		c.Assert(err, qt.IsNil)
+		defer clean()
+
+		mounts := `[module]
+  [[module.mounts]]
+    source = "README.md"
+    target = "content/_index.md"
+  [[module.mounts]]
+    source = "mycontent"
+    target = "content/blog"
+  [[module.mounts]]
+    source = "subdir/mypartials"
+    target = "layouts/partials"
+  [[module.mounts]]
+    source = %q
+    target = "layouts/shortcodes"
+`
+		mounts = fmt.Sprintf(mounts, filepath.Join(absDir, "/abs/myshortcodes"))
+
+		test := prepare(c, mounts)
+		b := test.b
+		defer test.clean()
+
+		subContentDir := filepath.Join(test.workingDir, "mycontent", "sub")
+		os.MkdirAll(subContentDir, 0777)
+		myPartialsDir := filepath.Join(test.workingDir, "subdir", "mypartials")
+		os.MkdirAll(myPartialsDir, 0777)
+
+		absShortcodesDir := filepath.Join(absDir, "abs", "myshortcodes")
+		os.MkdirAll(absShortcodesDir, 0777)
+
+		b.WithSourceFile("README.md", "---\ntitle: Readme\n---")
+		b.WithSourceFile("mycontent/sub/p1.md", "---\ntitle: P1\n---")
+
+		b.WithSourceFile(filepath.Join(absShortcodesDir, "myshort.html"), "MYSHORT")
+		b.WithSourceFile(filepath.Join(myPartialsDir, "mypartial.html"), "MYPARTIAL")
+
+		b.Build(BuildCfg{})
+
+		// The page is reachable both via the mounted path and the source path,
+		// and both resolve to the same page object.
+		p1_1 := b.GetPage("/blog/sub/p1.md")
+		p1_2 := b.GetPage("/mycontent/sub/p1.md")
+		b.Assert(p1_1, qt.Not(qt.IsNil))
+		b.Assert(p1_2, qt.Equals, p1_1)
+
+		f := p1_1.File().FileInfo().Meta()
+		b.Assert(filepath.ToSlash(f.Path), qt.Equals, "blog/sub/p1.md")
+		b.Assert(filepath.ToSlash(f.PathFile()), qt.Equals, "mycontent/sub/p1.md")
+		// Real filesystem paths map back to their mounted component paths.
+		b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(myPartialsDir, "mypartial.html")), qt.Equals, filepath.FromSlash("partials/mypartial.html"))
+		b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(absShortcodesDir, "myshort.html")), qt.Equals, filepath.FromSlash("shortcodes/myshort.html"))
+		b.Assert(b.H.BaseFs.Content.Path(filepath.Join(subContentDir, "p1.md")), qt.Equals, filepath.FromSlash("blog/sub/p1.md"))
+		b.Assert(b.H.BaseFs.Content.Path(filepath.Join(test.workingDir, "README.md")), qt.Equals, filepath.FromSlash("_index.md"))
+	})
+}
+
+// TestSiteWithGoModButNoModules verifies that a project containing a go.mod
+// file but no Hugo module configuration still builds without error.
+// https://github.com/gohugoio/hugo/issues/6299
+func TestSiteWithGoModButNoModules(t *testing.T) {
+	t.Parallel()
+
+	c := qt.New(t)
+	// We need to use the OS fs for this.
+	workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-no-mod")
+	c.Assert(err, qt.IsNil)
+
+	cfg := config.NewWithTestDefaults()
+	cfg.Set("workingDir", workDir)
+	fs := hugofs.NewFrom(hugofs.Os, cfg)
+
+	defer clean()
+
+	b := newTestSitesBuilder(t)
+	b.Fs = fs
+
+	b.WithWorkingDir(workDir).WithViper(cfg)
+
+	// An empty go.mod must not break the build.
+	b.WithSourceFile("go.mod", "")
+	b.Build(BuildCfg{})
+}
+
+// TestModuleAbsMount verifies that a mount whose source is an absolute path
+// outside the working directory is picked up as content.
+// https://github.com/gohugoio/hugo/issues/6622
+func TestModuleAbsMount(t *testing.T) {
+	t.Parallel()
+
+	c := qt.New(t)
+	// We need to use the OS fs for this.
+	workDir, clean1, err := htesting.CreateTempDir(hugofs.Os, "hugo-project")
+	c.Assert(err, qt.IsNil)
+	absContentDir, clean2, err := htesting.CreateTempDir(hugofs.Os, "hugo-content")
+	c.Assert(err, qt.IsNil)
+
+	cfg := config.NewWithTestDefaults()
+	cfg.Set("workingDir", workDir)
+	fs := hugofs.NewFrom(hugofs.Os, cfg)
+
+	// NOTE(review): this local shadows the imported "config" package below.
+	config := fmt.Sprintf(`
+workingDir=%q
+
+[module]
+  [[module.mounts]]
+    source = %q
+    target = "content"
+
+`, workDir, absContentDir)
+
+	defer clean1()
+	defer clean2()
+
+	b := newTestSitesBuilder(t)
+	b.Fs = fs
+
+	// Write the content file directly to the absolute (out-of-project) dir.
+	contentFilename := filepath.Join(absContentDir, "p1.md")
+	afero.WriteFile(hugofs.Os, contentFilename, []byte(`
+---
+title: Abs
+---
+
+Content.
+`), 0777)
+
+	b.WithWorkingDir(workDir).WithConfigFile("toml", config)
+	b.WithContent("dummy.md", "")
+
+	b.WithTemplatesAdded("index.html", `
+{{ $p1 := site.GetPage "p1" }}
+P1: {{ $p1.Title }}|{{ $p1.RelPermalink }}|Filename: {{ $p1.File.Filename }}
+`)
+
+	b.Build(BuildCfg{})
+
+	// The page renders normally and Filename reports the absolute source path.
+	b.AssertFileContent("public/index.html", "P1: Abs|/p1/", "Filename: "+contentFilename)
+}
+
+// TestMountSameSource verifies that the same source directory can be mounted
+// at two different content targets and renders at both.
+// Issue 9426
+func TestMountSameSource(t *testing.T) {
+	config := `baseURL = 'https://example.org/'
+languageCode = 'en-us'
+title = 'Hugo GitHub Issue #9426'
+
+disableKinds = ['RSS','sitemap','taxonomy','term']
+
+[[module.mounts]]
+source = "content"
+target = "content"
+
+[[module.mounts]]
+source = "extra-content"
+target = "content/resources-a"
+
+[[module.mounts]]
+source = "extra-content"
+target = "content/resources-b"
+`
+	b := newTestSitesBuilder(t).WithConfigFile("toml", config)
+
+	b.WithContent("p1.md", "")
+
+	b.WithSourceFile(
+		"extra-content/_index.md", "",
+		"extra-content/subdir/_index.md", "",
+		"extra-content/subdir/about.md", "",
+	)
+
+	b.Build(BuildCfg{})
+
+	// The shared source renders under both mount targets.
+	b.AssertFileContent("public/resources-a/subdir/about/index.html", "Single")
+	b.AssertFileContent("public/resources-b/subdir/about/index.html", "Single")
+}
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
new file mode 100644
index 000000000..6be26d60e
--- /dev/null
+++ b/hugolib/hugo_sites.go
@@ -0,0 +1,1160 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+ "sync/atomic"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/fsnotify/fsnotify"
+
+ "github.com/gohugoio/hugo/identity"
+
+ radix "github.com/armon/go-radix"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/para"
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/publisher"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/lazy"
+
+ "github.com/gohugoio/hugo/langs/i18n"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/page/pagemeta"
+ "github.com/gohugoio/hugo/tpl"
+ "github.com/gohugoio/hugo/tpl/tplimpl"
+)
+
+// HugoSites represents the sites to build. Each site represents a language.
+type HugoSites struct {
+	Sites []*Site
+
+	multilingual *Multilingual
+
+	// Multihost is set if multilingual and baseURL set on the language level.
+	multihost bool
+
+	// If this is running in the dev server.
+	running bool
+
+	// Render output formats for all sites.
+	renderFormats output.Formats
+
+	// The currently rendered Site.
+	currentSite *Site
+
+	// Shared dependencies (taken from the first site; see newHugoSites).
+	*deps.Deps
+
+	gitInfo       *gitInfo
+	codeownerInfo *codeownerInfo
+
+	// As loaded from the /data dirs
+	data map[string]any
+
+	// Lazily built content maps; see getContentMaps.
+	contentInit sync.Once
+	content     *pageMaps
+
+	// Keeps track of bundle directories and symlinks to enable partial rebuilding.
+	ContentChanges *contentChangeMap
+
+	// File change events with filename stored in this map will be skipped.
+	skipRebuildForFilenamesMu sync.Mutex
+	skipRebuildForFilenames   map[string]bool
+
+	// Lazy one-time initializers (data, layouts, gitInfo, translations).
+	init *hugoSitesInit
+
+	// Optional parallel workers for per-site work; nil means run serially.
+	workers    *para.Workers
+	numWorkers int
+
+	*fatalErrorHandler
+	*testCounters
+}
+
+// ShouldSkipFileChangeEvent allows skipping filesystem event early before
+// the build is started. It reports whether the event's filename has been
+// registered in skipRebuildForFilenames.
+func (h *HugoSites) ShouldSkipFileChangeEvent(ev fsnotify.Event) bool {
+	h.skipRebuildForFilenamesMu.Lock()
+	defer h.skipRebuildForFilenamesMu.Unlock()
+	return h.skipRebuildForFilenames[ev.Name]
+}
+
+// getContentMaps returns the shared page maps, building them on first use.
+// Safe for concurrent callers via sync.Once.
+func (h *HugoSites) getContentMaps() *pageMaps {
+	h.contentInit.Do(func() {
+		h.content = newPageMaps(h)
+	})
+	return h.content
+}
+
+// Only used in tests.
+type testCounters struct {
+	// Number of content render operations performed.
+	contentRenderCounter uint64
+	// Number of page render operations performed.
+	pageRenderCounter uint64
+}
+
+// IncrContentRender atomically bumps the content render counter.
+// Safe to call on a nil receiver (no-op), so production paths need no checks.
+func (h *testCounters) IncrContentRender() {
+	if h == nil {
+		return
+	}
+	atomic.AddUint64(&h.contentRenderCounter, 1)
+}
+
+// IncrPageRender atomically bumps the page render counter.
+// Safe to call on a nil receiver (no-op).
+func (h *testCounters) IncrPageRender() {
+	if h == nil {
+		return
+	}
+	atomic.AddUint64(&h.pageRenderCounter, 1)
+}
+
+// fatalErrorHandler records a fatal error and signals completion via donec.
+type fatalErrorHandler struct {
+	mu sync.Mutex
+
+	h *HugoSites
+
+	err error
+
+	done  bool
+	donec chan bool // will be closed when done
+}
+
+// FatalError error is used in some rare situations where it does not make sense to
+// continue processing, to abort as soon as possible and log the error.
+// NOTE(review): donec is closed only once, but err is overwritten on every
+// call, so the last reported error wins — confirm this is intended.
+func (f *fatalErrorHandler) FatalError(err error) {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	if !f.done {
+		f.done = true
+		close(f.donec)
+	}
+	f.err = err
+}
+
+// getErr returns the recorded fatal error, if any, under the lock.
+func (f *fatalErrorHandler) getErr() error {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	return f.err
+}
+
+// Done returns a channel that is closed when a fatal error has been reported.
+func (f *fatalErrorHandler) Done() <-chan bool {
+	return f.donec
+}
+
+// hugoSitesInit holds the lazy one-time initializers for a HugoSites build.
+type hugoSitesInit struct {
+	// Loads the data from all of the /data folders.
+	data *lazy.Init
+
+	// Performs late initialization (before render) of the templates.
+	layouts *lazy.Init
+
+	// Loads the Git info and CODEOWNERS for all the pages if enabled.
+	gitInfo *lazy.Init
+
+	// Maps page translations.
+	translations *lazy.Init
+}
+
+// Reset re-arms all lazy initializers so they run again on the next build.
+func (h *hugoSitesInit) Reset() {
+	h.data.Reset()
+	h.layouts.Reset()
+	h.gitInfo.Reset()
+	h.translations.Reset()
+}
+
+// Data returns the merged /data tree, loading it lazily on first use.
+// On load failure the error is reported via SendError and nil is returned.
+func (h *HugoSites) Data() map[string]any {
+	if _, err := h.init.data.Do(); err != nil {
+		h.SendError(fmt.Errorf("failed to load data: %w", err))
+		return nil
+	}
+	return h.data
+}
+
+// gitInfoForPage returns the Git info for p, lazily initializing the Git
+// data first. Returns (nil, nil) when Git info is not enabled/available.
+func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) {
+	if _, err := h.init.gitInfo.Do(); err != nil {
+		return nil, err
+	}
+
+	if h.gitInfo == nil {
+		return nil, nil
+	}
+
+	return h.gitInfo.forPage(p), nil
+}
+
+// codeownersForPage returns the CODEOWNERS entries for p, sharing the same
+// lazy initializer as gitInfoForPage. Returns (nil, nil) when unavailable.
+func (h *HugoSites) codeownersForPage(p page.Page) ([]string, error) {
+	if _, err := h.init.gitInfo.Do(); err != nil {
+		return nil, err
+	}
+
+	if h.codeownerInfo == nil {
+		return nil, nil
+	}
+
+	return h.codeownerInfo.forPage(p), nil
+}
+
+// siteInfos returns the Info of every site, in site order.
+func (h *HugoSites) siteInfos() page.Sites {
+	infos := make(page.Sites, len(h.Sites))
+	for i, site := range h.Sites {
+		infos[i] = site.Info
+	}
+	return infos
+}
+
+// pickOneAndLogTheRest returns one error from errors — preferring the first
+// one that carries file context (useful for the server error page), falling
+// back to the first error — and logs up to errLogThreshold of the others.
+func (h *HugoSites) pickOneAndLogTheRest(errors []error) error {
+	if len(errors) == 0 {
+		return nil
+	}
+
+	var i int
+
+	for j, err := range errors {
+		// If this is in server mode, we want to return an error to the client
+		// with a file context, if possible.
+		if herrors.UnwrapFileError(err) != nil {
+			i = j
+			break
+		}
+	}
+
+	// Log the rest, but add a threshold to avoid flooding the log.
+	const errLogThreshold = 5
+
+	for j, err := range errors {
+		if j == i || err == nil {
+			continue
+		}
+
+		if j >= errLogThreshold {
+			break
+		}
+
+		h.Log.Errorln(err)
+	}
+
+	return errors[i]
+}
+
+// IsMultihost reports whether each language has its own baseURL.
+// Safe to call on a nil receiver.
+func (h *HugoSites) IsMultihost() bool {
+	return h != nil && h.multihost
+}
+
+// LanguageSet maps each site's language code to its index in h.Sites.
+// TODO(bep) consolidate
+func (h *HugoSites) LanguageSet() map[string]int {
+	set := make(map[string]int)
+	for i, s := range h.Sites {
+		set[s.language.Lang] = i
+	}
+	return set
+}
+
+// NumLogErrors returns the number of errors logged so far.
+// Safe to call on a nil receiver (returns 0).
+func (h *HugoSites) NumLogErrors() int {
+	if h == nil {
+		return 0
+	}
+	return int(h.Log.LogCounters().ErrorCounter.Count())
+}
+
+// PrintProcessingStats writes a per-site processing-stats table to w.
+func (h *HugoSites) PrintProcessingStats(w io.Writer) {
+	stats := make([]*helpers.ProcessingStats, len(h.Sites))
+	for i := 0; i < len(h.Sites); i++ {
+		stats[i] = h.Sites[i].PathSpec.ProcessingStats
+	}
+	helpers.ProcessingStatsTable(w, stats...)
+}
+
+// GetContentPage finds a Page with content given the absolute filename.
+// Returns nil if none found.
+func (h *HugoSites) GetContentPage(filename string) page.Page {
+	var p page.Page
+
+	// walkBundles stops when the callback returns true.
+	h.getContentMaps().walkBundles(func(b *contentNode) bool {
+		if b.p == nil || b.fi == nil {
+			return false
+		}
+
+		if b.fi.Meta().Filename == filename {
+			p = b.p
+			return true
+		}
+
+		return false
+	})
+
+	return p
+}
+
+// newHugoSites creates a new collection of sites given the input sites, building
+// a language configuration based on those. Non-fatal init problems are
+// collected in initErr and returned alongside the (usable) HugoSites so the
+// caller can decide whether they are fatal.
+func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
+	if cfg.Language != nil {
+		return nil, errors.New("Cannot provide Language in Cfg when sites are provided")
+	}
+
+	// Return error at the end. Make the caller decide if it's fatal or not.
+	var initErr error
+
+	langConfig, err := newMultiLingualFromSites(cfg.Cfg, sites...)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create language config: %w", err)
+	}
+
+	var contentChangeTracker *contentChangeMap
+
+	// Cap the worker count at the number of sites; parallelism only pays
+	// off with more than one worker.
+	numWorkers := config.GetNumWorkerMultiplier()
+	if numWorkers > len(sites) {
+		numWorkers = len(sites)
+	}
+	var workers *para.Workers
+	if numWorkers > 1 {
+		workers = para.New(numWorkers)
+	}
+
+	h := &HugoSites{
+		running:                 cfg.Running,
+		multilingual:            langConfig,
+		multihost:               cfg.Cfg.GetBool("multihost"),
+		Sites:                   sites,
+		workers:                 workers,
+		numWorkers:              numWorkers,
+		skipRebuildForFilenames: make(map[string]bool),
+		init: &hugoSitesInit{
+			data:         lazy.New(),
+			layouts:      lazy.New(),
+			gitInfo:      lazy.New(),
+			translations: lazy.New(),
+		},
+	}
+
+	h.fatalErrorHandler = &fatalErrorHandler{
+		h:     h,
+		donec: make(chan bool),
+	}
+
+	// Register the lazy initializers; each runs at most once per build.
+	h.init.data.Add(func() (any, error) {
+		err := h.loadData(h.PathSpec.BaseFs.Data.Dirs)
+		if err != nil {
+			return nil, fmt.Errorf("failed to load data: %w", err)
+		}
+		return nil, nil
+	})
+
+	h.init.layouts.Add(func() (any, error) {
+		for _, s := range h.Sites {
+			if err := s.Tmpl().(tpl.TemplateManager).MarkReady(); err != nil {
+				return nil, err
+			}
+		}
+		return nil, nil
+	})
+
+	h.init.translations.Add(func() (any, error) {
+		// Translation mapping is only meaningful with more than one language.
+		if len(h.Sites) > 1 {
+			allTranslations := pagesToTranslationsMap(h.Sites)
+			assignTranslationsToPages(allTranslations, h.Sites)
+		}
+
+		return nil, nil
+	})
+
+	h.init.gitInfo.Add(func() (any, error) {
+		err := h.loadGitInfo()
+		if err != nil {
+			return nil, fmt.Errorf("failed to load Git info: %w", err)
+		}
+		return nil, nil
+	})
+
+	for _, s := range sites {
+		s.h = h
+	}
+
+	var l configLoader
+	if err := l.applyDeps(cfg, sites...); err != nil {
+		initErr = fmt.Errorf("add site dependencies: %w", err)
+	}
+
+	// The first site's Deps are shared by the whole collection.
+	h.Deps = sites[0].Deps
+	if h.Deps == nil {
+		return nil, initErr
+	}
+
+	// Only needed in server mode.
+	// TODO(bep) clean up the running vs watching terms
+	if cfg.Running {
+		contentChangeTracker = &contentChangeMap{
+			pathSpec:      h.PathSpec,
+			symContent:    make(map[string]map[string]bool),
+			leafBundles:   radix.New(),
+			branchBundles: make(map[string]bool),
+		}
+		h.ContentChanges = contentChangeTracker
+	}
+
+	return h, initErr
+}
+
+// loadGitInfo loads Git log data and CODEOWNERS when enableGitInfo is set.
+// Failures are logged but deliberately not returned: the build continues
+// without Git metadata, so this always returns nil.
+func (h *HugoSites) loadGitInfo() error {
+	if h.Cfg.GetBool("enableGitInfo") {
+		gi, err := newGitInfo(h.Cfg)
+		if err != nil {
+			h.Log.Errorln("Failed to read Git log:", err)
+		} else {
+			h.gitInfo = gi
+		}
+
+		co, err := newCodeOwners(h.Cfg)
+		if err != nil {
+			h.Log.Errorln("Failed to read CODEOWNERS:", err)
+		} else {
+			h.codeownerInfo = co
+		}
+	}
+	return nil
+}
+
+// applyDeps wires up the dependency graph for each site that does not yet
+// have one. The first site creates the full deps.Deps; subsequent sites get
+// a cheaper language-scoped clone via ForLanguage.
+func (l configLoader) applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
+	if cfg.TemplateProvider == nil {
+		cfg.TemplateProvider = tplimpl.DefaultTemplateProvider
+	}
+
+	if cfg.TranslationProvider == nil {
+		cfg.TranslationProvider = i18n.NewTranslationProvider()
+	}
+
+	var (
+		d   *deps.Deps
+		err error
+	)
+
+	for _, s := range sites {
+		if s.Deps != nil {
+			continue
+		}
+
+		// onCreated finishes per-site setup once the Deps exist: publisher,
+		// site info, site config, page collections and the ref linker.
+		onCreated := func(d *deps.Deps) error {
+			s.Deps = d
+
+			// Set up the main publishing chain.
+			pub, err := publisher.NewDestinationPublisher(
+				d.ResourceSpec,
+				s.outputFormatsConfig,
+				s.mediaTypesConfig,
+			)
+			if err != nil {
+				return err
+			}
+			s.publisher = pub
+
+			if err := s.initializeSiteInfo(); err != nil {
+				return err
+			}
+
+			d.Site = s.Info
+
+			siteConfig, err := l.loadSiteConfig(s.language)
+			if err != nil {
+				return fmt.Errorf("load site config: %w", err)
+			}
+			s.siteConfigConfig = siteConfig
+
+			pm := &pageMap{
+				contentMap: newContentMap(contentMapConfig{
+					lang:                 s.Lang(),
+					taxonomyConfig:       s.siteCfg.taxonomiesConfig.Values(),
+					taxonomyDisabled:     !s.isEnabled(page.KindTerm),
+					taxonomyTermDisabled: !s.isEnabled(page.KindTaxonomy),
+					pageDisabled:         !s.isEnabled(page.KindPage),
+				}),
+				s: s,
+			}
+
+			s.PageCollections = newPageCollections(pm)
+
+			s.siteRefLinker, err = newSiteRefLinker(s.language, s)
+			return err
+		}
+
+		cfg.Language = s.language
+		cfg.MediaTypes = s.mediaTypesConfig
+		cfg.OutputFormats = s.outputFormatsConfig
+
+		if d == nil {
+			// First site: create the Deps from scratch and load resources.
+			cfg.WithTemplate = s.withSiteTemplates(cfg.WithTemplate)
+
+			var err error
+			d, err = deps.New(cfg)
+			if err != nil {
+				return fmt.Errorf("create deps: %w", err)
+			}
+
+			d.OutputFormatsConfig = s.outputFormatsConfig
+
+			if err := onCreated(d); err != nil {
+				return fmt.Errorf("on created: %w", err)
+			}
+
+			if err = d.LoadResources(); err != nil {
+				return fmt.Errorf("load resources: %w", err)
+			}
+
+		} else {
+			// Subsequent sites: derive language-specific Deps from the first.
+			d, err = d.ForLanguage(cfg, onCreated)
+			if err != nil {
+				return err
+			}
+			d.OutputFormatsConfig = s.outputFormatsConfig
+		}
+	}
+
+	return nil
+}
+
+// NewHugoSites creates HugoSites from the given config, building one Site
+// per enabled language. Falls back to an error logger when none is supplied.
+func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
+	if cfg.Logger == nil {
+		cfg.Logger = loggers.NewErrorLogger()
+	}
+	sites, err := createSitesFromConfig(cfg)
+	if err != nil {
+		return nil, fmt.Errorf("from config: %w", err)
+	}
+	return newHugoSites(cfg, sites...)
+}
+
+// withSiteTemplates chains the given template setup callbacks into a single
+// callback, skipping nil entries and stopping at the first error.
+func (s *Site) withSiteTemplates(withTemplates ...func(templ tpl.TemplateManager) error) func(templ tpl.TemplateManager) error {
+	return func(templ tpl.TemplateManager) error {
+		for _, wt := range withTemplates {
+			if wt == nil {
+				continue
+			}
+			if err := wt(templ); err != nil {
+				return err
+			}
+		}
+
+		return nil
+	}
+}
+
+// createSitesFromConfig builds one Site per configured language,
+// skipping disabled languages.
+func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
+	var sites []*Site
+
+	languages := getLanguages(cfg.Cfg)
+
+	for _, lang := range languages {
+		if lang.Disabled {
+			continue
+		}
+		var s *Site
+		var err error
+		// cfg is reused across iterations with the language swapped in.
+		cfg.Language = lang
+		s, err = newSite(cfg)
+
+		if err != nil {
+			return nil, err
+		}
+
+		sites = append(sites, s)
+	}
+
+	return sites, nil
+}
+
+// Reset resets the sites and template caches etc., making it ready for a full rebuild.
+// Site state is only reset when config.ResetState is set; the fatal error
+// handler and lazy initializers are always re-armed.
+func (h *HugoSites) reset(config *BuildCfg) {
+	if config.ResetState {
+		for i, s := range h.Sites {
+			h.Sites[i] = s.reset()
+			if r, ok := s.Fs.PublishDir.(hugofs.Reseter); ok {
+				r.Reset()
+			}
+		}
+	}
+
+	h.fatalErrorHandler = &fatalErrorHandler{
+		h:     h,
+		donec: make(chan bool),
+	}
+
+	h.init.Reset()
+}
+
+// resetLogs resets the log counters etc. Used to do a new build on the same sites.
+// Resets the shared logger, the global error counter and each site's loggers.
+func (h *HugoSites) resetLogs() {
+	h.Log.Reset()
+	loggers.GlobalErrorCounter.Reset()
+	for _, s := range h.Sites {
+		s.Deps.Log.Reset()
+		s.Deps.LogDistinct.Reset()
+	}
+}
+
+// withSite runs fn for every site — serially when no worker pool is
+// configured, otherwise in parallel, returning the first error either way.
+func (h *HugoSites) withSite(fn func(s *Site) error) error {
+	if h.workers == nil {
+		for _, s := range h.Sites {
+			if err := fn(s); err != nil {
+				return err
+			}
+		}
+		return nil
+	}
+
+	g, _ := h.workers.Start(context.Background())
+	for _, s := range h.Sites {
+		s := s // capture the loop variable for the closure (pre Go 1.22)
+		g.Run(func() error {
+			return fn(s)
+		})
+	}
+	return g.Wait()
+}
+
+// createSitesFromConfig rebuilds h.Sites from a fresh configuration.
+// Used when the config changes in a way that may add/remove languages.
+func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
+	oldLangs, _ := h.Cfg.Get("languagesSorted").(langs.Languages)
+
+	l := configLoader{cfg: h.Cfg}
+	if err := l.loadLanguageSettings(oldLangs); err != nil {
+		return err
+	}
+
+	depsCfg := deps.DepsCfg{Fs: h.Fs, Cfg: l.cfg}
+
+	sites, err := createSitesFromConfig(depsCfg)
+	if err != nil {
+		return err
+	}
+
+	langConfig, err := newMultiLingualFromSites(depsCfg.Cfg, sites...)
+	if err != nil {
+		return err
+	}
+
+	h.Sites = sites
+
+	for _, s := range sites {
+		s.h = h
+	}
+
+	var cl configLoader
+	if err := cl.applyDeps(depsCfg, sites...); err != nil {
+		return err
+	}
+
+	// Shared deps come from the first site, mirroring newHugoSites.
+	h.Deps = sites[0].Deps
+
+	h.multilingual = langConfig
+	h.multihost = h.Deps.Cfg.GetBool("multihost")
+
+	return nil
+}
+
+// toSiteInfos returns each site's SiteInfo, in site order.
+func (h *HugoSites) toSiteInfos() []*SiteInfo {
+	infos := make([]*SiteInfo, len(h.Sites))
+	for i, s := range h.Sites {
+		infos[i] = s.Info
+	}
+	return infos
+}
+
+// BuildCfg holds build options used to, as an example, skip the render step.
+type BuildCfg struct {
+	// Reset site state before build. Use to force full rebuilds.
+	ResetState bool
+	// If set, we re-create the sites from the given configuration before a build.
+	// This is needed if new languages are added.
+	NewConfig config.Provider
+	// Skip rendering. Useful for testing.
+	SkipRender bool
+	// Use this to indicate what changed (for rebuilds).
+	whatChanged *whatChanged
+
+	// This is a partial re-render of some selected pages. This means
+	// we should skip most of the processing.
+	PartialReRender bool
+
+	// Set in server mode when the last build failed for some reason.
+	ErrRecovery bool
+
+	// Recently visited URLs. This is used for partial re-rendering.
+	RecentlyVisited map[string]bool
+
+	// Can be set to build only with a sub set of the content source.
+	ContentInclusionFilter *glob.FilenameFilter
+
+	// Set when the buildlock is already acquired (e.g. the archetype content builder).
+	NoBuildLock bool
+
+	// Render counters used by tests; nil in production builds.
+	testCounters *testCounters
+}
+
+// shouldRender is used in the Fast Render Mode to determine if we need to re-render
+// a Page: If it is recently visited (the home pages will always be in this set) or changed.
+// Note that a page does not have to have a content page / file.
+// For regular builds, this will always return true.
+// TODO(bep) rename/work this.
+func (cfg *BuildCfg) shouldRender(p *pageState) bool {
+ if p == nil {
+ return false
+ }
+
+ if p.forceRender {
+ return true
+ }
+
+ if len(cfg.RecentlyVisited) == 0 {
+ return true
+ }
+
+ if cfg.RecentlyVisited[p.RelPermalink()] {
+ return true
+ }
+
+ if cfg.whatChanged != nil && !p.File().IsZero() {
+ return cfg.whatChanged.files[p.File().Filename()]
+ }
+
+ return false
+}
+
+func (h *HugoSites) renderCrossSitesSitemap() error {
+ if !h.multilingual.enabled() || h.IsMultihost() {
+ return nil
+ }
+
+ sitemapEnabled := false
+ for _, s := range h.Sites {
+ if s.isEnabled(kindSitemap) {
+ sitemapEnabled = true
+ break
+ }
+ }
+
+ if !sitemapEnabled {
+ return nil
+ }
+
+ s := h.Sites[0]
+
+ templ := s.lookupLayouts("sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml")
+
+ return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
+ s.siteCfg.sitemap.Filename, h.toSiteInfos(), templ)
+}
+
+func (h *HugoSites) renderCrossSitesRobotsTXT() error {
+ if h.multihost {
+ return nil
+ }
+ if !h.Cfg.GetBool("enableRobotsTXT") {
+ return nil
+ }
+
+ s := h.Sites[0]
+
+ p, err := newPageStandalone(&pageMeta{
+ s: s,
+ kind: kindRobotsTXT,
+ urlPaths: pagemeta.URLPath{
+ URL: "robots.txt",
+ },
+ },
+ output.RobotsTxtFormat)
+ if err != nil {
+ return err
+ }
+
+ if !p.render {
+ return nil
+ }
+
+ templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt")
+
+ return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", "robots.txt", p, templ)
+}
+
+func (h *HugoSites) removePageByFilename(filename string) {
+ h.getContentMaps().withMaps(func(m *pageMap) error {
+ m.deleteBundleMatching(func(b *contentNode) bool {
+ if b.p == nil {
+ return false
+ }
+
+ if b.fi == nil {
+ return false
+ }
+
+ return b.fi.Meta().Filename == filename
+ })
+ return nil
+ })
+}
+
+func (h *HugoSites) createPageCollections() error {
+ allPages := newLazyPagesFactory(func() page.Pages {
+ var pages page.Pages
+ for _, s := range h.Sites {
+ pages = append(pages, s.Pages()...)
+ }
+
+ page.SortByDefault(pages)
+
+ return pages
+ })
+
+ allRegularPages := newLazyPagesFactory(func() page.Pages {
+ return h.findPagesByKindIn(page.KindPage, allPages.get())
+ })
+
+ for _, s := range h.Sites {
+ s.PageCollections.allPages = allPages
+ s.PageCollections.allRegularPages = allRegularPages
+ }
+
+ return nil
+}
+
+func (s *Site) preparePagesForRender(isRenderingSite bool, idx int) error {
+ var err error
+ s.pageMap.withEveryBundlePage(func(p *pageState) bool {
+ if err = p.initOutputFormat(isRenderingSite, idx); err != nil {
+ return true
+ }
+ return false
+ })
+ return nil
+}
+
+// Pages returns all pages for all sites.
+func (h *HugoSites) Pages() page.Pages {
+ return h.Sites[0].AllPages()
+}
+
+func (h *HugoSites) loadData(fis []hugofs.FileMetaInfo) (err error) {
+ spec := source.NewSourceSpec(h.PathSpec, nil, nil)
+
+ h.data = make(map[string]any)
+ for _, fi := range fis {
+ fileSystem := spec.NewFilesystemFromFileMetaInfo(fi)
+ files, err := fileSystem.Files()
+ if err != nil {
+ return err
+ }
+ for _, r := range files {
+ if err := h.handleDataFile(r); err != nil {
+ return err
+ }
+ }
+ }
+
+ return
+}
+
+func (h *HugoSites) handleDataFile(r source.File) error {
+ var current map[string]any
+
+ f, err := r.FileInfo().Meta().Open()
+ if err != nil {
+ return fmt.Errorf("data: failed to open %q: %w", r.LogicalName(), err)
+ }
+ defer f.Close()
+
+ // Crawl in data tree to insert data
+ current = h.data
+ keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
+
+ for _, key := range keyParts {
+ if key != "" {
+ if _, ok := current[key]; !ok {
+ current[key] = make(map[string]any)
+ }
+ current = current[key].(map[string]any)
+ }
+ }
+
+ data, err := h.readData(r)
+ if err != nil {
+ return h.errWithFileContext(err, r)
+ }
+
+ if data == nil {
+ return nil
+ }
+
+ // filepath.Walk walks the files in lexical order, '/' comes before '.'
+ higherPrecedentData := current[r.BaseFileName()]
+
+ switch data.(type) {
+ case nil:
+ case map[string]any:
+
+ switch higherPrecedentData.(type) {
+ case nil:
+ current[r.BaseFileName()] = data
+ case map[string]any:
+ // merge maps: insert entries from data for keys that
+ // don't already exist in higherPrecedentData
+ higherPrecedentMap := higherPrecedentData.(map[string]any)
+ for key, value := range data.(map[string]any) {
+ if _, exists := higherPrecedentMap[key]; exists {
+ // this warning could happen if
+ // 1. A theme uses the same key; the main data folder wins
+ // 2. A sub folder uses the same key: the sub folder wins
+ // TODO(bep) figure out a way to detect 2) above and make that a WARN
+ h.Log.Infof("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
+ } else {
+ higherPrecedentMap[key] = value
+ }
+ }
+ default:
+ // can't merge: higherPrecedentData is not a map
+ h.Log.Warnf("The %T data from '%s' overridden by "+
+ "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
+ }
+
+ case []any:
+ if higherPrecedentData == nil {
+ current[r.BaseFileName()] = data
+ } else {
+ // we don't merge array data
+ h.Log.Warnf("The %T data from '%s' overridden by "+
+ "higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
+ }
+
+ default:
+ h.Log.Errorf("unexpected data type %T in file %s", data, r.LogicalName())
+ }
+
+ return nil
+}
+
+func (h *HugoSites) errWithFileContext(err error, f source.File) error {
+ fim, ok := f.FileInfo().(hugofs.FileMetaInfo)
+ if !ok {
+ return err
+ }
+ realFilename := fim.Meta().Filename
+
+ return herrors.NewFileErrorFromFile(err, realFilename, h.SourceSpec.Fs.Source, nil)
+
+}
+
+func (h *HugoSites) readData(f source.File) (any, error) {
+ file, err := f.FileInfo().Meta().Open()
+ if err != nil {
+ return nil, fmt.Errorf("readData: failed to open data file: %w", err)
+ }
+ defer file.Close()
+ content := helpers.ReaderToBytes(file)
+
+ format := metadecoders.FormatFromString(f.Ext())
+ return metadecoders.Default.Unmarshal(content, format)
+}
+
+func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
+ return h.Sites[0].findPagesByKindIn(kind, inPages)
+}
+
+func (h *HugoSites) resetPageState() {
+ h.getContentMaps().walkBundles(func(n *contentNode) bool {
+ if n.p == nil {
+ return false
+ }
+ p := n.p
+ for _, po := range p.pageOutputs {
+ if po.cp == nil {
+ continue
+ }
+ po.cp.Reset()
+ }
+
+ return false
+ })
+}
+
+func (h *HugoSites) resetPageStateFromEvents(idset identity.Identities) {
+ h.getContentMaps().walkBundles(func(n *contentNode) bool {
+ if n.p == nil {
+ return false
+ }
+ p := n.p
+ OUTPUTS:
+ for _, po := range p.pageOutputs {
+ if po.cp == nil {
+ continue
+ }
+ for id := range idset {
+ if po.cp.dependencyTracker.Search(id) != nil {
+ po.cp.Reset()
+ continue OUTPUTS
+ }
+ }
+ }
+
+ if p.shortcodeState == nil {
+ return false
+ }
+
+ for _, s := range p.shortcodeState.shortcodes {
+ for _, templ := range s.templs {
+ sid := templ.(identity.Manager)
+ for id := range idset {
+ if sid.Search(id) != nil {
+ for _, po := range p.pageOutputs {
+ if po.cp != nil {
+ po.cp.Reset()
+ }
+ }
+ return false
+ }
+ }
+ }
+ }
+ return false
+ })
+}
+
+// Used in partial reloading to determine if the change is in a bundle.
+type contentChangeMap struct {
+ mu sync.RWMutex
+
+ // Holds directories with leaf bundles.
+ leafBundles *radix.Tree
+
+ // Holds directories with branch bundles.
+ branchBundles map[string]bool
+
+ pathSpec *helpers.PathSpec
+
+ // Hugo supports symlinked content (both directories and files). This
+ // can lead to situations where the same file can be referenced from several
+ // locations in /content -- which is really cool, but also means we have to
+ // go an extra mile to handle changes.
+ // This map is only used in watch mode.
+ // It maps either file to files or the real dir to a set of content directories
+ // where it is in use.
+ symContentMu sync.Mutex
+ symContent map[string]map[string]bool
+}
+
+func (m *contentChangeMap) add(dirname string, tp bundleDirType) {
+ m.mu.Lock()
+ if !strings.HasSuffix(dirname, helpers.FilePathSeparator) {
+ dirname += helpers.FilePathSeparator
+ }
+ switch tp {
+ case bundleBranch:
+ m.branchBundles[dirname] = true
+ case bundleLeaf:
+ m.leafBundles.Insert(dirname, true)
+ default:
+ m.mu.Unlock()
+ panic("invalid bundle type")
+ }
+ m.mu.Unlock()
+}
+
+func (m *contentChangeMap) resolveAndRemove(filename string) (string, bundleDirType) {
+ m.mu.RLock()
+ defer m.mu.RUnlock()
+
+ // Bundles share resources, so we need to start from the virtual root.
+ relFilename := m.pathSpec.RelContentDir(filename)
+ dir, name := filepath.Split(relFilename)
+ if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
+ dir += helpers.FilePathSeparator
+ }
+
+ if _, found := m.branchBundles[dir]; found {
+ delete(m.branchBundles, dir)
+ return dir, bundleBranch
+ }
+
+ if key, _, found := m.leafBundles.LongestPrefix(dir); found {
+ m.leafBundles.Delete(key)
+ dir = string(key)
+ return dir, bundleLeaf
+ }
+
+ fileTp, isContent := classifyBundledFile(name)
+ if isContent && fileTp != bundleNot {
+ // A new bundle.
+ return dir, fileTp
+ }
+
+ return dir, bundleNot
+}
+
+func (m *contentChangeMap) addSymbolicLinkMapping(fim hugofs.FileMetaInfo) {
+ meta := fim.Meta()
+ if !meta.IsSymlink {
+ return
+ }
+ m.symContentMu.Lock()
+
+ from, to := meta.Filename, meta.OriginalFilename
+ if fim.IsDir() {
+ if !strings.HasSuffix(from, helpers.FilePathSeparator) {
+ from += helpers.FilePathSeparator
+ }
+ }
+
+ mm, found := m.symContent[from]
+
+ if !found {
+ mm = make(map[string]bool)
+ m.symContent[from] = mm
+ }
+ mm[to] = true
+ m.symContentMu.Unlock()
+}
+
+func (m *contentChangeMap) GetSymbolicLinkMappings(dir string) []string {
+ mm, found := m.symContent[dir]
+ if !found {
+ return nil
+ }
+ dirs := make([]string, len(mm))
+ i := 0
+ for dir := range mm {
+ dirs[i] = dir
+ i++
+ }
+
+ sort.Strings(dirs)
+
+ return dirs
+}
diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go
new file mode 100644
index 000000000..1a191257c
--- /dev/null
+++ b/hugolib/hugo_sites_build.go
@@ -0,0 +1,507 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+ "runtime/trace"
+ "strings"
+
+ "github.com/gohugoio/hugo/publisher"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/common/para"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/resources/postpub"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/output"
+
+ "errors"
+
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/helpers"
+)
+
+// Build builds all sites. If filesystem events are provided,
+// this is considered to be a potential partial rebuild.
+func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
+ ctx, task := trace.NewTask(context.Background(), "Build")
+ defer task.End()
+
+ if !config.NoBuildLock {
+ unlock, err := h.BaseFs.LockBuild()
+ if err != nil {
+ return fmt.Errorf("failed to acquire a build lock: %w", err)
+ }
+ defer unlock()
+ }
+
+ errCollector := h.StartErrorCollector()
+ errs := make(chan error)
+
+ go func(from, to chan error) {
+ var errors []error
+ i := 0
+ for e := range from {
+ i++
+ if i > 50 {
+ break
+ }
+ errors = append(errors, e)
+ }
+ to <- h.pickOneAndLogTheRest(errors)
+
+ close(to)
+ }(errCollector, errs)
+
+ if h.Metrics != nil {
+ h.Metrics.Reset()
+ }
+
+ h.testCounters = config.testCounters
+
+ // Need a pointer as this may be modified.
+ conf := &config
+
+ if conf.whatChanged == nil {
+ // Assume everything has changed
+ conf.whatChanged = &whatChanged{source: true}
+ }
+
+ var prepareErr error
+
+ if !config.PartialReRender {
+ prepare := func() error {
+ init := func(conf *BuildCfg) error {
+ for _, s := range h.Sites {
+ s.Deps.BuildStartListeners.Notify()
+ }
+
+ if len(events) > 0 {
+ // Rebuild
+ if err := h.initRebuild(conf); err != nil {
+ return fmt.Errorf("initRebuild: %w", err)
+ }
+ } else {
+ if err := h.initSites(conf); err != nil {
+ return fmt.Errorf("initSites: %w", err)
+ }
+ }
+
+ return nil
+ }
+
+ var err error
+
+ f := func() {
+ err = h.process(conf, init, events...)
+ }
+ trace.WithRegion(ctx, "process", f)
+ if err != nil {
+ return fmt.Errorf("process: %w", err)
+ }
+
+ f = func() {
+ err = h.assemble(conf)
+ }
+ trace.WithRegion(ctx, "assemble", f)
+ if err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ f := func() {
+ prepareErr = prepare()
+ }
+ trace.WithRegion(ctx, "prepare", f)
+ if prepareErr != nil {
+ h.SendError(prepareErr)
+ }
+
+ }
+
+ if prepareErr == nil {
+ var err error
+ f := func() {
+ err = h.render(conf)
+ }
+ trace.WithRegion(ctx, "render", f)
+ if err != nil {
+ h.SendError(err)
+ }
+
+ if err = h.postProcess(); err != nil {
+ h.SendError(err)
+ }
+ }
+
+ if h.Metrics != nil {
+ var b bytes.Buffer
+ h.Metrics.WriteMetrics(&b)
+
+ h.Log.Printf("\nTemplate Metrics:\n\n")
+ h.Log.Println(b.String())
+ }
+
+ select {
+ // Make sure the channel always gets something.
+ case errCollector <- nil:
+ default:
+ }
+ close(errCollector)
+
+ err := <-errs
+ if err != nil {
+ return err
+ }
+
+ if err := h.fatalErrorHandler.getErr(); err != nil {
+ return err
+ }
+
+ errorCount := h.Log.LogCounters().ErrorCounter.Count()
+ if errorCount > 0 {
+ return fmt.Errorf("logged %d error(s)", errorCount)
+ }
+
+ return nil
+}
+
+// Build lifecycle methods below.
+// The order listed matches the order of execution.
+
+func (h *HugoSites) initSites(config *BuildCfg) error {
+ h.reset(config)
+
+ if config.NewConfig != nil {
+ if err := h.createSitesFromConfig(config.NewConfig); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *HugoSites) initRebuild(config *BuildCfg) error {
+ if config.NewConfig != nil {
+ return errors.New("rebuild does not support 'NewConfig'")
+ }
+
+ if config.ResetState {
+ return errors.New("rebuild does not support 'ResetState'")
+ }
+
+ if !h.running {
+ return errors.New("rebuild called when not in watch mode")
+ }
+
+ for _, s := range h.Sites {
+ s.resetBuildState(config.whatChanged.source)
+ }
+
+ h.reset(config)
+ h.resetLogs()
+ helpers.InitLoggers()
+
+ return nil
+}
+
+func (h *HugoSites) process(config *BuildCfg, init func(config *BuildCfg) error, events ...fsnotify.Event) error {
+ // We should probably refactor the Site and pull up most of the logic from there to here,
+ // but that seems like a daunting task.
+ // So for now, if there are more than one site (language),
+ // we pre-process the first one, then configure all the sites based on that.
+
+ firstSite := h.Sites[0]
+
+ if len(events) > 0 {
+ // This is a rebuild
+ return firstSite.processPartial(config, init, events)
+ }
+
+ return firstSite.process(*config)
+}
+
+func (h *HugoSites) assemble(bcfg *BuildCfg) error {
+ if len(h.Sites) > 1 {
+ // The first is initialized during process; initialize the rest
+ for _, site := range h.Sites[1:] {
+ if err := site.initializeSiteInfo(); err != nil {
+ return err
+ }
+ }
+ }
+
+ if !bcfg.whatChanged.source {
+ return nil
+ }
+
+ if err := h.getContentMaps().AssemblePages(); err != nil {
+ return err
+ }
+
+ if err := h.createPageCollections(); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (h *HugoSites) render(config *BuildCfg) error {
+ if _, err := h.init.layouts.Do(); err != nil {
+ return err
+ }
+
+ siteRenderContext := &siteRenderContext{cfg: config, multihost: h.multihost}
+
+ if !config.PartialReRender {
+ h.renderFormats = output.Formats{}
+ h.withSite(func(s *Site) error {
+ s.initRenderFormats()
+ return nil
+ })
+
+ for _, s := range h.Sites {
+ h.renderFormats = append(h.renderFormats, s.renderFormats...)
+ }
+ }
+
+ i := 0
+ for _, s := range h.Sites {
+ h.currentSite = s
+ for siteOutIdx, renderFormat := range s.renderFormats {
+ siteRenderContext.outIdx = siteOutIdx
+ siteRenderContext.sitesOutIdx = i
+ i++
+
+ select {
+ case <-h.Done():
+ return nil
+ default:
+ for _, s2 := range h.Sites {
+ // We render site by site, but since the content is lazily rendered
+ // and a site can "borrow" content from other sites, every site
+ // needs this set.
+ s2.rc = &siteRenderingContext{Format: renderFormat}
+
+ if err := s2.preparePagesForRender(s == s2, siteRenderContext.sitesOutIdx); err != nil {
+ return err
+ }
+ }
+
+ if !config.SkipRender {
+ if config.PartialReRender {
+ if err := s.renderPages(siteRenderContext); err != nil {
+ return err
+ }
+ } else {
+ if err := s.render(siteRenderContext); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ }
+ }
+
+ if !config.SkipRender {
+ if err := h.renderCrossSitesSitemap(); err != nil {
+ return err
+ }
+ if err := h.renderCrossSitesRobotsTXT(); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (h *HugoSites) postProcess() error {
+ // Make sure to write any build stats to disk first so it's available
+ // to the post processors.
+ if err := h.writeBuildStats(); err != nil {
+ return err
+ }
+
+ // This will only be set when js.Build has been triggered with
+ // imports that resolve to the project or a module.
+ // Write a jsconfig.json file to the project's /asset directory
+ // to help JS intellisense in VS Code etc.
+ if !h.ResourceSpec.BuildConfig.NoJSConfigInAssets && h.BaseFs.Assets.Dirs != nil {
+ fi, err := h.BaseFs.Assets.Fs.Stat("")
+ if err != nil {
+ h.Log.Warnf("Failed to resolve jsconfig.json dir: %s", err)
+ } else {
+ m := fi.(hugofs.FileMetaInfo).Meta()
+ assetsDir := m.SourceRoot
+ if strings.HasPrefix(assetsDir, h.ResourceSpec.WorkingDir) {
+ if jsConfig := h.ResourceSpec.JSConfigBuilder.Build(assetsDir); jsConfig != nil {
+
+ b, err := json.MarshalIndent(jsConfig, "", " ")
+ if err != nil {
+ h.Log.Warnf("Failed to create jsconfig.json: %s", err)
+ } else {
+ filename := filepath.Join(assetsDir, "jsconfig.json")
+ if h.running {
+ h.skipRebuildForFilenamesMu.Lock()
+ h.skipRebuildForFilenames[filename] = true
+ h.skipRebuildForFilenamesMu.Unlock()
+ }
+ // Make sure it's written to the OS fs as this is used by
+ // editors.
+ if err := afero.WriteFile(hugofs.Os, filename, b, 0666); err != nil {
+ h.Log.Warnf("Failed to write jsconfig.json: %s", err)
+ }
+ }
+ }
+ }
+
+ }
+ }
+
+ var toPostProcess []postpub.PostPublishedResource
+ for _, r := range h.ResourceSpec.PostProcessResources {
+ toPostProcess = append(toPostProcess, r)
+ }
+
+ if len(toPostProcess) == 0 {
+ // Nothing more to do.
+ return nil
+ }
+
+ workers := para.New(config.GetNumWorkerMultiplier())
+ g, _ := workers.Start(context.Background())
+
+ handleFile := func(filename string) error {
+ content, err := afero.ReadFile(h.BaseFs.PublishFs, filename)
+ if err != nil {
+ return err
+ }
+
+ k := 0
+ changed := false
+
+ for {
+ l := bytes.Index(content[k:], []byte(postpub.PostProcessPrefix))
+ if l == -1 {
+ break
+ }
+ m := bytes.Index(content[k+l:], []byte(postpub.PostProcessSuffix)) + len(postpub.PostProcessSuffix)
+
+ low, high := k+l, k+l+m
+
+ field := content[low:high]
+
+ forward := l + m
+
+ for i, r := range toPostProcess {
+ if r == nil {
+ panic(fmt.Sprintf("resource %d to post process is nil", i+1))
+ }
+ v, ok := r.GetFieldString(string(field))
+ if ok {
+ content = append(content[:low], append([]byte(v), content[high:]...)...)
+ changed = true
+ forward = len(v)
+ break
+ }
+ }
+
+ k += forward
+ }
+
+ if changed {
+ return afero.WriteFile(h.BaseFs.PublishFs, filename, content, 0666)
+ }
+
+ return nil
+ }
+
+ _ = afero.Walk(h.BaseFs.PublishFs, "", func(path string, info os.FileInfo, err error) error {
+ if info == nil || info.IsDir() {
+ return nil
+ }
+
+ if !strings.HasSuffix(path, "html") {
+ return nil
+ }
+
+ g.Run(func() error {
+ return handleFile(path)
+ })
+
+ return nil
+ })
+
+ // Prepare for a new build.
+ for _, s := range h.Sites {
+ s.ResourceSpec.PostProcessResources = make(map[string]postpub.PostPublishedResource)
+ }
+
+ return g.Wait()
+}
+
+type publishStats struct {
+ CSSClasses string `json:"cssClasses"`
+}
+
+func (h *HugoSites) writeBuildStats() error {
+ if !h.ResourceSpec.BuildConfig.WriteStats {
+ return nil
+ }
+
+ htmlElements := &publisher.HTMLElements{}
+ for _, s := range h.Sites {
+ stats := s.publisher.PublishStats()
+ htmlElements.Merge(stats.HTMLElements)
+ }
+
+ htmlElements.Sort()
+
+ stats := publisher.PublishStats{
+ HTMLElements: *htmlElements,
+ }
+
+ js, err := json.MarshalIndent(stats, "", " ")
+ if err != nil {
+ return err
+ }
+
+ filename := filepath.Join(h.WorkingDir, "hugo_stats.json")
+
+ // Make sure it's always written to the OS fs.
+ if err := afero.WriteFile(hugofs.Os, filename, js, 0666); err != nil {
+ return err
+ }
+
+ // Write to the destination as well if it's an in-memory fs.
+ if !hugofs.IsOsFs(h.Fs.Source) {
+ if err := afero.WriteFile(h.Fs.WorkingDirWritable, filename, js, 0666); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/hugolib/hugo_sites_build_errors_test.go b/hugolib/hugo_sites_build_errors_test.go
new file mode 100644
index 000000000..ffbfe1c17
--- /dev/null
+++ b/hugolib/hugo_sites_build_errors_test.go
@@ -0,0 +1,647 @@
+package hugolib
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/herrors"
+)
+
+type testSiteBuildErrorAsserter struct {
+ name string
+ c *qt.C
+}
+
+func (t testSiteBuildErrorAsserter) getFileError(err error) herrors.FileError {
+ t.c.Assert(err, qt.Not(qt.IsNil), qt.Commentf(t.name))
+ fe := herrors.UnwrapFileError(err)
+ t.c.Assert(fe, qt.Not(qt.IsNil))
+ return fe
+}
+
+func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) {
+ t.c.Helper()
+ fe := t.getFileError(err)
+ t.c.Assert(fe.Position().LineNumber, qt.Equals, lineNumber, qt.Commentf(err.Error()))
+}
+
+func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) {
+ // The error message will contain filenames with OS slashes. Normalize before compare.
+ e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2)
+ t.c.Assert(e2, qt.Contains, e1)
+}
+
+func TestSiteBuildErrors(t *testing.T) {
+ const (
+ yamlcontent = "yamlcontent"
+ tomlcontent = "tomlcontent"
+ jsoncontent = "jsoncontent"
+ shortcode = "shortcode"
+ base = "base"
+ single = "single"
+ )
+
+ // TODO(bep) add content tests after https://github.com/gohugoio/hugo/issues/5324
+ // is implemented.
+
+ tests := []struct {
+ name string
+ fileType string
+ fileFixer func(content string) string
+ assertCreateError func(a testSiteBuildErrorAsserter, err error)
+ assertBuildError func(a testSiteBuildErrorAsserter, err error)
+ }{
+
+ {
+ name: "Base template parse failed",
+ fileType: base,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title }}", ".Title }", 1)
+ },
+ // Base templates gets parsed at build time.
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ a.assertLineNumber(4, err)
+ },
+ },
+ {
+ name: "Base template execute failed",
+ fileType: base,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title", ".Titles", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ a.assertLineNumber(4, err)
+ },
+ },
+ {
+ name: "Single template parse failed",
+ fileType: single,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title }}", ".Title }", 1)
+ },
+ assertCreateError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 5)
+ a.c.Assert(fe.Position().ColumnNumber, qt.Equals, 1)
+ a.assertErrorMessage("\"layouts/foo/single.html:5:1\": parse failed: template: foo/single.html:5: unexpected \"}\" in operand", fe.Error())
+ },
+ },
+ {
+ name: "Single template execute failed",
+ fileType: single,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title", ".Titles", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 5)
+ a.c.Assert(fe.Position().ColumnNumber, qt.Equals, 14)
+ a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
+ },
+ },
+ {
+ name: "Single template execute failed, long keyword",
+ fileType: single,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title", ".ThisIsAVeryLongTitle", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 5)
+ a.c.Assert(fe.Position().ColumnNumber, qt.Equals, 14)
+ a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
+ },
+ },
+ {
+ name: "Shortcode parse failed",
+ fileType: shortcode,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title }}", ".Title }", 1)
+ },
+ assertCreateError: func(a testSiteBuildErrorAsserter, err error) {
+ a.assertLineNumber(4, err)
+ },
+ },
+ {
+ name: "Shortcode execute failed",
+ fileType: shortcode,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title", ".Titles", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ // Make sure that it contains both the content file and template
+ a.assertErrorMessage(`"content/myyaml.md:7:10": failed to render shortcode "sc": failed to process shortcode: "layouts/shortcodes/sc.html:4:22": execute of template failed: template: shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate field Titles in type page.Page`, fe.Error())
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 7)
+
+ },
+ },
+ {
+ name: "Shortode does not exist",
+ fileType: yamlcontent,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, "{{< sc >}}", "{{< nono >}}", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 7)
+ a.c.Assert(fe.Position().ColumnNumber, qt.Equals, 10)
+ a.assertErrorMessage(`"content/myyaml.md:7:10": failed to extract shortcode: template for shortcode "nono" not found`, fe.Error())
+ },
+ },
+ {
+ name: "Invalid YAML front matter",
+ fileType: yamlcontent,
+ fileFixer: func(content string) string {
+ return `---
+title: "My YAML Content"
+foo bar
+---
+`
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ a.assertLineNumber(3, err)
+ },
+ },
+ {
+ name: "Invalid TOML front matter",
+ fileType: tomlcontent,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, "description = ", "description &", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 6)
+ },
+ },
+ {
+ name: "Invalid JSON front matter",
+ fileType: jsoncontent,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, "\"description\":", "\"description\"", 1)
+ },
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 3)
+ },
+ },
+ {
+ // See https://github.com/gohugoio/hugo/issues/5327
+ name: "Panic in template Execute",
+ fileType: single,
+ fileFixer: func(content string) string {
+ return strings.Replace(content, ".Title", ".Parent.Parent.Parent", 1)
+ },
+
+ assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
+ a.c.Assert(err, qt.Not(qt.IsNil))
+ fe := a.getFileError(err)
+ a.c.Assert(fe.Position().LineNumber, qt.Equals, 5)
+ a.c.Assert(fe.Position().ColumnNumber, qt.Equals, 21)
+ },
+ },
+ }
+
+ for _, test := range tests {
+ if test.name != "Invalid JSON front matter" {
+ continue
+ }
+ test := test
+ t.Run(test.name, func(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ errorAsserter := testSiteBuildErrorAsserter{
+ c: c,
+ name: test.name,
+ }
+
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ f := func(fileType, content string) string {
+ if fileType != test.fileType {
+ return content
+ }
+ return test.fileFixer(content)
+ }
+
+ b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1
+SHORTCODE L2
+SHORTCODE L3:
+SHORTCODE L4: {{ .Page.Title }}
+`))
+ b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1
+BASEOF L2
+BASEOF L3
+BASEOF L4{{ if .Title }}{{ end }}
+{{block "main" .}}This is the main content.{{end}}
+BASEOF L6
+`))
+
+ b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }}
+SINGLE L2:
+SINGLE L3:
+SINGLE L4:
+SINGLE L5: {{ .Title }} {{ .Content }}
+{{ end }}
+`))
+
+ b.WithTemplatesAdded("layouts/foo/single.html", f(single, `
+SINGLE L2:
+SINGLE L3:
+SINGLE L4:
+SINGLE L5: {{ .Title }} {{ .Content }}
+`))
+
+ b.WithContent("myyaml.md", f(yamlcontent, `---
+title: "The YAML"
+---
+
+Some content.
+
+ {{< sc >}}
+
+Some more text.
+
+The end.
+
+`))
+
+ b.WithContent("mytoml.md", f(tomlcontent, `+++
+title = "The TOML"
+p1 = "v"
+p2 = "v"
+p3 = "v"
+description = "Descriptioon"
++++
+
+Some content.
+
+
+`))
+
+ b.WithContent("myjson.md", f(jsoncontent, `{
+ "title": "This is a title",
+ "description": "This is a description."
+}
+
+Some content.
+
+
+`))
+
+ createErr := b.CreateSitesE()
+ if test.assertCreateError != nil {
+ test.assertCreateError(errorAsserter, createErr)
+ } else {
+ c.Assert(createErr, qt.IsNil)
+ }
+
+ if createErr == nil {
+ buildErr := b.BuildE(BuildCfg{})
+ if test.assertBuildError != nil {
+ test.assertBuildError(errorAsserter, buildErr)
+ } else {
+ c.Assert(buildErr, qt.IsNil)
+ }
+ }
+ })
+ }
+
+}
+
+// Issue 9852
+func TestErrorMinify(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+minify = true
+
+-- layouts/index.html --
+<body>
+<script>=;</script>
+</body>
+
+`
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ fe := herrors.UnwrapFileError(err)
+ b.Assert(fe, qt.IsNotNil)
+ b.Assert(fe.Position().LineNumber, qt.Equals, 2)
+ b.Assert(fe.Position().ColumnNumber, qt.Equals, 9)
+ b.Assert(fe.Error(), qt.Contains, "unexpected = in expression on line 2 and column 9")
+ b.Assert(filepath.ToSlash(fe.Position().Filename), qt.Contains, "hugo-transform-error")
+ b.Assert(os.Remove(fe.Position().Filename), qt.IsNil)
+
+}
+
+func TestErrorNestedRender(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/_index.md --
+---
+title: "Home"
+---
+-- layouts/index.html --
+line 1
+line 2
+1{{ .Render "myview" }}
+-- layouts/_default/myview.html --
+line 1
+12{{ partial "foo.html" . }}
+line 4
+line 5
+-- layouts/partials/foo.html --
+line 1
+line 2
+123{{ .ThisDoesNotExist }}
+line 4
+`
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ errors := herrors.UnwrapFileErrorsWithErrorContext(err)
+ b.Assert(errors, qt.HasLen, 4)
+ b.Assert(errors[0].Position().LineNumber, qt.Equals, 3)
+ b.Assert(errors[0].Position().ColumnNumber, qt.Equals, 4)
+ b.Assert(errors[0].Error(), qt.Contains, filepath.FromSlash(`"/layouts/index.html:3:4": execute of template failed`))
+ b.Assert(errors[0].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "line 2", "1{{ .Render \"myview\" }}"})
+ b.Assert(errors[2].Position().LineNumber, qt.Equals, 2)
+ b.Assert(errors[2].Position().ColumnNumber, qt.Equals, 5)
+ b.Assert(errors[2].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "12{{ partial \"foo.html\" . }}", "line 4", "line 5"})
+
+ b.Assert(errors[3].Position().LineNumber, qt.Equals, 3)
+ b.Assert(errors[3].Position().ColumnNumber, qt.Equals, 6)
+ b.Assert(errors[3].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "line 2", "123{{ .ThisDoesNotExist }}", "line 4"})
+
+}
+
+func TestErrorNestedShortocde(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/_index.md --
+---
+title: "Home"
+---
+
+## Hello
+{{< hello >}}
+
+-- layouts/index.html --
+line 1
+line 2
+{{ .Content }}
+line 5
+-- layouts/shortcodes/hello.html --
+line 1
+12{{ partial "foo.html" . }}
+line 4
+line 5
+-- layouts/partials/foo.html --
+line 1
+line 2
+123{{ .ThisDoesNotExist }}
+line 4
+`
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ errors := herrors.UnwrapFileErrorsWithErrorContext(err)
+
+ b.Assert(errors, qt.HasLen, 3)
+
+ b.Assert(errors[0].Position().LineNumber, qt.Equals, 6)
+ b.Assert(errors[0].Position().ColumnNumber, qt.Equals, 1)
+ b.Assert(errors[0].ErrorContext().ChromaLexer, qt.Equals, "md")
+ b.Assert(errors[0].Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:6:1": failed to render shortcode "hello": failed to process shortcode: "/layouts/shortcodes/hello.html:2:5":`))
+ b.Assert(errors[0].ErrorContext().Lines, qt.DeepEquals, []string{"", "## Hello", "{{< hello >}}", ""})
+ b.Assert(errors[1].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "12{{ partial \"foo.html\" . }}", "line 4", "line 5"})
+ b.Assert(errors[2].ErrorContext().Lines, qt.DeepEquals, []string{"line 1", "line 2", "123{{ .ThisDoesNotExist }}", "line 4"})
+
+}
+
+func TestErrorRenderHookHeading(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/_index.md --
+---
+title: "Home"
+---
+
+## Hello
+
+-- layouts/index.html --
+line 1
+line 2
+{{ .Content }}
+line 5
+-- layouts/_default/_markup/render-heading.html --
+line 1
+12{{ .Levels }}
+line 4
+line 5
+`
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ errors := herrors.UnwrapFileErrorsWithErrorContext(err)
+
+ b.Assert(errors, qt.HasLen, 2)
+ b.Assert(errors[0].Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:1:1": "/layouts/_default/_markup/render-heading.html:2:5": execute of template failed`))
+
+}
+
+func TestErrorRenderHookCodeblock(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/_index.md --
+---
+title: "Home"
+---
+
+## Hello
+
+§§§ foo
+bar
+§§§
+
+
+-- layouts/index.html --
+line 1
+line 2
+{{ .Content }}
+line 5
+-- layouts/_default/_markup/render-codeblock-foo.html --
+line 1
+12{{ .Foo }}
+line 4
+line 5
+`
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ errors := herrors.UnwrapFileErrorsWithErrorContext(err)
+
+ b.Assert(errors, qt.HasLen, 2)
+ first := errors[0]
+ b.Assert(first.Error(), qt.Contains, filepath.FromSlash(`"/content/_index.md:7:1": "/layouts/_default/_markup/render-codeblock-foo.html:2:5": execute of template failed`))
+
+}
+
+func TestErrorInBaseTemplate(t *testing.T) {
+ t.Parallel()
+
+ filesTemplate := `
+-- config.toml --
+-- content/_index.md --
+---
+title: "Home"
+---
+-- layouts/baseof.html --
+line 1 base
+line 2 base
+{{ block "main" . }}empty{{ end }}
+line 4 base
+{{ block "toc" . }}empty{{ end }}
+-- layouts/index.html --
+{{ define "main" }}
+line 2 index
+line 3 index
+line 4 index
+{{ end }}
+{{ define "toc" }}
+TOC: {{ partial "toc.html" . }}
+{{ end }}
+-- layouts/partials/toc.html --
+toc line 1
+toc line 2
+toc line 3
+toc line 4
+
+
+
+
+`
+
+ t.Run("base template", func(t *testing.T) {
+ files := strings.Replace(filesTemplate, "line 4 base", "123{{ .ThisDoesNotExist \"abc\" }}", 1)
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`render of "home" failed: "/layouts/baseof.html:4:6"`))
+
+ })
+
+ t.Run("index template", func(t *testing.T) {
+ files := strings.Replace(filesTemplate, "line 3 index", "1234{{ .ThisDoesNotExist \"abc\" }}", 1)
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`render of "home" failed: "/layouts/index.html:3:7"`))
+
+ })
+
+ t.Run("partial from define", func(t *testing.T) {
+ files := strings.Replace(filesTemplate, "toc line 2", "12345{{ .ThisDoesNotExist \"abc\" }}", 1)
+
+ b, err := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`render of "home" failed: "/layouts/index.html:7:8": execute of template failed`))
+ b.Assert(err.Error(), qt.Contains, `execute of template failed: template: partials/toc.html:2:8: executing "partials/toc.html"`)
+
+ })
+
+}
+
+// https://github.com/gohugoio/hugo/issues/5375
+func TestSiteBuildTimeout(t *testing.T) {
+ if !htesting.IsCI() {
+ //defer leaktest.CheckTimeout(t, 10*time.Second)()
+ }
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+timeout = 5
+`)
+
+ b.WithTemplatesAdded("_default/single.html", `
+{{ .WordCount }}
+`, "shortcodes/c.html", `
+{{ range .Page.Site.RegularPages }}
+{{ .WordCount }}
+{{ end }}
+
+`)
+
+ for i := 1; i < 100; i++ {
+ b.WithContent(fmt.Sprintf("page%d.md", i), `---
+title: "A page"
+---
+
+{{< c >}}`)
+ }
+
+ b.CreateSites().BuildFail(BuildCfg{})
+}
diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go
new file mode 100644
index 000000000..c31f94713
--- /dev/null
+++ b/hugolib/hugo_sites_build_test.go
@@ -0,0 +1,1407 @@
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/fortytw2/leaktest"
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+)
+
+func TestMultiSitesMainLangInRoot(t *testing.T) {
+ t.Parallel()
+ for _, b := range []bool{false} {
+ doTestMultiSitesMainLangInRoot(t, b)
+ }
+}
+
+func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
+ c := qt.New(t)
+
+ siteConfig := map[string]any{
+ "DefaultContentLanguage": "fr",
+ "DefaultContentLanguageInSubdir": defaultInSubDir,
+ }
+
+ b := newMultiSiteTestBuilder(t, "toml", multiSiteTOMLConfigTemplate, siteConfig)
+
+ pathMod := func(s string) string {
+ return s
+ }
+
+ if !defaultInSubDir {
+ pathMod = func(s string) string {
+ return strings.Replace(s, "/fr/", "/", -1)
+ }
+ }
+
+ b.CreateSites()
+ b.Build(BuildCfg{})
+
+ sites := b.H.Sites
+ c.Assert(len(sites), qt.Equals, 4)
+
+ enSite := sites[0]
+ frSite := sites[1]
+
+ c.Assert(enSite.Info.LanguagePrefix, qt.Equals, "/en")
+
+ if defaultInSubDir {
+ c.Assert(frSite.Info.LanguagePrefix, qt.Equals, "/fr")
+ } else {
+ c.Assert(frSite.Info.LanguagePrefix, qt.Equals, "")
+ }
+
+ c.Assert(enSite.PathSpec.RelURL("foo", true), qt.Equals, "/blog/en/foo")
+
+ doc1en := enSite.RegularPages()[0]
+ doc1fr := frSite.RegularPages()[0]
+
+ enPerm := doc1en.Permalink()
+ enRelPerm := doc1en.RelPermalink()
+ c.Assert(enPerm, qt.Equals, "http://example.com/blog/en/sect/doc1-slug/")
+ c.Assert(enRelPerm, qt.Equals, "/blog/en/sect/doc1-slug/")
+
+ frPerm := doc1fr.Permalink()
+ frRelPerm := doc1fr.RelPermalink()
+
+ b.AssertFileContent(pathMod("public/fr/sect/doc1/index.html"), "Single", "Bonjour")
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Hello")
+
+ if defaultInSubDir {
+ c.Assert(frPerm, qt.Equals, "http://example.com/blog/fr/sect/doc1/")
+ c.Assert(frRelPerm, qt.Equals, "/blog/fr/sect/doc1/")
+
+ // should have a redirect on top level.
+ b.AssertFileContent("public/index.html", `<meta http-equiv="refresh" content="0; url=http://example.com/blog/fr">`)
+ } else {
+ // Main language in root
+ c.Assert(frPerm, qt.Equals, "http://example.com/blog/sect/doc1/")
+ c.Assert(frRelPerm, qt.Equals, "/blog/sect/doc1/")
+
+ // should have redirect back to root
+ b.AssertFileContent("public/fr/index.html", `<meta http-equiv="refresh" content="0; url=http://example.com/blog">`)
+ }
+ b.AssertFileContent(pathMod("public/fr/index.html"), "Home", "Bonjour")
+ b.AssertFileContent("public/en/index.html", "Home", "Hello")
+
+ // Check list pages
+ b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour")
+ b.AssertFileContent("public/en/sect/index.html", "List", "Hello")
+ b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour")
+ b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello")
+
+ // Check sitemaps
+ // Sitemaps behaves different: In a multilanguage setup there will always be a index file and
+ // one sitemap in each lang folder.
+ b.AssertFileContent("public/sitemap.xml",
+ "<loc>http://example.com/blog/en/sitemap.xml</loc>",
+ "<loc>http://example.com/blog/fr/sitemap.xml</loc>")
+
+ if defaultInSubDir {
+ b.AssertFileContent("public/fr/sitemap.xml", "<loc>http://example.com/blog/fr/</loc>")
+ } else {
+ b.AssertFileContent("public/fr/sitemap.xml", "<loc>http://example.com/blog/</loc>")
+ }
+ b.AssertFileContent("public/en/sitemap.xml", "<loc>http://example.com/blog/en/</loc>")
+
+ // Check rss
+ b.AssertFileContent(pathMod("public/fr/index.xml"), pathMod(`<atom:link href="http://example.com/blog/fr/index.xml"`),
+ `rel="self" type="application/rss+xml"`)
+ b.AssertFileContent("public/en/index.xml", `<atom:link href="http://example.com/blog/en/index.xml"`)
+ b.AssertFileContent(
+ pathMod("public/fr/sect/index.xml"),
+ pathMod(`<atom:link href="http://example.com/blog/fr/sect/index.xml"`))
+ b.AssertFileContent("public/en/sect/index.xml", `<atom:link href="http://example.com/blog/en/sect/index.xml"`)
+ b.AssertFileContent(
+ pathMod("public/fr/plaques/FRtag1/index.xml"),
+ pathMod(`<atom:link href="http://example.com/blog/fr/plaques/FRtag1/index.xml"`))
+ b.AssertFileContent("public/en/tags/tag1/index.xml", `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`)
+
+ // Check paginators
+ b.AssertFileContent(pathMod("public/fr/page/1/index.html"), pathMod(`refresh" content="0; url=http://example.com/blog/fr/"`))
+ b.AssertFileContent("public/en/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/"`)
+ b.AssertFileContent(pathMod("public/fr/page/2/index.html"), "Home Page 2", "Bonjour", pathMod("http://example.com/blog/fr/"))
+ b.AssertFileContent("public/en/page/2/index.html", "Home Page 2", "Hello", "http://example.com/blog/en/")
+ b.AssertFileContent(pathMod("public/fr/sect/page/1/index.html"), pathMod(`refresh" content="0; url=http://example.com/blog/fr/sect/"`))
+ b.AssertFileContent("public/en/sect/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/sect/"`)
+ b.AssertFileContent(pathMod("public/fr/sect/page/2/index.html"), "List Page 2", "Bonjour", pathMod("http://example.com/blog/fr/sect/"))
+ b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/sect/")
+ b.AssertFileContent(
+ pathMod("public/fr/plaques/FRtag1/page/1/index.html"),
+ pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/FRtag1/"`))
+ b.AssertFileContent("public/en/tags/tag1/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`)
+ b.AssertFileContent(
+ pathMod("public/fr/plaques/FRtag1/page/2/index.html"), "List Page 2", "Bonjour",
+ pathMod("http://example.com/blog/fr/plaques/FRtag1/"))
+ b.AssertFileContent("public/en/tags/tag1/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/")
+ // nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian)
+ b.AssertFileContent("public/nn/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nn/"`)
+ b.AssertFileContent("public/nb/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nb/"`)
+}
+
+func TestMultiSitesWithTwoLanguages(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+
+defaultContentLanguage = "nn"
+
+[languages]
+[languages.nn]
+languageName = "Nynorsk"
+weight = 1
+title = "Tittel på Nynorsk"
+[languages.nn.params]
+p1 = "p1nn"
+
+[languages.en]
+title = "Title in English"
+languageName = "English"
+weight = 2
+[languages.en.params]
+p1 = "p1en"
+`)
+
+ b.CreateSites()
+ b.Build(BuildCfg{SkipRender: true})
+ sites := b.H.Sites
+
+ c.Assert(len(sites), qt.Equals, 2)
+
+ nnSite := sites[0]
+ nnHome := nnSite.getPage(page.KindHome)
+ c.Assert(len(nnHome.AllTranslations()), qt.Equals, 2)
+ c.Assert(len(nnHome.Translations()), qt.Equals, 1)
+ c.Assert(nnHome.IsTranslated(), qt.Equals, true)
+
+ enHome := sites[1].getPage(page.KindHome)
+
+ p1, err := enHome.Param("p1")
+ c.Assert(err, qt.IsNil)
+ c.Assert(p1, qt.Equals, "p1en")
+
+ p1, err = nnHome.Param("p1")
+ c.Assert(err, qt.IsNil)
+ c.Assert(p1, qt.Equals, "p1nn")
+}
+
+func TestMultiSitesBuild(t *testing.T) {
+ for _, config := range []struct {
+ content string
+ suffix string
+ }{
+ {multiSiteTOMLConfigTemplate, "toml"},
+ {multiSiteYAMLConfigTemplate, "yml"},
+ {multiSiteJSONConfigTemplate, "json"},
+ } {
+ t.Run(config.suffix, func(t *testing.T) {
+ t.Parallel()
+ doTestMultiSitesBuild(t, config.content, config.suffix)
+ })
+ }
+}
+
+func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
+ c := qt.New(t)
+
+ b := newMultiSiteTestBuilder(t, configSuffix, configTemplate, nil)
+ b.CreateSites()
+
+ sites := b.H.Sites
+ c.Assert(len(sites), qt.Equals, 4)
+
+ b.Build(BuildCfg{})
+
+ // Check site config
+ for _, s := range sites {
+ c.Assert(s.Info.defaultContentLanguageInSubdir, qt.Equals, true)
+ c.Assert(s.disabledKinds, qt.Not(qt.IsNil))
+ }
+
+ gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
+ c.Assert(gp1, qt.Not(qt.IsNil))
+ c.Assert(gp1.Title(), qt.Equals, "doc1")
+ gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
+ c.Assert(gp2, qt.IsNil)
+
+ enSite := sites[0]
+ enSiteHome := enSite.getPage(page.KindHome)
+ c.Assert(enSiteHome.IsTranslated(), qt.Equals, true)
+
+ c.Assert(enSite.language.Lang, qt.Equals, "en")
+
+ // dumpPages(enSite.RegularPages()...)
+
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 5)
+ c.Assert(len(enSite.AllPages()), qt.Equals, 32)
+
+ // Check 404s
+ b.AssertFileContent("public/en/404.html", "404|en|404 Page not found")
+ b.AssertFileContent("public/fr/404.html", "404|fr|404 Page not found")
+
+ // Check robots.txt
+ // the domain root is the public directory, so the robots.txt has to be created there and not in the language directories
+ b.AssertFileContent("public/robots.txt", "robots")
+ b.AssertFileDoesNotExist("public/en/robots.txt")
+ b.AssertFileDoesNotExist("public/nn/robots.txt")
+
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Permalink: http://example.com/blog/en/sect/doc1-slug/")
+ b.AssertFileContent("public/en/sect/doc2/index.html", "Permalink: http://example.com/blog/en/sect/doc2/")
+ b.AssertFileContent("public/superbob/index.html", "Permalink: http://example.com/blog/superbob/")
+
+ doc2 := enSite.RegularPages()[1]
+ doc3 := enSite.RegularPages()[2]
+ c.Assert(doc3, qt.Equals, doc2.Prev())
+ doc1en := enSite.RegularPages()[0]
+ doc1fr := doc1en.Translations()[0]
+ b.AssertFileContent("public/fr/sect/doc1/index.html", "Permalink: http://example.com/blog/fr/sect/doc1/")
+
+ c.Assert(doc1fr, qt.Equals, doc1en.Translations()[0])
+ c.Assert(doc1en, qt.Equals, doc1fr.Translations()[0])
+ c.Assert(doc1fr.Language().Lang, qt.Equals, "fr")
+
+ doc4 := enSite.AllPages()[4]
+ c.Assert(len(doc4.Translations()), qt.Equals, 0)
+
+ // Taxonomies and their URLs
+ c.Assert(len(enSite.Taxonomies()), qt.Equals, 1)
+ tags := enSite.Taxonomies()["tags"]
+ c.Assert(len(tags), qt.Equals, 2)
+ c.Assert(doc1en, qt.Equals, tags["tag1"][0].Page)
+
+ frSite := sites[1]
+
+ c.Assert(frSite.language.Lang, qt.Equals, "fr")
+ c.Assert(len(frSite.RegularPages()), qt.Equals, 4)
+ c.Assert(len(frSite.AllPages()), qt.Equals, 32)
+
+ for _, frenchPage := range frSite.RegularPages() {
+ p := frenchPage
+ c.Assert(p.Language().Lang, qt.Equals, "fr")
+ }
+
+ // See https://github.com/gohugoio/hugo/issues/4285
+ // Before Hugo 0.33 you had to be explicit with the content path to get the correct Page, which
+ // isn't ideal in a multilingual setup. You want a way to get the current language version if available.
+ // Now you can do lookups with translation base name to get that behaviour.
+ // Let us test all the regular page variants:
+ getPageDoc1En := enSite.getPage(page.KindPage, filepath.ToSlash(doc1en.File().Path()))
+ getPageDoc1EnBase := enSite.getPage(page.KindPage, "sect/doc1")
+ getPageDoc1Fr := frSite.getPage(page.KindPage, filepath.ToSlash(doc1fr.File().Path()))
+ getPageDoc1FrBase := frSite.getPage(page.KindPage, "sect/doc1")
+ c.Assert(getPageDoc1En, qt.Equals, doc1en)
+ c.Assert(getPageDoc1Fr, qt.Equals, doc1fr)
+ c.Assert(getPageDoc1EnBase, qt.Equals, doc1en)
+ c.Assert(getPageDoc1FrBase, qt.Equals, doc1fr)
+
+ // Check redirect to main language, French
+ b.AssertFileContent("public/index.html", "0; url=http://example.com/blog/fr")
+
+ // check home page content (including data files rendering)
+ b.AssertFileContent("public/en/index.html", "Default Home Page 1", "Hello", "Hugo Rocks!")
+ b.AssertFileContent("public/fr/index.html", "French Home Page 1", "Bonjour", "Hugo Rocks!")
+
+ // check single page content
+ b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour", "LingoFrench")
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello", "LingoDefault")
+
+ // Check node translations
+ homeEn := enSite.getPage(page.KindHome)
+ c.Assert(homeEn, qt.Not(qt.IsNil))
+ c.Assert(len(homeEn.Translations()), qt.Equals, 3)
+ c.Assert(homeEn.Translations()[0].Language().Lang, qt.Equals, "fr")
+ c.Assert(homeEn.Translations()[1].Language().Lang, qt.Equals, "nn")
+ c.Assert(homeEn.Translations()[1].Title(), qt.Equals, "På nynorsk")
+ c.Assert(homeEn.Translations()[2].Language().Lang, qt.Equals, "nb")
+ c.Assert(homeEn.Translations()[2].Title(), qt.Equals, "På bokmål")
+ c.Assert(homeEn.Translations()[2].Language().LanguageName, qt.Equals, "Bokmål")
+
+ sectFr := frSite.getPage(page.KindSection, "sect")
+ c.Assert(sectFr, qt.Not(qt.IsNil))
+
+ c.Assert(sectFr.Language().Lang, qt.Equals, "fr")
+ c.Assert(len(sectFr.Translations()), qt.Equals, 1)
+ c.Assert(sectFr.Translations()[0].Language().Lang, qt.Equals, "en")
+ c.Assert(sectFr.Translations()[0].Title(), qt.Equals, "Sects")
+
+ nnSite := sites[2]
+ c.Assert(nnSite.language.Lang, qt.Equals, "nn")
+ taxNn := nnSite.getPage(page.KindTaxonomy, "lag")
+ c.Assert(taxNn, qt.Not(qt.IsNil))
+ c.Assert(len(taxNn.Translations()), qt.Equals, 1)
+ c.Assert(taxNn.Translations()[0].Language().Lang, qt.Equals, "nb")
+
+ taxTermNn := nnSite.getPage(page.KindTerm, "lag", "sogndal")
+ c.Assert(taxTermNn, qt.Not(qt.IsNil))
+ c.Assert(nnSite.getPage(page.KindTerm, "LAG", "SOGNDAL"), qt.Equals, taxTermNn)
+ c.Assert(len(taxTermNn.Translations()), qt.Equals, 1)
+ c.Assert(taxTermNn.Translations()[0].Language().Lang, qt.Equals, "nb")
+
+ // Check sitemap(s)
+ b.AssertFileContent("public/sitemap.xml",
+ "<loc>http://example.com/blog/en/sitemap.xml</loc>",
+ "<loc>http://example.com/blog/fr/sitemap.xml</loc>")
+ b.AssertFileContent("public/en/sitemap.xml", "http://example.com/blog/en/sect/doc2/")
+ b.AssertFileContent("public/fr/sitemap.xml", "http://example.com/blog/fr/sect/doc1/")
+
+ // Check taxonomies
+ enTags := enSite.Taxonomies()["tags"]
+ frTags := frSite.Taxonomies()["plaques"]
+ c.Assert(len(enTags), qt.Equals, 2, qt.Commentf("Tags in en: %v", enTags))
+ c.Assert(len(frTags), qt.Equals, 2, qt.Commentf("Tags in fr: %v", frTags))
+ c.Assert(enTags["tag1"], qt.Not(qt.IsNil))
+ c.Assert(frTags["FRtag1"], qt.Not(qt.IsNil))
+ b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
+
+ // en and nn have custom site menus
+ c.Assert(len(frSite.Menus()), qt.Equals, 0)
+ c.Assert(len(enSite.Menus()), qt.Equals, 1)
+ c.Assert(len(nnSite.Menus()), qt.Equals, 1)
+
+ c.Assert(enSite.Menus()["main"].ByName()[0].Name, qt.Equals, "Home")
+ c.Assert(nnSite.Menus()["main"].ByName()[0].Name, qt.Equals, "Heim")
+
+ // Issue #3108
+ prevPage := enSite.RegularPages()[0].Prev()
+ c.Assert(prevPage, qt.Not(qt.IsNil))
+ c.Assert(prevPage.Kind(), qt.Equals, page.KindPage)
+
+ for {
+ if prevPage == nil {
+ break
+ }
+ c.Assert(prevPage.Kind(), qt.Equals, page.KindPage)
+ prevPage = prevPage.Prev()
+ }
+
+ // Check bundles
+ b.AssertFileContent("public/fr/bundles/b1/index.html", "RelPermalink: /blog/fr/bundles/b1/|")
+ bundleFr := frSite.getPage(page.KindPage, "bundles/b1/index.md")
+ c.Assert(bundleFr, qt.Not(qt.IsNil))
+ c.Assert(len(bundleFr.Resources()), qt.Equals, 1)
+ logoFr := bundleFr.Resources().GetMatch("logo*")
+ logoFrGet := bundleFr.Resources().Get("logo.png")
+ c.Assert(logoFrGet, qt.Equals, logoFr)
+ c.Assert(logoFr, qt.Not(qt.IsNil))
+ b.AssertFileContent("public/fr/bundles/b1/index.html", "Resources: image/png: /blog/fr/bundles/b1/logo.png")
+ b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
+
+ bundleEn := enSite.getPage(page.KindPage, "bundles/b1/index.en.md")
+ c.Assert(bundleEn, qt.Not(qt.IsNil))
+ b.AssertFileContent("public/en/bundles/b1/index.html", "RelPermalink: /blog/en/bundles/b1/|")
+ c.Assert(len(bundleEn.Resources()), qt.Equals, 1)
+ logoEn := bundleEn.Resources().GetMatch("logo*")
+ c.Assert(logoEn, qt.Not(qt.IsNil))
+ b.AssertFileContent("public/en/bundles/b1/index.html", "Resources: image/png: /blog/en/bundles/b1/logo.png")
+ b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
+}
+
+func TestMultiSitesRebuild(t *testing.T) {
+ // t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4
+ // This leaktest seems to be a little bit shaky on Travis.
+ if !htesting.IsCI() {
+ defer leaktest.CheckTimeout(t, 10*time.Second)()
+ }
+
+ c := qt.New(t)
+
+ b := newMultiSiteTestDefaultBuilder(t).Running().CreateSites().Build(BuildCfg{})
+
+ sites := b.H.Sites
+ fs := b.Fs
+
+ b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|", "\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>")
+
+ enSite := sites[0]
+ frSite := sites[1]
+
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 5)
+ c.Assert(len(frSite.RegularPages()), qt.Equals, 4)
+
+ // Verify translations
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello")
+ b.AssertFileContent("public/fr/sect/doc1/index.html", "Bonjour")
+
+ // check single page content
+ b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour")
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello")
+
+ homeEn := enSite.getPage(page.KindHome)
+ c.Assert(homeEn, qt.Not(qt.IsNil))
+ c.Assert(len(homeEn.Translations()), qt.Equals, 3)
+
+ contentFs := b.H.Fs.Source
+
+ for i, this := range []struct {
+ preFunc func(t *testing.T)
+ events []fsnotify.Event
+ assertFunc func(t *testing.T)
+ }{
+ // * Remove doc
+ // * Add docs existing languages
+ // (Add doc new language: TODO(bep) we should load config.toml as part of these so we can add languages).
+ // * Rename file
+ // * Change doc
+ // * Change a template
+ // * Change language file
+ {
+ func(t *testing.T) {
+ fs.Source.Remove("content/sect/doc2.en.md")
+ },
+ []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}},
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 4, qt.Commentf("1 en removed"))
+ },
+ },
+ {
+ func(t *testing.T) {
+ writeNewContentFile(t, contentFs, "new_en_1", "2016-07-31", "content/new1.en.md", -5)
+ writeNewContentFile(t, contentFs, "new_en_2", "1989-07-30", "content/new2.en.md", -10)
+ writeNewContentFile(t, contentFs, "new_fr_1", "2016-07-30", "content/new1.fr.md", 10)
+ },
+ []fsnotify.Event{
+ {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create},
+ {Name: filepath.FromSlash("content/new2.en.md"), Op: fsnotify.Create},
+ {Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create},
+ },
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 6)
+ c.Assert(len(enSite.AllPages()), qt.Equals, 34)
+ c.Assert(len(frSite.RegularPages()), qt.Equals, 5)
+ c.Assert(frSite.RegularPages()[3].Title(), qt.Equals, "new_fr_1")
+ c.Assert(enSite.RegularPages()[0].Title(), qt.Equals, "new_en_2")
+ c.Assert(enSite.RegularPages()[1].Title(), qt.Equals, "new_en_1")
+
+ rendered := readWorkingDir(t, fs, "public/en/new1/index.html")
+ c.Assert(strings.Contains(rendered, "new_en_1"), qt.Equals, true)
+ },
+ },
+ {
+ func(t *testing.T) {
+ p := "content/sect/doc1.en.md"
+ doc1 := readFileFromFs(t, contentFs, p)
+ doc1 += "CHANGED"
+ writeToFs(t, contentFs, p, doc1)
+ },
+ []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 6)
+ doc1 := readWorkingDir(t, fs, "public/en/sect/doc1-slug/index.html")
+ c.Assert(strings.Contains(doc1, "CHANGED"), qt.Equals, true)
+ },
+ },
+ // Rename a file
+ {
+ func(t *testing.T) {
+ if err := contentFs.Rename("content/new1.en.md", "content/new1renamed.en.md"); err != nil {
+ t.Fatalf("Rename failed: %s", err)
+ }
+ },
+ []fsnotify.Event{
+ {Name: filepath.FromSlash("content/new1renamed.en.md"), Op: fsnotify.Rename},
+ {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename},
+ },
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 6, qt.Commentf("Rename"))
+ c.Assert(enSite.RegularPages()[1].Title(), qt.Equals, "new_en_1")
+ rendered := readWorkingDir(t, fs, "public/en/new1renamed/index.html")
+ c.Assert(rendered, qt.Contains, "new_en_1")
+ },
+ },
+ {
+ // Change a template
+ func(t *testing.T) {
+ template := "layouts/_default/single.html"
+ templateContent := readSource(t, fs, template)
+ templateContent += "{{ print \"Template Changed\"}}"
+ writeSource(t, fs, template, templateContent)
+ },
+ []fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}},
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 6)
+ c.Assert(len(enSite.AllPages()), qt.Equals, 34)
+ c.Assert(len(frSite.RegularPages()), qt.Equals, 5)
+ doc1 := readWorkingDir(t, fs, "public/en/sect/doc1-slug/index.html")
+ c.Assert(strings.Contains(doc1, "Template Changed"), qt.Equals, true)
+ },
+ },
+ {
+ // Change a language file
+ func(t *testing.T) {
+ languageFile := "i18n/fr.yaml"
+ langContent := readSource(t, fs, languageFile)
+ langContent = strings.Replace(langContent, "Bonjour", "Salut", 1)
+ writeSource(t, fs, languageFile, langContent)
+ },
+ []fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}},
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 6)
+ c.Assert(len(enSite.AllPages()), qt.Equals, 34)
+ c.Assert(len(frSite.RegularPages()), qt.Equals, 5)
+ docEn := readWorkingDir(t, fs, "public/en/sect/doc1-slug/index.html")
+ c.Assert(strings.Contains(docEn, "Hello"), qt.Equals, true)
+ docFr := readWorkingDir(t, fs, "public/fr/sect/doc1/index.html")
+ c.Assert(strings.Contains(docFr, "Salut"), qt.Equals, true)
+
+ homeEn := enSite.getPage(page.KindHome)
+ c.Assert(homeEn, qt.Not(qt.IsNil))
+ c.Assert(len(homeEn.Translations()), qt.Equals, 3)
+ c.Assert(homeEn.Translations()[0].Language().Lang, qt.Equals, "fr")
+ },
+ },
+ // Change a shortcode
+ {
+ func(t *testing.T) {
+ writeSource(t, fs, "layouts/shortcodes/shortcode.html", "Modified Shortcode: {{ i18n \"hello\" }}")
+ },
+ []fsnotify.Event{
+ {Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write},
+ },
+ func(t *testing.T) {
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 6)
+ c.Assert(len(enSite.AllPages()), qt.Equals, 34)
+ c.Assert(len(frSite.RegularPages()), qt.Equals, 5)
+ b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut")
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello")
+ },
+ },
+ } {
+
+ if this.preFunc != nil {
+ this.preFunc(t)
+ }
+
+ err := b.H.Build(BuildCfg{}, this.events...)
+ if err != nil {
+ t.Fatalf("[%d] Failed to rebuild sites: %s", i, err)
+ }
+
+ this.assertFunc(t)
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/4706
+func TestContentStressTest(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ numPages := 500
+
+ contentTempl := `
+---
+%s
+title: %q
+weight: %d
+multioutput: %t
+---
+
+# Header
+
+CONTENT
+
+The End.
+`
+
+ contentTempl = strings.Replace(contentTempl, "CONTENT", strings.Repeat(`
+
+## Another header
+
+Some text. Some more text.
+
+`, 100), -1)
+
+ var content []string
+ defaultOutputs := `outputs: ["html", "json", "rss" ]`
+
+ for i := 1; i <= numPages; i++ {
+ outputs := defaultOutputs
+ multioutput := true
+ if i%3 == 0 {
+ outputs = `outputs: ["json"]`
+ multioutput = false
+ }
+ section := "s1"
+ if i%10 == 0 {
+ section = "s2"
+ }
+ content = append(content, []string{fmt.Sprintf("%s/page%d.md", section, i), fmt.Sprintf(contentTempl, outputs, fmt.Sprintf("Title %d", i), i, multioutput)}...)
+ }
+
+ content = append(content, []string{"_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("Home %d", 0), 0, true)}...)
+ content = append(content, []string{"s1/_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("S %d", 1), 1, true)}...)
+ content = append(content, []string{"s2/_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("S %d", 2), 2, true)}...)
+
+ b.WithSimpleConfigFile()
+ b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
+ b.WithTemplates("layouts/_default/myview.html", `View: {{ len .Content }}`)
+ b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
+ b.WithTemplates("layouts/_default/list.html", `
+Page: {{ .Paginator.PageNumber }}
+P: {{ with .File }}{{ path.Join .Path }}{{ end }}
+List: {{ len .Paginator.Pages }}|List Content: {{ len .Content }}
+{{ $shuffled := where .Site.RegularPages "Params.multioutput" true | shuffle }}
+{{ $first5 := $shuffled | first 5 }}
+L1: {{ len .Site.RegularPages }} L2: {{ len $first5 }}
+{{ range $i, $e := $first5 }}
+Render {{ $i }}: {{ .Render "myview" }}
+{{ end }}
+END
+`)
+
+ b.WithContent(content...)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ contentMatchers := []string{"<h2 id=\"another-header\">Another header</h2>", "<h2 id=\"another-header-99\">Another header</h2>", "<p>The End.</p>"}
+
+ for i := 1; i <= numPages; i++ {
+ if i%3 != 0 {
+ section := "s1"
+ if i%10 == 0 {
+ section = "s2"
+ }
+ checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), contentMatchers...)
+ }
+ }
+
+ for i := 1; i <= numPages; i++ {
+ section := "s1"
+ if i%10 == 0 {
+ section = "s2"
+ }
+ checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), contentMatchers...)
+ }
+
+ checkContent(b, "public/s1/index.html", "P: s1/_index.md\nList: 10|List Content: 8132\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8132\n\nRender 1: View: 8132\n\nRender 2: View: 8132\n\nRender 3: View: 8132\n\nRender 4: View: 8132\n\nEND\n")
+ checkContent(b, "public/s2/index.html", "P: s2/_index.md\nList: 10|List Content: 8132", "Render 4: View: 8132\n\nEND")
+ checkContent(b, "public/index.html", "P: _index.md\nList: 10|List Content: 8132", "4: View: 8132\n\nEND")
+
+ // Check paginated pages
+ for i := 2; i <= 9; i++ {
+ checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), fmt.Sprintf("Page: %d", i), "Content: 8132\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8132", "Render 4: View: 8132\n\nEND")
+ }
+}
+
+func checkContent(s *sitesBuilder, filename string, matches ...string) {
+ s.T.Helper()
+ content := readWorkingDir(s.T, s.Fs, filename)
+ for _, match := range matches {
+ if !strings.Contains(content, match) {
+ s.Fatalf("No match for\n%q\nin content for %s\n%q\nDiff:\n%s", match, filename, content, htesting.DiffStrings(content, match))
+ }
+ }
+}
+
+func TestTranslationsFromContentToNonContent(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+
+baseURL = "http://example.com/"
+
+defaultContentLanguage = "en"
+
+[languages]
+[languages.en]
+weight = 10
+contentDir = "content/en"
+[languages.nn]
+weight = 20
+contentDir = "content/nn"
+
+
+`)
+
+ b.WithContent("en/mysection/_index.md", `
+---
+Title: My Section
+---
+
+`)
+
+ b.WithContent("en/_index.md", `
+---
+Title: My Home
+---
+
+`)
+
+ b.WithContent("en/categories/mycat/_index.md", `
+---
+Title: My MyCat
+---
+
+`)
+
+ b.WithContent("en/categories/_index.md", `
+---
+Title: My categories
+---
+
+`)
+
+ for _, lang := range []string{"en", "nn"} {
+ b.WithContent(lang+"/mysection/page.md", `
+---
+Title: My Page
+categories: ["mycat"]
+---
+
+`)
+ }
+
+ b.Build(BuildCfg{})
+
+ for _, path := range []string{
+ "/",
+ "/mysection",
+ "/categories",
+ "/categories/mycat",
+ } {
+ t.Run(path, func(t *testing.T) {
+ c := qt.New(t)
+
+ s1, _ := b.H.Sites[0].getPageNew(nil, path)
+ s2, _ := b.H.Sites[1].getPageNew(nil, path)
+
+ c.Assert(s1, qt.Not(qt.IsNil))
+ c.Assert(s2, qt.Not(qt.IsNil))
+
+ c.Assert(len(s1.Translations()), qt.Equals, 1)
+ c.Assert(len(s2.Translations()), qt.Equals, 1)
+ c.Assert(s1.Translations()[0], qt.Equals, s2)
+ c.Assert(s2.Translations()[0], qt.Equals, s1)
+
+ m1 := s1.Translations().MergeByLanguage(s2.Translations())
+ m2 := s2.Translations().MergeByLanguage(s1.Translations())
+
+ c.Assert(len(m1), qt.Equals, 1)
+ c.Assert(len(m2), qt.Equals, 1)
+ })
+ }
+}
+
+var tocShortcode = `
+TOC1: {{ .Page.TableOfContents }}
+
+TOC2: {{ .Page.TableOfContents }}
+`
+
+func TestSelfReferencedContentInShortcode(t *testing.T) {
+ t.Parallel()
+
+ b := newMultiSiteTestDefaultBuilder(t)
+
+ var (
+ shortcode = `{{- .Page.Content -}}{{- .Page.Summary -}}{{- .Page.Plain -}}{{- .Page.PlainWords -}}{{- .Page.WordCount -}}{{- .Page.ReadingTime -}}`
+
+ page = `---
+title: sctest
+---
+Empty:{{< mycontent >}}:
+`
+ )
+
+ b.WithTemplatesAdded("layouts/shortcodes/mycontent.html", shortcode)
+ b.WithContent("post/simple.en.md", page)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/en/post/simple/index.html", "Empty:[]00:")
+}
+
+var tocPageSimple = `---
+title: tocTest
+publishdate: "2000-01-01"
+---
+{{< toc >}}
+# Heading 1 {#1}
+Some text.
+## Subheading 1.1 {#1-1}
+Some more text.
+# Heading 2 {#2}
+Even more text.
+## Subheading 2.1 {#2-1}
+Lorem ipsum...
+`
+
+var tocPageVariants1 = `---
+title: tocTest
+publishdate: "2000-01-01"
+---
+Variant 1:
+{{% wrapper %}}
+{{< toc >}}
+{{% /wrapper %}}
+# Heading 1
+
+Variant 3:
+{{% toc %}}
+
+`
+
+var tocPageVariants2 = `---
+title: tocTest
+publishdate: "2000-01-01"
+---
+Variant 1:
+{{% wrapper %}}
+{{< toc >}}
+{{% /wrapper %}}
+# Heading 1
+
+Variant 2:
+{{< wrapper >}}
+{{< toc >}}
+{{< /wrapper >}}
+
+Variant 3:
+{{% toc %}}
+
+`
+
+var tocPageSimpleExpected = `<nav id="TableOfContents">
+<ul>
+<li><a href="#1">Heading 1</a>
+<ul>
+<li><a href="#1-1">Subheading 1.1</a></li>
+</ul></li>
+<li><a href="#2">Heading 2</a>
+<ul>
+<li><a href="#2-1">Subheading 2.1</a></li>
+</ul></li>
+</ul>
+</nav>`
+
+var tocPageWithShortcodesInHeadings = `---
+title: tocTest
+publishdate: "2000-01-01"
+---
+
+{{< toc >}}
+
+# Heading 1 {#1}
+
+Some text.
+
+## Subheading 1.1 {{< shortcode >}} {#1-1}
+
+Some more text.
+
+# Heading 2 {{% shortcode %}} {#2}
+
+Even more text.
+
+## Subheading 2.1 {#2-1}
+
+Lorem ipsum...
+`
+
+var tocPageWithShortcodesInHeadingsExpected = `<nav id="TableOfContents">
+<ul>
+<li><a href="#1">Heading 1</a>
+<ul>
+<li><a href="#1-1">Subheading 1.1 Shortcode: Hello</a></li>
+</ul></li>
+<li><a href="#2">Heading 2 Shortcode: Hello</a>
+<ul>
+<li><a href="#2-1">Subheading 2.1</a></li>
+</ul></li>
+</ul>
+</nav>`
+
+var multiSiteTOMLConfigTemplate = `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+disablePathToLower = true
+defaultContentLanguage = "{{ .DefaultContentLanguage }}"
+defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }}
+enableRobotsTXT = true
+
+[permalinks]
+other = "/somewhere/else/:filename"
+
+[Taxonomies]
+tag = "tags"
+
+[Languages]
+[Languages.en]
+weight = 10
+title = "In English"
+languageName = "English"
+[[Languages.en.menu.main]]
+url = "/"
+name = "Home"
+weight = 0
+
+[Languages.fr]
+weight = 20
+title = "Le Français"
+languageName = "Français"
+[Languages.fr.Taxonomies]
+plaque = "plaques"
+
+[Languages.nn]
+weight = 30
+title = "På nynorsk"
+languageName = "Nynorsk"
+paginatePath = "side"
+[Languages.nn.Taxonomies]
+lag = "lag"
+[[Languages.nn.menu.main]]
+url = "/"
+name = "Heim"
+weight = 1
+
+[Languages.nb]
+weight = 40
+title = "På bokmål"
+languageName = "Bokmål"
+paginatePath = "side"
+[Languages.nb.Taxonomies]
+lag = "lag"
+`
+
+var multiSiteYAMLConfigTemplate = `
+baseURL: "http://example.com/blog"
+
+disablePathToLower: true
+paginate: 1
+defaultContentLanguage: "{{ .DefaultContentLanguage }}"
+defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }}
+enableRobotsTXT: true
+
+permalinks:
+ other: "/somewhere/else/:filename"
+
+Taxonomies:
+ tag: "tags"
+
+Languages:
+ en:
+ weight: 10
+ title: "In English"
+ languageName: "English"
+ menu:
+ main:
+ - url: "/"
+ name: "Home"
+ weight: 0
+ fr:
+ weight: 20
+ title: "Le Français"
+ languageName: "Français"
+ Taxonomies:
+ plaque: "plaques"
+ nn:
+ weight: 30
+ title: "På nynorsk"
+ languageName: "Nynorsk"
+ paginatePath: "side"
+ Taxonomies:
+ lag: "lag"
+ menu:
+ main:
+ - url: "/"
+ name: "Heim"
+ weight: 1
+ nb:
+ weight: 40
+ title: "På bokmål"
+ languageName: "Bokmål"
+ paginatePath: "side"
+ Taxonomies:
+ lag: "lag"
+
+`
+
+// TODO(bep) clean move
+var multiSiteJSONConfigTemplate = `
+{
+ "baseURL": "http://example.com/blog",
+ "paginate": 1,
+ "disablePathToLower": true,
+ "defaultContentLanguage": "{{ .DefaultContentLanguage }}",
+ "defaultContentLanguageInSubdir": true,
+ "enableRobotsTXT": true,
+ "permalinks": {
+ "other": "/somewhere/else/:filename"
+ },
+ "Taxonomies": {
+ "tag": "tags"
+ },
+ "Languages": {
+ "en": {
+ "weight": 10,
+ "title": "In English",
+ "languageName": "English",
+ "menu": {
+ "main": [
+ {
+ "url": "/",
+ "name": "Home",
+ "weight": 0
+ }
+ ]
+ }
+ },
+ "fr": {
+ "weight": 20,
+ "title": "Le Français",
+ "languageName": "Français",
+ "Taxonomies": {
+ "plaque": "plaques"
+ }
+ },
+ "nn": {
+ "weight": 30,
+ "title": "På nynorsk",
+ "paginatePath": "side",
+ "languageName": "Nynorsk",
+ "Taxonomies": {
+ "lag": "lag"
+ },
+ "menu": {
+ "main": [
+ {
+ "url": "/",
+ "name": "Heim",
+ "weight": 1
+ }
+ ]
+ }
+ },
+ "nb": {
+ "weight": 40,
+ "title": "På bokmål",
+ "paginatePath": "side",
+ "languageName": "Bokmål",
+ "Taxonomies": {
+ "lag": "lag"
+ }
+ }
+ }
+}
+`
+
+func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) {
+ t.Helper()
+ writeToFs(t, fs.Source, filename, content)
+}
+
+func writeToFs(t testing.TB, fs afero.Fs, filename, content string) {
+ t.Helper()
+ if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
+ t.Fatalf("Failed to write file: %s", err)
+ }
+}
+
+func readWorkingDir(t testing.TB, fs *hugofs.Fs, filename string) string {
+ t.Helper()
+ return readFileFromFs(t, fs.WorkingDirReadOnly, filename)
+}
+
+func workingDirExists(fs *hugofs.Fs, filename string) bool {
+ b, err := helpers.Exists(filename, fs.WorkingDirReadOnly)
+ if err != nil {
+ panic(err)
+ }
+ return b
+}
+
+func readSource(t *testing.T, fs *hugofs.Fs, filename string) string {
+ return readFileFromFs(t, fs.Source, filename)
+}
+
+func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
+ t.Helper()
+ filename = filepath.Clean(filename)
+ b, err := afero.ReadFile(fs, filename)
+ if err != nil {
+ // Print some debug info
+ hadSlash := strings.HasPrefix(filename, helpers.FilePathSeparator)
+ start := 0
+ if hadSlash {
+ start = 1
+ }
+ end := start + 1
+
+ parts := strings.Split(filename, helpers.FilePathSeparator)
+ if parts[start] == "work" {
+ end++
+ }
+
+ /*
+ root := filepath.Join(parts[start:end]...)
+ if hadSlash {
+ root = helpers.FilePathSeparator + root
+ }
+
+ helpers.PrintFs(fs, root, os.Stdout)
+ */
+
+ t.Fatalf("Failed to read file: %s", err)
+ }
+ return string(b)
+}
+
+const testPageTemplate = `---
+title: "%s"
+publishdate: "%s"
+weight: %d
+---
+# Doc %s
+`
+
+func newTestPage(title, date string, weight int) string {
+ return fmt.Sprintf(testPageTemplate, title, date, weight, title)
+}
+
+func writeNewContentFile(t *testing.T, fs afero.Fs, title, date, filename string, weight int) {
+ content := newTestPage(title, date, weight)
+ writeToFs(t, fs, filename, content)
+}
+
+type multiSiteTestBuilder struct {
+ configData any
+ config string
+ configFormat string
+
+ *sitesBuilder
+}
+
+func newMultiSiteTestDefaultBuilder(t testing.TB) *multiSiteTestBuilder {
+ return newMultiSiteTestBuilder(t, "", "", nil)
+}
+
+func (b *multiSiteTestBuilder) WithNewConfig(config string) *multiSiteTestBuilder {
+ b.WithConfigTemplate(b.configData, b.configFormat, config)
+ return b
+}
+
+func (b *multiSiteTestBuilder) WithNewConfigData(data any) *multiSiteTestBuilder {
+ b.WithConfigTemplate(data, b.configFormat, b.config)
+ return b
+}
+
+func newMultiSiteTestBuilder(t testing.TB, configFormat, config string, configData any) *multiSiteTestBuilder {
+ if configData == nil {
+ configData = map[string]any{
+ "DefaultContentLanguage": "fr",
+ "DefaultContentLanguageInSubdir": true,
+ }
+ }
+
+ if config == "" {
+ config = multiSiteTOMLConfigTemplate
+ }
+
+ if configFormat == "" {
+ configFormat = "toml"
+ }
+
+ b := newTestSitesBuilder(t).WithConfigTemplate(configData, configFormat, config)
+ b.WithContent("root.en.md", `---
+title: root
+weight: 10000
+slug: root
+publishdate: "2000-01-01"
+---
+# root
+`,
+ "sect/doc1.en.md", `---
+title: doc1
+weight: 1
+slug: doc1-slug
+tags:
+ - tag1
+publishdate: "2000-01-01"
+---
+# doc1
+*some "content"*
+
+{{< shortcode >}}
+
+{{< lingo >}}
+
+NOTE: slug should be used as URL
+`,
+ "sect/doc1.fr.md", `---
+title: doc1
+weight: 1
+plaques:
+ - FRtag1
+ - FRtag2
+publishdate: "2000-01-04"
+---
+# doc1
+*quelque "contenu"*
+
+{{< shortcode >}}
+
+{{< lingo >}}
+
+NOTE: should be in the 'en' Page's 'Translations' field.
+NOTE: date is after "doc3"
+`,
+ "sect/doc2.en.md", `---
+title: doc2
+weight: 2
+publishdate: "2000-01-02"
+---
+# doc2
+*some content*
+NOTE: without slug, "doc2" should be used, without ".en" as URL
+`,
+ "sect/doc3.en.md", `---
+title: doc3
+weight: 3
+publishdate: "2000-01-03"
+aliases: [/en/al/alias1,/al/alias2/]
+tags:
+ - tag2
+ - tag1
+url: /superbob/
+---
+# doc3
+*some content*
+NOTE: third 'en' doc, should trigger pagination on home page.
+`,
+ "sect/doc4.md", `---
+title: doc4
+weight: 4
+plaques:
+ - FRtag1
+publishdate: "2000-01-05"
+---
+# doc4
+*du contenu francophone*
+NOTE: should use the defaultContentLanguage and mark this doc as 'fr'.
+NOTE: doesn't have any corresponding translation in 'en'
+`,
+ "other/doc5.fr.md", `---
+title: doc5
+weight: 5
+publishdate: "2000-01-06"
+---
+# doc5
+*autre contenu francophone*
+NOTE: should use the "permalinks" configuration with :filename
+`,
+ // Add some for the stats
+ "stats/expired.fr.md", `---
+title: expired
+publishdate: "2000-01-06"
+expiryDate: "2001-01-06"
+---
+# Expired
+`,
+ "stats/future.fr.md", `---
+title: future
+weight: 6
+publishdate: "2100-01-06"
+---
+# Future
+`,
+ "stats/expired.en.md", `---
+title: expired
+weight: 7
+publishdate: "2000-01-06"
+expiryDate: "2001-01-06"
+---
+# Expired
+`,
+ "stats/future.en.md", `---
+title: future
+weight: 6
+publishdate: "2100-01-06"
+---
+# Future
+`,
+ "stats/draft.en.md", `---
+title: expired
+publishdate: "2000-01-06"
+draft: true
+---
+# Draft
+`,
+ "stats/tax.nn.md", `---
+title: Tax NN
+weight: 8
+publishdate: "2000-01-06"
+weight: 1001
+lag:
+- Sogndal
+---
+# Tax NN
+`,
+ "stats/tax.nb.md", `---
+title: Tax NB
+weight: 8
+publishdate: "2000-01-06"
+weight: 1002
+lag:
+- Sogndal
+---
+# Tax NB
+`,
+ // Bundle
+ "bundles/b1/index.en.md", `---
+title: Bundle EN
+publishdate: "2000-01-06"
+weight: 2001
+---
+# Bundle Content EN
+`,
+ "bundles/b1/index.md", `---
+title: Bundle Default
+publishdate: "2000-01-06"
+weight: 2002
+---
+# Bundle Content Default
+`,
+ "bundles/b1/logo.png", `
+PNG Data
+`)
+
+ i18nContent := func(id, value string) string {
+ return fmt.Sprintf(`
+[%s]
+other = %q
+`, id, value)
+ }
+
+ b.WithSourceFile("i18n/en.toml", i18nContent("hello", "Hello"))
+ b.WithSourceFile("i18n/fr.toml", i18nContent("hello", "Bonjour"))
+ b.WithSourceFile("i18n/nb.toml", i18nContent("hello", "Hallo"))
+ b.WithSourceFile("i18n/nn.toml", i18nContent("hello", "Hallo"))
+
+ return &multiSiteTestBuilder{sitesBuilder: b, configFormat: configFormat, config: config, configData: configData}
+}
+
+func TestRebuildOnAssetChange(t *testing.T) {
+ b := newTestSitesBuilder(t).Running()
+ b.WithTemplatesAdded("index.html", `
+{{ (resources.Get "data.json").Content }}
+`)
+ b.WithSourceFile("assets/data.json", "orig data")
+
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", `orig data`)
+
+ b.EditFiles("assets/data.json", "changed data")
+
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", `changed data`)
+}
diff --git a/hugolib/hugo_sites_multihost_test.go b/hugolib/hugo_sites_multihost_test.go
new file mode 100644
index 000000000..b008fbdef
--- /dev/null
+++ b/hugolib/hugo_sites_multihost_test.go
@@ -0,0 +1,119 @@
+package hugolib
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestMultihosts(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configTemplate := `
+paginate = 1
+disablePathToLower = true
+defaultContentLanguage = "fr"
+defaultContentLanguageInSubdir = false
+staticDir = ["s1", "s2"]
+enableRobotsTXT = true
+
+[permalinks]
+other = "/somewhere/else/:filename"
+
+[Taxonomies]
+tag = "tags"
+
+[Languages]
+[Languages.en]
+staticDir2 = ["ens1", "ens2"]
+baseURL = "https://example.com/docs"
+weight = 10
+title = "In English"
+languageName = "English"
+
+[Languages.fr]
+staticDir2 = ["frs1", "frs2"]
+baseURL = "https://example.fr"
+weight = 20
+title = "Le Français"
+languageName = "Français"
+
+[Languages.nn]
+staticDir2 = ["nns1", "nns2"]
+baseURL = "https://example.no"
+weight = 30
+title = "På nynorsk"
+languageName = "Nynorsk"
+
+`
+
+ b := newMultiSiteTestDefaultBuilder(t).WithConfigFile("toml", configTemplate)
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello")
+
+ s1 := b.H.Sites[0]
+
+ s1h := s1.getPage(page.KindHome)
+ c.Assert(s1h.IsTranslated(), qt.Equals, true)
+ c.Assert(len(s1h.Translations()), qt.Equals, 2)
+ c.Assert(s1h.Permalink(), qt.Equals, "https://example.com/docs/")
+
+ // For “regular multilingual” we kept the aliases pages with url in front matter
+ // as a literal value that we use as is.
+ // There is an ambiguity in the guessing.
+ // For multihost, we never want any content in the root.
+ //
+ // check url in front matter:
+ pageWithURLInFrontMatter := s1.getPage(page.KindPage, "sect/doc3.en.md")
+ c.Assert(pageWithURLInFrontMatter, qt.Not(qt.IsNil))
+ c.Assert(pageWithURLInFrontMatter.RelPermalink(), qt.Equals, "/docs/superbob/")
+ b.AssertFileContent("public/en/superbob/index.html", "doc3|Hello|en")
+
+ // the domain root is the language directory for each language, so the robots.txt is created in the language directories
+ b.AssertFileContent("public/en/robots.txt", "robots|en")
+ b.AssertFileContent("public/fr/robots.txt", "robots|fr")
+ b.AssertFileContent("public/nn/robots.txt", "robots|nn")
+ b.AssertFileDoesNotExist("public/robots.txt")
+
+ // check alias:
+ b.AssertFileContent("public/en/al/alias1/index.html", `content="0; url=https://example.com/docs/superbob/"`)
+ b.AssertFileContent("public/en/al/alias2/index.html", `content="0; url=https://example.com/docs/superbob/"`)
+
+ s2 := b.H.Sites[1]
+
+ s2h := s2.getPage(page.KindHome)
+ c.Assert(s2h.Permalink(), qt.Equals, "https://example.fr/")
+
+ b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /docs/text/pipes.txt")
+ b.AssertFileContent("public/fr/text/pipes.txt", "Hugo Pipes")
+ b.AssertFileContent("public/en/index.html", "Default Home Page", "String Resource: /docs/text/pipes.txt")
+ b.AssertFileContent("public/en/text/pipes.txt", "Hugo Pipes")
+
+ // Check paginators
+ b.AssertFileContent("public/en/page/1/index.html", `refresh" content="0; url=https://example.com/docs/"`)
+ b.AssertFileContent("public/nn/page/1/index.html", `refresh" content="0; url=https://example.no/"`)
+ b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "https://example.com/docs/sect/", "\"/docs/sect/page/3/")
+ b.AssertFileContent("public/fr/sect/page/2/index.html", "List Page 2", "Bonjour", "https://example.fr/sect/")
+
+ // Check bundles
+
+ bundleEn := s1.getPage(page.KindPage, "bundles/b1/index.en.md")
+ c.Assert(bundleEn, qt.Not(qt.IsNil))
+ c.Assert(bundleEn.RelPermalink(), qt.Equals, "/docs/bundles/b1/")
+ c.Assert(len(bundleEn.Resources()), qt.Equals, 1)
+
+ b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
+ b.AssertFileContent("public/en/bundles/b1/index.html", " image/png: /docs/bundles/b1/logo.png")
+
+ bundleFr := s2.getPage(page.KindPage, "bundles/b1/index.md")
+ c.Assert(bundleFr, qt.Not(qt.IsNil))
+ c.Assert(bundleFr.RelPermalink(), qt.Equals, "/bundles/b1/")
+ c.Assert(len(bundleFr.Resources()), qt.Equals, 1)
+ b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
+ b.AssertFileContent("public/fr/bundles/b1/index.html", " image/png: /bundles/b1/logo.png")
+}
diff --git a/hugolib/hugo_sites_rebuild_test.go b/hugolib/hugo_sites_rebuild_test.go
new file mode 100644
index 000000000..d312d2199
--- /dev/null
+++ b/hugolib/hugo_sites_rebuild_test.go
@@ -0,0 +1,316 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestSitesRebuild(t *testing.T) {
+ configFile := `
+baseURL = "https://example.com"
+title = "Rebuild this"
+contentDir = "content"
+enableInlineShortcodes = true
+timeout = "5s"
+
+
+`
+
+ var (
+ contentFilename = "content/blog/page1.md"
+ dataFilename = "data/mydata.toml"
+ )
+
+ createSiteBuilder := func(t testing.TB) *sitesBuilder {
+ b := newTestSitesBuilder(t).WithConfigFile("toml", configFile).Running()
+
+ b.WithSourceFile(dataFilename, `hugo = "Rocks!"`)
+
+ b.WithContent("content/_index.md", `---
+title: Home, Sweet Home!
+---
+
+`)
+
+ b.WithContent(contentFilename, `
+---
+title: "Page 1"
+summary: "Initial summary"
+paginate: 3
+---
+
+Content.
+
+{{< badge.inline >}}
+Data Inline: {{ site.Data.mydata.hugo }}
+{{< /badge.inline >}}
+`)
+
+ // For .Page.Render tests
+ b.WithContent("prender.md", `---
+title: Page 1
+---
+
+Content for Page 1.
+
+{{< dorender >}}
+
+`)
+
+ b.WithTemplatesAdded(
+ "layouts/shortcodes/dorender.html", `
+{{ $p := .Page }}
+Render {{ $p.RelPermalink }}: {{ $p.Render "single" }}
+
+`)
+
+ b.WithTemplatesAdded("index.html", `
+{{ range (.Paginate .Site.RegularPages).Pages }}
+* Page Paginate: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
+{{ end }}
+{{ range .Site.RegularPages }}
+* Page Pages: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
+{{ end }}
+Content: {{ .Content }}
+Data: {{ site.Data.mydata.hugo }}
+`)
+
+ b.WithTemplatesAdded("layouts/partials/mypartial1.html", `Mypartial1`)
+ b.WithTemplatesAdded("layouts/partials/mypartial2.html", `Mypartial2`)
+ b.WithTemplatesAdded("layouts/partials/mypartial3.html", `Mypartial3`)
+ b.WithTemplatesAdded("_default/single.html", `{{ define "main" }}Single Main: {{ .Title }}|Mypartial1: {{ partial "mypartial1.html" }}{{ end }}`)
+ b.WithTemplatesAdded("_default/list.html", `{{ define "main" }}List Main: {{ .Title }}{{ end }}`)
+ b.WithTemplatesAdded("_default/baseof.html", `Baseof:{{ block "main" . }}Baseof Main{{ end }}|Mypartial3: {{ partial "mypartial3.html" }}:END`)
+
+ return b
+ }
+
+ t.Run("Refresh paginator on edit", func(t *testing.T) {
+ b := createSiteBuilder(t)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "* Page Paginate: Page 1|Summary: Initial summary|Content: <p>Content.</p>")
+
+ b.EditFiles(contentFilename, `
+---
+title: "Page 1 edit"
+summary: "Edited summary"
+---
+
+Edited content.
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "* Page Paginate: Page 1 edit|Summary: Edited summary|Content: <p>Edited content.</p>")
+ // https://github.com/gohugoio/hugo/issues/5833
+ b.AssertFileContent("public/index.html", "* Page Pages: Page 1 edit|Summary: Edited summary|Content: <p>Edited content.</p>")
+ })
+
+ // https://github.com/gohugoio/hugo/issues/6768
+ t.Run("Edit data", func(t *testing.T) {
+ b := createSiteBuilder(t)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Data: Rocks!
+Data Inline: Rocks!
+`)
+
+ b.EditFiles(dataFilename, `hugo = "Rules!"`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Data: Rules!
+Data Inline: Rules!`)
+ })
+
+ // https://github.com/gohugoio/hugo/issues/6968
+ t.Run("Edit single.html with base", func(t *testing.T) {
+ b := newTestSitesBuilder(t).Running()
+
+ b.WithTemplates(
+ "_default/single.html", `{{ define "main" }}Single{{ end }}`,
+ "_default/baseof.html", `Base: {{ block "main" .}}Block{{ end }}`,
+ )
+
+ b.WithContent("p1.md", "---\ntitle: Page\n---")
+
+ b.Build(BuildCfg{})
+
+ b.EditFiles("layouts/_default/single.html", `Single Edit: {{ define "main" }}Single{{ end }}`)
+
+ counters := &testCounters{}
+
+ b.Build(BuildCfg{testCounters: counters})
+
+ b.Assert(int(counters.contentRenderCounter), qt.Equals, 0)
+ })
+
+ t.Run("Page.Render, edit baseof", func(t *testing.T) {
+ b := createSiteBuilder(t)
+
+ b.WithTemplatesAdded("index.html", `
+{{ $p := site.GetPage "prender.md" }}
+prender: {{ $p.Title }}|{{ $p.Content }}
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+ Render /prender/: Baseof:Single Main: Page 1|Mypartial1: Mypartial1|Mypartial3: Mypartial3:END
+`)
+
+ b.EditFiles("layouts/_default/baseof.html", `Baseof Edited:{{ block "main" . }}Baseof Main{{ end }}:END`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Render /prender/: Baseof Edited:Single Main: Page 1|Mypartial1: Mypartial1:END
+`)
+ })
+
+ t.Run("Page.Render, edit partial in baseof", func(t *testing.T) {
+ b := createSiteBuilder(t)
+
+ b.WithTemplatesAdded("index.html", `
+{{ $p := site.GetPage "prender.md" }}
+prender: {{ $p.Title }}|{{ $p.Content }}
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+ Render /prender/: Baseof:Single Main: Page 1|Mypartial1: Mypartial1|Mypartial3: Mypartial3:END
+`)
+
+ b.EditFiles("layouts/partials/mypartial3.html", `Mypartial3 Edited`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Render /prender/: Baseof:Single Main: Page 1|Mypartial1: Mypartial1|Mypartial3: Mypartial3 Edited:END
+`)
+ })
+
+ t.Run("Edit RSS shortcode", func(t *testing.T) {
+ b := createSiteBuilder(t)
+
+ b.WithContent("output.md", `---
+title: Output
+outputs: ["HTML", "AMP"]
+layout: output
+---
+
+Content for Output.
+
+{{< output >}}
+
+`)
+
+ b.WithTemplates(
+ "layouts/_default/output.html", `Output HTML: {{ .RelPermalink }}|{{ .Content }}`,
+ "layouts/_default/output.amp.html", `Output AMP: {{ .RelPermalink }}|{{ .Content }}`,
+ "layouts/shortcodes/output.html", `Output Shortcode HTML`,
+ "layouts/shortcodes/output.amp.html", `Output Shortcode AMP`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/output/index.html", `
+Output Shortcode HTML
+`)
+ b.AssertFileContent("public/amp/output/index.html", `
+Output Shortcode AMP
+`)
+
+ b.EditFiles("layouts/shortcodes/output.amp.html", `Output Shortcode AMP Edited`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/amp/output/index.html", `
+Output Shortcode AMP Edited
+`)
+ })
+}
+
+// Issues #7623 #7625
+func TestSitesRebuildOnFilesIncludedWithGetPage(t *testing.T) {
+ b := newTestSitesBuilder(t).Running()
+ b.WithContent("pages/p1.md", `---
+title: p1
+---
+P3: {{< GetPage "pages/p3" >}}
+`)
+
+ b.WithContent("pages/p2.md", `---
+title: p2
+---
+P4: {{< site_GetPage "pages/p4" >}}
+P5: {{< site_GetPage "p5" >}}
+P6: {{< dot_site_GetPage "p6" >}}
+`)
+
+ b.WithContent("pages/p3/index.md", "---\ntitle: p3\nheadless: true\n---\nP3 content")
+ b.WithContent("pages/p4/index.md", "---\ntitle: p4\nheadless: true\n---\nP4 content")
+ b.WithContent("pages/p5.md", "---\ntitle: p5\n---\nP5 content")
+ b.WithContent("pages/p6.md", "---\ntitle: p6\n---\nP6 content")
+
+ b.WithTemplates(
+ "_default/single.html", `{{ .Content }}`,
+ "shortcodes/GetPage.html", `
+{{ $arg := .Get 0 }}
+{{ $p := .Page.GetPage $arg }}
+{{ $p.Content }}
+ `,
+ "shortcodes/site_GetPage.html", `
+{{ $arg := .Get 0 }}
+{{ $p := site.GetPage $arg }}
+{{ $p.Content }}
+ `, "shortcodes/dot_site_GetPage.html", `
+{{ $arg := .Get 0 }}
+{{ $p := .Site.GetPage $arg }}
+{{ $p.Content }}
+ `,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/pages/p1/index.html", "P3 content")
+ b.AssertFileContent("public/pages/p2/index.html", `P4 content
+P5 content
+P6 content
+`)
+
+ b.EditFiles("content/pages/p3/index.md", "---\ntitle: p3\n---\nP3 changed content")
+ b.EditFiles("content/pages/p4/index.md", "---\ntitle: p4\n---\nP4 changed content")
+ b.EditFiles("content/pages/p5.md", "---\ntitle: p5\n---\nP5 changed content")
+ b.EditFiles("content/pages/p6.md", "---\ntitle: p6\n---\nP6 changed content")
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/pages/p1/index.html", "P3 changed content")
+ b.AssertFileContent("public/pages/p2/index.html", `P4 changed content
+P5 changed content
+P6 changed content
+`)
+}
diff --git a/hugolib/hugo_smoke_test.go b/hugolib/hugo_smoke_test.go
new file mode 100644
index 000000000..0c14414e5
--- /dev/null
+++ b/hugolib/hugo_smoke_test.go
@@ -0,0 +1,443 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "math/rand"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// The most basic build test.
+func TestHello(t *testing.T) {
+ t.Parallel()
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+baseURL="https://example.org"
+disableKinds = ["term", "taxonomy", "section", "page"]
+`)
+ b.WithContent("p1", `
+---
+title: Page
+---
+
+`)
+ b.WithTemplates("index.html", `Site: {{ .Site.Language.Lang | upper }}`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `Site: EN`)
+}
+
+func TestSmoke(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ const configFile = `
+baseURL = "https://example.com"
+title = "Simple Site"
+rssLimit = 3
+defaultContentLanguage = "en"
+enableRobotsTXT = true
+
+[languages]
+[languages.en]
+weight = 1
+title = "In English"
+[languages.no]
+weight = 2
+title = "På norsk"
+
+[params]
+hugo = "Rules!"
+
+[outputs]
+ home = ["HTML", "JSON", "CSV", "RSS"]
+
+`
+
+ const pageContentAndSummaryDivider = `---
+title: Page with outputs
+hugo: "Rocks!"
+outputs: ["HTML", "JSON"]
+tags: [ "hugo" ]
+aliases: [ "/a/b/c" ]
+---
+
+This is summary.
+
+<!--more-->
+
+This is content with some shortcodes.
+
+Shortcode 1: {{< sc >}}.
+Shortcode 2: {{< sc >}}.
+
+`
+
+ const pageContentWithMarkdownShortcodes = `---
+title: Page with markdown shortcode
+hugo: "Rocks!"
+outputs: ["HTML", "JSON"]
+---
+
+This is summary.
+
+<!--more-->
+
+This is content[^a].
+
+# Header above
+
+{{% markdown-shortcode %}}
+# Header inside
+
+Some **markdown**.[^b]
+
+{{% /markdown-shortcode %}}
+
+# Heder below
+
+Some more content[^c].
+
+Footnotes:
+
+[^a]: Fn 1
+[^b]: Fn 2
+[^c]: Fn 3
+
+`
+
+ pageContentAutoSummary := strings.Replace(pageContentAndSummaryDivider, "<!--more-->", "", 1)
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
+ b.WithTemplatesAdded("shortcodes/markdown-shortcode.html", `
+Some **Markdown** in shortcode.
+
+{{ .Inner }}
+
+
+
+`)
+
+ b.WithTemplatesAdded("shortcodes/markdown-shortcode.json", `
+Some **Markdown** in JSON shortcode.
+{{ .Inner }}
+
+`)
+
+ for i := 1; i <= 11; i++ {
+ if i%2 == 0 {
+ b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAndSummaryDivider)
+ b.WithContent(fmt.Sprintf("blog/page%d.no.md", i), pageContentAndSummaryDivider)
+ } else {
+ b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAutoSummary)
+ }
+ }
+
+ for i := 1; i <= 5; i++ {
+ // Root section pages
+ b.WithContent(fmt.Sprintf("root%d.md", i), pageContentAutoSummary)
+ }
+
+ // https://github.com/gohugoio/hugo/issues/4695
+ b.WithContent("blog/markyshort.md", pageContentWithMarkdownShortcodes)
+
+ // Add one bundle
+ b.WithContent("blog/mybundle/index.md", pageContentAndSummaryDivider)
+ b.WithContent("blog/mybundle/mydata.csv", "Bundled CSV")
+
+ const (
+ commonPageTemplate = `|{{ .Kind }}|{{ .Title }}|{{ .File.Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}`
+ commonPaginatorTemplate = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}`
+ commonListTemplateNoPaginator = `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
+ commonListTemplate = commonPaginatorTemplate + `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
+ commonShortcodeTemplate = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}`
+ prevNextTemplate = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}`
+ prevNextInSectionTemplate = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}`
+ paramsTemplate = `|Params: {{ .Params.hugo }}`
+ treeNavTemplate = `|CurrentSection: {{ .CurrentSection }}`
+ )
+
+ b.WithTemplates(
+ "_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}",
+ "_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator,
+ "_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator,
+ "_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate,
+ "_default/single.json", "JSON: Single"+commonPageTemplate,
+
+ // For .Render test
+ "_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate,
+ "_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate,
+ "_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate,
+
+ "404.html", "{{ .Kind }}|{{ .Title }}|Page not found",
+
+ "shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate,
+ "shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate,
+ "shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate,
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/blog/page1/index.html",
+ "This is content with some shortcodes.",
+ "Page with outputs",
+ "Pages: Pages(0)",
+ "RelPermalink: /blog/page1/|",
+ "Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.",
+ "Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.",
+ "Prev: /blog/page10/|Next: /blog/mybundle/",
+ "PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/",
+ "Summary: This is summary.",
+ "CurrentSection: Page(/blog)",
+ )
+
+ b.AssertFileContent("public/blog/page1/index.json",
+ "JSON: Single|page|Page with outputs|",
+ "SON: Shortcode: |sc|0||")
+
+ b.AssertFileContent("public/index.html",
+ "home|In English",
+ "Site params: Rules",
+ "Pages: Pages(6)|Data Pages: Pages(6)",
+ "Paginator: 1",
+ "First Site: In English",
+ "RelPermalink: /",
+ )
+
+ b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/")
+
+ // Check RSS
+ rssHome := b.FileContent("public/index.xml")
+ c.Assert(rssHome, qt.Contains, `<atom:link href="https://example.com/index.xml" rel="self" type="application/rss+xml" />`)
+ c.Assert(strings.Count(rssHome, "<item>"), qt.Equals, 3) // rssLimit = 3
+
+ // .Render should use template/content from the current output format
+ // even if that output format isn't configured for that page.
+ b.AssertFileContent(
+ "public/index.json",
+ "Render 0: page|JSON: LI|false|Params: Rocks!",
+ )
+
+ b.AssertFileContent(
+ "public/index.html",
+ "Render 0: page|HTML: LI|false|Params: Rocks!|",
+ )
+
+ b.AssertFileContent(
+ "public/index.csv",
+ "Render 0: page|CSV: LI|false|Params: Rocks!|",
+ )
+
+ // Check bundled resources
+ b.AssertFileContent(
+ "public/blog/mybundle/index.html",
+ "Resources: 1",
+ )
+
+ // Check pages in root section
+ b.AssertFileContent(
+ "public/root3/index.html",
+ "Single|page|Page with outputs|root3.md|",
+ "Prev: /root4/|Next: /root2/|PrevInSection: /root4/|NextInSection: /root2/",
+ )
+
+ b.AssertFileContent(
+ "public/root3/index.json", "Shortcode 1: JSON:")
+
+ // Paginators
+ b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`)
+ b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2")
+
+ // 404
+ b.AssertFileContent("public/404.html", "404|404 Page not found")
+
+ // Sitemaps
+ b.AssertFileContent("public/en/sitemap.xml", "<loc>https://example.com/blog/</loc>")
+ b.AssertFileContent("public/no/sitemap.xml", `hreflang="no"`)
+
+ b.AssertFileContent("public/sitemap.xml", "<loc>https://example.com/en/sitemap.xml</loc>", "<loc>https://example.com/no/sitemap.xml</loc>")
+
+ // robots.txt
+ b.AssertFileContent("public/robots.txt", `User-agent: *`)
+
+ // Aliases
+ b.AssertFileContent("public/a/b/c/index.html", `refresh`)
+
+ // Markdown vs shortcodes
+ // Check that all footnotes are grouped (even those from inside the shortcode)
+ b.AssertFileContentRe("public/blog/markyshort/index.html", `Footnotes:.*<ol>.*Fn 1.*Fn 2.*Fn 3.*</ol>`)
+}
+
+// https://github.com/golang/go/issues/30286
+func TestDataRace(t *testing.T) {
+ const page = `
+---
+title: "The Page"
+outputs: ["HTML", "JSON"]
+---
+
+The content.
+
+
+ `
+
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+ for i := 1; i <= 50; i++ {
+ b.WithContent(fmt.Sprintf("blog/page%d.md", i), page)
+ }
+
+ b.WithContent("_index.md", `
+---
+title: "The Home"
+outputs: ["HTML", "JSON", "CSV", "RSS"]
+---
+
+The content.
+
+
+`)
+
+ commonTemplate := `{{ .Data.Pages }}`
+
+ b.WithTemplatesAdded("_default/single.html", "HTML Single: "+commonTemplate)
+ b.WithTemplatesAdded("_default/list.html", "HTML List: "+commonTemplate)
+
+ b.CreateSites().Build(BuildCfg{})
+}
+
+// This is just a test to verify that BenchmarkBaseline is working as intended.
+func TestBenchmarkBaseline(t *testing.T) {
+ cfg := IntegrationTestConfig{
+ T: t,
+ TxtarString: benchmarkBaselineFiles(),
+ }
+ b := NewIntegrationTestBuilder(cfg).Build()
+
+ b.Assert(len(b.H.Sites), qt.Equals, 4)
+
+}
+
+func BenchmarkBaseline(b *testing.B) {
+ cfg := IntegrationTestConfig{
+ T: b,
+ TxtarString: benchmarkBaselineFiles(),
+ }
+ builders := make([]*IntegrationTestBuilder, b.N)
+
+ for i := range builders {
+ builders[i] = NewIntegrationTestBuilder(cfg)
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ builders[i].Build()
+ }
+}
+
+func benchmarkBaselineFiles() string {
+ files := `
+-- config.toml --
+baseURL = "https://example.com"
+defaultContentLanguage = 'en'
+
+[module]
+[[module.mounts]]
+source = 'content/en'
+target = 'content/en'
+lang = 'en'
+[[module.mounts]]
+source = 'content/nn'
+target = 'content/nn'
+lang = 'nn'
+[[module.mounts]]
+source = 'content/no'
+target = 'content/no'
+lang = 'no'
+[[module.mounts]]
+source = 'content/sv'
+target = 'content/sv'
+lang = 'sv'
+[[module.mounts]]
+source = 'layouts'
+target = 'layouts'
+
+[languages]
+[languages.en]
+title = "English"
+weight = 1
+[languages.nn]
+title = "Nynorsk"
+weight = 2
+[languages.no]
+title = "Norsk"
+weight = 3
+[languages.sv]
+title = "Svenska"
+weight = 4
+-- layouts/_default/list.html --
+{{ .Title }}
+{{ .Content }}
+-- layouts/_default/single.html --
+{{ .Title }}
+{{ .Content }}
+-- layouts/shortcodes/myshort.html --
+{{ .Inner }}
+`
+
+ contentTemplate := `
+---
+title: "Page %d"
+date: "2018-01-01"
+weight: %d
+---
+
+## Heading 1
+
+Duis nisi reprehenderit nisi cupidatat cillum aliquip ea id eu esse commodo et.
+
+## Heading 2
+
+Aliqua labore enim et sint anim amet excepteur ea dolore.
+
+{{< myshort >}}
+Hello, World!
+{{< /myshort >}}
+
+Aliqua labore enim et sint anim amet excepteur ea dolore.
+
+
+`
+
+ for _, lang := range []string{"en", "nn", "no", "sv"} {
+ files += fmt.Sprintf("\n-- content/%s/_index.md --\n"+contentTemplate, lang, 1, 1, 1)
+ for i, root := range []string{"", "foo", "bar", "baz"} {
+ for j, section := range []string{"posts", "posts/funny", "posts/science", "posts/politics", "posts/world", "posts/technology", "posts/world/news", "posts/world/news/europe"} {
+ n := i + j + 1
+ files += fmt.Sprintf("\n-- content/%s/%s/%s/_index.md --\n"+contentTemplate, lang, root, section, n, n, n)
+ for k := 1; k < rand.Intn(30)+1; k++ {
+ n := n + k
+ files += fmt.Sprintf("\n-- content/%s/%s/%s/p%d.md --\n"+contentTemplate, lang, root, section, n, n, n)
+ }
+ }
+ }
+ }
+
+ return files
+}
diff --git a/hugolib/image_test.go b/hugolib/image_test.go
new file mode 100644
index 000000000..ac18b9423
--- /dev/null
+++ b/hugolib/image_test.go
@@ -0,0 +1,247 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "io"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/htesting"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+// We have many tests for the different resize operations etc. in the resource package,
+// this is an integration test.
+func TestImageOps(t *testing.T) {
+ c := qt.New(t)
+ // Make this a real as possible.
+ workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "image-resize")
+ c.Assert(err, qt.IsNil)
+ defer clean()
+
+ newBuilder := func(timeout any) *sitesBuilder {
+ v := config.NewWithTestDefaults()
+ v.Set("workingDir", workDir)
+ v.Set("baseURL", "https://example.org")
+ v.Set("timeout", timeout)
+
+ b := newTestSitesBuilder(t).WithWorkingDir(workDir)
+ b.Fs = hugofs.NewDefault(v)
+ b.WithViper(v)
+ b.WithContent("mybundle/index.md", `
+---
+title: "My bundle"
+---
+
+{{< imgproc >}}
+
+`)
+
+ b.WithTemplatesAdded(
+ "shortcodes/imgproc.html", `
+{{ $img := resources.Get "images/sunset.jpg" }}
+{{ $r := $img.Resize "129x239" }}
+IMG SHORTCODE: {{ $r.RelPermalink }}/{{ $r.Width }}
+`,
+ "index.html", `
+{{ $p := .Site.GetPage "mybundle" }}
+{{ $img1 := resources.Get "images/sunset.jpg" }}
+{{ $img2 := $p.Resources.GetMatch "sunset.jpg" }}
+{{ $img3 := resources.GetMatch "images/*.jpg" }}
+{{ $r := $img1.Resize "123x234" }}
+{{ $r2 := $r.Resize "12x23" }}
+{{ $b := $img2.Resize "345x678" }}
+{{ $b2 := $b.Resize "34x67" }}
+{{ $c := $img3.Resize "456x789" }}
+{{ $fingerprinted := $img1.Resize "350x" | fingerprint }}
+
+{{ $images := slice $r $r2 $b $b2 $c $fingerprinted }}
+
+{{ range $i, $r := $images }}
+{{ printf "Resized%d:" (add $i 1) }} {{ $r.Name }}|{{ $r.Width }}|{{ $r.Height }}|{{ $r.MediaType }}|{{ $r.RelPermalink }}|
+{{ end }}
+
+{{ $blurryGrayscale1 := $r | images.Filter images.Grayscale (images.GaussianBlur 8) }}
+BG1: {{ $blurryGrayscale1.RelPermalink }}/{{ $blurryGrayscale1.Width }}
+{{ $blurryGrayscale2 := $r.Filter images.Grayscale (images.GaussianBlur 8) }}
+BG2: {{ $blurryGrayscale2.RelPermalink }}/{{ $blurryGrayscale2.Width }}
+{{ $blurryGrayscale2_2 := $r.Filter images.Grayscale (images.GaussianBlur 8) }}
+BG2_2: {{ $blurryGrayscale2_2.RelPermalink }}/{{ $blurryGrayscale2_2.Width }}
+
+{{ $filters := slice images.Grayscale (images.GaussianBlur 9) }}
+{{ $blurryGrayscale3 := $r | images.Filter $filters }}
+BG3: {{ $blurryGrayscale3.RelPermalink }}/{{ $blurryGrayscale3.Width }}
+
+{{ $blurryGrayscale4 := $r.Filter $filters }}
+BG4: {{ $blurryGrayscale4.RelPermalink }}/{{ $blurryGrayscale4.Width }}
+
+{{ $p.Content }}
+
+`)
+
+ return b
+ }
+
+ imageDir := filepath.Join(workDir, "assets", "images")
+ bundleDir := filepath.Join(workDir, "content", "mybundle")
+
+ c.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil)
+ c.Assert(os.MkdirAll(bundleDir, 0777), qt.IsNil)
+ src, err := os.Open("testdata/sunset.jpg")
+ c.Assert(err, qt.IsNil)
+ out, err := os.Create(filepath.Join(imageDir, "sunset.jpg"))
+ c.Assert(err, qt.IsNil)
+ _, err = io.Copy(out, src)
+ c.Assert(err, qt.IsNil)
+ out.Close()
+
+ src.Seek(0, 0)
+
+ out, err = os.Create(filepath.Join(bundleDir, "sunset.jpg"))
+ c.Assert(err, qt.IsNil)
+ _, err = io.Copy(out, src)
+ c.Assert(err, qt.IsNil)
+ out.Close()
+ src.Close()
+
+ // First build it with a very short timeout to trigger errors.
+ b := newBuilder("10ns")
+
+ imgExpect := `
+Resized1: images/sunset.jpg|123|234|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg|
+Resized2: images/sunset.jpg|12|23|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ada4bb1a57f77a63306e3bd67286248e.jpg|
+Resized3: sunset.jpg|345|678|image/jpeg|/mybundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_345x678_resize_q75_box.jpg|
+Resized4: sunset.jpg|34|67|image/jpeg|/mybundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_44d8c928664d7c5a67377c6ec58425ce.jpg|
+Resized5: images/sunset.jpg|456|789|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_456x789_resize_q75_box.jpg|
+Resized6: images/sunset.jpg|350|219|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_350x0_resize_q75_box.a86fe88d894e5db613f6aa8a80538fefc25b20fa24ba0d782c057adcef616f56.jpg|
+BG1: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_2ae8bb993431ec1aec40fe59927b46b4.jpg/123
+BG2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_2ae8bb993431ec1aec40fe59927b46b4.jpg/123
+BG3: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ed7740a90b82802261c2fbdb98bc8082.jpg/123
+BG4: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ed7740a90b82802261c2fbdb98bc8082.jpg/123
+IMG SHORTCODE: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_129x239_resize_q75_box.jpg/129
+`
+
+ assertImages := func() {
+ b.Helper()
+ b.AssertFileContent("public/index.html", imgExpect)
+ b.AssertImage(350, 219, "public/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_350x0_resize_q75_box.a86fe88d894e5db613f6aa8a80538fefc25b20fa24ba0d782c057adcef616f56.jpg")
+ b.AssertImage(129, 239, "public/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_129x239_resize_q75_box.jpg")
+ }
+
+ err = b.BuildE(BuildCfg{})
+ if runtime.GOOS != "windows" && !strings.Contains(runtime.GOARCH, "arm") && !htesting.IsGitHubAction() {
+ // TODO(bep)
+ c.Assert(err, qt.Not(qt.IsNil))
+ }
+
+ b = newBuilder(29000)
+ b.Build(BuildCfg{})
+
+ assertImages()
+
+ // Truncate one image.
+ imgInCache := filepath.Join(workDir, "resources/_gen/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ed7740a90b82802261c2fbdb98bc8082.jpg")
+ f, err := os.Create(imgInCache)
+ c.Assert(err, qt.IsNil)
+ f.Close()
+
+ // Build it again to make sure we read images from file cache.
+ b = newBuilder("30s")
+ b.Build(BuildCfg{})
+
+ assertImages()
+}
+
+func TestImageResizeMultilingual(t *testing.T) {
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+baseURL="https://example.org"
+defaultContentLanguage = "en"
+
+[languages]
+[languages.en]
+title = "Title in English"
+languageName = "English"
+weight = 1
+[languages.nn]
+languageName = "Nynorsk"
+weight = 2
+title = "Tittel på nynorsk"
+[languages.nb]
+languageName = "Bokmål"
+weight = 3
+title = "Tittel på bokmål"
+[languages.fr]
+languageName = "French"
+weight = 4
+title = "French Title"
+
+`)
+
+ pageContent := `---
+title: "Page"
+---
+`
+
+ b.WithContent("bundle/index.md", pageContent)
+ b.WithContent("bundle/index.nn.md", pageContent)
+ b.WithContent("bundle/index.fr.md", pageContent)
+ b.WithSunset("content/bundle/sunset.jpg")
+ b.WithSunset("assets/images/sunset.jpg")
+ b.WithTemplates("index.html", `
+{{ with (.Site.GetPage "bundle" ) }}
+{{ $sunset := .Resources.GetMatch "sunset*" }}
+{{ if $sunset }}
+{{ $resized := $sunset.Resize "200x200" }}
+SUNSET FOR: {{ $.Site.Language.Lang }}: {{ $resized.RelPermalink }}/{{ $resized.Width }}/Lat: {{ $resized.Exif.Lat }}
+{{ end }}
+{{ else }}
+No bundle for {{ $.Site.Language.Lang }}
+{{ end }}
+
+{{ $sunset2 := resources.Get "images/sunset.jpg" }}
+{{ $resized2 := $sunset2.Resize "123x234" }}
+SUNSET2: {{ $resized2.RelPermalink }}/{{ $resized2.Width }}/Lat: {{ $resized2.Exif.Lat }}
+
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "SUNSET FOR: en: /bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667")
+ b.AssertFileContent("public/fr/index.html", "SUNSET FOR: fr: /fr/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg/200/Lat: 36.59744166666667")
+ b.AssertFileContent("public/index.html", " SUNSET2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg/123/Lat: 36.59744166666667")
+ b.AssertFileContent("public/nn/index.html", " SUNSET2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg/123/Lat: 36.59744166666667")
+
+ b.AssertImage(200, 200, "public/fr/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg")
+ b.AssertImage(200, 200, "public/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg")
+
+ // Check the file cache
+ b.AssertImage(200, 200, "resources/_gen/images/bundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_resize_q75_box.jpg")
+
+ b.AssertFileContent("resources/_gen/images/bundle/sunset_3166614710256882113.json",
+ "DateTimeDigitized|time.Time", "PENTAX")
+ b.AssertImage(123, 234, "resources/_gen/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg")
+ b.AssertFileContent("resources/_gen/images/sunset_3166614710256882113.json",
+ "DateTimeDigitized|time.Time", "PENTAX")
+
+ // TODO(bep) add this as a default assertion after Build()?
+ b.AssertNoDuplicateWrites()
+}
diff --git a/hugolib/integrationtest_builder.go b/hugolib/integrationtest_builder.go
new file mode 100644
index 000000000..9dcfe4830
--- /dev/null
+++ b/hugolib/integrationtest_builder.go
@@ -0,0 +1,491 @@
+package hugolib
+
+import (
+ "bytes"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "sync"
+ "testing"
+
+ jww "github.com/spf13/jwalterweatherman"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+ "golang.org/x/tools/txtar"
+)
+
+func NewIntegrationTestBuilder(conf IntegrationTestConfig) *IntegrationTestBuilder {
+ // Code fences.
+ conf.TxtarString = strings.ReplaceAll(conf.TxtarString, "§§§", "```")
+
+ data := txtar.Parse([]byte(conf.TxtarString))
+
+ c, ok := conf.T.(*qt.C)
+ if !ok {
+ c = qt.New(conf.T)
+ }
+
+ if conf.NeedsOsFS {
+ if !filepath.IsAbs(conf.WorkingDir) {
+ tempDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-integration-test")
+ c.Assert(err, qt.IsNil)
+ conf.WorkingDir = filepath.Join(tempDir, conf.WorkingDir)
+ if !conf.PrintAndKeepTempDir {
+ c.Cleanup(clean)
+ } else {
+ fmt.Println("\nUsing WorkingDir dir:", conf.WorkingDir)
+ }
+ }
+ } else if conf.WorkingDir == "" {
+ conf.WorkingDir = helpers.FilePathSeparator
+ }
+
+ return &IntegrationTestBuilder{
+ Cfg: conf,
+ C: c,
+ data: data,
+ }
+}
+
+// IntegrationTestBuilder is a (partial) rewrite of sitesBuilder.
+// The main problem with the "old" one was that it was that the test data was often a little hidden,
+// so it became hard to look at a test and determine what it should do, especially coming back to the
+// test after a year or so.
+type IntegrationTestBuilder struct {
+ *qt.C
+
+ data *txtar.Archive
+
+ fs *hugofs.Fs
+ H *HugoSites
+
+ Cfg IntegrationTestConfig
+
+ changedFiles []string
+ createdFiles []string
+ removedFiles []string
+ renamedFiles []string
+
+ buildCount int
+ counters *testCounters
+ logBuff lockingBuffer
+
+ builderInit sync.Once
+}
+
+type lockingBuffer struct {
+ sync.Mutex
+ bytes.Buffer
+}
+
+func (b *lockingBuffer) Write(p []byte) (n int, err error) {
+ b.Lock()
+ n, err = b.Buffer.Write(p)
+ b.Unlock()
+ return
+}
+
+func (s *IntegrationTestBuilder) AssertLogContains(text string) {
+ s.Helper()
+ s.Assert(s.logBuff.String(), qt.Contains, text)
+}
+
+func (s *IntegrationTestBuilder) AssertLogMatches(expression string) {
+ s.Helper()
+ re := regexp.MustCompile(expression)
+ s.Assert(re.MatchString(s.logBuff.String()), qt.IsTrue, qt.Commentf(s.logBuff.String()))
+}
+
+func (s *IntegrationTestBuilder) AssertBuildCountData(count int) {
+ s.Helper()
+ s.Assert(s.H.init.data.InitCount(), qt.Equals, count)
+}
+
+func (s *IntegrationTestBuilder) AssertBuildCountGitInfo(count int) {
+ s.Helper()
+ s.Assert(s.H.init.gitInfo.InitCount(), qt.Equals, count)
+}
+
+func (s *IntegrationTestBuilder) AssertBuildCountLayouts(count int) {
+ s.Helper()
+ s.Assert(s.H.init.layouts.InitCount(), qt.Equals, count)
+}
+
+func (s *IntegrationTestBuilder) AssertBuildCountTranslations(count int) {
+ s.Helper()
+ s.Assert(s.H.init.translations.InitCount(), qt.Equals, count)
+}
+
+func (s *IntegrationTestBuilder) AssertFileContent(filename string, matches ...string) {
+ s.Helper()
+ content := strings.TrimSpace(s.FileContent(filename))
+ for _, m := range matches {
+ lines := strings.Split(m, "\n")
+ for _, match := range lines {
+ match = strings.TrimSpace(match)
+ if match == "" || strings.HasPrefix(match, "#") {
+ continue
+ }
+ s.Assert(content, qt.Contains, match, qt.Commentf(m))
+ }
+ }
+}
+
+func (s *IntegrationTestBuilder) AssertFileContentExact(filename string, matches ...string) {
+ s.Helper()
+ content := s.FileContent(filename)
+ for _, m := range matches {
+ s.Assert(content, qt.Contains, m, qt.Commentf(m))
+ }
+}
+
+func (s *IntegrationTestBuilder) AssertDestinationExists(filename string, b bool) {
+ checker := qt.IsTrue
+ if !b {
+ checker = qt.IsFalse
+ }
+ s.Assert(s.destinationExists(filepath.Clean(filename)), checker)
+}
+
+func (s *IntegrationTestBuilder) destinationExists(filename string) bool {
+ b, err := helpers.Exists(filename, s.fs.PublishDir)
+ if err != nil {
+ panic(err)
+ }
+ return b
+}
+
+func (s *IntegrationTestBuilder) AssertIsFileError(err error) herrors.FileError {
+ s.Assert(err, qt.ErrorAs, new(herrors.FileError))
+ return herrors.UnwrapFileError(err)
+}
+
+func (s *IntegrationTestBuilder) AssertRenderCountContent(count int) {
+ s.Helper()
+ s.Assert(s.counters.contentRenderCounter, qt.Equals, uint64(count))
+}
+
+func (s *IntegrationTestBuilder) AssertRenderCountPage(count int) {
+ s.Helper()
+ s.Assert(s.counters.pageRenderCounter, qt.Equals, uint64(count))
+}
+
+func (s *IntegrationTestBuilder) Build() *IntegrationTestBuilder {
+ s.Helper()
+ _, err := s.BuildE()
+ if s.Cfg.Verbose || err != nil {
+ fmt.Println(s.logBuff.String())
+ }
+ s.Assert(err, qt.IsNil)
+ return s
+}
+
+func (s *IntegrationTestBuilder) BuildE() (*IntegrationTestBuilder, error) {
+ s.Helper()
+ if err := s.initBuilder(); err != nil {
+ return s, err
+ }
+
+ err := s.build(BuildCfg{})
+ return s, err
+}
+
+type IntegrationTestDebugConfig struct {
+ Out io.Writer
+
+ PrintDestinationFs bool
+ PrintPagemap bool
+
+ PrefixDestinationFs string
+ PrefixPagemap string
+}
+
+func (s *IntegrationTestBuilder) EditFileReplace(filename string, replacementFunc func(s string) string) *IntegrationTestBuilder {
+ absFilename := s.absFilename(filename)
+ b, err := afero.ReadFile(s.fs.Source, absFilename)
+ s.Assert(err, qt.IsNil)
+ s.changedFiles = append(s.changedFiles, absFilename)
+ oldContent := string(b)
+ s.writeSource(absFilename, replacementFunc(oldContent))
+ return s
+}
+
+func (s *IntegrationTestBuilder) EditFiles(filenameContent ...string) *IntegrationTestBuilder {
+ for i := 0; i < len(filenameContent); i += 2 {
+ filename, content := filepath.FromSlash(filenameContent[i]), filenameContent[i+1]
+ absFilename := s.absFilename(filename)
+ s.changedFiles = append(s.changedFiles, absFilename)
+ s.writeSource(absFilename, content)
+ }
+ return s
+}
+
+func (s *IntegrationTestBuilder) AddFiles(filenameContent ...string) *IntegrationTestBuilder {
+ for i := 0; i < len(filenameContent); i += 2 {
+ filename, content := filepath.FromSlash(filenameContent[i]), filenameContent[i+1]
+ absFilename := s.absFilename(filename)
+ s.createdFiles = append(s.createdFiles, absFilename)
+ s.writeSource(absFilename, content)
+ }
+ return s
+}
+
+func (s *IntegrationTestBuilder) RemoveFiles(filenames ...string) *IntegrationTestBuilder {
+ for _, filename := range filenames {
+ absFilename := s.absFilename(filename)
+ s.removedFiles = append(s.removedFiles, absFilename)
+ s.Assert(s.fs.Source.Remove(absFilename), qt.IsNil)
+
+ }
+
+ return s
+}
+
+func (s *IntegrationTestBuilder) RenameFile(old, new string) *IntegrationTestBuilder {
+ absOldFilename := s.absFilename(old)
+ absNewFilename := s.absFilename(new)
+ s.renamedFiles = append(s.renamedFiles, absOldFilename)
+ s.createdFiles = append(s.createdFiles, absNewFilename)
+ s.Assert(s.fs.Source.Rename(absOldFilename, absNewFilename), qt.IsNil)
+ return s
+}
+
+func (s *IntegrationTestBuilder) FileContent(filename string) string {
+ s.Helper()
+ return s.readWorkingDir(s, s.fs, filepath.FromSlash(filename))
+}
+
+func (s *IntegrationTestBuilder) initBuilder() error {
+ var initErr error
+ s.builderInit.Do(func() {
+ var afs afero.Fs
+ if s.Cfg.NeedsOsFS {
+ afs = afero.NewOsFs()
+ } else {
+ afs = afero.NewMemMapFs()
+ }
+
+ if s.Cfg.LogLevel == 0 {
+ s.Cfg.LogLevel = jww.LevelWarn
+ }
+
+ logger := loggers.NewBasicLoggerForWriter(s.Cfg.LogLevel, &s.logBuff)
+
+ isBinaryRe := regexp.MustCompile(`^(.*)(\.png|\.jpg)$`)
+
+ for _, f := range s.data.Files {
+ filename := filepath.Join(s.Cfg.WorkingDir, f.Name)
+ data := bytes.TrimSuffix(f.Data, []byte("\n"))
+ if isBinaryRe.MatchString(filename) {
+ var err error
+ data, err = base64.StdEncoding.DecodeString(string(data))
+ s.Assert(err, qt.IsNil)
+
+ }
+ s.Assert(afs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
+ s.Assert(afero.WriteFile(afs, filename, data, 0666), qt.IsNil)
+ }
+
+ cfg, _, err := LoadConfig(
+ ConfigSourceDescriptor{
+ WorkingDir: s.Cfg.WorkingDir,
+ Fs: afs,
+ Logger: logger,
+ Environ: []string{},
+ Filename: "config.toml",
+ },
+ func(cfg config.Provider) error {
+ return nil
+ },
+ )
+
+ s.Assert(err, qt.IsNil)
+
+ cfg.Set("workingDir", s.Cfg.WorkingDir)
+
+ fs := hugofs.NewFrom(afs, cfg)
+
+ s.Assert(err, qt.IsNil)
+
+ depsCfg := deps.DepsCfg{Cfg: cfg, Fs: fs, Running: s.Cfg.Running, Logger: logger}
+ sites, err := NewHugoSites(depsCfg)
+ if err != nil {
+ initErr = err
+ return
+ }
+
+ s.H = sites
+ s.fs = fs
+
+ if s.Cfg.NeedsNpmInstall {
+ wd, _ := os.Getwd()
+ s.Assert(os.Chdir(s.Cfg.WorkingDir), qt.IsNil)
+ s.C.Cleanup(func() { os.Chdir(wd) })
+ sc := security.DefaultConfig
+ sc.Exec.Allow = security.NewWhitelist("npm")
+ ex := hexec.New(sc)
+ command, err := ex.New("npm", "install")
+ s.Assert(err, qt.IsNil)
+ s.Assert(command.Run(), qt.IsNil)
+
+ }
+ })
+
+ return initErr
+}
+
+func (s *IntegrationTestBuilder) absFilename(filename string) string {
+ filename = filepath.FromSlash(filename)
+ if filepath.IsAbs(filename) {
+ return filename
+ }
+ if s.Cfg.WorkingDir != "" && !strings.HasPrefix(filename, s.Cfg.WorkingDir) {
+ filename = filepath.Join(s.Cfg.WorkingDir, filename)
+ }
+ return filename
+}
+
+func (s *IntegrationTestBuilder) build(cfg BuildCfg) error {
+ s.Helper()
+ defer func() {
+ s.changedFiles = nil
+ s.createdFiles = nil
+ s.removedFiles = nil
+ s.renamedFiles = nil
+ }()
+
+ changeEvents := s.changeEvents()
+ s.logBuff.Reset()
+ s.counters = &testCounters{}
+ cfg.testCounters = s.counters
+
+ if s.buildCount > 0 && (len(changeEvents) == 0) {
+ return nil
+ }
+
+ s.buildCount++
+
+ err := s.H.Build(cfg, changeEvents...)
+ if err != nil {
+ return err
+ }
+ logErrorCount := s.H.NumLogErrors()
+ if logErrorCount > 0 {
+ return fmt.Errorf("logged %d error(s): %s", logErrorCount, s.logBuff.String())
+ }
+
+ return nil
+}
+
+func (s *IntegrationTestBuilder) changeEvents() []fsnotify.Event {
+ var events []fsnotify.Event
+ for _, v := range s.removedFiles {
+ events = append(events, fsnotify.Event{
+ Name: v,
+ Op: fsnotify.Remove,
+ })
+ }
+ for _, v := range s.renamedFiles {
+ events = append(events, fsnotify.Event{
+ Name: v,
+ Op: fsnotify.Rename,
+ })
+ }
+ for _, v := range s.changedFiles {
+ events = append(events, fsnotify.Event{
+ Name: v,
+ Op: fsnotify.Write,
+ })
+ }
+ for _, v := range s.createdFiles {
+ events = append(events, fsnotify.Event{
+ Name: v,
+ Op: fsnotify.Create,
+ })
+ }
+
+ return events
+}
+
+func (s *IntegrationTestBuilder) readWorkingDir(t testing.TB, fs *hugofs.Fs, filename string) string {
+ t.Helper()
+ return s.readFileFromFs(t, fs.WorkingDirReadOnly, filename)
+}
+
+func (s *IntegrationTestBuilder) readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
+ t.Helper()
+ filename = filepath.Clean(filename)
+ b, err := afero.ReadFile(fs, filename)
+ if err != nil {
+ // Print some debug info
+ hadSlash := strings.HasPrefix(filename, helpers.FilePathSeparator)
+ start := 0
+ if hadSlash {
+ start = 1
+ }
+ end := start + 1
+
+ parts := strings.Split(filename, helpers.FilePathSeparator)
+ if parts[start] == "work" {
+ end++
+ }
+
+ s.Assert(err, qt.IsNil)
+
+ }
+ return string(b)
+}
+
+func (s *IntegrationTestBuilder) writeSource(filename, content string) {
+ s.Helper()
+ s.writeToFs(s.fs.Source, filename, content)
+}
+
+func (s *IntegrationTestBuilder) writeToFs(fs afero.Fs, filename, content string) {
+ s.Helper()
+ if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
+ s.Fatalf("Failed to write file: %s", err)
+ }
+}
+
+type IntegrationTestConfig struct {
+ T testing.TB
+
+ // The files to use on txtar format, see
+ // https://pkg.go.dev/golang.org/x/exp/cmd/txtar
+ TxtarString string
+
+ // Whether to simulate server mode.
+ Running bool
+
+ // Will print the log buffer after the build
+ Verbose bool
+
+ LogLevel jww.Threshold
+
+ // Whether it needs the real file system (e.g. for js.Build tests).
+ NeedsOsFS bool
+
+ // Do not remove the temp dir after the test.
+ PrintAndKeepTempDir bool
+
+ // Whether to run npm install before Build.
+ NeedsNpmInstall bool
+
+ WorkingDir string
+}
diff --git a/hugolib/language_content_dir_test.go b/hugolib/language_content_dir_test.go
new file mode 100644
index 000000000..57cdab67b
--- /dev/null
+++ b/hugolib/language_content_dir_test.go
@@ -0,0 +1,526 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+)
+
+/*
+
+/en/p1.md
+/nn/p1.md
+
+.Readdir
+
+- Name() => p1.en.md, p1.nn.md
+
+.Stat(name)
+
+.Open() --- real file name
+
+
+*/
+
+// TestLanguageContentRoot builds a three-language site where each language
+// has its own content directory (with some cross-language files deliberately
+// mixed in) and verifies per-language page assignment, translation lookup,
+// bundles and their resources, cross-language ref/relref, and static files
+// placed inside the content directories.
+func TestLanguageContentRoot(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	config := `
+baseURL = "https://example.org/"
+
+defaultContentLanguage = "en"
+defaultContentLanguageInSubdir = true
+
+contentDir = "content/main"
+workingDir = "/my/project"
+
+[Languages]
+[Languages.en]
+weight = 10
+title = "In English"
+languageName = "English"
+
+[Languages.nn]
+weight = 20
+title = "På Norsk"
+languageName = "Norsk"
+# This tells Hugo that all content in this directory is in the Norwegian language.
+# It does not have to have the "my-page.nn.md" format. It can, but that is optional.
+contentDir = "content/norsk"
+
+[Languages.sv]
+weight = 30
+title = "På Svenska"
+languageName = "Svensk"
+contentDir = "content/svensk"
+`
+
+	pageTemplate := `
+---
+title: %s
+slug: %s
+weight: %d
+---
+
+Content.
+
+SVP3-REF: {{< ref path="/sect/page3.md" lang="sv" >}}
+SVP3-RELREF: {{< relref path="/sect/page3.md" lang="sv" >}}
+
+`
+
+	pageBundleTemplate := `
+---
+title: %s
+weight: %d
+---
+
+Content.
+
+`
+	var contentFiles []string
+	section := "sect"
+
+	// contentRoot maps a language to its configured content directory.
+	contentRoot := func(lang string) string {
+		switch lang {
+		case "nn":
+			return "content/norsk"
+		case "sv":
+			return "content/svensk"
+		default:
+			return "content/main"
+		}
+	}
+
+	contentSectionRoot := func(lang string) string {
+		return contentRoot(lang) + "/" + section
+	}
+
+	// Create up to 10 pages per language, skipping some indexes so each
+	// language ends up with a different page set.
+	for _, lang := range []string{"en", "nn", "sv"} {
+		for j := 1; j <= 10; j++ {
+			if (lang == "nn" || lang == "en") && j%4 == 0 {
+				// Skip 4 and 8 for nn
+				// We also skip it for en, but that is added to the Swedish directory below.
+				continue
+			}
+
+			if lang == "sv" && j%5 == 0 {
+				// Skip 5 and 10 for sv
+				continue
+			}
+
+			base := fmt.Sprintf("p-%s-%d", lang, j)
+			slug := base
+			langID := ""
+
+			if lang == "sv" && j%4 == 0 {
+				// Put an English page in the Swedish content dir.
+				langID = ".en"
+			}
+
+			if lang == "en" && j == 8 {
+				// This should win over the sv variant above.
+				langID = ".en"
+			}
+
+			slug += langID
+
+			contentRoot := contentSectionRoot(lang)
+
+			filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID))
+			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j))
+		}
+	}
+
+	// Put common translations in all of them
+	for i, lang := range []string{"en", "nn", "sv"} {
+		contentRoot := contentSectionRoot(lang)
+
+		slug := fmt.Sprintf("common_%s", lang)
+
+		filename := filepath.Join(contentRoot, "common.md")
+		contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, 100+i))
+
+		// A page translated into every language.
+		for j, lang2 := range []string{"en", "nn", "sv"} {
+			filename := filepath.Join(contentRoot, fmt.Sprintf("translated_all.%s.md", lang2))
+			langSlug := slug + "_translated_all_" + lang2
+			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 200+i+j))
+		}
+
+		// A page translated into only some languages (not en).
+		for j, lang2 := range []string{"sv", "nn"} {
+			if lang == "en" {
+				continue
+			}
+			filename := filepath.Join(contentRoot, fmt.Sprintf("translated_some.%s.md", lang2))
+			langSlug := slug + "_translated_some_" + lang2
+			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 300+i+j))
+		}
+	}
+
+	// Add a bundle with some images
+	for i, lang := range []string{"en", "nn", "sv"} {
+		contentRoot := contentSectionRoot(lang)
+		slug := fmt.Sprintf("bundle_%s", lang)
+		filename := filepath.Join(contentRoot, "mybundle", "index.md")
+		contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i))
+		if lang == "en" {
+			// Only the English bundle has a logo; translations are expected
+			// to inherit it (asserted below).
+			imageFilename := filepath.Join(contentRoot, "mybundle", "logo.png")
+			contentFiles = append(contentFiles, imageFilename, "PNG Data")
+		}
+		imageFilename := filepath.Join(contentRoot, "mybundle", "featured.png")
+		contentFiles = append(contentFiles, imageFilename, fmt.Sprintf("PNG Data for %s", lang))
+
+		// Add some bundled pages
+		contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 401+i))
+		contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "sub", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 402+i))
+
+	}
+
+	// Add some static files inside the content dir
+	// https://github.com/gohugoio/hugo/issues/5759
+	for _, lang := range []string{"en", "nn", "sv"} {
+		contentRoot := contentRoot(lang)
+		for i := 0; i < 2; i++ {
+			filename := filepath.Join(contentRoot, "mystatic", fmt.Sprintf("file%d.yaml", i))
+			contentFiles = append(contentFiles, filename, lang)
+		}
+	}
+
+	b := newTestSitesBuilder(t)
+	b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites()
+
+	// Keeps the os import alive for the commented-out debug helpers below.
+	_ = os.Stdout
+
+	err := b.BuildE(BuildCfg{})
+
+	// dumpPages(b.H.Sites[1].RegularPages()...)
+
+	c.Assert(err, qt.IsNil)
+
+	c.Assert(len(b.H.Sites), qt.Equals, 3)
+
+	// Sites are ordered by language weight: en, nn, sv.
+	enSite := b.H.Sites[0]
+	nnSite := b.H.Sites[1]
+	svSite := b.H.Sites[2]
+
+	b.AssertFileContent("public/en/mystatic/file1.yaml", "en")
+	b.AssertFileContent("public/nn/mystatic/file1.yaml", "nn")
+
+	// dumpPages(nnSite.RegularPages()...)
+
+	c.Assert(len(nnSite.RegularPages()), qt.Equals, 12)
+	c.Assert(len(enSite.RegularPages()), qt.Equals, 13)
+
+	c.Assert(len(svSite.RegularPages()), qt.Equals, 10)
+
+	svP2, err := svSite.getPageNew(nil, "/sect/page2.md")
+	c.Assert(err, qt.IsNil)
+	nnP2, err := nnSite.getPageNew(nil, "/sect/page2.md")
+	c.Assert(err, qt.IsNil)
+
+	enP2, err := enSite.getPageNew(nil, "/sect/page2.md")
+	c.Assert(err, qt.IsNil)
+	c.Assert(enP2.Language().Lang, qt.Equals, "en")
+	c.Assert(svP2.Language().Lang, qt.Equals, "sv")
+	c.Assert(nnP2.Language().Lang, qt.Equals, "nn")
+
+	// The ref/relref shortcodes in the page body must resolve to the Swedish page.
+	content, _ := nnP2.Content()
+	contentStr := cast.ToString(content)
+	c.Assert(contentStr, qt.Contains, "SVP3-REF: https://example.org/sv/sect/p-sv-3/")
+	c.Assert(contentStr, qt.Contains, "SVP3-RELREF: /sv/sect/p-sv-3/")
+
+	// Test RelRef with and without language indicator.
+	nn3RefArgs := map[string]any{
+		"path": "/sect/page3.md",
+		"lang": "nn",
+	}
+	nnP3RelRef, err := svP2.RelRef(
+		nn3RefArgs,
+	)
+	c.Assert(err, qt.IsNil)
+	c.Assert(nnP3RelRef, qt.Equals, "/nn/sect/p-nn-3/")
+	nnP3Ref, err := svP2.Ref(
+		nn3RefArgs,
+	)
+	c.Assert(err, qt.IsNil)
+	c.Assert(nnP3Ref, qt.Equals, "https://example.org/nn/sect/p-nn-3/")
+
+	for i, p := range enSite.RegularPages() {
+		j := i + 1
+		c.Assert(p.Language().Lang, qt.Equals, "en")
+		c.Assert(p.Section(), qt.Equals, "sect")
+		if j < 9 {
+			if j%4 == 0 {
+				// Pages 4 and 8 come from the Swedish dir and have no p-en title.
+			} else {
+				c.Assert(p.Title(), qt.Contains, "p-en")
+			}
+		}
+	}
+
+	for _, p := range nnSite.RegularPages() {
+		c.Assert(p.Language().Lang, qt.Equals, "nn")
+		c.Assert(p.Title(), qt.Contains, "nn")
+	}
+
+	for _, p := range svSite.RegularPages() {
+		c.Assert(p.Language().Lang, qt.Equals, "sv")
+		c.Assert(p.Title(), qt.Contains, "sv")
+	}
+
+	// Check bundles
+	bundleEn := enSite.RegularPages()[len(enSite.RegularPages())-1]
+	bundleNn := nnSite.RegularPages()[len(nnSite.RegularPages())-1]
+	bundleSv := svSite.RegularPages()[len(svSite.RegularPages())-1]
+
+	c.Assert(bundleEn.RelPermalink(), qt.Equals, "/en/sect/mybundle/")
+	c.Assert(bundleSv.RelPermalink(), qt.Equals, "/sv/sect/mybundle/")
+
+	c.Assert(len(bundleNn.Resources()), qt.Equals, 4)
+	c.Assert(len(bundleSv.Resources()), qt.Equals, 4)
+	c.Assert(len(bundleEn.Resources()), qt.Equals, 4)
+
+	// The English-only logo.png is shared with the nn/sv bundles.
+	b.AssertFileContent("public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png")
+	b.AssertFileContent("public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png")
+	b.AssertFileContent("public/sv/sect/mybundle/index.html", "image/png: /sv/sect/mybundle/logo.png")
+
+	b.AssertFileContent("public/sv/sect/mybundle/featured.png", "PNG Data for sv")
+	b.AssertFileContent("public/nn/sect/mybundle/featured.png", "PNG Data for nn")
+	b.AssertFileContent("public/en/sect/mybundle/featured.png", "PNG Data for en")
+	b.AssertFileContent("public/en/sect/mybundle/logo.png", "PNG Data")
+	b.AssertFileContent("public/sv/sect/mybundle/logo.png", "PNG Data")
+	b.AssertFileContent("public/nn/sect/mybundle/logo.png", "PNG Data")
+
+	nnSect := nnSite.getPage(page.KindSection, "sect")
+	c.Assert(nnSect, qt.Not(qt.IsNil))
+	c.Assert(len(nnSect.Pages()), qt.Equals, 12)
+	nnHome := nnSite.Info.Home()
+	c.Assert(nnHome.RelPermalink(), qt.Equals, "/nn/")
+}
+
+// TestLanguageRootSectionsMismatch verifies that when languages have
+// different root sections (fr has "other" instead of "blog"), each
+// language's home page lists only its own pages.
+// https://github.com/gohugoio/hugo/issues/6463
+func TestLanguageRootSectionsMismatch(t *testing.T) {
+	t.Parallel()
+
+	config := `
+baseURL: "https://example.org/"
+languageCode: "en-us"
+title: "My New Hugo Site"
+theme: "mytheme"
+
+contentDir: "content/en"
+
+languages:
+  en:
+    weight: 1
+    languageName: "English"
+    contentDir: content/en
+  es:
+    weight: 2
+    languageName: "Español"
+    contentDir: content/es
+  fr:
+    weight: 4
+    languageName: "Française"
+    contentDir: content/fr
+
+
+`
+	// createPage returns minimal front matter with the given title.
+	createPage := func(title string) string {
+		return fmt.Sprintf(`---
+title: %q
+---
+
+`, title)
+	}
+
+	b := newTestSitesBuilder(t)
+	b.WithConfigFile("yaml", config)
+
+	b.WithSourceFile("themes/mytheme/layouts/index.html", `MYTHEME`)
+	b.WithTemplates("index.html", `
+Lang: {{ .Lang }}
+{{ range .Site.RegularPages }}
+Page: {{ .RelPermalink }}|{{ .Title -}}
+{{ end }}
+
+`)
+	b.WithSourceFile("static/hello.txt", `hello`)
+	b.WithContent("en/_index.md", createPage("en home"))
+	b.WithContent("es/_index.md", createPage("es home"))
+	b.WithContent("fr/_index.md", createPage("fr home"))
+
+	// en and es share blog/event sections; fr has event/other instead.
+	for i := 1; i < 3; i++ {
+		b.WithContent(fmt.Sprintf("en/event/page%d.md", i), createPage(fmt.Sprintf("ev-en%d", i)))
+		b.WithContent(fmt.Sprintf("es/event/page%d.md", i), createPage(fmt.Sprintf("ev-es%d", i)))
+		b.WithContent(fmt.Sprintf("fr/event/page%d.md", i), createPage(fmt.Sprintf("ev-fr%d", i)))
+		b.WithContent(fmt.Sprintf("en/blog/page%d.md", i), createPage(fmt.Sprintf("blog-en%d", i)))
+		b.WithContent(fmt.Sprintf("es/blog/page%d.md", i), createPage(fmt.Sprintf("blog-es%d", i)))
+		b.WithContent(fmt.Sprintf("fr/other/page%d.md", i), createPage(fmt.Sprintf("other-fr%d", i)))
+	}
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html", `
+Lang: en
+Page: /blog/page1/|blog-en1
+Page: /blog/page2/|blog-en2
+Page: /event/page1/|ev-en1
+Page: /event/page2/|ev-en2
+`)
+
+	b.AssertFileContent("public/es/index.html", `
+Lang: es
+Page: /es/blog/page1/|blog-es1
+Page: /es/blog/page2/|blog-es2
+Page: /es/event/page1/|ev-es1
+Page: /es/event/page2/|ev-es2
+`)
+	b.AssertFileContent("public/fr/index.html", `
+Lang: fr
+Page: /fr/event/page1/|ev-fr1
+Page: /fr/event/page2/|ev-fr2
+Page: /fr/other/page1/|other-fr1
+Page: /fr/other/page2/|other-fr2`)
+}
+
+// TestContentMountMerge verifies that multiple content mounts targeting the
+// same language merge correctly: later mounts of the English content fill in
+// gaps in the de/nl content, and a theme mount contributes pages without
+// overriding project content.
+// Issue 9693
+func TestContentMountMerge(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+baseURL = 'https://example.org/'
+languageCode = 'en-us'
+title = 'Hugo Forum Topic #37225'
+theme = 'mytheme'
+
+disableKinds = ['sitemap','RSS','taxonomy','term']
+defaultContentLanguage = 'en'
+defaultContentLanguageInSubdir = true
+
+[languages.en]
+languageName = 'English'
+weight = 1
+[languages.de]
+languageName = 'Deutsch'
+weight = 2
+[languages.nl]
+languageName = 'Nederlands'
+weight = 3
+
+# EN content
+[[module.mounts]]
+source = 'content/en'
+target = 'content'
+lang = 'en'
+
+# DE content
+[[module.mounts]]
+source = 'content/de'
+target = 'content'
+lang = 'de'
+
+# This fills in the gaps in DE content with EN content
+[[module.mounts]]
+source = 'content/en'
+target = 'content'
+lang = 'de'
+
+# NL content
+[[module.mounts]]
+source = 'content/nl'
+target = 'content'
+lang = 'nl'
+
+# This should fill in the gaps in NL content with EN content
+[[module.mounts]]
+source = 'content/en'
+target = 'content'
+lang = 'nl'
+
+-- content/de/_index.md --
+---
+title: "home (de)"
+---
+-- content/de/p1.md --
+---
+title: "p1 (de)"
+---
+-- content/en/_index.md --
+---
+title: "home (en)"
+---
+-- content/en/p1.md --
+---
+title: "p1 (en)"
+---
+-- content/en/p2.md --
+---
+title: "p2 (en)"
+---
+-- content/en/p3.md --
+---
+title: "p3 (en)"
+---
+-- content/nl/_index.md --
+---
+title: "home (nl)"
+---
+-- content/nl/p1.md --
+---
+title: "p1 (nl)"
+---
+-- content/nl/p3.md --
+---
+title: "p3 (nl)"
+---
+-- layouts/home.html --
+{{ .Title }}: {{ site.Language.Lang }}: {{ range site.RegularPages }}{{ .Title }}|{{ end }}:END
+-- themes/mytheme/config.toml --
+[[module.mounts]]
+source = 'content/nlt'
+target = 'content'
+lang = 'nl'
+-- themes/mytheme/content/nlt/p3.md --
+---
+title: "p3 theme (nl)"
+---
+-- themes/mytheme/content/nlt/p4.md --
+---
+title: "p4 theme (nl)"
+---
+`
+
+	b := NewIntegrationTestBuilder(
+		IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+		},
+	).Build()
+
+	// nl: p1/p3 from nl, p2 from en fallback, p4 from the theme mount.
+	b.AssertFileContent("public/nl/index.html", `home (nl): nl: p1 (nl)|p2 (en)|p3 (nl)|p4 theme (nl)|:END`)
+	b.AssertFileContent("public/de/index.html", `home (de): de: p1 (de)|p2 (en)|p3 (en)|:END`)
+	b.AssertFileContent("public/en/index.html", `home (en): en: p1 (en)|p2 (en)|p3 (en)|:END`)
+
+}
diff --git a/hugolib/language_test.go b/hugolib/language_test.go
new file mode 100644
index 000000000..74f80a29d
--- /dev/null
+++ b/hugolib/language_test.go
@@ -0,0 +1,139 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestI18n exercises i18n translation lookup.
+func TestI18n(t *testing.T) {
+	c := qt.New(t)
+
+	// https://github.com/gohugoio/hugo/issues/7804
+	c.Run("pt-br should be case insensitive", func(c *qt.C) {
+		b := newTestSitesBuilder(c)
+		// langCode randomly returns "pt-br" or "PT-BR" so the test covers
+		// both casings across runs.
+		langCode := func() string {
+			c := "pt-br"
+			if htesting.RandBool() {
+				c = strings.ToUpper(c)
+			}
+			return c
+		}
+
+		b.WithConfigFile(`toml`, fmt.Sprintf(`
+baseURL = "https://example.com"
+defaultContentLanguage = "%s"
+
+[languages]
+[languages.%s]
+weight = 1
+`, langCode(), langCode()))
+
+		b.WithI18n(fmt.Sprintf("i18n/%s.toml", langCode()), `hello.one = "Hello"`)
+		b.WithTemplates("index.html", `Hello: {{ i18n "hello" 1 }}`)
+		b.WithContent("p1.md", "")
+		b.Build(BuildCfg{})
+
+		b.AssertFileContent("public/index.html", "Hello: Hello")
+	})
+}
+
+// TestLanguageBugs collects regression tests for language configuration bugs.
+func TestLanguageBugs(t *testing.T) {
+	c := qt.New(t)
+
+	// Issue #8672
+	c.Run("Config with language, menu in root only", func(c *qt.C) {
+		b := newTestSitesBuilder(c)
+		b.WithConfigFile("toml", `
+theme = "test-theme"
+[[menus.foo]]
+name = "foo-a"
+[languages.en]
+
+`,
+		)
+
+		b.WithThemeConfigFile("toml", `[languages.en]`)
+
+		b.Build(BuildCfg{})
+
+		// The root-level menu must survive the language config merge.
+		menus := b.H.Sites[0].Menus()
+		c.Assert(menus, qt.HasLen, 1)
+
+	})
+}
+
+// TestLanguageNumberFormatting verifies that the lang.Format* template funcs
+// format numbers, percentages and currencies per the site language (en vs nn).
+// NOTE(review): unlike most sibling tests this one does not call t.Parallel();
+// confirm whether that is intentional.
+func TestLanguageNumberFormatting(t *testing.T) {
+
+	b := newTestSitesBuilder(t)
+	b.WithConfigFile("toml", `
+baseURL = "https://example.org"
+
+defaultContentLanguage = "en"
+defaultContentLanguageInSubDir = true
+
+[languages]
+[languages.en]
+timeZone="UTC"
+weight=10
+[languages.nn]
+weight=20
+
+`)
+
+	b.WithTemplates("index.html", `
+
+FormatNumber: {{ 512.5032 | lang.FormatNumber 2 }}
+FormatPercent: {{ 512.5032 | lang.FormatPercent 2 }}
+FormatCurrency: {{ 512.5032 | lang.FormatCurrency 2 "USD" }}
+FormatAccounting: {{ 512.5032 | lang.FormatAccounting 2 "NOK" }}
+FormatNumberCustom: {{ lang.FormatNumberCustom 2 12345.6789 }}
+
+# We renamed this to FormatNumberCustom in 0.87.0.
+NumFmt: {{ -98765.4321 | lang.NumFmt 2 }}
+
+
+`)
+	b.WithContent("p1.md", "")
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/en/index.html", `
+FormatNumber: 512.50
+FormatPercent: 512.50%
+FormatCurrency: $512.50
+FormatAccounting: NOK512.50
+FormatNumberCustom: 12,345.68
+
+NumFmt: -98,765.43
+`,
+	)
+
+	b.AssertFileContent("public/nn/index.html", `
+FormatNumber: 512,50
+FormatPercent: 512,50 %
+FormatCurrency: 512,50 USD
+FormatAccounting: 512,50 kr
+FormatNumberCustom: 12,345.68
+
+# We renamed this to FormatNumberCustom in 0.87.0.
+NumFmt: -98,765.43
+`)
+}
diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go
new file mode 100644
index 000000000..4237082af
--- /dev/null
+++ b/hugolib/menu_test.go
@@ -0,0 +1,590 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const (
+	// menuPageTemplate is a front-matter template for menu tests. The format
+	// verbs are: page title, page weight, menu name, menu entry title and
+	// menu entry weight.
+	menuPageTemplate = `---
+title: %q
+weight: %d
+menu:
+  %s:
+    title: %s
+    weight: %d
+---
+# Doc Menu
+`
+)
+
+// TestMenusSectionPagesMenu verifies sectionPagesMenu: section pages are
+// added to the configured menu, and IsMenuCurrent/HasMenuCurrent report the
+// active entry per page.
+func TestMenusSectionPagesMenu(t *testing.T) {
+	t.Parallel()
+
+	siteConfig := `
+baseurl = "http://example.com/"
+title = "Section Menu"
+sectionPagesMenu = "sect"
+`
+
+	b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
+
+	b.WithTemplates(
+		"partials/menu.html",
+		`{{- $p := .page -}}
+{{- $m := .menu -}}
+{{ range (index $p.Site.Menus $m) -}}
+{{- .URL }}|{{ .Name }}|{{ .Title }}|{{ .Weight -}}|
+{{- if $p.IsMenuCurrent $m . }}IsMenuCurrent{{ else }}-{{ end -}}|
+{{- if $p.HasMenuCurrent $m . }}HasMenuCurrent{{ else }}-{{ end -}}|
+{{- end -}}
+`,
+		"_default/single.html",
+		`Single|{{ .Title }}
+Menu Sect:  {{ partial "menu.html" (dict "page" . "menu" "sect") }}
+Menu Main:  {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
+		"_default/list.html", "List|{{ .Title }}|{{ .Content }}",
+	)
+
+	b.WithContent(
+		"sect1/p1.md", fmt.Sprintf(menuPageTemplate, "p1", 1, "main", "atitle1", 40),
+		"sect1/p2.md", fmt.Sprintf(menuPageTemplate, "p2", 2, "main", "atitle2", 30),
+		"sect2/p3.md", fmt.Sprintf(menuPageTemplate, "p3", 3, "main", "atitle3", 20),
+		"sect2/p4.md", fmt.Sprintf(menuPageTemplate, "p4", 4, "main", "atitle4", 10),
+		"sect3/p5.md", fmt.Sprintf(menuPageTemplate, "p5", 5, "main", "atitle5", 5),
+		"sect1/_index.md", newTestPage("Section One", "2017-01-01", 100),
+		"sect5/_index.md", newTestPage("Section Five", "2017-01-01", 10),
+	)
+
+	b.Build(BuildCfg{})
+	h := b.H
+
+	s := h.Sites[0]
+
+	b.Assert(len(s.Menus()), qt.Equals, 2)
+
+	p1 := s.RegularPages()[0].Menus()
+
+	// There is only one menu in the page, but it is "member of" 2
+	b.Assert(len(p1), qt.Equals, 1)
+
+	b.AssertFileContent("public/sect1/p1/index.html", "Single",
+		"Menu Sect:  "+
+			"/sect5/|Section Five|Section Five|10|-|-|"+
+			"/sect1/|Section One|Section One|100|-|HasMenuCurrent|"+
+			"/sect2/|Sect2s|Sect2s|0|-|-|"+
+			"/sect3/|Sect3s|Sect3s|0|-|-|",
+		"Menu Main:  "+
+			"/sect3/p5/|p5|atitle5|5|-|-|"+
+			"/sect2/p4/|p4|atitle4|10|-|-|"+
+			"/sect2/p3/|p3|atitle3|20|-|-|"+
+			"/sect1/p2/|p2|atitle2|30|-|-|"+
+			"/sect1/p1/|p1|atitle1|40|IsMenuCurrent|-|",
+	)
+
+	b.AssertFileContent("public/sect2/p3/index.html", "Single",
+		"Menu Sect:  "+
+			"/sect5/|Section Five|Section Five|10|-|-|"+
+			"/sect1/|Section One|Section One|100|-|-|"+
+			"/sect2/|Sect2s|Sect2s|0|-|HasMenuCurrent|"+
+			"/sect3/|Sect3s|Sect3s|0|-|-|")
+}
+
+// TestMenusSort verifies that menu ordering (default, ByWeight, Reverse) is
+// stable across repeated range invocations in the same template.
+// related issue #7594
+func TestMenusSort(t *testing.T) {
+	b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+	b.WithTemplatesAdded("index.html", `
+{{ range $k, $v := .Site.Menus.main }}
+Default1|{{ $k }}|{{ $v.Weight }}|{{ $v.Name }}|{{ .URL }}|{{ $v.Page }}{{ end }}
+{{ range $k, $v := .Site.Menus.main.ByWeight }}
+ByWeight|{{ $k }}|{{ $v.Weight }}|{{ $v.Name }}|{{ .URL }}|{{ $v.Page }}{{ end }}
+{{ range $k, $v := (.Site.Menus.main.ByWeight).Reverse }}
+Reverse|{{ $k }}|{{ $v.Weight }}|{{ $v.Name }}|{{ .URL }}|{{ $v.Page }}{{ end }}
+{{ range $k, $v := .Site.Menus.main }}
+Default2|{{ $k }}|{{ $v.Weight }}|{{ $v.Name }}|{{ .URL }}|{{ $v.Page }}{{ end }}
+{{ range $k, $v := .Site.Menus.main.ByWeight }}
+ByWeight|{{ $k }}|{{ $v.Weight }}|{{ $v.Name }}|{{ .URL }}|{{ $v.Page }}{{ end }}
+{{ range $k, $v := .Site.Menus.main }}
+Default3|{{ $k }}|{{ $v.Weight }}|{{ $v.Name }}|{{ .URL }}|{{ $v.Page }}{{ end }}
+`)
+
+	b.WithContent("_index.md", `
+---
+title: Home
+menu:
+  main:
+    weight: 100
+---`)
+
+	b.WithContent("blog/A.md", `
+---
+title: "A"
+menu:
+  main:
+    weight: 10
+---
+`)
+
+	b.WithContent("blog/B.md", `
+---
+title: "B"
+menu:
+  main:
+    weight: 20
+---
+`)
+	b.WithContent("blog/C.md", `
+---
+title: "C"
+menu:
+  main:
+    weight: 30
+---
+`)
+
+	b.Build(BuildCfg{})
+
+	// Every pass over the menu must yield the same (weight-sorted) order.
+	b.AssertFileContent("public/index.html",
+		`Default1|0|10|A|/blog/a/|Page(/blog/A.md)
+        Default1|1|20|B|/blog/b/|Page(/blog/B.md)
+        Default1|2|30|C|/blog/c/|Page(/blog/C.md)
+        Default1|3|100|Home|/|Page(/_index.md)
+
+        ByWeight|0|10|A|/blog/a/|Page(/blog/A.md)
+        ByWeight|1|20|B|/blog/b/|Page(/blog/B.md)
+        ByWeight|2|30|C|/blog/c/|Page(/blog/C.md)
+        ByWeight|3|100|Home|/|Page(/_index.md)
+
+        Reverse|0|100|Home|/|Page(/_index.md)
+        Reverse|1|30|C|/blog/c/|Page(/blog/C.md)
+        Reverse|2|20|B|/blog/b/|Page(/blog/B.md)
+        Reverse|3|10|A|/blog/a/|Page(/blog/A.md)
+
+        Default2|0|10|A|/blog/a/|Page(/blog/A.md)
+        Default2|1|20|B|/blog/b/|Page(/blog/B.md)
+        Default2|2|30|C|/blog/c/|Page(/blog/C.md)
+        Default2|3|100|Home|/|Page(/_index.md)
+
+        ByWeight|0|10|A|/blog/a/|Page(/blog/A.md)
+        ByWeight|1|20|B|/blog/b/|Page(/blog/B.md)
+        ByWeight|2|30|C|/blog/c/|Page(/blog/C.md)
+        ByWeight|3|100|Home|/|Page(/_index.md)
+
+        Default3|0|10|A|/blog/a/|Page(/blog/A.md)
+        Default3|1|20|B|/blog/b/|Page(/blog/B.md)
+        Default3|2|30|C|/blog/c/|Page(/blog/C.md)
+        Default3|3|100|Home|/|Page(/_index.md)`,
+	)
+}
+
+// TestMenusFrontMatter verifies the different front-matter menu forms:
+// a single menu name, a list of menu names, and the map form with weight.
+func TestMenusFrontMatter(t *testing.T) {
+	b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+	b.WithTemplatesAdded("index.html", `
+Main: {{ len .Site.Menus.main }}
+Other: {{ len .Site.Menus.other }}
+{{ range .Site.Menus.main }}
+* Main|{{ .Name }}: {{ .URL }}
+{{ end }}
+{{ range .Site.Menus.other }}
+* Other|{{ .Name }}: {{ .URL }}
+{{ end }}
+`)
+
+	// Issue #5828
+	b.WithContent("blog/page1.md", `
+---
+title: "P1"
+menu: main
+---
+
+`)
+
+	b.WithContent("blog/page2.md", `
+---
+title: "P2"
+menu: [main,other]
+---
+
+`)
+
+	b.WithContent("blog/page3.md", `
+---
+title: "P3"
+menu:
+  main:
+    weight: 30
+---
+`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html",
+		"Main: 3", "Other: 1",
+		"Main|P1: /blog/page1/",
+		"Other|P2: /blog/page2/",
+	)
+}
+
+// TestMenusPageMultipleOutputFormats verifies menu URLs for pages with
+// multiple output formats: each format's menu links to its own variant of a
+// page when available, falling back to the only variant otherwise.
+// https://github.com/gohugoio/hugo/issues/5849
+func TestMenusPageMultipleOutputFormats(t *testing.T) {
+	config := `
+baseURL = "https://example.com"
+
+# DAMP is similar to AMP, but not permalinkable.
+[outputFormats]
+[outputFormats.damp]
+mediaType = "text/html"
+path = "damp"
+
+`
+
+	b := newTestSitesBuilder(t).WithConfigFile("toml", config)
+	b.WithContent("_index.md", `
+---
+Title: Home Sweet Home
+outputs: [ "html", "amp" ]
+menu: "main"
+---
+
+`)
+
+	b.WithContent("blog/html-amp.md", `
+---
+Title: AMP and HTML
+outputs: [ "html", "amp" ]
+menu: "main"
+---
+
+`)
+
+	b.WithContent("blog/html.md", `
+---
+Title: HTML only
+outputs: [ "html" ]
+menu: "main"
+---
+
+`)
+
+	b.WithContent("blog/amp.md", `
+---
+Title: AMP only
+outputs: [ "amp" ]
+menu: "main"
+---
+
+`)
+
+	b.WithTemplatesAdded("index.html", `{{ range .Site.Menus.main }}{{ .Title }}|{{ .URL }}|{{ end }}`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html", "AMP and HTML|/blog/html-amp/|AMP only|/amp/blog/amp/|Home Sweet Home|/|HTML only|/blog/html/|")
+	b.AssertFileContent("public/amp/index.html", "AMP and HTML|/amp/blog/html-amp/|AMP only|/amp/blog/amp/|Home Sweet Home|/amp/|HTML only|/blog/html/|")
+}
+
+// TestMenusPageSortByDate verifies that menu children can be re-sorted by
+// their page's date in templates, overriding the weight order.
+// https://github.com/gohugoio/hugo/issues/5989
+func TestMenusPageSortByDate(t *testing.T) {
+	b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+	b.WithContent("blog/a.md", `
+---
+Title: A
+date: 2019-01-01
+menu:
+  main:
+    identifier: "a"
+    weight: 1
+---
+
+`)
+
+	b.WithContent("blog/b.md", `
+---
+Title: B
+date: 2018-01-02
+menu:
+  main:
+    parent: "a"
+    weight: 100
+---
+
+`)
+
+	b.WithContent("blog/c.md", `
+---
+Title: C
+date: 2019-01-03
+menu:
+  main:
+    parent: "a"
+    weight: 10
+---
+
+`)
+
+	b.WithTemplatesAdded("index.html", `{{ range .Site.Menus.main }}{{ .Title }}|Children:
+{{- $children := sort .Children ".Page.Date" "desc" }}{{ range $children }}{{ .Title }}|{{ end }}{{ end }}
+
+`)
+
+	b.Build(BuildCfg{})
+
+	// Date-desc order (C 2019, B 2018), not weight order (C 10, B 100).
+	b.AssertFileContent("public/index.html", "A|Children:C|B|")
+}
+
+// TestMenuParamsEmptyYaml verifies that an empty "params:" key in YAML menu
+// front matter does not crash the build.
+// Issue #8825
+func TestMenuParamsEmptyYaml(t *testing.T) {
+	b := newTestSitesBuilder(t).WithConfigFile("yaml", `
+
+`)
+
+	b.WithTemplates("index.html", `{{ site.Menus }}`)
+
+	b.WithContent("p1.md", `---
+menus:
+  main:
+    identity: journal
+    weight: 2
+    params:
+---
+`)
+	// Only checking that the build succeeds; no content assertions.
+	b.Build(BuildCfg{})
+}
+
+// TestMenuParams verifies that menu entry params are available from both
+// site config and page front matter, with case-insensitive key lookup.
+func TestMenuParams(t *testing.T) {
+	b := newTestSitesBuilder(t).WithConfigFile("toml", `
+[[menus.main]]
+identifier = "contact"
+title = "Contact Us"
+url = "mailto:noreply@example.com"
+weight = 300
+[menus.main.params]
+foo = "foo_config"
+key2 = "key2_config"
+camelCase = "camelCase_config"
+`)
+
+	// Deliberately mixed-case keys (.KEy2, .camelcase) to exercise
+	// case-insensitive param access.
+	b.WithTemplatesAdded("index.html", `
+Main: {{ len .Site.Menus.main }}
+{{ range .Site.Menus.main }}
+foo: {{ .Params.foo }}
+key2: {{ .Params.KEy2 }}
+camelCase: {{ .Params.camelcase }}
+{{ end }}
+`)
+
+	b.WithContent("_index.md", `
+---
+title: "Home"
+menu:
+  main:
+    weight: 10
+    params:
+      foo: "foo_content"
+      key2: "key2_content"
+      camelCase: "camelCase_content"
+---
+`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html", `
+Main: 2
+
+foo: foo_content
+key2: key2_content
+camelCase: camelCase_content
+
+foo: foo_config
+key2: key2_config
+camelCase: camelCase_config
+`)
+}
+
+// TestMenusShadowMembers verifies config-defined menu entries that reference
+// pages via pageRef: the referenced page is attached to the entry, and
+// IsMenuCurrent/HasMenuCurrent behave as if the page had defined the entry.
+func TestMenusShadowMembers(t *testing.T) {
+	b := newTestSitesBuilder(t).WithConfigFile("toml", `
+[[menus.main]]
+identifier = "contact"
+pageRef = "contact"
+title = "Contact Us"
+url = "mailto:noreply@example.com"
+weight = 1
+[[menus.main]]
+pageRef = "/blog/post3"
+title = "My Post 3"
+url = "/blog/post3"
+
+`)
+
+	commonTempl := `
+Main: {{ len .Site.Menus.main }}
+{{ range .Site.Menus.main }}
+{{ .Title }}|HasMenuCurrent: {{ $.HasMenuCurrent "main" . }}|Page: {{ .Page }}
+{{ .Title }}|IsMenuCurrent: {{ $.IsMenuCurrent "main" . }}|Page: {{ .Page }}
+{{ end }}
+`
+
+	b.WithTemplatesAdded("index.html", commonTempl)
+	b.WithTemplatesAdded("_default/single.html", commonTempl)
+
+	b.WithContent("_index.md", `
+---
+title: "Home"
+menu:
+  main:
+    weight: 10
+---
+`)
+
+	b.WithContent("blog/_index.md", `
+---
+title: "Blog"
+menu:
+  main:
+    weight: 20
+---
+`)
+
+	b.WithContent("blog/post1.md", `
+---
+title: "My Post 1: With No Menu Defined"
+---
+`)
+
+	b.WithContent("blog/post2.md", `
+---
+title: "My Post 2: With Menu Defined"
+menu:
+  main:
+    weight: 30
+---
+`)
+
+	b.WithContent("blog/post3.md", `
+---
+title: "My Post 2: With No Menu Defined"
+---
+`)
+
+	b.WithContent("contact.md", `
+---
+title: "Contact: With No Menu Defined"
+---
+`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html", `
+Main: 5
+Home|HasMenuCurrent: false|Page: Page(/_index.md)
+Blog|HasMenuCurrent: false|Page: Page(/blog/_index.md)
+My Post 2: With Menu Defined|HasMenuCurrent: false|Page: Page(/blog/post2.md)
+My Post 3|HasMenuCurrent: false|Page: Page(/blog/post3.md)
+Contact Us|HasMenuCurrent: false|Page: Page(/contact.md)
+`)
+
+	b.AssertFileContent("public/blog/post1/index.html", `
+Home|HasMenuCurrent: false|Page: Page(/_index.md)
+Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md)
+`)
+
+	b.AssertFileContent("public/blog/post2/index.html", `
+Home|HasMenuCurrent: false|Page: Page(/_index.md)
+Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md)
+Blog|IsMenuCurrent: false|Page: Page(/blog/_index.md)
+`)
+
+	b.AssertFileContent("public/blog/post3/index.html", `
+Home|HasMenuCurrent: false|Page: Page(/_index.md)
+Blog|HasMenuCurrent: true|Page: Page(/blog/_index.md)
+`)
+
+	b.AssertFileContent("public/contact/index.html", `
+Contact Us|HasMenuCurrent: false|Page: Page(/contact.md)
+Contact Us|IsMenuCurrent: true|Page: Page(/contact.md)
+Blog|HasMenuCurrent: false|Page: Page(/blog/_index.md)
+Blog|IsMenuCurrent: false|Page: Page(/blog/_index.md)
+`)
+}
+
+// TestMenuHasMenuCurrentSection verifies IsMenuCurrent/HasMenuCurrent for a
+// section page referenced by pageRef, and (issue 9925) that IsAncestor and
+// IsDescendant are false for a page compared with itself.
+// Issue 9846
+func TestMenuHasMenuCurrentSection(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+disableKinds = ['RSS','sitemap','taxonomy','term']
+[[menu.main]]
+name = 'Home'
+pageRef = '/'
+weight = 1
+
+[[menu.main]]
+name = 'Tests'
+pageRef = '/tests'
+weight = 2
+[[menu.main]]
+name = 'Test 1'
+pageRef = '/tests/test-1'
+parent = 'Tests'
+weight = 1
+
+-- content/tests/test-1.md --
+---
+title: "Test 1"
+---
+-- layouts/_default/list.html --
+{{ range site.Menus.main }}
+{{ .Name }}|{{ .URL }}|IsMenuCurrent = {{ $.IsMenuCurrent "main" . }}|HasMenuCurrent = {{ $.HasMenuCurrent "main" . }}|
+{{ range .Children }}
+{{ .Name }}|{{ .URL }}|IsMenuCurrent = {{ $.IsMenuCurrent "main" . }}|HasMenuCurrent = {{ $.HasMenuCurrent "main" . }}|
+{{ end }}
+{{ end }}
+
+{{/* Some tests for issue 9925 */}}
+{{ $page := .Site.GetPage "tests/test-1" }}
+{{ $section := site.GetPage "tests" }}
+
+Home IsAncestor Self: {{ site.Home.IsAncestor site.Home }}
+Home IsDescendant Self: {{ site.Home.IsDescendant site.Home }}
+Section IsAncestor Self: {{ $section.IsAncestor $section }}
+Section IsDescendant Self: {{ $section.IsDescendant $section}}
+Page IsAncestor Self: {{ $page.IsAncestor $page }}
+Page IsDescendant Self: {{ $page.IsDescendant $page}}
+`
+
+	b := NewIntegrationTestBuilder(
+		IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+		},
+	).Build()
+
+	b.AssertFileContent("public/tests/index.html", `
+Tests|/tests/|IsMenuCurrent = true|HasMenuCurrent = false
+Home IsAncestor Self: false
+Home IsDescendant Self: false
+Section IsAncestor Self: false
+Section IsDescendant Self: false
+Page IsAncestor Self: false
+Page IsDescendant Self: false
+`)
+}
diff --git a/hugolib/minify_publisher_test.go b/hugolib/minify_publisher_test.go
new file mode 100644
index 000000000..03b46a5fe
--- /dev/null
+++ b/hugolib/minify_publisher_test.go
@@ -0,0 +1,63 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+)
+
+// TestMinifyPublisher verifies that with `minify = true` the publisher
+// minifies HTML, RSS and sitemap output (e.g. `<!DOCTYPE html>` is lowered
+// to `<!doctype html>` and XML whitespace is collapsed).
+func TestMinifyPublisher(t *testing.T) {
+	t.Parallel()
+
+	v := config.NewWithTestDefaults()
+	v.Set("minify", true)
+	v.Set("baseURL", "https://example.org/")
+
+	// Deliberately verbose, un-minified input template.
+	htmlTemplate := `
+<!DOCTYPE html>
+<html lang="en">
+<head>
+	<meta charset="utf-8">
+	<title>HTML5 boilerplate – all you really need…</title>
+	<link rel="stylesheet" href="css/style.css">
+	<!--[if IE]>
+		<script src="http://html5shiv.googlecode.com/svn/trunk/html5.js"></script>
+	<![endif]-->
+</head>
+
+<body id="home">
+
+	<h1>{{ .Title }}</h1>
+	<p>{{ .Permalink }}</p>
+
+</body>
+</html>
+`
+
+	b := newTestSitesBuilder(t)
+	b.WithViper(v).WithTemplatesAdded("layouts/index.html", htmlTemplate)
+	b.CreateSites().Build(BuildCfg{})
+
+	// Check minification
+	// HTML
+	b.AssertFileContent("public/index.html", "<!doctype html>")
+
+	// RSS
+	b.AssertFileContent("public/index.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><rss version=\"2.0\" xmlns:atom=\"http://www.w3.org/2005/Atom\"><channel><title/><link>https://example.org/</link>")
+
+	// Sitemap
+	b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>h")
+}
diff --git a/hugolib/mount_filters_test.go b/hugolib/mount_filters_test.go
new file mode 100644
index 000000000..688cf2558
--- /dev/null
+++ b/hugolib/mount_filters_test.go
@@ -0,0 +1,119 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugofs"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestMountFilters verifies that the includeFiles/excludeFiles glob filters
+// declared on [module.mounts] are honored across content, layouts, data and
+// assets mounts: excluded files must be invisible to the build.
+func TestMountFilters(t *testing.T) {
+	t.Parallel()
+	b := newTestSitesBuilder(t)
+	workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-test-mountfilters")
+	b.Assert(err, qt.IsNil)
+	defer clean()
+
+	// Every component folder must exist on disk for the mounts below to resolve.
+	for _, component := range files.ComponentFolders {
+		b.Assert(os.MkdirAll(filepath.Join(workingDir, component), 0777), qt.IsNil)
+	}
+	b.WithWorkingDir(workingDir).WithLogger(loggers.NewInfoLogger())
+	b.WithConfigFile("toml", fmt.Sprintf(`
+workingDir = %q
+
+[module]
+[[module.mounts]]
+source = 'content'
+target = 'content'
+excludeFiles = "/a/c/**"
+[[module.mounts]]
+source = 'static'
+target = 'static'
+[[module.mounts]]
+source = 'layouts'
+target = 'layouts'
+excludeFiles = "/**/foo.html"
+[[module.mounts]]
+source = 'data'
+target = 'data'
+includeFiles = "/mydata/**"
+[[module.mounts]]
+source = 'assets'
+target = 'assets'
+excludeFiles = ["/**exclude.*", "/moooo.*"]
+[[module.mounts]]
+source = 'i18n'
+target = 'i18n'
+[[module.mounts]]
+source = 'archetypes'
+target = 'archetypes'
+
+
+`, workingDir))
+
+	b.WithContent("/a/b/p1.md", "---\ntitle: Include\n---")
+	b.WithContent("/a/c/p2.md", "---\ntitle: Exclude\n---")
+
+	b.WithSourceFile(
+		"data/mydata/b.toml", `b1='bval'`,
+		"data/nodata/c.toml", `c1='bval'`,
+		"layouts/partials/foo.html", `foo`,
+		"assets/exclude.txt", `foo`,
+		"assets/js/exclude.js", `foo`,
+		"assets/js/include.js", `foo`,
+		// Note: "assets/js/exclude.js" was originally listed twice; the
+		// duplicate entry was a no-op overwrite and has been removed.
+	)
+
+	b.WithTemplatesAdded("index.html", `
+
+Data: {{ site.Data }}:END
+
+Template: {{ templates.Exists "partials/foo.html" }}:END
+Resource1: {{ resources.Get "js/include.js" }}:END
+Resource2: {{ resources.Get "js/exclude.js" }}:END
+Resource3: {{ resources.Get "exclude.txt" }}:END
+Resources: {{ resources.Match "**.js" }}
+`)
+
+	b.Build(BuildCfg{})
+
+	assertExists := func(name string, shouldExist bool) {
+		b.Helper()
+		b.Assert(b.CheckExists(name), qt.Equals, shouldExist)
+	}
+
+	// Content: /a/c/** is excluded, /a/b/** is not.
+	assertExists("public/a/b/p1/index.html", true)
+	assertExists("public/a/c/p2/index.html", false)
+
+	// Filtered-out templates, data files and assets must resolve to nothing.
+	b.AssertFileContent(filepath.Join("public", "index.html"), `
+Data: map[mydata:map[b:map[b1:bval]]]:END
+Template: false
+Resource1: js/include.js:END
+Resource2: :END
+Resource3: :END
+Resources: [js/include.js]
+`)
+
+}
diff --git a/hugolib/multilingual.go b/hugolib/multilingual.go
new file mode 100644
index 000000000..baebc9e0f
--- /dev/null
+++ b/hugolib/multilingual.go
@@ -0,0 +1,82 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "errors"
+ "sync"
+
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/config"
+)
+
+// Multilingual manages all the languages used in a multilingual site.
+type Multilingual struct {
+	// Languages holds one language per configured site.
+	Languages langs.Languages
+
+	// DefaultLang is the site's default content language.
+	DefaultLang *langs.Language
+
+	// langMap indexes Languages by language code; built lazily by Language.
+	langMap     map[string]*langs.Language
+	langMapInit sync.Once
+}
+
+// Language returns the Language associated with the given string.
+// The lookup map is built once, on first use; a miss returns nil.
+func (ml *Multilingual) Language(lang string) *langs.Language {
+	ml.langMapInit.Do(func() {
+		ml.langMap = make(map[string]*langs.Language)
+		for _, l := range ml.Languages {
+			ml.langMap[l.Lang] = l
+		}
+	})
+	return ml.langMap[lang]
+}
+
+// getLanguages returns the pre-sorted languages stored in the config under
+// "languagesSorted", falling back to a single default language when the key
+// is not set (e.g. a plain monolingual site).
+func getLanguages(cfg config.Provider) langs.Languages {
+	if cfg.IsSet("languagesSorted") {
+		return cfg.Get("languagesSorted").(langs.Languages)
+	}
+
+	return langs.Languages{langs.NewDefaultLanguage(cfg)}
+}
+
+// newMultiLingualFromSites builds a Multilingual from the given sites,
+// collecting one language per site. Every site must already have a language
+// assigned. The default language comes from "defaultContentLanguage" in the
+// config, falling back to "en" when unset.
+func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingual, error) {
+	languages := make(langs.Languages, len(sites))
+
+	for i, s := range sites {
+		if s.language == nil {
+			return nil, errors.New("missing language for site")
+		}
+		languages[i] = s.language
+	}
+
+	defaultLang := cfg.GetString("defaultContentLanguage")
+
+	if defaultLang == "" {
+		defaultLang = "en"
+	}
+
+	return &Multilingual{Languages: languages, DefaultLang: langs.NewLanguage(defaultLang, cfg)}, nil
+}
+
+// enabled reports whether this is truly a multilingual site, i.e. more than
+// one language is configured.
+func (ml *Multilingual) enabled() bool {
+	return len(ml.Languages) > 1
+}
+
+// multilingualEnabled reports whether the owning HugoSites is multilingual.
+// It is always false when this site is not yet attached to a HugoSites
+// (s.h == nil), which can happen early in construction.
+func (s *Site) multilingualEnabled() bool {
+	if s.h == nil {
+		return false
+	}
+	return s.h.multilingual != nil && s.h.multilingual.enabled()
+}
diff --git a/hugolib/page.go b/hugolib/page.go
new file mode 100644
index 000000000..e37b47300
--- /dev/null
+++ b/hugolib/page.go
@@ -0,0 +1,982 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "go.uber.org/atomic"
+
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/bep/gitmap"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/parser/pageparser"
+
+ "github.com/gohugoio/hugo/output"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+ _ page.Page = (*pageState)(nil)
+ _ collections.Grouper = (*pageState)(nil)
+ _ collections.Slicer = (*pageState)(nil)
+)
+
+var (
+ pageTypesProvider = resource.NewResourceTypesProvider(media.OctetType, pageResourceType)
+ nopPageOutput = &pageOutput{
+ pagePerOutputProviders: nopPagePerOutput,
+ ContentProvider: page.NopPage,
+ TableOfContentsProvider: page.NopPage,
+ }
+)
+
+// pageContext provides contextual information about this page, for error
+// logging and similar.
+type pageContext interface {
+ posOffset(offset int) text.Position
+ wrapError(err error) error
+ getContentConverter() converter.Converter
+ addDependency(dep identity.Provider)
+}
+
+// wrapErr adds some context to the given error if possible.
+func wrapErr(err error, ctx any) error {
+ if pc, ok := ctx.(pageContext); ok {
+ return pc.wrapError(err)
+ }
+ return err
+}
+
+type pageSiteAdapter struct {
+ p page.Page
+ s *Site
+}
+
+func (pa pageSiteAdapter) GetPageWithTemplateInfo(info tpl.Info, ref string) (page.Page, error) {
+ p, err := pa.GetPage(ref)
+ if p != nil {
+ // Track pages referenced by templates/shortcodes
+ // when in server mode.
+ if im, ok := info.(identity.Manager); ok {
+ im.Add(p)
+ }
+ }
+ return p, err
+}
+
+func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) {
+ p, err := pa.s.getPageNew(pa.p, ref)
+ if p == nil {
+ // The nil struct has meaning in some situations, mostly to avoid breaking
+ // existing sites doing $nilpage.IsDescendant($p), which will always return
+ // false.
+ p = page.NilPage
+ }
+ return p, err
+}
+
+type pageState struct {
+ // This slice will be of same length as the number of global slice of output
+ // formats (for all sites).
+ pageOutputs []*pageOutput
+
+ // Used to determine if we can reuse content across output formats.
+ pageOutputTemplateVariationsState *atomic.Uint32
+
+ // This will be shifted out when we start to render a new output format.
+ *pageOutput
+
+ // Common for all output formats.
+ *pageCommon
+}
+
+func (p *pageState) reusePageOutputContent() bool {
+ return p.pageOutputTemplateVariationsState.Load() == 1
+}
+
+func (p *pageState) Err() resource.ResourceError {
+ return nil
+}
+
+// Eq returns whether the current page equals the given page.
+// This is what's invoked when doing `{{ if eq $page $otherPage }}`
+func (p *pageState) Eq(other any) bool {
+ pp, err := unwrapPage(other)
+ if err != nil {
+ return false
+ }
+
+ return p == pp
+}
+
+func (p *pageState) GetIdentity() identity.Identity {
+ return identity.NewPathIdentity(files.ComponentFolderContent, filepath.FromSlash(p.Pathc()))
+}
+
+func (p *pageState) GitInfo() *gitmap.GitInfo {
+ return p.gitInfo
+}
+
+func (p *pageState) CodeOwners() []string {
+ return p.codeowners
+}
+
+// GetTerms gets the terms defined on this page in the given taxonomy.
+// The pages returned will be ordered according to the front matter.
+func (p *pageState) GetTerms(taxonomy string) page.Pages {
+	// Pages without a tree reference (e.g. standalone pages) have no terms.
+	if p.treeRef == nil {
+		return nil
+	}
+
+	m := p.s.pageMap
+
+	taxonomy = strings.ToLower(taxonomy)
+	prefix := cleanSectionTreeKey(taxonomy)
+	self := strings.TrimPrefix(p.treeRef.key, "/")
+
+	var pas page.Pages
+
+	// For each term under this taxonomy, check whether an entry exists for
+	// term-key + this page's key; if so, this page carries that term.
+	m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool {
+		key := s + self
+		if tn, found := m.taxonomyEntries.Get(key); found {
+			// The ordinal records this term's position in the page's front
+			// matter; it is carried along via the pageWithOrdinal wrapper.
+			vi := tn.(*contentNode).viewInfo
+			pas = append(pas, pageWithOrdinal{pageState: n.p, ordinal: vi.ordinal})
+		}
+		return false
+	})
+
+	page.SortByDefault(pas)
+
+	return pas
+}
+
+func (p *pageState) MarshalJSON() ([]byte, error) {
+ return page.MarshalPageToJSON(p)
+}
+
+func (p *pageState) getPages() page.Pages {
+ b := p.bucket
+ if b == nil {
+ return nil
+ }
+ return b.getPages()
+}
+
+func (p *pageState) getPagesRecursive() page.Pages {
+ b := p.bucket
+ if b == nil {
+ return nil
+ }
+ return b.getPagesRecursive()
+}
+
+func (p *pageState) getPagesAndSections() page.Pages {
+ b := p.bucket
+ if b == nil {
+ return nil
+ }
+ return b.getPagesAndSections()
+}
+
+func (p *pageState) RegularPagesRecursive() page.Pages {
+ p.regularPagesRecursiveInit.Do(func() {
+ var pages page.Pages
+ switch p.Kind() {
+ case page.KindSection:
+ pages = p.getPagesRecursive()
+ default:
+ pages = p.RegularPages()
+ }
+ p.regularPagesRecursive = pages
+ })
+ return p.regularPagesRecursive
+}
+
+// PagesRecursive always returns nil.
+// NOTE(review): unlike RegularPagesRecursive above, this never collects any
+// pages — confirm whether this stub is intentional.
+func (p *pageState) PagesRecursive() page.Pages {
+	return nil
+}
+
+func (p *pageState) RegularPages() page.Pages {
+ p.regularPagesInit.Do(func() {
+ var pages page.Pages
+
+ switch p.Kind() {
+ case page.KindPage:
+ case page.KindSection, page.KindHome, page.KindTaxonomy:
+ pages = p.getPages()
+ case page.KindTerm:
+ all := p.Pages()
+ for _, p := range all {
+ if p.IsPage() {
+ pages = append(pages, p)
+ }
+ }
+ default:
+ pages = p.s.RegularPages()
+ }
+
+ p.regularPages = pages
+ })
+
+ return p.regularPages
+}
+
+func (p *pageState) Pages() page.Pages {
+ p.pagesInit.Do(func() {
+ var pages page.Pages
+
+ switch p.Kind() {
+ case page.KindPage:
+ case page.KindSection, page.KindHome:
+ pages = p.getPagesAndSections()
+ case page.KindTerm:
+ pages = p.bucket.getTaxonomyEntries()
+ case page.KindTaxonomy:
+ pages = p.bucket.getTaxonomies()
+ default:
+ pages = p.s.Pages()
+ }
+
+ p.pages = pages
+ })
+
+ return p.pages
+}
+
+// RawContent returns the un-rendered source content without
+// any leading front matter.
+func (p *pageState) RawContent() string {
+ if p.source.parsed == nil {
+ return ""
+ }
+ start := p.source.posMainContent
+ if start == -1 {
+ start = 0
+ }
+ return string(p.source.parsed.Input()[start:])
+}
+
+// sortResources stable-sorts the page's resources: first by resource type,
+// then non-page resources before page resources, pages by the default page
+// sort, and remaining resources by name.
+func (p *pageState) sortResources() {
+	sort.SliceStable(p.resources, func(i, j int) bool {
+		ri, rj := p.resources[i], p.resources[j]
+		if ri.ResourceType() < rj.ResourceType() {
+			return true
+		}
+
+		p1, ok1 := ri.(page.Page)
+		p2, ok2 := rj.(page.Page)
+
+		// Exactly one of the two is a page: the non-page sorts first.
+		if ok1 != ok2 {
+			return ok2
+		}
+
+		if ok1 {
+			return page.DefaultPageSort(p1, p2)
+		}
+
+		// Make sure not to use RelPermalink or any of the other methods that
+		// trigger lazy publishing.
+		return ri.Name() < rj.Name()
+	})
+}
+
+func (p *pageState) Resources() resource.Resources {
+ p.resourcesInit.Do(func() {
+ p.sortResources()
+ if len(p.m.resourcesMetadata) > 0 {
+ resources.AssignMetadata(p.m.resourcesMetadata, p.resources...)
+ p.sortResources()
+ }
+ })
+ return p.resources
+}
+
+func (p *pageState) HasShortcode(name string) bool {
+ if p.shortcodeState == nil {
+ return false
+ }
+
+ return p.shortcodeState.hasName(name)
+}
+
+func (p *pageState) Site() page.Site {
+ return p.s.Info
+}
+
+func (p *pageState) String() string {
+ if sourceRef := p.sourceRef(); sourceRef != "" {
+ return fmt.Sprintf("Page(%s)", sourceRef)
+ }
+ return fmt.Sprintf("Page(%q)", p.Title())
+}
+
+// IsTranslated returns whether this content file is translated to
+// other language(s).
+func (p *pageState) IsTranslated() bool {
+ p.s.h.init.translations.Do()
+ return len(p.translations) > 0
+}
+
+// TranslationKey returns the key used to map language translations of this page.
+// It will use the translationKey set in front matter if set, or the content path and
+// filename (excluding any language code and extension), e.g. "about/index".
+// The Page Kind is always prepended.
+func (p *pageState) TranslationKey() string {
+ p.translationKeyInit.Do(func() {
+ if p.m.translationKey != "" {
+ p.translationKey = p.Kind() + "/" + p.m.translationKey
+ } else if p.IsPage() && !p.File().IsZero() {
+ p.translationKey = path.Join(p.Kind(), filepath.ToSlash(p.File().Dir()), p.File().TranslationBaseName())
+ } else if p.IsNode() {
+ p.translationKey = path.Join(p.Kind(), p.SectionsPath())
+ }
+ })
+
+ return p.translationKey
+}
+
+// AllTranslations returns all translations, including the current Page.
+func (p *pageState) AllTranslations() page.Pages {
+ p.s.h.init.translations.Do()
+ return p.allTranslations
+}
+
+// Translations returns the translations excluding the current Page.
+func (p *pageState) Translations() page.Pages {
+ p.s.h.init.translations.Do()
+ return p.translations
+}
+
+func (ps *pageState) initCommonProviders(pp pagePaths) error {
+ if ps.IsPage() {
+ ps.posNextPrev = &nextPrev{init: ps.s.init.prevNext}
+ ps.posNextPrevSection = &nextPrev{init: ps.s.init.prevNextInSection}
+ ps.InSectionPositioner = newPagePositionInSection(ps.posNextPrevSection)
+ ps.Positioner = newPagePosition(ps.posNextPrev)
+ }
+
+ ps.OutputFormatsProvider = pp
+ ps.targetPathDescriptor = pp.targetPathDescriptor
+ ps.RefProvider = newPageRef(ps)
+ ps.SitesProvider = ps.s.Info
+
+ return nil
+}
+
+func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
+ p.layoutDescriptorInit.Do(func() {
+ var section string
+ sections := p.SectionsEntries()
+
+ switch p.Kind() {
+ case page.KindSection:
+ if len(sections) > 0 {
+ section = sections[0]
+ }
+ case page.KindTaxonomy, page.KindTerm:
+ b := p.getTreeRef().n
+ section = b.viewInfo.name.singular
+ default:
+ }
+
+ p.layoutDescriptor = output.LayoutDescriptor{
+ Kind: p.Kind(),
+ Type: p.Type(),
+ Lang: p.Language().Lang,
+ Layout: p.Layout(),
+ Section: section,
+ }
+ })
+
+ return p.layoutDescriptor
+}
+
+func (p *pageState) resolveTemplate(layouts ...string) (tpl.Template, bool, error) {
+ f := p.outputFormat()
+
+ if len(layouts) == 0 {
+ selfLayout := p.selfLayoutForOutput(f)
+ if selfLayout != "" {
+ templ, found := p.s.Tmpl().Lookup(selfLayout)
+ return templ, found, nil
+ }
+ }
+
+ d := p.getLayoutDescriptor()
+
+ if len(layouts) > 0 {
+ d.Layout = layouts[0]
+ d.LayoutOverride = true
+ }
+
+ return p.s.Tmpl().LookupLayout(d, f)
+}
+
+// This is serialized
+func (p *pageState) initOutputFormat(isRenderingSite bool, idx int) error {
+ if err := p.shiftToOutputFormat(isRenderingSite, idx); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// Must be run after the site section tree etc. is built and ready.
+func (p *pageState) initPage() error {
+ if _, err := p.init.Do(); err != nil {
+ return err
+ }
+ return nil
+}
+
+// renderResources publishes this page's resources exactly once. Resources
+// that are themselves pages are rendered with their owning page and only
+// counted here; resources whose source files have vanished (rare: live
+// reload with a resource shared between bundles) are dropped from the page.
+func (p *pageState) renderResources() (err error) {
+	p.resourcesPublishInit.Do(func() {
+		var toBeDeleted []int
+
+		for i, r := range p.Resources() {
+
+			if _, ok := r.(page.Page); ok {
+				// Pages gets rendered with the owning page but we count them here.
+				p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages)
+				continue
+			}
+
+			src, ok := r.(resource.Source)
+			if !ok {
+				// Format r, not src: on a failed type assertion src is nil,
+				// so %T would report "<nil>" instead of the offending type.
+				err = fmt.Errorf("Resource %T does not support resource.Source", r)
+				return
+			}
+
+			if err := src.Publish(); err != nil {
+				if os.IsNotExist(err) {
+					// The resource has been deleted from the file system.
+					// This should be extremely rare, but can happen on live reload in server
+					// mode when the same resource is member of different page bundles.
+					toBeDeleted = append(toBeDeleted, i)
+				} else {
+					p.s.Log.Errorf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err)
+				}
+			} else {
+				p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
+			}
+		}
+
+		// Delete in reverse so earlier removals don't shift the indexes of
+		// later ones (toBeDeleted was collected in ascending order).
+		for j := len(toBeDeleted) - 1; j >= 0; j-- {
+			p.deleteResource(toBeDeleted[j])
+		}
+	})
+
+	return
+}
+
+func (p *pageState) deleteResource(i int) {
+ p.resources = append(p.resources[:i], p.resources[i+1:]...)
+}
+
+func (p *pageState) getTargetPaths() page.TargetPaths {
+ return p.targetPaths()
+}
+
+// setTranslations records the full, language-sorted set of translations for
+// this page and derives the subset that excludes the page itself (as
+// reported by Translations()).
+func (p *pageState) setTranslations(pages page.Pages) {
+	p.allTranslations = pages
+	page.SortByLanguage(p.allTranslations)
+	translations := make(page.Pages, 0)
+	for _, t := range p.allTranslations {
+		if !t.Eq(p) {
+			translations = append(translations, t)
+		}
+	}
+	p.translations = translations
+}
+
+func (p *pageState) AlternativeOutputFormats() page.OutputFormats {
+ f := p.outputFormat()
+ var o page.OutputFormats
+ for _, of := range p.OutputFormats() {
+ if of.Format.NotAlternative || of.Format.Name == f.Name {
+ continue
+ }
+
+ o = append(o, of)
+ }
+ return o
+}
+
+type renderStringOpts struct {
+ Display string
+ Markup string
+}
+
+var defaultRenderStringOpts = renderStringOpts{
+ Display: "inline",
+ Markup: "", // Will inherit the page's value when not set.
+}
+
+func (p *pageState) addDependency(dep identity.Provider) {
+ if !p.s.running() || p.pageOutput.cp == nil {
+ return
+ }
+ p.pageOutput.cp.dependencyTracker.Add(dep)
+}
+
+// wrapError adds some more context to the given error if possible/needed
+func (p *pageState) wrapError(err error) error {
+ if err == nil {
+ panic("wrapError with nil")
+ }
+
+ if p.File().IsZero() {
+ // No more details to add.
+ return fmt.Errorf("%q: %w", p.Pathc(), err)
+ }
+
+ filename := p.File().Filename()
+
+ // Check if it's already added.
+ for _, ferr := range herrors.UnwrapFileErrors(err) {
+ errfilename := ferr.Position().Filename
+ if errfilename == filename {
+ if ferr.ErrorContext() == nil {
+ f, ioerr := p.s.SourceSpec.Fs.Source.Open(filename)
+ if ioerr != nil {
+ return err
+ }
+ defer f.Close()
+ ferr.UpdateContent(f, nil)
+ }
+ return err
+ }
+ }
+
+ return herrors.NewFileErrorFromFile(err, filename, p.s.SourceSpec.Fs.Source, herrors.NopLineMatcher)
+
+}
+
+func (p *pageState) getContentConverter() converter.Converter {
+ var err error
+ p.m.contentConverterInit.Do(func() {
+ markup := p.m.markup
+ if markup == "html" {
+ // Only used for shortcode inner content.
+ markup = "markdown"
+ }
+ p.m.contentConverter, err = p.m.newContentConverter(p, markup)
+ })
+
+ if err != nil {
+ p.s.Log.Errorln("Failed to create content converter:", err)
+ }
+ return p.m.contentConverter
+}
+
+// mapContent parses this page's source into p.cmap and applies the parsed
+// front matter via meta.setMetadata. It is a thin wrapper around
+// mapContentForResult using the page's own parsed source and shortcode state.
+func (p *pageState) mapContent(bucket *pagesMapBucket, meta *pageMeta) error {
+	p.cmap = &pageContentMap{
+		items: make([]any, 0, 20),
+	}
+
+	return p.mapContentForResult(
+		p.source.parsed,
+		p.shortcodeState,
+		p.cmap,
+		meta.markup,
+		// map[string]any for consistency with the rest of the file
+		// (identical to map[string]interface{}).
+		func(m map[string]any) error {
+			return meta.setMetadata(bucket, p, m)
+		},
+	)
+}
+
+// mapContentForResult walks the parser items of result and builds the page
+// content map rn: front matter is decoded and handed to withFrontMatter,
+// summary dividers and emoji become replacements, shortcodes are extracted
+// into s, and everything else is appended as raw bytes. Parse errors are
+// reported with file positions attached.
+func (p *pageState) mapContentForResult(
+	result pageparser.Result,
+	s *shortcodeHandler,
+	rn *pageContentMap,
+	markup string,
+	withFrontMatter func(map[string]any) error,
+) error {
+
+	iter := result.Iterator()
+
+	// fail attaches a file position to err unless it already is a FileError.
+	fail := func(err error, i pageparser.Item) error {
+		if fe, ok := err.(herrors.FileError); ok {
+			return fe
+		}
+		return p.parseError(err, iter.Input(), i.Pos)
+	}
+
+	// the parser is guaranteed to return items in proper order or fail, so …
+	// … it's safe to keep some "global" state
+	// NOTE(review): the shortcode case below declares its own currShortcode
+	// with :=, shadowing this one; this outer value is only written in the
+	// error case, where it is still the zero value — confirm intent.
+	var currShortcode shortcode
+	var ordinal int
+	var frontMatterSet bool
+
+Loop:
+	for {
+		it := iter.Next()
+
+		switch {
+		case it.Type == pageparser.TypeIgnore:
+		case it.IsFrontMatter():
+			f := pageparser.FormatFromFrontMatterType(it.Type)
+			m, err := metadecoders.Default.UnmarshalToMap(it.Val, f)
+			if err != nil {
+				if fe, ok := err.(herrors.FileError); ok {
+					pos := fe.Position()
+					// Apply the error to the content file.
+					pos.Filename = p.File().Filename()
+					// Offset the starting position of front matter.
+					offset := iter.LineNumber() - 1
+					if f == metadecoders.YAML {
+						offset -= 1
+					}
+					pos.LineNumber += offset
+
+					fe.UpdatePosition(pos)
+
+					return fe
+				} else {
+					return err
+				}
+			}
+
+			if withFrontMatter != nil {
+				if err := withFrontMatter(m); err != nil {
+					return err
+				}
+			}
+
+			frontMatterSet = true
+
+			// Content (if any) starts right after the front matter.
+			next := iter.Peek()
+			if !next.IsDone() {
+				p.source.posMainContent = next.Pos
+			}
+
+			// Draft/future/expired pages can stop here: metadata is set.
+			if !p.s.shouldBuild(p) {
+				// Nothing more to do.
+				return nil
+			}
+
+		case it.Type == pageparser.TypeLeadSummaryDivider:
+			// Record where the summary ends and the body begins, and mark the
+			// page truncated if any non-whitespace content follows the divider.
+			posBody := -1
+			f := func(item pageparser.Item) bool {
+				if posBody == -1 && !item.IsDone() {
+					posBody = item.Pos
+				}
+
+				if item.IsNonWhitespace() {
+					p.truncated = true
+
+					// Done
+					return false
+				}
+				return true
+			}
+			iter.PeekWalk(f)
+
+			p.source.posSummaryEnd = it.Pos
+			p.source.posBodyStart = posBody
+			p.source.hasSummaryDivider = true
+
+			if markup != "html" {
+				// The content will be rendered by Goldmark or similar,
+				// and we need to track the summary.
+				rn.AddReplacement(internalSummaryDividerPre, it)
+			}
+
+		// Handle shortcode
+		case it.IsLeftShortcodeDelim():
+			// let extractShortcode handle left delim (will do so recursively)
+			iter.Backup()
+
+			currShortcode, err := s.extractShortcode(ordinal, 0, iter)
+			if err != nil {
+				return fail(err, it)
+			}
+
+			currShortcode.pos = it.Pos
+			currShortcode.length = iter.Current().Pos - it.Pos
+			// NOTE(review): this conditional placeholder assignment is
+			// unconditionally overwritten a few lines below — it looks dead.
+			if currShortcode.placeholder == "" {
+				currShortcode.placeholder = createShortcodePlaceholder("s", currShortcode.ordinal)
+			}
+
+			if currShortcode.name != "" {
+				s.addName(currShortcode.name)
+			}
+
+			// Normalize nil params to a typed empty value (shadows the
+			// shortcodeHandler s for the two lines below).
+			if currShortcode.params == nil {
+				var s []string
+				currShortcode.params = s
+			}
+
+			currShortcode.placeholder = createShortcodePlaceholder("s", ordinal)
+			ordinal++
+			s.shortcodes = append(s.shortcodes, currShortcode)
+
+			rn.AddShortcode(currShortcode)
+
+		case it.Type == pageparser.TypeEmoji:
+			if emoji := helpers.Emoji(it.ValStr()); emoji != nil {
+				rn.AddReplacement(emoji, it)
+			} else {
+				rn.AddBytes(it)
+			}
+		case it.IsEOF():
+			break Loop
+		case it.IsError():
+			err := fail(errors.New(it.ValStr()), it)
+			// NOTE(review): this assigns to the outer, zero-valued
+			// currShortcode (the shortcode case shadows it) — verify.
+			currShortcode.err = err
+			return err
+
+		default:
+			rn.AddBytes(it)
+		}
+	}
+
+	if !frontMatterSet && withFrontMatter != nil {
+		// Page content without front matter. Assign default front matter from
+		// cascades etc.
+		if err := withFrontMatter(nil); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// errorf formats an error with page context (language and path/title).
+// A non-nil err is wrapped with %w so callers can still unwrap it; a nil
+// err means the format string itself is the message. If err already carries
+// file context it is returned unchanged to avoid stacking redundant detail.
+func (p *pageState) errorf(err error, format string, a ...any) error {
+	if herrors.UnwrapFileError(err) != nil {
+		// More isn't always better.
+		return err
+	}
+	args := append([]any{p.Language().Lang, p.pathOrTitle()}, a...)
+	if err == nil {
+		// No underlying error to wrap; %w on nil would render "%!w(<nil>)".
+		args = append(args, errors.New(format))
+		format = "[%s] page %q: %s"
+	} else {
+		args = append(args, err)
+		format = "[%s] page %q: " + format + ": %w"
+	}
+	return fmt.Errorf(format, args...)
+}
+
+func (p *pageState) outputFormat() (f output.Format) {
+ if p.pageOutput == nil {
+ panic("no pageOutput")
+ }
+ return p.pageOutput.f
+}
+
+func (p *pageState) parseError(err error, input []byte, offset int) error {
+ pos := p.posFromInput(input, offset)
+ return herrors.NewFileErrorFromName(err, p.File().Filename()).UpdatePosition(pos)
+}
+
+func (p *pageState) pathOrTitle() string {
+ if !p.File().IsZero() {
+ return p.File().Filename()
+ }
+
+ if p.Pathc() != "" {
+ return p.Pathc()
+ }
+
+ return p.Title()
+}
+
+func (p *pageState) posFromPage(offset int) text.Position {
+ return p.posFromInput(p.source.parsed.Input(), offset)
+}
+
+// posFromInput converts a byte offset into input to a 1-based line/column
+// text.Position for error reporting. A negative offset yields a position
+// with only the filename (or title) set.
+func (p *pageState) posFromInput(input []byte, offset int) text.Position {
+	if offset < 0 {
+		return text.Position{
+			Filename: p.pathOrTitle(),
+		}
+	}
+	lf := []byte("\n")
+	input = input[:offset]
+	lineNumber := bytes.Count(input, lf) + 1
+	// LastIndex is -1 when there is no preceding newline, which makes the
+	// column offset+1, i.e. 1-based on the first line as well.
+	endOfLastLine := bytes.LastIndex(input, lf)
+
+	return text.Position{
+		Filename:     p.pathOrTitle(),
+		LineNumber:   lineNumber,
+		ColumnNumber: offset - endOfLastLine,
+		Offset:       offset,
+	}
+}
+
+func (p *pageState) posOffset(offset int) text.Position {
+ return p.posFromInput(p.source.parsed.Input(), offset)
+}
+
+// shiftToOutputFormat is serialized. The output format idx refers to the
+// full set of output formats for all sites.
+func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
+ if err := p.initPage(); err != nil {
+ return err
+ }
+
+ if len(p.pageOutputs) == 1 {
+ idx = 0
+ }
+
+ p.pageOutput = p.pageOutputs[idx]
+ if p.pageOutput == nil {
+ panic(fmt.Sprintf("pageOutput is nil for output idx %d", idx))
+ }
+
+ // Reset any built paginator. This will trigger when re-rendering pages in
+ // server mode.
+ if isRenderingSite && p.pageOutput.paginator != nil && p.pageOutput.paginator.current != nil {
+ p.pageOutput.paginator.reset()
+ }
+
+ if isRenderingSite {
+ cp := p.pageOutput.cp
+ if cp == nil && p.reusePageOutputContent() {
+ // Look for content to reuse.
+ for i := 0; i < len(p.pageOutputs); i++ {
+ if i == idx {
+ continue
+ }
+ po := p.pageOutputs[i]
+
+ if po.cp != nil {
+ cp = po.cp
+ break
+ }
+ }
+ }
+
+ if cp == nil {
+ var err error
+ cp, err = newPageContentOutput(p, p.pageOutput)
+ if err != nil {
+ return err
+ }
+ }
+ p.pageOutput.initContentProvider(cp)
+ } else {
+ // We attempt to assign pageContentOutputs while preparing each site
+ // for rendering and before rendering each site. This lets us share
+ // content between page outputs to conserve resources. But if a template
+ // unexpectedly calls a method of a ContentProvider that is not yet
+ // initialized, we assign a LazyContentProvider that performs the
+ // initialization just in time.
+ if lcp, ok := (p.pageOutput.ContentProvider.(*page.LazyContentProvider)); ok {
+ lcp.Reset()
+ } else {
+ lcp = page.NewLazyContentProvider(func() (page.OutputFormatContentProvider, error) {
+ cp, err := newPageContentOutput(p, p.pageOutput)
+ if err != nil {
+ return nil, err
+ }
+ return cp, nil
+ })
+ p.pageOutput.ContentProvider = lcp
+ p.pageOutput.TableOfContentsProvider = lcp
+ p.pageOutput.PageRenderProvider = lcp
+ }
+ }
+
+ return nil
+}
+
+// sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to
+// this page. It is prefixed with a "/".
+//
+// For pages that have a source file, it is returns the path to this file as an
+// absolute path rooted in this site's content dir.
+// For pages that do not (sections without content page etc.), it returns the
+// virtual path, consistent with where you would add a source file.
+func (p *pageState) sourceRef() string {
+ if !p.File().IsZero() {
+ sourcePath := p.File().Path()
+ if sourcePath != "" {
+ return "/" + filepath.ToSlash(sourcePath)
+ }
+ }
+
+ if len(p.SectionsEntries()) > 0 {
+ // no backing file, return the virtual source path
+ return "/" + p.SectionsPath()
+ }
+
+ return ""
+}
+
+func (s *Site) sectionsFromFile(fi source.File) []string {
+ dirname := fi.Dir()
+
+ dirname = strings.Trim(dirname, helpers.FilePathSeparator)
+ if dirname == "" {
+ return nil
+ }
+ parts := strings.Split(dirname, helpers.FilePathSeparator)
+
+ if fii, ok := fi.(*fileInfo); ok {
+ if len(parts) > 0 && fii.FileInfo().Meta().Classifier == files.ContentClassLeaf {
+ // my-section/mybundle/index.md => my-section
+ return parts[:len(parts)-1]
+ }
+ }
+
+ return parts
+}
+
+var (
+ _ page.Page = (*pageWithOrdinal)(nil)
+ _ collections.Order = (*pageWithOrdinal)(nil)
+ _ pageWrapper = (*pageWithOrdinal)(nil)
+)
+
+type pageWithOrdinal struct {
+ ordinal int
+ *pageState
+}
+
+func (p pageWithOrdinal) Ordinal() int {
+ return p.ordinal
+}
+
+func (p pageWithOrdinal) page() page.Page {
+ return p.pageState
+}
diff --git a/hugolib/page__common.go b/hugolib/page__common.go
new file mode 100644
index 000000000..59f0bc776
--- /dev/null
+++ b/hugolib/page__common.go
@@ -0,0 +1,156 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "sync"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/compare"
+ "github.com/gohugoio/hugo/lazy"
+ "github.com/gohugoio/hugo/navigation"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+type treeRefProvider interface {
+ getTreeRef() *contentTreeRef
+}
+
+func (p *pageCommon) getTreeRef() *contentTreeRef {
+ return p.treeRef
+}
+
+type nextPrevProvider interface {
+ getNextPrev() *nextPrev
+}
+
+func (p *pageCommon) getNextPrev() *nextPrev {
+ return p.posNextPrev
+}
+
+type nextPrevInSectionProvider interface {
+ getNextPrevInSection() *nextPrev
+}
+
+func (p *pageCommon) getNextPrevInSection() *nextPrev {
+ return p.posNextPrevSection
+}
+
+type pageCommon struct {
+ s *Site
+ m *pageMeta
+
+ bucket *pagesMapBucket
+ treeRef *contentTreeRef
+
+ // Lazily initialized dependencies.
+ init *lazy.Init
+
+ // Store holds state that survives server rebuilds.
+ store *maps.Scratch
+
+ // All of these represents the common parts of a page.Page
+ maps.Scratcher
+ navigation.PageMenusProvider
+ page.AuthorProvider
+ page.AlternativeOutputFormatsProvider
+ page.ChildCareProvider
+ page.FileProvider
+ page.GetPageProvider
+ page.GitInfoProvider
+ page.InSectionPositioner
+ page.OutputFormatsProvider
+ page.PageMetaProvider
+ page.Positioner
+ page.RawContentProvider
+ page.RelatedKeywordsProvider
+ page.RefProvider
+ page.ShortcodeInfoProvider
+ page.SitesProvider
+ page.TranslationsProvider
+ page.TreeProvider
+ resource.LanguageProvider
+ resource.ResourceDataProvider
+ resource.ResourceMetaProvider
+ resource.ResourceParamsProvider
+ resource.ResourceTypeProvider
+ resource.MediaTypeProvider
+ resource.TranslationKeyProvider
+ compare.Eqer
+
+ // Describes how paths and URLs for this page and its descendants
+ // should look like.
+ targetPathDescriptor page.TargetPathDescriptor
+
+ layoutDescriptor output.LayoutDescriptor
+ layoutDescriptorInit sync.Once
+
+ // The parsed page content.
+ pageContent
+
+ // Keeps track of the shortcodes on a page.
+ shortcodeState *shortcodeHandler
+
+ // Set if feature enabled and this is in a Git repo.
+ gitInfo *gitmap.GitInfo
+ codeowners []string
+
+ // Positional navigation
+ posNextPrev *nextPrev
+ posNextPrevSection *nextPrev
+
+ // Menus
+ pageMenus *pageMenus
+
+ // Internal use
+ page.InternalDependencies
+
+ // The children. Regular pages will have none.
+ *pagePages
+
+ // Any bundled resources
+ resources resource.Resources
+ resourcesInit sync.Once
+ resourcesPublishInit sync.Once
+
+ translations page.Pages
+ allTranslations page.Pages
+
+ // Calculated an cached translation mapping key
+ translationKey string
+ translationKeyInit sync.Once
+
+ // Will only be set for bundled pages.
+ parent *pageState
+
+ // Set in fast render mode to force render a given page.
+ forceRender bool
+}
+
+func (p *pageCommon) Store() *maps.Scratch {
+ return p.store
+}
+
+type pagePages struct {
+ pagesInit sync.Once
+ pages page.Pages
+
+ regularPagesInit sync.Once
+ regularPages page.Pages
+ regularPagesRecursiveInit sync.Once
+ regularPagesRecursive page.Pages
+}
diff --git a/hugolib/page__content.go b/hugolib/page__content.go
new file mode 100644
index 000000000..bf69fafcd
--- /dev/null
+++ b/hugolib/page__content.go
@@ -0,0 +1,130 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/parser/pageparser"
+)
+
+var (
+ internalSummaryDividerBase = "HUGOMORE42"
+ internalSummaryDividerBaseBytes = []byte(internalSummaryDividerBase)
+ internalSummaryDividerPre = []byte("\n\n" + internalSummaryDividerBase + "\n\n")
+)
+
+// The content related items on a Page.
+type pageContent struct {
+ selfLayout string
+ truncated bool
+
+ cmap *pageContentMap
+
+ source rawPageContent
+}
+
+// returns the content to be processed by Goldmark or similar.
+func (p pageContent) contentToRender(parsed pageparser.Result, pm *pageContentMap, renderedShortcodes map[string]string) []byte {
+ source := parsed.Input()
+
+ c := make([]byte, 0, len(source)+(len(source)/10))
+
+ for _, it := range pm.items {
+ switch v := it.(type) {
+ case pageparser.Item:
+ c = append(c, source[v.Pos:v.Pos+len(v.Val)]...)
+ case pageContentReplacement:
+ c = append(c, v.val...)
+ case *shortcode:
+ if !v.insertPlaceholder() {
+ // Insert the rendered shortcode.
+ renderedShortcode, found := renderedShortcodes[v.placeholder]
+ if !found {
+ // This should never happen.
+ panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder))
+ }
+
+ c = append(c, []byte(renderedShortcode)...)
+
+ } else {
+ // Insert the placeholder so we can insert the content after
+ // markdown processing.
+ c = append(c, []byte(v.placeholder)...)
+ }
+ default:
+ panic(fmt.Sprintf("unknown item type %T", it))
+ }
+ }
+
+ return c
+}
+
+func (p pageContent) selfLayoutForOutput(f output.Format) string {
+ if p.selfLayout == "" {
+ return ""
+ }
+ return p.selfLayout + f.Name
+}
+
+type rawPageContent struct {
+ hasSummaryDivider bool
+
+ // The AST of the parsed page. Contains information about:
+ // shortcodes, front matter, summary indicators.
+ parsed pageparser.Result
+
+ // Returns the position in bytes after any front matter.
+ posMainContent int
+
+ // These are set if we're able to determine this from the source.
+ posSummaryEnd int
+ posBodyStart int
+}
+
+type pageContentReplacement struct {
+ val []byte
+
+ source pageparser.Item
+}
+
+type pageContentMap struct {
+
+ // If not, we can skip any pre-rendering of shortcodes.
+ hasMarkdownShortcode bool
+
+ // Indicates whether we must do placeholder replacements.
+ hasNonMarkdownShortcode bool
+
+ // *shortcode, pageContentReplacement or pageparser.Item
+ items []any
+}
+
+func (p *pageContentMap) AddBytes(item pageparser.Item) {
+ p.items = append(p.items, item)
+}
+
+func (p *pageContentMap) AddReplacement(val []byte, source pageparser.Item) {
+ p.items = append(p.items, pageContentReplacement{val: val, source: source})
+}
+
+func (p *pageContentMap) AddShortcode(s *shortcode) {
+ p.items = append(p.items, s)
+ if s.insertPlaceholder() {
+ p.hasNonMarkdownShortcode = true
+ } else {
+ p.hasMarkdownShortcode = true
+ }
+}
diff --git a/hugolib/page__data.go b/hugolib/page__data.go
new file mode 100644
index 000000000..19b0154ad
--- /dev/null
+++ b/hugolib/page__data.go
@@ -0,0 +1,66 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "sync"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+type pageData struct {
+ *pageState
+
+ dataInit sync.Once
+ data page.Data
+}
+
+func (p *pageData) Data() any {
+ p.dataInit.Do(func() {
+ p.data = make(page.Data)
+
+ if p.Kind() == page.KindPage {
+ return
+ }
+
+ switch p.Kind() {
+ case page.KindTerm:
+ b := p.treeRef.n
+ name := b.viewInfo.name
+ termKey := b.viewInfo.termKey
+
+ taxonomy := p.s.Taxonomies()[name.plural].Get(termKey)
+
+ p.data[name.singular] = taxonomy
+ p.data["Singular"] = name.singular
+ p.data["Plural"] = name.plural
+ p.data["Term"] = b.viewInfo.term()
+ case page.KindTaxonomy:
+ b := p.treeRef.n
+ name := b.viewInfo.name
+
+ p.data["Singular"] = name.singular
+ p.data["Plural"] = name.plural
+ p.data["Terms"] = p.s.Taxonomies()[name.plural]
+ // keep the following just for legacy reasons
+ p.data["OrderedIndex"] = p.data["Terms"]
+ p.data["Index"] = p.data["Terms"]
+ }
+
+ // Assign the function to the map to make sure it is lazily initialized
+ p.data["pages"] = p.Pages
+ })
+
+ return p.data
+}
diff --git a/hugolib/page__menus.go b/hugolib/page__menus.go
new file mode 100644
index 000000000..49d392c2f
--- /dev/null
+++ b/hugolib/page__menus.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "sync"
+
+ "github.com/gohugoio/hugo/navigation"
+)
+
+type pageMenus struct {
+ p *pageState
+
+ q navigation.MenuQueryProvider
+
+ pmInit sync.Once
+ pm navigation.PageMenus
+}
+
+func (p *pageMenus) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+ p.p.s.init.menus.Do()
+ p.init()
+ return p.q.HasMenuCurrent(menuID, me)
+}
+
+func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+ p.p.s.init.menus.Do()
+ p.init()
+ return p.q.IsMenuCurrent(menuID, inme)
+}
+
+func (p *pageMenus) Menus() navigation.PageMenus {
+ // There is a reverse dependency here. initMenus will, once, build the
+ // site menus and update any relevant page.
+ p.p.s.init.menus.Do()
+
+ return p.menus()
+}
+
+func (p *pageMenus) menus() navigation.PageMenus {
+ p.init()
+ return p.pm
+}
+
+func (p *pageMenus) init() {
+ p.pmInit.Do(func() {
+ p.q = navigation.NewMenuQueryProvider(
+ p,
+ p.p.s,
+ p.p,
+ )
+
+ var err error
+ p.pm, err = navigation.PageMenusFromPage(p.p)
+ if err != nil {
+ p.p.s.Log.Errorln(p.p.wrapError(err))
+ }
+ })
+}
diff --git a/hugolib/page__meta.go b/hugolib/page__meta.go
new file mode 100644
index 000000000..daf989f42
--- /dev/null
+++ b/hugolib/page__meta.go
@@ -0,0 +1,830 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gobuffalo/flect"
+ "github.com/gohugoio/hugo/markup/converter"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/gohugoio/hugo/related"
+
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/page/pagemeta"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/cast"
+)
+
+var cjkRe = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
+
+type pageMeta struct {
+ // kind is the discriminator that identifies the different page types
+ // in the different page collections. This can, as an example, be used
+ // to to filter regular pages, find sections etc.
+ // Kind will, for the pages available to the templates, be one of:
+ // page, home, section, taxonomy and term.
+ // It is of string type to make it easy to reason about in
+ // the templates.
+ kind string
+
+ // This is a standalone page not part of any page collection. These
+ // include sitemap, robotsTXT and similar. It will have no pageOutputs, but
+ // a fixed pageOutput.
+ standalone bool
+
+ draft bool // Only published when running with -D flag
+ buildConfig pagemeta.BuildConfig
+
+ bundleType files.ContentClass
+
+ // Params contains configuration defined in the params section of page frontmatter.
+ params map[string]any
+
+ title string
+ linkTitle string
+
+ summary string
+
+ resourcePath string
+
+ weight int
+
+ markup string
+ contentType string
+
+ // whether the content is in a CJK language.
+ isCJKLanguage bool
+
+ layout string
+
+ aliases []string
+
+ description string
+ keywords []string
+
+ urlPaths pagemeta.URLPath
+
+ resource.Dates
+
+ // Set if this page is bundled inside another.
+ bundled bool
+
+ // A key that maps to translation(s) of this page. This value is fetched
+ // from the page front matter.
+ translationKey string
+
+ // From front matter.
+ configuredOutputFormats output.Formats
+
+ // This is the raw front matter metadata that is going to be assigned to
+ // the Resources above.
+ resourcesMetadata []map[string]any
+
+ f source.File
+
+ sections []string
+
+ // Sitemap overrides from front matter.
+ sitemap config.Sitemap
+
+ s *Site
+
+ contentConverterInit sync.Once
+ contentConverter converter.Converter
+}
+
+func (p *pageMeta) Aliases() []string {
+ return p.aliases
+}
+
+func (p *pageMeta) Author() page.Author {
+ helpers.Deprecated(".Author", "Use taxonomies.", false)
+ authors := p.Authors()
+
+ for _, author := range authors {
+ return author
+ }
+ return page.Author{}
+}
+
+func (p *pageMeta) Authors() page.AuthorList {
+ helpers.Deprecated(".Authors", "Use taxonomies.", false)
+ authorKeys, ok := p.params["authors"]
+ if !ok {
+ return page.AuthorList{}
+ }
+ authors := authorKeys.([]string)
+ if len(authors) < 1 || len(p.s.Info.Authors) < 1 {
+ return page.AuthorList{}
+ }
+
+ al := make(page.AuthorList)
+ for _, author := range authors {
+ a, ok := p.s.Info.Authors[author]
+ if ok {
+ al[author] = a
+ }
+ }
+ return al
+}
+
+func (p *pageMeta) BundleType() files.ContentClass {
+ return p.bundleType
+}
+
+func (p *pageMeta) Description() string {
+ return p.description
+}
+
+func (p *pageMeta) Lang() string {
+ return p.s.Lang()
+}
+
+func (p *pageMeta) Draft() bool {
+ return p.draft
+}
+
+func (p *pageMeta) File() source.File {
+ return p.f
+}
+
+func (p *pageMeta) IsHome() bool {
+ return p.Kind() == page.KindHome
+}
+
+func (p *pageMeta) Keywords() []string {
+ return p.keywords
+}
+
+func (p *pageMeta) Kind() string {
+ return p.kind
+}
+
+func (p *pageMeta) Layout() string {
+ return p.layout
+}
+
+func (p *pageMeta) LinkTitle() string {
+ if p.linkTitle != "" {
+ return p.linkTitle
+ }
+
+ return p.Title()
+}
+
+func (p *pageMeta) Name() string {
+ if p.resourcePath != "" {
+ return p.resourcePath
+ }
+ return p.Title()
+}
+
+func (p *pageMeta) IsNode() bool {
+ return !p.IsPage()
+}
+
+func (p *pageMeta) IsPage() bool {
+ return p.Kind() == page.KindPage
+}
+
+// Param is a convenience method to do lookups in Page's and Site's Params map,
+// in that order.
+//
+// This method is also implemented on SiteInfo.
+// TODO(bep) interface
+func (p *pageMeta) Param(key any) (any, error) {
+ return resource.Param(p, p.s.Info.Params(), key)
+}
+
+func (p *pageMeta) Params() maps.Params {
+ return p.params
+}
+
+func (p *pageMeta) Path() string {
+ if !p.File().IsZero() {
+ const example = `
+ {{ $path := "" }}
+ {{ with .File }}
+ {{ $path = .Path }}
+ {{ else }}
+ {{ $path = .Path }}
+ {{ end }}
+`
+ helpers.Deprecated(".Path when the page is backed by a file", "We plan to use Path for a canonical source path and you probably want to check the source is a file. To get the current behaviour, you can use a construct similar to the one below:\n"+example, false)
+
+ }
+
+ return p.Pathc()
+}
+
+// This is just a bridge method, use Path in templates.
+func (p *pageMeta) Pathc() string {
+ if !p.File().IsZero() {
+ return p.File().Path()
+ }
+ return p.SectionsPath()
+}
+
+// RelatedKeywords implements the related.Document interface needed for fast page searches.
+func (p *pageMeta) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+ v, err := p.Param(cfg.Name)
+ if err != nil {
+ return nil, err
+ }
+
+ return cfg.ToKeywords(v)
+}
+
+func (p *pageMeta) IsSection() bool {
+ return p.Kind() == page.KindSection
+}
+
+func (p *pageMeta) Section() string {
+ if p.IsHome() {
+ return ""
+ }
+
+ if p.IsNode() {
+ if len(p.sections) == 0 {
+ // May be a sitemap or similar.
+ return ""
+ }
+ return p.sections[0]
+ }
+
+ if !p.File().IsZero() {
+ return p.File().Section()
+ }
+
+ panic("invalid page state")
+}
+
+func (p *pageMeta) SectionsEntries() []string {
+ return p.sections
+}
+
+func (p *pageMeta) SectionsPath() string {
+ return path.Join(p.SectionsEntries()...)
+}
+
+func (p *pageMeta) Sitemap() config.Sitemap {
+ return p.sitemap
+}
+
+func (p *pageMeta) Title() string {
+ return p.title
+}
+
+const defaultContentType = "page"
+
+func (p *pageMeta) Type() string {
+ if p.contentType != "" {
+ return p.contentType
+ }
+
+ if sect := p.Section(); sect != "" {
+ return sect
+ }
+
+ return defaultContentType
+}
+
+func (p *pageMeta) Weight() int {
+ return p.weight
+}
+
+func (pm *pageMeta) mergeBucketCascades(b1, b2 *pagesMapBucket) {
+ if b1.cascade == nil {
+ b1.cascade = make(map[page.PageMatcher]maps.Params)
+ }
+
+ if b2 != nil && b2.cascade != nil {
+ for k, v := range b2.cascade {
+
+ vv, found := b1.cascade[k]
+ if !found {
+ b1.cascade[k] = v
+ } else {
+ // Merge
+ for ck, cv := range v {
+ if _, found := vv[ck]; !found {
+ vv[ck] = cv
+ }
+ }
+ }
+ }
+ }
+}
+
+func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, frontmatter map[string]any) error {
+ pm.params = make(maps.Params)
+
+ if frontmatter == nil && (parentBucket == nil || parentBucket.cascade == nil) {
+ return nil
+ }
+
+ if frontmatter != nil {
+ // Needed for case insensitive fetching of params values
+ maps.PrepareParams(frontmatter)
+ if p.bucket != nil {
+ // Check for any cascade define on itself.
+ if cv, found := frontmatter["cascade"]; found {
+ var err error
+ p.bucket.cascade, err = page.DecodeCascade(cv)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ } else {
+ frontmatter = make(map[string]any)
+ }
+
+ var cascade map[page.PageMatcher]maps.Params
+
+ if p.bucket != nil {
+ if parentBucket != nil {
+ // Merge missing keys from parent into this.
+ pm.mergeBucketCascades(p.bucket, parentBucket)
+ }
+ cascade = p.bucket.cascade
+ } else if parentBucket != nil {
+ cascade = parentBucket.cascade
+ }
+
+ for m, v := range cascade {
+ if !m.Matches(p) {
+ continue
+ }
+ for kk, vv := range v {
+ if _, found := frontmatter[kk]; !found {
+ frontmatter[kk] = vv
+ }
+ }
+ }
+
+ var mtime time.Time
+ var contentBaseName string
+ if !p.File().IsZero() {
+ contentBaseName = p.File().ContentBaseName()
+ if p.File().FileInfo() != nil {
+ mtime = p.File().FileInfo().ModTime()
+ }
+ }
+
+ var gitAuthorDate time.Time
+ if p.gitInfo != nil {
+ gitAuthorDate = p.gitInfo.AuthorDate
+ }
+
+ descriptor := &pagemeta.FrontMatterDescriptor{
+ Frontmatter: frontmatter,
+ Params: pm.params,
+ Dates: &pm.Dates,
+ PageURLs: &pm.urlPaths,
+ BaseFilename: contentBaseName,
+ ModTime: mtime,
+ GitAuthorDate: gitAuthorDate,
+ Location: langs.GetLocation(pm.s.Language()),
+ }
+
+ // Handle the date separately
+ // TODO(bep) we need to "do more" in this area so this can be split up and
+ // more easily tested without the Page, but the coupling is strong.
+ err := pm.s.frontmatterHandler.HandleDates(descriptor)
+ if err != nil {
+ p.s.Log.Errorf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err)
+ }
+
+ pm.buildConfig, err = pagemeta.DecodeBuildConfig(frontmatter["_build"])
+ if err != nil {
+ return err
+ }
+
+ var sitemapSet bool
+
+ var draft, published, isCJKLanguage *bool
+ for k, v := range frontmatter {
+ loki := strings.ToLower(k)
+
+ if loki == "published" { // Intentionally undocumented
+ vv, err := cast.ToBoolE(v)
+ if err == nil {
+ published = &vv
+ }
+ // published may also be a date
+ continue
+ }
+
+ if pm.s.frontmatterHandler.IsDateKey(loki) {
+ continue
+ }
+
+ switch loki {
+ case "title":
+ pm.title = cast.ToString(v)
+ pm.params[loki] = pm.title
+ case "linktitle":
+ pm.linkTitle = cast.ToString(v)
+ pm.params[loki] = pm.linkTitle
+ case "summary":
+ pm.summary = cast.ToString(v)
+ pm.params[loki] = pm.summary
+ case "description":
+ pm.description = cast.ToString(v)
+ pm.params[loki] = pm.description
+ case "slug":
+ // Don't start or end with a -
+ pm.urlPaths.Slug = strings.Trim(cast.ToString(v), "-")
+ pm.params[loki] = pm.Slug()
+ case "url":
+ url := cast.ToString(v)
+ if strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
+ return fmt.Errorf("URLs with protocol (http*) not supported: %q. In page %q", url, p.pathOrTitle())
+ }
+ lang := p.s.GetLanguagePrefix()
+ if lang != "" && !strings.HasPrefix(url, "/") && strings.HasPrefix(url, lang+"/") {
+ if strings.HasPrefix(hugo.CurrentVersion.String(), "0.55") {
+ // We added support for page relative URLs in Hugo 0.55 and
+ // this may get its language path added twice.
+ // TODO(bep) eventually remove this.
+ p.s.Log.Warnf(`Front matter in %q with the url %q with no leading / has what looks like the language prefix added. In Hugo 0.55 we added support for page relative URLs in front matter, no language prefix needed. Check the URL and consider to either add a leading / or remove the language prefix.`, p.pathOrTitle(), url)
+ }
+ }
+ pm.urlPaths.URL = url
+ pm.params[loki] = url
+ case "type":
+ pm.contentType = cast.ToString(v)
+ pm.params[loki] = pm.contentType
+ case "keywords":
+ pm.keywords = cast.ToStringSlice(v)
+ pm.params[loki] = pm.keywords
+ case "headless":
+ // Legacy setting for leaf bundles.
+ // This is since Hugo 0.63 handled in a more general way for all
+ // pages.
+ isHeadless := cast.ToBool(v)
+ pm.params[loki] = isHeadless
+ if p.File().TranslationBaseName() == "index" && isHeadless {
+ pm.buildConfig.List = pagemeta.Never
+ pm.buildConfig.Render = pagemeta.Never
+ }
+ case "outputs":
+ o := cast.ToStringSlice(v)
+ if len(o) > 0 {
+ // Output formats are explicitly set in front matter, use those.
+ outFormats, err := p.s.outputFormatsConfig.GetByNames(o...)
+
+ if err != nil {
+ p.s.Log.Errorf("Failed to resolve output formats: %s", err)
+ } else {
+ pm.configuredOutputFormats = outFormats
+ pm.params[loki] = outFormats
+ }
+
+ }
+ case "draft":
+ draft = new(bool)
+ *draft = cast.ToBool(v)
+ case "layout":
+ pm.layout = cast.ToString(v)
+ pm.params[loki] = pm.layout
+ case "markup":
+ pm.markup = cast.ToString(v)
+ pm.params[loki] = pm.markup
+ case "weight":
+ pm.weight = cast.ToInt(v)
+ pm.params[loki] = pm.weight
+ case "aliases":
+ pm.aliases = cast.ToStringSlice(v)
+ for i, alias := range pm.aliases {
+ if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
+ return fmt.Errorf("http* aliases not supported: %q", alias)
+ }
+ pm.aliases[i] = filepath.ToSlash(alias)
+ }
+ pm.params[loki] = pm.aliases
+ case "sitemap":
+ p.m.sitemap = config.DecodeSitemap(p.s.siteCfg.sitemap, maps.ToStringMap(v))
+ pm.params[loki] = p.m.sitemap
+ sitemapSet = true
+ case "iscjklanguage":
+ isCJKLanguage = new(bool)
+ *isCJKLanguage = cast.ToBool(v)
+ case "translationkey":
+ pm.translationKey = cast.ToString(v)
+ pm.params[loki] = pm.translationKey
+ case "resources":
+ var resources []map[string]any
+ handled := true
+
+ switch vv := v.(type) {
+ case []map[any]any:
+ for _, vvv := range vv {
+ resources = append(resources, maps.ToStringMap(vvv))
+ }
+ case []map[string]any:
+ resources = append(resources, vv...)
+ case []any:
+ for _, vvv := range vv {
+ switch vvvv := vvv.(type) {
+ case map[any]any:
+ resources = append(resources, maps.ToStringMap(vvvv))
+ case map[string]any:
+ resources = append(resources, vvvv)
+ }
+ }
+ default:
+ handled = false
+ }
+
+ if handled {
+ pm.params[loki] = resources
+ pm.resourcesMetadata = resources
+ break
+ }
+ fallthrough
+
+ default:
+ // If not one of the explicit values, store in Params
+ switch vv := v.(type) {
+ case bool:
+ pm.params[loki] = vv
+ case string:
+ pm.params[loki] = vv
+ case int64, int32, int16, int8, int:
+ pm.params[loki] = vv
+ case float64, float32:
+ pm.params[loki] = vv
+ case time.Time:
+ pm.params[loki] = vv
+ default: // handle array of strings as well
+ switch vvv := vv.(type) {
+ case []any:
+ if len(vvv) > 0 {
+ switch vvv[0].(type) {
+ case map[any]any:
+ pm.params[loki] = vvv
+ case map[string]any:
+ pm.params[loki] = vvv
+ case []any:
+ pm.params[loki] = vvv
+ default:
+ a := make([]string, len(vvv))
+ for i, u := range vvv {
+ a[i] = cast.ToString(u)
+ }
+
+ pm.params[loki] = a
+ }
+ } else {
+ pm.params[loki] = []string{}
+ }
+ default:
+ pm.params[loki] = vv
+ }
+ }
+ }
+ }
+
+ if !sitemapSet {
+ pm.sitemap = p.s.siteCfg.sitemap
+ }
+
+ pm.markup = p.s.ContentSpec.ResolveMarkup(pm.markup)
+
+ if draft != nil && published != nil {
+ pm.draft = *draft
+ p.m.s.Log.Warnf("page %q has both draft and published settings in its frontmatter. Using draft.", p.File().Filename())
+ } else if draft != nil {
+ pm.draft = *draft
+ } else if published != nil {
+ pm.draft = !*published
+ }
+ pm.params["draft"] = pm.draft
+
+ if isCJKLanguage != nil {
+ pm.isCJKLanguage = *isCJKLanguage
+ } else if p.s.siteCfg.hasCJKLanguage && p.source.parsed != nil {
+ if cjkRe.Match(p.source.parsed.Input()) {
+ pm.isCJKLanguage = true
+ } else {
+ pm.isCJKLanguage = false
+ }
+ }
+
+ pm.params["iscjklanguage"] = p.m.isCJKLanguage
+
+ return nil
+}
+
+func (p *pageMeta) noListAlways() bool {
+ return p.buildConfig.List != pagemeta.Always
+}
+
+func (p *pageMeta) getListFilter(local bool) contentTreeNodeCallback {
+ return newContentTreeFilter(func(n *contentNode) bool {
+ if n == nil {
+ return true
+ }
+
+ var shouldList bool
+ switch n.p.m.buildConfig.List {
+ case pagemeta.Always:
+ shouldList = true
+ case pagemeta.Never:
+ shouldList = false
+ case pagemeta.ListLocally:
+ shouldList = local
+ }
+
+ return !shouldList
+ })
+}
+
+func (p *pageMeta) noRender() bool {
+ return p.buildConfig.Render != pagemeta.Always
+}
+
+func (p *pageMeta) noLink() bool {
+ return p.buildConfig.Render == pagemeta.Never
+}
+
+func (p *pageMeta) applyDefaultValues(n *contentNode) error {
+ if p.buildConfig.IsZero() {
+ p.buildConfig, _ = pagemeta.DecodeBuildConfig(nil)
+ }
+
+ if !p.s.isEnabled(p.Kind()) {
+ (&p.buildConfig).Disable()
+ }
+
+ if p.markup == "" {
+ if !p.File().IsZero() {
+ // Fall back to file extension
+ p.markup = p.s.ContentSpec.ResolveMarkup(p.File().Ext())
+ }
+ if p.markup == "" {
+ p.markup = "markdown"
+ }
+ }
+
+ if p.title == "" && p.f.IsZero() {
+ switch p.Kind() {
+ case page.KindHome:
+ p.title = p.s.Info.title
+ case page.KindSection:
+ var sectionName string
+ if n != nil {
+ sectionName = n.rootSection()
+ } else {
+ sectionName = p.sections[0]
+ }
+
+ sectionName = helpers.FirstUpper(sectionName)
+ if p.s.Cfg.GetBool("pluralizeListTitles") {
+ p.title = flect.Pluralize(sectionName)
+ } else {
+ p.title = sectionName
+ }
+ case page.KindTerm:
+ // TODO(bep) improve
+ key := p.sections[len(p.sections)-1]
+ p.title = strings.Replace(p.s.titleFunc(key), "-", " ", -1)
+ case page.KindTaxonomy:
+ p.title = p.s.titleFunc(p.sections[0])
+ case kind404:
+ p.title = "404 Page not found"
+
+ }
+ }
+
+ if p.IsNode() {
+ p.bundleType = files.ContentClassBranch
+ } else {
+ source := p.File()
+ if fi, ok := source.(*fileInfo); ok {
+ class := fi.FileInfo().Meta().Classifier
+ switch class {
+ case files.ContentClassBranch, files.ContentClassLeaf:
+ p.bundleType = class
+ }
+ }
+ }
+
+ return nil
+}
+
+func (p *pageMeta) newContentConverter(ps *pageState, markup string) (converter.Converter, error) {
+ if ps == nil {
+ panic("no Page provided")
+ }
+ cp := p.s.ContentSpec.Converters.Get(markup)
+ if cp == nil {
+ return converter.NopConverter, fmt.Errorf("no content renderer found for markup %q", p.markup)
+ }
+
+ var id string
+ var filename string
+ var path string
+ if !p.f.IsZero() {
+ id = p.f.UniqueID()
+ filename = p.f.Filename()
+ path = p.f.Path()
+ } else {
+ path = p.Pathc()
+ }
+
+ cpp, err := cp.New(
+ converter.DocumentContext{
+ Document: newPageForRenderHook(ps),
+ DocumentID: id,
+ DocumentName: path,
+ Filename: filename,
+ },
+ )
+ if err != nil {
+ return converter.NopConverter, err
+ }
+
+ return cpp, nil
+}
+
+// The output formats this page will be rendered to.
+func (m *pageMeta) outputFormats() output.Formats {
+ if len(m.configuredOutputFormats) > 0 {
+ return m.configuredOutputFormats
+ }
+
+ return m.s.outputFormats[m.Kind()]
+}
+
+func (p *pageMeta) Slug() string {
+ return p.urlPaths.Slug
+}
+
+func getParam(m resource.ResourceParamsProvider, key string, stringToLower bool) any {
+ v := m.Params()[strings.ToLower(key)]
+
+ if v == nil {
+ return nil
+ }
+
+ switch val := v.(type) {
+ case bool:
+ return val
+ case string:
+ if stringToLower {
+ return strings.ToLower(val)
+ }
+ return val
+ case int64, int32, int16, int8, int:
+ return cast.ToInt(v)
+ case float64, float32:
+ return cast.ToFloat64(v)
+ case time.Time:
+ return val
+ case []string:
+ if stringToLower {
+ return helpers.SliceToLower(val)
+ }
+ return v
+ default:
+ return v
+ }
+}
+
+func getParamToLower(m resource.ResourceParamsProvider, key string) any {
+ return getParam(m, key, true)
+}
diff --git a/hugolib/page__new.go b/hugolib/page__new.go
new file mode 100644
index 000000000..e52b9476b
--- /dev/null
+++ b/hugolib/page__new.go
@@ -0,0 +1,213 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "html/template"
+ "strings"
+
+ "go.uber.org/atomic"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/output"
+
+ "github.com/gohugoio/hugo/lazy"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func newPageBase(metaProvider *pageMeta) (*pageState, error) {
+ if metaProvider.s == nil {
+ panic("must provide a Site")
+ }
+
+ s := metaProvider.s
+
+ ps := &pageState{
+ pageOutput: nopPageOutput,
+ pageOutputTemplateVariationsState: atomic.NewUint32(0),
+ pageCommon: &pageCommon{
+ FileProvider: metaProvider,
+ AuthorProvider: metaProvider,
+ Scratcher: maps.NewScratcher(),
+ store: maps.NewScratch(),
+ Positioner: page.NopPage,
+ InSectionPositioner: page.NopPage,
+ ResourceMetaProvider: metaProvider,
+ ResourceParamsProvider: metaProvider,
+ PageMetaProvider: metaProvider,
+ RelatedKeywordsProvider: metaProvider,
+ OutputFormatsProvider: page.NopPage,
+ ResourceTypeProvider: pageTypesProvider,
+ MediaTypeProvider: pageTypesProvider,
+ RefProvider: page.NopPage,
+ ShortcodeInfoProvider: page.NopPage,
+ LanguageProvider: s,
+ pagePages: &pagePages{},
+
+ InternalDependencies: s,
+ init: lazy.New(),
+ m: metaProvider,
+ s: s,
+ },
+ }
+
+ ps.shortcodeState = newShortcodeHandler(ps, ps.s)
+
+ siteAdapter := pageSiteAdapter{s: s, p: ps}
+
+ ps.pageMenus = &pageMenus{p: ps}
+ ps.PageMenusProvider = ps.pageMenus
+ ps.GetPageProvider = siteAdapter
+ ps.GitInfoProvider = ps
+ ps.TranslationsProvider = ps
+ ps.ResourceDataProvider = &pageData{pageState: ps}
+ ps.RawContentProvider = ps
+ ps.ChildCareProvider = ps
+ ps.TreeProvider = pageTree{p: ps}
+ ps.Eqer = ps
+ ps.TranslationKeyProvider = ps
+ ps.ShortcodeInfoProvider = ps
+ ps.AlternativeOutputFormatsProvider = ps
+
+ return ps, nil
+}
+
+func newPageBucket(p *pageState) *pagesMapBucket {
+ return &pagesMapBucket{owner: p, pagesMapBucketPages: &pagesMapBucketPages{}}
+}
+
+func newPageFromMeta(
+ n *contentNode,
+ parentBucket *pagesMapBucket,
+ meta map[string]any,
+ metaProvider *pageMeta) (*pageState, error) {
+ if metaProvider.f == nil {
+ metaProvider.f = page.NewZeroFile(metaProvider.s.LogDistinct)
+ }
+
+ ps, err := newPageBase(metaProvider)
+ if err != nil {
+ return nil, err
+ }
+
+ bucket := parentBucket
+
+ if ps.IsNode() {
+ ps.bucket = newPageBucket(ps)
+ }
+
+ if meta != nil || parentBucket != nil {
+ if err := metaProvider.setMetadata(bucket, ps, meta); err != nil {
+ return nil, ps.wrapError(err)
+ }
+ }
+
+ if err := metaProvider.applyDefaultValues(n); err != nil {
+ return nil, err
+ }
+
+ ps.init.Add(func() (any, error) {
+ pp, err := newPagePaths(metaProvider.s, ps, metaProvider)
+ if err != nil {
+ return nil, err
+ }
+
+ makeOut := func(f output.Format, render bool) *pageOutput {
+ return newPageOutput(ps, pp, f, render)
+ }
+
+ shouldRenderPage := !ps.m.noRender()
+
+ if ps.m.standalone {
+ ps.pageOutput = makeOut(ps.m.outputFormats()[0], shouldRenderPage)
+ } else {
+ outputFormatsForPage := ps.m.outputFormats()
+
+ // Prepare output formats for all sites.
+ // We do this even if this page does not get rendered on
+ // its own. It may be referenced via .Site.GetPage and
+ // it will then need an output format.
+ ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
+ created := make(map[string]*pageOutput)
+ for i, f := range ps.s.h.renderFormats {
+ po, found := created[f.Name]
+ if !found {
+ render := shouldRenderPage
+ if render {
+ _, render = outputFormatsForPage.GetByName(f.Name)
+ }
+ po = makeOut(f, render)
+ created[f.Name] = po
+ }
+ ps.pageOutputs[i] = po
+ }
+ }
+
+ if err := ps.initCommonProviders(pp); err != nil {
+ return nil, err
+ }
+
+ return nil, nil
+ })
+
+ return ps, err
+}
+
+// Used by the legacy 404, sitemap and robots.txt rendering
+func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) {
+ m.configuredOutputFormats = output.Formats{f}
+ m.standalone = true
+ p, err := newPageFromMeta(nil, nil, nil, m)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := p.initPage(); err != nil {
+ return nil, err
+ }
+
+ return p, nil
+}
+
+type pageDeprecatedWarning struct {
+ p *pageState
+}
+
+func (p *pageDeprecatedWarning) IsDraft() bool { return p.p.m.draft }
+func (p *pageDeprecatedWarning) Hugo() hugo.Info { return p.p.s.Info.Hugo() }
+func (p *pageDeprecatedWarning) LanguagePrefix() string { return p.p.s.Info.LanguagePrefix }
+func (p *pageDeprecatedWarning) GetParam(key string) any {
+ return p.p.m.params[strings.ToLower(key)]
+}
+
+func (p *pageDeprecatedWarning) RSSLink() template.URL {
+ f := p.p.OutputFormats().Get("RSS")
+ if f == nil {
+ return ""
+ }
+ return template.URL(f.Permalink())
+}
+
+func (p *pageDeprecatedWarning) URL() string {
+ if p.p.IsPage() && p.p.m.urlPaths.URL != "" {
+ // This is the url set in front matter
+ return p.p.m.urlPaths.URL
+ }
+ // Fall back to the relative permalink.
+ return p.p.RelPermalink()
+}
diff --git a/hugolib/page__output.go b/hugolib/page__output.go
new file mode 100644
index 000000000..413323477
--- /dev/null
+++ b/hugolib/page__output.go
@@ -0,0 +1,107 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+func newPageOutput(
+ ps *pageState,
+ pp pagePaths,
+ f output.Format,
+ render bool) *pageOutput {
+ var targetPathsProvider targetPathsHolder
+ var linksProvider resource.ResourceLinksProvider
+
+ ft, found := pp.targetPaths[f.Name]
+ if !found {
+ // Link to the main output format
+ ft = pp.targetPaths[pp.firstOutputFormat.Format.Name]
+ }
+ targetPathsProvider = ft
+ linksProvider = ft
+
+ var paginatorProvider page.PaginatorProvider = page.NopPage
+ var pag *pagePaginator
+
+ if render && ps.IsNode() {
+ pag = newPagePaginator(ps)
+ paginatorProvider = pag
+ }
+
+ providers := struct {
+ page.PaginatorProvider
+ resource.ResourceLinksProvider
+ targetPather
+ }{
+ paginatorProvider,
+ linksProvider,
+ targetPathsProvider,
+ }
+
+ po := &pageOutput{
+ f: f,
+ pagePerOutputProviders: providers,
+ ContentProvider: page.NopPage,
+ TableOfContentsProvider: page.NopPage,
+ PageRenderProvider: page.NopPage,
+ render: render,
+ paginator: pag,
+ }
+
+ return po
+}
+
+// We create a pageOutput for every output format combination, even if this
+// particular page isn't configured to be rendered to that format.
+type pageOutput struct {
+ // Set if this page is configured to be rendered to this format.
+ render bool
+
+ f output.Format
+
+ // Only set if render is set.
+ // Note that this will be lazily initialized, so only used if actually
+ // used in template(s).
+ paginator *pagePaginator
+
+ // These interfaces provide the functionality that is specific to this
+ // output format.
+ pagePerOutputProviders
+ page.ContentProvider
+ page.TableOfContentsProvider
+ page.PageRenderProvider
+
+ // May be nil.
+ cp *pageContentOutput
+}
+
+func (p *pageOutput) initContentProvider(cp *pageContentOutput) {
+ if cp == nil {
+ return
+ }
+ p.ContentProvider = cp
+ p.TableOfContentsProvider = cp
+ p.PageRenderProvider = cp
+ p.cp = cp
+}
+
+func (p *pageOutput) enablePlaceholders() {
+ if p.cp != nil {
+ p.cp.enablePlaceholders()
+ }
+}
diff --git a/hugolib/page__paginator.go b/hugolib/page__paginator.go
new file mode 100644
index 000000000..709f0e9ea
--- /dev/null
+++ b/hugolib/page__paginator.go
@@ -0,0 +1,111 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "sync"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func newPagePaginator(source *pageState) *pagePaginator {
+ return &pagePaginator{
+ source: source,
+ pagePaginatorInit: &pagePaginatorInit{},
+ }
+}
+
+type pagePaginator struct {
+ *pagePaginatorInit
+ source *pageState
+}
+
+type pagePaginatorInit struct {
+ init sync.Once
+ current *page.Pager
+}
+
+// reset resets the paginator to allow for a rebuild.
+func (p *pagePaginator) reset() {
+ p.pagePaginatorInit = &pagePaginatorInit{}
+}
+
+func (p *pagePaginator) Paginate(seq any, options ...any) (*page.Pager, error) {
+ var initErr error
+ p.init.Do(func() {
+ pagerSize, err := page.ResolvePagerSize(p.source.s.Cfg, options...)
+ if err != nil {
+ initErr = err
+ return
+ }
+
+ pd := p.source.targetPathDescriptor
+ pd.Type = p.source.outputFormat()
+ paginator, err := page.Paginate(pd, seq, pagerSize)
+ if err != nil {
+ initErr = err
+ return
+ }
+
+ p.current = paginator.Pagers()[0]
+ })
+
+ if initErr != nil {
+ return nil, initErr
+ }
+
+ return p.current, nil
+}
+
+func (p *pagePaginator) Paginator(options ...any) (*page.Pager, error) {
+ var initErr error
+ p.init.Do(func() {
+ pagerSize, err := page.ResolvePagerSize(p.source.s.Cfg, options...)
+ if err != nil {
+ initErr = err
+ return
+ }
+
+ pd := p.source.targetPathDescriptor
+ pd.Type = p.source.outputFormat()
+
+ var pages page.Pages
+
+ switch p.source.Kind() {
+ case page.KindHome:
+ // From Hugo 0.57 we made home.Pages() work like any other
+ // section. To avoid the default paginators for the home page
+ // changing in the wild, we make this a special case.
+ pages = p.source.s.RegularPages()
+ case page.KindTerm, page.KindTaxonomy:
+ pages = p.source.Pages()
+ default:
+ pages = p.source.RegularPages()
+ }
+
+ paginator, err := page.Paginate(pd, pages, pagerSize)
+ if err != nil {
+ initErr = err
+ return
+ }
+
+ p.current = paginator.Pagers()[0]
+ })
+
+ if initErr != nil {
+ return nil, initErr
+ }
+
+ return p.current, nil
+}
diff --git a/hugolib/page__paths.go b/hugolib/page__paths.go
new file mode 100644
index 000000000..947cdde9d
--- /dev/null
+++ b/hugolib/page__paths.go
@@ -0,0 +1,165 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "net/url"
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func newPagePaths(
+ s *Site,
+ p page.Page,
+ pm *pageMeta) (pagePaths, error) {
+ targetPathDescriptor, err := createTargetPathDescriptor(s, p, pm)
+ if err != nil {
+ return pagePaths{}, err
+ }
+
+ outputFormats := pm.outputFormats()
+ if len(outputFormats) == 0 {
+ return pagePaths{}, nil
+ }
+
+ if pm.noRender() {
+ outputFormats = outputFormats[:1]
+ }
+
+ pageOutputFormats := make(page.OutputFormats, len(outputFormats))
+ targets := make(map[string]targetPathsHolder)
+
+ for i, f := range outputFormats {
+ desc := targetPathDescriptor
+ desc.Type = f
+ paths := page.CreateTargetPaths(desc)
+
+ var relPermalink, permalink string
+
+ // If a page is headless or bundled in another,
+ // it will not get published on its own and it will have no links.
+ // We also check the build options if it's set to not render or have
+ // a link.
+ if !pm.noLink() && !pm.bundled {
+ relPermalink = paths.RelPermalink(s.PathSpec)
+ permalink = paths.PermalinkForOutputFormat(s.PathSpec, f)
+ }
+
+ pageOutputFormats[i] = page.NewOutputFormat(relPermalink, permalink, len(outputFormats) == 1, f)
+
+ // Use the main format for permalinks, usually HTML.
+ permalinksIndex := 0
+ if f.Permalinkable {
+ // Unless it's permalinkable
+ permalinksIndex = i
+ }
+
+ targets[f.Name] = targetPathsHolder{
+ paths: paths,
+ OutputFormat: pageOutputFormats[permalinksIndex],
+ }
+
+ }
+
+ var out page.OutputFormats
+ if !pm.noLink() {
+ out = pageOutputFormats
+ }
+
+ return pagePaths{
+ outputFormats: out,
+ firstOutputFormat: pageOutputFormats[0],
+ targetPaths: targets,
+ targetPathDescriptor: targetPathDescriptor,
+ }, nil
+}
+
+type pagePaths struct {
+ outputFormats page.OutputFormats
+ firstOutputFormat page.OutputFormat
+
+ targetPaths map[string]targetPathsHolder
+ targetPathDescriptor page.TargetPathDescriptor
+}
+
+func (l pagePaths) OutputFormats() page.OutputFormats {
+ return l.outputFormats
+}
+
+func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.TargetPathDescriptor, error) {
+ var (
+ dir string
+ baseName string
+ contentBaseName string
+ )
+
+ d := s.Deps
+
+ if !p.File().IsZero() {
+ dir = p.File().Dir()
+ baseName = p.File().TranslationBaseName()
+ contentBaseName = p.File().ContentBaseName()
+ }
+
+ if baseName != contentBaseName {
+ // See https://github.com/gohugoio/hugo/issues/4870
+ // A leaf bundle
+ dir = strings.TrimSuffix(dir, contentBaseName+helpers.FilePathSeparator)
+ baseName = contentBaseName
+ }
+
+ alwaysInSubDir := p.Kind() == kindSitemap
+
+ desc := page.TargetPathDescriptor{
+ PathSpec: d.PathSpec,
+ Kind: p.Kind(),
+ Sections: p.SectionsEntries(),
+ UglyURLs: s.Info.uglyURLs(p),
+ ForcePrefix: s.h.IsMultihost() || alwaysInSubDir,
+ Dir: dir,
+ URL: pm.urlPaths.URL,
+ }
+
+ if pm.Slug() != "" {
+ desc.BaseName = pm.Slug()
+ } else {
+ desc.BaseName = baseName
+ }
+
+ desc.PrefixFilePath = s.getLanguageTargetPathLang(alwaysInSubDir)
+ desc.PrefixLink = s.getLanguagePermalinkLang(alwaysInSubDir)
+
+ // Expand only page.KindPage and page.KindTerm; don't expand other Kinds of Pages
+ // like page.KindSection or page.KindTaxonomy because they are "shallower" and
+ // the permalink configuration values are likely to be redundant, e.g.
+ // naively expanding /category/:slug/ would give /category/categories/ for
+ // the "categories" page.KindTaxonomy.
+ if p.Kind() == page.KindPage || p.Kind() == page.KindTerm {
+ opath, err := d.ResourceSpec.Permalinks.Expand(p.Section(), p)
+ if err != nil {
+ return desc, err
+ }
+
+ if opath != "" {
+ opath, _ = url.QueryUnescape(opath)
+ desc.ExpandedPermalink = opath
+ }
+
+ }
+
+ return desc, nil
+}
diff --git a/hugolib/page__per_output.go b/hugolib/page__per_output.go
new file mode 100644
index 000000000..de70047c9
--- /dev/null
+++ b/hugolib/page__per_output.go
@@ -0,0 +1,783 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "html/template"
+ "runtime/debug"
+ "strings"
+ "sync"
+ "unicode/utf8"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/common/types/hstring"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/parser/pageparser"
+ "github.com/mitchellh/mapstructure"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/gohugoio/hugo/lazy"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+ nopTargetPath = targetPathsHolder{}
+ nopPagePerOutput = struct {
+ resource.ResourceLinksProvider
+ page.ContentProvider
+ page.PageRenderProvider
+ page.PaginatorProvider
+ page.TableOfContentsProvider
+ page.AlternativeOutputFormatsProvider
+
+ targetPather
+ }{
+ page.NopPage,
+ page.NopPage,
+ page.NopPage,
+ page.NopPage,
+ page.NopPage,
+ page.NopPage,
+ nopTargetPath,
+ }
+)
+
+var pageContentOutputDependenciesID = identity.KeyValueIdentity{Key: "pageOutput", Value: "dependencies"}
+
+func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, error) {
+ parent := p.init
+
+ var dependencyTracker identity.Manager
+ if p.s.running() {
+ dependencyTracker = identity.NewManager(pageContentOutputDependenciesID)
+ }
+
+ cp := &pageContentOutput{
+ dependencyTracker: dependencyTracker,
+ p: p,
+ f: po.f,
+ renderHooks: &renderHooks{},
+ }
+
+ initContent := func() (err error) {
+ p.s.h.IncrContentRender()
+
+ if p.cmap == nil {
+ // Nothing to do.
+ return nil
+ }
+ defer func() {
+ // See https://github.com/gohugoio/hugo/issues/6210
+ if r := recover(); r != nil {
+ err = fmt.Errorf("%s", r)
+ p.s.Log.Errorf("[BUG] Got panic:\n%s\n%s", r, string(debug.Stack()))
+ }
+ }()
+
+ if err := po.cp.initRenderHooks(); err != nil {
+ return err
+ }
+
+ var hasShortcodeVariants bool
+
+ f := po.f
+ cp.contentPlaceholders, hasShortcodeVariants, err = p.shortcodeState.renderShortcodesForPage(p, f)
+ if err != nil {
+ return err
+ }
+
+ if hasShortcodeVariants {
+ p.pageOutputTemplateVariationsState.Store(2)
+ }
+
+ cp.workContent = p.contentToRender(p.source.parsed, p.cmap, cp.contentPlaceholders)
+
+ isHTML := cp.p.m.markup == "html"
+
+ if !isHTML {
+ r, err := cp.renderContent(cp.workContent, true)
+ if err != nil {
+ return err
+ }
+
+ cp.workContent = r.Bytes()
+
+ if tocProvider, ok := r.(converter.TableOfContentsProvider); ok {
+ cfg := p.s.ContentSpec.Converters.GetMarkupConfig()
+ cp.tableOfContents = template.HTML(
+ tocProvider.TableOfContents().ToHTML(
+ cfg.TableOfContents.StartLevel,
+ cfg.TableOfContents.EndLevel,
+ cfg.TableOfContents.Ordered,
+ ),
+ )
+ } else {
+ tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent)
+ cp.tableOfContents = helpers.BytesToHTML(tmpTableOfContents)
+ cp.workContent = tmpContent
+ }
+ }
+
+ if cp.placeholdersEnabled {
+ // ToC was accessed via .Page.TableOfContents in the shortcode,
+ // at a time when the ToC wasn't ready.
+ cp.contentPlaceholders[tocShortcodePlaceholder] = string(cp.tableOfContents)
+ }
+
+ if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled {
+ // There are one or more replacement tokens to be replaced.
+ cp.workContent, err = replaceShortcodeTokens(cp.workContent, cp.contentPlaceholders)
+ if err != nil {
+ return err
+ }
+ }
+
+ if cp.p.source.hasSummaryDivider {
+ if isHTML {
+ src := p.source.parsed.Input()
+
+ // Use the summary sections as they are provided by the user.
+ if p.source.posSummaryEnd != -1 {
+ cp.summary = helpers.BytesToHTML(src[p.source.posMainContent:p.source.posSummaryEnd])
+ }
+
+ if cp.p.source.posBodyStart != -1 {
+ cp.workContent = src[cp.p.source.posBodyStart:]
+ }
+
+ } else {
+ summary, content, err := splitUserDefinedSummaryAndContent(cp.p.m.markup, cp.workContent)
+ if err != nil {
+ cp.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.p.pathOrTitle(), err)
+ } else {
+ cp.workContent = content
+ cp.summary = helpers.BytesToHTML(summary)
+ }
+ }
+ } else if cp.p.m.summary != "" {
+ b, err := cp.renderContent([]byte(cp.p.m.summary), false)
+ if err != nil {
+ return err
+ }
+ html := cp.p.s.ContentSpec.TrimShortHTML(b.Bytes())
+ cp.summary = helpers.BytesToHTML(html)
+ }
+
+ cp.content = helpers.BytesToHTML(cp.workContent)
+
+ return nil
+ }
+
+ // There may be recursive loops in shortcodes and render hooks.
+ cp.initMain = parent.BranchWithTimeout(p.s.siteCfg.timeout, func(ctx context.Context) (any, error) {
+ return nil, initContent()
+ })
+
+ cp.initPlain = cp.initMain.Branch(func() (any, error) {
+ cp.plain = tpl.StripHTML(string(cp.content))
+ cp.plainWords = strings.Fields(cp.plain)
+ cp.setWordCounts(p.m.isCJKLanguage)
+
+ if err := cp.setAutoSummary(); err != nil {
+ return err, nil
+ }
+
+ return nil, nil
+ })
+
+ return cp, nil
+}
+
+type renderHooks struct {
+ getRenderer hooks.GetRendererFunc
+ init sync.Once
+}
+
+// pageContentOutput represents the Page content for a given output format.
+type pageContentOutput struct {
+ f output.Format
+
+ p *pageState
+
+ // Lazy load dependencies
+ initMain *lazy.Init
+ initPlain *lazy.Init
+
+ placeholdersEnabled bool
+ placeholdersEnabledInit sync.Once
+
+ // Renders Markdown hooks.
+ renderHooks *renderHooks
+
+ workContent []byte
+ dependencyTracker identity.Manager // Set in server mode.
+
+ // Temporary storage of placeholders mapped to their content.
+ // These are shortcodes etc. Some of these will need to be replaced
+ // after any markup is rendered, so they share a common prefix.
+ contentPlaceholders map[string]string
+
+ // Content sections
+ content template.HTML
+ summary template.HTML
+ tableOfContents template.HTML
+
+ truncated bool
+
+ plainWords []string
+ plain string
+ fuzzyWordCount int
+ wordCount int
+ readingTime int
+}
+
+func (p *pageContentOutput) trackDependency(id identity.Provider) {
+ if p.dependencyTracker != nil {
+ p.dependencyTracker.Add(id)
+ }
+}
+
+func (p *pageContentOutput) Reset() {
+ if p.dependencyTracker != nil {
+ p.dependencyTracker.Reset()
+ }
+ p.initMain.Reset()
+ p.initPlain.Reset()
+ p.renderHooks = &renderHooks{}
+}
+
+func (p *pageContentOutput) Content() (any, error) {
+ if p.p.s.initInit(p.initMain, p.p) {
+ return p.content, nil
+ }
+ return nil, nil
+}
+
+func (p *pageContentOutput) FuzzyWordCount() int {
+ p.p.s.initInit(p.initPlain, p.p)
+ return p.fuzzyWordCount
+}
+
+func (p *pageContentOutput) Len() int {
+ p.p.s.initInit(p.initMain, p.p)
+ return len(p.content)
+}
+
+func (p *pageContentOutput) Plain() string {
+ p.p.s.initInit(p.initPlain, p.p)
+ return p.plain
+}
+
+func (p *pageContentOutput) PlainWords() []string {
+ p.p.s.initInit(p.initPlain, p.p)
+ return p.plainWords
+}
+
+func (p *pageContentOutput) ReadingTime() int {
+ p.p.s.initInit(p.initPlain, p.p)
+ return p.readingTime
+}
+
+func (p *pageContentOutput) Summary() template.HTML {
+ p.p.s.initInit(p.initMain, p.p)
+ if !p.p.source.hasSummaryDivider {
+ p.p.s.initInit(p.initPlain, p.p)
+ }
+ return p.summary
+}
+
+func (p *pageContentOutput) TableOfContents() template.HTML {
+ p.p.s.initInit(p.initMain, p.p)
+ return p.tableOfContents
+}
+
+func (p *pageContentOutput) Truncated() bool {
+ if p.p.truncated {
+ return true
+ }
+ p.p.s.initInit(p.initPlain, p.p)
+ return p.truncated
+}
+
+func (p *pageContentOutput) WordCount() int {
+ p.p.s.initInit(p.initPlain, p.p)
+ return p.wordCount
+}
+
+func (p *pageContentOutput) RenderString(args ...any) (template.HTML, error) {
+ if len(args) < 1 || len(args) > 2 {
+ return "", errors.New("want 1 or 2 arguments")
+ }
+
+ var contentToRender string
+ opts := defaultRenderStringOpts
+ sidx := 1
+
+ if len(args) == 1 {
+ sidx = 0
+ } else {
+ m, ok := args[0].(map[string]any)
+ if !ok {
+ return "", errors.New("first argument must be a map")
+ }
+
+ if err := mapstructure.WeakDecode(m, &opts); err != nil {
+ return "", fmt.Errorf("failed to decode options: %w", err)
+ }
+ }
+
+ contentToRenderv := args[sidx]
+
+ if _, ok := contentToRenderv.(hstring.RenderedString); ok {
+ // This content is already rendered, this is potentially
+ // an infinite recursion.
+ return "", errors.New("text is already rendered, repeating it may cause infinite recursion")
+ }
+
+ var err error
+ contentToRender, err = cast.ToStringE(contentToRenderv)
+ if err != nil {
+ return "", err
+ }
+
+ if err = p.initRenderHooks(); err != nil {
+ return "", err
+ }
+
+ conv := p.p.getContentConverter()
+ if opts.Markup != "" && opts.Markup != p.p.m.markup {
+ var err error
+ // TODO(bep) consider cache
+ conv, err = p.p.m.newContentConverter(p.p, opts.Markup)
+ if err != nil {
+ return "", p.p.wrapError(err)
+ }
+ }
+
+ var rendered []byte
+
+ if strings.Contains(contentToRender, "{{") {
+ // Probably a shortcode.
+ parsed, err := pageparser.ParseMain(strings.NewReader(contentToRender), pageparser.Config{})
+ if err != nil {
+ return "", err
+ }
+ pm := &pageContentMap{
+ items: make([]any, 0, 20),
+ }
+ s := newShortcodeHandler(p.p, p.p.s)
+
+ if err := p.p.mapContentForResult(
+ parsed,
+ s,
+ pm,
+ opts.Markup,
+ nil,
+ ); err != nil {
+ return "", err
+ }
+
+ placeholders, hasShortcodeVariants, err := s.renderShortcodesForPage(p.p, p.f)
+ if err != nil {
+ return "", err
+ }
+
+ if hasShortcodeVariants {
+ p.p.pageOutputTemplateVariationsState.Store(2)
+ }
+
+ b, err := p.renderContentWithConverter(conv, p.p.contentToRender(parsed, pm, placeholders), false)
+ if err != nil {
+ return "", p.p.wrapError(err)
+ }
+ rendered = b.Bytes()
+
+ if p.placeholdersEnabled {
+ // ToC was accessed via .Page.TableOfContents in the shortcode,
+ // at a time when the ToC wasn't ready.
+ if _, err := p.p.Content(); err != nil {
+ return "", err
+ }
+ placeholders[tocShortcodePlaceholder] = string(p.tableOfContents)
+ }
+
+ if pm.hasNonMarkdownShortcode || p.placeholdersEnabled {
+ rendered, err = replaceShortcodeTokens(rendered, placeholders)
+ if err != nil {
+ return "", err
+ }
+ }
+
+ // We need a consolidated view in $page.HasShortcode
+ p.p.shortcodeState.transferNames(s)
+
+ } else {
+ c, err := p.renderContentWithConverter(conv, []byte(contentToRender), false)
+ if err != nil {
+ return "", p.p.wrapError(err)
+ }
+
+ rendered = c.Bytes()
+ }
+
+ if opts.Display == "inline" {
+ // We may have to rethink this in the future when we get other
+ // renderers.
+ rendered = p.p.s.ContentSpec.TrimShortHTML(rendered)
+ }
+
+ return template.HTML(string(rendered)), nil
+}
+
+func (p *pageContentOutput) RenderWithTemplateInfo(info tpl.Info, layout ...string) (template.HTML, error) {
+ p.p.addDependency(info)
+ return p.Render(layout...)
+}
+
+func (p *pageContentOutput) Render(layout ...string) (template.HTML, error) {
+ templ, found, err := p.p.resolveTemplate(layout...)
+ if err != nil {
+ return "", p.p.wrapError(err)
+ }
+
+ if !found {
+ return "", nil
+ }
+
+ p.p.addDependency(templ.(tpl.Info))
+
+ // Make sure to send the *pageState and not the *pageContentOutput to the template.
+ res, err := executeToString(p.p.s.Tmpl(), templ, p.p)
+ if err != nil {
+ return "", p.p.wrapError(fmt.Errorf("failed to execute template %s: %w", templ.Name(), err))
+ }
+ return template.HTML(res), nil
+}
+
+func (p *pageContentOutput) initRenderHooks() error {
+ if p == nil {
+ return nil
+ }
+
+ p.renderHooks.init.Do(func() {
+ if p.p.pageOutputTemplateVariationsState.Load() == 0 {
+ p.p.pageOutputTemplateVariationsState.Store(1)
+ }
+
+ type cacheKey struct {
+ tp hooks.RendererType
+ id any
+ f output.Format
+ }
+
+ renderCache := make(map[cacheKey]any)
+ var renderCacheMu sync.Mutex
+
+ resolvePosition := func(ctx any) text.Position {
+ var offset int
+
+ switch v := ctx.(type) {
+ case hooks.CodeblockContext:
+ offset = bytes.Index(p.p.source.parsed.Input(), []byte(v.Inner()))
+ }
+
+ pos := p.p.posFromInput(p.p.source.parsed.Input(), offset)
+
+ if pos.LineNumber > 0 {
+ // Move up to the code fence delimiter.
+ // This is in line with how we report on shortcodes.
+ pos.LineNumber = pos.LineNumber - 1
+ }
+
+ return pos
+ }
+
+ p.renderHooks.getRenderer = func(tp hooks.RendererType, id any) any {
+ renderCacheMu.Lock()
+ defer renderCacheMu.Unlock()
+
+ key := cacheKey{tp: tp, id: id, f: p.f}
+ if r, ok := renderCache[key]; ok {
+ return r
+ }
+
+ layoutDescriptor := p.p.getLayoutDescriptor()
+ layoutDescriptor.RenderingHook = true
+ layoutDescriptor.LayoutOverride = false
+ layoutDescriptor.Layout = ""
+
+ switch tp {
+ case hooks.LinkRendererType:
+ layoutDescriptor.Kind = "render-link"
+ case hooks.ImageRendererType:
+ layoutDescriptor.Kind = "render-image"
+ case hooks.HeadingRendererType:
+ layoutDescriptor.Kind = "render-heading"
+ case hooks.CodeBlockRendererType:
+ layoutDescriptor.Kind = "render-codeblock"
+ if id != nil {
+ lang := id.(string)
+ lexer := lexers.Get(lang)
+ if lexer != nil {
+ layoutDescriptor.KindVariants = strings.Join(lexer.Config().Aliases, ",")
+ } else {
+ layoutDescriptor.KindVariants = lang
+ }
+ }
+ }
+
+ getHookTemplate := func(f output.Format) (tpl.Template, bool) {
+ templ, found, err := p.p.s.Tmpl().LookupLayout(layoutDescriptor, f)
+ if err != nil {
+ panic(err)
+ }
+ return templ, found
+ }
+
+ templ, found1 := getHookTemplate(p.f)
+
+ if p.p.reusePageOutputContent() {
+ // Check if some of the other output formats would give a different template.
+ for _, f := range p.p.s.renderFormats {
+ if f.Name == p.f.Name {
+ continue
+ }
+ templ2, found2 := getHookTemplate(f)
+ if found2 {
+ if !found1 {
+ templ = templ2
+ found1 = true
+ break
+ }
+
+ if templ != templ2 {
+ p.p.pageOutputTemplateVariationsState.Store(2)
+ break
+ }
+ }
+ }
+ }
+ if !found1 {
+ if tp == hooks.CodeBlockRendererType {
+ // No user-provided template for code blocks, so we use the native Go code version -- which is also faster.
+ r := p.p.s.ContentSpec.Converters.GetHighlighter()
+ renderCache[key] = r
+ return r
+ }
+ return nil
+ }
+
+ r := hookRendererTemplate{
+ templateHandler: p.p.s.Tmpl(),
+ SearchProvider: templ.(identity.SearchProvider),
+ templ: templ,
+ resolvePosition: resolvePosition,
+ }
+ renderCache[key] = r
+ return r
+ }
+ })
+
+ return nil
+}
+
+func (p *pageContentOutput) setAutoSummary() error {
+ if p.p.source.hasSummaryDivider || p.p.m.summary != "" {
+ return nil
+ }
+
+ var summary string
+ var truncated bool
+
+ if p.p.m.isCJKLanguage {
+ summary, truncated = p.p.s.ContentSpec.TruncateWordsByRune(p.plainWords)
+ } else {
+ summary, truncated = p.p.s.ContentSpec.TruncateWordsToWholeSentence(p.plain)
+ }
+ p.summary = template.HTML(summary)
+
+ p.truncated = truncated
+
+ return nil
+}
+
+func (cp *pageContentOutput) renderContent(content []byte, renderTOC bool) (converter.Result, error) {
+ if err := cp.initRenderHooks(); err != nil {
+ return nil, err
+ }
+ c := cp.p.getContentConverter()
+ return cp.renderContentWithConverter(c, content, renderTOC)
+}
+
+func (cp *pageContentOutput) renderContentWithConverter(c converter.Converter, content []byte, renderTOC bool) (converter.Result, error) {
+ r, err := c.Convert(
+ converter.RenderContext{
+ Src: content,
+ RenderTOC: renderTOC,
+ GetRenderer: cp.renderHooks.getRenderer,
+ })
+
+ if err == nil {
+ if ids, ok := r.(identity.IdentitiesProvider); ok {
+ for _, v := range ids.GetIdentities() {
+ cp.trackDependency(v)
+ }
+ }
+ }
+
+ return r, err
+}
+
+func (p *pageContentOutput) setWordCounts(isCJKLanguage bool) {
+ if isCJKLanguage {
+ p.wordCount = 0
+ for _, word := range p.plainWords {
+ runeCount := utf8.RuneCountInString(word)
+ if len(word) == runeCount {
+ p.wordCount++
+ } else {
+ p.wordCount += runeCount
+ }
+ }
+ } else {
+ p.wordCount = helpers.TotalWords(p.plain)
+ }
+
+ // TODO(bep) is set in a test. Fix that.
+ if p.fuzzyWordCount == 0 {
+ p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100
+ }
+
+ if isCJKLanguage {
+ p.readingTime = (p.wordCount + 500) / 501
+ } else {
+ p.readingTime = (p.wordCount + 212) / 213
+ }
+}
+
+// A callback to signal that we have inserted a placeholder into the rendered
+// content. This avoids doing extra replacement work.
+func (p *pageContentOutput) enablePlaceholders() {
+ p.placeholdersEnabledInit.Do(func() {
+ p.placeholdersEnabled = true
+ })
+}
+
+// these will be shifted out when rendering a given output format.
+type pagePerOutputProviders interface {
+ targetPather
+ page.PaginatorProvider
+ resource.ResourceLinksProvider
+}
+
+type targetPather interface {
+ targetPaths() page.TargetPaths
+}
+
+type targetPathsHolder struct {
+ paths page.TargetPaths
+ page.OutputFormat
+}
+
+func (t targetPathsHolder) targetPaths() page.TargetPaths {
+ return t.paths
+}
+
+func executeToString(h tpl.TemplateHandler, templ tpl.Template, data any) (string, error) {
+ b := bp.GetBuffer()
+ defer bp.PutBuffer(b)
+ if err := h.Execute(templ, b, data); err != nil {
+ return "", err
+ }
+ return b.String(), nil
+}
+
+func splitUserDefinedSummaryAndContent(markup string, c []byte) (summary []byte, content []byte, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ err = fmt.Errorf("summary split failed: %s", r)
+ }
+ }()
+
+ startDivider := bytes.Index(c, internalSummaryDividerBaseBytes)
+
+ if startDivider == -1 {
+ return
+ }
+
+ startTag := "p"
+ switch markup {
+ case "asciidocext":
+ startTag = "div"
+ }
+
+ // Walk back and forward to the surrounding tags.
+ start := bytes.LastIndex(c[:startDivider], []byte("<"+startTag))
+ end := bytes.Index(c[startDivider:], []byte("</"+startTag))
+
+ if start == -1 {
+ start = startDivider
+ } else {
+ start = startDivider - (startDivider - start)
+ }
+
+ if end == -1 {
+ end = startDivider + len(internalSummaryDividerBase)
+ } else {
+ end = startDivider + end + len(startTag) + 3
+ }
+
+ var addDiv bool
+
+ switch markup {
+ case "rst":
+ addDiv = true
+ }
+
+ withoutDivider := append(c[:start], bytes.Trim(c[end:], "\n")...)
+
+ if len(withoutDivider) > 0 {
+ summary = bytes.TrimSpace(withoutDivider[:start])
+ }
+
+ if addDiv {
+ // For the rst
+ summary = append(append([]byte(nil), summary...), []byte("</div>")...)
+ }
+
+ if err != nil {
+ return
+ }
+
+ content = bytes.TrimSpace(withoutDivider)
+
+ return
+}
diff --git a/hugolib/page__position.go b/hugolib/page__position.go
new file mode 100644
index 000000000..a087872cc
--- /dev/null
+++ b/hugolib/page__position.go
@@ -0,0 +1,75 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "github.com/gohugoio/hugo/lazy"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
// newPagePosition creates the Next/Prev navigator for the global page order.
func newPagePosition(n *nextPrev) pagePosition {
	return pagePosition{nextPrev: n}
}

// newPagePositionInSection creates the Next/Prev navigator scoped to a section.
func newPagePositionInSection(n *nextPrev) pagePositionInSection {
	return pagePositionInSection{nextPrev: n}
}
+
// nextPrev holds lazily computed next/previous page references.
// prevPage and nextPage are populated by the init step on first access.
type nextPrev struct {
	init *lazy.Init
	prevPage page.Page
	nextPage page.Page
}

// next returns the next page, running the lazy init on first call.
func (n *nextPrev) next() page.Page {
	n.init.Do()
	return n.nextPage
}

// prev returns the previous page, running the lazy init on first call.
func (n *nextPrev) prev() page.Page {
	n.init.Do()
	return n.prevPage
}
+
// pagePosition provides Next/Prev (and the NextPage/PrevPage aliases)
// for the site-wide page order.
type pagePosition struct {
	*nextPrev
}

func (p pagePosition) Next() page.Page {
	return p.next()
}

// NextPage is an alias for Next.
func (p pagePosition) NextPage() page.Page {
	return p.Next()
}

func (p pagePosition) Prev() page.Page {
	return p.prev()
}

// PrevPage is an alias for Prev.
func (p pagePosition) PrevPage() page.Page {
	return p.Prev()
}
+
// pagePositionInSection provides NextInSection/PrevInSection, backed by
// a nextPrev scoped to the page's section.
type pagePositionInSection struct {
	*nextPrev
}

func (p pagePositionInSection) NextInSection() page.Page {
	return p.next()
}

func (p pagePositionInSection) PrevInSection() page.Page {
	return p.prev()
}
diff --git a/hugolib/page__ref.go b/hugolib/page__ref.go
new file mode 100644
index 000000000..e55a8a3e4
--- /dev/null
+++ b/hugolib/page__ref.go
@@ -0,0 +1,114 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/common/text"
+
+ "github.com/mitchellh/mapstructure"
+)
+
// newPageRef creates the Ref/RelRef provider for p.
func newPageRef(p *pageState) pageRef {
	return pageRef{p: p}
}

// pageRef implements the Ref/RelRef cross-reference methods for a page.
type pageRef struct {
	p *pageState
}
+
// Ref returns the absolute URL of the page referenced by argsm,
// resolved relative to this page.
func (p pageRef) Ref(argsm map[string]any) (string, error) {
	return p.ref(argsm, p.p)
}

// RefFrom is like Ref, but resolves relative to the given source.
func (p pageRef) RefFrom(argsm map[string]any, source any) (string, error) {
	return p.ref(argsm, source)
}

// RelRef returns the site-relative URL of the page referenced by argsm,
// resolved relative to this page.
func (p pageRef) RelRef(argsm map[string]any) (string, error) {
	return p.relRef(argsm, p.p)
}

// RelRefFrom is like RelRef, but resolves relative to the given source.
func (p pageRef) RelRefFrom(argsm map[string]any, source any) (string, error) {
	return p.relRef(argsm, source)
}
+
+func (p pageRef) decodeRefArgs(args map[string]any) (refArgs, *Site, error) {
+ var ra refArgs
+ err := mapstructure.WeakDecode(args, &ra)
+ if err != nil {
+ return ra, nil, nil
+ }
+
+ s := p.p.s
+
+ if ra.Lang != "" && ra.Lang != p.p.s.Language().Lang {
+ // Find correct site
+ found := false
+ for _, ss := range p.p.s.h.Sites {
+ if ss.Lang() == ra.Lang {
+ found = true
+ s = ss
+ }
+ }
+
+ if !found {
+ p.p.s.siteRefLinker.logNotFound(ra.Path, fmt.Sprintf("no site found with lang %q", ra.Lang), nil, text.Position{})
+ return ra, nil, nil
+ }
+ }
+
+ return ra, s, nil
+}
+
// ref resolves argsm to an absolute URL, using source as the context page.
// An empty path resolves to ""; an unresolvable language resolves to the
// configured notFoundURL.
func (p pageRef) ref(argsm map[string]any, source any) (string, error) {
	args, s, err := p.decodeRefArgs(argsm)
	if err != nil {
		return "", fmt.Errorf("invalid arguments to Ref: %w", err)
	}

	if s == nil {
		// No site matched the requested language.
		return p.p.s.siteRefLinker.notFoundURL, nil
	}

	if args.Path == "" {
		return "", nil
	}

	return s.refLink(args.Path, source, false, args.OutputFormat)
}
+
+func (p pageRef) relRef(argsm map[string]any, source any) (string, error) {
+ args, s, err := p.decodeRefArgs(argsm)
+ if err != nil {
+ return "", fmt.Errorf("invalid arguments to Ref: %w", err)
+ }
+
+ if s == nil {
+ return p.p.s.siteRefLinker.notFoundURL, nil
+ }
+
+ if args.Path == "" {
+ return "", nil
+ }
+
+ return s.refLink(args.Path, source, true, args.OutputFormat)
+}
+
// refArgs holds the decoded arguments accepted by Ref/RelRef: the target
// path plus optional language and output format selectors.
type refArgs struct {
	Path string
	Lang string
	OutputFormat string
}
diff --git a/hugolib/page__tree.go b/hugolib/page__tree.go
new file mode 100644
index 000000000..828500e62
--- /dev/null
+++ b/hugolib/page__tree.go
@@ -0,0 +1,187 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "path"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
// pageTree implements the tree navigation methods (ancestor/descendant
// checks, sections, parent) for a page.
type pageTree struct {
	p *pageState
}
+
// IsAncestor reports whether the current page is an ancestor of other.
// Pages that don't carry a tree reference (standalone pages like 404)
// have no descendants; a page is never its own ancestor.
func (pt pageTree) IsAncestor(other any) (bool, error) {
	if pt.p == nil {
		return false, nil
	}

	tp, ok := other.(treeRefProvider)
	if !ok {
		return false, nil
	}

	ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef()
	// Same tree key: a page is not its own ancestor.
	if ref1 != nil && ref2 != nil && ref1.key == ref2.key {
		return false, nil
	}

	// The root is an ancestor of everything else.
	if ref1 != nil && ref1.key == "/" {
		return true, nil
	}

	if ref1 == nil || ref2 == nil {
		if ref1 == nil {
			// A 404 or other similar standalone page.
			return false, nil
		}

		// other is standalone; only the home page is its ancestor.
		return ref1.n.p.IsHome(), nil
	}

	// Ancestry is a key-prefix relationship in the content map.
	if strings.HasPrefix(ref2.key, ref1.key) {
		return true, nil
	}

	return strings.HasPrefix(ref2.key, ref1.key+cmBranchSeparator), nil
}
+
+func (pt pageTree) CurrentSection() page.Page {
+ p := pt.p
+
+ if p.IsHome() || p.IsSection() {
+ return p
+ }
+
+ return p.Parent()
+}
+
// IsDescendant reports whether the current page is a descendant of other.
// This mirrors IsAncestor with the roles of the two pages swapped.
func (pt pageTree) IsDescendant(other any) (bool, error) {
	if pt.p == nil {
		return false, nil
	}

	tp, ok := other.(treeRefProvider)
	if !ok {
		return false, nil
	}

	ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef()
	// Same tree key: a page is not its own descendant.
	if ref1 != nil && ref2 != nil && ref1.key == ref2.key {
		return false, nil
	}

	// Everything else descends from the root.
	if ref2 != nil && ref2.key == "/" {
		return true, nil
	}

	if ref1 == nil || ref2 == nil {
		if ref2 == nil {
			// A 404 or other similar standalone page.
			return false, nil
		}

		// This page is standalone; it descends only from the home page.
		return ref2.n.p.IsHome(), nil
	}

	// Descent is a key-prefix relationship in the content map.
	if strings.HasPrefix(ref1.key, ref2.key) {
		return true, nil
	}

	return strings.HasPrefix(ref1.key, ref2.key+cmBranchSeparator), nil
}
+
// FirstSection returns the page's topmost section below home, falling
// back to the home page when the page has no tree reference.
func (pt pageTree) FirstSection() page.Page {
	ref := pt.p.getTreeRef()
	if ref == nil {
		return pt.p.s.home
	}
	key := ref.key

	if !ref.isSection() {
		// Regular pages: start the lookup from the containing section.
		key = path.Dir(key)
	}

	_, b := ref.m.getFirstSection(key)
	if b == nil {
		return nil
	}
	return b.p
}
+
// InSection reports whether other is in the same section as this page.
func (pt pageTree) InSection(other any) (bool, error) {
	if pt.p == nil || types.IsNil(other) {
		return false, nil
	}

	tp, ok := other.(treeRefProvider)
	if !ok {
		return false, nil
	}

	ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef()

	if ref1 == nil || ref2 == nil {
		if ref1 == nil {
			// A 404 or other similar standalone page.
			return false, nil
		}
		// NOTE(review): when only ref2 is nil, this still answers based on
		// this page's home-ness — confirm the asymmetry is intended.
		return ref1.n.p.IsHome(), nil
	}

	// Same section iff both resolve to the same current-section node.
	s1, _ := ref1.getCurrentSection()
	s2, _ := ref2.getCurrentSection()

	return s1 == s2, nil
}
+
// Page returns the page itself as a page.Page.
func (pt pageTree) Page() page.Page {
	return pt.p
}
+
// Parent returns the page's parent section, or nil for the home page.
func (pt pageTree) Parent() page.Page {
	p := pt.p

	// Prefer an explicitly wired parent.
	if p.parent != nil {
		return p.parent
	}

	if pt.p.IsHome() {
		return nil
	}

	tree := p.getTreeRef()

	// Standalone pages (no tree ref) and taxonomy pages hang off home.
	if tree == nil || pt.p.Kind() == page.KindTaxonomy {
		return pt.p.s.home
	}

	_, b := tree.getSection()
	if b == nil {
		return nil
	}

	return b.p
}
+
// Sections returns this page's immediate subsections, or nil when the
// page has no section bucket.
func (pt pageTree) Sections() page.Pages {
	if pt.p.bucket == nil {
		return nil
	}

	return pt.p.bucket.getSections()
}
diff --git a/hugolib/page_kinds.go b/hugolib/page_kinds.go
new file mode 100644
index 000000000..b63da1d13
--- /dev/null
+++ b/hugolib/page_kinds.go
@@ -0,0 +1,52 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "strings"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
// This is all the kinds we can expect to find in .Site.Pages.
// The render-only kinds below (RSS, sitemap, …) are deliberately excluded.
var allKindsInPages = []string{page.KindPage, page.KindHome, page.KindSection, page.KindTerm, page.KindTaxonomy}
+
// Internal page kinds, not part of the public page.Kind* set.
const (

	// Temporary state.
	kindUnknown = "unknown"

	// The following are (currently) temporary nodes,
	// i.e. nodes we create just to render in isolation.
	kindRSS = "RSS"
	kindSitemap = "sitemap"
	kindRobotsTXT = "robotsTXT"
	kind404 = "404"

	// pageResourceType is presumably the resource type value used for
	// pages — TODO(review): confirm at the call sites.
	pageResourceType = "page"
)
+
// kindMap maps the lowercased internal kind names to their canonical
// spelling, enabling the case-insensitive lookup in getKind.
var kindMap = map[string]string{
	strings.ToLower(kindRSS): kindRSS,
	strings.ToLower(kindSitemap): kindSitemap,
	strings.ToLower(kindRobotsTXT): kindRobotsTXT,
	strings.ToLower(kind404): kind404,
}
+
+func getKind(s string) string {
+ if pkind := page.GetKind(s); pkind != "" {
+ return pkind
+ }
+ return kindMap[strings.ToLower(s)]
+}
diff --git a/hugolib/page_permalink_test.go b/hugolib/page_permalink_test.go
new file mode 100644
index 000000000..0939cc1ff
--- /dev/null
+++ b/hugolib/page_permalink_test.go
@@ -0,0 +1,149 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "html/template"
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
// TestPermalink verifies Permalink/RelPermalink for combinations of
// baseURL, slug, url front matter, uglyURLs and canonifyURLs.
func TestPermalink(t *testing.T) {
	t.Parallel()

	tests := []struct {
		file string
		base template.URL
		slug string
		url string
		uglyURLs bool
		canonifyURLs bool
		expectedAbs string
		expectedRel string
	}{
		{"x/y/z/boofar.md", "", "", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
		{"x/y/z/boofar.md", "", "", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
		// Issue #1174
		{"x/y/z/boofar.md", "http://gopher.com/", "", "", false, true, "http://gopher.com/x/y/z/boofar/", "/x/y/z/boofar/"},
		{"x/y/z/boofar.md", "http://gopher.com/", "", "", true, true, "http://gopher.com/x/y/z/boofar.html", "/x/y/z/boofar.html"},
		{"x/y/z/boofar.md", "", "boofar", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
		{"x/y/z/boofar.md", "http://barnew/", "", "", false, false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
		{"x/y/z/boofar.md", "http://barnew/", "boofar", "", false, false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
		{"x/y/z/boofar.md", "", "", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
		{"x/y/z/boofar.md", "", "", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
		{"x/y/z/boofar.md", "", "boofar", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
		{"x/y/z/boofar.md", "http://barnew/", "", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
		{"x/y/z/boofar.md", "http://barnew/", "boofar", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
		{"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", true, false, "http://barnew/boo/x/y/z/booslug.html", "/boo/x/y/z/booslug.html"},
		{"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", false, true, "http://barnew/boo/x/y/z/booslug/", "/x/y/z/booslug/"},
		{"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", false, false, "http://barnew/boo/x/y/z/booslug/", "/boo/x/y/z/booslug/"},
		{"x/y/z/boofar.md", "http://barnew/boo/", "booslug", "", true, true, "http://barnew/boo/x/y/z/booslug.html", "/x/y/z/booslug.html"},
		{"x/y/z/boofar.md", "http://barnew/boo", "booslug", "", true, true, "http://barnew/boo/x/y/z/booslug.html", "/x/y/z/booslug.html"},
		// Issue #4666
		{"x/y/z/boo-makeindex.md", "http://barnew/boo", "", "", true, true, "http://barnew/boo/x/y/z/boo-makeindex.html", "/x/y/z/boo-makeindex.html"},

		// test URL overrides
		{"x/y/z/boofar.md", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
	}

	for i, test := range tests {
		test := test
		t.Run(fmt.Sprintf("%s-%d", test.file, i), func(t *testing.T) {
			t.Parallel()
			c := qt.New(t)
			cfg, fs := newTestCfg()

			cfg.Set("uglyURLs", test.uglyURLs)
			cfg.Set("canonifyURLs", test.canonifyURLs)
			cfg.Set("baseURL", test.base)

			// Single page with the slug/url under test in its front matter.
			pageContent := fmt.Sprintf(`---
title: Page
slug: %q
url: %q
output: ["HTML"]
---
Content
`, test.slug, test.url)

			writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.file)), pageContent)

			s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
			c.Assert(len(s.RegularPages()), qt.Equals, 1)

			p := s.RegularPages()[0]

			u := p.Permalink()

			expected := test.expectedAbs
			if u != expected {
				t.Fatalf("[%d] Expected abs url: %s, got: %s", i, expected, u)
			}

			u = p.RelPermalink()

			expected = test.expectedRel
			if u != expected {
				t.Errorf("[%d] Expected rel url: %s, got: %s", i, expected, u)
			}
		})
	}
}
+
// TestRelativeURLInFrontMatter verifies that url values in front matter
// that contain relative segments ("..") are resolved within the language
// root and cannot escape it.
func TestRelativeURLInFrontMatter(t *testing.T) {
	config := `
baseURL = "https://example.com"
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = false

[Languages]
[Languages.en]
weight = 10
contentDir = "content/en"
[Languages.nn]
weight = 20
contentDir = "content/nn"

`

	pageTempl := `---
title: "A page"
url: %q
---

Some content.
`

	b := newTestSitesBuilder(t).WithConfigFile("toml", config)
	b.WithContent("content/en/blog/page1.md", fmt.Sprintf(pageTempl, "myblog/p1/"))
	b.WithContent("content/en/blog/page2.md", fmt.Sprintf(pageTempl, "../../../../../myblog/p2/"))
	b.WithContent("content/en/blog/page3.md", fmt.Sprintf(pageTempl, "../myblog/../myblog/p3/"))
	b.WithContent("content/en/blog/_index.md", fmt.Sprintf(pageTempl, "this-is-my-english-blog"))
	b.WithContent("content/nn/blog/page1.md", fmt.Sprintf(pageTempl, "myblog/p1/"))
	b.WithContent("content/nn/blog/_index.md", fmt.Sprintf(pageTempl, "this-is-my-blog"))

	b.Build(BuildCfg{})

	b.AssertFileContent("public/nn/myblog/p1/index.html", "Single: A page|Hello|nn|RelPermalink: /nn/myblog/p1/|")
	b.AssertFileContent("public/nn/this-is-my-blog/index.html", "List Page 1|A page|Hello|https://example.com/nn/this-is-my-blog/|")
	b.AssertFileContent("public/this-is-my-english-blog/index.html", "List Page 1|A page|Hello|https://example.com/this-is-my-english-blog/|")
	b.AssertFileContent("public/myblog/p1/index.html", "Single: A page|Hello|en|RelPermalink: /myblog/p1/|Permalink: https://example.com/myblog/p1/|")
	b.AssertFileContent("public/myblog/p2/index.html", "Single: A page|Hello|en|RelPermalink: /myblog/p2/|Permalink: https://example.com/myblog/p2/|")
	b.AssertFileContent("public/myblog/p3/index.html", "Single: A page|Hello|en|RelPermalink: /myblog/p3/|Permalink: https://example.com/myblog/p3/|")
}
diff --git a/hugolib/page_test.go b/hugolib/page_test.go
new file mode 100644
index 000000000..1d9e3e348
--- /dev/null
+++ b/hugolib/page_test.go
@@ -0,0 +1,2003 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "html/template"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/bep/clock"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/markup/asciidocext"
+ "github.com/gohugoio/hugo/markup/rst"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/jwalterweatherman"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+)
+
+const (
+ homePage = "---\ntitle: Home\n---\nHome Page Content\n"
+ simplePage = "---\ntitle: Simple\n---\nSimple Page\n"
+
+ simplePageRFC3339Date = "---\ntitle: RFC3339 Date\ndate: \"2013-05-17T16:59:30Z\"\n---\nrfc3339 content"
+
+ simplePageWithoutSummaryDelimiter = `---
+title: SimpleWithoutSummaryDelimiter
+---
+[Lorem ipsum](https://lipsum.com/) dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+
+Additional text.
+
+Further text.
+`
+
+ simplePageWithSummaryDelimiter = `---
+title: Simple
+---
+Summary Next Line
+
+<!--more-->
+Some more text
+`
+
+ simplePageWithSummaryParameter = `---
+title: SimpleWithSummaryParameter
+summary: "Page with summary parameter and [a link](http://www.example.com/)"
+---
+
+Some text.
+
+Some more text.
+`
+
+ simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder = `---
+title: Simple
+---
+The [best static site generator][hugo].[^1]
+<!--more-->
+[hugo]: http://gohugo.io/
+[^1]: Many people say so.
+`
+ simplePageWithShortcodeInSummary = `---
+title: Simple
+---
+Summary Next Line. {{<figure src="/not/real" >}}.
+More text here.
+
+Some more text
+`
+
+ simplePageWithSummaryDelimiterSameLine = `---
+title: Simple
+---
+Summary Same Line<!--more-->
+
+Some more text
+`
+
+ simplePageWithAllCJKRunes = `---
+title: Simple
+---
+
+
+€ € € € €
+你好
+도형이
+カテゴリー
+
+
+`
+
+ simplePageWithMainEnglishWithCJKRunes = `---
+title: Simple
+---
+
+
+In Chinese, 好 means good. In Chinese, 好 means good.
+In Chinese, 好 means good. In Chinese, 好 means good.
+In Chinese, 好 means good. In Chinese, 好 means good.
+In Chinese, 好 means good. In Chinese, 好 means good.
+In Chinese, 好 means good. In Chinese, 好 means good.
+In Chinese, 好 means good. In Chinese, 好 means good.
+In Chinese, 好 means good. In Chinese, 好 means good.
+More then 70 words.
+
+
+`
+ simplePageWithMainEnglishWithCJKRunesSummary = "In Chinese, 好 means good. In Chinese, 好 means good. " +
+ "In Chinese, 好 means good. In Chinese, 好 means good. " +
+ "In Chinese, 好 means good. In Chinese, 好 means good. " +
+ "In Chinese, 好 means good. In Chinese, 好 means good. " +
+ "In Chinese, 好 means good. In Chinese, 好 means good. " +
+ "In Chinese, 好 means good. In Chinese, 好 means good. " +
+ "In Chinese, 好 means good. In Chinese, 好 means good."
+
+ simplePageWithIsCJKLanguageFalse = `---
+title: Simple
+isCJKLanguage: false
+---
+
+In Chinese, 好的啊 means good. In Chinese, 好的呀 means good.
+In Chinese, 好的啊 means good. In Chinese, 好的呀 means good.
+In Chinese, 好的啊 means good. In Chinese, 好的呀 means good.
+In Chinese, 好的啊 means good. In Chinese, 好的呀 means good.
+In Chinese, 好的啊 means good. In Chinese, 好的呀 means good.
+In Chinese, 好的啊 means good. In Chinese, 好的呀 means good.
+In Chinese, 好的啊 means good. In Chinese, 好的呀呀 means good enough.
+More then 70 words.
+
+
+`
+ simplePageWithIsCJKLanguageFalseSummary = "In Chinese, 好的啊 means good. In Chinese, 好的呀 means good. " +
+ "In Chinese, 好的啊 means good. In Chinese, 好的呀 means good. " +
+ "In Chinese, 好的啊 means good. In Chinese, 好的呀 means good. " +
+ "In Chinese, 好的啊 means good. In Chinese, 好的呀 means good. " +
+ "In Chinese, 好的啊 means good. In Chinese, 好的呀 means good. " +
+ "In Chinese, 好的啊 means good. In Chinese, 好的呀 means good. " +
+ "In Chinese, 好的啊 means good. In Chinese, 好的呀呀 means good enough."
+
+ simplePageWithLongContent = `---
+title: Simple
+---
+
+Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor
+incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis
+nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
+Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu
+fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
+culpa qui officia deserunt mollit anim id est laborum. Lorem ipsum dolor sit
+amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore
+et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor
+in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
+pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui
+officia deserunt mollit anim id est laborum. Lorem ipsum dolor sit amet,
+consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et
+dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco
+laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in
+reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.
+Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia
+deserunt mollit anim id est laborum. Lorem ipsum dolor sit amet, consectetur
+adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna
+aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi
+ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in
+voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
+occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim
+id est laborum. Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed
+do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim
+veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Lorem
+ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor
+incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis
+nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
+Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu
+fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
+culpa qui officia deserunt mollit anim id est laborum. Lorem ipsum dolor sit
+amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore
+et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation
+ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor
+in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
+pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui
+officia deserunt mollit anim id est laborum.`
+
+ pageWithToC = `---
+title: TOC
+---
+For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.
+
+## AA
+
+I have no idea, of course, how long it took me to reach the limit of the plain,
+but at last I entered the foothills, following a pretty little canyon upward
+toward the mountains. Beside me frolicked a laughing brooklet, hurrying upon
+its noisy way down to the silent sea. In its quieter pools I discovered many
+small fish, of four-or five-pound weight I should imagine. In appearance,
+except as to size and color, they were not unlike the whale of our own seas. As
+I watched them playing about I discovered, not only that they suckled their
+young, but that at intervals they rose to the surface to breathe as well as to
+feed upon certain grasses and a strange, scarlet lichen which grew upon the
+rocks just above the water line.
+
+### AAA
+
+I remember I felt an extraordinary persuasion that I was being played with,
+that presently, when I was upon the very verge of safety, this mysterious
+death--as swift as the passage of light--would leap after me from the pit about
+the cylinder and strike me down. ## BB
+
+### BBB
+
+"You're a great Granser," he cried delightedly, "always making believe them little marks mean something."
+`
+
+ simplePageWithURL = `---
+title: Simple
+url: simple/url/
+---
+Simple Page With URL`
+
+ simplePageWithSlug = `---
+title: Simple
+slug: simple-slug
+---
+Simple Page With Slug`
+
+ simplePageWithDate = `---
+title: Simple
+date: '2013-10-15T06:16:13'
+---
+Simple Page With Date`
+
+ UTF8Page = `---
+title: ラーメン
+---
+UTF8 Page`
+
+ UTF8PageWithURL = `---
+title: ラーメン
+url: ラーメン/url/
+---
+UTF8 Page With URL`
+
+ UTF8PageWithSlug = `---
+title: ラーメン
+slug: ラーメン-slug
+---
+UTF8 Page With Slug`
+
+ UTF8PageWithDate = `---
+title: ラーメン
+date: '2013-10-15T06:16:13'
+---
+UTF8 Page With Date`
+)
+
+func checkPageTitle(t *testing.T, page page.Page, title string) {
+ if page.Title() != title {
+ t.Fatalf("Page title is: %s. Expected %s", page.Title(), title)
+ }
+}
+
// checkPageContent fails the test if page's rendered content, after
// whitespace normalization, differs from expected.
func checkPageContent(t *testing.T, page page.Page, expected string, msg ...any) {
	t.Helper()
	a := normalizeContent(expected)
	b := normalizeContent(content(page))
	if a != b {
		t.Fatalf("Page content is:\n%q\nExpected:\n%q (%q)", b, a, msg)
	}
}
+
// normalizeContent collapses all whitespace in c to single spaces and
// strips the space a renderer may emit after an opening/closing p-tag,
// so rendered fragments can be compared regardless of formatting.
func normalizeContent(c string) string {
	norm := strings.ReplaceAll(c, "\n", " ")
	// Collapse runs of spaces; the original's three identical
	// single-space replacements were no-ops that left longer runs intact.
	for strings.Contains(norm, "  ") {
		norm = strings.ReplaceAll(norm, "  ", " ")
	}
	norm = strings.ReplaceAll(norm, "p> ", "p>")
	return strings.TrimSpace(norm)
}
+
// checkPageTOC fails the test if page's TableOfContents differs from toc.
func checkPageTOC(t *testing.T, page page.Page, toc string) {
	t.Helper()
	if page.TableOfContents() != template.HTML(toc) {
		t.Fatalf("Page TableOfContents is:\n%q.\nExpected %q", page.TableOfContents(), toc)
	}
}
+
+func checkPageSummary(t *testing.T, page page.Page, summary string, msg ...any) {
+ a := normalizeContent(string(page.Summary()))
+ b := normalizeContent(summary)
+ if a != b {
+ t.Fatalf("Page summary is:\n%q.\nExpected\n%q (%q)", a, b, msg)
+ }
+}
+
+func checkPageType(t *testing.T, page page.Page, pageType string) {
+ if page.Type() != pageType {
+ t.Fatalf("Page type is: %s. Expected: %s", page.Type(), pageType)
+ }
+}
+
+func checkPageDate(t *testing.T, page page.Page, time time.Time) {
+ if page.Date() != time {
+ t.Fatalf("Page date is: %s. Expected: %s", page.Date(), time)
+ }
+}
+
+func normalizeExpected(ext, str string) string {
+ str = normalizeContent(str)
+ switch ext {
+ default:
+ return str
+ case "html":
+ return strings.Trim(tpl.StripHTML(str), " ")
+ case "ad":
+ paragraphs := strings.Split(str, "</p>")
+ expected := ""
+ for _, para := range paragraphs {
+ if para == "" {
+ continue
+ }
+ expected += fmt.Sprintf("<div class=\"paragraph\">\n%s</p></div>\n", para)
+ }
+
+ return expected
+ case "rst":
+ return fmt.Sprintf("<div class=\"document\">\n\n\n%s</div>", str)
+ }
+}
+
// testAllMarkdownEnginesForPages builds a site from pageSources once per
// available markup engine (md always; ad/rst when the external tool is
// installed) and runs assertFunc against the resulting regular pages.
func testAllMarkdownEnginesForPages(t *testing.T,
	assertFunc func(t *testing.T, ext string, pages page.Pages), settings map[string]any, pageSources ...string) {

	engines := []struct {
		ext string
		shouldExecute func() bool
	}{
		{"md", func() bool { return true }},
		{"ad", func() bool { return asciidocext.Supports() }},
		{"rst", func() bool { return rst.Supports() }},
	}

	for _, e := range engines {
		if !e.shouldExecute() {
			continue
		}

		t.Run(e.ext, func(t *testing.T) {
			cfg, fs := newTestCfg(func(cfg config.Provider) error {
				for k, v := range settings {
					cfg.Set(k, v)
				}
				return nil
			})

			contentDir := "content"

			if s := cfg.GetString("contentDir"); s != "" {
				contentDir = s
			}

			// The external converters must be allow-listed to run.
			cfg.Set("security", map[string]any{
				"exec": map[string]any{
					"allow": []string{"^python$", "^rst2html.*", "^asciidoctor$"},
				},
			})

			// Pairs of (filename, content): p0.<ext>, p1.<ext>, …
			var fileSourcePairs []string

			for i, source := range pageSources {
				fileSourcePairs = append(fileSourcePairs, fmt.Sprintf("p%d.%s", i, e.ext), source)
			}

			for i := 0; i < len(fileSourcePairs); i += 2 {
				writeSource(t, fs, filepath.Join(contentDir, fileSourcePairs[i]), fileSourcePairs[i+1])
			}

			// Add a content page for the home page
			homePath := fmt.Sprintf("_index.%s", e.ext)
			writeSource(t, fs, filepath.Join(contentDir, homePath), homePage)

			b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
			b.Build(BuildCfg{})

			s := b.H.Sites[0]

			b.Assert(len(s.RegularPages()), qt.Equals, len(pageSources))

			assertFunc(t, e.ext, s.RegularPages())

			home := s.Info.Home()
			b.Assert(home, qt.Not(qt.IsNil))
			b.Assert(home.File().Path(), qt.Equals, homePath)
			b.Assert(content(home), qt.Contains, "Home Page Content")
		})

	}
}
+
// Issue #1076
// TestPageWithDelimiterForMarkdownThatCrossesBorder verifies the summary
// split when a footnote definition lives below the <!--more--> divider:
// the footnote must still be resolved in the summary.
func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
	t.Parallel()
	cfg, fs := newTestCfg()

	c := qt.New(t)

	writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageWithSummaryDelimiterAndMarkdownThatCrossesBorder)

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})

	c.Assert(len(s.RegularPages()), qt.Equals, 1)

	p := s.RegularPages()[0]

	if p.Summary() != template.HTML(
		"<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup id=\"fnref:1\"><a href=\"#fn:1\" class=\"footnote-ref\" role=\"doc-noteref\">1</a></sup></p>") {
		t.Fatalf("Got summary:\n%q", p.Summary())
	}

	cnt := content(p)
	if cnt != "<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup id=\"fnref:1\"><a href=\"#fn:1\" class=\"footnote-ref\" role=\"doc-noteref\">1</a></sup></p>\n<div class=\"footnotes\" role=\"doc-endnotes\">\n<hr>\n<ol>\n<li id=\"fn:1\">\n<p>Many people say so.&#160;<a href=\"#fnref:1\" class=\"footnote-backref\" role=\"doc-backlink\">&#x21a9;&#xfe0e;</a></p>\n</li>\n</ol>\n</div>" {
		t.Fatalf("Got content:\n%q", cnt)
	}
}
+
// TestPageDatesAllKinds verifies that Date/Lastmod propagate from a
// dated page to every page kind (home, section, term, taxonomy) and to
// the site's LastChange.
func TestPageDatesAllKinds(t *testing.T) {
	t.Parallel()

	pageContent := `
---
title: Page
date: 2017-01-15
tags: ["hugo"]
categories: ["cool stuff"]
---
`

	b := newTestSitesBuilder(t)
	b.WithSimpleConfigFile().WithContent("page.md", pageContent)
	b.WithContent("blog/page.md", pageContent)

	b.CreateSites().Build(BuildCfg{})

	b.Assert(len(b.H.Sites), qt.Equals, 1)
	s := b.H.Sites[0]

	checkDate := func(t time.Time, msg string) {
		b.Assert(t.Year(), qt.Equals, 2017, qt.Commentf(msg))
	}

	checkDated := func(d resource.Dated, msg string) {
		checkDate(d.Date(), "date: "+msg)
		checkDate(d.Lastmod(), "lastmod: "+msg)
	}
	for _, p := range s.Pages() {
		checkDated(p, p.Kind())
	}
	checkDate(s.Info.LastChange(), "site")
}
+
+// TestPageDatesSections verifies how section dates are derived: a section
+// without an _index.md inherits from its pages, an _index.md without a date
+// stays zero-dated, and a dated _index.md wins (issue #5854). The many
+// main-section pages from 2012 must not leak into the newer sections' dates.
+func TestPageDatesSections(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ // NOTE(review): "with-index-date/p1.md" is passed twice below with
+ // identical content; the second entry looks like a copy-paste slip
+ // (harmless — same path, same content — but worth removing upstream).
+ b.WithSimpleConfigFile().WithContent("no-index/page.md", `
+---
+title: Page
+date: 2017-01-15
+---
+`, "with-index-no-date/_index.md", `---
+title: No Date
+---
+
+`,
+ // https://github.com/gohugoio/hugo/issues/5854
+ "with-index-date/_index.md", `---
+title: Date
+date: 2018-01-15
+---
+
+`, "with-index-date/p1.md", `---
+title: Date
+date: 2018-01-15
+---
+
+`, "with-index-date/p1.md", `---
+title: Date
+date: 2018-01-15
+---
+
+`)
+
+ for i := 1; i <= 20; i++ {
+ b.WithContent(fmt.Sprintf("main-section/p%d.md", i), `---
+title: Date
+date: 2012-01-12
+---
+
+`)
+ }
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.Assert(len(b.H.Sites), qt.Equals, 1)
+ s := b.H.Sites[0]
+
+ checkDate := func(p page.Page, year int) {
+ b.Assert(p.Date().Year(), qt.Equals, year)
+ b.Assert(p.Lastmod().Year(), qt.Equals, year)
+ }
+
+ checkDate(s.getPage("/"), 2018)
+ checkDate(s.getPage("/no-index"), 2017)
+ b.Assert(s.getPage("/with-index-no-date").Date().IsZero(), qt.Equals, true)
+ checkDate(s.getPage("/with-index-date"), 2018)
+
+ b.Assert(s.Site.LastChange().Year(), qt.Equals, 2018)
+}
+
+// TestCreateNewPage builds a single simple page under a custom contentDir for
+// every configured markdown engine and checks its path, title, content,
+// summary and type.
+func TestCreateNewPage(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+
+ // issue #2290: Path is relative to the content dir and will continue to be so.
+ c.Assert(p.File().Path(), qt.Equals, fmt.Sprintf("p0.%s", ext))
+ c.Assert(p.IsHome(), qt.Equals, false)
+ checkPageTitle(t, p, "Simple")
+ checkPageContent(t, p, normalizeExpected(ext, "<p>Simple Page</p>\n"))
+ checkPageSummary(t, p, "Simple Page")
+ checkPageType(t, p, "page")
+ }
+
+ settings := map[string]any{
+ "contentDir": "mycontent",
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePage)
+}
+
+// TestPageSummary checks the auto-generated summary (no <!--more--> delimiter)
+// across markdown engines; Asciidoctor and RST are excluded because the
+// fixture's markup is not compatible with them.
+func TestPageSummary(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ checkPageTitle(t, p, "SimpleWithoutSummaryDelimiter")
+ // Source is not Asciidoctor- or RST-compatible so don't test them
+ if ext != "ad" && ext != "rst" {
+ checkPageContent(t, p, normalizeExpected(ext, "<p><a href=\"https://lipsum.com/\">Lorem ipsum</a> dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>\n\n<p>Additional text.</p>\n\n<p>Further text.</p>\n"), ext)
+ checkPageSummary(t, p, normalizeExpected(ext, "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Additional text."), ext)
+ }
+ checkPageType(t, p, "page")
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithoutSummaryDelimiter)
+}
+
+// TestPageWithDelimiter checks summary/content splitting when the
+// <!--more--> delimiter is on its own line after the summary paragraph.
+func TestPageWithDelimiter(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ checkPageTitle(t, p, "Simple")
+ checkPageContent(t, p, normalizeExpected(ext, "<p>Summary Next Line</p>\n\n<p>Some more text</p>\n"), ext)
+ checkPageSummary(t, p, normalizeExpected(ext, "<p>Summary Next Line</p>"), ext)
+ checkPageType(t, p, "page")
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiter)
+}
+
+// TestPageWithSummaryParameter checks that a front-matter `summary` parameter
+// (rendered as markdown, including links) overrides the auto summary; the
+// summary assertion skips Asciidoctor/RST where the fixture is incompatible.
+func TestPageWithSummaryParameter(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ checkPageTitle(t, p, "SimpleWithSummaryParameter")
+ checkPageContent(t, p, normalizeExpected(ext, "<p>Some text.</p>\n\n<p>Some more text.</p>\n"), ext)
+ // Summary is not Asciidoctor- or RST-compatible so don't test them
+ if ext != "ad" && ext != "rst" {
+ checkPageSummary(t, p, normalizeExpected(ext, "Page with summary parameter and <a href=\"http://www.example.com/\">a link</a>"), ext)
+ }
+ checkPageType(t, p, "page")
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryParameter)
+}
+
+// Issue #3854
+// Also see https://github.com/gohugoio/hugo/issues/3977
+// TestPageWithDateFields verifies that each of the accepted date front-matter
+// aliases (date, publishdate, pubdate, published) populates Page.Date.
+func TestPageWithDateFields(t *testing.T) {
+ c := qt.New(t)
+ pageWithDate := `---
+title: P%d
+weight: %d
+%s: 2017-10-13
+---
+Simple Page With Some Date`
+
+ hasDate := func(p page.Page) bool {
+ return p.Date().Year() == 2017
+ }
+
+ datePage := func(field string, weight int) string {
+ return fmt.Sprintf(pageWithDate, weight, weight, field)
+ }
+
+ // NOTE(review): t.Parallel() is conventionally the first statement in a
+ // test; calling it after the setup above still works but is unusual.
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ c.Assert(len(pages) > 0, qt.Equals, true)
+ for _, p := range pages {
+ c.Assert(hasDate(p), qt.Equals, true)
+ }
+ }
+
+ fields := []string{"date", "publishdate", "pubdate", "published"}
+ pageContents := make([]string, len(fields))
+ for i, field := range fields {
+ pageContents[i] = datePage(field, i+1)
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, pageContents...)
+}
+
+// Issue #2601
+// TestPageRawContent verifies that Page.RawContent returns the page source
+// after front matter, unrendered.
+func TestPageRawContent(t *testing.T) {
+ t.Parallel()
+ cfg, fs := newTestCfg()
+ c := qt.New(t)
+
+ writeSource(t, fs, filepath.Join("content", "raw.md"), `---
+title: Raw
+---
+**Raw**`)
+
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{ .RawContent }}`)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+ p := s.RegularPages()[0]
+
+ // Fixed argument order: quicktest's convention is Assert(got, checker, want),
+ // so on failure the message now correctly labels RawContent() as "got".
+ c.Assert(p.RawContent(), qt.Equals, "**Raw**")
+}
+
+// TestPageWithShortCodeInSummary checks that a shortcode rendered inside the
+// summary region contributes its output to Content but is stripped from the
+// plain-text Summary.
+func TestPageWithShortCodeInSummary(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ checkPageTitle(t, p, "Simple")
+ checkPageContent(t, p, normalizeExpected(ext, "<p>Summary Next Line. <figure><img src=\"/not/real\"/> </figure> . More text here.</p><p>Some more text</p>"))
+ checkPageSummary(t, p, "Summary Next Line. . More text here. Some more text")
+ checkPageType(t, p, "page")
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithShortcodeInSummary)
+}
+
+// TestTableOfContents verifies TableOfContents generation: nested headings
+// produce a nested <nav id="TableOfContents"> list, and a "## BB" that ends
+// up inside a paragraph is not promoted to a heading.
+func TestTableOfContents(t *testing.T) {
+ cfg, fs := newTestCfg()
+ c := qt.New(t)
+
+ writeSource(t, fs, filepath.Join("content", "tocpage.md"), pageWithToC)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ p := s.RegularPages()[0]
+
+ checkPageContent(t, p, "<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p><h2 id=\"aa\">AA</h2> <p>I have no idea, of course, how long it took me to reach the limit of the plain, but at last I entered the foothills, following a pretty little canyon upward toward the mountains. Beside me frolicked a laughing brooklet, hurrying upon its noisy way down to the silent sea. In its quieter pools I discovered many small fish, of four-or five-pound weight I should imagine. In appearance, except as to size and color, they were not unlike the whale of our own seas. As I watched them playing about I discovered, not only that they suckled their young, but that at intervals they rose to the surface to breathe as well as to feed upon certain grasses and a strange, scarlet lichen which grew upon the rocks just above the water line.</p><h3 id=\"aaa\">AAA</h3> <p>I remember I felt an extraordinary persuasion that I was being played with, that presently, when I was upon the very verge of safety, this mysterious death&ndash;as swift as the passage of light&ndash;would leap after me from the pit about the cylinder and strike me down. ## BB</p><h3 id=\"bbb\">BBB</h3> <p>&ldquo;You&rsquo;re a great Granser,&rdquo; he cried delightedly, &ldquo;always making believe them little marks mean something.&rdquo;</p>")
+ checkPageTOC(t, p, "<nav id=\"TableOfContents\">\n <ul>\n <li><a href=\"#aa\">AA</a>\n <ul>\n <li><a href=\"#aaa\">AAA</a></li>\n <li><a href=\"#bbb\">BBB</a></li>\n </ul>\n </li>\n </ul>\n</nav>")
+}
+
+// TestPageWithMoreTag checks summary splitting when <!--more--> appears on
+// the same line as the summary text.
+func TestPageWithMoreTag(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ checkPageTitle(t, p, "Simple")
+ checkPageContent(t, p, normalizeExpected(ext, "<p>Summary Same Line</p>\n\n<p>Some more text</p>\n"))
+ checkPageSummary(t, p, normalizeExpected(ext, "<p>Summary Same Line</p>"))
+ checkPageType(t, p, "page")
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryDelimiterSameLine)
+}
+
+// #2973
+// TestSummaryWithHTMLTagsOnNextLine verifies that raw HTML placed after the
+// <!--more--> delimiter does not bleed into the summary.
+func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) {
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ c := qt.New(t)
+ p := pages[0]
+ s := string(p.Summary())
+ c.Assert(s, qt.Contains, "Happy new year everyone!")
+ c.Assert(s, qt.Not(qt.Contains), "User interface")
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, `---
+title: Simple
+---
+Happy new year everyone!
+
+Here is the last report for commits in the year 2016. It covers hrev50718-hrev50829.
+
+<!--more-->
+
+<h3>User interface</h3>
+
+`)
+}
+
+// Issue 9383
+// TestRenderStringForRegularPageTranslations builds a two-language site and
+// asserts that .RenderString works on pages obtained via both Translations
+// and AllTranslations of the home page, in each language's output.
+func TestRenderStringForRegularPageTranslations(t *testing.T) {
+ c := qt.New(t)
+ b := newTestSitesBuilder(t)
+ b.WithLogger(loggers.NewBasicLoggerForWriter(jwalterweatherman.LevelError, os.Stderr))
+
+ b.WithConfigFile("toml",
+ `baseurl = "https://example.org/"
+title = "My Site"
+
+defaultContentLanguage = "ru"
+defaultContentLanguageInSubdir = true
+
+[languages.ru]
+contentDir = 'content/ru'
+weight = 1
+
+[languages.en]
+weight = 2
+contentDir = 'content/en'
+
+[outputs]
+home = ["HTML", "JSON"]`)
+
+ b.WithTemplates("index.html", `
+{{- range .Site.Home.Translations -}}
+ <p>{{- .RenderString "foo" -}}</p>
+{{- end -}}
+{{- range .Site.Home.AllTranslations -}}
+ <p>{{- .RenderString "bar" -}}</p>
+{{- end -}}
+`, "_default/single.html",
+ `{{ .Content }}`,
+ "index.json",
+ `{"Title": "My Site"}`,
+ )
+
+ b.WithContent(
+ "ru/a.md",
+ "",
+ "en/a.md",
+ "",
+ )
+
+ err := b.BuildE(BuildCfg{})
+ c.Assert(err, qt.Equals, nil)
+
+ b.AssertFileContent("public/ru/index.html", `
+<p>foo</p>
+<p>foo</p>
+<p>bar</p>
+<p>bar</p>
+`)
+
+ b.AssertFileContent("public/en/index.html", `
+<p>foo</p>
+<p>foo</p>
+<p>bar</p>
+<p>bar</p>
+`)
+}
+
+// Issue 8919
+// TestContentProviderWithCustomOutputFormat verifies that content-derived
+// values (Content, Plain, PlainWords, Summary, Truncated, FuzzyWordCount,
+// ReadingTime, Len) are correct for translated pages in both the default
+// HTML output and a custom plain-text "metadata" output format, for each of
+// the two configured languages.
+func TestContentProviderWithCustomOutputFormat(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithLogger(loggers.NewBasicLoggerForWriter(jwalterweatherman.LevelDebug, os.Stderr))
+ b.WithConfigFile("toml", `baseURL = 'http://example.org/'
+title = 'My New Hugo Site'
+
+timeout = 600000 # ten minutes in case we want to pause and debug
+
+defaultContentLanguage = "en"
+
+[languages]
+ [languages.en]
+ title = "Repro"
+ languageName = "English"
+ contentDir = "content/en"
+
+ [languages.zh_CN]
+ title = "Repro"
+ languageName = "简体中文"
+ contentDir = "content/zh_CN"
+
+[outputFormats]
+ [outputFormats.metadata]
+ baseName = "metadata"
+ mediaType = "text/html"
+ isPlainText = true
+ notAlternative = true
+
+[outputs]
+ home = ["HTML", "metadata"]`)
+
+ b.WithTemplates("home.metadata.html", `<h2>Translations metadata</h2>
+<ul>
+{{ $p := .Page }}
+{{ range $p.Translations}}
+<li>Title: {{ .Title }}, {{ .Summary }}</li>
+<li>Content: {{ .Content }}</li>
+<li>Plain: {{ .Plain }}</li>
+<li>PlainWords: {{ .PlainWords }}</li>
+<li>Summary: {{ .Summary }}</li>
+<li>Truncated: {{ .Truncated }}</li>
+<li>FuzzyWordCount: {{ .FuzzyWordCount }}</li>
+<li>ReadingTime: {{ .ReadingTime }}</li>
+<li>Len: {{ .Len }}</li>
+{{ end }}
+</ul>`)
+
+ b.WithTemplates("_default/baseof.html", `<html>
+
+<body>
+ {{ block "main" . }}{{ end }}
+</body>
+
+</html>`)
+
+ b.WithTemplates("_default/home.html", `{{ define "main" }}
+<h2>Translations</h2>
+<ul>
+{{ $p := .Page }}
+{{ range $p.Translations}}
+<li>Title: {{ .Title }}, {{ .Summary }}</li>
+<li>Content: {{ .Content }}</li>
+<li>Plain: {{ .Plain }}</li>
+<li>PlainWords: {{ .PlainWords }}</li>
+<li>Summary: {{ .Summary }}</li>
+<li>Truncated: {{ .Truncated }}</li>
+<li>FuzzyWordCount: {{ .FuzzyWordCount }}</li>
+<li>ReadingTime: {{ .ReadingTime }}</li>
+<li>Len: {{ .Len }}</li>
+{{ end }}
+</ul>
+{{ end }}`)
+
+ b.WithContent("en/_index.md", `---
+title: Title (en)
+summary: Summary (en)
+---
+
+Here is some content.
+`)
+
+ b.WithContent("zh_CN/_index.md", `---
+title: Title (zh)
+summary: Summary (zh)
+---
+
+这是一些内容
+`)
+
+ b.Build(BuildCfg{})
+
+ // Each home page lists its *other* translation, so the en outputs show the
+ // zh values and vice versa.
+ b.AssertFileContent("public/index.html", `<html>
+
+<body>
+
+<h2>Translations</h2>
+<ul>
+
+
+<li>Title: Title (zh), Summary (zh)</li>
+<li>Content: <p>这是一些内容</p>
+</li>
+<li>Plain: 这是一些内容
+</li>
+<li>PlainWords: [这是一些内容]</li>
+<li>Summary: Summary (zh)</li>
+<li>Truncated: false</li>
+<li>FuzzyWordCount: 100</li>
+<li>ReadingTime: 1</li>
+<li>Len: 26</li>
+
+</ul>
+
+</body>
+
+</html>`)
+ b.AssertFileContent("public/metadata.html", `<h2>Translations metadata</h2>
+<ul>
+
+
+<li>Title: Title (zh), Summary (zh)</li>
+<li>Content: <p>这是一些内容</p>
+</li>
+<li>Plain: 这是一些内容
+</li>
+<li>PlainWords: [这是一些内容]</li>
+<li>Summary: Summary (zh)</li>
+<li>Truncated: false</li>
+<li>FuzzyWordCount: 100</li>
+<li>ReadingTime: 1</li>
+<li>Len: 26</li>
+
+</ul>`)
+ b.AssertFileContent("public/zh_cn/index.html", `<html>
+
+<body>
+
+<h2>Translations</h2>
+<ul>
+
+
+<li>Title: Title (en), Summary (en)</li>
+<li>Content: <p>Here is some content.</p>
+</li>
+<li>Plain: Here is some content.
+</li>
+<li>PlainWords: [Here is some content.]</li>
+<li>Summary: Summary (en)</li>
+<li>Truncated: false</li>
+<li>FuzzyWordCount: 100</li>
+<li>ReadingTime: 1</li>
+<li>Len: 29</li>
+
+</ul>
+
+</body>
+
+</html>`)
+ b.AssertFileContent("public/zh_cn/metadata.html", `<h2>Translations metadata</h2>
+<ul>
+
+
+<li>Title: Title (en), Summary (en)</li>
+<li>Content: <p>Here is some content.</p>
+</li>
+<li>Plain: Here is some content.
+</li>
+<li>PlainWords: [Here is some content.]</li>
+<li>Summary: Summary (en)</li>
+<li>Truncated: false</li>
+<li>FuzzyWordCount: 100</li>
+<li>ReadingTime: 1</li>
+<li>Len: 29</li>
+
+</ul>`)
+}
+
+// TestPageWithDate checks that an RFC3339 date in front matter is parsed into
+// the page's Date.
+func TestPageWithDate(t *testing.T) {
+ t.Parallel()
+ cfg, fs := newTestCfg()
+ c := qt.New(t)
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), simplePageRFC3339Date)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ p := s.RegularPages()[0]
+ d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z")
+
+ checkPageDate(t, p, d)
+}
+
+// TestPageWithLastmodFromGitInfo verifies the ":git" lastmod front-matter
+// handler against the real Git history of the testsite fixture, for both
+// configured languages. Requires the OS filesystem (Git needs real files),
+// hence the hugofs.Os setup; skipped on CI where it is flaky.
+func TestPageWithLastmodFromGitInfo(t *testing.T) {
+ if htesting.IsCI() {
+ // TODO(bep) figure out why this fails on GitHub actions.
+ t.Skip("Skip GitInfo test on CI")
+ }
+ c := qt.New(t)
+
+ wd, err := os.Getwd()
+ c.Assert(err, qt.IsNil)
+
+ // We need to use the OS fs for this.
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("workingDir", filepath.Join(wd, "testsite"))
+ fs := hugofs.NewFrom(hugofs.Os, cfg)
+
+ cfg.Set("frontmatter", map[string]any{
+ "lastmod": []string{":git", "lastmod"},
+ })
+ cfg.Set("defaultContentLanguage", "en")
+
+ langConfig := map[string]any{
+ "en": map[string]any{
+ "weight": 1,
+ "languageName": "English",
+ "contentDir": "content",
+ },
+ "nn": map[string]any{
+ "weight": 2,
+ "languageName": "Nynorsk",
+ "contentDir": "content_nn",
+ },
+ }
+
+ cfg.Set("languages", langConfig)
+ cfg.Set("enableGitInfo", true)
+
+ b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+
+ b.Build(BuildCfg{SkipRender: true})
+ h := b.H
+
+ c.Assert(len(h.Sites), qt.Equals, 2)
+
+ enSite := h.Sites[0]
+ c.Assert(len(enSite.RegularPages()), qt.Equals, 1)
+
+ // 2018-03-11 is the Git author date for testsite/content/first-post.md
+ c.Assert(enSite.RegularPages()[0].Lastmod().Format("2006-01-02"), qt.Equals, "2018-03-11")
+ c.Assert(enSite.RegularPages()[0].CodeOwners()[0], qt.Equals, "@bep")
+
+ nnSite := h.Sites[1]
+ c.Assert(len(nnSite.RegularPages()), qt.Equals, 1)
+
+ // 2018-08-11 is the Git author date for testsite/content_nn/first-post.md
+ c.Assert(nnSite.RegularPages()[0].Lastmod().Format("2006-01-02"), qt.Equals, "2018-08-11")
+ // NOTE(review): this re-asserts the *en* site's CodeOwners a second time;
+ // `nnSite` was presumably intended here — verify against the testsite's
+ // CODEOWNERS fixture before changing.
+ c.Assert(enSite.RegularPages()[0].CodeOwners()[0], qt.Equals, "@bep")
+}
+
+// TestPageWithFrontMatterConfig exercises the configurable date front-matter
+// handlers ":filename" (date and slug parsed from the file name) and
+// ":fileModTime" (date from the file's modification time), checking the
+// resulting Date, Lastmod and Slug for pages with and without an explicit slug.
+func TestPageWithFrontMatterConfig(t *testing.T) {
+ for _, dateHandler := range []string{":filename", ":fileModTime"} {
+ dateHandler := dateHandler
+ t.Run(fmt.Sprintf("dateHandler=%q", dateHandler), func(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ cfg, fs := newTestCfg()
+
+ pageTemplate := `
+---
+title: Page
+weight: %d
+lastMod: 2018-02-28
+%s
+---
+Content
+`
+
+ cfg.Set("frontmatter", map[string]any{
+ "date": []string{dateHandler, "date"},
+ })
+
+ c1 := filepath.Join("content", "section", "2012-02-21-noslug.md")
+ c2 := filepath.Join("content", "section", "2012-02-22-slug.md")
+
+ writeSource(t, fs, c1, fmt.Sprintf(pageTemplate, 1, ""))
+ writeSource(t, fs, c2, fmt.Sprintf(pageTemplate, 2, "slug: aslug"))
+
+ c1fi, err := fs.Source.Stat(c1)
+ c.Assert(err, qt.IsNil)
+ c2fi, err := fs.Source.Stat(c2)
+ c.Assert(err, qt.IsNil)
+
+ b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+ b.Build(BuildCfg{SkipRender: true})
+
+ s := b.H.Sites[0]
+ c.Assert(len(s.RegularPages()), qt.Equals, 2)
+
+ noSlug := s.RegularPages()[0]
+ slug := s.RegularPages()[1]
+
+ // lastMod from front matter applies regardless of the date handler.
+ c.Assert(noSlug.Lastmod().Day(), qt.Equals, 28)
+
+ switch strings.ToLower(dateHandler) {
+ case ":filename":
+ c.Assert(noSlug.Date().IsZero(), qt.Equals, false)
+ c.Assert(slug.Date().IsZero(), qt.Equals, false)
+ c.Assert(noSlug.Date().Year(), qt.Equals, 2012)
+ c.Assert(slug.Date().Year(), qt.Equals, 2012)
+ c.Assert(noSlug.Slug(), qt.Equals, "noslug")
+ c.Assert(slug.Slug(), qt.Equals, "aslug")
+ case ":filemodtime":
+ c.Assert(noSlug.Date().Year(), qt.Equals, c1fi.ModTime().Year())
+ c.Assert(slug.Date().Year(), qt.Equals, c2fi.ModTime().Year())
+ // fallthrough: the slug assertions below apply to this case too.
+ fallthrough
+ default:
+ c.Assert(noSlug.Slug(), qt.Equals, "")
+ c.Assert(slug.Slug(), qt.Equals, "aslug")
+
+ }
+ })
+ }
+}
+
+// TestWordCountWithAllCJKRunesWithoutHasCJKLanguage checks the word count of
+// an all-CJK page when hasCJKLanguage is off (whitespace-delimited counting).
+func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ if p.WordCount() != 8 {
+ t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 8, p.WordCount())
+ }
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithAllCJKRunes)
+}
+
+// TestWordCountWithAllCJKRunesHasCJKLanguage checks that enabling
+// hasCJKLanguage switches to per-rune counting, yielding a higher count for
+// the same all-CJK fixture than the test above.
+func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
+ t.Parallel()
+ settings := map[string]any{"hasCJKLanguage": true}
+
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ if p.WordCount() != 15 {
+ t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 15, p.WordCount())
+ }
+ }
+ testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithAllCJKRunes)
+}
+
+// TestWordCountWithMainEnglishWithCJKRunes checks word count and summary for
+// mostly-English content containing some CJK runes, with hasCJKLanguage on.
+func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
+ t.Parallel()
+ settings := map[string]any{"hasCJKLanguage": true}
+
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ if p.WordCount() != 74 {
+ t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 74, p.WordCount())
+ }
+
+ if p.Summary() != simplePageWithMainEnglishWithCJKRunesSummary {
+ t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
+ simplePageWithMainEnglishWithCJKRunesSummary, p.Summary())
+ }
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithMainEnglishWithCJKRunes)
+}
+
+// TestWordCountWithIsCJKLanguageFalse checks that a page-level
+// `isCJKLanguage: false` overrides the site-wide hasCJKLanguage setting,
+// falling back to whitespace-delimited word counting.
+func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
+ t.Parallel()
+ settings := map[string]any{
+ "hasCJKLanguage": true,
+ }
+
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ if p.WordCount() != 75 {
+ // Fixed: the failure message previously reported an expected
+ // value of 74 while the condition checks for 75.
+ t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.Plain(), 75, p.WordCount())
+ }
+
+ if p.Summary() != simplePageWithIsCJKLanguageFalseSummary {
+ t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
+ simplePageWithIsCJKLanguageFalseSummary, p.Summary())
+ }
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, settings, simplePageWithIsCJKLanguageFalse)
+}
+
+// TestWordCount checks WordCount, FuzzyWordCount (rounded up to the nearest
+// hundred per its contract as exercised here: 483 -> 500) and ReadingTime on
+// a long fixture, across all markdown engines.
+func TestWordCount(t *testing.T) {
+ t.Parallel()
+ assertFunc := func(t *testing.T, ext string, pages page.Pages) {
+ p := pages[0]
+ if p.WordCount() != 483 {
+ t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 483, p.WordCount())
+ }
+
+ if p.FuzzyWordCount() != 500 {
+ t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 500, p.FuzzyWordCount())
+ }
+
+ if p.ReadingTime() != 3 {
+ t.Fatalf("[%s] incorrect min read. expected %v, got %v", ext, 3, p.ReadingTime())
+ }
+ }
+
+ testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithLongContent)
+}
+
+// TestPagePaths builds each fixture (with and without a permalink pattern)
+// and checks that exactly one regular page results.
+// NOTE(review): the `expected` field of the test table is never asserted —
+// the loop only checks the page count. Either the path assertion was lost at
+// some point or the field is vestigial; worth confirming upstream. Also,
+// "siteParmalinksSetting" is a typo for "sitePermalinksSetting".
+func TestPagePaths(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ siteParmalinksSetting := map[string]string{
+ "post": ":year/:month/:day/:title/",
+ }
+
+ tests := []struct {
+ content string
+ path string
+ hasPermalink bool
+ expected string
+ }{
+ {simplePage, "post/x.md", false, "post/x.html"},
+ {simplePageWithURL, "post/x.md", false, "simple/url/index.html"},
+ {simplePageWithSlug, "post/x.md", false, "post/simple-slug.html"},
+ {simplePageWithDate, "post/x.md", true, "2013/10/15/simple/index.html"},
+ {UTF8Page, "post/x.md", false, "post/x.html"},
+ {UTF8PageWithURL, "post/x.md", false, "ラーメン/url/index.html"},
+ {UTF8PageWithSlug, "post/x.md", false, "post/ラーメン-slug.html"},
+ {UTF8PageWithDate, "post/x.md", true, "2013/10/15/ラーメン/index.html"},
+ }
+
+ for _, test := range tests {
+ cfg, fs := newTestCfg()
+
+ if test.hasPermalink {
+ cfg.Set("permalinks", siteParmalinksSetting)
+ }
+
+ writeSource(t, fs, filepath.Join("content", filepath.FromSlash(test.path)), test.content)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ }
+}
+
+// TestTranslationKey checks TranslationKey derivation: "home" for the home
+// page, "page/<key>" when translationKey is set in front matter, and
+// "page/<path-without-ext>" otherwise.
+func TestTranslationKey(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ cfg, fs := newTestCfg()
+
+ writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.no.md")), "---\ntitle: \"A1\"\ntranslationKey: \"k1\"\n---\nContent\n")
+ writeSource(t, fs, filepath.Join("content", filepath.FromSlash("sect/simple.en.md")), "---\ntitle: \"A2\"\n---\nContent\n")
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 2)
+
+ home := s.Info.Home()
+ c.Assert(home, qt.Not(qt.IsNil))
+ c.Assert(home.TranslationKey(), qt.Equals, "home")
+ c.Assert(s.RegularPages()[0].TranslationKey(), qt.Equals, "page/k1")
+ p2 := s.RegularPages()[1]
+
+ c.Assert(p2.TranslationKey(), qt.Equals, "page/sect/simple")
+}
+
+// TestChompBOM verifies that a UTF-8 byte order mark at the start of a
+// content file is stripped before front matter is parsed.
+func TestChompBOM(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ const utf8BOM = "\xef\xbb\xbf"
+
+ cfg, fs := newTestCfg()
+
+ writeSource(t, fs, filepath.Join("content", "simple.md"), utf8BOM+simplePage)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ p := s.RegularPages()[0]
+
+ checkPageTitle(t, p, "Simple")
+}
+
+// TestPageWithEmoji renders the same page with enableEmoji on and off and
+// asserts that only valid :shortcodes: are substituted (and never when the
+// feature is disabled), while lookalike tokens and stray colons pass through.
+func TestPageWithEmoji(t *testing.T) {
+ for _, enableEmoji := range []bool{true, false} {
+ v := config.NewWithTestDefaults()
+ v.Set("enableEmoji", enableEmoji)
+
+ b := newTestSitesBuilder(t).WithViper(v)
+
+ b.WithContent("page-emoji.md", `---
+title: "Hugo Smile"
+---
+This is a :smile:.
+<!--more-->
+
+Another :smile: This is :not: :an: :emoji:.
+
+O :christmas_tree:
+
+Write me an :e-mail: or :email:?
+
+Too many colons: :: ::: :::: :?: :!: :.:
+
+If you dislike this video, you can hit that :-1: button :stuck_out_tongue_winking_eye:,
+but if you like it, hit :+1: and get subscribed!
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ if enableEmoji {
+ b.AssertFileContent("public/page-emoji/index.html",
+ "This is a 😄",
+ "Another 😄",
+ "This is :not: :an: :emoji:.",
+ "O 🎄",
+ "Write me an 📧 or ✉️?",
+ "Too many colons: :: ::: :::: :?: :!: :.:",
+ "you can hit that 👎 button 😜,",
+ "hit 👍 and get subscribed!",
+ )
+ } else {
+ b.AssertFileContent("public/page-emoji/index.html",
+ "This is a :smile:",
+ "Another :smile:",
+ "This is :not: :an: :emoji:.",
+ "O :christmas_tree:",
+ "Write me an :e-mail: or :email:?",
+ "Too many colons: :: ::: :::: :?: :!: :.:",
+ "you can hit that :-1: button :stuck_out_tongue_winking_eye:,",
+ "hit :+1: and get subscribed!",
+ )
+ }
+
+ }
+}
+
+// TestPageHTMLContent verifies handling of .html content files: raw HTML is
+// passed through unrendered (markdown syntax stays literal), and a manual
+// <!--more--> delimiter still splits summary from content (issue #5723).
+func TestPageHTMLContent(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile()
+
+ frontmatter := `---
+title: "HTML Content"
+---
+`
+ b.WithContent("regular.html", frontmatter+`<h1>Hugo</h1>`)
+ b.WithContent("nomarkdownforyou.html", frontmatter+`**Hugo!**`)
+ b.WithContent("manualsummary.html", frontmatter+`
+<p>This is summary</p>
+<!--more-->
+<p>This is the main content.</p>`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent(
+ "public/regular/index.html",
+ "Single: HTML Content|Hello|en|RelPermalink: /regular/|",
+ "Summary: Hugo|Truncated: false")
+
+ b.AssertFileContent(
+ "public/nomarkdownforyou/index.html",
+ "Permalink: http://example.com/nomarkdownforyou/|**Hugo!**|",
+ )
+
+ // https://github.com/gohugoio/hugo/issues/5723
+ b.AssertFileContent(
+ "public/manualsummary/index.html",
+ "Single: HTML Content|Hello|en|RelPermalink: /manualsummary/|",
+ "Summary: \n<p>This is summary</p>\n|Truncated: true",
+ "|<p>This is the main content.</p>|",
+ )
+}
+
+// https://github.com/gohugoio/hugo/issues/5381
+// TestPageManualSummary covers manual summary delimiters interacting with
+// shortcodes: delimiter after/inside/around shortcode output, shortcode-only
+// summaries (issue #5464), and the org-mode "# more" delimiter variants.
+func TestPageManualSummary(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile()
+
+ b.WithContent("page-md-shortcode.md", `---
+title: "Hugo"
+---
+This is a {{< sc >}}.
+<!--more-->
+Content.
+`)
+
+ // https://github.com/gohugoio/hugo/issues/5464
+ b.WithContent("page-md-only-shortcode.md", `---
+title: "Hugo"
+---
+{{< sc >}}
+<!--more-->
+{{< sc >}}
+`)
+
+ b.WithContent("page-md-shortcode-same-line.md", `---
+title: "Hugo"
+---
+This is a {{< sc >}}<!--more-->Same line.
+`)
+
+ b.WithContent("page-md-shortcode-same-line-after.md", `---
+title: "Hugo"
+---
+Summary<!--more-->{{< sc >}}
+`)
+
+ b.WithContent("page-org-shortcode.org", `#+TITLE: T1
+#+AUTHOR: A1
+#+DESCRIPTION: D1
+This is a {{< sc >}}.
+# more
+Content.
+`)
+
+ b.WithContent("page-org-variant1.org", `#+TITLE: T1
+Summary.
+
+# more
+
+Content.
+`)
+
+ b.WithTemplatesAdded("layouts/shortcodes/sc.html", "a shortcode")
+ b.WithTemplatesAdded("layouts/_default/single.html", `
+SUMMARY:{{ .Summary }}:END
+--------------------------
+CONTENT:{{ .Content }}
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/page-md-shortcode/index.html",
+ "SUMMARY:<p>This is a a shortcode.</p>:END",
+ "CONTENT:<p>This is a a shortcode.</p>\n\n<p>Content.</p>\n",
+ )
+
+ b.AssertFileContent("public/page-md-shortcode-same-line/index.html",
+ "SUMMARY:<p>This is a a shortcode</p>:END",
+ "CONTENT:<p>This is a a shortcode</p>\n\n<p>Same line.</p>\n",
+ )
+
+ b.AssertFileContent("public/page-md-shortcode-same-line-after/index.html",
+ "SUMMARY:<p>Summary</p>:END",
+ "CONTENT:<p>Summary</p>\n\na shortcode",
+ )
+
+ b.AssertFileContent("public/page-org-shortcode/index.html",
+ "SUMMARY:<p>\nThis is a a shortcode.\n</p>:END",
+ "CONTENT:<p>\nThis is a a shortcode.\n</p>\n<p>\nContent.\t\n</p>\n",
+ )
+ b.AssertFileContent("public/page-org-variant1/index.html",
+ "SUMMARY:<p>\nSummary.\n</p>:END",
+ "CONTENT:<p>\nSummary.\n</p>\n<p>\nContent.\t\n</p>\n",
+ )
+
+ b.AssertFileContent("public/page-md-only-shortcode/index.html",
+ "SUMMARY:a shortcode:END",
+ "CONTENT:a shortcode\n\na shortcode\n",
+ )
+}
+
+// https://github.com/gohugoio/hugo/issues/5478
+// TestPageWithCommentedOutFrontMatter verifies that TOML front matter wrapped
+// in an HTML comment is still parsed, with the comment markers excluded from
+// the rendered content.
+func TestPageWithCommentedOutFrontMatter(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile()
+
+ b.WithContent("page.md", `<!--
++++
+title = "hello"
++++
+-->
+This is the content.
+`)
+
+ b.WithTemplatesAdded("layouts/_default/single.html", `
+Title: {{ .Title }}
+Content:{{ .Content }}
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/page/index.html",
+ "Title: hello",
+ "Content:<p>This is the content.</p>",
+ )
+}
+
+// https://github.com/gohugoio/hugo/issues/5781
+// TestPageWithZeroFile checks that templates accessing .File on a page with
+// no backing content file build without panicking (success = no error).
+func TestPageWithZeroFile(t *testing.T) {
+ newTestSitesBuilder(t).WithLogger(loggers.NewWarningLogger()).WithSimpleConfigFile().
+ WithTemplatesAdded("index.html", "{{ .File.Filename }}{{ with .File }}{{ .Dir }}{{ end }}").Build(BuildCfg{})
+}
+
+// TestHomePageWithNoTitle verifies that a home page _index.md without a title
+// does not inherit the site title — .Title renders empty.
+func TestHomePageWithNoTitle(t *testing.T) {
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+title = "Site Title"
+`)
+ b.WithTemplatesAdded("index.html", "Title|{{ with .Title }}{{ . }}{{ end }}|")
+ b.WithContent("_index.md", `---
+description: "No title for you!"
+---
+
+Content.
+`)
+
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", "Title||")
+}
+
+// TestShouldBuild table-tests the shouldBuild predicate across combinations
+// of buildFuture/buildExpired/buildDrafts flags, draft status, and
+// publish/expiry dates relative to now.
+func TestShouldBuild(t *testing.T) {
+ past := time.Date(2009, 11, 17, 20, 34, 58, 651387237, time.UTC)
+ future := time.Date(2037, 11, 17, 20, 34, 58, 651387237, time.UTC)
+ zero := time.Time{}
+
+ publishSettings := []struct {
+ buildFuture bool
+ buildExpired bool
+ buildDrafts bool
+ draft bool
+ publishDate time.Time
+ expiryDate time.Time
+ out bool
+ }{
+ // publishDate and expiryDate
+ {false, false, false, false, zero, zero, true},
+ {false, false, false, false, zero, future, true},
+ {false, false, false, false, past, zero, true},
+ {false, false, false, false, past, future, true},
+ {false, false, false, false, past, past, false},
+ {false, false, false, false, future, future, false},
+ {false, false, false, false, future, past, false},
+
+ // buildFuture and buildExpired
+ {false, true, false, false, past, past, true},
+ {true, true, false, false, past, past, true},
+ {true, false, false, false, past, past, false},
+ {true, false, false, false, future, future, true},
+ {true, true, false, false, future, future, true},
+ {false, true, false, false, future, past, false},
+
+ // buildDrafts and draft
+ {true, true, false, true, past, future, false},
+ {true, true, true, true, past, future, true},
+ // NOTE(review): the next row duplicates the previous one verbatim —
+ // likely a copy-paste; it adds no coverage.
+ {true, true, true, true, past, future, true},
+ }
+
+ for _, ps := range publishSettings {
+ s := shouldBuild(ps.buildFuture, ps.buildExpired, ps.buildDrafts, ps.draft,
+ ps.publishDate, ps.expiryDate)
+ if s != ps.out {
+ t.Errorf("AssertShouldBuild unexpected output with params: %+v", ps)
+ }
+ }
+}
+
+// TestShouldBuildWithClock repeats the shouldBuild table with htime.Clock
+// pinned to 2021-11-17, proving the predicate consults the injected clock
+// rather than wall time; the clock is restored via t.Cleanup.
+func TestShouldBuildWithClock(t *testing.T) {
+ htime.Clock = clock.Start(time.Date(2021, 11, 17, 20, 34, 58, 651387237, time.UTC))
+ t.Cleanup(func() { htime.Clock = clock.System() })
+ past := time.Date(2009, 11, 17, 20, 34, 58, 651387237, time.UTC)
+ future := time.Date(2037, 11, 17, 20, 34, 58, 651387237, time.UTC)
+ zero := time.Time{}
+
+ publishSettings := []struct {
+ buildFuture bool
+ buildExpired bool
+ buildDrafts bool
+ draft bool
+ publishDate time.Time
+ expiryDate time.Time
+ out bool
+ }{
+ // publishDate and expiryDate
+ {false, false, false, false, zero, zero, true},
+ {false, false, false, false, zero, future, true},
+ {false, false, false, false, past, zero, true},
+ {false, false, false, false, past, future, true},
+ {false, false, false, false, past, past, false},
+ {false, false, false, false, future, future, false},
+ {false, false, false, false, future, past, false},
+
+ // buildFuture and buildExpired
+ {false, true, false, false, past, past, true},
+ {true, true, false, false, past, past, true},
+ {true, false, false, false, past, past, false},
+ {true, false, false, false, future, future, true},
+ {true, true, false, false, future, future, true},
+ {false, true, false, false, future, past, false},
+
+ // buildDrafts and draft
+ {true, true, false, true, past, future, false},
+ {true, true, true, true, past, future, true},
+ // NOTE(review): duplicate of the previous row, as in TestShouldBuild.
+ {true, true, true, true, past, future, true},
+ }
+
+ for _, ps := range publishSettings {
+ s := shouldBuild(ps.buildFuture, ps.buildExpired, ps.buildDrafts, ps.draft,
+ ps.publishDate, ps.expiryDate)
+ if s != ps.out {
+ t.Errorf("AssertShouldBuildWithClock unexpected output with params: %+v", ps)
+ }
+ }
+}
+
+// "dot" in path: #1885 and #2110
+// disablePathToLower regression: #3374
+func TestPathIssues(t *testing.T) {
+ for _, disablePathToLower := range []bool{false, true} {
+ for _, uglyURLs := range []bool{false, true} {
+ disablePathToLower := disablePathToLower
+ uglyURLs := uglyURLs
+ t.Run(fmt.Sprintf("disablePathToLower=%t,uglyURLs=%t", disablePathToLower, uglyURLs), func(t *testing.T) {
+ t.Parallel()
+ cfg, fs := newTestCfg()
+ th := newTestHelper(cfg, fs, t)
+ c := qt.New(t)
+
+ cfg.Set("permalinks", map[string]string{
+ "post": ":section/:title",
+ })
+
+ cfg.Set("uglyURLs", uglyURLs)
+ cfg.Set("disablePathToLower", disablePathToLower)
+ cfg.Set("paginate", 1)
+
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html><body>{{.Content}}</body></html>")
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"),
+ "<html><body>P{{.Paginator.PageNumber}}|URL: {{.Paginator.URL}}|{{ if .Paginator.HasNext }}Next: {{.Paginator.Next.URL }}{{ end }}</body></html>")
+
+ for i := 0; i < 3; i++ {
+ writeSource(t, fs, filepath.Join("content", "post", fmt.Sprintf("doc%d.md", i)),
+ fmt.Sprintf(`---
+title: "test%d.dot"
+tags:
+- ".net"
+---
+# doc1
+*some content*`, i))
+ }
+
+ writeSource(t, fs, filepath.Join("content", "Blog", "Blog1.md"),
+ fmt.Sprintf(`---
+title: "testBlog"
+tags:
+- "Blog"
+---
+# doc1
+*some blog content*`))
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 4)
+
+ pathFunc := func(s string) string {
+ if uglyURLs {
+ return strings.Replace(s, "/index.html", ".html", 1)
+ }
+ return s
+ }
+
+ blog := "blog"
+
+ if disablePathToLower {
+ blog = "Blog"
+ }
+
+ th.assertFileContent(pathFunc("public/"+blog+"/"+blog+"1/index.html"), "some blog content")
+
+ th.assertFileContent(pathFunc("public/post/test0.dot/index.html"), "some content")
+
+ if uglyURLs {
+ th.assertFileContent("public/post/page/1.html", `canonical" href="/post.html"`)
+ th.assertFileContent("public/post.html", `<body>P1|URL: /post.html|Next: /post/page/2.html</body>`)
+ th.assertFileContent("public/post/page/2.html", `<body>P2|URL: /post/page/2.html|Next: /post/page/3.html</body>`)
+ } else {
+ th.assertFileContent("public/post/page/1/index.html", `canonical" href="/post/"`)
+ th.assertFileContent("public/post/index.html", `<body>P1|URL: /post/|Next: /post/page/2/</body>`)
+ th.assertFileContent("public/post/page/2/index.html", `<body>P2|URL: /post/page/2/|Next: /post/page/3/</body>`)
+ th.assertFileContent("public/tags/.net/index.html", `<body>P1|URL: /tags/.net/|Next: /tags/.net/page/2/</body>`)
+
+ }
+
+ p := s.RegularPages()[0]
+ if uglyURLs {
+ c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot.html")
+ } else {
+ c.Assert(p.RelPermalink(), qt.Equals, "/post/test0.dot/")
+ }
+ })
+ }
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/4675
+func TestWordCountAndSimilarVsSummary(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ single := []string{"_default/single.html", `
+WordCount: {{ .WordCount }}
+FuzzyWordCount: {{ .FuzzyWordCount }}
+ReadingTime: {{ .ReadingTime }}
+Len Plain: {{ len .Plain }}
+Len PlainWords: {{ len .PlainWords }}
+Truncated: {{ .Truncated }}
+Len Summary: {{ len .Summary }}
+Len Content: {{ len .Content }}
+
+SUMMARY:{{ .Summary }}:{{ len .Summary }}:END
+
+`}
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded(single...).WithContent("p1.md", fmt.Sprintf(`---
+title: p1
+---
+
+%s
+
+`, strings.Repeat("word ", 510)),
+
+ "p2.md", fmt.Sprintf(`---
+title: p2
+---
+This is a summary.
+
+<!--more-->
+
+%s
+
+`, strings.Repeat("word ", 310)),
+ "p3.md", fmt.Sprintf(`---
+title: p3
+isCJKLanguage: true
+---
+Summary: In Chinese, 好 means good.
+
+<!--more-->
+
+%s
+
+`, strings.Repeat("好", 200)),
+ "p4.md", fmt.Sprintf(`---
+title: p4
+isCJKLanguage: false
+---
+Summary: In Chinese, 好 means good.
+
+<!--more-->
+
+%s
+
+`, strings.Repeat("好", 200)),
+
+ "p5.md", fmt.Sprintf(`---
+title: p4
+isCJKLanguage: true
+---
+Summary: In Chinese, 好 means good.
+
+%s
+
+`, strings.Repeat("好", 200)),
+ "p6.md", fmt.Sprintf(`---
+title: p4
+isCJKLanguage: false
+---
+Summary: In Chinese, 好 means good.
+
+%s
+
+`, strings.Repeat("好", 200)),
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ c.Assert(len(b.H.Sites), qt.Equals, 1)
+ c.Assert(len(b.H.Sites[0].RegularPages()), qt.Equals, 6)
+
+ b.AssertFileContent("public/p1/index.html", "WordCount: 510\nFuzzyWordCount: 600\nReadingTime: 3\nLen Plain: 2550\nLen PlainWords: 510\nTruncated: false\nLen Summary: 2549\nLen Content: 2557")
+
+ b.AssertFileContent("public/p2/index.html", "WordCount: 314\nFuzzyWordCount: 400\nReadingTime: 2\nLen Plain: 1569\nLen PlainWords: 314\nTruncated: true\nLen Summary: 25\nLen Content: 1582")
+
+ b.AssertFileContent("public/p3/index.html", "WordCount: 206\nFuzzyWordCount: 300\nReadingTime: 1\nLen Plain: 638\nLen PlainWords: 7\nTruncated: true\nLen Summary: 43\nLen Content: 651")
+ b.AssertFileContent("public/p4/index.html", "WordCount: 7\nFuzzyWordCount: 100\nReadingTime: 1\nLen Plain: 638\nLen PlainWords: 7\nTruncated: true\nLen Summary: 43\nLen Content: 651")
+ b.AssertFileContent("public/p5/index.html", "WordCount: 206\nFuzzyWordCount: 300\nReadingTime: 1\nLen Plain: 638\nLen PlainWords: 7\nTruncated: true\nLen Summary: 229\nLen Content: 652")
+ b.AssertFileContent("public/p6/index.html", "WordCount: 7\nFuzzyWordCount: 100\nReadingTime: 1\nLen Plain: 638\nLen PlainWords: 7\nTruncated: false\nLen Summary: 637\nLen Content: 652")
+}
+
+func TestScratch(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded("index.html", `
+{{ .Scratch.Set "b" "bv" }}
+B: {{ .Scratch.Get "b" }}
+`,
+ "shortcodes/scratch.html", `
+{{ .Scratch.Set "c" "cv" }}
+C: {{ .Scratch.Get "c" }}
+`,
+ )
+
+ b.WithContentAdded("scratchme.md", `
+---
+title: Scratch Me!
+---
+
+{{< scratch >}}
+`)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "B: bv")
+ b.AssertFileContent("public/scratchme/index.html", "C: cv")
+}
+
+func TestScratchRebuild(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+{{< scratchme >}}
+-- layouts/shortcodes/foo.html --
+notused
+-- layouts/shortcodes/scratchme.html --
+{{ .Page.Scratch.Set "scratch" "foo" }}
+{{ .Page.Store.Set "scratch" "bar" }}
+-- layouts/_default/single.html --
+{{ .Content }}
+Scratch: {{ .Scratch.Get "scratch" }}|
+Store: {{ .Store.Get "scratch" }}|
+`
+
+ b := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ Running: true,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+Scratch: foo|
+Store: bar|
+ `)
+
+ b.EditFiles("layouts/shortcodes/foo.html", "edit")
+
+ b.Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+Scratch: |
+Store: bar|
+ `)
+}
+
+func TestPageParam(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+
+baseURL = "https://example.org"
+
+[params]
+[params.author]
+ name = "Kurt Vonnegut"
+
+`)
+ b.WithTemplatesAdded("index.html", `
+
+{{ $withParam := .Site.GetPage "withparam" }}
+{{ $noParam := .Site.GetPage "noparam" }}
+{{ $withStringParam := .Site.GetPage "withstringparam" }}
+
+Author page: {{ $withParam.Param "author.name" }}
+Author name page string: {{ $withStringParam.Param "author.name" }}|
+Author page string: {{ $withStringParam.Param "author" }}|
+Author site config: {{ $noParam.Param "author.name" }}
+
+`,
+ )
+
+ b.WithContent("withparam.md", `
++++
+title = "With Param!"
+[author]
+ name = "Ernest Miller Hemingway"
+
++++
+
+`,
+
+ "noparam.md", `
+---
+title: "No Param!"
+---
+`, "withstringparam.md", `
++++
+title = "With string Param!"
+author = "Jo Nesbø"
+
++++
+
+`)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html",
+ "Author page: Ernest Miller Hemingway",
+ "Author name page string: Kurt Vonnegut|",
+ "Author page string: Jo Nesbø|",
+ "Author site config: Kurt Vonnegut")
+}
+
+func TestGoldmark(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+baseURL = "https://example.org"
+
+[markup]
+defaultMarkdownHandler="goldmark"
+[markup.goldmark]
+[markup.goldmark.renderer]
+unsafe = false
+[markup.highlight]
+noClasses=false
+
+
+`)
+ b.WithTemplatesAdded("_default/single.html", `
+Title: {{ .Title }}
+ToC: {{ .TableOfContents }}
+Content: {{ .Content }}
+
+`, "shortcodes/t.html", `T-SHORT`, "shortcodes/s.html", `## Code
+{{ .Inner }}
+`)
+
+ content := `
++++
+title = "A Page!"
++++
+
+## Shortcode {{% t %}} in header
+
+## Code Fence in Shortcode
+
+{{% s %}}
+$$$bash {hl_lines=[1]}
+SHORT
+$$$
+{{% /s %}}
+
+## Code Fence
+
+$$$bash {hl_lines=[1]}
+MARKDOWN
+$$$
+
+Link with URL as text
+
+[https://google.com](https://google.com)
+
+
+`
+ content = strings.ReplaceAll(content, "$$$", "```")
+
+ b.WithContent("page.md", content)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/page/index.html",
+ `<nav id="TableOfContents">
+<li><a href="#shortcode-t-short-in-header">Shortcode T-SHORT in header</a></li>
+<code class="language-bash" data-lang="bash"><span class="line hl"><span class="cl">SHORT
+<code class="language-bash" data-lang="bash"><span class="line hl"><span class="cl">MARKDOWN
+<p><a href="https://google.com">https://google.com</a></p>
+`)
+}
+
+func TestPageCaseIssues(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `defaultContentLanguage = "no"
+[languages]
+[languages.NO]
+title = "Norsk"
+`)
+ b.WithContent("a/B/C/Page1.md", "---\ntitle: Page1\n---")
+ b.WithTemplates("index.html", `
+{{ $p1 := site.GetPage "a/B/C/Page1" }}
+Lang: {{ .Lang }}
+Page1: {{ $p1.Path }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", "Lang: no", filepath.FromSlash("Page1: a/B/C/Page1.md"))
+}
diff --git a/hugolib/page_unwrap.go b/hugolib/page_unwrap.go
new file mode 100644
index 000000000..c3e1ce8dd
--- /dev/null
+++ b/hugolib/page_unwrap.go
@@ -0,0 +1,50 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+// Wraps a Page.
+type pageWrapper interface {
+ page() page.Page
+}
+
+// unwrapPage is used in equality checks and similar.
+func unwrapPage(in any) (page.Page, error) {
+ switch v := in.(type) {
+ case *pageState:
+ return v, nil
+ case pageWrapper:
+ return v.page(), nil
+ case page.Page:
+ return v, nil
+ case nil:
+ return nil, nil
+ default:
+ return nil, fmt.Errorf("unwrapPage: %T not supported", in)
+ }
+}
+
+func mustUnwrapPage(in any) page.Page {
+ p, err := unwrapPage(in)
+ if err != nil {
+ panic(err)
+ }
+
+ return p
+}
diff --git a/hugolib/page_unwrap_test.go b/hugolib/page_unwrap_test.go
new file mode 100644
index 000000000..2d9b5e17f
--- /dev/null
+++ b/hugolib/page_unwrap_test.go
@@ -0,0 +1,38 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func TestUnwrapPage(t *testing.T) {
+ c := qt.New(t)
+
+ p := &pageState{}
+
+ c.Assert(mustUnwrap(newPageForShortcode(p)), qt.Equals, p)
+ c.Assert(mustUnwrap(newPageForRenderHook(p)), qt.Equals, p)
+}
+
+func mustUnwrap(v any) page.Page {
+ p, err := unwrapPage(v)
+ if err != nil {
+ panic(err)
+ }
+ return p
+}
diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go
new file mode 100644
index 000000000..f88d2e4d2
--- /dev/null
+++ b/hugolib/pagebundler_test.go
@@ -0,0 +1,1348 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/gohugoio/hugo/deps"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPageBundlerSiteRegular(t *testing.T) {
+ c := qt.New(t)
+ baseBaseURL := "https://example.com"
+
+ for _, baseURLPath := range []string{"", "/hugo"} {
+ for _, canonify := range []bool{false, true} {
+ for _, ugly := range []bool{false, true} {
+ baseURLPathId := baseURLPath
+ if baseURLPathId == "" {
+ baseURLPathId = "NONE"
+ }
+ ugly := ugly
+ canonify := canonify
+ c.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId),
+ func(c *qt.C) {
+ c.Parallel()
+ baseURL := baseBaseURL + baseURLPath
+ relURLBase := baseURLPath
+ if canonify {
+ relURLBase = ""
+ }
+ fs, cfg := newTestBundleSources(c)
+ cfg.Set("baseURL", baseURL)
+ cfg.Set("canonifyURLs", canonify)
+
+ cfg.Set("permalinks", map[string]string{
+ "a": ":sections/:filename",
+ "b": ":year/:slug/",
+ "c": ":sections/:slug",
+ "/": ":filename/",
+ })
+
+ cfg.Set("outputFormats", map[string]any{
+ "CUSTOMO": map[string]any{
+ "mediaType": "text/html",
+ "baseName": "cindex",
+ "path": "cpath",
+ "permalinkable": true,
+ },
+ })
+
+ cfg.Set("outputs", map[string]any{
+ "home": []string{"HTML", "CUSTOMO"},
+ "page": []string{"HTML", "CUSTOMO"},
+ "section": []string{"HTML", "CUSTOMO"},
+ })
+
+ cfg.Set("uglyURLs", ugly)
+
+ b := newTestSitesBuilderFromDepsCfg(c, deps.DepsCfg{Logger: loggers.NewErrorLogger(), Fs: fs, Cfg: cfg}).WithNothingAdded()
+
+ b.Build(BuildCfg{})
+
+ s := b.H.Sites[0]
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 8)
+
+ singlePage := s.getPage(page.KindPage, "a/1.md")
+ c.Assert(singlePage.BundleType(), qt.Equals, files.ContentClass(""))
+
+ c.Assert(singlePage, qt.Not(qt.IsNil))
+ c.Assert(s.getPage("page", "a/1"), qt.Equals, singlePage)
+ c.Assert(s.getPage("page", "1"), qt.Equals, singlePage)
+
+ c.Assert(content(singlePage), qt.Contains, "TheContent")
+
+ relFilename := func(basePath, outBase string) (string, string) {
+ rel := basePath
+ if ugly {
+ rel = strings.TrimSuffix(basePath, "/") + ".html"
+ }
+
+ var filename string
+ if !ugly {
+ filename = path.Join(basePath, outBase)
+ } else {
+ filename = rel
+ }
+
+ rel = fmt.Sprintf("%s%s", relURLBase, rel)
+
+ return rel, filename
+ }
+
+ // Check both output formats
+ rel, filename := relFilename("/a/1/", "index.html")
+ b.AssertFileContent(filepath.Join("public", filename),
+ "TheContent",
+ "Single RelPermalink: "+rel,
+ )
+
+ rel, filename = relFilename("/cpath/a/1/", "cindex.html")
+
+ b.AssertFileContent(filepath.Join("public", filename),
+ "TheContent",
+ "Single RelPermalink: "+rel,
+ )
+
+ b.AssertFileContent(filepath.FromSlash("public/images/hugo-logo.png"), "content")
+
+ // This should be just copied to destination.
+ b.AssertFileContent(filepath.FromSlash("public/assets/pic1.png"), "content")
+
+ leafBundle1 := s.getPage(page.KindPage, "b/my-bundle/index.md")
+ c.Assert(leafBundle1, qt.Not(qt.IsNil))
+ c.Assert(leafBundle1.BundleType(), qt.Equals, files.ContentClassLeaf)
+ c.Assert(leafBundle1.Section(), qt.Equals, "b")
+ sectionB := s.getPage(page.KindSection, "b")
+ c.Assert(sectionB, qt.Not(qt.IsNil))
+ home := s.Info.Home()
+ c.Assert(home.BundleType(), qt.Equals, files.ContentClassBranch)
+
+ // This is a root bundle and should live in the "home section"
+ // See https://github.com/gohugoio/hugo/issues/4332
+ rootBundle := s.getPage(page.KindPage, "root")
+ c.Assert(rootBundle, qt.Not(qt.IsNil))
+ c.Assert(rootBundle.Parent().IsHome(), qt.Equals, true)
+ if !ugly {
+ b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/")
+ b.AssertFileContent(filepath.FromSlash("public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/")
+ }
+
+ leafBundle2 := s.getPage(page.KindPage, "a/b/index.md")
+ c.Assert(leafBundle2, qt.Not(qt.IsNil))
+ unicodeBundle := s.getPage(page.KindPage, "c/bundle/index.md")
+ c.Assert(unicodeBundle, qt.Not(qt.IsNil))
+
+ pageResources := leafBundle1.Resources().ByType(pageResourceType)
+ c.Assert(len(pageResources), qt.Equals, 2)
+ firstPage := pageResources[0].(page.Page)
+ secondPage := pageResources[1].(page.Page)
+
+ c.Assert(firstPage.File().Filename(), qt.Equals, filepath.FromSlash("/work/base/b/my-bundle/1.md"))
+ c.Assert(content(firstPage), qt.Contains, "TheContent")
+ c.Assert(len(leafBundle1.Resources()), qt.Equals, 6)
+
+ // Verify shortcode in bundled page
+ c.Assert(content(secondPage), qt.Contains, filepath.FromSlash("MyShort in b/my-bundle/2.md"))
+
+ // https://github.com/gohugoio/hugo/issues/4582
+ c.Assert(firstPage.Parent(), qt.Equals, leafBundle1)
+ c.Assert(secondPage.Parent(), qt.Equals, leafBundle1)
+
+ c.Assert(pageResources.GetMatch("1*"), qt.Equals, firstPage)
+ c.Assert(pageResources.GetMatch("2*"), qt.Equals, secondPage)
+ c.Assert(pageResources.GetMatch("doesnotexist*"), qt.IsNil)
+
+ imageResources := leafBundle1.Resources().ByType("image")
+ c.Assert(len(imageResources), qt.Equals, 3)
+
+ c.Assert(leafBundle1.OutputFormats().Get("CUSTOMO"), qt.Not(qt.IsNil))
+
+ relPermalinker := func(s string) string {
+ return fmt.Sprintf(s, relURLBase)
+ }
+
+ permalinker := func(s string) string {
+ return fmt.Sprintf(s, baseURL)
+ }
+
+ if ugly {
+ b.AssertFileContent("public/2017/pageslug.html",
+ relPermalinker("Single RelPermalink: %s/2017/pageslug.html"),
+ permalinker("Single Permalink: %s/2017/pageslug.html"),
+ relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
+ permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
+ } else {
+ b.AssertFileContent("public/2017/pageslug/index.html",
+ relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
+ permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
+
+ b.AssertFileContent("public/cpath/2017/pageslug/cindex.html",
+ relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"),
+ relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"),
+ relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"),
+ permalinker("Sunset Permalink: %s/cpath/2017/pageslug/sunset1.jpg"),
+ )
+ }
+
+ b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/c/logo.png"), "content")
+ b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/c/logo.png"), "content")
+ c.Assert(b.CheckExists("public/cpath/cpath/2017/pageslug/c/logo.png"), qt.Equals, false)
+
+ // Custom media type defined in site config.
+ c.Assert(len(leafBundle1.Resources().ByType("bepsays")), qt.Equals, 1)
+
+ if ugly {
+ b.AssertFileContent(filepath.FromSlash("public/2017/pageslug.html"),
+ "TheContent",
+ relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
+ permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"),
+ "Thumb Width: 123",
+ "Thumb Name: my-sunset-1",
+ relPermalinker("Short Sunset RelPermalink: %s/2017/pageslug/sunset2.jpg"),
+ "Short Thumb Width: 56",
+ "1: Image Title: Sunset Galore 1",
+ "1: Image Params: map[myparam:My Sunny Param]",
+ relPermalinker("1: Image RelPermalink: %s/2017/pageslug/sunset1.jpg"),
+ "2: Image Title: Sunset Galore 2",
+ "2: Image Params: map[myparam:My Sunny Param]",
+ "1: Image myParam: Lower: My Sunny Param Caps: My Sunny Param",
+ "0: Page Title: Bundle Galore",
+ )
+
+ // https://github.com/gohugoio/hugo/issues/5882
+ b.AssertFileContent(
+ filepath.FromSlash("public/2017/pageslug.html"), "0: Page RelPermalink: |")
+
+ b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug.html"), "TheContent")
+
+ // 은행
+ b.AssertFileContent(filepath.FromSlash("public/c/은행/logo-은행.png"), "은행 PNG")
+
+ } else {
+ b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "TheContent")
+ b.AssertFileContent(filepath.FromSlash("public/cpath/2017/pageslug/cindex.html"), "TheContent")
+ b.AssertFileContent(filepath.FromSlash("public/2017/pageslug/index.html"), "Single Title")
+ b.AssertFileContent(filepath.FromSlash("public/root/index.html"), "Single Title")
+
+ }
+ })
+ }
+ }
+ }
+}
+
+func TestPageBundlerSiteMultilingual(t *testing.T) {
+ t.Parallel()
+
+ for _, ugly := range []bool{false, true} {
+ ugly := ugly
+ t.Run(fmt.Sprintf("ugly=%t", ugly),
+ func(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ fs, cfg := newTestBundleSourcesMultilingual(t)
+ cfg.Set("uglyURLs", ugly)
+
+ b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+ b.Build(BuildCfg{})
+
+ sites := b.H
+
+ c.Assert(len(sites.Sites), qt.Equals, 2)
+
+ s := sites.Sites[0]
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 8)
+ c.Assert(len(s.Pages()), qt.Equals, 16)
+ // dumpPages(s.AllPages()...)
+
+ c.Assert(len(s.AllPages()), qt.Equals, 31)
+
+ bundleWithSubPath := s.getPage(page.KindPage, "lb/index")
+ c.Assert(bundleWithSubPath, qt.Not(qt.IsNil))
+
+ // See https://github.com/gohugoio/hugo/issues/4312
+ // Before that issue:
+ // A bundle in a/b/index.en.md
+ // a/b/index.en.md => OK
+ // a/b/index => OK
+ // index.en.md => ambiguous, but OK.
+ // With bundles, the file name has little meaning, the folder it lives in does. So this should also work:
+ // a/b
+ // and probably also just b (aka "my-bundle")
+ // These may also be translated, so we also need to test that.
+ // "bf", "my-bf-bundle", "index.md + nn
+ bfBundle := s.getPage(page.KindPage, "bf/my-bf-bundle/index")
+ c.Assert(bfBundle, qt.Not(qt.IsNil))
+ c.Assert(bfBundle.Language().Lang, qt.Equals, "en")
+ c.Assert(s.getPage(page.KindPage, "bf/my-bf-bundle/index.md"), qt.Equals, bfBundle)
+ c.Assert(s.getPage(page.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundle)
+ c.Assert(s.getPage(page.KindPage, "my-bf-bundle"), qt.Equals, bfBundle)
+
+ nnSite := sites.Sites[1]
+ c.Assert(len(nnSite.RegularPages()), qt.Equals, 7)
+
+ bfBundleNN := nnSite.getPage(page.KindPage, "bf/my-bf-bundle/index")
+ c.Assert(bfBundleNN, qt.Not(qt.IsNil))
+ c.Assert(bfBundleNN.Language().Lang, qt.Equals, "nn")
+ c.Assert(nnSite.getPage(page.KindPage, "bf/my-bf-bundle/index.nn.md"), qt.Equals, bfBundleNN)
+ c.Assert(nnSite.getPage(page.KindPage, "bf/my-bf-bundle"), qt.Equals, bfBundleNN)
+ c.Assert(nnSite.getPage(page.KindPage, "my-bf-bundle"), qt.Equals, bfBundleNN)
+
+ // See https://github.com/gohugoio/hugo/issues/4295
+ // Every resource should have its Name prefixed with its base folder.
+ cBundleResources := bundleWithSubPath.Resources().Match("c/**")
+ c.Assert(len(cBundleResources), qt.Equals, 4)
+ bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*")
+ c.Assert(bundlePage, qt.Not(qt.IsNil))
+
+ bcBundleNN, _ := nnSite.getPageNew(nil, "bc")
+ c.Assert(bcBundleNN, qt.Not(qt.IsNil))
+ bcBundleEN, _ := s.getPageNew(nil, "bc")
+ c.Assert(bcBundleNN.Language().Lang, qt.Equals, "nn")
+ c.Assert(bcBundleEN.Language().Lang, qt.Equals, "en")
+ c.Assert(len(bcBundleNN.Resources()), qt.Equals, 3)
+ c.Assert(len(bcBundleEN.Resources()), qt.Equals, 3)
+ b.AssertFileContent("public/en/bc/data1.json", "data1")
+ b.AssertFileContent("public/en/bc/data2.json", "data2")
+ b.AssertFileContent("public/en/bc/logo-bc.png", "logo")
+ b.AssertFileContent("public/nn/bc/data1.nn.json", "data1.nn")
+ b.AssertFileContent("public/nn/bc/data2.json", "data2")
+ b.AssertFileContent("public/nn/bc/logo-bc.png", "logo")
+ })
+ }
+}
+
+func TestMultilingualDisableDefaultLanguage(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+ _, cfg := newTestBundleSourcesMultilingual(t)
+ cfg.Set("disableLanguages", []string{"en"})
+ l := configLoader{cfg: cfg}
+ err := l.applyConfigDefaults()
+ c.Assert(err, qt.IsNil)
+ err = l.loadLanguageSettings(nil)
+ c.Assert(err, qt.Not(qt.IsNil))
+ c.Assert(err.Error(), qt.Contains, "cannot disable default language")
+}
+
+func TestMultilingualDisableLanguage(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+ fs, cfg := newTestBundleSourcesMultilingual(t)
+ cfg.Set("disableLanguages", []string{"nn"})
+
+ b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+ b.Build(BuildCfg{})
+ sites := b.H
+
+ c.Assert(len(sites.Sites), qt.Equals, 1)
+
+ s := sites.Sites[0]
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 8)
+ c.Assert(len(s.Pages()), qt.Equals, 16)
+ // No nn pages
+ c.Assert(len(s.AllPages()), qt.Equals, 16)
+ s.pageMap.withEveryBundlePage(func(p *pageState) bool {
+ c.Assert(p.Language().Lang != "nn", qt.Equals, true)
+ return false
+ })
+}
+
+func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
+ skipSymlink(t)
+
+ wd, _ := os.Getwd()
+ defer func() {
+ os.Chdir(wd)
+ }()
+
+ c := qt.New(t)
+
+ // We need to use the OS fs for this.
+ workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugosym")
+ c.Assert(err, qt.IsNil)
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("workingDir", workingDir)
+ fs := hugofs.NewFrom(hugofs.Os, cfg)
+
+ contentDirName := "content"
+
+ contentDir := filepath.Join(workingDir, contentDirName)
+ c.Assert(os.MkdirAll(filepath.Join(contentDir, "a"), 0777), qt.IsNil)
+
+ for i := 1; i <= 3; i++ {
+ c.Assert(os.MkdirAll(filepath.Join(workingDir, fmt.Sprintf("symcontent%d", i)), 0777), qt.IsNil)
+ }
+
+ c.Assert(os.MkdirAll(filepath.Join(workingDir, "symcontent2", "a1"), 0777), qt.IsNil)
+
+ // Symlinked sections inside content.
+ os.Chdir(contentDir)
+ for i := 1; i <= 3; i++ {
+ c.Assert(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)), qt.IsNil)
+ }
+
+ c.Assert(os.Chdir(filepath.Join(contentDir, "a")), qt.IsNil)
+
+ // Create a symlink to one single content file
+ c.Assert(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"), qt.IsNil)
+
+ c.Assert(os.Chdir(filepath.FromSlash("../../symcontent3")), qt.IsNil)
+
+ // Create a circular symlink. Will print some warnings.
+ c.Assert(os.Symlink(filepath.Join("..", contentDirName), filepath.FromSlash("circus")), qt.IsNil)
+
+ c.Assert(os.Chdir(workingDir), qt.IsNil)
+
+ defer clean()
+
+ cfg.Set("workingDir", workingDir)
+ cfg.Set("contentDir", contentDirName)
+ cfg.Set("baseURL", "https://example.com")
+
+ layout := `{{ .Title }}|{{ .Content }}`
+ pageContent := `---
+slug: %s
+date: 2017-10-09
+---
+
+TheContent.
+`
+
+ b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{
+ Fs: fs,
+ Cfg: cfg,
+ })
+
+ b.WithTemplates(
+ "_default/single.html", layout,
+ "_default/list.html", layout,
+ )
+
+ b.WithContent(
+ "a/regular.md", fmt.Sprintf(pageContent, "a1"),
+ )
+
+ b.WithSourceFile(
+ "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"),
+ "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"),
+ // Regular files inside symlinked folder.
+ "symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"),
+ "symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"),
+
+ // A bundle
+ "symcontent2/a1/index.md", fmt.Sprintf(pageContent, ""),
+ "symcontent2/a1/page.md", fmt.Sprintf(pageContent, "page"),
+ "symcontent2/a1/logo.png", "image",
+
+ // Assets
+ "symcontent3/s1.png", "image",
+ "symcontent3/s2.png", "image",
+ )
+
+ b.Build(BuildCfg{})
+ s := b.H.Sites[0]
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 7)
+ a1Bundle := s.getPage(page.KindPage, "symbolic2/a1/index.md")
+ c.Assert(a1Bundle, qt.Not(qt.IsNil))
+ c.Assert(len(a1Bundle.Resources()), qt.Equals, 2)
+ c.Assert(len(a1Bundle.Resources().ByType(pageResourceType)), qt.Equals, 1)
+
+ b.AssertFileContent(filepath.FromSlash("public/a/page/index.html"), "TheContent")
+ b.AssertFileContent(filepath.FromSlash("public/symbolic1/s1/index.html"), "TheContent")
+ b.AssertFileContent(filepath.FromSlash("public/symbolic2/a1/index.html"), "TheContent")
+}
+
// TestPageBundlerHeadless verifies that a leaf bundle marked headless: true
// renders no page of its own (empty RelPermalink/Permalink, no HTML output,
// excluded from the site's page collections) while its non-page resources are
// still published and its content/shortcodes are still processed.
func TestPageBundlerHeadless(t *testing.T) {
	t.Parallel()

	cfg, fs := newTestCfg()
	c := qt.New(t)

	workDir := "/work"
	cfg.Set("workingDir", workDir)
	cfg.Set("contentDir", "base")
	cfg.Set("baseURL", "https://example.com")

	pageContent := `---
title: "Bundle Galore"
slug: s1
date: 2017-01-23
---

TheContent.

{{< myShort >}}
`

	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), "single {{ .Content }}")
	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), "list")
	writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.html"), "SHORTCODE")

	// Regular leaf bundle in /a ...
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "l1.png"), "PNG image")
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "l2.png"), "PNG image")

	// ... headless leaf bundle in /b.
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "index.md"), `---
title: "Headless Bundle in Topless Bar"
slug: s2
headless: true
date: 2017-01-23
---

TheContent.
HEADLESS {{< myShort >}}
`)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "l1.png"), "PNG image")
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "l2.png"), "PNG image")
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "p1.md"), pageContent)

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

	// Only the regular bundle counts as a regular page.
	c.Assert(len(s.RegularPages()), qt.Equals, 1)

	regular := s.getPage(page.KindPage, "a/index")
	c.Assert(regular.RelPermalink(), qt.Equals, "/s1/")

	// The headless bundle is still reachable via getPage, but has no links
	// and its shortcodes are still rendered into the content.
	headless := s.getPage(page.KindPage, "b/index")
	c.Assert(headless, qt.Not(qt.IsNil))
	c.Assert(headless.Title(), qt.Equals, "Headless Bundle in Topless Bar")
	c.Assert(headless.RelPermalink(), qt.Equals, "")
	c.Assert(headless.Permalink(), qt.Equals, "")
	c.Assert(content(headless), qt.Contains, "HEADLESS SHORTCODE")

	// Two images plus one bundled page resource.
	headlessResources := headless.Resources()
	c.Assert(len(headlessResources), qt.Equals, 3)
	res := headlessResources.Match("l*")
	c.Assert(len(res), qt.Equals, 2)
	pageResource := headlessResources.GetMatch("p*")
	c.Assert(pageResource, qt.Not(qt.IsNil))
	p := pageResource.(page.Page)
	c.Assert(content(p), qt.Contains, "SHORTCODE")
	c.Assert(p.Name(), qt.Equals, "p1.md")

	th := newTestHelper(s.Cfg, s.Fs, t)

	th.assertFileContent(filepath.FromSlash("public/s1/index.html"), "TheContent")
	th.assertFileContent(filepath.FromSlash("public/s1/l1.png"), "PNG")

	// The headless bundle itself renders no HTML ...
	th.assertFileNotExist("public/s2/index.html")
	// But the bundled resources needs to be published
	th.assertFileContent(filepath.FromSlash("public/s2/l1.png"), "PNG")

	// No headless bundles here, please.
	// https://github.com/gohugoio/hugo/issues/6492
	c.Assert(s.RegularPages(), qt.HasLen, 1)
	c.Assert(s.home.RegularPages(), qt.HasLen, 1)
	c.Assert(s.home.Pages(), qt.HasLen, 1)
}
+
// TestPageBundlerHeadlessIssue6552 checks that headless bundles looked up via
// .Site.GetPage render an empty RelPermalink while still exposing their
// resources (h1 has one bundled page, h2 has none).
// See https://github.com/gohugoio/hugo/issues/6552.
func TestPageBundlerHeadlessIssue6552(t *testing.T) {
	t.Parallel()

	b := newTestSitesBuilder(t)
	b.WithContent("headless/h1/index.md", `
---
title: My Headless Bundle1
headless: true
---
`, "headless/h1/p1.md", `
---
title: P1
---
`, "headless/h2/index.md", `
---
title: My Headless Bundle2
headless: true
---
`)

	b.WithTemplatesAdded("index.html", `
{{ $headless1 := .Site.GetPage "headless/h1" }}
{{ $headless2 := .Site.GetPage "headless/h2" }}

HEADLESS1: {{ $headless1.Title }}|{{ $headless1.RelPermalink }}|{{ len $headless1.Resources }}|
HEADLESS2: {{ $headless2.Title }}{{ $headless2.RelPermalink }}|{{ len $headless2.Resources }}|

`)

	b.Build(BuildCfg{})

	// Empty RelPermalink collapses to "||" for h1; h2 has zero resources.
	b.AssertFileContent("public/index.html", `
HEADLESS1: My Headless Bundle1||1|
HEADLESS2: My Headless Bundle2|0|
`)
}
+
// TestMultiSiteBundles verifies bundle resource publishing in a two-language
// site: resources missing from a translation fall back to the default
// language's files, and output lands in the per-language public dirs only.
func TestMultiSiteBundles(t *testing.T) {
	c := qt.New(t)
	b := newTestSitesBuilder(t)
	b.WithConfigFile("toml", `

baseURL = "http://example.com/"

defaultContentLanguage = "en"

[languages]
[languages.en]
weight = 10
contentDir = "content/en"
[languages.nn]
weight = 20
contentDir = "content/nn"


`)

	b.WithContent("en/mybundle/index.md", `
---
headless: true
---

`)

	b.WithContent("nn/mybundle/index.md", `
---
headless: true
---

`)

	// en has both data files; nn only has data.yaml, so forms.yaml must be
	// inherited from en.
	b.WithContent("en/mybundle/data.yaml", `data en`)
	b.WithContent("en/mybundle/forms.yaml", `forms en`)
	b.WithContent("nn/mybundle/data.yaml", `data nn`)

	b.WithContent("en/_index.md", `
---
Title: Home
---

Home content.

`)

	b.WithContent("en/section-not-bundle/_index.md", `
---
Title: Section Page
---

Section content.

`)

	b.WithContent("en/section-not-bundle/single.md", `
---
Title: Section Single
Date: 2018-02-01
---

Single content.

`)

	b.Build(BuildCfg{})

	b.AssertFileContent("public/nn/mybundle/data.yaml", "data nn")
	b.AssertFileContent("public/nn/mybundle/forms.yaml", "forms en")
	b.AssertFileContent("public/mybundle/data.yaml", "data en")
	b.AssertFileContent("public/mybundle/forms.yaml", "forms en")

	// No doubled language prefixes; the default language publishes to root.
	c.Assert(b.CheckExists("public/nn/nn/mybundle/data.yaml"), qt.Equals, false)
	c.Assert(b.CheckExists("public/en/mybundle/data.yaml"), qt.Equals, false)

	homeEn := b.H.Sites[0].home
	c.Assert(homeEn, qt.Not(qt.IsNil))
	// 2018 presumably derives from single.md's Date (the home page sets no
	// date of its own) — TODO confirm.
	c.Assert(homeEn.Date().Year(), qt.Equals, 2018)

	b.AssertFileContent("public/section-not-bundle/index.html", "Section Page", "Content: <p>Section content.</p>")
	b.AssertFileContent("public/section-not-bundle/single/index.html", "Section Single", "|<p>Single content.</p>")
}
+
// newTestBundleSources builds an in-memory content tree for the page-bundler
// tests: regular pages, leaf bundles (with resource metadata and shortcodes),
// plain static assets, a custom media type, a non-ASCII slug bundle, and two
// real JPEG images copied from testdata.
func newTestBundleSources(t testing.TB) (*hugofs.Fs, config.Provider) {
	cfg, fs := newTestCfgBasic()
	c := qt.New(t)

	workDir := "/work"
	cfg.Set("workingDir", workDir)
	cfg.Set("contentDir", "base")
	cfg.Set("baseURL", "https://example.com")
	// Custom media type so the bundler has a non-standard suffix to classify.
	cfg.Set("mediaTypes", map[string]any{
		"bepsays/bep": map[string]any{
			"suffixes": []string{"bep"},
		},
	})

	pageContent := `---
title: "Bundle Galore"
slug: pageslug
date: 2017-10-09
---

TheContent.
`

	pageContentShortcode := `---
title: "Bundle Galore"
slug: pageslug
date: 2017-10-09
---

TheContent.

{{< myShort >}}
`

	// Front matter maps *.jpg resources to my-sunset-:counter names with a
	// custom param; used to verify resource metadata handling.
	pageWithImageShortcodeAndResourceMetadataContent := `---
title: "Bundle Galore"
slug: pageslug
date: 2017-10-09
resources:
- src: "*.jpg"
  name: "my-sunset-:counter"
  title: "Sunset Galore :counter"
  params:
   myParam: "My Sunny Param"
---

TheContent.

{{< myShort >}}
`

	pageContentNoSlug := `---
title: "Bundle Galore #2"
date: 2017-10-09
---

TheContent.
`

	singleLayout := `
Single Title: {{ .Title }}
Single RelPermalink: {{ .RelPermalink }}
Single Permalink: {{ .Permalink }}
Content: {{ .Content }}
{{ $sunset := .Resources.GetMatch "my-sunset-1*" }}
{{ with $sunset }}
Sunset RelPermalink: {{ .RelPermalink }}
Sunset Permalink: {{ .Permalink }}
{{ $thumb := .Fill "123x123" }}
Thumb Width: {{ $thumb.Width }}
Thumb Name: {{ $thumb.Name }}
Thumb Title: {{ $thumb.Title }}
Thumb RelPermalink: {{ $thumb.RelPermalink }}
{{ end }}
{{ $types := slice "image" "page" }}
{{ range $types }}
{{ $typeTitle := . | title }}
{{ range $i, $e := $.Resources.ByType . }}
{{ $i }}: {{ $typeTitle }} Title: {{ .Title }}
{{ $i }}: {{ $typeTitle }} Name: {{ .Name }}
{{ $i }}: {{ $typeTitle }} RelPermalink: {{ .RelPermalink }}|
{{ $i }}: {{ $typeTitle }} Params: {{ printf "%v" .Params }}
{{ $i }}: {{ $typeTitle }} myParam: Lower: {{ .Params.myparam }} Caps: {{ .Params.MYPARAM }}
{{ end }}
{{ end }}
`

	myShort := `
MyShort in {{ .Page.File.Path }}:
{{ $sunset := .Page.Resources.GetMatch "my-sunset-2*" }}
{{ with $sunset }}
Short Sunset RelPermalink: {{ .RelPermalink }}
{{ $thumb := .Fill "56x56" }}
Short Thumb Width: {{ $thumb.Width }}
{{ end }}
`

	listLayout := `{{ .Title }}|{{ .Content }}`

	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), singleLayout)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), listLayout)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.html"), myShort)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "shortcodes", "myShort.customo"), myShort)

	writeSource(t, fs, filepath.Join(workDir, "base", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "_1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "_1.png"), pageContent)

	writeSource(t, fs, filepath.Join(workDir, "base", "images", "hugo-logo.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "2.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "1.md"), pageContent)

	writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "index.md"), pageContentNoSlug)
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "ab1.md"), pageContentNoSlug)

	// Mostly plain static assets in a folder with a page in a sub folder thrown in.
	writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic1.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic2.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent)

	// Bundle
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "index.md"), pageWithImageShortcodeAndResourceMetadataContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "2.md"), pageContentShortcode)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "custom-mime.bep"), "bepsays")
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "my-bundle", "c", "logo.png"), "content")

	// Bundle with 은행 slug
	// See https://github.com/gohugoio/hugo/issues/4241
	writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "index.md"), `---
title: "은행 은행"
slug: 은행
date: 2017-10-09
---

Content for 은행.
`)

	// Bundle in root
	writeSource(t, fs, filepath.Join(workDir, "base", "root", "index.md"), pageWithImageShortcodeAndResourceMetadataContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "root", "1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "root", "c", "logo.png"), "content")

	writeSource(t, fs, filepath.Join(workDir, "base", "c", "bundle", "logo-은행.png"), "은행 PNG")

	// Write a real image into one of the bundles above.
	src, err := os.Open("testdata/sunset.jpg")
	c.Assert(err, qt.IsNil)

	// We need 2 to test https://github.com/gohugoio/hugo/issues/4202
	out, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset1.jpg"))
	c.Assert(err, qt.IsNil)
	out2, err := fs.Source.Create(filepath.Join(workDir, "base", "b", "my-bundle", "sunset2.jpg"))
	c.Assert(err, qt.IsNil)

	_, err = io.Copy(out, src)
	c.Assert(err, qt.IsNil)
	out.Close()
	// NOTE(review): the Seek error is ignored, and the second Copy's error is
	// only asserted after the closes below.
	src.Seek(0, 0)
	_, err = io.Copy(out2, src)
	out2.Close()
	src.Close()
	c.Assert(err, qt.IsNil)

	return fs, cfg
}
+
// newTestBundleSourcesMultilingual builds an in-memory content tree for the
// multilingual bundler tests (en as default plus nn), covering branch bundles,
// leaf bundles, translated resources (*.nn.*) and a translated bundle in a
// nested section.
func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, config.Provider) {
	cfg, fs := newTestCfgBasic()

	workDir := "/work"
	cfg.Set("workingDir", workDir)
	cfg.Set("contentDir", "base")
	cfg.Set("baseURL", "https://example.com")
	cfg.Set("defaultContentLanguage", "en")

	langConfig := map[string]any{
		"en": map[string]any{
			"weight":       1,
			"languageName": "English",
		},
		"nn": map[string]any{
			"weight":       2,
			"languageName": "Nynorsk",
		},
	}

	cfg.Set("languages", langConfig)

	pageContent := `---
slug: pageslug
date: 2017-10-09
---

TheContent.
`

	layout := `{{ .Title }}|{{ .Content }}|Lang: {{ .Site.Language.Lang }}`

	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)

	// Plain section with a translated page and a shared image.
	writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mylogo.png"), "content")

	// Branch bundle with language-specific (*.nn.*) resources.
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "en.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "a.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.nn.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "c.nn.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b", "d.nn.png"), "content")

	// Branch bundle with JSON data resources, partly translated.
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "logo-bc.png"), "logo")
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.json"), "data1")
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data2.json"), "data2")
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "data1.nn.json"), "data1.nn")

	// Leaf bundle (index.md) containing pages.
	writeSource(t, fs, filepath.Join(workDir, "base", "bd", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.nn.md"), pageContent)

	// Branch bundle (_index.md) containing pages.
	writeSource(t, fs, filepath.Join(workDir, "base", "be", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.nn.md"), pageContent)

	// Bundle leaf, multilingual
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.nn.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "one.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "d", "deep.png"), "content")

	// Translated bundle in some sensible sub path.
	writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "page.md"), pageContent)

	return fs, cfg
}
+
// TestBundledResourcesWhenMultipleOutputFormats checks that an edited bundled
// resource is republished on rebuild (server/Running mode) even with an
// unusual home output-format list.
// https://github.com/gohugoio/hugo/issues/5858
func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) {
	t.Parallel()

	b := newTestSitesBuilder(t).Running().WithConfigFile("toml", `
baseURL = "https://example.org"
[outputs]
	# This looks odd, but it triggers the behaviour in #5858
	# The total output formats list gets sorted, so CSS before HTML.
	home = [ "CSS" ]

`)
	b.WithContent("mybundle/index.md", `
---
title: Page
date: 2017-01-15
---
`,
		"mybundle/data.json", "MyData",
	)

	b.CreateSites().Build(BuildCfg{})

	b.AssertFileContent("public/mybundle/data.json", "MyData")

	// Change the bundled JSON file and make sure it gets republished.
	b.EditFiles("content/mybundle/data.json", "My changed data")

	b.Build(BuildCfg{})

	b.AssertFileContent("public/mybundle/data.json", "My changed data")
}
+
// TestBundleSlug checks that a slug set in front matter replaces only the last
// path segment, for both a plain page and a leaf bundle.
// https://github.com/gohugoio/hugo/issues/4870
func TestBundleSlug(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	// %s receives the slug value.
	const pageTemplate = `---
title: Title
slug: %s
---
`

	b := newTestSitesBuilder(t)

	b.WithTemplatesAdded("index.html", `{{ range .Site.RegularPages }}|{{ .RelPermalink }}{{ end }}|`)
	b.WithSimpleConfigFile().
		WithContent("about/services1/misc.md", fmt.Sprintf(pageTemplate, "this-is-the-slug")).
		WithContent("about/services2/misc/index.md", fmt.Sprintf(pageTemplate, "this-is-another-slug"))

	b.CreateSites().Build(BuildCfg{})

	b.AssertHome(
		"|/about/services1/this-is-the-slug/|/",
		"|/about/services2/this-is-another-slug/|")

	c.Assert(b.CheckExists("public/about/services1/this-is-the-slug/index.html"), qt.Equals, true)
	c.Assert(b.CheckExists("public/about/services2/this-is-another-slug/index.html"), qt.Equals, true)
}
+
// TestBundleMisc is a grab-bag of bundler behaviours across a four-language
// site: ignoreFiles filtering, section resource inheritance by language
// weight, language-scoped publishing, and a leaf + branch bundle coexisting
// in the same directory.
func TestBundleMisc(t *testing.T) {
	config := `
baseURL = "https://example.com"
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = true
ignoreFiles = ["README\\.md", "content/en/ignore"]

[Languages]
[Languages.en]
weight = 99999
contentDir = "content/en"
[Languages.nn]
weight = 20
contentDir = "content/nn"
[Languages.sv]
weight = 30
contentDir = "content/sv"
[Languages.nb]
weight = 40
contentDir = "content/nb"

`

	const pageContent = `---
title: %q
---
`
	createPage := func(s string) string {
		return fmt.Sprintf(pageContent, s)
	}

	b := newTestSitesBuilder(t).WithConfigFile("toml", config)
	b.WithLogger(loggers.NewWarningLogger())

	b.WithTemplates("_default/list.html", `{{ range .Site.Pages }}
{{ .Kind }}|{{ .Path }}|{{ with .CurrentSection }}CurrentSection: {{ .Path }}{{ end }}|{{ .RelPermalink }}{{ end }}
`)

	b.WithTemplates("_default/single.html", `Single: {{ .Title }}`)

	b.WithContent("en/sect1/sect2/_index.md", createPage("en: Sect 2"))
	b.WithContent("en/sect1/sect2/page.md", createPage("en: Page"))
	b.WithContent("en/sect1/sect2/data-branch.json", "mydata")
	b.WithContent("nn/sect1/sect2/page.md", createPage("nn: Page"))
	b.WithContent("nn/sect1/sect2/data-branch.json", "my nn data")

	// En only
	b.WithContent("en/enonly/myen.md", createPage("en: Page"))
	b.WithContent("en/enonly/myendata.json", "mydata")

	// Leaf

	b.WithContent("nn/b1/index.md", createPage("nn: leaf"))
	b.WithContent("en/b1/index.md", createPage("en: leaf"))
	b.WithContent("sv/b1/index.md", createPage("sv: leaf"))
	b.WithContent("nb/b1/index.md", createPage("nb: leaf"))

	// Should be ignored
	b.WithContent("en/ignore/page.md", createPage("en: ignore"))
	b.WithContent("en/README.md", createPage("en: ignore"))

	// Both leaf and branch bundle in same dir
	b.WithContent("en/b2/index.md", `---
slug: leaf
---
`)
	b.WithContent("en/b2/_index.md", createPage("en: branch"))

	b.WithContent("en/b1/data1.json", "en: data")
	b.WithContent("sv/b1/data1.json", "sv: data")
	b.WithContent("sv/b1/data2.json", "sv: data2")
	b.WithContent("nb/b1/data2.json", "nb: data2")

	b.WithContent("en/b3/_index.md", createPage("en: branch"))
	b.WithContent("en/b3/p1.md", createPage("en: page"))
	b.WithContent("en/b3/data1.json", "en: data")

	b.Build(BuildCfg{})

	b.AssertFileContent("public/en/index.html",
		filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"),
		"myen.md|CurrentSection: enonly")

	b.AssertFileContentFn("public/en/index.html", func(s string) bool {
		// Check ignored files
		return !regexp.MustCompile("README|ignore").MatchString(s)
	})

	b.AssertFileContent("public/nn/index.html", filepath.FromSlash("page|sect1/sect2/page.md|CurrentSection: sect1"))
	b.AssertFileContentFn("public/nn/index.html", func(s string) bool {
		return !strings.Contains(s, "enonly")
	})

	// Check order of inherited data file
	b.AssertFileContent("public/nb/b1/data1.json", "en: data") // Default content
	b.AssertFileContent("public/nn/b1/data2.json", "sv: data") // First match

	b.AssertFileContent("public/en/enonly/myen/index.html", "Single: en: Page")
	b.AssertFileContent("public/en/enonly/myendata.json", "mydata")

	c := qt.New(t)
	c.Assert(b.CheckExists("public/sv/enonly/myen/index.html"), qt.Equals, false)

	// Both leaf and branch bundle in same dir
	// We log a warning about it, but we keep both.
	b.AssertFileContent("public/en/b2/index.html",
		"/en/b2/leaf/",
		filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"))
}
+
// TestPageBundlerPartialTranslations verifies bundle resolution when bundles
// are only partially translated: untranslated resources are shared with the
// translation, and language-specific bundle pages stay with their language.
// Issue 6136.
func TestPageBundlerPartialTranslations(t *testing.T) {
	config := `
baseURL = "https://example.org"
defaultContentLanguage = "en"
defaultContentLanguageInSubDir = true
disableKinds = ["taxonomy", "term"]
[languages]
[languages.nn]
languageName = "Nynorsk"
weight = 2
title = "Tittel på Nynorsk"
[languages.en]
title = "Title in English"
languageName = "English"
weight = 1
`

	pageContent := func(id string) string {
		return fmt.Sprintf(`
---
title: %q
---
`, id)
	}

	dataContent := func(id string) string {
		return id
	}

	b := newTestSitesBuilder(t).WithConfigFile("toml", config)

	// sect1: nn-only branch with shared data resources.
	b.WithContent("blog/sect1/_index.nn.md", pageContent("s1.nn"))
	b.WithContent("blog/sect1/data.json", dataContent("s1.data"))

	b.WithContent("blog/sect1/b1/index.nn.md", pageContent("s1.b1.nn"))
	b.WithContent("blog/sect1/b1/data.json", dataContent("s1.b1.data"))

	// sect2: en branch with en, nn and mixed bundles.
	b.WithContent("blog/sect2/_index.md", pageContent("s2"))
	b.WithContent("blog/sect2/data.json", dataContent("s2.data"))

	b.WithContent("blog/sect2/b1/index.md", pageContent("s2.b1"))
	b.WithContent("blog/sect2/b1/data.json", dataContent("s2.b1.data"))

	b.WithContent("blog/sect2/b2/index.md", pageContent("s2.b2"))
	b.WithContent("blog/sect2/b2/bp.md", pageContent("s2.b2.bundlecontent"))

	// en bundle with an nn-only bundled page.
	b.WithContent("blog/sect2/b3/index.md", pageContent("s2.b3"))
	b.WithContent("blog/sect2/b3/bp.nn.md", pageContent("s2.b3.bundlecontent.nn"))

	b.WithContent("blog/sect2/b4/index.nn.md", pageContent("s2.b4"))
	b.WithContent("blog/sect2/b4/bp.nn.md", pageContent("s2.b4.bundlecontent.nn"))

	b.WithTemplates("index.html", `
Num Pages: {{ len .Site.Pages }}
{{ range .Site.Pages }}
{{ .Kind }}|{{ .RelPermalink }}|Content: {{ .Title }}|Resources: {{ range .Resources }}R: {{ .Title }}|{{ .Content }}|{{ end -}}
{{ end }}
`)

	b.Build(BuildCfg{})

	b.AssertFileContent("public/nn/index.html",
		"Num Pages: 6",
		"page|/nn/blog/sect1/b1/|Content: s1.b1.nn|Resources: R: data.json|s1.b1.data|",
		"page|/nn/blog/sect2/b3/|Content: s2.b3|Resources: R: s2.b3.bundlecontent.nn|",
		"page|/nn/blog/sect2/b4/|Content: s2.b4|Resources: R: s2.b4.bundlecontent.nn",
	)

	b.AssertFileContent("public/en/index.html",
		"Num Pages: 6",
		"section|/en/blog/sect2/|Content: s2|Resources: R: data.json|s2.data|",
		"page|/en/blog/sect2/b1/|Content: s2.b1|Resources: R: data.json|s2.b1.data|",
		"page|/en/blog/sect2/b2/|Content: s2.b2|Resources: R: s2.b2.bundlecontent|",
	)
}
+
// TestBundleIndexInSubFolder checks that an index.md inside a sub folder of a
// leaf bundle becomes a resource of the outer bundle rather than starting a
// new bundle.
// #6208
func TestBundleIndexInSubFolder(t *testing.T) {
	config := `
baseURL = "https://example.com"

`

	const pageContent = `---
title: %q
---
`
	createPage := func(s string) string {
		return fmt.Sprintf(pageContent, s)
	}

	b := newTestSitesBuilder(t).WithConfigFile("toml", config)
	b.WithLogger(loggers.NewWarningLogger())

	b.WithTemplates("_default/single.html", `{{ range .Resources }}
{{ .ResourceType }}|{{ .Title }}|
{{ end }}


`)

	b.WithContent("bundle/index.md", createPage("bundle index"))
	b.WithContent("bundle/p1.md", createPage("bundle p1"))
	b.WithContent("bundle/sub/p2.md", createPage("bundle sub p2"))
	b.WithContent("bundle/sub/index.md", createPage("bundle sub index"))
	b.WithContent("bundle/sub/data.json", "data")

	b.Build(BuildCfg{})

	// All files under bundle/, including sub/index.md, list as resources.
	b.AssertFileContent("public/bundle/index.html", `
        application|sub/data.json|
        page|bundle p1|
        page|bundle sub index|
        page|bundle sub p2|
`)
}
+
// TestBundleTransformMany runs three consecutive builds (Running mode) over 50
// bundles carrying YAML/JSON resources and checks that unmarshal, fingerprint
// and repeated minify transformations stay correct and stable across rebuilds.
func TestBundleTransformMany(t *testing.T) {
	b := newTestSitesBuilder(t).WithSimpleConfigFile().Running()

	for i := 1; i <= 50; i++ {
		b.WithContent(fmt.Sprintf("bundle%d/index.md", i), fmt.Sprintf(`
---
title: "Page"
weight: %d
---

`, i))
		b.WithSourceFile(fmt.Sprintf("content/bundle%d/data.yaml", i), fmt.Sprintf(`data: v%d`, i))
		b.WithSourceFile(fmt.Sprintf("content/bundle%d/data.json", i), fmt.Sprintf(`{ "data": "v%d" }`, i))
		b.WithSourceFile(fmt.Sprintf("assets/data%d/data.yaml", i), fmt.Sprintf(`vdata: v%d`, i))

	}

	// .Weight selects the matching assets/data<N> dir for each bundle.
	b.WithTemplatesAdded("_default/single.html", `
{{ $bundleYaml := .Resources.GetMatch "*.yaml" }}
{{ $bundleJSON := .Resources.GetMatch "*.json" }}
{{ $assetsYaml := resources.GetMatch (printf "data%d/*.yaml" .Weight) }}
{{ $data1 := $bundleYaml | transform.Unmarshal }}
{{ $data2 := $assetsYaml | transform.Unmarshal }}
{{ $bundleFingerprinted := $bundleYaml | fingerprint "md5" }}
{{ $assetsFingerprinted := $assetsYaml | fingerprint "md5" }}
{{ $jsonMin := $bundleJSON | minify }}
{{ $jsonMinMin := $jsonMin | minify }}
{{ $jsonMinMinMin := $jsonMinMin | minify }}

data content unmarshaled: {{ $data1.data }}
data assets content unmarshaled: {{ $data2.vdata }}
bundle fingerprinted: {{ $bundleFingerprinted.RelPermalink }}
assets fingerprinted: {{ $assetsFingerprinted.RelPermalink }}

bundle min min min: {{ $jsonMinMinMin.RelPermalink }}
bundle min min key: {{ $jsonMinMin.Key }}

`)

	// Build three times to exercise the rebuild/cache paths.
	for i := 0; i < 3; i++ {

		b.Build(BuildCfg{})

		// Note: this inner i shadows the build-loop i above.
		for i := 1; i <= 50; i++ {
			index := fmt.Sprintf("public/bundle%d/index.html", i)
			b.AssertFileContent(fmt.Sprintf("public/bundle%d/data.yaml", i), fmt.Sprintf("data: v%d", i))
			b.AssertFileContent(index, fmt.Sprintf("data content unmarshaled: v%d", i))
			b.AssertFileContent(index, fmt.Sprintf("data assets content unmarshaled: v%d", i))

			md5Asset := helpers.MD5String(fmt.Sprintf(`vdata: v%d`, i))
			b.AssertFileContent(index, fmt.Sprintf("assets fingerprinted: /data%d/data.%s.yaml", i, md5Asset))

			// The original is not used, make sure it's not published.
			b.Assert(b.CheckExists(fmt.Sprintf("public/data%d/data.yaml", i)), qt.Equals, false)

			md5Bundle := helpers.MD5String(fmt.Sprintf(`data: v%d`, i))
			b.AssertFileContent(index, fmt.Sprintf("bundle fingerprinted: /bundle%d/data.%s.yaml", i, md5Bundle))

			// Only the final minify step publishes a file; intermediates don't.
			b.AssertFileContent(index,
				fmt.Sprintf("bundle min min min: /bundle%d/data.min.min.min.json", i),
				fmt.Sprintf("bundle min min key: /bundle%d/data.min.min.json", i),
			)
			b.Assert(b.CheckExists(fmt.Sprintf("public/bundle%d/data.min.min.min.json", i)), qt.Equals, true)
			b.Assert(b.CheckExists(fmt.Sprintf("public/bundle%d/data.min.json", i)), qt.Equals, false)
			b.Assert(b.CheckExists(fmt.Sprintf("public/bundle%d/data.min.min.json", i)), qt.Equals, false)

		}

		// Touch an asset between builds to trigger a partial rebuild.
		b.EditFiles("assets/data/foo.yaml", "FOO")

	}
}
+
// TestPageBundlerHome verifies the home page acting as a bundle: a file next
// to _index.md becomes a home-page resource, visible both in the template and
// inside a render hook. Uses the OS file system via a temp dir.
func TestPageBundlerHome(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-bundler-home")
	c.Assert(err, qt.IsNil)

	cfg := config.NewWithTestDefaults()
	cfg.Set("workingDir", workDir)
	fs := hugofs.NewFrom(hugofs.Os, cfg)

	// NOTE(review): the MkdirAll error is ignored here.
	os.MkdirAll(filepath.Join(workDir, "content"), 0777)

	defer clean()

	b := newTestSitesBuilder(t)
	b.Fs = fs

	b.WithWorkingDir(workDir).WithViper(cfg)

	b.WithContent("_index.md", "---\ntitle: Home\n---\n![Alt text](image.jpg)")
	b.WithSourceFile("content/data.json", "DATA")

	b.WithTemplates("index.html", `Title: {{ .Title }}|First Resource: {{ index .Resources 0 }}|Content: {{ .Content }}`)
	b.WithTemplates("_default/_markup/render-image.html", `Hook Len Page Resources {{ len .Page.Resources }}`)

	b.Build(BuildCfg{})
	b.AssertFileContent("public/index.html", `
Title: Home|First Resource: data.json|Content: <p>Hook Len Page Resources 1</p>
`)
}
diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go
new file mode 100644
index 000000000..811fb6025
--- /dev/null
+++ b/hugolib/pagecollections.go
@@ -0,0 +1,340 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
// PageCollections contains the page collections for a site.
type PageCollections struct {
	// pageMap is the source of truth; the collections below are derived from it.
	pageMap *pageMap

	// Lazy initialized page collections
	pages           *lazyPagesFactory
	regularPages    *lazyPagesFactory
	allPages        *lazyPagesFactory
	allRegularPages *lazyPagesFactory
}
+
// Pages returns all pages.
// This is for the current language only.
// The list is built on first use and then cached.
func (c *PageCollections) Pages() page.Pages {
	return c.pages.get()
}
+
// RegularPages returns all the regular pages.
// This is for the current language only.
// The list is built on first use and then cached.
func (c *PageCollections) RegularPages() page.Pages {
	return c.regularPages.get()
}
+
// AllPages returns all pages for all languages.
// The list is built on first use and then cached.
func (c *PageCollections) AllPages() page.Pages {
	return c.allPages.get()
}
+
// AllRegularPages returns all regular pages for all languages.
// The list is built on first use and then cached.
func (c *PageCollections) AllRegularPages() page.Pages {
	return c.allRegularPages.get()
}
+
// lazyPagesFactory builds a page list on first use and caches the result.
type lazyPagesFactory struct {
	pages page.Pages

	// init guards the one-time invocation of factory.
	init    sync.Once
	factory page.PagesFactory
}
+
// get returns the memoized page list, invoking factory exactly once.
// sync.Once makes this safe for concurrent callers.
func (l *lazyPagesFactory) get() page.Pages {
	l.init.Do(func() {
		l.pages = l.factory()
	})
	return l.pages
}
+
// newLazyPagesFactory wraps factory in a lazily evaluated, cached page list.
func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory {
	return &lazyPagesFactory{factory: factory}
}
+
// newPageCollections creates a PageCollections backed by m.
// Note: only pages and regularPages are wired up here; the all* collections
// are initialized elsewhere (not visible in this constructor).
func newPageCollections(m *pageMap) *PageCollections {
	if m == nil {
		panic("must provide a pageMap")
	}

	c := &PageCollections{pageMap: m}

	c.pages = newLazyPagesFactory(func() page.Pages {
		return m.createListAllPages()
	})

	// regularPages filters the full list, so it also triggers c.pages.
	c.regularPages = newLazyPagesFactory(func() page.Pages {
		return c.findPagesByKindIn(page.KindPage, c.pages.get())
	})

	return c
}
+
// getPageOldVersion is an adapter func for the old API with Kind as first
// argument. This is invoked when you do .Site.GetPage. We drop the Kind and
// fail if there are more than 2 arguments, which would be ambiguous.
func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) {
	var refs []string
	for _, r := range ref {
		// A common construct in the wild is
		// .Site.GetPage "home" "" or
		// .Site.GetPage "home" "/"
		if r != "" && r != "/" {
			refs = append(refs, r)
		}
	}

	var key string

	if len(refs) > 2 {
		// This was allowed in Hugo <= 0.44, but we cannot support this with the
		// new API. This should be the most unusual case.
		return nil, fmt.Errorf(`too many arguments to .Site.GetPage: %v. Use lookups on the form {{ .Site.GetPage "/posts/mypage-md" }}`, ref)
	}

	if len(refs) == 0 || refs[0] == page.KindHome {
		key = "/"
	} else if len(refs) == 1 {
		// Note: this checks len(ref) (the raw argument count), not len(refs):
		// {{ .Site.GetPage "section" "" }} means the home "section".
		if len(ref) == 2 && refs[0] == page.KindSection {
			// This is an old style reference to the "Home Page section".
			// Typically fetched via {{ .Site.GetPage "section" .Section }}
			// See https://github.com/gohugoio/hugo/issues/4989
			key = "/"
		} else {
			key = refs[0]
		}
	} else {
		// Two refs left: the first is the (dropped) Kind, the second the path.
		key = refs[1]
	}

	// Normalize to a slash-rooted key for the new lookup API.
	key = filepath.ToSlash(key)
	if !strings.HasPrefix(key, "/") {
		key = "/" + key
	}

	return c.getPageNew(nil, key)
}
+
// getPage looks up a page via the old (kind, sections...) style API,
// swallowing any lookup error. Only used in tests.
func (c *PageCollections) getPage(typ string, sections ...string) page.Page {
	// Joins the sections into one path element, yielding [typ, "a/b"].
	refs := append([]string{typ}, path.Join(sections...))
	p, _ := c.getPageOldVersion(refs...)
	return p
}
+
// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive
// search path than getPageNew. Returns nil, nil when nothing matches.
func (c *PageCollections) getPageRef(context page.Page, ref string) (page.Page, error) {
	n, err := c.getContentNode(context, true, ref)
	if err != nil || n == nil || n.p == nil {
		return nil, err
	}
	return n.p, nil
}
+
// getPageNew resolves ref (e.g. "/posts/mypage.md") to a Page, relative to
// context when given. Returns nil, nil when nothing matches.
func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) {
	n, err := c.getContentNode(context, false, ref)
	if err != nil || n == nil || n.p == nil {
		return nil, err
	}
	return n.p, nil
}
+
// getSectionOrPage resolves ref to either a section node (second return value
// empty) or a page within the longest matching section. The second return
// value is the name key (filename with extension, language suffix and the
// reserved /index, /_index suffixes stripped); it is returned even when no
// node is found so callers can fall back to a simple name lookup.
func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) {
	var n *contentNode

	pref := helpers.AddTrailingSlash(ref)
	s, v, found := c.pageMap.sections.LongestPrefix(pref)

	if found {
		n = v.(*contentNode)
	}

	if found && s == pref {
		// A section
		return n, ""
	}

	m := c.pageMap

	// The remainder of ref below the matched section prefix.
	filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/")
	langSuffix := "." + m.s.Lang()

	// Trim both extension and any language code.
	name := paths.PathNoExt(filename)
	name = strings.TrimSuffix(name, langSuffix)

	// These are reserved bundle names and will always be stored by their owning
	// folder name.
	name = strings.TrimSuffix(name, "/index")
	name = strings.TrimSuffix(name, "/_index")

	if !found {
		return nil, name
	}

	// Check if it's a section with filename provided.
	// NOTE(review): assumes n.p is non-nil whenever a section prefix matched —
	// confirm section nodes always carry a page at this point.
	if !n.p.File().IsZero() && n.p.File().LogicalName() == filename {
		return n, name
	}

	return m.getPage(s, name), name
}
+
// shouldDoSimpleLookup reports whether ref may fall back to a plain filename
// lookup. For Ref/Reflink and .Site.GetPage we do simple name lookups for the
// potentially ambiguous "myarticle.md" and "/myarticle.md", but not for
// "./myarticle*" or "section/myarticle". Note: ref must be non-empty.
func shouldDoSimpleLookup(ref string) bool {
	if ref[0] == '.' {
		// Explicitly page-relative — never a simple lookup.
		return false
	}

	switch n := strings.Count(ref, "/"); {
	case n == 0:
		// Bare name, e.g. "myarticle.md".
		return true
	case n == 1:
		// One separator: only rooted names like "/myarticle.md" qualify;
		// "section/myarticle" does not.
		return ref[0] == '/'
	default:
		return false
	}
}
+
// getContentNode resolves ref to a *contentNode. The lookup is
// case-insensitive and tries, in order: section or page in a section
// (page-relative first when context is given), taxonomy node, then a
// module-prefixed reverse filename index, and finally — for ref/relref or
// context-free lookups — a simple base-name lookup.
func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) {
	ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref)))

	if ref == "" {
		ref = "/"
	}

	// Keep the raw (pre-join) ref for the rooted retry below.
	inRef := ref
	navUp := strings.HasPrefix(ref, "..")
	var doSimpleLookup bool
	if isReflink || context == nil {
		doSimpleLookup = shouldDoSimpleLookup(ref)
	}

	if context != nil && !strings.HasPrefix(ref, "/") {
		// Try the page-relative path.
		var base string
		if context.File().IsZero() {
			base = context.SectionsPath()
		} else {
			meta := context.File().FileInfo().Meta()
			base = filepath.ToSlash(filepath.Dir(meta.Path))
			if meta.Classifier == files.ContentClassLeaf {
				// Bundles are stored in subfolders e.g. blog/mybundle/index.md,
				// so if the user has not explicitly asked to go up,
				// look on the "blog" level.
				if !navUp {
					base = path.Dir(base)
				}
			}
		}
		ref = path.Join("/", strings.ToLower(base), ref)
	}

	if !strings.HasPrefix(ref, "/") {
		ref = "/" + ref
	}

	m := c.pageMap

	// It's either a section, a page in a section or a taxonomy node.
	// Start with the most likely:
	n, name := c.getSectionOrPage(ref)
	if n != nil {
		return n, nil
	}

	if !strings.HasPrefix(inRef, "/") {
		// Many people will have "post/foo.md" in their content files.
		if n, _ := c.getSectionOrPage("/" + inRef); n != nil {
			return n, nil
		}
	}

	// Check if it's a taxonomy node
	pref := helpers.AddTrailingSlash(ref)
	s, v, found := m.taxonomies.LongestPrefix(pref)

	if found {
		if !m.onSameLevel(pref, s) {
			return nil, nil
		}
		return v.(*contentNode), nil
	}

	// Looks up s in the reverse filename index; a sentinel marks names shared
	// by more than one page, which is an error for the caller.
	getByName := func(s string) (*contentNode, error) {
		n := m.pageReverseIndex.Get(s)
		if n != nil {
			if n == ambiguousContentNode {
				return nil, fmt.Errorf("page reference %q is ambiguous", ref)
			}
			return n, nil
		}

		return nil, nil
	}

	// Prefer the module of the context page; fall back to the home page's.
	var module string
	if context != nil && !context.File().IsZero() {
		module = context.File().FileInfo().Meta().Module
	}

	if module == "" && !c.pageMap.s.home.File().IsZero() {
		module = c.pageMap.s.home.File().FileInfo().Meta().Module
	}

	if module != "" {
		n, err := getByName(module + ref)
		if err != nil {
			return nil, err
		}
		if n != nil {
			return n, nil
		}
	}

	if !doSimpleLookup {
		return nil, nil
	}

	// Ref/relref supports this potentially ambiguous lookup.
	return getByName(path.Base(name))
}
+
+func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
+ var pages page.Pages
+ for _, p := range inPages {
+ if p.Kind() == kind {
+ pages = append(pages, p)
+ }
+ }
+ return pages
+}
diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go
new file mode 100644
index 000000000..d664b7f4e
--- /dev/null
+++ b/hugolib/pagecollections_test.go
@@ -0,0 +1,420 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "math/rand"
+ "path"
+ "path/filepath"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
+// pageCollectionsPageTemplate is the markdown page skeleton used by the tests
+// in this file; the %s verb receives the page title. Every page is tagged
+// with the "Hugo" category so taxonomy lookups can be exercised.
+const pageCollectionsPageTemplate = `---
+title: "%s"
+categories:
+- Hugo
+---
+# Doc
+`
+
+// BenchmarkGetPage measures getPageNew lookups for the home page and for a
+// randomly chosen section on a site with 10 sections x 100 pages.
+func BenchmarkGetPage(b *testing.B) {
+ var (
+ cfg, fs = newTestCfg()
+ r = rand.New(rand.NewSource(time.Now().UnixNano()))
+ )
+
+ for i := 0; i < 10; i++ {
+ for j := 0; j < 100; j++ {
+ writeSource(b, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), "CONTENT")
+ }
+ }
+
+ s := buildSingleSite(b, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ // Pre-compute the random section refs so the timed loop measures
+ // only the lookups, not the RNG or formatting.
+ pagePaths := make([]string, b.N)
+
+ for i := 0; i < b.N; i++ {
+ pagePaths[i] = fmt.Sprintf("sect%d", r.Intn(10))
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ home, _ := s.getPageNew(nil, "/")
+ if home == nil {
+ b.Fatal("Home is nil")
+ }
+
+ p, _ := s.getPageNew(nil, pagePaths[i])
+ if p == nil {
+ b.Fatal("Section is nil")
+ }
+
+ }
+}
+
+// createGetPageRegularBenchmarkSite builds a site with 10 sections of 100
+// regular pages each, shared by the GetPage benchmarks and their sanity test.
+func createGetPageRegularBenchmarkSite(t testing.TB) *Site {
+ var (
+ c = qt.New(t)
+ cfg, fs = newTestCfg()
+ )
+
+ pc := func(title string) string {
+ return fmt.Sprintf(pageCollectionsPageTemplate, title)
+ }
+
+ for i := 0; i < 10; i++ {
+ for j := 0; j < 100; j++ {
+ content := pc(fmt.Sprintf("Title%d_%d", i, j))
+ writeSource(c, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
+ }
+ }
+
+ // SkipRender: these tests only exercise page lookup, not output.
+ return buildSingleSite(c, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+}
+
+// TestBenchmarkGetPageRegular sanity-checks the benchmark site: absolute
+// refs like /sectN/pageN.md must resolve to non-nil pages.
+func TestBenchmarkGetPageRegular(t *testing.T) {
+ c := qt.New(t)
+ s := createGetPageRegularBenchmarkSite(t)
+
+ for i := 0; i < 10; i++ {
+ pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i))
+ // NOTE(review): the local variable shadows the imported "page"
+ // package inside this loop; harmless here, but easy to trip over.
+ page, _ := s.getPageNew(nil, pp)
+ c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp))
+ }
+}
+
+// BenchmarkGetPageRegular measures regular-page lookups both from the site
+// root (absolute refs) and relative to a randomly chosen context page.
+func BenchmarkGetPageRegular(b *testing.B) {
+ r := rand.New(rand.NewSource(time.Now().UnixNano()))
+
+ b.Run("From root", func(b *testing.B) {
+ s := createGetPageRegularBenchmarkSite(b)
+ c := qt.New(b)
+
+ // Pre-compute refs so the timed loop contains only lookups.
+ pagePaths := make([]string, b.N)
+
+ for i := 0; i < b.N; i++ {
+ pagePaths[i] = path.Join(fmt.Sprintf("/sect%d", r.Intn(10)), fmt.Sprintf("page%d.md", r.Intn(100)))
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ page, _ := s.getPageNew(nil, pagePaths[i])
+ c.Assert(page, qt.Not(qt.IsNil))
+ }
+ })
+
+ b.Run("Page relative", func(b *testing.B) {
+ s := createGetPageRegularBenchmarkSite(b)
+ c := qt.New(b)
+ allPages := s.RegularPages()
+
+ pagePaths := make([]string, b.N)
+ pages := make([]page.Page, b.N)
+
+ for i := 0; i < b.N; i++ {
+ pagePaths[i] = fmt.Sprintf("page%d.md", r.Intn(100))
+ // Context pages are drawn from the first third of the page
+ // list only; presumably to bias towards a few sections —
+ // TODO(review) confirm the intent.
+ pages[i] = allPages[r.Intn(len(allPages)/3)]
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ page, _ := s.getPageNew(pages[i], pagePaths[i])
+ c.Assert(page, qt.Not(qt.IsNil))
+ }
+ })
+}
+
+// getPageTest describes one GetPage/getPageNew lookup case.
+type getPageTest struct {
+ name string
+ // kind is the expected page kind, or the sentinel values
+ // "Ambiguous" / "NoPage" (see check below).
+ kind string
+ // context is the page the ref is resolved relative to; nil means
+ // a site-level (root) lookup.
+ context page.Page
+ // pathVariants are alternative spellings of the same ref that must
+ // all resolve identically.
+ pathVariants []string
+ expectedTitle string
+}
+
+// check asserts the outcome of a single lookup against the expectation
+// encoded in t.kind: "Ambiguous" expects an error and nil page, "NoPage"
+// expects no error and nil page, anything else expects a page of that kind
+// with the expected title.
+func (t *getPageTest) check(p page.Page, err error, errorMsg string, c *qt.C) {
+ c.Helper()
+ errorComment := qt.Commentf(errorMsg)
+ switch t.kind {
+ case "Ambiguous":
+ c.Assert(err, qt.Not(qt.IsNil))
+ c.Assert(p, qt.IsNil, errorComment)
+ case "NoPage":
+ c.Assert(err, qt.IsNil)
+ c.Assert(p, qt.IsNil, errorComment)
+ default:
+ c.Assert(err, qt.IsNil, errorComment)
+ c.Assert(p, qt.Not(qt.IsNil), errorComment)
+ c.Assert(p.Kind(), qt.Equals, t.kind, errorComment)
+ c.Assert(p.Title(), qt.Equals, t.expectedTitle, errorComment)
+ }
+}
+
+// TestGetPage is the main table-driven test for page lookup. It builds a
+// fixture site (sections, nested sections, taxonomy terms, leaf bundles and
+// deliberately overlapping names), then runs every ref variant through both
+// the public Site.GetPage and the internal Site.getPageNew.
+func TestGetPage(t *testing.T) {
+ var (
+ cfg, fs = newTestCfg()
+ c = qt.New(t)
+ )
+
+ pc := func(title string) string {
+ return fmt.Sprintf(pageCollectionsPageTemplate, title)
+ }
+
+ for i := 0; i < 10; i++ {
+ for j := 0; j < 10; j++ {
+ content := pc(fmt.Sprintf("Title%d_%d", i, j))
+ writeSource(t, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
+ }
+ }
+
+ content := pc("home page")
+ writeSource(t, fs, filepath.Join("content", "_index.md"), content)
+
+ content = pc("about page")
+ writeSource(t, fs, filepath.Join("content", "about.md"), content)
+
+ content = pc("section 3")
+ writeSource(t, fs, filepath.Join("content", "sect3", "_index.md"), content)
+
+ // unique.md / Unique2.md exercise short-ref and case-insensitive lookup.
+ writeSource(t, fs, filepath.Join("content", "sect3", "unique.md"), pc("UniqueBase"))
+ writeSource(t, fs, filepath.Join("content", "sect3", "Unique2.md"), pc("UniqueBase2"))
+
+ content = pc("another sect7")
+ writeSource(t, fs, filepath.Join("content", "sect3", "sect7", "_index.md"), content)
+
+ content = pc("deep page")
+ writeSource(t, fs, filepath.Join("content", "sect3", "subsect", "deep.md"), content)
+
+ // Bundle variants
+ writeSource(t, fs, filepath.Join("content", "sect3", "b1", "index.md"), pc("b1 bundle"))
+ writeSource(t, fs, filepath.Join("content", "sect3", "index", "index.md"), pc("index bundle"))
+
+ writeSource(t, fs, filepath.Join("content", "section_bundle_overlap", "_index.md"), pc("index overlap section"))
+ writeSource(t, fs, filepath.Join("content", "section_bundle_overlap_bundle", "index.md"), pc("index overlap bundle"))
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ // sec3 serves as the context page for the relative-lookup cases below.
+ sec3, err := s.getPageNew(nil, "/sect3")
+ c.Assert(err, qt.IsNil)
+ c.Assert(sec3, qt.Not(qt.IsNil))
+
+ tests := []getPageTest{
+ // legacy content root relative paths
+ {"Root relative, no slash, home", page.KindHome, nil, []string{""}, "home page"},
+ {"Root relative, no slash, root page", page.KindPage, nil, []string{"about.md", "ABOUT.md"}, "about page"},
+ {"Root relative, no slash, section", page.KindSection, nil, []string{"sect3"}, "section 3"},
+ {"Root relative, no slash, section page", page.KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"},
+ {"Root relative, no slash, sub setion", page.KindSection, nil, []string{"sect3/sect7"}, "another sect7"},
+ {"Root relative, no slash, nested page", page.KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"},
+ {"Root relative, no slash, OS slashes", page.KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"},
+
+ {"Short ref, unique", page.KindPage, nil, []string{"unique.md", "unique"}, "UniqueBase"},
+ {"Short ref, unique, upper case", page.KindPage, nil, []string{"Unique2.md", "unique2.md", "unique2"}, "UniqueBase2"},
+ {"Short ref, ambiguous", "Ambiguous", nil, []string{"page1.md"}, ""},
+
+ // ISSUE: This is an ambiguous ref, but because we have to support the legacy
+ // content root relative paths without a leading slash, the lookup
+ // returns /sect7. This undermines ambiguity detection, but we have no choice.
+ //{"Ambiguous", nil, []string{"sect7"}, ""},
+ {"Section, ambigous", page.KindSection, nil, []string{"sect7"}, "Sect7s"},
+
+ {"Absolute, home", page.KindHome, nil, []string{"/", ""}, "home page"},
+ {"Absolute, page", page.KindPage, nil, []string{"/about.md", "/about"}, "about page"},
+ {"Absolute, sect", page.KindSection, nil, []string{"/sect3"}, "section 3"},
+ {"Absolute, page in subsection", page.KindPage, nil, []string{"/sect3/page1.md", "/Sect3/Page1.md"}, "Title3_1"},
+ {"Absolute, section, subsection with same name", page.KindSection, nil, []string{"/sect3/sect7"}, "another sect7"},
+ {"Absolute, page, deep", page.KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"},
+ {"Absolute, page, OS slashes", page.KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, // test OS-specific path
+ {"Absolute, unique", page.KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"},
+ {"Absolute, unique, case", page.KindPage, nil, []string{"/sect3/Unique2.md", "/sect3/unique2.md", "/sect3/unique2", "/sect3/Unique2"}, "UniqueBase2"},
+ // next test depends on this page existing
+ // {"NoPage", nil, []string{"/unique.md"}, ""}, // ISSUE #4969: this is resolving to /sect3/unique.md
+ {"Absolute, missing page", "NoPage", nil, []string{"/missing-page.md"}, ""},
+ {"Absolute, missing section", "NoPage", nil, []string{"/missing-section"}, ""},
+
+ // relative paths
+ {"Dot relative, home", page.KindHome, sec3, []string{".."}, "home page"},
+ {"Dot relative, home, slash", page.KindHome, sec3, []string{"../"}, "home page"},
+ {"Dot relative about", page.KindPage, sec3, []string{"../about.md"}, "about page"},
+ {"Dot", page.KindSection, sec3, []string{"."}, "section 3"},
+ {"Dot slash", page.KindSection, sec3, []string{"./"}, "section 3"},
+ {"Page relative, no dot", page.KindPage, sec3, []string{"page1.md"}, "Title3_1"},
+ {"Page relative, dot", page.KindPage, sec3, []string{"./page1.md"}, "Title3_1"},
+ {"Up and down another section", page.KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"},
+ {"Rel sect7", page.KindSection, sec3, []string{"sect7"}, "another sect7"},
+ {"Rel sect7 dot", page.KindSection, sec3, []string{"./sect7"}, "another sect7"},
+ {"Dot deep", page.KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"},
+ {"Dot dot inner", page.KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"},
+ {"Dot OS slash", page.KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, // test OS-specific path
+ {"Dot unique", page.KindPage, sec3, []string{"./unique.md"}, "UniqueBase"},
+ {"Dot sect", "NoPage", sec3, []string{"./sect2"}, ""},
+ //{"NoPage", sec3, []string{"sect2"}, ""}, // ISSUE: /sect3 page relative query is resolving to /sect2
+
+ {"Abs, ignore context, home", page.KindHome, sec3, []string{"/"}, "home page"},
+ {"Abs, ignore context, about", page.KindPage, sec3, []string{"/about.md"}, "about page"},
+ {"Abs, ignore context, page in section", page.KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"},
+ {"Abs, ignore context, page subsect deep", page.KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, // next test depends on this page existing
+ {"Abs, ignore context, page deep", "NoPage", sec3, []string{"/subsect/deep.md"}, ""},
+
+ // Taxonomies
+ {"Taxonomy term", page.KindTaxonomy, nil, []string{"categories"}, "Categories"},
+ {"Taxonomy", page.KindTerm, nil, []string{"categories/hugo", "categories/Hugo"}, "Hugo"},
+
+ // Bundle variants
+ {"Bundle regular", page.KindPage, nil, []string{"sect3/b1", "sect3/b1/index.md", "sect3/b1/index.en.md"}, "b1 bundle"},
+ {"Bundle index name", page.KindPage, nil, []string{"sect3/index/index.md", "sect3/index"}, "index bundle"},
+
+ // https://github.com/gohugoio/hugo/issues/7301
+ {"Section and bundle overlap", page.KindPage, nil, []string{"section_bundle_overlap_bundle"}, "index overlap bundle"},
+ }
+
+ for _, test := range tests {
+ c.Run(test.name, func(c *qt.C) {
+ errorMsg := fmt.Sprintf("Test case %v %v -> %s", test.context, test.pathVariants, test.expectedTitle)
+
+ // test legacy public Site.GetPage (which does not support page context relative queries)
+ if test.context == nil {
+ for _, ref := range test.pathVariants {
+ args := append([]string{test.kind}, ref)
+ page, err := s.Info.GetPage(args...)
+ test.check(page, err, errorMsg, c)
+ }
+ }
+
+ // test new internal Site.getPageNew
+ for _, ref := range test.pathVariants {
+ page2, err := s.getPageNew(test.context, ref)
+ test.check(page2, err, errorMsg, c)
+ }
+ })
+ }
+}
+
+// TestGetPageRelative verifies that .GetPage with a bare file name resolves
+// relative to the calling page's own section, and that the "who" section gets
+// NOT FOUND because its members page is marked as a draft.
+// https://github.com/gohugoio/hugo/issues/6034
+func TestGetPageRelative(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ for i, section := range []string{"what", "where", "who"} {
+ // The third section's members page is a draft, so it is excluded.
+ isDraft := i == 2
+ // NOTE(review): the front matter delimiter and the title share one
+ // line here ("---title: ..."); confirm the test builder tolerates
+ // this formatting.
+ b.WithContent(
+ section+"/_index.md", fmt.Sprintf("---title: %s\n---", section),
+ section+"/members.md", fmt.Sprintf("---title: members %s\ndraft: %t\n---", section, isDraft),
+ )
+ }
+
+ b.WithTemplates("_default/list.html", `
+{{ with .GetPage "members.md" }}
+ Members: {{ .Title }}
+{{ else }}
+NOT FOUND
+{{ end }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/what/index.html", `Members: members what`)
+ b.AssertFileContent("public/where/index.html", `Members: members where`)
+ b.AssertFileContent("public/who/index.html", `NOT FOUND`)
+}
+
+// TestGetPageMultilingual verifies that site.GetPage does not leak a page
+// from another language: "docs/1" exists only in the default (ru) site, so
+// the en site must render NOT FOUND.
+// https://github.com/gohugoio/hugo/issues/7016
+func TestGetPageMultilingual(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithConfigFile("yaml", `
+baseURL: "http://example.org/"
+languageCode: "en-us"
+defaultContentLanguage: ru
+title: "My New Hugo Site"
+uglyurls: true
+
+languages:
+  ru: {}
+  en: {}
+`)
+
+ b.WithContent(
+ "docs/1.md", "\n---title: p1\n---",
+ "news/1.md", "\n---title: p1\n---",
+ "news/1.en.md", "\n---title: p1en\n---",
+ "news/about/1.md", "\n---title: about1\n---",
+ "news/about/1.en.md", "\n---title: about1en\n---",
+ )
+
+ b.WithTemplates("index.html", `
+{{ with site.GetPage "docs/1" }}
+ Docs p1: {{ .Title }}
+{{ else }}
+NOT FOUND
+{{ end }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `Docs p1: p1`)
+ b.AssertFileContent("public/en/index.html", `NOT FOUND`)
+}
+
+// TestShouldDoSimpleLookup pins the rule that only bare names and
+// root-level refs qualify for the simple (base-name) lookup; anything with
+// a directory component does not.
+func TestShouldDoSimpleLookup(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(shouldDoSimpleLookup("foo.md"), qt.Equals, true)
+ c.Assert(shouldDoSimpleLookup("/foo.md"), qt.Equals, true)
+ c.Assert(shouldDoSimpleLookup("./foo.md"), qt.Equals, false)
+ c.Assert(shouldDoSimpleLookup("docs/foo.md"), qt.Equals, false)
+}
+
+// TestRegularPagesRecursive verifies that RegularPagesRecursive on a section
+// returns regular pages from the section and all of its descendants (but
+// never the section nodes themselves).
+func TestRegularPagesRecursive(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithConfigFile("yaml", `
+baseURL: "http://example.org/"
+title: "My New Hugo Site"
+
+`)
+
+ b.WithContent(
+ "docs/1.md", "\n---title: docs1\n---",
+ "docs/sect1/_index.md", "\n---title: docs_sect1\n---",
+ "docs/sect1/ps1.md", "\n---title: docs_sect1_ps1\n---",
+ "docs/sect1/ps2.md", "\n---title: docs_sect1_ps2\n---",
+ "docs/sect1/sect1_s2/_index.md", "\n---title: docs_sect1_s2\n---",
+ "docs/sect1/sect1_s2/ps2_1.md", "\n---title: docs_sect1_s2_1\n---",
+ "docs/sect2/_index.md", "\n---title: docs_sect2\n---",
+ "docs/sect2/ps1.md", "\n---title: docs_sect2_ps1\n---",
+ "docs/sect2/ps2.md", "\n---title: docs_sect2_ps2\n---",
+ "news/1.md", "\n---title: news1\n---",
+ )
+
+ b.WithTemplates("index.html", `
+{{ $sect1 := site.GetPage "sect1" }}
+
+Sect1 RegularPagesRecursive: {{ range $sect1.RegularPagesRecursive }}{{ .Kind }}:{{ .RelPermalink}}|{{ end }}|End.
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Sect1 RegularPagesRecursive: page:/docs/sect1/ps1/|page:/docs/sect1/ps2/|page:/docs/sect1/sect1_s2/ps2_1/||End.
+
+
+`)
+}
diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go
new file mode 100644
index 000000000..da7515fc2
--- /dev/null
+++ b/hugolib/pages_capture.go
@@ -0,0 +1,580 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "context"
+ "fmt"
+ "os"
+ pth "path"
+ "path/filepath"
+ "reflect"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/parser/pageparser"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+)
+
+const (
+ // walkIsRootFileMetaKey is a file-metadata key name.
+ // NOTE(review): not referenced anywhere in this file — confirm it is
+ // used elsewhere, otherwise it can be removed.
+ walkIsRootFileMetaKey = "walkIsRootFileMetaKey"
+)
+
+// newPagesCollector creates a pagesCollector that walks the source
+// filesystem in sp and feeds what it finds to proc. A non-empty filenames
+// list switches the collector into partial-rebuild mode (see Collect).
+func newPagesCollector(
+ sp *source.SourceSpec,
+ contentMap *pageMaps,
+ logger loggers.Logger,
+ contentTracker *contentChangeMap,
+ proc pagesCollectorProcessorProvider, filenames ...string) *pagesCollector {
+ return &pagesCollector{
+ fs: sp.SourceFs,
+ contentMap: contentMap,
+ proc: proc,
+ sp: sp,
+ logger: logger,
+ filenames: filenames,
+ tracker: contentTracker,
+ }
+}
+
+// contentDirKey identifies a changed content directory in a partial rebuild:
+// the directory, the file that triggered the change, and the bundle type.
+type contentDirKey struct {
+ dirname string
+ filename string
+ tp bundleDirType
+}
+
+// fileinfoBundle groups a bundle's header file (index.md/_index.md) with the
+// resource files that belong to it, for a single language.
+type fileinfoBundle struct {
+ header hugofs.FileMetaInfo
+ resources []hugofs.FileMetaInfo
+}
+
+// containsResource reports whether the bundle already holds a resource
+// with the given name.
+func (b *fileinfoBundle) containsResource(name string) bool {
+ for i := range b.resources {
+ if b.resources[i].Name() == name {
+ return true
+ }
+ }
+ return false
+}
+
+// pageBundles maps a language code to its bundle variant.
+type pageBundles map[string]*fileinfoBundle
+
+// pagesCollector walks the content filesystem, classifies what it finds
+// (bundles vs. plain files) and forwards the results to proc.
+type pagesCollector struct {
+ sp *source.SourceSpec
+ fs afero.Fs
+ logger loggers.Logger
+
+ contentMap *pageMaps
+
+ // Ordered list (bundle headers first) used in partial builds.
+ filenames []string
+
+ // Content files tracker used in partial builds.
+ tracker *contentChangeMap
+
+ proc pagesCollectorProcessorProvider
+}
+
+// isCascadingEdit returns whether the dir represents a cascading edit.
+// That is, if a front matter cascade section is removed, added or edited.
+// If this is the case we must re-evaluate its descendants.
+// It also returns the tree key of the affected section ("/" if none found).
+func (c *pagesCollector) isCascadingEdit(dir contentDirKey) (bool, string) {
+ // This is either a section or a taxonomy node. Find it.
+ prefix := cleanTreeKey(dir.dirname)
+
+ section := "/"
+ var isCascade bool
+
+ c.contentMap.walkBranchesPrefix(prefix, func(s string, n *contentNode) bool {
+ if n.fi == nil || dir.filename != n.fi.Meta().Filename {
+ return false
+ }
+
+ f, err := n.fi.Meta().Open()
+ if err != nil {
+ // File may have been removed, assume a cascading edit.
+ // A few false positives are not too bad.
+ isCascade = true
+ return true
+ }
+
+ pf, err := pageparser.ParseFrontMatterAndContent(f)
+ f.Close()
+ if err != nil {
+ // Unparseable front matter: assume the worst.
+ isCascade = true
+ return true
+ }
+
+ if n.p == nil || n.p.bucket == nil {
+ return true
+ }
+
+ section = s
+
+ maps.PrepareParams(pf.FrontMatter)
+ cascade1, ok := pf.FrontMatter["cascade"]
+ // Fix: len on a nil value is 0, so the former explicit nil check
+ // ("cascade != nil && len(cascade) > 0") was redundant.
+ hasCascade := len(n.p.bucket.cascade) > 0
+ if !ok {
+ // No cascade in the edited file: cascading only if one was
+ // removed, i.e. the page previously had a cascade.
+ isCascade = hasCascade
+
+ return true
+ }
+
+ if !hasCascade {
+ // Cascade was added.
+ isCascade = true
+ return true
+ }
+
+ // Cascade present before and after: compare for changes.
+ for _, v := range n.p.bucket.cascade {
+ isCascade = !reflect.DeepEqual(cascade1, v)
+ if isCascade {
+ break
+ }
+ }
+
+ return true
+ })
+
+ return isCascade, section
+}
+
+// Collect walks the content filesystem and feeds everything found to the
+// processor. With no filenames it collects the whole tree; otherwise it runs
+// a partial rebuild restricted to the directories those files resolve to.
+// The processor is started before the walk and always waited on, so a
+// processor error surfaces even when the walk itself succeeded.
+func (c *pagesCollector) Collect() (collectErr error) {
+ c.proc.Start(context.Background())
+ defer func() {
+ err := c.proc.Wait()
+ if collectErr == nil {
+ collectErr = err
+ }
+ }()
+
+ if len(c.filenames) == 0 {
+ // Collect everything.
+ collectErr = c.collectDir("", false, nil)
+ } else {
+ for _, pm := range c.contentMap.pmaps {
+ pm.cfg.isRebuild = true
+ }
+ // Dedupe the changed files into their owning directories.
+ dirs := make(map[contentDirKey]bool)
+ for _, filename := range c.filenames {
+ dir, btype := c.tracker.resolveAndRemove(filename)
+ dirs[contentDirKey{dir, filename, btype}] = true
+ }
+
+ for dir := range dirs {
+ for _, pm := range c.contentMap.pmaps {
+ pm.s.ResourceSpec.DeleteBySubstring(dir.dirname)
+ }
+
+ switch dir.tp {
+ case bundleLeaf:
+ collectErr = c.collectDir(dir.dirname, true, nil)
+ case bundleBranch:
+ // A changed branch header may change a front matter
+ // cascade, in which case the whole section is rebuilt.
+ isCascading, section := c.isCascadingEdit(dir)
+
+ if isCascading {
+ c.contentMap.deleteSection(section)
+ }
+ collectErr = c.collectDir(dir.dirname, !isCascading, nil)
+ default:
+ // We always start from a directory.
+ collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool {
+ return dir.filename == fim.Meta().Filename
+ })
+ }
+
+ if collectErr != nil {
+ break
+ }
+ }
+
+ }
+
+ return
+}
+
+// isBundleHeader reports whether fi is a bundle header file, i.e. it was
+// classified as a leaf (index.*) or branch (_index.*) content file.
+func (c *pagesCollector) isBundleHeader(fi hugofs.FileMetaInfo) bool {
+ class := fi.Meta().Classifier
+ return class == files.ContentClassLeaf || class == files.ContentClassBranch
+}
+
+// getLang returns the language of the given file, falling back to the
+// site's default content language when the file has none set.
+func (c *pagesCollector) getLang(fi hugofs.FileMetaInfo) string {
+ if lang := fi.Meta().Lang; lang != "" {
+ return lang
+ }
+ return c.sp.DefaultContentLanguage
+}
+
+// addToBundle files info into the per-language bundle map. A bundle header
+// starts a new bundle for its language; a lonesome bundled content file gets
+// a header cloned from another language; plain resource files are appended,
+// and untranslated resources are cloned into every language's bundle.
+func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirType, bundles pageBundles) error {
+ getBundle := func(lang string) *fileinfoBundle {
+ return bundles[lang]
+ }
+
+ cloneBundle := func(lang string) *fileinfoBundle {
+ // Every bundled content file needs a content file header.
+ // Use the default content language if found, else just
+ // pick one.
+ var (
+ source *fileinfoBundle
+ found bool
+ )
+
+ source, found = bundles[c.sp.DefaultContentLanguage]
+ if !found {
+ for _, b := range bundles {
+ source = b
+ break
+ }
+ }
+
+ if source == nil {
+ // Caller guarantees at least one bundle exists; reaching
+ // here is a programmer error.
+ panic(fmt.Sprintf("no source found, %d", len(bundles)))
+ }
+
+ clone := c.cloneFileInfo(source.header)
+ clone.Meta().Lang = lang
+
+ return &fileinfoBundle{
+ header: clone,
+ }
+ }
+
+ lang := c.getLang(info)
+ bundle := getBundle(lang)
+ isBundleHeader := c.isBundleHeader(info)
+ if bundle != nil && isBundleHeader {
+ // index.md file inside a bundle, see issue 6208.
+ info.Meta().Classifier = files.ContentClassContent
+ isBundleHeader = false
+ }
+ classifier := info.Meta().Classifier
+ isContent := classifier == files.ContentClassContent
+ if bundle == nil {
+ if isBundleHeader {
+ bundle = &fileinfoBundle{header: info}
+ bundles[lang] = bundle
+ } else {
+ if btyp == bundleBranch {
+ // No special logic for branch bundles.
+ // Every language needs its own _index.md file.
+ // Also, we only clone bundle headers for lonesome, bundled,
+ // content files.
+ return c.handleFiles(info)
+ }
+
+ if isContent {
+ bundle = cloneBundle(lang)
+ bundles[lang] = bundle
+ }
+ }
+ }
+
+ if !isBundleHeader && bundle != nil {
+ bundle.resources = append(bundle.resources, info)
+ }
+
+ if classifier == files.ContentClassFile {
+ translations := info.Meta().Translations
+
+ // Share untranslated resources with every other language's
+ // bundle that does not already carry them.
+ for lang, b := range bundles {
+ if !stringSliceContains(lang, translations...) && !b.containsResource(info.Name()) {
+
+ // Clone and add it to the bundle.
+ clone := c.cloneFileInfo(info)
+ clone.Meta().Lang = lang
+ b.resources = append(b.resources, clone)
+ }
+ }
+ }
+
+ return nil
+}
+
+// cloneFileInfo wraps fi in a new FileMetaInfo with fresh (empty) metadata,
+// so the clone's meta can be mutated without touching the original.
+func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaInfo {
+ return hugofs.NewFileMetaInfo(fi, hugofs.NewFileMeta())
+}
+
+// collectDir walks the tree rooted at dirname, classifies each directory as
+// a branch bundle, leaf bundle or plain files, and dispatches it to the
+// matching handler. partial limits the walk to this directory level (used by
+// partial rebuilds); inFilter, when non-nil, further restricts which files
+// are considered.
+func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error {
+ fi, err := c.fs.Stat(dirname)
+ if err != nil {
+ if os.IsNotExist(err) {
+ // May have been deleted.
+ return nil
+ }
+ return err
+ }
+
+ // handleDir dispatches one directory's entries by bundle type.
+ handleDir := func(
+ btype bundleDirType,
+ dir hugofs.FileMetaInfo,
+ path string,
+ readdir []hugofs.FileMetaInfo) error {
+ if btype > bundleNot && c.tracker != nil {
+ c.tracker.add(path, btype)
+ }
+
+ if btype == bundleBranch {
+ if err := c.handleBundleBranch(readdir); err != nil {
+ return err
+ }
+ // A branch bundle is only this directory level, so keep walking.
+ return nil
+ } else if btype == bundleLeaf {
+ if err := c.handleBundleLeaf(dir, path, readdir); err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ if err := c.handleFiles(readdir...); err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ filter := func(fim hugofs.FileMetaInfo) bool {
+ if fim.Meta().SkipDir {
+ return false
+ }
+
+ if c.sp.IgnoreFile(fim.Meta().Filename) {
+ return false
+ }
+
+ if inFilter != nil {
+ return inFilter(fim)
+ }
+ return true
+ }
+
+ preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
+ var btype bundleDirType
+
+ // Filter in place, reusing readdir's backing array.
+ filtered := readdir[:0]
+ for _, fi := range readdir {
+ if filter(fi) {
+ filtered = append(filtered, fi)
+
+ if c.tracker != nil {
+ // Track symlinks.
+ c.tracker.addSymbolicLinkMapping(fi)
+ }
+ }
+ }
+ walkRoot := dir.Meta().IsRootFile
+ readdir = filtered
+
+ // We merge language directories, so there can be duplicates, but they
+ // will be ordered, most important first.
+ var duplicates []int
+ seen := make(map[string]bool)
+
+ for i, fi := range readdir {
+
+ if fi.IsDir() {
+ continue
+ }
+
+ meta := fi.Meta()
+ meta.IsRootFile = walkRoot
+ class := meta.Classifier
+ translationBase := meta.TranslationBaseNameWithExt
+ key := pth.Join(meta.Lang, translationBase)
+
+ if seen[key] {
+ duplicates = append(duplicates, i)
+ continue
+ }
+ seen[key] = true
+
+ var thisBtype bundleDirType
+
+ switch class {
+ case files.ContentClassLeaf:
+ thisBtype = bundleLeaf
+ case files.ContentClassBranch:
+ thisBtype = bundleBranch
+ }
+
+ // Folders with both index.md and _index.md type of files have
+ // undefined behaviour and can never work.
+ // The branch variant will win because of sort order, but log
+ // a warning about it.
+ if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype {
+ c.logger.Warnf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename)
+ // Reclassify it so it will be handled as a content file inside the
+ // section, which is in line with the <= 0.55 behaviour.
+ meta.Classifier = files.ContentClassContent
+ } else if thisBtype > bundleNot {
+ btype = thisBtype
+ }
+
+ }
+
+ if len(duplicates) > 0 {
+ // Delete back-to-front so earlier indexes stay valid.
+ for i := len(duplicates) - 1; i >= 0; i-- {
+ idx := duplicates[i]
+ readdir = append(readdir[:idx], readdir[idx+1:]...)
+ }
+ }
+
+ err := handleDir(btype, dir, path, readdir)
+ if err != nil {
+ return nil, err
+ }
+
+ if btype == bundleLeaf || partial {
+ return nil, filepath.SkipDir
+ }
+
+ // Keep walking.
+ return readdir, nil
+ }
+
+ var postHook hugofs.WalkHook
+ if c.tracker != nil {
+ postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
+ // Fix: removed the dead "if c.tracker == nil" branch that was
+ // here — this closure is only installed when the tracker is
+ // non-nil, so the check could never trigger. The hook is
+ // currently a no-op kept as an extension point for
+ // partial-build bookkeeping.
+ return readdir, nil
+ }
+ }
+
+ wfn := func(path string, info hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ return nil
+ }
+
+ fim := fi.(hugofs.FileMetaInfo)
+ // Make sure the pages in this directory gets re-rendered,
+ // even in fast render mode.
+ fim.Meta().IsRootFile = true
+
+ w := hugofs.NewWalkway(hugofs.WalkwayConfig{
+ Fs: c.fs,
+ Logger: c.logger,
+ Root: dirname,
+ Info: fim,
+ HookPre: preHook,
+ HookPost: postHook,
+ WalkFn: wfn,
+ })
+
+ return w.Walk()
+}
+
+// handleBundleBranch processes a branch-bundle directory: the _index.* and
+// resource files are collected into per-language bundles and processed
+// first, then the plain content files follow, so the section exists before
+// its pages.
+func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error {
+ // Maps bundles to its language.
+ bundles := pageBundles{}
+
+ var contentFiles []hugofs.FileMetaInfo
+
+ for _, fim := range readdir {
+
+ if fim.IsDir() {
+ continue
+ }
+
+ meta := fim.Meta()
+
+ switch meta.Classifier {
+ case files.ContentClassContent:
+ // Regular pages in the section are deferred until after
+ // the section bundle itself has been processed.
+ contentFiles = append(contentFiles, fim)
+ default:
+ if err := c.addToBundle(fim, bundleBranch, bundles); err != nil {
+ return err
+ }
+ }
+
+ }
+
+ // Make sure the section is created before its pages.
+ if err := c.proc.Process(bundles); err != nil {
+ return err
+ }
+
+ return c.handleFiles(contentFiles...)
+}
+
+// handleBundleLeaf processes a leaf-bundle directory (index.*): it walks the
+// whole subtree rooted at path, folds every file into per-language bundles
+// and hands the result to the processor as one unit.
+func (c *pagesCollector) handleBundleLeaf(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) error {
+ // Maps bundles to its language.
+ bundles := pageBundles{}
+
+ walk := func(path string, info hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+ if info.IsDir() {
+ return nil
+ }
+
+ return c.addToBundle(info, bundleLeaf, bundles)
+ }
+
+ // Start a new walker from the given path.
+ w := hugofs.NewWalkway(hugofs.WalkwayConfig{
+ Root: path,
+ Fs: c.fs,
+ Logger: c.logger,
+ Info: dir,
+ DirEntries: readdir,
+ WalkFn: walk,
+ })
+
+ if err := w.Walk(); err != nil {
+ return err
+ }
+
+ return c.proc.Process(bundles)
+}
+
+// handleFiles sends every regular (non-directory) file to the processor,
+// stopping at the first error.
+func (c *pagesCollector) handleFiles(fis ...hugofs.FileMetaInfo) error {
+ for _, info := range fis {
+ if info.IsDir() {
+ continue
+ }
+ if err := c.proc.Process(info); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// stringSliceContains reports whether k equals any of the given values.
+func stringSliceContains(k string, values ...string) bool {
+ for i := range values {
+ if values[i] == k {
+ return true
+ }
+ }
+ return false
+}
diff --git a/hugolib/pages_capture_test.go b/hugolib/pages_capture_test.go
new file mode 100644
index 000000000..ea2ef4e1e
--- /dev/null
+++ b/hugolib/pages_capture_test.go
@@ -0,0 +1,79 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "context"
+ "fmt"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+)
+
+// TestPagesCapture runs the collector over a small content tree (root files,
+// a leaf bundle under blog/, and plain pages) with a stub processor and
+// checks the number of processed items.
+func TestPagesCapture(t *testing.T) {
+ cfg, hfs := newTestCfg()
+ fs := hfs.Source
+
+ c := qt.New(t)
+
+ writeFile := func(filename string) {
+ c.Assert(afero.WriteFile(fs, filepath.FromSlash(filename), []byte(fmt.Sprintf("content-%s", filename)), 0755), qt.IsNil)
+ }
+
+ writeFile("_index.md")
+ writeFile("logo.png")
+ writeFile("root.md")
+ writeFile("blog/index.md")
+ writeFile("blog/hello.md")
+ writeFile("blog/images/sunset.png")
+ writeFile("pages/page1.md")
+ writeFile("pages/page2.md")
+ writeFile("pages/page.png")
+
+ ps, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg, loggers.NewErrorLogger())
+ c.Assert(err, qt.IsNil)
+ sourceSpec := source.NewSourceSpec(ps, nil, fs)
+
+ t.Run("Collect", func(t *testing.T) {
+ c := qt.New(t)
+ proc := &testPagesCollectorProcessor{}
+ coll := newPagesCollector(sourceSpec, nil, loggers.NewErrorLogger(), nil, proc)
+ c.Assert(coll.Collect(), qt.IsNil)
+ // 4 processed items; presumably the root bundle, the blog leaf
+ // bundle and the plain files/pages groups — TODO(review) confirm
+ // the exact breakdown.
+ c.Assert(len(proc.items), qt.Equals, 4)
+ })
+}
+
+// testPagesCollectorProcessor is a stub pagesCollectorProcessorProvider that
+// records every processed item and returns a configurable Wait error.
+type testPagesCollectorProcessor struct {
+ items []any
+ waitErr error
+}
+
+// Process records the item and never fails.
+func (proc *testPagesCollectorProcessor) Process(item any) error {
+ proc.items = append(proc.items, item)
+ return nil
+}
+
+// Start is a no-op; it returns the given context unchanged.
+func (proc *testPagesCollectorProcessor) Start(ctx context.Context) context.Context {
+ return ctx
+}
+
+// Wait returns the preconfigured waitErr (nil by default).
+func (proc *testPagesCollectorProcessor) Wait() error { return proc.waitErr }
diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go
new file mode 100644
index 000000000..55241d306
--- /dev/null
+++ b/hugolib/pages_language_merge_test.go
@@ -0,0 +1,196 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// TODO(bep) move and rewrite in resource/page.
+
// TestMergeLanguages verifies Pages.MergeByLanguage: pages missing from a
// translation are filled in from the supplied (fallback) pages.
//
// The fixture (newTestSiteForLanguageMerge with count=30) yields 31 regular
// "en" pages (p1..p30 plus the bundle), 12 "nn" pages (p2, every third page,
// plus the bundle) and 6 "fr" pages (every fifth page).
func TestMergeLanguages(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	b := newTestSiteForLanguageMerge(t, 30)
	b.CreateSites()

	// Rendering is not needed to inspect the page collections.
	b.Build(BuildCfg{SkipRender: true})

	h := b.H

	enSite := h.Sites[0]
	frSite := h.Sites[1]
	nnSite := h.Sites[2]

	c.Assert(len(enSite.RegularPages()), qt.Equals, 31)
	c.Assert(len(frSite.RegularPages()), qt.Equals, 6)
	c.Assert(len(nnSite.RegularPages()), qt.Equals, 12)

	// Run the merge twice; the result must be identical both times.
	for i := 0; i < 2; i++ {
		mergedNN := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
		c.Assert(len(mergedNN), qt.Equals, 31)
		for i := 1; i <= 31; i++ {
			expectedLang := "en"
			// p2, every third page, and the bundle (index 31) have "nn"
			// translations; everything else falls back to "en".
			if i == 2 || i%3 == 0 || i == 31 {
				expectedLang = "nn"
			}
			p := mergedNN[i-1]
			c.Assert(p.Language().Lang, qt.Equals, expectedLang)
		}
	}

	mergedFR := frSite.RegularPages().MergeByLanguage(enSite.RegularPages())
	c.Assert(len(mergedFR), qt.Equals, 31)
	for i := 1; i <= 31; i++ {
		expectedLang := "en"
		// Every fifth page has a "fr" translation.
		if i%5 == 0 {
			expectedLang = "fr"
		}
		p := mergedFR[i-1]
		c.Assert(p.Language().Lang, qt.Equals, expectedLang)
	}

	firstNN := nnSite.RegularPages()[0]
	c.Assert(len(firstNN.Sites()), qt.Equals, 4)
	c.Assert(firstNN.Sites().First().Language().Lang, qt.Equals, "en")

	// Bundle resources merge the same way as pages.
	nnBundle := nnSite.getPage("page", "bundle")
	enBundle := enSite.getPage("page", "bundle")

	c.Assert(len(enBundle.Resources()), qt.Equals, 6)
	c.Assert(len(nnBundle.Resources()), qt.Equals, 2)

	var ri any = nnBundle.Resources()

	// This looks less ugly in the templates ...
	mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources())
	c.Assert(len(mergedNNResources), qt.Equals, 6)

	// Merging with nil must be a no-op.
	unchanged, err := nnSite.RegularPages().MergeByLanguageInterface(nil)
	c.Assert(err, qt.IsNil)
	c.Assert(unchanged, deepEqualsPages, nnSite.RegularPages())
}
+
// TestMergeLanguagesTemplate exercises the lang.Merge template function,
// including chained merges, merging bundle resources, and passing a nil
// argument on either side of the pipe.
func TestMergeLanguagesTemplate(t *testing.T) {
	t.Parallel()

	b := newTestSiteForLanguageMerge(t, 15)
	b.WithTemplates("home.html", `
{{ $pages := .Site.RegularPages }}
{{ .Scratch.Set "pages" $pages }}
{{ $enSite := index .Sites 0 }}
{{ $frSite := index .Sites 1 }}
{{ if eq .Language.Lang "nn" }}:
{{ $nnBundle := .Site.GetPage "page" "bundle" }}
{{ $enBundle := $enSite.GetPage "page" "bundle" }}
{{ .Scratch.Set "pages" ($pages | lang.Merge $frSite.RegularPages| lang.Merge $enSite.RegularPages) }}
{{ .Scratch.Set "pages2" (sort ($nnBundle.Resources | lang.Merge $enBundle.Resources) "Title") }}
{{ end }}
{{ $pages := .Scratch.Get "pages" }}
{{ $pages2 := .Scratch.Get "pages2" }}
Pages1: {{ range $i, $p := $pages }}{{ add $i 1 }}: {{ .File.Path }} {{ .Language.Lang }} | {{ end }}
Pages2: {{ range $i, $p := $pages2 }}{{ add $i 1 }}: {{ .Title }} {{ .Language.Lang }} | {{ end }}
{{ $nil := resources.Get "asdfasdfasdf" }}
Pages3: {{ $frSite.RegularPages | lang.Merge $nil }}
Pages4: {{ $nil | lang.Merge $frSite.RegularPages }}


`,
		"shortcodes/shortcode.html", "MyShort",
		"shortcodes/lingo.html", "MyLingo",
	)

	b.CreateSites()
	b.Build(BuildCfg{})

	// The "nn" home page merges nn -> fr -> en, so each position picks the
	// most specific translation available.
	b.AssertFileContent("public/nn/index.html", "Pages1: 1: p1.md en | 2: p2.nn.md nn | 3: p3.nn.md nn | 4: p4.md en | 5: p5.fr.md fr | 6: p6.nn.md nn | 7: p7.md en | 8: p8.md en | 9: p9.nn.md nn | 10: p10.fr.md fr | 11: p11.md en | 12: p12.nn.md nn | 13: p13.md en | 14: p14.md en | 15: p15.nn.md nn")
	// Merged bundle resources, sorted by title.
	b.AssertFileContent("public/nn/index.html", "Pages2: 1: doc100 en | 2: doc101 nn | 3: doc102 nn | 4: doc103 en | 5: doc104 en | 6: doc105 en")
	// A nil argument on either side leaves the non-nil collection untouched.
	b.AssertFileContent("public/nn/index.html", `
Pages3: Pages(3)
Pages4: Pages(3)
 `)
}
+
+func newTestSiteForLanguageMerge(t testing.TB, count int) *sitesBuilder {
+ contentTemplate := `---
+title: doc%d
+weight: %d
+date: "2018-02-28"
+---
+# doc
+*some "content"*
+
+{{< shortcode >}}
+
+{{< lingo >}}
+`
+
+ builder := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
+
+ // We need some content with some missing translations.
+ // "en" is the main language, so add some English content + some Norwegian (nn, nynorsk) content.
+ var contentPairs []string
+ for i := 1; i <= count; i++ {
+ content := fmt.Sprintf(contentTemplate, i, i)
+ contentPairs = append(contentPairs, []string{fmt.Sprintf("p%d.md", i), content}...)
+ if i == 2 || i%3 == 0 {
+ // Add page 2,3, 6, 9 ... to both languages
+ contentPairs = append(contentPairs, []string{fmt.Sprintf("p%d.nn.md", i), content}...)
+ }
+ if i%5 == 0 {
+ // Add some French content, too.
+ contentPairs = append(contentPairs, []string{fmt.Sprintf("p%d.fr.md", i), content}...)
+ }
+ }
+
+ // See https://github.com/gohugoio/hugo/issues/4644
+ // Add a bundles
+ j := 100
+ contentPairs = append(contentPairs, []string{"bundle/index.md", fmt.Sprintf(contentTemplate, j, j)}...)
+ for i := 0; i < 6; i++ {
+ contentPairs = append(contentPairs, []string{fmt.Sprintf("bundle/pb%d.md", i), fmt.Sprintf(contentTemplate, i+j, i+j)}...)
+ }
+ contentPairs = append(contentPairs, []string{"bundle/index.nn.md", fmt.Sprintf(contentTemplate, j, j)}...)
+ for i := 1; i < 3; i++ {
+ contentPairs = append(contentPairs, []string{fmt.Sprintf("bundle/pb%d.nn.md", i), fmt.Sprintf(contentTemplate, i+j, i+j)}...)
+ }
+
+ builder.WithContent(contentPairs...)
+ return builder
+}
+
+func BenchmarkMergeByLanguage(b *testing.B) {
+ const count = 100
+
+ // newTestSiteForLanguageMerge creates count+1 pages.
+ builder := newTestSiteForLanguageMerge(b, count-1)
+ builder.CreateSites()
+ builder.Build(BuildCfg{SkipRender: true})
+ h := builder.H
+
+ enSite := h.Sites[0]
+ nnSite := h.Sites[2]
+
+ for i := 0; i < b.N; i++ {
+ merged := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
+ if len(merged) != count {
+ b.Fatal("Count mismatch")
+ }
+ }
+}
diff --git a/hugolib/pages_process.go b/hugolib/pages_process.go
new file mode 100644
index 000000000..04ac0218a
--- /dev/null
+++ b/hugolib/pages_process.go
@@ -0,0 +1,210 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "context"
+ "fmt"
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+ "golang.org/x/sync/errgroup"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+func newPagesProcessor(h *HugoSites, sp *source.SourceSpec) *pagesProcessor {
+ procs := make(map[string]pagesCollectorProcessorProvider)
+ for _, s := range h.Sites {
+ procs[s.Lang()] = &sitePagesProcessor{
+ m: s.pageMap,
+ errorSender: s.h,
+ itemChan: make(chan interface{}, config.GetNumWorkerMultiplier()*2),
+ renderStaticToDisk: h.Cfg.GetBool("renderStaticToDisk"),
+ }
+ }
+ return &pagesProcessor{
+ procs: procs,
+ }
+}
+
// pagesCollectorProcessorProvider is the processing sink for the pages
// collector: Start prepares the processor and returns the (possibly derived)
// context, Process receives collected items, and Wait blocks until all
// queued items have been handled, returning any processing error.
type pagesCollectorProcessorProvider interface {
	Process(item any) error
	Start(ctx context.Context) context.Context
	Wait() error
}
+
+type pagesProcessor struct {
+ // Per language/Site
+ procs map[string]pagesCollectorProcessorProvider
+}
+
+func (proc *pagesProcessor) Process(item any) error {
+ switch v := item.(type) {
+ // Page bundles mapped to their language.
+ case pageBundles:
+ for _, vv := range v {
+ proc.getProcFromFi(vv.header).Process(vv)
+ }
+ case hugofs.FileMetaInfo:
+ proc.getProcFromFi(v).Process(v)
+ default:
+ panic(fmt.Sprintf("unrecognized item type in Process: %T", item))
+
+ }
+
+ return nil
+}
+
+func (proc *pagesProcessor) Start(ctx context.Context) context.Context {
+ for _, p := range proc.procs {
+ ctx = p.Start(ctx)
+ }
+ return ctx
+}
+
+func (proc *pagesProcessor) Wait() error {
+ var err error
+ for _, p := range proc.procs {
+ if e := p.Wait(); e != nil {
+ err = e
+ }
+ }
+ return err
+}
+
+func (proc *pagesProcessor) getProcFromFi(fi hugofs.FileMetaInfo) pagesCollectorProcessorProvider {
+ if p, found := proc.procs[fi.Meta().Lang]; found {
+ return p
+ }
+ return defaultPageProcessor
+}
+
// nopPageProcessor is a pagesCollectorProcessorProvider that discards
// everything. It is the fallback used by getProcFromFi for files whose
// language has no registered site processor.
type nopPageProcessor int

// Process drops the item.
func (nopPageProcessor) Process(item any) error {
	return nil
}

// Start returns a fresh background context.
// NOTE(review): the incoming ctx is discarded here, unlike
// sitePagesProcessor.Start which derives from it. This looks harmless since
// this type is never stored in pagesProcessor.procs (whose Start loop is the
// only caller visible here) — confirm before relying on the returned context.
func (nopPageProcessor) Start(ctx context.Context) context.Context {
	return context.Background()
}

// Wait has nothing to wait for.
func (nopPageProcessor) Wait() error {
	return nil
}

// defaultPageProcessor is the shared no-op fallback processor.
var defaultPageProcessor = new(nopPageProcessor)
+
// sitePagesProcessor processes collected content for one site/language.
// Items are queued on itemChan and drained by a single goroutine started in
// Start, so mutation of the pageMap is serialized.
type sitePagesProcessor struct {
	// m is the site's page map; all additions go through it.
	m *pageMap
	// errorSender receives errors from the owning site (see newPagesProcessor).
	errorSender herrors.ErrorSender

	// ctx is the errgroup-derived context set in Start; cancelled when
	// processing fails.
	ctx context.Context
	// itemChan is the work queue feeding the consumer goroutine.
	itemChan chan any
	// itemGroup owns the consumer goroutine and collects its error.
	itemGroup *errgroup.Group

	// renderStaticToDisk selects PublishFsStatic as the copy target.
	renderStaticToDisk bool
}
+
// Process queues item for the consumer goroutine. If the processing context
// has been cancelled (which happens when an earlier item failed), the item
// is silently dropped; the underlying error is reported by Wait.
// Start must be called first: p.ctx is assigned there.
func (p *sitePagesProcessor) Process(item any) error {
	select {
	case <-p.ctx.Done():
		return nil
	default:
		p.itemChan <- item
	}
	return nil
}
+
// Start launches the single consumer goroutine that drains itemChan and
// returns a derived context that is cancelled if processing fails.
func (p *sitePagesProcessor) Start(ctx context.Context) context.Context {
	p.itemGroup, ctx = errgroup.WithContext(ctx)
	p.ctx = ctx
	p.itemGroup.Go(func() error {
		for item := range p.itemChan {
			// The first error stops the goroutine and cancels p.ctx.
			if err := p.doProcess(item); err != nil {
				return err
			}
		}
		return nil
	})
	return ctx
}
+
// Wait closes the item channel, which stops the consumer goroutine once the
// queue is drained, and returns any processing error. It must not run
// concurrently with Process: a send on the closed channel would panic.
func (p *sitePagesProcessor) Wait() error {
	close(p.itemChan)
	return p.itemGroup.Wait()
}
+
+func (p *sitePagesProcessor) copyFile(fim hugofs.FileMetaInfo) error {
+ meta := fim.Meta()
+ f, err := meta.Open()
+ if err != nil {
+ return fmt.Errorf("copyFile: failed to open: %w", err)
+ }
+
+ s := p.m.s
+
+ target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path)
+
+ defer f.Close()
+
+ fs := s.PublishFs
+ if p.renderStaticToDisk {
+ fs = s.PublishFsStatic
+ }
+
+ return s.publish(&s.PathSpec.ProcessingStats.Files, target, f, fs)
+}
+
// doProcess handles one queued item on the consumer goroutine: bundles and
// content files are added to the page map, plain files are copied to the
// publish filesystem, and files in disabled languages are skipped.
func (p *sitePagesProcessor) doProcess(item any) error {
	m := p.m
	switch v := item.(type) {
	case *fileinfoBundle:
		// A bundle: the header (index) file plus its resource files.
		if err := m.AddFilesBundle(v.header, v.resources...); err != nil {
			return err
		}
	case hugofs.FileMetaInfo:
		if p.shouldSkip(v) {
			return nil
		}
		meta := v.Meta()

		classifier := meta.Classifier
		switch classifier {
		case files.ContentClassContent:
			// A standalone content file (bundle of one).
			if err := m.AddFilesBundle(v); err != nil {
				return err
			}
		case files.ContentClassFile:
			// A non-content asset: copy it through unchanged.
			if err := p.copyFile(v); err != nil {
				return err
			}
		default:
			panic(fmt.Sprintf("invalid classifier: %q", classifier))
		}
	default:
		panic(fmt.Sprintf("unrecognized item type in Process: %T", item))
	}
	return nil
}
+
// shouldSkip reports whether the file belongs to a language that is disabled
// for this site.
func (p *sitePagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool {
	// TODO(ep) unify
	return p.m.s.SourceSpec.DisabledLanguages[fim.Meta().Lang]
}
diff --git a/hugolib/pages_test.go b/hugolib/pages_test.go
new file mode 100644
index 000000000..30e9e59d2
--- /dev/null
+++ b/hugolib/pages_test.go
@@ -0,0 +1,119 @@
+package hugolib
+
+import (
+ "fmt"
+ "math/rand"
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func newPagesPrevNextTestSite(t testing.TB, numPages int) *sitesBuilder {
+ categories := []string{"blue", "green", "red", "orange", "indigo", "amber", "lime"}
+ cat1, cat2 := categories[rand.Intn(len(categories))], categories[rand.Intn(len(categories))]
+ categoriesSlice := fmt.Sprintf("[%q,%q]", cat1, cat2)
+ pageTemplate := `
+---
+title: "Page %d"
+weight: %d
+categories: %s
+---
+
+`
+ b := newTestSitesBuilder(t)
+
+ for i := 1; i <= numPages; i++ {
+ b.WithContent(fmt.Sprintf("page%d.md", i), fmt.Sprintf(pageTemplate, i, rand.Intn(numPages), categoriesSlice))
+ }
+
+ return b
+}
+
+func TestPagesPrevNext(t *testing.T) {
+ b := newPagesPrevNextTestSite(t, 100)
+ b.Build(BuildCfg{SkipRender: true})
+
+ pages := b.H.Sites[0].RegularPages()
+
+ b.Assert(pages, qt.HasLen, 100)
+
+ for _, p := range pages {
+ msg := qt.Commentf("w=%d", p.Weight())
+ b.Assert(pages.Next(p), qt.Equals, p.Next(), msg)
+ b.Assert(pages.Prev(p), qt.Equals, p.Prev(), msg)
+ }
+}
+
// BenchmarkPagesPrevNext benchmarks the page-level (.Next/.Prev) and
// collection-level (Pages.Next/Pages.Prev) navigation lookups, also against
// shuffled and re-sorted collections whose order differs from the default
// page sort.
func BenchmarkPagesPrevNext(b *testing.B) {
	type Variant struct {
		name string
		// preparePages optionally reorders the collection before timing.
		preparePages func(pages page.Pages) page.Pages
		// run performs the navigation call being measured.
		run func(p page.Page, pages page.Pages)
	}

	shufflePages := func(pages page.Pages) page.Pages {
		rand.Shuffle(len(pages), func(i, j int) { pages[i], pages[j] = pages[j], pages[i] })
		return pages
	}

	for _, variant := range []Variant{
		{".Next", nil, func(p page.Page, pages page.Pages) { p.Next() }},
		{".Prev", nil, func(p page.Page, pages page.Pages) { p.Prev() }},
		{"Pages.Next", nil, func(p page.Page, pages page.Pages) { pages.Next(p) }},
		{"Pages.Prev", nil, func(p page.Page, pages page.Pages) { pages.Prev(p) }},
		{"Pages.Shuffled.Next", shufflePages, func(p page.Page, pages page.Pages) { pages.Next(p) }},
		{"Pages.Shuffled.Prev", shufflePages, func(p page.Page, pages page.Pages) { pages.Prev(p) }},
		{"Pages.ByTitle.Next", func(pages page.Pages) page.Pages { return pages.ByTitle() }, func(p page.Page, pages page.Pages) { pages.Next(p) }},
	} {
		for _, numPages := range []int{300, 5000} {
			b.Run(fmt.Sprintf("%s-pages-%d", variant.name, numPages), func(b *testing.B) {
				// Site construction is setup cost; keep it out of the timing.
				b.StopTimer()
				builder := newPagesPrevNextTestSite(b, numPages)
				builder.Build(BuildCfg{SkipRender: true})
				pages := builder.H.Sites[0].RegularPages()
				if variant.preparePages != nil {
					pages = variant.preparePages(pages)
				}
				b.StartTimer()
				for i := 0; i < b.N; i++ {
					// Pick a random page each iteration so a single lookup
					// position does not dominate the measurement.
					p := pages[rand.Intn(len(pages))]
					variant.run(p, pages)
				}
			})
		}
	}
}
+
// BenchmarkPagePageCollections benchmarks the page collection accessors
// (.Pages, .RegularPages, .RegularPagesRecursive) on non-regular pages
// (those for which IsPage is false).
func BenchmarkPagePageCollections(b *testing.B) {
	type Variant struct {
		name string
		run  func(p page.Page)
	}

	for _, variant := range []Variant{
		{".Pages", func(p page.Page) { p.Pages() }},
		{".RegularPages", func(p page.Page) { p.RegularPages() }},
		{".RegularPagesRecursive", func(p page.Page) { p.RegularPagesRecursive() }},
	} {
		for _, numPages := range []int{300, 5000} {
			b.Run(fmt.Sprintf("%s-%d", variant.name, numPages), func(b *testing.B) {
				// Keep site construction out of the timed region.
				b.StopTimer()
				builder := newPagesPrevNextTestSite(b, numPages)
				builder.Build(BuildCfg{SkipRender: true})
				// Collect only the non-regular pages; the accessors above
				// are exercised on those.
				var pages page.Pages
				for _, p := range builder.H.Sites[0].Pages() {
					if !p.IsPage() {
						pages = append(pages, p)
					}
				}
				b.StartTimer()
				for i := 0; i < b.N; i++ {
					p := pages[rand.Intn(len(pages))]
					variant.run(p)
				}
			})
		}
	}
}
diff --git a/hugolib/paginator_test.go b/hugolib/paginator_test.go
new file mode 100644
index 000000000..a8d8ac8df
--- /dev/null
+++ b/hugolib/paginator_test.go
@@ -0,0 +1,138 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestPaginator builds a two-language site with 9 pages per language, a page
// size of 3 and a custom pagination path ("thepage"), then verifies pager
// metadata (TotalPages, PageNumber, URL, Next/Prev, Pagers) in both the HTML
// and XML output formats and in the secondary language.
func TestPaginator(t *testing.T) {
	configFile := `
baseURL = "https://example.com/foo/"
paginate = 3
paginatepath = "thepage"

[languages.en]
weight = 1
contentDir = "content/en"

[languages.nn]
weight = 2
contentDir = "content/nn"

`
	b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
	var content []string
	// 9 pages per language => 3 pagers of 3 pages each.
	for i := 0; i < 9; i++ {
		for _, contentDir := range []string{"content/en", "content/nn"} {
			content = append(content, fmt.Sprintf(contentDir+"/blog/page%d.md", i), fmt.Sprintf(`---
title: Page %d
---

Content.
`, i))
		}
	}

	b.WithContent(content...)

	// The template dumps the pager state; the trailing range prints one line
	// per pager marking which one is current.
	pagTemplate := `
{{ $pag := $.Paginator }}
Total: {{ $pag.TotalPages }}
First: {{ $pag.First.URL }}
Page Number: {{ $pag.PageNumber }}
URL: {{ $pag.URL }}
{{ with $pag.Next }}Next: {{ .URL }}{{ end }}
{{ with $pag.Prev }}Prev: {{ .URL }}{{ end }}
{{ range $i, $e := $pag.Pagers }}
{{ printf "%d: %d/%d %t" $i $pag.PageNumber .PageNumber (eq . $pag) -}}
{{ end }}
`

	b.WithTemplatesAdded("index.html", pagTemplate)
	b.WithTemplatesAdded("index.xml", pagTemplate)

	b.Build(BuildCfg{})

	// First pager lives at the section root, not under the paginate path.
	b.AssertFileContent("public/index.html",
		"Page Number: 1",
		"0: 1/1 true")

	// Middle pager: has both Next and Prev; Prev of page 2 is the site root.
	b.AssertFileContent("public/thepage/2/index.html",
		"Total: 3",
		"Page Number: 2",
		"URL: /foo/thepage/2/",
		"Next: /foo/thepage/3/",
		"Prev: /foo/",
		"1: 2/2 true",
	)

	// The XML output format paginates the same way.
	b.AssertFileContent("public/index.xml",
		"Page Number: 1",
		"0: 1/1 true")
	b.AssertFileContent("public/thepage/2/index.xml",
		"Page Number: 2",
		"1: 2/2 true")

	// And so does the "nn" language, below its own base path.
	b.AssertFileContent("public/nn/index.html",
		"Page Number: 1",
		"0: 1/1 true")

	b.AssertFileContent("public/nn/index.xml",
		"Page Number: 1",
		"0: 1/1 true")
}
+
+// Issue 6023
+func TestPaginateWithSort(t *testing.T) {
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+ b.WithTemplatesAdded("index.html", `{{ range (.Paginate (sort .Site.RegularPages ".File.Filename" "desc")).Pages }}|{{ .File.Filename }}{{ end }}`)
+ b.Build(BuildCfg{}).AssertFileContent("public/index.html",
+ filepath.FromSlash("|content/sect/doc1.nn.md|content/sect/doc1.nb.md|content/sect/doc1.fr.md|content/sect/doc1.en.md"))
+}
+
// TestPaginateOutputFormat verifies pagination when the home page's only
// output format is JSON (cascaded from the front matter): the pager files
// must be named index.json and no stray HTML artifacts may appear.
// https://github.com/gohugoio/hugo/issues/6797
func TestPaginateOutputFormat(t *testing.T) {
	b := newTestSitesBuilder(t).WithSimpleConfigFile()
	b.WithContent("_index.md", `---
title: "Home"
cascade:
  outputs:
    - JSON
---`)

	// 22 pages with ascending weights give a deterministic page order.
	for i := 0; i < 22; i++ {
		b.WithContent(fmt.Sprintf("p%d.md", i+1), fmt.Sprintf(`---
title: "Page"
weight: %d
---`, i+1))
	}

	b.WithTemplatesAdded("index.json", `JSON: {{ .Paginator.TotalNumberOfElements }}: {{ range .Paginator.Pages }}|{{ .RelPermalink }}{{ end }}:DONE`)
	b.Build(BuildCfg{})

	b.AssertFileContent("public/index.json",
		`JSON: 22
|/p1/index.json|/p2/index.json|
`)

	// This looks odd, so are most bugs.
	// The first pager must collapse into public/index.json — no page/1 copy.
	b.Assert(b.CheckExists("public/page/1/index.json/index.html"), qt.Equals, false)
	b.Assert(b.CheckExists("public/page/1/index.json"), qt.Equals, false)
	b.AssertFileContent("public/page/2/index.json", `JSON: 22: |/p11/index.json|/p12/index.json`)
}
diff --git a/hugolib/paths/baseURL.go b/hugolib/paths/baseURL.go
new file mode 100644
index 000000000..a3c7e9d27
--- /dev/null
+++ b/hugolib/paths/baseURL.go
@@ -0,0 +1,87 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+)
+
// A BaseURL in Hugo is normally of the form scheme://host/path, but the
// opaque form scheme:opaque (e.g. mailto:hugo@rules.com) is also valid.
type BaseURL struct {
	url    *url.URL
	urlStr string
}

// String returns the normalized string form captured at construction time,
// falling back to re-serializing the URL.
func (b BaseURL) String() string {
	if b.urlStr != "" {
		return b.urlStr
	}
	return b.url.String()
}

// Path returns the path component of the base URL.
func (b BaseURL) Path() string {
	return b.url.Path
}

// HostURL returns the URL to the host root without any path elements.
func (b BaseURL) HostURL() string {
	return strings.TrimSuffix(b.String(), b.Path())
}

// WithProtocol returns the BaseURL prefixed with the given protocol.
// The Protocol is normally of the form "scheme://", i.e. "webcal://".
func (b BaseURL) WithProtocol(protocol string) (string, error) {
	u := b.URL()

	scheme := protocol
	isFullProtocol := strings.HasSuffix(scheme, "://")
	isOpaqueProtocol := strings.HasSuffix(scheme, ":")

	switch {
	case isFullProtocol:
		scheme = strings.TrimSuffix(scheme, "://")
	case isOpaqueProtocol:
		scheme = strings.TrimSuffix(scheme, ":")
	}

	u.Scheme = scheme

	if isFullProtocol && u.Opaque != "" {
		// Promote an opaque URL (mailto:me@x) to host form (webcal://me@x).
		u.Opaque = "//" + u.Opaque
	} else if isOpaqueProtocol && u.Opaque == "" {
		// Cannot turn a host-form URL into an opaque one.
		return "", fmt.Errorf("cannot determine BaseURL for protocol %q", protocol)
	}

	return u.String(), nil
}

// URL returns a copy of the internal URL.
// The copy can be safely used and modified.
func (b BaseURL) URL() *url.URL {
	c := *b.url
	return &c
}

// newBaseURLFromString parses b into a BaseURL, keeping the parsed string
// form for String().
func newBaseURLFromString(b string) (BaseURL, error) {
	base, err := url.Parse(b)
	if err != nil {
		return BaseURL{}, err
	}
	return BaseURL{url: base, urlStr: base.String()}, nil
}
diff --git a/hugolib/paths/baseURL_test.go b/hugolib/paths/baseURL_test.go
new file mode 100644
index 000000000..77095bb7d
--- /dev/null
+++ b/hugolib/paths/baseURL_test.go
@@ -0,0 +1,67 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestBaseURL covers BaseURL parsing, WithProtocol in both host and opaque
// forms, "non-URL" base URLs, and a base URL with a sub path.
func TestBaseURL(t *testing.T) {
	c := qt.New(t)
	b, err := newBaseURLFromString("http://example.com")
	c.Assert(err, qt.IsNil)
	c.Assert(b.String(), qt.Equals, "http://example.com")

	p, err := b.WithProtocol("webcal://")
	c.Assert(err, qt.IsNil)
	c.Assert(p, qt.Equals, "webcal://example.com")

	// A bare scheme (no "://" or ":") is treated as a full protocol.
	p, err = b.WithProtocol("webcal")
	c.Assert(err, qt.IsNil)
	c.Assert(p, qt.Equals, "webcal://example.com")

	// An opaque protocol cannot be applied to a host-form URL.
	_, err = b.WithProtocol("mailto:")
	c.Assert(err, qt.Not(qt.IsNil))

	b, err = newBaseURLFromString("mailto:hugo@rules.com")
	c.Assert(err, qt.IsNil)
	c.Assert(b.String(), qt.Equals, "mailto:hugo@rules.com")

	// These are pretty constructed cases, but verify the opaque form
	// converts both ways.
	p, err = b.WithProtocol("webcal")
	c.Assert(err, qt.IsNil)
	c.Assert(p, qt.Equals, "webcal:hugo@rules.com")

	p, err = b.WithProtocol("webcal://")
	c.Assert(err, qt.IsNil)
	c.Assert(p, qt.Equals, "webcal://hugo@rules.com")

	// Test with "non-URLs". Some people will try to use these as a way to get
	// relative URLs working etc.
	b, err = newBaseURLFromString("/")
	c.Assert(err, qt.IsNil)
	c.Assert(b.String(), qt.Equals, "/")

	b, err = newBaseURLFromString("")
	c.Assert(err, qt.IsNil)
	c.Assert(b.String(), qt.Equals, "")

	// BaseURL with sub path
	b, err = newBaseURLFromString("http://example.com/sub")
	c.Assert(err, qt.IsNil)
	c.Assert(b.String(), qt.Equals, "http://example.com/sub")
	c.Assert(b.HostURL(), qt.Equals, "http://example.com")
}
diff --git a/hugolib/paths/paths.go b/hugolib/paths/paths.go
new file mode 100644
index 000000000..e80215b92
--- /dev/null
+++ b/hugolib/paths/paths.go
@@ -0,0 +1,274 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ hpaths "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/gohugoio/hugo/hugofs"
+)
+
// FilePathSeparator is the OS-specific path separator as a string.
var FilePathSeparator = string(filepath.Separator)

// Paths holds the resolved filesystem paths, URL settings and language
// configuration for a site build. It is constructed by New from the site
// config and is read-mostly after that.
type Paths struct {
	Fs  *hugofs.Fs
	Cfg config.Provider

	BaseURL
	// BaseURLString is BaseURL rendered as a string.
	BaseURLString string
	// BaseURLNoPathString is BaseURL with its path component stripped.
	BaseURLNoPathString string

	// If the baseURL contains a base path, e.g. https://example.com/docs, then "/docs" will be the BasePath.
	BasePath string

	// Directories
	// TODO(bep) when we have trimmed down most of the dirs usage outside of this package, make
	// these into an interface.
	ThemesDir  string
	WorkingDir string

	// Directories to store Resource related artifacts.
	AbsResourcesDir string

	AbsPublishDir string

	// pagination path handling
	PaginatePath string

	// When in multihost mode, this returns a list of base paths below PublishDir
	// for each language.
	MultihostTargetBasePaths []string

	DisablePathToLower bool
	RemovePathAccents  bool
	UglyURLs           bool
	CanonifyURLs       bool

	Language              *langs.Language
	Languages             langs.Languages
	LanguagesDefaultFirst langs.Languages

	// The PathSpec looks up its config settings in both the current language
	// and then in the global Viper config.
	// Some settings, the settings listed below, does not make sense to be set
	// on per-language-basis. We have no good way of protecting against this
	// other than a "white-list". See language.go.
	defaultContentLanguageInSubdir bool
	DefaultContentLanguage         string
	multilingual                   bool

	AllModules    modules.Modules
	ModulesClient *modules.Client
}
+
+func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
+ baseURLstr := cfg.GetString("baseURL")
+ baseURL, err := newBaseURLFromString(baseURLstr)
+ if err != nil {
+ return nil, fmt.Errorf("Failed to create baseURL from %q:: %w", baseURLstr, err)
+ }
+
+ contentDir := filepath.Clean(cfg.GetString("contentDir"))
+ workingDir := filepath.Clean(cfg.GetString("workingDir"))
+ resourceDir := filepath.Clean(cfg.GetString("resourceDir"))
+ publishDir := filepath.Clean(cfg.GetString("publishDir"))
+
+ if publishDir == "" {
+ return nil, fmt.Errorf("publishDir not set")
+ }
+
+ defaultContentLanguage := cfg.GetString("defaultContentLanguage")
+
+ var (
+ language *langs.Language
+ languages langs.Languages
+ languagesDefaultFirst langs.Languages
+ )
+
+ if l, ok := cfg.(*langs.Language); ok {
+ language = l
+ }
+
+ if l, ok := cfg.Get("languagesSorted").(langs.Languages); ok {
+ languages = l
+ }
+
+ if l, ok := cfg.Get("languagesSortedDefaultFirst").(langs.Languages); ok {
+ languagesDefaultFirst = l
+ }
+
+ //
+
+ if len(languages) == 0 {
+ // We have some old tests that does not test the entire chain, hence
+ // they have no languages. So create one so we get the proper filesystem.
+ languages = langs.Languages{&langs.Language{Lang: "en", Cfg: cfg, ContentDir: contentDir}}
+ }
+
+ absPublishDir := hpaths.AbsPathify(workingDir, publishDir)
+ if !strings.HasSuffix(absPublishDir, FilePathSeparator) {
+ absPublishDir += FilePathSeparator
+ }
+ // If root, remove the second '/'
+ if absPublishDir == "//" {
+ absPublishDir = FilePathSeparator
+ }
+ absResourcesDir := hpaths.AbsPathify(workingDir, resourceDir)
+ if !strings.HasSuffix(absResourcesDir, FilePathSeparator) {
+ absResourcesDir += FilePathSeparator
+ }
+ if absResourcesDir == "//" {
+ absResourcesDir = FilePathSeparator
+ }
+
+ var multihostTargetBasePaths []string
+ if languages.IsMultihost() {
+ for _, l := range languages {
+ multihostTargetBasePaths = append(multihostTargetBasePaths, l.Lang)
+ }
+ }
+
+ var baseURLString = baseURL.String()
+ var baseURLNoPath = baseURL.URL()
+ baseURLNoPath.Path = ""
+ var baseURLNoPathString = baseURLNoPath.String()
+
+ p := &Paths{
+ Fs: fs,
+ Cfg: cfg,
+ BaseURL: baseURL,
+ BaseURLString: baseURLString,
+ BaseURLNoPathString: baseURLNoPathString,
+
+ DisablePathToLower: cfg.GetBool("disablePathToLower"),
+ RemovePathAccents: cfg.GetBool("removePathAccents"),
+ UglyURLs: cfg.GetBool("uglyURLs"),
+ CanonifyURLs: cfg.GetBool("canonifyURLs"),
+
+ ThemesDir: cfg.GetString("themesDir"),
+ WorkingDir: workingDir,
+
+ AbsResourcesDir: absResourcesDir,
+ AbsPublishDir: absPublishDir,
+
+ multilingual: cfg.GetBool("multilingual"),
+ defaultContentLanguageInSubdir: cfg.GetBool("defaultContentLanguageInSubdir"),
+ DefaultContentLanguage: defaultContentLanguage,
+
+ Language: language,
+ Languages: languages,
+ LanguagesDefaultFirst: languagesDefaultFirst,
+ MultihostTargetBasePaths: multihostTargetBasePaths,
+
+ PaginatePath: cfg.GetString("paginatePath"),
+ }
+
+ if cfg.IsSet("allModules") {
+ p.AllModules = cfg.Get("allModules").(modules.Modules)
+ }
+
+ if cfg.IsSet("modulesClient") {
+ p.ModulesClient = cfg.Get("modulesClient").(*modules.Client)
+ }
+
+ return p, nil
+}
+
+// GetBasePath returns any path element in baseURL if needed.
+func (p *Paths) GetBasePath(isRelativeURL bool) string {
+ if isRelativeURL && p.CanonifyURLs {
+ // The baseURL will be prepended later.
+ return ""
+ }
+ return p.BasePath
+}
+
+func (p *Paths) Lang() string {
+ if p == nil || p.Language == nil {
+ return ""
+ }
+ return p.Language.Lang
+}
+
+func (p *Paths) GetTargetLanguageBasePath() string {
+ if p.Languages.IsMultihost() {
+ // In a multihost configuration all assets will be published below the language code.
+ return p.Lang()
+ }
+ return p.GetLanguagePrefix()
+}
+
+func (p *Paths) GetURLLanguageBasePath() string {
+ if p.Languages.IsMultihost() {
+ return ""
+ }
+ return p.GetLanguagePrefix()
+}
+
+func (p *Paths) GetLanguagePrefix() string {
+ if !p.multilingual {
+ return ""
+ }
+
+ defaultLang := p.DefaultContentLanguage
+ defaultInSubDir := p.defaultContentLanguageInSubdir
+
+ currentLang := p.Language.Lang
+ if currentLang == "" || (currentLang == defaultLang && !defaultInSubDir) {
+ return ""
+ }
+ return currentLang
+}
+
+// GetLangSubDir returns the given language's subdir if needed.
+func (p *Paths) GetLangSubDir(lang string) string {
+ if !p.multilingual {
+ return ""
+ }
+
+ if p.Languages.IsMultihost() {
+ return ""
+ }
+
+ if lang == "" || (lang == p.DefaultContentLanguage && !p.defaultContentLanguageInSubdir) {
+ return ""
+ }
+
+ return lang
+}
+
// AbsPathify creates an absolute path if given a relative path, resolving it
// against the site's working directory. If already absolute, the path is
// just cleaned.
func (p *Paths) AbsPathify(inPath string) string {
	return hpaths.AbsPathify(p.WorkingDir, inPath)
}
+
// RelPathify trims any WorkingDir prefix from the given filename. If
// the filename is not considered to be absolute, the path is just cleaned.
//
// NOTE(review): the trim is purely textual, so a sibling directory such as
// "<WorkingDir>2/file" would also lose the prefix, and an absolute path
// outside WorkingDir loses only its leading separator. Confirm callers only
// pass paths below WorkingDir before tightening this.
func (p *Paths) RelPathify(filename string) string {
	filename = filepath.Clean(filename)
	if !filepath.IsAbs(filename) {
		return filename
	}

	return strings.TrimPrefix(strings.TrimPrefix(filename, p.WorkingDir), FilePathSeparator)
}
diff --git a/hugolib/paths/paths_test.go b/hugolib/paths/paths_test.go
new file mode 100644
index 000000000..cd9d0593f
--- /dev/null
+++ b/hugolib/paths/paths_test.go
@@ -0,0 +1,50 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package paths
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+)
+
+func TestNewPaths(t *testing.T) {
+ c := qt.New(t)
+
+ v := config.NewWithTestDefaults()
+ fs := hugofs.NewMem(v)
+
+ v.Set("languages", map[string]any{
+ "no": map[string]any{},
+ "en": map[string]any{},
+ })
+ v.Set("defaultContentLanguageInSubdir", true)
+ v.Set("defaultContentLanguage", "no")
+ v.Set("contentDir", "content")
+ v.Set("workingDir", "work")
+ v.Set("resourceDir", "resources")
+ v.Set("publishDir", "public")
+
+ langs.LoadLanguageSettings(v, nil)
+
+ p, err := New(fs, v)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(p.defaultContentLanguageInSubdir, qt.Equals, true)
+ c.Assert(p.DefaultContentLanguage, qt.Equals, "no")
+ c.Assert(p.multilingual, qt.Equals, true)
+}
diff --git a/hugolib/permalinker.go b/hugolib/permalinker.go
new file mode 100644
index 000000000..aeaa673f7
--- /dev/null
+++ b/hugolib/permalinker.go
@@ -0,0 +1,22 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+var _ Permalinker = (*pageState)(nil)
+
+// Permalinker provides permalinks of both the relative and absolute kind.
+type Permalinker interface {
+ Permalink() string
+ RelPermalink() string
+}
diff --git a/hugolib/prune_resources.go b/hugolib/prune_resources.go
new file mode 100644
index 000000000..bf5a1ef2f
--- /dev/null
+++ b/hugolib/prune_resources.go
@@ -0,0 +1,19 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+// GC requires a build first and must run on its own. It is not thread safe.
+func (h *HugoSites) GC() (int, error) {
+ return h.Deps.FileCaches.Prune()
+}
diff --git a/hugolib/renderstring_test.go b/hugolib/renderstring_test.go
new file mode 100644
index 000000000..1be0cdffb
--- /dev/null
+++ b/hugolib/renderstring_test.go
@@ -0,0 +1,192 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/loggers"
+)
+
+func TestRenderString(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithTemplates("index.html", `
+{{ $p := site.GetPage "p1.md" }}
+{{ $optBlock := dict "display" "block" }}
+{{ $optOrg := dict "markup" "org" }}
+RSTART:{{ "**Bold Markdown**" | $p.RenderString }}:REND
+RSTART:{{ "**Bold Block Markdown**" | $p.RenderString $optBlock }}:REND
+RSTART:{{ "/italic org mode/" | $p.RenderString $optOrg }}:REND
+RSTART:{{ "## Header2" | $p.RenderString }}:REND
+
+
+`, "_default/_markup/render-heading.html", "Hook Heading: {{ .Level }}")
+
+ b.WithContent("p1.md", `---
+title: "p1"
+---
+`,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+RSTART:<strong>Bold Markdown</strong>:REND
+RSTART:<p><strong>Bold Block Markdown</strong></p>
+RSTART:<em>italic org mode</em>:REND
+RSTART:Hook Heading: 2:REND
+`)
+}
+
+// https://github.com/gohugoio/hugo/issues/6882
+func TestRenderStringOnListPage(t *testing.T) {
+ renderStringTempl := `
+{{ .RenderString "**Hello**" }}
+`
+ b := newTestSitesBuilder(t)
+ b.WithContent("mysection/p1.md", `FOO`)
+ b.WithTemplates(
+ "index.html", renderStringTempl,
+ "_default/list.html", renderStringTempl,
+ "_default/single.html", renderStringTempl,
+ )
+
+ b.Build(BuildCfg{})
+
+ for _, filename := range []string{
+ "index.html",
+ "mysection/index.html",
+ "categories/index.html",
+ "tags/index.html",
+ "mysection/p1/index.html",
+ } {
+ b.AssertFileContent("public/"+filename, `<strong>Hello</strong>`)
+ }
+}
+
+// Issue 9433
+func TestRenderStringOnPageNotBackedByAFile(t *testing.T) {
+ t.Parallel()
+ logger := loggers.NewWarningLogger()
+ b := newTestSitesBuilder(t).WithLogger(logger).WithConfigFile("toml", `
+disableKinds = ["page", "section", "taxonomy", "term"]
+`)
+ b.WithTemplates("index.html", `{{ .RenderString "**Hello**" }}`).WithContent("p1.md", "")
+ b.BuildE(BuildCfg{})
+ b.Assert(int(logger.LogCounters().WarnCounter.Count()), qt.Equals, 0)
+}
+
+func TestRenderStringWithShortcode(t *testing.T) {
+ t.Parallel()
+
+ filesTemplate := `
+-- config.toml --
+title = "Hugo Rocks!"
+enableInlineShortcodes = true
+-- content/p1/index.md --
+---
+title: "P1"
+---
+## First
+-- layouts/shortcodes/mark1.md --
+{{ .Inner }}
+-- layouts/shortcodes/mark2.md --
+1. Item Mark2 1
+1. Item Mark2 2
+ 1. Item Mark2 2-1
+1. Item Mark2 3
+-- layouts/shortcodes/myhthml.html --
+Title: {{ .Page.Title }}
+TableOfContents: {{ .Page.TableOfContents }}
+Page Type: {{ printf "%T" .Page }}
+-- layouts/_default/single.html --
+{{ .RenderString "Markdown: {{% mark2 %}}|HTML: {{< myhthml >}}|Inline: {{< foo.inline >}}{{ site.Title }}{{< /foo.inline >}}|" }}
+HasShortcode: mark2:{{ .HasShortcode "mark2" }}:true
+HasShortcode: foo:{{ .HasShortcode "foo" }}:false
+
+`
+
+ t.Run("Basic", func(t *testing.T) {
+
+ b := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: filesTemplate,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html",
+ "<p>Markdown: 1. Item Mark2 1</p>\n<ol>\n<li>Item Mark2 2\n<ol>\n<li>Item Mark2 2-1</li>\n</ol>\n</li>\n<li>Item Mark2 3|",
+ "<a href=\"#first\">First</a>", // ToC
+ `
+HTML: Title: P1
+Inline: Hugo Rocks!
+HasShortcode: mark2:true:true
+HasShortcode: foo:false:false
+Page Type: *hugolib.pageForShortcode`,
+ )
+
+ })
+
+ t.Run("Edit shortcode", func(t *testing.T) {
+
+ b := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: filesTemplate,
+ Running: true,
+ },
+ ).Build()
+
+ b.EditFiles("layouts/shortcodes/myhthml.html", "Edit shortcode").Build()
+
+ b.AssertFileContent("public/p1/index.html",
+ `Edit shortcode`,
+ )
+
+ })
+}
+
+// Issue 9959
+func TestRenderStringWithShortcodeInPageWithNoContentFile(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- layouts/shortcodes/myshort.html --
+Page Kind: {{ .Page.Kind }}
+-- layouts/index.html --
+Short: {{ .RenderString "{{< myshort >}}" }}
+Has myshort: {{ .HasShortcode "myshort" }}
+Has other: {{ .HasShortcode "other" }}
+
+ `
+
+ b := NewIntegrationTestBuilder(
+ IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html",
+ `
+Page Kind: home
+Has myshort: true
+Has other: false
+`)
+
+}
diff --git a/hugolib/resource_chain_test.go b/hugolib/resource_chain_test.go
new file mode 100644
index 000000000..d94d389a7
--- /dev/null
+++ b/hugolib/resource_chain_test.go
@@ -0,0 +1,756 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "io"
+ "io/ioutil"
+ "math/rand"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss"
+)
+
+func TestResourceChainBasic(t *testing.T) {
+ failIfHandler := func(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == "/fail.jpg" {
+ http.Error(w, "{ msg: failed }", 500)
+ return
+ }
+ h.ServeHTTP(w, r)
+
+ })
+ }
+ ts := httptest.NewServer(
+ failIfHandler(http.FileServer(http.Dir("testdata/"))),
+ )
+ t.Cleanup(func() {
+ ts.Close()
+ })
+
+ b := newTestSitesBuilder(t)
+ b.WithTemplatesAdded("index.html", fmt.Sprintf(`
+{{ $hello := "<h1> Hello World! </h1>" | resources.FromString "hello.html" | fingerprint "sha512" | minify | fingerprint }}
+{{ $cssFingerprinted1 := "body { background-color: lightblue; }" | resources.FromString "styles.css" | minify | fingerprint }}
+{{ $cssFingerprinted2 := "body { background-color: orange; }" | resources.FromString "styles2.css" | minify | fingerprint }}
+
+
+HELLO: {{ $hello.Name }}|{{ $hello.RelPermalink }}|{{ $hello.Content | safeHTML }}
+
+{{ $img := resources.Get "images/sunset.jpg" }}
+{{ $fit := $img.Fit "200x200" }}
+{{ $fit2 := $fit.Fit "100x200" }}
+{{ $img = $img | fingerprint }}
+SUNSET: {{ $img.Name }}|{{ $img.RelPermalink }}|{{ $img.Width }}|{{ len $img.Content }}
+FIT: {{ $fit.Name }}|{{ $fit.RelPermalink }}|{{ $fit.Width }}
+CSS integrity Data first: {{ $cssFingerprinted1.Data.Integrity }} {{ $cssFingerprinted1.RelPermalink }}
+CSS integrity Data last: {{ $cssFingerprinted2.RelPermalink }} {{ $cssFingerprinted2.Data.Integrity }}
+
+{{ $failedImg := resources.GetRemote "%[1]s/fail.jpg" }}
+{{ $rimg := resources.GetRemote "%[1]s/sunset.jpg" }}
+{{ $remotenotfound := resources.GetRemote "%[1]s/notfound.jpg" }}
+{{ $localnotfound := resources.Get "images/notfound.jpg" }}
+{{ $gopherprotocol := resources.GetRemote "gopher://example.org" }}
+{{ $rfit := $rimg.Fit "200x200" }}
+{{ $rfit2 := $rfit.Fit "100x200" }}
+{{ $rimg = $rimg | fingerprint }}
+SUNSET REMOTE: {{ $rimg.Name }}|{{ $rimg.RelPermalink }}|{{ $rimg.Width }}|{{ len $rimg.Content }}
+FIT REMOTE: {{ $rfit.Name }}|{{ $rfit.RelPermalink }}|{{ $rfit.Width }}
+REMOTE NOT FOUND: {{ if $remotenotfound }}FAILED{{ else}}OK{{ end }}
+LOCAL NOT FOUND: {{ if $localnotfound }}FAILED{{ else}}OK{{ end }}
+PRINT PROTOCOL ERROR1: {{ with $gopherprotocol }}{{ . | safeHTML }}{{ end }}
+PRINT PROTOCOL ERROR2: {{ with $gopherprotocol }}{{ .Err | safeHTML }}{{ end }}
+PRINT PROTOCOL ERROR DETAILS: {{ with $gopherprotocol }}Err: {{ .Err | safeHTML }}{{ with .Err }}|{{ with .Data }}Body: {{ .Body }}|StatusCode: {{ .StatusCode }}{{ end }}|{{ end }}{{ end }}
+FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg.Err }}|{{ . }}|{{ with .Data }}Body: {{ .Body }}|StatusCode: {{ .StatusCode }}|ContentLength: {{ .ContentLength }}|ContentType: {{ .ContentType }}{{ end }}{{ end }}|
+`, ts.URL))
+
+ fs := b.Fs.Source
+
+ imageDir := filepath.Join("assets", "images")
+ b.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil)
+ src, err := os.Open("testdata/sunset.jpg")
+ b.Assert(err, qt.IsNil)
+ out, err := fs.Create(filepath.Join(imageDir, "sunset.jpg"))
+ b.Assert(err, qt.IsNil)
+ _, err = io.Copy(out, src)
+ b.Assert(err, qt.IsNil)
+ out.Close()
+
+ b.Running()
+
+ for i := 0; i < 2; i++ {
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html",
+ fmt.Sprintf(`
+SUNSET: images/sunset.jpg|/images/sunset.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587
+FIT: images/sunset.jpg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_fit_q75_box.jpg|200
+CSS integrity Data first: sha256-od9YaHw8nMOL8mUy97Sy8sKwMV3N4hI3aVmZXATxH&#43;8= /styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css
+CSS integrity Data last: /styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css sha256-HPxSmGg2QF03&#43;ZmKY/1t2GCOjEEOXj2x2qow94vCc7o=
+
+SUNSET REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s.a9bf1d944e19c0f382e0d8f51de690f7d0bc8fa97390c4242a86c3e5c0737e71.jpg|900|90587
+FIT REMOTE: sunset_%[1]s.jpg|/sunset_%[1]s_hu59e56ffff1bc1d8d122b1403d34e039f_0_200x200_fit_q75_box.jpg|200
+REMOTE NOT FOUND: OK
+LOCAL NOT FOUND: OK
+PRINT PROTOCOL ERROR DETAILS: Err: error calling resources.GetRemote: Get "gopher://example.org": unsupported protocol scheme "gopher"||
+FAILED REMOTE ERROR DETAILS CONTENT: |failed to fetch remote resource: Internal Server Error|Body: { msg: failed }
+|StatusCode: 500|ContentLength: 16|ContentType: text/plain; charset=utf-8|
+
+
+`, helpers.HashString(ts.URL+"/sunset.jpg", map[string]any{})))
+
+ b.AssertFileContent("public/styles.min.a1df58687c3c9cc38bf26532f7b4b2f2c2b0315dcde212376959995c04f11fef.css", "body{background-color:#add8e6}")
+ b.AssertFileContent("public//styles2.min.1cfc52986836405d37f9998a63fd6dd8608e8c410e5e3db1daaa30f78bc273ba.css", "body{background-color:orange}")
+
+ b.EditFiles("page1.md", `
+---
+title: "Page 1 edit"
+summary: "Edited summary"
+---
+
+Edited content.
+
+`)
+
+ b.Assert(b.Fs.WorkingDirWritable.Remove("public"), qt.IsNil)
+ b.H.ResourceSpec.ClearCaches()
+
+ }
+}
+
+func TestResourceChainPostProcess(t *testing.T) {
+ t.Parallel()
+
+ rnd := rand.New(rand.NewSource(time.Now().UnixNano()))
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `[minify]
+ minifyOutput = true
+ [minify.tdewolff]
+ [minify.tdewolff.html]
+ keepQuotes = false
+ keepWhitespace = false`)
+ b.WithContent("page1.md", "---\ntitle: Page1\n---")
+ b.WithContent("page2.md", "---\ntitle: Page2\n---")
+
+ b.WithTemplates(
+ "_default/single.html", `{{ $hello := "<h1> Hello World! </h1>" | resources.FromString "hello.html" | minify | fingerprint "md5" | resources.PostProcess }}
+HELLO: {{ $hello.RelPermalink }}
+`,
+ "index.html", `Start.
+{{ $hello := "<h1> Hello World! </h1>" | resources.FromString "hello.html" | minify | fingerprint "md5" | resources.PostProcess }}
+
+HELLO: {{ $hello.RelPermalink }}|Integrity: {{ $hello.Data.Integrity }}|MediaType: {{ $hello.MediaType.Type }}
+HELLO2: Name: {{ $hello.Name }}|Content: {{ $hello.Content }}|Title: {{ $hello.Title }}|ResourceType: {{ $hello.ResourceType }}
+
+// Issue #8884
+<a href="hugo.rocks">foo</a>
+<a href="{{ $hello.RelPermalink }}" integrity="{{ $hello.Data.Integrity}}">Hello</a>
+`+strings.Repeat("a b", rnd.Intn(10)+1)+`
+
+
+End.`)
+
+ b.Running()
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html",
+ `Start.
+HELLO: /hello.min.a2d1cb24f24b322a7dad520414c523e9.html|Integrity: md5-otHLJPJLMip9rVIEFMUj6Q==|MediaType: text/html
+HELLO2: Name: hello.html|Content: <h1>Hello World!</h1>|Title: hello.html|ResourceType: text
+<a href=hugo.rocks>foo</a>
+<a href="/hello.min.a2d1cb24f24b322a7dad520414c523e9.html" integrity="md5-otHLJPJLMip9rVIEFMUj6Q==">Hello</a>
+End.`)
+
+ b.AssertFileContent("public/page1/index.html", `HELLO: /hello.min.a2d1cb24f24b322a7dad520414c523e9.html`)
+ b.AssertFileContent("public/page2/index.html", `HELLO: /hello.min.a2d1cb24f24b322a7dad520414c523e9.html`)
+}
+
+func BenchmarkResourceChainPostProcess(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ b.StopTimer()
+ s := newTestSitesBuilder(b)
+ for i := 0; i < 300; i++ {
+ s.WithContent(fmt.Sprintf("page%d.md", i+1), "---\ntitle: Page\n---")
+ }
+ s.WithTemplates("_default/single.html", `Start.
+Some text.
+
+
+{{ $hello1 := "<h1> Hello World 2! </h1>" | resources.FromString "hello.html" | minify | fingerprint "md5" | resources.PostProcess }}
+{{ $hello2 := "<h1> Hello World 2! </h1>" | resources.FromString (printf "%s.html" .Path) | minify | fingerprint "md5" | resources.PostProcess }}
+
+Some more text.
+
+HELLO: {{ $hello1.RelPermalink }}|Integrity: {{ $hello1.Data.Integrity }}|MediaType: {{ $hello1.MediaType.Type }}
+
+Some more text.
+
+HELLO2: Name: {{ $hello2.Name }}|Content: {{ $hello2.Content }}|Title: {{ $hello2.Title }}|ResourceType: {{ $hello2.ResourceType }}
+
+Some more text.
+
+HELLO2_2: Name: {{ $hello2.Name }}|Content: {{ $hello2.Content }}|Title: {{ $hello2.Title }}|ResourceType: {{ $hello2.ResourceType }}
+
+End.
+`)
+
+ b.StartTimer()
+ s.Build(BuildCfg{})
+
+ }
+}
+
+func TestResourceChains(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ switch r.URL.Path {
+ case "/css/styles1.css":
+ w.Header().Set("Content-Type", "text/css")
+ w.Write([]byte(`h1 {
+ font-style: bold;
+ }`))
+ return
+
+ case "/js/script1.js":
+ w.Write([]byte(`var x; x = 5, document.getElementById("demo").innerHTML = x * 10`))
+ return
+
+ case "/mydata/json1.json":
+ w.Write([]byte(`{
+ "employees": [
+ {
+ "firstName": "John",
+ "lastName": "Doe"
+ },
+ {
+ "firstName": "Anna",
+ "lastName": "Smith"
+ },
+ {
+ "firstName": "Peter",
+ "lastName": "Jones"
+ }
+ ]
+ }`))
+ return
+
+ case "/mydata/xml1.xml":
+ w.Write([]byte(`
+ <hello>
+ <world>Hugo Rocks!</<world>
+ </hello>`))
+ return
+
+ case "/mydata/svg1.svg":
+ w.Header().Set("Content-Disposition", `attachment; filename="image.svg"`)
+ w.Write([]byte(`
+ <svg height="100" width="100">
+ <path d="M1e2 1e2H3e2 2e2z"/>
+ </svg>`))
+ return
+
+ case "/mydata/html1.html":
+ w.Write([]byte(`
+ <html>
+ <a href=#>Cool</a>
+ </html>`))
+ return
+
+ case "/authenticated/":
+ w.Header().Set("Content-Type", "text/plain")
+ if r.Header.Get("Authorization") == "Bearer abcd" {
+ w.Write([]byte(`Welcome`))
+ return
+ }
+ http.Error(w, "Forbidden", http.StatusForbidden)
+ return
+
+ case "/post":
+ w.Header().Set("Content-Type", "text/plain")
+ if r.Method == http.MethodPost {
+ body, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ http.Error(w, "Internal server error", http.StatusInternalServerError)
+ return
+ }
+ w.Write(body)
+ return
+ }
+ http.Error(w, "Bad request", http.StatusBadRequest)
+ return
+ }
+
+ http.Error(w, "Not found", http.StatusNotFound)
+ return
+ }))
+ t.Cleanup(func() {
+ ts.Close()
+ })
+
+ tests := []struct {
+ name string
+ shouldRun func() bool
+ prepare func(b *sitesBuilder)
+ verify func(b *sitesBuilder)
+ }{
+ {"tocss", func() bool { return scss.Supports() }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $scss := resources.Get "scss/styles2.scss" | toCSS }}
+{{ $sass := resources.Get "sass/styles3.sass" | toCSS }}
+{{ $scssCustomTarget := resources.Get "scss/styles2.scss" | toCSS (dict "targetPath" "styles/main.css") }}
+{{ $scssCustomTargetString := resources.Get "scss/styles2.scss" | toCSS "styles/main.css" }}
+{{ $scssMin := resources.Get "scss/styles2.scss" | toCSS | minify }}
+{{ $scssFromTempl := ".{{ .Kind }} { color: blue; }" | resources.FromString "kindofblue.templ" | resources.ExecuteAsTemplate "kindofblue.scss" . | toCSS (dict "targetPath" "styles/templ.css") | minify }}
+{{ $bundle1 := slice $scssFromTempl $scssMin | resources.Concat "styles/bundle1.css" }}
+T1: Len Content: {{ len $scss.Content }}|RelPermalink: {{ $scss.RelPermalink }}|Permalink: {{ $scss.Permalink }}|MediaType: {{ $scss.MediaType.Type }}
+T2: Content: {{ $scssMin.Content }}|RelPermalink: {{ $scssMin.RelPermalink }}
+T3: Content: {{ len $scssCustomTarget.Content }}|RelPermalink: {{ $scssCustomTarget.RelPermalink }}|MediaType: {{ $scssCustomTarget.MediaType.Type }}
+T4: Content: {{ len $scssCustomTargetString.Content }}|RelPermalink: {{ $scssCustomTargetString.RelPermalink }}|MediaType: {{ $scssCustomTargetString.MediaType.Type }}
+T5: Content: {{ $sass.Content }}|T5 RelPermalink: {{ $sass.RelPermalink }}|
+T6: {{ $bundle1.Permalink }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `T1: Len Content: 24|RelPermalink: /scss/styles2.css|Permalink: http://example.com/scss/styles2.css|MediaType: text/css`)
+ b.AssertFileContent("public/index.html", `T2: Content: body{color:#333}|RelPermalink: /scss/styles2.min.css`)
+ b.AssertFileContent("public/index.html", `T3: Content: 24|RelPermalink: /styles/main.css|MediaType: text/css`)
+ b.AssertFileContent("public/index.html", `T4: Content: 24|RelPermalink: /styles/main.css|MediaType: text/css`)
+ b.AssertFileContent("public/index.html", `T5: Content: .content-navigation {`)
+ b.AssertFileContent("public/index.html", `T5 RelPermalink: /sass/styles3.css|`)
+ b.AssertFileContent("public/index.html", `T6: http://example.com/styles/bundle1.css`)
+
+ c.Assert(b.CheckExists("public/styles/templ.min.css"), qt.Equals, false)
+ b.AssertFileContent("public/styles/bundle1.css", `.home{color:blue}body{color:#333}`)
+ }},
+
+ {"minify", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithConfigFile("toml", `[minify]
+ [minify.tdewolff]
+ [minify.tdewolff.html]
+ keepWhitespace = false
+`)
+ b.WithTemplates("home.html", fmt.Sprintf(`
+Min CSS: {{ ( resources.Get "css/styles1.css" | minify ).Content }}
+Min CSS Remote: {{ ( resources.GetRemote "%[1]s/css/styles1.css" | minify ).Content }}
+Min JS: {{ ( resources.Get "js/script1.js" | resources.Minify ).Content | safeJS }}
+Min JS Remote: {{ ( resources.GetRemote "%[1]s/js/script1.js" | minify ).Content }}
+Min JSON: {{ ( resources.Get "mydata/json1.json" | resources.Minify ).Content | safeHTML }}
+Min JSON Remote: {{ ( resources.GetRemote "%[1]s/mydata/json1.json" | resources.Minify ).Content | safeHTML }}
+Min XML: {{ ( resources.Get "mydata/xml1.xml" | resources.Minify ).Content | safeHTML }}
+Min XML Remote: {{ ( resources.GetRemote "%[1]s/mydata/xml1.xml" | resources.Minify ).Content | safeHTML }}
+Min SVG: {{ ( resources.Get "mydata/svg1.svg" | resources.Minify ).Content | safeHTML }}
+Min SVG Remote: {{ ( resources.GetRemote "%[1]s/mydata/svg1.svg" | resources.Minify ).Content | safeHTML }}
+Min SVG again: {{ ( resources.Get "mydata/svg1.svg" | resources.Minify ).Content | safeHTML }}
+Min HTML: {{ ( resources.Get "mydata/html1.html" | resources.Minify ).Content | safeHTML }}
+Min HTML Remote: {{ ( resources.GetRemote "%[1]s/mydata/html1.html" | resources.Minify ).Content | safeHTML }}
+`, ts.URL))
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `Min CSS: h1{font-style:bold}`)
+ b.AssertFileContent("public/index.html", `Min CSS Remote: h1{font-style:bold}`)
+ b.AssertFileContent("public/index.html", `Min JS: var x=5;document.getElementById(&#34;demo&#34;).innerHTML=x*10`)
+ b.AssertFileContent("public/index.html", `Min JS Remote: var x=5;document.getElementById(&#34;demo&#34;).innerHTML=x*10`)
+ b.AssertFileContent("public/index.html", `Min JSON: {"employees":[{"firstName":"John","lastName":"Doe"},{"firstName":"Anna","lastName":"Smith"},{"firstName":"Peter","lastName":"Jones"}]}`)
+ b.AssertFileContent("public/index.html", `Min JSON Remote: {"employees":[{"firstName":"John","lastName":"Doe"},{"firstName":"Anna","lastName":"Smith"},{"firstName":"Peter","lastName":"Jones"}]}`)
+ b.AssertFileContent("public/index.html", `Min XML: <hello><world>Hugo Rocks!</<world></hello>`)
+ b.AssertFileContent("public/index.html", `Min XML Remote: <hello><world>Hugo Rocks!</<world></hello>`)
+ b.AssertFileContent("public/index.html", `Min SVG: <svg height="100" width="100"><path d="M1e2 1e2H3e2 2e2z"/></svg>`)
+ b.AssertFileContent("public/index.html", `Min SVG Remote: <svg height="100" width="100"><path d="M1e2 1e2H3e2 2e2z"/></svg>`)
+ b.AssertFileContent("public/index.html", `Min SVG again: <svg height="100" width="100"><path d="M1e2 1e2H3e2 2e2z"/></svg>`)
+ b.AssertFileContent("public/index.html", `Min HTML: <html><a href=#>Cool</a></html>`)
+ b.AssertFileContent("public/index.html", `Min HTML Remote: <html><a href=#>Cool</a></html>`)
+ }},
+
+ {"remote", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", fmt.Sprintf(`
+{{$js := resources.GetRemote "%[1]s/js/script1.js" }}
+Remote Filename: {{ $js.RelPermalink }}
+{{$svg := resources.GetRemote "%[1]s/mydata/svg1.svg" }}
+Remote Content-Disposition: {{ $svg.RelPermalink }}
+{{$auth := resources.GetRemote "%[1]s/authenticated/" (dict "headers" (dict "Authorization" "Bearer abcd")) }}
+Remote Authorization: {{ $auth.Content }}
+{{$post := resources.GetRemote "%[1]s/post" (dict "method" "post" "body" "Request body") }}
+Remote POST: {{ $post.Content }}
+`, ts.URL))
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `Remote Filename: /script1_`)
+ b.AssertFileContent("public/index.html", `Remote Content-Disposition: /image_`)
+ b.AssertFileContent("public/index.html", `Remote Authorization: Welcome`)
+ b.AssertFileContent("public/index.html", `Remote POST: Request body`)
+ }},
+
+ {"concat", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $a := "A" | resources.FromString "a.txt"}}
+{{ $b := "B" | resources.FromString "b.txt"}}
+{{ $c := "C" | resources.FromString "c.txt"}}
+{{ $textResources := .Resources.Match "*.txt" }}
+{{ $combined := slice $a $b $c | resources.Concat "bundle/concat.txt" }}
+T1: Content: {{ $combined.Content }}|RelPermalink: {{ $combined.RelPermalink }}|Permalink: {{ $combined.Permalink }}|MediaType: {{ $combined.MediaType.Type }}
+{{ with $textResources }}
+{{ $combinedText := . | resources.Concat "bundle/concattxt.txt" }}
+T2: Content: {{ $combinedText.Content }}|{{ $combinedText.RelPermalink }}
+{{ end }}
+{{/* https://github.com/gohugoio/hugo/issues/5269 */}}
+{{ $css := "body { color: blue; }" | resources.FromString "styles.css" }}
+{{ $minified := resources.Get "css/styles1.css" | minify }}
+{{ slice $css $minified | resources.Concat "bundle/mixed.css" }}
+{{/* https://github.com/gohugoio/hugo/issues/5403 */}}
+{{ $d := "function D {} // A comment" | resources.FromString "d.js"}}
+{{ $e := "(function E {})" | resources.FromString "e.js"}}
+{{ $f := "(function F {})()" | resources.FromString "f.js"}}
+{{ $jsResources := .Resources.Match "*.js" }}
+{{ $combinedJs := slice $d $e $f | resources.Concat "bundle/concatjs.js" }}
+T3: Content: {{ $combinedJs.Content }}|{{ $combinedJs.RelPermalink }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `T1: Content: ABC|RelPermalink: /bundle/concat.txt|Permalink: http://example.com/bundle/concat.txt|MediaType: text/plain`)
+ b.AssertFileContent("public/bundle/concat.txt", "ABC")
+
+ b.AssertFileContent("public/index.html", `T2: Content: t1t|t2t|`)
+ b.AssertFileContent("public/bundle/concattxt.txt", "t1t|t2t|")
+
+ b.AssertFileContent("public/index.html", `T3: Content: function D {} // A comment
+;
+(function E {})
+;
+(function F {})()|`)
+ b.AssertFileContent("public/bundle/concatjs.js", `function D {} // A comment
+;
+(function E {})
+;
+(function F {})()`)
+ }},
+
+ {"concat and fingerprint", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $a := "A" | resources.FromString "a.txt"}}
+{{ $b := "B" | resources.FromString "b.txt"}}
+{{ $c := "C" | resources.FromString "c.txt"}}
+{{ $combined := slice $a $b $c | resources.Concat "bundle/concat.txt" }}
+{{ $fingerprinted := $combined | fingerprint }}
+Fingerprinted: {{ $fingerprinted.RelPermalink }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", "Fingerprinted: /bundle/concat.b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78.txt")
+ b.AssertFileContent("public/bundle/concat.b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78.txt", "ABC")
+ }},
+
+ {"fromstring", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $r := "Hugo Rocks!" | resources.FromString "rocks/hugo.txt" }}
+{{ $r.Content }}|{{ $r.RelPermalink }}|{{ $r.Permalink }}|{{ $r.MediaType.Type }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `Hugo Rocks!|/rocks/hugo.txt|http://example.com/rocks/hugo.txt|text/plain`)
+ b.AssertFileContent("public/rocks/hugo.txt", "Hugo Rocks!")
+ }},
+ {"execute-as-template", func() bool {
+ return true
+ }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $var := "Hugo Page" }}
+{{ if .IsHome }}
+{{ $var = "Hugo Home" }}
+{{ end }}
+T1: {{ $var }}
+{{ $result := "{{ .Kind | upper }}" | resources.FromString "mytpl.txt" | resources.ExecuteAsTemplate "result.txt" . }}
+T2: {{ $result.Content }}|{{ $result.RelPermalink}}|{{$result.MediaType.Type }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `T2: HOME|/result.txt|text/plain`, `T1: Hugo Home`)
+ }},
+ {"fingerprint", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $r := "ab" | resources.FromString "rocks/hugo.txt" }}
+{{ $result := $r | fingerprint }}
+{{ $result512 := $r | fingerprint "sha512" }}
+{{ $resultMD5 := $r | fingerprint "md5" }}
+T1: {{ $result.Content }}|{{ $result.RelPermalink}}|{{$result.MediaType.Type }}|{{ $result.Data.Integrity }}|
+T2: {{ $result512.Content }}|{{ $result512.RelPermalink}}|{{$result512.MediaType.Type }}|{{ $result512.Data.Integrity }}|
+T3: {{ $resultMD5.Content }}|{{ $resultMD5.RelPermalink}}|{{$resultMD5.MediaType.Type }}|{{ $resultMD5.Data.Integrity }}|
+{{ $r2 := "bc" | resources.FromString "rocks/hugo2.txt" | fingerprint }}
+{{/* https://github.com/gohugoio/hugo/issues/5296 */}}
+T4: {{ $r2.Data.Integrity }}|
+
+
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `T1: ab|/rocks/hugo.fb8e20fc2e4c3f248c60c39bd652f3c1347298bb977b8b4d5903b85055620603.txt|text/plain|sha256-&#43;44g/C5MPySMYMOb1lLzwTRymLuXe4tNWQO4UFViBgM=|`)
+ b.AssertFileContent("public/index.html", `T2: ab|/rocks/hugo.2d408a0717ec188158278a796c689044361dc6fdde28d6f04973b80896e1823975cdbf12eb63f9e0591328ee235d80e9b5bf1aa6a44f4617ff3caf6400eb172d.txt|text/plain|sha512-LUCKBxfsGIFYJ4p5bGiQRDYdxv3eKNbwSXO4CJbhgjl1zb8S62P54FkTKO4jXYDptb8apqRPRhf/PK9kAOsXLQ==|`)
+ b.AssertFileContent("public/index.html", `T3: ab|/rocks/hugo.187ef4436122d1cc2f40dc2b92f0eba0.txt|text/plain|md5-GH70Q2Ei0cwvQNwrkvDroA==|`)
+ b.AssertFileContent("public/index.html", `T4: sha256-Hgu9bGhroFC46wP/7txk/cnYCUf86CGrvl1tyNJSxaw=|`)
+ }},
+ // https://github.com/gohugoio/hugo/issues/5226
+ {"baseurl-path", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithSimpleConfigFileAndBaseURL("https://example.com/hugo/")
+ b.WithTemplates("home.html", `
+{{ $r1 := "ab" | resources.FromString "rocks/hugo.txt" }}
+T1: {{ $r1.Permalink }}|{{ $r1.RelPermalink }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", `T1: https://example.com/hugo/rocks/hugo.txt|/hugo/rocks/hugo.txt`)
+ }},
+
+ // https://github.com/gohugoio/hugo/issues/4944
+ {"Prevent resource publish on .Content only", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $cssInline := "body { color: green; }" | resources.FromString "inline.css" | minify }}
+{{ $cssPublish1 := "body { color: blue; }" | resources.FromString "external1.css" | minify }}
+{{ $cssPublish2 := "body { color: orange; }" | resources.FromString "external2.css" | minify }}
+
+Inline: {{ $cssInline.Content }}
+Publish 1: {{ $cssPublish1.Content }} {{ $cssPublish1.RelPermalink }}
+Publish 2: {{ $cssPublish2.Permalink }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html",
+ `Inline: body{color:green}`,
+ "Publish 1: body{color:blue} /external1.min.css",
+ "Publish 2: http://example.com/external2.min.css",
+ )
+ b.Assert(b.CheckExists("public/external2.css"), qt.Equals, false)
+ b.Assert(b.CheckExists("public/external1.css"), qt.Equals, false)
+ b.Assert(b.CheckExists("public/external2.min.css"), qt.Equals, true)
+ b.Assert(b.CheckExists("public/external1.min.css"), qt.Equals, true)
+ b.Assert(b.CheckExists("public/inline.min.css"), qt.Equals, false)
+ }},
+
+ {"unmarshal", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `
+{{ $toml := "slogan = \"Hugo Rocks!\"" | resources.FromString "slogan.toml" | transform.Unmarshal }}
+{{ $csv1 := "\"Hugo Rocks\",\"Hugo is Fast!\"" | resources.FromString "slogans.csv" | transform.Unmarshal }}
+{{ $csv2 := "a;b;c" | transform.Unmarshal (dict "delimiter" ";") }}
+{{ $xml := "<?xml version=\"1.0\" encoding=\"UTF-8\"?><note><to>You</to><from>Me</from><heading>Reminder</heading><body>Do not forget XML</body></note>" | transform.Unmarshal }}
+
+Slogan: {{ $toml.slogan }}
+CSV1: {{ $csv1 }} {{ len (index $csv1 0) }}
+CSV2: {{ $csv2 }}
+XML: {{ $xml.body }}
+`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html",
+ `Slogan: Hugo Rocks!`,
+ `[[Hugo Rocks Hugo is Fast!]] 2`,
+ `CSV2: [[a b c]]`,
+ `XML: Do not forget XML`,
+ )
+ }},
+ {"resources.Get", func() bool { return true }, func(b *sitesBuilder) {
+ b.WithTemplates("home.html", `NOT FOUND: {{ if (resources.Get "this-does-not-exist") }}FAILED{{ else }}OK{{ end }}`)
+ }, func(b *sitesBuilder) {
+ b.AssertFileContent("public/index.html", "NOT FOUND: OK")
+ }},
+
+ {"template", func() bool { return true }, func(b *sitesBuilder) {}, func(b *sitesBuilder) {
+ }},
+ }
+
+ for _, test := range tests {
+ test := test
+ t.Run(test.name, func(t *testing.T) {
+ if !test.shouldRun() {
+ t.Skip()
+ }
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
+ b.WithContent("_index.md", `
+---
+title: Home
+---
+
+Home.
+
+`,
+ "page1.md", `
+---
+title: Hello1
+---
+
+Hello1
+`,
+ "page2.md", `
+---
+title: Hello2
+---
+
+Hello2
+`,
+ "t1.txt", "t1t|",
+ "t2.txt", "t2t|",
+ )
+
+ b.WithSourceFile(filepath.Join("assets", "css", "styles1.css"), `
+h1 {
+ font-style: bold;
+}
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "js", "script1.js"), `
+var x;
+x = 5;
+document.getElementById("demo").innerHTML = x * 10;
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "mydata", "json1.json"), `
+{
+"employees":[
+ {"firstName":"John", "lastName":"Doe"},
+ {"firstName":"Anna", "lastName":"Smith"},
+ {"firstName":"Peter", "lastName":"Jones"}
+]
+}
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "mydata", "svg1.svg"), `
+<svg height="100" width="100">
+ <path d="M 100 100 L 300 100 L 200 100 z"/>
+</svg>
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "mydata", "xml1.xml"), `
+<hello>
+<world>Hugo Rocks!</<world>
+</hello>
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "mydata", "html1.html"), `
+<html>
+<a href="#">
+Cool
+</a >
+</html>
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "scss", "styles2.scss"), `
+$color: #333;
+
+body {
+ color: $color;
+}
+`)
+
+ b.WithSourceFile(filepath.Join("assets", "sass", "styles3.sass"), `
+$color: #333;
+
+.content-navigation
+ border-color: $color
+
+`)
+
+ test.prepare(b)
+ b.Build(BuildCfg{})
+ test.verify(b)
+ })
+ }
+}
+
+func TestMultiSiteResource(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ b := newMultiSiteTestDefaultBuilder(t)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ // This build is multilingual, but not multihost. There should be only one pipes.txt
+ b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /blog/text/pipes.txt")
+ c.Assert(b.CheckExists("public/fr/text/pipes.txt"), qt.Equals, false)
+ c.Assert(b.CheckExists("public/en/text/pipes.txt"), qt.Equals, false)
+ b.AssertFileContent("public/en/index.html", "Default Home Page", "String Resource: /blog/text/pipes.txt")
+ b.AssertFileContent("public/text/pipes.txt", "Hugo Pipes")
+}
+
+func TestResourcesMatch(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+
+ b.WithContent("page.md", "")
+
+ b.WithSourceFile(
+ "assets/images/img1.png", "png",
+ "assets/images/img2.jpg", "jpg",
+ "assets/jsons/data1.json", "json1 content",
+ "assets/jsons/data2.json", "json2 content",
+ "assets/jsons/data3.xml", "xml content",
+ )
+
+ b.WithTemplates("index.html", `
+{{ $jsons := (resources.Match "jsons/*.json") }}
+{{ $json := (resources.GetMatch "jsons/*.json") }}
+{{ printf "jsonsMatch: %d" (len $jsons) }}
+{{ printf "imagesByType: %d" (len (resources.ByType "image") ) }}
+{{ printf "applicationByType: %d" (len (resources.ByType "application") ) }}
+JSON: {{ $json.RelPermalink }}: {{ $json.Content }}
+{{ range $jsons }}
+{{- .RelPermalink }}: {{ .Content }}
+{{ end }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html",
+ "JSON: /jsons/data1.json: json1 content",
+ "jsonsMatch: 2",
+ "imagesByType: 2",
+ "applicationByType: 3",
+ "/jsons/data1.json: json1 content")
+}
+
+func TestResourceMinifyDisabled(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", `
+baseURL = "https://example.org"
+
+[minify]
+disableXML=true
+
+
+`)
+
+ b.WithContent("page.md", "")
+
+ b.WithSourceFile(
+ "assets/xml/data.xml", "<root> <foo> asdfasdf </foo> </root>",
+ )
+
+ b.WithTemplates("index.html", `
+{{ $xml := resources.Get "xml/data.xml" | minify | fingerprint }}
+XML: {{ $xml.Content | safeHTML }}|{{ $xml.RelPermalink }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+XML: <root> <foo> asdfasdf </foo> </root>|/xml/data.min.3be4fddd19aaebb18c48dd6645215b822df74701957d6d36e59f203f9c30fd9f.xml
+`)
+}
diff --git a/hugolib/robotstxt_test.go b/hugolib/robotstxt_test.go
new file mode 100644
index 000000000..c58795ca4
--- /dev/null
+++ b/hugolib/robotstxt_test.go
@@ -0,0 +1,41 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+)
+
+const robotTxtTemplate = `User-agent: Googlebot
+ {{ range .Data.Pages }}
+ Disallow: {{.RelPermalink}}
+ {{ end }}
+`
+
+func TestRobotsTXTOutput(t *testing.T) {
+ t.Parallel()
+
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("baseURL", "http://auth/bub/")
+ cfg.Set("enableRobotsTXT", true)
+
+ b := newTestSitesBuilder(t).WithViper(cfg)
+ b.WithTemplatesAdded("layouts/robots.txt", robotTxtTemplate)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/robots.txt", "User-agent: Googlebot")
+}
diff --git a/hugolib/rss_test.go b/hugolib/rss_test.go
new file mode 100644
index 000000000..5da8ea0d6
--- /dev/null
+++ b/hugolib/rss_test.go
@@ -0,0 +1,100 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
+func TestRSSOutput(t *testing.T) {
+ t.Parallel()
+ var (
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ rssLimit := len(weightedSources) - 1
+
+ rssURI := "index.xml"
+
+ cfg.Set("baseURL", "http://auth/bub/")
+ cfg.Set("title", "RSSTest")
+ cfg.Set("rssLimit", rssLimit)
+
+ for _, src := range weightedSources {
+ writeSource(t, fs, filepath.Join("content", "sect", src[0]), src[1])
+ }
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ // Home RSS
+ th.assertFileContent(filepath.Join("public", rssURI), "<?xml", "rss version", "RSSTest")
+ // Section RSS
+ th.assertFileContent(filepath.Join("public", "sect", rssURI), "<?xml", "rss version", "Sects on RSSTest")
+ // Taxonomy RSS
+ th.assertFileContent(filepath.Join("public", "categories", "hugo", rssURI), "<?xml", "rss version", "hugo on RSSTest")
+
+ // RSS Item Limit
+ content := readWorkingDir(t, fs, filepath.Join("public", rssURI))
+ c := strings.Count(content, "<item>")
+ if c != rssLimit {
+ t.Errorf("incorrect RSS item count: expected %d, got %d", rssLimit, c)
+ }
+
+ // Encoded summary
+ th.assertFileContent(filepath.Join("public", rssURI), "<?xml", "description", "A &lt;em&gt;custom&lt;/em&gt; summary")
+}
+
+// Before Hugo 0.49 we set the pseudo page kind RSS on the page when output to RSS.
+// This had some unintended side effects, esp. when the only output format for that page
+// was RSS.
+// For the page kinds that can have multiple output formats, the Kind should be one of the
+// standard home, page etc.
+// This test has this single purpose: Check that the Kind is that of the source page.
+// See https://github.com/gohugoio/hugo/issues/5138
+func TestRSSKind(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded("index.rss.xml", `RSS Kind: {{ .Kind }}`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.xml", "RSS Kind: home")
+}
+
+func TestRSSCanonifyURLs(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded("index.rss.xml", `<rss>{{ range .Pages }}<item>{{ .Content | html }}</item>{{ end }}</rss>`)
+ b.WithContent("page.md", `---
+Title: My Page
+---
+
+Figure:
+
+{{< figure src="/images/sunset.jpg" title="Sunset" >}}
+
+
+
+`)
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.xml", "img src=&#34;http://example.com/images/sunset.jpg")
+}
diff --git a/hugolib/securitypolicies_test.go b/hugolib/securitypolicies_test.go
new file mode 100644
index 000000000..aa062bb1f
--- /dev/null
+++ b/hugolib/securitypolicies_test.go
@@ -0,0 +1,202 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "net/http"
+ "net/http/httptest"
+ "runtime"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/markup/asciidocext"
+ "github.com/gohugoio/hugo/markup/pandoc"
+ "github.com/gohugoio/hugo/markup/rst"
+ "github.com/gohugoio/hugo/resources/resource_transformers/tocss/dartsass"
+)
+
+func TestSecurityPolicies(t *testing.T) {
+ c := qt.New(t)
+
+ testVariant := func(c *qt.C, withBuilder func(b *sitesBuilder), expectErr string) {
+ c.Helper()
+ b := newTestSitesBuilder(c)
+ withBuilder(b)
+
+ if expectErr != "" {
+ err := b.BuildE(BuildCfg{})
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err, qt.ErrorMatches, expectErr)
+ } else {
+ b.Build(BuildCfg{})
+ }
+
+ }
+
+ httpTestVariant := func(c *qt.C, templ, expectErr string, withBuilder func(b *sitesBuilder)) {
+ ts := httptest.NewServer(http.FileServer(http.Dir("testdata/")))
+ c.Cleanup(func() {
+ ts.Close()
+ })
+ cb := func(b *sitesBuilder) {
+ b.WithTemplatesAdded("index.html", fmt.Sprintf(templ, ts.URL))
+ if withBuilder != nil {
+ withBuilder(b)
+ }
+ }
+ testVariant(c, cb, expectErr)
+ }
+
+ c.Run("os.GetEnv, denied", func(c *qt.C) {
+ c.Parallel()
+ cb := func(b *sitesBuilder) {
+ b.WithTemplatesAdded("index.html", `{{ os.Getenv "FOOBAR" }}`)
+ }
+ testVariant(c, cb, `(?s).*"FOOBAR" is not whitelisted in policy "security\.funcs\.getenv".*`)
+ })
+
+ c.Run("os.GetEnv, OK", func(c *qt.C) {
+ c.Parallel()
+ cb := func(b *sitesBuilder) {
+ b.WithTemplatesAdded("index.html", `{{ os.Getenv "HUGO_FOO" }}`)
+ }
+ testVariant(c, cb, "")
+ })
+
+ c.Run("Asciidoc, denied", func(c *qt.C) {
+ c.Parallel()
+ if !asciidocext.Supports() {
+ c.Skip()
+ }
+
+ cb := func(b *sitesBuilder) {
+ b.WithContent("page.ad", "foo")
+ }
+
+ testVariant(c, cb, `(?s).*"asciidoctor" is not whitelisted in policy "security\.exec\.allow".*`)
+ })
+
+ c.Run("RST, denied", func(c *qt.C) {
+ c.Parallel()
+ if !rst.Supports() {
+ c.Skip()
+ }
+
+ cb := func(b *sitesBuilder) {
+ b.WithContent("page.rst", "foo")
+ }
+
+ if runtime.GOOS == "windows" {
+ testVariant(c, cb, `(?s).*python(\.exe)?" is not whitelisted in policy "security\.exec\.allow".*`)
+ } else {
+ testVariant(c, cb, `(?s).*"rst2html(\.py)?" is not whitelisted in policy "security\.exec\.allow".*`)
+
+ }
+
+ })
+
+ c.Run("Pandoc, denied", func(c *qt.C) {
+ c.Parallel()
+ if !pandoc.Supports() {
+ c.Skip()
+ }
+
+ cb := func(b *sitesBuilder) {
+ b.WithContent("page.pdc", "foo")
+ }
+
+ testVariant(c, cb, `"(?s).*pandoc" is not whitelisted in policy "security\.exec\.allow".*`)
+ })
+
+ c.Run("Dart SASS, OK", func(c *qt.C) {
+ c.Parallel()
+ if !dartsass.Supports() {
+ c.Skip()
+ }
+ cb := func(b *sitesBuilder) {
+ b.WithTemplatesAdded("index.html", `{{ $scss := "body { color: #333; }" | resources.FromString "foo.scss" | resources.ToCSS (dict "transpiler" "dartsass") }}`)
+ }
+ testVariant(c, cb, "")
+ })
+
+ c.Run("Dart SASS, denied", func(c *qt.C) {
+ c.Parallel()
+ if !dartsass.Supports() {
+ c.Skip()
+ }
+ cb := func(b *sitesBuilder) {
+ b.WithConfigFile("toml", `
+ [security]
+ [security.exec]
+ allow="none"
+
+ `)
+ b.WithTemplatesAdded("index.html", `{{ $scss := "body { color: #333; }" | resources.FromString "foo.scss" | resources.ToCSS (dict "transpiler" "dartsass") }}`)
+ }
+ testVariant(c, cb, `(?s).*"dart-sass-embedded" is not whitelisted in policy "security\.exec\.allow".*`)
+ })
+
+ c.Run("resources.GetRemote, OK", func(c *qt.C) {
+ c.Parallel()
+ httpTestVariant(c, `{{ $json := resources.GetRemote "%[1]s/fruits.json" }}{{ $json.Content }}`, "", nil)
+ })
+
+ c.Run("resources.GetRemote, denied method", func(c *qt.C) {
+ c.Parallel()
+ httpTestVariant(c, `{{ $json := resources.GetRemote "%[1]s/fruits.json" (dict "method" "DELETE" ) }}{{ $json.Content }}`, `(?s).*"DELETE" is not whitelisted in policy "security\.http\.method".*`, nil)
+ })
+
+ c.Run("resources.GetRemote, denied URL", func(c *qt.C) {
+ c.Parallel()
+ httpTestVariant(c, `{{ $json := resources.GetRemote "%[1]s/fruits.json" }}{{ $json.Content }}`, `(?s).*is not whitelisted in policy "security\.http\.urls".*`,
+ func(b *sitesBuilder) {
+ b.WithConfigFile("toml", `
+[security]
+[security.http]
+urls="none"
+`)
+ })
+ })
+
+ c.Run("getJSON, OK", func(c *qt.C) {
+ c.Parallel()
+ httpTestVariant(c, `{{ $json := getJSON "%[1]s/fruits.json" }}{{ $json.Content }}`, "", nil)
+ })
+
+ c.Run("getJSON, denied URL", func(c *qt.C) {
+ c.Parallel()
+ httpTestVariant(c, `{{ $json := getJSON "%[1]s/fruits.json" }}{{ $json.Content }}`, `(?s).*is not whitelisted in policy "security\.http\.urls".*`,
+ func(b *sitesBuilder) {
+ b.WithConfigFile("toml", `
+[security]
+[security.http]
+urls="none"
+`)
+ })
+ })
+
+ c.Run("getCSV, denied URL", func(c *qt.C) {
+ c.Parallel()
+ httpTestVariant(c, `{{ $d := getCSV ";" "%[1]s/cities.csv" }}{{ $d.Content }}`, `(?s).*is not whitelisted in policy "security\.http\.urls".*`,
+ func(b *sitesBuilder) {
+ b.WithConfigFile("toml", `
+[security]
+[security.http]
+urls="none"
+`)
+ })
+ })
+
+}
diff --git a/hugolib/shortcode.go b/hugolib/shortcode.go
new file mode 100644
index 000000000..33767fc68
--- /dev/null
+++ b/hugolib/shortcode.go
@@ -0,0 +1,726 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "path"
+ "reflect"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/herrors"
+
+ "github.com/gohugoio/hugo/parser/pageparser"
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/common/urls"
+ "github.com/gohugoio/hugo/output"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+var (
+ _ urls.RefLinker = (*ShortcodeWithPage)(nil)
+ _ pageWrapper = (*ShortcodeWithPage)(nil)
+ _ text.Positioner = (*ShortcodeWithPage)(nil)
+)
+
+// ShortcodeWithPage is the "." context in a shortcode template.
+type ShortcodeWithPage struct {
+ Params any
+ Inner template.HTML
+ Page page.Page
+ Parent *ShortcodeWithPage
+ Name string
+ IsNamedParams bool
+
+ // Zero-based ordinal in relation to its parent. If the parent is the page itself,
+ // this ordinal will represent the position of this shortcode in the page content.
+ Ordinal int
+
+ // Indentation before the opening shortcode in the source.
+ indentation string
+
+ innerDeindentInit sync.Once
+ innerDeindent template.HTML
+
+ // pos is the position in bytes in the source file. Used for error logging.
+ posInit sync.Once
+ posOffset int
+ pos text.Position
+
+ scratch *maps.Scratch
+}
+
+// InnerDeindent returns the (potentially de-indented) inner content of the shortcode.
+func (scp *ShortcodeWithPage) InnerDeindent() template.HTML {
+ if scp.indentation == "" {
+ return scp.Inner
+ }
+ scp.innerDeindentInit.Do(func() {
+ b := bp.GetBuffer()
+ text.VisitLinesAfter(string(scp.Inner), func(s string) {
+ if strings.HasPrefix(s, scp.indentation) {
+ b.WriteString(strings.TrimPrefix(s, scp.indentation))
+ } else {
+ b.WriteString(s)
+ }
+ })
+ scp.innerDeindent = template.HTML(b.String())
+ bp.PutBuffer(b)
+ })
+
+ return scp.innerDeindent
+}
+
+// Position returns this shortcode's detailed position. Note that this information
+// may be expensive to calculate, so only use this in error situations.
+func (scp *ShortcodeWithPage) Position() text.Position {
+ scp.posInit.Do(func() {
+ if p, ok := mustUnwrapPage(scp.Page).(pageContext); ok {
+ scp.pos = p.posOffset(scp.posOffset)
+ }
+ })
+ return scp.pos
+}
+
+// Site returns information about the current site.
+func (scp *ShortcodeWithPage) Site() page.Site {
+ return scp.Page.Site()
+}
+
+// Ref is a shortcut to the Ref method on Page. It passes itself as a context
+// to get better error messages.
+func (scp *ShortcodeWithPage) Ref(args map[string]any) (string, error) {
+ return scp.Page.RefFrom(args, scp)
+}
+
+// RelRef is a shortcut to the RelRef method on Page. It passes itself as a context
+// to get better error messages.
+func (scp *ShortcodeWithPage) RelRef(args map[string]any) (string, error) {
+ return scp.Page.RelRefFrom(args, scp)
+}
+
+// Scratch returns a scratch-pad scoped for this shortcode. This can be used
+// as a temporary storage for variables, counters etc.
+func (scp *ShortcodeWithPage) Scratch() *maps.Scratch {
+ if scp.scratch == nil {
+ scp.scratch = maps.NewScratch()
+ }
+ return scp.scratch
+}
+
+// Get is a convenience method to look up shortcode parameters by its key.
+func (scp *ShortcodeWithPage) Get(key any) any {
+ if scp.Params == nil {
+ return nil
+ }
+ if reflect.ValueOf(scp.Params).Len() == 0 {
+ return nil
+ }
+
+ var x reflect.Value
+
+ switch key.(type) {
+ case int64, int32, int16, int8, int:
+ if reflect.TypeOf(scp.Params).Kind() == reflect.Map {
+ // We treat this as a non error, so people can do similar to
+ // {{ $myParam := .Get "myParam" | default .Get 0 }}
+ // Without having to do additional checks.
+ return nil
+ } else if reflect.TypeOf(scp.Params).Kind() == reflect.Slice {
+ idx := int(reflect.ValueOf(key).Int())
+ ln := reflect.ValueOf(scp.Params).Len()
+ if idx > ln-1 {
+ return ""
+ }
+ x = reflect.ValueOf(scp.Params).Index(idx)
+ }
+ case string:
+ if reflect.TypeOf(scp.Params).Kind() == reflect.Map {
+ x = reflect.ValueOf(scp.Params).MapIndex(reflect.ValueOf(key))
+ if !x.IsValid() {
+ return ""
+ }
+ } else if reflect.TypeOf(scp.Params).Kind() == reflect.Slice {
+ // We treat this as a non error, so people can do similar to
+ // {{ $myParam := .Get "myParam" | default .Get 0 }}
+ // Without having to do additional checks.
+ return nil
+ }
+ }
+
+ return x.Interface()
+}
+
+func (scp *ShortcodeWithPage) page() page.Page {
+ return scp.Page
+}
+
+// Note - this value must not contain any markup syntax
+const shortcodePlaceholderPrefix = "HAHAHUGOSHORTCODE"
+
+func createShortcodePlaceholder(id string, ordinal int) string {
+ return shortcodePlaceholderPrefix + "-" + id + strconv.Itoa(ordinal) + "-HBHB"
+}
+
+type shortcode struct {
+ name string
+ isInline bool // inline shortcode. Any inner will be a Go template.
+ isClosing bool // whether a closing tag was provided
+ inner []any // string or nested shortcode
+ params any // map or array
+ ordinal int
+ err error
+
+ indentation string // indentation from source.
+
+ info tpl.Info // One of the output formats (arbitrary)
+ templs []tpl.Template // All output formats
+
+ // If set, the rendered shortcode is sent as part of the surrounding content
+ // to Goldmark and similar.
+ // Before Hug0 0.55 we didn't send any shortcode output to the markup
+ // renderer, and this flag told Hugo to process the {{ .Inner }} content
+ // separately.
+ // The old behaviour can be had by starting your shortcode template with:
+ // {{ $_hugo_config := `{ "version": 1 }`}}
+ doMarkup bool
+
+ // the placeholder in the source when passed to Goldmark etc.
+ // This also identifies the rendered shortcode.
+ placeholder string
+
+ pos int // the position in bytes in the source file
+ length int // the length in bytes in the source file
+}
+
+func (s shortcode) insertPlaceholder() bool {
+ return !s.doMarkup || s.configVersion() == 1
+}
+
+func (s shortcode) configVersion() int {
+ if s.info == nil {
+ // Not set for inline shortcodes.
+ return 2
+ }
+
+ return s.info.ParseInfo().Config.Version
+}
+
+func (s shortcode) innerString() string {
+ var sb strings.Builder
+
+ for _, inner := range s.inner {
+ sb.WriteString(inner.(string))
+ }
+
+ return sb.String()
+}
+
+func (sc shortcode) String() string {
+ // for testing (mostly), so any change here will break tests!
+ var params any
+ switch v := sc.params.(type) {
+ case map[string]any:
+ // sort the keys so test assertions won't fail
+ var keys []string
+ for k := range v {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ tmp := make(map[string]any)
+
+ for _, k := range keys {
+ tmp[k] = v[k]
+ }
+ params = tmp
+
+ default:
+ // use it as is
+ params = sc.params
+ }
+
+ return fmt.Sprintf("%s(%q, %t){%s}", sc.name, params, sc.doMarkup, sc.inner)
+}
+
+type shortcodeHandler struct {
+ p *pageState
+
+ s *Site
+
+ // Ordered list of shortcodes for a page.
+ shortcodes []*shortcode
+
+ // All the shortcode names in this set.
+ nameSet map[string]bool
+ nameSetMu sync.RWMutex
+
+ // Configuration
+ enableInlineShortcodes bool
+}
+
+func newShortcodeHandler(p *pageState, s *Site) *shortcodeHandler {
+ sh := &shortcodeHandler{
+ p: p,
+ s: s,
+ enableInlineShortcodes: s.ExecHelper.Sec().EnableInlineShortcodes,
+ shortcodes: make([]*shortcode, 0, 4),
+ nameSet: make(map[string]bool),
+ }
+
+ return sh
+}
+
+const (
+ innerNewlineRegexp = "\n"
+ innerCleanupRegexp = `\A<p>(.*)</p>\n\z`
+ innerCleanupExpand = "$1"
+)
+
+func renderShortcode(
+ level int,
+ s *Site,
+ tplVariants tpl.TemplateVariants,
+ sc *shortcode,
+ parent *ShortcodeWithPage,
+ p *pageState) (string, bool, error) {
+ var tmpl tpl.Template
+
+ // Tracks whether this shortcode or any of its children has template variations
+ // in other languages or output formats. We are currently only interested in
+ // the output formats, so we may get some false positives -- we
+ // should improve on that.
+ var hasVariants bool
+
+ if sc.isInline {
+ if !p.s.ExecHelper.Sec().EnableInlineShortcodes {
+ return "", false, nil
+ }
+ templName := path.Join("_inline_shortcode", p.File().Path(), sc.name)
+ if sc.isClosing {
+ templStr := sc.innerString()
+
+ var err error
+ tmpl, err = s.TextTmpl().Parse(templName, templStr)
+ if err != nil {
+ fe := herrors.NewFileErrorFromName(err, p.File().Filename())
+ pos := fe.Position()
+ pos.LineNumber += p.posOffset(sc.pos).LineNumber
+ fe = fe.UpdatePosition(pos)
+ return "", false, p.wrapError(fe)
+ }
+
+ } else {
+ // Re-use of shortcode defined earlier in the same page.
+ var found bool
+ tmpl, found = s.TextTmpl().Lookup(templName)
+ if !found {
+ return "", false, fmt.Errorf("no earlier definition of shortcode %q found", sc.name)
+ }
+ }
+ } else {
+ var found, more bool
+ tmpl, found, more = s.Tmpl().LookupVariant(sc.name, tplVariants)
+ if !found {
+ s.Log.Errorf("Unable to locate template for shortcode %q in page %q", sc.name, p.File().Path())
+ return "", false, nil
+ }
+ hasVariants = hasVariants || more
+ }
+
+ data := &ShortcodeWithPage{Ordinal: sc.ordinal, posOffset: sc.pos, indentation: sc.indentation, Params: sc.params, Page: newPageForShortcode(p), Parent: parent, Name: sc.name}
+ if sc.params != nil {
+ data.IsNamedParams = reflect.TypeOf(sc.params).Kind() == reflect.Map
+ }
+
+ if len(sc.inner) > 0 {
+ var inner string
+ for _, innerData := range sc.inner {
+ switch innerData := innerData.(type) {
+ case string:
+ inner += innerData
+ case *shortcode:
+ s, more, err := renderShortcode(level+1, s, tplVariants, innerData, data, p)
+ if err != nil {
+ return "", false, err
+ }
+ hasVariants = hasVariants || more
+ inner += s
+ default:
+ s.Log.Errorf("Illegal state on shortcode rendering of %q in page %q. Illegal type in inner data: %s ",
+ sc.name, p.File().Path(), reflect.TypeOf(innerData))
+ return "", false, nil
+ }
+ }
+
+ // Pre Hugo 0.55 this was the behaviour even for the outer-most
+ // shortcode.
+ if sc.doMarkup && (level > 0 || sc.configVersion() == 1) {
+ var err error
+ b, err := p.pageOutput.cp.renderContent([]byte(inner), false)
+ if err != nil {
+ return "", false, err
+ }
+
+ newInner := b.Bytes()
+
+ // If the type is “” (unknown) or “markdown”, we assume the markdown
+ // generation has been performed. Given the input: `a line`, markdown
+ // specifies the HTML `<p>a line</p>\n`. When dealing with documents as a
+ // whole, this is OK. When dealing with an `{{ .Inner }}` block in Hugo,
+ // this is not so good. This code does two things:
+ //
+ // 1. Check to see if inner has a newline in it. If so, the Inner data is
+ // unchanged.
+ // 2 If inner does not have a newline, strip the wrapping <p> block and
+ // the newline.
+ switch p.m.markup {
+ case "", "markdown":
+ if match, _ := regexp.MatchString(innerNewlineRegexp, inner); !match {
+ cleaner, err := regexp.Compile(innerCleanupRegexp)
+
+ if err == nil {
+ newInner = cleaner.ReplaceAll(newInner, []byte(innerCleanupExpand))
+ }
+ }
+ }
+
+ // TODO(bep) we may have plain text inner templates.
+ data.Inner = template.HTML(newInner)
+ } else {
+ data.Inner = template.HTML(inner)
+ }
+
+ }
+
+ result, err := renderShortcodeWithPage(s.Tmpl(), tmpl, data)
+
+ if err != nil && sc.isInline {
+ fe := herrors.NewFileErrorFromName(err, p.File().Filename())
+ pos := fe.Position()
+ pos.LineNumber += p.posOffset(sc.pos).LineNumber
+ fe = fe.UpdatePosition(pos)
+ return "", false, fe
+ }
+
+ if len(sc.inner) == 0 && len(sc.indentation) > 0 {
+ b := bp.GetBuffer()
+ i := 0
+ text.VisitLinesAfter(result, func(line string) {
+ // The first line is correctly indented.
+ if i > 0 {
+ b.WriteString(sc.indentation)
+ }
+ i++
+ b.WriteString(line)
+ })
+
+ result = b.String()
+ bp.PutBuffer(b)
+ }
+
+ return result, hasVariants, err
+}
+
+func (s *shortcodeHandler) hasShortcodes() bool {
+ return s != nil && len(s.shortcodes) > 0
+}
+
+func (s *shortcodeHandler) addName(name string) {
+ s.nameSetMu.Lock()
+ defer s.nameSetMu.Unlock()
+ s.nameSet[name] = true
+}
+
+func (s *shortcodeHandler) transferNames(in *shortcodeHandler) {
+ s.nameSetMu.Lock()
+ defer s.nameSetMu.Unlock()
+ for k := range in.nameSet {
+ s.nameSet[k] = true
+ }
+
+}
+
+func (s *shortcodeHandler) hasName(name string) bool {
+ s.nameSetMu.RLock()
+ defer s.nameSetMu.RUnlock()
+ _, ok := s.nameSet[name]
+ return ok
+}
+
+func (s *shortcodeHandler) renderShortcodesForPage(p *pageState, f output.Format) (map[string]string, bool, error) {
+ rendered := make(map[string]string)
+
+ tplVariants := tpl.TemplateVariants{
+ Language: p.Language().Lang,
+ OutputFormat: f,
+ }
+
+ var hasVariants bool
+
+ for _, v := range s.shortcodes {
+ s, more, err := renderShortcode(0, s.s, tplVariants, v, nil, p)
+ if err != nil {
+ err = p.parseError(fmt.Errorf("failed to render shortcode %q: %w", v.name, err), p.source.parsed.Input(), v.pos)
+ return nil, false, err
+ }
+ hasVariants = hasVariants || more
+ rendered[v.placeholder] = s
+
+ }
+
+ return rendered, hasVariants, nil
+}
+
+var errShortCodeIllegalState = errors.New("Illegal shortcode state")
+
+func (s *shortcodeHandler) parseError(err error, input []byte, pos int) error {
+ if s.p != nil {
+ return s.p.parseError(err, input, pos)
+ }
+ return err
+}
+
+// pageTokens state:
+// - before: positioned just before the shortcode start
+// - after: shortcode(s) consumed (plural when they are nested)
+func (s *shortcodeHandler) extractShortcode(ordinal, level int, pt *pageparser.Iterator) (*shortcode, error) {
+ if s == nil {
+ panic("handler nil")
+ }
+ sc := &shortcode{ordinal: ordinal}
+
+ // Back up one to identify any indentation.
+ if pt.Pos() > 0 {
+ pt.Backup()
+ item := pt.Next()
+ if item.IsIndentation() {
+ sc.indentation = string(item.Val)
+ }
+ }
+
+ cnt := 0
+ nestedOrdinal := 0
+ nextLevel := level + 1
+ const errorPrefix = "failed to extract shortcode"
+
+ fail := func(err error, i pageparser.Item) error {
+ return s.parseError(fmt.Errorf("%s: %w", errorPrefix, err), pt.Input(), i.Pos)
+ }
+
+Loop:
+ for {
+ currItem := pt.Next()
+ switch {
+ case currItem.IsLeftShortcodeDelim():
+ next := pt.Peek()
+ if next.IsRightShortcodeDelim() {
+ // no name: {{< >}} or {{% %}}
+ return sc, errors.New("shortcode has no name")
+ }
+ if next.IsShortcodeClose() {
+ continue
+ }
+
+ if cnt > 0 {
+ // nested shortcode; append it to inner content
+ pt.Backup()
+ nested, err := s.extractShortcode(nestedOrdinal, nextLevel, pt)
+ nestedOrdinal++
+ if nested != nil && nested.name != "" {
+ s.addName(nested.name)
+ }
+
+ if err == nil {
+ sc.inner = append(sc.inner, nested)
+ } else {
+ return sc, err
+ }
+
+ } else {
+ sc.doMarkup = currItem.IsShortcodeMarkupDelimiter()
+ }
+
+ cnt++
+
+ case currItem.IsRightShortcodeDelim():
+ // we trust the template on this:
+ // if there's no inner, we're done
+ if !sc.isInline {
+ if sc.info == nil {
+ // This should not happen.
+ return sc, fail(errors.New("BUG: template info not set"), currItem)
+ }
+ if !sc.info.ParseInfo().IsInner {
+ return sc, nil
+ }
+ }
+
+ case currItem.IsShortcodeClose():
+ next := pt.Peek()
+ if !sc.isInline {
+ if sc.info == nil || !sc.info.ParseInfo().IsInner {
+ if next.IsError() {
+ // return that error, more specific
+ continue
+ }
+ return sc, fail(fmt.Errorf("shortcode %q has no .Inner, yet a closing tag was provided", next.Val), next)
+ }
+ }
+ if next.IsRightShortcodeDelim() {
+ // self-closing
+ pt.Consume(1)
+ } else {
+ sc.isClosing = true
+ pt.Consume(2)
+ }
+
+ return sc, nil
+ case currItem.IsText():
+ sc.inner = append(sc.inner, currItem.ValStr())
+ case currItem.Type == pageparser.TypeEmoji:
+ // TODO(bep) avoid the duplication of these "text cases", to prevent
+ // more of #6504 in the future.
+ val := currItem.ValStr()
+ if emoji := helpers.Emoji(val); emoji != nil {
+ sc.inner = append(sc.inner, string(emoji))
+ } else {
+ sc.inner = append(sc.inner, val)
+ }
+ case currItem.IsShortcodeName():
+
+ sc.name = currItem.ValStr()
+
+ // Used to check if the template expects inner content.
+ templs := s.s.Tmpl().LookupVariants(sc.name)
+ if templs == nil {
+ return nil, fmt.Errorf("%s: template for shortcode %q not found", errorPrefix, sc.name)
+ }
+
+ sc.info = templs[0].(tpl.Info)
+ sc.templs = templs
+ case currItem.IsInlineShortcodeName():
+ sc.name = currItem.ValStr()
+ sc.isInline = true
+ case currItem.IsShortcodeParam():
+ if !pt.IsValueNext() {
+ continue
+ } else if pt.Peek().IsShortcodeParamVal() {
+ // named params
+ if sc.params == nil {
+ params := make(map[string]any)
+ params[currItem.ValStr()] = pt.Next().ValTyped()
+ sc.params = params
+ } else {
+ if params, ok := sc.params.(map[string]any); ok {
+ params[currItem.ValStr()] = pt.Next().ValTyped()
+ } else {
+ return sc, errShortCodeIllegalState
+ }
+ }
+ } else {
+ // positional params
+ if sc.params == nil {
+ var params []any
+ params = append(params, currItem.ValTyped())
+ sc.params = params
+ } else {
+ if params, ok := sc.params.([]any); ok {
+ params = append(params, currItem.ValTyped())
+ sc.params = params
+ } else {
+ return sc, errShortCodeIllegalState
+ }
+ }
+ }
+ case currItem.IsDone():
+ // handled by caller
+ pt.Backup()
+ break Loop
+
+ }
+ }
+ return sc, nil
+}
+
+// Replace prefixed shortcode tokens with the real content.
+// Note: This function will rewrite the input slice.
+func replaceShortcodeTokens(source []byte, replacements map[string]string) ([]byte, error) {
+ if len(replacements) == 0 {
+ return source, nil
+ }
+
+ start := 0
+
+ pre := []byte(shortcodePlaceholderPrefix)
+ post := []byte("HBHB")
+ pStart := []byte("<p>")
+ pEnd := []byte("</p>")
+
+ k := bytes.Index(source[start:], pre)
+
+ for k != -1 {
+ j := start + k
+ postIdx := bytes.Index(source[j:], post)
+ if postIdx < 0 {
+ // this should never happen, but let the caller decide to panic or not
+ return nil, errors.New("illegal state in content; shortcode token missing end delim")
+ }
+
+ end := j + postIdx + 4
+
+ newVal := []byte(replacements[string(source[j:end])])
+
+ // Issue #1148: Check for wrapping p-tags <p>
+ if j >= 3 && bytes.Equal(source[j-3:j], pStart) {
+ if (k+4) < len(source) && bytes.Equal(source[end:end+4], pEnd) {
+ j -= 3
+ end += 4
+ }
+ }
+
+ // This and other cool slice tricks: https://github.com/golang/go/wiki/SliceTricks
+ source = append(source[:j], append(newVal, source[end:]...)...)
+ start = j
+ k = bytes.Index(source[start:], pre)
+
+ }
+
+ return source, nil
+}
+
+func renderShortcodeWithPage(h tpl.TemplateHandler, tmpl tpl.Template, data *ShortcodeWithPage) (string, error) {
+ buffer := bp.GetBuffer()
+ defer bp.PutBuffer(buffer)
+
+ err := h.Execute(tmpl, buffer, data)
+ if err != nil {
+ return "", fmt.Errorf("failed to process shortcode: %w", err)
+ }
+ return buffer.String(), nil
+}
diff --git a/hugolib/shortcode_page.go b/hugolib/shortcode_page.go
new file mode 100644
index 000000000..5a56e434f
--- /dev/null
+++ b/hugolib/shortcode_page.go
@@ -0,0 +1,75 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "html/template"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+var tocShortcodePlaceholder = createShortcodePlaceholder("TOC", 0)
+
+// This is sent to the shortcodes. They cannot access the content
+// they're a part of. It would cause an infinite regress.
+//
+// Go doesn't support virtual methods, so this careful dance is currently (I think)
+// the best we can do.
+type pageForShortcode struct {
+ page.PageWithoutContent
+ page.ContentProvider
+
+ // We need to replace it after we have rendered it, so provide a
+ // temporary placeholder.
+ toc template.HTML
+
+ p *pageState
+}
+
+func newPageForShortcode(p *pageState) page.Page {
+ return &pageForShortcode{
+ PageWithoutContent: p,
+ ContentProvider: page.NopPage,
+ toc: template.HTML(tocShortcodePlaceholder),
+ p: p,
+ }
+}
+
+func (p *pageForShortcode) page() page.Page {
+ return p.PageWithoutContent.(page.Page)
+}
+
+func (p *pageForShortcode) TableOfContents() template.HTML {
+ p.p.enablePlaceholders()
+ return p.toc
+}
+
+// This is what is sent into the content render hooks (link, image).
+type pageForRenderHooks struct {
+ page.PageWithoutContent
+ page.TableOfContentsProvider
+ page.ContentProvider
+}
+
+func newPageForRenderHook(p *pageState) page.Page {
+ return &pageForRenderHooks{
+ PageWithoutContent: p,
+ ContentProvider: page.NopPage,
+ TableOfContentsProvider: page.NopPage,
+ }
+}
+
+func (p *pageForRenderHooks) page() page.Page {
+ return p.PageWithoutContent.(page.Page)
+}
diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go
new file mode 100644
index 000000000..5b8a5c295
--- /dev/null
+++ b/hugolib/shortcode_test.go
@@ -0,0 +1,1055 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/parser/pageparser"
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestExtractShortcodes(t *testing.T) {
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ b.WithTemplates(
+ "default/single.html", `EMPTY`,
+ "_internal/shortcodes/tag.html", `tag`,
+ "_internal/shortcodes/legacytag.html", `{{ $_hugo_config := "{ \"version\": 1 }" }}tag`,
+ "_internal/shortcodes/sc1.html", `sc1`,
+ "_internal/shortcodes/sc2.html", `sc2`,
+ "_internal/shortcodes/inner.html", `{{with .Inner }}{{ . }}{{ end }}`,
+ "_internal/shortcodes/inner2.html", `{{.Inner}}`,
+ "_internal/shortcodes/inner3.html", `{{.Inner}}`,
+ ).WithContent("page.md", `---
+title: "Shortcodes Galore!"
+---
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ s := b.H.Sites[0]
+
+ // Make it more regexp friendly
+ strReplacer := strings.NewReplacer("[", "{", "]", "}")
+
+ str := func(s *shortcode) string {
+ if s == nil {
+ return "<nil>"
+ }
+
+ var version int
+ if s.info != nil {
+ version = s.info.ParseInfo().Config.Version
+ }
+ return strReplacer.Replace(fmt.Sprintf("%s;inline:%t;closing:%t;inner:%v;params:%v;ordinal:%d;markup:%t;version:%d;pos:%d",
+ s.name, s.isInline, s.isClosing, s.inner, s.params, s.ordinal, s.doMarkup, version, s.pos))
+ }
+
+ regexpCheck := func(re string) func(c *qt.C, shortcode *shortcode, err error) {
+ return func(c *qt.C, shortcode *shortcode, err error) {
+ c.Assert(err, qt.IsNil)
+ c.Assert(str(shortcode), qt.Matches, ".*"+re+".*")
+ }
+ }
+
+ for _, test := range []struct {
+ name string
+ input string
+ check func(c *qt.C, shortcode *shortcode, err error)
+ }{
+ {"one shortcode, no markup", "{{< tag >}}", regexpCheck("tag.*closing:false.*markup:false")},
+ {"one shortcode, markup", "{{% tag %}}", regexpCheck("tag.*closing:false.*markup:true;version:2")},
+ {"one shortcode, markup, legacy", "{{% legacytag %}}", regexpCheck("tag.*closing:false.*markup:true;version:1")},
+ {"outer shortcode markup", "{{% inner %}}{{< tag >}}{{% /inner %}}", regexpCheck("inner.*closing:true.*markup:true")},
+ {"inner shortcode markup", "{{< inner >}}{{% tag %}}{{< /inner >}}", regexpCheck("inner.*closing:true.*;markup:false;version:2")},
+ {"one pos param", "{{% tag param1 %}}", regexpCheck("tag.*params:{param1}")},
+ {"two pos params", "{{< tag param1 param2>}}", regexpCheck("tag.*params:{param1 param2}")},
+ {"one named param", `{{% tag param1="value" %}}`, regexpCheck("tag.*params:map{param1:value}")},
+ {"two named params", `{{< tag param1="value1" param2="value2" >}}`, regexpCheck("tag.*params:map{param\\d:value\\d param\\d:value\\d}")},
+ {"inner", `{{< inner >}}Inner Content{{< / inner >}}`, regexpCheck("inner;inline:false;closing:true;inner:{Inner Content};")},
+ // issue #934
+ {"inner self-closing", `{{< inner />}}`, regexpCheck("inner;.*inner:{}")},
+ {
+ "nested inner", `{{< inner >}}Inner Content->{{% inner2 param1 %}}inner2txt{{% /inner2 %}}Inner close->{{< / inner >}}`,
+ regexpCheck("inner;.*inner:{Inner Content->.*Inner close->}"),
+ },
+ {
+ "nested, nested inner", `{{< inner >}}inner2->{{% inner2 param1 %}}inner2txt->inner3{{< inner3>}}inner3txt{{</ inner3 >}}{{% /inner2 %}}final close->{{< / inner >}}`,
+ regexpCheck("inner:{inner2-> inner2.*{{inner2txt->inner3.*final close->}"),
+ },
+ {"closed without content", `{{< inner param1 >}}{{< / inner >}}`, regexpCheck("inner.*inner:{}")},
+ {"inline", `{{< my.inline >}}Hi{{< /my.inline >}}`, regexpCheck("my.inline;inline:true;closing:true;inner:{Hi};")},
+ } {
+
+ test := test
+
+ t.Run(test.name, func(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ p, err := pageparser.ParseMain(strings.NewReader(test.input), pageparser.Config{})
+ c.Assert(err, qt.IsNil)
+ handler := newShortcodeHandler(nil, s)
+ iter := p.Iterator()
+
+ short, err := handler.extractShortcode(0, 0, iter)
+
+ test.check(c, short, err)
+ })
+ }
+}
+
+func TestShortcodeMultipleOutputFormats(t *testing.T) {
+ t.Parallel()
+
+ siteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+
+disableKinds = ["section", "term", "taxonomy", "RSS", "sitemap", "robotsTXT", "404"]
+
+[outputs]
+home = [ "HTML", "AMP", "Calendar" ]
+page = [ "HTML", "AMP", "JSON" ]
+
+`
+
+ pageTemplate := `---
+title: "%s"
+---
+# Doc
+
+{{< myShort >}}
+{{< noExt >}}
+{{%% onlyHTML %%}}
+
+{{< myInner >}}{{< myShort >}}{{< /myInner >}}
+
+`
+
+ pageTemplateCSVOnly := `---
+title: "%s"
+outputs: ["CSV"]
+---
+# Doc
+
+CSV: {{< myShort >}}
+`
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
+ b.WithTemplates(
+ "layouts/_default/single.html", `Single HTML: {{ .Title }}|{{ .Content }}`,
+ "layouts/_default/single.json", `Single JSON: {{ .Title }}|{{ .Content }}`,
+ "layouts/_default/single.csv", `Single CSV: {{ .Title }}|{{ .Content }}`,
+ "layouts/index.html", `Home HTML: {{ .Title }}|{{ .Content }}`,
+ "layouts/index.amp.html", `Home AMP: {{ .Title }}|{{ .Content }}`,
+ "layouts/index.ics", `Home Calendar: {{ .Title }}|{{ .Content }}`,
+ "layouts/shortcodes/myShort.html", `ShortHTML`,
+ "layouts/shortcodes/myShort.amp.html", `ShortAMP`,
+ "layouts/shortcodes/myShort.csv", `ShortCSV`,
+ "layouts/shortcodes/myShort.ics", `ShortCalendar`,
+ "layouts/shortcodes/myShort.json", `ShortJSON`,
+ "layouts/shortcodes/noExt", `ShortNoExt`,
+ "layouts/shortcodes/onlyHTML.html", `ShortOnlyHTML`,
+ "layouts/shortcodes/myInner.html", `myInner:--{{- .Inner -}}--`,
+ )
+
+ b.WithContent("_index.md", fmt.Sprintf(pageTemplate, "Home"),
+ "sect/mypage.md", fmt.Sprintf(pageTemplate, "Single"),
+ "sect/mycsvpage.md", fmt.Sprintf(pageTemplateCSVOnly, "Single CSV"),
+ )
+
+ b.Build(BuildCfg{})
+ h := b.H
+ b.Assert(len(h.Sites), qt.Equals, 1)
+
+ s := h.Sites[0]
+ home := s.getPage(page.KindHome)
+ b.Assert(home, qt.Not(qt.IsNil))
+ b.Assert(len(home.OutputFormats()), qt.Equals, 3)
+
+ b.AssertFileContent("public/index.html",
+ "Home HTML",
+ "ShortHTML",
+ "ShortNoExt",
+ "ShortOnlyHTML",
+ "myInner:--ShortHTML--",
+ )
+
+ b.AssertFileContent("public/amp/index.html",
+ "Home AMP",
+ "ShortAMP",
+ "ShortNoExt",
+ "ShortOnlyHTML",
+ "myInner:--ShortAMP--",
+ )
+
+ b.AssertFileContent("public/index.ics",
+ "Home Calendar",
+ "ShortCalendar",
+ "ShortNoExt",
+ "ShortOnlyHTML",
+ "myInner:--ShortCalendar--",
+ )
+
+ b.AssertFileContent("public/sect/mypage/index.html",
+ "Single HTML",
+ "ShortHTML",
+ "ShortNoExt",
+ "ShortOnlyHTML",
+ "myInner:--ShortHTML--",
+ )
+
+ b.AssertFileContent("public/sect/mypage/index.json",
+ "Single JSON",
+ "ShortJSON",
+ "ShortNoExt",
+ "ShortOnlyHTML",
+ "myInner:--ShortJSON--",
+ )
+
+ b.AssertFileContent("public/amp/sect/mypage/index.html",
+ // No special AMP template
+ "Single HTML",
+ "ShortAMP",
+ "ShortNoExt",
+ "ShortOnlyHTML",
+ "myInner:--ShortAMP--",
+ )
+
+ b.AssertFileContent("public/sect/mycsvpage/index.csv",
+ "Single CSV",
+ "ShortCSV",
+ )
+}
+
+func BenchmarkReplaceShortcodeTokens(b *testing.B) {
+ type input struct {
+ in []byte
+ replacements map[string]string
+ expect []byte
+ }
+
+ data := []struct {
+ input string
+ replacements map[string]string
+ expect []byte
+ }{
+ {"Hello HAHAHUGOSHORTCODE-1HBHB.", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, []byte("Hello World.")},
+ {strings.Repeat("A", 100) + " HAHAHUGOSHORTCODE-1HBHB.", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "Hello World"}, []byte(strings.Repeat("A", 100) + " Hello World.")},
+ {strings.Repeat("A", 500) + " HAHAHUGOSHORTCODE-1HBHB.", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "Hello World"}, []byte(strings.Repeat("A", 500) + " Hello World.")},
+ {strings.Repeat("ABCD ", 500) + " HAHAHUGOSHORTCODE-1HBHB.", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "Hello World"}, []byte(strings.Repeat("ABCD ", 500) + " Hello World.")},
+ {strings.Repeat("A ", 3000) + " HAHAHUGOSHORTCODE-1HBHB." + strings.Repeat("BC ", 1000) + " HAHAHUGOSHORTCODE-1HBHB.", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "Hello World"}, []byte(strings.Repeat("A ", 3000) + " Hello World." + strings.Repeat("BC ", 1000) + " Hello World.")},
+ }
+
+ in := make([]input, b.N*len(data))
+ cnt := 0
+ for i := 0; i < b.N; i++ {
+ for _, this := range data {
+ in[cnt] = input{[]byte(this.input), this.replacements, this.expect}
+ cnt++
+ }
+ }
+
+ b.ResetTimer()
+ cnt = 0
+ for i := 0; i < b.N; i++ {
+ for j := range data {
+ currIn := in[cnt]
+ cnt++
+ results, err := replaceShortcodeTokens(currIn.in, currIn.replacements)
+ if err != nil {
+ b.Fatalf("[%d] failed: %s", i, err)
+ continue
+ }
+ if len(results) != len(currIn.expect) {
+ b.Fatalf("[%d] replaceShortcodeTokens, got \n%q but expected \n%q", j, results, currIn.expect)
+ }
+
+ }
+ }
+}
+
+func BenchmarkShortcodesInSite(b *testing.B) {
+ files := `
+-- config.toml --
+-- layouts/shortcodes/mark1.md --
+{{ .Inner }}
+-- layouts/shortcodes/mark2.md --
+1. Item Mark2 1
+1. Item Mark2 2
+ 1. Item Mark2 2-1
+1. Item Mark2 3
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ content := `
+---
+title: "Markdown Shortcode"
+---
+
+## List
+
+1. List 1
+ {{§ mark1 §}}
+ 1. Item Mark1 1
+ 1. Item Mark1 2
+ {{§ mark2 §}}
+ {{§ /mark1 §}}
+
+`
+
+ for i := 1; i < 100; i++ {
+ files += fmt.Sprintf("\n-- content/posts/p%d.md --\n"+content, i+1)
+ }
+ files = strings.ReplaceAll(files, "§", "%")
+
+ cfg := IntegrationTestConfig{
+ T: b,
+ TxtarString: files,
+ }
+ builders := make([]*IntegrationTestBuilder, b.N)
+
+ for i := range builders {
+ builders[i] = NewIntegrationTestBuilder(cfg)
+ }
+
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ builders[i].Build()
+ }
+}
+
+func TestReplaceShortcodeTokens(t *testing.T) {
+ t.Parallel()
+ for i, this := range []struct {
+ input string
+ prefix string
+ replacements map[string]string
+ expect any
+ }{
+ {"Hello HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello World."},
+ {"Hello HAHAHUGOSHORTCODE-1@}@.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, false},
+ {"HAHAHUGOSHORTCODE2-1HBHB", "PREFIX2", map[string]string{"HAHAHUGOSHORTCODE2-1HBHB": "World"}, "World"},
+ {"Hello World!", "PREFIX2", map[string]string{}, "Hello World!"},
+ {"!HAHAHUGOSHORTCODE-1HBHB", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "!World"},
+ {"HAHAHUGOSHORTCODE-1HBHB!", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "World!"},
+ {"!HAHAHUGOSHORTCODE-1HBHB!", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "!World!"},
+ {"_{_PREFIX-1HBHB", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "_{_PREFIX-1HBHB"},
+ {"Hello HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "To You My Old Friend Who Told Me This Fantastic Story"}, "Hello To You My Old Friend Who Told Me This Fantastic Story."},
+ {"A HAHAHUGOSHORTCODE-1HBHB asdf HAHAHUGOSHORTCODE-2HBHB.", "A", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "v1", "HAHAHUGOSHORTCODE-2HBHB": "v2"}, "A v1 asdf v2."},
+ {"Hello HAHAHUGOSHORTCODE2-1HBHB. Go HAHAHUGOSHORTCODE2-2HBHB, Go, Go HAHAHUGOSHORTCODE2-3HBHB Go Go!.", "PREFIX2", map[string]string{"HAHAHUGOSHORTCODE2-1HBHB": "Europe", "HAHAHUGOSHORTCODE2-2HBHB": "Jonny", "HAHAHUGOSHORTCODE2-3HBHB": "Johnny"}, "Hello Europe. Go Jonny, Go, Go Johnny Go Go!."},
+ {"A HAHAHUGOSHORTCODE-2HBHB HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "A B A."},
+ {"A HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A"}, false},
+ {"A HAHAHUGOSHORTCODE-1HBHB but not the second.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "A A but not the second."},
+ {"An HAHAHUGOSHORTCODE-1HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "An A."},
+ {"An HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B"}, "An A B."},
+ {"A HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2HBHB HAHAHUGOSHORTCODE-3HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-3HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B", "HAHAHUGOSHORTCODE-3HBHB": "C"}, "A A B C A C."},
+ {"A HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-2HBHB HAHAHUGOSHORTCODE-3HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-3HBHB.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "A", "HAHAHUGOSHORTCODE-2HBHB": "B", "HAHAHUGOSHORTCODE-3HBHB": "C"}, "A A B C A C."},
+ // Issue #1148 remove p-tags 10 =>
+ {"Hello <p>HAHAHUGOSHORTCODE-1HBHB</p>. END.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello World. END."},
+ {"Hello <p>HAHAHUGOSHORTCODE-1HBHB</p>. <p>HAHAHUGOSHORTCODE-2HBHB</p> END.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World", "HAHAHUGOSHORTCODE-2HBHB": "THE"}, "Hello World. THE END."},
+ {"Hello <p>HAHAHUGOSHORTCODE-1HBHB. END</p>.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello <p>World. END</p>."},
+ {"<p>Hello HAHAHUGOSHORTCODE-1HBHB</p>. END.", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "<p>Hello World</p>. END."},
+ {"Hello <p>HAHAHUGOSHORTCODE-1HBHB12", "PREFIX", map[string]string{"HAHAHUGOSHORTCODE-1HBHB": "World"}, "Hello <p>World12"},
+ {
+ "Hello HAHAHUGOSHORTCODE-1HBHB. HAHAHUGOSHORTCODE-1HBHB-HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-1HBHB HAHAHUGOSHORTCODE-1HBHB END", "P",
+ map[string]string{"HAHAHUGOSHORTCODE-1HBHB": strings.Repeat("BC", 100)},
+ fmt.Sprintf("Hello %s. %s-%s %s %s %s END",
+ strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100), strings.Repeat("BC", 100)),
+ },
+ } {
+
+ results, err := replaceShortcodeTokens([]byte(this.input), this.replacements)
+
+ if b, ok := this.expect.(bool); ok && !b {
+ if err == nil {
+ t.Errorf("[%d] replaceShortcodeTokens didn't return an expected error", i)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] failed: %s", i, err)
+ continue
+ }
+ if !reflect.DeepEqual(results, []byte(this.expect.(string))) {
+ t.Errorf("[%d] replaceShortcodeTokens, got \n%q but expected \n%q", i, results, this.expect)
+ }
+ }
+
+ }
+}
+
+func TestShortcodeGetContent(t *testing.T) {
+ t.Parallel()
+
+ contentShortcode := `
+{{- $t := .Get 0 -}}
+{{- $p := .Get 1 -}}
+{{- $k := .Get 2 -}}
+{{- $page := $.Page.Site.GetPage "page" $p -}}
+{{ if $page }}
+{{- if eq $t "bundle" -}}
+{{- .Scratch.Set "p" ($page.Resources.GetMatch (printf "%s*" $k)) -}}
+{{- else -}}
+{{- $.Scratch.Set "p" $page -}}
+{{- end -}}P1:{{ .Page.Content }}|P2:{{ $p := ($.Scratch.Get "p") }}{{ $p.Title }}/{{ $p.Content }}|
+{{- else -}}
+{{- errorf "Page %s is nil" $p -}}
+{{- end -}}
+`
+
+ var templates []string
+ var content []string
+
+ contentWithShortcodeTemplate := `---
+title: doc%s
+weight: %d
+---
+Logo:{{< c "bundle" "b1" "logo.png" >}}:P1: {{< c "page" "section1/p1" "" >}}:BP1:{{< c "bundle" "b1" "bp1" >}}`
+
+ simpleContentTemplate := `---
+title: doc%s
+weight: %d
+---
+C-%s`
+
+ templates = append(templates, []string{"shortcodes/c.html", contentShortcode}...)
+ templates = append(templates, []string{"_default/single.html", "Single Content: {{ .Content }}"}...)
+ templates = append(templates, []string{"_default/list.html", "List Content: {{ .Content }}"}...)
+
+ content = append(content, []string{"b1/index.md", fmt.Sprintf(contentWithShortcodeTemplate, "b1", 1)}...)
+ content = append(content, []string{"b1/logo.png", "PNG logo"}...)
+ content = append(content, []string{"b1/bp1.md", fmt.Sprintf(simpleContentTemplate, "bp1", 1, "bp1")}...)
+
+ content = append(content, []string{"section1/_index.md", fmt.Sprintf(contentWithShortcodeTemplate, "s1", 2)}...)
+ content = append(content, []string{"section1/p1.md", fmt.Sprintf(simpleContentTemplate, "s1p1", 2, "s1p1")}...)
+
+ content = append(content, []string{"section2/_index.md", fmt.Sprintf(simpleContentTemplate, "b1", 1, "b1")}...)
+ content = append(content, []string{"section2/s2p1.md", fmt.Sprintf(contentWithShortcodeTemplate, "bp1", 1)}...)
+
+ builder := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
+
+ builder.WithContent(content...).WithTemplates(templates...).CreateSites().Build(BuildCfg{})
+ s := builder.H.Sites[0]
+ builder.Assert(len(s.RegularPages()), qt.Equals, 3)
+
+ builder.AssertFileContent("public/en/section1/index.html",
+ "List Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
+ "BP1:P1:|P2:docbp1/<p>C-bp1</p>",
+ )
+
+ builder.AssertFileContent("public/en/b1/index.html",
+ "Single Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
+ "P2:docbp1/<p>C-bp1</p>",
+ )
+
+ builder.AssertFileContent("public/en/section2/s2p1/index.html",
+ "Single Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
+ "P2:docbp1/<p>C-bp1</p>",
+ )
+}
+
+// https://github.com/gohugoio/hugo/issues/5833
+func TestShortcodeParentResourcesOnRebuild(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).Running().WithSimpleConfigFile()
+ b.WithTemplatesAdded(
+ "index.html", `
+{{ $b := .Site.GetPage "b1" }}
+b1 Content: {{ $b.Content }}
+{{$p := $b.Resources.GetMatch "p1*" }}
+Content: {{ $p.Content }}
+{{ $article := .Site.GetPage "blog/article" }}
+Article Content: {{ $article.Content }}
+`,
+ "shortcodes/c.html", `
+{{ range .Page.Parent.Resources }}
+* Parent resource: {{ .Name }}: {{ .RelPermalink }}
+{{ end }}
+`)
+
+ pageContent := `
+---
+title: MyPage
+---
+
+SHORTCODE: {{< c >}}
+
+`
+
+ b.WithContent("b1/index.md", pageContent,
+ "b1/logo.png", "PNG logo",
+ "b1/p1.md", pageContent,
+ "blog/_index.md", pageContent,
+ "blog/logo-article.png", "PNG logo",
+ "blog/article.md", pageContent,
+ )
+
+ b.Build(BuildCfg{})
+
+ assert := func(matchers ...string) {
+ allMatchers := append(matchers, "Parent resource: logo.png: /b1/logo.png",
+ "Article Content: <p>SHORTCODE: \n\n* Parent resource: logo-article.png: /blog/logo-article.png",
+ )
+
+ b.AssertFileContent("public/index.html",
+ allMatchers...,
+ )
+ }
+
+ assert()
+
+ b.EditFiles("content/b1/index.md", pageContent+" Edit.")
+
+ b.Build(BuildCfg{})
+
+ assert("Edit.")
+}
+
+func TestShortcodePreserveOrder(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ contentTemplate := `---
+title: doc%d
+weight: %d
+---
+# doc
+
+{{< s1 >}}{{< s2 >}}{{< s3 >}}{{< s4 >}}{{< s5 >}}
+
+{{< nested >}}
+{{< ordinal >}} {{< scratch >}}
+{{< ordinal >}} {{< scratch >}}
+{{< ordinal >}} {{< scratch >}}
+{{< /nested >}}
+
+`
+
+ ordinalShortcodeTemplate := `ordinal: {{ .Ordinal }}{{ .Page.Scratch.Set "ordinal" .Ordinal }}`
+
+ nestedShortcode := `outer ordinal: {{ .Ordinal }} inner: {{ .Inner }}`
+ scratchGetShortcode := `scratch ordinal: {{ .Ordinal }} scratch get ordinal: {{ .Page.Scratch.Get "ordinal" }}`
+ shortcodeTemplate := `v%d: {{ .Ordinal }} sgo: {{ .Page.Scratch.Get "o2" }}{{ .Page.Scratch.Set "o2" .Ordinal }}|`
+
+ var shortcodes []string
+ var content []string
+
+ shortcodes = append(shortcodes, []string{"shortcodes/nested.html", nestedShortcode}...)
+ shortcodes = append(shortcodes, []string{"shortcodes/ordinal.html", ordinalShortcodeTemplate}...)
+ shortcodes = append(shortcodes, []string{"shortcodes/scratch.html", scratchGetShortcode}...)
+
+ for i := 1; i <= 5; i++ {
+ sc := fmt.Sprintf(shortcodeTemplate, i)
+ sc = strings.Replace(sc, "%%", "%", -1)
+ shortcodes = append(shortcodes, []string{fmt.Sprintf("shortcodes/s%d.html", i), sc}...)
+ }
+
+ for i := 1; i <= 3; i++ {
+ content = append(content, []string{fmt.Sprintf("p%d.md", i), fmt.Sprintf(contentTemplate, i, i)}...)
+ }
+
+ builder := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
+
+ builder.WithContent(content...).WithTemplatesAdded(shortcodes...).CreateSites().Build(BuildCfg{})
+
+ s := builder.H.Sites[0]
+ c.Assert(len(s.RegularPages()), qt.Equals, 3)
+
+ builder.AssertFileContent("public/en/p1/index.html", `v1: 0 sgo: |v2: 1 sgo: 0|v3: 2 sgo: 1|v4: 3 sgo: 2|v5: 4 sgo: 3`)
+ builder.AssertFileContent("public/en/p1/index.html", `outer ordinal: 5 inner:
+ordinal: 0 scratch ordinal: 1 scratch get ordinal: 0
+ordinal: 2 scratch ordinal: 3 scratch get ordinal: 2
+ordinal: 4 scratch ordinal: 5 scratch get ordinal: 4`)
+}
+
+func TestShortcodeVariables(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ builder := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ builder.WithContent("page.md", `---
+title: "Hugo Rocks!"
+---
+
+# doc
+
+ {{< s1 >}}
+
+`).WithTemplatesAdded("layouts/shortcodes/s1.html", `
+Name: {{ .Name }}
+{{ with .Position }}
+File: {{ .Filename }}
+Offset: {{ .Offset }}
+Line: {{ .LineNumber }}
+Column: {{ .ColumnNumber }}
+String: {{ . | safeHTML }}
+{{ end }}
+
+`).CreateSites().Build(BuildCfg{})
+
+ s := builder.H.Sites[0]
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ builder.AssertFileContent("public/page/index.html",
+ filepath.FromSlash("File: content/page.md"),
+ "Line: 7", "Column: 4", "Offset: 40",
+ filepath.FromSlash("String: \"content/page.md:7:4\""),
+ "Name: s1",
+ )
+}
+
+func TestInlineShortcodes(t *testing.T) {
+ for _, enableInlineShortcodes := range []bool{true, false} {
+ enableInlineShortcodes := enableInlineShortcodes
+ t.Run(fmt.Sprintf("enableInlineShortcodes=%t", enableInlineShortcodes),
+ func(t *testing.T) {
+ t.Parallel()
+ conf := fmt.Sprintf(`
+baseURL = "https://example.com"
+enableInlineShortcodes = %t
+`, enableInlineShortcodes)
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", conf)
+
+ shortcodeContent := `FIRST:{{< myshort.inline "first" >}}
+Page: {{ .Page.Title }}
+Seq: {{ seq 3 }}
+Param: {{ .Get 0 }}
+{{< /myshort.inline >}}:END:
+
+SECOND:{{< myshort.inline "second" />}}:END
+NEW INLINE: {{< n1.inline "5" >}}W1: {{ seq (.Get 0) }}{{< /n1.inline >}}:END:
+INLINE IN INNER: {{< outer >}}{{< n2.inline >}}W2: {{ seq 4 }}{{< /n2.inline >}}{{< /outer >}}:END:
+REUSED INLINE IN INNER: {{< outer >}}{{< n1.inline "3" />}}{{< /outer >}}:END:
+## MARKDOWN DELIMITER: {{% mymarkdown.inline %}}**Hugo Rocks!**{{% /mymarkdown.inline %}}
+`
+
+ b.WithContent("page-md-shortcode.md", `---
+title: "Hugo"
+---
+`+shortcodeContent)
+
+ b.WithContent("_index.md", `---
+title: "Hugo Home"
+---
+
+`+shortcodeContent)
+
+ b.WithTemplatesAdded("layouts/_default/single.html", `
+CONTENT:{{ .Content }}
+TOC: {{ .TableOfContents }}
+`)
+
+ b.WithTemplatesAdded("layouts/index.html", `
+CONTENT:{{ .Content }}
+TOC: {{ .TableOfContents }}
+`)
+
+ b.WithTemplatesAdded("layouts/shortcodes/outer.html", `Inner: {{ .Inner }}`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ shouldContain := []string{
+ "Seq: [1 2 3]",
+ "Param: first",
+ "Param: second",
+ "NEW INLINE: W1: [1 2 3 4 5]",
+ "INLINE IN INNER: Inner: W2: [1 2 3 4]",
+ "REUSED INLINE IN INNER: Inner: W1: [1 2 3]",
+ `<li><a href="#markdown-delimiter-hugo-rocks">MARKDOWN DELIMITER: <strong>Hugo Rocks!</strong></a></li>`,
+ }
+
+ if enableInlineShortcodes {
+ b.AssertFileContent("public/page-md-shortcode/index.html",
+ shouldContain...,
+ )
+ b.AssertFileContent("public/index.html",
+ shouldContain...,
+ )
+ } else {
+ b.AssertFileContent("public/page-md-shortcode/index.html",
+ "FIRST::END",
+ "SECOND::END",
+ "NEW INLINE: :END",
+ "INLINE IN INNER: Inner: :END:",
+ "REUSED INLINE IN INNER: Inner: :END:",
+ )
+ }
+ })
+
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/5863
+func TestShortcodeNamespaced(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ builder := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ builder.WithContent("page.md", `---
+title: "Hugo Rocks!"
+---
+
+# doc
+
+ hello: {{< hello >}}
+ test/hello: {{< test/hello >}}
+
+`).WithTemplatesAdded(
+ "layouts/shortcodes/hello.html", `hello`,
+ "layouts/shortcodes/test/hello.html", `test/hello`).CreateSites().Build(BuildCfg{})
+
+ s := builder.H.Sites[0]
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+ builder.AssertFileContent("public/page/index.html",
+ "hello: hello",
+ "test/hello: test/hello",
+ )
+}
+
+// https://github.com/gohugoio/hugo/issues/6504
+// TestShortcodeEmoji verifies that emoji codes (e.g. :smile:) are expanded
+// inside shortcode .Inner content when enableEmoji is set.
+func TestShortcodeEmoji(t *testing.T) {
+	t.Parallel()
+
+	v := config.NewWithTestDefaults()
+	v.Set("enableEmoji", true)
+
+	builder := newTestSitesBuilder(t).WithViper(v)
+
+	builder.WithContent("page.md", `---
+title: "Hugo Rocks!"
+---
+
+# doc
+
+{{< event >}}10:30-11:00 My :smile: Event {{< /event >}}
+
+
+`).WithTemplatesAdded(
+		// The template itself emits a Unicode escape; the .Inner emoji code
+		// must come out as the rendered emoji character.
+		"layouts/shortcodes/event.html", `<div>{{ "\u29BE" }} {{ .Inner }} </div>`)
+
+	builder.Build(BuildCfg{})
+	builder.AssertFileContent("public/page/index.html",
+		"⦾ 10:30-11:00 My 😄 Event",
+	)
+}
+
+// TestShortcodeTypedParams verifies that unquoted shortcode arguments are
+// parsed into typed values (bool, int, float64) both positionally and by
+// name, while quoted arguments stay strings.
+func TestShortcodeTypedParams(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	builder := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+	builder.WithContent("page.md", `---
+title: "Hugo Rocks!"
+---
+
+# doc
+
+types positional: {{< hello true false 33 3.14 >}}
+types named: {{< hello b1=true b2=false i1=33 f1=3.14 >}}
+types string: {{< hello "true" trues "33" "3.14" >}}
+
+
+`).WithTemplatesAdded(
+		"layouts/shortcodes/hello.html",
+		// %T prints the Go type so the assertions below can pin the
+		// concrete type of each parameter.
+		`{{ range $i, $v := .Params }}
+-  {{ printf "%v: %v (%T)" $i $v $v }}
+{{ end }}
+{{ $b1 := .Get "b1" }}
+Get: {{ printf "%v (%T)" $b1 $b1 | safeHTML }}
+`).Build(BuildCfg{})
+
+	s := builder.H.Sites[0]
+	c.Assert(len(s.RegularPages()), qt.Equals, 1)
+
+	builder.AssertFileContent("public/page/index.html",
+		"types positional: - 0: true (bool) - 1: false (bool) - 2: 33 (int) - 3: 3.14 (float64)",
+		"types named: - b1: true (bool) - b2: false (bool) - f1: 3.14 (float64) - i1: 33 (int) Get: true (bool) ",
+		"types string: - 0: true (string) - 1: trues (string) - 2: 33 (string) - 3: 3.14 (string) ",
+	)
+}
+
+// TestShortcodeRef verifies the ref/relref shortcodes: ref produces an
+// absolute URL (with baseURL), relref a relative one, and fragments
+// (#anchor) are carried through to the generated links.
+func TestShortcodeRef(t *testing.T) {
+	t.Parallel()
+
+	v := config.NewWithTestDefaults()
+	v.Set("baseURL", "https://example.org")
+
+	builder := newTestSitesBuilder(t).WithViper(v)
+
+	// Two identical pages that cross-link each other and themselves.
+	for i := 1; i <= 2; i++ {
+		builder.WithContent(fmt.Sprintf("page%d.md", i), `---
+title: "Hugo Rocks!"
+---
+
+
+
+[Page 1]({{< ref "page1.md" >}})
+[Page 1 with anchor]({{< relref "page1.md#doc" >}})
+[Page 2]({{< ref "page2.md" >}})
+[Page 2 with anchor]({{< relref "page2.md#doc" >}})
+
+
+## Doc
+
+
+`)
+	}
+
+	builder.Build(BuildCfg{})
+
+	builder.AssertFileContent("public/page2/index.html", `
+<a href="/page1/#doc">Page 1 with anchor</a>
+<a href="https://example.org/page2/">Page 2</a>
+<a href="/page2/#doc">Page 2 with anchor</a></p>
+
+<h2 id="doc">Doc</h2>
+`,
+	)
+
+}
+
+// https://github.com/gohugoio/hugo/issues/6857
+// TestShortcodeNoInner verifies that using a closing tag for a shortcode
+// whose template never references .Inner fails the build with a clear,
+// position-annotated error.
+func TestShortcodeNoInner(t *testing.T) {
+	t.Parallel()
+
+	b := newTestSitesBuilder(t)
+
+	b.WithContent("mypage.md", `---
+title: "No Inner!"
+---
+{{< noinner >}}{{< /noinner >}}
+
+
+`).WithTemplatesAdded(
+		"layouts/shortcodes/noinner.html", `No inner here.`)
+
+	// The build must fail; assert on the error message rather than output.
+	err := b.BuildE(BuildCfg{})
+	b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`"content/mypage.md:4:21": failed to extract shortcode: shortcode "noinner" has no .Inner, yet a closing tag was provided`))
+}
+
+// TestShortcodeStableOutputFormatTemplates verifies that shortcode template
+// selection per output format is deterministic across repeated builds:
+// formats with a dedicated shortcode template (csv) use it, all others
+// (json, css) fall back to the HTML variant.
+func TestShortcodeStableOutputFormatTemplates(t *testing.T) {
+	t.Parallel()
+
+	// Repeat the whole build several times to catch any map-iteration
+	// nondeterminism in template selection.
+	for i := 0; i < 5; i++ {
+
+		b := newTestSitesBuilder(t)
+
+		const numPages = 10
+
+		// Note: this inner i deliberately shadows the outer loop variable.
+		for i := 0; i < numPages; i++ {
+			b.WithContent(fmt.Sprintf("page%d.md", i), `---
+title: "Page"
+outputs: ["html", "css", "csv", "json"]
+---
+{{< myshort >}}
+
+`)
+		}
+
+		b.WithTemplates(
+			"_default/single.html", "{{ .Content }}",
+			"_default/single.css", "{{ .Content }}",
+			"_default/single.csv", "{{ .Content }}",
+			"_default/single.json", "{{ .Content }}",
+			"shortcodes/myshort.html", `Short-HTML`,
+			"shortcodes/myshort.csv", `Short-CSV`,
+		)
+
+		b.Build(BuildCfg{})
+
+		// helpers.PrintFs(b.Fs.Destination, "public", os.Stdout)
+
+		for i := 0; i < numPages; i++ {
+			b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", i), "Short-HTML")
+			b.AssertFileContent(fmt.Sprintf("public/page%d/index.csv", i), "Short-CSV")
+			// No json shortcode template: must fall back to the HTML one.
+			b.AssertFileContent(fmt.Sprintf("public/page%d/index.json", i), "Short-HTML")
+
+		}
+
+		// CSS output likewise falls back to the HTML shortcode template.
+		for i := 0; i < numPages; i++ {
+			b.AssertFileContent(fmt.Sprintf("public/page%d/styles.css", i), "Short-HTML")
+		}
+
+	}
+}
+
+// #9821
+// TestShortcodeMarkdownOutputFormat verifies that a Markdown shortcode
+// template (.md) is rendered through the Markdown pipeline; the fenced
+// code block (§§§ is the test builder's fence marker) must preserve the
+// raw "<x" without it being swallowed as HTML.
+func TestShortcodeMarkdownOutputFormat(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+{{< foo >}}
+-- layouts/shortcodes/foo.md --
+§§§
+<x
+§§§
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+	b := NewIntegrationTestBuilder(
+		IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+			Running:     true,
+		},
+	).Build()
+
+	b.AssertFileContent("public/p1/index.html", `
+<x
+	`)
+
+}
+
+// TestShortcodePreserveIndentation verifies that indented {{% %}} shortcodes
+// inside a Markdown list keep their indentation, so nested list items
+// produced by the shortcodes merge correctly into the surrounding list.
+func TestShortcodePreserveIndentation(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## List With Indented Shortcodes
+
+1. List 1
+    {{% mark1 %}}
+	1. Item Mark1 1
+	1. Item Mark1 2
+	{{% mark2 %}}
+    {{% /mark1 %}}
+-- layouts/shortcodes/mark1.md --
+{{ .Inner }}
+-- layouts/shortcodes/mark2.md --
+1. Item Mark2 1
+1. Item Mark2 2
+   1. Item Mark2 2-1
+1. Item Mark2 3
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+	b := NewIntegrationTestBuilder(
+		IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+			Running:     true,
+		},
+	).Build()
+
+	// The expected HTML is a single nested <ol> structure; any lost
+	// indentation would break the nesting.
+	b.AssertFileContent("public/p1/index.html", "<ol>\n<li>\n<p>List 1</p>\n<ol>\n<li>Item Mark1 1</li>\n<li>Item Mark1 2</li>\n<li>Item Mark2 1</li>\n<li>Item Mark2 2\n<ol>\n<li>Item Mark2 2-1</li>\n</ol>\n</li>\n<li>Item Mark2 3</li>\n</ol>\n</li>\n</ol>")
+
+}
+
+// TestShortcodeCodeblockIndent verifies that a shortcode indented four
+// spaces is treated as an (indented) Markdown code block, rendering the
+// shortcode's output inside <pre><code>.
+func TestShortcodeCodeblockIndent(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Code block
+
+    {{% code %}}
+
+-- layouts/shortcodes/code.md --
+echo "foo";
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+	b := NewIntegrationTestBuilder(
+		IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+			Running:     true,
+		},
+	).Build()
+
+	b.AssertFileContent("public/p1/index.html", "<pre><code>echo &quot;foo&quot;;\n</code></pre>")
+
+}
+
+// TestShortcodeHighlightDeindent verifies the interplay between an indented
+// highlight shortcode and Markdown's indented-code-block rule: the common
+// leading indentation is stripped before highlighting, while the 4-space
+// indent still wraps the highlighted output in an outer <pre><code>.
+func TestShortcodeHighlightDeindent(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+[markup]
+[markup.highlight]
+codeFences = true
+noClasses = false
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Indent 5 Spaces
+
+     {{< highlight bash >}}
+     line 1;
+     line 2;
+     line 3;
+     {{< /highlight >}}
+
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+	b := NewIntegrationTestBuilder(
+		IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+			Running:     true,
+		},
+	).Build()
+
+	b.AssertFileContent("public/p1/index.html", `
+<pre><code> <div class="highlight"><pre tabindex="0" class="chroma"><code class="language-bash" data-lang="bash"><span class="line"><span class="cl">line 1<span class="p">;</span>
+</span></span><span class="line"><span class="cl">line 2<span class="p">;</span>
+</span></span><span class="line"><span class="cl">line 3<span class="p">;</span></span></span></code></pre></div>
+</code></pre>
+
+	`)
+
+}
diff --git a/hugolib/site.go b/hugolib/site.go
new file mode 100644
index 000000000..cbfc4d836
--- /dev/null
+++ b/hugolib/site.go
@@ -0,0 +1,1922 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "html/template"
+ "io"
+ "log"
+ "mime"
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/modules"
+ "golang.org/x/text/unicode/norm"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/common/constants"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/resources"
+
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/common/text"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/publisher"
+
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/lazy"
+
+ "github.com/gohugoio/hugo/media"
+
+ "github.com/fsnotify/fsnotify"
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/navigation"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/related"
+ "github.com/gohugoio/hugo/resources/page/pagemeta"
+ "github.com/gohugoio/hugo/source"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/spf13/afero"
+ "github.com/spf13/cast"
+)
+
+// Site contains all the information relevant for constructing a static
+// site. The basic flow of information is as follows:
+//
+// 1. A list of Files is parsed and then converted into Pages.
+//
+// 2. Pages contain sections (based on the file they were generated from),
+//    aliases and slugs (included in a pages frontmatter) which are the
+//    various targets that will get generated. There will be canonical
+//    listing. The canonical path can be overruled based on a pattern.
+//
+// 3. Taxonomies are created via configuration and will present some aspect of
+//    the final page and typically a perm url.
+//
+// 4. All Pages are passed through a template based on their desired
+//    layout based on numerous different elements.
+//
+// 5. The entire collection of files is written to disk.
+type Site struct {
+
+	// The owning container. When multiple languages, there will be multiple
+	// sites.
+	h *HugoSites
+
+	*PageCollections
+
+	// Lazily assembled; access via Taxonomies().
+	taxonomies TaxonomyList
+
+	Sections Taxonomy
+	Info     *SiteInfo
+
+	language   *langs.Language
+	siteBucket *pagesMapBucket
+
+	siteCfg siteConfigHolder
+
+	// Page kinds (home, section, taxonomy, ...) disabled via config.
+	disabledKinds map[string]bool
+
+	// Output formats defined in site config per Page Kind, or some defaults
+	// if not set.
+	// Output formats defined in Page front matter will override these.
+	outputFormats map[string]output.Formats
+
+	// All the output formats and media types available for this site.
+	// These values will be merged from the Hugo defaults, the site config and,
+	// finally, the language settings.
+	outputFormatsConfig output.Formats
+	mediaTypesConfig    media.Types
+
+	siteConfigConfig SiteConfig
+
+	// How to handle page front matter.
+	frontmatterHandler pagemeta.FrontMatterHandler
+
+	// We render each site for all the relevant output formats in serial with
+	// this rendering context pointing to the current one.
+	rc *siteRenderingContext
+
+	// The output formats that we need to render this site in. This slice
+	// will be fixed once set.
+	// This will be the union of Site.Pages' outputFormats.
+	// This slice will be sorted.
+	renderFormats output.Formats
+
+	// Logger etc.
+	*deps.Deps `json:"-"`
+
+	// The func used to title case titles.
+	titleFunc func(s string) string
+
+	relatedDocsHandler *page.RelatedDocsHandler
+	siteRefLinker
+
+	publisher publisher.Publisher
+
+	// Lazily assembled; access via Menus().
+	menus navigation.Menus
+
+	// Shortcut to the home page. Note that this may be nil if
+	// home page, for some odd reason, is disabled.
+	home *pageState
+
+	// The last modification date of this site.
+	lastmod time.Time
+
+	// Lazily loaded site dependencies
+	init *siteInit
+}
+
+// Taxonomies returns the site's taxonomies, lazily assembled on first use.
+func (s *Site) Taxonomies() TaxonomyList {
+	s.init.taxonomies.Do()
+	return s.taxonomies
+}
+
+// taxonomiesConfig maps a taxonomy's singular name to its plural name,
+// as configured for the site (e.g. "tag" -> "tags").
+type taxonomiesConfig map[string]string
+
+// Values returns the configured taxonomies as viewName values, sorted by
+// plural name so iteration order is deterministic.
+func (t taxonomiesConfig) Values() []viewName {
+	var vals []viewName
+	for k, v := range t {
+		vals = append(vals, viewName{singular: k, plural: v})
+	}
+	sort.Slice(vals, func(i, j int) bool {
+		return vals[i].plural < vals[j].plural
+	})
+
+	return vals
+}
+
+// siteConfigHolder groups per-site settings decoded from the (language
+// aware) site configuration.
+type siteConfigHolder struct {
+	sitemap          config.Sitemap
+	taxonomiesConfig taxonomiesConfig
+	timeout          time.Duration
+	hasCJKLanguage   bool
+	enableEmoji      bool
+}
+
+// Lazily loaded site dependencies.
+type siteInit struct {
+	prevNext          *lazy.Init
+	prevNextInSection *lazy.Init
+	menus             *lazy.Init
+	taxonomies        *lazy.Init
+}
+
+// Reset marks all lazy branches as not-run so they are re-evaluated on
+// next access (used on rebuilds).
+func (init *siteInit) Reset() {
+	init.prevNext.Reset()
+	init.prevNextInSection.Reset()
+	init.menus.Reset()
+	init.taxonomies.Reset()
+}
+
+// initInit runs the given lazy init and reports whether it succeeded.
+// On failure the error is wrapped with the page context and treated as
+// fatal for the whole build.
+func (s *Site) initInit(init *lazy.Init, pctx pageContext) bool {
+	_, err := init.Do()
+	if err != nil {
+		s.h.FatalError(pctx.wrapError(err))
+	}
+	return err == nil
+}
+
+// prepareInits wires up the lazily evaluated site dependencies:
+// prev/next page links (site wide and per section), menus and taxonomies.
+// Each branch runs at most once, on first access.
+func (s *Site) prepareInits() {
+	s.init = &siteInit{}
+
+	var init lazy.Init
+
+	s.init.prevNext = init.Branch(func() (any, error) {
+		regularPages := s.RegularPages()
+		for i, p := range regularPages {
+			np, ok := p.(nextPrevProvider)
+			if !ok {
+				continue
+			}
+
+			pos := np.getNextPrev()
+			if pos == nil {
+				continue
+			}
+
+			pos.nextPage = nil
+			pos.prevPage = nil
+
+			// NOTE(review): "next" points at the element *before* i in the
+			// slice — presumably because regular pages are in the default
+			// sort order (newest first), making Next the newer page. Confirm
+			// against page.SortByDefault before changing.
+			if i > 0 {
+				pos.nextPage = regularPages[i-1]
+			}
+
+			if i < len(regularPages)-1 {
+				pos.prevPage = regularPages[i+1]
+			}
+		}
+		return nil, nil
+	})
+
+	s.init.prevNextInSection = init.Branch(func() (any, error) {
+		// Collect all sections, including the home page itself.
+		var sections page.Pages
+		s.home.treeRef.m.collectSectionsRecursiveIncludingSelf(pageMapQuery{Prefix: s.home.treeRef.key}, func(n *contentNode) {
+			sections = append(sections, n.p)
+		})
+
+		// setNextPrev wires in-section prev/next pointers for one page list,
+		// using the same index direction as the site-wide branch above.
+		setNextPrev := func(pas page.Pages) {
+			for i, p := range pas {
+				np, ok := p.(nextPrevInSectionProvider)
+				if !ok {
+					continue
+				}
+
+				pos := np.getNextPrevInSection()
+				if pos == nil {
+					continue
+				}
+
+				pos.nextPage = nil
+				pos.prevPage = nil
+
+				if i > 0 {
+					pos.nextPage = pas[i-1]
+				}
+
+				if i < len(pas)-1 {
+					pos.prevPage = pas[i+1]
+				}
+			}
+		}
+
+		for _, sect := range sections {
+			treeRef := sect.(treeRefProvider).getTreeRef()
+
+			var pas page.Pages
+			treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) {
+				pas = append(pas, c.p)
+			})
+			page.SortByDefault(pas)
+
+			setNextPrev(pas)
+		}
+
+		// The root section only goes one level down.
+		treeRef := s.home.getTreeRef()
+
+		var pas page.Pages
+		treeRef.m.collectPages(pageMapQuery{Prefix: treeRef.key + cmBranchSeparator}, func(c *contentNode) {
+			pas = append(pas, c.p)
+		})
+		page.SortByDefault(pas)
+
+		setNextPrev(pas)
+
+		return nil, nil
+	})
+
+	s.init.menus = init.Branch(func() (any, error) {
+		s.assembleMenus()
+		return nil, nil
+	})
+
+	s.init.taxonomies = init.Branch(func() (any, error) {
+		err := s.pageMap.assembleTaxonomies()
+		return nil, err
+	})
+}
+
+// siteRenderingContext holds the output format currently being rendered.
+type siteRenderingContext struct {
+	output.Format
+}
+
+// Menus returns the site menus, lazily assembled on first use.
+func (s *Site) Menus() navigation.Menus {
+	s.init.menus.Do()
+	return s.menus
+}
+
+// initRenderFormats computes the sorted union of all output formats this
+// site must render: formats configured in page front matter plus the
+// per-kind site defaults.
+func (s *Site) initRenderFormats() {
+	formatSet := make(map[string]bool)
+	formats := output.Formats{}
+	// Note: the callback's string parameter shadows the *Site receiver s;
+	// it is unused in the body.
+	s.pageMap.pageTrees.WalkRenderable(func(s string, n *contentNode) bool {
+		for _, f := range n.p.m.configuredOutputFormats {
+			if !formatSet[f.Name] {
+				formats = append(formats, f)
+				formatSet[f.Name] = true
+			}
+		}
+		return false
+	})
+
+	// Add the per kind configured output formats
+	for _, kind := range allKindsInPages {
+		if siteFormats, found := s.outputFormats[kind]; found {
+			for _, f := range siteFormats {
+				if !formatSet[f.Name] {
+					formats = append(formats, f)
+					formatSet[f.Name] = true
+				}
+			}
+		}
+	}
+
+	sort.Sort(formats)
+	s.renderFormats = formats
+}
+
+// GetRelatedDocsHandler returns the handler used for related-content lookups.
+func (s *Site) GetRelatedDocsHandler() *page.RelatedDocsHandler {
+	return s.relatedDocsHandler
+}
+
+// Language returns the language this site is rendered in.
+func (s *Site) Language() *langs.Language {
+	return s.language
+}
+
+// isEnabled reports whether the given page kind is not disabled via
+// disableKinds. Passing the unknown kind is a programmer error.
+func (s *Site) isEnabled(kind string) bool {
+	if kind == kindUnknown {
+		panic("Unknown kind")
+	}
+	return !s.disabledKinds[kind]
+}
+
+// reset returns a new Site prepared for rebuild.
+// Configuration-derived state is carried over as-is; only the related-docs
+// handler is cloned (it caches per-build state).
+func (s *Site) reset() *Site {
+	return &Site{
+		Deps:                s.Deps,
+		disabledKinds:       s.disabledKinds,
+		titleFunc:           s.titleFunc,
+		relatedDocsHandler:  s.relatedDocsHandler.Clone(),
+		siteRefLinker:       s.siteRefLinker,
+		outputFormats:       s.outputFormats,
+		rc:                  s.rc,
+		outputFormatsConfig: s.outputFormatsConfig,
+		frontmatterHandler:  s.frontmatterHandler,
+		mediaTypesConfig:    s.mediaTypesConfig,
+		language:            s.language,
+		siteBucket:          s.siteBucket,
+		h:                   s.h,
+		publisher:           s.publisher,
+		siteConfigConfig:    s.siteConfigConfig,
+		init:                s.init,
+		PageCollections:     s.PageCollections,
+		siteCfg:             s.siteCfg,
+	}
+}
+
+// newSite creates a new site with the given configuration.
+// It decodes media types, output formats, taxonomies, related-content and
+// front matter configuration (language settings taking precedence over the
+// global config), and handles the pre-0.73.0 taxonomy/taxonomyTerm kind
+// rename for backwards compatibility.
+func newSite(cfg deps.DepsCfg) (*Site, error) {
+	if cfg.Language == nil {
+		cfg.Language = langs.NewDefaultLanguage(cfg.Cfg)
+	}
+	if cfg.Logger == nil {
+		panic("logger must be set")
+	}
+
+	ignoreErrors := cast.ToStringSlice(cfg.Language.Get("ignoreErrors"))
+	ignorableLogger := loggers.NewIgnorableLogger(cfg.Logger, ignoreErrors...)
+
+	disabledKinds := make(map[string]bool)
+	for _, disabled := range cast.ToStringSlice(cfg.Language.Get("disableKinds")) {
+		disabledKinds[disabled] = true
+	}
+
+	if disabledKinds["taxonomyTerm"] {
+		// Correct from the value it had before Hugo 0.73.0.
+		if disabledKinds[page.KindTaxonomy] {
+			disabledKinds[page.KindTerm] = true
+		} else {
+			disabledKinds[page.KindTaxonomy] = true
+		}
+
+		delete(disabledKinds, "taxonomyTerm")
+	} else if disabledKinds[page.KindTaxonomy] && !disabledKinds[page.KindTerm] {
+		// This is a potentially ambiguous situation. It may be correct.
+		ignorableLogger.Errorsf(constants.ErrIDAmbigousDisableKindTaxonomy, `You have the value 'taxonomy' in the disabledKinds list. In Hugo 0.73.0 we fixed these to be what most people expect (taxonomy and term).
+But this also means that your site configuration may not do what you expect. If it is correct, you can suppress this message by following the instructions below.`)
+	}
+
+	var (
+		mediaTypesConfig    []map[string]any
+		outputFormatsConfig []map[string]any
+
+		siteOutputFormatsConfig output.Formats
+		siteMediaTypesConfig    media.Types
+		err                     error
+	)
+
+	// Add language last, if set, so it gets precedence.
+	for _, cfg := range []config.Provider{cfg.Cfg, cfg.Language} {
+		if cfg.IsSet("mediaTypes") {
+			mediaTypesConfig = append(mediaTypesConfig, cfg.GetStringMap("mediaTypes"))
+		}
+		if cfg.IsSet("outputFormats") {
+			outputFormatsConfig = append(outputFormatsConfig, cfg.GetStringMap("outputFormats"))
+		}
+	}
+
+	siteMediaTypesConfig, err = media.DecodeTypes(mediaTypesConfig...)
+	if err != nil {
+		return nil, err
+	}
+
+	siteOutputFormatsConfig, err = output.DecodeFormats(siteMediaTypesConfig, outputFormatsConfig...)
+	if err != nil {
+		return nil, err
+	}
+
+	rssDisabled := disabledKinds[kindRSS]
+	if rssDisabled {
+		// Legacy
+		// Filter the RSS format out in place (tmp shares backing array).
+		tmp := siteOutputFormatsConfig[:0]
+		for _, x := range siteOutputFormatsConfig {
+			if !strings.EqualFold(x.Name, "rss") {
+				tmp = append(tmp, x)
+			}
+		}
+		siteOutputFormatsConfig = tmp
+	}
+
+	var siteOutputs map[string]any
+	if cfg.Language.IsSet("outputs") {
+		siteOutputs = cfg.Language.GetStringMap("outputs")
+
+		// Check and correct taxonomy kinds vs pre Hugo 0.73.0.
+		v1, hasTaxonomyTerm := siteOutputs["taxonomyterm"]
+		v2, hasTaxonomy := siteOutputs[page.KindTaxonomy]
+		_, hasTerm := siteOutputs[page.KindTerm]
+		if hasTaxonomy && hasTaxonomyTerm {
+			siteOutputs[page.KindTaxonomy] = v1
+			siteOutputs[page.KindTerm] = v2
+			delete(siteOutputs, "taxonomyTerm")
+		} else if hasTaxonomy && !hasTerm {
+			// This is a potentially ambiguous situation. It may be correct.
+			ignorableLogger.Errorsf(constants.ErrIDAmbigousOutputKindTaxonomy, `You have configured output formats for 'taxonomy' in your site configuration. In Hugo 0.73.0 we fixed these to be what most people expect (taxonomy and term).
+But this also means that your site configuration may not do what you expect. If it is correct, you can suppress this message by following the instructions below.`)
+		}
+		if !hasTaxonomy && hasTaxonomyTerm {
+			siteOutputs[page.KindTaxonomy] = v1
+			delete(siteOutputs, "taxonomyterm")
+		}
+	}
+
+	outputFormats, err := createSiteOutputFormats(siteOutputFormatsConfig, siteOutputs, rssDisabled)
+	if err != nil {
+		return nil, err
+	}
+
+	taxonomies := cfg.Language.GetStringMapString("taxonomies")
+
+	var relatedContentConfig related.Config
+
+	if cfg.Language.IsSet("related") {
+		relatedContentConfig, err = related.DecodeConfig(cfg.Language.GetParams("related"))
+		if err != nil {
+			return nil, fmt.Errorf("failed to decode related config: %w", err)
+		}
+	} else {
+		relatedContentConfig = related.DefaultConfig
+		if _, found := taxonomies["tag"]; found {
+			relatedContentConfig.Add(related.IndexConfig{Name: "tags", Weight: 80})
+		}
+	}
+
+	titleFunc := helpers.GetTitleFunc(cfg.Language.GetString("titleCaseStyle"))
+
+	frontMatterHandler, err := pagemeta.NewFrontmatterHandler(cfg.Logger, cfg.Cfg)
+	if err != nil {
+		return nil, err
+	}
+
+	// Default build timeout; an invalid configured value is silently ignored.
+	timeout := 30 * time.Second
+	if cfg.Language.IsSet("timeout") {
+		v := cfg.Language.Get("timeout")
+		d, err := types.ToDurationE(v)
+		if err == nil {
+			timeout = d
+		}
+	}
+
+	siteConfig := siteConfigHolder{
+		sitemap:          config.DecodeSitemap(config.Sitemap{Priority: -1, Filename: "sitemap.xml"}, cfg.Language.GetStringMap("sitemap")),
+		taxonomiesConfig: taxonomies,
+		timeout:          timeout,
+		hasCJKLanguage:   cfg.Language.GetBool("hasCJKLanguage"),
+		enableEmoji:      cfg.Language.Cfg.GetBool("enableEmoji"),
+	}
+
+	var siteBucket *pagesMapBucket
+	if cfg.Language.IsSet("cascade") {
+		var err error
+		cascade, err := page.DecodeCascade(cfg.Language.Get("cascade"))
+		if err != nil {
+			return nil, fmt.Errorf("failed to decode cascade config: %s", err)
+		}
+
+		siteBucket = &pagesMapBucket{
+			cascade: cascade,
+		}
+
+	}
+
+	s := &Site{
+		language:      cfg.Language,
+		siteBucket:    siteBucket,
+		disabledKinds: disabledKinds,
+
+		outputFormats:       outputFormats,
+		outputFormatsConfig: siteOutputFormatsConfig,
+		mediaTypesConfig:    siteMediaTypesConfig,
+
+		siteCfg: siteConfig,
+
+		titleFunc: titleFunc,
+
+		rc: &siteRenderingContext{output.HTMLFormat},
+
+		frontmatterHandler: frontMatterHandler,
+		relatedDocsHandler: page.NewRelatedDocsHandler(relatedContentConfig),
+	}
+
+	s.prepareInits()
+
+	return s, nil
+}
+
+// NewSite creates a new site with the given dependency configuration.
+// The site will have a template system loaded and ready to use.
+// Note: This is mainly used in single site tests.
+func NewSite(cfg deps.DepsCfg) (*Site, error) {
+	s, err := newSite(cfg)
+	if err != nil {
+		return nil, err
+	}
+
+	var l configLoader
+	if err = l.applyDeps(cfg, s); err != nil {
+		return nil, err
+	}
+
+	return s, nil
+}
+
+// NewSiteDefaultLang creates a new site in the default language.
+// The site will have a template system loaded and ready to use.
+// Note: This is mainly used in single site tests.
+// TODO(bep) test refactor -- remove
+func NewSiteDefaultLang(withTemplate ...func(templ tpl.TemplateManager) error) (*Site, error) {
+	l := configLoader{cfg: config.New()}
+	if err := l.applyConfigDefaults(); err != nil {
+		return nil, err
+	}
+	return newSiteForLang(langs.NewDefaultLanguage(l.cfg), withTemplate...)
+}
+
+// NewEnglishSite creates a new site in English language.
+// The site will have a template system loaded and ready to use.
+// Note: This is mainly used in single site tests.
+// TODO(bep) test refactor -- remove
+func NewEnglishSite(withTemplate ...func(templ tpl.TemplateManager) error) (*Site, error) {
+	l := configLoader{cfg: config.New()}
+	if err := l.applyConfigDefaults(); err != nil {
+		return nil, err
+	}
+	return newSiteForLang(langs.NewLanguage("en", l.cfg), withTemplate...)
+}
+
+// newSiteForLang creates a new site in the given language, applying the
+// given template setup funcs in order.
+func newSiteForLang(lang *langs.Language, withTemplate ...func(templ tpl.TemplateManager) error) (*Site, error) {
+	withTemplates := func(templ tpl.TemplateManager) error {
+		for _, wt := range withTemplate {
+			if err := wt(templ); err != nil {
+				return err
+			}
+		}
+		return nil
+	}
+
+	cfg := deps.DepsCfg{WithTemplate: withTemplates, Cfg: lang}
+
+	return NewSiteForCfg(cfg)
+}
+
+// NewSiteForCfg creates a new site for the given configuration.
+// The site will have a template system loaded and ready to use.
+// Note: This is mainly used in single site tests.
+func NewSiteForCfg(cfg deps.DepsCfg) (*Site, error) {
+	h, err := NewHugoSites(cfg)
+	if err != nil {
+		return nil, err
+	}
+	return h.Sites[0], nil
+}
+
+// SiteInfo is the template-facing view of a Site, exposed to templates
+// as .Site. Most accessors delegate to the owning *Site.
+type SiteInfo struct {
+	Authors page.AuthorList
+	Social  SiteSocial
+
+	hugoInfo     hugo.Info
+	title        string
+	RSSLink      string
+	Author       map[string]any
+	LanguageCode string
+	Copyright    string
+
+	permalinks map[string]string
+
+	LanguagePrefix string
+	Languages      langs.Languages
+
+	BuildDrafts bool
+
+	canonifyURLs bool
+	relativeURLs bool
+	uglyURLs     func(p page.Page) bool
+
+	owner                          *HugoSites
+	s                              *Site
+	language                       *langs.Language
+	defaultContentLanguageInSubdir bool
+	sectionPagesMenu               string
+}
+
+// Pages returns all pages of this site (this language only).
+func (s *SiteInfo) Pages() page.Pages {
+	return s.s.Pages()
+}
+
+// RegularPages returns the regular (content) pages of this site.
+func (s *SiteInfo) RegularPages() page.Pages {
+	return s.s.RegularPages()
+}
+
+// AllPages returns all pages across all languages.
+func (s *SiteInfo) AllPages() page.Pages {
+	return s.s.AllPages()
+}
+
+// AllRegularPages returns all regular pages across all languages.
+func (s *SiteInfo) AllRegularPages() page.Pages {
+	return s.s.AllRegularPages()
+}
+
+// LastChange returns the last modification date of the site's content.
+func (s *SiteInfo) LastChange() time.Time {
+	return s.s.lastmod
+}
+
+func (s *SiteInfo) Title() string {
+	return s.title
+}
+
+func (s *SiteInfo) Site() page.Site {
+	return s
+}
+
+func (s *SiteInfo) Menus() navigation.Menus {
+	return s.s.Menus()
+}
+
+// TODO(bep) type
+func (s *SiteInfo) Taxonomies() any {
+	return s.s.Taxonomies()
+}
+
+func (s *SiteInfo) Params() maps.Params {
+	return s.s.Language().Params()
+}
+
+func (s *SiteInfo) Data() map[string]any {
+	return s.s.h.Data()
+}
+
+func (s *SiteInfo) Language() *langs.Language {
+	return s.language
+}
+
+func (s *SiteInfo) Config() SiteConfig {
+	return s.s.siteConfigConfig
+}
+
+func (s *SiteInfo) Hugo() hugo.Info {
+	return s.hugoInfo
+}
+
+// Sites is a convenience method to get all the Hugo sites/languages configured.
+func (s *SiteInfo) Sites() page.Sites {
+	return s.s.h.siteInfos()
+}
+
+// Current returns the currently rendered Site.
+// If that isn't set yet, which is the situation before we start rendering,
+// it will return the Site itself.
+func (s *SiteInfo) Current() page.Site {
+	if s.s.h.currentSite == nil {
+		return s
+	}
+	return s.s.h.currentSite.Info
+}
+
+func (s *SiteInfo) String() string {
+	return fmt.Sprintf("Site(%q)", s.title)
+}
+
+func (s *SiteInfo) BaseURL() template.URL {
+	return template.URL(s.s.PathSpec.BaseURL.String())
+}
+
+// ServerPort returns the port part of the BaseURL, 0 if none found.
+func (s *SiteInfo) ServerPort() int {
+	ps := s.s.PathSpec.BaseURL.URL().Port()
+	if ps == "" {
+		return 0
+	}
+	p, err := strconv.Atoi(ps)
+	if err != nil {
+		return 0
+	}
+	return p
+}
+
+// GoogleAnalytics is kept here for historic reasons.
+func (s *SiteInfo) GoogleAnalytics() string {
+	return s.Config().Services.GoogleAnalytics.ID
+}
+
+// DisqusShortname is kept here for historic reasons.
+func (s *SiteInfo) DisqusShortname() string {
+	return s.Config().Services.Disqus.Shortname
+}
+
+// SiteSocial is a place to put social details on a site level. These are the
+// standard keys that themes will expect to have available, but can be
+// expanded to any others on a per site basis
+// github
+// facebook
+// facebook_admin
+// twitter
+// twitter_domain
+// pinterest
+// instagram
+// youtube
+// linkedin
+type SiteSocial map[string]string
+
+// Param is a convenience method to do lookups in SiteInfo's Params map.
+//
+// This method is also implemented on Page.
+func (s *SiteInfo) Param(key any) (any, error) {
+	return resource.Param(s, nil, key)
+}
+
+// IsMultiLingual reports whether the site has more than one language.
+func (s *SiteInfo) IsMultiLingual() bool {
+	return len(s.Languages) > 1
+}
+
+// IsServer reports whether Hugo is running in server (watch) mode.
+func (s *SiteInfo) IsServer() bool {
+	return s.owner.running
+}
+
+// siteRefLinker resolves ref/relref lookups for a site, logging lookups
+// that can't be resolved instead of failing the build.
+type siteRefLinker struct {
+	s *Site
+
+	// Where not-found refs are logged; severity depends on config.
+	errorLogger *log.Logger
+	notFoundURL string
+}
+
+// newSiteRefLinker creates a ref linker for s. refLinksErrorLevel=warning
+// downgrades not-found logging from error to warning; refLinksNotFoundURL
+// is returned in place of a link for unresolved refs.
+func newSiteRefLinker(cfg config.Provider, s *Site) (siteRefLinker, error) {
+	logger := s.Log.Error()
+
+	notFoundURL := cfg.GetString("refLinksNotFoundURL")
+	errLevel := cfg.GetString("refLinksErrorLevel")
+	if strings.EqualFold(errLevel, "warning") {
+		logger = s.Log.Warn()
+	}
+	return siteRefLinker{s: s, errorLogger: logger, notFoundURL: notFoundURL}, nil
+}
+
+// logNotFound logs an unresolved ref with as much position/source context
+// as is available.
+func (s siteRefLinker) logNotFound(ref, what string, p page.Page, position text.Position) {
+	if position.IsValid() {
+		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q: %s: %s", s.s.Lang(), ref, position.String(), what)
+	} else if p == nil {
+		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q: %s", s.s.Lang(), ref, what)
+	} else {
+		s.errorLogger.Printf("[%s] REF_NOT_FOUND: Ref %q from page %q: %s", s.s.Lang(), ref, p.Pathc(), what)
+	}
+}
+
+// refLink resolves ref to a permalink (relative if relative is true),
+// optionally for a specific output format. source is the page the ref
+// originates from (used for relative lookups and error positions).
+// Unresolvable refs are logged and notFoundURL is returned with a nil error.
+func (s *siteRefLinker) refLink(ref string, source any, relative bool, outputFormat string) (string, error) {
+	p, err := unwrapPage(source)
+	if err != nil {
+		return "", err
+	}
+
+	var refURL *url.URL
+
+	ref = filepath.ToSlash(ref)
+
+	refURL, err = url.Parse(ref)
+
+	if err != nil {
+		return s.notFoundURL, err
+	}
+
+	var target page.Page
+	var link string
+
+	if refURL.Path != "" {
+		var err error
+		target, err = s.s.getPageRef(p, refURL.Path)
+		var pos text.Position
+		if err != nil || target == nil {
+			if p, ok := source.(text.Positioner); ok {
+				pos = p.Position()
+			}
+		}
+
+		if err != nil {
+			s.logNotFound(refURL.Path, err.Error(), p, pos)
+			return s.notFoundURL, nil
+		}
+
+		if target == nil {
+			s.logNotFound(refURL.Path, "page not found", p, pos)
+			return s.notFoundURL, nil
+		}
+
+		var permalinker Permalinker = target
+
+		if outputFormat != "" {
+			o := target.OutputFormats().Get(outputFormat)
+
+			if o == nil {
+				s.logNotFound(refURL.Path, fmt.Sprintf("output format %q", outputFormat), p, pos)
+				return s.notFoundURL, nil
+			}
+			permalinker = o
+		}
+
+		if relative {
+			link = permalinker.RelPermalink()
+		} else {
+			link = permalinker.Permalink()
+		}
+	}
+
+	if refURL.Fragment != "" {
+		// NOTE(review): this assignment is a no-op; target may be nil here
+		// when the ref was fragment-only (refURL.Path == ""), in which case
+		// the type assertion below simply fails and the source page's
+		// anchor suffix is used instead.
+		_ = target
+		link = link + "#" + refURL.Fragment
+
+		if pctx, ok := target.(pageContext); ok {
+			if refURL.Path != "" {
+				if di, ok := pctx.getContentConverter().(converter.DocumentInfo); ok {
+					link = link + di.AnchorSuffix()
+				}
+			}
+		} else if pctx, ok := p.(pageContext); ok {
+			if di, ok := pctx.getContentConverter().(converter.DocumentInfo); ok {
+				link = link + di.AnchorSuffix()
+			}
+		}
+
+	}
+
+	return link, nil
+}
+
+// running reports whether Hugo is running in server/watch mode.
+func (s *Site) running() bool {
+	return s.h != nil && s.h.running
+}
+
+// multilingual returns the owning container's multilingual configuration.
+func (s *Site) multilingual() *Multilingual {
+	return s.h.multilingual
+}
+
+// whatChanged describes the outcome of a partial rebuild analysis:
+// whether content source changed and which files were affected.
+type whatChanged struct {
+	source bool
+	files  map[string]bool
+}
+
+// RegisterMediaTypes will register the Site's media types in the mime
+// package, so it will behave correctly with Hugo's built-in server.
+func (s *Site) RegisterMediaTypes() {
+	for _, mt := range s.mediaTypesConfig {
+		for _, suffix := range mt.Suffixes() {
+			_ = mime.AddExtensionType(mt.Delimiter+suffix, mt.Type()+"; charset=utf-8")
+		}
+	}
+}
+
+// filterFileEvents drops duplicate, ignored and non-regular-file events,
+// and normalizes file names to NFC on macOS (HFS+ stores paths in NFD).
+func (s *Site) filterFileEvents(events []fsnotify.Event) []fsnotify.Event {
+	var filtered []fsnotify.Event
+	seen := make(map[fsnotify.Event]bool)
+
+	for _, ev := range events {
+		// Avoid processing the same event twice.
+		if seen[ev] {
+			continue
+		}
+		seen[ev] = true
+
+		if s.SourceSpec.IgnoreFile(ev.Name) {
+			continue
+		}
+
+		// Throw away any directories
+		isRegular, err := s.SourceSpec.IsRegularSourceFile(ev.Name)
+		if err != nil && os.IsNotExist(err) && (ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename) {
+			// Force keep of event: the file is gone, so we can't stat it,
+			// but removes/renames must still be processed.
+			isRegular = true
+		}
+		if !isRegular {
+			continue
+		}
+
+		if runtime.GOOS == "darwin" { // When a file system is HFS+, its filepath is in NFD form.
+			ev.Name = norm.NFC.String(ev.Name)
+		}
+
+		filtered = append(filtered, ev)
+	}
+
+	return filtered
+}
+
+// translateFileEvents collapses event sequences for the same file into a
+// single representative event, preferring Write over Create over the first
+// event seen.
+func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
+	var filtered []fsnotify.Event
+
+	eventMap := make(map[string][]fsnotify.Event)
+
+	// We often get a Remove etc. followed by a Create, a Create followed by a Write.
+	// Remove the superfluous events to make the update logic simpler.
+	for _, ev := range events {
+		eventMap[ev.Name] = append(eventMap[ev.Name], ev)
+	}
+
+	// NOTE(review): this iterates the original events, so a file with N
+	// events contributes the same kept event N times to the result —
+	// presumably harmless downstream, but worth confirming before reuse.
+	for _, ev := range events {
+		mapped := eventMap[ev.Name]
+
+		// Keep one
+		found := false
+		var kept fsnotify.Event
+		for i, ev2 := range mapped {
+			if i == 0 {
+				kept = ev2
+			}
+
+			if ev2.Op&fsnotify.Write == fsnotify.Write {
+				kept = ev2
+				found = true
+			}
+
+			if !found && ev2.Op&fsnotify.Create == fsnotify.Create {
+				kept = ev2
+			}
+		}
+
+		filtered = append(filtered, kept)
+	}
+
+	return filtered
+}
+
+var (
+	// These are only used for cache busting, so false positives are fine.
+	// We also deliberately do not match for file suffixes to also catch
+	// directory names. Note that the patterns are unanchored, so they match
+	// anywhere in the path.
+	// TODO(bep) consider this when completing the relevant PR rewrite on this.
+	cssFileRe   = regexp.MustCompile("(css|sass|scss)")
+	cssConfigRe = regexp.MustCompile(`(postcss|tailwind)\.config\.js`)
+	jsFileRe    = regexp.MustCompile("(js|ts|jsx|tsx)")
+)
+
+// processPartial partially rebuilds a site given the filesystem events.
+// It records in config.whatChanged whether the content source was changed.
+// TODO(bep) clean up/rewrite this method.
+func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {
+	events = s.filterFileEvents(events)
+	events = s.translateFileEvents(events)
+
+	changeIdentities := make(identity.Identities)
+
+	s.Log.Debugf("Rebuild for events %q", events)
+
+	h := s.h
+
+	// First we need to determine what changed
+
+	var (
+		sourceChanged       = []fsnotify.Event{}
+		sourceReallyChanged = []fsnotify.Event{}
+		contentFilesChanged []string
+
+		tmplChanged bool
+		tmplAdded   bool
+		dataChanged bool
+		i18nChanged bool
+
+		sourceFilesChanged = make(map[string]bool)
+
+		// prevent spamming the log on changes
+		logger = helpers.NewDistinctErrorLogger()
+	)
+
+	var cachePartitions []string
+	// Special case
+	// TODO(bep) I have an ongoing branch where I have redone the cache. Consider this there.
+	var (
+		evictCSSRe *regexp.Regexp
+		evictJSRe  *regexp.Regexp
+	)
+
+	for _, ev := range events {
+		if assetsFilename, _ := s.BaseFs.Assets.MakePathRelative(ev.Name); assetsFilename != "" {
+			cachePartitions = append(cachePartitions, resources.ResourceKeyPartitions(assetsFilename)...)
+			if evictCSSRe == nil {
+				if cssFileRe.MatchString(assetsFilename) || cssConfigRe.MatchString(assetsFilename) {
+					evictCSSRe = cssFileRe
+				}
+			}
+			if evictJSRe == nil && jsFileRe.MatchString(assetsFilename) {
+				evictJSRe = jsFileRe
+			}
+		}
+
+		id, found := s.eventToIdentity(ev)
+		if found {
+			changeIdentities[id] = id
+
+			switch id.Type {
+			case files.ComponentFolderContent:
+				logger.Println("Source changed", ev)
+				sourceChanged = append(sourceChanged, ev)
+			case files.ComponentFolderLayouts:
+				tmplChanged = true
+				if !s.Tmpl().HasTemplate(id.Path) {
+					tmplAdded = true
+				}
+				if tmplAdded {
+					logger.Println("Template added", ev)
+				} else {
+					logger.Println("Template changed", ev)
+				}
+
+			case files.ComponentFolderData:
+				logger.Println("Data changed", ev)
+				dataChanged = true
+			case files.ComponentFolderI18n:
+				logger.Println("i18n changed", ev)
+				i18nChanged = true
+
+			}
+		}
+	}
+
+	changed := &whatChanged{
+		source: len(sourceChanged) > 0,
+		files:  sourceFilesChanged,
+	}
+
+	config.whatChanged = changed
+
+	if err := init(config); err != nil {
+		return err
+	}
+
+	// These in memory resource caches will be rebuilt on demand.
+	for _, s := range s.h.Sites {
+		s.ResourceSpec.ResourceCache.DeletePartitions(cachePartitions...)
+		if evictCSSRe != nil {
+			s.ResourceSpec.ResourceCache.DeleteMatches(evictCSSRe)
+		}
+		if evictJSRe != nil {
+			s.ResourceSpec.ResourceCache.DeleteMatches(evictJSRe)
+		}
+	}
+
+	if tmplChanged || i18nChanged {
+		sites := s.h.Sites
+		first := sites[0]
+
+		s.h.init.Reset()
+
+		// TODO(bep) globals clean
+		if err := first.Deps.LoadResources(); err != nil {
+			return err
+		}
+
+		// Rebuild the per-language deps from the first site's deps.
+		for i := 1; i < len(sites); i++ {
+			site := sites[i]
+			var err error
+			depsCfg := deps.DepsCfg{
+				Language:      site.language,
+				MediaTypes:    site.mediaTypesConfig,
+				OutputFormats: site.outputFormatsConfig,
+			}
+			site.Deps, err = first.Deps.ForLanguage(depsCfg, func(d *deps.Deps) error {
+				d.Site = site.Info
+				return nil
+			})
+			if err != nil {
+				return err
+			}
+		}
+	}
+
+	if dataChanged {
+		s.h.init.data.Reset()
+	}
+
+	for _, ev := range sourceChanged {
+		removed := false
+
+		if ev.Op&fsnotify.Remove == fsnotify.Remove {
+			removed = true
+		}
+
+		// Some editors (Vim) sometimes issue only a Rename operation when writing an existing file
+		// Sometimes a rename operation means that file has been renamed other times it means
+		// it's been updated
+		if ev.Op&fsnotify.Rename == fsnotify.Rename {
+			// If the file is still on disk, it's only been updated, if it's not, it's been moved
+			if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil {
+				removed = true
+			}
+		}
+
+		if removed && files.IsContentFile(ev.Name) {
+			h.removePageByFilename(ev.Name)
+		}
+
+		sourceReallyChanged = append(sourceReallyChanged, ev)
+		sourceFilesChanged[ev.Name] = true
+	}
+
+	if config.ErrRecovery || tmplAdded || dataChanged {
+		h.resetPageState()
+	} else {
+		h.resetPageStateFromEvents(changeIdentities)
+	}
+
+	if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 {
+		var filenamesChanged []string
+		for _, e := range sourceReallyChanged {
+			filenamesChanged = append(filenamesChanged, e.Name)
+		}
+		if len(contentFilesChanged) > 0 {
+			filenamesChanged = append(filenamesChanged, contentFilesChanged...)
+		}
+
+		filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged)
+
+		if err := s.readAndProcessContent(*config, filenamesChanged...); err != nil {
+			return err
+		}
+
+	}
+
+	return nil
+}
+
+// process runs a full (non-partial) content pass: site initialization
+// followed by reading and processing all content.
+func (s *Site) process(config BuildCfg) (err error) {
+	if err = s.initialize(); err != nil {
+		err = fmt.Errorf("initialize: %w", err)
+		return
+	}
+	if err = s.readAndProcessContent(config); err != nil {
+		err = fmt.Errorf("readAndProcessContent: %w", err)
+		return
+	}
+	return err
+}
+
+// render writes this site's output for one output format. Aliases, the
+// sitemap, robots.txt and the 404 page are only rendered for the first
+// output format (ctx.outIdx == 0).
+func (s *Site) render(ctx *siteRenderContext) (err error) {
+	if err := page.Clear(); err != nil {
+		return err
+	}
+
+	if ctx.outIdx == 0 {
+		// Note that even if disableAliases is set, the aliases themselves are
+		// preserved on page. The motivation with this is to be able to generate
+		// 301 redirects in a .htaccess file and similar using a custom output format.
+		if !s.Cfg.GetBool("disableAliases") {
+			// Aliases must be rendered before pages.
+			// Some sites, Hugo docs included, have faulty alias definitions that point
+			// to itself or another real page. These will be overwritten in the next
+			// step.
+			if err = s.renderAliases(); err != nil {
+				return
+			}
+		}
+	}
+
+	if err = s.renderPages(ctx); err != nil {
+		return
+	}
+
+	if ctx.outIdx == 0 {
+		if err = s.renderSitemap(); err != nil {
+			return
+		}
+
+		if ctx.multihost {
+			if err = s.renderRobotsTXT(); err != nil {
+				return
+			}
+		}
+
+		if err = s.render404(); err != nil {
+			return
+		}
+	}
+
+	if !ctx.renderSingletonPages() {
+		return
+	}
+
+	if err = s.renderMainLanguageRedirect(); err != nil {
+		return
+	}
+
+	return
+}
+
+// Initialise is the exported entry point for site initialization.
+func (s *Site) Initialise() (err error) {
+	return s.initialize()
+}
+
+// initialize prepares the site for building; currently this only sets up
+// the SiteInfo.
+func (s *Site) initialize() (err error) {
+	return s.initializeSiteInfo()
+}
+
+// HomeAbsURL is a convenience method giving the absolute URL to the home page.
+// For multilingual sites, the language code is appended to the base URL.
+func (s *SiteInfo) HomeAbsURL() string {
+	base := ""
+	if s.IsMultiLingual() {
+		base = s.Language().Lang
+	}
+	return s.owner.AbsURL(base, false)
+}
+
+// SitemapAbsURL is a convenience method giving the absolute URL to the sitemap.
+func (s *SiteInfo) SitemapAbsURL() string {
+	p := s.HomeAbsURL()
+	if !strings.HasSuffix(p, "/") {
+		p += "/"
+	}
+	p += s.s.siteCfg.sitemap.Filename
+	return p
+}
+
+// initializeSiteInfo builds s.Info from the site configuration: language
+// settings, URL policies (ugly/canonical/relative URLs), permalinks, the
+// module dependency list and the RSS link.
+func (s *Site) initializeSiteInfo() error {
+	var (
+		lang      = s.language
+		languages langs.Languages
+	)
+
+	if s.h != nil && s.h.multilingual != nil {
+		languages = s.h.multilingual.Languages
+	}
+
+	permalinks := s.Cfg.GetStringMapString("permalinks")
+
+	defaultContentInSubDir := s.Cfg.GetBool("defaultContentLanguageInSubdir")
+	defaultContentLanguage := s.Cfg.GetString("defaultContentLanguage")
+
+	languagePrefix := ""
+	if s.multilingualEnabled() && (defaultContentInSubDir || lang.Lang != defaultContentLanguage) {
+		languagePrefix = "/" + lang.Lang
+	}
+
+	// uglyURLs decides per page whether to use file.html-style URLs.
+	// The config value may be a bool, a string (from the CLI) or a
+	// per-section map.
+	uglyURLs := func(p page.Page) bool {
+		return false
+	}
+
+	v := s.Cfg.Get("uglyURLs")
+	if v != nil {
+		switch vv := v.(type) {
+		case bool:
+			uglyURLs = func(p page.Page) bool {
+				return vv
+			}
+		case string:
+			// This is what we get from the CLI (--uglyURLs).
+			vvv := cast.ToBool(vv)
+			uglyURLs = func(p page.Page) bool {
+				return vvv
+			}
+		default:
+			m := maps.ToStringMapBool(v)
+			uglyURLs = func(p page.Page) bool {
+				return m[p.Section()]
+			}
+		}
+	}
+
+	// Assemble dependencies to be used in hugo.Deps.
+	// TODO(bep) another reminder: We need to clean up this Site vs HugoSites construct.
+	var deps []*hugo.Dependency
+	var depFromMod func(m modules.Module) *hugo.Dependency
+	depFromMod = func(m modules.Module) *hugo.Dependency {
+		dep := &hugo.Dependency{
+			Path:    m.Path(),
+			Version: m.Version(),
+			Time:    m.Time(),
+			Vendor:  m.Vendor(),
+		}
+
+		// These are pointers, but this all came from JSON so there's no recursive navigation,
+		// so just create new values.
+		if m.Replace() != nil {
+			dep.Replace = depFromMod(m.Replace())
+		}
+		if m.Owner() != nil {
+			dep.Owner = depFromMod(m.Owner())
+		}
+		return dep
+	}
+	for _, m := range s.Paths.AllModules {
+		deps = append(deps, depFromMod(m))
+	}
+
+	s.Info = &SiteInfo{
+		title:                          lang.GetString("title"),
+		Author:                         lang.GetStringMap("author"),
+		Social:                         lang.GetStringMapString("social"),
+		LanguageCode:                   lang.GetString("languageCode"),
+		Copyright:                      lang.GetString("copyright"),
+		language:                       lang,
+		LanguagePrefix:                 languagePrefix,
+		Languages:                      languages,
+		defaultContentLanguageInSubdir: defaultContentInSubDir,
+		sectionPagesMenu:               lang.GetString("sectionPagesMenu"),
+		BuildDrafts:                    s.Cfg.GetBool("buildDrafts"),
+		canonifyURLs:                   s.Cfg.GetBool("canonifyURLs"),
+		relativeURLs:                   s.Cfg.GetBool("relativeURLs"),
+		uglyURLs:                       uglyURLs,
+		permalinks:                     permalinks,
+		owner:                          s.h,
+		s:                              s,
+		hugoInfo:                       hugo.NewInfo(s.Cfg.GetString("environment"), deps),
+	}
+
+	rssOutputFormat, found := s.outputFormats[page.KindHome].GetByName(output.RSSFormat.Name)
+
+	if found {
+		s.Info.RSSLink = s.permalink(rssOutputFormat.BaseFilename())
+	}
+
+	return nil
+}
+
+// eventToIdentity maps a filesystem event to the PathIdentity of the
+// component filesystem (content, layouts, data, …) that owns the file.
+// The second return value is false if no filesystem claims the path.
+func (s *Site) eventToIdentity(e fsnotify.Event) (identity.PathIdentity, bool) {
+	for _, fs := range s.BaseFs.SourceFilesystems.FileSystems() {
+		if p := fs.Path(e.Name); p != "" {
+			return identity.NewPathIdentity(fs.Name, filepath.ToSlash(p)), true
+		}
+	}
+	return identity.PathIdentity{}, false
+}
+
+// readAndProcessContent collects and processes content files. With no
+// filenames given it processes everything; otherwise only the given files
+// (used for partial rebuilds).
+func (s *Site) readAndProcessContent(buildConfig BuildCfg, filenames ...string) error {
+	sourceSpec := source.NewSourceSpec(s.PathSpec, buildConfig.ContentInclusionFilter, s.BaseFs.Content.Fs)
+
+	proc := newPagesProcessor(s.h, sourceSpec)
+
+	c := newPagesCollector(sourceSpec, s.h.getContentMaps(), s.Log, s.h.ContentChanges, proc, filenames...)
+
+	if err := c.Collect(); err != nil {
+		return err
+	}
+
+	return nil
+}
+
+// getMenusFromConfig builds the menus declared in the site configuration
+// ("menus" in the language config). Malformed entries are logged and
+// skipped rather than aborting the build.
+func (s *Site) getMenusFromConfig() navigation.Menus {
+	ret := navigation.Menus{}
+
+	if menus := s.language.GetStringMap("menus"); menus != nil {
+		for name, menu := range menus {
+			m, err := cast.ToSliceE(menu)
+			if err != nil {
+				s.Log.Errorf("menus in site config contain errors\n")
+				s.Log.Errorln(err)
+			} else {
+				handleErr := func(err error) {
+					if err == nil {
+						return
+					}
+					s.Log.Errorf("menus in site config contain errors\n")
+					s.Log.Errorln(err)
+				}
+
+				for _, entry := range m {
+					s.Log.Debugf("found menu: %q, in site config\n", name)
+
+					menuEntry := navigation.MenuEntry{Menu: name}
+					ime, err := maps.ToStringMapE(entry)
+					handleErr(err)
+
+					err = menuEntry.MarshallMap(ime)
+					handleErr(err)
+
+					// TODO(bep) clean up all of this
+					menuEntry.ConfiguredURL = s.Info.createNodeMenuEntryURL(menuEntry.ConfiguredURL)
+
+					if ret[name] == nil {
+						ret[name] = navigation.Menu{}
+					}
+					ret[name] = ret[name].Add(&menuEntry)
+				}
+			}
+		}
+		// NOTE(review): this returns inside the if-block after processing the
+		// first (and only) iteration path; equivalent to falling through.
+		return ret
+	}
+	return ret
+}
+
+// createNodeMenuEntryURL sanitizes a configured menu URL so it matches the
+// node URLs. Only root-relative URLs (leading "/") are rewritten; anything
+// else is returned unchanged.
+func (s *SiteInfo) createNodeMenuEntryURL(in string) string {
+	if !strings.HasPrefix(in, "/") {
+		return in
+	}
+	// make it match the nodes
+	menuEntryURL := in
+	menuEntryURL = helpers.SanitizeURLKeepTrailingSlash(s.s.PathSpec.URLize(menuEntryURL))
+	if !s.canonifyURLs {
+		menuEntryURL = paths.AddContextRoot(s.s.PathSpec.BaseURL.String(), menuEntryURL)
+	}
+	return menuEntryURL
+}
+
+// assembleMenus builds s.menus from three sources — the site config, the
+// optional sectionPagesMenu, and per-page front matter — then links child
+// entries to their parents and assembles the top level of each menu tree.
+func (s *Site) assembleMenus() {
+	s.menus = make(navigation.Menus)
+
+	type twoD struct {
+		MenuName, EntryName string
+	}
+	flat := map[twoD]*navigation.MenuEntry{}
+	children := map[twoD]navigation.Menu{}
+
+	// add menu entries from config to flat hash
+	menuConfig := s.getMenusFromConfig()
+	for name, menu := range menuConfig {
+		for _, me := range menu {
+			if types.IsNil(me.Page) && me.PageRef != "" {
+				// Try to resolve the page.
+				me.Page, _ = s.getPageNew(nil, me.PageRef)
+			}
+			flat[twoD{name, me.KeyName()}] = me
+		}
+	}
+
+	sectionPagesMenu := s.Info.sectionPagesMenu
+
+	if sectionPagesMenu != "" {
+		s.pageMap.sections.Walk(func(s string, v any) bool {
+			p := v.(*contentNode).p
+			if p.IsHome() {
+				return false
+			}
+			// From Hugo 0.22 we have nested sections, but until we get a
+			// feel of how that would work in this setting, let us keep
+			// this menu for the top level only.
+			id := p.Section()
+			if _, ok := flat[twoD{sectionPagesMenu, id}]; ok {
+				return false
+			}
+
+			me := navigation.MenuEntry{
+				Identifier: id,
+				Name:       p.LinkTitle(),
+				Weight:     p.Weight(),
+				Page:       p,
+			}
+			flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
+
+			return false
+		})
+	}
+
+	// Add menu entries provided by pages
+	s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool {
+		p := n.p
+
+		for name, me := range p.pageMenus.menus() {
+			if _, ok := flat[twoD{name, me.KeyName()}]; ok {
+				err := p.wrapError(fmt.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))
+				s.Log.Warnln(err)
+				continue
+			}
+			flat[twoD{name, me.KeyName()}] = me
+		}
+
+		return false
+	})
+
+	// Create Children Menus First
+	for _, e := range flat {
+		if e.Parent != "" {
+			children[twoD{e.Menu, e.Parent}] = children[twoD{e.Menu, e.Parent}].Add(e)
+		}
+	}
+
+	// Placing Children in Parents (in flat)
+	for p, childmenu := range children {
+		_, ok := flat[twoD{p.MenuName, p.EntryName}]
+		if !ok {
+			// if parent does not exist, create one without a URL
+			flat[twoD{p.MenuName, p.EntryName}] = &navigation.MenuEntry{Name: p.EntryName}
+		}
+		flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
+	}
+
+	// Assembling Top Level of Tree
+	for menu, e := range flat {
+		if e.Parent == "" {
+			_, ok := s.menus[menu.MenuName]
+			if !ok {
+				s.menus[menu.MenuName] = navigation.Menu{}
+			}
+			s.menus[menu.MenuName] = s.menus[menu.MenuName].Add(e)
+		}
+	}
+}
+
+// getLanguageTargetPathLang returns any language code to prefix the target
+// file path with. In multihost mode every site gets its own root, so the
+// language is always used.
+func (s *Site) getLanguageTargetPathLang(alwaysInSubDir bool) string {
+	if s.h.IsMultihost() {
+		return s.Language().Lang
+	}
+
+	return s.getLanguagePermalinkLang(alwaysInSubDir)
+}
+
+// getLanguagePermalinkLang returns any language code to prefix the relative
+// permalink with. It is empty for single-language and multihost sites, and
+// for the default language unless defaultContentLanguageInSubdir is set.
+func (s *Site) getLanguagePermalinkLang(alwaysInSubDir bool) string {
+	if !s.Info.IsMultiLingual() || s.h.IsMultihost() {
+		return ""
+	}
+
+	if alwaysInSubDir {
+		return s.Language().Lang
+	}
+
+	isDefault := s.Language().Lang == s.multilingual().DefaultLang.Lang
+
+	if !isDefault || s.Info.defaultContentLanguageInSubdir {
+		return s.Language().Lang
+	}
+
+	return ""
+}
+
+// getTaxonomyKey sanitizes a taxonomy key into a path segment, lower-casing
+// it unless disablePathToLower is configured.
+func (s *Site) getTaxonomyKey(key string) string {
+	if s.PathSpec.DisablePathToLower {
+		return s.PathSpec.MakePath(key)
+	}
+	return strings.ToLower(s.PathSpec.MakePath(key))
+}
+
+// Prepare site for a new full build. When the content source changed, the
+// page collections and per-page relationship state are reset as well;
+// otherwise only the per-page scratch data is renewed.
+func (s *Site) resetBuildState(sourceChanged bool) {
+	s.relatedDocsHandler = s.relatedDocsHandler.Clone()
+	s.init.Reset()
+
+	if sourceChanged {
+		s.pageMap.contentMap.pageReverseIndex.Reset()
+		s.PageCollections = newPageCollections(s.pageMap)
+		s.pageMap.withEveryBundlePage(func(p *pageState) bool {
+			p.pagePages = &pagePages{}
+			if p.bucket != nil {
+				p.bucket.pagesMapBucketPages = &pagesMapBucketPages{}
+			}
+			p.parent = nil
+			p.Scratcher = maps.NewScratcher()
+			return false
+		})
+	} else {
+		s.pageMap.withEveryBundlePage(func(p *pageState) bool {
+			p.Scratcher = maps.NewScratcher()
+			return false
+		})
+	}
+}
+
+// errorCollator drains results, then sends one representative error (or
+// nil) on errs and closes it. The remaining errors are logged.
+func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
+	var errors []error
+	for e := range results {
+		errors = append(errors, e)
+	}
+
+	errs <- s.h.pickOneAndLogTheRest(errors)
+
+	close(errs)
+}
+
+// GetPage looks up a page of a given type for the given ref.
+// In Hugo <= 0.44 you had to add Page Kind (section, home) etc. as the first
+// argument and then either a unix styled path (with or without a leading slash)
+// or path elements separated.
+// When we now remove the Kind from this API, we need to make the transition as painless
+// as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }},
+// i.e. 2 arguments, so we test for that.
+func (s *SiteInfo) GetPage(ref ...string) (page.Page, error) {
+	p, err := s.s.getPageOldVersion(ref...)
+
+	if p == nil {
+		// The nil struct has meaning in some situations, mostly to avoid breaking
+		// existing sites doing $nilpage.IsDescendant($p), which will always return
+		// false.
+		p = page.NilPage
+	}
+
+	return p, err
+}
+
+// GetPageWithTemplateInfo is like GetPage, but also registers the looked-up
+// page with the calling template's identity manager so changes to the page
+// can trigger re-renders in server mode.
+func (s *SiteInfo) GetPageWithTemplateInfo(info tpl.Info, ref ...string) (page.Page, error) {
+	p, err := s.GetPage(ref...)
+	if p != nil {
+		// Track pages referenced by templates/shortcodes
+		// when in server mode.
+		if im, ok := info.(identity.Manager); ok {
+			im.Add(p)
+		}
+	}
+	return p, err
+}
+
+// permalink makes link absolute against the site's base URL.
+func (s *Site) permalink(link string) string {
+	return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.BaseURL.String())
+}
+
+// absURLPath returns the prefix used to absolutize URLs in published output:
+// a dotted relative path when relativeURLs is enabled, otherwise the base
+// URL with a trailing slash.
+func (s *Site) absURLPath(targetPath string) string {
+	var path string
+	if s.Info.relativeURLs {
+		path = helpers.GetDottedRelativePath(targetPath)
+	} else {
+		url := s.PathSpec.BaseURL.String()
+		if !strings.HasSuffix(url, "/") {
+			url += "/"
+		}
+		path = url
+	}
+
+	return path
+}
+
+// lookupLayouts returns the first template found among the given layout
+// names, or nil if none exists.
+func (s *Site) lookupLayouts(layouts ...string) tpl.Template {
+	for _, l := range layouts {
+		if templ, found := s.Tmpl().Lookup(l); found {
+			return templ
+		}
+	}
+
+	return nil
+}
+
+// renderAndWriteXML renders d with templ into a pooled buffer and publishes
+// the result to targetPath, bumping statCounter on success.
+func (s *Site) renderAndWriteXML(statCounter *uint64, name string, targetPath string, d any, templ tpl.Template) error {
+	s.Log.Debugf("Render XML for %q to %q", name, targetPath)
+	renderBuffer := bp.GetBuffer()
+	defer bp.PutBuffer(renderBuffer)
+
+	if err := s.renderForTemplate(name, "", d, renderBuffer, templ); err != nil {
+		return err
+	}
+
+	pd := publisher.Descriptor{
+		Src:         renderBuffer,
+		TargetPath:  targetPath,
+		StatCounter: statCounter,
+		// For the minification part of XML,
+		// we currently only use the MIME type.
+		OutputFormat: output.RSSFormat,
+		AbsURLPath:   s.absURLPath(targetPath),
+	}
+
+	return s.publisher.Publish(pd)
+}
+
+// renderAndWritePage renders a page into a pooled buffer and publishes it.
+// RSS output always gets canonified URLs; HTML output additionally gets the
+// live-reload script (in watch mode) and, for the home page only, the Hugo
+// generator tag. An empty render result is silently skipped.
+func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath string, p *pageState, templ tpl.Template) error {
+	s.Log.Debugf("Render %s to %q", name, targetPath)
+	s.h.IncrPageRender()
+	renderBuffer := bp.GetBuffer()
+	defer bp.PutBuffer(renderBuffer)
+
+	of := p.outputFormat()
+
+	if err := s.renderForTemplate(p.Kind(), of.Name, p, renderBuffer, templ); err != nil {
+		return err
+	}
+
+	if renderBuffer.Len() == 0 {
+		return nil
+	}
+
+	isHTML := of.IsHTML
+	isRSS := of.Name == "RSS"
+
+	pd := publisher.Descriptor{
+		Src:          renderBuffer,
+		TargetPath:   targetPath,
+		StatCounter:  statCounter,
+		OutputFormat: p.outputFormat(),
+	}
+
+	if isRSS {
+		// Always canonify URLs in RSS
+		pd.AbsURLPath = s.absURLPath(targetPath)
+	} else if isHTML {
+		if s.Info.relativeURLs || s.Info.canonifyURLs {
+			pd.AbsURLPath = s.absURLPath(targetPath)
+		}
+
+		if s.running() && s.Cfg.GetBool("watch") && !s.Cfg.GetBool("disableLiveReload") {
+			pd.LiveReloadBaseURL = s.PathSpec.BaseURL.URL()
+			if s.Cfg.GetInt("liveReloadPort") != -1 {
+				pd.LiveReloadBaseURL.Host = fmt.Sprintf("%s:%d", pd.LiveReloadBaseURL.Hostname(), s.Cfg.GetInt("liveReloadPort"))
+			}
+		}
+
+		// For performance reasons we only inject the Hugo generator tag on the home page.
+		if p.IsHome() {
+			pd.AddHugoGeneratorTag = !s.Cfg.GetBool("disableHugoGeneratorInject")
+		}
+
+	}
+
+	return s.publisher.Publish(pd)
+}
+
+var infoOnMissingLayout = map[string]bool{
+	// The 404 layout is very much optional in Hugo, but we do look for it.
+	"404": true,
+}
+
+// hookRendererTemplate is the canonical implementation of all hooks.ITEMRenderer,
+// where ITEM is the thing being hooked. Each Render* method simply executes
+// the configured template with the hook context.
+type hookRendererTemplate struct {
+	templateHandler tpl.TemplateHandler
+	identity.SearchProvider
+	templ           tpl.Template
+	resolvePosition func(ctx any) text.Position
+}
+
+// RenderLink renders a link hook via the configured template.
+func (hr hookRendererTemplate) RenderLink(w io.Writer, ctx hooks.LinkContext) error {
+	return hr.templateHandler.Execute(hr.templ, w, ctx)
+}
+
+// RenderHeading renders a heading hook via the configured template.
+func (hr hookRendererTemplate) RenderHeading(w io.Writer, ctx hooks.HeadingContext) error {
+	return hr.templateHandler.Execute(hr.templ, w, ctx)
+}
+
+// RenderCodeblock renders a code-block hook via the configured template.
+func (hr hookRendererTemplate) RenderCodeblock(w hugio.FlexiWriter, ctx hooks.CodeblockContext) error {
+	return hr.templateHandler.Execute(hr.templ, w, ctx)
+}
+
+// ResolvePosition delegates to the configured position resolver.
+func (hr hookRendererTemplate) ResolvePosition(ctx any) text.Position {
+	return hr.resolvePosition(ctx)
+}
+
+// IsDefaultCodeBlockRenderer reports that this is a user-provided renderer.
+func (hr hookRendererTemplate) IsDefaultCodeBlockRenderer() bool {
+	return false
+}
+
+// renderForTemplate executes templ with d, writing to w. A nil template is
+// not an error: the missing layout is logged and rendering is skipped.
+func (s *Site) renderForTemplate(name, outputFormat string, d any, w io.Writer, templ tpl.Template) (err error) {
+	if templ == nil {
+		s.logMissingLayout(name, "", "", outputFormat)
+		return nil
+	}
+
+	if err = s.Tmpl().Execute(templ, w, d); err != nil {
+		return fmt.Errorf("render of %q failed: %w", name, err)
+	}
+	return
+}
+
+// lookupTemplate returns the first template found among the given layout
+// names and whether one was found.
+func (s *Site) lookupTemplate(layouts ...string) (tpl.Template, bool) {
+	for _, l := range layouts {
+		if templ, found := s.Tmpl().Lookup(l); found {
+			return templ, true
+		}
+	}
+
+	return nil, false
+}
+
+// publish writes r to the cleaned path on fs and bumps statCounter.
+func (s *Site) publish(statCounter *uint64, path string, r io.Reader, fs afero.Fs) (err error) {
+	s.PathSpec.ProcessingStats.Incr(statCounter)
+
+	return helpers.WriteToDisk(filepath.Clean(path), r, fs)
+}
+
+// kindFromFileInfoOrSections determines the page kind for a content file:
+// "_index" files are branch nodes (home or section/taxonomy/term), anything
+// else is a regular page.
+func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) string {
+	if fi.TranslationBaseName() == "_index" {
+		if fi.Dir() == "" {
+			return page.KindHome
+		}
+
+		return s.kindFromSections(sections)
+
+	}
+
+	return page.KindPage
+}
+
+// kindFromSections maps a section path to a page kind; an empty path is the
+// home page.
+func (s *Site) kindFromSections(sections []string) string {
+	if len(sections) == 0 {
+		return page.KindHome
+	}
+
+	return s.kindFromSectionPath(path.Join(sections...))
+}
+
+// kindFromSectionPath classifies a section path against the configured
+// taxonomies: an exact match is a taxonomy node, a prefix match is a term,
+// everything else is a plain section.
+func (s *Site) kindFromSectionPath(sectionPath string) string {
+	for _, plural := range s.siteCfg.taxonomiesConfig {
+		if plural == sectionPath {
+			return page.KindTaxonomy
+		}
+
+		if strings.HasPrefix(sectionPath, plural) {
+			return page.KindTerm
+		}
+
+	}
+
+	return page.KindSection
+}
+
+// newPage creates a synthetic page (home, section, taxonomy node, …) of the
+// given kind. It panics on failure, as these pages are built from internal
+// state and a failure is a programmer error.
+// NOTE(review): "parentbBucket" looks like a misspelling of "parentBucket";
+// left as-is here since this is an internal parameter name.
+func (s *Site) newPage(
+	n *contentNode,
+	parentbBucket *pagesMapBucket,
+	kind, title string,
+	sections ...string) *pageState {
+	m := map[string]any{}
+	if title != "" {
+		m["title"] = title
+	}
+
+	p, err := newPageFromMeta(
+		n,
+		parentbBucket,
+		m,
+		&pageMeta{
+			s:        s,
+			kind:     kind,
+			sections: sections,
+		})
+	if err != nil {
+		panic(err)
+	}
+
+	return p
+}
+
+// shouldBuild reports whether p should be included in the build given the
+// site's draft/future/expired settings.
+func (s *Site) shouldBuild(p page.Page) bool {
+	return shouldBuild(s.BuildFuture, s.BuildExpired,
+		s.BuildDrafts, p.Draft(), p.PublishDate(), p.ExpiryDate())
+}
+
+// shouldBuild applies the build policy: drafts only when buildDrafts,
+// future-dated pages only when buildFuture, and expired pages only when
+// buildExpired. Zero dates impose no constraint.
+func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
+	publishDate time.Time, expiryDate time.Time) bool {
+	// Equivalent to: Draft && !buildDrafts.
+	if !(buildDrafts || !Draft) {
+		return false
+	}
+	hnow := htime.Now()
+	if !buildFuture && !publishDate.IsZero() && publishDate.After(hnow) {
+		return false
+	}
+	if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(hnow) {
+		return false
+	}
+	return true
+}
diff --git a/hugolib/siteJSONEncode_test.go b/hugolib/siteJSONEncode_test.go
new file mode 100644
index 000000000..94bac1873
--- /dev/null
+++ b/hugolib/siteJSONEncode_test.go
@@ -0,0 +1,44 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "testing"
+)
+
+// Issue #1123
+// Testing prevention of cyclic refs in JSON encoding
+// May be smart to run with: -timeout 4000ms
+func TestEncodePage(t *testing.T) {
+ t.Parallel()
+
+ templ := `Page: |{{ index .Site.RegularPages 0 | jsonify }}|
+Site: {{ site | jsonify }}
+`
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile().WithTemplatesAdded("index.html", templ)
+ b.WithContent("page.md", `---
+title: "Page"
+date: 2019-02-28
+---
+
+Content.
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `"Date":"2019-02-28T00:00:00Z"`)
+}
diff --git a/hugolib/site_benchmark_new_test.go b/hugolib/site_benchmark_new_test.go
new file mode 100644
index 000000000..ea3f223dc
--- /dev/null
+++ b/hugolib/site_benchmark_new_test.go
@@ -0,0 +1,558 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "math/rand"
+ "path"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+)
+
+type siteBenchmarkTestcase struct {
+ name string
+ create func(t testing.TB) *sitesBuilder
+ check func(s *sitesBuilder)
+}
+
+func getBenchmarkSiteDeepContent(b testing.TB) *sitesBuilder {
+ pageContent := func(size int) string {
+ return getBenchmarkTestDataPageContentForMarkdown(size, false, "", benchmarkMarkdownSnippets)
+ }
+
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", `
+baseURL = "https://example.com"
+
+[languages]
+[languages.en]
+weight=1
+contentDir="content/en"
+[languages.fr]
+weight=2
+contentDir="content/fr"
+[languages.no]
+weight=3
+contentDir="content/no"
+[languages.sv]
+weight=4
+contentDir="content/sv"
+
+`)
+
+ createContent := func(dir, name string) {
+ sb.WithContent(filepath.Join("content", dir, name), pageContent(1))
+ }
+
+ createBundledFiles := func(dir string) {
+ sb.WithContent(filepath.Join("content", dir, "data.json"), `{ "hello": "world" }`)
+ for i := 1; i <= 3; i++ {
+ sb.WithContent(filepath.Join("content", dir, fmt.Sprintf("page%d.md", i)), pageContent(1))
+ }
+ }
+
+ for _, lang := range []string{"en", "fr", "no", "sv"} {
+ for level := 1; level <= 5; level++ {
+ sectionDir := path.Join(lang, strings.Repeat("section/", level))
+ createContent(sectionDir, "_index.md")
+ createBundledFiles(sectionDir)
+ for i := 1; i <= 3; i++ {
+ leafBundleDir := path.Join(sectionDir, fmt.Sprintf("bundle%d", i))
+ createContent(leafBundleDir, "index.md")
+ createBundledFiles(path.Join(leafBundleDir, "assets1"))
+ createBundledFiles(path.Join(leafBundleDir, "assets1", "assets2"))
+ }
+ }
+ }
+
+ return sb
+}
+
+func getBenchmarkTestDataPageContentForMarkdown(size int, toml bool, category, markdown string) string {
+ base := `---
+title: "My Page"
+%s
+---
+
+My page content.
+`
+ if toml {
+ base = `+++
+title="My Page"
+%s
++++
+
+My page content.
+`
+
+ }
+
+ var categoryKey string
+ if category != "" {
+ categoryKey = fmt.Sprintf("categories: [%s]", category)
+ if toml {
+ categoryKey = fmt.Sprintf("categories=[%s]", category)
+ }
+ }
+ base = fmt.Sprintf(base, categoryKey)
+
+ return base + strings.Repeat(markdown, size)
+}
+
+const benchmarkMarkdownSnippets = `
+
+## Links
+
+
+This is [an example](http://example.com/ "Title") inline link.
+
+[This link](http://example.net/) has no title attribute.
+
+This is [Relative](/all-is-relative).
+
+See my [About](/about/) page for details.
+`
+
+func getBenchmarkSiteNewTestCases() []siteBenchmarkTestcase {
+ pageContentWithCategory := func(size int, category string) string {
+ return getBenchmarkTestDataPageContentForMarkdown(size, false, category, benchmarkMarkdownSnippets)
+ }
+
+ pageContent := func(size int) string {
+ return getBenchmarkTestDataPageContentForMarkdown(size, false, "", benchmarkMarkdownSnippets)
+ }
+
+ config := `
+baseURL = "https://example.com"
+`
+
+ benchmarks := []siteBenchmarkTestcase{
+ {
+ "Bundle with image", func(b testing.TB) *sitesBuilder {
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", config)
+ sb.WithContent("content/blog/mybundle/index.md", pageContent(1))
+ sb.WithSunset("content/blog/mybundle/sunset1.jpg")
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.AssertFileContent("public/blog/mybundle/index.html", "/blog/mybundle/sunset1.jpg")
+ s.CheckExists("public/blog/mybundle/sunset1.jpg")
+ },
+ },
+ {
+ "Bundle with JSON file", func(b testing.TB) *sitesBuilder {
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", config)
+ sb.WithContent("content/blog/mybundle/index.md", pageContent(1))
+ sb.WithContent("content/blog/mybundle/mydata.json", `{ "hello": "world" }`)
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.AssertFileContent("public/blog/mybundle/index.html", "Resources: application/json: /blog/mybundle/mydata.json")
+ s.CheckExists("public/blog/mybundle/mydata.json")
+ },
+ },
+ {
+ "Tags and categories", func(b testing.TB) *sitesBuilder {
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", `
+title = "Tags and Cats"
+baseURL = "https://example.com"
+
+`)
+
+ const pageTemplate = `
+---
+title: "Some tags and cats"
+categories: ["caGR", "cbGR"]
+tags: ["taGR", "tbGR"]
+---
+
+Some content.
+
+`
+ for i := 1; i <= 100; i++ {
+ content := strings.Replace(pageTemplate, "GR", strconv.Itoa(i/3), -1)
+ sb.WithContent(fmt.Sprintf("content/page%d.md", i), content)
+ }
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.AssertFileContent("public/page3/index.html", "/page3/|Permalink: https://example.com/page3/")
+ s.AssertFileContent("public/tags/ta3/index.html", "|ta3|")
+ },
+ },
+ {
+ "Canonify URLs", func(b testing.TB) *sitesBuilder {
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", `
+title = "Canon"
+baseURL = "https://example.com"
+canonifyURLs = true
+
+`)
+ for i := 1; i <= 100; i++ {
+ sb.WithContent(fmt.Sprintf("content/page%d.md", i), pageContent(i))
+ }
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.AssertFileContent("public/page8/index.html", "https://example.com/about/")
+ },
+ },
+
+ {
+ "Deep content tree", func(b testing.TB) *sitesBuilder {
+ return getBenchmarkSiteDeepContent(b)
+ },
+ func(s *sitesBuilder) {
+ s.CheckExists("public/blog/mybundle/index.html")
+ s.Assert(len(s.H.Sites), qt.Equals, 4)
+ s.Assert(len(s.H.Sites[0].RegularPages()), qt.Equals, len(s.H.Sites[1].RegularPages()))
+ s.Assert(len(s.H.Sites[0].RegularPages()), qt.Equals, 30)
+ },
+ },
+ {
+ "TOML front matter", func(b testing.TB) *sitesBuilder {
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", config)
+ for i := 1; i <= 200; i++ {
+ content := getBenchmarkTestDataPageContentForMarkdown(1, true, "\"a\", \"b\", \"c\"", benchmarkMarkdownSnippets)
+ sb.WithContent(fmt.Sprintf("content/p%d.md", i), content)
+ }
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+
+ },
+ },
+ {
+ "Many HTML templates", func(b testing.TB) *sitesBuilder {
+ pageTemplateTemplate := `
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <title>{{ if not .IsPage }}{{ .Title }}{{ else }}{{ printf "Site: %s" site.Title }}{{ end }}</title>
+ <style>
+ body {
+ margin: 3rem;
+ }
+ </style>
+ </head>
+ <body>
+ <div class="page">{{ .Content }}</div>
+ <ul>
+ {{ with .Pages }}
+ {{ range . }}
+ <li><a href="{{ .RelPermalink }}">{{ .LinkTitle }} {{ if not .IsNode }} (Page){{ end }}</a></li>
+ {{ end }}
+ {{ end }}
+ </ul>
+ </body>
+</html>
+`
+
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", `
+baseURL = "https://example.com"
+
+[languages]
+[languages.en]
+weight=1
+contentDir="content/en"
+[languages.fr]
+weight=2
+contentDir="content/fr"
+[languages.no]
+weight=3
+contentDir="content/no"
+[languages.sv]
+weight=4
+contentDir="content/sv"
+
+`)
+
+ createContent := func(dir, name string) {
+ sb.WithContent(filepath.Join("content", dir, name), pageContent(1))
+ }
+
+ for _, lang := range []string{"en", "fr", "no", "sv"} {
+ sb.WithTemplatesAdded(fmt.Sprintf("_default/single.%s.html", lang), pageTemplateTemplate)
+ sb.WithTemplatesAdded(fmt.Sprintf("_default/list.%s.html", lang), pageTemplateTemplate)
+
+ for level := 1; level <= 5; level++ {
+ sectionDir := path.Join(lang, strings.Repeat("section/", level))
+ createContent(sectionDir, "_index.md")
+ for i := 1; i <= 3; i++ {
+ leafBundleDir := path.Join(sectionDir, fmt.Sprintf("bundle%d", i))
+ createContent(leafBundleDir, "index.md")
+ }
+ }
+ }
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.CheckExists("public/blog/mybundle/index.html")
+ s.Assert(len(s.H.Sites), qt.Equals, 4)
+ s.Assert(len(s.H.Sites[0].RegularPages()), qt.Equals, len(s.H.Sites[1].RegularPages()))
+ s.Assert(len(s.H.Sites[0].RegularPages()), qt.Equals, 15)
+ },
+ },
+ {
+ "Page collections", func(b testing.TB) *sitesBuilder {
+ pageTemplateTemplate := `
+{{ if .IsNode }}
+{{ len .Paginator.Pages }}
+{{ end }}
+{{ len .Sections }}
+{{ len .Pages }}
+{{ len .RegularPages }}
+{{ len .Resources }}
+{{ len site.RegularPages }}
+{{ len site.Pages }}
+{{ with .NextInSection }}Next in section: {{ .RelPermalink }}{{ end }}
+{{ with .PrevInSection }}Prev in section: {{ .RelPermalink }}{{ end }}
+{{ with .Next }}Next: {{ .RelPermalink }}{{ end }}
+{{ with .Prev }}Prev: {{ .RelPermalink }}{{ end }}
+`
+
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", `
+baseURL = "https://example.com"
+
+[languages]
+[languages.en]
+weight=1
+contentDir="content/en"
+[languages.fr]
+weight=2
+contentDir="content/fr"
+[languages.no]
+weight=3
+contentDir="content/no"
+[languages.sv]
+weight=4
+contentDir="content/sv"
+
+`)
+
+ sb.WithTemplates("index.html", pageTemplateTemplate)
+ sb.WithTemplates("_default/single.html", pageTemplateTemplate)
+ sb.WithTemplates("_default/list.html", pageTemplateTemplate)
+
+ r := rand.New(rand.NewSource(99))
+
+ createContent := func(dir, name string) {
+ var content string
+ if strings.Contains(name, "_index") {
+ content = pageContent(1)
+ } else {
+ content = pageContentWithCategory(1, fmt.Sprintf("category%d", r.Intn(5)+1))
+ }
+
+ sb.WithContent(filepath.Join("content", dir, name), content)
+ }
+
+ createBundledFiles := func(dir string) {
+ sb.WithContent(filepath.Join("content", dir, "data.json"), `{ "hello": "world" }`)
+ for i := 1; i <= 3; i++ {
+ sb.WithContent(filepath.Join("content", dir, fmt.Sprintf("page%d.md", i)), pageContent(1))
+ }
+ }
+
+ for _, lang := range []string{"en", "fr", "no", "sv"} {
+ for level := 1; level <= r.Intn(5)+1; level++ {
+ sectionDir := path.Join(lang, strings.Repeat("section/", level))
+ createContent(sectionDir, "_index.md")
+ createBundledFiles(sectionDir)
+ for i := 1; i <= r.Intn(20)+1; i++ {
+ leafBundleDir := path.Join(sectionDir, fmt.Sprintf("bundle%d", i))
+ createContent(leafBundleDir, "index.md")
+ createBundledFiles(path.Join(leafBundleDir, "assets1"))
+ createBundledFiles(path.Join(leafBundleDir, "assets1", "assets2"))
+ }
+ }
+ }
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.CheckExists("public/blog/mybundle/index.html")
+ s.Assert(len(s.H.Sites), qt.Equals, 4)
+ s.Assert(len(s.H.Sites[0].RegularPages()), qt.Equals, 26)
+ },
+ },
+ {
+ "List terms", func(b testing.TB) *sitesBuilder {
+ pageTemplateTemplate := `
+<ul>
+ {{ range (.GetTerms "categories") }}
+ <li><a href="{{ .Permalink }}">{{ .LinkTitle }}</a></li>
+ {{ end }}
+</ul>
+`
+
+ sb := newTestSitesBuilder(b).WithConfigFile("toml", `
+baseURL = "https://example.com"
+`)
+
+ sb.WithTemplates("_default/single.html", pageTemplateTemplate)
+
+ r := rand.New(rand.NewSource(99))
+
+ createContent := func(dir, name string) {
+ var content string
+ if strings.Contains(name, "_index") {
+ content = pageContent(1)
+ } else {
+ content = pageContentWithCategory(1, fmt.Sprintf("category%d", r.Intn(5)+1))
+ sb.WithContent(filepath.Join("content", dir, name), content)
+ }
+ }
+
+ for level := 1; level <= r.Intn(5)+1; level++ {
+ sectionDir := path.Join(strings.Repeat("section/", level))
+ createContent(sectionDir, "_index.md")
+ for i := 1; i <= r.Intn(33); i++ {
+ leafBundleDir := path.Join(sectionDir, fmt.Sprintf("bundle%d", i))
+ createContent(leafBundleDir, "index.md")
+ }
+ }
+
+ return sb
+ },
+ func(s *sitesBuilder) {
+ s.AssertFileContent("public/section/bundle8/index.html", ` <li><a href="https://example.com/categories/category1/">category1</a></li>`)
+ s.Assert(len(s.H.Sites), qt.Equals, 1)
+ s.Assert(len(s.H.Sites[0].RegularPages()), qt.Equals, 35)
+ },
+ },
+ }
+
+ return benchmarks
+}
+
+// TestBenchmarkSiteNew runs the benchmark variants below as regular tests.
+// Mostly useful when adding new benchmark variants.
+func TestBenchmarkSiteNew(t *testing.T) {
+	benchmarks := getBenchmarkSiteNewTestCases()
+	for _, bm := range benchmarks {
+		t.Run(bm.name, func(t *testing.T) {
+			// bm.create takes a testing.TB, so it serves both the test
+			// and the benchmark entry points.
+			s := bm.create(t)
+
+			if err := s.BuildE(BuildCfg{}); err != nil {
+				t.Fatal(err)
+			}
+			bm.check(s)
+		})
+	}
+}
+
+// TestBenchmarkSiteDeepContentEdit builds the deep content site in "running"
+// (server/watch) mode, edits a single page, rebuilds, and verifies both the
+// number of content renders triggered and that the edit reached the output.
+func TestBenchmarkSiteDeepContentEdit(t *testing.T) {
+	b := getBenchmarkSiteDeepContent(t).Running()
+	b.Build(BuildCfg{})
+
+	// Pick an arbitrary regular page from the first site to edit.
+	p := b.H.Sites[0].RegularPages()[12]
+
+	b.EditFiles(p.File().Filename(), fmt.Sprintf(`---
+title: %s
+---
+
+Edited!!`, p.Title()))
+
+	counters := &testCounters{}
+
+	b.Build(BuildCfg{testCounters: counters})
+
+	// We currently rebuild all the language versions of the same content file.
+	// We could probably optimize that case, but it's not trivial.
+	b.Assert(int(counters.contentRenderCounter), qt.Equals, 4)
+	b.AssertFileContent("public"+p.RelPermalink()+"index.html", "Edited!!")
+}
+
+// BenchmarkSiteNew benchmarks every site variant twice: a "Regular_" full
+// build, and an "Edit_" variant where only the rebuild after a single random
+// page edit is timed (the initial build happens with the timer stopped).
+func BenchmarkSiteNew(b *testing.B) {
+	rnd := rand.New(rand.NewSource(32))
+	benchmarks := getBenchmarkSiteNewTestCases()
+	for _, edit := range []bool{true, false} {
+		for _, bm := range benchmarks {
+			name := bm.name
+			if edit {
+				name = "Edit_" + name
+			} else {
+				name = "Regular_" + name
+			}
+			b.Run(name, func(b *testing.B) {
+				// Create all sites up front so site construction is not timed.
+				sites := make([]*sitesBuilder, b.N)
+				for i := 0; i < b.N; i++ {
+					sites[i] = bm.create(b)
+					if edit {
+						sites[i].Running()
+					}
+				}
+
+				b.ResetTimer()
+				for i := 0; i < b.N; i++ {
+					if edit {
+						// Edit benchmarks only time the rebuild below.
+						b.StopTimer()
+					}
+					s := sites[i]
+					err := s.BuildE(BuildCfg{})
+					if err != nil {
+						b.Fatal(err)
+					}
+					bm.check(s)
+
+					if edit {
+						// Was a redundant nested `if edit` here; the outer
+						// check already guarantees it.
+						b.StartTimer()
+
+						// Edit a random page in a random language.
+						pages := s.H.Sites[rnd.Intn(len(s.H.Sites))].Pages()
+						var p page.Page
+						count := 0
+						for {
+							count++
+							if count > 100 {
+								panic("infinite loop")
+							}
+							p = pages[rnd.Intn(len(pages))]
+							if !p.File().IsZero() {
+								break
+							}
+						}
+
+						s.EditFiles(p.File().Filename(), fmt.Sprintf(`---
+title: %s
+---
+
+Edited!!`, p.Title()))
+
+						err := s.BuildE(BuildCfg{})
+						if err != nil {
+							b.Fatal(err)
+						}
+					}
+				}
+			})
+		}
+	}
+}
diff --git a/hugolib/site_output.go b/hugolib/site_output.go
new file mode 100644
index 000000000..1e248baff
--- /dev/null
+++ b/hugolib/site_output.go
@@ -0,0 +1,108 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/spf13/cast"
+)
+
+// createDefaultOutputFormats returns the default mapping from page Kind to
+// output formats, with each format resolved from the site's configured set.
+func createDefaultOutputFormats(allFormats output.Formats) map[string]output.Formats {
+	htmlOut, _ := allFormats.GetByName(output.HTMLFormat.Name)
+	robotsOut, _ := allFormats.GetByName(output.RobotsTxtFormat.Name)
+	sitemapOut, _ := allFormats.GetByName(output.SitemapFormat.Name)
+	rssOut, rssFound := allFormats.GetByName(output.RSSFormat.Name)
+
+	// List-like kinds get HTML plus, when enabled, RSS.
+	listFormats := output.Formats{htmlOut}
+	if rssFound {
+		listFormats = append(listFormats, rssOut)
+	}
+
+	formats := map[string]output.Formats{
+		page.KindPage:     {htmlOut},
+		page.KindHome:     listFormats,
+		page.KindSection:  listFormats,
+		page.KindTerm:     listFormats,
+		page.KindTaxonomy: listFormats,
+		// Below are for consistency. They are currently not used during rendering.
+		kindSitemap:   {sitemapOut},
+		kindRobotsTXT: {robotsOut},
+		kind404:       {htmlOut},
+	}
+
+	// RSS may be disabled site-wide, so only add it when present.
+	if rssFound {
+		formats[kindRSS] = output.Formats{rssOut}
+	}
+
+	return formats
+}
+
+// createSiteOutputFormats resolves the site's "outputs" configuration into a
+// map from page Kind to the output formats to render for that Kind. Format
+// names are looked up in allFormats; Kinds not mentioned by the user fall
+// back to the defaults from createDefaultOutputFormats.
+//
+// NOTE(review): nil and empty maps behave differently here — nil means "all
+// defaults", while a non-nil empty map returns no formats at all. Presumably
+// intentional legacy behaviour; confirm before changing.
+func createSiteOutputFormats(allFormats output.Formats, outputs map[string]any, rssDisabled bool) (map[string]output.Formats, error) {
+	defaultOutputFormats := createDefaultOutputFormats(allFormats)
+
+	if outputs == nil {
+		return defaultOutputFormats, nil
+	}
+
+	outFormats := make(map[string]output.Formats)
+
+	if len(outputs) == 0 {
+		return outFormats, nil
+	}
+
+	seen := make(map[string]bool)
+
+	for k, v := range outputs {
+		// Normalize the Kind name (this also maps legacy spellings such as
+		// "taxonomyterm"); unknown kinds are silently skipped.
+		k = getKind(k)
+		if k == "" {
+			// Invalid kind
+			continue
+		}
+		var formats output.Formats
+		vals := cast.ToStringSlice(v)
+		for _, format := range vals {
+			f, found := allFormats.GetByName(format)
+			if !found {
+				if rssDisabled && strings.EqualFold(format, "RSS") {
+					// This is legacy behaviour. We used to have both
+					// a RSS page kind and output format.
+					continue
+				}
+				return nil, fmt.Errorf("failed to resolve output format %q from site config", format)
+			}
+			formats = append(formats, f)
+		}
+
+		// This effectively prevents empty outputs entries for a given Kind.
+		// We need at least one.
+		if len(formats) > 0 {
+			seen[k] = true
+			outFormats[k] = formats
+		}
+	}
+
+	// Add defaults for the entries not provided by the user.
+	for k, v := range defaultOutputFormats {
+		if !seen[k] {
+			outFormats[k] = v
+		}
+	}
+
+	return outFormats, nil
+}
diff --git a/hugolib/site_output_test.go b/hugolib/site_output_test.go
new file mode 100644
index 000000000..1a8bbadec
--- /dev/null
+++ b/hugolib/site_output_test.go
@@ -0,0 +1,648 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/resources/page"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/output"
+)
+
+// TestSiteWithPageOutputs runs the per-page output format test for a couple
+// of representative output combinations, each as a parallel subtest.
+func TestSiteWithPageOutputs(t *testing.T) {
+	cases := [][]string{
+		{"html", "json", "calendar"},
+		{"json"},
+	}
+	for _, outputs := range cases {
+		outputs := outputs // capture range variable for the parallel subtest
+		t.Run(fmt.Sprintf("%v", outputs), func(t *testing.T) {
+			t.Parallel()
+			doTestSiteWithPageOutputs(t, outputs)
+		})
+	}
+}
+
+// doTestSiteWithPageOutputs builds a two-language site where every page
+// declares the given output formats in front matter, then asserts the
+// rendered JSON/HTML variants, alternative output format listings and
+// permalinks.
+func doTestSiteWithPageOutputs(t *testing.T, outputs []string) {
+	outputsStr := strings.Replace(fmt.Sprintf("%q", outputs), " ", ", ", -1)
+
+	siteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+defaultContentLanguage = "en"
+
+disableKinds = ["section", "term", "taxonomy", "RSS", "sitemap", "robotsTXT", "404"]
+
+[Taxonomies]
+tag = "tags"
+category = "categories"
+
+defaultContentLanguage = "en"
+
+
+[languages]
+
+[languages.en]
+title = "Title in English"
+languageName = "English"
+weight = 1
+
+[languages.nn]
+languageName = "Nynorsk"
+weight = 2
+title = "Tittel på Nynorsk"
+
+`
+
+	pageTemplate := `---
+title: "%s"
+outputs: %s
+---
+# Doc
+
+{{< myShort >}}
+
+{{< myOtherShort >}}
+
+`
+
+	b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
+	b.WithI18n("en.toml", `
+[elbow]
+other = "Elbow"
+`, "nn.toml", `
+[elbow]
+other = "Olboge"
+`)
+
+	b.WithTemplates(
+		// Case issue partials #3333
+		"layouts/partials/GoHugo.html", `Go Hugo Partial`,
+		"layouts/_default/baseof.json", `START JSON:{{block "main" .}}default content{{ end }}:END JSON`,
+		"layouts/_default/baseof.html", `START HTML:{{block "main" .}}default content{{ end }}:END HTML`,
+		"layouts/shortcodes/myOtherShort.html", `OtherShort: {{ "<h1>Hi!</h1>" | safeHTML }}`,
+		"layouts/shortcodes/myShort.html", `ShortHTML`,
+		"layouts/shortcodes/myShort.json", `ShortJSON`,
+
+		"layouts/_default/list.json", `{{ define "main" }}
+List JSON|{{ .Title }}|{{ .Content }}|Alt formats: {{ len .AlternativeOutputFormats -}}|
+{{- range .AlternativeOutputFormats -}}
+Alt Output: {{ .Name -}}|
+{{- end -}}|
+{{- range .OutputFormats -}}
+Output/Rel: {{ .Name -}}/{{ .Rel }}|{{ .MediaType }}
+{{- end -}}
+ {{ with .OutputFormats.Get "JSON" }}
+<atom:link href={{ .Permalink }} rel="self" type="{{ .MediaType }}" />
+{{ end }}
+{{ .Site.Language.Lang }}: {{ T "elbow" -}}
+{{ end }}
+`,
+		"layouts/_default/list.html", `{{ define "main" }}
+List HTML|{{.Title }}|
+{{- with .OutputFormats.Get "HTML" -}}
+<atom:link href={{ .Permalink }} rel="self" type="{{ .MediaType }}" />
+{{- end -}}
+{{ .Site.Language.Lang }}: {{ T "elbow" -}}
+Partial Hugo 1: {{ partial "GoHugo.html" . }}
+Partial Hugo 2: {{ partial "GoHugo" . -}}
+Content: {{ .Content }}
+Len Pages: {{ .Kind }} {{ len .Site.RegularPages }} Page Number: {{ .Paginator.PageNumber }}
+{{ end }}
+`,
+		"layouts/_default/single.html", `{{ define "main" }}{{ .Content }}{{ end }}`,
+	)
+
+	b.WithContent("_index.md", fmt.Sprintf(pageTemplate, "JSON Home", outputsStr))
+	b.WithContent("_index.nn.md", fmt.Sprintf(pageTemplate, "JSON Nynorsk Heim", outputsStr))
+
+	for i := 1; i <= 10; i++ {
+		b.WithContent(fmt.Sprintf("p%d.md", i), fmt.Sprintf(pageTemplate, fmt.Sprintf("Page %d", i), outputsStr))
+	}
+
+	b.Build(BuildCfg{})
+
+	s := b.H.Sites[0]
+	b.Assert(s.language.Lang, qt.Equals, "en")
+
+	home := s.getPage(page.KindHome)
+
+	b.Assert(home, qt.Not(qt.IsNil))
+
+	lenOut := len(outputs)
+
+	b.Assert(len(home.OutputFormats()), qt.Equals, lenOut)
+
+	// There is currently always a JSON output to make it simpler ...
+	altFormats := lenOut - 1
+	hasHTML := helpers.InStringArray(outputs, "html")
+	b.AssertFileContent("public/index.json",
+		"List JSON",
+		fmt.Sprintf("Alt formats: %d", altFormats),
+	)
+
+	if hasHTML {
+		b.AssertFileContent("public/index.json",
+			"Alt Output: HTML",
+			"Output/Rel: JSON/alternate|",
+			"Output/Rel: HTML/canonical|",
+			"en: Elbow",
+			"ShortJSON",
+			"OtherShort: <h1>Hi!</h1>",
+		)
+
+		b.AssertFileContent("public/index.html",
+			// The HTML entity is a deliberate part of this test: The HTML templates are
+			// parsed with html/template.
+			`List HTML|JSON Home|<atom:link href=http://example.com/blog/ rel="self" type="text/html" />`,
+			"en: Elbow",
+			"ShortHTML",
+			"OtherShort: <h1>Hi!</h1>",
+			"Len Pages: home 10",
+		)
+		b.AssertFileContent("public/page/2/index.html", "Page Number: 2")
+		b.Assert(b.CheckExists("public/page/2/index.json"), qt.Equals, false)
+
+		b.AssertFileContent("public/nn/index.html",
+			"List HTML|JSON Nynorsk Heim|",
+			"nn: Olboge")
+	} else {
+		b.AssertFileContent("public/index.json",
+			"Output/Rel: JSON/canonical|",
+			// JSON is plain text, so no need to safeHTML this and that
+			`<atom:link href=http://example.com/blog/index.json rel="self" type="application/json" />`,
+			"ShortJSON",
+			"OtherShort: <h1>Hi!</h1>",
+		)
+		b.AssertFileContent("public/nn/index.json",
+			"List JSON|JSON Nynorsk Heim|",
+			"nn: Olboge",
+			"ShortJSON",
+		)
+	}
+
+	of := home.OutputFormats()
+
+	json := of.Get("JSON")
+	b.Assert(json, qt.Not(qt.IsNil))
+	b.Assert(json.RelPermalink(), qt.Equals, "/blog/index.json")
+	b.Assert(json.Permalink(), qt.Equals, "http://example.com/blog/index.json")
+
+	// Fix: this previously checked for "cal", which never occurs in the
+	// configured outputs ({"html","json","calendar"}), so the calendar
+	// assertions were dead code.
+	if helpers.InStringArray(outputs, "calendar") {
+		cal := of.Get("calendar")
+		b.Assert(cal, qt.Not(qt.IsNil))
+		b.Assert(cal.RelPermalink(), qt.Equals, "/blog/index.ics")
+		b.Assert(cal.Permalink(), qt.Equals, "webcal://example.com/blog/index.ics")
+	}
+
+	b.Assert(home.HasShortcode("myShort"), qt.Equals, true)
+	b.Assert(home.HasShortcode("doesNotExist"), qt.Equals, false)
+}
+
+// Issue #3447
+// TestRedefineRSSOutputFormat verifies that the built-in RSS output format
+// can be redefined in site config (custom media type and base name), and
+// that Site.Info.RSSLink reflects the new base name (Issue #3450).
+func TestRedefineRSSOutputFormat(t *testing.T) {
+	siteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+defaultContentLanguage = "en"
+
+disableKinds = ["page", "section", "term", "taxonomy", "sitemap", "robotsTXT", "404"]
+
+[outputFormats]
+[outputFormats.RSS]
+mediatype = "application/rss"
+baseName = "feed"
+
+`
+
+	c := qt.New(t)
+
+	mf := afero.NewMemMapFs()
+	writeToFs(t, mf, "content/foo.html", `foo`)
+
+	th, h := newTestSitesFromConfig(t, mf, siteConfig)
+
+	err := h.Build(BuildCfg{})
+
+	c.Assert(err, qt.IsNil)
+
+	// The RSS feed is written under the redefined base name.
+	th.assertFileContent("public/feed.xml", "Recent content on")
+
+	s := h.Sites[0]
+
+	// Issue #3450
+	c.Assert(s.Info.RSSLink, qt.Equals, "http://example.com/blog/feed.xml")
+}
+
+// Issue #3614
+// TestDotLessOutputFormat covers output formats whose media types have no
+// suffix and/or a custom (or empty) delimiter, e.g. Netlify's "_redirects"
+// file, and checks both the written files and the computed RelPermalinks.
+func TestDotLessOutputFormat(t *testing.T) {
+	siteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+defaultContentLanguage = "en"
+
+disableKinds = ["page", "section", "term", "taxonomy", "sitemap", "robotsTXT", "404"]
+
+[mediaTypes]
+[mediaTypes."text/nodot"]
+delimiter = ""
+[mediaTypes."text/defaultdelim"]
+suffixes = ["defd"]
+[mediaTypes."text/nosuffix"]
+[mediaTypes."text/customdelim"]
+suffixes = ["del"]
+delimiter = "_"
+
+[outputs]
+home = [ "DOTLESS", "DEF", "NOS", "CUS" ]
+
+[outputFormats]
+[outputFormats.DOTLESS]
+mediatype = "text/nodot"
+baseName = "_redirects" # This is how Netlify names their redirect files.
+[outputFormats.DEF]
+mediatype = "text/defaultdelim"
+baseName = "defaultdelimbase"
+[outputFormats.NOS]
+mediatype = "text/nosuffix"
+baseName = "nosuffixbase"
+[outputFormats.CUS]
+mediatype = "text/customdelim"
+baseName = "customdelimbase"
+
+`
+
+	c := qt.New(t)
+
+	mf := afero.NewMemMapFs()
+	writeToFs(t, mf, "content/foo.html", `foo`)
+	// One list layout per custom output format.
+	writeToFs(t, mf, "layouts/_default/list.dotless", `a dotless`)
+	writeToFs(t, mf, "layouts/_default/list.def.defd", `default delimim`)
+	writeToFs(t, mf, "layouts/_default/list.nos", `no suffix`)
+	writeToFs(t, mf, "layouts/_default/list.cus.del", `custom delim`)
+
+	th, h := newTestSitesFromConfig(t, mf, siteConfig)
+
+	err := h.Build(BuildCfg{})
+
+	c.Assert(err, qt.IsNil)
+
+	s := h.Sites[0]
+
+	th.assertFileContent("public/_redirects", "a dotless")
+	th.assertFileContent("public/defaultdelimbase.defd", "default delimim")
+	// This looks weird, but the user has chosen this definition.
+	th.assertFileContent("public/nosuffixbase", "no suffix")
+	th.assertFileContent("public/customdelimbase_del", "custom delim")
+
+	home := s.getPage(page.KindHome)
+	c.Assert(home, qt.Not(qt.IsNil))
+
+	outputs := home.OutputFormats()
+
+	c.Assert(outputs.Get("DOTLESS").RelPermalink(), qt.Equals, "/blog/_redirects")
+	c.Assert(outputs.Get("DEF").RelPermalink(), qt.Equals, "/blog/defaultdelimbase.defd")
+	c.Assert(outputs.Get("NOS").RelPermalink(), qt.Equals, "/blog/nosuffixbase")
+	c.Assert(outputs.Get("CUS").RelPermalink(), qt.Equals, "/blog/customdelimbase_del")
+}
+
+// Issue 8030
+// TestGetOutputFormatRel verifies that a custom "rel" set on an output
+// format is exposed to templates (here rendered into a <link> element).
+func TestGetOutputFormatRel(t *testing.T) {
+	b := newTestSitesBuilder(t).
+		WithSimpleConfigFileAndSettings(map[string]any{
+			"outputFormats": map[string]any{
+				"humansTXT": map[string]any{
+					"name":        "HUMANS",
+					"mediaType":   "text/plain",
+					"baseName":    "humans",
+					"isPlainText": true,
+					"rel":         "author",
+				},
+			},
+		}).WithTemplates("index.html", `
+{{- with ($.Site.GetPage "humans").OutputFormats.Get "humans" -}}
+<link rel="{{ .Rel }}" type="{{ .MediaType.String }}" href="{{ .Permalink }}">
+{{- end -}}
+`).WithContent("humans.md", `---
+outputs:
+- HUMANS
+---
+This is my content.
+`)
+
+	b.Build(BuildCfg{})
+	b.AssertFileContent("public/index.html", `
+<link rel="author" type="text/plain" href="/humans.txt">
+`)
+}
+
+// TestCreateSiteOutputFormats covers resolving the "outputs" site config:
+// user-specified Kinds, fallback defaults for the rest, and the legacy
+// "taxonomyterm" Kind name mapping.
+func TestCreateSiteOutputFormats(t *testing.T) {
+	t.Run("Basic", func(t *testing.T) {
+		c := qt.New(t)
+
+		outputsConfig := map[string]any{
+			page.KindHome:    []string{"HTML", "JSON"},
+			page.KindSection: []string{"JSON"},
+		}
+
+		cfg := config.NewWithTestDefaults()
+		cfg.Set("outputs", outputsConfig)
+
+		outputs, err := createSiteOutputFormats(output.DefaultFormats, cfg.GetStringMap("outputs"), false)
+		c.Assert(err, qt.IsNil)
+		c.Assert(outputs[page.KindSection], deepEqualsOutputFormats, output.Formats{output.JSONFormat})
+		c.Assert(outputs[page.KindHome], deepEqualsOutputFormats, output.Formats{output.HTMLFormat, output.JSONFormat})
+
+		// Defaults
+		c.Assert(outputs[page.KindTerm], deepEqualsOutputFormats, output.Formats{output.HTMLFormat, output.RSSFormat})
+		c.Assert(outputs[page.KindTaxonomy], deepEqualsOutputFormats, output.Formats{output.HTMLFormat, output.RSSFormat})
+		c.Assert(outputs[page.KindPage], deepEqualsOutputFormats, output.Formats{output.HTMLFormat})
+
+		// These aren't (currently) in use when rendering in Hugo,
+		// but the pages needs to be assigned an output format,
+		// so these should also be correct/sensible.
+		c.Assert(outputs[kindRSS], deepEqualsOutputFormats, output.Formats{output.RSSFormat})
+		c.Assert(outputs[kindSitemap], deepEqualsOutputFormats, output.Formats{output.SitemapFormat})
+		c.Assert(outputs[kindRobotsTXT], deepEqualsOutputFormats, output.Formats{output.RobotsTxtFormat})
+		c.Assert(outputs[kind404], deepEqualsOutputFormats, output.Formats{output.HTMLFormat})
+	})
+
+	// Issue #4528
+	t.Run("Mixed case", func(t *testing.T) {
+		c := qt.New(t)
+		cfg := config.NewWithTestDefaults()
+
+		outputsConfig := map[string]any{
+			// Note that we in Hugo 0.53.0 renamed this Kind to "taxonomy",
+			// but keep this test to test the legacy mapping.
+			"taxonomyterm": []string{"JSON"},
+		}
+		cfg.Set("outputs", outputsConfig)
+
+		outputs, err := createSiteOutputFormats(output.DefaultFormats, cfg.GetStringMap("outputs"), false)
+		c.Assert(err, qt.IsNil)
+		c.Assert(outputs[page.KindTaxonomy], deepEqualsOutputFormats, output.Formats{output.JSONFormat})
+	})
+}
+
+// TestCreateSiteOutputFormatsInvalidConfig verifies that an unknown output
+// format name in the outputs config is reported as an error.
+func TestCreateSiteOutputFormatsInvalidConfig(t *testing.T) {
+	c := qt.New(t)
+
+	cfg := config.NewWithTestDefaults()
+	cfg.Set("outputs", map[string]any{
+		page.KindHome: []string{"FOO", "JSON"},
+	})
+
+	_, err := createSiteOutputFormats(output.DefaultFormats, cfg.GetStringMap("outputs"), false)
+	c.Assert(err, qt.Not(qt.IsNil))
+}
+
+// TestCreateSiteOutputFormatsEmptyConfig verifies that a Kind configured
+// with an empty outputs list falls back to the defaults for that Kind.
+func TestCreateSiteOutputFormatsEmptyConfig(t *testing.T) {
+	c := qt.New(t)
+
+	cfg := config.NewWithTestDefaults()
+	cfg.Set("outputs", map[string]any{
+		page.KindHome: []string{},
+	})
+
+	outputs, err := createSiteOutputFormats(output.DefaultFormats, cfg.GetStringMap("outputs"), false)
+	c.Assert(err, qt.IsNil)
+	c.Assert(outputs[page.KindHome], deepEqualsOutputFormats, output.Formats{output.HTMLFormat, output.RSSFormat})
+}
+
+func TestCreateSiteOutputFormatsCustomFormats(t *testing.T) {
+ c := qt.New(t)
+
+ outputsConfig := map[string]any{
+ page.KindHome: []string{},
+ }
+
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("outputs", outputsConfig)
+
+ var (
+ customRSS = output.Format{Name: "RSS", BaseName: "customRSS"}
+ customHTML = output.Format{Name: "HTML", BaseName: "customHTML"}
+ )
+
+ outputs, err := createSiteOutputFormats(output.Formats{customRSS, customHTML}, cfg.GetStringMap("outputs"), false)
+ c.Assert(err, qt.IsNil)
+ c.Assert(outputs[page.KindHome], deepEqualsOutputFormats, output.Formats{customHTML, customRSS})
+}
+
+// https://github.com/gohugoio/hugo/issues/5849
+// TestOutputFormatPermalinkable checks how the "permalinkable" setting on an
+// output format affects .RelPermalink and .OutputFormats across pages that
+// enable different subsets of formats.
+func TestOutputFormatPermalinkable(t *testing.T) {
+	config := `
+baseURL = "https://example.com"
+
+
+
+# DAMP is similar to AMP, but not permalinkable.
+[outputFormats]
+[outputFormats.damp]
+mediaType = "text/html"
+path = "damp"
+[outputFormats.ramp]
+mediaType = "text/html"
+path = "ramp"
+permalinkable = true
+[outputFormats.base]
+mediaType = "text/html"
+isHTML = true
+baseName = "that"
+permalinkable = true
+[outputFormats.nobase]
+mediaType = "application/json"
+permalinkable = true
+
+`
+
+	b := newTestSitesBuilder(t).WithConfigFile("toml", config)
+	b.WithContent("_index.md", `
+---
+Title: Home Sweet Home
+outputs: [ "html", "amp", "damp", "base" ]
+---
+
+`)
+
+	b.WithContent("blog/html-amp.md", `
+---
+Title: AMP and HTML
+outputs: [ "html", "amp" ]
+---
+
+`)
+
+	b.WithContent("blog/html-damp.md", `
+---
+Title: DAMP and HTML
+outputs: [ "html", "damp" ]
+---
+
+`)
+
+	b.WithContent("blog/html-ramp.md", `
+---
+Title: RAMP and HTML
+outputs: [ "html", "ramp" ]
+---
+
+`)
+
+	b.WithContent("blog/html.md", `
+---
+Title: HTML only
+outputs: [ "html" ]
+---
+
+`)
+
+	b.WithContent("blog/amp.md", `
+---
+Title: AMP only
+outputs: [ "amp" ]
+---
+
+`)
+
+	b.WithContent("blog/html-base-nobase.md", `
+---
+Title: HTML, Base and Nobase
+outputs: [ "html", "base", "nobase" ]
+---
+
+`)
+
+	// Shared by all layouts so each rendered variant prints its own
+	// RelPermalink and the full output format list.
+	const commonTemplate = `
+This RelPermalink: {{ .RelPermalink }}
+Output Formats: {{ len .OutputFormats }};{{ range .OutputFormats }}{{ .Name }};{{ .RelPermalink }}|{{ end }}
+
+`
+
+	b.WithTemplatesAdded("index.html", commonTemplate)
+	b.WithTemplatesAdded("_default/single.html", commonTemplate)
+	b.WithTemplatesAdded("_default/single.json", commonTemplate)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent("public/index.html",
+		"This RelPermalink: /",
+		"Output Formats: 4;HTML;/|AMP;/amp/|damp;/damp/|base;/that.html|",
+	)
+
+	b.AssertFileContent("public/amp/index.html",
+		"This RelPermalink: /amp/",
+		"Output Formats: 4;HTML;/|AMP;/amp/|damp;/damp/|base;/that.html|",
+	)
+
+	b.AssertFileContent("public/blog/html-amp/index.html",
+		"Output Formats: 2;HTML;/blog/html-amp/|AMP;/amp/blog/html-amp/|",
+		"This RelPermalink: /blog/html-amp/")
+
+	b.AssertFileContent("public/amp/blog/html-amp/index.html",
+		"Output Formats: 2;HTML;/blog/html-amp/|AMP;/amp/blog/html-amp/|",
+		"This RelPermalink: /amp/blog/html-amp/")
+
+	// Damp is not permalinkable
+	b.AssertFileContent("public/damp/blog/html-damp/index.html",
+		"This RelPermalink: /blog/html-damp/",
+		"Output Formats: 2;HTML;/blog/html-damp/|damp;/damp/blog/html-damp/|")
+
+	b.AssertFileContent("public/blog/html-ramp/index.html",
+		"This RelPermalink: /blog/html-ramp/",
+		"Output Formats: 2;HTML;/blog/html-ramp/|ramp;/ramp/blog/html-ramp/|")
+
+	b.AssertFileContent("public/ramp/blog/html-ramp/index.html",
+		"This RelPermalink: /ramp/blog/html-ramp/",
+		"Output Formats: 2;HTML;/blog/html-ramp/|ramp;/ramp/blog/html-ramp/|")
+
+	// https://github.com/gohugoio/hugo/issues/5877
+	outputFormats := "Output Formats: 3;HTML;/blog/html-base-nobase/|base;/blog/html-base-nobase/that.html|nobase;/blog/html-base-nobase/index.json|"
+
+	b.AssertFileContent("public/blog/html-base-nobase/index.json",
+		"This RelPermalink: /blog/html-base-nobase/index.json",
+		outputFormats,
+	)
+
+	b.AssertFileContent("public/blog/html-base-nobase/that.html",
+		"This RelPermalink: /blog/html-base-nobase/that.html",
+		outputFormats,
+	)
+
+	b.AssertFileContent("public/blog/html-base-nobase/index.html",
+		"This RelPermalink: /blog/html-base-nobase/",
+		outputFormats,
+	)
+}
+
+// TestSiteWithPageNoOutputs covers two front matter edge cases: an empty
+// "outputs" list (should fall back to the default HTML output) and a bare
+// string instead of a list (should be accepted as a single format).
+func TestSiteWithPageNoOutputs(t *testing.T) {
+	t.Parallel()
+
+	b := newTestSitesBuilder(t)
+	b.WithConfigFile("toml", `
+baseURL = "https://example.com"
+
+[outputFormats.o1]
+mediaType = "text/html"
+
+
+
+`)
+	b.WithContent("outputs-empty.md", `---
+title: "Empty Outputs"
+outputs: []
+---
+
+Word1. Word2.
+
+`,
+		"outputs-string.md", `---
+title: "Outputs String"
+outputs: "o1"
+---
+
+Word1. Word2.
+
+`)
+
+	b.WithTemplates("index.html", `
+{{ range .Site.RegularPages }}
+WordCount: {{ .WordCount }}
+{{ end }}
+`)
+
+	b.WithTemplates("_default/single.html", `HTML: {{ .Content }}`)
+	b.WithTemplates("_default/single.o1.html", `O1: {{ .Content }}`)
+
+	b.Build(BuildCfg{})
+
+	b.AssertFileContent(
+		"public/index.html",
+		" WordCount: 2")
+
+	// Empty outputs -> default single.html; string outputs -> the o1 layout.
+	b.AssertFileContent("public/outputs-empty/index.html", "HTML:", "Word1. Word2.")
+	b.AssertFileContent("public/outputs-string/index.html", "O1:", "Word1. Word2.")
+}
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
new file mode 100644
index 000000000..b572c443e
--- /dev/null
+++ b/hugolib/site_render.go
@@ -0,0 +1,407 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/config"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/output"
+
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/page/pagemeta"
+)
+
+type siteRenderContext struct {
+ cfg *BuildCfg
+
+ // Zero based index for all output formats combined.
+ sitesOutIdx int
+
+ // Zero based index of the output formats configured within a Site.
+ // Note that these outputs are sorted.
+ outIdx int
+
+ multihost bool
+}
+
+// Whether to render 404.html, robotsTXT.txt which usually is rendered
+// once only in the site root.
+func (s siteRenderContext) renderSingletonPages() bool {
+ if s.multihost {
+ // 1 per site
+ return s.outIdx == 0
+ }
+
+ // 1 for all sites
+ return s.sitesOutIdx == 0
+}
+
+// renderPages renders pages each corresponding to a markdown file.
+// TODO(bep np doc
+func (s *Site) renderPages(ctx *siteRenderContext) error {
+ numWorkers := config.GetNumWorkerMultiplier()
+
+ results := make(chan error)
+ pages := make(chan *pageState, numWorkers) // buffered for performance
+ errs := make(chan error)
+
+ go s.errorCollator(results, errs)
+
+ wg := &sync.WaitGroup{}
+
+ for i := 0; i < numWorkers; i++ {
+ wg.Add(1)
+ go pageRenderer(ctx, s, pages, results, wg)
+ }
+
+ cfg := ctx.cfg
+
+ s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
+ if cfg.shouldRender(n.p) {
+ select {
+ case <-s.h.Done():
+ return true
+ default:
+ pages <- n.p
+ }
+ }
+ return false
+ })
+
+ close(pages)
+
+ wg.Wait()
+
+ close(results)
+
+ err := <-errs
+ if err != nil {
+ return fmt.Errorf("failed to render pages: %w", err)
+ }
+ return nil
+}
+
+func pageRenderer(
+ ctx *siteRenderContext,
+ s *Site,
+ pages <-chan *pageState,
+ results chan<- error,
+ wg *sync.WaitGroup) {
+ defer wg.Done()
+
+ for p := range pages {
+ if p.m.buildConfig.PublishResources {
+ if err := p.renderResources(); err != nil {
+ s.SendError(p.errorf(err, "failed to render page resources"))
+ continue
+ }
+ }
+
+ if !p.render {
+ // Nothing more to do for this page.
+ continue
+ }
+
+ templ, found, err := p.resolveTemplate()
+ if err != nil {
+ s.SendError(p.errorf(err, "failed to resolve template"))
+ continue
+ }
+
+ if !found {
+ s.logMissingLayout("", p.Layout(), p.Kind(), p.f.Name)
+ continue
+ }
+
+ targetPath := p.targetPaths().TargetFilename
+
+ if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, templ); err != nil {
+ results <- err
+ }
+
+ if p.paginator != nil && p.paginator.current != nil {
+ if err := s.renderPaginator(p, templ); err != nil {
+ results <- err
+ }
+ }
+ }
+}
+
+func (s *Site) logMissingLayout(name, layout, kind, outputFormat string) {
+ log := s.Log.Warn()
+ if name != "" && infoOnMissingLayout[name] {
+ log = s.Log.Info()
+ }
+
+ errMsg := "You should create a template file which matches Hugo Layouts Lookup Rules for this combination."
+ var args []any
+ msg := "found no layout file for"
+ if outputFormat != "" {
+ msg += " %q"
+ args = append(args, outputFormat)
+ }
+
+ if layout != "" {
+ msg += " for layout %q"
+ args = append(args, layout)
+ }
+
+ if kind != "" {
+ msg += " for kind %q"
+ args = append(args, kind)
+ }
+
+ if name != "" {
+ msg += " for %q"
+ args = append(args, name)
+ }
+
+ msg += ": " + errMsg
+
+ log.Printf(msg, args...)
+}
+
+// renderPaginator must be run after the owning Page has been rendered.
+func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error {
+ paginatePath := s.Cfg.GetString("paginatePath")
+
+ d := p.targetPathDescriptor
+ f := p.s.rc.Format
+ d.Type = f
+
+ if p.paginator.current == nil || p.paginator.current != p.paginator.current.First() {
+ panic(fmt.Sprintf("invalid paginator state for %q", p.pathOrTitle()))
+ }
+
+ if f.IsHTML {
+ // Write alias for page 1
+ d.Addends = fmt.Sprintf("/%s/%d", paginatePath, 1)
+ targetPaths := page.CreateTargetPaths(d)
+
+ if err := s.writeDestAlias(targetPaths.TargetFilename, p.Permalink(), f, nil); err != nil {
+ return err
+ }
+ }
+
+ // Render pages for the rest
+ for current := p.paginator.current.Next(); current != nil; current = current.Next() {
+
+ p.paginator.current = current
+ d.Addends = fmt.Sprintf("/%s/%d", paginatePath, current.PageNumber())
+ targetPaths := page.CreateTargetPaths(d)
+
+ if err := s.renderAndWritePage(
+ &s.PathSpec.ProcessingStats.PaginatorPages,
+ p.Title(),
+ targetPaths.TargetFilename, p, templ); err != nil {
+ return err
+ }
+
+ }
+
+ return nil
+}
+
+func (s *Site) render404() error {
+ p, err := newPageStandalone(&pageMeta{
+ s: s,
+ kind: kind404,
+ urlPaths: pagemeta.URLPath{
+ URL: "404.html",
+ },
+ },
+ output.HTMLFormat,
+ )
+ if err != nil {
+ return err
+ }
+
+ if !p.render {
+ return nil
+ }
+
+ var d output.LayoutDescriptor
+ d.Kind = kind404
+
+ templ, found, err := s.Tmpl().LookupLayout(d, output.HTMLFormat)
+ if err != nil {
+ return err
+ }
+ if !found {
+ return nil
+ }
+
+ targetPath := p.targetPaths().TargetFilename
+
+ if targetPath == "" {
+ return errors.New("failed to create targetPath for 404 page")
+ }
+
+ return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, p, templ)
+}
+
+func (s *Site) renderSitemap() error {
+ p, err := newPageStandalone(&pageMeta{
+ s: s,
+ kind: kindSitemap,
+ urlPaths: pagemeta.URLPath{
+ URL: s.siteCfg.sitemap.Filename,
+ },
+ },
+ output.HTMLFormat,
+ )
+ if err != nil {
+ return err
+ }
+
+ if !p.render {
+ return nil
+ }
+
+ targetPath := p.targetPaths().TargetFilename
+
+ if targetPath == "" {
+ return errors.New("failed to create targetPath for sitemap")
+ }
+
+ templ := s.lookupLayouts("sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml")
+
+ return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap", targetPath, p, templ)
+}
+
+func (s *Site) renderRobotsTXT() error {
+ if !s.Cfg.GetBool("enableRobotsTXT") {
+ return nil
+ }
+
+ p, err := newPageStandalone(&pageMeta{
+ s: s,
+ kind: kindRobotsTXT,
+ urlPaths: pagemeta.URLPath{
+ URL: "robots.txt",
+ },
+ },
+ output.RobotsTxtFormat)
+ if err != nil {
+ return err
+ }
+
+ if !p.render {
+ return nil
+ }
+
+ templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt")
+
+ return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, templ)
+}
+
+// renderAliases renders shell pages that simply have a redirect in the header.
+func (s *Site) renderAliases() error {
+ var err error
+ s.pageMap.pageTrees.WalkLinkable(func(ss string, n *contentNode) bool {
+ p := n.p
+ if len(p.Aliases()) == 0 {
+ return false
+ }
+
+ pathSeen := make(map[string]bool)
+
+ for _, of := range p.OutputFormats() {
+ if !of.Format.IsHTML {
+ continue
+ }
+
+ f := of.Format
+
+ if pathSeen[f.Path] {
+ continue
+ }
+ pathSeen[f.Path] = true
+
+ plink := of.Permalink()
+
+ for _, a := range p.Aliases() {
+ isRelative := !strings.HasPrefix(a, "/")
+
+ if isRelative {
+ // Make alias relative, where "." will be on the
+ // same directory level as the current page.
+ basePath := path.Join(p.targetPaths().SubResourceBaseLink, "..")
+ a = path.Join(basePath, a)
+
+ } else {
+ // Make sure AMP and similar doesn't clash with regular aliases.
+ a = path.Join(f.Path, a)
+ }
+
+ if s.UglyURLs && !strings.HasSuffix(a, ".html") {
+ a += ".html"
+ }
+
+ lang := p.Language().Lang
+
+ if s.h.multihost && !strings.HasPrefix(a, "/"+lang) {
+ // These need to be in its language root.
+ a = path.Join(lang, a)
+ }
+
+ err = s.writeDestAlias(a, plink, f, p)
+ if err != nil {
+ return true
+ }
+ }
+ }
+ return false
+ })
+
+ return err
+}
+
+// renderMainLanguageRedirect creates a redirect to the main language home,
+// depending on if it lives in sub folder (e.g. /en) or not.
+func (s *Site) renderMainLanguageRedirect() error {
+ if !s.h.multilingual.enabled() || s.h.IsMultihost() {
+ // No need for a redirect
+ return nil
+ }
+
+ html, found := s.outputFormatsConfig.GetByName("HTML")
+ if found {
+ mainLang := s.h.multilingual.DefaultLang
+ if s.Info.defaultContentLanguageInSubdir {
+ mainLangURL := s.PathSpec.AbsURL(mainLang.Lang+"/", false)
+ s.Log.Debugf("Write redirect to main language %s: %s", mainLang, mainLangURL)
+ if err := s.publishDestAlias(true, "/", mainLangURL, html, nil); err != nil {
+ return err
+ }
+ } else {
+ mainLangURL := s.PathSpec.AbsURL("", false)
+ s.Log.Debugf("Write redirect to main language %s: %s", mainLang, mainLangURL)
+ if err := s.publishDestAlias(true, mainLang.Lang, mainLangURL, html, nil); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/hugolib/site_sections.go b/hugolib/site_sections.go
new file mode 100644
index 000000000..50dfe6ffa
--- /dev/null
+++ b/hugolib/site_sections.go
@@ -0,0 +1,28 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+// Sections returns the top level sections.
+func (s *SiteInfo) Sections() page.Pages {
+ return s.Home().Sections()
+}
+
+// Home is a shortcut to the home page, equivalent to .Site.GetPage "home".
+func (s *SiteInfo) Home() page.Page {
+ return s.s.home
+}
diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go
new file mode 100644
index 000000000..2a4c39533
--- /dev/null
+++ b/hugolib/site_sections_test.go
@@ -0,0 +1,375 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func TestNestedSections(t *testing.T) {
+ var (
+ c = qt.New(t)
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+ )
+
+ cfg.Set("permalinks", map[string]string{
+ "perm a": ":sections/:title",
+ })
+
+ pageTemplate := `---
+title: T%d_%d
+---
+Content
+`
+
+ // Home page
+ writeSource(t, fs, filepath.Join("content", "_index.md"), fmt.Sprintf(pageTemplate, -1, -1))
+
+ // Top level content page
+ writeSource(t, fs, filepath.Join("content", "mypage.md"), fmt.Sprintf(pageTemplate, 1234, 5))
+
+ // Top level section without index content page
+ writeSource(t, fs, filepath.Join("content", "top", "mypage2.md"), fmt.Sprintf(pageTemplate, 12345, 6))
+ // Just a page in a subfolder, i.e. not a section.
+ writeSource(t, fs, filepath.Join("content", "top", "folder", "mypage3.md"), fmt.Sprintf(pageTemplate, 12345, 67))
+
+ for level1 := 1; level1 < 3; level1++ {
+ writeSource(t, fs, filepath.Join("content", "l1", fmt.Sprintf("page_1_%d.md", level1)),
+ fmt.Sprintf(pageTemplate, 1, level1))
+ }
+
+ // Issue #3586
+ writeSource(t, fs, filepath.Join("content", "post", "0000.md"), fmt.Sprintf(pageTemplate, 1, 2))
+ writeSource(t, fs, filepath.Join("content", "post", "0000", "0001.md"), fmt.Sprintf(pageTemplate, 1, 3))
+ writeSource(t, fs, filepath.Join("content", "elsewhere", "0003.md"), fmt.Sprintf(pageTemplate, 1, 4))
+
+ // Empty nested section, i.e. no regular content pages.
+ writeSource(t, fs, filepath.Join("content", "empty1", "b", "c", "_index.md"), fmt.Sprintf(pageTemplate, 33, -1))
+ // Index content file a the end and in the middle.
+ writeSource(t, fs, filepath.Join("content", "empty2", "b", "_index.md"), fmt.Sprintf(pageTemplate, 40, -1))
+ writeSource(t, fs, filepath.Join("content", "empty2", "b", "c", "d", "_index.md"), fmt.Sprintf(pageTemplate, 41, -1))
+
+ // Empty with content file in the middle.
+ writeSource(t, fs, filepath.Join("content", "empty3", "b", "c", "d", "_index.md"), fmt.Sprintf(pageTemplate, 41, -1))
+ writeSource(t, fs, filepath.Join("content", "empty3", "b", "empty3.md"), fmt.Sprintf(pageTemplate, 3, -1))
+
+ // Section with permalink config
+ writeSource(t, fs, filepath.Join("content", "perm a", "link", "_index.md"), fmt.Sprintf(pageTemplate, 9, -1))
+ for i := 1; i < 4; i++ {
+ writeSource(t, fs, filepath.Join("content", "perm a", "link", fmt.Sprintf("page_%d.md", i)),
+ fmt.Sprintf(pageTemplate, 1, i))
+ }
+ writeSource(t, fs, filepath.Join("content", "perm a", "link", "regular", fmt.Sprintf("page_%d.md", 5)),
+ fmt.Sprintf(pageTemplate, 1, 5))
+
+ writeSource(t, fs, filepath.Join("content", "l1", "l2", "_index.md"), fmt.Sprintf(pageTemplate, 2, -1))
+ writeSource(t, fs, filepath.Join("content", "l1", "l2_2", "_index.md"), fmt.Sprintf(pageTemplate, 22, -1))
+ writeSource(t, fs, filepath.Join("content", "l1", "l2", "l3", "_index.md"), fmt.Sprintf(pageTemplate, 3, -1))
+
+ for level2 := 1; level2 < 4; level2++ {
+ writeSource(t, fs, filepath.Join("content", "l1", "l2", fmt.Sprintf("page_2_%d.md", level2)),
+ fmt.Sprintf(pageTemplate, 2, level2))
+ }
+ for level2 := 1; level2 < 3; level2++ {
+ writeSource(t, fs, filepath.Join("content", "l1", "l2_2", fmt.Sprintf("page_2_2_%d.md", level2)),
+ fmt.Sprintf(pageTemplate, 2, level2))
+ }
+ for level3 := 1; level3 < 3; level3++ {
+ writeSource(t, fs, filepath.Join("content", "l1", "l2", "l3", fmt.Sprintf("page_3_%d.md", level3)),
+ fmt.Sprintf(pageTemplate, 3, level3))
+ }
+
+ writeSource(t, fs, filepath.Join("content", "Spaces in Section", "page100.md"), fmt.Sprintf(pageTemplate, 10, 0))
+
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html>Single|{{ .Title }}</html>")
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"),
+ `
+{{ $sect := (.Site.GetPage "l1/l2") }}
+<html>List|{{ .Title }}|L1/l2-IsActive: {{ .InSection $sect }}
+{{ range .Paginator.Pages }}
+PAG|{{ .Title }}|{{ $sect.InSection . }}
+{{ end }}
+{{/* https://github.com/gohugoio/hugo/issues/4989 */}}
+{{ $sections := (.Site.GetPage "section" .Section).Sections.ByWeight }}
+</html>`)
+
+ cfg.Set("paginate", 2)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 21)
+
+ tests := []struct {
+ sections string
+ verify func(c *qt.C, p page.Page)
+ }{
+ {"elsewhere", func(c *qt.C, p page.Page) {
+ c.Assert(len(p.Pages()), qt.Equals, 1)
+ for _, p := range p.Pages() {
+ c.Assert(p.SectionsPath(), qt.Equals, "elsewhere")
+ }
+ }},
+ {"post", func(c *qt.C, p page.Page) {
+ c.Assert(len(p.Pages()), qt.Equals, 2)
+ for _, p := range p.Pages() {
+ c.Assert(p.Section(), qt.Equals, "post")
+ }
+ }},
+ {"empty1", func(c *qt.C, p page.Page) {
+ // > b,c
+ c.Assert(getPage(p, "/empty1/b"), qt.IsNil) // No _index.md page.
+ c.Assert(getPage(p, "/empty1/b/c"), qt.Not(qt.IsNil))
+ }},
+ {"empty2", func(c *qt.C, p page.Page) {
+ // > b,c,d where b and d have _index.md files.
+ b := getPage(p, "/empty2/b")
+ c.Assert(b, qt.Not(qt.IsNil))
+ c.Assert(b.Title(), qt.Equals, "T40_-1")
+
+ cp := getPage(p, "/empty2/b/c")
+ c.Assert(cp, qt.IsNil) // No _index.md
+
+ d := getPage(p, "/empty2/b/c/d")
+ c.Assert(d, qt.Not(qt.IsNil))
+ c.Assert(d.Title(), qt.Equals, "T41_-1")
+
+ c.Assert(cp.Eq(d), qt.Equals, false)
+ c.Assert(cp.Eq(cp), qt.Equals, true)
+ c.Assert(cp.Eq("asdf"), qt.Equals, false)
+ }},
+ {"empty3", func(c *qt.C, p page.Page) {
+ // b,c,d with regular page in b
+ b := getPage(p, "/empty3/b")
+ c.Assert(b, qt.IsNil) // No _index.md
+ e3 := getPage(p, "/empty3/b/empty3")
+ c.Assert(e3, qt.Not(qt.IsNil))
+ c.Assert(e3.File().LogicalName(), qt.Equals, "empty3.md")
+ }},
+ {"empty3", func(c *qt.C, p page.Page) {
+ xxx := getPage(p, "/empty3/nil")
+ c.Assert(xxx, qt.IsNil)
+ }},
+ {"top", func(c *qt.C, p page.Page) {
+ c.Assert(p.Title(), qt.Equals, "Tops")
+ c.Assert(len(p.Pages()), qt.Equals, 2)
+ c.Assert(p.Pages()[0].File().LogicalName(), qt.Equals, "mypage2.md")
+ c.Assert(p.Pages()[1].File().LogicalName(), qt.Equals, "mypage3.md")
+ home := p.Parent()
+ c.Assert(home.IsHome(), qt.Equals, true)
+ c.Assert(len(p.Sections()), qt.Equals, 0)
+ c.Assert(home.CurrentSection(), qt.Equals, home)
+ active, err := home.InSection(home)
+ c.Assert(err, qt.IsNil)
+ c.Assert(active, qt.Equals, true)
+ c.Assert(p.FirstSection(), qt.Equals, p)
+ }},
+ {"l1", func(c *qt.C, p page.Page) {
+ c.Assert(p.Title(), qt.Equals, "L1s")
+ c.Assert(len(p.Pages()), qt.Equals, 4) // 2 pages + 2 sections
+ c.Assert(p.Parent().IsHome(), qt.Equals, true)
+ c.Assert(len(p.Sections()), qt.Equals, 2)
+ }},
+ {"l1,l2", func(c *qt.C, p page.Page) {
+ c.Assert(p.Title(), qt.Equals, "T2_-1")
+ c.Assert(len(p.Pages()), qt.Equals, 4) // 3 pages + 1 section
+ c.Assert(p.Pages()[0].Parent(), qt.Equals, p)
+ c.Assert(p.Parent().Title(), qt.Equals, "L1s")
+ c.Assert(p.RelPermalink(), qt.Equals, "/l1/l2/")
+ c.Assert(len(p.Sections()), qt.Equals, 1)
+
+ for _, child := range p.Pages() {
+ if child.IsSection() {
+ c.Assert(child.CurrentSection(), qt.Equals, child)
+ continue
+ }
+
+ c.Assert(child.CurrentSection(), qt.Equals, p)
+ active, err := child.InSection(p)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(active, qt.Equals, true)
+ active, err = p.InSection(child)
+ c.Assert(err, qt.IsNil)
+ c.Assert(active, qt.Equals, true)
+ active, err = p.InSection(getPage(p, "/"))
+ c.Assert(err, qt.IsNil)
+ c.Assert(active, qt.Equals, false)
+
+ isAncestor, err := p.IsAncestor(child)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isAncestor, qt.Equals, true)
+ isAncestor, err = child.IsAncestor(p)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isAncestor, qt.Equals, false)
+
+ isDescendant, err := p.IsDescendant(child)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isDescendant, qt.Equals, false)
+ isDescendant, err = child.IsDescendant(p)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isDescendant, qt.Equals, true)
+ }
+
+ c.Assert(p.Eq(p.CurrentSection()), qt.Equals, true)
+ }},
+ {"l1,l2_2", func(c *qt.C, p page.Page) {
+ c.Assert(p.Title(), qt.Equals, "T22_-1")
+ c.Assert(len(p.Pages()), qt.Equals, 2)
+ c.Assert(p.Pages()[0].File().Path(), qt.Equals, filepath.FromSlash("l1/l2_2/page_2_2_1.md"))
+ c.Assert(p.Parent().Title(), qt.Equals, "L1s")
+ c.Assert(len(p.Sections()), qt.Equals, 0)
+ }},
+ {"l1,l2,l3", func(c *qt.C, p page.Page) {
+ nilp, _ := p.GetPage("this/does/not/exist")
+
+ c.Assert(p.Title(), qt.Equals, "T3_-1")
+ c.Assert(len(p.Pages()), qt.Equals, 2)
+ c.Assert(p.Parent().Title(), qt.Equals, "T2_-1")
+ c.Assert(len(p.Sections()), qt.Equals, 0)
+
+ l1 := getPage(p, "/l1")
+ isDescendant, err := l1.IsDescendant(p)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isDescendant, qt.Equals, false)
+ isDescendant, err = l1.IsDescendant(nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isDescendant, qt.Equals, false)
+ isDescendant, err = nilp.IsDescendant(p)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isDescendant, qt.Equals, false)
+ isDescendant, err = p.IsDescendant(l1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isDescendant, qt.Equals, true)
+
+ isAncestor, err := l1.IsAncestor(p)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isAncestor, qt.Equals, true)
+ isAncestor, err = p.IsAncestor(l1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isAncestor, qt.Equals, false)
+ c.Assert(p.FirstSection(), qt.Equals, l1)
+ isAncestor, err = p.IsAncestor(nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isAncestor, qt.Equals, false)
+ isAncestor, err = nilp.IsAncestor(l1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(isAncestor, qt.Equals, false)
+ }},
+ {"perm a,link", func(c *qt.C, p page.Page) {
+ c.Assert(p.Title(), qt.Equals, "T9_-1")
+ c.Assert(p.RelPermalink(), qt.Equals, "/perm-a/link/")
+ c.Assert(len(p.Pages()), qt.Equals, 4)
+ first := p.Pages()[0]
+ c.Assert(first.RelPermalink(), qt.Equals, "/perm-a/link/t1_1/")
+ th.assertFileContent("public/perm-a/link/t1_1/index.html", "Single|T1_1")
+
+ last := p.Pages()[3]
+ c.Assert(last.RelPermalink(), qt.Equals, "/perm-a/link/t1_5/")
+ }},
+ }
+
+ home := s.getPage(page.KindHome)
+
+ for _, test := range tests {
+ test := test
+ t.Run(fmt.Sprintf("sections %s", test.sections), func(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ sections := strings.Split(test.sections, ",")
+ p := s.getPage(page.KindSection, sections...)
+ c.Assert(p, qt.Not(qt.IsNil), qt.Commentf(fmt.Sprint(sections)))
+
+ if p.Pages() != nil {
+ c.Assert(p.Data().(page.Data).Pages(), deepEqualsPages, p.Pages())
+ }
+ c.Assert(p.Parent(), qt.Not(qt.IsNil))
+ test.verify(c, p)
+ })
+ }
+
+ c.Assert(home, qt.Not(qt.IsNil))
+
+ c.Assert(len(home.Sections()), qt.Equals, 9)
+ c.Assert(s.Info.Sections(), deepEqualsPages, home.Sections())
+
+ rootPage := s.getPage(page.KindPage, "mypage.md")
+ c.Assert(rootPage, qt.Not(qt.IsNil))
+ c.Assert(rootPage.Parent().IsHome(), qt.Equals, true)
+ // https://github.com/gohugoio/hugo/issues/6365
+ c.Assert(rootPage.Sections(), qt.HasLen, 0)
+
+ // Add a odd test for this as this looks a little bit off, but I'm not in the mood
+ // to think too hard a out this right now. It works, but people will have to spell
+ // out the directory name as is.
+ // If we later decide to do something about this, we will have to do some normalization in
+ // getPage.
+ // TODO(bep)
+ sectionWithSpace := s.getPage(page.KindSection, "Spaces in Section")
+ c.Assert(sectionWithSpace, qt.Not(qt.IsNil))
+ c.Assert(sectionWithSpace.RelPermalink(), qt.Equals, "/spaces-in-section/")
+
+ th.assertFileContent("public/l1/l2/page/2/index.html", "L1/l2-IsActive: true", "PAG|T2_3|true")
+}
+
+func TestNextInSectionNested(t *testing.T) {
+ t.Parallel()
+
+ pageContent := `---
+title: "The Page"
+weight: %d
+---
+Some content.
+`
+ createPageContent := func(weight int) string {
+ return fmt.Sprintf(pageContent, weight)
+ }
+
+ b := newTestSitesBuilder(t)
+ b.WithSimpleConfigFile()
+ b.WithTemplates("_default/single.html", `
+Prev: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|
+Next: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}|
+`)
+
+ b.WithContent("blog/page1.md", createPageContent(1))
+ b.WithContent("blog/page2.md", createPageContent(2))
+ b.WithContent("blog/cool/_index.md", createPageContent(1))
+ b.WithContent("blog/cool/cool1.md", createPageContent(1))
+ b.WithContent("blog/cool/cool2.md", createPageContent(2))
+ b.WithContent("root1.md", createPageContent(1))
+ b.WithContent("root2.md", createPageContent(2))
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/root1/index.html",
+ "Prev: /root2/|", "Next: |")
+ b.AssertFileContent("public/root2/index.html",
+ "Prev: |", "Next: /root1/|")
+ b.AssertFileContent("public/blog/page1/index.html",
+ "Prev: /blog/page2/|", "Next: |")
+ b.AssertFileContent("public/blog/page2/index.html",
+ "Prev: |", "Next: /blog/page1/|")
+ b.AssertFileContent("public/blog/cool/cool1/index.html",
+ "Prev: /blog/cool/cool2/|", "Next: |")
+ b.AssertFileContent("public/blog/cool/cool2/index.html",
+ "Prev: |", "Next: /blog/cool/cool1/|")
+}
diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go
new file mode 100644
index 000000000..df1f64840
--- /dev/null
+++ b/hugolib/site_stats_test.go
@@ -0,0 +1,98 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestSiteStats(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ siteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+defaultContentLanguage = "nn"
+
+[languages]
+[languages.nn]
+languageName = "Nynorsk"
+weight = 1
+title = "Hugo på norsk"
+
+[languages.en]
+languageName = "English"
+weight = 2
+title = "Hugo in English"
+
+`
+
+ pageTemplate := `---
+title: "T%d"
+tags:
+%s
+categories:
+%s
+aliases: [/Ali%d]
+---
+# Doc
+`
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
+
+ b.WithTemplates(
+ "_default/single.html", "Single|{{ .Title }}|{{ .Content }}",
+ "_default/list.html", `List|{{ .Title }}|Pages: {{ .Paginator.TotalPages }}|{{ .Content }}`,
+ "_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
+ )
+
+ for i := 0; i < 2; i++ {
+ for j := 0; j < 2; j++ {
+ pageID := i + j + 1
+ b.WithContent(fmt.Sprintf("content/sect/p%d.md", pageID),
+ fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID))
+ }
+ }
+
+ for i := 0; i < 5; i++ {
+ b.WithContent(fmt.Sprintf("assets/image%d.png", i+1), "image")
+ }
+
+ b.Build(BuildCfg{})
+ h := b.H
+
+ stats := []*helpers.ProcessingStats{
+ h.Sites[0].PathSpec.ProcessingStats,
+ h.Sites[1].PathSpec.ProcessingStats,
+ }
+
+ stats[0].Table(ioutil.Discard)
+ stats[1].Table(ioutil.Discard)
+
+ var buff bytes.Buffer
+
+ helpers.ProcessingStatsTable(&buff, stats...)
+
+ c.Assert(buff.String(), qt.Contains, "Pages | 19 | 6")
+}
diff --git a/hugolib/site_test.go b/hugolib/site_test.go
new file mode 100644
index 000000000..8dac8fc92
--- /dev/null
+++ b/hugolib/site_test.go
@@ -0,0 +1,1111 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gobuffalo/flect"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/publisher"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+const (
+ // templateMissingFunc pipes .Title through a function that does not
+ // exist; used to exercise template error handling.
+ templateMissingFunc = "{{ .Title | funcdoesnotexists }}"
+ // templateWithURLAbs contains a root-relative href; used by the
+ // absURLify tests.
+ templateWithURLAbs = "<a href=\"/foobar.jpg\">Going</a>"
+)
+
+// TestRenderWithInvalidTemplate verifies that a build using a template that
+// calls a non-existent function does not succeed silently.
+func TestRenderWithInvalidTemplate(t *testing.T) {
+ t.Parallel()
+ cfg, fs := newTestCfg()
+
+ writeSource(t, fs, filepath.Join("content", "foo.md"), "foo")
+
+ withTemplate := createWithTemplateFromNameValues("missing", templateMissingFunc)
+
+ // NOTE(review): the two bools presumably mean expectBuildError=true,
+ // expectRenderError=false — confirm against buildSingleSiteExpected.
+ buildSingleSiteExpected(t, true, false, deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate}, BuildCfg{})
+}
+
+// TestDraftAndFutureRender checks which of four pages (draft/non-draft ×
+// past/future publishdate) are rendered under each combination of the
+// buildDrafts and buildFuture settings.
+func TestDraftAndFutureRender(t *testing.T) {
+ t.Parallel()
+ // doc1: draft + future, doc2: draft + past,
+ // doc3: non-draft + future, doc4: non-draft + past.
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), "---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*"},
+ {filepath.FromSlash("sect/doc2.md"), "---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*"},
+ {filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*"},
+ {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*"},
+ }
+
+ // siteSetup builds a fresh site with the given flat list of config
+ // key/value pairs applied on top of the test defaults.
+ siteSetup := func(t *testing.T, configKeyValues ...any) *Site {
+ cfg, fs := newTestCfg()
+
+ cfg.Set("baseURL", "http://auth/bub")
+
+ for i := 0; i < len(configKeyValues); i += 2 {
+ cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
+ }
+
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+ }
+
+ // Testing defaults: only non-draft content with a publishdate in the
+ // past should be rendered (doc4).
+ s := siteSetup(t)
+ if len(s.RegularPages()) != 1 {
+ t.Fatal("Draft or Future dated content published unexpectedly")
+ }
+
+ // With buildDrafts, drafts are included too, but future dated content
+ // is still excluded (doc2, doc4).
+ s = siteSetup(t, "buildDrafts", true)
+ if len(s.RegularPages()) != 2 {
+ t.Fatal("Future Dated Posts published unexpectedly")
+ }
+
+ // drafts should not be rendered, but all dates should (doc3, doc4)
+ s = siteSetup(t,
+ "buildDrafts", false,
+ "buildFuture", true)
+
+ if len(s.RegularPages()) != 2 {
+ t.Fatal("Draft posts published unexpectedly")
+ }
+
+ // all 4 should be included
+ s = siteSetup(t,
+ "buildDrafts", true,
+ "buildFuture", true)
+
+ if len(s.RegularPages()) != 4 {
+ t.Fatal("Drafts or Future posts not included as expected")
+ }
+}
+
+// TestFutureExpirationRender verifies that content whose expirydate has
+// passed is dropped from the site while content expiring in the future is
+// kept. Note: the file names and front-matter titles are intentionally
+// crossed (doc3.md is titled "doc1", doc4.md is titled "doc2").
+func TestFutureExpirationRender(t *testing.T) {
+ t.Parallel()
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*"},
+ {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*"},
+ }
+
+ siteSetup := func(t *testing.T) *Site {
+ cfg, fs := newTestCfg()
+ cfg.Set("baseURL", "http://auth/bub")
+
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+ }
+
+ s := siteSetup(t)
+
+ // Exactly one page should survive: the one expiring in 2400.
+ if len(s.AllPages()) != 1 {
+ if len(s.RegularPages()) > 1 {
+ t.Fatal("Expired content published unexpectedly")
+ }
+
+ if len(s.RegularPages()) < 1 {
+ t.Fatal("Valid content expired unexpectedly")
+ }
+ }
+
+ // The surviving page must be the non-expired one ("doc1", not "doc2").
+ if s.AllPages()[0].Title() == "doc2" {
+ t.Fatal("Expired content published unexpectedly")
+ }
+}
+
+// TestLastChange verifies that Site.Info.LastChange reflects the newest
+// content date (2017 here), and that a page without any date (doc5) does
+// not reset it to the zero time.
+func TestLastChange(t *testing.T) {
+ t.Parallel()
+
+ cfg, fs := newTestCfg()
+ c := qt.New(t)
+
+ writeSource(t, fs, filepath.Join("content", "sect/doc1.md"), "---\ntitle: doc1\nweight: 1\ndate: 2014-05-29\n---\n# doc1\n*some content*")
+ writeSource(t, fs, filepath.Join("content", "sect/doc2.md"), "---\ntitle: doc2\nweight: 2\ndate: 2015-05-29\n---\n# doc2\n*some content*")
+ writeSource(t, fs, filepath.Join("content", "sect/doc3.md"), "---\ntitle: doc3\nweight: 3\ndate: 2017-05-29\n---\n# doc3\n*some content*")
+ writeSource(t, fs, filepath.Join("content", "sect/doc4.md"), "---\ntitle: doc4\nweight: 4\ndate: 2016-05-29\n---\n# doc4\n*some content*")
+ writeSource(t, fs, filepath.Join("content", "sect/doc5.md"), "---\ntitle: doc5\nweight: 3\n---\n# doc5\n*some content*")
+
+ // SkipRender: only the content graph is needed for this assertion.
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(s.Info.LastChange().IsZero(), qt.Equals, false)
+ c.Assert(s.Info.LastChange().Year(), qt.Equals, 2017)
+}
+
+// TestPageWithUnderScoreIndexInFilename verifies that a regular content file
+// whose name merely contains "_index" (my_index_file.md) is treated as a
+// normal page and not as a section index bundle.
+func TestPageWithUnderScoreIndexInFilename(t *testing.T) {
+ t.Parallel()
+
+ cfg, fs := newTestCfg()
+ c := qt.New(t)
+
+ writeSource(t, fs, filepath.Join("content", "sect/my_index_file.md"), "---\ntitle: doc1\nweight: 1\ndate: 2014-05-29\n---\n# doc1\n*some content*")
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 1)
+}
+
+// Issue #957
+// TestCrossrefs runs the ref/relref resolution checks across every
+// combination of relative refs and ugly URLs.
+func TestCrossrefs(t *testing.T) {
+ t.Parallel()
+ variants := []bool{true, false}
+ for _, ugly := range variants {
+ for _, rel := range variants {
+ doTestCrossrefs(t, rel, ugly)
+ }
+ }
+}
+
+// doTestCrossrefs builds a site whose pages cross-reference each other via
+// the ref/relref shortcodes and asserts the rendered link targets for the
+// given relative/uglyURLs combination. Covers issues #1148 (no P-tags
+// around shortcodes), #1753 (no trailing newline) and #3703 (leading-slash
+// refs with OS-specific separators).
+func doTestCrossrefs(t *testing.T, relative, uglyURLs bool) {
+ c := qt.New(t)
+
+ baseURL := "http://foo/bar"
+
+ var refShortcode string
+ var expectedBase string
+ var expectedURLSuffix string
+ var expectedPathSuffix string
+
+ // relref yields paths relative to the host ("/bar…"), ref absolute URLs.
+ if relative {
+ refShortcode = "relref"
+ expectedBase = "/bar"
+ } else {
+ refShortcode = "ref"
+ expectedBase = baseURL
+ }
+
+ if uglyURLs {
+ expectedURLSuffix = ".html"
+ expectedPathSuffix = ".html"
+ } else {
+ expectedURLSuffix = "/"
+ expectedPathSuffix = "/index.html"
+ }
+
+ doc3Slashed := filepath.FromSlash("/sect/doc3.md")
+
+ sources := [][2]string{
+ {
+ filepath.FromSlash("sect/doc1.md"),
+ fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode),
+ },
+ // Issue #1148: Make sure that no P-tags is added around shortcodes.
+ {
+ filepath.FromSlash("sect/doc2.md"),
+ fmt.Sprintf(`**Ref 1:**
+
+{{< %s "sect/doc1.md" >}}
+
+THE END.`, refShortcode),
+ },
+ // Issue #1753: Should not add a trailing newline after shortcode.
+ {
+ filepath.FromSlash("sect/doc3.md"),
+ fmt.Sprintf(`**Ref 1:** {{< %s "sect/doc3.md" >}}.`, refShortcode),
+ },
+ // Issue #3703
+ {
+ filepath.FromSlash("sect/doc4.md"),
+ fmt.Sprintf(`**Ref 1:** {{< %s "%s" >}}.`, refShortcode, doc3Slashed),
+ },
+ }
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("baseURL", baseURL)
+ cfg.Set("uglyURLs", uglyURLs)
+ cfg.Set("verbose", true)
+
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ s := buildSingleSite(
+ t,
+ deps.DepsCfg{
+ Fs: fs,
+ Cfg: cfg,
+ WithTemplate: createWithTemplateFromNameValues("_default/single.html", "{{.Content}}"),
+ },
+ BuildCfg{})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 4)
+
+ th := newTestHelper(s.Cfg, s.Fs, t)
+
+ tests := []struct {
+ doc string
+ expected string
+ }{
+ {filepath.FromSlash(fmt.Sprintf("public/sect/doc1%s", expectedPathSuffix)), fmt.Sprintf("<p>Ref 2: %s/sect/doc2%s</p>\n", expectedBase, expectedURLSuffix)},
+ {filepath.FromSlash(fmt.Sprintf("public/sect/doc2%s", expectedPathSuffix)), fmt.Sprintf("<p><strong>Ref 1:</strong></p>\n%s/sect/doc1%s\n<p>THE END.</p>\n", expectedBase, expectedURLSuffix)},
+ {filepath.FromSlash(fmt.Sprintf("public/sect/doc3%s", expectedPathSuffix)), fmt.Sprintf("<p><strong>Ref 1:</strong> %s/sect/doc3%s.</p>\n", expectedBase, expectedURLSuffix)},
+ {filepath.FromSlash(fmt.Sprintf("public/sect/doc4%s", expectedPathSuffix)), fmt.Sprintf("<p><strong>Ref 1:</strong> %s/sect/doc3%s.</p>\n", expectedBase, expectedURLSuffix)},
+ }
+
+ for _, test := range tests {
+ th.assertFileContent(test.doc, test.expected)
+ }
+}
+
+// Issue #939
+// Issue #1923
+// TestShouldAlwaysHaveUglyURLs runs the ugly-URL output checks with the
+// uglyURLs setting both enabled and disabled.
+func TestShouldAlwaysHaveUglyURLs(t *testing.T) {
+ t.Parallel()
+ for _, ugly := range []bool{true, false} {
+ doTestShouldAlwaysHaveUglyURLs(t, ugly)
+ }
+}
+
+// doTestShouldAlwaysHaveUglyURLs verifies that a page with an explicit
+// `url: /ugly.html` in front matter is published at that exact path
+// regardless of the uglyURLs setting, and that the home page, 404, RSS and
+// sitemap outputs are written as expected.
+func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
+ cfg, fs := newTestCfg()
+ c := qt.New(t)
+
+ cfg.Set("verbose", true)
+ cfg.Set("baseURL", "http://auth/bub")
+ cfg.Set("uglyURLs", uglyURLs)
+
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), "---\nmarkup: markdown\n---\n# title\nsome *content*"},
+ {filepath.FromSlash("sect/doc2.md"), "---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*"},
+ }
+
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ // The {{ if.IsHome }} guards double as a check that only the home page
+ // reports IsHome (see the loop over RegularPages below).
+ writeSource(t, fs, filepath.Join("layouts", "index.html"), "Home Sweet {{ if.IsHome }}Home{{ end }}.")
+ writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}{{ if.IsHome }}This is not home!{{ end }}")
+ writeSource(t, fs, filepath.Join("layouts", "404.html"), "Page Not Found.{{ if.IsHome }}This is not home!{{ end }}")
+ writeSource(t, fs, filepath.Join("layouts", "rss.xml"), "<root>RSS</root>")
+ writeSource(t, fs, filepath.Join("layouts", "sitemap.xml"), "<root>SITEMAP</root>")
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ // Only doc1's output path depends on the uglyURLs setting.
+ var expectedPagePath string
+ if uglyURLs {
+ expectedPagePath = "public/sect/doc1.html"
+ } else {
+ expectedPagePath = "public/sect/doc1/index.html"
+ }
+
+ tests := []struct {
+ doc string
+ expected string
+ }{
+ {filepath.FromSlash("public/index.html"), "Home Sweet Home."},
+ {filepath.FromSlash(expectedPagePath), "<h1 id=\"title\">title</h1>\n<p>some <em>content</em></p>\n"},
+ {filepath.FromSlash("public/404.html"), "Page Not Found."},
+ {filepath.FromSlash("public/index.xml"), "<root>RSS</root>"},
+ {filepath.FromSlash("public/sitemap.xml"), "<root>SITEMAP</root>"},
+ // Issue #1923
+ {filepath.FromSlash("public/ugly.html"), "<h1 id=\"title\">title</h1>\n<p>doc2 <em>content</em></p>\n"},
+ }
+
+ for _, p := range s.RegularPages() {
+ c.Assert(p.IsHome(), qt.Equals, false)
+ }
+
+ for _, test := range tests {
+ content := readWorkingDir(t, fs, test.doc)
+
+ if content != test.expected {
+ t.Errorf("%s content expected:\n%q\ngot:\n%q", test.doc, test.expected, content)
+ }
+ }
+}
+
+// Issue #3355
+// TestShouldNotWriteZeroLengthFilesToDestination verifies that a template
+// rendering to an empty string (the empty list template here) produces no
+// output file at all.
+func TestShouldNotWriteZeroLengthFilesToDestination(t *testing.T) {
+ cfg, fs := newTestCfg()
+
+ writeSource(t, fs, filepath.Join("content", "simple.html"), "simple")
+ writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}")
+ writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "")
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+ th := newTestHelper(s.Cfg, s.Fs, t)
+
+ th.assertFileNotExist(filepath.Join("public", "index.html"))
+}
+
+// TestMainSections checks .Site.Params.mainSections both when it is set
+// explicitly in site params and when it is left unset — in the latter case
+// the assertion shows it defaults to [blog], the section with the most
+// pages in this fixture.
+func TestMainSections(t *testing.T) {
+ c := qt.New(t)
+ for _, paramSet := range []bool{false, true} {
+ c.Run(fmt.Sprintf("param-%t", paramSet), func(c *qt.C) {
+ v := config.NewWithTestDefaults()
+ if paramSet {
+ v.Set("params", map[string]any{
+ "mainSections": []string{"a1", "a2"},
+ })
+ }
+
+ b := newTestSitesBuilder(c).WithViper(v)
+
+ // 20 root-level pages, 5 blog pages, 3 docs pages.
+ for i := 0; i < 20; i++ {
+ b.WithContent(fmt.Sprintf("page%d.md", i), `---
+title: "Page"
+---
+`)
+ }
+
+ for i := 0; i < 5; i++ {
+ b.WithContent(fmt.Sprintf("blog/page%d.md", i), `---
+title: "Page"
+tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
+---
+`)
+ }
+
+ for i := 0; i < 3; i++ {
+ b.WithContent(fmt.Sprintf("docs/page%d.md", i), `---
+title: "Page"
+---
+`)
+ }
+
+ b.WithTemplates("index.html", `
+mainSections: {{ .Site.Params.mainSections }}
+
+{{ range (where .Site.RegularPages "Type" "in" .Site.Params.mainSections) }}
+Main section page: {{ .RelPermalink }}
+{{ end }}
+`)
+
+ b.Build(BuildCfg{})
+
+ if paramSet {
+ b.AssertFileContent("public/index.html", "mainSections: [a1 a2]")
+ } else {
+ b.AssertFileContent("public/index.html", "mainSections: [blog]", "Main section page: /blog/page3/")
+ }
+ })
+ }
+}
+
+// Issue #1176
+// TestSectionNaming runs doTestSectionNaming for all eight combinations of
+// canonifyURLs, uglyURLs and pluralizeListTitles, each as a parallel
+// subtest.
+func TestSectionNaming(t *testing.T) {
+ for _, canonify := range []bool{true, false} {
+ for _, uglify := range []bool{true, false} {
+ for _, pluralize := range []bool{true, false} {
+ // Copy the loop variables so each parallel subtest
+ // captures its own values (required before Go 1.22).
+ canonify := canonify
+ uglify := uglify
+ pluralize := pluralize
+ t.Run(fmt.Sprintf("canonify=%t,uglify=%t,pluralize=%t", canonify, uglify, pluralize), func(t *testing.T) {
+ t.Parallel()
+ doTestSectionNaming(t, canonify, uglify, pluralize)
+ })
+ }
+ }
+ }
+}
+
+// doTestSectionNaming verifies section URL generation and list titles for
+// sections with spaces ("Fish and Chips") and non-ASCII names ("ラーメン"),
+// including optional pluralization of list titles, and that mainSections
+// defaults to the largest section ("sect").
+func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
+ c := qt.New(t)
+
+ var expectedPathSuffix string
+
+ if uglify {
+ expectedPathSuffix = ".html"
+ } else {
+ expectedPathSuffix = "/index.html"
+ }
+
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.html"), "doc1"},
+ // Add one more page to sect to make sure sect is picked in mainSections
+ {filepath.FromSlash("sect/sect.html"), "sect"},
+ {filepath.FromSlash("Fish and Chips/doc2.html"), "doc2"},
+ {filepath.FromSlash("ラーメン/doc3.html"), "doc3"},
+ }
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("baseURL", "http://auth/sub/")
+ cfg.Set("uglyURLs", uglify)
+ cfg.Set("pluralizeListTitles", pluralize)
+ cfg.Set("canonifyURLs", canonify)
+
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}")
+ writeSource(t, fs, filepath.Join("layouts", "_default/list.html"), "{{ .Kind }}|{{.Title}}")
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ mainSections, err := s.Info.Param("mainSections")
+ c.Assert(err, qt.IsNil)
+ c.Assert(mainSections, qt.DeepEquals, []string{"sect"})
+
+ th := newTestHelper(s.Cfg, s.Fs, t)
+ // pluralAware marks list pages whose expected title changes when
+ // pluralizeListTitles is on.
+ tests := []struct {
+ doc string
+ pluralAware bool
+ expected string
+ }{
+ {filepath.FromSlash(fmt.Sprintf("sect/doc1%s", expectedPathSuffix)), false, "doc1"},
+ {filepath.FromSlash(fmt.Sprintf("sect%s", expectedPathSuffix)), true, "Sect"},
+ {filepath.FromSlash(fmt.Sprintf("fish-and-chips/doc2%s", expectedPathSuffix)), false, "doc2"},
+ {filepath.FromSlash(fmt.Sprintf("fish-and-chips%s", expectedPathSuffix)), true, "Fish and Chips"},
+ {filepath.FromSlash(fmt.Sprintf("ラーメン/doc3%s", expectedPathSuffix)), false, "doc3"},
+ {filepath.FromSlash(fmt.Sprintf("ラーメン%s", expectedPathSuffix)), true, "ラーメン"},
+ }
+
+ for _, test := range tests {
+
+ if test.pluralAware && pluralize {
+ test.expected = flect.Pluralize(test.expected)
+ }
+
+ th.assertFileContent(filepath.Join("public", test.doc), test.expected)
+ }
+}
+
+// TestAbsURLify verifies URL canonification: with canonifyURLs on, the
+// root-relative href in templateWithURLAbs is rewritten to an absolute URL
+// under each baseURL; with it off the href is left untouched. Fragment-only
+// links must never be rewritten.
+func TestAbsURLify(t *testing.T) {
+ t.Parallel()
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.html"), "<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>"},
+ {filepath.FromSlash("blue/doc2.html"), "---\nf: t\n---\n<!doctype html><html><body>more content</body></html>"},
+ }
+ for _, baseURL := range []string{"http://auth/bub", "http://base", "//base"} {
+ for _, canonify := range []bool{true, false} {
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("uglyURLs", true)
+ cfg.Set("canonifyURLs", canonify)
+ cfg.Set("baseURL", baseURL)
+
+ for _, src := range sources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ writeSource(t, fs, filepath.Join("layouts", "blue/single.html"), templateWithURLAbs)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+ th := newTestHelper(s.Cfg, s.Fs, t)
+
+ tests := []struct {
+ file, expected string
+ }{
+ {"public/blue/doc2.html", "<a href=\"%s/foobar.jpg\">Going</a>"},
+ {"public/sect/doc1.html", "<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>"},
+ }
+
+ for _, test := range tests {
+
+ expected := test.expected
+
+ if strings.Contains(expected, "%s") {
+ expected = fmt.Sprintf(expected, baseURL)
+ }
+
+ // Without canonification the base URL is stripped back out.
+ if !canonify {
+ expected = strings.Replace(expected, baseURL, "", -1)
+ }
+
+ th.assertFileContent(test.file, expected)
+
+ }
+ }
+ }
+}
+
+// Weighted fixture pages for the ordering/grouping tests below: five pages
+// with string weights, assorted (publish) dates and params. Page five is
+// excluded from rendering via `_build: render: never`.
+var weightedPage1 = `+++
+weight = "2"
+title = "One"
+my_param = "foo"
+my_date = 1979-05-27T07:32:00Z
++++
+Front Matter with Ordered Pages`
+
+var weightedPage2 = `+++
+weight = "6"
+title = "Two"
+publishdate = "2012-03-05"
+my_param = "foo"
++++
+Front Matter with Ordered Pages 2`
+
+var weightedPage3 = `+++
+weight = "4"
+title = "Three"
+date = "2012-04-06"
+publishdate = "2012-04-06"
+my_param = "bar"
+only_one = "yes"
+my_date = 2010-05-27T07:32:00Z
++++
+Front Matter with Ordered Pages 3`
+
+var weightedPage4 = `+++
+weight = "4"
+title = "Four"
+date = "2012-01-01"
+publishdate = "2012-01-01"
+my_param = "baz"
+my_date = 2010-05-27T07:32:00Z
+summary = "A _custom_ summary"
+categories = [ "hugo" ]
++++
+Front Matter with Ordered Pages 4. This is longer content`
+
+var weightedPage5 = `+++
+weight = "5"
+title = "Five"
+
+[_build]
+render = "never"
++++
+Front Matter with Ordered Pages 5`
+
+// weightedSources places all five weighted pages in a single section.
+var weightedSources = [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), weightedPage1},
+ {filepath.FromSlash("sect/doc2.md"), weightedPage2},
+ {filepath.FromSlash("sect/doc3.md"), weightedPage3},
+ {filepath.FromSlash("sect/doc4.md"), weightedPage4},
+ {filepath.FromSlash("sect/doc5.md"), weightedPage5},
+}
+
+// TestOrderedPages checks page sorting: the default weight ordering of a
+// section's Pages, plus ByDate, ByPublishDate and ByLength with their
+// Reverse counterparts, against the weighted fixture pages.
+func TestOrderedPages(t *testing.T) {
+ t.Parallel()
+ cfg, fs := newTestCfg()
+ cfg.Set("baseURL", "http://auth/bub")
+
+ for _, src := range weightedSources {
+ writeSource(t, fs, filepath.Join("content", src[0]), src[1])
+ }
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ // Weight order with equal weights (4) broken deterministically:
+ // positions 1 and 2 must be Three then Four.
+ if s.getPage(page.KindSection, "sect").Pages()[1].Title() != "Three" || s.getPage(page.KindSection, "sect").Pages()[2].Title() != "Four" {
+ t.Error("Pages in unexpected order.")
+ }
+
+ bydate := s.RegularPages().ByDate()
+
+ if bydate[0].Title() != "One" {
+ t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title())
+ }
+
+ rev := bydate.Reverse()
+ if rev[0].Title() != "Three" {
+ t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title())
+ }
+
+ bypubdate := s.RegularPages().ByPublishDate()
+
+ if bypubdate[0].Title() != "One" {
+ t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title())
+ }
+
+ rbypubdate := bypubdate.Reverse()
+ if rbypubdate[0].Title() != "Three" {
+ t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title())
+ }
+
+ bylength := s.RegularPages().ByLength()
+ if bylength[0].Title() != "One" {
+ t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title())
+ }
+
+ rbylength := bylength.Reverse()
+ if rbylength[0].Title() != "Four" {
+ t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].Title())
+ }
+}
+
+// groupedSources spreads four weighted fixture pages over three sections
+// (two in sect1, one each in sect2/sect3) for the grouping tests.
+var groupedSources = [][2]string{
+ {filepath.FromSlash("sect1/doc1.md"), weightedPage1},
+ {filepath.FromSlash("sect1/doc2.md"), weightedPage2},
+ {filepath.FromSlash("sect2/doc3.md"), weightedPage3},
+ {filepath.FromSlash("sect3/doc4.md"), weightedPage4},
+}
+
+// TestGroupedPages exercises the page grouping API — GroupBy, GroupByDate,
+// GroupByPublishDate, GroupByParam and GroupByParamDate — in both sort
+// directions, against the weighted fixture pages spread over three sections.
+func TestGroupedPages(t *testing.T) {
+ t.Parallel()
+ // NOTE(review): this recover converts a panic into a plain log line,
+ // which lets a panicking grouping call pass the test silently —
+ // presumably intentional legacy behavior; confirm before removing.
+ defer func() {
+ if r := recover(); r != nil {
+ fmt.Println("Recovered in f", r)
+ }
+ }()
+
+ cfg, fs := newTestCfg()
+ cfg.Set("baseURL", "http://auth/bub")
+
+ writeSourcesToSource(t, "content", fs, groupedSources...)
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ // GroupBy Section, descending: sect3, sect2, sect1.
+ rbysection, err := s.RegularPages().GroupBy("Section", "desc")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+
+ if rbysection[0].Key != "sect3" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "sect3", rbysection[0].Key)
+ }
+ if rbysection[1].Key != "sect2" {
+ t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "sect2", rbysection[1].Key)
+ }
+ if rbysection[2].Key != "sect1" {
+ t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect1", rbysection[2].Key)
+ }
+ if rbysection[0].Pages[0].Title() != "Four" {
+ t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].Title())
+ }
+ if len(rbysection[2].Pages) != 2 {
+ t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages))
+ }
+
+ // GroupBy Type, ascending: sect1, sect2, sect3.
+ bytype, err := s.RegularPages().GroupBy("Type", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+ if bytype[0].Key != "sect1" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "sect1", bytype[0].Key)
+ }
+ if bytype[1].Key != "sect2" {
+ t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "sect2", bytype[1].Key)
+ }
+ if bytype[2].Key != "sect3" {
+ t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect3", bytype[2].Key)
+ }
+ // Fixed: the failure messages below previously reported the wrong
+ // group's value (bytype[0] vs bytype[2] were swapped).
+ if bytype[2].Pages[0].Title() != "Four" {
+ t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Four", bytype[2].Pages[0].Title())
+ }
+ if len(bytype[0].Pages) != 2 {
+ t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[0].Pages))
+ }
+
+ // GroupByDate by month, ascending; undated pages group under "0001-01".
+ bydate, err := s.RegularPages().GroupByDate("2006-01", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+ if bydate[0].Key != "0001-01" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "0001-01", bydate[0].Key)
+ }
+ if bydate[1].Key != "2012-01" {
+ t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "2012-01", bydate[1].Key)
+ }
+
+ // GroupByPublishDate by year, default (descending) order.
+ bypubdate, err := s.RegularPages().GroupByPublishDate("2006")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+ if bypubdate[0].Key != "2012" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "2012", bypubdate[0].Key)
+ }
+ if bypubdate[1].Key != "0001" {
+ t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "0001", bypubdate[1].Key)
+ }
+ if bypubdate[0].Pages[0].Title() != "Three" {
+ t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].Title())
+ }
+ if len(bypubdate[0].Pages) != 3 {
+ t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages))
+ }
+
+ // GroupByParam on my_param, descending: foo, baz, bar.
+ byparam, err := s.RegularPages().GroupByParam("my_param", "desc")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+ if byparam[0].Key != "foo" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "foo", byparam[0].Key)
+ }
+ if byparam[1].Key != "baz" {
+ t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "baz", byparam[1].Key)
+ }
+ if byparam[2].Key != "bar" {
+ t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "bar", byparam[2].Key)
+ }
+ if byparam[2].Pages[0].Title() != "Three" {
+ t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].Title())
+ }
+ if len(byparam[0].Pages) != 2 {
+ t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages))
+ }
+
+ // A param no page sets must yield an error, ...
+ _, err = s.RegularPages().GroupByParam("not_exist")
+ if err == nil {
+ t.Errorf("GroupByParam didn't return an expected error")
+ }
+
+ // ... while a param only one page sets yields a single group.
+ byOnlyOneParam, err := s.RegularPages().GroupByParam("only_one")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+ if len(byOnlyOneParam) != 1 {
+ t.Errorf("PageGroup array has unexpected elements. Group length should be '%d', got '%d'", 1, len(byOnlyOneParam))
+ }
+ if byOnlyOneParam[0].Key != "yes" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "yes", byOnlyOneParam[0].Key)
+ }
+
+ // GroupByParamDate on my_date by month, default (descending) order.
+ byParamDate, err := s.RegularPages().GroupByParamDate("my_date", "2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PageGroup array: %s", err)
+ }
+ if byParamDate[0].Key != "2010-05" {
+ t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "2010-05", byParamDate[0].Key)
+ }
+ if byParamDate[1].Key != "1979-05" {
+ t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "1979-05", byParamDate[1].Key)
+ }
+ if byParamDate[1].Pages[0].Title() != "One" {
+ t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].Title())
+ }
+ // Fixed: the message previously read byParamDate[2].Pages, which is out
+ // of range (only two groups exist) and would panic on the failure path.
+ if len(byParamDate[0].Pages) != 2 {
+ t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byParamDate[0].Pages))
+ }
+}
+
+// Fixture pages carrying tags_weight/categories_weight front matter for the
+// weighted-taxonomy ordering test. Note the mixed numeric forms on purpose
+// (int 22/33/44, float 11.0) and tags given both as list and scalar.
+var pageWithWeightedTaxonomies1 = `+++
+tags = [ "a", "b", "c" ]
+tags_weight = 22
+categories = ["d"]
+title = "foo"
+categories_weight = 44
++++
+Front Matter with weighted tags and categories`
+
+var pageWithWeightedTaxonomies2 = `+++
+tags = "a"
+tags_weight = 33
+title = "bar"
+categories = [ "d", "e" ]
+categories_weight = 11.0
+alias = "spf13"
+date = 1979-05-27T07:32:00Z
++++
+Front Matter with weighted tags and categories`
+
+var pageWithWeightedTaxonomies3 = `+++
+title = "bza"
+categories = [ "e" ]
+categories_weight = 11
+alias = "spf13"
+date = 2010-05-27T07:32:00Z
++++
+Front Matter with weighted tags and categories`
+
+// TestWeightedTaxonomies verifies that pages within a taxonomy term are
+// ordered by their tags_weight/categories_weight values (lower weight
+// first), with equal weights broken by title.
+func TestWeightedTaxonomies(t *testing.T) {
+ t.Parallel()
+ sources := [][2]string{
+ {filepath.FromSlash("sect/doc1.md"), pageWithWeightedTaxonomies2},
+ {filepath.FromSlash("sect/doc2.md"), pageWithWeightedTaxonomies1},
+ {filepath.FromSlash("sect/doc3.md"), pageWithWeightedTaxonomies3},
+ }
+ taxonomies := make(map[string]string)
+
+ taxonomies["tag"] = "tags"
+ taxonomies["category"] = "categories"
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("baseURL", "http://auth/bub")
+ cfg.Set("taxonomies", taxonomies)
+
+ writeSourcesToSource(t, "content", fs, sources...)
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ // tag "a": weight 22 ("foo") sorts before weight 33 ("bar").
+ if s.Taxonomies()["tags"]["a"][0].Page.Title() != "foo" {
+ t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies()["tags"]["a"][0].Page.Title())
+ }
+
+ // category "d": weight 11.0 ("bar") sorts before weight 44 ("foo").
+ if s.Taxonomies()["categories"]["d"][0].Page.Title() != "bar" {
+ t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies()["categories"]["d"][0].Page.Title())
+ }
+
+ if s.Taxonomies()["categories"]["e"][0].Page.Title() != "bza" {
+ t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies()["categories"]["e"][0].Page.Title())
+ }
+}
+
+// setupLinkingMockSite builds a site with a three-level content tree
+// containing deliberately ambiguous file names (common.md at every level),
+// a dotted name (embedded.dot.md) and a leaf bundle, used by the ref/relref
+// resolution tests. Pages render in both HTML and AMP.
+func setupLinkingMockSite(t *testing.T) *Site {
+ sources := [][2]string{
+ {filepath.FromSlash("level2/unique.md"), ""},
+ {filepath.FromSlash("_index.md"), ""},
+ {filepath.FromSlash("common.md"), ""},
+ {filepath.FromSlash("rootfile.md"), ""},
+ {filepath.FromSlash("root-image.png"), ""},
+
+ {filepath.FromSlash("level2/2-root.md"), ""},
+ {filepath.FromSlash("level2/common.md"), ""},
+
+ {filepath.FromSlash("level2/2-image.png"), ""},
+ {filepath.FromSlash("level2/common.png"), ""},
+
+ {filepath.FromSlash("level2/level3/start.md"), ""},
+ {filepath.FromSlash("level2/level3/_index.md"), ""},
+ {filepath.FromSlash("level2/level3/3-root.md"), ""},
+ {filepath.FromSlash("level2/level3/common.md"), ""},
+ {filepath.FromSlash("level2/level3/3-image.png"), ""},
+ {filepath.FromSlash("level2/level3/common.png"), ""},
+
+ {filepath.FromSlash("level2/level3/embedded.dot.md"), ""},
+
+ {filepath.FromSlash("leafbundle/index.md"), ""},
+ }
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("baseURL", "http://auth/")
+ cfg.Set("uglyURLs", false)
+ cfg.Set("outputs", map[string]any{
+ "page": []string{"HTML", "AMP"},
+ })
+ cfg.Set("pluralizeListTitles", false)
+ cfg.Set("canonifyURLs", false)
+ writeSourcesToSource(t, "content", fs, sources...)
+ return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+}
+
+// TestRefLinking resolves a variety of ref targets (absolute, relative,
+// ambiguous, dotted, empty) from the page level2/level3/start.md against
+// the linking mock site, and repeats each case with a #fragment appended.
+func TestRefLinking(t *testing.T) {
+ t.Parallel()
+ site := setupLinkingMockSite(t)
+
+ currentPage := site.getPage(page.KindPage, "level2/level3/start.md")
+ if currentPage == nil {
+ t.Fatalf("failed to find current page in site")
+ }
+
+ for i, test := range []struct {
+ link string
+ outputFormat string
+ relative bool
+ expected string
+ }{
+ // different refs resolving to the same unique filename:
+ {"/level2/unique.md", "", true, "/level2/unique/"},
+ {"../unique.md", "", true, "/level2/unique/"},
+ {"unique.md", "", true, "/level2/unique/"},
+
+ {"level2/common.md", "", true, "/level2/common/"},
+ {"3-root.md", "", true, "/level2/level3/3-root/"},
+ {"../..", "", true, "/"},
+
+ // different refs resolving to the same ambiguous top-level filename:
+ {"../../common.md", "", true, "/common/"},
+ {"/common.md", "", true, "/common/"},
+
+ // different refs resolving to the same ambiguous level-2 filename:
+ {"/level2/common.md", "", true, "/level2/common/"},
+ {"../common.md", "", true, "/level2/common/"},
+ {"common.md", "", true, "/level2/level3/common/"},
+
+ // different refs resolving to the same section:
+ {"/level2", "", true, "/level2/"},
+ {"..", "", true, "/level2/"},
+ {"../", "", true, "/level2/"},
+
+ // different refs resolving to the same subsection:
+ {"/level2/level3", "", true, "/level2/level3/"},
+ {"/level2/level3/_index.md", "", true, "/level2/level3/"},
+ {".", "", true, "/level2/level3/"},
+ {"./", "", true, "/level2/level3/"},
+
+ // try to confuse parsing
+ {"embedded.dot.md", "", true, "/level2/level3/embedded.dot/"},
+
+ // test empty link, as well as fragment only link
+ {"", "", true, ""},
+ } {
+ t.Run(fmt.Sprintf("t%dt", i), func(t *testing.T) {
+ checkLinkCase(site, test.link, currentPage, test.relative, test.outputFormat, test.expected, t, i)
+
+ // make sure fragment links are also handled
+ checkLinkCase(site, test.link+"#intro", currentPage, test.relative, test.outputFormat, test.expected+"#intro", t, i)
+ })
+ }
+
+ // TODO: and then the failure cases.
+}
+
+// checkLinkCase resolves link from currentPage via site.refLink and fails
+// the test unless it resolves, error-free, to expected. i identifies the
+// table-driven case in the failure message.
+func checkLinkCase(site *Site, link string, currentPage page.Page, relative bool, outputFormat string, expected string, t *testing.T, i int) {
+ t.Helper()
+ out, err := site.refLink(link, currentPage, relative, outputFormat)
+ if err != nil || out != expected {
+ t.Fatalf("[%d] Expected %q from %q to resolve to %q, got %q - error: %s", i, link, currentPage.Pathc(), expected, out, err)
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/6952
+// TestRefIssues verifies that ref shortcodes can target sibling leaf
+// bundles by bundle name ("b2") and pages in other nested sections by file
+// name ("content-b").
+func TestRefIssues(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithContent(
+ "post/b1/index.md", "---\ntitle: pb1\n---\nRef: {{< ref \"b2\" >}}",
+ "post/b2/index.md", "---\ntitle: pb2\n---\n",
+ "post/nested-a/content-a.md", "---\ntitle: ca\n---\n{{< ref \"content-b\" >}}",
+ "post/nested-b/content-b.md", "---\ntitle: ca\n---\n",
+ )
+ b.WithTemplates("index.html", `Home`)
+ b.WithTemplates("_default/single.html", `Content: {{ .Content }}`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/post/b1/index.html", `Content: <p>Ref: http://example.com/post/b2/</p>`)
+ b.AssertFileContent("public/post/nested-a/content-a/index.html", `Content: http://example.com/post/nested-b/content-b/`)
+}
+
+// TestClassCollector verifies that build.writeStats collects the HTML
+// tags, classes and ids used by the rendered site into hugo_stats.json
+// (sorted, de-duplicated), with and without minification.
+func TestClassCollector(t *testing.T) {
+ for _, minify := range []bool{false, true} {
+ t.Run(fmt.Sprintf("minify-%t", minify), func(t *testing.T) {
+ statsFilename := "hugo_stats.json"
+ defer os.Remove(statsFilename)
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", fmt.Sprintf(`
+
+
+minify = %t
+
+[build]
+  writeStats = true
+
+`, minify))
+
+ b.WithTemplates("index.html", `
+
+<div id="el1" class="a b c">Foo</div>
+
+Some text.
+
+<div class="c d e" id="el2">Foo</div>
+
+<span class=z>FOO</span>
+
+ <a class="text-base hover:text-gradient inline-block px-3 pb-1 rounded lowercase" href="{{ .RelPermalink }}">{{ .Title }}</a>
+
+
+`)
+
+ b.WithContent("p1.md", "")
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("hugo_stats.json", `
+	{
+          "htmlElements": {
+            "tags": [
+              "a",
+              "div",
+              "span"
+            ],
+            "classes": [
+              "a",
+              "b",
+              "c",
+              "d",
+              "e",
+              "hover:text-gradient",
+              "inline-block",
+              "lowercase",
+              "pb-1",
+              "px-3",
+              "rounded",
+              "text-base",
+              "z"
+            ],
+            "ids": [
+              "el1",
+              "el2"
+            ]
+          }
+        }
+`)
+ })
+ }
+}
+
+// TestClassCollectorStress renders 900 pages in each of four languages with
+// templates that emit both static and per-page (title-derived) classes, and
+// checks that the class collector produces identical, complete stats via the
+// in-memory FS and the hugo_stats.json written to disk.
+func TestClassCollectorStress(t *testing.T) {
+ statsFilename := "hugo_stats.json"
+ defer os.Remove(statsFilename)
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", `
+
+disableKinds = ["home", "section", "term", "taxonomy" ]
+
+[languages]
+[languages.en]
+[languages.nb]
+[languages.no]
+[languages.sv]
+
+
+[build]
+  writeStats = true
+
+`)
+
+ b.WithTemplates("_default/single.html", `
+<div class="c d e" id="el2">Foo</div>
+
+Some text.
+
+{{ $n := index (shuffle (seq 1 20)) 0 }}
+
+{{ "<span class=_a>Foo</span>" | strings.Repeat $n | safeHTML }}
+
+<div class="{{ .Title }}">
+ABC.
+</div>
+
+<div class="f"></div>
+
+{{ $n := index (shuffle (seq 1 5)) 0 }}
+
+{{ "<hr class=p-3>" | safeHTML }}
+
+`)
+
+ for _, lang := range []string{"en", "nb", "no", "sv"} {
+ for i := 100; i <= 999; i++ {
+ b.WithContent(fmt.Sprintf("p%d.%s.md", i, lang), fmt.Sprintf("---\ntitle: p%s%d\n---", lang, i))
+ }
+ }
+
+ b.Build(BuildCfg{})
+
+ // Compare the in-memory file content with what was written to disk;
+ // both must decode to the same stats.
+ contentMem := b.FileContent(statsFilename)
+ cb, err := ioutil.ReadFile(statsFilename)
+ b.Assert(err, qt.IsNil)
+ contentFile := string(cb)
+
+ for _, content := range []string{contentMem, contentFile} {
+
+ stats := &publisher.PublishStats{}
+ b.Assert(json.Unmarshal([]byte(content), stats), qt.IsNil)
+
+ els := stats.HTMLElements
+
+ // 3606 = 900 title classes × 4 languages + the 6 static classes
+ // (c, d, e, f, _a, p-3).
+ b.Assert(els.Classes, qt.HasLen, 3606) // (4 * 900) + 4 +2
+ b.Assert(els.Tags, qt.HasLen, 8)
+ b.Assert(els.IDs, qt.HasLen, 1)
+ }
+}
diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go
new file mode 100644
index 000000000..ec68d21fc
--- /dev/null
+++ b/hugolib/site_url_test.go
@@ -0,0 +1,187 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "html/template"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+)
+
+const slugDoc1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\naliases:\n - /sd1/foo/\n - /sd2\n - /sd3/\n - /sd4.html\n---\nslug doc 1 content\n"
+
+const slugDoc2 = `---
+title: slug doc 2
+slug: slug-doc-2
+---
+slug doc 2 content
+`
+
+var urlFakeSource = [][2]string{
+ {filepath.FromSlash("content/blue/doc1.md"), slugDoc1},
+ {filepath.FromSlash("content/blue/doc2.md"), slugDoc2},
+}
+
+// Issue #1105
+func TestShouldNotAddTrailingSlashToBaseURL(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for i, this := range []struct {
+ in string
+ expected string
+ }{
+ {"http://base.com/", "http://base.com/"},
+ {"http://base.com/sub/", "http://base.com/sub/"},
+ {"http://base.com/sub", "http://base.com/sub"},
+ {"http://base.com", "http://base.com"},
+ } {
+
+ cfg, fs := newTestCfg()
+ cfg.Set("baseURL", this.in)
+ d := deps.DepsCfg{Cfg: cfg, Fs: fs}
+ s, err := NewSiteForCfg(d)
+ c.Assert(err, qt.IsNil)
+ c.Assert(s.initializeSiteInfo(), qt.IsNil)
+
+ if s.Info.BaseURL() != template.URL(this.expected) {
+ t.Errorf("[%d] got %s expected %s", i, s.Info.BaseURL(), this.expected)
+ }
+ }
+}
+
+func TestPageCount(t *testing.T) {
+ t.Parallel()
+ cfg, fs := newTestCfg()
+ cfg.Set("uglyURLs", false)
+ cfg.Set("paginate", 10)
+
+ writeSourcesToSource(t, "", fs, urlFakeSource...)
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ _, err := s.Fs.WorkingDirReadOnly.Open("public/blue")
+ if err != nil {
+ t.Errorf("No indexed rendered.")
+ }
+
+ for _, pth := range []string{
+ "public/sd1/foo/index.html",
+ "public/sd2/index.html",
+ "public/sd3/index.html",
+ "public/sd4.html",
+ } {
+ if _, err := s.Fs.WorkingDirReadOnly.Open(filepath.FromSlash(pth)); err != nil {
+ t.Errorf("No alias rendered: %s", pth)
+ }
+ }
+}
+
+func TestUglyURLsPerSection(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ const dt = `---
+title: Do not go gentle into that good night
+---
+
+Wild men who caught and sang the sun in flight,
+And learn, too late, they grieved it on its way,
+Do not go gentle into that good night.
+
+`
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("uglyURLs", map[string]bool{
+ "sect2": true,
+ })
+
+ writeSource(t, fs, filepath.Join("content", "sect1", "p1.md"), dt)
+ writeSource(t, fs, filepath.Join("content", "sect2", "p2.md"), dt)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 2)
+
+ notUgly := s.getPage(page.KindPage, "sect1/p1.md")
+ c.Assert(notUgly, qt.Not(qt.IsNil))
+ c.Assert(notUgly.Section(), qt.Equals, "sect1")
+ c.Assert(notUgly.RelPermalink(), qt.Equals, "/sect1/p1/")
+
+ ugly := s.getPage(page.KindPage, "sect2/p2.md")
+ c.Assert(ugly, qt.Not(qt.IsNil))
+ c.Assert(ugly.Section(), qt.Equals, "sect2")
+ c.Assert(ugly.RelPermalink(), qt.Equals, "/sect2/p2.html")
+}
+
+func TestSectionWithURLInFrontMatter(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ const st = `---
+title: Do not go gentle into that good night
+url: %s
+---
+
+Wild men who caught and sang the sun in flight,
+And learn, too late, they grieved it on its way,
+Do not go gentle into that good night.
+
+`
+
+ const pt = `---
+title: Wild men who caught and sang the sun in flight
+---
+
+Wild men who caught and sang the sun in flight,
+And learn, too late, they grieved it on its way,
+Do not go gentle into that good night.
+
+`
+
+ cfg, fs := newTestCfg()
+ th := newTestHelper(cfg, fs, t)
+
+ cfg.Set("paginate", 1)
+
+ writeSource(t, fs, filepath.Join("content", "sect1", "_index.md"), fmt.Sprintf(st, "/ss1/"))
+ writeSource(t, fs, filepath.Join("content", "sect2", "_index.md"), fmt.Sprintf(st, "/ss2/"))
+
+ for i := 0; i < 5; i++ {
+ writeSource(t, fs, filepath.Join("content", "sect1", fmt.Sprintf("p%d.md", i+1)), pt)
+ writeSource(t, fs, filepath.Join("content", "sect2", fmt.Sprintf("p%d.md", i+1)), pt)
+ }
+
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html><body>{{.Content}}</body></html>")
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"),
+ "<html><body>P{{.Paginator.PageNumber}}|URL: {{.Paginator.URL}}|{{ if .Paginator.HasNext }}Next: {{.Paginator.Next.URL }}{{ end }}</body></html>")
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ c.Assert(len(s.RegularPages()), qt.Equals, 10)
+
+ sect1 := s.getPage(page.KindSection, "sect1")
+ c.Assert(sect1, qt.Not(qt.IsNil))
+ c.Assert(sect1.RelPermalink(), qt.Equals, "/ss1/")
+ th.assertFileContent(filepath.Join("public", "ss1", "index.html"), "P1|URL: /ss1/|Next: /ss1/page/2/")
+ th.assertFileContent(filepath.Join("public", "ss1", "page", "2", "index.html"), "P2|URL: /ss1/page/2/|Next: /ss1/page/3/")
+}
diff --git a/hugolib/sitemap_test.go b/hugolib/sitemap_test.go
new file mode 100644
index 000000000..cb4eea234
--- /dev/null
+++ b/hugolib/sitemap_test.go
@@ -0,0 +1,120 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "reflect"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+const sitemapTemplate = `<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+ {{ range .Data.Pages }}
+ {{- if .Permalink -}}
+ <url>
+ <loc>{{ .Permalink }}</loc>{{ if not .Lastmod.IsZero }}
+ <lastmod>{{ safeHTML ( .Lastmod.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }}
+ <changefreq>{{ . }}</changefreq>{{ end }}{{ if ge .Sitemap.Priority 0.0 }}
+ <priority>{{ .Sitemap.Priority }}</priority>{{ end }}
+ </url>
+ {{- end -}}
+ {{ end }}
+</urlset>`
+
+func TestSitemapOutput(t *testing.T) {
+ t.Parallel()
+ for _, internal := range []bool{false, true} {
+ doTestSitemapOutput(t, internal)
+ }
+}
+
+func doTestSitemapOutput(t *testing.T, internal bool) {
+ c := qt.New(t)
+ cfg, fs := newTestCfg()
+ cfg.Set("baseURL", "http://auth/bub/")
+
+ depsCfg := deps.DepsCfg{Fs: fs, Cfg: cfg}
+
+ depsCfg.WithTemplate = func(templ tpl.TemplateManager) error {
+ if !internal {
+ templ.AddTemplate("sitemap.xml", sitemapTemplate)
+ }
+
+ // We want to check that the 404 page is not included in the sitemap
+ // output. This template should have no effect either way, but include
+ // it for the clarity.
+ templ.AddTemplate("404.html", "Not found")
+ return nil
+ }
+
+ writeSourcesToSource(t, "content", fs, weightedSources...)
+ s := buildSingleSite(t, depsCfg, BuildCfg{})
+ th := newTestHelper(s.Cfg, s.Fs, t)
+ outputSitemap := "public/sitemap.xml"
+
+ th.assertFileContent(outputSitemap,
+ // Regular page
+ " <loc>http://auth/bub/sect/doc1/</loc>",
+ // Home page
+ "<loc>http://auth/bub/</loc>",
+ // Section
+ "<loc>http://auth/bub/sect/</loc>",
+ // Tax terms
+ "<loc>http://auth/bub/categories/</loc>",
+ // Tax list
+ "<loc>http://auth/bub/categories/hugo/</loc>",
+ )
+
+ content := readWorkingDir(th, th.Fs, outputSitemap)
+ c.Assert(content, qt.Not(qt.Contains), "404")
+ c.Assert(content, qt.Not(qt.Contains), "<loc></loc>")
+}
+
+func TestParseSitemap(t *testing.T) {
+ t.Parallel()
+ expected := config.Sitemap{Priority: 3.0, Filename: "doo.xml", ChangeFreq: "3"}
+ input := map[string]any{
+ "changefreq": "3",
+ "priority": 3.0,
+ "filename": "doo.xml",
+ "unknown": "ignore",
+ }
+ result := config.DecodeSitemap(config.Sitemap{}, input)
+
+ if !reflect.DeepEqual(expected, result) {
+ t.Errorf("Got \n%v expected \n%v", result, expected)
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/5910
+func TestSitemapOutputFormats(t *testing.T) {
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ b.WithContent("blog/html-amp.md", `
+---
+Title: AMP and HTML
+outputs: [ "html", "amp" ]
+---
+
+`)
+
+ b.Build(BuildCfg{})
+
+ // Should link to the HTML version.
+ b.AssertFileContent("public/sitemap.xml", " <loc>http://example.com/blog/html-amp/</loc>")
+}
diff --git a/hugolib/taxonomy.go b/hugolib/taxonomy.go
new file mode 100644
index 000000000..6b3c2b961
--- /dev/null
+++ b/hugolib/taxonomy.go
@@ -0,0 +1,173 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/gohugoio/hugo/compare"
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+// The TaxonomyList is a list of all taxonomies and their values
+// e.g. List['tags'] => TagTaxonomy (from above)
+type TaxonomyList map[string]Taxonomy
+
+func (tl TaxonomyList) String() string {
+ return fmt.Sprintf("TaxonomyList(%d)", len(tl))
+}
+
+// A Taxonomy is a map of keywords to a list of pages.
+// For example
+// TagTaxonomy['technology'] = page.WeightedPages
+// TagTaxonomy['go'] = page.WeightedPages
+type Taxonomy map[string]page.WeightedPages
+
+// OrderedTaxonomy is another representation of a Taxonomy using an array rather than a map.
+// Important because you can't order a map.
+type OrderedTaxonomy []OrderedTaxonomyEntry
+
+// getOneOPage returns one page in the taxonomy,
+// nil if there is none.
+func (t OrderedTaxonomy) getOneOPage() page.Page {
+ if len(t) == 0 {
+ return nil
+ }
+ return t[0].Pages()[0]
+}
+
+// OrderedTaxonomyEntry is similar to an element of a Taxonomy, but with the key embedded (as name)
+// e.g: {Name: Technology, page.WeightedPages: TaxonomyPages}
+type OrderedTaxonomyEntry struct {
+ Name string
+ page.WeightedPages
+}
+
+// Get the weighted pages for the given key.
+func (i Taxonomy) Get(key string) page.WeightedPages {
+ return i[key]
+}
+
+// Count the weighted pages for the given key.
+func (i Taxonomy) Count(key string) int { return len(i[key]) }
+
+func (i Taxonomy) add(key string, w page.WeightedPage) {
+ i[key] = append(i[key], w)
+}
+
+// TaxonomyArray returns an ordered taxonomy with an undefined order.
+func (i Taxonomy) TaxonomyArray() OrderedTaxonomy {
+ ies := make([]OrderedTaxonomyEntry, len(i))
+ count := 0
+ for k, v := range i {
+ ies[count] = OrderedTaxonomyEntry{Name: k, WeightedPages: v}
+ count++
+ }
+ return ies
+}
+
+// Alphabetical returns an ordered taxonomy sorted by key name.
+func (i Taxonomy) Alphabetical() OrderedTaxonomy {
+ ia := i.TaxonomyArray()
+ p := ia.getOneOPage()
+ if p == nil {
+ return ia
+ }
+ currentSite := p.Site().Current()
+ coll := langs.GetCollator(currentSite.Language())
+ coll.Lock()
+ defer coll.Unlock()
+ name := func(i1, i2 *OrderedTaxonomyEntry) bool {
+ return coll.CompareStrings(i1.Name, i2.Name) < 0
+ }
+ oiBy(name).Sort(ia)
+ return ia
+}
+
+// ByCount returns an ordered taxonomy sorted by # of pages per key.
+// If taxonomies have the same # of pages, sort them alphabetically
+func (i Taxonomy) ByCount() OrderedTaxonomy {
+ count := func(i1, i2 *OrderedTaxonomyEntry) bool {
+ li1 := len(i1.WeightedPages)
+ li2 := len(i2.WeightedPages)
+
+ if li1 == li2 {
+ return compare.LessStrings(i1.Name, i2.Name)
+ }
+ return li1 > li2
+ }
+
+ ia := i.TaxonomyArray()
+ oiBy(count).Sort(ia)
+ return ia
+}
+
+// Pages returns the Pages for this taxonomy.
+func (ie OrderedTaxonomyEntry) Pages() page.Pages {
+ return ie.WeightedPages.Pages()
+}
+
+// Count returns the count of pages in this taxonomy.
+func (ie OrderedTaxonomyEntry) Count() int {
+ return len(ie.WeightedPages)
+}
+
+// Term returns the name given to this taxonomy.
+func (ie OrderedTaxonomyEntry) Term() string {
+ return ie.Name
+}
+
+// Reverse reverses the order of the entries in this taxonomy.
+func (t OrderedTaxonomy) Reverse() OrderedTaxonomy {
+ for i, j := 0, len(t)-1; i < j; i, j = i+1, j-1 {
+ t[i], t[j] = t[j], t[i]
+ }
+
+ return t
+}
+
+// A type to implement the sort interface for TaxonomyEntries.
+type orderedTaxonomySorter struct {
+ taxonomy OrderedTaxonomy
+ by oiBy
+}
+
+// Closure used in the Sort.Less method.
+type oiBy func(i1, i2 *OrderedTaxonomyEntry) bool
+
+func (by oiBy) Sort(taxonomy OrderedTaxonomy) {
+ ps := &orderedTaxonomySorter{
+ taxonomy: taxonomy,
+ by: by, // The Sort method's receiver is the function (closure) that defines the sort order.
+ }
+ sort.Stable(ps)
+}
+
+// Len is part of sort.Interface.
+func (s *orderedTaxonomySorter) Len() int {
+ return len(s.taxonomy)
+}
+
+// Swap is part of sort.Interface.
+func (s *orderedTaxonomySorter) Swap(i, j int) {
+ s.taxonomy[i], s.taxonomy[j] = s.taxonomy[j], s.taxonomy[i]
+}
+
+// Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter.
+func (s *orderedTaxonomySorter) Less(i, j int) bool {
+ return s.by(&s.taxonomy[i], &s.taxonomy[j])
+}
diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go
new file mode 100644
index 000000000..b26032174
--- /dev/null
+++ b/hugolib/taxonomy_test.go
@@ -0,0 +1,696 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
+func TestTaxonomiesCountOrder(t *testing.T) {
+ t.Parallel()
+ taxonomies := make(map[string]string)
+
+ taxonomies["tag"] = "tags"
+ taxonomies["category"] = "categories"
+
+ cfg, fs := newTestCfg()
+
+ cfg.Set("taxonomies", taxonomies)
+
+ const pageContent = `---
+tags: ['a', 'B', 'c']
+categories: 'd'
+---
+YAML frontmatter with tags and categories taxonomy.`
+
+ writeSource(t, fs, filepath.Join("content", "page.md"), pageContent)
+
+ s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+ st := make([]string, 0)
+ for _, t := range s.Taxonomies()["tags"].ByCount() {
+ st = append(st, t.Page().Title()+":"+t.Name)
+ }
+
+ expect := []string{"a:a", "B:b", "c:c"}
+
+ if !reflect.DeepEqual(st, expect) {
+ t.Fatalf("ordered taxonomies mismatch, expected\n%v\ngot\n%q", expect, st)
+ }
+}
+
+//
+func TestTaxonomiesWithAndWithoutContentFile(t *testing.T) {
+ for _, uglyURLs := range []bool{false, true} {
+ uglyURLs := uglyURLs
+ t.Run(fmt.Sprintf("uglyURLs=%t", uglyURLs), func(t *testing.T) {
+ t.Parallel()
+ doTestTaxonomiesWithAndWithoutContentFile(t, uglyURLs)
+ })
+ }
+}
+
+func doTestTaxonomiesWithAndWithoutContentFile(t *testing.T, uglyURLs bool) {
+ siteConfig := `
+baseURL = "http://example.com/blog"
+uglyURLs = %t
+paginate = 1
+defaultContentLanguage = "en"
+[Taxonomies]
+tag = "tags"
+category = "categories"
+other = "others"
+empty = "empties"
+permalinked = "permalinkeds"
+[permalinks]
+permalinkeds = "/perma/:slug/"
+`
+
+ pageTemplate := `---
+title: "%s"
+tags:
+%s
+categories:
+%s
+others:
+%s
+permalinkeds:
+%s
+---
+# Doc
+`
+
+ siteConfig = fmt.Sprintf(siteConfig, uglyURLs)
+
+ b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
+
+ b.WithContent(
+ "p1.md", fmt.Sprintf(pageTemplate, "t1/c1", "- Tag1", "- cAt1", "- o1", "- Pl1"),
+ "p2.md", fmt.Sprintf(pageTemplate, "t2/c1", "- tag2", "- cAt1", "- o1", "- Pl1"),
+ "p3.md", fmt.Sprintf(pageTemplate, "t2/c12", "- tag2", "- cat2", "- o1", "- Pl1"),
+ "p4.md", fmt.Sprintf(pageTemplate, "Hello World", "", "", "- \"Hello Hugo world\"", "- Pl1"),
+ "categories/_index.md", newTestPage("Category Terms", "2017-01-01", 10),
+ "tags/Tag1/_index.md", newTestPage("Tag1 List", "2017-01-01", 10),
+ // https://github.com/gohugoio/hugo/issues/5847
+ "/tags/not-used/_index.md", newTestPage("Unused Tag List", "2018-01-01", 10),
+ )
+
+ b.Build(BuildCfg{})
+
+ // So what we have now is:
+ // 1. categories with terms content page, but no content page for the only c1 category
+ // 2. tags with no terms content page, but content page for one of 2 tags (tag1)
+ // 3. the "others" taxonomy with no content pages.
+ // 4. the "permalinkeds" taxonomy with permalinks configuration.
+
+ pathFunc := func(s string) string {
+ if uglyURLs {
+ return strings.Replace(s, "/index.html", ".html", 1)
+ }
+ return s
+ }
+
+ // 1.
+ b.AssertFileContent(pathFunc("public/categories/cat1/index.html"), "List", "cAt1")
+ b.AssertFileContent(pathFunc("public/categories/index.html"), "Taxonomy Term Page", "Category Terms")
+
+ // 2.
+ b.AssertFileContent(pathFunc("public/tags/tag2/index.html"), "List", "tag2")
+ b.AssertFileContent(pathFunc("public/tags/tag1/index.html"), "List", "Tag1")
+ b.AssertFileContent(pathFunc("public/tags/index.html"), "Taxonomy Term Page", "Tags")
+
+ // 3.
+ b.AssertFileContent(pathFunc("public/others/o1/index.html"), "List", "o1")
+ b.AssertFileContent(pathFunc("public/others/index.html"), "Taxonomy Term Page", "Others")
+
+ // 4.
+ b.AssertFileContent(pathFunc("public/perma/pl1/index.html"), "List", "Pl1")
+
+ // This looks kind of funky, but the taxonomy terms do not have a permalinks definition,
+ // for good reasons.
+ b.AssertFileContent(pathFunc("public/permalinkeds/index.html"), "Taxonomy Term Page", "Permalinkeds")
+
+ s := b.H.Sites[0]
+
+ // Make sure that each page.KindTaxonomyTerm page has an appropriate number
+ // of page.KindTaxonomy pages in its Pages slice.
+ taxonomyTermPageCounts := map[string]int{
+ "tags": 3,
+ "categories": 2,
+ "others": 2,
+ "empties": 0,
+ "permalinkeds": 1,
+ }
+
+ for taxonomy, count := range taxonomyTermPageCounts {
+ msg := qt.Commentf(taxonomy)
+ term := s.getPage(page.KindTaxonomy, taxonomy)
+ b.Assert(term, qt.Not(qt.IsNil), msg)
+ b.Assert(len(term.Pages()), qt.Equals, count, msg)
+
+ for _, p := range term.Pages() {
+ b.Assert(p.Kind(), qt.Equals, page.KindTerm)
+ }
+ }
+
+ cat1 := s.getPage(page.KindTerm, "categories", "cat1")
+ b.Assert(cat1, qt.Not(qt.IsNil))
+ if uglyURLs {
+ b.Assert(cat1.RelPermalink(), qt.Equals, "/blog/categories/cat1.html")
+ } else {
+ b.Assert(cat1.RelPermalink(), qt.Equals, "/blog/categories/cat1/")
+ }
+
+ pl1 := s.getPage(page.KindTerm, "permalinkeds", "pl1")
+ permalinkeds := s.getPage(page.KindTaxonomy, "permalinkeds")
+ b.Assert(pl1, qt.Not(qt.IsNil))
+ b.Assert(permalinkeds, qt.Not(qt.IsNil))
+ if uglyURLs {
+ b.Assert(pl1.RelPermalink(), qt.Equals, "/blog/perma/pl1.html")
+ b.Assert(permalinkeds.RelPermalink(), qt.Equals, "/blog/permalinkeds.html")
+ } else {
+ b.Assert(pl1.RelPermalink(), qt.Equals, "/blog/perma/pl1/")
+ b.Assert(permalinkeds.RelPermalink(), qt.Equals, "/blog/permalinkeds/")
+ }
+
+ helloWorld := s.getPage(page.KindTerm, "others", "hello-hugo-world")
+ b.Assert(helloWorld, qt.Not(qt.IsNil))
+ b.Assert(helloWorld.Title(), qt.Equals, "Hello Hugo world")
+
+ // Issue #2977
+ b.AssertFileContent(pathFunc("public/empties/index.html"), "Taxonomy Term Page", "Empties")
+}
+
+// https://github.com/gohugoio/hugo/issues/5513
+// https://github.com/gohugoio/hugo/issues/5571
+func TestTaxonomiesPathSeparation(t *testing.T) {
+ t.Parallel()
+
+ config := `
+baseURL = "https://example.com"
+[taxonomies]
+"news/tag" = "news/tags"
+"news/category" = "news/categories"
+"t1/t2/t3" = "t1/t2/t3s"
+"s1/s2/s3" = "s1/s2/s3s"
+`
+
+ pageContent := `
++++
+title = "foo"
+"news/categories" = ["a", "b", "c", "d/e", "f/g/h"]
+"t1/t2/t3s" = ["t4/t5", "t4/t5/t6"]
++++
+Content.
+`
+
+ b := newTestSitesBuilder(t)
+ b.WithConfigFile("toml", config)
+ b.WithContent("page.md", pageContent)
+ b.WithContent("news/categories/b/_index.md", `
+---
+title: "This is B"
+---
+`)
+
+ b.WithContent("news/categories/f/g/h/_index.md", `
+---
+title: "This is H"
+---
+`)
+
+ b.WithContent("t1/t2/t3s/t4/t5/_index.md", `
+---
+title: "This is T5"
+---
+`)
+
+ b.WithContent("s1/s2/s3s/_index.md", `
+---
+title: "This is S3s"
+---
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ s := b.H.Sites[0]
+
+ filterbyKind := func(kind string) page.Pages {
+ var pages page.Pages
+ for _, p := range s.Pages() {
+ if p.Kind() == kind {
+ pages = append(pages, p)
+ }
+ }
+ return pages
+ }
+
+ ta := filterbyKind(page.KindTerm)
+ te := filterbyKind(page.KindTaxonomy)
+
+ b.Assert(len(te), qt.Equals, 4)
+ b.Assert(len(ta), qt.Equals, 7)
+
+ b.AssertFileContent("public/news/categories/a/index.html", "Taxonomy List Page 1|a|Hello|https://example.com/news/categories/a/|")
+ b.AssertFileContent("public/news/categories/b/index.html", "Taxonomy List Page 1|This is B|Hello|https://example.com/news/categories/b/|")
+ b.AssertFileContent("public/news/categories/d/e/index.html", "Taxonomy List Page 1|d/e|Hello|https://example.com/news/categories/d/e/|")
+ b.AssertFileContent("public/news/categories/f/g/h/index.html", "Taxonomy List Page 1|This is H|Hello|https://example.com/news/categories/f/g/h/|")
+ b.AssertFileContent("public/t1/t2/t3s/t4/t5/index.html", "Taxonomy List Page 1|This is T5|Hello|https://example.com/t1/t2/t3s/t4/t5/|")
+ b.AssertFileContent("public/t1/t2/t3s/t4/t5/t6/index.html", "Taxonomy List Page 1|t4/t5/t6|Hello|https://example.com/t1/t2/t3s/t4/t5/t6/|")
+
+ b.AssertFileContent("public/news/categories/index.html", "Taxonomy Term Page 1|News/Categories|Hello|https://example.com/news/categories/|")
+ b.AssertFileContent("public/t1/t2/t3s/index.html", "Taxonomy Term Page 1|T1/T2/T3s|Hello|https://example.com/t1/t2/t3s/|")
+ b.AssertFileContent("public/s1/s2/s3s/index.html", "Taxonomy Term Page 1|This is S3s|Hello|https://example.com/s1/s2/s3s/|")
+}
+
+// https://github.com/gohugoio/hugo/issues/5719
+func TestTaxonomiesNextGenLoops(t *testing.T) {
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ b.WithTemplatesAdded("index.html", `
+<h1>Tags</h1>
+<ul>
+ {{ range .Site.Taxonomies.tags }}
+ <li><a href="{{ .Page.Permalink }}">{{ .Page.Title }}</a> {{ .Count }}</li>
+ {{ end }}
+</ul>
+
+`)
+
+ b.WithTemplatesAdded("_default/terms.html", `
+<h1>Terms</h1>
+<ul>
+ {{ range .Data.Terms.Alphabetical }}
+ <li><a href="{{ .Page.Permalink }}">{{ .Page.Title }}</a> {{ .Count }}</li>
+ {{ end }}
+</ul>
+`)
+
+ for i := 0; i < 10; i++ {
+ b.WithContent(fmt.Sprintf("page%d.md", i+1), `
+---
+Title: "Taxonomy!"
+tags: ["Hugo Rocks!", "Rocks I say!" ]
+categories: ["This is Cool", "And new" ]
+---
+
+Content.
+
+ `)
+ }
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `<li><a href="http://example.com/tags/hugo-rocks/">Hugo Rocks!</a> 10</li>`)
+ b.AssertFileContent("public/categories/index.html", `<li><a href="http://example.com/categories/this-is-cool/">This is Cool</a> 10</li>`)
+ b.AssertFileContent("public/tags/index.html", `<li><a href="http://example.com/tags/rocks-i-say/">Rocks I say!</a> 10</li>`)
+}
+
+// Issue 6213
+func TestTaxonomiesNotForDrafts(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithContent("draft.md", `---
+title: "Draft"
+draft: true
+categories: ["drafts"]
+---
+
+`,
+ "regular.md", `---
+title: "Not Draft"
+categories: ["regular"]
+---
+
+`)
+
+ b.Build(BuildCfg{})
+ s := b.H.Sites[0]
+
+ b.Assert(b.CheckExists("public/categories/regular/index.html"), qt.Equals, true)
+ b.Assert(b.CheckExists("public/categories/drafts/index.html"), qt.Equals, false)
+
+ reg, _ := s.getPageNew(nil, "categories/regular")
+ dra, _ := s.getPageNew(nil, "categories/draft")
+ b.Assert(reg, qt.Not(qt.IsNil))
+ b.Assert(dra, qt.IsNil)
+}
+
+func TestTaxonomiesIndexDraft(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithContent(
+ "categories/_index.md", `---
+title: "The Categories"
+draft: true
+---
+
+Content.
+
+`,
+ "page.md", `---
+title: "The Page"
+categories: ["cool"]
+---
+
+Content.
+
+`,
+ )
+
+ b.WithTemplates("index.html", `
+{{ range .Site.Pages }}
+{{ .RelPermalink }}|{{ .Title }}|{{ .WordCount }}|{{ .Content }}|
+{{ end }}
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContentFn("public/index.html", func(s string) bool {
+ return !strings.Contains(s, "categories")
+ })
+}
+
+// https://github.com/gohugoio/hugo/issues/6927
+func TestTaxonomiesHomeDraft(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithContent(
+ "_index.md", `---
+title: "Home"
+draft: true
+---
+
+Content.
+
+`,
+ "posts/_index.md", `---
+title: "Posts"
+draft: true
+---
+
+Content.
+
+`,
+ "posts/page.md", `---
+title: "The Page"
+categories: ["cool"]
+---
+
+Content.
+
+`,
+ )
+
+ b.WithTemplates("index.html", `
+NO HOME FOR YOU
+`)
+
+ b.Build(BuildCfg{})
+
+ b.Assert(b.CheckExists("public/index.html"), qt.Equals, false)
+ b.Assert(b.CheckExists("public/categories/index.html"), qt.Equals, false)
+ b.Assert(b.CheckExists("public/posts/index.html"), qt.Equals, false)
+}
+
+// https://github.com/gohugoio/hugo/issues/6173
+func TestTaxonomiesWithBundledResources(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithTemplates("_default/list.html", `
+List {{ .Title }}:
+{{ range .Resources }}
+Resource: {{ .RelPermalink }}|{{ .MediaType }}
+{{ end }}
+ `)
+
+ b.WithContent("p1.md", `---
+title: Page
+categories: ["funny"]
+---
+ `,
+ "categories/_index.md", "---\ntitle: Categories Page\n---",
+ "categories/data.json", "Category data",
+ "categories/funny/_index.md", "---\ntitle: Funny Category\n---",
+ "categories/funny/funnydata.json", "Category funny data",
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/categories/index.html", `Resource: /categories/data.json|application/json`)
+ b.AssertFileContent("public/categories/funny/index.html", `Resource: /categories/funny/funnydata.json|application/json`)
+}
+
+func TestTaxonomiesRemoveOne(t *testing.T) {
+ b := newTestSitesBuilder(t).Running()
+ b.WithTemplates("index.html", `
+ {{ $cats := .Site.Taxonomies.categories.cats }}
+ {{ if $cats }}
+ Len cats: {{ len $cats }}
+ {{ range $cats }}
+ Cats:|{{ .Page.RelPermalink }}|
+ {{ end }}
+ {{ end }}
+ {{ $funny := .Site.Taxonomies.categories.funny }}
+ {{ if $funny }}
+ Len funny: {{ len $funny }}
+ {{ range $funny }}
+ Funny:|{{ .Page.RelPermalink }}|
+ {{ end }}
+ {{ end }}
+ `)
+
+ b.WithContent("p1.md", `---
+title: Page
+categories: ["funny", "cats"]
+---
+ `, "p2.md", `---
+title: Page2
+categories: ["funny", "cats"]
+---
+ `,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Len cats: 2
+Len funny: 2
+Cats:|/p1/|
+Cats:|/p2/|
+Funny:|/p1/|
+Funny:|/p2/|`)
+
+ // Remove one category from one of the pages.
+ b.EditFiles("content/p1.md", `---
+title: Page
+categories: ["funny"]
+---
+ `)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Len cats: 1
+Len funny: 2
+Cats:|/p2/|
+Funny:|/p1/|
+Funny:|/p2/|`)
+}
+
+//https://github.com/gohugoio/hugo/issues/6590
+func TestTaxonomiesListPages(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithTemplates("_default/list.html", `
+
+{{ template "print-taxo" "categories.cats" }}
+{{ template "print-taxo" "categories.funny" }}
+
+{{ define "print-taxo" }}
+{{ $node := index site.Taxonomies (split $ ".") }}
+{{ if $node }}
+Len {{ $ }}: {{ len $node }}
+{{ range $node }}
+ {{ $ }}:|{{ .Page.RelPermalink }}|
+{{ end }}
+{{ else }}
+{{ $ }} not found.
+{{ end }}
+{{ end }}
+ `)
+
+ b.WithContent("_index.md", `---
+title: Home
+categories: ["funny", "cats"]
+---
+ `, "blog/p1.md", `---
+title: Page1
+categories: ["funny"]
+---
+ `, "blog/_index.md", `---
+title: Blog Section
+categories: ["cats"]
+---
+ `,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+
+Len categories.cats: 2
+categories.cats:|/blog/|
+categories.cats:|/|
+
+Len categories.funny: 2
+categories.funny:|/|
+categories.funny:|/blog/p1/|
+`)
+}
+
+func TestTaxonomiesPageCollections(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t)
+ b.WithContent(
+ "_index.md", `---
+title: "Home Sweet Home"
+categories: [ "dogs", "gorillas"]
+---
+`,
+ "section/_index.md", `---
+title: "Section"
+categories: [ "cats", "dogs", "birds"]
+---
+`,
+ "section/p1.md", `---
+title: "Page1"
+categories: ["funny", "cats"]
+---
+`, "section/p2.md", `---
+title: "Page2"
+categories: ["funny"]
+---
+`)
+
+ b.WithTemplatesAdded("index.html", `
+{{ $home := site.Home }}
+{{ $section := site.GetPage "section" }}
+{{ $categories := site.GetPage "categories" }}
+{{ $funny := site.GetPage "categories/funny" }}
+{{ $cats := site.GetPage "categories/cats" }}
+{{ $p1 := site.GetPage "section/p1" }}
+
+Categories Pages: {{ range $categories.Pages}}{{.RelPermalink }}|{{ end }}:END
+Funny Pages: {{ range $funny.Pages}}{{.RelPermalink }}|{{ end }}:END
+Cats Pages: {{ range $cats.Pages}}{{.RelPermalink }}|{{ end }}:END
+P1 Terms: {{ range $p1.GetTerms "categories" }}{{.RelPermalink }}|{{ end }}:END
+Section Terms: {{ range $section.GetTerms "categories" }}{{.RelPermalink }}|{{ end }}:END
+Home Terms: {{ range $home.GetTerms "categories" }}{{.RelPermalink }}|{{ end }}:END
+Category Paginator {{ range $categories.Paginator.Pages }}{{ .RelPermalink }}|{{ end }}:END
+Cats Paginator {{ range $cats.Paginator.Pages }}{{ .RelPermalink }}|{{ end }}:END
+
+`)
+ b.WithTemplatesAdded("404.html", `
+404 Terms: {{ range .GetTerms "categories" }}{{.RelPermalink }}|{{ end }}:END
+ `)
+ b.Build(BuildCfg{})
+
+ cat := b.GetPage("categories")
+ funny := b.GetPage("categories/funny")
+
+ b.Assert(cat, qt.Not(qt.IsNil))
+ b.Assert(funny, qt.Not(qt.IsNil))
+
+ b.Assert(cat.Parent().IsHome(), qt.Equals, true)
+ b.Assert(funny.Kind(), qt.Equals, "term")
+ b.Assert(funny.Parent(), qt.Equals, cat)
+
+ b.AssertFileContent("public/index.html", `
+Categories Pages: /categories/birds/|/categories/cats/|/categories/dogs/|/categories/funny/|/categories/gorillas/|:END
+Funny Pages: /section/p1/|/section/p2/|:END
+Cats Pages: /section/p1/|/section/|:END
+P1 Terms: /categories/funny/|/categories/cats/|:END
+Section Terms: /categories/cats/|/categories/dogs/|/categories/birds/|:END
+Home Terms: /categories/dogs/|/categories/gorillas/|:END
+Cats Paginator /section/p1/|/section/|:END
+Category Paginator /categories/birds/|/categories/cats/|/categories/dogs/|/categories/funny/|/categories/gorillas/|:END`,
+ )
+ b.AssertFileContent("public/404.html", "\n404 Terms: :END\n\t")
+ b.AssertFileContent("public/categories/funny/index.xml", `<link>http://example.com/section/p1/</link>`)
+ b.AssertFileContent("public/categories/index.xml", `<link>http://example.com/categories/funny/</link>`)
+}
+
+func TestTaxonomiesDirectoryOverlaps(t *testing.T) {
+ t.Parallel()
+
+ b := newTestSitesBuilder(t).WithContent(
+ "abc/_index.md", "---\ntitle: \"abc\"\nabcdefgs: [abc]\n---",
+ "abc/p1.md", "---\ntitle: \"abc-p\"\n---",
+ "abcdefgh/_index.md", "---\ntitle: \"abcdefgh\"\n---",
+ "abcdefgh/p1.md", "---\ntitle: \"abcdefgh-p\"\n---",
+ "abcdefghijk/index.md", "---\ntitle: \"abcdefghijk\"\n---",
+ )
+
+ b.WithConfigFile("toml", `
+baseURL = "https://example.org"
+
+[taxonomies]
+ abcdef = "abcdefs"
+ abcdefg = "abcdefgs"
+ abcdefghi = "abcdefghis"
+`)
+
+ b.WithTemplatesAdded("index.html", `
+{{ range site.Pages }}Page: {{ template "print-page" . }}
+{{ end }}
+{{ $abc := site.GetPage "abcdefgs/abc" }}
+{{ $abcdefgs := site.GetPage "abcdefgs" }}
+abc: {{ template "print-page" $abc }}|IsAncestor: {{ $abc.IsAncestor $abcdefgs }}|IsDescendant: {{ $abc.IsDescendant $abcdefgs }}
+abcdefgs: {{ template "print-page" $abcdefgs }}|IsAncestor: {{ $abcdefgs.IsAncestor $abc }}|IsDescendant: {{ $abcdefgs.IsDescendant $abc }}
+
+{{ define "print-page" }}{{ .RelPermalink }}|{{ .Title }}|{{.Kind }}|Parent: {{ with .Parent }}{{ .RelPermalink }}{{ end }}|CurrentSection: {{ .CurrentSection.RelPermalink}}|FirstSection: {{ .FirstSection.RelPermalink }}{{ end }}
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+ Page: /||home|Parent: |CurrentSection: /|
+ Page: /abc/|abc|section|Parent: /|CurrentSection: /abc/|
+ Page: /abc/p1/|abc-p|page|Parent: /abc/|CurrentSection: /abc/|
+ Page: /abcdefgh/|abcdefgh|section|Parent: /|CurrentSection: /abcdefgh/|
+ Page: /abcdefgh/p1/|abcdefgh-p|page|Parent: /abcdefgh/|CurrentSection: /abcdefgh/|
+ Page: /abcdefghijk/|abcdefghijk|page|Parent: /|CurrentSection: /|
+ Page: /abcdefghis/|Abcdefghis|taxonomy|Parent: /|CurrentSection: /|
+ Page: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /|
+ Page: /abcdefs/|Abcdefs|taxonomy|Parent: /|CurrentSection: /|
+ abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/|
+ abcdefgs: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /|
+ abc: /abcdefgs/abc/|abc|term|Parent: /abcdefgs/|CurrentSection: /abcdefgs/|FirstSection: /|IsAncestor: false|IsDescendant: true
+ abcdefgs: /abcdefgs/|Abcdefgs|taxonomy|Parent: /|CurrentSection: /|FirstSection: /|IsAncestor: true|IsDescendant: false
+`)
+}
diff --git a/hugolib/template_test.go b/hugolib/template_test.go
new file mode 100644
index 000000000..f9d54d8dc
--- /dev/null
+++ b/hugolib/template_test.go
@@ -0,0 +1,774 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/identity"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+func TestTemplateLookupOrder(t *testing.T) {
+ var (
+ fs *hugofs.Fs
+ cfg config.Provider
+ th testHelper
+ )
+
+ // Variants base templates:
+ // 1. <current-path>/<template-name>-baseof.<suffix>, e.g. list-baseof.<suffix>.
+ // 2. <current-path>/baseof.<suffix>
+ // 3. _default/<template-name>-baseof.<suffix>, e.g. list-baseof.<suffix>.
+ // 4. _default/baseof.<suffix>
+ for _, this := range []struct {
+ name string
+ setup func(t *testing.T)
+ assert func(t *testing.T)
+ }{
+ {
+ "Variant 1",
+ func(t *testing.T) {
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect1-baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect1.html"), `{{define "main"}}sect{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base: sect")
+ },
+ },
+ {
+ "Variant 2",
+ func(t *testing.T) {
+ writeSource(t, fs, filepath.Join("layouts", "baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "index.html"), `{{define "main"}}index{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "index.html"), "Base: index")
+ },
+ },
+ {
+ "Variant 3",
+ func(t *testing.T) {
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list-baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base: list")
+ },
+ },
+ {
+ "Variant 4",
+ func(t *testing.T) {
+ writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base: list")
+ },
+ },
+ {
+ "Variant 1, theme, use site base",
+ func(t *testing.T) {
+ cfg.Set("theme", "mytheme")
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect1-baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect-baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect1.html"), `{{define "main"}}sect{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base: sect")
+ },
+ },
+ {
+ "Variant 1, theme, use theme base",
+ func(t *testing.T) {
+ cfg.Set("theme", "mytheme")
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect1-baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect1.html"), `{{define "main"}}sect{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base Theme: sect")
+ },
+ },
+ {
+ "Variant 4, theme, use site base",
+ func(t *testing.T) {
+ cfg.Set("theme", "mytheme")
+ writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "index.html"), `{{define "main"}}index{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base: list")
+ th.assertFileContent(filepath.Join("public", "index.html"), "Base: index") // Issue #3505
+ },
+ },
+ {
+ "Variant 4, theme, use themes base",
+ func(t *testing.T) {
+ cfg.Set("theme", "mytheme")
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "baseof.html"), `Base Theme: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `{{define "main"}}list{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base Theme: list")
+ },
+ },
+ {
+ // Issue #3116
+ "Test section list and single template selection",
+ func(t *testing.T) {
+ cfg.Set("theme", "mytheme")
+
+ writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `Base: {{block "main" .}}block{{end}}`)
+
+ // Both single and list template in /SECTION/
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "sect1", "list.html"), `sect list`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "list.html"), `default list`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "sect1", "single.html"), `sect single`)
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "_default", "single.html"), `default single`)
+
+ // sect2 with list template in /section
+ writeSource(t, fs, filepath.Join("themes", "mytheme", "layouts", "section", "sect2.html"), `sect2 list`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "sect list")
+ th.assertFileContent(filepath.Join("public", "sect1", "page1", "index.html"), "sect single")
+ th.assertFileContent(filepath.Join("public", "sect2", "index.html"), "sect2 list")
+ },
+ },
+ {
+ // Issue #2995
+ "Test section list and single template selection with base template",
+ func(t *testing.T) {
+ writeSource(t, fs, filepath.Join("layouts", "_default", "baseof.html"), `Base Default: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "sect1", "baseof.html"), `Base Sect1: {{block "main" .}}block{{end}}`)
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect2-baseof.html"), `Base Sect2: {{block "main" .}}block{{end}}`)
+
+ // Both single and list + base template in /SECTION/
+ writeSource(t, fs, filepath.Join("layouts", "sect1", "list.html"), `{{define "main"}}sect1 list{{ end }}`)
+ writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"), `{{define "main"}}default list{{ end }}`)
+ writeSource(t, fs, filepath.Join("layouts", "sect1", "single.html"), `{{define "main"}}sect single{{ end }}`)
+ writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), `{{define "main"}}default single{{ end }}`)
+
+ // sect2 with list template in /section
+ writeSource(t, fs, filepath.Join("layouts", "section", "sect2.html"), `{{define "main"}}sect2 list{{ end }}`)
+ },
+ func(t *testing.T) {
+ th.assertFileContent(filepath.Join("public", "sect1", "index.html"), "Base Sect1", "sect1 list")
+ th.assertFileContent(filepath.Join("public", "sect1", "page1", "index.html"), "Base Sect1", "sect single")
+ th.assertFileContent(filepath.Join("public", "sect2", "index.html"), "Base Sect2", "sect2 list")
+
+ // Note that this will get the default base template and not the one in /sect2 -- because there are no
+ // single template defined in /sect2.
+ th.assertFileContent(filepath.Join("public", "sect2", "page2", "index.html"), "Base Default", "default single")
+ },
+ },
+ } {
+
+ this := this
+ t.Run(this.name, func(t *testing.T) {
+ // TODO(bep) there are some function vars need to pull down here to enable => t.Parallel()
+ cfg, fs = newTestCfg()
+ th = newTestHelper(cfg, fs, t)
+
+ for i := 1; i <= 3; i++ {
+ writeSource(t, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i)), `---
+title: Template test
+---
+Some content
+`)
+ }
+
+ this.setup(t)
+
+ buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+ // helpers.PrintFs(s.BaseFs.Layouts.Fs, "", os.Stdout)
+ this.assert(t)
+ })
+
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/4895
+func TestTemplateBOM(t *testing.T) {
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+ bom := "\ufeff"
+
+ b.WithTemplatesAdded(
+ "_default/baseof.html", bom+`
+ Base: {{ block "main" . }}base main{{ end }}`,
+ "_default/single.html", bom+`{{ define "main" }}Hi!?{{ end }}`)
+
+ b.WithContent("page.md", `---
+title: "Page"
+---
+
+Page Content
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/page/index.html", "Base: Hi!?")
+}
+
+func TestTemplateManyBaseTemplates(t *testing.T) {
+ t.Parallel()
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ numPages := 100 // To get some parallelism
+
+ pageTemplate := `---
+title: "Page %d"
+layout: "layout%d"
+---
+
+Content.
+`
+
+ singleTemplate := `
+{{ define "main" }}%d{{ end }}
+`
+ baseTemplate := `
+Base %d: {{ block "main" . }}FOO{{ end }}
+`
+
+ for i := 0; i < numPages; i++ {
+ id := i + 1
+ b.WithContent(fmt.Sprintf("page%d.md", id), fmt.Sprintf(pageTemplate, id, id))
+ b.WithTemplates(fmt.Sprintf("_default/layout%d.html", id), fmt.Sprintf(singleTemplate, id))
+ b.WithTemplates(fmt.Sprintf("_default/layout%d-baseof.html", id), fmt.Sprintf(baseTemplate, id))
+ }
+
+ b.Build(BuildCfg{})
+ for i := 0; i < numPages; i++ {
+ id := i + 1
+ b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", id), fmt.Sprintf(`Base %d: %d`, id, id))
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/6790
+func TestTemplateNoBasePlease(t *testing.T) {
+ t.Parallel()
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+
+ b.WithTemplates("_default/list.html", `
+ {{ define "main" }}
+ Bonjour
+ {{ end }}
+
+ {{ printf "list" }}
+
+
+ `)
+
+ b.WithTemplates(
+ "_default/single.html", `
+{{ printf "single" }}
+{{ define "main" }}
+ Bonjour
+{{ end }}
+
+
+`)
+
+ b.WithContent("blog/p1.md", `---
+title: The Page
+---
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/blog/p1/index.html", `single`)
+ b.AssertFileContent("public/blog/index.html", `list`)
+}
+
+// https://github.com/gohugoio/hugo/issues/6816
+func TestTemplateBaseWithComment(t *testing.T) {
+ t.Parallel()
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+ b.WithTemplatesAdded(
+ "baseof.html", `Base: {{ block "main" . }}{{ end }}`,
+ "index.html", `
+ {{/* A comment */}}
+ {{ define "main" }}
+ Bonjour
+ {{ end }}
+
+
+ `)
+
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", `Base:
+Bonjour`)
+}
+
+func TestTemplateLookupSite(t *testing.T) {
+ t.Run("basic", func(t *testing.T) {
+ t.Parallel()
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+ b.WithTemplates(
+ "_default/single.html", `Single: {{ .Title }}`,
+ "_default/list.html", `List: {{ .Title }}`,
+ )
+
+ createContent := func(title string) string {
+ return fmt.Sprintf(`---
+title: %s
+---`, title)
+ }
+
+ b.WithContent(
+ "_index.md", createContent("Home Sweet Home"),
+ "p1.md", createContent("P1"))
+
+ b.CreateSites().Build(BuildCfg{})
+ b.AssertFileContent("public/index.html", `List: Home Sweet Home`)
+ b.AssertFileContent("public/p1/index.html", `Single: P1`)
+ })
+
+ t.Run("baseof", func(t *testing.T) {
+ t.Parallel()
+ b := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
+
+ b.WithTemplatesAdded(
+ "index.html", `{{ define "main" }}Main Home En{{ end }}`,
+ "index.fr.html", `{{ define "main" }}Main Home Fr{{ end }}`,
+ "baseof.html", `Baseof en: {{ block "main" . }}main block{{ end }}`,
+ "baseof.fr.html", `Baseof fr: {{ block "main" . }}main block{{ end }}`,
+ "mysection/baseof.html", `Baseof mysection: {{ block "main" . }}mysection block{{ end }}`,
+ "_default/single.html", `{{ define "main" }}Main Default Single{{ end }}`,
+ "_default/list.html", `{{ define "main" }}Main Default List{{ end }}`,
+ )
+
+ b.WithContent("mysection/p1.md", `---
+title: My Page
+---
+
+`)
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/en/index.html", `Baseof en: Main Home En`)
+ b.AssertFileContent("public/fr/index.html", `Baseof fr: Main Home Fr`)
+ b.AssertFileContent("public/en/mysection/index.html", `Baseof mysection: Main Default List`)
+ b.AssertFileContent("public/en/mysection/p1/index.html", `Baseof mysection: Main Default Single`)
+ })
+}
+
+func TestTemplateFuncs(t *testing.T) {
+ b := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
+
+ homeTpl := `Site: {{ site.Language.Lang }} / {{ .Site.Language.Lang }} / {{ site.BaseURL }}
+Sites: {{ site.Sites.First.Home.Language.Lang }}
+Hugo: {{ hugo.Generator }}
+`
+
+ b.WithTemplatesAdded(
+ "index.html", homeTpl,
+ "index.fr.html", homeTpl,
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/en/index.html",
+ "Site: en / en / http://example.com/blog",
+ "Sites: en",
+ "Hugo: <meta name=\"generator\" content=\"Hugo")
+ b.AssertFileContent("public/fr/index.html",
+ "Site: fr / fr / http://example.com/blog",
+ "Sites: en",
+ "Hugo: <meta name=\"generator\" content=\"Hugo",
+ )
+}
+
+func TestPartialWithReturn(t *testing.T) {
+ c := qt.New(t)
+
+ newBuilder := func(t testing.TB) *sitesBuilder {
+ b := newTestSitesBuilder(t).WithSimpleConfigFile()
+ b.WithTemplatesAdded(
+ "partials/add42.tpl", `
+ {{ $v := add . 42 }}
+ {{ return $v }}
+ `,
+ "partials/dollarContext.tpl", `
+{{ $v := add $ 42 }}
+{{ return $v }}
+`,
+ "partials/dict.tpl", `
+{{ $v := add $.adder 42 }}
+{{ return $v }}
+`,
+ "partials/complex.tpl", `
+{{ return add . 42 }}
+`, "partials/hello.tpl", `
+ {{ $v := printf "hello %s" . }}
+ {{ return $v }}
+ `,
+ )
+
+ return b
+ }
+
+ c.Run("Return", func(c *qt.C) {
+ b := newBuilder(c)
+
+ b.WithTemplatesAdded(
+ "index.html", `
+Test Partials With Return Values:
+
+add42: 50: {{ partial "add42.tpl" 8 }}
+hello world: {{ partial "hello.tpl" "world" }}
+dollarContext: 60: {{ partial "dollarContext.tpl" 18 }}
+adder: 70: {{ partial "dict.tpl" (dict "adder" 28) }}
+complex: 80: {{ partial "complex.tpl" 38 }}
+`,
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+add42: 50: 50
+hello world: hello world
+dollarContext: 60: 60
+adder: 70: 70
+complex: 80: 80
+`,
+ )
+ })
+}
+
+// Issue 7528
+func TestPartialWithZeroedArgs(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithTemplatesAdded("index.html",
+ `
+X{{ partial "retval" dict }}X
+X{{ partial "retval" slice }}X
+X{{ partial "retval" "" }}X
+X{{ partial "retval" false }}X
+X{{ partial "retval" 0 }}X
+{{ define "partials/retval" }}
+ {{ return 123 }}
+{{ end }}`)
+
+ b.WithContentAdded("p.md", ``)
+ b.Build(BuildCfg{})
+ b.AssertFileContent("public/index.html",
+ `
+X123X
+X123X
+X123X
+X123X
+X123X
+`)
+}
+
+func TestPartialCached(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithTemplatesAdded(
+ "index.html", `
+{{ $key1 := (dict "a" "av" ) }}
+{{ $key2 := (dict "a" "av2" ) }}
+Partial cached1: {{ partialCached "p1" "input1" $key1 }}
+Partial cached2: {{ partialCached "p1" "input2" $key1 }}
+Partial cached3: {{ partialCached "p1" "input3" $key2 }}
+`,
+
+ "partials/p1.html", `partial: {{ . }}`,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+ Partial cached1: partial: input1
+ Partial cached2: partial: input1
+ Partial cached3: partial: input3
+`)
+}
+
+// https://github.com/gohugoio/hugo/issues/6615
+func TestTemplateTruth(t *testing.T) {
+ b := newTestSitesBuilder(t)
+ b.WithTemplatesAdded("index.html", `
+{{ $p := index site.RegularPages 0 }}
+{{ $zero := $p.ExpiryDate }}
+{{ $notZero := time.Now }}
+
+if: Zero: {{ if $zero }}FAIL{{ else }}OK{{ end }}
+if: Not Zero: {{ if $notZero }}OK{{ else }}Fail{{ end }}
+not: Zero: {{ if not $zero }}OK{{ else }}FAIL{{ end }}
+not: Not Zero: {{ if not $notZero }}FAIL{{ else }}OK{{ end }}
+
+with: Zero {{ with $zero }}FAIL{{ else }}OK{{ end }}
+
+`)
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+if: Zero: OK
+if: Not Zero: OK
+not: Zero: OK
+not: Not Zero: OK
+with: Zero OK
+`)
+}
+
+func TestTemplateDependencies(t *testing.T) {
+ b := newTestSitesBuilder(t).Running()
+
+ b.WithTemplates("index.html", `
+{{ $p := site.GetPage "p1" }}
+{{ partial "p1.html" $p }}
+{{ partialCached "p2.html" "foo" }}
+{{ partials.Include "p3.html" "data" }}
+{{ partials.IncludeCached "p4.html" "foo" }}
+{{ $p := partial "p5" }}
+{{ partial "sub/p6.html" }}
+{{ partial "P7.html" }}
+{{ template "_default/foo.html" }}
+Partial nested: {{ partial "p10" }}
+
+`,
+ "partials/p1.html", `ps: {{ .Render "li" }}`,
+ "partials/p2.html", `p2`,
+ "partials/p3.html", `p3`,
+ "partials/p4.html", `p4`,
+ "partials/p5.html", `p5`,
+ "partials/sub/p6.html", `p6`,
+ "partials/P7.html", `p7`,
+ "partials/p8.html", `p8 {{ partial "p9.html" }}`,
+ "partials/p9.html", `p9`,
+ "partials/p10.html", `p10 {{ partial "p11.html" }}`,
+ "partials/p11.html", `p11`,
+ "_default/foo.html", `foo`,
+ "_default/li.html", `li {{ partial "p8.html" }}`,
+ )
+
+ b.WithContent("p1.md", `---
+title: P1
+---
+
+
+`)
+
+ b.Build(BuildCfg{})
+
+ s := b.H.Sites[0]
+
+ templ, found := s.lookupTemplate("index.html")
+ b.Assert(found, qt.Equals, true)
+
+ idset := make(map[identity.Identity]bool)
+ collectIdentities(idset, templ.(tpl.Info))
+ b.Assert(idset, qt.HasLen, 11)
+}
+
+func TestTemplateGoIssues(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithTemplatesAdded(
+ "index.html", `
+{{ $title := "a & b" }}
+<script type="application/ld+json">{"@type":"WebPage","headline":"{{$title}}"}</script>
+
+{{/* Action/commands newlines, from Go 1.16, see https://github.com/golang/go/issues/29770 */}}
+{{ $norway := dict
+ "country" "Norway"
+ "population" "5 millions"
+ "language" "Norwegian"
+ "language_code" "nb"
+ "weather" "freezing cold"
+ "capitol" "Oslo"
+ "largest_city" "Oslo"
+ "currency" "Norwegian krone"
+ "dialing_code" "+47"
+}}
+
+Population in Norway is {{
+ $norway.population
+ | lower
+ | upper
+}}
+
+`,
+ )
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+<script type="application/ld+json">{"@type":"WebPage","headline":"a \u0026 b"}</script>
+Population in Norway is 5 MILLIONS
+
+`)
+}
+
+func collectIdentities(set map[identity.Identity]bool, provider identity.Provider) {
+ if ids, ok := provider.(identity.IdentitiesProvider); ok {
+ for _, id := range ids.GetIdentities() {
+ collectIdentities(set, id)
+ }
+ } else {
+ set[provider.GetIdentity()] = true
+ }
+}
+
+func ident(level int) string {
+ return strings.Repeat(" ", level)
+}
+
+func TestPartialInline(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithContent("p1.md", "")
+
+ b.WithTemplates(
+ "index.html", `
+
+{{ $p1 := partial "p1" . }}
+{{ $p2 := partial "p2" . }}
+
+P1: {{ $p1 }}
+P2: {{ $p2 }}
+
+{{ define "partials/p1" }}Inline: p1{{ end }}
+
+{{ define "partials/p2" }}
+{{ $value := 32 }}
+{{ return $value }}
+{{ end }}
+
+
+`,
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html",
+ `
+P1: Inline: p1
+P2: 32`,
+ )
+}
+
+func TestPartialInlineBase(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithContent("p1.md", "")
+
+ b.WithTemplates(
+ "baseof.html", `{{ $p3 := partial "p3" . }}P3: {{ $p3 }}
+{{ block "main" . }}{{ end }}{{ define "partials/p3" }}Inline: p3{{ end }}`,
+ "index.html", `
+{{ define "main" }}
+
+{{ $p1 := partial "p1" . }}
+{{ $p2 := partial "p2" . }}
+
+P1: {{ $p1 }}
+P2: {{ $p2 }}
+
+{{ end }}
+
+
+{{ define "partials/p1" }}Inline: p1{{ end }}
+
+{{ define "partials/p2" }}
+{{ $value := 32 }}
+{{ return $value }}
+{{ end }}
+
+
+`,
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html",
+ `
+P1: Inline: p1
+P2: 32
+P3: Inline: p3
+`,
+ )
+}
+
+// https://github.com/gohugoio/hugo/issues/7478
+func TestBaseWithAndWithoutDefine(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithContent("p1.md", "---\ntitle: P\n---\nContent")
+
+ b.WithTemplates(
+ "_default/baseof.html", `
+::Header Start:{{ block "header" . }}{{ end }}:Header End:
+::{{ block "main" . }}Main{{ end }}::
+`, "index.html", `
+{{ define "header" }}
+Home Header
+{{ end }}
+{{ define "main" }}
+This is home main
+{{ end }}
+`,
+
+ "_default/single.html", `
+{{ define "main" }}
+This is single main
+{{ end }}
+`,
+ )
+
+ b.CreateSites().Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `
+Home Header
+This is home main
+`,
+ )
+
+ b.AssertFileContent("public/p1/index.html", `
+ ::Header Start::Header End:
+This is single main
+`,
+ )
+}
+
+// Issue 9393.
+func TestApplyWithNamespace(t *testing.T) {
+ b := newTestSitesBuilder(t)
+
+ b.WithTemplates(
+ "index.html", `
+{{ $b := slice " a " " b " " c" }}
+{{ $a := apply $b "strings.Trim" "." " " }}
+a: {{ $a }}
+`,
+ ).WithContent("p1.md", "")
+
+ b.Build(BuildCfg{})
+
+ b.AssertFileContent("public/index.html", `a: [a b c]`)
+}
diff --git a/hugolib/testdata/cities.csv b/hugolib/testdata/cities.csv
new file mode 100644
index 000000000..ee6b058b6
--- /dev/null
+++ b/hugolib/testdata/cities.csv
@@ -0,0 +1,130 @@
+"LatD", "LatM", "LatS", "NS", "LonD", "LonM", "LonS", "EW", "City", "State"
+ 41, 5, 59, "N", 80, 39, 0, "W", "Youngstown", OH
+ 42, 52, 48, "N", 97, 23, 23, "W", "Yankton", SD
+ 46, 35, 59, "N", 120, 30, 36, "W", "Yakima", WA
+ 42, 16, 12, "N", 71, 48, 0, "W", "Worcester", MA
+ 43, 37, 48, "N", 89, 46, 11, "W", "Wisconsin Dells", WI
+ 36, 5, 59, "N", 80, 15, 0, "W", "Winston-Salem", NC
+ 49, 52, 48, "N", 97, 9, 0, "W", "Winnipeg", MB
+ 39, 11, 23, "N", 78, 9, 36, "W", "Winchester", VA
+ 34, 14, 24, "N", 77, 55, 11, "W", "Wilmington", NC
+ 39, 45, 0, "N", 75, 33, 0, "W", "Wilmington", DE
+ 48, 9, 0, "N", 103, 37, 12, "W", "Williston", ND
+ 41, 15, 0, "N", 77, 0, 0, "W", "Williamsport", PA
+ 37, 40, 48, "N", 82, 16, 47, "W", "Williamson", WV
+ 33, 54, 0, "N", 98, 29, 23, "W", "Wichita Falls", TX
+ 37, 41, 23, "N", 97, 20, 23, "W", "Wichita", KS
+ 40, 4, 11, "N", 80, 43, 12, "W", "Wheeling", WV
+ 26, 43, 11, "N", 80, 3, 0, "W", "West Palm Beach", FL
+ 47, 25, 11, "N", 120, 19, 11, "W", "Wenatchee", WA
+ 41, 25, 11, "N", 122, 23, 23, "W", "Weed", CA
+ 31, 13, 11, "N", 82, 20, 59, "W", "Waycross", GA
+ 44, 57, 35, "N", 89, 38, 23, "W", "Wausau", WI
+ 42, 21, 36, "N", 87, 49, 48, "W", "Waukegan", IL
+ 44, 54, 0, "N", 97, 6, 36, "W", "Watertown", SD
+ 43, 58, 47, "N", 75, 55, 11, "W", "Watertown", NY
+ 42, 30, 0, "N", 92, 20, 23, "W", "Waterloo", IA
+ 41, 32, 59, "N", 73, 3, 0, "W", "Waterbury", CT
+ 38, 53, 23, "N", 77, 1, 47, "W", "Washington", DC
+ 41, 50, 59, "N", 79, 8, 23, "W", "Warren", PA
+ 46, 4, 11, "N", 118, 19, 48, "W", "Walla Walla", WA
+ 31, 32, 59, "N", 97, 8, 23, "W", "Waco", TX
+ 38, 40, 48, "N", 87, 31, 47, "W", "Vincennes", IN
+ 28, 48, 35, "N", 97, 0, 36, "W", "Victoria", TX
+ 32, 20, 59, "N", 90, 52, 47, "W", "Vicksburg", MS
+ 49, 16, 12, "N", 123, 7, 12, "W", "Vancouver", BC
+ 46, 55, 11, "N", 98, 0, 36, "W", "Valley City", ND
+ 30, 49, 47, "N", 83, 16, 47, "W", "Valdosta", GA
+ 43, 6, 36, "N", 75, 13, 48, "W", "Utica", NY
+ 39, 54, 0, "N", 79, 43, 48, "W", "Uniontown", PA
+ 32, 20, 59, "N", 95, 18, 0, "W", "Tyler", TX
+ 42, 33, 36, "N", 114, 28, 12, "W", "Twin Falls", ID
+ 33, 12, 35, "N", 87, 34, 11, "W", "Tuscaloosa", AL
+ 34, 15, 35, "N", 88, 42, 35, "W", "Tupelo", MS
+ 36, 9, 35, "N", 95, 54, 36, "W", "Tulsa", OK
+ 32, 13, 12, "N", 110, 58, 12, "W", "Tucson", AZ
+ 37, 10, 11, "N", 104, 30, 36, "W", "Trinidad", CO
+ 40, 13, 47, "N", 74, 46, 11, "W", "Trenton", NJ
+ 44, 45, 35, "N", 85, 37, 47, "W", "Traverse City", MI
+ 43, 39, 0, "N", 79, 22, 47, "W", "Toronto", ON
+ 39, 2, 59, "N", 95, 40, 11, "W", "Topeka", KS
+ 41, 39, 0, "N", 83, 32, 24, "W", "Toledo", OH
+ 33, 25, 48, "N", 94, 3, 0, "W", "Texarkana", TX
+ 39, 28, 12, "N", 87, 24, 36, "W", "Terre Haute", IN
+ 27, 57, 0, "N", 82, 26, 59, "W", "Tampa", FL
+ 30, 27, 0, "N", 84, 16, 47, "W", "Tallahassee", FL
+ 47, 14, 24, "N", 122, 25, 48, "W", "Tacoma", WA
+ 43, 2, 59, "N", 76, 9, 0, "W", "Syracuse", NY
+ 32, 35, 59, "N", 82, 20, 23, "W", "Swainsboro", GA
+ 33, 55, 11, "N", 80, 20, 59, "W", "Sumter", SC
+ 40, 59, 24, "N", 75, 11, 24, "W", "Stroudsburg", PA
+ 37, 57, 35, "N", 121, 17, 24, "W", "Stockton", CA
+ 44, 31, 12, "N", 89, 34, 11, "W", "Stevens Point", WI
+ 40, 21, 36, "N", 80, 37, 12, "W", "Steubenville", OH
+ 40, 37, 11, "N", 103, 13, 12, "W", "Sterling", CO
+ 38, 9, 0, "N", 79, 4, 11, "W", "Staunton", VA
+ 39, 55, 11, "N", 83, 48, 35, "W", "Springfield", OH
+ 37, 13, 12, "N", 93, 17, 24, "W", "Springfield", MO
+ 42, 5, 59, "N", 72, 35, 23, "W", "Springfield", MA
+ 39, 47, 59, "N", 89, 39, 0, "W", "Springfield", IL
+ 47, 40, 11, "N", 117, 24, 36, "W", "Spokane", WA
+ 41, 40, 48, "N", 86, 15, 0, "W", "South Bend", IN
+ 43, 32, 24, "N", 96, 43, 48, "W", "Sioux Falls", SD
+ 42, 29, 24, "N", 96, 23, 23, "W", "Sioux City", IA
+ 32, 30, 35, "N", 93, 45, 0, "W", "Shreveport", LA
+ 33, 38, 23, "N", 96, 36, 36, "W", "Sherman", TX
+ 44, 47, 59, "N", 106, 57, 35, "W", "Sheridan", WY
+ 35, 13, 47, "N", 96, 40, 48, "W", "Seminole", OK
+ 32, 25, 11, "N", 87, 1, 11, "W", "Selma", AL
+ 38, 42, 35, "N", 93, 13, 48, "W", "Sedalia", MO
+ 47, 35, 59, "N", 122, 19, 48, "W", "Seattle", WA
+ 41, 24, 35, "N", 75, 40, 11, "W", "Scranton", PA
+ 41, 52, 11, "N", 103, 39, 36, "W", "Scottsbluff", NB
+ 42, 49, 11, "N", 73, 56, 59, "W", "Schenectady", NY
+ 32, 4, 48, "N", 81, 5, 23, "W", "Savannah", GA
+ 46, 29, 24, "N", 84, 20, 59, "W", "Sault Sainte Marie", MI
+ 27, 20, 24, "N", 82, 31, 47, "W", "Sarasota", FL
+ 38, 26, 23, "N", 122, 43, 12, "W", "Santa Rosa", CA
+ 35, 40, 48, "N", 105, 56, 59, "W", "Santa Fe", NM
+ 34, 25, 11, "N", 119, 41, 59, "W", "Santa Barbara", CA
+ 33, 45, 35, "N", 117, 52, 12, "W", "Santa Ana", CA
+ 37, 20, 24, "N", 121, 52, 47, "W", "San Jose", CA
+ 37, 46, 47, "N", 122, 25, 11, "W", "San Francisco", CA
+ 41, 27, 0, "N", 82, 42, 35, "W", "Sandusky", OH
+ 32, 42, 35, "N", 117, 9, 0, "W", "San Diego", CA
+ 34, 6, 36, "N", 117, 18, 35, "W", "San Bernardino", CA
+ 29, 25, 12, "N", 98, 30, 0, "W", "San Antonio", TX
+ 31, 27, 35, "N", 100, 26, 24, "W", "San Angelo", TX
+ 40, 45, 35, "N", 111, 52, 47, "W", "Salt Lake City", UT
+ 38, 22, 11, "N", 75, 35, 59, "W", "Salisbury", MD
+ 36, 40, 11, "N", 121, 39, 0, "W", "Salinas", CA
+ 38, 50, 24, "N", 97, 36, 36, "W", "Salina", KS
+ 38, 31, 47, "N", 106, 0, 0, "W", "Salida", CO
+ 44, 56, 23, "N", 123, 1, 47, "W", "Salem", OR
+ 44, 57, 0, "N", 93, 5, 59, "W", "Saint Paul", MN
+ 38, 37, 11, "N", 90, 11, 24, "W", "Saint Louis", MO
+ 39, 46, 12, "N", 94, 50, 23, "W", "Saint Joseph", MO
+ 42, 5, 59, "N", 86, 28, 48, "W", "Saint Joseph", MI
+ 44, 25, 11, "N", 72, 1, 11, "W", "Saint Johnsbury", VT
+ 45, 34, 11, "N", 94, 10, 11, "W", "Saint Cloud", MN
+ 29, 53, 23, "N", 81, 19, 11, "W", "Saint Augustine", FL
+ 43, 25, 48, "N", 83, 56, 24, "W", "Saginaw", MI
+ 38, 35, 24, "N", 121, 29, 23, "W", "Sacramento", CA
+ 43, 36, 36, "N", 72, 58, 12, "W", "Rutland", VT
+ 33, 24, 0, "N", 104, 31, 47, "W", "Roswell", NM
+ 35, 56, 23, "N", 77, 48, 0, "W", "Rocky Mount", NC
+ 41, 35, 24, "N", 109, 13, 48, "W", "Rock Springs", WY
+ 42, 16, 12, "N", 89, 5, 59, "W", "Rockford", IL
+ 43, 9, 35, "N", 77, 36, 36, "W", "Rochester", NY
+ 44, 1, 12, "N", 92, 27, 35, "W", "Rochester", MN
+ 37, 16, 12, "N", 79, 56, 24, "W", "Roanoke", VA
+ 37, 32, 24, "N", 77, 26, 59, "W", "Richmond", VA
+ 39, 49, 48, "N", 84, 53, 23, "W", "Richmond", IN
+ 38, 46, 12, "N", 112, 5, 23, "W", "Richfield", UT
+ 45, 38, 23, "N", 89, 25, 11, "W", "Rhinelander", WI
+ 39, 31, 12, "N", 119, 48, 35, "W", "Reno", NV
+ 50, 25, 11, "N", 104, 39, 0, "W", "Regina", SA
+ 40, 10, 48, "N", 122, 14, 23, "W", "Red Bluff", CA
+ 40, 19, 48, "N", 75, 55, 48, "W", "Reading", PA
+ 41, 9, 35, "N", 81, 14, 23, "W", "Ravenna", OH
+
diff --git a/hugolib/testdata/fruits.json b/hugolib/testdata/fruits.json
new file mode 100644
index 000000000..3bb802a16
--- /dev/null
+++ b/hugolib/testdata/fruits.json
@@ -0,0 +1,5 @@
+{
+ "fruit": "Apple",
+ "size": "Large",
+ "color": "Red"
+}
diff --git a/hugolib/testdata/redis.cn.md b/hugolib/testdata/redis.cn.md
new file mode 100644
index 000000000..d485061d5
--- /dev/null
+++ b/hugolib/testdata/redis.cn.md
@@ -0,0 +1,697 @@
+---
+title: The Little Redis Book cn
+---
+\thispagestyle{empty}
+\changepage{}{}{}{-0.5cm}{}{2cm}{}{}{}
+![The Little Redis Book cn, By Karl Seguin, Translate By Jason Lai](title.png)\
+
+\clearpage
+\changepage{}{}{}{0.5cm}{}{-2cm}{}{}{}
+
+## 关于此书
+
+### 许可证
+
+《The Little Redis Book》是经由Attribution-NonCommercial 3.0 Unported license许可的,你不需要为此书付钱。
+
+你可以自由地对此书进行复制,分发,修改或者展示等操作。当然,你必须知道且认可这本书的作者是Karl Seguin,译者是赖立维,而且不应该将此书用于商业用途。
+
+关于这个**许可证**的*详细描述*在这里:
+
+<http://creativecommons.org/licenses/by-nc/3.0/legalcode>
+
+### 关于作者
+
+作者Karl Seguin是一名在多项技术领域浸淫多年的开发者。他是开源软件计划的活跃贡献者,同时也是一名技术作者以及业余演讲者。他写过若干关于Redis的文章以及一些工具。在他的一个面向业余游戏开发者的免费服务里,Redis为其中的评级和统计功能提供了支持:[mogade.com](http://mogade.com/)。
+
+Karl之前还写了[《The Little MongoDB Book》](http://openmymind.net/2011/3/28/The-Little-MongoDB-Book/),这是一本免费且受好评,关于MongoDB的书。
+
+他的博客是<http://openmymind.net>,你也可以关注他的Twitter帐号,via [@karlseguin](http://twitter.com/karlseguin)。
+
+### 关于译者
+
+译者 赖立维 是一名长在天朝的普通程序员,对许多技术都有浓厚的兴趣,是开源软件的支持者,Emacs的轻度使用者。
+
+虽然译者已经很认真地对待这次翻译,但是限于水平有限,肯定会有不少错漏,如果发现该书的翻译有什么需要修改,可以通过他的邮箱与他联系。他的邮箱是<jasonlai256@gmail.com>。
+
+### 致谢
+
+必须特别感谢[Perry Neal](https://twitter.com/perryneal)一直以来的指导,我的眼界、触觉以及激情都来源于你。你为我提供了无价的帮助,感谢你。
+
+### 最新版本
+
+此书的最新有效资源在:
+<https://github.com/karlseguin/the-little-redis-book>
+
+中文版是英文版的一个分支,最新的中文版本在:
+<https://github.com/JasonLai256/the-little-redis-book>
+
+\clearpage
+
+## 简介
+
+最近几年来,关于持久化和数据查询的相关技术,其需求已经增长到了让人惊讶的程度。可以断言,关系型数据库再也不是放之四海皆准。换一句话说,围绕数据的解决方案不可能再只有唯一一种。
+
+对于我来说,在众多新出现的解决方案和工具里,最让人兴奋的,无疑是Redis。为什么?首先是因为其让人不可思议的容易学习,只需要简短的几个小时学习时间,就能对Redis有个大概的认识。还有,Redis在处理一组特定的问题集的同时能保持相当的通用性。更准确地说就是,Redis不会尝试去解决关于数据的所有事情。在你足够了解Redis后,事情就会变得越来越清晰,什么是可行的,什么是不应该由Redis来处理的。作为一名开发人员,如此的经验当是相当的美妙。
+
+当你能仅使用Redis去构建一个完整系统时,我想大多数人将会发现,Redis能使得他们的许多数据方案变得更为通用,不论是一个传统的关系型数据库,一个面向文档的系统,或是其它更多的东西。这是一种用来实现某些特定特性的解决方法。就类似于一个索引引擎,你不会在Lucene上构建整个程序,但当你需要足够好的搜索,为什么不使用它呢?这对你和你的用户都有好处。当然,关于Redis和索引引擎之间相似性的讨论到此为止。
+
+本书的目的是向读者传授掌握Redis所需要的基本知识。我们将会注重于学习Redis的5种数据结构,并研究各种数据建模方法。我们还会接触到一些主要的管理细节和调试技巧。
+
+## 入门
+
+每个人的学习方式都不一样,有的人喜欢亲自实践学习,有的喜欢观看教学视频,还有的喜欢通过阅读来学习。对于Redis,没有什么比亲自实践学习来得效果更好的了。Redis的安装非常简单。而且通过随之安装的一个简单的命令解析程序,就能处理我们想做的一切事情。让我们先花几分钟的时间把Redis安装到我们的机器上。
+
+### Windows平台
+
+Redis并没有官方支持Windows平台,但还是可供选择。你不会想在这里配置实际的生产环境,不过在我过往的开发经历里并没有感到有什么限制。
+
+首先进入<https://github.com/dmajkic/redis/downloads>,然后下载最新的版本(应该会在列表的最上方)。
+
+获取zip文件,然后根据你的系统架构,打开`64bit`或`32bit`文件夹。
+
+### *nix和MacOSX平台
+
+对于*nix和MacOSX平台的用户,从源文件来安装是你的最佳选择。通过最新的版本号来选择,有效地址于<http://redis.io/download>。在编写此书的时候,最新的版本是2.4.6,我们可以运行下面的命令来安装该版本:
+
+ wget http://redis.googlecode.com/files/redis-2.4.6.tar.gz
+ tar xzf redis-2.4.6.tar.gz
+ cd redis-2.4.6
+ make
+
+(当然,Redis同样可以通过套件管理程序来安装。例如,使用Homebrew的MacOSX用户可以只键入`brew install redis`即可。)
+
+如果你是通过源文件来安装,二进制可执行文件会被放置在`src`目录里。通过运行`cd src`可跳转到`src`目录。
+
+### 运行和连接Redis
+
+如果一切都工作正常,那Redis的二进制文件应该已经可以曼妙地跳跃于你的指尖之下。Redis只有少量的可执行文件,我们将着重于Redis的服务器和命令行界面(一个类DOS的客户端)。首先,让我们来运行服务器。在Windows平台,双击`redis-server`,在*nix/MacOSX平台则运行`./redis-server`.
+
+如果你仔细看了启动信息,你会看到一个警告,指没能找到`redis.conf`文件。Redis将会采用内置的默认设置,这对于我们将要做的已经足够了。
+
+然后,通过双击`redis-cli`(Windows平台)或者运行`./redis-cli`(*nix/MacOSX平台),启动Redis的控制台。控制台将会通过默认的端口(6379)来连接本地运行的服务器。
+
+可以在命令行界面键入`info`命令来查看一切是不是都运行正常。你会很乐意看到这么一大组关键字-值(key-value)对的显示,这为我们查看服务器的状态提供了大量有效信息。
+
+如果在上面的启动步骤里遇到什么问题,我建议你到[Redis的官方支持组](https://groups.google.com/forum/#!forum/redis-db)里获取帮助。
+
+## 驱动Redis
+
+很快你就会发现,Redis的API就如一组定义明确的函数那般容易理解。Redis具有让人难以置信的简单性,其操作过程也同样如此。这意味着,无论你是使用命令行程序,或是使用你喜欢的语言来驱动,整体的感觉都不会相差多少。因此,相对于命令行程序,如果你更愿意通过一种编程语言去驱动Redis,你不会感觉到有任何适应的问题。如果真想如此,可以到Redis的[客户端推荐页面](http://redis.io/clients)下载适合的Redis载体。
+
+\clearpage
+
+## 第1章 - 基础知识
+
+是什么使Redis显得这么特别?Redis具体能解决什么类型的问题?要实际应用Redis,开发者必须储备什么知识?在我们能回答这么一些问题之前,我们需要明白Redis到底是什么。
+
+Redis通常被人们认为是一种持久化的存储器关键字-值型存储(in-memory persistent key-value store)。我认为这种对Redis的描述并不太准确。Redis的确是将所有的数据存放于存储器(更多是按位存储),而且也确实通过将数据写入磁盘来实现持久化,但是Redis的实际意义比单纯的关键字-值型存储要来得深远。纠正脑海里的这种误解观点非常关键,否则你对于Redis之道以及其应用的洞察力就会变得越发狭义。
+
+事实是,Redis引入了5种不同的数据结构,只有一个是典型的关键字-值型结构。理解Redis的关键就在于搞清楚这5种数据结构,其工作的原理都是如何,有什么关联方法以及你能怎样应用这些数据结构去构建模型。首先,让我们来弄明白这些数据结构的实际意义。
+
+应用上面提及的数据结构概念到我们熟悉的关系型数据库里,我们可以认为其引入了一个单独的数据结构——表格。表格既复杂又灵活,基于表格的存储和管理,没有多少东西是你不能进行建模的。然而,这种通用性并不是没有缺点。具体来说就是,事情并不是总能达到假设中的简单或者快速。相对于这种普遍适用(one-size-fits-all)的结构体系,我们可以使用更为专门化的结构体系。当然,因此可能有些事情我们会完成不了(至少,达不到很好的程度)。但话说回来,这样做就能确定我们可以获得想象中的简单性和速度吗?
+
+针对特定类型的问题使用特定的数据结构?我们不就是这样进行编程的吗?你不会使用一个散列表去存储每份数据,也不会使用一个标量变量去存储。对我来说,这正是Redis的做法。如果你需要处理标量、列表、散列或者集合,为什么不直接就用标量、列表、散列和集合去存储他们?为什么不是直接调用`exists(key)`去检测一个已存在的值,而是要调用其他比O(1)(常量时间查找,不会因为待处理元素的增长而变慢)慢的操作?
+
+### 数据库(Databases)
+
+与你熟悉的关系型数据库一致,Redis有着相同的数据库基本概念,即一个数据库包含一组数据。典型的数据库应用案例是,将一个程序的所有数据组织起来,使之与另一个程序的数据保持独立。
+
+在Redis里,数据库简单的使用一个数字编号来进行辨认,默认数据库的数字编号是`0`。如果你想切换到一个不同的数据库,你可以使用`select`命令来实现。在命令行界面里键入`select 1`,Redis应该会回复一条`OK`的信息,然后命令行界面里的提示符会变成类似`redis 127.0.0.1:6379[1]>`这样。如果你想切换回默认数据库,只要在命令行界面键入`select 0`即可。
+
+### 命令、关键字和值(Commands, Keys and Values)
+
+Redis不仅仅是一种简单的关键字-值型存储,从其核心概念来看,Redis的5种数据结构中的每一个都至少有一个关键字和一个值。在转入其它关于Redis的有用信息之前,我们必须理解关键字和值的概念。
+
+关键字(Keys)是用来标识数据块。我们将会很常跟关键字打交道,不过在现在,明白关键字就是类似于`users:leto`这样的表述就足够了。一般都能很好地理解到,这样关键字包含的信息是一个名为`leto`的用户。这个关键字里的冒号没有任何特殊含义,对于Redis而言,使用分隔符来组织关键字是很常见的方法。
+
+值(Values)是关联于关键字的实际值,可以是任何东西。有时候你会存储字符串,有时候是整数,还有时候你会存储序列化对象(使用JSON、XML或其他格式)。在大多数情况下,Redis会把值看做是一个字节序列,而不会关注它们实质上是什么。要注意,不同的Redis载体处理序列化会有所不同(一些会让你自己决定)。因此,在这本书里,我们将仅讨论字符串、整数和JSON。
+
+现在让我们活动一下手指吧。在命令行界面键入下面的命令:
+
+ set users:leto "{name: leto, planet: dune, likes: [spice]}"
+
+这就是Redis命令的基本构成。首先我们要有一个确定的命令,在上面的语句里就是`set`。然后就是相应的参数,`set`命令接受两个参数,包括要设置的关键字,以及相应要设置的值。很多的情况是,命令接受一个关键字(当这种情况出现,其经常是第一个参数)。你能想到如何去获取这个值吗?我想你会说(当然一时拿不准也没什么):
+
+ get users:leto
+
+关键字和值的是Redis的基本概念,而`get`和`set`命令是对此最简单的使用。你可以创建更多的用户,去尝试不同类型的关键字以及不同的值,看看一些不同的组合。
+
+### 查询(Querying)
+
+随着学习的持续深入,两件事情将变得清晰起来。对于Redis而言,关键字就是一切,而值是没有任何意义。更通俗来看就是,Redis不允许你通过值来进行查询。回到上面的例子,我们就不能查询生活在`dune`行星上的用户。
+
+对许多人来说,这会引起一些担忧。在我们生活的世界里,数据查询是如此的灵活和强大,而Redis的方式看起来是这么的原始和不高效。不要让这些扰乱你太久。要记住,Redis不是一种普遍使用(one-size-fits-all)的解决方案,确实存在这么一些事情是不应该由Redis来解决的(因为其查询的限制)。事实上,在考虑了这些情况后,你会找到新的方法去构建你的数据。
+
+很快,我们就能看到更多实际的用例。很重要的一点是,我们要明白关于Redis的这些基本事实。这能帮助我们弄清楚为什么值可以是任何东西,因为Redis从来不需要去读取或理解它们。而且,这也可以帮助我们理清思路,然后去思考如何在这个新世界里建立模型。
+
+### 存储器和持久化(Memory and Persistence)
+
+我们之前提及过,Redis是一种持久化的存储器内存储(in-memory persistent store)。对于持久化,默认情况下,Redis会根据已变更的关键字数量来进行判断,然后在磁盘里创建数据库的快照(snapshot)。你可以对此进行设置,如果X个关键字已变更,那么每隔Y秒存储数据库一次。默认情况下,如果1000个或更多的关键字已变更,Redis会每隔60秒存储数据库;而如果9个或更少的关键字已变更,Redis会每隔15分钟存储数据库。
+
+除了创建磁盘快照外,Redis可以在附加模式下运行。任何时候,如果有一个关键字变更,一个单一附加(append-only)的文件会在磁盘里进行更新。在一些情况里,虽然硬件或软件可能发生错误,但用那60秒有效数据存储去换取更好性能是可以接受的。而在另一些情况里,这种损失就难以让人接受,Redis为你提供了选择。在第5章里,我们将会看到第三种选择,其将持久化任务减荷到一个从属数据库里。
+
+至于存储器,Redis会将所有数据都保留在存储器中。显而易见,运行Redis具有不低的成本:因为RAM仍然是最昂贵的服务器硬件部件。
+
+我很清楚有一些开发者对即使是一点点的数据空间都是那么的敏感。一本《威廉·莎士比亚全集》需要近5.5MB的存储空间。对于缩放的需求,其它的解决方案趋向于IO-bound或者CPU-bound。这些限制(RAM或者IO)将会需要你去理解更多机器实际依赖的数据类型,以及应该如何去进行存储和查询。除非你是存储大容量的多媒体文件到Redis中,否则存储器内存储应该不会是一个问题。如果这对于一个程序是个问题,你就很可能不会用IO-bound的解决方案。
+
+Redis有虚拟存储器的支持。然而,这个功能已经被认为是失败的了(通过Redis的开发者),而且它的使用已经被废弃了。
+
+(从另一个角度来看,一本5.5MB的《威廉·莎士比亚全集》可以通过压缩减小到近2MB。当然,Redis不会自动对值进行压缩,但是因为其将所有值都看作是字节,没有什么限制让你不能对数据进行压缩/解压,通过牺牲处理时间来换取存储空间。)
+
+### 整体来看(Putting It Together)
+
+我们已经接触了好几个高层次的主题。在继续深入Redis之前,我想做的最后一件事情是将这些主题整合起来。这些主题包括,查询的限制,数据结构以及Redis在存储器内存储数据的方法。
+
+当你将这3个主题整合起来,你最终会得出一个绝妙的结论:速度。一些人可能会想,当然Redis会很快速,要知道所有的东西都在存储器里。但这仅仅是其中的一部分,让Redis闪耀的真正原因是其不同于其它解决方案的特殊数据结构。
+
+能有多快速?这依赖于很多东西,包括你正在使用着哪个命令,数据的类型等等。但Redis的性能测试是趋向于数万或数十万次操作**每秒**。你可以通过运行`redis-benchmark`(就在`redis-server`和`redis-cli`的同一个文件夹里)来进行测试。
+
+我曾经试过将一组使用传统模型的代码转向使用Redis。在传统模型里,运行一个我写的载入测试,需要超过5分钟的时间来完成。而在Redis里,只需要150毫秒就完成了。你不会总能得到这么好的收获,但希望这能让你对我们所谈的东西有更清晰的理解。
+
+理解Redis的这个特性很重要,因为这将影响到你如何去与Redis进行交互。拥有SQL背景的程序员通常会致力于让数据库的数据往返次数减至最小。这对于任何系统都是个好建议,包括Redis。然而,考虑到我们是在处理比较简单的数据结构,有时候我们还是需要与Redis服务器频繁交互,以达到我们的目的。刚开始的时候,可能会对这种数据访问模式感到不太自然。实际上,相对于我们通过Redis获得的高性能而言,这仅仅是微不足道的损失。
+
+### 小结
+
+虽然我们只接触和摆弄了Redis的冰山一角,但我们讨论的主题已然覆盖了很大范围内的东西。如果觉得有些事情还是不太清楚(例如查询),不用为此而担心,在下一章我们将会继续深入探讨,希望你的问题都能得到解答。
+
+这一章的要点包括:
+
+* 关键字(Keys)是用于标识一段数据的一个字符串
+
+* 值(Values)是一段任意的字节序列,Redis不会关注它们实质上是什么
+
+* Redis展示了(也实现了)5种专门的数据结构
+
+* 上面的几点使得Redis快速而且容易使用,但要知道Redis并不适用于所有的应用场景
+
+\clearpage
+
+## 第2章 - 数据结构
+
+现在开始将探究Redis的5种数据结构,我们会解释每种数据结构都是什么,包含了什么有效的方法(Method),以及你能用这些数据结构处理哪些类型的特性和数据。
+
+目前为止,我们所知道的Redis构成仅包括命令、关键字和值,还没有接触到关于数据结构的具体概念。当我们使用`set`命令时,Redis是怎么知道我们是在使用哪个数据结构?其解决方法是,每个命令都相对应于一种特定的数据结构。例如,当你使用`set`命令,你就是将值存储到一个字符串数据结构里。而当你使用`hset`命令,你就是将值存储到一个散列数据结构里。考虑到Redis的关键字集很小,这样的机制具有相当的可管理性。
+
+**[Redis的网站](http://redis.io/commands)里有着非常优秀的参考文档,没有任何理由去重造轮子。但为了搞清楚这些数据结构的作用,我们将会覆盖那些必须知道的重要命令。**
+
+没有什么事情比高兴的玩和试验有趣的东西来得更重要的了。在任何时候,你都能通过键入`flushdb`命令将你数据库里的所有值清除掉,因此,不要再那么害羞了,去尝试做些疯狂的事情吧!
+
+### 字符串(Strings)
+
+在Redis里,字符串是最基本的数据结构。当你在思索着关键字-值对时,你就是在思索着字符串数据结构。不要被名字给搞混了,如之前说过的,你的值可以是任何东西。我更喜欢将他们称作“标量”(Scalars),但也许只有我才这样想。
+
+我们已经看到了一个常见的字符串使用案例,即通过关键字存储对象的实例。有时候,你会频繁地用到这类操作:
+
+ set users:leto "{name: leto, planet: dune, likes: [spice]}"
+
+除了这些外,Redis还有一些常用的操作。例如,`strlen <key>`能用来获取一个关键字对应值的长度;`getrange <key> <start> <end>`将返回指定范围内的关键字对应值;`append <key> <value>`会将value附加到已存在的关键字对应值中(如果该关键字并不存在,则会创建一个新的关键字-值对)。不要犹豫,去试试看这些命令吧。下面是我得到的:
+
+ > strlen users:leto
+ (integer) 42
+
+ > getrange users:leto 27 40
+ "likes: [spice]"
+
+ > append users:leto " OVER 9000!!"
+ (integer) 54
+
+现在你可能会想,这很好,但似乎没有什么意义。你不能有效地提取出一段范围内的JSON文件,或者为其附加一些值。你是对的,这里的经验是,一些命令,尤其是关于字符串数据结构的,只有在给定了明确的数据类型后,才会有实际意义。
+
+之前我们知道了,Redis不会去关注你的值是什么东西。通常情况下,这没有错。然而,一些字符串命令是专门为一些类型或值的结构而设计的。作为一个有些含糊的用例,我们可以看到,对于一些自定义的空间效率很高的(space-efficient)串行化对象,`append`和`getrange`命令将会很有用。对于一个更为具体的用例,我们可以再看一下`incr`、`incrby`、`decr`和`decrby`命令。这些命令会增长或者缩减一个字符串数据结构的值:
+
+ > incr stats:page:about
+ (integer) 1
+ > incr stats:page:about
+ (integer) 2
+
+ > incrby ratings:video:12333 5
+ (integer) 5
+ > incrby ratings:video:12333 3
+ (integer) 8
+
+由此你可以想象到,Redis的字符串数据结构能很好地用于分析用途。你还可以去尝试增长`users:leto`(一个不是整数的值),然后看看会发生什么(应该会得到一个错误)。
+
+更为进阶的用例是`setbit`和`getbit`命令。“今天我们有多少个独立用户访问”是个在Web应用里常见的问题,有一篇[精彩的博文](http://blog.getspool.com/2011/11/29/fast-easy-realtime-metrics-using-redis-bitmaps/),在里面可以看到Spool是如何使用这两个命令有效地解决此问题。对于1.28亿个用户,一部笔记本电脑在不到50毫秒的时间里就给出了答复,而且只用了16MB的存储空间。
+
+最重要的事情不是在于你是否明白位图(Bitmaps)的工作原理,或者Spool是如何去使用这些命令,而是应该要清楚Redis的字符串数据结构比你当初所想的要有用许多。然而,最常见的应用案例还是上面我们给出的:存储对象(简单或复杂)和计数。同时,由于通过关键字来获取一个值是如此之快,字符串数据结构很常被用来缓存数据。
+
+### 散列(Hashes)
+
+我们已经知道把Redis称为一种关键字-值型存储是不太准确的,散列数据结构是一个很好的例证。你会看到,在很多方面里,散列数据结构很像字符串数据结构。两者显著的区别在于,散列数据结构提供了一个额外的间接层:一个域(Field)。因此,散列数据结构中的`set`和`get`是:
+
+ hset users:goku powerlevel 9000
+ hget users:goku powerlevel
+
+相关的操作还包括在同一时间设置多个域、同一时间获取多个域、获取所有的域和值、列出所有的域或者删除指定的一个域:
+
+ hmset users:goku race saiyan age 737
+ hmget users:goku race powerlevel
+ hgetall users:goku
+ hkeys users:goku
+ hdel users:goku age
+
+如你所见,散列数据结构比普通的字符串数据结构具有更多的可操作性。我们可以使用一个散列数据结构去获得更精确的描述,是存储一个用户,而不是一个序列化对象。从而得到的好处是能够提取、更新和删除具体的数据片段,而不必去获取或写入整个值。
+
+对于散列数据结构,可以从一个经过明确定义的对象的角度来考虑,例如一个用户,关键之处在于要理解他们是如何工作的。从性能上的原因来看,这是正确的,更具粒度化的控制可能会相当有用。在下一章我们将会看到,如何用散列数据结构去组织你的数据,使查询变得更为实效。在我看来,这是散列真正耀眼的地方。
+
+### 列表(Lists)
+
+对于一个给定的关键字,列表数据结构让你可以存储和处理一组值。你可以添加一个值到列表里、获取列表的第一个值或最后一个值以及用给定的索引来处理值。列表数据结构维护了值的顺序,提供了基于索引的高效操作。为了跟踪在网站里注册的最新用户,我们可以维护一个`newusers`的列表:
+
+ lpush newusers goku
+ ltrim newusers 0 50
+
+**(译注:`ltrim`命令的具体构成是`LTRIM Key start stop`。要理解`ltrim`命令,首先要明白Key所存储的值是一个列表,理论上列表可以存放任意个值。对于指定的列表,根据所提供的两个范围参数start和stop,`ltrim`命令会将指定范围外的值都删除掉,只留下范围内的值。)**
+
+首先,我们将一个新用户推入到列表的前端,然后对列表进行调整,使得该列表只包含50个最近被推入的用户。这是一种常见的模式。`ltrim`是一个具有O(N)时间复杂度的操作,N是被删除的值的数量。从上面的例子来看,我们总是在插入了一个用户后再进行列表调整,实际上,其将具有O(1)的时间复杂度(因为N将永远等于1)的常数性能。
+
+这是我们第一次看到一个关键字的对应值索引另一个值。如果我们想要获取最近的10个用户的详细资料,我们可以运行下面的组合操作:
+
+ keys = redis.lrange('newusers', 0, 10)
+ redis.mget(*keys.map {|u| "users:#{u}"})
+
+我们之前谈论过关于多次往返数据的模式,上面的两行Ruby代码为我们进行了很好的演示。
+
+当然,对于存储和索引关键字的功能,并不是只有列表数据结构这种方式。值可以是任意的东西,你可以使用列表数据结构去存储日志,也可以用来跟踪用户浏览网站时的路径。如果你过往曾构建过游戏,你可能会使用列表数据结构去跟踪用户的排队活动。
+
+### 集合
+
+集合数据结构常常被用来存储只能唯一存在的值,并提供了许多的基于集合的操作,例如并集。集合数据结构没有对值进行排序,但是其提供了高效的基于值的操作。使用集合数据结构的典型用例是朋友名单的实现:
+
+ sadd friends:leto ghanima paul chani jessica
+ sadd friends:duncan paul jessica alia
+
+不管一个用户有多少个朋友,我们都能高效地(O(1)时间复杂度)识别出用户X是不是用户Y的朋友:
+
+ sismember friends:leto jessica
+ sismember friends:leto vladimir
+
+而且,我们可以查看两个或更多的人是不是有共同的朋友:
+
+ sinter friends:leto friends:duncan
+
+甚至可以在一个新的关键字里存储结果:
+
+ sinterstore friends:leto_duncan friends:leto friends:duncan
+
+有时候需要对值的属性进行标记和跟踪处理,但不能通过简单的复制操作完成,集合数据结构是解决此类问题的最好方法之一。当然,对于那些需要运用集合操作的地方(例如交集和并集),集合数据结构就是最好的选择。
+
+### 分类集合(Sorted Sets)
+
+最后也是最强大的数据结构是分类集合数据结构。如果说散列数据结构类似于字符串数据结构,主要区分是域(field)的概念;那么分类集合数据结构就类似于集合数据结构,主要区分是标记(score)的概念。标记提供了排序(sorting)和秩划分(ranking)的功能。如果我们想要一个秩分类的朋友名单,可以这样做:
+
+ zadd friends:duncan 70 ghanima 95 paul 95 chani 75 jessica 1 vladimir
+
+对于`duncan`的朋友,要怎样计算出标记(score)为90或更高的人数?
+
+ zcount friends:duncan 90 100
+
+如何获取`chani`在名单里的秩(rank)?
+
+ zrevrank friends:duncan chani
+
+**(译注:`zrank`命令的具体构成是`ZRANK Key member`,要知道Key存储的Sorted Set默认是根据Score对各个member进行升序的排列,该命令就是用来获取member在该排列里的次序,这就是所谓的秩。)**
+
+我们使用了`zrevrank`命令而不是`zrank`命令,这是因为Redis的默认排序是从低到高,但是在这个例子里我们的秩划分是从高到低。对于分类集合数据结构,最常见的应用案例是用来实现排行榜系统。事实上,对于一些基于整数排序,且能以标记(score)来进行有效操作的东西,使用分类集合数据结构来处理应该都是不错的选择。
+
+### 小结
+
+对于Redis的5种数据结构,我们进行了高层次的概述。一件有趣的事情是,相对于最初构建时的想法,你经常能用Redis创造出一些更具实效的事情。对于字符串数据结构和分类集合数据结构的使用,很有可能存在一些构建方法是还没有人想到的。当你理解了那些常用的应用案例后,你将发现Redis对于许多类型的问题,都是很理想的选择。还有,不要因为Redis展示了5种数据结构和相应的各种方法,就认为你必须要把所有的东西都用上。只使用一些命令去构建一个特性是很常见的。
+
+\clearpage
+
+## 第3章 - 使用数据结构
+
+在上一章里,我们谈论了Redis的5种数据结构,对于一些可能的用途也给出了用例。现在是时候来看看一些更高级,但依然很常见的主题和设计模式。
+
+### 大O表示法(Big O Notation)
+
+在本书中,我们之前就已经看到过大O表示法,包括O(1)和O(N)的表示。大O表示法的惯常用途是,描述一些用于处理一定数量元素的行为的综合表现。在Redis里,对于一个要处理一定数量元素的命令,大O表示法让我们能了解该命令的大概运行速度。
+
+在Redis的文档里,每一个命令的时间复杂度都用大O表示法进行了描述,还能知道各命令的具体性能会受什么因素影响。让我们来看看一些用例。
+
+常数时间复杂度O(1)被认为是最快速的,无论我们是在处理5个元素还是5百万个元素,最终都能得到相同的性能。对于`sismember`命令,其作用是告诉我们一个值是否属于一个集合,时间复杂度为O(1)。`sismember`命令很强大,很大部分的原因是其高效的性能特征。许多Redis命令都具有O(1)的时间复杂度。
+
+对数时间复杂度O(log(N))被认为是第二快速的,其通过使需扫描的区间不断皱缩来快速完成处理。使用这种“分而治之”的方式,大量的元素能在几个迭代过程里被快速分解完整。`zadd`命令的时间复杂度就是O(log(N)),其中N是在分类集合中的元素数量。
+
+再下来就是线性时间复杂度O(N),在一个表格的非索引列里进行查找就需要O(N)次操作。`ltrim`命令具有O(N)的时间复杂度,但是,在`ltrim`命令里,N不是列表所拥有的元素数量,而是被删除的元素数量。从一个具有百万元素的列表里用`ltrim`命令删除1个元素,要比从一个具有一千个元素的列表里用`ltrim`命令删除10个元素来的快速(实际上,两者很可能会是一样快,因为两个时间都非常的小)。
+
+根据给定的最小和最大的值的标记,`zremrangebyscore`命令会在一个分类集合里进行删除元素操作,其时间复杂度是O(log(N)+M)。这看起来似乎有点儿杂乱,通过阅读文档可以知道,这里的N指的是在分类集合里的总元素数量,而M则是被删除的元素数量。可以看出,对于性能而言,被删除的元素数量很可能会比分类集合里的总元素数量更为重要。
+
+**(译注:`zremrangebyscore`命令的具体构成是`ZREMRANGEBYSCORE Key min max`。)**
+
+对于`sort`命令,其时间复杂度为O(N+M*log(M)),我们将会在下一章谈论更多的相关细节。从`sort`命令的性能特征来看,可以说这是Redis里最复杂的一个命令。
+
+还存在其他的时间复杂度描述,包括O(N^2)和O(C^N)。随着N的增大,其性能将急速下降。在Redis里,没有任何一个命令具有这些类型的时间复杂度。
+
+值得指出的一点是,在Redis里,当我们发现一些操作具有O(N)的时间复杂度时,我们可能可以找到更为好的方法去处理。
+
+**(译注:对于Big O Notation,相信大家都非常的熟悉,虽然原文仅仅是对该表示法进行简单的介绍,但限于个人的算法知识和文笔水平实在有限,此小节的翻译让我头痛颇久,最终成果也确实难以让人满意,望见谅。)**
+
+### 仿多关键字查询(Pseudo Multi Key Queries)
+
+时常,你会想通过不同的关键字去查询相同的值。例如,你会想通过电子邮件(当用户开始登录时)去获取用户的具体信息,或者通过用户id(在用户登录后)去获取。有一种很不实效的解决方法,其将用户对象分别放置到两个字符串值里去:
+
+ set users:leto@dune.gov "{id: 9001, email: 'leto@dune.gov', ...}"
+ set users:9001 "{id: 9001, email: 'leto@dune.gov', ...}"
+
+这种方法很糟糕,如此不但会产生两倍数量的内存,而且这将会成为数据管理的恶梦。
+
+如果Redis允许你将一个关键字链接到另一个的话,可能情况会好很多,可惜Redis并没有提供这样的功能(而且很可能永远都不会提供)。Redis发展到现在,其开发的首要目的是要保持代码和API的整洁简单,关键字链接功能的内部实现并不符合这个前提(对于关键字,我们还有很多相关方法没有谈论到)。其实,Redis已经提供了解决的方法:散列。
+
+使用散列数据结构,我们可以摆脱重复的缠绕:
+
+ set users:9001 "{id: 9001, email: leto@dune.gov, ...}"
+ hset users:lookup:email leto@dune.gov 9001
+
+我们所做的是,使用域来作为一个二级索引,然后去引用单个用户对象。要通过id来获取用户信息,我们可以使用一个普通的`get`命令:
+
+ get users:9001
+
+而如果想通过电子邮箱来获取用户信息,我们可以使用`hget`命令再配合使用`get`命令(Ruby代码):
+
+ id = redis.hget('users:lookup:email', 'leto@dune.gov')
+ user = redis.get("users:#{id}")
+
+你很可能将会经常使用这类用法。在我看来,这就是散列真正耀眼的地方。在你了解这类用法之前,这可能不是一个明显的用例。
+
+### 引用和索引(References and Indexes)
+
+我们已经看过几个关于值引用的用例,包括介绍列表数据结构时的用例,以及在上面使用散列数据结构来使查询更灵活一些。进行归纳后会发现,对于那些值与值间的索引和引用,我们都必须手动的去管理。诚实来讲,这确实会让人有点沮丧,尤其是当你想到那些引用相关的操作,如管理、更新和删除等,都必须手动的进行时。在Redis里,这个问题还没有很好的解决方法。
+
+我们已经看到,集合数据结构很常被用来实现这类索引:
+
+ sadd friends:leto ghanima paul chani jessica
+
+这个集合里的每一个成员都是一个Redis字符串数据结构的引用,而每一个引用的值则包含着用户对象的具体信息。那么如果`chani`改变了她的名字,或者删除了她的帐号,应该如何处理?从整个朋友圈的关系结构来看可能会更好理解,我们知道,`chani`也有她的朋友:
+
+ sadd friends_of:chani leto paul
+
+如果你有什么待处理情况像上面那样,那在维护成本之外,还会有对于额外索引值的处理和存储空间的成本。这可能会令你感到有点退缩。在下一小节里,我们将会谈论减少使用额外数据交互的性能成本的一些方法(在第1章我们粗略地讨论了下)。
+
+如果你确实在担忧着这些情况,其实,关系型数据库也有同样的开销。索引需要一定的存储空间,必须通过扫描或查找,然后才能找到相应的记录。其开销也是存在的,当然他们对此做了很多的优化工作,使之变得更为有效。
+
+再次说明,需要在Redis里手动地管理引用确实是颇为棘手。但是,对于你关心的那些问题,包括性能或存储空间等,应该在经过测试后,才会有真正的理解。我想你会发现这不会是一个大问题。
+
+### 数据交互和流水线(Round Trips and Pipelining)
+
+我们已经提到过,与服务器频繁交互是Redis的一种常见模式。这类情况可能很常出现,为了使我们能获益更多,值得仔细去看看我们能利用哪些特性。
+
+许多命令能接受一个或更多的参数,也有一种关联命令(sister-command)可以接受多个参数。例如早前我们看到过`mget`命令,接受多个关键字,然后返回值:
+
+ keys = redis.lrange('newusers', 0, 10)
+ redis.mget(*keys.map {|u| "users:#{u}"})
+
+或者是`sadd`命令,能添加一个或多个成员到集合里:
+
+ sadd friends:vladimir piter
+ sadd friends:paul jessica leto "leto II" chani
+
+Redis还支持流水线功能。通常情况下,当一个客户端发送请求到Redis后,在发送下一个请求之前必须等待Redis的答复。使用流水线功能,你可以发送多个请求,而不需要等待Redis响应。这不但减少了网络开销,还能获得性能上的显著提高。
+
+值得一提的是,Redis会使用存储器去排列命令,因此批量执行命令是一个好主意。至于具体要多大的批量,将取决于你要使用什么命令(更明确来说,该参数有多大)。另一方面来看,如果你要执行的命令需要差不多50个字符的关键字,你大概可以对此进行数千或数万的批量操作。
+
+对于不同的Redis载体,在流水线里运行命令的方式会有所差异。在Ruby里,你传递一个代码块到`pipelined`方法:
+
+ redis.pipelined do
+ 9001.times do
+ redis.incr('powerlevel')
+ end
+ end
+
+正如你可能猜想到的,流水线功能可以实际地加速一连串命令的处理。
+
+### 事务(Transactions)
+
+每一个Redis命令都具有原子性,包括那些一次处理多项事情的命令。此外,对于使用多个命令,Redis支持事务功能。
+
+你可能不知道,但Redis实际上是单线程运行的,这就是为什么每一个Redis命令都能够保证具有原子性。当一个命令在执行时,没有其他命令会运行(我们会在往后的章节里简略谈论一下Scaling)。在你考虑到一些命令去做多项事情时,这会特别的有用。例如:
+
+`incr`命令实际上就是一个`get`命令然后紧随一个`set`命令。
+
+`getset`命令设置一个新的值然后返回原始值。
+
+`setnx`命令首先测试关键字是否存在,只有当关键字不存在时才设置值
+
+虽然这些都很有用,但在实际开发时,往往会需要运行具有原子性的一组命令。若要这样做,首先要执行`multi`命令,紧随其后的是所有你想要执行的命令(作为事务的一部分),最后执行`exec`命令去实际执行命令,或者使用`discard`命令放弃执行命令。Redis的事务功能保证了什么?
+
+* 事务中的命令将会按顺序地被执行
+
+* 事务中的命令将会如单个原子操作般被执行(没有其它的客户端命令会在中途被执行)
+
+* 事务中的命令要么全部被执行,要么不会执行
+
+你可以(也应该)在命令行界面对事务功能进行一下测试。还有一点要注意到,没有什么理由不能结合流水线功能和事务功能。
+
+ multi
+ hincrby groups:1percent balance -9000000000
+ hincrby groups:99percent balance 9000000000
+ exec
+
+最后,Redis能让你指定一个关键字(或多个关键字),当关键字有改变时,可以查看或者有条件地应用一个事务。这是用于当你需要获取值,且待运行的命令基于那些值时,所有都在一个事务里。对于上面展示的代码,我们不能去实现自己的`incr`命令,因为一旦`exec`命令被调用,他们会全部被执行在一块。我们不能这么做:
+
+ redis.multi()
+ current = redis.get('powerlevel')
+ redis.set('powerlevel', current + 1)
+ redis.exec()
+
+**(译注:虽然Redis是单线程运行的,但是我们可以同时运行多个Redis客户端进程,常见的并发问题还是会出现。像上面的代码,在`get`运行之后,`set`运行之前,`powerlevel`的值可能会被另一个Redis客户端给改变,从而造成错误。)**
+
+这些不是Redis的事务功能的工作。但是,如果我们增加一个`watch`到`powerlevel`,我们可以这样做:
+
+ redis.watch('powerlevel')
+ current = redis.get('powerlevel')
+ redis.multi()
+ redis.set('powerlevel', current + 1)
+ redis.exec()
+
+在我们调用`watch`后,如果另一个客户端改变了`powerlevel`的值,我们的事务将会运行失败。如果没有客户端改变`powerlevel`的值,那么事务会继续工作。我们可以在一个循环里运行这些代码,直到其能正常工作。
+
+### 关键字反模式(Keys Anti-Pattern)
+
+在下一章中,我们将会谈论那些没有确切关联到数据结构的命令,其中的一些是管理或调试工具。然而有一个命令我想特别地在这里进行谈论:`keys`命令。这个命令需要一个模式,然后查找所有匹配的关键字。这个命令看起来很适合一些任务,但这不应该用在实际的产品代码里。为什么?因为这个命令通过线性扫描所有的关键字来进行匹配。或者,简单地说,这个命令太慢了。
+
+人们会如此去使用这个命令?一般会用来构建一个本地的Bug追踪服务。每一个帐号都有一个`id`,你可能会通过一个看起来像`bug:account_id:bug_id`的关键字,把每一个Bug存储到一个字符串数据结构值中去。如果你在任何时候需要查询一个帐号的Bug(显示它们,或者当用户删除了帐号时删除掉这些Bugs),你可能会尝试去使用`keys`命令:
+
+ keys bug:1233:*
+
+更好的解决方法应该使用一个散列数据结构,就像我们可以使用散列数据结构来提供一种方法去展示二级索引,因此我们可以使用域来组织数据:
+
+ hset bugs:1233 1 "{id:1, account: 1233, subject: '...'}"
+ hset bugs:1233 2 "{id:2, account: 1233, subject: '...'}"
+
+从一个帐号里获取所有的Bug标识,可以简单地调用`hkeys bugs:1233`。去删除一个指定的Bug,可以调用`hdel bugs:1233 2`。如果要删除了一个帐号,可以通过`del bugs:1233`把关键字删除掉。
+
+### 小结
+
+结合这一章以及前一章,希望能让你得到一些洞察力,了解如何使用Redis去支持(Power)实际项目。还有其他的模式可以让你去构建各种类型的东西,但真正的关键是要理解基本的数据结构。你将能领悟到,这些数据结构是如何能够实现你最初视角之外的东西。
+
+\clearpage
+
+## 第4章 超越数据结构
+
+5种数据结构组成了Redis的基础,其他没有关联特定数据结构的命令也有很多。我们已经看过一些这样的命令:`info`, `select`, `flushdb`, `multi`, `exec`, `discard`, `watch`和`keys `。这一章将看看其他的一些重要命令。
+
+### 使用期限(Expiration)
+
+Redis允许你标记一个关键字的使用期限。你可以给予一个Unix时间戳形式(自1970年1月1日起)的绝对时间,或者一个基于秒的存活时间。这是一个基于关键字的命令,因此其不在乎关键字表示的是哪种类型的数据结构。
+
+ expire pages:about 30
+ expireat pages:about 1356933600
+
+第一个命令将会在30秒后删除掉关键字(包括其关联的值)。第二个命令则会在2012年12月31日上午12点删除掉关键字。
+
+这让Redis能成为一个理想的缓存引擎。通过`ttl`命令,你可以知道一个关键字还能够存活多久。而通过`persist`命令,你可以把一个关键字的使用期限删除掉。
+
+ ttl pages:about
+ persist pages:about
+
+最后,有个特殊的字符串命令,`setex`命令让你可以在一个单独的原子命令里设置一个字符串值,同时里指定一个生存期(这比任何事情都要方便)。
+
+ setex pages:about 30 '<h1>about us</h1>....'
+
+### 发布和订阅(Publication and Subscriptions)
+
+Redis的列表数据结构有`blpop`和`brpop`命令,能从列表里返回且删除第一个(或最后一个)元素,或者被堵塞,直到有一个元素可供操作。这可以用来实现一个简单的队列。
+
+**(译注:对于`blpop`和`brpop`命令,如果列表里没有关键字可供操作,连接将被堵塞,直到有另外的Redis客户端使用`lpush`或`rpush`命令推入关键字为止。)**
+
+此外,Redis对于消息发布和频道订阅有着一流的支持。你可以打开第二个`redis-cli`窗口,去尝试一下这些功能。在第一个窗口里订阅一个频道(我们会称它为`warnings`):
+
+ subscribe warnings
+
+其将会答复你订阅的信息。现在,在另一个窗口,发布一条消息到`warnings`频道:
+
+ publish warnings "it's over 9000!"
+
+如果你回到第一个窗口,你应该已经接收到`warnings`频道发来的消息。
+
+你可以订阅多个频道(`subscribe channel1 channel2 ...`),订阅一组基于模式的频道(`psubscribe warnings:*`),以及使用`unsubscribe`和`punsubscribe`命令停止监听一个或多个频道,或一个频道模式。
+
+最后,可以注意到`publish`命令的返回值是1,这指出了接收到消息的客户端数量。
+
+### 监控和延迟日志(Monitor and Slow Log)
+
+`monitor`命令可以让你查看Redis正在做什么。这是一个优秀的调试工具,能让你了解你的程序如何与Redis进行交互。在两个`redis-cli`窗口中选一个(如果其中一个还处于订阅状态,你可以使用`unsubscribe`命令退订,或者直接关掉窗口再重新打开一个新窗口)键入`monitor`命令。在另一个窗口,执行任何其他类型的命令(例如`get`或`set`命令)。在第一个窗口里,你应该可以看到这些命令,包括他们的参数。
+
+在实际生产环境里,你应该谨慎运行`monitor`命令,这真的仅仅就是一个很有用的调试和开发工具。除此之外,没有更多要说的了。
+
+随同`monitor`命令一起,Redis拥有一个`slowlog`命令,这是一个优秀的性能剖析工具。其会记录执行时间超过一定数量**微秒**的命令。在下一章节,我们会简略地涉及如何配置Redis,现在你可以按下面的输入配置Redis去记录所有的命令:
+
+ config set slowlog-log-slower-than 0
+
+然后,执行一些命令。最后,你可以检索到所有日志,或者检索最近的那些日志:
+
+ slowlog get
+ slowlog get 10
+
+通过键入`slowlog len`,你可以获取延迟日志里的日志数量。
+
+对于每个被你键入的命令,你应该查看4个参数:
+
+* 一个自动递增的id
+
+* 一个Unix时间戳,表示命令开始运行的时间
+
+* 一个微秒级的时间,显示命令运行的总时间
+
+* 该命令以及所带参数
+
+延迟日志保存在存储器中,因此在生产环境中运行(即使有一个低阀值)也应该不是一个问题。默认情况下,它将会追踪最近的1024个日志。
+
+### 排序(Sort)
+
+`sort`命令是Redis最强大的命令之一。它让你可以在一个列表、集合或者分类集合里对值进行排序(分类集合是通过标记来进行排序,而不是集合里的成员)。下面是一个`sort`命令的简单用例:
+
+ rpush users:leto:guesses 5 9 10 2 4 10 19 2
+ sort users:leto:guesses
+
+这将返回进行升序排序后的值。这里有一个更高级的例子:
+
+ sadd friends:ghanima leto paul chani jessica alia duncan
+ sort friends:ghanima limit 0 3 desc alpha
+
+上面的命令向我们展示了,如何对已排序的记录进行分页(通过`limit`),如何返回降序排序的结果(通过`desc`),以及如何用字典序排序代替数值序排序(通过`alpha`)。
+
+`sort`命令的真正力量是其基于引用对象来进行排序的能力。早先的时候,我们说明了列表、集合和分类集合很常被用于引用其他的Redis对象,`sort`命令能够解引用这些关系,而且通过潜在值来进行排序。例如,假设我们有一个Bug追踪器能让用户看到各类已存在问题。我们可能使用一个集合数据结构去追踪正在被监视的问题:
+
+ sadd watch:leto 12339 1382 338 9338
+
+你可能会有强烈的感觉,想要通过id来排序这些问题(默认的排序就是这样的),但是,我们更可能是通过问题的严重性来对这些问题进行排序。为此,我们要告诉Redis将使用什么模式来进行排序。首先,为了可以看到一个有意义的结果,让我们添加多一点数据:
+
+ set severity:12339 3
+ set severity:1382 2
+ set severity:338 5
+ set severity:9338 4
+
+要通过问题的严重性来降序排序这些Bug,你可以这样做:
+
+ sort watch:leto by severity:* desc
+
+Redis将会用存储在列表(集合或分类集合)中的值去替代模式中的`*`(通过`by`)。这会创建出关键字名字,Redis将通过查询其实际值来排序。
+
+在Redis里,虽然你可以有成千上万个关键字,类似上面展示的关系还是会引起一些混乱。幸好,`sort`命令也可以工作在散列数据结构及其相关域里。相对于拥有大量的高层次关键字,你可以利用散列:
+
+ hset bug:12339 severity 3
+ hset bug:12339 priority 1
+ hset bug:12339 details "{id: 12339, ....}"
+
+ hset bug:1382 severity 2
+ hset bug:1382 priority 2
+ hset bug:1382 details "{id: 1382, ....}"
+
+ hset bug:338 severity 5
+ hset bug:338 priority 3
+ hset bug:338 details "{id: 338, ....}"
+
+ hset bug:9338 severity 4
+ hset bug:9338 priority 2
+ hset bug:9338 details "{id: 9338, ....}"
+
+所有的事情不仅变得更为容易管理,而且我们能通过`severity`或`priority`来进行排序,还可以告诉`sort`命令具体要检索出哪一个域的数据:
+
+ sort watch:leto by bug:*->priority get bug:*->details
+
+相同的值替代出现了,但Redis还能识别`->`符号,用它来查看散列中指定的域。里面还包括了`get`参数,这里也会进行值替代和域查看,从而检索出Bug的细节(details域的数据)。
+
+对于太大的集合,`sort`命令的执行可能会变得很慢。好消息是,`sort`命令的输出可以被存储起来:
+
+ sort watch:leto by bug:*->priority get bug:*->details store watch_by_priority:leto
+
+使用我们已经看过的`expiration`命令,再结合`sort`命令的`store`能力,这是一个美妙的组合。
+
+### 小结
+
+这一章主要关注那些非特定数据结构关联的命令。和其他事情一样,它们的使用依情况而定。构建一个程序或特性时,可能不会用到使用期限、发布和订阅或者排序等功能。但知道这些功能的存在是很好的。而且,我们也只接触到了一些命令。还有更多的命令,当你消化理解完这本书后,非常值得去浏览一下[完整的命令列表](http://redis.io/commands)。
+
+\clearpage
+
+## 第5章 - 管理
+
+在最后一章里,我们将集中谈论Redis运行中的一些管理方面内容。这是一个不完整的Redis管理指南,我们将会回答一些基本的问题,初接触Redis的新用户可能会很感兴趣。
+
+### 配置(Configuration)
+
+当你第一次运行Redis的服务器,它会向你显示一个警告,指`redis.conf`文件没有被找到。这个文件可以被用来配置Redis的各个方面。一个充分定义(well-documented)的`redis.conf`文件对各个版本的Redis都有效。范例文件包含了默认的配置选项,因此,对于想要了解设置在干什么,或默认设置是什么,都会很有用。你可以在<https://github.com/antirez/redis/raw/2.4.6/redis.conf>找到这个文件。
+
+**这个配置文件针对的是Redis 2.4.6,你应该用你的版本号替代上面URL里的"2.4.6"。运行`info`命令,其显示的第一个值就是Redis的版本号。**
+
+因为这个文件已经是充分定义(well-documented),我们就不去再进行设置了。
+
+除了通过`redis.conf`文件来配置Redis,`config set`命令可以用来对个别值进行设置。实际上,在将`slowlog-log-slower-than`设置为0时,我们就已经使用过这个命令了。
+
+还有一个`config get`命令能显示一个设置值。这个命令支持模式匹配,因此如果我们想要显示关联于日志(logging)的所有设置,我们可以这样做:
+
+ config get *log*
+
+### 验证(Authentication)
+
+通过设置`requirepass`(使用`config set`命令或`redis.conf`文件),可以让Redis需要一个密码验证。当`requirepass`被设置了一个值(就是待用的密码),客户端将需要执行一个`auth password`命令。
+
+一旦一个客户端通过了验证,就可以在任意数据库里执行任何一条命令,包括`flushall`命令,这将会清除掉每一个数据库里的所有关键字。通过配置,你可以重命名一些重要命令为混乱的字符串,从而获得一些安全性。
+
+ rename-command CONFIG 5ec4db169f9d4dddacbfb0c26ea7e5ef
+ rename-command FLUSHALL 1041285018a942a4922cbf76623b741e
+
+或者,你可以将新名字设置为一个空字符串,从而禁用掉一个命令。
+
+### 大小限制(Size Limitations)
+
+当你开始使用Redis,你可能会想知道,我能使用多少个关键字?还可能想知道,一个散列数据结构能有多少个域(尤其是当你用它来组织数据时),或者是,一个列表数据结构或集合数据结构能有多少个元素?对于每一个实例,实际限制都能达到亿万级别(hundreds of millions)。
+
+### 复制(Replication)
+
+Redis支持复制功能,这意味着当你向一个Redis实例(Master)进行写入时,一个或多个其他实例(Slaves)能通过Master实例来保持更新。可以在配置文件里设置`slaveof`,或使用`slaveof`命令来配置一个Slave实例。对于那些没有进行这些设置的Redis实例,就可能是一个Master实例。
+
+为了更好保护你的数据,复制功能拷贝数据到不同的服务器。复制功能还能用于改善性能,因为读取请求可以被发送到Slave实例。他们可能会返回一些稍微滞后的数据,但对于大多数程序来说,这是一个值得做的折衷。
+
+遗憾的是,Redis的复制功能还没有提供自动故障恢复。如果Master实例崩溃了,一个Slave实例需要手动的进行升级。如果你想使用Redis去达到某种高可用性,对于使用心跳监控(heartbeat monitoring)和脚本自动开关(scripts to automate the switch)的传统高可用性工具来说,现在还是一个棘手的难题。
+
+### 备份文件(Backups)
+
+备份Redis非常简单,你可以将Redis的快照(snapshot)拷贝到任何地方,包括S3、FTP等。默认情况下,Redis会把快照存储为一个名为`dump.rdb`的文件。在任何时候,你都可以对这个文件执行`scp`、`ftp`或`cp`等常用命令。
+
+有一种常见情况,在Master实例上会停用快照以及单一附加文件(aof),然后让一个Slave实例去处理备份事宜。这可以帮助减少Master实例的载荷。在不损害整体系统响应性的情况下,你还可以在Slave实例上设置更多主动存储的参数。
+
+### 缩放和Redis集群(Scaling and Redis Cluster)
+
+复制功能(Replication)是一个成长中的网站可以利用的第一个工具。有一些命令会比另外一些来的昂贵(例如`sort`命令),将这些运行载荷转移到一个Slave实例里,可以保持整体系统对于查询的快速响应。
+
+此外,通过分发你的关键字到多个Redis实例里,可以达到真正的缩放Redis(记住,Redis是单线程的,这些可以运行在同一个逻辑框里)。随着时间的推移,你将需要特别注意这些事情(尽管许多的Redis载体都提供了consistent-hashing算法)。对于数据水平分布(horizontal distribution)的考虑不在这本书所讨论的范围内。这些东西你也很可能不需要去担心,但是,无论你使用哪一种解决方案,有一些事情你还是必须意识到。
+
+好消息是,这些工作都可在Redis集群下进行。不仅提供水平缩放(包括均衡),为了高可用性,还提供了自动故障恢复。
+
+高可用性和缩放是可以达到的,只要你愿意为此付出时间和精力,Redis集群也使事情变得简单多了。
+
+### 小结
+
+在过去的一段时间里,已经有许多的计划和网站使用了Redis,毫无疑问,Redis已经可以应用于实际生产中了。然而,一些工具还是不够成熟,尤其是一些安全性和可用性相关的工具。对于Redis集群,我们希望很快就能看到其实现,这应该能为一些现有的管理挑战提供处理帮忙。
+
+\clearpage
+
+## 总结
+
+在许多方面,Redis体现了一种简易的数据处理方式,其剥离掉了大部分的复杂性和抽象,并可有效的在不同系统里运行。不少情况下,选择Redis不是最佳的选择。在另一些情况里,Redis就像是为你的数据提供了特别定制的解决方案。
+
+最终,回到我最开始所说的:Redis很容易学习。现在有许多的新技术,很难弄清楚哪些才真正值得我们花时间去学习。如果你从实际好处来考虑,Redis提供了他的简单性。我坚信,对于你和你的团队,学习Redis是最好的技术投资之一。
diff --git a/hugolib/testdata/sunset.jpg b/hugolib/testdata/sunset.jpg
new file mode 100644
index 000000000..7d7307bed
--- /dev/null
+++ b/hugolib/testdata/sunset.jpg
Binary files differ
diff --git a/hugolib/testdata/what-is-markdown.md b/hugolib/testdata/what-is-markdown.md
new file mode 100644
index 000000000..87db650b7
--- /dev/null
+++ b/hugolib/testdata/what-is-markdown.md
@@ -0,0 +1,9702 @@
+# Introduction
+
+## What is Markdown?
+
+Markdown is a plain text format for writing structured documents,
+based on conventions for indicating formatting in email
+and usenet posts. It was developed by John Gruber (with
+help from Aaron Swartz) and released in 2004 in the form of a
+[syntax description](http://daringfireball.net/projects/markdown/syntax)
+and a Perl script (`Markdown.pl`) for converting Markdown to
+HTML. In the next decade, dozens of implementations were
+developed in many languages. Some extended the original
+Markdown syntax with conventions for footnotes, tables, and
+other document elements. Some allowed Markdown documents to be
+rendered in formats other than HTML. Websites like Reddit,
+StackOverflow, and GitHub had millions of people using Markdown.
+And Markdown started to be used beyond the web, to author books,
+articles, slide shows, letters, and lecture notes.
+
+What distinguishes Markdown from many other lightweight markup
+syntaxes, which are often easier to write, is its readability.
+As Gruber writes:
+
+> The overriding design goal for Markdown's formatting syntax is
+> to make it as readable as possible. The idea is that a
+> Markdown-formatted document should be publishable as-is, as
+> plain text, without looking like it's been marked up with tags
+> or formatting instructions.
+> (<http://daringfireball.net/projects/markdown/>)
+
+The point can be illustrated by comparing a sample of
+[AsciiDoc](http://www.methods.co.nz/asciidoc/) with
+an equivalent sample of Markdown. Here is a sample of
+AsciiDoc from the AsciiDoc manual:
+
+```
+1. List item one.
++
+List item one continued with a second paragraph followed by an
+Indented block.
++
+.................
+$ ls *.sh
+$ mv *.sh ~/tmp
+.................
++
+List item continued with a third paragraph.
+
+2. List item two continued with an open block.
++
+--
+This paragraph is part of the preceding list item.
+
+a. This list is nested and does not require explicit item
+continuation.
++
+This paragraph is part of the preceding list item.
+
+b. List item b.
+
+This paragraph belongs to item two of the outer list.
+--
+```
+
+And here is the equivalent in Markdown:
+```
+1. List item one.
+
+ List item one continued with a second paragraph followed by an
+ Indented block.
+
+ $ ls *.sh
+ $ mv *.sh ~/tmp
+
+ List item continued with a third paragraph.
+
+2. List item two continued with an open block.
+
+ This paragraph is part of the preceding list item.
+
+ 1. This list is nested and does not require explicit item continuation.
+
+ This paragraph is part of the preceding list item.
+
+ 2. List item b.
+
+ This paragraph belongs to item two of the outer list.
+```
+
+The AsciiDoc version is, arguably, easier to write. You don't need
+to worry about indentation. But the Markdown version is much easier
+to read. The nesting of list items is apparent to the eye in the
+source, not just in the processed document.
+
+## Why is a spec needed?
+
+John Gruber's [canonical description of Markdown's
+syntax](http://daringfireball.net/projects/markdown/syntax)
+does not specify the syntax unambiguously. Here are some examples of
+questions it does not answer:
+
+1. How much indentation is needed for a sublist? The spec says that
+ continuation paragraphs need to be indented four spaces, but is
+ not fully explicit about sublists. It is natural to think that
+ they, too, must be indented four spaces, but `Markdown.pl` does
+ not require that. This is hardly a "corner case," and divergences
+ between implementations on this issue often lead to surprises for
+ users in real documents. (See [this comment by John
+ Gruber](http://article.gmane.org/gmane.text.markdown.general/1997).)
+
+2. Is a blank line needed before a block quote or heading?
+ Most implementations do not require the blank line. However,
+ this can lead to unexpected results in hard-wrapped text, and
+ also to ambiguities in parsing (note that some implementations
+ put the heading inside the blockquote, while others do not).
+ (John Gruber has also spoken [in favor of requiring the blank
+ lines](http://article.gmane.org/gmane.text.markdown.general/2146).)
+
+3. Is a blank line needed before an indented code block?
+ (`Markdown.pl` requires it, but this is not mentioned in the
+ documentation, and some implementations do not require it.)
+
+ ``` markdown
+ paragraph
+ code?
+ ```
+
+4. What is the exact rule for determining when list items get
+ wrapped in `<p>` tags? Can a list be partially "loose" and partially
+ "tight"? What should we do with a list like this?
+
+ ``` markdown
+ 1. one
+
+ 2. two
+ 3. three
+ ```
+
+ Or this?
+
+ ``` markdown
+ 1. one
+ - a
+
+ - b
+ 2. two
+ ```
+
+ (There are some relevant comments by John Gruber
+ [here](http://article.gmane.org/gmane.text.markdown.general/2554).)
+
+5. Can list markers be indented? Can ordered list markers be right-aligned?
+
+ ``` markdown
+ 8. item 1
+ 9. item 2
+ 10. item 2a
+ ```
+
+6. Is this one list with a thematic break in its second item,
+ or two lists separated by a thematic break?
+
+ ``` markdown
+ * a
+ * * * * *
+ * b
+ ```
+
+7. When list markers change from numbers to bullets, do we have
+ two lists or one? (The Markdown syntax description suggests two,
+ but the perl scripts and many other implementations produce one.)
+
+ ``` markdown
+ 1. fee
+ 2. fie
+ - foe
+ - fum
+ ```
+
+8. What are the precedence rules for the markers of inline structure?
+ For example, is the following a valid link, or does the code span
+    take precedence?
+
+ ``` markdown
+ [a backtick (`)](/url) and [another backtick (`)](/url).
+ ```
+
+9. What are the precedence rules for markers of emphasis and strong
+ emphasis? For example, how should the following be parsed?
+
+ ``` markdown
+ *foo *bar* baz*
+ ```
+
+10. What are the precedence rules between block-level and inline-level
+ structure? For example, how should the following be parsed?
+
+ ``` markdown
+ - `a long code span can contain a hyphen like this
+ - and it can screw things up`
+ ```
+
+11. Can list items include section headings? (`Markdown.pl` does not
+ allow this, but does allow blockquotes to include headings.)
+
+ ``` markdown
+ - # Heading
+ ```
+
+12. Can list items be empty?
+
+ ``` markdown
+ * a
+ *
+ * b
+ ```
+
+13. Can link references be defined inside block quotes or list items?
+
+ ``` markdown
+ > Blockquote [foo].
+ >
+ > [foo]: /url
+ ```
+
+14. If there are multiple definitions for the same reference, which takes
+ precedence?
+
+ ``` markdown
+ [foo]: /url1
+ [foo]: /url2
+
+ [foo][]
+ ```
+
+In the absence of a spec, early implementers consulted `Markdown.pl`
+to resolve these ambiguities. But `Markdown.pl` was quite buggy, and
+gave manifestly bad results in many cases, so it was not a
+satisfactory replacement for a spec.
+
+Because there is no unambiguous spec, implementations have diverged
+considerably. As a result, users are often surprised to find that
+a document that renders one way on one system (say, a GitHub wiki)
+renders differently on another (say, converting to docbook using
+pandoc). To make matters worse, because nothing in Markdown counts
+as a "syntax error," the divergence often isn't discovered right away.
+
+## About this document
+
+This document attempts to specify Markdown syntax unambiguously.
+It contains many examples with side-by-side Markdown and
+HTML. These are intended to double as conformance tests. An
+accompanying script `spec_tests.py` can be used to run the tests
+against any Markdown program:
+
+ python test/spec_tests.py --spec spec.txt --program PROGRAM
+
+Since this document describes how Markdown is to be parsed into
+an abstract syntax tree, it would have made sense to use an abstract
+representation of the syntax tree instead of HTML. But HTML is capable
+of representing the structural distinctions we need to make, and the
+choice of HTML for the tests makes it possible to run the tests against
+an implementation without writing an abstract syntax tree renderer.
+
+This document is generated from a text file, `spec.txt`, written
+in Markdown with a small extension for the side-by-side tests.
+The script `tools/makespec.py` can be used to convert `spec.txt` into
+HTML or CommonMark (which can then be converted into other formats).
+
+In the examples, the `→` character is used to represent tabs.
+
+# Preliminaries
+
+## Characters and lines
+
+Any sequence of [characters] is a valid CommonMark
+document.
+
+A [character](@) is a Unicode code point. Although some
+code points (for example, combining accents) do not correspond to
+characters in an intuitive sense, all code points count as characters
+for purposes of this spec.
+
+This spec does not specify an encoding; it thinks of lines as composed
+of [characters] rather than bytes. A conforming parser may be limited
+to a certain encoding.
+
+A [line](@) is a sequence of zero or more [characters]
+other than newline (`U+000A`) or carriage return (`U+000D`),
+followed by a [line ending] or by the end of file.
+
+A [line ending](@) is a newline (`U+000A`), a carriage return
+(`U+000D`) not followed by a newline, or a carriage return and a
+following newline.
+
+A line containing no characters, or a line containing only spaces
+(`U+0020`) or tabs (`U+0009`), is called a [blank line](@).
+
+The following definitions of character classes will be used in this spec:
+
+A [whitespace character](@) is a space
+(`U+0020`), tab (`U+0009`), newline (`U+000A`), line tabulation (`U+000B`),
+form feed (`U+000C`), or carriage return (`U+000D`).
+
+[Whitespace](@) is a sequence of one or more [whitespace
+characters].
+
+A [Unicode whitespace character](@) is
+any code point in the Unicode `Zs` general category, or a tab (`U+0009`),
+carriage return (`U+000D`), newline (`U+000A`), or form feed
+(`U+000C`).
+
+[Unicode whitespace](@) is a sequence of one
+or more [Unicode whitespace characters].
+
+A [space](@) is `U+0020`.
+
+A [non-whitespace character](@) is any character
+that is not a [whitespace character].
+
+An [ASCII punctuation character](@)
+is `!`, `"`, `#`, `$`, `%`, `&`, `'`, `(`, `)`,
+`*`, `+`, `,`, `-`, `.`, `/` (U+0021–2F),
+`:`, `;`, `<`, `=`, `>`, `?`, `@` (U+003A–0040),
+`[`, `\`, `]`, `^`, `_`, `` ` `` (U+005B–0060),
+`{`, `|`, `}`, or `~` (U+007B–007E).
+
+A [punctuation character](@) is an [ASCII
+punctuation character] or anything in
+the general Unicode categories `Pc`, `Pd`, `Pe`, `Pf`, `Pi`, `Po`, or `Ps`.
+
+## Tabs
+
+Tabs in lines are not expanded to [spaces]. However,
+in contexts where whitespace helps to define block structure,
+tabs behave as if they were replaced by spaces with a tab stop
+of 4 characters.
+
+Thus, for example, a tab can be used instead of four spaces
+in an indented code block. (Note, however, that internal
+tabs are passed through as literal tabs, not expanded to
+spaces.)
+
+```````````````````````````````` example
+→foo→baz→→bim
+.
+<pre><code>foo→baz→→bim
+</code></pre>
+````````````````````````````````
+
+```````````````````````````````` example
+ →foo→baz→→bim
+.
+<pre><code>foo→baz→→bim
+</code></pre>
+````````````````````````````````
+
+```````````````````````````````` example
+ a→a
+ ὐ→a
+.
+<pre><code>a→a
+ὐ→a
+</code></pre>
+````````````````````````````````
+
+In the following example, a continuation paragraph of a list
+item is indented with a tab; this has exactly the same effect
+as indentation with four spaces would:
+
+```````````````````````````````` example
+ - foo
+
+→bar
+.
+<ul>
+<li>
+<p>foo</p>
+<p>bar</p>
+</li>
+</ul>
+````````````````````````````````
+
+```````````````````````````````` example
+- foo
+
+→→bar
+.
+<ul>
+<li>
+<p>foo</p>
+<pre><code> bar
+</code></pre>
+</li>
+</ul>
+````````````````````````````````
+
+Normally the `>` that begins a block quote may be followed
+optionally by a space, which is not considered part of the
+content. In the following case `>` is followed by a tab,
+which is treated as if it were expanded into three spaces.
+Since one of these spaces is considered part of the
+delimiter, `foo` is considered to be indented six spaces
+inside the block quote context, so we get an indented
+code block starting with two spaces.
+
+```````````````````````````````` example
+>→→foo
+.
+<blockquote>
+<pre><code> foo
+</code></pre>
+</blockquote>
+````````````````````````````````
+
+```````````````````````````````` example
+-→→foo
+.
+<ul>
+<li>
+<pre><code> foo
+</code></pre>
+</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ foo
+→bar
+.
+<pre><code>foo
+bar
+</code></pre>
+````````````````````````````````
+
+```````````````````````````````` example
+ - foo
+ - bar
+→ - baz
+.
+<ul>
+<li>foo
+<ul>
+<li>bar
+<ul>
+<li>baz</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+````````````````````````````````
+
+```````````````````````````````` example
+#→Foo
+.
+<h1>Foo</h1>
+````````````````````````````````
+
+```````````````````````````````` example
+*→*→*→
+.
+<hr />
+````````````````````````````````
+
+
+## Insecure characters
+
+For security reasons, the Unicode character `U+0000` must be replaced
+with the REPLACEMENT CHARACTER (`U+FFFD`).
+
+# Blocks and inlines
+
+We can think of a document as a sequence of
+[blocks](@)---structural elements like paragraphs, block
+quotations, lists, headings, rules, and code blocks. Some blocks (like
+block quotes and list items) contain other blocks; others (like
+headings and paragraphs) contain [inline](@) content---text,
+links, emphasized text, images, code spans, and so on.
+
+## Precedence
+
+Indicators of block structure always take precedence over indicators
+of inline structure. So, for example, the following is a list with
+two items, not a list with one item containing a code span:
+
+```````````````````````````````` example
+- `one
+- two`
+.
+<ul>
+<li>`one</li>
+<li>two`</li>
+</ul>
+````````````````````````````````
+
+
+This means that parsing can proceed in two steps: first, the block
+structure of the document can be discerned; second, text lines inside
+paragraphs, headings, and other block constructs can be parsed for inline
+structure. The second step requires information about link reference
+definitions that will be available only at the end of the first
+step. Note that the first step requires processing lines in sequence,
+but the second can be parallelized, since the inline parsing of
+one block element does not affect the inline parsing of any other.
+
+## Container blocks and leaf blocks
+
+We can divide blocks into two types:
+[container blocks](@),
+which can contain other blocks, and [leaf blocks](@),
+which cannot.
+
+# Leaf blocks
+
+This section describes the different kinds of leaf block that make up a
+Markdown document.
+
+## Thematic breaks
+
+A line consisting of 0-3 spaces of indentation, followed by a sequence
+of three or more matching `-`, `_`, or `*` characters, each followed
+optionally by any number of spaces or tabs, forms a
+[thematic break](@).
+
+```````````````````````````````` example
+***
+---
+___
+.
+<hr />
+<hr />
+<hr />
+````````````````````````````````
+
+
+Wrong characters:
+
+```````````````````````````````` example
++++
+.
+<p>+++</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+===
+.
+<p>===</p>
+````````````````````````````````
+
+
+Not enough characters:
+
+```````````````````````````````` example
+--
+**
+__
+.
+<p>--
+**
+__</p>
+````````````````````````````````
+
+
+One to three spaces indent are allowed:
+
+```````````````````````````````` example
+ ***
+ ***
+ ***
+.
+<hr />
+<hr />
+<hr />
+````````````````````````````````
+
+
+Four spaces is too many:
+
+```````````````````````````````` example
+ ***
+.
+<pre><code>***
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+Foo
+ ***
+.
+<p>Foo
+***</p>
+````````````````````````````````
+
+
+More than three characters may be used:
+
+```````````````````````````````` example
+_____________________________________
+.
+<hr />
+````````````````````````````````
+
+
+Spaces are allowed between the characters:
+
+```````````````````````````````` example
+ - - -
+.
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+ ** * ** * ** * **
+.
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+- - - -
+.
+<hr />
+````````````````````````````````
+
+
+Spaces are allowed at the end:
+
+```````````````````````````````` example
+- - - -
+.
+<hr />
+````````````````````````````````
+
+
+However, no other characters may occur in the line:
+
+```````````````````````````````` example
+_ _ _ _ a
+
+a------
+
+---a---
+.
+<p>_ _ _ _ a</p>
+<p>a------</p>
+<p>---a---</p>
+````````````````````````````````
+
+
+It is required that all of the [non-whitespace characters] be the same.
+So, this is not a thematic break:
+
+```````````````````````````````` example
+ *-*
+.
+<p><em>-</em></p>
+````````````````````````````````
+
+
+Thematic breaks do not need blank lines before or after:
+
+```````````````````````````````` example
+- foo
+***
+- bar
+.
+<ul>
+<li>foo</li>
+</ul>
+<hr />
+<ul>
+<li>bar</li>
+</ul>
+````````````````````````````````
+
+
+Thematic breaks can interrupt a paragraph:
+
+```````````````````````````````` example
+Foo
+***
+bar
+.
+<p>Foo</p>
+<hr />
+<p>bar</p>
+````````````````````````````````
+
+
+If a line of dashes that meets the above conditions for being a
+thematic break could also be interpreted as the underline of a [setext
+heading], the interpretation as a
+[setext heading] takes precedence. Thus, for example,
+this is a setext heading, not a paragraph followed by a thematic break:
+
+```````````````````````````````` example
+Foo
+---
+bar
+.
+<h2>Foo</h2>
+<p>bar</p>
+````````````````````````````````
+
+
+When both a thematic break and a list item are possible
+interpretations of a line, the thematic break takes precedence:
+
+```````````````````````````````` example
+* Foo
+* * *
+* Bar
+.
+<ul>
+<li>Foo</li>
+</ul>
+<hr />
+<ul>
+<li>Bar</li>
+</ul>
+````````````````````````````````
+
+
+If you want a thematic break in a list item, use a different bullet:
+
+```````````````````````````````` example
+- Foo
+- * * *
+.
+<ul>
+<li>Foo</li>
+<li>
+<hr />
+</li>
+</ul>
+````````````````````````````````
+
+
+## ATX headings
+
+An [ATX heading](@)
+consists of a string of characters, parsed as inline content, between an
+opening sequence of 1--6 unescaped `#` characters and an optional
+closing sequence of any number of unescaped `#` characters.
+The opening sequence of `#` characters must be followed by a
+[space] or by the end of line. The optional closing sequence of `#`s must be
+preceded by a [space] and may be followed by spaces only. The opening
+`#` character may be indented 0-3 spaces. The raw contents of the
+heading are stripped of leading and trailing spaces before being parsed
+as inline content. The heading level is equal to the number of `#`
+characters in the opening sequence.
+
+Simple headings:
+
+```````````````````````````````` example
+# foo
+## foo
+### foo
+#### foo
+##### foo
+###### foo
+.
+<h1>foo</h1>
+<h2>foo</h2>
+<h3>foo</h3>
+<h4>foo</h4>
+<h5>foo</h5>
+<h6>foo</h6>
+````````````````````````````````
+
+
+More than six `#` characters is not a heading:
+
+```````````````````````````````` example
+####### foo
+.
+<p>####### foo</p>
+````````````````````````````````
+
+
+At least one space is required between the `#` characters and the
+heading's contents, unless the heading is empty. Note that many
+implementations currently do not require the space. However, the
+space was required by the
+[original ATX implementation](http://www.aaronsw.com/2002/atx/atx.py),
+and it helps prevent things like the following from being parsed as
+headings:
+
+```````````````````````````````` example
+#5 bolt
+
+#hashtag
+.
+<p>#5 bolt</p>
+<p>#hashtag</p>
+````````````````````````````````
+
+
+This is not a heading, because the first `#` is escaped:
+
+```````````````````````````````` example
+\## foo
+.
+<p>## foo</p>
+````````````````````````````````
+
+
+Contents are parsed as inlines:
+
+```````````````````````````````` example
+# foo *bar* \*baz\*
+.
+<h1>foo <em>bar</em> *baz*</h1>
+````````````````````````````````
+
+
+Leading and trailing [whitespace] is ignored in parsing inline content:
+
+```````````````````````````````` example
+# foo
+.
+<h1>foo</h1>
+````````````````````````````````
+
+
+One to three spaces indentation are allowed:
+
+```````````````````````````````` example
+ ### foo
+ ## foo
+ # foo
+.
+<h3>foo</h3>
+<h2>foo</h2>
+<h1>foo</h1>
+````````````````````````````````
+
+
+Four spaces are too much:
+
+```````````````````````````````` example
+ # foo
+.
+<pre><code># foo
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo
+ # bar
+.
+<p>foo
+# bar</p>
+````````````````````````````````
+
+
+A closing sequence of `#` characters is optional:
+
+```````````````````````````````` example
+## foo ##
+ ### bar ###
+.
+<h2>foo</h2>
+<h3>bar</h3>
+````````````````````````````````
+
+
+It need not be the same length as the opening sequence:
+
+```````````````````````````````` example
+# foo ##################################
+##### foo ##
+.
+<h1>foo</h1>
+<h5>foo</h5>
+````````````````````````````````
+
+
+Spaces are allowed after the closing sequence:
+
+```````````````````````````````` example
+### foo ###
+.
+<h3>foo</h3>
+````````````````````````````````
+
+
+A sequence of `#` characters with anything but [spaces] following it
+is not a closing sequence, but counts as part of the contents of the
+heading:
+
+```````````````````````````````` example
+### foo ### b
+.
+<h3>foo ### b</h3>
+````````````````````````````````
+
+
+The closing sequence must be preceded by a space:
+
+```````````````````````````````` example
+# foo#
+.
+<h1>foo#</h1>
+````````````````````````````````
+
+
+Backslash-escaped `#` characters do not count as part
+of the closing sequence:
+
+```````````````````````````````` example
+### foo \###
+## foo #\##
+# foo \#
+.
+<h3>foo ###</h3>
+<h2>foo ###</h2>
+<h1>foo #</h1>
+````````````````````````````````
+
+
+ATX headings need not be separated from surrounding content by blank
+lines, and they can interrupt paragraphs:
+
+```````````````````````````````` example
+****
+## foo
+****
+.
+<hr />
+<h2>foo</h2>
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+Foo bar
+# baz
+Bar foo
+.
+<p>Foo bar</p>
+<h1>baz</h1>
+<p>Bar foo</p>
+````````````````````````````````
+
+
+ATX headings can be empty:
+
+```````````````````````````````` example
+##
+#
+### ###
+.
+<h2></h2>
+<h1></h1>
+<h3></h3>
+````````````````````````````````
+
+
+## Setext headings
+
+A [setext heading](@) consists of one or more
+lines of text, each containing at least one [non-whitespace
+character], with no more than 3 spaces indentation, followed by
+a [setext heading underline]. The lines of text must be such
+that, were they not followed by the setext heading underline,
+they would be interpreted as a paragraph: they cannot be
+interpretable as a [code fence], [ATX heading][ATX headings],
+[block quote][block quotes], [thematic break][thematic breaks],
+[list item][list items], or [HTML block][HTML blocks].
+
+A [setext heading underline](@) is a sequence of
+`=` characters or a sequence of `-` characters, with no more than 3
+spaces indentation and any number of trailing spaces. If a line
+containing a single `-` can be interpreted as an
+empty [list items], it should be interpreted this way
+and not as a [setext heading underline].
+
+The heading is a level 1 heading if `=` characters are used in
+the [setext heading underline], and a level 2 heading if `-`
+characters are used. The contents of the heading are the result
+of parsing the preceding lines of text as CommonMark inline
+content.
+
+In general, a setext heading need not be preceded or followed by a
+blank line. However, it cannot interrupt a paragraph, so when a
+setext heading comes after a paragraph, a blank line is needed between
+them.
+
+Simple examples:
+
+```````````````````````````````` example
+Foo *bar*
+=========
+
+Foo *bar*
+---------
+.
+<h1>Foo <em>bar</em></h1>
+<h2>Foo <em>bar</em></h2>
+````````````````````````````````
+
+
+The content of the header may span more than one line:
+
+```````````````````````````````` example
+Foo *bar
+baz*
+====
+.
+<h1>Foo <em>bar
+baz</em></h1>
+````````````````````````````````
+
+The contents are the result of parsing the heading's raw
+content as inlines. The heading's raw content is formed by
+concatenating the lines and removing initial and final
+[whitespace].
+
+```````````````````````````````` example
+ Foo *bar
+baz*→
+====
+.
+<h1>Foo <em>bar
+baz</em></h1>
+````````````````````````````````
+
+
+The underlining can be any length:
+
+```````````````````````````````` example
+Foo
+-------------------------
+
+Foo
+=
+.
+<h2>Foo</h2>
+<h1>Foo</h1>
+````````````````````````````````
+
+
+The heading content can be indented up to three spaces, and need
+not line up with the underlining:
+
+```````````````````````````````` example
+ Foo
+---
+
+ Foo
+-----
+
+ Foo
+ ===
+.
+<h2>Foo</h2>
+<h2>Foo</h2>
+<h1>Foo</h1>
+````````````````````````````````
+
+
+Four spaces indent is too much:
+
+```````````````````````````````` example
+ Foo
+ ---
+
+ Foo
+---
+.
+<pre><code>Foo
+---
+
+Foo
+</code></pre>
+<hr />
+````````````````````````````````
+
+
+The setext heading underline can be indented up to three spaces, and
+may have trailing spaces:
+
+```````````````````````````````` example
+Foo
+ ----
+.
+<h2>Foo</h2>
+````````````````````````````````
+
+
+Four spaces is too much:
+
+```````````````````````````````` example
+Foo
+ ---
+.
+<p>Foo
+---</p>
+````````````````````````````````
+
+
+The setext heading underline cannot contain internal spaces:
+
+```````````````````````````````` example
+Foo
+= =
+
+Foo
+--- -
+.
+<p>Foo
+= =</p>
+<p>Foo</p>
+<hr />
+````````````````````````````````
+
+
+Trailing spaces in the content line do not cause a line break:
+
+```````````````````````````````` example
+Foo
+-----
+.
+<h2>Foo</h2>
+````````````````````````````````
+
+
+Nor does a backslash at the end:
+
+```````````````````````````````` example
+Foo\
+----
+.
+<h2>Foo\</h2>
+````````````````````````````````
+
+
+Since indicators of block structure take precedence over
+indicators of inline structure, the following are setext headings:
+
+```````````````````````````````` example
+`Foo
+----
+`
+
+<a title="a lot
+---
+of dashes"/>
+.
+<h2>`Foo</h2>
+<p>`</p>
+<h2>&lt;a title=&quot;a lot</h2>
+<p>of dashes&quot;/&gt;</p>
+````````````````````````````````
+
+
+The setext heading underline cannot be a [lazy continuation
+line] in a list item or block quote:
+
+```````````````````````````````` example
+> Foo
+---
+.
+<blockquote>
+<p>Foo</p>
+</blockquote>
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+> foo
+bar
+===
+.
+<blockquote>
+<p>foo
+bar
+===</p>
+</blockquote>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- Foo
+---
+.
+<ul>
+<li>Foo</li>
+</ul>
+<hr />
+````````````````````````````````
+
+
+A blank line is needed between a paragraph and a following
+setext heading, since otherwise the paragraph becomes part
+of the heading's content:
+
+```````````````````````````````` example
+Foo
+Bar
+---
+.
+<h2>Foo
+Bar</h2>
+````````````````````````````````
+
+
+But in general a blank line is not required before or after
+setext headings:
+
+```````````````````````````````` example
+---
+Foo
+---
+Bar
+---
+Baz
+.
+<hr />
+<h2>Foo</h2>
+<h2>Bar</h2>
+<p>Baz</p>
+````````````````````````````````
+
+
+Setext headings cannot be empty:
+
+```````````````````````````````` example
+
+====
+.
+<p>====</p>
+````````````````````````````````
+
+
+Setext heading text lines must not be interpretable as block
+constructs other than paragraphs. So, the line of dashes
+in these examples gets interpreted as a thematic break:
+
+```````````````````````````````` example
+---
+---
+.
+<hr />
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+- foo
+-----
+.
+<ul>
+<li>foo</li>
+</ul>
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+ foo
+---
+.
+<pre><code>foo
+</code></pre>
+<hr />
+````````````````````````````````
+
+
+```````````````````````````````` example
+> foo
+-----
+.
+<blockquote>
+<p>foo</p>
+</blockquote>
+<hr />
+````````````````````````````````
+
+
+If you want a heading with `> foo` as its literal text, you can
+use backslash escapes:
+
+```````````````````````````````` example
+\> foo
+------
+.
+<h2>&gt; foo</h2>
+````````````````````````````````
+
+
+**Compatibility note:** Most existing Markdown implementations
+do not allow the text of setext headings to span multiple lines.
+But there is no consensus about how to interpret
+
+``` markdown
+Foo
+bar
+---
+baz
+```
+
+One can find four different interpretations:
+
+1. paragraph "Foo", heading "bar", paragraph "baz"
+2. paragraph "Foo bar", thematic break, paragraph "baz"
+3. paragraph "Foo bar --- baz"
+4. heading "Foo bar", paragraph "baz"
+
+We find interpretation 4 most natural, and interpretation 4
+increases the expressive power of CommonMark, by allowing
+multiline headings. Authors who want interpretation 1 can
+put a blank line after the first paragraph:
+
+```````````````````````````````` example
+Foo
+
+bar
+---
+baz
+.
+<p>Foo</p>
+<h2>bar</h2>
+<p>baz</p>
+````````````````````````````````
+
+
+Authors who want interpretation 2 can put blank lines around
+the thematic break,
+
+```````````````````````````````` example
+Foo
+bar
+
+---
+
+baz
+.
+<p>Foo
+bar</p>
+<hr />
+<p>baz</p>
+````````````````````````````````
+
+
+or use a thematic break that cannot count as a [setext heading
+underline], such as
+
+```````````````````````````````` example
+Foo
+bar
+* * *
+baz
+.
+<p>Foo
+bar</p>
+<hr />
+<p>baz</p>
+````````````````````````````````
+
+
+Authors who want interpretation 3 can use backslash escapes:
+
+```````````````````````````````` example
+Foo
+bar
+\---
+baz
+.
+<p>Foo
+bar
+---
+baz</p>
+````````````````````````````````
+
+
+## Indented code blocks
+
+An [indented code block](@) is composed of one or more
+[indented chunks] separated by blank lines.
+An [indented chunk](@) is a sequence of non-blank lines,
+each indented four or more spaces. The contents of the code block are
+the literal contents of the lines, including trailing
+[line endings], minus four spaces of indentation.
+An indented code block has no [info string].
+
+An indented code block cannot interrupt a paragraph, so there must be
+a blank line between a paragraph and a following indented code block.
+(A blank line is not needed, however, between a code block and a following
+paragraph.)
+
+```````````````````````````````` example
+ a simple
+ indented code block
+.
+<pre><code>a simple
+ indented code block
+</code></pre>
+````````````````````````````````
+
+
+If there is any ambiguity between an interpretation of indentation
+as a code block and as indicating that material belongs to a [list
+item][list items], the list item interpretation takes precedence:
+
+```````````````````````````````` example
+ - foo
+
+ bar
+.
+<ul>
+<li>
+<p>foo</p>
+<p>bar</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+1. foo
+
+ - bar
+.
+<ol>
+<li>
+<p>foo</p>
+<ul>
+<li>bar</li>
+</ul>
+</li>
+</ol>
+````````````````````````````````
+
+
+
+The contents of a code block are literal text, and do not get parsed
+as Markdown:
+
+```````````````````````````````` example
+ <a/>
+ *hi*
+
+ - one
+.
+<pre><code>&lt;a/&gt;
+*hi*
+
+- one
+</code></pre>
+````````````````````````````````
+
+
+Here we have three chunks separated by blank lines:
+
+```````````````````````````````` example
+ chunk1
+
+ chunk2
+
+
+
+ chunk3
+.
+<pre><code>chunk1
+
+chunk2
+
+
+
+chunk3
+</code></pre>
+````````````````````````````````
+
+
+Any initial spaces beyond four will be included in the content, even
+in interior blank lines:
+
+```````````````````````````````` example
+ chunk1
+
+ chunk2
+.
+<pre><code>chunk1
+
+ chunk2
+</code></pre>
+````````````````````````````````
+
+
+An indented code block cannot interrupt a paragraph. (This
+allows hanging indents and the like.)
+
+```````````````````````````````` example
+Foo
+ bar
+
+.
+<p>Foo
+bar</p>
+````````````````````````````````
+
+
+However, any non-blank line with fewer than four leading spaces ends
+the code block immediately. So a paragraph may occur immediately
+after indented code:
+
+```````````````````````````````` example
+ foo
+bar
+.
+<pre><code>foo
+</code></pre>
+<p>bar</p>
+````````````````````````````````
+
+
+And indented code can occur immediately before and after other kinds of
+blocks:
+
+```````````````````````````````` example
+# Heading
+ foo
+Heading
+------
+ foo
+----
+.
+<h1>Heading</h1>
+<pre><code>foo
+</code></pre>
+<h2>Heading</h2>
+<pre><code>foo
+</code></pre>
+<hr />
+````````````````````````````````
+
+
+The first line can be indented more than four spaces:
+
+```````````````````````````````` example
+ foo
+ bar
+.
+<pre><code> foo
+bar
+</code></pre>
+````````````````````````````````
+
+
+Blank lines preceding or following an indented code block
+are not included in it:
+
+```````````````````````````````` example
+
+
+ foo
+
+
+.
+<pre><code>foo
+</code></pre>
+````````````````````````````````
+
+
+Trailing spaces are included in the code block's content:
+
+```````````````````````````````` example
+ foo
+.
+<pre><code>foo
+</code></pre>
+````````````````````````````````
+
+
+
+## Fenced code blocks
+
+A [code fence](@) is a sequence
+of at least three consecutive backtick characters (`` ` ``) or
+tildes (`~`). (Tildes and backticks cannot be mixed.)
+A [fenced code block](@)
+begins with a code fence, indented no more than three spaces.
+
+The line with the opening code fence may optionally contain some text
+following the code fence; this is trimmed of leading and trailing
+whitespace and called the [info string](@). If the [info string] comes
+after a backtick fence, it may not contain any backtick
+characters. (The reason for this restriction is that otherwise
+some inline code would be incorrectly interpreted as the
+beginning of a fenced code block.)
+
+The content of the code block consists of all subsequent lines, until
+a closing [code fence] of the same type as the code block
+began with (backticks or tildes), and with at least as many backticks
+or tildes as the opening code fence. If the leading code fence is
+indented N spaces, then up to N spaces of indentation are removed from
+each line of the content (if present). (If a content line is not
+indented, it is preserved unchanged. If it is indented less than N
+spaces, all of the indentation is removed.)
+
+The closing code fence may be indented up to three spaces, and may be
+followed only by spaces, which are ignored. If the end of the
+containing block (or document) is reached and no closing code fence
+has been found, the code block contains all of the lines after the
+opening code fence until the end of the containing block (or
+document). (An alternative spec would require backtracking in the
+event that a closing code fence is not found. But this makes parsing
+much less efficient, and there seems to be no real down side to the
+behavior described here.)
+
+A fenced code block may interrupt a paragraph, and does not require
+a blank line either before or after.
+
+The content of a code fence is treated as literal text, not parsed
+as inlines. The first word of the [info string] is typically used to
+specify the language of the code sample, and rendered in the `class`
+attribute of the `code` tag. However, this spec does not mandate any
+particular treatment of the [info string].
+
+Here is a simple example with backticks:
+
+```````````````````````````````` example
+```
+<
+ >
+```
+.
+<pre><code>&lt;
+ &gt;
+</code></pre>
+````````````````````````````````
+
+
+With tildes:
+
+```````````````````````````````` example
+~~~
+<
+ >
+~~~
+.
+<pre><code>&lt;
+ &gt;
+</code></pre>
+````````````````````````````````
+
+Fewer than three backticks is not enough:
+
+```````````````````````````````` example
+``
+foo
+``
+.
+<p><code>foo</code></p>
+````````````````````````````````
+
+The closing code fence must use the same character as the opening
+fence:
+
+```````````````````````````````` example
+```
+aaa
+~~~
+```
+.
+<pre><code>aaa
+~~~
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+~~~
+aaa
+```
+~~~
+.
+<pre><code>aaa
+```
+</code></pre>
+````````````````````````````````
+
+
+The closing code fence must be at least as long as the opening fence:
+
+```````````````````````````````` example
+````
+aaa
+```
+``````
+.
+<pre><code>aaa
+```
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+~~~~
+aaa
+~~~
+~~~~
+.
+<pre><code>aaa
+~~~
+</code></pre>
+````````````````````````````````
+
+
+Unclosed code blocks are closed by the end of the document
+(or the enclosing [block quote][block quotes] or [list item][list items]):
+
+```````````````````````````````` example
+```
+.
+<pre><code></code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+`````
+
+```
+aaa
+.
+<pre><code>
+```
+aaa
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+> ```
+> aaa
+
+bbb
+.
+<blockquote>
+<pre><code>aaa
+</code></pre>
+</blockquote>
+<p>bbb</p>
+````````````````````````````````
+
+
+A code block can have all empty lines as its content:
+
+```````````````````````````````` example
+```
+
+
+```
+.
+<pre><code>
+
+</code></pre>
+````````````````````````````````
+
+
+A code block can be empty:
+
+```````````````````````````````` example
+```
+```
+.
+<pre><code></code></pre>
+````````````````````````````````
+
+
+Fences can be indented. If the opening fence is indented,
+content lines will have equivalent opening indentation removed,
+if present:
+
+```````````````````````````````` example
+ ```
+ aaa
+aaa
+```
+.
+<pre><code>aaa
+aaa
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ ```
+aaa
+ aaa
+aaa
+ ```
+.
+<pre><code>aaa
+aaa
+aaa
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ ```
+ aaa
+ aaa
+ aaa
+ ```
+.
+<pre><code>aaa
+ aaa
+aaa
+</code></pre>
+````````````````````````````````
+
+
+Four spaces indentation produces an indented code block:
+
+```````````````````````````````` example
+ ```
+ aaa
+ ```
+.
+<pre><code>```
+aaa
+```
+</code></pre>
+````````````````````````````````
+
+
+Closing fences may be indented by 0-3 spaces, and their indentation
+need not match that of the opening fence:
+
+```````````````````````````````` example
+```
+aaa
+ ```
+.
+<pre><code>aaa
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ ```
+aaa
+ ```
+.
+<pre><code>aaa
+</code></pre>
+````````````````````````````````
+
+
+This is not a closing fence, because it is indented 4 spaces:
+
+```````````````````````````````` example
+```
+aaa
+ ```
+.
+<pre><code>aaa
+ ```
+</code></pre>
+````````````````````````````````
+
+
+
+Code fences (opening and closing) cannot contain internal spaces:
+
+```````````````````````````````` example
+``` ```
+aaa
+.
+<p><code> </code>
+aaa</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+~~~~~~
+aaa
+~~~ ~~
+.
+<pre><code>aaa
+~~~ ~~
+</code></pre>
+````````````````````````````````
+
+
+Fenced code blocks can interrupt paragraphs, and can be followed
+directly by paragraphs, without a blank line between:
+
+```````````````````````````````` example
+foo
+```
+bar
+```
+baz
+.
+<p>foo</p>
+<pre><code>bar
+</code></pre>
+<p>baz</p>
+````````````````````````````````
+
+
+Other blocks can also occur before and after fenced code blocks
+without an intervening blank line:
+
+```````````````````````````````` example
+foo
+---
+~~~
+bar
+~~~
+# baz
+.
+<h2>foo</h2>
+<pre><code>bar
+</code></pre>
+<h1>baz</h1>
+````````````````````````````````
+
+
+An [info string] can be provided after the opening code fence.
+Although this spec doesn't mandate any particular treatment of
+the info string, the first word is typically used to specify
+the language of the code block. In HTML output, the language is
+normally indicated by adding a class to the `code` element consisting
+of `language-` followed by the language name.
+
+```````````````````````````````` example
+```ruby
+def foo(x)
+ return 3
+end
+```
+.
+<pre><code class="language-ruby">def foo(x)
+ return 3
+end
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+~~~~ ruby startline=3 $%@#$
+def foo(x)
+ return 3
+end
+~~~~~~~
+.
+<pre><code class="language-ruby">def foo(x)
+ return 3
+end
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+````;
+````
+.
+<pre><code class="language-;"></code></pre>
+````````````````````````````````
+
+
+[Info strings] for backtick code blocks cannot contain backticks:
+
+```````````````````````````````` example
+``` aa ```
+foo
+.
+<p><code>aa</code>
+foo</p>
+````````````````````````````````
+
+
+[Info strings] for tilde code blocks can contain backticks and tildes:
+
+```````````````````````````````` example
+~~~ aa ``` ~~~
+foo
+~~~
+.
+<pre><code class="language-aa">foo
+</code></pre>
+````````````````````````````````
+
+
+Closing code fences cannot have [info strings]:
+
+```````````````````````````````` example
+```
+``` aaa
+```
+.
+<pre><code>``` aaa
+</code></pre>
+````````````````````````````````
+
+
+
+## HTML blocks
+
+An [HTML block](@) is a group of lines that is treated
+as raw HTML (and will not be escaped in HTML output).
+
+There are seven kinds of [HTML block], which can be defined by their
+start and end conditions. The block begins with a line that meets a
+[start condition](@) (after up to three spaces optional indentation).
+It ends with the first subsequent line that meets a matching [end
+condition](@), or the last line of the document, or the last line of
+the [container block](#container-blocks) containing the current HTML
+block, if no line is encountered that meets the [end condition]. If
+the first line meets both the [start condition] and the [end
+condition], the block will contain just that line.
+
+1. **Start condition:** line begins with the string `<script`,
+`<pre`, or `<style` (case-insensitive), followed by whitespace,
+the string `>`, or the end of the line.\
+**End condition:** line contains an end tag
+`</script>`, `</pre>`, or `</style>` (case-insensitive; it
+need not match the start tag).
+
+2. **Start condition:** line begins with the string `<!--`.\
+**End condition:** line contains the string `-->`.
+
+3. **Start condition:** line begins with the string `<?`.\
+**End condition:** line contains the string `?>`.
+
+4. **Start condition:** line begins with the string `<!`
+followed by an uppercase ASCII letter.\
+**End condition:** line contains the character `>`.
+
+5. **Start condition:** line begins with the string
+`<![CDATA[`.\
+**End condition:** line contains the string `]]>`.
+
+6. **Start condition:** line begins with the string `<` or `</`
+followed by one of the strings (case-insensitive) `address`,
+`article`, `aside`, `base`, `basefont`, `blockquote`, `body`,
+`caption`, `center`, `col`, `colgroup`, `dd`, `details`, `dialog`,
+`dir`, `div`, `dl`, `dt`, `fieldset`, `figcaption`, `figure`,
+`footer`, `form`, `frame`, `frameset`,
+`h1`, `h2`, `h3`, `h4`, `h5`, `h6`, `head`, `header`, `hr`,
+`html`, `iframe`, `legend`, `li`, `link`, `main`, `menu`, `menuitem`,
+`nav`, `noframes`, `ol`, `optgroup`, `option`, `p`, `param`,
+`section`, `source`, `summary`, `table`, `tbody`, `td`,
+`tfoot`, `th`, `thead`, `title`, `tr`, `track`, `ul`, followed
+by [whitespace], the end of the line, the string `>`, or
+the string `/>`.\
+**End condition:** line is followed by a [blank line].
+
+7. **Start condition:** line begins with a complete [open tag]
+(with any [tag name] other than `script`,
+`style`, or `pre`) or a complete [closing tag],
+followed only by [whitespace] or the end of the line.\
+**End condition:** line is followed by a [blank line].
+
+HTML blocks continue until they are closed by their appropriate
+[end condition], or the last line of the document or other [container
+block](#container-blocks). This means any HTML **within an HTML
+block** that might otherwise be recognised as a start condition will
+be ignored by the parser and passed through as-is, without changing
+the parser's state.
+
+For instance, `<pre>` within an HTML block started by `<table>` will not affect
+the parser state; as the HTML block was started by start condition 6, it
+will end at any blank line. This can be surprising:
+
+```````````````````````````````` example
+<table><tr><td>
+<pre>
+**Hello**,
+
+_world_.
+</pre>
+</td></tr></table>
+.
+<table><tr><td>
+<pre>
+**Hello**,
+<p><em>world</em>.
+</pre></p>
+</td></tr></table>
+````````````````````````````````
+
+In this case, the HTML block is terminated by the newline — the `**Hello**`
+text remains verbatim — and regular parsing resumes, with a paragraph,
+emphasised `world` and inline and block HTML following.
+
+All types of [HTML blocks] except type 7 may interrupt
+a paragraph. Blocks of type 7 may not interrupt a paragraph.
+(This restriction is intended to prevent unwanted interpretation
+of long tags inside a wrapped paragraph as starting HTML blocks.)
+
+Some simple examples follow. Here are some basic HTML blocks
+of type 6:
+
+```````````````````````````````` example
+<table>
+ <tr>
+ <td>
+ hi
+ </td>
+ </tr>
+</table>
+
+okay.
+.
+<table>
+ <tr>
+ <td>
+ hi
+ </td>
+ </tr>
+</table>
+<p>okay.</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ <div>
+ *hello*
+ <foo><a>
+.
+ <div>
+ *hello*
+ <foo><a>
+````````````````````````````````
+
+
+A block can also start with a closing tag:
+
+```````````````````````````````` example
+</div>
+*foo*
+.
+</div>
+*foo*
+````````````````````````````````
+
+
+Here we have two HTML blocks with a Markdown paragraph between them:
+
+```````````````````````````````` example
+<DIV CLASS="foo">
+
+*Markdown*
+
+</DIV>
+.
+<DIV CLASS="foo">
+<p><em>Markdown</em></p>
+</DIV>
+````````````````````````````````
+
+
+The tag on the first line can be partial, as long
+as it is split where there would be whitespace:
+
+```````````````````````````````` example
+<div id="foo"
+ class="bar">
+</div>
+.
+<div id="foo"
+ class="bar">
+</div>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<div id="foo" class="bar
+ baz">
+</div>
+.
+<div id="foo" class="bar
+ baz">
+</div>
+````````````````````````````````
+
+
+An open tag need not be closed:
+```````````````````````````````` example
+<div>
+*foo*
+
+*bar*
+.
+<div>
+*foo*
+<p><em>bar</em></p>
+````````````````````````````````
+
+
+
+A partial tag need not even be completed (garbage
+in, garbage out):
+
+```````````````````````````````` example
+<div id="foo"
+*hi*
+.
+<div id="foo"
+*hi*
+````````````````````````````````
+
+
+```````````````````````````````` example
+<div class
+foo
+.
+<div class
+foo
+````````````````````````````````
+
+
+The initial tag doesn't even need to be a valid
+tag, as long as it starts like one:
+
+```````````````````````````````` example
+<div *???-&&&-<---
+*foo*
+.
+<div *???-&&&-<---
+*foo*
+````````````````````````````````
+
+
+In type 6 blocks, the initial tag need not be on a line by
+itself:
+
+```````````````````````````````` example
+<div><a href="bar">*foo*</a></div>
+.
+<div><a href="bar">*foo*</a></div>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<table><tr><td>
+foo
+</td></tr></table>
+.
+<table><tr><td>
+foo
+</td></tr></table>
+````````````````````````````````
+
+
+Everything until the next blank line or end of document
+gets included in the HTML block. So, in the following
+example, what looks like a Markdown code block
+is actually part of the HTML block, which continues until a blank
+line or the end of the document is reached:
+
+```````````````````````````````` example
+<div></div>
+``` c
+int x = 33;
+```
+.
+<div></div>
+``` c
+int x = 33;
+```
+````````````````````````````````
+
+
+To start an [HTML block] with a tag that is *not* in the
+list of block-level tags in (6), you must put the tag by
+itself on the first line (and it must be complete):
+
+```````````````````````````````` example
+<a href="foo">
+*bar*
+</a>
+.
+<a href="foo">
+*bar*
+</a>
+````````````````````````````````
+
+
+In type 7 blocks, the [tag name] can be anything:
+
+```````````````````````````````` example
+<Warning>
+*bar*
+</Warning>
+.
+<Warning>
+*bar*
+</Warning>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<i class="foo">
+*bar*
+</i>
+.
+<i class="foo">
+*bar*
+</i>
+````````````````````````````````
+
+
+```````````````````````````````` example
+</ins>
+*bar*
+.
+</ins>
+*bar*
+````````````````````````````````
+
+
+These rules are designed to allow us to work with tags that
+can function as either block-level or inline-level tags.
+The `<del>` tag is a nice example. We can surround content with
+`<del>` tags in three different ways. In this case, we get a raw
+HTML block, because the `<del>` tag is on a line by itself:
+
+```````````````````````````````` example
+<del>
+*foo*
+</del>
+.
+<del>
+*foo*
+</del>
+````````````````````````````````
+
+
+In this case, we get a raw HTML block that just includes
+the `<del>` tag (because it ends with the following blank
+line). So the contents get interpreted as CommonMark:
+
+```````````````````````````````` example
+<del>
+
+*foo*
+
+</del>
+.
+<del>
+<p><em>foo</em></p>
+</del>
+````````````````````````````````
+
+
+Finally, in this case, the `<del>` tags are interpreted
+as [raw HTML] *inside* the CommonMark paragraph. (Because
+the tag is not on a line by itself, we get inline HTML
+rather than an [HTML block].)
+
+```````````````````````````````` example
+<del>*foo*</del>
+.
+<p><del><em>foo</em></del></p>
+````````````````````````````````
+
+
+HTML tags designed to contain literal content
+(`script`, `style`, `pre`), comments, processing instructions,
+and declarations are treated somewhat differently.
+Instead of ending at the first blank line, these blocks
+end at the first line containing a corresponding end tag.
+As a result, these blocks can contain blank lines:
+
+A pre tag (type 1):
+
+```````````````````````````````` example
+<pre language="haskell"><code>
+import Text.HTML.TagSoup
+
+main :: IO ()
+main = print $ parseTags tags
+</code></pre>
+okay
+.
+<pre language="haskell"><code>
+import Text.HTML.TagSoup
+
+main :: IO ()
+main = print $ parseTags tags
+</code></pre>
+<p>okay</p>
+````````````````````````````````
+
+
+A script tag (type 1):
+
+```````````````````````````````` example
+<script type="text/javascript">
+// JavaScript example
+
+document.getElementById("demo").innerHTML = "Hello JavaScript!";
+</script>
+okay
+.
+<script type="text/javascript">
+// JavaScript example
+
+document.getElementById("demo").innerHTML = "Hello JavaScript!";
+</script>
+<p>okay</p>
+````````````````````````````````
+
+
+A style tag (type 1):
+
+```````````````````````````````` example
+<style
+ type="text/css">
+h1 {color:red;}
+
+p {color:blue;}
+</style>
+okay
+.
+<style
+ type="text/css">
+h1 {color:red;}
+
+p {color:blue;}
+</style>
+<p>okay</p>
+````````````````````````````````
+
+
+If there is no matching end tag, the block will end at the
+end of the document (or the enclosing [block quote][block quotes]
+or [list item][list items]):
+
+```````````````````````````````` example
+<style
+ type="text/css">
+
+foo
+.
+<style
+ type="text/css">
+
+foo
+````````````````````````````````
+
+
+```````````````````````````````` example
+> <div>
+> foo
+
+bar
+.
+<blockquote>
+<div>
+foo
+</blockquote>
+<p>bar</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- <div>
+- foo
+.
+<ul>
+<li>
+<div>
+</li>
+<li>foo</li>
+</ul>
+````````````````````````````````
+
+
+The end tag can occur on the same line as the start tag:
+
+```````````````````````````````` example
+<style>p{color:red;}</style>
+*foo*
+.
+<style>p{color:red;}</style>
+<p><em>foo</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<!-- foo -->*bar*
+*baz*
+.
+<!-- foo -->*bar*
+<p><em>baz</em></p>
+````````````````````````````````
+
+
+Note that anything on the last line after the
+end tag will be included in the [HTML block]:
+
+```````````````````````````````` example
+<script>
+foo
+</script>1. *bar*
+.
+<script>
+foo
+</script>1. *bar*
+````````````````````````````````
+
+
+A comment (type 2):
+
+```````````````````````````````` example
+<!-- Foo
+
+bar
+ baz -->
+okay
+.
+<!-- Foo
+
+bar
+ baz -->
+<p>okay</p>
+````````````````````````````````
+
+
+
+A processing instruction (type 3):
+
+```````````````````````````````` example
+<?php
+
+ echo '>';
+
+?>
+okay
+.
+<?php
+
+ echo '>';
+
+?>
+<p>okay</p>
+````````````````````````````````
+
+
+A declaration (type 4):
+
+```````````````````````````````` example
+<!DOCTYPE html>
+.
+<!DOCTYPE html>
+````````````````````````````````
+
+
+CDATA (type 5):
+
+```````````````````````````````` example
+<![CDATA[
+function matchwo(a,b)
+{
+ if (a < b && a < 0) then {
+ return 1;
+
+ } else {
+
+ return 0;
+ }
+}
+]]>
+okay
+.
+<![CDATA[
+function matchwo(a,b)
+{
+ if (a < b && a < 0) then {
+ return 1;
+
+ } else {
+
+ return 0;
+ }
+}
+]]>
+<p>okay</p>
+````````````````````````````````
+
+
+The opening tag can be indented 1-3 spaces, but not 4:
+
+```````````````````````````````` example
+ <!-- foo -->
+
+ <!-- foo -->
+.
+ <!-- foo -->
+<pre><code>&lt;!-- foo --&gt;
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ <div>
+
+ <div>
+.
+ <div>
+<pre><code>&lt;div&gt;
+</code></pre>
+````````````````````````````````
+
+
+An HTML block of types 1--6 can interrupt a paragraph, and need not be
+preceded by a blank line.
+
+```````````````````````````````` example
+Foo
+<div>
+bar
+</div>
+.
+<p>Foo</p>
+<div>
+bar
+</div>
+````````````````````````````````
+
+
+However, a following blank line is needed, except at the end of
+a document, and except for blocks of types 1--5, [above][HTML
+block]:
+
+```````````````````````````````` example
+<div>
+bar
+</div>
+*foo*
+.
+<div>
+bar
+</div>
+*foo*
+````````````````````````````````
+
+
+HTML blocks of type 7 cannot interrupt a paragraph:
+
+```````````````````````````````` example
+Foo
+<a href="bar">
+baz
+.
+<p>Foo
+<a href="bar">
+baz</p>
+````````````````````````````````
+
+
+This rule differs from John Gruber's original Markdown syntax
+specification, which says:
+
+> The only restrictions are that block-level HTML elements —
+> e.g. `<div>`, `<table>`, `<pre>`, `<p>`, etc. — must be separated from
+> surrounding content by blank lines, and the start and end tags of the
+> block should not be indented with tabs or spaces.
+
+In some ways Gruber's rule is more restrictive than the one given
+here:
+
+- It requires that an HTML block be preceded by a blank line.
+- It does not allow the start tag to be indented.
+- It requires a matching end tag, which it also does not allow to
+ be indented.
+
+Most Markdown implementations (including some of Gruber's own) do not
+respect all of these restrictions.
+
+There is one respect, however, in which Gruber's rule is more liberal
+than the one given here, since it allows blank lines to occur inside
+an HTML block. There are two reasons for disallowing them here.
+First, it removes the need to parse balanced tags, which is
+expensive and can require backtracking from the end of the document
+if no matching end tag is found. Second, it provides a very simple
+and flexible way of including Markdown content inside HTML tags:
+simply separate the Markdown from the HTML using blank lines:
+
+Compare:
+
+```````````````````````````````` example
+<div>
+
+*Emphasized* text.
+
+</div>
+.
+<div>
+<p><em>Emphasized</em> text.</p>
+</div>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<div>
+*Emphasized* text.
+</div>
+.
+<div>
+*Emphasized* text.
+</div>
+````````````````````````````````
+
+
+Some Markdown implementations have adopted a convention of
+interpreting content inside tags as text if the open tag has
+the attribute `markdown=1`. The rule given above seems a simpler and
+more elegant way of achieving the same expressive power, which is also
+much simpler to parse.
+
+The main potential drawback is that one can no longer paste HTML
+blocks into Markdown documents with 100% reliability. However,
+*in most cases* this will work fine, because the blank lines in
+HTML are usually followed by HTML block tags. For example:
+
+```````````````````````````````` example
+<table>
+
+<tr>
+
+<td>
+Hi
+</td>
+
+</tr>
+
+</table>
+.
+<table>
+<tr>
+<td>
+Hi
+</td>
+</tr>
+</table>
+````````````````````````````````
+
+
+There are problems, however, if the inner tags are indented
+*and* separated by spaces, as then they will be interpreted as
+an indented code block:
+
+```````````````````````````````` example
+<table>
+
+ <tr>
+
+ <td>
+ Hi
+ </td>
+
+ </tr>
+
+</table>
+.
+<table>
+ <tr>
+<pre><code>&lt;td&gt;
+ Hi
+&lt;/td&gt;
+</code></pre>
+ </tr>
+</table>
+````````````````````````````````
+
+
+Fortunately, blank lines are usually not necessary and can be
+deleted. The exception is inside `<pre>` tags, but as described
+[above][HTML blocks], raw HTML blocks starting with `<pre>`
+*can* contain blank lines.
+
+## Link reference definitions
+
+A [link reference definition](@)
+consists of a [link label], indented up to three spaces, followed
+by a colon (`:`), optional [whitespace] (including up to one
+[line ending]), a [link destination],
+optional [whitespace] (including up to one
+[line ending]), and an optional [link
+title], which if it is present must be separated
+from the [link destination] by [whitespace].
+No further [non-whitespace characters] may occur on the line.
+
+A [link reference definition]
+does not correspond to a structural element of a document. Instead, it
+defines a label which can be used in [reference links]
+and reference-style [images] elsewhere in the document. [Link
+reference definitions] can come either before or after the links that use
+them.
+
+```````````````````````````````` example
+[foo]: /url "title"
+
+[foo]
+.
+<p><a href="/url" title="title">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ [foo]:
+ /url
+ 'the title'
+
+[foo]
+.
+<p><a href="/url" title="the title">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[Foo*bar\]]:my_(url) 'title (with parens)'
+
+[Foo*bar\]]
+.
+<p><a href="my_(url)" title="title (with parens)">Foo*bar]</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[Foo bar]:
+<my url>
+'title'
+
+[Foo bar]
+.
+<p><a href="my%20url" title="title">Foo bar</a></p>
+````````````````````````````````
+
+
+The title may extend over multiple lines:
+
+```````````````````````````````` example
+[foo]: /url '
+title
+line1
+line2
+'
+
+[foo]
+.
+<p><a href="/url" title="
+title
+line1
+line2
+">foo</a></p>
+````````````````````````````````
+
+
+However, it may not contain a [blank line]:
+
+```````````````````````````````` example
+[foo]: /url 'title
+
+with blank line'
+
+[foo]
+.
+<p>[foo]: /url 'title</p>
+<p>with blank line'</p>
+<p>[foo]</p>
+````````````````````````````````
+
+
+The title may be omitted:
+
+```````````````````````````````` example
+[foo]:
+/url
+
+[foo]
+.
+<p><a href="/url">foo</a></p>
+````````````````````````````````
+
+
+The link destination may not be omitted:
+
+```````````````````````````````` example
+[foo]:
+
+[foo]
+.
+<p>[foo]:</p>
+<p>[foo]</p>
+````````````````````````````````
+
+However, an empty link destination may be specified using
+angle brackets:
+
+```````````````````````````````` example
+[foo]: <>
+
+[foo]
+.
+<p><a href="">foo</a></p>
+````````````````````````````````
+
+The title must be separated from the link destination by
+whitespace:
+
+```````````````````````````````` example
+[foo]: <bar>(baz)
+
+[foo]
+.
+<p>[foo]: <bar>(baz)</p>
+<p>[foo]</p>
+````````````````````````````````
+
+
+Both title and destination can contain backslash escapes
+and literal backslashes:
+
+```````````````````````````````` example
+[foo]: /url\bar\*baz "foo\"bar\baz"
+
+[foo]
+.
+<p><a href="/url%5Cbar*baz" title="foo&quot;bar\baz">foo</a></p>
+````````````````````````````````
+
+
+A link can come before its corresponding definition:
+
+```````````````````````````````` example
+[foo]
+
+[foo]: url
+.
+<p><a href="url">foo</a></p>
+````````````````````````````````
+
+
+If there are several matching definitions, the first one takes
+precedence:
+
+```````````````````````````````` example
+[foo]
+
+[foo]: first
+[foo]: second
+.
+<p><a href="first">foo</a></p>
+````````````````````````````````
+
+
+As noted in the section on [Links], matching of labels is
+case-insensitive (see [matches]).
+
+```````````````````````````````` example
+[FOO]: /url
+
+[Foo]
+.
+<p><a href="/url">Foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[ΑΓΩ]: /φου
+
+[αγω]
+.
+<p><a href="/%CF%86%CE%BF%CF%85">αγω</a></p>
+````````````````````````````````
+
+
+Here is a link reference definition with no corresponding link.
+It contributes nothing to the document.
+
+```````````````````````````````` example
+[foo]: /url
+.
+````````````````````````````````
+
+
+Here is another one:
+
+```````````````````````````````` example
+[
+foo
+]: /url
+bar
+.
+<p>bar</p>
+````````````````````````````````
+
+
+This is not a link reference definition, because there are
+[non-whitespace characters] after the title:
+
+```````````````````````````````` example
+[foo]: /url "title" ok
+.
+<p>[foo]: /url &quot;title&quot; ok</p>
+````````````````````````````````
+
+
+This is a link reference definition, but it has no title:
+
+```````````````````````````````` example
+[foo]: /url
+"title" ok
+.
+<p>&quot;title&quot; ok</p>
+````````````````````````````````
+
+
+This is not a link reference definition, because it is indented
+four spaces:
+
+```````````````````````````````` example
+ [foo]: /url "title"
+
+[foo]
+.
+<pre><code>[foo]: /url &quot;title&quot;
+</code></pre>
+<p>[foo]</p>
+````````````````````````````````
+
+
+This is not a link reference definition, because it occurs inside
+a code block:
+
+```````````````````````````````` example
+```
+[foo]: /url
+```
+
+[foo]
+.
+<pre><code>[foo]: /url
+</code></pre>
+<p>[foo]</p>
+````````````````````````````````
+
+
+A [link reference definition] cannot interrupt a paragraph.
+
+```````````````````````````````` example
+Foo
+[bar]: /baz
+
+[bar]
+.
+<p>Foo
+[bar]: /baz</p>
+<p>[bar]</p>
+````````````````````````````````
+
+
+However, it can directly follow other block elements, such as headings
+and thematic breaks, and it need not be followed by a blank line.
+
+```````````````````````````````` example
+# [Foo]
+[foo]: /url
+> bar
+.
+<h1><a href="/url">Foo</a></h1>
+<blockquote>
+<p>bar</p>
+</blockquote>
+````````````````````````````````
+
+```````````````````````````````` example
+[foo]: /url
+bar
+===
+[foo]
+.
+<h1>bar</h1>
+<p><a href="/url">foo</a></p>
+````````````````````````````````
+
+```````````````````````````````` example
+[foo]: /url
+===
+[foo]
+.
+<p>===
+<a href="/url">foo</a></p>
+````````````````````````````````
+
+
+Several [link reference definitions]
+can occur one after another, without intervening blank lines.
+
+```````````````````````````````` example
+[foo]: /foo-url "foo"
+[bar]: /bar-url
+ "bar"
+[baz]: /baz-url
+
+[foo],
+[bar],
+[baz]
+.
+<p><a href="/foo-url" title="foo">foo</a>,
+<a href="/bar-url" title="bar">bar</a>,
+<a href="/baz-url">baz</a></p>
+````````````````````````````````
+
+
+[Link reference definitions] can occur
+inside block containers, like lists and block quotations. They
+affect the entire document, not just the container in which they
+are defined:
+
+```````````````````````````````` example
+[foo]
+
+> [foo]: /url
+.
+<p><a href="/url">foo</a></p>
+<blockquote>
+</blockquote>
+````````````````````````````````
+
+
+Whether something is a [link reference definition] is
+independent of whether the link reference it defines is
+used in the document. Thus, for example, the following
+document contains just a link reference definition, and
+no visible content:
+
+```````````````````````````````` example
+[foo]: /url
+.
+````````````````````````````````
+
+
+## Paragraphs
+
+A sequence of non-blank lines that cannot be interpreted as other
+kinds of blocks forms a [paragraph](@).
+The contents of the paragraph are the result of parsing the
+paragraph's raw content as inlines. The paragraph's raw content
+is formed by concatenating the lines and removing initial and final
+[whitespace].
+
+A simple example with two paragraphs:
+
+```````````````````````````````` example
+aaa
+
+bbb
+.
+<p>aaa</p>
+<p>bbb</p>
+````````````````````````````````
+
+
+Paragraphs can contain multiple lines, but no blank lines:
+
+```````````````````````````````` example
+aaa
+bbb
+
+ccc
+ddd
+.
+<p>aaa
+bbb</p>
+<p>ccc
+ddd</p>
+````````````````````````````````
+
+
+Multiple blank lines between paragraphs have no effect:
+
+```````````````````````````````` example
+aaa
+
+
+bbb
+.
+<p>aaa</p>
+<p>bbb</p>
+````````````````````````````````
+
+
+Leading spaces are skipped:
+
+```````````````````````````````` example
+ aaa
+ bbb
+.
+<p>aaa
+bbb</p>
+````````````````````````````````
+
+
+Lines after the first may be indented any amount, since indented
+code blocks cannot interrupt paragraphs.
+
+```````````````````````````````` example
+aaa
+ bbb
+ ccc
+.
+<p>aaa
+bbb
+ccc</p>
+````````````````````````````````
+
+
+However, the first line may be indented at most three spaces,
+or an indented code block will be triggered:
+
+```````````````````````````````` example
+ aaa
+bbb
+.
+<p>aaa
+bbb</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ aaa
+bbb
+.
+<pre><code>aaa
+</code></pre>
+<p>bbb</p>
+````````````````````````````````
+
+
+Final spaces are stripped before inline parsing, so a paragraph
+that ends with two or more spaces will not end with a [hard line
+break]:
+
+```````````````````````````````` example
+aaa
+bbb
+.
+<p>aaa<br />
+bbb</p>
+````````````````````````````````
+
+
+## Blank lines
+
+[Blank lines] between block-level elements are ignored,
+except for the role they play in determining whether a [list]
+is [tight] or [loose].
+
+Blank lines at the beginning and end of the document are also ignored.
+
+```````````````````````````````` example
+
+
+aaa
+
+
+# aaa
+
+
+.
+<p>aaa</p>
+<h1>aaa</h1>
+````````````````````````````````
+
+
+
+# Container blocks
+
+A [container block](#container-blocks) is a block that has other
+blocks as its contents. There are two basic kinds of container blocks:
+[block quotes] and [list items].
+[Lists] are meta-containers for [list items].
+
+We define the syntax for container blocks recursively. The general
+form of the definition is:
+
+> If X is a sequence of blocks, then the result of
+> transforming X in such-and-such a way is a container of type Y
+> with these blocks as its content.
+
+So, we explain what counts as a block quote or list item by explaining
+how these can be *generated* from their contents. This should suffice
+to define the syntax, although it does not give a recipe for *parsing*
+these constructions. (A recipe is provided below in the section entitled
+[A parsing strategy](#appendix-a-parsing-strategy).)
+
+## Block quotes
+
+A [block quote marker](@)
+consists of 0-3 spaces of initial indent, plus (a) the character `>` together
+with a following space, or (b) a single character `>` not followed by a space.
+
+The following rules define [block quotes]:
+
+1. **Basic case.** If a string of lines *Ls* constitute a sequence
+ of blocks *Bs*, then the result of prepending a [block quote
+ marker] to the beginning of each line in *Ls*
+ is a [block quote](#block-quotes) containing *Bs*.
+
+2. **Laziness.** If a string of lines *Ls* constitute a [block
+ quote](#block-quotes) with contents *Bs*, then the result of deleting
+ the initial [block quote marker] from one or
+ more lines in which the next [non-whitespace character] after the [block
+ quote marker] is [paragraph continuation
+ text] is a block quote with *Bs* as its content.
+ [Paragraph continuation text](@) is text
+ that will be parsed as part of the content of a paragraph, but does
+ not occur at the beginning of the paragraph.
+
+3. **Consecutiveness.** A document cannot contain two [block
+ quotes] in a row unless there is a [blank line] between them.
+
+Nothing else counts as a [block quote](#block-quotes).
+
+Here is a simple example:
+
+```````````````````````````````` example
+> # Foo
+> bar
+> baz
+.
+<blockquote>
+<h1>Foo</h1>
+<p>bar
+baz</p>
+</blockquote>
+````````````````````````````````
+
+
+The spaces after the `>` characters can be omitted:
+
+```````````````````````````````` example
+># Foo
+>bar
+> baz
+.
+<blockquote>
+<h1>Foo</h1>
+<p>bar
+baz</p>
+</blockquote>
+````````````````````````````````
+
+
+The `>` characters can be indented 1-3 spaces:
+
+```````````````````````````````` example
+ > # Foo
+ > bar
+ > baz
+.
+<blockquote>
+<h1>Foo</h1>
+<p>bar
+baz</p>
+</blockquote>
+````````````````````````````````
+
+
+Four spaces gives us a code block:
+
+```````````````````````````````` example
+ > # Foo
+ > bar
+ > baz
+.
+<pre><code>&gt; # Foo
+&gt; bar
+&gt; baz
+</code></pre>
+````````````````````````````````
+
+
+The Laziness clause allows us to omit the `>` before
+[paragraph continuation text]:
+
+```````````````````````````````` example
+> # Foo
+> bar
+baz
+.
+<blockquote>
+<h1>Foo</h1>
+<p>bar
+baz</p>
+</blockquote>
+````````````````````````````````
+
+
+A block quote can contain some lazy and some non-lazy
+continuation lines:
+
+```````````````````````````````` example
+> bar
+baz
+> foo
+.
+<blockquote>
+<p>bar
+baz
+foo</p>
+</blockquote>
+````````````````````````````````
+
+
+Laziness only applies to lines that would have been continuations of
+paragraphs had they been prepended with [block quote markers].
+For example, the `> ` cannot be omitted in the second line of
+
+``` markdown
+> foo
+> ---
+```
+
+without changing the meaning:
+
+```````````````````````````````` example
+> foo
+---
+.
+<blockquote>
+<p>foo</p>
+</blockquote>
+<hr />
+````````````````````````````````
+
+
+Similarly, if we omit the `> ` in the second line of
+
+``` markdown
+> - foo
+> - bar
+```
+
+then the block quote ends after the first line:
+
+```````````````````````````````` example
+> - foo
+- bar
+.
+<blockquote>
+<ul>
+<li>foo</li>
+</ul>
+</blockquote>
+<ul>
+<li>bar</li>
+</ul>
+````````````````````````````````
+
+
+For the same reason, we can't omit the `> ` in front of
+subsequent lines of an indented or fenced code block:
+
+```````````````````````````````` example
+> foo
+ bar
+.
+<blockquote>
+<pre><code>foo
+</code></pre>
+</blockquote>
+<pre><code>bar
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+> ```
+foo
+```
+.
+<blockquote>
+<pre><code></code></pre>
+</blockquote>
+<p>foo</p>
+<pre><code></code></pre>
+````````````````````````````````
+
+
+Note that in the following case, we have a [lazy
+continuation line]:
+
+```````````````````````````````` example
+> foo
+ - bar
+.
+<blockquote>
+<p>foo
+- bar</p>
+</blockquote>
+````````````````````````````````
+
+
+To see why, note that in
+
+```markdown
+> foo
+> - bar
+```
+
+the `- bar` is indented too far to start a list, and can't
+be an indented code block because indented code blocks cannot
+interrupt paragraphs, so it is [paragraph continuation text].
+
+A block quote can be empty:
+
+```````````````````````````````` example
+>
+.
+<blockquote>
+</blockquote>
+````````````````````````````````
+
+
+```````````````````````````````` example
+>
+>
+>
+.
+<blockquote>
+</blockquote>
+````````````````````````````````
+
+
+A block quote can have initial or final blank lines:
+
+```````````````````````````````` example
+>
+> foo
+>
+.
+<blockquote>
+<p>foo</p>
+</blockquote>
+````````````````````````````````
+
+
+A blank line always separates block quotes:
+
+```````````````````````````````` example
+> foo
+
+> bar
+.
+<blockquote>
+<p>foo</p>
+</blockquote>
+<blockquote>
+<p>bar</p>
+</blockquote>
+````````````````````````````````
+
+
+(Most current Markdown implementations, including John Gruber's
+original `Markdown.pl`, will parse this example as a single block quote
+with two paragraphs. But it seems better to allow the author to decide
+whether two block quotes or one are wanted.)
+
+Consecutiveness means that if we put these block quotes together,
+we get a single block quote:
+
+```````````````````````````````` example
+> foo
+> bar
+.
+<blockquote>
+<p>foo
+bar</p>
+</blockquote>
+````````````````````````````````
+
+
+To get a block quote with two paragraphs, use:
+
+```````````````````````````````` example
+> foo
+>
+> bar
+.
+<blockquote>
+<p>foo</p>
+<p>bar</p>
+</blockquote>
+````````````````````````````````
+
+
+Block quotes can interrupt paragraphs:
+
+```````````````````````````````` example
+foo
+> bar
+.
+<p>foo</p>
+<blockquote>
+<p>bar</p>
+</blockquote>
+````````````````````````````````
+
+
+In general, blank lines are not needed before or after block
+quotes:
+
+```````````````````````````````` example
+> aaa
+***
+> bbb
+.
+<blockquote>
+<p>aaa</p>
+</blockquote>
+<hr />
+<blockquote>
+<p>bbb</p>
+</blockquote>
+````````````````````````````````
+
+
+However, because of laziness, a blank line is needed between
+a block quote and a following paragraph:
+
+```````````````````````````````` example
+> bar
+baz
+.
+<blockquote>
+<p>bar
+baz</p>
+</blockquote>
+````````````````````````````````
+
+
+```````````````````````````````` example
+> bar
+
+baz
+.
+<blockquote>
+<p>bar</p>
+</blockquote>
+<p>baz</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+> bar
+>
+baz
+.
+<blockquote>
+<p>bar</p>
+</blockquote>
+<p>baz</p>
+````````````````````````````````
+
+
+It is a consequence of the Laziness rule that any number
+of initial `>`s may be omitted on a continuation line of a
+nested block quote:
+
+```````````````````````````````` example
+> > > foo
+bar
+.
+<blockquote>
+<blockquote>
+<blockquote>
+<p>foo
+bar</p>
+</blockquote>
+</blockquote>
+</blockquote>
+````````````````````````````````
+
+
+```````````````````````````````` example
+>>> foo
+> bar
+>>baz
+.
+<blockquote>
+<blockquote>
+<blockquote>
+<p>foo
+bar
+baz</p>
+</blockquote>
+</blockquote>
+</blockquote>
+````````````````````````````````
+
+
+When including an indented code block in a block quote,
+remember that the [block quote marker] includes
+both the `>` and a following space. So *five spaces* are needed after
+the `>`:
+
+```````````````````````````````` example
+> code
+
+> not code
+.
+<blockquote>
+<pre><code>code
+</code></pre>
+</blockquote>
+<blockquote>
+<p>not code</p>
+</blockquote>
+````````````````````````````````
+
+
+
+## List items
+
+A [list marker](@) is a
+[bullet list marker] or an [ordered list marker].
+
+A [bullet list marker](@)
+is a `-`, `+`, or `*` character.
+
+An [ordered list marker](@)
+is a sequence of 1--9 arabic digits (`0-9`), followed by either a
+`.` character or a `)` character. (The reason for the length
+limit is that with 10 digits we start seeing integer overflows
+in some browsers.)
+
+The following rules define [list items]:
+
+1. **Basic case.** If a sequence of lines *Ls* constitute a sequence of
+ blocks *Bs* starting with a [non-whitespace character], and *M* is a
+ list marker of width *W* followed by 1 ≤ *N* ≤ 4 spaces, then the result
+ of prepending *M* and the following spaces to the first line of
+ *Ls*, and indenting subsequent lines of *Ls* by *W + N* spaces, is a
+ list item with *Bs* as its contents. The type of the list item
+ (bullet or ordered) is determined by the type of its list marker.
+ If the list item is ordered, then it is also assigned a start
+ number, based on the ordered list marker.
+
+ Exceptions:
+
+ 1. When the first list item in a [list] interrupts
+ a paragraph---that is, when it starts on a line that would
+ otherwise count as [paragraph continuation text]---then (a)
+ the lines *Ls* must not begin with a blank line, and (b) if
+ the list item is ordered, the start number must be 1.
+ 2. If any line is a [thematic break][thematic breaks] then
+ that line is not a list item.
+
+For example, let *Ls* be the lines
+
+```````````````````````````````` example
+A paragraph
+with two lines.
+
+ indented code
+
+> A block quote.
+.
+<p>A paragraph
+with two lines.</p>
+<pre><code>indented code
+</code></pre>
+<blockquote>
+<p>A block quote.</p>
+</blockquote>
+````````````````````````````````
+
+
+And let *M* be the marker `1.`, and *N* = 2. Then rule #1 says
+that the following is an ordered list item with start number 1,
+and the same contents as *Ls*:
+
+```````````````````````````````` example
+1. A paragraph
+ with two lines.
+
+ indented code
+
+ > A block quote.
+.
+<ol>
+<li>
+<p>A paragraph
+with two lines.</p>
+<pre><code>indented code
+</code></pre>
+<blockquote>
+<p>A block quote.</p>
+</blockquote>
+</li>
+</ol>
+````````````````````````````````
+
+
+The most important thing to notice is that the position of
+the text after the list marker determines how much indentation
+is needed in subsequent blocks in the list item. If the list
+marker takes up two spaces, and there are three spaces between
+the list marker and the next [non-whitespace character], then blocks
+must be indented five spaces in order to fall under the list
+item.
+
+Here are some examples showing how far content must be indented to be
+put under the list item:
+
+```````````````````````````````` example
+- one
+
+ two
+.
+<ul>
+<li>one</li>
+</ul>
+<p>two</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- one
+
+ two
+.
+<ul>
+<li>
+<p>one</p>
+<p>two</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ - one
+
+ two
+.
+<ul>
+<li>one</li>
+</ul>
+<pre><code> two
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ - one
+
+ two
+.
+<ul>
+<li>
+<p>one</p>
+<p>two</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+It is tempting to think of this in terms of columns: the continuation
+blocks must be indented at least to the column of the first
+[non-whitespace character] after the list marker. However, that is not quite right.
+The spaces after the list marker determine how much relative indentation
+is needed. Which column this indentation reaches will depend on
+how the list item is embedded in other constructions, as shown by
+this example:
+
+```````````````````````````````` example
+ > > 1. one
+>>
+>> two
+.
+<blockquote>
+<blockquote>
+<ol>
+<li>
+<p>one</p>
+<p>two</p>
+</li>
+</ol>
+</blockquote>
+</blockquote>
+````````````````````````````````
+
+
+Here `two` occurs in the same column as the list marker `1.`,
+but is actually contained in the list item, because there is
+sufficient indentation after the last containing blockquote marker.
+
+The converse is also possible. In the following example, the word `two`
+occurs far to the right of the initial text of the list item, `one`, but
+it is not considered part of the list item, because it is not indented
+far enough past the blockquote marker:
+
+```````````````````````````````` example
+>>- one
+>>
+ > > two
+.
+<blockquote>
+<blockquote>
+<ul>
+<li>one</li>
+</ul>
+<p>two</p>
+</blockquote>
+</blockquote>
+````````````````````````````````
+
+
+Note that at least one space is needed between the list marker and
+any following content, so these are not list items:
+
+```````````````````````````````` example
+-one
+
+2.two
+.
+<p>-one</p>
+<p>2.two</p>
+````````````````````````````````
+
+
+A list item may contain blocks that are separated by more than
+one blank line.
+
+```````````````````````````````` example
+- foo
+
+
+ bar
+.
+<ul>
+<li>
+<p>foo</p>
+<p>bar</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+A list item may contain any kind of block:
+
+```````````````````````````````` example
+1. foo
+
+ ```
+ bar
+ ```
+
+ baz
+
+ > bam
+.
+<ol>
+<li>
+<p>foo</p>
+<pre><code>bar
+</code></pre>
+<p>baz</p>
+<blockquote>
+<p>bam</p>
+</blockquote>
+</li>
+</ol>
+````````````````````````````````
+
+
+A list item that contains an indented code block will preserve
+empty lines within the code block verbatim.
+
+```````````````````````````````` example
+- Foo
+
+ bar
+
+
+ baz
+.
+<ul>
+<li>
+<p>Foo</p>
+<pre><code>bar
+
+
+baz
+</code></pre>
+</li>
+</ul>
+````````````````````````````````
+
+Note that ordered list start numbers must be nine digits or less:
+
+```````````````````````````````` example
+123456789. ok
+.
+<ol start="123456789">
+<li>ok</li>
+</ol>
+````````````````````````````````
+
+
+```````````````````````````````` example
+1234567890. not ok
+.
+<p>1234567890. not ok</p>
+````````````````````````````````
+
+
+A start number may begin with 0s:
+
+```````````````````````````````` example
+0. ok
+.
+<ol start="0">
+<li>ok</li>
+</ol>
+````````````````````````````````
+
+
+```````````````````````````````` example
+003. ok
+.
+<ol start="3">
+<li>ok</li>
+</ol>
+````````````````````````````````
+
+
+A start number may not be negative:
+
+```````````````````````````````` example
+-1. not ok
+.
+<p>-1. not ok</p>
+````````````````````````````````
+
+
+
+2. **Item starting with indented code.** If a sequence of lines *Ls*
+ constitute a sequence of blocks *Bs* starting with an indented code
+ block, and *M* is a list marker of width *W* followed by
+ one space, then the result of prepending *M* and the following
+ space to the first line of *Ls*, and indenting subsequent lines of
+ *Ls* by *W + 1* spaces, is a list item with *Bs* as its contents.
+ If a line is empty, then it need not be indented. The type of the
+ list item (bullet or ordered) is determined by the type of its list
+ marker. If the list item is ordered, then it is also assigned a
+ start number, based on the ordered list marker.
+
+An indented code block will have to be indented four spaces beyond
+the edge of the region where text will be included in the list item.
+In the following case that is 6 spaces:
+
+```````````````````````````````` example
+- foo
+
+ bar
+.
+<ul>
+<li>
+<p>foo</p>
+<pre><code>bar
+</code></pre>
+</li>
+</ul>
+````````````````````````````````
+
+
+And in this case it is 11 spaces:
+
+```````````````````````````````` example
+ 10. foo
+
+ bar
+.
+<ol start="10">
+<li>
+<p>foo</p>
+<pre><code>bar
+</code></pre>
+</li>
+</ol>
+````````````````````````````````
+
+
+If the *first* block in the list item is an indented code block,
+then by rule #2, the contents must be indented *one* space after the
+list marker:
+
+```````````````````````````````` example
+ indented code
+
+paragraph
+
+ more code
+.
+<pre><code>indented code
+</code></pre>
+<p>paragraph</p>
+<pre><code>more code
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+1. indented code
+
+ paragraph
+
+ more code
+.
+<ol>
+<li>
+<pre><code>indented code
+</code></pre>
+<p>paragraph</p>
+<pre><code>more code
+</code></pre>
+</li>
+</ol>
+````````````````````````````````
+
+
+Note that an additional space indent is interpreted as space
+inside the code block:
+
+```````````````````````````````` example
+1. indented code
+
+ paragraph
+
+ more code
+.
+<ol>
+<li>
+<pre><code> indented code
+</code></pre>
+<p>paragraph</p>
+<pre><code>more code
+</code></pre>
+</li>
+</ol>
+````````````````````````````````
+
+
+Note that rules #1 and #2 only apply to two cases: (a) cases
+in which the lines to be included in a list item begin with a
+[non-whitespace character], and (b) cases in which
+they begin with an indented code
+block. In a case like the following, where the first block begins with
+a three-space indent, the rules do not allow us to form a list item by
+indenting the whole thing and prepending a list marker:
+
+```````````````````````````````` example
+ foo
+
+bar
+.
+<p>foo</p>
+<p>bar</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- foo
+
+ bar
+.
+<ul>
+<li>foo</li>
+</ul>
+<p>bar</p>
+````````````````````````````````
+
+
+This is not a significant restriction, because when a block begins
+with 1-3 spaces indent, the indentation can always be removed without
+a change in interpretation, allowing rule #1 to be applied. So, in
+the above case:
+
+```````````````````````````````` example
+- foo
+
+ bar
+.
+<ul>
+<li>
+<p>foo</p>
+<p>bar</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+3. **Item starting with a blank line.** If a sequence of lines *Ls*
+ starting with a single [blank line] constitute a (possibly empty)
+ sequence of blocks *Bs*, not separated from each other by more than
+ one blank line, and *M* is a list marker of width *W*,
+ then the result of prepending *M* to the first line of *Ls*, and
+ indenting subsequent lines of *Ls* by *W + 1* spaces, is a list
+ item with *Bs* as its contents.
+ If a line is empty, then it need not be indented. The type of the
+ list item (bullet or ordered) is determined by the type of its list
+ marker. If the list item is ordered, then it is also assigned a
+ start number, based on the ordered list marker.
+
+Here are some list items that start with a blank line but are not empty:
+
+```````````````````````````````` example
+-
+ foo
+-
+ ```
+ bar
+ ```
+-
+ baz
+.
+<ul>
+<li>foo</li>
+<li>
+<pre><code>bar
+</code></pre>
+</li>
+<li>
+<pre><code>baz
+</code></pre>
+</li>
+</ul>
+````````````````````````````````
+
+When the list item starts with a blank line, the number of spaces
+following the list marker doesn't change the required indentation:
+
+```````````````````````````````` example
+-
+ foo
+.
+<ul>
+<li>foo</li>
+</ul>
+````````````````````````````````
+
+
+A list item can begin with at most one blank line.
+In the following example, `foo` is not part of the list
+item:
+
+```````````````````````````````` example
+-
+
+ foo
+.
+<ul>
+<li></li>
+</ul>
+<p>foo</p>
+````````````````````````````````
+
+
+Here is an empty bullet list item:
+
+```````````````````````````````` example
+- foo
+-
+- bar
+.
+<ul>
+<li>foo</li>
+<li></li>
+<li>bar</li>
+</ul>
+````````````````````````````````
+
+
+It does not matter whether there are spaces following the [list marker]:
+
+```````````````````````````````` example
+- foo
+-
+- bar
+.
+<ul>
+<li>foo</li>
+<li></li>
+<li>bar</li>
+</ul>
+````````````````````````````````
+
+
+Here is an empty ordered list item:
+
+```````````````````````````````` example
+1. foo
+2.
+3. bar
+.
+<ol>
+<li>foo</li>
+<li></li>
+<li>bar</li>
+</ol>
+````````````````````````````````
+
+
+A list may start or end with an empty list item:
+
+```````````````````````````````` example
+*
+.
+<ul>
+<li></li>
+</ul>
+````````````````````````````````
+
+However, an empty list item cannot interrupt a paragraph:
+
+```````````````````````````````` example
+foo
+*
+
+foo
+1.
+.
+<p>foo
+*</p>
+<p>foo
+1.</p>
+````````````````````````````````
+
+
+4. **Indentation.** If a sequence of lines *Ls* constitutes a list item
+ according to rule #1, #2, or #3, then the result of indenting each line
+ of *Ls* by 1-3 spaces (the same for each line) also constitutes a
+ list item with the same contents and attributes. If a line is
+ empty, then it need not be indented.
+
+Indented one space:
+
+```````````````````````````````` example
+ 1. A paragraph
+ with two lines.
+
+ indented code
+
+ > A block quote.
+.
+<ol>
+<li>
+<p>A paragraph
+with two lines.</p>
+<pre><code>indented code
+</code></pre>
+<blockquote>
+<p>A block quote.</p>
+</blockquote>
+</li>
+</ol>
+````````````````````````````````
+
+
+Indented two spaces:
+
+```````````````````````````````` example
+ 1. A paragraph
+ with two lines.
+
+ indented code
+
+ > A block quote.
+.
+<ol>
+<li>
+<p>A paragraph
+with two lines.</p>
+<pre><code>indented code
+</code></pre>
+<blockquote>
+<p>A block quote.</p>
+</blockquote>
+</li>
+</ol>
+````````````````````````````````
+
+
+Indented three spaces:
+
+```````````````````````````````` example
+ 1. A paragraph
+ with two lines.
+
+ indented code
+
+ > A block quote.
+.
+<ol>
+<li>
+<p>A paragraph
+with two lines.</p>
+<pre><code>indented code
+</code></pre>
+<blockquote>
+<p>A block quote.</p>
+</blockquote>
+</li>
+</ol>
+````````````````````````````````
+
+
+Four spaces indent gives a code block:
+
+```````````````````````````````` example
+ 1. A paragraph
+ with two lines.
+
+ indented code
+
+ > A block quote.
+.
+<pre><code>1. A paragraph
+ with two lines.
+
+ indented code
+
+ &gt; A block quote.
+</code></pre>
+````````````````````````````````
+
+
+
+5. **Laziness.** If a string of lines *Ls* constitute a [list
+ item](#list-items) with contents *Bs*, then the result of deleting
+ some or all of the indentation from one or more lines in which the
+ next [non-whitespace character] after the indentation is
+ [paragraph continuation text] is a
+ list item with the same contents and attributes. The unindented
+ lines are called
+ [lazy continuation line](@)s.
+
+Here is an example with [lazy continuation lines]:
+
+```````````````````````````````` example
+ 1. A paragraph
+with two lines.
+
+ indented code
+
+ > A block quote.
+.
+<ol>
+<li>
+<p>A paragraph
+with two lines.</p>
+<pre><code>indented code
+</code></pre>
+<blockquote>
+<p>A block quote.</p>
+</blockquote>
+</li>
+</ol>
+````````````````````````````````
+
+
+Indentation can be partially deleted:
+
+```````````````````````````````` example
+ 1. A paragraph
+ with two lines.
+.
+<ol>
+<li>A paragraph
+with two lines.</li>
+</ol>
+````````````````````````````````
+
+
+These examples show how laziness can work in nested structures:
+
+```````````````````````````````` example
+> 1. > Blockquote
+continued here.
+.
+<blockquote>
+<ol>
+<li>
+<blockquote>
+<p>Blockquote
+continued here.</p>
+</blockquote>
+</li>
+</ol>
+</blockquote>
+````````````````````````````````
+
+
+```````````````````````````````` example
+> 1. > Blockquote
+> continued here.
+.
+<blockquote>
+<ol>
+<li>
+<blockquote>
+<p>Blockquote
+continued here.</p>
+</blockquote>
+</li>
+</ol>
+</blockquote>
+````````````````````````````````
+
+
+
+6. **That's all.** Nothing that is not counted as a list item by rules
+ #1--5 counts as a [list item](#list-items).
+
+The rules for sublists follow from the general rules
+[above][List items]. A sublist must be indented the same number
+of spaces a paragraph would need to be in order to be included
+in the list item.
+
+So, in this case we need two spaces indent:
+
+```````````````````````````````` example
+- foo
+ - bar
+ - baz
+ - boo
+.
+<ul>
+<li>foo
+<ul>
+<li>bar
+<ul>
+<li>baz
+<ul>
+<li>boo</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+````````````````````````````````
+
+
+One is not enough:
+
+```````````````````````````````` example
+- foo
+ - bar
+ - baz
+ - boo
+.
+<ul>
+<li>foo</li>
+<li>bar</li>
+<li>baz</li>
+<li>boo</li>
+</ul>
+````````````````````````````````
+
+
+Here we need four, because the list marker is wider:
+
+```````````````````````````````` example
+10) foo
+ - bar
+.
+<ol start="10">
+<li>foo
+<ul>
+<li>bar</li>
+</ul>
+</li>
+</ol>
+````````````````````````````````
+
+
+Three is not enough:
+
+```````````````````````````````` example
+10) foo
+ - bar
+.
+<ol start="10">
+<li>foo</li>
+</ol>
+<ul>
+<li>bar</li>
+</ul>
+````````````````````````````````
+
+
+A list may be the first block in a list item:
+
+```````````````````````````````` example
+- - foo
+.
+<ul>
+<li>
+<ul>
+<li>foo</li>
+</ul>
+</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+1. - 2. foo
+.
+<ol>
+<li>
+<ul>
+<li>
+<ol start="2">
+<li>foo</li>
+</ol>
+</li>
+</ul>
+</li>
+</ol>
+````````````````````````````````
+
+
+A list item can contain a heading:
+
+```````````````````````````````` example
+- # Foo
+- Bar
+ ---
+ baz
+.
+<ul>
+<li>
+<h1>Foo</h1>
+</li>
+<li>
+<h2>Bar</h2>
+baz</li>
+</ul>
+````````````````````````````````
+
+
+### Motivation
+
+John Gruber's Markdown spec says the following about list items:
+
+1. "List markers typically start at the left margin, but may be indented
+ by up to three spaces. List markers must be followed by one or more
+ spaces or a tab."
+
+2. "To make lists look nice, you can wrap items with hanging indents....
+ But if you don't want to, you don't have to."
+
+3. "List items may consist of multiple paragraphs. Each subsequent
+ paragraph in a list item must be indented by either 4 spaces or one
+ tab."
+
+4. "It looks nice if you indent every line of the subsequent paragraphs,
+ but here again, Markdown will allow you to be lazy."
+
+5. "To put a blockquote within a list item, the blockquote's `>`
+ delimiters need to be indented."
+
+6. "To put a code block within a list item, the code block needs to be
+ indented twice — 8 spaces or two tabs."
+
+These rules specify that a paragraph under a list item must be indented
+four spaces (presumably, from the left margin, rather than the start of
+the list marker, but this is not said), and that code under a list item
+must be indented eight spaces instead of the usual four. They also say
+that a block quote must be indented, but not by how much; however, the
+example given has four spaces indentation. Although nothing is said
+about other kinds of block-level content, it is certainly reasonable to
+infer that *all* block elements under a list item, including other
+lists, must be indented four spaces. This principle has been called the
+*four-space rule*.
+
+The four-space rule is clear and principled, and if the reference
+implementation `Markdown.pl` had followed it, it probably would have
+become the standard. However, `Markdown.pl` allowed paragraphs and
+sublists to start with only two spaces indentation, at least on the
+outer level. Worse, its behavior was inconsistent: a sublist of an
+outer-level list needed two spaces indentation, but a sublist of this
+sublist needed three spaces. It is not surprising, then, that different
+implementations of Markdown have developed very different rules for
+determining what comes under a list item. (Pandoc and python-Markdown,
+for example, stuck with Gruber's syntax description and the four-space
+rule, while discount, redcarpet, marked, PHP Markdown, and others
+followed `Markdown.pl`'s behavior more closely.)
+
+Unfortunately, given the divergences between implementations, there
+is no way to give a spec for list items that will be guaranteed not
+to break any existing documents. However, the spec given here should
+correctly handle lists formatted with either the four-space rule or
+the more forgiving `Markdown.pl` behavior, provided they are laid out
+in a way that is natural for a human to read.
+
+The strategy here is to let the width and indentation of the list marker
+determine the indentation necessary for blocks to fall under the list
+item, rather than having a fixed and arbitrary number. The writer can
+think of the body of the list item as a unit which gets indented to the
+right enough to fit the list marker (and any indentation on the list
+marker). (The laziness rule, #5, then allows continuation lines to be
+unindented if needed.)
+
+This rule is superior, we claim, to any rule requiring a fixed level of
+indentation from the margin. The four-space rule is clear but
+unnatural. It is quite unintuitive that
+
+``` markdown
+- foo
+
+ bar
+
+ - baz
+```
+
+should be parsed as two lists with an intervening paragraph,
+
+``` html
+<ul>
+<li>foo</li>
+</ul>
+<p>bar</p>
+<ul>
+<li>baz</li>
+</ul>
+```
+
+as the four-space rule demands, rather than a single list,
+
+``` html
+<ul>
+<li>
+<p>foo</p>
+<p>bar</p>
+<ul>
+<li>baz</li>
+</ul>
+</li>
+</ul>
+```
+
+The choice of four spaces is arbitrary. It can be learned, but it is
+not likely to be guessed, and it trips up beginners regularly.
+
+Would it help to adopt a two-space rule? The problem is that such
+a rule, together with the rule allowing 1--3 spaces indentation of the
+initial list marker, allows text that is indented *less than* the
+original list marker to be included in the list item. For example,
+`Markdown.pl` parses
+
+``` markdown
+ - one
+
+ two
+```
+
+as a single list item, with `two` a continuation paragraph:
+
+``` html
+<ul>
+<li>
+<p>one</p>
+<p>two</p>
+</li>
+</ul>
+```
+
+and similarly
+
+``` markdown
+> - one
+>
+> two
+```
+
+as
+
+``` html
+<blockquote>
+<ul>
+<li>
+<p>one</p>
+<p>two</p>
+</li>
+</ul>
+</blockquote>
+```
+
+This is extremely unintuitive.
+
+Rather than requiring a fixed indent from the margin, we could require
+a fixed indent (say, two spaces, or even one space) from the list marker (which
+may itself be indented). This proposal would remove the last anomaly
+discussed. Unlike the spec presented above, it would count the following
+as a list item with a subparagraph, even though the paragraph `bar`
+is not indented as far as the first paragraph `foo`:
+
+``` markdown
+ 10. foo
+
+ bar
+```
+
+Arguably this text does read like a list item with `bar` as a subparagraph,
+which may count in favor of the proposal. However, on this proposal indented
+code would have to be indented six spaces after the list marker. And this
+would break a lot of existing Markdown, which has the pattern:
+
+``` markdown
+1. foo
+
+ indented code
+```
+
+where the code is indented eight spaces. The spec above, by contrast, will
+parse this text as expected, since the code block's indentation is measured
+from the beginning of `foo`.
+
+The one case that needs special treatment is a list item that *starts*
+with indented code. How much indentation is required in that case, since
+we don't have a "first paragraph" to measure from? Rule #2 simply stipulates
+that in such cases, we require one space indentation from the list marker
+(and then the normal four spaces for the indented code). This will match the
+four-space rule in cases where the list marker plus its initial indentation
+takes four spaces (a common case), but diverge in other cases.
+
+## Lists
+
+A [list](@) is a sequence of one or more
+list items [of the same type]. The list items
+may be separated by any number of blank lines.
+
+Two list items are [of the same type](@)
+if they begin with a [list marker] of the same type.
+Two list markers are of the
+same type if (a) they are bullet list markers using the same character
+(`-`, `+`, or `*`) or (b) they are ordered list numbers with the same
+delimiter (either `.` or `)`).
+
+A list is an [ordered list](@)
+if its constituent list items begin with
+[ordered list markers], and a
+[bullet list](@) if its constituent list
+items begin with [bullet list markers].
+
+The [start number](@)
+of an [ordered list] is determined by the list number of
+its initial list item. The numbers of subsequent list items are
+disregarded.
+
+A list is [loose](@) if any of its constituent
+list items are separated by blank lines, or if any of its constituent
+list items directly contain two block-level elements with a blank line
+between them. Otherwise a list is [tight](@).
+(The difference in HTML output is that paragraphs in a loose list are
+wrapped in `<p>` tags, while paragraphs in a tight list are not.)
+
+Changing the bullet or ordered list delimiter starts a new list:
+
+```````````````````````````````` example
+- foo
+- bar
++ baz
+.
+<ul>
+<li>foo</li>
+<li>bar</li>
+</ul>
+<ul>
+<li>baz</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+1. foo
+2. bar
+3) baz
+.
+<ol>
+<li>foo</li>
+<li>bar</li>
+</ol>
+<ol start="3">
+<li>baz</li>
+</ol>
+````````````````````````````````
+
+
+In CommonMark, a list can interrupt a paragraph. That is,
+no blank line is needed to separate a paragraph from a following
+list:
+
+```````````````````````````````` example
+Foo
+- bar
+- baz
+.
+<p>Foo</p>
+<ul>
+<li>bar</li>
+<li>baz</li>
+</ul>
+````````````````````````````````
+
+`Markdown.pl` does not allow this, through fear of triggering a list
+via a numeral in a hard-wrapped line:
+
+``` markdown
+The number of windows in my house is
+14. The number of doors is 6.
+```
+
+Oddly, though, `Markdown.pl` *does* allow a blockquote to
+interrupt a paragraph, even though the same considerations might
+apply.
+
+In CommonMark, we do allow lists to interrupt paragraphs, for
+two reasons. First, it is natural and not uncommon for people
+to start lists without blank lines:
+
+``` markdown
+I need to buy
+- new shoes
+- a coat
+- a plane ticket
+```
+
+Second, we are attracted to a
+
+> [principle of uniformity](@):
+> if a chunk of text has a certain
+> meaning, it will continue to have the same meaning when put into a
+> container block (such as a list item or blockquote).
+
+(Indeed, the spec for [list items] and [block quotes] presupposes
+this principle.) This principle implies that if
+
+``` markdown
+ * I need to buy
+ - new shoes
+ - a coat
+ - a plane ticket
+```
+
+is a list item containing a paragraph followed by a nested sublist,
+as all Markdown implementations agree it is (though the paragraph
+may be rendered without `<p>` tags, since the list is "tight"),
+then
+
+``` markdown
+I need to buy
+- new shoes
+- a coat
+- a plane ticket
+```
+
+by itself should be a paragraph followed by a nested sublist.
+
+Since it is well established Markdown practice to allow lists to
+interrupt paragraphs inside list items, the [principle of
+uniformity] requires us to allow this outside list items as
+well. ([reStructuredText](http://docutils.sourceforge.net/rst.html)
+takes a different approach, requiring blank lines before lists
+even inside other list items.)
+
+In order to solve the problem of unwanted lists in paragraphs with
+hard-wrapped numerals, we allow only lists starting with `1` to
+interrupt paragraphs. Thus,
+
+```````````````````````````````` example
+The number of windows in my house is
+14. The number of doors is 6.
+.
+<p>The number of windows in my house is
+14. The number of doors is 6.</p>
+````````````````````````````````
+
+We may still get an unintended result in cases like
+
+```````````````````````````````` example
+The number of windows in my house is
+1. The number of doors is 6.
+.
+<p>The number of windows in my house is</p>
+<ol>
+<li>The number of doors is 6.</li>
+</ol>
+````````````````````````````````
+
+but this rule should prevent most spurious list captures.
+
+There can be any number of blank lines between items:
+
+```````````````````````````````` example
+- foo
+
+- bar
+
+
+- baz
+.
+<ul>
+<li>
+<p>foo</p>
+</li>
+<li>
+<p>bar</p>
+</li>
+<li>
+<p>baz</p>
+</li>
+</ul>
+````````````````````````````````
+
+```````````````````````````````` example
+- foo
+ - bar
+ - baz
+
+
+ bim
+.
+<ul>
+<li>foo
+<ul>
+<li>bar
+<ul>
+<li>
+<p>baz</p>
+<p>bim</p>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+````````````````````````````````
+
+
+To separate consecutive lists of the same type, or to separate a
+list from an indented code block that would otherwise be parsed
+as a subparagraph of the final list item, you can insert a blank HTML
+comment:
+
+```````````````````````````````` example
+- foo
+- bar
+
+<!-- -->
+
+- baz
+- bim
+.
+<ul>
+<li>foo</li>
+<li>bar</li>
+</ul>
+<!-- -->
+<ul>
+<li>baz</li>
+<li>bim</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- foo
+
+ notcode
+
+- foo
+
+<!-- -->
+
+ code
+.
+<ul>
+<li>
+<p>foo</p>
+<p>notcode</p>
+</li>
+<li>
+<p>foo</p>
+</li>
+</ul>
+<!-- -->
+<pre><code>code
+</code></pre>
+````````````````````````````````
+
+
+List items need not be indented to the same level. The following
+list items will be treated as items at the same list level,
+since none is indented enough to belong to the previous list
+item:
+
+```````````````````````````````` example
+- a
+ - b
+ - c
+ - d
+ - e
+ - f
+- g
+.
+<ul>
+<li>a</li>
+<li>b</li>
+<li>c</li>
+<li>d</li>
+<li>e</li>
+<li>f</li>
+<li>g</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+1. a
+
+ 2. b
+
+ 3. c
+.
+<ol>
+<li>
+<p>a</p>
+</li>
+<li>
+<p>b</p>
+</li>
+<li>
+<p>c</p>
+</li>
+</ol>
+````````````````````````````````
+
+Note, however, that list items may not be indented more than
+three spaces. Here `- e` is treated as a paragraph continuation
+line, because it is indented more than three spaces:
+
+```````````````````````````````` example
+- a
+ - b
+ - c
+ - d
+ - e
+.
+<ul>
+<li>a</li>
+<li>b</li>
+<li>c</li>
+<li>d
+- e</li>
+</ul>
+````````````````````````````````
+
+And here, `3. c` is treated as an indented code block,
+because it is indented four spaces and preceded by a
+blank line.
+
+```````````````````````````````` example
+1. a
+
+ 2. b
+
+ 3. c
+.
+<ol>
+<li>
+<p>a</p>
+</li>
+<li>
+<p>b</p>
+</li>
+</ol>
+<pre><code>3. c
+</code></pre>
+````````````````````````````````
+
+
+This is a loose list, because there is a blank line between
+two of the list items:
+
+```````````````````````````````` example
+- a
+- b
+
+- c
+.
+<ul>
+<li>
+<p>a</p>
+</li>
+<li>
+<p>b</p>
+</li>
+<li>
+<p>c</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+So is this, with an empty second item:
+
+```````````````````````````````` example
+* a
+*
+
+* c
+.
+<ul>
+<li>
+<p>a</p>
+</li>
+<li></li>
+<li>
+<p>c</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+These are loose lists, even though there is no space between the items,
+because one of the items directly contains two block-level elements
+with a blank line between them:
+
+```````````````````````````````` example
+- a
+- b
+
+ c
+- d
+.
+<ul>
+<li>
+<p>a</p>
+</li>
+<li>
+<p>b</p>
+<p>c</p>
+</li>
+<li>
+<p>d</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- a
+- b
+
+ [ref]: /url
+- d
+.
+<ul>
+<li>
+<p>a</p>
+</li>
+<li>
+<p>b</p>
+</li>
+<li>
+<p>d</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+This is a tight list, because the blank lines are in a code block:
+
+```````````````````````````````` example
+- a
+- ```
+ b
+
+
+ ```
+- c
+.
+<ul>
+<li>a</li>
+<li>
+<pre><code>b
+
+
+</code></pre>
+</li>
+<li>c</li>
+</ul>
+````````````````````````````````
+
+
+This is a tight list, because the blank line is between two
+paragraphs of a sublist. So the sublist is loose while
+the outer list is tight:
+
+```````````````````````````````` example
+- a
+ - b
+
+ c
+- d
+.
+<ul>
+<li>a
+<ul>
+<li>
+<p>b</p>
+<p>c</p>
+</li>
+</ul>
+</li>
+<li>d</li>
+</ul>
+````````````````````````````````
+
+
+This is a tight list, because the blank line is inside the
+block quote:
+
+```````````````````````````````` example
+* a
+ > b
+ >
+* c
+.
+<ul>
+<li>a
+<blockquote>
+<p>b</p>
+</blockquote>
+</li>
+<li>c</li>
+</ul>
+````````````````````````````````
+
+
+This list is tight, because the consecutive block elements
+are not separated by blank lines:
+
+```````````````````````````````` example
+- a
+ > b
+ ```
+ c
+ ```
+- d
+.
+<ul>
+<li>a
+<blockquote>
+<p>b</p>
+</blockquote>
+<pre><code>c
+</code></pre>
+</li>
+<li>d</li>
+</ul>
+````````````````````````````````
+
+
+A single-paragraph list is tight:
+
+```````````````````````````````` example
+- a
+.
+<ul>
+<li>a</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- a
+ - b
+.
+<ul>
+<li>a
+<ul>
+<li>b</li>
+</ul>
+</li>
+</ul>
+````````````````````````````````
+
+
+This list is loose, because of the blank line between the
+two block elements in the list item:
+
+```````````````````````````````` example
+1. ```
+ foo
+ ```
+
+ bar
+.
+<ol>
+<li>
+<pre><code>foo
+</code></pre>
+<p>bar</p>
+</li>
+</ol>
+````````````````````````````````
+
+
+Here the outer list is loose, the inner list tight:
+
+```````````````````````````````` example
+* foo
+ * bar
+
+ baz
+.
+<ul>
+<li>
+<p>foo</p>
+<ul>
+<li>bar</li>
+</ul>
+<p>baz</p>
+</li>
+</ul>
+````````````````````````````````
+
+
+```````````````````````````````` example
+- a
+ - b
+ - c
+
+- d
+ - e
+ - f
+.
+<ul>
+<li>
+<p>a</p>
+<ul>
+<li>b</li>
+<li>c</li>
+</ul>
+</li>
+<li>
+<p>d</p>
+<ul>
+<li>e</li>
+<li>f</li>
+</ul>
+</li>
+</ul>
+````````````````````````````````
+
+
+# Inlines
+
+Inlines are parsed sequentially from the beginning of the character
+stream to the end (left to right, in left-to-right languages).
+Thus, for example, in
+
+```````````````````````````````` example
+`hi`lo`
+.
+<p><code>hi</code>lo`</p>
+````````````````````````````````
+
+`hi` is parsed as code, leaving the backtick at the end as a literal
+backtick.
+
+
+## Backslash escapes
+
+Any ASCII punctuation character may be backslash-escaped:
+
+```````````````````````````````` example
+\!\"\#\$\%\&\'\(\)\*\+\,\-\.\/\:\;\<\=\>\?\@\[\\\]\^\_\`\{\|\}\~
+.
+<p>!&quot;#$%&amp;'()*+,-./:;&lt;=&gt;?@[\]^_`{|}~</p>
+````````````````````````````````
+
+
+Backslashes before other characters are treated as literal
+backslashes:
+
+```````````````````````````````` example
+\→\A\a\ \3\φ\«
+.
+<p>\→\A\a\ \3\φ\«</p>
+````````````````````````````````
+
+
+Escaped characters are treated as regular characters and do
+not have their usual Markdown meanings:
+
+```````````````````````````````` example
+\*not emphasized*
+\<br/> not a tag
+\[not a link](/foo)
+\`not code`
+1\. not a list
+\* not a list
+\# not a heading
+\[foo]: /url "not a reference"
+\&ouml; not a character entity
+.
+<p>*not emphasized*
+&lt;br/&gt; not a tag
+[not a link](/foo)
+`not code`
+1. not a list
+* not a list
+# not a heading
+[foo]: /url &quot;not a reference&quot;
+&amp;ouml; not a character entity</p>
+````````````````````````````````
+
+
+If a backslash is itself escaped, the following character is not:
+
+```````````````````````````````` example
+\\*emphasis*
+.
+<p>\<em>emphasis</em></p>
+````````````````````````````````
+
+
+A backslash at the end of the line is a [hard line break]:
+
+```````````````````````````````` example
+foo\
+bar
+.
+<p>foo<br />
+bar</p>
+````````````````````````````````
+
+
+Backslash escapes do not work in code blocks, code spans, autolinks, or
+raw HTML:
+
+```````````````````````````````` example
+`` \[\` ``
+.
+<p><code>\[\`</code></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ \[\]
+.
+<pre><code>\[\]
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+~~~
+\[\]
+~~~
+.
+<pre><code>\[\]
+</code></pre>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<http://example.com?find=\*>
+.
+<p><a href="http://example.com?find=%5C*">http://example.com?find=\*</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<a href="/bar\/)">
+.
+<a href="/bar\/)">
+````````````````````````````````
+
+
+But they work in all other contexts, including URLs and link titles,
+link references, and [info strings] in [fenced code blocks]:
+
+```````````````````````````````` example
+[foo](/bar\* "ti\*tle")
+.
+<p><a href="/bar*" title="ti*tle">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo]
+
+[foo]: /bar\* "ti\*tle"
+.
+<p><a href="/bar*" title="ti*tle">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+``` foo\+bar
+foo
+```
+.
+<pre><code class="language-foo+bar">foo
+</code></pre>
+````````````````````````````````
+
+
+
+## Entity and numeric character references
+
+Valid HTML entity references and numeric character references
+can be used in place of the corresponding Unicode character,
+with the following exceptions:
+
+- Entity and character references are not recognized in code
+ blocks and code spans.
+
+- Entity and character references cannot stand in place of
+ special characters that define structural elements in
+ CommonMark. For example, although `&#42;` can be used
+ in place of a literal `*` character, `&#42;` cannot replace
+ `*` in emphasis delimiters, bullet list markers, or thematic
+ breaks.
+
+Conforming CommonMark parsers need not store information about
+whether a particular character was represented in the source
+using a Unicode character or an entity reference.
+
+[Entity references](@) consist of `&` + any of the valid
+HTML5 entity names + `;`. The
+document <https://html.spec.whatwg.org/multipage/entities.json>
+is used as an authoritative source for the valid entity
+references and their corresponding code points.
+
+```````````````````````````````` example
+&nbsp; &amp; &copy; &AElig; &Dcaron;
+&frac34; &HilbertSpace; &DifferentialD;
+&ClockwiseContourIntegral; &ngE;
+.
+<p>  &amp; © Æ Ď
+¾ ℋ ⅆ
+∲ ≧̸</p>
+````````````````````````````````
+
+
+[Decimal numeric character
+references](@)
+consist of `&#` + a string of 1--7 arabic digits + `;`. A
+numeric character reference is parsed as the corresponding
+Unicode character. Invalid Unicode code points will be replaced by
+the REPLACEMENT CHARACTER (`U+FFFD`). For security reasons,
+the code point `U+0000` will also be replaced by `U+FFFD`.
+
+```````````````````````````````` example
+&#35; &#1234; &#992; &#0;
+.
+<p># Ӓ Ϡ �</p>
+````````````````````````````````
+
+
+[Hexadecimal numeric character
+references](@) consist of `&#` +
+either `X` or `x` + a string of 1-6 hexadecimal digits + `;`.
+They too are parsed as the corresponding Unicode character (this
+time specified with a hexadecimal numeral instead of decimal).
+
+```````````````````````````````` example
+&#X22; &#XD06; &#xcab;
+.
+<p>&quot; ആ ಫ</p>
+````````````````````````````````
+
+
+Here are some nonentities:
+
+```````````````````````````````` example
+&nbsp &x; &#; &#x;
+&#87654321;
+&#abcdef0;
+&ThisIsNotDefined; &hi?;
+.
+<p>&amp;nbsp &amp;x; &amp;#; &amp;#x;
+&amp;#87654321;
+&amp;#abcdef0;
+&amp;ThisIsNotDefined; &amp;hi?;</p>
+````````````````````````````````
+
+
+Although HTML5 does accept some entity references
+without a trailing semicolon (such as `&copy`), these are not
+recognized here, because it makes the grammar too ambiguous:
+
+```````````````````````````````` example
+&copy
+.
+<p>&amp;copy</p>
+````````````````````````````````
+
+
+Strings that are not on the list of HTML5 named entities are not
+recognized as entity references either:
+
+```````````````````````````````` example
+&MadeUpEntity;
+.
+<p>&amp;MadeUpEntity;</p>
+````````````````````````````````
+
+
+Entity and numeric character references are recognized in any
+context besides code spans or code blocks, including
+URLs, [link titles], and [fenced code block][] [info strings]:
+
+```````````````````````````````` example
+<a href="&ouml;&ouml;.html">
+.
+<a href="&ouml;&ouml;.html">
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo](/f&ouml;&ouml; "f&ouml;&ouml;")
+.
+<p><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo]
+
+[foo]: /f&ouml;&ouml; "f&ouml;&ouml;"
+.
+<p><a href="/f%C3%B6%C3%B6" title="föö">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+``` f&ouml;&ouml;
+foo
+```
+.
+<pre><code class="language-föö">foo
+</code></pre>
+````````````````````````````````
+
+
+Entity and numeric character references are treated as literal
+text in code spans and code blocks:
+
+```````````````````````````````` example
+`f&ouml;&ouml;`
+.
+<p><code>f&amp;ouml;&amp;ouml;</code></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+ f&ouml;f&ouml;
+.
+<pre><code>f&amp;ouml;f&amp;ouml;
+</code></pre>
+````````````````````````````````
+
+
+Entity and numeric character references cannot be used
+in place of symbols indicating structure in CommonMark
+documents.
+
+```````````````````````````````` example
+&#42;foo&#42;
+*foo*
+.
+<p>*foo*
+<em>foo</em></p>
+````````````````````````````````
+
+```````````````````````````````` example
+&#42; foo
+
+* foo
+.
+<p>* foo</p>
+<ul>
+<li>foo</li>
+</ul>
+````````````````````````````````
+
+```````````````````````````````` example
+foo&#10;&#10;bar
+.
+<p>foo
+
+bar</p>
+````````````````````````````````
+
+```````````````````````````````` example
+&#9;foo
+.
+<p>→foo</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[a](url &quot;tit&quot;)
+.
+<p>[a](url &quot;tit&quot;)</p>
+````````````````````````````````
+
+
+## Code spans
+
+A [backtick string](@)
+is a string of one or more backtick characters (`` ` ``) that is neither
+preceded nor followed by a backtick.
+
+A [code span](@) begins with a backtick string and ends with
+a backtick string of equal length. The contents of the code span are
+the characters between the two backtick strings, normalized in the
+following ways:
+
+- First, [line endings] are converted to [spaces].
+- If the resulting string both begins *and* ends with a [space]
+ character, but does not consist entirely of [space]
+ characters, a single [space] character is removed from the
+ front and back. This allows you to include code that begins
+ or ends with backtick characters, which must be separated by
+ whitespace from the opening or closing backtick strings.
+
+This is a simple code span:
+
+```````````````````````````````` example
+`foo`
+.
+<p><code>foo</code></p>
+````````````````````````````````
+
+
+Here two backticks are used, because the code contains a backtick.
+This example also illustrates stripping of a single leading and
+trailing space:
+
+```````````````````````````````` example
+`` foo ` bar ``
+.
+<p><code>foo ` bar</code></p>
+````````````````````````````````
+
+
+This example shows the motivation for stripping leading and trailing
+spaces:
+
+```````````````````````````````` example
+` `` `
+.
+<p><code>``</code></p>
+````````````````````````````````
+
+Note that only *one* space is stripped:
+
+```````````````````````````````` example
+` `` `
+.
+<p><code> `` </code></p>
+````````````````````````````````
+
+The stripping only happens if the space is on both
+sides of the string:
+
+```````````````````````````````` example
+` a`
+.
+<p><code> a</code></p>
+````````````````````````````````
+
+Only [spaces], and not [unicode whitespace] in general, are
+stripped in this way:
+
+```````````````````````````````` example
+` b `
+.
+<p><code> b </code></p>
+````````````````````````````````
+
+No stripping occurs if the code span contains only spaces:
+
+```````````````````````````````` example
+` `
+` `
+.
+<p><code> </code>
+<code> </code></p>
+````````````````````````````````
+
+
+[Line endings] are treated like spaces:
+
+```````````````````````````````` example
+``
+foo
+bar
+baz
+``
+.
+<p><code>foo bar baz</code></p>
+````````````````````````````````
+
+```````````````````````````````` example
+``
+foo
+``
+.
+<p><code>foo </code></p>
+````````````````````````````````
+
+
+Interior spaces are not collapsed:
+
+```````````````````````````````` example
+`foo bar
+baz`
+.
+<p><code>foo bar baz</code></p>
+````````````````````````````````
+
+Note that browsers will typically collapse consecutive spaces
+when rendering `<code>` elements, so it is recommended that
+the following CSS be used:
+
+ code{white-space: pre-wrap;}
+
+
+Note that backslash escapes do not work in code spans. All backslashes
+are treated literally:
+
+```````````````````````````````` example
+`foo\`bar`
+.
+<p><code>foo\</code>bar`</p>
+````````````````````````````````
+
+
+Backslash escapes are never needed, because one can always choose a
+string of *n* backtick characters as delimiters, where the code does
+not contain any strings of exactly *n* backtick characters.
+
+```````````````````````````````` example
+``foo`bar``
+.
+<p><code>foo`bar</code></p>
+````````````````````````````````
+
+```````````````````````````````` example
+` foo `` bar `
+.
+<p><code>foo `` bar</code></p>
+````````````````````````````````
+
+
+Code span backticks have higher precedence than any other inline
+constructs except HTML tags and autolinks. Thus, for example, this is
+not parsed as emphasized text, since the second `*` is part of a code
+span:
+
+```````````````````````````````` example
+*foo`*`
+.
+<p>*foo<code>*</code></p>
+````````````````````````````````
+
+
+And this is not parsed as a link:
+
+```````````````````````````````` example
+[not a `link](/foo`)
+.
+<p>[not a <code>link](/foo</code>)</p>
+````````````````````````````````
+
+
+Code spans, HTML tags, and autolinks have the same precedence.
+Thus, this is code:
+
+```````````````````````````````` example
+`<a href="`">`
+.
+<p><code>&lt;a href=&quot;</code>&quot;&gt;`</p>
+````````````````````````````````
+
+
+But this is an HTML tag:
+
+```````````````````````````````` example
+<a href="`">`
+.
+<p><a href="`">`</p>
+````````````````````````````````
+
+
+And this is code:
+
+```````````````````````````````` example
+`<http://foo.bar.`baz>`
+.
+<p><code>&lt;http://foo.bar.</code>baz&gt;`</p>
+````````````````````````````````
+
+
+But this is an autolink:
+
+```````````````````````````````` example
+<http://foo.bar.`baz>`
+.
+<p><a href="http://foo.bar.%60baz">http://foo.bar.`baz</a>`</p>
+````````````````````````````````
+
+
+When a backtick string is not closed by a matching backtick string,
+we just have literal backticks:
+
+```````````````````````````````` example
+```foo``
+.
+<p>```foo``</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+`foo
+.
+<p>`foo</p>
+````````````````````````````````
+
+The following case also illustrates the need for opening and
+closing backtick strings to be equal in length:
+
+```````````````````````````````` example
+`foo``bar``
+.
+<p>`foo<code>bar</code></p>
+````````````````````````````````
+
+
+## Emphasis and strong emphasis
+
+John Gruber's original [Markdown syntax
+description](http://daringfireball.net/projects/markdown/syntax#em) says:
+
+> Markdown treats asterisks (`*`) and underscores (`_`) as indicators of
+> emphasis. Text wrapped with one `*` or `_` will be wrapped with an HTML
+> `<em>` tag; double `*`'s or `_`'s will be wrapped with an HTML `<strong>`
+> tag.
+
+This is enough for most users, but these rules leave much undecided,
+especially when it comes to nested emphasis. The original
+`Markdown.pl` test suite makes it clear that triple `***` and
+`___` delimiters can be used for strong emphasis, and most
+implementations have also allowed the following patterns:
+
+``` markdown
+***strong emph***
+***strong** in emph*
+***emph* in strong**
+**in strong *emph***
+*in emph **strong***
+```
+
+The following patterns are less widely supported, but the intent
+is clear and they are useful (especially in contexts like bibliography
+entries):
+
+``` markdown
+*emph *with emph* in it*
+**strong **with strong** in it**
+```
+
+Many implementations have also restricted intraword emphasis to
+the `*` forms, to avoid unwanted emphasis in words containing
+internal underscores. (It is best practice to put these in code
+spans, but users often do not.)
+
+``` markdown
+internal emphasis: foo*bar*baz
+no emphasis: foo_bar_baz
+```
+
+The rules given below capture all of these patterns, while allowing
+for efficient parsing strategies that do not backtrack.
+
+First, some definitions. A [delimiter run](@) is either
+a sequence of one or more `*` characters that is not preceded or
+followed by a non-backslash-escaped `*` character, or a sequence
+of one or more `_` characters that is not preceded or followed by
+a non-backslash-escaped `_` character.
+
+A [left-flanking delimiter run](@) is
+a [delimiter run] that is (1) not followed by [Unicode whitespace],
+and either (2a) not followed by a [punctuation character], or
+(2b) followed by a [punctuation character] and
+preceded by [Unicode whitespace] or a [punctuation character].
+For purposes of this definition, the beginning and the end of
+the line count as Unicode whitespace.
+
+A [right-flanking delimiter run](@) is
+a [delimiter run] that is (1) not preceded by [Unicode whitespace],
+and either (2a) not preceded by a [punctuation character], or
+(2b) preceded by a [punctuation character] and
+followed by [Unicode whitespace] or a [punctuation character].
+For purposes of this definition, the beginning and the end of
+the line count as Unicode whitespace.
+
+Here are some examples of delimiter runs.
+
+ - left-flanking but not right-flanking:
+
+ ```
+ ***abc
+ _abc
+ **"abc"
+ _"abc"
+ ```
+
+ - right-flanking but not left-flanking:
+
+ ```
+ abc***
+ abc_
+ "abc"**
+ "abc"_
+ ```
+
+ - Both left and right-flanking:
+
+ ```
+ abc***def
+ "abc"_"def"
+ ```
+
+ - Neither left nor right-flanking:
+
+ ```
+ abc *** def
+ a _ b
+ ```
+
+(The idea of distinguishing left-flanking and right-flanking
+delimiter runs based on the character before and the character
+after comes from Roopesh Chander's
+[vfmd](http://www.vfmd.org/vfmd-spec/specification/#procedure-for-identifying-emphasis-tags).
+vfmd uses the terminology "emphasis indicator string" instead of "delimiter
+run," and its rules for distinguishing left- and right-flanking runs
+are a bit more complex than the ones given here.)
+
+The following rules define emphasis and strong emphasis:
+
+1. A single `*` character [can open emphasis](@)
+ iff (if and only if) it is part of a [left-flanking delimiter run].
+
+2. A single `_` character [can open emphasis] iff
+ it is part of a [left-flanking delimiter run]
+ and either (a) not part of a [right-flanking delimiter run]
+ or (b) part of a [right-flanking delimiter run]
+ preceded by punctuation.
+
+3. A single `*` character [can close emphasis](@)
+ iff it is part of a [right-flanking delimiter run].
+
+4. A single `_` character [can close emphasis] iff
+ it is part of a [right-flanking delimiter run]
+ and either (a) not part of a [left-flanking delimiter run]
+ or (b) part of a [left-flanking delimiter run]
+ followed by punctuation.
+
+5. A double `**` [can open strong emphasis](@)
+ iff it is part of a [left-flanking delimiter run].
+
+6. A double `__` [can open strong emphasis] iff
+ it is part of a [left-flanking delimiter run]
+ and either (a) not part of a [right-flanking delimiter run]
+ or (b) part of a [right-flanking delimiter run]
+ preceded by punctuation.
+
+7. A double `**` [can close strong emphasis](@)
+ iff it is part of a [right-flanking delimiter run].
+
+8. A double `__` [can close strong emphasis] iff
+ it is part of a [right-flanking delimiter run]
+ and either (a) not part of a [left-flanking delimiter run]
+ or (b) part of a [left-flanking delimiter run]
+ followed by punctuation.
+
+9. Emphasis begins with a delimiter that [can open emphasis] and ends
+ with a delimiter that [can close emphasis], and that uses the same
+ character (`_` or `*`) as the opening delimiter. The
+ opening and closing delimiters must belong to separate
+ [delimiter runs]. If one of the delimiters can both
+ open and close emphasis, then the sum of the lengths of the
+ delimiter runs containing the opening and closing delimiters
+ must not be a multiple of 3 unless both lengths are
+ multiples of 3.
+
+10. Strong emphasis begins with a delimiter that
+ [can open strong emphasis] and ends with a delimiter that
+ [can close strong emphasis], and that uses the same character
+ (`_` or `*`) as the opening delimiter. The
+ opening and closing delimiters must belong to separate
+ [delimiter runs]. If one of the delimiters can both open
+ and close strong emphasis, then the sum of the lengths of
+ the delimiter runs containing the opening and closing
+ delimiters must not be a multiple of 3 unless both lengths
+ are multiples of 3.
+
+11. A literal `*` character cannot occur at the beginning or end of
+ `*`-delimited emphasis or `**`-delimited strong emphasis, unless it
+ is backslash-escaped.
+
+12. A literal `_` character cannot occur at the beginning or end of
+ `_`-delimited emphasis or `__`-delimited strong emphasis, unless it
+ is backslash-escaped.
+
+Where rules 1--12 above are compatible with multiple parsings,
+the following principles resolve ambiguity:
+
+13. The number of nestings should be minimized. Thus, for example,
+ an interpretation `<strong>...</strong>` is always preferred to
+ `<em><em>...</em></em>`.
+
+14. An interpretation `<em><strong>...</strong></em>` is always
+ preferred to `<strong><em>...</em></strong>`.
+
+15. When two potential emphasis or strong emphasis spans overlap,
+ so that the second begins before the first ends and ends after
+ the first ends, the first takes precedence. Thus, for example,
+ `*foo _bar* baz_` is parsed as `<em>foo _bar</em> baz_` rather
+ than `*foo <em>bar* baz</em>`.
+
+16. When there are two potential emphasis or strong emphasis spans
+ with the same closing delimiter, the shorter one (the one that
+ opens later) takes precedence. Thus, for example,
+ `**foo **bar baz**` is parsed as `**foo <strong>bar baz</strong>`
+ rather than `<strong>foo **bar baz</strong>`.
+
+17. Inline code spans, links, images, and HTML tags group more tightly
+ than emphasis. So, when there is a choice between an interpretation
+ that contains one of these elements and one that does not, the
+ former always wins. Thus, for example, `*[foo*](bar)` is
+ parsed as `*<a href="bar">foo*</a>` rather than as
+ `<em>[foo</em>](bar)`.
+
+These rules can be illustrated through a series of examples.
+
+Rule 1:
+
+```````````````````````````````` example
+*foo bar*
+.
+<p><em>foo bar</em></p>
+````````````````````````````````
+
+
+This is not emphasis, because the opening `*` is followed by
+whitespace, and hence not part of a [left-flanking delimiter run]:
+
+```````````````````````````````` example
+a * foo bar*
+.
+<p>a * foo bar*</p>
+````````````````````````````````
+
+
+This is not emphasis, because the opening `*` is preceded
+by an alphanumeric and followed by punctuation, and hence
+not part of a [left-flanking delimiter run]:
+
+```````````````````````````````` example
+a*"foo"*
+.
+<p>a*&quot;foo&quot;*</p>
+````````````````````````````````
+
+
+Unicode nonbreaking spaces count as whitespace, too:
+
+```````````````````````````````` example
+* a *
+.
+<p>* a *</p>
+````````````````````````````````
+
+
+Intraword emphasis with `*` is permitted:
+
+```````````````````````````````` example
+foo*bar*
+.
+<p>foo<em>bar</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+5*6*78
+.
+<p>5<em>6</em>78</p>
+````````````````````````````````
+
+
+Rule 2:
+
+```````````````````````````````` example
+_foo bar_
+.
+<p><em>foo bar</em></p>
+````````````````````````````````
+
+
+This is not emphasis, because the opening `_` is followed by
+whitespace:
+
+```````````````````````````````` example
+_ foo bar_
+.
+<p>_ foo bar_</p>
+````````````````````````````````
+
+
+This is not emphasis, because the opening `_` is preceded
+by an alphanumeric and followed by punctuation:
+
+```````````````````````````````` example
+a_"foo"_
+.
+<p>a_&quot;foo&quot;_</p>
+````````````````````````````````
+
+
+Emphasis with `_` is not allowed inside words:
+
+```````````````````````````````` example
+foo_bar_
+.
+<p>foo_bar_</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+5_6_78
+.
+<p>5_6_78</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+пристаням_стремятся_
+.
+<p>пристаням_стремятся_</p>
+````````````````````````````````
+
+
+Here `_` does not generate emphasis, because the first delimiter run
+is right-flanking and the second left-flanking:
+
+```````````````````````````````` example
+aa_"bb"_cc
+.
+<p>aa_&quot;bb&quot;_cc</p>
+````````````````````````````````
+
+
+This is emphasis, even though the opening delimiter is
+both left- and right-flanking, because it is preceded by
+punctuation:
+
+```````````````````````````````` example
+foo-_(bar)_
+.
+<p>foo-<em>(bar)</em></p>
+````````````````````````````````
+
+
+Rule 3:
+
+This is not emphasis, because the closing delimiter does
+not match the opening delimiter:
+
+```````````````````````````````` example
+_foo*
+.
+<p>_foo*</p>
+````````````````````````````````
+
+
+This is not emphasis, because the closing `*` is preceded by
+whitespace:
+
+```````````````````````````````` example
+*foo bar *
+.
+<p>*foo bar *</p>
+````````````````````````````````
+
+
+A newline also counts as whitespace:
+
+```````````````````````````````` example
+*foo bar
+*
+.
+<p>*foo bar
+*</p>
+````````````````````````````````
+
+
+This is not emphasis, because the second `*` is
+preceded by punctuation and followed by an alphanumeric
+(hence it is not part of a [right-flanking delimiter run]):
+
+```````````````````````````````` example
+*(*foo)
+.
+<p>*(*foo)</p>
+````````````````````````````````
+
+
+The point of this restriction is more easily appreciated
+with this example:
+
+```````````````````````````````` example
+*(*foo*)*
+.
+<p><em>(<em>foo</em>)</em></p>
+````````````````````````````````
+
+
+Intraword emphasis with `*` is allowed:
+
+```````````````````````````````` example
+*foo*bar
+.
+<p><em>foo</em>bar</p>
+````````````````````````````````
+
+
+
+Rule 4:
+
+This is not emphasis, because the closing `_` is preceded by
+whitespace:
+
+```````````````````````````````` example
+_foo bar _
+.
+<p>_foo bar _</p>
+````````````````````````````````
+
+
+This is not emphasis, because the second `_` is
+preceded by punctuation and followed by an alphanumeric:
+
+```````````````````````````````` example
+_(_foo)
+.
+<p>_(_foo)</p>
+````````````````````````````````
+
+
+This is emphasis within emphasis:
+
+```````````````````````````````` example
+_(_foo_)_
+.
+<p><em>(<em>foo</em>)</em></p>
+````````````````````````````````
+
+
+Intraword emphasis is disallowed for `_`:
+
+```````````````````````````````` example
+_foo_bar
+.
+<p>_foo_bar</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_пристаням_стремятся
+.
+<p>_пристаням_стремятся</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_foo_bar_baz_
+.
+<p><em>foo_bar_baz</em></p>
+````````````````````````````````
+
+
+This is emphasis, even though the closing delimiter is
+both left- and right-flanking, because it is followed by
+punctuation:
+
+```````````````````````````````` example
+_(bar)_.
+.
+<p><em>(bar)</em>.</p>
+````````````````````````````````
+
+
+Rule 5:
+
+```````````````````````````````` example
+**foo bar**
+.
+<p><strong>foo bar</strong></p>
+````````````````````````````````
+
+
+This is not strong emphasis, because the opening delimiter is
+followed by whitespace:
+
+```````````````````````````````` example
+** foo bar**
+.
+<p>** foo bar**</p>
+````````````````````````````````
+
+
+This is not strong emphasis, because the opening `**` is preceded
+by an alphanumeric and followed by punctuation, and hence
+not part of a [left-flanking delimiter run]:
+
+```````````````````````````````` example
+a**"foo"**
+.
+<p>a**&quot;foo&quot;**</p>
+````````````````````````````````
+
+
+Intraword strong emphasis with `**` is permitted:
+
+```````````````````````````````` example
+foo**bar**
+.
+<p>foo<strong>bar</strong></p>
+````````````````````````````````
+
+
+Rule 6:
+
+```````````````````````````````` example
+__foo bar__
+.
+<p><strong>foo bar</strong></p>
+````````````````````````````````
+
+
+This is not strong emphasis, because the opening delimiter is
+followed by whitespace:
+
+```````````````````````````````` example
+__ foo bar__
+.
+<p>__ foo bar__</p>
+````````````````````````````````
+
+
+A newline counts as whitespace:
+```````````````````````````````` example
+__
+foo bar__
+.
+<p>__
+foo bar__</p>
+````````````````````````````````
+
+
+This is not strong emphasis, because the opening `__` is preceded
+by an alphanumeric and followed by punctuation:
+
+```````````````````````````````` example
+a__"foo"__
+.
+<p>a__&quot;foo&quot;__</p>
+````````````````````````````````
+
+
+Intraword strong emphasis is forbidden with `__`:
+
+```````````````````````````````` example
+foo__bar__
+.
+<p>foo__bar__</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+5__6__78
+.
+<p>5__6__78</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+пристаням__стремятся__
+.
+<p>пристаням__стремятся__</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo, __bar__, baz__
+.
+<p><strong>foo, <strong>bar</strong>, baz</strong></p>
+````````````````````````````````
+
+
+This is strong emphasis, even though the opening delimiter is
+both left- and right-flanking, because it is preceded by
+punctuation:
+
+```````````````````````````````` example
+foo-__(bar)__
+.
+<p>foo-<strong>(bar)</strong></p>
+````````````````````````````````
+
+
+
+Rule 7:
+
+This is not strong emphasis, because the closing delimiter is preceded
+by whitespace:
+
+```````````````````````````````` example
+**foo bar **
+.
+<p>**foo bar **</p>
+````````````````````````````````
+
+
+(Nor can it be interpreted as an emphasized `*foo bar *`, because of
+Rule 11.)
+
+This is not strong emphasis, because the second `**` is
+preceded by punctuation and followed by an alphanumeric:
+
+```````````````````````````````` example
+**(**foo)
+.
+<p>**(**foo)</p>
+````````````````````````````````
+
+
+The point of this restriction is more easily appreciated
+with these examples:
+
+```````````````````````````````` example
+*(**foo**)*
+.
+<p><em>(<strong>foo</strong>)</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**Gomphocarpus (*Gomphocarpus physocarpus*, syn.
+*Asclepias physocarpa*)**
+.
+<p><strong>Gomphocarpus (<em>Gomphocarpus physocarpus</em>, syn.
+<em>Asclepias physocarpa</em>)</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo "*bar*" foo**
+.
+<p><strong>foo &quot;<em>bar</em>&quot; foo</strong></p>
+````````````````````````````````
+
+
+Intraword emphasis:
+
+```````````````````````````````` example
+**foo**bar
+.
+<p><strong>foo</strong>bar</p>
+````````````````````````````````
+
+
+Rule 8:
+
+This is not strong emphasis, because the closing delimiter is
+preceded by whitespace:
+
+```````````````````````````````` example
+__foo bar __
+.
+<p>__foo bar __</p>
+````````````````````````````````
+
+
+This is not strong emphasis, because the second `__` is
+preceded by punctuation and followed by an alphanumeric:
+
+```````````````````````````````` example
+__(__foo)
+.
+<p>__(__foo)</p>
+````````````````````````````````
+
+
+The point of this restriction is more easily appreciated
+with this example:
+
+```````````````````````````````` example
+_(__foo__)_
+.
+<p><em>(<strong>foo</strong>)</em></p>
+````````````````````````````````
+
+
+Intraword strong emphasis is forbidden with `__`:
+
+```````````````````````````````` example
+__foo__bar
+.
+<p>__foo__bar</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__пристаням__стремятся
+.
+<p>__пристаням__стремятся</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo__bar__baz__
+.
+<p><strong>foo__bar__baz</strong></p>
+````````````````````````````````
+
+
+This is strong emphasis, even though the closing delimiter is
+both left- and right-flanking, because it is followed by
+punctuation:
+
+```````````````````````````````` example
+__(bar)__.
+.
+<p><strong>(bar)</strong>.</p>
+````````````````````````````````
+
+
+Rule 9:
+
+Any nonempty sequence of inline elements can be the contents of an
+emphasized span.
+
+```````````````````````````````` example
+*foo [bar](/url)*
+.
+<p><em>foo <a href="/url">bar</a></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo
+bar*
+.
+<p><em>foo
+bar</em></p>
+````````````````````````````````
+
+
+In particular, emphasis and strong emphasis can be nested
+inside emphasis:
+
+```````````````````````````````` example
+_foo __bar__ baz_
+.
+<p><em>foo <strong>bar</strong> baz</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_foo _bar_ baz_
+.
+<p><em>foo <em>bar</em> baz</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo_ bar_
+.
+<p><em><em>foo</em> bar</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo *bar**
+.
+<p><em>foo <em>bar</em></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo **bar** baz*
+.
+<p><em>foo <strong>bar</strong> baz</em></p>
+````````````````````````````````
+
+```````````````````````````````` example
+*foo**bar**baz*
+.
+<p><em>foo<strong>bar</strong>baz</em></p>
+````````````````````````````````
+
+Note that in the preceding case, the interpretation
+
+``` markdown
+<p><em>foo</em><em>bar<em></em>baz</em></p>
+```
+
+
+is precluded by the condition that a delimiter that
+can both open and close (like the `*` after `foo`)
+cannot form emphasis if the sum of the lengths of
+the delimiter runs containing the opening and
+closing delimiters is a multiple of 3 unless
+both lengths are multiples of 3.
+
+
+For the same reason, we don't get two consecutive
+emphasis sections in this example:
+
+```````````````````````````````` example
+*foo**bar*
+.
+<p><em>foo**bar</em></p>
+````````````````````````````````
+
+
+The same condition ensures that the following
+cases are all strong emphasis nested inside
+emphasis, even when the interior spaces are
+omitted:
+
+
+```````````````````````````````` example
+***foo** bar*
+.
+<p><em><strong>foo</strong> bar</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo **bar***
+.
+<p><em>foo <strong>bar</strong></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo**bar***
+.
+<p><em>foo<strong>bar</strong></em></p>
+````````````````````````````````
+
+
+When the lengths of the interior closing and opening
+delimiter runs are *both* multiples of 3, though,
+they can match to create emphasis:
+
+```````````````````````````````` example
+foo***bar***baz
+.
+<p>foo<em><strong>bar</strong></em>baz</p>
+````````````````````````````````
+
+```````````````````````````````` example
+foo******bar*********baz
+.
+<p>foo<strong><strong><strong>bar</strong></strong></strong>***baz</p>
+````````````````````````````````
+
+
+Indefinite levels of nesting are possible:
+
+```````````````````````````````` example
+*foo **bar *baz* bim** bop*
+.
+<p><em>foo <strong>bar <em>baz</em> bim</strong> bop</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo [*bar*](/url)*
+.
+<p><em>foo <a href="/url"><em>bar</em></a></em></p>
+````````````````````````````````
+
+
+There can be no empty emphasis or strong emphasis:
+
+```````````````````````````````` example
+** is not an empty emphasis
+.
+<p>** is not an empty emphasis</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**** is not an empty strong emphasis
+.
+<p>**** is not an empty strong emphasis</p>
+````````````````````````````````
+
+
+
+Rule 10:
+
+Any nonempty sequence of inline elements can be the contents of a
+strongly emphasized span.
+
+```````````````````````````````` example
+**foo [bar](/url)**
+.
+<p><strong>foo <a href="/url">bar</a></strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo
+bar**
+.
+<p><strong>foo
+bar</strong></p>
+````````````````````````````````
+
+
+In particular, emphasis and strong emphasis can be nested
+inside strong emphasis:
+
+```````````````````````````````` example
+__foo _bar_ baz__
+.
+<p><strong>foo <em>bar</em> baz</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo __bar__ baz__
+.
+<p><strong>foo <strong>bar</strong> baz</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+____foo__ bar__
+.
+<p><strong><strong>foo</strong> bar</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo **bar****
+.
+<p><strong>foo <strong>bar</strong></strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo *bar* baz**
+.
+<p><strong>foo <em>bar</em> baz</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo*bar*baz**
+.
+<p><strong>foo<em>bar</em>baz</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+***foo* bar**
+.
+<p><strong><em>foo</em> bar</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo *bar***
+.
+<p><strong>foo <em>bar</em></strong></p>
+````````````````````````````````
+
+
+Indefinite levels of nesting are possible:
+
+```````````````````````````````` example
+**foo *bar **baz**
+bim* bop**
+.
+<p><strong>foo <em>bar <strong>baz</strong>
+bim</em> bop</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo [*bar*](/url)**
+.
+<p><strong>foo <a href="/url"><em>bar</em></a></strong></p>
+````````````````````````````````
+
+
+There can be no empty emphasis or strong emphasis:
+
+```````````````````````````````` example
+__ is not an empty emphasis
+.
+<p>__ is not an empty emphasis</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+____ is not an empty strong emphasis
+.
+<p>____ is not an empty strong emphasis</p>
+````````````````````````````````
+
+
+
+Rule 11:
+
+```````````````````````````````` example
+foo ***
+.
+<p>foo ***</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo *\**
+.
+<p>foo <em>*</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo *_*
+.
+<p>foo <em>_</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo *****
+.
+<p>foo *****</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo **\***
+.
+<p>foo <strong>*</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo **_**
+.
+<p>foo <strong>_</strong></p>
+````````````````````````````````
+
+
+Note that when delimiters do not match evenly, Rule 11 determines
+that the excess literal `*` characters will appear outside of the
+emphasis, rather than inside it:
+
+```````````````````````````````` example
+**foo*
+.
+<p>*<em>foo</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo**
+.
+<p><em>foo</em>*</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+***foo**
+.
+<p>*<strong>foo</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+****foo*
+.
+<p>***<em>foo</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**foo***
+.
+<p><strong>foo</strong>*</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo****
+.
+<p><em>foo</em>***</p>
+````````````````````````````````
+
+
+
+Rule 12:
+
+```````````````````````````````` example
+foo ___
+.
+<p>foo ___</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo _\__
+.
+<p>foo <em>_</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo _*_
+.
+<p>foo <em>*</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo _____
+.
+<p>foo _____</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo __\___
+.
+<p>foo <strong>_</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo __*__
+.
+<p>foo <strong>*</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo_
+.
+<p>_<em>foo</em></p>
+````````````````````````````````
+
+
+Note that when delimiters do not match evenly, Rule 12 determines
+that the excess literal `_` characters will appear outside of the
+emphasis, rather than inside it:
+
+```````````````````````````````` example
+_foo__
+.
+<p><em>foo</em>_</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+___foo__
+.
+<p>_<strong>foo</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+____foo_
+.
+<p>___<em>foo</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo___
+.
+<p><strong>foo</strong>_</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_foo____
+.
+<p><em>foo</em>___</p>
+````````````````````````````````
+
+
+Rule 13 implies that if you want emphasis nested directly inside
+emphasis, you must use different delimiters:
+
+```````````````````````````````` example
+**foo**
+.
+<p><strong>foo</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*_foo_*
+.
+<p><em><em>foo</em></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__foo__
+.
+<p><strong>foo</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_*foo*_
+.
+<p><em><em>foo</em></em></p>
+````````````````````````````````
+
+
+However, strong emphasis within strong emphasis is possible without
+switching delimiters:
+
+```````````````````````````````` example
+****foo****
+.
+<p><strong><strong>foo</strong></strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+____foo____
+.
+<p><strong><strong>foo</strong></strong></p>
+````````````````````````````````
+
+
+
+Rule 13 can be applied to arbitrarily long sequences of
+delimiters:
+
+```````````````````````````````` example
+******foo******
+.
+<p><strong><strong><strong>foo</strong></strong></strong></p>
+````````````````````````````````
+
+
+Rule 14:
+
+```````````````````````````````` example
+***foo***
+.
+<p><em><strong>foo</strong></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_____foo_____
+.
+<p><em><strong><strong>foo</strong></strong></em></p>
+````````````````````````````````
+
+
+Rule 15:
+
+```````````````````````````````` example
+*foo _bar* baz_
+.
+<p><em>foo _bar</em> baz_</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo __bar *baz bim__ bam*
+.
+<p><em>foo <strong>bar *baz bim</strong> bam</em></p>
+````````````````````````````````
+
+
+Rule 16:
+
+```````````````````````````````` example
+**foo **bar baz**
+.
+<p>**foo <strong>bar baz</strong></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo *bar baz*
+.
+<p>*foo <em>bar baz</em></p>
+````````````````````````````````
+
+
+Rule 17:
+
+```````````````````````````````` example
+*[bar*](/url)
+.
+<p>*<a href="/url">bar*</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_foo [bar_](/url)
+.
+<p>_foo <a href="/url">bar_</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*<img src="foo" title="*"/>
+.
+<p>*<img src="foo" title="*"/></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**<a href="**">
+.
+<p>**<a href="**"></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__<a href="__">
+.
+<p>__<a href="__"></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*a `*`*
+.
+<p><em>a <code>*</code></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+_a `_`_
+.
+<p><em>a <code>_</code></em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+**a<http://foo.bar/?q=**>
+.
+<p>**a<a href="http://foo.bar/?q=**">http://foo.bar/?q=**</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+__a<http://foo.bar/?q=__>
+.
+<p>__a<a href="http://foo.bar/?q=__">http://foo.bar/?q=__</a></p>
+````````````````````````````````
+
+
+
+## Links
+
+A link contains [link text] (the visible text), a [link destination]
+(the URI that is the link destination), and optionally a [link title].
+There are two basic kinds of links in Markdown. In [inline links] the
+destination and title are given immediately after the link text. In
+[reference links] the destination and title are defined elsewhere in
+the document.
+
+A [link text](@) consists of a sequence of zero or more
+inline elements enclosed by square brackets (`[` and `]`). The
+following rules apply:
+
+- Links may not contain other links, at any level of nesting. If
+ multiple otherwise valid link definitions appear nested inside each
+ other, the inner-most definition is used.
+
+- Brackets are allowed in the [link text] only if (a) they
+ are backslash-escaped or (b) they appear as a matched pair of brackets,
+ with an open bracket `[`, a sequence of zero or more inlines, and
+ a close bracket `]`.
+
+- Backtick [code spans], [autolinks], and raw [HTML tags] bind more tightly
+ than the brackets in link text. Thus, for example,
+ `` [foo`]` `` could not be a link text, since the second `]`
+ is part of a code span.
+
+- The brackets in link text bind more tightly than markers for
+ [emphasis and strong emphasis]. Thus, for example, `*[foo*](url)` is a link.
+
+A [link destination](@) consists of either
+
+- a sequence of zero or more characters between an opening `<` and a
+ closing `>` that contains no line breaks or unescaped
+ `<` or `>` characters, or
+
+- a nonempty sequence of characters that does not start with
+ `<`, does not include ASCII space or control characters, and
+ includes parentheses only if (a) they are backslash-escaped or
+ (b) they are part of a balanced pair of unescaped parentheses.
+ (Implementations may impose limits on parentheses nesting to
+ avoid performance issues, but at least three levels of nesting
+ should be supported.)
+
+A [link title](@) consists of either
+
+- a sequence of zero or more characters between straight double-quote
+ characters (`"`), including a `"` character only if it is
+ backslash-escaped, or
+
+- a sequence of zero or more characters between straight single-quote
+ characters (`'`), including a `'` character only if it is
+ backslash-escaped, or
+
+- a sequence of zero or more characters between matching parentheses
+ (`(...)`), including a `(` or `)` character only if it is
+ backslash-escaped.
+
+Although [link titles] may span multiple lines, they may not contain
+a [blank line].
+
+An [inline link](@) consists of a [link text] followed immediately
+by a left parenthesis `(`, optional [whitespace], an optional
+[link destination], an optional [link title] separated from the link
+destination by [whitespace], optional [whitespace], and a right
+parenthesis `)`. The link's text consists of the inlines contained
+in the [link text] (excluding the enclosing square brackets).
+The link's URI consists of the link destination, excluding enclosing
+`<...>` if present, with backslash-escapes in effect as described
+above. The link's title consists of the link title, excluding its
+enclosing delimiters, with backslash-escapes in effect as described
+above.
+
+Here is a simple inline link:
+
+```````````````````````````````` example
+[link](/uri "title")
+.
+<p><a href="/uri" title="title">link</a></p>
+````````````````````````````````
+
+
+The title may be omitted:
+
+```````````````````````````````` example
+[link](/uri)
+.
+<p><a href="/uri">link</a></p>
+````````````````````````````````
+
+
+Both the title and the destination may be omitted:
+
+```````````````````````````````` example
+[link]()
+.
+<p><a href="">link</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[link](<>)
+.
+<p><a href="">link</a></p>
+````````````````````````````````
+
+The destination can only contain spaces if it is
+enclosed in pointy brackets:
+
+```````````````````````````````` example
+[link](/my uri)
+.
+<p>[link](/my uri)</p>
+````````````````````````````````
+
+```````````````````````````````` example
+[link](</my uri>)
+.
+<p><a href="/my%20uri">link</a></p>
+````````````````````````````````
+
+The destination cannot contain line breaks,
+even if enclosed in pointy brackets:
+
+```````````````````````````````` example
+[link](foo
+bar)
+.
+<p>[link](foo
+bar)</p>
+````````````````````````````````
+
+```````````````````````````````` example
+[link](<foo
+bar>)
+.
+<p>[link](<foo
+bar>)</p>
+````````````````````````````````
+
+The destination can contain `)` if it is enclosed
+in pointy brackets:
+
+```````````````````````````````` example
+[a](<b)c>)
+.
+<p><a href="b)c">a</a></p>
+````````````````````````````````
+
+Pointy brackets that enclose links must be unescaped:
+
+```````````````````````````````` example
+[link](<foo\>)
+.
+<p>[link](&lt;foo&gt;)</p>
+````````````````````````````````
+
+These are not links, because the opening pointy bracket
+is not matched properly:
+
+```````````````````````````````` example
+[a](<b)c
+[a](<b)c>
+[a](<b>c)
+.
+<p>[a](&lt;b)c
+[a](&lt;b)c&gt;
+[a](<b>c)</p>
+````````````````````````````````
+
+Parentheses inside the link destination may be escaped:
+
+```````````````````````````````` example
+[link](\(foo\))
+.
+<p><a href="(foo)">link</a></p>
+````````````````````````````````
+
+Any number of parentheses are allowed without escaping, as long as they are
+balanced:
+
+```````````````````````````````` example
+[link](foo(and(bar)))
+.
+<p><a href="foo(and(bar))">link</a></p>
+````````````````````````````````
+
+However, if you have unbalanced parentheses, you need to escape or use the
+`<...>` form:
+
+```````````````````````````````` example
+[link](foo\(and\(bar\))
+.
+<p><a href="foo(and(bar)">link</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[link](<foo(and(bar)>)
+.
+<p><a href="foo(and(bar)">link</a></p>
+````````````````````````````````
+
+
+Parentheses and other symbols can also be escaped, as usual
+in Markdown:
+
+```````````````````````````````` example
+[link](foo\)\:)
+.
+<p><a href="foo):">link</a></p>
+````````````````````````````````
+
+
+A link can contain fragment identifiers and queries:
+
+```````````````````````````````` example
+[link](#fragment)
+
+[link](http://example.com#fragment)
+
+[link](http://example.com?foo=3#frag)
+.
+<p><a href="#fragment">link</a></p>
+<p><a href="http://example.com#fragment">link</a></p>
+<p><a href="http://example.com?foo=3#frag">link</a></p>
+````````````````````````````````
+
+
+Note that a backslash before a non-escapable character is
+just a backslash:
+
+```````````````````````````````` example
+[link](foo\bar)
+.
+<p><a href="foo%5Cbar">link</a></p>
+````````````````````````````````
+
+
+URL-escaping should be left alone inside the destination, as all
+URL-escaped characters are also valid URL characters. Entity and
+numerical character references in the destination will be parsed
+into the corresponding Unicode code points, as usual. These may
+be optionally URL-escaped when written as HTML, but this spec
+does not enforce any particular policy for rendering URLs in
+HTML or other formats. Renderers may make different decisions
+about how to escape or normalize URLs in the output.
+
+```````````````````````````````` example
+[link](foo%20b&auml;)
+.
+<p><a href="foo%20b%C3%A4">link</a></p>
+````````````````````````````````
+
+
+Note that, because titles can often be parsed as destinations,
+if you try to omit the destination and keep the title, you'll
+get unexpected results:
+
+```````````````````````````````` example
+[link]("title")
+.
+<p><a href="%22title%22">link</a></p>
+````````````````````````````````
+
+
+Titles may be in single quotes, double quotes, or parentheses:
+
+```````````````````````````````` example
+[link](/url "title")
+[link](/url 'title')
+[link](/url (title))
+.
+<p><a href="/url" title="title">link</a>
+<a href="/url" title="title">link</a>
+<a href="/url" title="title">link</a></p>
+````````````````````````````````
+
+
+Backslash escapes and entity and numeric character references
+may be used in titles:
+
+```````````````````````````````` example
+[link](/url "title \"&quot;")
+.
+<p><a href="/url" title="title &quot;&quot;">link</a></p>
+````````````````````````````````
+
+
+Titles must be separated from the link using a [whitespace].
+Other [Unicode whitespace] like non-breaking space doesn't work.
+
+```````````````````````````````` example
+[link](/url "title")
+.
+<p><a href="/url%C2%A0%22title%22">link</a></p>
+````````````````````````````````
+
+
+Nested balanced quotes are not allowed without escaping:
+
+```````````````````````````````` example
+[link](/url "title "and" title")
+.
+<p>[link](/url &quot;title &quot;and&quot; title&quot;)</p>
+````````````````````````````````
+
+
+But it is easy to work around this by using a different quote type:
+
+```````````````````````````````` example
+[link](/url 'title "and" title')
+.
+<p><a href="/url" title="title &quot;and&quot; title">link</a></p>
+````````````````````````````````
+
+
+(Note: `Markdown.pl` did allow double quotes inside a double-quoted
+title, and its test suite included a test demonstrating this.
+But it is hard to see a good rationale for the extra complexity this
+brings, since there are already many ways---backslash escaping,
+entity and numeric character references, or using a different
+quote type for the enclosing title---to write titles containing
+double quotes. `Markdown.pl`'s handling of titles has a number
+of other strange features. For example, it allows single-quoted
+titles in inline links, but not reference links. And, in
+reference links but not inline links, it allows a title to begin
+with `"` and end with `)`. `Markdown.pl` 1.0.1 even allows
+titles with no closing quotation mark, though 1.0.2b8 does not.
+It seems preferable to adopt a simple, rational rule that works
+the same way in inline links and link reference definitions.)
+
+[Whitespace] is allowed around the destination and title:
+
+```````````````````````````````` example
+[link]( /uri
+ "title" )
+.
+<p><a href="/uri" title="title">link</a></p>
+````````````````````````````````
+
+
+But it is not allowed between the link text and the
+following parenthesis:
+
+```````````````````````````````` example
+[link] (/uri)
+.
+<p>[link] (/uri)</p>
+````````````````````````````````
+
+
+The link text may contain balanced brackets, but not unbalanced ones,
+unless they are escaped:
+
+```````````````````````````````` example
+[link [foo [bar]]](/uri)
+.
+<p><a href="/uri">link [foo [bar]]</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[link] bar](/uri)
+.
+<p>[link] bar](/uri)</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[link [bar](/uri)
+.
+<p>[link <a href="/uri">bar</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[link \[bar](/uri)
+.
+<p><a href="/uri">link [bar</a></p>
+````````````````````````````````
+
+
+The link text may contain inline content:
+
+```````````````````````````````` example
+[link *foo **bar** `#`*](/uri)
+.
+<p><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[![moon](moon.jpg)](/uri)
+.
+<p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
+````````````````````````````````
+
+
+However, links may not contain other links, at any level of nesting.
+
+```````````````````````````````` example
+[foo [bar](/uri)](/uri)
+.
+<p>[foo <a href="/uri">bar</a>](/uri)</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo *[bar [baz](/uri)](/uri)*](/uri)
+.
+<p>[foo <em>[bar <a href="/uri">baz</a>](/uri)</em>](/uri)</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![[[foo](uri1)](uri2)](uri3)
+.
+<p><img src="uri3" alt="[foo](uri2)" /></p>
+````````````````````````````````
+
+
+These cases illustrate the precedence of link text grouping over
+emphasis grouping:
+
+```````````````````````````````` example
+*[foo*](/uri)
+.
+<p>*<a href="/uri">foo*</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo *bar](baz*)
+.
+<p><a href="baz*">foo *bar</a></p>
+````````````````````````````````
+
+
+Note that brackets that *aren't* part of links do not take
+precedence:
+
+```````````````````````````````` example
+*foo [bar* baz]
+.
+<p><em>foo [bar</em> baz]</p>
+````````````````````````````````
+
+
+These cases illustrate the precedence of HTML tags, code spans,
+and autolinks over link grouping:
+
+```````````````````````````````` example
+[foo <bar attr="](baz)">
+.
+<p>[foo <bar attr="](baz)"></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo`](/uri)`
+.
+<p>[foo<code>](/uri)</code></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo<http://example.com/?search=](uri)>
+.
+<p>[foo<a href="http://example.com/?search=%5D(uri)">http://example.com/?search=](uri)</a></p>
+````````````````````````````````
+
+
+There are three kinds of [reference link](@)s:
+[full](#full-reference-link), [collapsed](#collapsed-reference-link),
+and [shortcut](#shortcut-reference-link).
+
+A [full reference link](@)
+consists of a [link text] immediately followed by a [link label]
+that [matches] a [link reference definition] elsewhere in the document.
+
+A [link label](@) begins with a left bracket (`[`) and ends
+with the first right bracket (`]`) that is not backslash-escaped.
+Between these brackets there must be at least one [non-whitespace character].
+Unescaped square bracket characters are not allowed inside the
+opening and closing square brackets of [link labels]. A link
+label can have at most 999 characters inside the square
+brackets.
+
+One label [matches](@)
+another just in case their normalized forms are equal. To normalize a
+label, strip off the opening and closing brackets,
+perform the *Unicode case fold*, strip leading and trailing
+[whitespace] and collapse consecutive internal
+[whitespace] to a single space. If there are multiple
+matching reference link definitions, the one that comes first in the
+document is used. (It is desirable in such cases to emit a warning.)
+
+The contents of the first link label are parsed as inlines, which are
+used as the link's text. The link's URI and title are provided by the
+matching [link reference definition].
+
+Here is a simple example:
+
+```````````````````````````````` example
+[foo][bar]
+
+[bar]: /url "title"
+.
+<p><a href="/url" title="title">foo</a></p>
+````````````````````````````````
+
+
+The rules for the [link text] are the same as with
+[inline links]. Thus:
+
+The link text may contain balanced brackets, but not unbalanced ones,
+unless they are escaped:
+
+```````````````````````````````` example
+[link [foo [bar]]][ref]
+
+[ref]: /uri
+.
+<p><a href="/uri">link [foo [bar]]</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[link \[bar][ref]
+
+[ref]: /uri
+.
+<p><a href="/uri">link [bar</a></p>
+````````````````````````````````
+
+
+The link text may contain inline content:
+
+```````````````````````````````` example
+[link *foo **bar** `#`*][ref]
+
+[ref]: /uri
+.
+<p><a href="/uri">link <em>foo <strong>bar</strong> <code>#</code></em></a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[![moon](moon.jpg)][ref]
+
+[ref]: /uri
+.
+<p><a href="/uri"><img src="moon.jpg" alt="moon" /></a></p>
+````````````````````````````````
+
+
+However, links may not contain other links, at any level of nesting.
+
+```````````````````````````````` example
+[foo [bar](/uri)][ref]
+
+[ref]: /uri
+.
+<p>[foo <a href="/uri">bar</a>]<a href="/uri">ref</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo *bar [baz][ref]*][ref]
+
+[ref]: /uri
+.
+<p>[foo <em>bar <a href="/uri">baz</a></em>]<a href="/uri">ref</a></p>
+````````````````````````````````
+
+
+(In the examples above, we have two [shortcut reference links]
+instead of one [full reference link].)
+
+The following cases illustrate the precedence of link text grouping over
+emphasis grouping:
+
+```````````````````````````````` example
+*[foo*][ref]
+
+[ref]: /uri
+.
+<p>*<a href="/uri">foo*</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo *bar][ref]
+
+[ref]: /uri
+.
+<p><a href="/uri">foo *bar</a></p>
+````````````````````````````````
+
+
+These cases illustrate the precedence of HTML tags, code spans,
+and autolinks over link grouping:
+
+```````````````````````````````` example
+[foo <bar attr="][ref]">
+
+[ref]: /uri
+.
+<p>[foo <bar attr="][ref]"></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo`][ref]`
+
+[ref]: /uri
+.
+<p>[foo<code>][ref]</code></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo<http://example.com/?search=][ref]>
+
+[ref]: /uri
+.
+<p>[foo<a href="http://example.com/?search=%5D%5Bref%5D">http://example.com/?search=][ref]</a></p>
+````````````````````````````````
+
+
+Matching is case-insensitive:
+
+```````````````````````````````` example
+[foo][BaR]
+
+[bar]: /url "title"
+.
+<p><a href="/url" title="title">foo</a></p>
+````````````````````````````````
+
+
+Unicode case fold is used:
+
+```````````````````````````````` example
+[Толпой][Толпой] is a Russian word.
+
+[ТОЛПОЙ]: /url
+.
+<p><a href="/url">Толпой</a> is a Russian word.</p>
+````````````````````````````````
+
+
+Consecutive internal [whitespace] is treated as one space for
+purposes of determining matching:
+
+```````````````````````````````` example
+[Foo
+ bar]: /url
+
+[Baz][Foo bar]
+.
+<p><a href="/url">Baz</a></p>
+````````````````````````````````
+
+
+No [whitespace] is allowed between the [link text] and the
+[link label]:
+
+```````````````````````````````` example
+[foo] [bar]
+
+[bar]: /url "title"
+.
+<p>[foo] <a href="/url" title="title">bar</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo]
+[bar]
+
+[bar]: /url "title"
+.
+<p>[foo]
+<a href="/url" title="title">bar</a></p>
+````````````````````````````````
+
+
+This is a departure from John Gruber's original Markdown syntax
+description, which explicitly allows whitespace between the link
+text and the link label. It brings reference links in line with
+[inline links], which (according to both original Markdown and
+this spec) cannot have whitespace after the link text. More
+importantly, it prevents inadvertent capture of consecutive
+[shortcut reference links]. If whitespace is allowed between the
+link text and the link label, then in the following we will have
+a single reference link, not two shortcut reference links, as
+intended:
+
+``` markdown
+[foo]
+[bar]
+
+[foo]: /url1
+[bar]: /url2
+```
+
+(Note that [shortcut reference links] were introduced by Gruber
+himself in a beta version of `Markdown.pl`, but never included
+in the official syntax description. Without shortcut reference
+links, it is harmless to allow space between the link text and
+link label; but once shortcut references are introduced, it is
+too dangerous to allow this, as it frequently leads to
+unintended results.)
+
+When there are multiple matching [link reference definitions],
+the first is used:
+
+```````````````````````````````` example
+[foo]: /url1
+
+[foo]: /url2
+
+[bar][foo]
+.
+<p><a href="/url1">bar</a></p>
+````````````````````````````````
+
+
+Note that matching is performed on normalized strings, not parsed
+inline content. So the following does not match, even though the
+labels define equivalent inline content:
+
+```````````````````````````````` example
+[bar][foo\!]
+
+[foo!]: /url
+.
+<p>[bar][foo!]</p>
+````````````````````````````````
+
+
+[Link labels] cannot contain brackets, unless they are
+backslash-escaped:
+
+```````````````````````````````` example
+[foo][ref[]
+
+[ref[]: /uri
+.
+<p>[foo][ref[]</p>
+<p>[ref[]: /uri</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo][ref[bar]]
+
+[ref[bar]]: /uri
+.
+<p>[foo][ref[bar]]</p>
+<p>[ref[bar]]: /uri</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[[[foo]]]
+
+[[[foo]]]: /url
+.
+<p>[[[foo]]]</p>
+<p>[[[foo]]]: /url</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[foo][ref\[]
+
+[ref\[]: /uri
+.
+<p><a href="/uri">foo</a></p>
+````````````````````````````````
+
+
+Note that in this example `]` is not backslash-escaped:
+
+```````````````````````````````` example
+[bar\\]: /uri
+
+[bar\\]
+.
+<p><a href="/uri">bar\</a></p>
+````````````````````````````````
+
+
+A [link label] must contain at least one [non-whitespace character]:
+
+```````````````````````````````` example
+[]
+
+[]: /uri
+.
+<p>[]</p>
+<p>[]: /uri</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[
+ ]
+
+[
+ ]: /uri
+.
+<p>[
+]</p>
+<p>[
+]: /uri</p>
+````````````````````````````````
+
+
+A [collapsed reference link](@)
+consists of a [link label] that [matches] a
+[link reference definition] elsewhere in the
+document, followed by the string `[]`.
+The contents of the first link label are parsed as inlines,
+which are used as the link's text. The link's URI and title are
+provided by the matching reference link definition. Thus,
+`[foo][]` is equivalent to `[foo][foo]`.
+
+```````````````````````````````` example
+[foo][]
+
+[foo]: /url "title"
+.
+<p><a href="/url" title="title">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[*foo* bar][]
+
+[*foo* bar]: /url "title"
+.
+<p><a href="/url" title="title"><em>foo</em> bar</a></p>
+````````````````````````````````
+
+
+The link labels are case-insensitive:
+
+```````````````````````````````` example
+[Foo][]
+
+[foo]: /url "title"
+.
+<p><a href="/url" title="title">Foo</a></p>
+````````````````````````````````
+
+
+
+As with full reference links, [whitespace] is not
+allowed between the two sets of brackets:
+
+```````````````````````````````` example
+[foo]
+[]
+
+[foo]: /url "title"
+.
+<p><a href="/url" title="title">foo</a>
+[]</p>
+````````````````````````````````
+
+
+A [shortcut reference link](@)
+consists of a [link label] that [matches] a
+[link reference definition] elsewhere in the
+document and is not followed by `[]` or a link label.
+The contents of the first link label are parsed as inlines,
+which are used as the link's text. The link's URI and title
+are provided by the matching link reference definition.
+Thus, `[foo]` is equivalent to `[foo][]`.
+
+```````````````````````````````` example
+[foo]
+
+[foo]: /url "title"
+.
+<p><a href="/url" title="title">foo</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[*foo* bar]
+
+[*foo* bar]: /url "title"
+.
+<p><a href="/url" title="title"><em>foo</em> bar</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[[*foo* bar]]
+
+[*foo* bar]: /url "title"
+.
+<p>[<a href="/url" title="title"><em>foo</em> bar</a>]</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+[[bar [foo]
+
+[foo]: /url
+.
+<p>[[bar <a href="/url">foo</a></p>
+````````````````````````````````
+
+
+The link labels are case-insensitive:
+
+```````````````````````````````` example
+[Foo]
+
+[foo]: /url "title"
+.
+<p><a href="/url" title="title">Foo</a></p>
+````````````````````````````````
+
+
+A space after the link text should be preserved:
+
+```````````````````````````````` example
+[foo] bar
+
+[foo]: /url
+.
+<p><a href="/url">foo</a> bar</p>
+````````````````````````````````
+
+
+If you just want bracketed text, you can backslash-escape the
+opening bracket to avoid links:
+
+```````````````````````````````` example
+\[foo]
+
+[foo]: /url "title"
+.
+<p>[foo]</p>
+````````````````````````````````
+
+
+Note that this is a link, because a link label ends with the first
+following closing bracket:
+
+```````````````````````````````` example
+[foo*]: /url
+
+*[foo*]
+.
+<p>*<a href="/url">foo*</a></p>
+````````````````````````````````
+
+
+Full and collapsed references take precedence over shortcut
+references:
+
+```````````````````````````````` example
+[foo][bar]
+
+[foo]: /url1
+[bar]: /url2
+.
+<p><a href="/url2">foo</a></p>
+````````````````````````````````
+
+```````````````````````````````` example
+[foo][]
+
+[foo]: /url1
+.
+<p><a href="/url1">foo</a></p>
+````````````````````````````````
+
+Inline links also take precedence:
+
+```````````````````````````````` example
+[foo]()
+
+[foo]: /url1
+.
+<p><a href="">foo</a></p>
+````````````````````````````````
+
+```````````````````````````````` example
+[foo](not a link)
+
+[foo]: /url1
+.
+<p><a href="/url1">foo</a>(not a link)</p>
+````````````````````````````````
+
+In the following case `[bar][baz]` is parsed as a reference,
+`[foo]` as normal text:
+
+```````````````````````````````` example
+[foo][bar][baz]
+
+[baz]: /url
+.
+<p>[foo]<a href="/url">bar</a></p>
+````````````````````````````````
+
+
+Here, though, `[foo][bar]` is parsed as a reference, since
+`[bar]` is defined:
+
+```````````````````````````````` example
+[foo][bar][baz]
+
+[baz]: /url1
+[bar]: /url2
+.
+<p><a href="/url2">foo</a><a href="/url1">baz</a></p>
+````````````````````````````````
+
+
+Here `[foo]` is not parsed as a shortcut reference, because it
+is followed by a link label (even though `[bar]` is not defined):
+
+```````````````````````````````` example
+[foo][bar][baz]
+
+[baz]: /url1
+[foo]: /url2
+.
+<p>[foo]<a href="/url1">bar</a></p>
+````````````````````````````````
+
+
+
+## Images
+
+Syntax for images is like the syntax for links, with one
+difference. Instead of [link text], we have an
+[image description](@). The rules for this are the
+same as for [link text], except that (a) an
+image description starts with `![` rather than `[`, and
+(b) an image description may contain links.
+An image description has inline elements
+as its contents. When an image is rendered to HTML,
+this is standardly used as the image's `alt` attribute.
+
+```````````````````````````````` example
+![foo](/url "title")
+.
+<p><img src="/url" alt="foo" title="title" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo *bar*]
+
+[foo *bar*]: train.jpg "train & tracks"
+.
+<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo ![bar](/url)](/url2)
+.
+<p><img src="/url2" alt="foo bar" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo [bar](/url)](/url2)
+.
+<p><img src="/url2" alt="foo bar" /></p>
+````````````````````````````````
+
+
+Though this spec is concerned with parsing, not rendering, it is
+recommended that in rendering to HTML, only the plain string content
+of the [image description] be used. Note that in
+the above example, the alt attribute's value is `foo bar`, not `foo
+[bar](/url)` or `foo <a href="/url">bar</a>`. Only the plain string
+content is rendered, without formatting.
+
+```````````````````````````````` example
+![foo *bar*][]
+
+[foo *bar*]: train.jpg "train & tracks"
+.
+<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo *bar*][foobar]
+
+[FOOBAR]: train.jpg "train & tracks"
+.
+<p><img src="train.jpg" alt="foo bar" title="train &amp; tracks" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo](train.jpg)
+.
+<p><img src="train.jpg" alt="foo" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+My ![foo bar](/path/to/train.jpg "title" )
+.
+<p>My <img src="/path/to/train.jpg" alt="foo bar" title="title" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo](<url>)
+.
+<p><img src="url" alt="foo" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![](/url)
+.
+<p><img src="/url" alt="" /></p>
+````````````````````````````````
+
+
+Reference-style:
+
+```````````````````````````````` example
+![foo][bar]
+
+[bar]: /url
+.
+<p><img src="/url" alt="foo" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![foo][bar]
+
+[BAR]: /url
+.
+<p><img src="/url" alt="foo" /></p>
+````````````````````````````````
+
+
+Collapsed:
+
+```````````````````````````````` example
+![foo][]
+
+[foo]: /url "title"
+.
+<p><img src="/url" alt="foo" title="title" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![*foo* bar][]
+
+[*foo* bar]: /url "title"
+.
+<p><img src="/url" alt="foo bar" title="title" /></p>
+````````````````````````````````
+
+
+The labels are case-insensitive:
+
+```````````````````````````````` example
+![Foo][]
+
+[foo]: /url "title"
+.
+<p><img src="/url" alt="Foo" title="title" /></p>
+````````````````````````````````
+
+
+As with reference links, [whitespace] is not allowed
+between the two sets of brackets:
+
+```````````````````````````````` example
+![foo]
+[]
+
+[foo]: /url "title"
+.
+<p><img src="/url" alt="foo" title="title" />
+[]</p>
+````````````````````````````````
+
+
+Shortcut:
+
+```````````````````````````````` example
+![foo]
+
+[foo]: /url "title"
+.
+<p><img src="/url" alt="foo" title="title" /></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+![*foo* bar]
+
+[*foo* bar]: /url "title"
+.
+<p><img src="/url" alt="foo bar" title="title" /></p>
+````````````````````````````````
+
+
+Note that link labels cannot contain unescaped brackets:
+
+```````````````````````````````` example
+![[foo]]
+
+[[foo]]: /url "title"
+.
+<p>![[foo]]</p>
+<p>[[foo]]: /url &quot;title&quot;</p>
+````````````````````````````````
+
+
+The link labels are case-insensitive:
+
+```````````````````````````````` example
+![Foo]
+
+[foo]: /url "title"
+.
+<p><img src="/url" alt="Foo" title="title" /></p>
+````````````````````````````````
+
+
+If you just want a literal `!` followed by bracketed text, you can
+backslash-escape the opening `[`:
+
+```````````````````````````````` example
+!\[foo]
+
+[foo]: /url "title"
+.
+<p>![foo]</p>
+````````````````````````````````
+
+
+If you want a link after a literal `!`, backslash-escape the
+`!`:
+
+```````````````````````````````` example
+\![foo]
+
+[foo]: /url "title"
+.
+<p>!<a href="/url" title="title">foo</a></p>
+````````````````````````````````
+
+
+## Autolinks
+
+[Autolink](@)s are absolute URIs and email addresses inside
+`<` and `>`. They are parsed as links, with the URL or email address
+as the link label.
+
+A [URI autolink](@) consists of `<`, followed by an
+[absolute URI] followed by `>`. It is parsed as
+a link to the URI, with the URI as the link's label.
+
+An [absolute URI](@),
+for these purposes, consists of a [scheme] followed by a colon (`:`)
+followed by zero or more characters other than ASCII
+[whitespace] and control characters, `<`, and `>`. If
+the URI includes these characters, they must be percent-encoded
+(e.g. `%20` for a space).
+
+For purposes of this spec, a [scheme](@) is any sequence
+of 2--32 characters beginning with an ASCII letter and followed
+by any combination of ASCII letters, digits, or the symbols plus
+("+"), period ("."), or hyphen ("-").
+
+Here are some valid autolinks:
+
+```````````````````````````````` example
+<http://foo.bar.baz>
+.
+<p><a href="http://foo.bar.baz">http://foo.bar.baz</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<http://foo.bar.baz/test?q=hello&id=22&boolean>
+.
+<p><a href="http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean">http://foo.bar.baz/test?q=hello&amp;id=22&amp;boolean</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<irc://foo.bar:2233/baz>
+.
+<p><a href="irc://foo.bar:2233/baz">irc://foo.bar:2233/baz</a></p>
+````````````````````````````````
+
+
+Uppercase is also fine:
+
+```````````````````````````````` example
+<MAILTO:FOO@BAR.BAZ>
+.
+<p><a href="MAILTO:FOO@BAR.BAZ">MAILTO:FOO@BAR.BAZ</a></p>
+````````````````````````````````
+
+
+Note that many strings that count as [absolute URIs] for
+purposes of this spec are not valid URIs, because their
+schemes are not registered or because of other problems
+with their syntax:
+
+```````````````````````````````` example
+<a+b+c:d>
+.
+<p><a href="a+b+c:d">a+b+c:d</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<made-up-scheme://foo,bar>
+.
+<p><a href="made-up-scheme://foo,bar">made-up-scheme://foo,bar</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<http://../>
+.
+<p><a href="http://../">http://../</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<localhost:5001/foo>
+.
+<p><a href="localhost:5001/foo">localhost:5001/foo</a></p>
+````````````````````````````````
+
+
+Spaces are not allowed in autolinks:
+
+```````````````````````````````` example
+<http://foo.bar/baz bim>
+.
+<p>&lt;http://foo.bar/baz bim&gt;</p>
+````````````````````````````````
+
+
+Backslash-escapes do not work inside autolinks:
+
+```````````````````````````````` example
+<http://example.com/\[\>
+.
+<p><a href="http://example.com/%5C%5B%5C">http://example.com/\[\</a></p>
+````````````````````````````````
+
+
+An [email autolink](@)
+consists of `<`, followed by an [email address],
+followed by `>`. The link's label is the email address,
+and the URL is `mailto:` followed by the email address.
+
+An [email address](@),
+for these purposes, is anything that matches
+the [non-normative regex from the HTML5
+spec](https://html.spec.whatwg.org/multipage/forms.html#e-mail-state-(type=email)):
+
+ /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?
+ (?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/
+
+Examples of email autolinks:
+
+```````````````````````````````` example
+<foo@bar.example.com>
+.
+<p><a href="mailto:foo@bar.example.com">foo@bar.example.com</a></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<foo+special@Bar.baz-bar0.com>
+.
+<p><a href="mailto:foo+special@Bar.baz-bar0.com">foo+special@Bar.baz-bar0.com</a></p>
+````````````````````````````````
+
+
+Backslash-escapes do not work inside email autolinks:
+
+```````````````````````````````` example
+<foo\+@bar.example.com>
+.
+<p>&lt;foo+@bar.example.com&gt;</p>
+````````````````````````````````
+
+
+These are not autolinks:
+
+```````````````````````````````` example
+<>
+.
+<p>&lt;&gt;</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+< http://foo.bar >
+.
+<p>&lt; http://foo.bar &gt;</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<m:abc>
+.
+<p>&lt;m:abc&gt;</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<foo.bar.baz>
+.
+<p>&lt;foo.bar.baz&gt;</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+http://example.com
+.
+<p>http://example.com</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo@bar.example.com
+.
+<p>foo@bar.example.com</p>
+````````````````````````````````
+
+
+## Raw HTML
+
+Text between `<` and `>` that looks like an HTML tag is parsed as a
+raw HTML tag and will be rendered in HTML without escaping.
+Tag and attribute names are not limited to current HTML tags,
+so custom tags (and even, say, DocBook tags) may be used.
+
+Here is the grammar for tags:
+
+A [tag name](@) consists of an ASCII letter
+followed by zero or more ASCII letters, digits, or
+hyphens (`-`).
+
+An [attribute](@) consists of [whitespace],
+an [attribute name], and an optional
+[attribute value specification].
+
+An [attribute name](@)
+consists of an ASCII letter, `_`, or `:`, followed by zero or more ASCII
+letters, digits, `_`, `.`, `:`, or `-`. (Note: This is the XML
+specification restricted to ASCII. HTML5 is laxer.)
+
+An [attribute value specification](@)
+consists of optional [whitespace],
+a `=` character, optional [whitespace], and an [attribute
+value].
+
+An [attribute value](@)
+consists of an [unquoted attribute value],
+a [single-quoted attribute value], or a [double-quoted attribute value].
+
+An [unquoted attribute value](@)
+is a nonempty string of characters not
+including [whitespace], `"`, `'`, `=`, `<`, `>`, or `` ` ``.
+
+A [single-quoted attribute value](@)
+consists of `'`, zero or more
+characters not including `'`, and a final `'`.
+
+A [double-quoted attribute value](@)
+consists of `"`, zero or more
+characters not including `"`, and a final `"`.
+
+An [open tag](@) consists of a `<` character, a [tag name],
+zero or more [attributes], optional [whitespace], an optional `/`
+character, and a `>` character.
+
+A [closing tag](@) consists of the string `</`, a
+[tag name], optional [whitespace], and the character `>`.
+
+An [HTML comment](@) consists of `<!--` + *text* + `-->`,
+where *text* does not start with `>` or `->`, does not end with `-`,
+and does not contain `--`. (See the
+[HTML5 spec](http://www.w3.org/TR/html5/syntax.html#comments).)
+
+A [processing instruction](@)
+consists of the string `<?`, a string
+of characters not including the string `?>`, and the string
+`?>`.
+
+A [declaration](@) consists of the
+string `<!`, a name consisting of one or more uppercase ASCII letters,
+[whitespace], a string of characters not including the
+character `>`, and the character `>`.
+
+A [CDATA section](@) consists of
+the string `<![CDATA[`, a string of characters not including the string
+`]]>`, and the string `]]>`.
+
+An [HTML tag](@) consists of an [open tag], a [closing tag],
+an [HTML comment], a [processing instruction], a [declaration],
+or a [CDATA section].
+
+Here are some simple open tags:
+
+```````````````````````````````` example
+<a><bab><c2c>
+.
+<p><a><bab><c2c></p>
+````````````````````````````````
+
+
+Empty elements:
+
+```````````````````````````````` example
+<a/><b2/>
+.
+<p><a/><b2/></p>
+````````````````````````````````
+
+
+[Whitespace] is allowed:
+
+```````````````````````````````` example
+<a /><b2
+data="foo" >
+.
+<p><a /><b2
+data="foo" ></p>
+````````````````````````````````
+
+
+With attributes:
+
+```````````````````````````````` example
+<a foo="bar" bam = 'baz <em>"</em>'
+_boolean zoop:33=zoop:33 />
+.
+<p><a foo="bar" bam = 'baz <em>"</em>'
+_boolean zoop:33=zoop:33 /></p>
+````````````````````````````````
+
+
+Custom tag names can be used:
+
+```````````````````````````````` example
+Foo <responsive-image src="foo.jpg" />
+.
+<p>Foo <responsive-image src="foo.jpg" /></p>
+````````````````````````````````
+
+
+Illegal tag names, not parsed as HTML:
+
+```````````````````````````````` example
+<33> <__>
+.
+<p>&lt;33&gt; &lt;__&gt;</p>
+````````````````````````````````
+
+
+Illegal attribute names:
+
+```````````````````````````````` example
+<a h*#ref="hi">
+.
+<p>&lt;a h*#ref=&quot;hi&quot;&gt;</p>
+````````````````````````````````
+
+
+Illegal attribute values:
+
+```````````````````````````````` example
+<a href="hi'> <a href=hi'>
+.
+<p>&lt;a href=&quot;hi'&gt; &lt;a href=hi'&gt;</p>
+````````````````````````````````
+
+
+Illegal [whitespace]:
+
+```````````````````````````````` example
+< a><
+foo><bar/ >
+<foo bar=baz
+bim!bop />
+.
+<p>&lt; a&gt;&lt;
+foo&gt;&lt;bar/ &gt;
+&lt;foo bar=baz
+bim!bop /&gt;</p>
+````````````````````````````````
+
+
+Missing [whitespace]:
+
+```````````````````````````````` example
+<a href='bar'title=title>
+.
+<p>&lt;a href='bar'title=title&gt;</p>
+````````````````````````````````
+
+
+Closing tags:
+
+```````````````````````````````` example
+</a></foo >
+.
+<p></a></foo ></p>
+````````````````````````````````
+
+
+Illegal attributes in closing tag:
+
+```````````````````````````````` example
+</a href="foo">
+.
+<p>&lt;/a href=&quot;foo&quot;&gt;</p>
+````````````````````````````````
+
+
+Comments:
+
+```````````````````````````````` example
+foo <!-- this is a
+comment - with hyphen -->
+.
+<p>foo <!-- this is a
+comment - with hyphen --></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo <!-- not a comment -- two hyphens -->
+.
+<p>foo &lt;!-- not a comment -- two hyphens --&gt;</p>
+````````````````````````````````
+
+
+Not comments:
+
+```````````````````````````````` example
+foo <!--> foo -->
+
+foo <!-- foo--->
+.
+<p>foo &lt;!--&gt; foo --&gt;</p>
+<p>foo &lt;!-- foo---&gt;</p>
+````````````````````````````````
+
+
+Processing instructions:
+
+```````````````````````````````` example
+foo <?php echo $a; ?>
+.
+<p>foo <?php echo $a; ?></p>
+````````````````````````````````
+
+
+Declarations:
+
+```````````````````````````````` example
+foo <!ELEMENT br EMPTY>
+.
+<p>foo <!ELEMENT br EMPTY></p>
+````````````````````````````````
+
+
+CDATA sections:
+
+```````````````````````````````` example
+foo <![CDATA[>&<]]>
+.
+<p>foo <![CDATA[>&<]]></p>
+````````````````````````````````
+
+
+Entity and numeric character references are preserved in HTML
+attributes:
+
+```````````````````````````````` example
+foo <a href="&ouml;">
+.
+<p>foo <a href="&ouml;"></p>
+````````````````````````````````
+
+
+Backslash escapes do not work in HTML attributes:
+
+```````````````````````````````` example
+foo <a href="\*">
+.
+<p>foo <a href="\*"></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<a href="\"">
+.
+<p>&lt;a href=&quot;&quot;&quot;&gt;</p>
+````````````````````````````````
+
+
+## Hard line breaks
+
+A line break (not in a code span or HTML tag) that is preceded
+by two or more spaces and does not occur at the end of a block
+is parsed as a [hard line break](@) (rendered
+in HTML as a `<br />` tag):
+
+```````````````````````````````` example
+foo
+baz
+.
+<p>foo<br />
+baz</p>
+````````````````````````````````
+
+
+For a more visible alternative, a backslash before the
+[line ending] may be used instead of two spaces:
+
+```````````````````````````````` example
+foo\
+baz
+.
+<p>foo<br />
+baz</p>
+````````````````````````````````
+
+
+More than two spaces can be used:
+
+```````````````````````````````` example
+foo
+baz
+.
+<p>foo<br />
+baz</p>
+````````````````````````````````
+
+
+Leading spaces at the beginning of the next line are ignored:
+
+```````````````````````````````` example
+foo
+ bar
+.
+<p>foo<br />
+bar</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo\
+ bar
+.
+<p>foo<br />
+bar</p>
+````````````````````````````````
+
+
+Line breaks can occur inside emphasis, links, and other constructs
+that allow inline content:
+
+```````````````````````````````` example
+*foo
+bar*
+.
+<p><em>foo<br />
+bar</em></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+*foo\
+bar*
+.
+<p><em>foo<br />
+bar</em></p>
+````````````````````````````````
+
+
+Line breaks do not occur inside code spans
+
+```````````````````````````````` example
+`code
+span`
+.
+<p><code>code span</code></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+`code\
+span`
+.
+<p><code>code\ span</code></p>
+````````````````````````````````
+
+
+or HTML tags:
+
+```````````````````````````````` example
+<a href="foo
+bar">
+.
+<p><a href="foo
+bar"></p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+<a href="foo\
+bar">
+.
+<p><a href="foo\
+bar"></p>
+````````````````````````````````
+
+
+Hard line breaks are for separating inline content within a block.
+Neither syntax for hard line breaks works at the end of a paragraph or
+other block element:
+
+```````````````````````````````` example
+foo\
+.
+<p>foo\</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+foo
+.
+<p>foo</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+### foo\
+.
+<h3>foo\</h3>
+````````````````````````````````
+
+
+```````````````````````````````` example
+### foo
+.
+<h3>foo</h3>
+````````````````````````````````
+
+
+## Soft line breaks
+
+A regular line break (not in a code span or HTML tag) that is not
+preceded by two or more spaces or a backslash is parsed as a
+[softbreak](@). (A softbreak may be rendered in HTML either as a
+[line ending] or as a space. The result will be the same in
+browsers. In the examples here, a [line ending] will be used.)
+
+```````````````````````````````` example
+foo
+baz
+.
+<p>foo
+baz</p>
+````````````````````````````````
+
+
+Spaces at the end of the line and beginning of the next line are
+removed:
+
+```````````````````````````````` example
+foo
+ baz
+.
+<p>foo
+baz</p>
+````````````````````````````````
+
+
+A conforming parser may render a soft line break in HTML either as a
+line break or as a space.
+
+A renderer may also provide an option to render soft line breaks
+as hard line breaks.
+
+## Textual content
+
+Any characters not given an interpretation by the above rules will
+be parsed as plain textual content.
+
+```````````````````````````````` example
+hello $.;'there
+.
+<p>hello $.;'there</p>
+````````````````````````````````
+
+
+```````````````````````````````` example
+Foo χρῆν
+.
+<p>Foo χρῆν</p>
+````````````````````````````````
+
+
+Internal spaces are preserved verbatim:
+
+```````````````````````````````` example
+Multiple spaces
+.
+<p>Multiple spaces</p>
+````````````````````````````````
+
+
+<!-- END TESTS -->
+
+# Appendix: A parsing strategy
+
+In this appendix we describe some features of the parsing strategy
+used in the CommonMark reference implementations.
+
+## Overview
+
+Parsing has two phases:
+
+1. In the first phase, lines of input are consumed and the block
+structure of the document---its division into paragraphs, block quotes,
+list items, and so on---is constructed. Text is assigned to these
+blocks but not parsed. Link reference definitions are parsed and a
+map of links is constructed.
+
+2. In the second phase, the raw text contents of paragraphs and headings
+are parsed into sequences of Markdown inline elements (strings,
+code spans, links, emphasis, and so on), using the map of link
+references constructed in phase 1.
+
+At each point in processing, the document is represented as a tree of
+**blocks**. The root of the tree is a `document` block. The `document`
+may have any number of other blocks as **children**. These children
+may, in turn, have other blocks as children. The last child of a block
+is normally considered **open**, meaning that subsequent lines of input
+can alter its contents. (Blocks that are not open are **closed**.)
+Here, for example, is a possible document tree, with the open blocks
+marked by arrows:
+
+``` tree
+-> document
+ -> block_quote
+ paragraph
+ "Lorem ipsum dolor\nsit amet."
+ -> list (type=bullet tight=true bullet_char=-)
+ list_item
+ paragraph
+ "Qui *quodsi iracundia*"
+ -> list_item
+ -> paragraph
+ "aliquando id"
+```
+
+## Phase 1: block structure
+
+Each line that is processed has an effect on this tree. The line is
+analyzed and, depending on its contents, the document may be altered
+in one or more of the following ways:
+
+1. One or more open blocks may be closed.
+2. One or more new blocks may be created as children of the
+ last open block.
+3. Text may be added to the last (deepest) open block remaining
+ on the tree.
+
+Once a line has been incorporated into the tree in this way,
+it can be discarded, so input can be read in a stream.
+
+For each line, we follow this procedure:
+
+1. First we iterate through the open blocks, starting with the
+root document, and descending through last children down to the last
+open block. Each block imposes a condition that the line must satisfy
+if the block is to remain open. For example, a block quote requires a
+`>` character. A paragraph requires a non-blank line.
+In this phase we may match all or just some of the open
+blocks. But we cannot close unmatched blocks yet, because we may have a
+[lazy continuation line].
+
+2. Next, after consuming the continuation markers for existing
+blocks, we look for new block starts (e.g. `>` for a block quote).
+If we encounter a new block start, we close any blocks unmatched
+in step 1 before creating the new block as a child of the last
+matched block.
+
+3. Finally, we look at the remainder of the line (after block
+markers like `>`, list markers, and indentation have been consumed).
+This is text that can be incorporated into the last open
+block (a paragraph, code block, heading, or raw HTML).
+
+Setext headings are formed when we see a line of a paragraph
+that is a [setext heading underline].
+
+Reference link definitions are detected when a paragraph is closed;
+the accumulated text lines are parsed to see if they begin with
+one or more reference link definitions. Any remainder becomes a
+normal paragraph.
+
+We can see how this works by considering how the tree above is
+generated by four lines of Markdown:
+
+``` markdown
+> Lorem ipsum dolor
+sit amet.
+> - Qui *quodsi iracundia*
+> - aliquando id
+```
+
+At the outset, our document model is just
+
+``` tree
+-> document
+```
+
+The first line of our text,
+
+``` markdown
+> Lorem ipsum dolor
+```
+
+causes a `block_quote` block to be created as a child of our
+open `document` block, and a `paragraph` block as a child of
+the `block_quote`. Then the text is added to the last open
+block, the `paragraph`:
+
+``` tree
+-> document
+ -> block_quote
+ -> paragraph
+ "Lorem ipsum dolor"
+```
+
+The next line,
+
+``` markdown
+sit amet.
+```
+
+is a "lazy continuation" of the open `paragraph`, so it gets added
+to the paragraph's text:
+
+``` tree
+-> document
+ -> block_quote
+ -> paragraph
+ "Lorem ipsum dolor\nsit amet."
+```
+
+The third line,
+
+``` markdown
+> - Qui *quodsi iracundia*
+```
+
+causes the `paragraph` block to be closed, and a new `list` block
+opened as a child of the `block_quote`. A `list_item` is also
+added as a child of the `list`, and a `paragraph` as a child of
+the `list_item`. The text is then added to the new `paragraph`:
+
+``` tree
+-> document
+ -> block_quote
+ paragraph
+ "Lorem ipsum dolor\nsit amet."
+ -> list (type=bullet tight=true bullet_char=-)
+ -> list_item
+ -> paragraph
+ "Qui *quodsi iracundia*"
+```
+
+The fourth line,
+
+``` markdown
+> - aliquando id
+```
+
+causes the `list_item` (and its child the `paragraph`) to be closed,
+and a new `list_item` opened up as child of the `list`. A `paragraph`
+is added as a child of the new `list_item`, to contain the text.
+We thus obtain the final tree:
+
+``` tree
+-> document
+ -> block_quote
+ paragraph
+ "Lorem ipsum dolor\nsit amet."
+ -> list (type=bullet tight=true bullet_char=-)
+ list_item
+ paragraph
+ "Qui *quodsi iracundia*"
+ -> list_item
+ -> paragraph
+ "aliquando id"
+```
+
+## Phase 2: inline structure
+
+Once all of the input has been parsed, all open blocks are closed.
+
+We then "walk the tree," visiting every node, and parse raw
+string contents of paragraphs and headings as inlines. At this
+point we have seen all the link reference definitions, so we can
+resolve reference links as we go.
+
+``` tree
+document
+ block_quote
+ paragraph
+ str "Lorem ipsum dolor"
+ softbreak
+ str "sit amet."
+ list (type=bullet tight=true bullet_char=-)
+ list_item
+ paragraph
+ str "Qui "
+ emph
+ str "quodsi iracundia"
+ list_item
+ paragraph
+ str "aliquando id"
+```
+
+Notice how the [line ending] in the first paragraph has
+been parsed as a `softbreak`, and the asterisks in the first list item
+have become an `emph`.
+
+### An algorithm for parsing nested emphasis and links
+
+By far the trickiest part of inline parsing is handling emphasis,
+strong emphasis, links, and images. This is done using the following
+algorithm.
+
+When we're parsing inlines and we hit either
+
+- a run of `*` or `_` characters, or
+- a `[` or `![`
+
+we insert a text node with these symbols as its literal content, and we
+add a pointer to this text node to the [delimiter stack](@).
+
+The [delimiter stack] is a doubly linked list. Each
+element contains a pointer to a text node, plus information about
+
+- the type of delimiter (`[`, `![`, `*`, `_`)
+- the number of delimiters,
+- whether the delimiter is "active" (all are active to start), and
+- whether the delimiter is a potential opener, a potential closer,
+ or both (which depends on what sort of characters precede
+ and follow the delimiters).
+
+When we hit a `]` character, we call the *look for link or image*
+procedure (see below).
+
+When we hit the end of the input, we call the *process emphasis*
+procedure (see below), with `stack_bottom` = NULL.
+
+#### *look for link or image*
+
+Starting at the top of the delimiter stack, we look backwards
+through the stack for an opening `[` or `![` delimiter.
+
+- If we don't find one, we return a literal text node `]`.
+
+- If we do find one, but it's not *active*, we remove the inactive
+ delimiter from the stack, and return a literal text node `]`.
+
+- If we find one and it's active, then we parse ahead to see if
+ we have an inline link/image, reference link/image, compact reference
+ link/image, or shortcut reference link/image.
+
+ + If we don't, then we remove the opening delimiter from the
+ delimiter stack and return a literal text node `]`.
+
+ + If we do, then
+
+ * We return a link or image node whose children are the inlines
+ after the text node pointed to by the opening delimiter.
+
+ * We run *process emphasis* on these inlines, with the `[` opener
+ as `stack_bottom`.
+
+ * We remove the opening delimiter.
+
+ * If we have a link (and not an image), we also set all
+ `[` delimiters before the opening delimiter to *inactive*. (This
+ will prevent us from getting links within links.)
+
+#### *process emphasis*
+
+Parameter `stack_bottom` sets a lower bound to how far we
+descend in the [delimiter stack]. If it is NULL, we can
+go all the way to the bottom. Otherwise, we stop before
+visiting `stack_bottom`.
+
+Let `current_position` point to the element on the [delimiter stack]
+just above `stack_bottom` (or the first element if `stack_bottom`
+is NULL).
+
+We keep track of the `openers_bottom` for each delimiter
+type (`*`, `_`) and each length of the closing delimiter run
+(modulo 3). Initialize this to `stack_bottom`.
+
+Then we repeat the following until we run out of potential
+closers:
+
+- Move `current_position` forward in the delimiter stack (if needed)
+ until we find the first potential closer with delimiter `*` or `_`.
+ (This will be the potential closer closest
+ to the beginning of the input -- the first one in parse order.)
+
+- Now, look back in the stack (staying above `stack_bottom` and
+ the `openers_bottom` for this delimiter type) for the
+ first matching potential opener ("matching" means same delimiter).
+
+- If one is found:
+
+ + Figure out whether we have emphasis or strong emphasis:
+ if both closer and opener spans have length >= 2, we have
+ strong, otherwise regular.
+
+ + Insert an emph or strong emph node accordingly, after
+ the text node corresponding to the opener.
+
+ + Remove any delimiters between the opener and closer from
+ the delimiter stack.
+
+ + Remove 1 (for regular emph) or 2 (for strong emph) delimiters
+ from the opening and closing text nodes. If they become empty
+ as a result, remove them and remove the corresponding element
+ of the delimiter stack. If the closing node is removed, reset
+ `current_position` to the next element in the stack.
+
+- If none is found:
+
+ + Set `openers_bottom` to the element before `current_position`.
+ (We know that there are no openers for this kind of closer up to and
+ including this point, so this puts a lower bound on future searches.)
+
+ + If the closer at `current_position` is not a potential opener,
+ remove it from the delimiter stack (since we know it can't
+ be a closer either).
+
+ + Advance `current_position` to the next element in the stack.
+
+After we're done, we remove all delimiters above `stack_bottom` from the
+delimiter stack.
+
diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go
new file mode 100644
index 000000000..46fa35deb
--- /dev/null
+++ b/hugolib/testhelpers_test.go
@@ -0,0 +1,1117 @@
+package hugolib
+
+import (
+ "bytes"
+ "fmt"
+ "image/jpeg"
+ "io"
+ "io/fs"
+ "math/rand"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "testing"
+ "text/template"
+ "time"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/gohugoio/hugo/output"
+
+ "github.com/gohugoio/hugo/parser/metadecoders"
+ "github.com/google/go-cmp/cmp"
+
+ "github.com/gohugoio/hugo/parser"
+
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/sanity-io/litter"
+ "github.com/spf13/afero"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+var (
+ deepEqualsPages = qt.CmpEquals(cmp.Comparer(func(p1, p2 *pageState) bool { return p1 == p2 }))
+ deepEqualsOutputFormats = qt.CmpEquals(cmp.Comparer(func(o1, o2 output.Format) bool {
+ return o1.Name == o2.Name && o1.MediaType.Type() == o2.MediaType.Type()
+ }))
+)
+
+type sitesBuilder struct {
+ Cfg config.Provider
+ environ []string
+
+ Fs *hugofs.Fs
+ T testing.TB
+ depsCfg deps.DepsCfg
+
+ *qt.C
+
+ logger loggers.Logger
+ rnd *rand.Rand
+ dumper litter.Options
+
+ // Used to test partial rebuilds.
+ changedFiles []string
+ removedFiles []string
+
+ // Aka the Hugo server mode.
+ running bool
+
+ H *HugoSites
+
+ theme string
+
+ // Default toml
+ configFormat string
+ configFileSet bool
+ configSet bool
+
+ // Default is empty.
+ // TODO(bep) revisit this and consider always setting it to something.
+ // Consider this in relation to using the BaseFs.PublishFs to all publishing.
+ workingDir string
+
+ addNothing bool
+ // Base data/content
+ contentFilePairs []filenameContent
+ templateFilePairs []filenameContent
+ i18nFilePairs []filenameContent
+ dataFilePairs []filenameContent
+
+ // Additional data/content.
+ // As in "use the base, but add these on top".
+ contentFilePairsAdded []filenameContent
+ templateFilePairsAdded []filenameContent
+ i18nFilePairsAdded []filenameContent
+ dataFilePairsAdded []filenameContent
+}
+
+type filenameContent struct {
+ filename string
+ content string
+}
+
+func newTestSitesBuilder(t testing.TB) *sitesBuilder {
+ v := config.NewWithTestDefaults()
+ fs := hugofs.NewMem(v)
+
+ litterOptions := litter.Options{
+ HidePrivateFields: true,
+ StripPackageNames: true,
+ Separator: " ",
+ }
+
+ return &sitesBuilder{
+ T: t, C: qt.New(t), Fs: fs, configFormat: "toml",
+ dumper: litterOptions, rnd: rand.New(rand.NewSource(time.Now().Unix())),
+ }
+}
+
+func newTestSitesBuilderFromDepsCfg(t testing.TB, d deps.DepsCfg) *sitesBuilder {
+ c := qt.New(t)
+
+ litterOptions := litter.Options{
+ HidePrivateFields: true,
+ StripPackageNames: true,
+ Separator: " ",
+ }
+
+ b := &sitesBuilder{T: t, C: c, depsCfg: d, Fs: d.Fs, dumper: litterOptions, rnd: rand.New(rand.NewSource(time.Now().Unix()))}
+ workingDir := d.Cfg.GetString("workingDir")
+
+ b.WithWorkingDir(workingDir)
+
+ return b.WithViper(d.Cfg.(config.Provider))
+}
+
+func (s *sitesBuilder) Running() *sitesBuilder {
+ s.running = true
+ return s
+}
+
+func (s *sitesBuilder) WithNothingAdded() *sitesBuilder {
+ s.addNothing = true
+ return s
+}
+
+func (s *sitesBuilder) WithLogger(logger loggers.Logger) *sitesBuilder {
+ s.logger = logger
+ return s
+}
+
+func (s *sitesBuilder) WithWorkingDir(dir string) *sitesBuilder {
+ s.workingDir = filepath.FromSlash(dir)
+ return s
+}
+
+func (s *sitesBuilder) WithEnviron(env ...string) *sitesBuilder {
+ for i := 0; i < len(env); i += 2 {
+ s.environ = append(s.environ, fmt.Sprintf("%s=%s", env[i], env[i+1]))
+ }
+ return s
+}
+
+func (s *sitesBuilder) WithConfigTemplate(data any, format, configTemplate string) *sitesBuilder {
+ s.T.Helper()
+
+ if format == "" {
+ format = "toml"
+ }
+
+ templ, err := template.New("test").Parse(configTemplate)
+ if err != nil {
+ s.Fatalf("Template parse failed: %s", err)
+ }
+ var b bytes.Buffer
+ templ.Execute(&b, data)
+ return s.WithConfigFile(format, b.String())
+}
+
+func (s *sitesBuilder) WithViper(v config.Provider) *sitesBuilder {
+ s.T.Helper()
+ if s.configFileSet {
+ s.T.Fatal("WithViper: use Viper or config.toml, not both")
+ }
+ defer func() {
+ s.configSet = true
+ }()
+
+ // Write to a config file to make sure the tests follow the same code path.
+ var buff bytes.Buffer
+ m := v.Get("").(maps.Params)
+ s.Assert(parser.InterfaceToConfig(m, metadecoders.TOML, &buff), qt.IsNil)
+ return s.WithConfigFile("toml", buff.String())
+}
+
+func (s *sitesBuilder) WithConfigFile(format, conf string) *sitesBuilder {
+ s.T.Helper()
+ if s.configSet {
+ s.T.Fatal("WithConfigFile: use config.Config or config.toml, not both")
+ }
+ s.configFileSet = true
+ filename := s.absFilename("config." + format)
+ writeSource(s.T, s.Fs, filename, conf)
+ s.configFormat = format
+ return s
+}
+
+func (s *sitesBuilder) WithThemeConfigFile(format, conf string) *sitesBuilder {
+ s.T.Helper()
+ if s.theme == "" {
+ s.theme = "test-theme"
+ }
+ filename := filepath.Join("themes", s.theme, "config."+format)
+ writeSource(s.T, s.Fs, s.absFilename(filename), conf)
+ return s
+}
+
+func (s *sitesBuilder) WithSourceFile(filenameContent ...string) *sitesBuilder {
+ s.T.Helper()
+ for i := 0; i < len(filenameContent); i += 2 {
+ writeSource(s.T, s.Fs, s.absFilename(filenameContent[i]), filenameContent[i+1])
+ }
+ return s
+}
+
+func (s *sitesBuilder) absFilename(filename string) string {
+ filename = filepath.FromSlash(filename)
+ if filepath.IsAbs(filename) {
+ return filename
+ }
+ if s.workingDir != "" && !strings.HasPrefix(filename, s.workingDir) {
+ filename = filepath.Join(s.workingDir, filename)
+ }
+ return filename
+}
+
+const commonConfigSections = `
+
+[services]
+[services.disqus]
+shortname = "disqus_shortname"
+[services.googleAnalytics]
+id = "UA-ga_id"
+
+[privacy]
+[privacy.disqus]
+disable = false
+[privacy.googleAnalytics]
+respectDoNotTrack = true
+anonymizeIP = true
+[privacy.instagram]
+simple = true
+[privacy.twitter]
+enableDNT = true
+[privacy.vimeo]
+disable = false
+[privacy.youtube]
+disable = false
+privacyEnhanced = true
+
+`
+
+func (s *sitesBuilder) WithSimpleConfigFile() *sitesBuilder {
+ s.T.Helper()
+ return s.WithSimpleConfigFileAndBaseURL("http://example.com/")
+}
+
+func (s *sitesBuilder) WithSimpleConfigFileAndBaseURL(baseURL string) *sitesBuilder {
+ s.T.Helper()
+ return s.WithSimpleConfigFileAndSettings(map[string]any{"baseURL": baseURL})
+}
+
+func (s *sitesBuilder) WithSimpleConfigFileAndSettings(settings any) *sitesBuilder {
+ s.T.Helper()
+ var buf bytes.Buffer
+ parser.InterfaceToConfig(settings, metadecoders.TOML, &buf)
+ config := buf.String() + commonConfigSections
+ return s.WithConfigFile("toml", config)
+}
+
+func (s *sitesBuilder) WithDefaultMultiSiteConfig() *sitesBuilder {
+ defaultMultiSiteConfig := `
+baseURL = "http://example.com/blog"
+
+paginate = 1
+disablePathToLower = true
+defaultContentLanguage = "en"
+defaultContentLanguageInSubdir = true
+
+[permalinks]
+other = "/somewhere/else/:filename"
+
+[Taxonomies]
+tag = "tags"
+
+[Languages]
+[Languages.en]
+weight = 10
+title = "In English"
+languageName = "English"
+[[Languages.en.menu.main]]
+url = "/"
+name = "Home"
+weight = 0
+
+[Languages.fr]
+weight = 20
+title = "Le Français"
+languageName = "Français"
+[Languages.fr.Taxonomies]
+plaque = "plaques"
+
+[Languages.nn]
+weight = 30
+title = "På nynorsk"
+languageName = "Nynorsk"
+paginatePath = "side"
+[Languages.nn.Taxonomies]
+lag = "lag"
+[[Languages.nn.menu.main]]
+url = "/"
+name = "Heim"
+weight = 1
+
+[Languages.nb]
+weight = 40
+title = "På bokmål"
+languageName = "Bokmål"
+paginatePath = "side"
+[Languages.nb.Taxonomies]
+lag = "lag"
+` + commonConfigSections
+
+ return s.WithConfigFile("toml", defaultMultiSiteConfig)
+}
+
+func (s *sitesBuilder) WithSunset(in string) {
+ // Write a real image into one of the bundle above.
+ src, err := os.Open(filepath.FromSlash("testdata/sunset.jpg"))
+ s.Assert(err, qt.IsNil)
+
+ out, err := s.Fs.Source.Create(filepath.FromSlash(filepath.Join(s.workingDir, in)))
+ s.Assert(err, qt.IsNil)
+
+ _, err = io.Copy(out, src)
+ s.Assert(err, qt.IsNil)
+
+ out.Close()
+ src.Close()
+}
+
+func (s *sitesBuilder) createFilenameContent(pairs []string) []filenameContent {
+ var slice []filenameContent
+ s.appendFilenameContent(&slice, pairs...)
+ return slice
+}
+
+func (s *sitesBuilder) appendFilenameContent(slice *[]filenameContent, pairs ...string) {
+ if len(pairs)%2 != 0 {
+ panic("file content mismatch")
+ }
+ for i := 0; i < len(pairs); i += 2 {
+ c := filenameContent{
+ filename: pairs[i],
+ content: pairs[i+1],
+ }
+ *slice = append(*slice, c)
+ }
+}
+
+func (s *sitesBuilder) WithContent(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.contentFilePairs, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithContentAdded(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.contentFilePairsAdded, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithTemplates(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.templateFilePairs, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithTemplatesAdded(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.templateFilePairsAdded, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithData(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.dataFilePairs, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithDataAdded(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.dataFilePairsAdded, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithI18n(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.i18nFilePairs, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) WithI18nAdded(filenameContent ...string) *sitesBuilder {
+ s.appendFilenameContent(&s.i18nFilePairsAdded, filenameContent...)
+ return s
+}
+
+func (s *sitesBuilder) EditFiles(filenameContent ...string) *sitesBuilder {
+ for i := 0; i < len(filenameContent); i += 2 {
+ filename, content := filepath.FromSlash(filenameContent[i]), filenameContent[i+1]
+ absFilename := s.absFilename(filename)
+ s.changedFiles = append(s.changedFiles, absFilename)
+ writeSource(s.T, s.Fs, absFilename, content)
+
+ }
+ return s
+}
+
+func (s *sitesBuilder) RemoveFiles(filenames ...string) *sitesBuilder {
+ for _, filename := range filenames {
+ absFilename := s.absFilename(filename)
+ s.removedFiles = append(s.removedFiles, absFilename)
+ s.Assert(s.Fs.Source.Remove(absFilename), qt.IsNil)
+ }
+ return s
+}
+
+func (s *sitesBuilder) writeFilePairs(folder string, files []filenameContent) *sitesBuilder {
+ // We have had some "filesystem ordering" bugs that we have not discovered in
+ // our tests running with the in memory filesystem.
+ // That file system is backed by a map so not sure how this helps, but some
+ // randomness in tests doesn't hurt.
+ // TODO(bep) this turns out to be more confusing than helpful.
+ // s.rnd.Shuffle(len(files), func(i, j int) { files[i], files[j] = files[j], files[i] })
+
+ for _, fc := range files {
+ target := folder
+ // TODO(bep) clean up this magic.
+ if strings.HasPrefix(fc.filename, folder) {
+ target = ""
+ }
+
+ if s.workingDir != "" {
+ target = filepath.Join(s.workingDir, target)
+ }
+
+ writeSource(s.T, s.Fs, filepath.Join(target, fc.filename), fc.content)
+ }
+ return s
+}
+
+func (s *sitesBuilder) CreateSites() *sitesBuilder {
+ if err := s.CreateSitesE(); err != nil {
+ s.Fatalf("Failed to create sites: %s", err)
+ }
+
+ s.Assert(s.Fs.PublishDir, qt.IsNotNil)
+ s.Assert(s.Fs.WorkingDirReadOnly, qt.IsNotNil)
+
+ return s
+}
+
+func (s *sitesBuilder) LoadConfig() error {
+ if !s.configFileSet {
+ s.WithSimpleConfigFile()
+ }
+
+ cfg, _, err := LoadConfig(ConfigSourceDescriptor{
+ WorkingDir: s.workingDir,
+ Fs: s.Fs.Source,
+ Logger: s.logger,
+ Environ: s.environ,
+ Filename: "config." + s.configFormat,
+ }, func(cfg config.Provider) error {
+ return nil
+ })
+ if err != nil {
+ return err
+ }
+
+ s.Cfg = cfg
+
+ return nil
+}
+
+func (s *sitesBuilder) CreateSitesE() error {
+ if !s.addNothing {
+ if _, ok := s.Fs.Source.(*afero.OsFs); ok {
+ for _, dir := range []string{
+ "content/sect",
+ "layouts/_default",
+ "layouts/_default/_markup",
+ "layouts/partials",
+ "layouts/shortcodes",
+ "data",
+ "i18n",
+ } {
+ if err := os.MkdirAll(filepath.Join(s.workingDir, dir), 0777); err != nil {
+ return fmt.Errorf("failed to create %q: %w", dir, err)
+ }
+ }
+ }
+
+ s.addDefaults()
+ s.writeFilePairs("content", s.contentFilePairsAdded)
+ s.writeFilePairs("layouts", s.templateFilePairsAdded)
+ s.writeFilePairs("data", s.dataFilePairsAdded)
+ s.writeFilePairs("i18n", s.i18nFilePairsAdded)
+
+ s.writeFilePairs("i18n", s.i18nFilePairs)
+ s.writeFilePairs("data", s.dataFilePairs)
+ s.writeFilePairs("content", s.contentFilePairs)
+ s.writeFilePairs("layouts", s.templateFilePairs)
+
+ }
+
+ if err := s.LoadConfig(); err != nil {
+ return fmt.Errorf("failed to load config: %w", err)
+ }
+
+ s.Fs.PublishDir = hugofs.NewCreateCountingFs(s.Fs.PublishDir)
+
+ depsCfg := s.depsCfg
+ depsCfg.Fs = s.Fs
+ depsCfg.Cfg = s.Cfg
+ depsCfg.Logger = s.logger
+ depsCfg.Running = s.running
+
+ sites, err := NewHugoSites(depsCfg)
+ if err != nil {
+ return fmt.Errorf("failed to create sites: %w", err)
+ }
+ s.H = sites
+
+ return nil
+}
+
+func (s *sitesBuilder) BuildE(cfg BuildCfg) error {
+ if s.H == nil {
+ s.CreateSites()
+ }
+
+ return s.H.Build(cfg)
+}
+
+func (s *sitesBuilder) Build(cfg BuildCfg) *sitesBuilder {
+ s.T.Helper()
+ return s.build(cfg, false)
+}
+
+func (s *sitesBuilder) BuildFail(cfg BuildCfg) *sitesBuilder {
+ s.T.Helper()
+ return s.build(cfg, true)
+}
+
+func (s *sitesBuilder) changeEvents() []fsnotify.Event {
+ var events []fsnotify.Event
+
+ for _, v := range s.changedFiles {
+ events = append(events, fsnotify.Event{
+ Name: v,
+ Op: fsnotify.Write,
+ })
+ }
+ for _, v := range s.removedFiles {
+ events = append(events, fsnotify.Event{
+ Name: v,
+ Op: fsnotify.Remove,
+ })
+ }
+
+ return events
+}
+
// build creates the sites if needed, runs a build with the accumulated change
// events, and verifies the outcome against shouldFail. The recorded changed
// files are always cleared afterwards.
func (s *sitesBuilder) build(cfg BuildCfg, shouldFail bool) *sitesBuilder {
	s.Helper()
	defer func() {
		s.changedFiles = nil
	}()

	if s.H == nil {
		s.CreateSites()
	}

	err := s.H.Build(cfg, s.changeEvents()...)

	if err == nil {
		// A build that succeeded but logged errors is still treated as failed.
		logErrorCount := s.H.NumLogErrors()
		if logErrorCount > 0 {
			err = fmt.Errorf("logged %d errors", logErrorCount)
		}
	}
	if err != nil && !shouldFail {
		s.Fatalf("Build failed: %s", err)
	} else if err == nil && shouldFail {
		s.Fatalf("Expected error")
	}

	return s
}
+
// addDefaults registers a default multilingual fixture (content, templates,
// data and i18n bundles) for every category that the test has not populated
// explicitly. Explicitly registered pairs always win over the defaults.
func (s *sitesBuilder) addDefaults() {
	var (
		contentTemplate = `---
title: doc1
weight: 1
tags:
 - tag1
date: "2018-02-28"
---
# doc1
*some "content"*
{{< shortcode >}}
{{< lingo >}}
`

		// The same document in four languages (en, fr, nb, nn).
		defaultContent = []string{
			"content/sect/doc1.en.md", contentTemplate,
			"content/sect/doc1.fr.md", contentTemplate,
			"content/sect/doc1.nb.md", contentTemplate,
			"content/sect/doc1.nn.md", contentTemplate,
		}

		listTemplateCommon = "{{ $p := .Paginator }}{{ $p.PageNumber }}|{{ .Title }}|{{ i18n \"hello\" }}|{{ .Permalink }}|Pager: {{ template \"_internal/pagination.html\" . }}|Kind: {{ .Kind }}|Content: {{ .Content }}|Len Pages: {{ len .Pages }}|Len RegularPages: {{ len .RegularPages }}| HasParent: {{ if .Parent }}YES{{ else }}NO{{ end }}"

		defaultTemplates = []string{
			"_default/single.html", "Single: {{ .Title }}|{{ i18n \"hello\" }}|{{.Language.Lang}}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}|{{ .Content }}|Resources: {{ range .Resources }}{{ .MediaType }}: {{ .RelPermalink}} -- {{ end }}|Summary: {{ .Summary }}|Truncated: {{ .Truncated }}|Parent: {{ .Parent.Title }}",
			"_default/list.html", "List Page " + listTemplateCommon,
			"index.html", "{{ $p := .Paginator }}Default Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{ .Site.Data.hugo.slogan }}|String Resource: {{ ( \"Hugo Pipes\" | resources.FromString \"text/pipes.txt\").RelPermalink }}",
			"index.fr.html", "{{ $p := .Paginator }}French Home Page {{ $p.PageNumber }}: {{ .Title }}|{{ .IsHome }}|{{ i18n \"hello\" }}|{{ .Permalink }}|{{ .Site.Data.hugo.slogan }}|String Resource: {{ ( \"Hugo Pipes\" | resources.FromString \"text/pipes.txt\").RelPermalink }}",
			"_default/terms.html", "Taxonomy Term Page " + listTemplateCommon,
			"_default/taxonomy.html", "Taxonomy List Page " + listTemplateCommon,
			// Shortcodes
			"shortcodes/shortcode.html", "Shortcode: {{ i18n \"hello\" }}",
			// A shortcode in multiple languages
			"shortcodes/lingo.html", "LingoDefault",
			"shortcodes/lingo.fr.html", "LingoFrench",
			// Special templates
			"404.html", "404|{{ .Lang }}|{{ .Title }}",
			"robots.txt", "robots|{{ .Lang }}|{{ .Title }}",
		}

		defaultI18n = []string{
			"en.yaml", `
hello:
  other: "Hello"
`,
			"fr.yaml", `
hello:
  other: "Bonjour"
`,
		}

		defaultData = []string{
			"hugo.toml", "slogan = \"Hugo Rocks!\"",
		}
	)

	if len(s.contentFilePairs) == 0 {
		s.writeFilePairs("content", s.createFilenameContent(defaultContent))
	}

	if len(s.templateFilePairs) == 0 {
		s.writeFilePairs("layouts", s.createFilenameContent(defaultTemplates))
	}
	if len(s.dataFilePairs) == 0 {
		s.writeFilePairs("data", s.createFilenameContent(defaultData))
	}
	if len(s.i18nFilePairs) == 0 {
		s.writeFilePairs("i18n", s.createFilenameContent(defaultI18n))
	}
}
+
// Fatalf fails the test with a formatted message.
func (s *sitesBuilder) Fatalf(format string, args ...any) {
	s.T.Helper()
	s.T.Fatalf(format, args...)
}
+
// AssertFileContentFn fails the test unless f returns true for the content of
// the given file.
func (s *sitesBuilder) AssertFileContentFn(filename string, f func(s string) bool) {
	s.T.Helper()
	content := s.FileContent(filename)
	if !f(content) {
		s.Fatalf("Assert failed for %q in content\n%s", filename, content)
	}
}
+
+// Helper to migrate tests to new format.
+func (s *sitesBuilder) DumpTxtar() string {
+ var sb strings.Builder
+
+ skipRe := regexp.MustCompile(`^(public|resources|package-lock.json|go.sum)`)
+
+ afero.Walk(s.Fs.Source, s.workingDir, func(path string, info fs.FileInfo, err error) error {
+ rel := strings.TrimPrefix(path, s.workingDir+"/")
+ if skipRe.MatchString(rel) {
+ if info.IsDir() {
+ return filepath.SkipDir
+ }
+ return nil
+ }
+ if info == nil || info.IsDir() {
+ return nil
+ }
+ sb.WriteString(fmt.Sprintf("-- %s --\n", rel))
+ b, err := afero.ReadFile(s.Fs.Source, path)
+ s.Assert(err, qt.IsNil)
+ sb.WriteString(strings.TrimSpace(string(b)))
+ sb.WriteString("\n")
+ return nil
+ })
+
+ return sb.String()
+}
+
// AssertHome asserts that the rendered home page contains all matches.
func (s *sitesBuilder) AssertHome(matches ...string) {
	s.AssertFileContent("public/index.html", matches...)
}
+
// AssertFileContent asserts that the given file contains every non-empty,
// whitespace-trimmed line of every match string.
func (s *sitesBuilder) AssertFileContent(filename string, matches ...string) {
	s.T.Helper()
	content := s.FileContent(filename)
	for _, m := range matches {
		// A match string may span several lines; each line is matched
		// independently as a substring.
		lines := strings.Split(m, "\n")
		for _, match := range lines {
			match = strings.TrimSpace(match)
			if match == "" {
				continue
			}
			if !strings.Contains(content, match) {
				s.Fatalf("No match for %q in content for %s\n%s\n%q", match, filename, content, content)
			}
		}
	}
}
+
// AssertFileDoesNotExist fails the test if the given file exists in the
// working directory.
func (s *sitesBuilder) AssertFileDoesNotExist(filename string) {
	if s.CheckExists(filename) {
		s.Fatalf("File %q exists but must not exist.", filename)
	}
}
+
// AssertImage asserts that filename decodes as a JPEG with the given
// dimensions. NOTE(review): despite the generic name, only JPEG is supported
// (jpeg.DecodeConfig).
func (s *sitesBuilder) AssertImage(width, height int, filename string) {
	f, err := s.Fs.WorkingDirReadOnly.Open(filename)
	s.Assert(err, qt.IsNil)
	defer f.Close()
	cfg, err := jpeg.DecodeConfig(f)
	s.Assert(err, qt.IsNil)
	s.Assert(cfg.Width, qt.Equals, width)
	s.Assert(cfg.Height, qt.Equals, height)
}
+
// AssertNoDuplicateWrites asserts that no file was written more than once to
// the publish dir (which is wrapped in a counting filesystem in CreateSitesE).
func (s *sitesBuilder) AssertNoDuplicateWrites() {
	s.Helper()
	d := s.Fs.PublishDir.(hugofs.DuplicatesReporter)
	s.Assert(d.ReportDuplicates(), qt.Equals, "")
}
+
// FileContent returns the content of the given file, relative to the working
// dir. The slash-separated filename is converted to the OS path separator.
func (s *sitesBuilder) FileContent(filename string) string {
	s.Helper()
	filename = filepath.FromSlash(filename)
	return readWorkingDir(s.T, s.Fs, filename)
}
+
// AssertObject asserts that the dump of object equals expected
// (leading/trailing whitespace of expected is ignored).
func (s *sitesBuilder) AssertObject(expected string, object any) {
	s.T.Helper()
	got := s.dumper.Sdump(object)
	expected = strings.TrimSpace(expected)

	if expected != got {
		// NOTE(review): the extra Println looks like a leftover debugging aid;
		// got is also included in the Fatalf message below.
		fmt.Println(got)
		diff := htesting.DiffStrings(expected, got)
		s.Fatalf("diff:\n%s\nexpected\n%s\ngot\n%s", diff, expected, got)
	}
}
+
// AssertFileContentRe asserts that the given file matches every regexp in
// matches. Each pattern is compiled with the s flag, so "." matches newlines.
func (s *sitesBuilder) AssertFileContentRe(filename string, matches ...string) {
	content := readWorkingDir(s.T, s.Fs, filename)
	for _, match := range matches {
		r := regexp.MustCompile("(?s)" + match)
		if !r.MatchString(content) {
			s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
		}
	}
}
+
// CheckExists reports whether the given file exists in the working dir.
func (s *sitesBuilder) CheckExists(filename string) bool {
	return workingDirExists(s.Fs, filepath.Clean(filename))
}
+
// GetPage resolves ref against the first site and fails the test on error.
func (s *sitesBuilder) GetPage(ref string) page.Page {
	p, err := s.H.Sites[0].getPageNew(nil, ref)
	s.Assert(err, qt.IsNil)
	return p
}
+
// GetPageRel resolves ref relative to the given page in the first site and
// fails the test on error.
func (s *sitesBuilder) GetPageRel(p page.Page, ref string) page.Page {
	p, err := s.H.Sites[0].getPageNew(p, ref)
	s.Assert(err, qt.IsNil)
	return p
}
+
// NpmInstall returns a runner for "npm install", using a security config
// whose exec whitelist permits only the npm executable. The command is
// created but not executed.
func (s *sitesBuilder) NpmInstall() hexec.Runner {
	sc := security.DefaultConfig
	sc.Exec.Allow = security.NewWhitelist("npm")
	ex := hexec.New(sc)
	command, err := ex.New("npm", "install")
	s.Assert(err, qt.IsNil)
	return command
}
+
// newTestHelper creates a testHelper wrapping the given config, filesystem
// and test handle.
func newTestHelper(cfg config.Provider, fs *hugofs.Fs, t testing.TB) testHelper {
	return testHelper{
		Cfg: cfg,
		Fs:  fs,
		C:   qt.New(t),
	}
}
+
// testHelper bundles a site config and filesystem with a quicktest checker
// (embedded, so its assertion methods are promoted).
type testHelper struct {
	Cfg config.Provider
	Fs  *hugofs.Fs
	*qt.C
}
+
// assertFileContent asserts that the given file contains every match string.
// Both the filename and the matches have the default-content-language path
// segment stripped when the site does not render it in a subdirectory.
func (th testHelper) assertFileContent(filename string, matches ...string) {
	th.Helper()
	filename = th.replaceDefaultContentLanguageValue(filename)
	content := readWorkingDir(th, th.Fs, filename)
	for _, match := range matches {
		match = th.replaceDefaultContentLanguageValue(match)
		th.Assert(strings.Contains(content, match), qt.Equals, true, qt.Commentf(match+" not in: \n"+content))
	}
}
+
// assertFileContentRegexp asserts that the given file matches every regexp in
// matches, after stripping the default-content-language path segment from
// both the filename and the patterns where applicable.
func (th testHelper) assertFileContentRegexp(filename string, matches ...string) {
	filename = th.replaceDefaultContentLanguageValue(filename)
	content := readWorkingDir(th, th.Fs, filename)
	for _, match := range matches {
		match = th.replaceDefaultContentLanguageValue(match)
		r := regexp.MustCompile(match)
		matches := r.MatchString(content)
		if !matches {
			// Print the full content before failing to ease debugging.
			fmt.Println("Expected to match regexp:\n"+match+"\nGot:\n", content)
		}
		th.Assert(matches, qt.Equals, true)
	}
}
+
// assertFileNotExist asserts that filename does not exist in the publish dir.
func (th testHelper) assertFileNotExist(filename string) {
	exists, err := helpers.Exists(filename, th.Fs.PublishDir)
	th.Assert(err, qt.IsNil)
	th.Assert(exists, qt.Equals, false)
}
+
+func (th testHelper) replaceDefaultContentLanguageValue(value string) string {
+ defaultInSubDir := th.Cfg.GetBool("defaultContentLanguageInSubDir")
+ replace := th.Cfg.GetString("defaultContentLanguage") + "/"
+
+ if !defaultInSubDir {
+ value = strings.Replace(value, replace, "", 1)
+ }
+ return value
+}
+
// loadTestConfig loads the site config from fs, applying the optional
// withConfig mutators. The second (unused) return value of LoadConfig is
// discarded.
func loadTestConfig(fs afero.Fs, withConfig ...func(cfg config.Provider) error) (config.Provider, error) {
	v, _, err := LoadConfig(ConfigSourceDescriptor{Fs: fs}, withConfig...)
	return v, err
}
+
// newTestCfgBasic creates a minimal test configuration (test defaults only,
// no config file loading) backed by an in-memory filesystem.
func newTestCfgBasic() (config.Provider, *hugofs.Fs) {
	mm := afero.NewMemMapFs()
	v := config.NewWithTestDefaults()
	v.Set("defaultContentLanguageInSubdir", true)

	fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(mm), v)

	return v, fs
}
+
+func newTestCfg(withConfig ...func(cfg config.Provider) error) (config.Provider, *hugofs.Fs) {
+ mm := afero.NewMemMapFs()
+
+ v, err := loadTestConfig(mm, func(cfg config.Provider) error {
+ // Default is false, but true is easier to use as default in tests
+ cfg.Set("defaultContentLanguageInSubdir", true)
+
+ for _, w := range withConfig {
+ w(cfg)
+ }
+
+ return nil
+ })
+
+ if err != nil && err != ErrNoConfigFile {
+ panic(err)
+ }
+
+ fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(mm), v)
+
+ return v, fs
+}
+
// newTestSitesFromConfig writes the given TOML config and layout path/content
// pairs to afs, then creates a HugoSites instance plus a testHelper for
// assertions. layoutPathContentPairs must have even length.
func newTestSitesFromConfig(t testing.TB, afs afero.Fs, tomlConfig string, layoutPathContentPairs ...string) (testHelper, *HugoSites) {
	if len(layoutPathContentPairs)%2 != 0 {
		t.Fatalf("Layouts must be provided in pairs")
	}

	c := qt.New(t)

	// .gitkeep ensures the content dir exists even with no content files.
	writeToFs(t, afs, filepath.Join("content", ".gitkeep"), "")
	writeToFs(t, afs, "config.toml", tomlConfig)

	cfg, err := LoadConfigDefault(afs)
	c.Assert(err, qt.IsNil)

	fs := hugofs.NewFrom(afs, cfg)
	th := newTestHelper(cfg, fs, t)

	for i := 0; i < len(layoutPathContentPairs); i += 2 {
		writeSource(t, fs, layoutPathContentPairs[i], layoutPathContentPairs[i+1])
	}

	h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})

	c.Assert(err, qt.IsNil)

	return th, h
}
+
+func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ tpl.TemplateManager) error {
+ return func(templ tpl.TemplateManager) error {
+ for i := 0; i < len(additionalTemplates); i += 2 {
+ err := templ.AddTemplate(additionalTemplates[i], additionalTemplates[i+1])
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ }
+}
+
// buildSingleSite builds a single-site setup and fails the test on any
// init or build error.
// TODO(bep) replace these with the builder
func buildSingleSite(t testing.TB, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site {
	t.Helper()
	return buildSingleSiteExpected(t, false, false, depsCfg, buildCfg)
}
+
+func buildSingleSiteExpected(t testing.TB, expectSiteInitError, expectBuildError bool, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site {
+ t.Helper()
+ b := newTestSitesBuilderFromDepsCfg(t, depsCfg).WithNothingAdded()
+
+ err := b.CreateSitesE()
+
+ if expectSiteInitError {
+ b.Assert(err, qt.Not(qt.IsNil))
+ return nil
+ } else {
+ b.Assert(err, qt.IsNil)
+ }
+
+ h := b.H
+
+ b.Assert(len(h.Sites), qt.Equals, 1)
+
+ if expectBuildError {
+ b.Assert(h.Build(buildCfg), qt.Not(qt.IsNil))
+ return nil
+
+ }
+
+ b.Assert(h.Build(buildCfg), qt.IsNil)
+
+ return h.Sites[0]
+}
+
// writeSourcesToSource writes each [filename, content] pair below base in the
// source filesystem.
func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...[2]string) {
	for _, src := range sources {
		writeSource(t, fs, filepath.Join(base, src[0]), src[1])
	}
}
+
// getPage resolves ref relative to in, panicking on error (test helper).
func getPage(in page.Page, ref string) page.Page {
	p, err := in.GetPage(ref)
	if err != nil {
		panic(err)
	}
	return p
}
+
// content returns the provider's content as a string, panicking on error
// (test helper).
func content(c resource.ContentProvider) string {
	cc, err := c.Content()
	if err != nil {
		panic(err)
	}

	ccs, err := cast.ToStringE(cc)
	if err != nil {
		panic(err)
	}
	return ccs
}
+
// pagesToString returns the pages' paths, sorted and joined with "|".
// Useful for order-independent assertions on page sets.
func pagesToString(pages ...page.Page) string {
	var paths []string
	for _, p := range pages {
		paths = append(paths, p.Pathc())
	}
	sort.Strings(paths)
	return strings.Join(paths, "|")
}
+
// dumpPagesLinks prints the pages' relative permalinks, sorted, one per line.
// Debug helper.
func dumpPagesLinks(pages ...page.Page) {
	var links []string
	for _, p := range pages {
		links = append(links, p.RelPermalink())
	}
	sort.Strings(links)

	for _, link := range links {
		fmt.Println(link)
	}
}
+
// dumpPages prints a one-line summary of each page. Debug helper.
func dumpPages(pages ...page.Page) {
	fmt.Println("---------")
	for _, p := range pages {
		fmt.Printf("Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Lang: %s\n",
			p.Kind(), p.Title(), p.RelPermalink(), p.Pathc(), p.SectionsPath(), p.Lang())
	}
}
+
// dumpSPages prints a numbered one-line summary of each pageState.
// Debug helper.
func dumpSPages(pages ...*pageState) {
	for i, p := range pages {
		fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s\n",
			i+1,
			p.Kind(), p.Title(), p.RelPermalink(), p.Pathc(), p.SectionsPath())
	}
}
+
// printStringIndexes prints, for each line of s, a row with the byte offset
// at which every rune starts, followed by a row with the runes themselves
// aligned underneath. Debug helper for working with string indexes.
func printStringIndexes(s string) {
	offset := 0

	for _, line := range strings.Split(s, "\n") {
		// Row 1: byte offsets, one %-3s cell per rune.
		for _, r := range line {
			fmt.Printf("%-3s", strconv.Itoa(offset))
			offset += utf8.RuneLen(r)
		}
		offset++ // account for the "\n" stripped by Split
		fmt.Println()

		// Row 2: the runes, aligned with the offsets above.
		for _, r := range line {
			fmt.Printf("%-3s", string(r))
		}
		fmt.Println()
	}
}
+
// See https://github.com/golang/go/issues/19280
// Not in use.
var parallelEnabled = true

// parallel marks t as safe to run in parallel, unless globally disabled via
// parallelEnabled.
func parallel(t *testing.T) {
	if parallelEnabled {
		t.Parallel()
	}
}
+
// skipSymlink skips the test on a local (non-CI) Windows machine, where
// creating symlinks requires administrator privileges.
func skipSymlink(t *testing.T) {
	if runtime.GOOS == "windows" && os.Getenv("CI") == "" {
		t.Skip("skip symlink test on local Windows (needs admin)")
	}
}
+
+func captureStderr(f func() error) (string, error) {
+ old := os.Stderr
+ r, w, _ := os.Pipe()
+ os.Stderr = w
+
+ err := f()
+
+ w.Close()
+ os.Stderr = old
+
+ var buf bytes.Buffer
+ io.Copy(&buf, r)
+ return buf.String(), err
+}
+
+func captureStdout(f func() error) (string, error) {
+ old := os.Stdout
+ r, w, _ := os.Pipe()
+ os.Stdout = w
+
+ err := f()
+
+ w.Close()
+ os.Stdout = old
+
+ var buf bytes.Buffer
+ io.Copy(&buf, r)
+ return buf.String(), err
+}
diff --git a/hugolib/testsite/.gitignore b/hugolib/testsite/.gitignore
new file mode 100644
index 000000000..ab8b69cbc
--- /dev/null
+++ b/hugolib/testsite/.gitignore
@@ -0,0 +1 @@
+config.toml \ No newline at end of file
diff --git a/hugolib/testsite/CODEOWNERS b/hugolib/testsite/CODEOWNERS
new file mode 100644
index 000000000..41f196327
--- /dev/null
+++ b/hugolib/testsite/CODEOWNERS
@@ -0,0 +1 @@
+* @bep \ No newline at end of file
diff --git a/hugolib/testsite/content/first-post.md b/hugolib/testsite/content/first-post.md
new file mode 100644
index 000000000..4a8007946
--- /dev/null
+++ b/hugolib/testsite/content/first-post.md
@@ -0,0 +1,4 @@
+---
+title: "My First Post"
+lastmod: 2018-02-28
+--- \ No newline at end of file
diff --git a/hugolib/testsite/content_nn/first-post.md b/hugolib/testsite/content_nn/first-post.md
new file mode 100644
index 000000000..1c3b4e831
--- /dev/null
+++ b/hugolib/testsite/content_nn/first-post.md
@@ -0,0 +1,4 @@
+---
+title: "Min første dag"
+lastmod: 1972-02-28
+--- \ No newline at end of file
diff --git a/hugolib/translations.go b/hugolib/translations.go
new file mode 100644
index 000000000..76beafba9
--- /dev/null
+++ b/hugolib/translations.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "github.com/gohugoio/hugo/resources/page"
+)
+
+func pagesToTranslationsMap(sites []*Site) map[string]page.Pages {
+ out := make(map[string]page.Pages)
+
+ for _, s := range sites {
+ s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
+ p := n.p
+ // TranslationKey is implemented for all page types.
+ base := p.TranslationKey()
+
+ pageTranslations, found := out[base]
+ if !found {
+ pageTranslations = make(page.Pages, 0)
+ }
+
+ pageTranslations = append(pageTranslations, p)
+ out[base] = pageTranslations
+
+ return false
+ })
+ }
+
+ return out
+}
+
+func assignTranslationsToPages(allTranslations map[string]page.Pages, sites []*Site) {
+ for _, s := range sites {
+ s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
+ p := n.p
+ base := p.TranslationKey()
+ translations, found := allTranslations[base]
+ if !found {
+ return false
+ }
+ p.setTranslations(translations)
+ return false
+ })
+ }
+}
diff --git a/identity/identity.go b/identity/identity.go
new file mode 100644
index 000000000..9236f0876
--- /dev/null
+++ b/identity/identity.go
@@ -0,0 +1,164 @@
+package identity
+
+import (
+ "path/filepath"
+ "strings"
+ "sync"
+ "sync/atomic"
+)
+
// NewManager creates a new Manager starting at id.
// (The doc comment previously named a non-existent NewIdentityManager.)
func NewManager(id Provider) Manager {
	return &identityManager{
		Provider: id,
		ids:      Identities{id.GetIdentity(): id},
	}
}
+
// NewPathIdentity creates a new Identity with the two identifiers
// type and path. The path is normalized: slashes forced forward,
// any leading "/" stripped, and lowercased.
func NewPathIdentity(typ, pat string) PathIdentity {
	pat = strings.ToLower(strings.TrimPrefix(filepath.ToSlash(pat), "/"))
	return PathIdentity{Type: typ, Path: pat}
}
+
// Identities stores identity providers keyed by their Identity.
type Identities map[Identity]Provider

// search looks up id in ids, recursing into any nested IdentitiesProvider
// values. depth guards the recursion; the search gives up (returns nil)
// beyond 100 levels.
func (ids Identities) search(depth int, id Identity) Provider {
	if v, found := ids[id.GetIdentity()]; found {
		return v
	}

	depth++

	// There may be infinite recursion in templates.
	if depth > 100 {
		// Bail out.
		return nil
	}

	for _, v := range ids {
		switch t := v.(type) {
		case IdentitiesProvider:
			if nested := t.GetIdentities().search(depth, id); nested != nil {
				return nested
			}
		}
	}
	return nil
}
+
// IdentitiesProvider provides all Identities.
type IdentitiesProvider interface {
	GetIdentities() Identities
}

// Identity represents a thing that can provide an identity. This can be
// any Go type, but the Identity returned by GetIdentity must be hashable.
type Identity interface {
	Provider
	Name() string
}

// Manager manages identities, and is itself a Provider of Identity.
type Manager interface {
	SearchProvider
	Add(ids ...Provider)
	Reset()
}

// SearchProvider provides access to the chained set of identities.
type SearchProvider interface {
	Provider
	IdentitiesProvider
	Search(id Identity) Provider
}

// A PathIdentity is a common identity identified by a type and a path, e.g. "layouts" and "_default/single.html".
type PathIdentity struct {
	Type string
	Path string
}

// GetIdentity returns itself.
func (id PathIdentity) GetIdentity() Identity {
	return id
}

// Name returns the Path.
func (id PathIdentity) Name() string {
	return id.Path
}

// A KeyValueIdentity is a general purpose identity.
type KeyValueIdentity struct {
	Key   string
	Value string
}

// GetIdentity returns itself.
func (id KeyValueIdentity) GetIdentity() Identity {
	return id
}

// Name returns the Key.
func (id KeyValueIdentity) Name() string {
	return id.Key
}

// Provider provides the hashable Identity.
type Provider interface {
	// GetIdentity is for internal use.
	GetIdentity() Identity
}
+
// identityManager is the default Manager implementation: a mutex-guarded
// Identities map plus an embedded root Provider. The embedded Mutex means
// the type must not be copied; all methods use pointer receivers.
type identityManager struct {
	sync.Mutex
	Provider
	ids Identities
}

// Add registers the given providers, keyed by their Identity.
func (im *identityManager) Add(ids ...Provider) {
	im.Lock()
	for _, id := range ids {
		im.ids[id.GetIdentity()] = id
	}
	im.Unlock()
}

// Reset drops all registered identities except the manager's own root.
func (im *identityManager) Reset() {
	im.Lock()
	id := im.GetIdentity()
	im.ids = Identities{id.GetIdentity(): id}
	im.Unlock()
}

// TODO(bep) these identities are currently only read on server reloads
// so there should be no concurrency issues, but that may change.
func (im *identityManager) GetIdentities() Identities {
	im.Lock()
	defer im.Unlock()
	return im.ids
}

// Search performs a recursive lookup of id, starting at this manager's set.
func (im *identityManager) Search(id Identity) Provider {
	im.Lock()
	defer im.Unlock()
	return im.ids.search(0, id.GetIdentity())
}
+
// Incrementer increments and returns the value.
// Typically used for IDs.
type Incrementer interface {
	Incr() int
}

// IncrementByOne implements Incrementer, adding 1 every time Incr is called.
// The zero value is ready to use; Incr is safe for concurrent use.
type IncrementByOne struct {
	counter uint64
}

// Incr atomically adds one to the counter and returns the new value.
func (c *IncrementByOne) Incr() int {
	next := atomic.AddUint64(&c.counter, 1)
	return int(next)
}
diff --git a/identity/identity_test.go b/identity/identity_test.go
new file mode 100644
index 000000000..baf2628bb
--- /dev/null
+++ b/identity/identity_test.go
@@ -0,0 +1,89 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package identity
+
+import (
+ "fmt"
+ "math/rand"
+ "strconv"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestIdentityManager verifies that a Manager finds its root identity and
// returns nil for an unknown one.
func TestIdentityManager(t *testing.T) {
	c := qt.New(t)

	id1 := testIdentity{name: "id1"}
	im := NewManager(id1)

	c.Assert(im.Search(id1).GetIdentity(), qt.Equals, id1)
	c.Assert(im.Search(testIdentity{name: "notfound"}), qt.Equals, nil)
}
+
// BenchmarkIdentityManager measures Add and Search on an identityManager
// pre-populated with b.N identities.
func BenchmarkIdentityManager(b *testing.B) {
	createIds := func(num int) []Identity {
		ids := make([]Identity, num)
		for i := 0; i < num; i++ {
			ids[i] = testIdentity{name: fmt.Sprintf("id%d", i)}
		}
		return ids
	}

	b.Run("Add", func(b *testing.B) {
		c := qt.New(b)
		// Keep fixture creation out of the timed region.
		b.StopTimer()
		ids := createIds(b.N)
		im := NewManager(testIdentity{"first"})
		b.StartTimer()

		for i := 0; i < b.N; i++ {
			im.Add(ids[i])
		}

		b.StopTimer()
		c.Assert(im.GetIdentities(), qt.HasLen, b.N+1)
	})

	b.Run("Search", func(b *testing.B) {
		c := qt.New(b)
		b.StopTimer()
		ids := createIds(b.N)
		im := NewManager(testIdentity{"first"})

		for i := 0; i < b.N; i++ {
			im.Add(ids[i])
		}

		b.StartTimer()

		for i := 0; i < b.N; i++ {
			// Look up a random existing identity each iteration.
			name := "id" + strconv.Itoa(rand.Intn(b.N))
			id := im.Search(testIdentity{name: name})
			c.Assert(id.GetIdentity().Name(), qt.Equals, name)
		}
	})
}
+
// testIdentity is a minimal Identity implementation for tests, identified by
// its name.
type testIdentity struct {
	name string
}

// GetIdentity returns itself.
func (id testIdentity) GetIdentity() Identity {
	return id
}

// Name returns the identity's name.
func (id testIdentity) Name() string {
	return id.name
}
diff --git a/langs/config.go b/langs/config.go
new file mode 100644
index 000000000..81e6fc2ab
--- /dev/null
+++ b/langs/config.go
@@ -0,0 +1,226 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package langs
+
+import (
+ "fmt"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/spf13/cast"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/config"
+)
+
// LanguagesConfig holds the fully resolved language configuration for a site.
type LanguagesConfig struct {
	Languages                      Languages // sorted language definitions
	Multihost                      bool      // true when every language has its own baseURL
	DefaultContentLanguageInSubdir bool
}
+
// LoadLanguageSettings resolves the language configuration from cfg: it
// applies the disableLanguages list, sorts the languages, validates the
// default content language and multihost baseURL requirements, and writes
// several derived keys (languagesSorted, multilingual, multihost, ...) back
// into cfg. oldLangs, when non-nil, is the previous configuration used to
// detect structural changes that require a dev-server restart.
func LoadLanguageSettings(cfg config.Provider, oldLangs Languages) (c LanguagesConfig, err error) {
	defaultLang := strings.ToLower(cfg.GetString("defaultContentLanguage"))
	if defaultLang == "" {
		// Fall back to English and persist the fallback.
		defaultLang = "en"
		cfg.Set("defaultContentLanguage", defaultLang)
	}

	var languages map[string]any

	languagesFromConfig := cfg.GetParams("languages")
	disableLanguages := cfg.GetStringSlice("disableLanguages")

	if len(disableLanguages) == 0 {
		languages = languagesFromConfig
	} else {
		// Mark each disabled language; disabling the default language is an
		// error.
		languages = make(maps.Params)
		for k, v := range languagesFromConfig {
			for _, disabled := range disableLanguages {
				if disabled == defaultLang {
					return c, fmt.Errorf("cannot disable default language %q", defaultLang)
				}

				if strings.EqualFold(k, disabled) {
					v.(maps.Params)["disabled"] = true
					break
				}
			}
			languages[k] = v
		}
	}

	var languages2 Languages

	if len(languages) == 0 {
		// No [languages] section: synthesize a single default language.
		languages2 = append(languages2, NewDefaultLanguage(cfg))
	} else {
		languages2, err = toSortedLanguages(cfg, languages)
		if err != nil {
			// NOTE(review): error string is capitalized, against Go convention.
			return c, fmt.Errorf("Failed to parse multilingual config: %w", err)
		}
	}

	if oldLangs != nil {
		// When in multihost mode, the languages are mapped to a server, so
		// some structural language changes will need a restart of the dev server.
		// The validation below isn't complete, but should cover the most
		// important cases.
		var invalid bool
		if languages2.IsMultihost() != oldLangs.IsMultihost() {
			invalid = true
		} else {
			if languages2.IsMultihost() && len(languages2) != len(oldLangs) {
				invalid = true
			}
		}

		if invalid {
			return c, errors.New("language change needing a server restart detected")
		}

		if languages2.IsMultihost() {
			// We need to transfer any server baseURL to the new language
			for i, ol := range oldLangs {
				nl := languages2[i]
				nl.Set("baseURL", ol.GetString("baseURL"))
			}
		}
	}

	// The defaultContentLanguage is something the user has to decide, but it needs
	// to match a language in the language definition list.
	langExists := false
	for _, lang := range languages2 {
		if lang.Lang == defaultLang {
			langExists = true
			break
		}
	}

	if !langExists {
		return c, fmt.Errorf("site config value %q for defaultContentLanguage does not match any language definition", defaultLang)
	}

	c.Languages = languages2
	c.Multihost = languages2.IsMultihost()
	c.DefaultContentLanguageInSubdir = c.Multihost

	// A stable copy with the default language moved to the front.
	sortedDefaultFirst := make(Languages, len(c.Languages))
	for i, v := range c.Languages {
		sortedDefaultFirst[i] = v
	}
	sort.Slice(sortedDefaultFirst, func(i, j int) bool {
		li, lj := sortedDefaultFirst[i], sortedDefaultFirst[j]
		if li.Lang == defaultLang {
			return true
		}

		if lj.Lang == defaultLang {
			return false
		}

		return i < j
	})

	cfg.Set("languagesSorted", c.Languages)
	cfg.Set("languagesSortedDefaultFirst", sortedDefaultFirst)
	cfg.Set("multilingual", len(languages2) > 1)

	multihost := c.Multihost

	if multihost {
		cfg.Set("defaultContentLanguageInSubdir", true)
		cfg.Set("multihost", true)
	}

	if multihost {
		// The baseURL may be provided at the language level. If that is true,
		// then every language must have a baseURL. In this case we always render
		// to a language sub folder, which is then stripped from all the Permalink URLs etc.
		for _, l := range languages2 {
			burl := l.GetLocal("baseURL")
			if burl == nil {
				return c, errors.New("baseURL must be set on all or none of the languages")
			}
		}
	}

	// Surface any per-language initialization error (e.g. bad timezone).
	for _, language := range c.Languages {
		if language.initErr != nil {
			return c, language.initErr
		}
	}

	return c, nil
}
+
+func toSortedLanguages(cfg config.Provider, l map[string]any) (Languages, error) {
+ languages := make(Languages, len(l))
+ i := 0
+
+ for lang, langConf := range l {
+ langsMap, err := maps.ToStringMapE(langConf)
+ if err != nil {
+ return nil, fmt.Errorf("Language config is not a map: %T", langConf)
+ }
+
+ language := NewLanguage(lang, cfg)
+
+ for loki, v := range langsMap {
+ switch loki {
+ case "title":
+ language.Title = cast.ToString(v)
+ case "languagename":
+ language.LanguageName = cast.ToString(v)
+ case "languagedirection":
+ language.LanguageDirection = cast.ToString(v)
+ case "weight":
+ language.Weight = cast.ToInt(v)
+ case "contentdir":
+ language.ContentDir = filepath.Clean(cast.ToString(v))
+ case "disabled":
+ language.Disabled = cast.ToBool(v)
+ case "params":
+ m := maps.ToStringMap(v)
+ // Needed for case insensitive fetching of params values
+ maps.PrepareParams(m)
+ for k, vv := range m {
+ language.SetParam(k, vv)
+ }
+ case "timezone":
+ if err := language.loadLocation(cast.ToString(v)); err != nil {
+ return nil, err
+ }
+ }
+
+ // Put all into the Params map
+ language.SetParam(loki, v)
+
+ // Also set it in the configuration map (for baseURL etc.)
+ language.Set(loki, v)
+ }
+
+ languages[i] = language
+ i++
+ }
+
+ sort.Sort(languages)
+
+ return languages, nil
+}
diff --git a/langs/i18n/i18n.go b/langs/i18n/i18n.go
new file mode 100644
index 000000000..5594c84cb
--- /dev/null
+++ b/langs/i18n/i18n.go
@@ -0,0 +1,196 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package i18n
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/go-i18n/v2/i18n"
+)
+
+type translateFunc func(translationID string, templateData any) string
+
+var i18nWarningLogger = helpers.NewDistinctErrorLogger()
+
+// Translator handles i18n translations.
+type Translator struct {
+ translateFuncs map[string]translateFunc
+ cfg config.Provider
+ logger loggers.Logger
+}
+
+// NewTranslator creates a new Translator for the given language bundle and configuration.
+func NewTranslator(b *i18n.Bundle, cfg config.Provider, logger loggers.Logger) Translator {
+ t := Translator{cfg: cfg, logger: logger, translateFuncs: make(map[string]translateFunc)}
+ t.initFuncs(b)
+ return t
+}
+
+// Func gets the translate func for the given language, or for the default
+// configured language if not found.
+func (t Translator) Func(lang string) translateFunc {
+ if f, ok := t.translateFuncs[lang]; ok {
+ return f
+ }
+ t.logger.Infof("Translation func for language %v not found, use default.", lang)
+ if f, ok := t.translateFuncs[t.cfg.GetString("defaultContentLanguage")]; ok {
+ return f
+ }
+
+ t.logger.Infoln("i18n not initialized; if you need string translations, check that you have a bundle in /i18n that matches the site language or the default language.")
+ return func(translationID string, args any) string {
+ return ""
+ }
+}
+
+func (t Translator) initFuncs(bndl *i18n.Bundle) {
+ enableMissingTranslationPlaceholders := t.cfg.GetBool("enableMissingTranslationPlaceholders")
+ for _, lang := range bndl.LanguageTags() {
+ currentLang := lang
+ currentLangStr := currentLang.String()
+ // This may be pt-BR; make it case insensitive.
+ currentLangKey := strings.ToLower(strings.TrimPrefix(currentLangStr, artificialLangTagPrefix))
+ localizer := i18n.NewLocalizer(bndl, currentLangStr)
+ t.translateFuncs[currentLangKey] = func(translationID string, templateData any) string {
+ pluralCount := getPluralCount(templateData)
+
+ if templateData != nil {
+ tp := reflect.TypeOf(templateData)
+ if hreflect.IsInt(tp.Kind()) {
+ // This was how go-i18n worked in v1,
+ // and we keep it like this to avoid breaking
+ // lots of sites in the wild.
+ templateData = intCount(cast.ToInt(templateData))
+ }
+ }
+
+ translated, translatedLang, err := localizer.LocalizeWithTag(&i18n.LocalizeConfig{
+ MessageID: translationID,
+ TemplateData: templateData,
+ PluralCount: pluralCount,
+ })
+
+ sameLang := currentLang == translatedLang
+
+ if err == nil && sameLang {
+ return translated
+ }
+
+ if err != nil && sameLang && translated != "" {
+ // See #8492
+ // TODO(bep) this needs to be improved/fixed upstream,
+ // but currently we get an error even if the fallback to
+ // "other" succeeds.
+ if fmt.Sprintf("%T", err) == "i18n.pluralFormNotFoundError" {
+ return translated
+ }
+ }
+
+ if _, ok := err.(*i18n.MessageNotFoundErr); !ok {
+ t.logger.Warnf("Failed to get translated string for language %q and ID %q: %s", currentLangStr, translationID, err)
+ }
+
+ if t.cfg.GetBool("logI18nWarnings") {
+ i18nWarningLogger.Printf("i18n|MISSING_TRANSLATION|%s|%s", currentLangStr, translationID)
+ }
+
+ if enableMissingTranslationPlaceholders {
+ return "[i18n] " + translationID
+ }
+
+ return translated
+ }
+ }
+}
+
+// intCount wraps the Count method.
+type intCount int
+
+func (c intCount) Count() int {
+ return int(c)
+}
+
+const countFieldName = "Count"
+
+// getPluralCount gets the plural count as a string (floats) or an integer.
+// If v is nil, nil is returned.
+func getPluralCount(v any) any {
+ if v == nil {
+ // i18n called without any argument, make sure it does not
+ // get any plural count.
+ return nil
+ }
+
+ switch v := v.(type) {
+ case map[string]any:
+ for k, vv := range v {
+ if strings.EqualFold(k, countFieldName) {
+ return toPluralCountValue(vv)
+ }
+ }
+ default:
+ vv := reflect.Indirect(reflect.ValueOf(v))
+ if vv.Kind() == reflect.Interface && !vv.IsNil() {
+ vv = vv.Elem()
+ }
+ tp := vv.Type()
+
+ if tp.Kind() == reflect.Struct {
+ f := vv.FieldByName(countFieldName)
+ if f.IsValid() {
+ return toPluralCountValue(f.Interface())
+ }
+ m := hreflect.GetMethodByName(vv, countFieldName)
+ if m.IsValid() && m.Type().NumIn() == 0 && m.Type().NumOut() == 1 {
+ c := m.Call(nil)
+ return toPluralCountValue(c[0].Interface())
+ }
+ }
+ }
+
+ return toPluralCountValue(v)
+}
+
+// go-i18n expects floats to be represented by string.
+func toPluralCountValue(in any) any {
+ k := reflect.TypeOf(in).Kind()
+ switch {
+ case hreflect.IsFloat(k):
+ f := cast.ToString(in)
+ if !strings.Contains(f, ".") {
+ f += ".0"
+ }
+ return f
+ case k == reflect.String:
+ if _, err := cast.ToFloat64E(in); err == nil {
+ return in
+ }
+ // A non-numeric value.
+ return nil
+ default:
+ if i, err := cast.ToIntE(in); err == nil {
+ return i
+ }
+ return nil
+ }
+}
diff --git a/langs/i18n/i18n_test.go b/langs/i18n/i18n_test.go
new file mode 100644
index 000000000..0048d4b1b
--- /dev/null
+++ b/langs/i18n/i18n_test.go
@@ -0,0 +1,552 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package i18n
+
+import (
+ "fmt"
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/gohugoio/hugo/tpl/tplimpl"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/deps"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+var logger = loggers.NewErrorLogger()
+
+type i18nTest struct {
+ name string
+ data map[string][]byte
+ args any
+ lang, id, expected, expectedFlag string
+}
+
+var i18nTests = []i18nTest{
+ // All translations present
+ {
+ name: "all-present",
+ data: map[string][]byte{
+ "en.toml": []byte("[hello]\nother = \"Hello, World!\""),
+ "es.toml": []byte("[hello]\nother = \"¡Hola, Mundo!\""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "¡Hola, Mundo!",
+ expectedFlag: "¡Hola, Mundo!",
+ },
+ // Translation missing in current language but present in default
+ {
+ name: "present-in-default",
+ data: map[string][]byte{
+ "en.toml": []byte("[hello]\nother = \"Hello, World!\""),
+ "es.toml": []byte("[goodbye]\nother = \"¡Adiós, Mundo!\""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "Hello, World!",
+ expectedFlag: "[i18n] hello",
+ },
+ // Translation missing in default language but present in current
+ {
+ name: "present-in-current",
+ data: map[string][]byte{
+ "en.toml": []byte("[goodbye]\nother = \"Goodbye, World!\""),
+ "es.toml": []byte("[hello]\nother = \"¡Hola, Mundo!\""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "¡Hola, Mundo!",
+ expectedFlag: "¡Hola, Mundo!",
+ },
+ // Translation missing in both default and current language
+ {
+ name: "missing",
+ data: map[string][]byte{
+ "en.toml": []byte("[goodbye]\nother = \"Goodbye, World!\""),
+ "es.toml": []byte("[goodbye]\nother = \"¡Adiós, Mundo!\""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "",
+ expectedFlag: "[i18n] hello",
+ },
+ // Default translation file missing or empty
+ {
+ name: "file-missing",
+ data: map[string][]byte{
+ "en.toml": []byte(""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "",
+ expectedFlag: "[i18n] hello",
+ },
+ // Context provided
+ {
+ name: "context-provided",
+ data: map[string][]byte{
+ "en.toml": []byte("[wordCount]\nother = \"Hello, {{.WordCount}} people!\""),
+ "es.toml": []byte("[wordCount]\nother = \"¡Hola, {{.WordCount}} gente!\""),
+ },
+ args: struct {
+ WordCount int
+ }{
+ 50,
+ },
+ lang: "es",
+ id: "wordCount",
+ expected: "¡Hola, 50 gente!",
+ expectedFlag: "¡Hola, 50 gente!",
+ },
+ // https://github.com/gohugoio/hugo/issues/7787
+ {
+ name: "readingTime-one",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one = "One minute to read"
+other = "{{ .Count }} minutes to read"
+`),
+ },
+ args: 1,
+ lang: "en",
+ id: "readingTime",
+ expected: "One minute to read",
+ expectedFlag: "One minute to read",
+ },
+ {
+ name: "readingTime-many-dot",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one = "One minute to read"
+other = "{{ . }} minutes to read"
+`),
+ },
+ args: 21,
+ lang: "en",
+ id: "readingTime",
+ expected: "21 minutes to read",
+ expectedFlag: "21 minutes to read",
+ },
+ {
+ name: "readingTime-many",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one = "One minute to read"
+other = "{{ .Count }} minutes to read"
+`),
+ },
+ args: 21,
+ lang: "en",
+ id: "readingTime",
+ expected: "21 minutes to read",
+ expectedFlag: "21 minutes to read",
+ },
+ // Issue #8454
+ {
+ name: "readingTime-map-one",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one = "One minute to read"
+other = "{{ .Count }} minutes to read"
+`),
+ },
+ args: map[string]any{"Count": 1},
+ lang: "en",
+ id: "readingTime",
+ expected: "One minute to read",
+ expectedFlag: "One minute to read",
+ },
+ {
+ name: "readingTime-string-one",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one = "One minute to read"
+other = "{{ . }} minutes to read"
+`),
+ },
+ args: "1",
+ lang: "en",
+ id: "readingTime",
+ expected: "One minute to read",
+ expectedFlag: "One minute to read",
+ },
+ {
+ name: "readingTime-map-many",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one = "One minute to read"
+other = "{{ .Count }} minutes to read"
+`),
+ },
+ args: map[string]any{"Count": 21},
+ lang: "en",
+ id: "readingTime",
+ expected: "21 minutes to read",
+ expectedFlag: "21 minutes to read",
+ },
+ {
+ name: "argument-float",
+ data: map[string][]byte{
+ "en.toml": []byte(`[float]
+other = "Number is {{ . }}"
+`),
+ },
+ args: 22.5,
+ lang: "en",
+ id: "float",
+ expected: "Number is 22.5",
+ expectedFlag: "Number is 22.5",
+ },
+ // Same id and translation in current language
+ // https://github.com/gohugoio/hugo/issues/2607
+ {
+ name: "same-id-and-translation",
+ data: map[string][]byte{
+ "es.toml": []byte("[hello]\nother = \"hello\""),
+ "en.toml": []byte("[hello]\nother = \"hi\""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "hello",
+ expectedFlag: "hello",
+ },
+ // Translation missing in current language, but same id and translation in default
+ {
+ name: "same-id-and-translation-default",
+ data: map[string][]byte{
+ "es.toml": []byte("[bye]\nother = \"bye\""),
+ "en.toml": []byte("[hello]\nother = \"hello\""),
+ },
+ args: nil,
+ lang: "es",
+ id: "hello",
+ expected: "hello",
+ expectedFlag: "[i18n] hello",
+ },
+ // Unknown language code should get its plural spec from en
+ {
+ name: "unknown-language-code",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one ="one minute read"
+other = "{{.Count}} minutes read"`),
+ "klingon.toml": []byte(`[readingTime]
+one = "eitt minutt med lesing"
+other = "{{ .Count }} minuttar lesing"`),
+ },
+ args: 3,
+ lang: "klingon",
+ id: "readingTime",
+ expected: "3 minuttar lesing",
+ expectedFlag: "3 minuttar lesing",
+ },
+ // Issue #7838
+ {
+ name: "unknown-language-codes",
+ data: map[string][]byte{
+ "en.toml": []byte(`[readingTime]
+one ="en one"
+other = "en count {{.Count}}"`),
+ "a1.toml": []byte(`[readingTime]
+one = "a1 one"
+other = "a1 count {{ .Count }}"`),
+ "a2.toml": []byte(`[readingTime]
+one = "a2 one"
+other = "a2 count {{ .Count }}"`),
+ },
+ args: 3,
+ lang: "a2",
+ id: "readingTime",
+ expected: "a2 count 3",
+ expectedFlag: "a2 count 3",
+ },
+ // https://github.com/gohugoio/hugo/issues/7798
+ {
+ name: "known-language-missing-plural",
+ data: map[string][]byte{
+ "oc.toml": []byte(`[oc]
+one = "abc"`),
+ },
+ args: 1,
+ lang: "oc",
+ id: "oc",
+ expected: "abc",
+ expectedFlag: "abc",
+ },
+ // https://github.com/gohugoio/hugo/issues/7794
+ {
+ name: "dotted-bare-key",
+ data: map[string][]byte{
+ "en.toml": []byte(`"shop_nextPage.one" = "Show Me The Money"
+`),
+ },
+ args: nil,
+ lang: "en",
+ id: "shop_nextPage.one",
+ expected: "Show Me The Money",
+ expectedFlag: "Show Me The Money",
+ },
+ // https://github.com/gohugoio/hugo/issues/7804
+ {
+ name: "lang-with-hyphen",
+ data: map[string][]byte{
+ "pt-br.toml": []byte(`foo.one = "abc"`),
+ },
+ args: 1,
+ lang: "pt-br",
+ id: "foo",
+ expected: "abc",
+ expectedFlag: "abc",
+ },
+}
+
+func TestPlural(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ name string
+ lang string
+ id string
+ templ string
+ variants []types.KeyValue
+ }{
+ {
+ name: "English",
+ lang: "en",
+ id: "hour",
+ templ: `
+[hour]
+one = "{{ . }} hour"
+other = "{{ . }} hours"`,
+ variants: []types.KeyValue{
+ {Key: 1, Value: "1 hour"},
+ {Key: "1", Value: "1 hour"},
+ {Key: 1.5, Value: "1.5 hours"},
+ {Key: "1.5", Value: "1.5 hours"},
+ {Key: 2, Value: "2 hours"},
+ {Key: "2", Value: "2 hours"},
+ },
+ },
+ {
+ name: "Other only",
+ lang: "en",
+ id: "hour",
+ templ: `
+[hour]
+other = "{{ with . }}{{ . }}{{ end }} hours"`,
+ variants: []types.KeyValue{
+ {Key: 1, Value: "1 hours"},
+ {Key: "1", Value: "1 hours"},
+ {Key: 2, Value: "2 hours"},
+ {Key: nil, Value: " hours"},
+ },
+ },
+ {
+ name: "Polish",
+ lang: "pl",
+ id: "day",
+ templ: `
+[day]
+one = "{{ . }} miesiąc"
+few = "{{ . }} miesiące"
+many = "{{ . }} miesięcy"
+other = "{{ . }} miesiąca"
+`,
+ variants: []types.KeyValue{
+ {Key: 1, Value: "1 miesiąc"},
+ {Key: 2, Value: "2 miesiące"},
+ {Key: 100, Value: "100 miesięcy"},
+ {Key: "100.0", Value: "100.0 miesiąca"},
+ {Key: 100.0, Value: "100 miesiąca"},
+ },
+ },
+ } {
+
+ c.Run(test.name, func(c *qt.C) {
+ cfg := getConfig()
+ cfg.Set("enableMissingTranslationPlaceholders", true)
+ fs := hugofs.NewMem(cfg)
+
+ err := afero.WriteFile(fs.Source, filepath.Join("i18n", test.lang+".toml"), []byte(test.templ), 0755)
+ c.Assert(err, qt.IsNil)
+
+ tp := NewTranslationProvider()
+ depsCfg := newDepsConfig(tp, cfg, fs)
+ depsCfg.Logger = loggers.NewWarningLogger()
+ d, err := deps.New(depsCfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(d.LoadResources(), qt.IsNil)
+
+ f := tp.t.Func(test.lang)
+
+ for _, variant := range test.variants {
+ c.Assert(f(test.id, variant.Key), qt.Equals, variant.Value, qt.Commentf("input: %v", variant.Key))
+ c.Assert(int(depsCfg.Logger.LogCounters().WarnCounter.Count()), qt.Equals, 0)
+ }
+
+ })
+
+ }
+}
+
+func doTestI18nTranslate(t testing.TB, test i18nTest, cfg config.Provider) string {
+ tp := prepareTranslationProvider(t, test, cfg)
+ f := tp.t.Func(test.lang)
+ return f(test.id, test.args)
+}
+
+type countField struct {
+ Count any
+}
+
+type noCountField struct {
+ Counts int
+}
+
+type countMethod struct {
+}
+
+func (c countMethod) Count() any {
+ return 32.5
+}
+
+func TestGetPluralCount(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(getPluralCount(map[string]any{"Count": 32}), qt.Equals, 32)
+ c.Assert(getPluralCount(map[string]any{"Count": 1}), qt.Equals, 1)
+ c.Assert(getPluralCount(map[string]any{"Count": 1.5}), qt.Equals, "1.5")
+ c.Assert(getPluralCount(map[string]any{"Count": "32"}), qt.Equals, "32")
+ c.Assert(getPluralCount(map[string]any{"Count": "32.5"}), qt.Equals, "32.5")
+ c.Assert(getPluralCount(map[string]any{"count": 32}), qt.Equals, 32)
+ c.Assert(getPluralCount(map[string]any{"Count": "32"}), qt.Equals, "32")
+ c.Assert(getPluralCount(map[string]any{"Counts": 32}), qt.Equals, nil)
+ c.Assert(getPluralCount("foo"), qt.Equals, nil)
+ c.Assert(getPluralCount(countField{Count: 22}), qt.Equals, 22)
+ c.Assert(getPluralCount(countField{Count: 1.5}), qt.Equals, "1.5")
+ c.Assert(getPluralCount(&countField{Count: 22}), qt.Equals, 22)
+ c.Assert(getPluralCount(noCountField{Counts: 23}), qt.Equals, nil)
+ c.Assert(getPluralCount(countMethod{}), qt.Equals, "32.5")
+ c.Assert(getPluralCount(&countMethod{}), qt.Equals, "32.5")
+
+ c.Assert(getPluralCount(1234), qt.Equals, 1234)
+ c.Assert(getPluralCount(1234.4), qt.Equals, "1234.4")
+ c.Assert(getPluralCount(1234.0), qt.Equals, "1234.0")
+ c.Assert(getPluralCount("1234"), qt.Equals, "1234")
+ c.Assert(getPluralCount("0.5"), qt.Equals, "0.5")
+ c.Assert(getPluralCount(nil), qt.Equals, nil)
+}
+
+func prepareTranslationProvider(t testing.TB, test i18nTest, cfg config.Provider) *TranslationProvider {
+ c := qt.New(t)
+ fs := hugofs.NewMem(cfg)
+
+ for file, content := range test.data {
+ err := afero.WriteFile(fs.Source, filepath.Join("i18n", file), []byte(content), 0755)
+ c.Assert(err, qt.IsNil)
+ }
+
+ tp := NewTranslationProvider()
+ depsCfg := newDepsConfig(tp, cfg, fs)
+ d, err := deps.New(depsCfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(d.LoadResources(), qt.IsNil)
+
+ return tp
+}
+
+func newDepsConfig(tp *TranslationProvider, cfg config.Provider, fs *hugofs.Fs) deps.DepsCfg {
+ l := langs.NewLanguage("en", cfg)
+ l.Set("i18nDir", "i18n")
+ return deps.DepsCfg{
+ Language: l,
+ Site: page.NewDummyHugoSite(cfg),
+ Cfg: cfg,
+ Fs: fs,
+ Logger: logger,
+ TemplateProvider: tplimpl.DefaultTemplateProvider,
+ TranslationProvider: tp,
+ }
+}
+
+func getConfig() config.Provider {
+ v := config.NewWithTestDefaults()
+ langs.LoadLanguageSettings(v, nil)
+ mod, err := modules.CreateProjectModule(v)
+ if err != nil {
+ panic(err)
+ }
+ v.Set("allModules", modules.Modules{mod})
+
+ return v
+}
+
+func TestI18nTranslate(t *testing.T) {
+ c := qt.New(t)
+ var actual, expected string
+ v := getConfig()
+
+ // Test without and with placeholders
+ for _, enablePlaceholders := range []bool{false, true} {
+ v.Set("enableMissingTranslationPlaceholders", enablePlaceholders)
+
+ for _, test := range i18nTests {
+ c.Run(fmt.Sprintf("%s-%t", test.name, enablePlaceholders), func(c *qt.C) {
+ if enablePlaceholders {
+ expected = test.expectedFlag
+ } else {
+ expected = test.expected
+ }
+ actual = doTestI18nTranslate(c, test, v)
+ c.Assert(actual, qt.Equals, expected)
+ })
+ }
+ }
+}
+
+func BenchmarkI18nTranslate(b *testing.B) {
+ v := getConfig()
+ for _, test := range i18nTests {
+ b.Run(test.name, func(b *testing.B) {
+ tp := prepareTranslationProvider(b, test, v)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ f := tp.t.Func(test.lang)
+ actual := f(test.id, test.args)
+ if actual != test.expected {
+ b.Fatalf("expected %v got %v", test.expected, actual)
+ }
+ }
+ })
+ }
+}
diff --git a/langs/i18n/integration_test.go b/langs/i18n/integration_test.go
new file mode 100644
index 000000000..5599859ee
--- /dev/null
+++ b/langs/i18n/integration_test.go
@@ -0,0 +1,57 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package i18n_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestI18nFromTheme(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[module]
+[[module.imports]]
+path = "mytheme"
+-- i18n/en.toml --
+[l1]
+other = 'l1main'
+[l2]
+other = 'l2main'
+-- themes/mytheme/i18n/en.toml --
+[l1]
+other = 'l1theme'
+[l2]
+other = 'l2theme'
+[l3]
+other = 'l3theme'
+-- layouts/index.html --
+l1: {{ i18n "l1" }}|l2: {{ i18n "l2" }}|l3: {{ i18n "l3" }}
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+l1: l1main|l2: l2main|l3: l3theme
+ `)
+}
diff --git a/langs/i18n/translationProvider.go b/langs/i18n/translationProvider.go
new file mode 100644
index 000000000..d9d334567
--- /dev/null
+++ b/langs/i18n/translationProvider.go
@@ -0,0 +1,143 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package i18n
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "golang.org/x/text/language"
+ yaml "gopkg.in/yaml.v2"
+
+ "github.com/gohugoio/go-i18n/v2/i18n"
+ "github.com/gohugoio/hugo/helpers"
+ toml "github.com/pelletier/go-toml/v2"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/source"
+)
+
+// TranslationProvider provides translation handling, i.e. loading
+// of bundles etc.
+type TranslationProvider struct {
+ t Translator
+}
+
+// NewTranslationProvider creates a new translation provider.
+func NewTranslationProvider() *TranslationProvider {
+ return &TranslationProvider{}
+}
+
+// Update updates the i18n func in the provided Deps.
+func (tp *TranslationProvider) Update(d *deps.Deps) error {
+ spec := source.NewSourceSpec(d.PathSpec, nil, nil)
+
+ bundle := i18n.NewBundle(language.English)
+ bundle.RegisterUnmarshalFunc("toml", toml.Unmarshal)
+ bundle.RegisterUnmarshalFunc("yaml", yaml.Unmarshal)
+ bundle.RegisterUnmarshalFunc("yml", yaml.Unmarshal)
+ bundle.RegisterUnmarshalFunc("json", json.Unmarshal)
+
+ // The source dirs are ordered so the most important comes first. Since this is a
+ // last key win situation, we have to reverse the iteration order.
+ dirs := d.BaseFs.I18n.Dirs
+ for i := len(dirs) - 1; i >= 0; i-- {
+ dir := dirs[i]
+ src := spec.NewFilesystemFromFileMetaInfo(dir)
+ files, err := src.Files()
+ if err != nil {
+ return err
+ }
+ for _, file := range files {
+ if err := addTranslationFile(bundle, file); err != nil {
+ return err
+ }
+ }
+ }
+
+ tp.t = NewTranslator(bundle, d.Cfg, d.Log)
+
+ d.Translate = tp.t.Func(d.Language.Lang)
+
+ return nil
+}
+
+const artificialLangTagPrefix = "art-x-"
+
+func addTranslationFile(bundle *i18n.Bundle, r source.File) error {
+ f, err := r.FileInfo().Meta().Open()
+ if err != nil {
+ return fmt.Errorf("failed to open translations file %q:: %w", r.LogicalName(), err)
+ }
+
+ b := helpers.ReaderToBytes(f)
+ f.Close()
+
+ name := r.LogicalName()
+ lang := paths.Filename(name)
+ tag := language.Make(lang)
+ if tag == language.Und {
+ try := artificialLangTagPrefix + lang
+ _, err = language.Parse(try)
+ if err != nil {
+ return fmt.Errorf("%q: %s", try, err)
+ }
+ name = artificialLangTagPrefix + name
+ }
+
+ _, err = bundle.ParseMessageFileBytes(b, name)
+ if err != nil {
+ if strings.Contains(err.Error(), "no plural rule") {
+ // https://github.com/gohugoio/hugo/issues/7798
+ name = artificialLangTagPrefix + name
+ _, err = bundle.ParseMessageFileBytes(b, name)
+ if err == nil {
+ return nil
+ }
+ }
+ return errWithFileContext(fmt.Errorf("failed to load translations: %w", err), r)
+ }
+
+ return nil
+}
+
+// Clone sets the language func for the new language.
+func (tp *TranslationProvider) Clone(d *deps.Deps) error {
+ d.Translate = tp.t.Func(d.Language.Lang)
+
+ return nil
+}
+
+func errWithFileContext(inerr error, r source.File) error {
+ fim, ok := r.FileInfo().(hugofs.FileMetaInfo)
+ if !ok {
+ return inerr
+ }
+
+ meta := fim.Meta()
+ realFilename := meta.Filename
+ f, err := meta.Open()
+ if err != nil {
+ return inerr
+ }
+ defer f.Close()
+
+ return herrors.NewFileErrorFromName(inerr, realFilename).UpdateContent(f, nil)
+
+}
diff --git a/langs/language.go b/langs/language.go
new file mode 100644
index 000000000..d6b30ec10
--- /dev/null
+++ b/langs/language.go
@@ -0,0 +1,331 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package langs
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/text/collate"
+ "golang.org/x/text/language"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/locales"
+ translators "github.com/gohugoio/localescompressed"
+)
+
+// These are the settings that should only be looked up in the global Viper
+// config and not per language.
+// This list may not be complete, but contains only settings that we know
+// will be looked up in both.
+// This isn't perfect, but it is ultimately the user who shoots him/herself in
+// the foot.
+// See the pathSpec.
+var globalOnlySettings = map[string]bool{
+ strings.ToLower("defaultContentLanguageInSubdir"): true,
+ strings.ToLower("defaultContentLanguage"): true,
+ strings.ToLower("multilingual"): true,
+ strings.ToLower("assetDir"): true,
+ strings.ToLower("resourceDir"): true,
+ strings.ToLower("build"): true,
+}
+
+// Language manages specific-language configuration.
+type Language struct {
+ Lang string
+ LanguageName string
+ LanguageDirection string
+ Title string
+ Weight int
+
+ // For internal use.
+ Disabled bool
+
+ // If set per language, this tells Hugo that all content files without any
+ // language indicator (e.g. my-page.en.md) is in this language.
+ // This is usually a path relative to the working dir, but it can be an
+ // absolute directory reference. It is what we get.
+ // For internal use.
+ ContentDir string
+
+ // Global config.
+ // For internal use.
+ Cfg config.Provider
+
+ // Language specific config.
+ // For internal use.
+ LocalCfg config.Provider
+
+ // Composite config.
+ // For internal use.
+ config.Provider
+
+ // These are params declared in the [params] section of the language merged with the
+ // site's params, the most specific (language) wins on duplicate keys.
+ params map[string]any
+ paramsMu sync.Mutex
+ paramsSet bool
+
+ // Used for date formatting etc. We don't want these exported to the
+ // templates.
+ // TODO(bep) do the same for some of the others.
+ translator locales.Translator
+ timeFormatter htime.TimeFormatter
+ tag language.Tag
+ collator *Collator
+ location *time.Location
+
+ // Error during initialization. Will fail the build.
+ initErr error
+}
+
+// For internal use.
+func (l *Language) String() string {
+ return l.Lang
+}
+
+// NewLanguage creates a new language.
+func NewLanguage(lang string, cfg config.Provider) *Language {
+ // Note that language specific params will be overridden later.
+ // We should improve that, but we need to make a copy:
+ params := make(map[string]any)
+ for k, v := range cfg.GetStringMap("params") {
+ params[k] = v
+ }
+ maps.PrepareParams(params)
+
+ localCfg := config.New()
+ compositeConfig := config.NewCompositeConfig(cfg, localCfg)
+ translator := translators.GetTranslator(lang)
+ if translator == nil {
+ translator = translators.GetTranslator(cfg.GetString("defaultContentLanguage"))
+ if translator == nil {
+ translator = translators.GetTranslator("en")
+ }
+ }
+
+ var coll *Collator
+ tag, err := language.Parse(lang)
+ if err == nil {
+ coll = &Collator{
+ c: collate.New(tag),
+ }
+ } else {
+ coll = &Collator{
+ c: collate.New(language.English),
+ }
+ }
+
+ l := &Language{
+ Lang: lang,
+ ContentDir: cfg.GetString("contentDir"),
+ Cfg: cfg, LocalCfg: localCfg,
+ Provider: compositeConfig,
+ params: params,
+ translator: translator,
+ timeFormatter: htime.NewTimeFormatter(translator),
+ tag: tag,
+ collator: coll,
+ }
+
+ if err := l.loadLocation(cfg.GetString("timeZone")); err != nil {
+ l.initErr = err
+ }
+
+ return l
+}
+
+// NewDefaultLanguage creates the default language for a config.Provider.
+// If not otherwise specified the default is "en".
+func NewDefaultLanguage(cfg config.Provider) *Language {
+ defaultLang := cfg.GetString("defaultContentLanguage")
+
+ if defaultLang == "" {
+ defaultLang = "en"
+ }
+
+ return NewLanguage(defaultLang, cfg)
+}
+
+// Languages is a sortable list of languages.
+type Languages []*Language
+
+// NewLanguages creates a sorted list of languages.
+// NOTE: function is currently unused.
+func NewLanguages(l ...*Language) Languages {
+ languages := make(Languages, len(l))
+ for i := 0; i < len(l); i++ {
+ languages[i] = l[i]
+ }
+ sort.Sort(languages)
+ return languages
+}
+
+func (l Languages) Len() int { return len(l) }
+func (l Languages) Less(i, j int) bool {
+ wi, wj := l[i].Weight, l[j].Weight
+
+ if wi == wj {
+ return l[i].Lang < l[j].Lang
+ }
+
+ return wj == 0 || wi < wj
+}
+
+func (l Languages) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+
+// Params returns language-specific params merged with the global params.
+func (l *Language) Params() maps.Params {
+ // TODO(bep) this construct should not be needed. Create the
+ // language params in one go.
+ l.paramsMu.Lock()
+ defer l.paramsMu.Unlock()
+ if !l.paramsSet {
+ maps.PrepareParams(l.params)
+ l.paramsSet = true
+ }
+ return l.params
+}
+
+func (l Languages) AsSet() map[string]bool {
+ m := make(map[string]bool)
+ for _, lang := range l {
+ m[lang.Lang] = true
+ }
+
+ return m
+}
+
+func (l Languages) AsOrdinalSet() map[string]int {
+ m := make(map[string]int)
+ for i, lang := range l {
+ m[lang.Lang] = i
+ }
+
+ return m
+}
+
+// IsMultihost returns whether there are more than one language and at least one of
+// the languages has baseURL specified on the language level.
+func (l Languages) IsMultihost() bool {
+ if len(l) <= 1 {
+ return false
+ }
+
+ for _, lang := range l {
+ if lang.GetLocal("baseURL") != nil {
+ return true
+ }
+ }
+ return false
+}
+
+// SetParam sets a param with the given key and value.
+// SetParam is case-insensitive.
+// For internal use.
+func (l *Language) SetParam(k string, v any) {
+ l.paramsMu.Lock()
+ defer l.paramsMu.Unlock()
+ if l.paramsSet {
+ panic("params cannot be changed once set")
+ }
+ l.params[k] = v
+}
+
+// GetLocal gets a configuration value set on language level. It will
+// not fall back to any global value.
+// It will return nil if a value with the given key cannot be found.
+// For internal use.
+func (l *Language) GetLocal(key string) any {
+ if l == nil {
+ panic("language not set")
+ }
+ key = strings.ToLower(key)
+ if !globalOnlySettings[key] {
+ return l.LocalCfg.Get(key)
+ }
+ return nil
+}
+
+// For internal use.
+func (l *Language) Set(k string, v any) {
+ k = strings.ToLower(k)
+ if globalOnlySettings[k] {
+ return
+ }
+ l.Provider.Set(k, v)
+}
+
+// Merge is currently not supported for Language.
+// For internal use.
+func (l *Language) Merge(key string, value any) {
+ panic("Not supported")
+}
+
+// IsSet checks whether the key is set in the language or the related config store.
+// For internal use.
+func (l *Language) IsSet(key string) bool {
+ key = strings.ToLower(key)
+ if !globalOnlySettings[key] {
+ return l.Provider.IsSet(key)
+ }
+ return l.Cfg.IsSet(key)
+}
+
+// Internal access to unexported Language fields.
+// This construct is to prevent them from leaking to the templates.
+
+func GetTimeFormatter(l *Language) htime.TimeFormatter {
+ return l.timeFormatter
+}
+
+func GetTranslator(l *Language) locales.Translator {
+ return l.translator
+}
+
+func GetLocation(l *Language) *time.Location {
+ return l.location
+}
+
+func GetCollator(l *Language) *Collator {
+ return l.collator
+}
+
+func (l *Language) loadLocation(tzStr string) error {
+ location, err := time.LoadLocation(tzStr)
+ if err != nil {
+ return fmt.Errorf("invalid timeZone for language %q: %w", l.Lang, err)
+ }
+ l.location = location
+
+ return nil
+}
+
+type Collator struct {
+ sync.Mutex
+ c *collate.Collator
+}
+
+// CompareStrings compares a and b.
+// It returns -1 if a < b, 1 if a > b and 0 if a == b.
+// Note that the Collator is not thread safe, so you may want
+// to acquire a lock on it before calling this method.
+func (c *Collator) CompareStrings(a, b string) int {
+ return c.c.CompareString(a, b)
+}
diff --git a/langs/language_test.go b/langs/language_test.go
new file mode 100644
index 000000000..264e813a0
--- /dev/null
+++ b/langs/language_test.go
@@ -0,0 +1,108 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package langs
+
+import (
+ "sync"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "golang.org/x/text/collate"
+ "golang.org/x/text/language"
+)
+
+func TestGetGlobalOnlySetting(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ v.Set("defaultContentLanguageInSubdir", true)
+ v.Set("contentDir", "content")
+ v.Set("paginatePath", "page")
+ lang := NewDefaultLanguage(v)
+ lang.Set("defaultContentLanguageInSubdir", false)
+ lang.Set("paginatePath", "side")
+
+ c.Assert(lang.GetBool("defaultContentLanguageInSubdir"), qt.Equals, true)
+ c.Assert(lang.GetString("paginatePath"), qt.Equals, "side")
+}
+
+func TestLanguageParams(t *testing.T) {
+ c := qt.New(t)
+
+ v := config.NewWithTestDefaults()
+ v.Set("p1", "p1cfg")
+ v.Set("contentDir", "content")
+
+ lang := NewDefaultLanguage(v)
+ lang.SetParam("p1", "p1p")
+
+ c.Assert(lang.Params()["p1"], qt.Equals, "p1p")
+ c.Assert(lang.Get("p1"), qt.Equals, "p1cfg")
+}
+
+func TestCollator(t *testing.T) {
+
+ c := qt.New(t)
+
+ var wg sync.WaitGroup
+
+ coll := &Collator{c: collate.New(language.English, collate.Loose)}
+
+ for i := 0; i < 10; i++ {
+ wg.Add(1)
+ go func() {
+ coll.Lock()
+ defer coll.Unlock()
+ defer wg.Done()
+ for j := 0; j < 10; j++ {
+ k := coll.CompareStrings("abc", "def")
+ c.Assert(k, qt.Equals, -1)
+ }
+ }()
+ }
+ wg.Wait()
+
+}
+
+func BenchmarkCollator(b *testing.B) {
+ s := []string{"foo", "bar", "éntre", "baz", "qux", "quux", "corge", "grault", "garply", "waldo", "fred", "plugh", "xyzzy", "thud"}
+
+ doWork := func(coll *Collator) {
+ for i := 0; i < len(s); i++ {
+ for j := i + 1; j < len(s); j++ {
+ _ = coll.CompareStrings(s[i], s[j])
+ }
+ }
+ }
+
+ b.Run("Single", func(b *testing.B) {
+ coll := &Collator{c: collate.New(language.English, collate.Loose)}
+ for i := 0; i < b.N; i++ {
+ doWork(coll)
+ }
+ })
+
+ b.Run("Para", func(b *testing.B) {
+ b.RunParallel(func(pb *testing.PB) {
+ coll := &Collator{c: collate.New(language.English, collate.Loose)}
+
+ for pb.Next() {
+ coll.Lock()
+ doWork(coll)
+ coll.Unlock()
+ }
+ })
+ })
+
+}
diff --git a/lazy/init.go b/lazy/init.go
new file mode 100644
index 000000000..b998d0305
--- /dev/null
+++ b/lazy/init.go
@@ -0,0 +1,208 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lazy
+
+import (
+ "context"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "errors"
+)
+
+// New creates a new empty Init.
+func New() *Init {
+ return &Init{}
+}
+
+// Init holds a graph of lazily initialized dependencies.
+type Init struct {
+ // Used in tests
+ initCount uint64
+
+ mu sync.Mutex
+
+ prev *Init
+ children []*Init
+
+ init onceMore
+ out any
+ err error
+ f func() (any, error)
+}
+
+// Add adds a func as a new child dependency.
+func (ini *Init) Add(initFn func() (any, error)) *Init {
+ if ini == nil {
+ ini = New()
+ }
+ return ini.add(false, initFn)
+}
+
+// InitCount gets the number of times this Init has been initialized.
+func (ini *Init) InitCount() int {
+ i := atomic.LoadUint64(&ini.initCount)
+ return int(i)
+}
+
+// AddWithTimeout is same as Add, but with a timeout that aborts initialization.
+func (ini *Init) AddWithTimeout(timeout time.Duration, f func(ctx context.Context) (any, error)) *Init {
+ return ini.Add(func() (any, error) {
+ return ini.withTimeout(timeout, f)
+ })
+}
+
+// Branch creates a new dependency branch based on an existing and adds
+// the given dependency as a child.
+func (ini *Init) Branch(initFn func() (any, error)) *Init {
+ if ini == nil {
+ ini = New()
+ }
+ return ini.add(true, initFn)
+}
+
+// BranchWithTimeout is the same as Branch, but with a timeout.
+func (ini *Init) BranchWithTimeout(timeout time.Duration, f func(ctx context.Context) (any, error)) *Init {
+ return ini.Branch(func() (any, error) {
+ return ini.withTimeout(timeout, f)
+ })
+}
+
+// Do initializes the entire dependency graph.
+func (ini *Init) Do() (any, error) {
+ if ini == nil {
+ panic("init is nil")
+ }
+
+ ini.init.Do(func() {
+ atomic.AddUint64(&ini.initCount, 1)
+ prev := ini.prev
+ if prev != nil {
+ // A branch. Initialize the ancestors.
+ if prev.shouldInitialize() {
+ _, err := prev.Do()
+ if err != nil {
+ ini.err = err
+ return
+ }
+ } else if prev.inProgress() {
+ // Concurrent initialization. The following init func
+ // may depend on earlier state, so wait.
+ prev.wait()
+ }
+ }
+
+ if ini.f != nil {
+ ini.out, ini.err = ini.f()
+ }
+
+ for _, child := range ini.children {
+ if child.shouldInitialize() {
+ _, err := child.Do()
+ if err != nil {
+ ini.err = err
+ return
+ }
+ }
+ }
+ })
+
+ ini.wait()
+
+ return ini.out, ini.err
+}
+
+// TODO(bep) investigate if we can use sync.Cond for this.
+func (ini *Init) wait() {
+ var counter time.Duration
+ for !ini.init.Done() {
+ counter += 10
+ if counter > 600000000 {
+ panic("BUG: timed out in lazy init")
+ }
+ time.Sleep(counter * time.Microsecond)
+ }
+}
+
+func (ini *Init) inProgress() bool {
+ return ini != nil && ini.init.InProgress()
+}
+
+func (ini *Init) shouldInitialize() bool {
+ return !(ini == nil || ini.init.Done() || ini.init.InProgress())
+}
+
+// Reset resets the current and all its dependencies.
+func (ini *Init) Reset() {
+ mu := ini.init.ResetWithLock()
+ ini.err = nil
+ defer mu.Unlock()
+ for _, d := range ini.children {
+ d.Reset()
+ }
+}
+
+func (ini *Init) add(branch bool, initFn func() (any, error)) *Init {
+ ini.mu.Lock()
+ defer ini.mu.Unlock()
+
+ if branch {
+ return &Init{
+ f: initFn,
+ prev: ini,
+ }
+ }
+
+ ini.checkDone()
+ ini.children = append(ini.children, &Init{
+ f: initFn,
+ })
+
+ return ini
+}
+
+func (ini *Init) checkDone() {
+ if ini.init.Done() {
+ panic("init cannot be added to after it has run")
+ }
+}
+
+func (ini *Init) withTimeout(timeout time.Duration, f func(ctx context.Context) (any, error)) (any, error) {
+ ctx, cancel := context.WithTimeout(context.Background(), timeout)
+ defer cancel()
+ c := make(chan verr, 1)
+
+ go func() {
+ v, err := f(ctx)
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ c <- verr{v: v, err: err}
+ }
+ }()
+
+ select {
+ case <-ctx.Done():
+ return nil, errors.New("timed out initializing value. You may have a circular loop in a shortcode, or your site may have resources that take longer to build than the `timeout` limit in your Hugo config file.")
+ case ve := <-c:
+ return ve.v, ve.err
+ }
+}
+
+type verr struct {
+ v any
+ err error
+}
diff --git a/lazy/init_test.go b/lazy/init_test.go
new file mode 100644
index 000000000..4d871b937
--- /dev/null
+++ b/lazy/init_test.go
@@ -0,0 +1,241 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lazy
+
+import (
+ "context"
+ "errors"
+ "math/rand"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var (
+ rnd = rand.New(rand.NewSource(time.Now().UnixNano()))
+ bigOrSmall = func() int {
+ if rnd.Intn(10) < 5 {
+ return 10000 + rnd.Intn(100000)
+ }
+ return 1 + rnd.Intn(50)
+ }
+)
+
+func doWork() {
+ doWorkOfSize(bigOrSmall())
+}
+
+func doWorkOfSize(size int) {
+ _ = strings.Repeat("Hugo Rocks! ", size)
+}
+
+func TestInit(t *testing.T) {
+ c := qt.New(t)
+
+ var result string
+
+ f1 := func(name string) func() (any, error) {
+ return func() (any, error) {
+ result += name + "|"
+ doWork()
+ return name, nil
+ }
+ }
+
+ f2 := func() func() (any, error) {
+ return func() (any, error) {
+ doWork()
+ return nil, nil
+ }
+ }
+
+ root := New()
+
+ root.Add(f1("root(1)"))
+ root.Add(f1("root(2)"))
+
+ branch1 := root.Branch(f1("branch_1"))
+ branch1.Add(f1("branch_1_1"))
+ branch1_2 := branch1.Add(f1("branch_1_2"))
+ branch1_2_1 := branch1_2.Add(f1("branch_1_2_1"))
+
+ var wg sync.WaitGroup
+
+ // Add some concurrency and randomness to verify thread safety and
+ // init order.
+ for i := 0; i < 100; i++ {
+ wg.Add(1)
+ go func(i int) {
+ defer wg.Done()
+ var err error
+ if rnd.Intn(10) < 5 {
+ _, err = root.Do()
+ c.Assert(err, qt.IsNil)
+ }
+
+ // Add a new branch on the fly.
+ if rnd.Intn(10) > 5 {
+ branch := branch1_2.Branch(f2())
+ _, err = branch.Do()
+ c.Assert(err, qt.IsNil)
+ } else {
+ _, err = branch1_2_1.Do()
+ c.Assert(err, qt.IsNil)
+ }
+ _, err = branch1_2.Do()
+ c.Assert(err, qt.IsNil)
+ }(i)
+
+ wg.Wait()
+
+ c.Assert(result, qt.Equals, "root(1)|root(2)|branch_1|branch_1_1|branch_1_2|branch_1_2_1|")
+
+ }
+}
+
+func TestInitAddWithTimeout(t *testing.T) {
+ c := qt.New(t)
+
+ init := New().AddWithTimeout(100*time.Millisecond, func(ctx context.Context) (any, error) {
+ return nil, nil
+ })
+
+ _, err := init.Do()
+
+ c.Assert(err, qt.IsNil)
+}
+
+func TestInitAddWithTimeoutTimeout(t *testing.T) {
+ c := qt.New(t)
+
+ init := New().AddWithTimeout(100*time.Millisecond, func(ctx context.Context) (any, error) {
+ time.Sleep(500 * time.Millisecond)
+ select {
+ case <-ctx.Done():
+ return nil, nil
+ default:
+ }
+ t.Fatal("slept")
+ return nil, nil
+ })
+
+ _, err := init.Do()
+
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ c.Assert(err.Error(), qt.Contains, "timed out")
+
+ time.Sleep(1 * time.Second)
+}
+
+func TestInitAddWithTimeoutError(t *testing.T) {
+ c := qt.New(t)
+
+ init := New().AddWithTimeout(100*time.Millisecond, func(ctx context.Context) (any, error) {
+ return nil, errors.New("failed")
+ })
+
+ _, err := init.Do()
+
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+type T struct {
+ sync.Mutex
+ V1 string
+ V2 string
+}
+
+func (t *T) Add1(v string) {
+ t.Lock()
+ t.V1 += v
+ t.Unlock()
+}
+
+func (t *T) Add2(v string) {
+ t.Lock()
+ t.V2 += v
+ t.Unlock()
+}
+
+// https://github.com/gohugoio/hugo/issues/5901
+func TestInitBranchOrder(t *testing.T) {
+ c := qt.New(t)
+
+ base := New()
+
+ work := func(size int, f func()) func() (any, error) {
+ return func() (any, error) {
+ doWorkOfSize(size)
+ if f != nil {
+ f()
+ }
+
+ return nil, nil
+ }
+ }
+
+ state := &T{}
+
+ base = base.Add(work(10000, func() {
+ state.Add1("A")
+ }))
+
+ inits := make([]*Init, 2)
+ for i := range inits {
+ inits[i] = base.Branch(work(i+1*100, func() {
+ // V1 is A
+ ab := state.V1 + "B"
+ state.Add2(ab)
+ }))
+ }
+
+ var wg sync.WaitGroup
+
+ for _, v := range inits {
+ v := v
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ _, err := v.Do()
+ c.Assert(err, qt.IsNil)
+ }()
+ }
+
+ wg.Wait()
+
+ c.Assert(state.V2, qt.Equals, "ABAB")
+}
+
+// See issue 7043
+func TestResetError(t *testing.T) {
+ c := qt.New(t)
+ r := false
+ i := New().Add(func() (any, error) {
+ if r {
+ return nil, nil
+ }
+ return nil, errors.New("r is false")
+ })
+ _, err := i.Do()
+ c.Assert(err, qt.IsNotNil)
+ i.Reset()
+ r = true
+ _, err = i.Do()
+ c.Assert(err, qt.IsNil)
+
+}
diff --git a/lazy/once.go b/lazy/once.go
new file mode 100644
index 000000000..bdce12c33
--- /dev/null
+++ b/lazy/once.go
@@ -0,0 +1,68 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lazy
+
+import (
+ "sync"
+ "sync/atomic"
+)
+
+// onceMore is similar to sync.Once.
+//
+// Additional features are:
+// * it can be reset, so the action can be repeated if needed
+// * it has methods to check if it's done or in progress
+//
+type onceMore struct {
+ mu sync.Mutex
+ lock uint32
+ done uint32
+}
+
+func (t *onceMore) Do(f func()) {
+ if atomic.LoadUint32(&t.done) == 1 {
+ return
+ }
+
+ // f may call this Do and we would get a deadlock.
+ locked := atomic.CompareAndSwapUint32(&t.lock, 0, 1)
+ if !locked {
+ return
+ }
+ defer atomic.StoreUint32(&t.lock, 0)
+
+ t.mu.Lock()
+ defer t.mu.Unlock()
+
+ // Double check
+ if t.done == 1 {
+ return
+ }
+ defer atomic.StoreUint32(&t.done, 1)
+ f()
+}
+
+func (t *onceMore) InProgress() bool {
+ return atomic.LoadUint32(&t.lock) == 1
+}
+
+func (t *onceMore) Done() bool {
+ return atomic.LoadUint32(&t.done) == 1
+}
+
+func (t *onceMore) ResetWithLock() *sync.Mutex {
+ t.mu.Lock()
+ defer atomic.StoreUint32(&t.done, 0)
+ return &t.mu
+}
diff --git a/livereload/connection.go b/livereload/connection.go
new file mode 100644
index 000000000..4e94e2ee0
--- /dev/null
+++ b/livereload/connection.go
@@ -0,0 +1,66 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package livereload
+
+import (
+ "bytes"
+ "sync"
+
+ "github.com/gorilla/websocket"
+)
+
+type connection struct {
+ // The websocket connection.
+ ws *websocket.Conn
+
+ // Buffered channel of outbound messages.
+ send chan []byte
+
+ // There is a potential data race, especially visible with large files.
+ // This is protected by synchronisation of the send channel's close.
+ closer sync.Once
+}
+
+func (c *connection) close() {
+ c.closer.Do(func() {
+ close(c.send)
+ })
+}
+
+func (c *connection) reader() {
+ for {
+ _, message, err := c.ws.ReadMessage()
+ if err != nil {
+ break
+ }
+ if bytes.Contains(message, []byte(`"command":"hello"`)) {
+ c.send <- []byte(`{
+ "command": "hello",
+ "protocols": [ "http://livereload.com/protocols/official-7" ],
+ "serverName": "Hugo"
+ }`)
+ }
+ }
+ c.ws.Close()
+}
+
+func (c *connection) writer() {
+ for message := range c.send {
+ err := c.ws.WriteMessage(websocket.TextMessage, message)
+ if err != nil {
+ break
+ }
+ }
+ c.ws.Close()
+}
diff --git a/livereload/hub.go b/livereload/hub.go
new file mode 100644
index 000000000..8ab6083ad
--- /dev/null
+++ b/livereload/hub.go
@@ -0,0 +1,56 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package livereload
+
+type hub struct {
+ // Registered connections.
+ connections map[*connection]bool
+
+ // Inbound messages from the connections.
+ broadcast chan []byte
+
+ // Register requests from the connections.
+ register chan *connection
+
+ // Unregister requests from connections.
+ unregister chan *connection
+}
+
+var wsHub = hub{
+ broadcast: make(chan []byte),
+ register: make(chan *connection),
+ unregister: make(chan *connection),
+ connections: make(map[*connection]bool),
+}
+
+func (h *hub) run() {
+ for {
+ select {
+ case c := <-h.register:
+ h.connections[c] = true
+ case c := <-h.unregister:
+ delete(h.connections, c)
+ c.close()
+ case m := <-h.broadcast:
+ for c := range h.connections {
+ select {
+ case c.send <- m:
+ default:
+ delete(h.connections, c)
+ c.close()
+ }
+ }
+ }
+ }
+}
diff --git a/livereload/livereload.go b/livereload/livereload.go
new file mode 100644
index 000000000..246a7393c
--- /dev/null
+++ b/livereload/livereload.go
@@ -0,0 +1,193 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Contains an embedded version of livereload.js
+//
+// Copyright (c) 2010-2015 Andrey Tarantsov
+//
+// Permission is hereby granted, free of charge, to any person obtaining
+// a copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to
+// permit persons to whom the Software is furnished to do so, subject to
+// the following conditions:
+//
+// The above copyright notice and this permission notice shall be
+// included in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+package livereload
+
+import (
+ "fmt"
+ "net"
+ "net/http"
+ "net/url"
+ "path/filepath"
+
+ "github.com/gorilla/websocket"
+)
+
+// Prefix to signal to LiveReload that we need to navigate to another path.
+const hugoNavigatePrefix = "__hugo_navigate"
+
+var upgrader = &websocket.Upgrader{
+ // Hugo may potentially spin up multiple HTTP servers, so we need to exclude the
+ // port when checking the origin.
+ CheckOrigin: func(r *http.Request) bool {
+ origin := r.Header["Origin"]
+ if len(origin) == 0 {
+ return true
+ }
+ u, err := url.Parse(origin[0])
+ if err != nil {
+ return false
+ }
+
+ rHost := r.Host
+ // For Github codespace in browser #9936
+ if forwardedHost := r.Header.Get("X-Forwarded-Host"); forwardedHost != "" {
+ rHost = forwardedHost
+ }
+
+ if u.Host == rHost {
+ return true
+ }
+
+ h1, _, err := net.SplitHostPort(u.Host)
+ if err != nil {
+ return false
+ }
+ h2, _, err := net.SplitHostPort(r.Host)
+ if err != nil {
+ return false
+ }
+
+ return h1 == h2
+ },
+ ReadBufferSize: 1024, WriteBufferSize: 1024,
+}
+
+// Handler is a HandlerFunc handling the livereload
+// Websocket interaction.
+func Handler(w http.ResponseWriter, r *http.Request) {
+ ws, err := upgrader.Upgrade(w, r, nil)
+ if err != nil {
+ return
+ }
+ c := &connection{send: make(chan []byte, 256), ws: ws}
+ wsHub.register <- c
+ defer func() { wsHub.unregister <- c }()
+ go c.writer()
+ c.reader()
+}
+
+// Initialize starts the Websocket Hub handling live reloads.
+func Initialize() {
+ go wsHub.run()
+}
+
+// ForceRefresh tells livereload to force a hard refresh.
+func ForceRefresh() {
+ RefreshPath("/x.js")
+}
+
+// NavigateToPath tells livereload to navigate to the given path.
+// This translates to `window.location.href = path` in the client.
+func NavigateToPath(path string) {
+ RefreshPath(hugoNavigatePrefix + path)
+}
+
+// NavigateToPathForPort is similar to NavigateToPath but will also
+// set window.location.port to the given port value.
+func NavigateToPathForPort(path string, port int) {
+ refreshPathForPort(hugoNavigatePrefix+path, port)
+}
+
+// RefreshPath tells livereload to refresh only the given path.
+// If that path points to a CSS stylesheet or an image, only the changes
+// will be updated in the browser, not the entire page.
+func RefreshPath(s string) {
+ refreshPathForPort(s, -1)
+}
+
+func refreshPathForPort(s string, port int) {
+ // Tell livereload a file has changed - will force a hard refresh if not CSS or an image
+ urlPath := filepath.ToSlash(s)
+ portStr := ""
+ if port > 0 {
+ portStr = fmt.Sprintf(`, "overrideURL": %d`, port)
+ }
+ msg := fmt.Sprintf(`{"command":"reload","path":%q,"originalPath":"","liveCSS":true,"liveImg":true%s}`, urlPath, portStr)
+ wsHub.broadcast <- []byte(msg)
+}
+
+// ServeJS serves the livereload.js whose reference is injected into the page.
+func ServeJS(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "application/javascript")
+ w.Write(liveReloadJS())
+}
+
+func liveReloadJS() []byte {
+ return []byte(livereloadJS + hugoLiveReloadPlugin)
+}
+
+var (
+ // This is a patched version, see https://github.com/livereload/livereload-js/pull/84
+ livereloadJS = `!function(){return function e(t,o,n){function r(s,c){if(!o[s]){if(!t[s]){var a="function"==typeof require&&require;if(!c&&a)return a(s,!0);if(i)return i(s,!0);var l=new Error("Cannot find module '"+s+"'");throw l.code="MODULE_NOT_FOUND",l}var h=o[s]={exports:{}};t[s][0].call(h.exports,function(e){return r(t[s][1][e]||e)},h,h.exports,e,t,o,n)}return o[s].exports}for(var i="function"==typeof require&&require,s=0;s<n.length;s++)r(n[s]);return r}}()({1:[function(e,t,o){t.exports=function(e){if("function"!=typeof e)throw TypeError(e+" is not a function!");return e}},{}],2:[function(e,t,o){var n=e("./_wks")("unscopables"),r=Array.prototype;null==r[n]&&e("./_hide")(r,n,{}),t.exports=function(e){r[n][e]=!0}},{"./_hide":17,"./_wks":45}],3:[function(e,t,o){var n=e("./_is-object");t.exports=function(e){if(!n(e))throw TypeError(e+" is not an object!");return e}},{"./_is-object":21}],4:[function(e,t,o){var n=e("./_to-iobject"),r=e("./_to-length"),i=e("./_to-absolute-index");t.exports=function(e){return function(t,o,s){var c,a=n(t),l=r(a.length),h=i(s,l);if(e&&o!=o){for(;l>h;)if((c=a[h++])!=c)return!0}else for(;l>h;h++)if((e||h in a)&&a[h]===o)return e||h||0;return!e&&-1}}},{"./_to-absolute-index":38,"./_to-iobject":40,"./_to-length":41}],5:[function(e,t,o){var n={}.toString;t.exports=function(e){return n.call(e).slice(8,-1)}},{}],6:[function(e,t,o){var n=t.exports={version:"2.6.5"};"number"==typeof __e&&(__e=n)},{}],7:[function(e,t,o){var n=e("./_a-function");t.exports=function(e,t,o){if(n(e),void 0===t)return e;switch(o){case 1:return function(o){return e.call(t,o)};case 2:return function(o,n){return e.call(t,o,n)};case 3:return function(o,n,r){return e.call(t,o,n,r)}}return function(){return e.apply(t,arguments)}}},{"./_a-function":1}],8:[function(e,t,o){t.exports=function(e){if(null==e)throw TypeError("Can't call method on "+e);return e}},{}],9:[function(e,t,o){t.exports=!e("./_fails")(function(){return 7!=Object.defineProperty({},"a",{get:function(){return 
7}}).a})},{"./_fails":13}],10:[function(e,t,o){var n=e("./_is-object"),r=e("./_global").document,i=n(r)&&n(r.createElement);t.exports=function(e){return i?r.createElement(e):{}}},{"./_global":15,"./_is-object":21}],11:[function(e,t,o){t.exports="constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(",")},{}],12:[function(e,t,o){var n=e("./_global"),r=e("./_core"),i=e("./_hide"),s=e("./_redefine"),c=e("./_ctx"),a=function(e,t,o){var l,h,u,d,f=e&a.F,p=e&a.G,_=e&a.S,m=e&a.P,g=e&a.B,y=p?n:_?n[t]||(n[t]={}):(n[t]||{}).prototype,v=p?r:r[t]||(r[t]={}),w=v.prototype||(v.prototype={});for(l in p&&(o=t),o)u=((h=!f&&y&&void 0!==y[l])?y:o)[l],d=g&&h?c(u,n):m&&"function"==typeof u?c(Function.call,u):u,y&&s(y,l,u,e&a.U),v[l]!=u&&i(v,l,d),m&&w[l]!=u&&(w[l]=u)};n.core=r,a.F=1,a.G=2,a.S=4,a.P=8,a.B=16,a.W=32,a.U=64,a.R=128,t.exports=a},{"./_core":6,"./_ctx":7,"./_global":15,"./_hide":17,"./_redefine":34}],13:[function(e,t,o){t.exports=function(e){try{return!!e()}catch(e){return!0}}},{}],14:[function(e,t,o){t.exports=e("./_shared")("native-function-to-string",Function.toString)},{"./_shared":37}],15:[function(e,t,o){var n=t.exports="undefined"!=typeof window&&window.Math==Math?window:"undefined"!=typeof self&&self.Math==Math?self:Function("return this")();"number"==typeof __g&&(__g=n)},{}],16:[function(e,t,o){var n={}.hasOwnProperty;t.exports=function(e,t){return n.call(e,t)}},{}],17:[function(e,t,o){var n=e("./_object-dp"),r=e("./_property-desc");t.exports=e("./_descriptors")?function(e,t,o){return n.f(e,t,r(1,o))}:function(e,t,o){return e[t]=o,e}},{"./_descriptors":9,"./_object-dp":28,"./_property-desc":33}],18:[function(e,t,o){var n=e("./_global").document;t.exports=n&&n.documentElement},{"./_global":15}],19:[function(e,t,o){t.exports=!e("./_descriptors")&&!e("./_fails")(function(){return 7!=Object.defineProperty(e("./_dom-create")("div"),"a",{get:function(){return 
7}}).a})},{"./_descriptors":9,"./_dom-create":10,"./_fails":13}],20:[function(e,t,o){var n=e("./_cof");t.exports=Object("z").propertyIsEnumerable(0)?Object:function(e){return"String"==n(e)?e.split(""):Object(e)}},{"./_cof":5}],21:[function(e,t,o){t.exports=function(e){return"object"==typeof e?null!==e:"function"==typeof e}},{}],22:[function(e,t,o){"use strict";var n=e("./_object-create"),r=e("./_property-desc"),i=e("./_set-to-string-tag"),s={};e("./_hide")(s,e("./_wks")("iterator"),function(){return this}),t.exports=function(e,t,o){e.prototype=n(s,{next:r(1,o)}),i(e,t+" Iterator")}},{"./_hide":17,"./_object-create":27,"./_property-desc":33,"./_set-to-string-tag":35,"./_wks":45}],23:[function(e,t,o){"use strict";var n=e("./_library"),r=e("./_export"),i=e("./_redefine"),s=e("./_hide"),c=e("./_iterators"),a=e("./_iter-create"),l=e("./_set-to-string-tag"),h=e("./_object-gpo"),u=e("./_wks")("iterator"),d=!([].keys&&"next"in[].keys()),f=function(){return this};t.exports=function(e,t,o,p,_,m,g){a(o,t,p);var y,v,w,b=function(e){if(!d&&e in L)return L[e];switch(e){case"keys":case"values":return function(){return new o(this,e)}}return function(){return new o(this,e)}},S=t+" Iterator",R="values"==_,k=!1,L=e.prototype,x=L[u]||L["@@iterator"]||_&&L[_],j=x||b(_),C=_?R?b("entries"):j:void 0,O="Array"==t&&L.entries||x;if(O&&(w=h(O.call(new e)))!==Object.prototype&&w.next&&(l(w,S,!0),n||"function"==typeof w[u]||s(w,u,f)),R&&x&&"values"!==x.name&&(k=!0,j=function(){return x.call(this)}),n&&!g||!d&&!k&&L[u]||s(L,u,j),c[t]=j,c[S]=f,_)if(y={values:R?j:b("values"),keys:m?j:b("keys"),entries:C},g)for(v in y)v in L||i(L,v,y[v]);else r(r.P+r.F*(d||k),t,y);return 
y}},{"./_export":12,"./_hide":17,"./_iter-create":22,"./_iterators":25,"./_library":26,"./_object-gpo":30,"./_redefine":34,"./_set-to-string-tag":35,"./_wks":45}],24:[function(e,t,o){t.exports=function(e,t){return{value:t,done:!!e}}},{}],25:[function(e,t,o){t.exports={}},{}],26:[function(e,t,o){t.exports=!1},{}],27:[function(e,t,o){var n=e("./_an-object"),r=e("./_object-dps"),i=e("./_enum-bug-keys"),s=e("./_shared-key")("IE_PROTO"),c=function(){},a=function(){var t,o=e("./_dom-create")("iframe"),n=i.length;for(o.style.display="none",e("./_html").appendChild(o),o.src="javascript:",(t=o.contentWindow.document).open(),t.write("<script>document.F=Object<\/script>"),t.close(),a=t.F;n--;)delete a.prototype[i[n]];return a()};t.exports=Object.create||function(e,t){var o;return null!==e?(c.prototype=n(e),o=new c,c.prototype=null,o[s]=e):o=a(),void 0===t?o:r(o,t)}},{"./_an-object":3,"./_dom-create":10,"./_enum-bug-keys":11,"./_html":18,"./_object-dps":29,"./_shared-key":36}],28:[function(e,t,o){var n=e("./_an-object"),r=e("./_ie8-dom-define"),i=e("./_to-primitive"),s=Object.defineProperty;o.f=e("./_descriptors")?Object.defineProperty:function(e,t,o){if(n(e),t=i(t,!0),n(o),r)try{return s(e,t,o)}catch(e){}if("get"in o||"set"in o)throw TypeError("Accessors not supported!");return"value"in o&&(e[t]=o.value),e}},{"./_an-object":3,"./_descriptors":9,"./_ie8-dom-define":19,"./_to-primitive":43}],29:[function(e,t,o){var n=e("./_object-dp"),r=e("./_an-object"),i=e("./_object-keys");t.exports=e("./_descriptors")?Object.defineProperties:function(e,t){r(e);for(var o,s=i(t),c=s.length,a=0;c>a;)n.f(e,o=s[a++],t[o]);return e}},{"./_an-object":3,"./_descriptors":9,"./_object-dp":28,"./_object-keys":32}],30:[function(e,t,o){var n=e("./_has"),r=e("./_to-object"),i=e("./_shared-key")("IE_PROTO"),s=Object.prototype;t.exports=Object.getPrototypeOf||function(e){return e=r(e),n(e,i)?e[i]:"function"==typeof e.constructor&&e instanceof e.constructor?e.constructor.prototype:e instanceof 
Object?s:null}},{"./_has":16,"./_shared-key":36,"./_to-object":42}],31:[function(e,t,o){var n=e("./_has"),r=e("./_to-iobject"),i=e("./_array-includes")(!1),s=e("./_shared-key")("IE_PROTO");t.exports=function(e,t){var o,c=r(e),a=0,l=[];for(o in c)o!=s&&n(c,o)&&l.push(o);for(;t.length>a;)n(c,o=t[a++])&&(~i(l,o)||l.push(o));return l}},{"./_array-includes":4,"./_has":16,"./_shared-key":36,"./_to-iobject":40}],32:[function(e,t,o){var n=e("./_object-keys-internal"),r=e("./_enum-bug-keys");t.exports=Object.keys||function(e){return n(e,r)}},{"./_enum-bug-keys":11,"./_object-keys-internal":31}],33:[function(e,t,o){t.exports=function(e,t){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:t}}},{}],34:[function(e,t,o){var n=e("./_global"),r=e("./_hide"),i=e("./_has"),s=e("./_uid")("src"),c=e("./_function-to-string"),a=(""+c).split("toString");e("./_core").inspectSource=function(e){return c.call(e)},(t.exports=function(e,t,o,c){var l="function"==typeof o;l&&(i(o,"name")||r(o,"name",t)),e[t]!==o&&(l&&(i(o,s)||r(o,s,e[t]?""+e[t]:a.join(String(t)))),e===n?e[t]=o:c?e[t]?e[t]=o:r(e,t,o):(delete e[t],r(e,t,o)))})(Function.prototype,"toString",function(){return"function"==typeof this&&this[s]||c.call(this)})},{"./_core":6,"./_function-to-string":14,"./_global":15,"./_has":16,"./_hide":17,"./_uid":44}],35:[function(e,t,o){var n=e("./_object-dp").f,r=e("./_has"),i=e("./_wks")("toStringTag");t.exports=function(e,t,o){e&&!r(e=o?e:e.prototype,i)&&n(e,i,{configurable:!0,value:t})}},{"./_has":16,"./_object-dp":28,"./_wks":45}],36:[function(e,t,o){var n=e("./_shared")("keys"),r=e("./_uid");t.exports=function(e){return n[e]||(n[e]=r(e))}},{"./_shared":37,"./_uid":44}],37:[function(e,t,o){var n=e("./_core"),r=e("./_global"),i=r["__core-js_shared__"]||(r["__core-js_shared__"]={});(t.exports=function(e,t){return i[e]||(i[e]=void 0!==t?t:{})})("versions",[]).push({version:n.version,mode:e("./_library")?"pure":"global",copyright:"© 2019 Denis Pushkarev 
(zloirock.ru)"})},{"./_core":6,"./_global":15,"./_library":26}],38:[function(e,t,o){var n=e("./_to-integer"),r=Math.max,i=Math.min;t.exports=function(e,t){return(e=n(e))<0?r(e+t,0):i(e,t)}},{"./_to-integer":39}],39:[function(e,t,o){var n=Math.ceil,r=Math.floor;t.exports=function(e){return isNaN(e=+e)?0:(e>0?r:n)(e)}},{}],40:[function(e,t,o){var n=e("./_iobject"),r=e("./_defined");t.exports=function(e){return n(r(e))}},{"./_defined":8,"./_iobject":20}],41:[function(e,t,o){var n=e("./_to-integer"),r=Math.min;t.exports=function(e){return e>0?r(n(e),9007199254740991):0}},{"./_to-integer":39}],42:[function(e,t,o){var n=e("./_defined");t.exports=function(e){return Object(n(e))}},{"./_defined":8}],43:[function(e,t,o){var n=e("./_is-object");t.exports=function(e,t){if(!n(e))return e;var o,r;if(t&&"function"==typeof(o=e.toString)&&!n(r=o.call(e)))return r;if("function"==typeof(o=e.valueOf)&&!n(r=o.call(e)))return r;if(!t&&"function"==typeof(o=e.toString)&&!n(r=o.call(e)))return r;throw TypeError("Can't convert object to primitive value")}},{"./_is-object":21}],44:[function(e,t,o){var n=0,r=Math.random();t.exports=function(e){return"Symbol(".concat(void 0===e?"":e,")_",(++n+r).toString(36))}},{}],45:[function(e,t,o){var n=e("./_shared")("wks"),r=e("./_uid"),i=e("./_global").Symbol,s="function"==typeof i;(t.exports=function(e){return n[e]||(n[e]=s&&i[e]||(s?i:r)("Symbol."+e))}).store=n},{"./_global":15,"./_shared":37,"./_uid":44}],46:[function(e,t,o){"use strict";var n=e("./_add-to-unscopables"),r=e("./_iter-step"),i=e("./_iterators"),s=e("./_to-iobject");t.exports=e("./_iter-define")(Array,"Array",function(e,t){this._t=s(e),this._i=0,this._k=t},function(){var e=this._t,t=this._k,o=this._i++;return!e||o>=e.length?(this._t=void 0,r(1)):r(0,"keys"==t?o:"values"==t?e[o]:[o,e[o]])},"values"),i.Arguments=i.Array,n("keys"),n("values"),n("entries")},{"./_add-to-unscopables":2,"./_iter-define":23,"./_iter-step":24,"./_iterators":25,"./_to-iobject":40}],47:[function(e,t,o){for(var 
n=e("./es6.array.iterator"),r=e("./_object-keys"),i=e("./_redefine"),s=e("./_global"),c=e("./_hide"),a=e("./_iterators"),l=e("./_wks"),h=l("iterator"),u=l("toStringTag"),d=a.Array,f={CSSRuleList:!0,CSSStyleDeclaration:!1,CSSValueList:!1,ClientRectList:!1,DOMRectList:!1,DOMStringList:!1,DOMTokenList:!0,DataTransferItemList:!1,FileList:!1,HTMLAllCollection:!1,HTMLCollection:!1,HTMLFormElement:!1,HTMLSelectElement:!1,MediaList:!0,MimeTypeArray:!1,NamedNodeMap:!1,NodeList:!0,PaintRequestList:!1,Plugin:!1,PluginArray:!1,SVGLengthList:!1,SVGNumberList:!1,SVGPathSegList:!1,SVGPointList:!1,SVGStringList:!1,SVGTransformList:!1,SourceBufferList:!1,StyleSheetList:!0,TextTrackCueList:!1,TextTrackList:!1,TouchList:!1},p=r(f),_=0;_<p.length;_++){var m,g=p[_],y=f[g],v=s[g],w=v&&v.prototype;if(w&&(w[h]||c(w,h,d),w[u]||c(w,u,g),a[g]=d,y))for(m in n)w[m]||i(w,m,n[m],!0)}},{"./_global":15,"./_hide":17,"./_iterators":25,"./_object-keys":32,"./_redefine":34,"./_wks":45,"./es6.array.iterator":46}],48:[function(e,t,o){"use strict";const{Parser:n,PROTOCOL_6:r,PROTOCOL_7:i}=e("./protocol"),s="3.0.0";o.Connector=class{constructor(e,t,o,r){this.options=e,this.WebSocket=t,this.Timer=o,this.handlers=r;const i=this.options.path?"".concat(this.options.path):"livereload";this._uri="ws".concat(this.options.https?"s":"","://").concat(this.options.host,":").concat(this.options.port,"/").concat(i),this._nextDelay=this.options.mindelay,this._connectionDesired=!1,this.protocol=0,this.protocolParser=new n({connected:e=>(this.protocol=e,this._handshakeTimeout.stop(),this._nextDelay=this.options.mindelay,this._disconnectionReason="broken",this.handlers.connected(this.protocol)),error:e=>(this.handlers.error(e),this._closeOnError()),message:e=>this.handlers.message(e)}),this._handshakeTimeout=new this.Timer(()=>{if(this._isSocketConnected())return this._disconnectionReason="handshake-timeout",this.socket.close()}),this._reconnectTimer=new this.Timer(()=>{if(this._connectionDesired)return 
this.connect()}),this.connect()}_isSocketConnected(){return this.socket&&this.socket.readyState===this.WebSocket.OPEN}connect(){this._connectionDesired=!0,this._isSocketConnected()||(this._reconnectTimer.stop(),this._disconnectionReason="cannot-connect",this.protocolParser.reset(),this.handlers.connecting(),this.socket=new this.WebSocket(this._uri),this.socket.onopen=(e=>this._onopen(e)),this.socket.onclose=(e=>this._onclose(e)),this.socket.onmessage=(e=>this._onmessage(e)),this.socket.onerror=(e=>this._onerror(e)))}disconnect(){if(this._connectionDesired=!1,this._reconnectTimer.stop(),this._isSocketConnected())return this._disconnectionReason="manual",this.socket.close()}_scheduleReconnection(){this._connectionDesired&&(this._reconnectTimer.running||(this._reconnectTimer.start(this._nextDelay),this._nextDelay=Math.min(this.options.maxdelay,2*this._nextDelay)))}sendCommand(e){if(this.protocol)return this._sendCommand(e)}_sendCommand(e){return this.socket.send(JSON.stringify(e))}_closeOnError(){return this._handshakeTimeout.stop(),this._disconnectionReason="error",this.socket.close()}_onopen(e){this.handlers.socketConnected(),this._disconnectionReason="handshake-failed";const t={command:"hello",protocols:[r,i]};return t.ver=s,this.options.ext&&(t.ext=this.options.ext),this.options.extver&&(t.extver=this.options.extver),this.options.snipver&&(t.snipver=this.options.snipver),this._sendCommand(t),this._handshakeTimeout.start(this.options.handshake_timeout)}_onclose(e){return this.protocol=0,this.handlers.disconnected(this._disconnectionReason,this._nextDelay),this._scheduleReconnection()}_onerror(e){}_onmessage(e){return this.protocolParser.process(e.data)}}},{"./protocol":53}],49:[function(e,t,o){"use strict";const n={bind(e,t,o){if(e.addEventListener)return e.addEventListener(t,o,!1);if(e.attachEvent)return e[t]=1,e.attachEvent("onpropertychange",function(e){if(e.propertyName===t)return o()});throw new Error("Attempt to attach custom event ".concat(t," to something 
which isn't a DOMElement"))},fire(e,t){if(e.addEventListener){const e=document.createEvent("HTMLEvents");return e.initEvent(t,!0,!0),document.dispatchEvent(e)}if(!e.attachEvent)throw new Error("Attempt to fire custom event ".concat(t," on something which isn't a DOMElement"));if(e[t])return e[t]++}};o.bind=n.bind,o.fire=n.fire},{}],50:[function(e,t,o){"use strict";class n{constructor(e,t){this.window=e,this.host=t}reload(e,t){if(this.window.less&&this.window.less.refresh){if(e.match(/\.less$/i))return this.reloadLess(e);if(t.originalPath.match(/\.less$/i))return this.reloadLess(t.originalPath)}return!1}reloadLess(e){let t;const o=(()=>{const e=[];for(t of Array.from(document.getElementsByTagName("link")))(t.href&&t.rel.match(/^stylesheet\/less$/i)||t.rel.match(/stylesheet/i)&&t.type.match(/^text\/(x-)?less$/i))&&e.push(t);return e})();if(0===o.length)return!1;for(t of Array.from(o))t.href=this.host.generateCacheBustUrl(t.href);return this.host.console.log("LiveReload is asking LESS to recompile all stylesheets"),this.window.less.refresh(!0),!0}analyze(){return{disable:!(!this.window.less||!this.window.less.refresh)}}}n.identifier="less",n.version="1.0",t.exports=n},{}],51:[function(e,t,o){"use strict";e("core-js/modules/web.dom.iterable");const{Connector:n}=e("./connector"),{Timer:r}=e("./timer"),{Options:i}=e("./options"),{Reloader:s}=e("./reloader"),{ProtocolError:c}=e("./protocol");o.LiveReload=class{constructor(e){if(this.window=e,this.listeners={},this.plugins=[],this.pluginIdentifiers={},this.console=this.window.console&&this.window.console.log&&this.window.console.error?this.window.location.href.match(/LR-verbose/)?this.window.console:{log(){},error:this.window.console.error.bind(this.window.console)}:{log(){},error(){}},this.WebSocket=this.window.WebSocket||this.window.MozWebSocket){if("LiveReloadOptions"in e){this.options=new i;for(let t of Object.keys(e.LiveReloadOptions||{})){const o=e.LiveReloadOptions[t];this.options.set(t,o)}}else 
if(this.options=i.extract(this.window.document),!this.options)return void this.console.error("LiveReload disabled because it could not find its own <SCRIPT> tag");this.reloader=new s(this.window,this.console,r),this.connector=new n(this.options,this.WebSocket,r,{connecting:()=>{},socketConnected:()=>{},connected:e=>("function"==typeof this.listeners.connect&&this.listeners.connect(),this.log("LiveReload is connected to ".concat(this.options.host,":").concat(this.options.port," (protocol v").concat(e,").")),this.analyze()),error:e=>{if(e instanceof c){if("undefined"!=typeof console&&null!==console)return console.log("".concat(e.message,"."))}else if("undefined"!=typeof console&&null!==console)return console.log("LiveReload internal error: ".concat(e.message))},disconnected:(e,t)=>{switch("function"==typeof this.listeners.disconnect&&this.listeners.disconnect(),e){case"cannot-connect":return this.log("LiveReload cannot connect to ".concat(this.options.host,":").concat(this.options.port,", will retry in ").concat(t," sec."));case"broken":return this.log("LiveReload disconnected from ".concat(this.options.host,":").concat(this.options.port,", reconnecting in ").concat(t," sec."));case"handshake-timeout":return this.log("LiveReload cannot connect to ".concat(this.options.host,":").concat(this.options.port," (handshake timeout), will retry in ").concat(t," sec."));case"handshake-failed":return this.log("LiveReload cannot connect to ".concat(this.options.host,":").concat(this.options.port," (handshake failed), will retry in ").concat(t," sec."));case"manual":case"error":default:return this.log("LiveReload disconnected from ".concat(this.options.host,":").concat(this.options.port," (").concat(e,"), reconnecting in ").concat(t," sec."))}},message:e=>{switch(e.command){case"reload":return this.performReload(e);case"alert":return this.performAlert(e)}}}),this.initialized=!0}else this.console.error("LiveReload disabled because the browser does not seem to support web 
sockets")}on(e,t){this.listeners[e]=t}log(e){return this.console.log("".concat(e))}performReload(e){return this.log("LiveReload received reload request: ".concat(JSON.stringify(e,null,2))),this.reloader.reload(e.path,{liveCSS:null==e.liveCSS||e.liveCSS,liveImg:null==e.liveImg||e.liveImg,reloadMissingCSS:null==e.reloadMissingCSS||e.reloadMissingCSS,originalPath:e.originalPath||"",overrideURL:e.overrideURL||"",serverURL:"http://".concat(this.options.host,":").concat(this.options.port)})}performAlert(e){return alert(e.message)}shutDown(){if(this.initialized)return this.connector.disconnect(),this.log("LiveReload disconnected."),"function"==typeof this.listeners.shutdown?this.listeners.shutdown():void 0}hasPlugin(e){return!!this.pluginIdentifiers[e]}addPlugin(e){if(!this.initialized)return;if(this.hasPlugin(e.identifier))return;this.pluginIdentifiers[e.identifier]=!0;const t=new e(this.window,{_livereload:this,_reloader:this.reloader,_connector:this.connector,console:this.console,Timer:r,generateCacheBustUrl:e=>this.reloader.generateCacheBustUrl(e)});this.plugins.push(t),this.reloader.addPlugin(t)}analyze(){if(!this.initialized)return;if(!(this.connector.protocol>=7))return;const e={};for(let o of this.plugins){var t=("function"==typeof o.analyze?o.analyze():void 0)||{};e[o.constructor.identifier]=t,t.version=o.constructor.version}this.connector.sendCommand({command:"info",plugins:e,url:this.window.location.href})}}},{"./connector":48,"./options":52,"./protocol":53,"./reloader":54,"./timer":56,"core-js/modules/web.dom.iterable":47}],52:[function(e,t,o){"use strict";class n{constructor(){this.https=!1,this.host=null,this.port=35729,this.snipver=null,this.ext=null,this.extver=null,this.mindelay=1e3,this.maxdelay=6e4,this.handshake_timeout=5e3}set(e,t){void 0!==t&&(isNaN(+t)||(t=+t),this[e]=t)}}n.extract=function(e){for(let s of Array.from(e.getElementsByTagName("script"))){var t,o;if((o=s.src)&&(t=o.match(new RegExp("^[^:]+://(.*)/z?livereload\\.js(?:\\?(.*))?$")))){var 
r;const e=new n;if(e.https=0===o.indexOf("https"),(r=t[1].match(new RegExp("^([^/:]+)(?::(\\d+))?(\\/+.*)?$")))&&(e.host=r[1],r[2]&&(e.port=parseInt(r[2],10))),t[2])for(let o of t[2].split("&")){var i;(i=o.split("=")).length>1&&e.set(i[0].replace(/-/g,"_"),i.slice(1).join("="))}return e}}return null},o.Options=n},{}],53:[function(e,t,o){"use strict";let n,r;o.PROTOCOL_6=n="http://livereload.com/protocols/official-6",o.PROTOCOL_7=r="http://livereload.com/protocols/official-7";class i{constructor(e,t){this.message="LiveReload protocol error (".concat(e,') after receiving data: "').concat(t,'".')}}o.ProtocolError=i,o.Parser=class{constructor(e){this.handlers=e,this.reset()}reset(){this.protocol=null}process(e){try{let t;if(this.protocol){if(6===this.protocol){if(!(t=JSON.parse(e)).length)throw new i("protocol 6 messages must be arrays");const[o,n]=Array.from(t);if("refresh"!==o)throw new i("unknown protocol 6 command");return this.handlers.message({command:"reload",path:n.path,liveCSS:null==n.apply_css_live||n.apply_css_live})}return t=this._parseMessage(e,["reload","alert"]),this.handlers.message(t)}if(e.match(new RegExp("^!!ver:([\\d.]+)$")))this.protocol=6;else if(t=this._parseMessage(e,["hello"])){if(!t.protocols.length)throw new i("no protocols specified in handshake message");if(Array.from(t.protocols).includes(r))this.protocol=7;else{if(!Array.from(t.protocols).includes(n))throw new i("no supported protocols found");this.protocol=6}}return this.handlers.connected(this.protocol)}catch(e){if(e instanceof i)return this.handlers.error(e);throw e}}_parseMessage(e,t){let o;try{o=JSON.parse(e)}catch(t){throw new i("unparsable JSON",e)}if(!o.command)throw new i('missing "command" key',e);if(!t.includes(o.command))throw new i("invalid command '".concat(o.command,"', only valid commands are: ").concat(t.join(", "),")"),e);return o}}},{}],54:[function(e,t,o){"use strict";const n=function(e){let t,o,n;(o=e.indexOf("#"))>=0?(t=e.slice(o),e=e.slice(0,o)):t="";const 
r=e.indexOf("??");return r>=0?r+1!==e.lastIndexOf("?")&&(o=e.lastIndexOf("?")):o=e.indexOf("?"),o>=0?(n=e.slice(o),e=e.slice(0,o)):n="",{url:e,params:n,hash:t}},r=function(e){if(!e)return"";let t;return({url:e}=n(e)),t=0===e.indexOf("file://")?e.replace(new RegExp("^file://(localhost)?"),""):e.replace(new RegExp("^([^:]+:)?//([^:/]+)(:\\d*)?/"),"/"),decodeURIComponent(t)},i=function(e,t,o){let n,r={score:0};for(let i of t)(n=s(e,o(i)))>r.score&&(r={object:i,score:n});return 0===r.score?null:r};var s=function(e,t){if((e=e.replace(/^\/+/,"").toLowerCase())===(t=t.replace(/^\/+/,"").toLowerCase()))return 1e4;const o=e.split("/").reverse(),n=t.split("/").reverse(),r=Math.min(o.length,n.length);let i=0;for(;i<r&&o[i]===n[i];)++i;return i};const c=(e,t)=>s(e,t)>0,a=[{selector:"background",styleNames:["backgroundImage"]},{selector:"border",styleNames:["borderImage","webkitBorderImage","MozBorderImage"]}];o.Reloader=class{constructor(e,t,o){this.window=e,this.console=t,this.Timer=o,this.document=this.window.document,this.importCacheWaitPeriod=200,this.plugins=[]}addPlugin(e){return this.plugins.push(e)}analyze(e){}reload(e,t){this.options=t,this.options.stylesheetReloadTimeout||(this.options.stylesheetReloadTimeout=15e3);for(let o of Array.from(this.plugins))if(o.reload&&o.reload(e,t))return;if(!(t.liveCSS&&e.match(/\.css(?:\.map)?$/i)&&this.reloadStylesheet(e)))if(t.liveImg&&e.match(/\.(jpe?g|png|gif)$/i))this.reloadImages(e);else{if(!t.isChromeExtension)return this.reloadPage();this.reloadChromeExtension()}}reloadPage(){return this.window.document.location.reload()}reloadChromeExtension(){return this.window.chrome.runtime.reload()}reloadImages(e){let t;const o=this.generateUniqueString();for(t of Array.from(this.document.images))c(e,r(t.src))&&(t.src=this.generateCacheBustUrl(t.src,o));if(this.document.querySelectorAll)for(let{selector:n,styleNames:r}of a)for(t of 
Array.from(this.document.querySelectorAll("[style*=".concat(n,"]"))))this.reloadStyleImages(t.style,r,e,o);if(this.document.styleSheets)return Array.from(this.document.styleSheets).map(t=>this.reloadStylesheetImages(t,e,o))}reloadStylesheetImages(e,t,o){let n;try{n=(e||{}).cssRules}catch(e){}if(n)for(let e of Array.from(n))switch(e.type){case CSSRule.IMPORT_RULE:this.reloadStylesheetImages(e.styleSheet,t,o);break;case CSSRule.STYLE_RULE:for(let{styleNames:n}of a)this.reloadStyleImages(e.style,n,t,o);break;case CSSRule.MEDIA_RULE:this.reloadStylesheetImages(e,t,o)}}reloadStyleImages(e,t,o,n){for(let i of t){const t=e[i];if("string"==typeof t){const s=t.replace(new RegExp("\\burl\\s*\\(([^)]*)\\)"),(e,t)=>c(o,r(t))?"url(".concat(this.generateCacheBustUrl(t,n),")"):e);s!==t&&(e[i]=s)}}}reloadStylesheet(e){let t,o;const n=(()=>{const e=[];for(o of Array.from(this.document.getElementsByTagName("link")))o.rel.match(/^stylesheet$/i)&&!o.__LiveReload_pendingRemoval&&e.push(o);return e})(),s=[];for(t of Array.from(this.document.getElementsByTagName("style")))t.sheet&&this.collectImportedStylesheets(t,t.sheet,s);for(o of Array.from(n))this.collectImportedStylesheets(o,o.sheet,s);if(this.window.StyleFix&&this.document.querySelectorAll)for(t of Array.from(this.document.querySelectorAll("style[data-href]")))n.push(t);this.console.log("LiveReload found ".concat(n.length," LINKed stylesheets, ").concat(s.length," @imported stylesheets"));const c=i(e,n.concat(s),e=>r(this.linkHref(e)));if(c)c.object.rule?(this.console.log("LiveReload is reloading imported stylesheet: ".concat(c.object.href)),this.reattachImportedRule(c.object)):(this.console.log("LiveReload is reloading stylesheet: ".concat(this.linkHref(c.object))),this.reattachStylesheetLink(c.object));else if(this.options.reloadMissingCSS)for(o of(this.console.log("LiveReload will reload all stylesheets because path '".concat(e,"' did not match any specific one. 
To disable this behavior, set 'options.reloadMissingCSS' to 'false'.")),Array.from(n)))this.reattachStylesheetLink(o);else this.console.log("LiveReload will not reload path '".concat(e,"' because the stylesheet was not found on the page and 'options.reloadMissingCSS' was set to 'false'."));return!0}collectImportedStylesheets(e,t,o){let n;try{n=(t||{}).cssRules}catch(e){}if(n&&n.length)for(let t=0;t<n.length;t++){const r=n[t];switch(r.type){case CSSRule.CHARSET_RULE:continue;case CSSRule.IMPORT_RULE:o.push({link:e,rule:r,index:t,href:r.href}),this.collectImportedStylesheets(e,r.styleSheet,o)}}}waitUntilCssLoads(e,t){let o=!1;const n=()=>{if(!o)return o=!0,t()};if(e.onload=(()=>(this.console.log("LiveReload: the new stylesheet has finished loading"),this.knownToSupportCssOnLoad=!0,n())),!this.knownToSupportCssOnLoad){let t;(t=(()=>e.sheet?(this.console.log("LiveReload is polling until the new CSS finishes loading..."),n()):this.Timer.start(50,t)))()}return this.Timer.start(this.options.stylesheetReloadTimeout,n)}linkHref(e){return e.href||e.getAttribute&&e.getAttribute("data-href")}reattachStylesheetLink(e){let t;if(e.__LiveReload_pendingRemoval)return;e.__LiveReload_pendingRemoval=!0,"STYLE"===e.tagName?((t=this.document.createElement("link")).rel="stylesheet",t.media=e.media,t.disabled=e.disabled):t=e.cloneNode(!1),t.href=this.generateCacheBustUrl(this.linkHref(e));const o=e.parentNode;return o.lastChild===e?o.appendChild(t):o.insertBefore(t,e.nextSibling),this.waitUntilCssLoads(t,()=>{let o;return o=/AppleWebKit/.test(navigator.userAgent)?5:200,this.Timer.start(o,()=>{if(e.parentNode)return e.parentNode.removeChild(e),t.onreadystatechange=null,this.window.StyleFix?this.window.StyleFix.link(t):void 0})})}reattachImportedRule({rule:e,index:t,link:o}){const n=e.parentStyleSheet,r=this.generateCacheBustUrl(e.href),i=e.media.length?[].join.call(e.media,", "):"",s='@import url("'.concat(r,'") ').concat(i,";");e.__LiveReload_newHref=r;const 
c=this.document.createElement("link");return c.rel="stylesheet",c.href=r,c.__LiveReload_pendingRemoval=!0,o.parentNode&&o.parentNode.insertBefore(c,o),this.Timer.start(this.importCacheWaitPeriod,()=>{if(c.parentNode&&c.parentNode.removeChild(c),e.__LiveReload_newHref===r)return n.insertRule(s,t),n.deleteRule(t+1),(e=n.cssRules[t]).__LiveReload_newHref=r,this.Timer.start(this.importCacheWaitPeriod,()=>{if(e.__LiveReload_newHref===r)return n.insertRule(s,t),n.deleteRule(t+1)})})}generateUniqueString(){return"livereload=".concat(Date.now())}generateCacheBustUrl(e,t){let o,r;if(t||(t=this.generateUniqueString()),({url:e,hash:o,params:r}=n(e)),this.options.overrideURL&&e.indexOf(this.options.serverURL)<0){const t=e;e=this.options.serverURL+this.options.overrideURL+"?url="+encodeURIComponent(e),this.console.log("LiveReload is overriding source URL ".concat(t," with ").concat(e))}let i=r.replace(/(\?|&)livereload=(\d+)/,(e,o)=>"".concat(o).concat(t));return i===r&&(i=0===r.length?"?".concat(t):"".concat(r,"&").concat(t)),e+i+o}}},{}],55:[function(e,t,o){"use strict";const n=e("./customevents"),r=window.LiveReload=new(e("./livereload").LiveReload)(window);for(let e in window)e.match(/^LiveReloadPlugin/)&&r.addPlugin(window[e]);r.addPlugin(e("./less")),r.on("shutdown",()=>delete window.LiveReload),r.on("connect",()=>n.fire(document,"LiveReloadConnect")),r.on("disconnect",()=>n.fire(document,"LiveReloadDisconnect")),n.bind(document,"LiveReloadShutDown",()=>r.shutDown())},{"./customevents":49,"./less":50,"./livereload":51}],56:[function(e,t,o){"use strict";class n{constructor(e){this.func=e,this.running=!1,this.id=null,this._handler=(()=>(this.running=!1,this.id=null,this.func()))}start(e){this.running&&clearTimeout(this.id),this.id=setTimeout(this._handler,e),this.running=!0}stop(){this.running&&(clearTimeout(this.id),this.running=!1,this.id=null)}}n.start=((e,t)=>setTimeout(t,e)),o.Timer=n},{}]},{},[55]);`
+ hugoLiveReloadPlugin = fmt.Sprintf(`
+/*
+Hugo adds a specific prefix, "__hugo_navigate", to the path in certain situations to signal
+navigation to another content page.
+*/
+
+function HugoReload() {}
+
+HugoReload.identifier = 'hugoReloader';
+HugoReload.version = '0.9';
+
+HugoReload.prototype.reload = function(path, options) {
+ var prefix = %q;
+
+ if (path.lastIndexOf(prefix, 0) !== 0) {
+ return false
+ }
+
+ path = path.substring(prefix.length);
+
+ var portChanged = options.overrideURL && options.overrideURL != window.location.port
+
+ if (!portChanged && window.location.pathname === path) {
+ window.location.reload();
+ } else {
+ if (portChanged) {
+ window.location = location.protocol + "//" + location.hostname + ":" + options.overrideURL + path;
+ } else {
+ window.location.pathname = path;
+ }
+ }
+
+ return true;
+};
+
+LiveReload.addPlugin(HugoReload)
+`, hugoNavigatePrefix)
+)
diff --git a/magefile.go b/magefile.go
new file mode 100644
index 000000000..b2dc54777
--- /dev/null
+++ b/magefile.go
@@ -0,0 +1,375 @@
+//go:build mage
+// +build mage
+
+package main
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gohugoio/hugo/codegen"
+ "github.com/gohugoio/hugo/resources/page/page_generate"
+
+ "github.com/magefile/mage/mg"
+ "github.com/magefile/mage/sh"
+)
+
+const (
+ packageName = "github.com/gohugoio/hugo"
+ noGitLdflags = "-X github.com/gohugoio/hugo/common/hugo.vendorInfo=mage"
+)
+
+var ldflags = noGitLdflags
+
+// allow user to override go executable by running as GOEXE=xxx make ... on unix-like systems
+var goexe = "go"
+
+func init() {
+ if exe := os.Getenv("GOEXE"); exe != "" {
+ goexe = exe
+ }
+
+ // We want to use Go 1.11 modules even if the source lives inside GOPATH.
+ // The default is "auto".
+ os.Setenv("GO111MODULE", "on")
+}
+
+func runWith(env map[string]string, cmd string, inArgs ...any) error {
+ s := argsToStrings(inArgs...)
+ return sh.RunWith(env, cmd, s...)
+}
+
+// Build hugo binary
+func Hugo() error {
+ return runWith(flagEnv(), goexe, "build", "-ldflags", ldflags, buildFlags(), "-tags", buildTags(), packageName)
+}
+
+// Build hugo binary with race detector enabled
+func HugoRace() error {
+ return runWith(flagEnv(), goexe, "build", "-race", "-ldflags", ldflags, buildFlags(), "-tags", buildTags(), packageName)
+}
+
+// Install hugo binary
+func Install() error {
+ return runWith(flagEnv(), goexe, "install", "-ldflags", ldflags, buildFlags(), "-tags", buildTags(), packageName)
+}
+
+// Uninstall hugo binary
+func Uninstall() error {
+ return sh.Run(goexe, "clean", "-i", packageName)
+}
+
+func flagEnv() map[string]string {
+ hash, _ := sh.Output("git", "rev-parse", "--short", "HEAD")
+ return map[string]string{
+ "PACKAGE": packageName,
+ "COMMIT_HASH": hash,
+ "BUILD_DATE": time.Now().Format("2006-01-02T15:04:05Z0700"),
+ }
+}
+
+// Generate autogen packages
+func Generate() error {
+ generatorPackages := []string{
+ //"tpl/tplimpl/embedded/generate",
+ //"resources/page/generate",
+ }
+
+ for _, pkg := range generatorPackages {
+ if err := runWith(flagEnv(), goexe, "generate", path.Join(packageName, pkg)); err != nil {
+ return err
+ }
+ }
+
+ dir, _ := os.Getwd()
+ c := codegen.NewInspector(dir)
+
+ if err := page_generate.Generate(c); err != nil {
+ return err
+ }
+
+ goFmtPatterns := []string{
+ // TODO(bep) check: stat ./resources/page/*autogen*: no such file or directory
+ "./resources/page/page_marshaljson.autogen.go",
+ "./resources/page/page_wrappers.autogen.go",
+ "./resources/page/zero_file.autogen.go",
+ }
+
+ for _, pattern := range goFmtPatterns {
+ if err := sh.Run("gofmt", "-w", filepath.FromSlash(pattern)); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// Generate docs helper
+func GenDocsHelper() error {
+ return runCmd(flagEnv(), goexe, "run", "-tags", buildTags(), "main.go", "gen", "docshelper")
+}
+
+// Build hugo without git info
+func HugoNoGitInfo() error {
+ ldflags = noGitLdflags
+ return Hugo()
+}
+
+var docker = sh.RunCmd("docker")
+
+// Build hugo Docker container
+func Docker() error {
+ if err := docker("build", "-t", "hugo", "."); err != nil {
+ return err
+ }
+ // yes ignore errors here
+ docker("rm", "-f", "hugo-build")
+ if err := docker("run", "--name", "hugo-build", "hugo ls /go/bin"); err != nil {
+ return err
+ }
+ if err := docker("cp", "hugo-build:/go/bin/hugo", "."); err != nil {
+ return err
+ }
+ return docker("rm", "hugo-build")
+}
+
+// Run tests and linters
+func Check() {
+ if runtime.GOARCH == "amd64" && runtime.GOOS != "darwin" {
+ mg.Deps(Test386)
+ } else {
+ fmt.Printf("Skip Test386 on %s and/or %s\n", runtime.GOARCH, runtime.GOOS)
+ }
+
+ mg.Deps(Fmt, Vet)
+
+ // don't run two tests in parallel, they saturate the CPUs anyway, and running two
+ // causes memory issues in CI.
+ mg.Deps(TestRace)
+}
+
+func testGoFlags() string {
+ if isCI() {
+ return ""
+ }
+
+ return "-timeout=1m"
+}
+
+// Run tests in 32-bit mode
+// Note that we don't run with the extended tag. Currently not supported in 32 bit.
+func Test386() error {
+ env := map[string]string{"GOARCH": "386", "GOFLAGS": testGoFlags()}
+ return runCmd(env, goexe, "test", "./...")
+}
+
+// Run tests
+func Test() error {
+ env := map[string]string{"GOFLAGS": testGoFlags()}
+ return runCmd(env, goexe, "test", "./...", buildFlags(), "-tags", buildTags())
+}
+
+// Run tests with race detector
+func TestRace() error {
+ env := map[string]string{"GOFLAGS": testGoFlags()}
+ return runCmd(env, goexe, "test", "-race", "./...", buildFlags(), "-tags", buildTags())
+}
+
+// Run gofmt linter
+func Fmt() error {
+ if !isGoLatest() {
+ return nil
+ }
+ pkgs, err := hugoPackages()
+ if err != nil {
+ return err
+ }
+ failed := false
+ first := true
+ for _, pkg := range pkgs {
+ files, err := filepath.Glob(filepath.Join(pkg, "*.go"))
+ if err != nil {
+ return nil
+ }
+ for _, f := range files {
+ // gofmt doesn't exit with non-zero when it finds unformatted code
+ // so we have to explicitly look for output, and if we find any, we
+ // should fail this target.
+ s, err := sh.Output("gofmt", "-l", f)
+ if err != nil {
+ fmt.Printf("ERROR: running gofmt on %q: %v\n", f, err)
+ failed = true
+ }
+ if s != "" {
+ if first {
+ fmt.Println("The following files are not gofmt'ed:")
+ first = false
+ }
+ failed = true
+ fmt.Println(s)
+ }
+ }
+ }
+ if failed {
+ return errors.New("improperly formatted go files")
+ }
+ return nil
+}
+
+var (
+ pkgPrefixLen = len("github.com/gohugoio/hugo")
+ pkgs []string
+ pkgsInit sync.Once
+)
+
+func hugoPackages() ([]string, error) {
+ var err error
+ pkgsInit.Do(func() {
+ var s string
+ s, err = sh.Output(goexe, "list", "./...")
+ if err != nil {
+ return
+ }
+ pkgs = strings.Split(s, "\n")
+ for i := range pkgs {
+ pkgs[i] = "." + pkgs[i][pkgPrefixLen:]
+ }
+ })
+ return pkgs, err
+}
+
+// Run golint linter
+func Lint() error {
+ pkgs, err := hugoPackages()
+ if err != nil {
+ return err
+ }
+ failed := false
+ for _, pkg := range pkgs {
+ // We don't actually want to fail this target if we find golint errors,
+ // so we don't pass -set_exit_status, but we still print out any failures.
+ if _, err := sh.Exec(nil, os.Stderr, nil, "golint", pkg); err != nil {
+ fmt.Printf("ERROR: running go lint on %q: %v\n", pkg, err)
+ failed = true
+ }
+ }
+ if failed {
+ return errors.New("errors running golint")
+ }
+ return nil
+}
+
+// Run go vet linter
+func Vet() error {
+ if err := sh.Run(goexe, "vet", "./..."); err != nil {
+ return fmt.Errorf("error running go vet: %v", err)
+ }
+ return nil
+}
+
+// Generate test coverage report
+func TestCoverHTML() error {
+ const (
+ coverAll = "coverage-all.out"
+ cover = "coverage.out"
+ )
+ f, err := os.Create(coverAll)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ if _, err := f.Write([]byte("mode: count")); err != nil {
+ return err
+ }
+ pkgs, err := hugoPackages()
+ if err != nil {
+ return err
+ }
+ for _, pkg := range pkgs {
+ if err := sh.Run(goexe, "test", "-coverprofile="+cover, "-covermode=count", pkg); err != nil {
+ return err
+ }
+ b, err := ioutil.ReadFile(cover)
+ if err != nil {
+ if os.IsNotExist(err) {
+ continue
+ }
+ return err
+ }
+ idx := bytes.Index(b, []byte{'\n'})
+ b = b[idx+1:]
+ if _, err := f.Write(b); err != nil {
+ return err
+ }
+ }
+ if err := f.Close(); err != nil {
+ return err
+ }
+ return sh.Run(goexe, "tool", "cover", "-html="+coverAll)
+}
+
+func runCmd(env map[string]string, cmd string, args ...any) error {
+ if mg.Verbose() {
+ return runWith(env, cmd, args...)
+ }
+ output, err := sh.OutputWith(env, cmd, argsToStrings(args...)...)
+ if err != nil {
+ fmt.Fprint(os.Stderr, output)
+ }
+
+ return err
+}
+
+func isGoLatest() bool {
+ return strings.Contains(runtime.Version(), "1.14")
+}
+
+func isCI() bool {
+ return os.Getenv("CI") != ""
+}
+
+func buildFlags() []string {
+ if runtime.GOOS == "windows" {
+ return []string{"-buildmode", "exe"}
+ }
+ return nil
+}
+
+func buildTags() string {
+ // To build the extended Hugo SCSS/SASS enabled version, build with
+ // HUGO_BUILD_TAGS=extended mage install etc.
+ // To build without `hugo deploy` for smaller binary, use HUGO_BUILD_TAGS=nodeploy
+ if envtags := os.Getenv("HUGO_BUILD_TAGS"); envtags != "" {
+ return envtags
+ }
+ return "none"
+}
+
+func argsToStrings(v ...any) []string {
+ var args []string
+ for _, arg := range v {
+ switch v := arg.(type) {
+ case string:
+ if v != "" {
+ args = append(args, v)
+ }
+ case []string:
+ if v != nil {
+ args = append(args, v...)
+ }
+ default:
+ panic("invalid type")
+ }
+ }
+
+ return args
+}
diff --git a/main.go b/main.go
new file mode 100644
index 000000000..8e81854ce
--- /dev/null
+++ b/main.go
@@ -0,0 +1,32 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "os"
+
+ "github.com/gohugoio/hugo/commands"
+)
+
+func main() {
+ resp := commands.Execute(os.Args[1:])
+
+ if resp.Err != nil {
+ if resp.IsUserError() {
+ resp.Cmd.Println("")
+ resp.Cmd.Println(resp.Cmd.UsageString())
+ }
+ os.Exit(-1)
+ }
+}
diff --git a/markup/asciidocext/asciidocext_config/config.go b/markup/asciidocext/asciidocext_config/config.go
new file mode 100644
index 000000000..1409b2783
--- /dev/null
+++ b/markup/asciidocext/asciidocext_config/config.go
@@ -0,0 +1,79 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package asciidocext_config holds AsciiDoc-related configuration.
+package asciidocext_config
+
+var (
+ // Default holds Hugo's default asciidoc configuration.
+ Default = Config{
+ Backend: "html5",
+ Extensions: []string{},
+ Attributes: map[string]string{},
+ NoHeaderOrFooter: true,
+ SafeMode: "unsafe",
+ SectionNumbers: false,
+ Verbose: false,
+ Trace: false,
+ FailureLevel: "fatal",
+ WorkingFolderCurrent: false,
+ PreserveTOC: false,
+ }
+
+ // CliDefault holds Asciidoctor CLI defaults (see https://asciidoctor.org/docs/user-manual/)
+ CliDefault = Config{
+ Backend: "html5",
+ SafeMode: "unsafe",
+ FailureLevel: "fatal",
+ }
+
+ AllowedSafeMode = map[string]bool{
+ "unsafe": true,
+ "safe": true,
+ "server": true,
+ "secure": true,
+ }
+
+ AllowedFailureLevel = map[string]bool{
+ "fatal": true,
+ "warn": true,
+ }
+
+ AllowedBackend = map[string]bool{
+ "html5": true,
+ "html5s": true,
+ "xhtml5": true,
+ "docbook5": true,
+ "docbook45": true,
+ "manpage": true,
+ }
+
+ DisallowedAttributes = map[string]bool{
+ "outdir": true,
+ }
+)
+
+// Config configures asciidoc.
+type Config struct {
+ Backend string
+ Extensions []string
+ Attributes map[string]string
+ NoHeaderOrFooter bool
+ SafeMode string
+ SectionNumbers bool
+ Verbose bool
+ Trace bool
+ FailureLevel string
+ WorkingFolderCurrent bool
+ PreserveTOC bool
+}
diff --git a/markup/asciidocext/convert.go b/markup/asciidocext/convert.go
new file mode 100644
index 000000000..4c83e0e95
--- /dev/null
+++ b/markup/asciidocext/convert.go
@@ -0,0 +1,322 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package asciidocext converts AsciiDoc to HTML using Asciidoctor
+// external binary. The `asciidoc` module is reserved for a future golang
+// implementation.
+package asciidocext
+
+import (
+ "bytes"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/asciidocext/asciidocext_config"
+ "github.com/gohugoio/hugo/markup/converter"
+ "github.com/gohugoio/hugo/markup/internal"
+ "github.com/gohugoio/hugo/markup/tableofcontents"
+ "golang.org/x/net/html"
+)
+
+// TODO: RelPermalink patch for SVG posts not working.
+type pageSubset interface {
+ RelPermalink() string
+}
+
+// Provider is the package entry point.
+var Provider converter.ProviderProvider = provider{}
+
+type provider struct{}
+
+func (p provider) New(cfg converter.ProviderConfig) (converter.Provider, error) {
+ return converter.NewProvider("asciidocext", func(ctx converter.DocumentContext) (converter.Converter, error) {
+ return &asciidocConverter{
+ ctx: ctx,
+ cfg: cfg,
+ }, nil
+ }), nil
+}
+
+type asciidocResult struct {
+ converter.Result
+ toc tableofcontents.Root
+}
+
+func (r asciidocResult) TableOfContents() tableofcontents.Root {
+ return r.toc
+}
+
+type asciidocConverter struct {
+ ctx converter.DocumentContext
+ cfg converter.ProviderConfig
+}
+
+func (a *asciidocConverter) Convert(ctx converter.RenderContext) (converter.Result, error) {
+ b, err := a.getAsciidocContent(ctx.Src, a.ctx)
+ if err != nil {
+ return nil, err
+ }
+ content, toc, err := a.extractTOC(b)
+ if err != nil {
+ return nil, err
+ }
+ return asciidocResult{
+ Result: converter.Bytes(content),
+ toc: toc,
+ }, nil
+}
+
+func (a *asciidocConverter) Supports(_ identity.Identity) bool {
+ return false
+}
+
+// getAsciidocContent calls asciidoctor as an external helper
+// to convert AsciiDoc content to HTML.
+func (a *asciidocConverter) getAsciidocContent(src []byte, ctx converter.DocumentContext) ([]byte, error) {
+ if !hasAsciiDoc() {
+ a.cfg.Logger.Errorln("asciidoctor not found in $PATH: Please install.\n",
+ " Leaving AsciiDoc content unrendered.")
+ return src, nil
+ }
+
+ args := a.parseArgs(ctx)
+ args = append(args, "-")
+
+ a.cfg.Logger.Infoln("Rendering", ctx.DocumentName, " using asciidoctor args", args, "...")
+
+ return internal.ExternallyRenderContent(a.cfg, ctx, src, asciiDocBinaryName, args)
+}
+
+func (a *asciidocConverter) parseArgs(ctx converter.DocumentContext) []string {
+ cfg := a.cfg.MarkupConfig.AsciidocExt
+ args := []string{}
+
+ args = a.appendArg(args, "-b", cfg.Backend, asciidocext_config.CliDefault.Backend, asciidocext_config.AllowedBackend)
+
+ for _, extension := range cfg.Extensions {
+ if strings.LastIndexAny(extension, `\/.`) > -1 {
+ a.cfg.Logger.Errorln("Unsupported asciidoctor extension was passed in. Extension `" + extension + "` ignored. Only installed asciidoctor extensions are allowed.")
+ continue
+ }
+ args = append(args, "-r", extension)
+ }
+
+ for attributeKey, attributeValue := range cfg.Attributes {
+ if asciidocext_config.DisallowedAttributes[attributeKey] {
+ a.cfg.Logger.Errorln("Unsupported asciidoctor attribute was passed in. Attribute `" + attributeKey + "` ignored.")
+ continue
+ }
+
+ args = append(args, "-a", attributeKey+"="+attributeValue)
+ }
+
+ if cfg.WorkingFolderCurrent {
+ contentDir := filepath.Dir(ctx.Filename)
+ sourceDir := a.cfg.Cfg.GetString("source")
+ destinationDir := a.cfg.Cfg.GetString("destination")
+
+ if destinationDir == "" {
+ a.cfg.Logger.Errorln("markup.asciidocext.workingFolderCurrent requires hugo command option --destination to be set")
+ }
+ if !filepath.IsAbs(destinationDir) && sourceDir != "" {
+ destinationDir = filepath.Join(sourceDir, destinationDir)
+ }
+
+ var outDir string
+ var err error
+
+ file := filepath.Base(ctx.Filename)
+ if a.cfg.Cfg.GetBool("uglyUrls") || file == "_index.adoc" || file == "index.adoc" {
+ outDir, err = filepath.Abs(filepath.Dir(filepath.Join(destinationDir, ctx.DocumentName)))
+ } else {
+ postDir := ""
+ page, ok := ctx.Document.(pageSubset)
+ if ok {
+ postDir = filepath.Base(page.RelPermalink())
+ } else {
+ a.cfg.Logger.Errorln("unable to cast interface to pageSubset")
+ }
+
+ outDir, err = filepath.Abs(filepath.Join(destinationDir, filepath.Dir(ctx.DocumentName), postDir))
+ }
+
+ if err != nil {
+ a.cfg.Logger.Errorln("asciidoctor outDir: ", err)
+ }
+
+ args = append(args, "--base-dir", contentDir, "-a", "outdir="+outDir)
+ }
+
+ if cfg.NoHeaderOrFooter {
+ args = append(args, "--no-header-footer")
+ } else {
+ a.cfg.Logger.Warnln("asciidoctor parameter NoHeaderOrFooter is expected for correct html rendering")
+ }
+
+ if cfg.SectionNumbers {
+ args = append(args, "--section-numbers")
+ }
+
+ if cfg.Verbose {
+ args = append(args, "--verbose")
+ }
+
+ if cfg.Trace {
+ args = append(args, "--trace")
+ }
+
+ args = a.appendArg(args, "--failure-level", cfg.FailureLevel, asciidocext_config.CliDefault.FailureLevel, asciidocext_config.AllowedFailureLevel)
+
+ args = a.appendArg(args, "--safe-mode", cfg.SafeMode, asciidocext_config.CliDefault.SafeMode, asciidocext_config.AllowedSafeMode)
+
+ return args
+}
+
+func (a *asciidocConverter) appendArg(args []string, option, value, defaultValue string, allowedValues map[string]bool) []string {
+ if value != defaultValue {
+ if allowedValues[value] {
+ args = append(args, option, value)
+ } else {
+ a.cfg.Logger.Errorln("Unsupported asciidoctor value `" + value + "` for option " + option + " was passed in and will be ignored.")
+ }
+ }
+ return args
+}
+
+const asciiDocBinaryName = "asciidoctor"
+
+func hasAsciiDoc() bool {
+ return hexec.InPath(asciiDocBinaryName)
+}
+
+// extractTOC extracts the TOC from the given src HTML.
+// It returns the HTML without the TOC, and the TOC data.
+func (a *asciidocConverter) extractTOC(src []byte) ([]byte, tableofcontents.Root, error) {
+ var buf bytes.Buffer
+ buf.Write(src)
+ node, err := html.Parse(&buf)
+ if err != nil {
+ return nil, tableofcontents.Root{}, err
+ }
+ var (
+ f func(*html.Node) bool
+ toc tableofcontents.Root
+ toVisit []*html.Node
+ )
+ f = func(n *html.Node) bool {
+ if n.Type == html.ElementNode && n.Data == "div" && attr(n, "id") == "toc" {
+ toc = parseTOC(n)
+ if !a.cfg.MarkupConfig.AsciidocExt.PreserveTOC {
+ n.Parent.RemoveChild(n)
+ }
+ return true
+ }
+ if n.FirstChild != nil {
+ toVisit = append(toVisit, n.FirstChild)
+ }
+ if n.NextSibling != nil && f(n.NextSibling) {
+ return true
+ }
+ for len(toVisit) > 0 {
+ nv := toVisit[0]
+ toVisit = toVisit[1:]
+ if f(nv) {
+ return true
+ }
+ }
+ return false
+ }
+ f(node)
+ if err != nil {
+ return nil, tableofcontents.Root{}, err
+ }
+ buf.Reset()
+ err = html.Render(&buf, node)
+ if err != nil {
+ return nil, tableofcontents.Root{}, err
+ }
+ // ltrim <html><head></head><body> and rtrim </body></html> which are added by html.Render
+ res := buf.Bytes()[25:]
+ res = res[:len(res)-14]
+ return res, toc, nil
+}
+
+// parseTOC returns a TOC root from the given toc Node
+func parseTOC(doc *html.Node) tableofcontents.Root {
+ var (
+ toc tableofcontents.Root
+ f func(*html.Node, int, int)
+ )
+ f = func(n *html.Node, row, level int) {
+ if n.Type == html.ElementNode {
+ switch n.Data {
+ case "ul":
+ if level == 0 {
+ row++
+ }
+ level++
+ f(n.FirstChild, row, level)
+ case "li":
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ if c.Type != html.ElementNode || c.Data != "a" {
+ continue
+ }
+ href := attr(c, "href")[1:]
+ toc.AddAt(tableofcontents.Heading{
+ Text: nodeContent(c),
+ ID: href,
+ }, row, level)
+ }
+ f(n.FirstChild, row, level)
+ }
+ }
+ if n.NextSibling != nil {
+ f(n.NextSibling, row, level)
+ }
+ }
+ f(doc.FirstChild, -1, 0)
+ return toc
+}
+
+func attr(node *html.Node, key string) string {
+ for _, a := range node.Attr {
+ if a.Key == key {
+ return a.Val
+ }
+ }
+ return ""
+}
+
+func nodeContent(node *html.Node) string {
+ var buf bytes.Buffer
+ for c := node.FirstChild; c != nil; c = c.NextSibling {
+ html.Render(&buf, c)
+ }
+ return buf.String()
+}
+
+// Supports returns whether Asciidoctor is installed on this computer.
+func Supports() bool {
+ hasBin := hasAsciiDoc()
+ if htesting.SupportsAll() {
+ if !hasBin {
+ panic("asciidoctor not installed")
+ }
+ return true
+ }
+ return hasBin
+}
diff --git a/markup/asciidocext/convert_test.go b/markup/asciidocext/convert_test.go
new file mode 100644
index 000000000..3a350c5ce
--- /dev/null
+++ b/markup/asciidocext/convert_test.go
@@ -0,0 +1,463 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package asciidocext converts AsciiDoc to HTML using Asciidoctor
+// external binary. The `asciidoc` module is reserved for a future golang
+// implementation.
+
+package asciidocext
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/markup/converter"
+ "github.com/gohugoio/hugo/markup/markup_config"
+ "github.com/gohugoio/hugo/markup/tableofcontents"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestAsciidoctorDefaultArgs(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ expected := []string{"--no-header-footer"}
+ c.Assert(args, qt.DeepEquals, expected)
+}
+
+func TestAsciidoctorNonDefaultArgs(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+ mconf.AsciidocExt.Backend = "manpage"
+ mconf.AsciidocExt.NoHeaderOrFooter = false
+ mconf.AsciidocExt.SafeMode = "safe"
+ mconf.AsciidocExt.SectionNumbers = true
+ mconf.AsciidocExt.Verbose = true
+ mconf.AsciidocExt.Trace = false
+ mconf.AsciidocExt.FailureLevel = "warn"
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ expected := []string{"-b", "manpage", "--section-numbers", "--verbose", "--failure-level", "warn", "--safe-mode", "safe"}
+ c.Assert(args, qt.DeepEquals, expected)
+}
+
+func TestAsciidoctorDisallowedArgs(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+ mconf.AsciidocExt.Backend = "disallowed-backend"
+ mconf.AsciidocExt.Extensions = []string{"./disallowed-extension"}
+ mconf.AsciidocExt.Attributes = map[string]string{"outdir": "disallowed-attribute"}
+ mconf.AsciidocExt.SafeMode = "disallowed-safemode"
+ mconf.AsciidocExt.FailureLevel = "disallowed-failurelevel"
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ expected := []string{"--no-header-footer"}
+ c.Assert(args, qt.DeepEquals, expected)
+}
+
+func TestAsciidoctorArbitraryExtension(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+ mconf.AsciidocExt.Extensions = []string{"arbitrary-extension"}
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ expected := []string{"-r", "arbitrary-extension", "--no-header-footer"}
+ c.Assert(args, qt.DeepEquals, expected)
+}
+
+func TestAsciidoctorDisallowedExtension(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ for _, disallowedExtension := range []string{
+ `foo-bar//`,
+ `foo-bar\\ `,
+ `../../foo-bar`,
+ `/foo-bar`,
+ `C:\foo-bar`,
+ `foo-bar.rb`,
+ `foo.bar`,
+ } {
+ mconf := markup_config.Default
+ mconf.AsciidocExt.Extensions = []string{disallowedExtension}
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ expected := []string{"--no-header-footer"}
+ c.Assert(args, qt.DeepEquals, expected)
+ }
+}
+
+func TestAsciidoctorWorkingFolderCurrent(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+ mconf.AsciidocExt.WorkingFolderCurrent = true
+ mconf.AsciidocExt.Trace = false
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ ctx := converter.DocumentContext{Filename: "/tmp/hugo_asciidoc_ddd/docs/chapter2/index.adoc", DocumentName: "chapter2/index.adoc"}
+ conv, err := p.New(ctx)
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(ctx)
+ c.Assert(len(args), qt.Equals, 5)
+ c.Assert(args[0], qt.Equals, "--base-dir")
+ c.Assert(filepath.ToSlash(args[1]), qt.Matches, "/tmp/hugo_asciidoc_ddd/docs/chapter2")
+ c.Assert(args[2], qt.Equals, "-a")
+ c.Assert(args[3], qt.Matches, `outdir=.*[/\\]{1,2}asciidocext[/\\]{1,2}chapter2`)
+ c.Assert(args[4], qt.Equals, "--no-header-footer")
+}
+
+func TestAsciidoctorWorkingFolderCurrentAndExtensions(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+ mconf.AsciidocExt.NoHeaderOrFooter = true
+ mconf.AsciidocExt.Extensions = []string{"asciidoctor-html5s", "asciidoctor-diagram"}
+ mconf.AsciidocExt.Backend = "html5s"
+ mconf.AsciidocExt.WorkingFolderCurrent = true
+ mconf.AsciidocExt.Trace = false
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ c.Assert(len(args), qt.Equals, 11)
+ c.Assert(args[0], qt.Equals, "-b")
+ c.Assert(args[1], qt.Equals, "html5s")
+ c.Assert(args[2], qt.Equals, "-r")
+ c.Assert(args[3], qt.Equals, "asciidoctor-html5s")
+ c.Assert(args[4], qt.Equals, "-r")
+ c.Assert(args[5], qt.Equals, "asciidoctor-diagram")
+ c.Assert(args[6], qt.Equals, "--base-dir")
+ c.Assert(args[7], qt.Equals, ".")
+ c.Assert(args[8], qt.Equals, "-a")
+ c.Assert(args[9], qt.Contains, "outdir=")
+ c.Assert(args[10], qt.Equals, "--no-header-footer")
+}
+
+func TestAsciidoctorAttributes(t *testing.T) {
+ c := qt.New(t)
+ cfg := config.New()
+ mconf := markup_config.Default
+ mconf.AsciidocExt.Attributes = map[string]string{"my-base-url": "https://gohugo.io/", "my-attribute-name": "my value"}
+ mconf.AsciidocExt.Trace = false
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Cfg: cfg,
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ ac := conv.(*asciidocConverter)
+ c.Assert(ac, qt.Not(qt.IsNil))
+
+ expectedValues := map[string]bool{
+ "my-base-url=https://gohugo.io/": true,
+ "my-attribute-name=my value": true,
+ }
+
+ args := ac.parseArgs(converter.DocumentContext{})
+ c.Assert(len(args), qt.Equals, 5)
+ c.Assert(args[0], qt.Equals, "-a")
+ c.Assert(expectedValues[args[1]], qt.Equals, true)
+ c.Assert(args[2], qt.Equals, "-a")
+ c.Assert(expectedValues[args[3]], qt.Equals, true)
+ c.Assert(args[4], qt.Equals, "--no-header-footer")
+}
+
+func getProvider(c *qt.C, mconf markup_config.Config) converter.Provider {
+ sc := security.DefaultConfig
+ sc.Exec.Allow = security.NewWhitelist("asciidoctor")
+
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ Exec: hexec.New(sc),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+ return p
+}
+
+func TestConvert(t *testing.T) {
+ if !Supports() {
+ t.Skip("asciidoctor not installed")
+ }
+ c := qt.New(t)
+
+ p := getProvider(c, markup_config.Default)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+
+ b, err := conv.Convert(converter.RenderContext{Src: []byte("testContent")})
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(b.Bytes()), qt.Equals, "<div class=\"paragraph\">\n<p>testContent</p>\n</div>\n")
+}
+
+func TestTableOfContents(t *testing.T) {
+ if !Supports() {
+ t.Skip("asciidoctor not installed")
+ }
+ c := qt.New(t)
+ p := getProvider(c, markup_config.Default)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ r, err := conv.Convert(converter.RenderContext{Src: []byte(`:toc: macro
+:toclevels: 4
+toc::[]
+
+=== Introduction
+
+== Section 1
+
+=== Section 1.1
+
+==== Section 1.1.1
+
+=== Section 1.2
+
+testContent
+
+== Section 2
+`)})
+ c.Assert(err, qt.IsNil)
+ toc, ok := r.(converter.TableOfContentsProvider)
+ c.Assert(ok, qt.Equals, true)
+ expected := tableofcontents.Root{
+ Headings: tableofcontents.Headings{
+ {
+ ID: "",
+ Text: "",
+ Headings: tableofcontents.Headings{
+ {
+ ID: "_introduction",
+ Text: "Introduction",
+ Headings: nil,
+ },
+ {
+ ID: "_section_1",
+ Text: "Section 1",
+ Headings: tableofcontents.Headings{
+ {
+ ID: "_section_1_1",
+ Text: "Section 1.1",
+ Headings: tableofcontents.Headings{
+ {
+ ID: "_section_1_1_1",
+ Text: "Section 1.1.1",
+ Headings: nil,
+ },
+ },
+ },
+ {
+ ID: "_section_1_2",
+ Text: "Section 1.2",
+ Headings: nil,
+ },
+ },
+ },
+ {
+ ID: "_section_2",
+ Text: "Section 2",
+ Headings: nil,
+ },
+ },
+ },
+ },
+ }
+ c.Assert(toc.TableOfContents(), qt.DeepEquals, expected)
+ c.Assert(string(r.Bytes()), qt.Not(qt.Contains), "<div id=\"toc\" class=\"toc\">")
+}
+
+func TestTableOfContentsWithCode(t *testing.T) {
+ if !Supports() {
+ t.Skip("asciidoctor not installed")
+ }
+ c := qt.New(t)
+ p := getProvider(c, markup_config.Default)
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ r, err := conv.Convert(converter.RenderContext{Src: []byte(`:toc: auto
+
+== Some ` + "`code`" + ` in the title
+`)})
+ c.Assert(err, qt.IsNil)
+ toc, ok := r.(converter.TableOfContentsProvider)
+ c.Assert(ok, qt.Equals, true)
+ expected := tableofcontents.Root{
+ Headings: tableofcontents.Headings{
+ {
+ ID: "",
+ Text: "",
+ Headings: tableofcontents.Headings{
+ {
+ ID: "_some_code_in_the_title",
+ Text: "Some <code>code</code> in the title",
+ Headings: nil,
+ },
+ },
+ },
+ },
+ }
+ c.Assert(toc.TableOfContents(), qt.DeepEquals, expected)
+ c.Assert(string(r.Bytes()), qt.Not(qt.Contains), "<div id=\"toc\" class=\"toc\">")
+}
+
+func TestTableOfContentsPreserveTOC(t *testing.T) {
+ if !Supports() {
+ t.Skip("asciidoctor not installed")
+ }
+ c := qt.New(t)
+ mconf := markup_config.Default
+ mconf.AsciidocExt.PreserveTOC = true
+ p := getProvider(c, mconf)
+
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ r, err := conv.Convert(converter.RenderContext{Src: []byte(`:toc:
+:idprefix:
+:idseparator: -
+
+== Some title
+`)})
+ c.Assert(err, qt.IsNil)
+ toc, ok := r.(converter.TableOfContentsProvider)
+ c.Assert(ok, qt.Equals, true)
+ expected := tableofcontents.Root{
+ Headings: tableofcontents.Headings{
+ {
+ ID: "",
+ Text: "",
+ Headings: tableofcontents.Headings{
+ {
+ ID: "some-title",
+ Text: "Some title",
+ Headings: nil,
+ },
+ },
+ },
+ },
+ }
+ c.Assert(toc.TableOfContents(), qt.DeepEquals, expected)
+ c.Assert(string(r.Bytes()), qt.Contains, "<div id=\"toc\" class=\"toc\">")
+}
diff --git a/markup/blackfriday/anchors.go b/markup/blackfriday/anchors.go
new file mode 100644
index 000000000..90f65a64c
--- /dev/null
+++ b/markup/blackfriday/anchors.go
@@ -0,0 +1,39 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package blackfriday holds some compatibility functions for the old Blackfriday v1 Markdown engine.
+package blackfriday
+
+import "unicode"
+
+// SanitizedAnchorName is how Blackfriday sanitizes anchor names.
+// Implementation borrowed from https://github.com/russross/blackfriday/blob/a477dd1646916742841ed20379f941cfa6c5bb6f/block.go#L1464
+// Note that Hugo removed its Blackfriday support in v0.100.0, but you can still use this strategy for
+// auto ID generation.
+func SanitizedAnchorName(text string) string {
+ var anchorName []rune
+ futureDash := false
+ for _, r := range text {
+ switch {
+ case unicode.IsLetter(r) || unicode.IsNumber(r):
+ if futureDash && len(anchorName) > 0 {
+ anchorName = append(anchorName, '-')
+ }
+ futureDash = false
+ anchorName = append(anchorName, unicode.ToLower(r))
+ default:
+ futureDash = true
+ }
+ }
+ return string(anchorName)
+}
diff --git a/markup/converter/converter.go b/markup/converter/converter.go
new file mode 100644
index 000000000..c760381f4
--- /dev/null
+++ b/markup/converter/converter.go
@@ -0,0 +1,140 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package converter
+
+import (
+ "bytes"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/highlight"
+ "github.com/gohugoio/hugo/markup/markup_config"
+ "github.com/gohugoio/hugo/markup/tableofcontents"
+ "github.com/spf13/afero"
+)
+
+// ProviderConfig configures a new Provider.
+type ProviderConfig struct {
+ MarkupConfig markup_config.Config
+
+ Cfg config.Provider // Site config
+ ContentFs afero.Fs
+ Logger loggers.Logger
+ Exec *hexec.Exec
+ highlight.Highlighter
+}
+
+// ProviderProvider creates converter providers.
+type ProviderProvider interface {
+ New(cfg ProviderConfig) (Provider, error)
+}
+
+// Provider creates converters.
+type Provider interface {
+ New(ctx DocumentContext) (Converter, error)
+ Name() string
+}
+
+// NewProvider creates a new Provider with the given name.
+func NewProvider(name string, create func(ctx DocumentContext) (Converter, error)) Provider {
+ return newConverter{
+ name: name,
+ create: create,
+ }
+}
+
+type newConverter struct {
+ name string
+ create func(ctx DocumentContext) (Converter, error)
+}
+
+func (n newConverter) New(ctx DocumentContext) (Converter, error) {
+ return n.create(ctx)
+}
+
+func (n newConverter) Name() string {
+ return n.name
+}
+
+var NopConverter = new(nopConverter)
+
+type nopConverter int
+
+func (nopConverter) Convert(ctx RenderContext) (Result, error) {
+ return &bytes.Buffer{}, nil
+}
+
+func (nopConverter) Supports(feature identity.Identity) bool {
+ return false
+}
+
+// Converter wraps the Convert method that converts some markup into
+// another format, e.g. Markdown to HTML.
+type Converter interface {
+ Convert(ctx RenderContext) (Result, error)
+ Supports(feature identity.Identity) bool
+}
+
+// Result represents the minimum returned from Convert.
+type Result interface {
+ Bytes() []byte
+}
+
+// DocumentInfo holds additional information provided by some converters.
+type DocumentInfo interface {
+ AnchorSuffix() string
+}
+
+// TableOfContentsProvider provides the content as a ToC structure.
+type TableOfContentsProvider interface {
+ TableOfContents() tableofcontents.Root
+}
+
+// AnchorNameSanitizer tells how a converter sanitizes anchor names.
+type AnchorNameSanitizer interface {
+ SanitizeAnchorName(s string) string
+}
+
+// Bytes holds a byte slice and implements the Result interface.
+type Bytes []byte
+
+// Bytes returns itself.
+func (b Bytes) Bytes() []byte {
+ return b
+}
+
+// DocumentContext holds contextual information about the document to convert.
+type DocumentContext struct {
+ Document any // May be nil. Usually a page.Page
+ DocumentID string
+ DocumentName string
+ Filename string
+}
+
+// RenderContext holds contextual information about the content to render.
+type RenderContext struct {
+ // Src is the content to render.
+ Src []byte
+
+ // Whether to render TableOfContents.
+ RenderTOC bool
+
+	// GetRenderer provides hook renderers on demand.
+ GetRenderer hooks.GetRendererFunc
+}
+
+var FeatureRenderHooks = identity.NewPathIdentity("markup", "renderingHooks")
diff --git a/markup/converter/hooks/hooks.go b/markup/converter/hooks/hooks.go
new file mode 100644
index 000000000..a8666bdf0
--- /dev/null
+++ b/markup/converter/hooks/hooks.go
@@ -0,0 +1,111 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hooks
+
+import (
+ "io"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/common/types/hstring"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/internal/attributes"
+)
+
+var _ AttributesOptionsSliceProvider = (*attributes.AttributesHolder)(nil)
+
+type AttributesProvider interface {
+ Attributes() map[string]any
+}
+
+type LinkContext interface {
+ Page() any
+ Destination() string
+ Title() string
+ Text() hstring.RenderedString
+ PlainText() string
+}
+
+type CodeblockContext interface {
+ AttributesProvider
+ text.Positioner
+ Options() map[string]any
+ Type() string
+ Inner() string
+ Ordinal() int
+ Page() any
+}
+
+type AttributesOptionsSliceProvider interface {
+ AttributesSlice() []attributes.Attribute
+ OptionsSlice() []attributes.Attribute
+}
+
+type LinkRenderer interface {
+ RenderLink(w io.Writer, ctx LinkContext) error
+ identity.Provider
+}
+
+type CodeBlockRenderer interface {
+ RenderCodeblock(w hugio.FlexiWriter, ctx CodeblockContext) error
+ identity.Provider
+}
+
+type IsDefaultCodeBlockRendererProvider interface {
+ IsDefaultCodeBlockRenderer() bool
+}
+
+// HeadingContext contains accessors to all attributes that a HeadingRenderer
+// can use to render a heading.
+type HeadingContext interface {
+ // Page is the page containing the heading.
+ Page() any
+ // Level is the level of the header (i.e. 1 for top-level, 2 for sub-level, etc.).
+ Level() int
+ // Anchor is the HTML id assigned to the heading.
+ Anchor() string
+ // Text is the rendered (HTML) heading text, excluding the heading marker.
+ Text() hstring.RenderedString
+ // PlainText is the unrendered version of Text.
+ PlainText() string
+
+ // Attributes (e.g. CSS classes)
+ AttributesProvider
+}
+
+// HeadingRenderer describes a uniquely identifiable rendering hook.
+type HeadingRenderer interface {
+ // RenderHeading writes the rendered heading to w using the data in ctx.
+ RenderHeading(w io.Writer, ctx HeadingContext) error
+ identity.Provider
+}
+
+// ElementPositionResolver provides a way to resolve the start Position
+// of a markdown element in the original source document.
+// This may be both slow and approximate, so should only be
+// used for error logging.
+type ElementPositionResolver interface {
+ ResolvePosition(ctx any) text.Position
+}
+
+type RendererType int
+
+const (
+ LinkRendererType RendererType = iota + 1
+ ImageRendererType
+ HeadingRendererType
+ CodeBlockRendererType
+)
+
+type GetRendererFunc func(t RendererType, id any) any
diff --git a/markup/goldmark/autoid.go b/markup/goldmark/autoid.go
new file mode 100644
index 000000000..04313269c
--- /dev/null
+++ b/markup/goldmark/autoid.go
@@ -0,0 +1,133 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package goldmark
+
+import (
+ "bytes"
+ "strconv"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/markup/blackfriday"
+
+ "github.com/gohugoio/hugo/markup/goldmark/goldmark_config"
+
+ "github.com/gohugoio/hugo/common/text"
+
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/util"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+)
+
+func sanitizeAnchorNameString(s string, idType string) string {
+ return string(sanitizeAnchorName([]byte(s), idType))
+}
+
+func sanitizeAnchorName(b []byte, idType string) []byte {
+ return sanitizeAnchorNameWithHook(b, idType, nil)
+}
+
+func sanitizeAnchorNameWithHook(b []byte, idType string, hook func(buf *bytes.Buffer)) []byte {
+ buf := bp.GetBuffer()
+
+ if idType == goldmark_config.AutoHeadingIDTypeBlackfriday {
+ // TODO(bep) make it more efficient.
+ buf.WriteString(blackfriday.SanitizedAnchorName(string(b)))
+ } else {
+ asciiOnly := idType == goldmark_config.AutoHeadingIDTypeGitHubAscii
+
+ if asciiOnly {
+ // Normalize it to preserve accents if possible.
+ b = text.RemoveAccents(b)
+ }
+
+ b = bytes.TrimSpace(b)
+
+ for len(b) > 0 {
+ r, size := utf8.DecodeRune(b)
+ switch {
+ case asciiOnly && size != 1:
+ case r == '-' || r == ' ':
+ buf.WriteRune('-')
+ case isAlphaNumeric(r):
+ buf.WriteRune(unicode.ToLower(r))
+ default:
+ }
+
+ b = b[size:]
+ }
+ }
+
+ if hook != nil {
+ hook(buf)
+ }
+
+ result := make([]byte, buf.Len())
+ copy(result, buf.Bytes())
+
+ bp.PutBuffer(buf)
+
+ return result
+}
+
+func isAlphaNumeric(r rune) bool {
+ return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
+}
+
+var _ parser.IDs = (*idFactory)(nil)
+
+type idFactory struct {
+ idType string
+ vals map[string]struct{}
+}
+
+func newIDFactory(idType string) *idFactory {
+ return &idFactory{
+ vals: make(map[string]struct{}),
+ idType: idType,
+ }
+}
+
+func (ids *idFactory) Generate(value []byte, kind ast.NodeKind) []byte {
+ return sanitizeAnchorNameWithHook(value, ids.idType, func(buf *bytes.Buffer) {
+ if buf.Len() == 0 {
+ if kind == ast.KindHeading {
+ buf.WriteString("heading")
+ } else {
+ buf.WriteString("id")
+ }
+ }
+
+ if _, found := ids.vals[util.BytesToReadOnlyString(buf.Bytes())]; found {
+ // Append a hyphen and a number, starting with 1.
+ buf.WriteRune('-')
+ pos := buf.Len()
+ for i := 1; ; i++ {
+ buf.WriteString(strconv.Itoa(i))
+ if _, found := ids.vals[util.BytesToReadOnlyString(buf.Bytes())]; !found {
+ break
+ }
+ buf.Truncate(pos)
+ }
+ }
+
+ ids.vals[buf.String()] = struct{}{}
+ })
+}
+
+func (ids *idFactory) Put(value []byte) {
+ ids.vals[util.BytesToReadOnlyString(value)] = struct{}{}
+}
diff --git a/markup/goldmark/autoid_test.go b/markup/goldmark/autoid_test.go
new file mode 100644
index 000000000..0bdb63c12
--- /dev/null
+++ b/markup/goldmark/autoid_test.go
@@ -0,0 +1,143 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package goldmark
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/markup/goldmark/goldmark_config"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestSanitizeAnchorName(t *testing.T) {
+ c := qt.New(t)
+
+ // Tests generated manually on github.com
+ tests := `
+God is good: 神真美好
+Number 32
+Question?
+1+2=3
+Special !"#$%&(parens)=?´* chars
+Resumé
+One-Hyphen
+Multiple--Hyphens
+Trailing hyphen-
+Many spaces here
+Forward/slash
+Backward\slash
+Under_score
+Nonbreaking Space
+Tab Space
+`
+
+ expect := `
+god-is-good-神真美好
+number-32
+question
+123
+special-parens-chars
+resumé
+one-hyphen
+multiple--hyphens
+trailing-hyphen-
+many---spaces--here
+forwardslash
+backwardslash
+under_score
+nonbreakingspace
+tabspace
+`
+
+ tests, expect = strings.TrimSpace(tests), strings.TrimSpace(expect)
+
+ testlines, expectlines := strings.Split(tests, "\n"), strings.Split(expect, "\n")
+
+ testlines = append(testlines, "Trailing Space ")
+ expectlines = append(expectlines, "trailing-space")
+
+ if len(testlines) != len(expectlines) {
+ panic("test setup failed")
+ }
+
+ for i, input := range testlines {
+ input := input
+ expect := expectlines[i]
+ c.Run(input, func(c *qt.C) {
+ b := []byte(input)
+ got := string(sanitizeAnchorName(b, goldmark_config.AutoHeadingIDTypeGitHub))
+ c.Assert(got, qt.Equals, expect)
+ c.Assert(sanitizeAnchorNameString(input, goldmark_config.AutoHeadingIDTypeGitHub), qt.Equals, expect)
+ c.Assert(string(b), qt.Equals, input)
+ })
+ }
+}
+
+func TestSanitizeAnchorNameAsciiOnly(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(sanitizeAnchorNameString("god is神真美好 good", goldmark_config.AutoHeadingIDTypeGitHubAscii), qt.Equals, "god-is-good")
+ c.Assert(sanitizeAnchorNameString("Resumé", goldmark_config.AutoHeadingIDTypeGitHubAscii), qt.Equals, "resume")
+}
+
+func TestSanitizeAnchorNameBlackfriday(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(sanitizeAnchorNameString("Let's try this, shall we?", goldmark_config.AutoHeadingIDTypeBlackfriday), qt.Equals, "let-s-try-this-shall-we")
+}
+
+func BenchmarkSanitizeAnchorName(b *testing.B) {
+ input := []byte("God is good: 神真美好")
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ result := sanitizeAnchorName(input, goldmark_config.AutoHeadingIDTypeGitHub)
+ if len(result) != 24 {
+ b.Fatalf("got %d", len(result))
+ }
+ }
+}
+
+func BenchmarkSanitizeAnchorNameAsciiOnly(b *testing.B) {
+ input := []byte("God is good: 神真美好")
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ result := sanitizeAnchorName(input, goldmark_config.AutoHeadingIDTypeGitHubAscii)
+ if len(result) != 12 {
+ b.Fatalf("got %d", len(result))
+ }
+ }
+}
+
+func BenchmarkSanitizeAnchorNameBlackfriday(b *testing.B) {
+ input := []byte("God is good: 神真美好")
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ result := sanitizeAnchorName(input, goldmark_config.AutoHeadingIDTypeBlackfriday)
+ if len(result) != 24 {
+ b.Fatalf("got %d", len(result))
+ }
+ }
+}
+
+func BenchmarkSanitizeAnchorNameString(b *testing.B) {
+ input := "God is good: 神真美好"
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ result := sanitizeAnchorNameString(input, goldmark_config.AutoHeadingIDTypeGitHub)
+ if len(result) != 24 {
+ b.Fatalf("got %d", len(result))
+ }
+ }
+}
diff --git a/markup/goldmark/codeblocks/integration_test.go b/markup/goldmark/codeblocks/integration_test.go
new file mode 100644
index 000000000..199049789
--- /dev/null
+++ b/markup/goldmark/codeblocks/integration_test.go
@@ -0,0 +1,352 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codeblocks_test
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestCodeblocks(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup]
+ [markup.highlight]
+ anchorLineNos = false
+ codeFences = true
+ guessSyntax = false
+ hl_Lines = ''
+ lineAnchors = ''
+ lineNoStart = 1
+ lineNos = false
+ lineNumbersInTable = true
+ noClasses = false
+ style = 'monokai'
+ tabWidth = 4
+-- layouts/_default/_markup/render-codeblock-goat.html --
+{{ $diagram := diagrams.Goat .Inner }}
+Goat SVG:{{ substr $diagram.Wrapped 0 100 | safeHTML }} }}|
+Goat Attribute: {{ .Attributes.width}}|
+-- layouts/_default/_markup/render-codeblock-go.html --
+Go Code: {{ .Inner | safeHTML }}|
+Go Language: {{ .Type }}|
+-- layouts/_default/single.html --
+{{ .Content }}
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Ascii Diagram
+
+§§§goat { width="600" }
+--->
+§§§
+
+## Go Code
+
+§§§go
+fmt.Println("Hello, World!");
+§§§
+
+## Golang Code
+
+§§§golang
+fmt.Println("Hello, Golang!");
+§§§
+
+## Bash Code
+
+§§§bash { linenos=inline,hl_lines=[2,"5-6"],linenostart=32 class=blue }
+echo "l1";
+echo "l2";
+echo "l3";
+echo "l4";
+echo "l5";
+echo "l6";
+echo "l7";
+echo "l8";
+§§§
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+Goat SVG:<svg class='diagram'
+Goat Attribute: 600|
+
+Go Language: go|
+Go Code: fmt.Println("Hello, World!");
+
+Go Code: fmt.Println("Hello, Golang!");
+Go Language: golang|
+
+
+ `,
+ "Goat SVG:<svg class='diagram' xmlns='http://www.w3.org/2000/svg' version='1.1' height='25' width='40'",
+ "Goat Attribute: 600|",
+ "<h2 id=\"go-code\">Go Code</h2>\nGo Code: fmt.Println(\"Hello, World!\");\n|\nGo Language: go|",
+ "<h2 id=\"golang-code\">Golang Code</h2>\nGo Code: fmt.Println(\"Hello, Golang!\");\n|\nGo Language: golang|",
+ "<h2 id=\"bash-code\">Bash Code</h2>\n<div class=\"highlight blue\"><pre tabindex=\"0\" class=\"chroma\"><code class=\"language-bash\" data-lang=\"bash\"><span class=\"line\"><span class=\"ln\">32</span><span class=\"cl\"><span class=\"nb\">echo</span> <span class=\"s2\">&#34;l1&#34;</span><span class=\"p\">;</span>\n</span></span><span class=\"line hl\"><span class=\"ln\">33</span>",
+ )
+}
+
+func TestHighlightCodeblock(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup]
+[markup.highlight]
+anchorLineNos = false
+codeFences = true
+guessSyntax = false
+hl_Lines = ''
+lineAnchors = ''
+lineNoStart = 1
+lineNos = false
+lineNumbersInTable = true
+noClasses = false
+style = 'monokai'
+tabWidth = 4
+-- layouts/_default/_markup/render-codeblock.html --
+{{ $result := transform.HighlightCodeBlock . }}
+Inner: |{{ $result.Inner | safeHTML }}|
+Wrapped: |{{ $result.Wrapped | safeHTML }}|
+-- layouts/_default/single.html --
+{{ .Content }}
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Go Code
+
+§§§go
+fmt.Println("Hello, World!");
+§§§
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html",
+ "Inner: |<span class=\"line\"><span class=\"cl\"><span class=\"nx\">fmt</span><span class=\"p\">.</span><span class=\"nf\">Println</span><span class=\"p\">(</span><span class=\"s\">&#34;Hello, World!&#34;</span><span class=\"p\">);</span></span></span>|",
+ "Wrapped: |<div class=\"highlight\"><pre tabindex=\"0\" class=\"chroma\"><code class=\"language-go\" data-lang=\"go\"><span class=\"line\"><span class=\"cl\"><span class=\"nx\">fmt</span><span class=\"p\">.</span><span class=\"nf\">Println</span><span class=\"p\">(</span><span class=\"s\">&#34;Hello, World!&#34;</span><span class=\"p\">);</span></span></span></code></pre></div>|",
+ )
+}
+
+func TestCodeblocksBugs(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- layouts/_default/_markup/render-codeblock.html --
+{{ .Position | safeHTML }}
+-- layouts/_default/single.html --
+{{ .Content }}
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Issue 9627
+
+§§§text
+{{</* foo */>}}
+§§§
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+# Issue 9627: For the Position in code blocks we try to match the .Inner with the original source. This isn't always possible.
+p1.md:0:0
+ `,
+ )
+}
+
+func TestCodeChomp(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+
+§§§bash
+echo "p1";
+§§§
+-- layouts/_default/single.html --
+{{ .Content }}
+-- layouts/_default/_markup/render-codeblock.html --
+|{{ .Inner | safeHTML }}|
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", "|echo \"p1\";|")
+}
+
+func TestCodePosition(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Code
+
+§§§
+echo "p1";
+§§§
+-- layouts/_default/single.html --
+{{ .Content }}
+-- layouts/_default/_markup/render-codeblock.html --
+Position: {{ .Position | safeHTML }}
+
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", filepath.FromSlash("Position: \"/content/p1.md:7:1\""))
+}
+
+// Issue 9571
+func TestAttributesChroma(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Code
+
+§§§LANGUAGE {style=monokai}
+echo "p1";
+§§§
+-- layouts/_default/single.html --
+{{ .Content }}
+-- layouts/_default/_markup/render-codeblock.html --
+Attributes: {{ .Attributes }}|Options: {{ .Options }}|
+
+
+`
+ testLanguage := func(language, expect string) {
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: strings.ReplaceAll(files, "LANGUAGE", language),
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", expect)
+ }
+
+ testLanguage("bash", "Attributes: map[]|Options: map[style:monokai]|")
+ testLanguage("hugo", "Attributes: map[style:monokai]|Options: map[]|")
+}
+
+func TestPanics(t *testing.T) {
+
+ files := `
+-- config.toml --
+[markup]
+[markup.goldmark]
+[markup.goldmark.parser]
+autoHeadingID = true
+autoHeadingIDType = "github"
+[markup.goldmark.parser.attribute]
+block = true
+title = true
+-- content/p1.md --
+---
+title: "p1"
+---
+
+BLOCK
+
+Common
+
+-- layouts/_default/single.html --
+{{ .Content }}
+
+
+`
+
+ for _, test := range []struct {
+ name string
+ markdown string
+ }{
+ {"issue-9819", "asdf\n: {#myid}"},
+ } {
+ t.Run(test.name, func(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: strings.ReplaceAll(files, "BLOCK", test.markdown),
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", "Common")
+ })
+ }
+
+}
diff --git a/markup/goldmark/codeblocks/render.go b/markup/goldmark/codeblocks/render.go
new file mode 100644
index 000000000..e245688e7
--- /dev/null
+++ b/markup/goldmark/codeblocks/render.go
@@ -0,0 +1,203 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codeblocks
+
+import (
+ "bytes"
+ "fmt"
+ "sync"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/gohugoio/hugo/common/herrors"
+ htext "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/goldmark/internal/render"
+ "github.com/gohugoio/hugo/markup/internal/attributes"
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/renderer"
+ "github.com/yuin/goldmark/text"
+ "github.com/yuin/goldmark/util"
+)
+
+type (
+ codeBlocksExtension struct{}
+ htmlRenderer struct{}
+)
+
+func New() goldmark.Extender {
+ return &codeBlocksExtension{}
+}
+
+func (e *codeBlocksExtension) Extend(m goldmark.Markdown) {
+ m.Parser().AddOptions(
+ parser.WithASTTransformers(
+ util.Prioritized(&Transformer{}, 100),
+ ),
+ )
+ m.Renderer().AddOptions(renderer.WithNodeRenderers(
+ util.Prioritized(newHTMLRenderer(), 100),
+ ))
+}
+
+func newHTMLRenderer() renderer.NodeRenderer {
+ r := &htmlRenderer{}
+ return r
+}
+
+func (r *htmlRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
+ reg.Register(KindCodeBlock, r.renderCodeBlock)
+}
+
+func (r *htmlRenderer) renderCodeBlock(w util.BufWriter, src []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ ctx := w.(*render.Context)
+
+ if entering {
+ return ast.WalkContinue, nil
+ }
+
+ n := node.(*codeBlock)
+ lang := string(n.b.Language(src))
+ renderer := ctx.RenderContext().GetRenderer(hooks.CodeBlockRendererType, lang)
+ if renderer == nil {
+ return ast.WalkStop, fmt.Errorf("no code renderer found for %q", lang)
+ }
+
+ ordinal := n.ordinal
+
+ var buff bytes.Buffer
+
+ l := n.b.Lines().Len()
+ for i := 0; i < l; i++ {
+ line := n.b.Lines().At(i)
+ buff.Write(line.Value(src))
+ }
+
+ s := htext.Chomp(buff.String())
+
+ var info []byte
+ if n.b.Info != nil {
+ info = n.b.Info.Segment.Value(src)
+ }
+
+ attrtp := attributes.AttributesOwnerCodeBlockCustom
+ if isd, ok := renderer.(hooks.IsDefaultCodeBlockRendererProvider); (ok && isd.IsDefaultCodeBlockRenderer()) || lexers.Get(lang) != nil {
+ // We say that this is a Chroma code block if it's the default code block renderer
+ // or if the language is supported by Chroma.
+ attrtp = attributes.AttributesOwnerCodeBlockChroma
+ }
+
+ // IsDefaultCodeBlockRendererProvider
+ attrs := getAttributes(n.b, info)
+ cbctx := &codeBlockContext{
+ page: ctx.DocumentContext().Document,
+ lang: lang,
+ code: s,
+ ordinal: ordinal,
+ AttributesHolder: attributes.New(attrs, attrtp),
+ }
+
+ cbctx.createPos = func() htext.Position {
+ if resolver, ok := renderer.(hooks.ElementPositionResolver); ok {
+ return resolver.ResolvePosition(cbctx)
+ }
+ return htext.Position{
+ Filename: ctx.DocumentContext().Filename,
+ LineNumber: 1,
+ ColumnNumber: 1,
+ }
+ }
+
+ cr := renderer.(hooks.CodeBlockRenderer)
+
+ err := cr.RenderCodeblock(
+ w,
+ cbctx,
+ )
+
+ ctx.AddIdentity(cr)
+
+ if err != nil {
+ return ast.WalkContinue, herrors.NewFileErrorFromPos(err, cbctx.createPos())
+ }
+
+ return ast.WalkContinue, nil
+}
+
+type codeBlockContext struct {
+ page any
+ lang string
+ code string
+ ordinal int
+
+ // pos is only used in error situations and is expensive to create,
+ // so creation is delayed until needed (see Position).
+ pos htext.Position
+ posInit sync.Once
+ createPos func() htext.Position
+
+ *attributes.AttributesHolder
+}
+
+func (c *codeBlockContext) Page() any {
+ return c.page
+}
+
+func (c *codeBlockContext) Type() string {
+ return c.lang
+}
+
+func (c *codeBlockContext) Inner() string {
+ return c.code
+}
+
+func (c *codeBlockContext) Ordinal() int {
+ return c.ordinal
+}
+
+func (c *codeBlockContext) Position() htext.Position {
+ c.posInit.Do(func() {
+ c.pos = c.createPos()
+ })
+ return c.pos
+}
+
+func getAttributes(node *ast.FencedCodeBlock, infostr []byte) []ast.Attribute {
+ if node.Attributes() != nil {
+ return node.Attributes()
+ }
+ if infostr != nil {
+ attrStartIdx := -1
+
+ for idx, char := range infostr {
+ if char == '{' {
+ attrStartIdx = idx
+ break
+ }
+ }
+
+ if attrStartIdx > 0 {
+ n := ast.NewTextBlock() // dummy node for storing attributes
+ attrStr := infostr[attrStartIdx:]
+ if attrs, hasAttr := parser.ParseAttributes(text.NewReader(attrStr)); hasAttr {
+ for _, attr := range attrs {
+ n.SetAttribute(attr.Name, attr.Value)
+ }
+ return n.Attributes()
+ }
+ }
+ }
+ return nil
+}
diff --git a/markup/goldmark/codeblocks/transform.go b/markup/goldmark/codeblocks/transform.go
new file mode 100644
index 000000000..be5334b5f
--- /dev/null
+++ b/markup/goldmark/codeblocks/transform.go
@@ -0,0 +1,54 @@
+package codeblocks
+
+import (
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/text"
+)
+
+// KindCodeBlock is the kind of a Hugo code block.
+var KindCodeBlock = ast.NewNodeKind("HugoCodeBlock")
+
+// codeBlock is a Hugo code block; its raw contents are the plain text of the code block.
+type codeBlock struct {
+ ast.BaseBlock
+ ordinal int
+ b *ast.FencedCodeBlock
+}
+
+func (*codeBlock) Kind() ast.NodeKind { return KindCodeBlock }
+
+func (*codeBlock) IsRaw() bool { return true }
+
+func (b *codeBlock) Dump(src []byte, level int) {
+}
+
+type Transformer struct{}
+
+// Transform transforms the provided Markdown AST.
+func (*Transformer) Transform(doc *ast.Document, reader text.Reader, pctx parser.Context) {
+ var codeBlocks []*ast.FencedCodeBlock
+
+ ast.Walk(doc, func(node ast.Node, enter bool) (ast.WalkStatus, error) {
+ if !enter {
+ return ast.WalkContinue, nil
+ }
+
+ cb, ok := node.(*ast.FencedCodeBlock)
+ if !ok {
+ return ast.WalkContinue, nil
+ }
+
+ codeBlocks = append(codeBlocks, cb)
+
+ return ast.WalkContinue, nil
+ })
+
+ for i, cb := range codeBlocks {
+ b := &codeBlock{b: cb, ordinal: i}
+ parent := cb.Parent()
+ if parent != nil {
+ parent.ReplaceChild(parent, cb, b)
+ }
+ }
+}
diff --git a/markup/goldmark/convert.go b/markup/goldmark/convert.go
new file mode 100644
index 000000000..ba85831b0
--- /dev/null
+++ b/markup/goldmark/convert.go
@@ -0,0 +1,231 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package goldmark converts Markdown to HTML using Goldmark.
+package goldmark
+
+import (
+ "bytes"
+
+ "github.com/gohugoio/hugo/markup/goldmark/codeblocks"
+ "github.com/gohugoio/hugo/markup/goldmark/internal/extensions/attributes"
+ "github.com/gohugoio/hugo/markup/goldmark/internal/render"
+
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/gohugoio/hugo/markup/converter"
+ "github.com/gohugoio/hugo/markup/tableofcontents"
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/extension"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/renderer"
+ "github.com/yuin/goldmark/renderer/html"
+ "github.com/yuin/goldmark/text"
+)
+
+// Provider is the package entry point.
+var Provider converter.ProviderProvider = provide{}
+
+type provide struct{}
+
+func (p provide) New(cfg converter.ProviderConfig) (converter.Provider, error) {
+ md := newMarkdown(cfg)
+
+ return converter.NewProvider("goldmark", func(ctx converter.DocumentContext) (converter.Converter, error) {
+ return &goldmarkConverter{
+ ctx: ctx,
+ cfg: cfg,
+ md: md,
+ sanitizeAnchorName: func(s string) string {
+ return sanitizeAnchorNameString(s, cfg.MarkupConfig.Goldmark.Parser.AutoHeadingIDType)
+ },
+ }, nil
+ }), nil
+}
+
+var _ converter.AnchorNameSanitizer = (*goldmarkConverter)(nil)
+
+type goldmarkConverter struct {
+ md goldmark.Markdown
+ ctx converter.DocumentContext
+ cfg converter.ProviderConfig
+
+ sanitizeAnchorName func(s string) string
+}
+
+func (c *goldmarkConverter) SanitizeAnchorName(s string) string {
+ return c.sanitizeAnchorName(s)
+}
+
+func newMarkdown(pcfg converter.ProviderConfig) goldmark.Markdown {
+ mcfg := pcfg.MarkupConfig
+ cfg := pcfg.MarkupConfig.Goldmark
+ var rendererOptions []renderer.Option
+
+ if cfg.Renderer.HardWraps {
+ rendererOptions = append(rendererOptions, html.WithHardWraps())
+ }
+
+ if cfg.Renderer.XHTML {
+ rendererOptions = append(rendererOptions, html.WithXHTML())
+ }
+
+ if cfg.Renderer.Unsafe {
+ rendererOptions = append(rendererOptions, html.WithUnsafe())
+ }
+
+ var (
+ extensions = []goldmark.Extender{
+ newLinks(cfg),
+ newTocExtension(rendererOptions),
+ }
+ parserOptions []parser.Option
+ )
+
+ if mcfg.Highlight.CodeFences {
+ extensions = append(extensions, codeblocks.New())
+ }
+
+ if cfg.Extensions.Table {
+ extensions = append(extensions, extension.Table)
+ }
+
+ if cfg.Extensions.Strikethrough {
+ extensions = append(extensions, extension.Strikethrough)
+ }
+
+ if cfg.Extensions.Linkify {
+ extensions = append(extensions, extension.Linkify)
+ }
+
+ if cfg.Extensions.TaskList {
+ extensions = append(extensions, extension.TaskList)
+ }
+
+ if cfg.Extensions.Typographer {
+ extensions = append(extensions, extension.Typographer)
+ }
+
+ if cfg.Extensions.DefinitionList {
+ extensions = append(extensions, extension.DefinitionList)
+ }
+
+ if cfg.Extensions.Footnote {
+ extensions = append(extensions, extension.Footnote)
+ }
+
+ if cfg.Parser.AutoHeadingID {
+ parserOptions = append(parserOptions, parser.WithAutoHeadingID())
+ }
+
+ if cfg.Parser.Attribute.Title {
+ parserOptions = append(parserOptions, parser.WithAttribute())
+ }
+
+ if cfg.Parser.Attribute.Block {
+ extensions = append(extensions, attributes.New())
+ }
+
+ md := goldmark.New(
+ goldmark.WithExtensions(
+ extensions...,
+ ),
+ goldmark.WithParserOptions(
+ parserOptions...,
+ ),
+ goldmark.WithRendererOptions(
+ rendererOptions...,
+ ),
+ )
+
+ return md
+}
+
+var _ identity.IdentitiesProvider = (*converterResult)(nil)
+
+type converterResult struct {
+ converter.Result
+ toc tableofcontents.Root
+ ids identity.Identities
+}
+
+func (c converterResult) TableOfContents() tableofcontents.Root {
+ return c.toc
+}
+
+func (c converterResult) GetIdentities() identity.Identities {
+ return c.ids
+}
+
+var converterIdentity = identity.KeyValueIdentity{Key: "goldmark", Value: "converter"}
+
+func (c *goldmarkConverter) Convert(ctx converter.RenderContext) (result converter.Result, err error) {
+
+ buf := &render.BufWriter{Buffer: &bytes.Buffer{}}
+ result = buf
+ pctx := c.newParserContext(ctx)
+ reader := text.NewReader(ctx.Src)
+
+ doc := c.md.Parser().Parse(
+ reader,
+ parser.WithContext(pctx),
+ )
+
+ rcx := &render.RenderContextDataHolder{
+ Rctx: ctx,
+ Dctx: c.ctx,
+ IDs: identity.NewManager(converterIdentity),
+ }
+
+ w := &render.Context{
+ BufWriter: buf,
+ ContextData: rcx,
+ }
+
+ if err := c.md.Renderer().Render(w, ctx.Src, doc); err != nil {
+ return nil, err
+ }
+
+ return converterResult{
+ Result: buf,
+ ids: rcx.IDs.GetIdentities(),
+ toc: pctx.TableOfContents(),
+ }, nil
+}
+
+var featureSet = map[identity.Identity]bool{
+ converter.FeatureRenderHooks: true,
+}
+
+func (c *goldmarkConverter) Supports(feature identity.Identity) bool {
+ return featureSet[feature.GetIdentity()]
+}
+
+func (c *goldmarkConverter) newParserContext(rctx converter.RenderContext) *parserContext {
+ ctx := parser.NewContext(parser.WithIDs(newIDFactory(c.cfg.MarkupConfig.Goldmark.Parser.AutoHeadingIDType)))
+ ctx.Set(tocEnableKey, rctx.RenderTOC)
+ return &parserContext{
+ Context: ctx,
+ }
+}
+
+type parserContext struct {
+ parser.Context
+}
+
+func (p *parserContext) TableOfContents() tableofcontents.Root {
+ if v := p.Get(tocResultKey); v != nil {
+ return v.(tableofcontents.Root)
+ }
+ return tableofcontents.Root{}
+}
diff --git a/markup/goldmark/convert_test.go b/markup/goldmark/convert_test.go
new file mode 100644
index 000000000..ab96b8c85
--- /dev/null
+++ b/markup/goldmark/convert_test.go
@@ -0,0 +1,501 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package goldmark
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/goldmark/goldmark_config"
+
+ "github.com/gohugoio/hugo/markup/highlight"
+
+ "github.com/gohugoio/hugo/markup/markup_config"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func convert(c *qt.C, mconf markup_config.Config, content string) converter.Result {
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+ c.Assert(err, qt.IsNil)
+ h := highlight.New(mconf.Highlight)
+
+ getRenderer := func(t hooks.RendererType, id any) any {
+ if t == hooks.CodeBlockRendererType {
+ return h
+ }
+ return nil
+ }
+
+ conv, err := p.New(converter.DocumentContext{DocumentID: "thedoc"})
+ c.Assert(err, qt.IsNil)
+ b, err := conv.Convert(converter.RenderContext{RenderTOC: true, Src: []byte(content), GetRenderer: getRenderer})
+ c.Assert(err, qt.IsNil)
+
+ return b
+}
+
+func TestConvert(t *testing.T) {
+ c := qt.New(t)
+
+ // Smoke test of the default configuration.
+ content := `
+## Links
+
+https://github.com/gohugoio/hugo/issues/6528
+[Live Demo here!](https://docuapi.netlify.com/)
+
+[I'm an inline-style link with title](https://www.google.com "Google's Homepage")
+<https://foo.bar/>
+https://bar.baz/
+<fake@example.com>
+<mailto:fake2@example.com>
+
+
+## Code Fences
+
+§§§bash
+LINE1
+§§§
+
+## Code Fences No Lexer
+
+§§§moo
+LINE1
+§§§
+
+## Custom ID {#custom}
+
+## Auto ID
+
+* Autolink: https://gohugo.io/
+* Strikethrough:~~Hi~~ Hello, world!
+
+## Table
+
+| foo | bar |
+| --- | --- |
+| baz | bim |
+
+## Task Lists (default on)
+
+- [x] Finish my changes[^1]
+- [ ] Push my commits to GitHub
+- [ ] Open a pull request
+
+
+## Smartypants (default on)
+
+* Straight double "quotes" and single 'quotes' into “curly” quote HTML entities
+* Dashes (“--” and “---”) into en- and em-dash entities
+* Three consecutive dots (“...”) into an ellipsis entity
+* Apostrophes are also converted: "That was back in the '90s, that's a long time ago"
+
+## Footnotes
+
+That's some text with a footnote.[^1]
+
+## Definition Lists
+
+date
+: the datetime assigned to this page.
+
+description
+: the description for the content.
+
+
+## 神真美好
+
+## 神真美好
+
+## 神真美好
+
+[^1]: And that's the footnote.
+
+`
+
+ // Code fences
+ content = strings.Replace(content, "§§§", "```", -1)
+ mconf := markup_config.Default
+ mconf.Highlight.NoClasses = false
+ mconf.Goldmark.Renderer.Unsafe = true
+
+ b := convert(c, mconf, content)
+ got := string(b.Bytes())
+
+ fmt.Println(got)
+
+ // Links
+ c.Assert(got, qt.Contains, `<a href="https://docuapi.netlify.com/">Live Demo here!</a>`)
+ c.Assert(got, qt.Contains, `<a href="https://foo.bar/">https://foo.bar/</a>`)
+ c.Assert(got, qt.Contains, `<a href="https://bar.baz/">https://bar.baz/</a>`)
+ c.Assert(got, qt.Contains, `<a href="mailto:fake@example.com">fake@example.com</a>`)
+ c.Assert(got, qt.Contains, `<a href="mailto:fake2@example.com">mailto:fake2@example.com</a></p>`)
+
+ // Header IDs
+ c.Assert(got, qt.Contains, `<h2 id="custom">Custom ID</h2>`, qt.Commentf(got))
+ c.Assert(got, qt.Contains, `<h2 id="auto-id">Auto ID</h2>`, qt.Commentf(got))
+ c.Assert(got, qt.Contains, `<h2 id="神真美好">神真美好</h2>`, qt.Commentf(got))
+ c.Assert(got, qt.Contains, `<h2 id="神真美好-1">神真美好</h2>`, qt.Commentf(got))
+ c.Assert(got, qt.Contains, `<h2 id="神真美好-2">神真美好</h2>`, qt.Commentf(got))
+
+ // Code fences
+ c.Assert(got, qt.Contains, "<div class=\"highlight\"><pre tabindex=\"0\" class=\"chroma\"><code class=\"language-bash\" data-lang=\"bash\"><span class=\"line\"><span class=\"cl\">LINE1\n</span></span></code></pre></div>")
+ c.Assert(got, qt.Contains, "Code Fences No Lexer</h2>\n<pre tabindex=\"0\"><code class=\"language-moo\" data-lang=\"moo\">LINE1\n</code></pre>")
+
+ // Extensions
+ c.Assert(got, qt.Contains, `Autolink: <a href="https://gohugo.io/">https://gohugo.io/</a>`)
+ c.Assert(got, qt.Contains, `Strikethrough:<del>Hi</del> Hello, world`)
+ c.Assert(got, qt.Contains, `<th>foo</th>`)
+ c.Assert(got, qt.Contains, `<li><input disabled="" type="checkbox"> Push my commits to GitHub</li>`)
+
+ c.Assert(got, qt.Contains, `Straight double &ldquo;quotes&rdquo; and single &lsquo;quotes&rsquo;`)
+ c.Assert(got, qt.Contains, `Dashes (“&ndash;” and “&mdash;”) `)
+ c.Assert(got, qt.Contains, `Three consecutive dots (“&hellip;”)`)
+ c.Assert(got, qt.Contains, `&ldquo;That was back in the &rsquo;90s, that&rsquo;s a long time ago&rdquo;`)
+ c.Assert(got, qt.Contains, `footnote.<sup id="fnref1:1"><a href="#fn:1" class="footnote-ref" role="doc-noteref">1</a></sup>`)
+ c.Assert(got, qt.Contains, `<div class="footnotes" role="doc-endnotes">`)
+ c.Assert(got, qt.Contains, `<dt>date</dt>`)
+
+ toc, ok := b.(converter.TableOfContentsProvider)
+ c.Assert(ok, qt.Equals, true)
+ tocHTML := toc.TableOfContents().ToHTML(1, 2, false)
+ c.Assert(tocHTML, qt.Contains, "TableOfContents")
+}
+
+func TestConvertAutoIDAsciiOnly(t *testing.T) {
+ c := qt.New(t)
+
+ content := `
+## God is Good: 神真美好
+`
+ mconf := markup_config.Default
+ mconf.Goldmark.Parser.AutoHeadingIDType = goldmark_config.AutoHeadingIDTypeGitHubAscii
+ b := convert(c, mconf, content)
+ got := string(b.Bytes())
+
+ c.Assert(got, qt.Contains, "<h2 id=\"god-is-good-\">")
+}
+
+func TestConvertAutoIDBlackfriday(t *testing.T) {
+ c := qt.New(t)
+
+ content := `
+## Let's try this, shall we?
+
+`
+ mconf := markup_config.Default
+ mconf.Goldmark.Parser.AutoHeadingIDType = goldmark_config.AutoHeadingIDTypeBlackfriday
+ b := convert(c, mconf, content)
+ got := string(b.Bytes())
+
+ c.Assert(got, qt.Contains, "<h2 id=\"let-s-try-this-shall-we\">")
+}
+
+func TestConvertAttributes(t *testing.T) {
+ c := qt.New(t)
+
+ withBlockAttributes := func(conf *markup_config.Config) {
+ conf.Goldmark.Parser.Attribute.Block = true
+ conf.Goldmark.Parser.Attribute.Title = false
+ }
+
+ withTitleAndBlockAttributes := func(conf *markup_config.Config) {
+ conf.Goldmark.Parser.Attribute.Block = true
+ conf.Goldmark.Parser.Attribute.Title = true
+ }
+
+ for _, test := range []struct {
+ name string
+ withConfig func(conf *markup_config.Config)
+ input string
+ expect any
+ }{
+ {
+ "Title",
+ nil,
+ "## heading {#id .className attrName=attrValue class=\"class1 class2\"}",
+ "<h2 id=\"id\" class=\"className class1 class2\" attrName=\"attrValue\">heading</h2>\n",
+ },
+ {
+ "Blockquote",
+ withBlockAttributes,
+ "> foo\n> bar\n{#id .className attrName=attrValue class=\"class1 class2\"}\n",
+ "<blockquote id=\"id\" class=\"className class1 class2\"><p>foo\nbar</p>\n</blockquote>\n",
+ },
+ /*{
+ // TODO(bep) this needs an upstream fix, see https://github.com/yuin/goldmark/issues/195
+ "Code block, CodeFences=false",
+ func(conf *markup_config.Config) {
+ withBlockAttributes(conf)
+ conf.Highlight.CodeFences = false
+ },
+ "```bash\necho 'foo';\n```\n{.myclass}",
+ "TODO",
+ },*/
+ {
+ "Code block, CodeFences=true",
+ func(conf *markup_config.Config) {
+ withBlockAttributes(conf)
+ conf.Highlight.CodeFences = true
+ },
+ "```bash {.myclass id=\"myid\"}\necho 'foo';\n````\n",
+ "<div class=\"highlight myclass\" id=\"myid\"><pre style",
+ },
+ {
+ "Code block, CodeFences=true,linenos=table",
+ func(conf *markup_config.Config) {
+ withBlockAttributes(conf)
+ conf.Highlight.CodeFences = true
+ },
+ "```bash {linenos=table .myclass id=\"myid\"}\necho 'foo';\n````\n{ .adfadf }",
+ []string{
+ "div class=\"highlight myclass\" id=\"myid\"><div s",
+ "table style",
+ },
+ },
+ {
+ "Code block, CodeFences=true,lineanchors",
+ func(conf *markup_config.Config) {
+ withBlockAttributes(conf)
+ conf.Highlight.CodeFences = true
+ conf.Highlight.NoClasses = false
+ },
+ "```bash {linenos=table, anchorlinenos=true, lineanchors=org-coderef--xyz}\necho 'foo';\n```",
+ "<div class=\"highlight\"><div class=\"chroma\">\n<table class=\"lntable\"><tr><td class=\"lntd\">\n<pre tabindex=\"0\" class=\"chroma\"><code><span class=\"lnt\" id=\"org-coderef--xyz-1\"><a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#org-coderef--xyz-1\">1</a>\n</span></code></pre></td>\n<td class=\"lntd\">\n<pre tabindex=\"0\" class=\"chroma\"><code class=\"language-bash\" data-lang=\"bash\"><span class=\"line\"><span class=\"cl\"><span class=\"nb\">echo</span> <span class=\"s1\">&#39;foo&#39;</span><span class=\"p\">;</span>\n</span></span></code></pre></td></tr></table>\n</div>\n</div>",
+ },
+ {
+ "Code block, CodeFences=true,lineanchors, default ordinal",
+ func(conf *markup_config.Config) {
+ withBlockAttributes(conf)
+ conf.Highlight.CodeFences = true
+ conf.Highlight.NoClasses = false
+ },
+ "```bash {linenos=inline, anchorlinenos=true}\necho 'foo';\nnecho 'bar';\n```\n\n```bash {linenos=inline, anchorlinenos=true}\necho 'baz';\nnecho 'qux';\n```",
+ []string{
+ "<span class=\"ln\" id=\"hl-0-1\"><a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#hl-0-1\">1</a></span><span class=\"cl\"><span class=\"nb\">echo</span> <span class=\"s1\">&#39;foo&#39;</span>",
+ "<span class=\"ln\" id=\"hl-0-2\"><a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#hl-0-2\">2</a></span><span class=\"cl\">necho <span class=\"s1\">&#39;bar&#39;</span>",
+ "<span class=\"ln\" id=\"hl-1-2\"><a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#hl-1-2\">2</a></span><span class=\"cl\">necho <span class=\"s1\">&#39;qux&#39;</span>",
+ },
+ },
+ {
+ "Paragraph",
+ withBlockAttributes,
+ "\nHi there.\n{.myclass }",
+ "<p class=\"myclass\">Hi there.</p>\n",
+ },
+ {
+ "Ordered list",
+ withBlockAttributes,
+ "\n1. First\n2. Second\n{.myclass }",
+ "<ol class=\"myclass\">\n<li>First</li>\n<li>Second</li>\n</ol>\n",
+ },
+ {
+ "Unordered list",
+ withBlockAttributes,
+ "\n* First\n* Second\n{.myclass }",
+ "<ul class=\"myclass\">\n<li>First</li>\n<li>Second</li>\n</ul>\n",
+ },
+ {
+ "Unordered list, indented",
+ withBlockAttributes,
+ `* Fruit
+ * Apple
+ * Orange
+ * Banana
+ {.fruits}
+* Dairy
+ * Milk
+ * Cheese
+ {.dairies}
+{.list}`,
+ []string{"<ul class=\"list\">\n<li>Fruit\n<ul class=\"fruits\">", "<li>Dairy\n<ul class=\"dairies\">"},
+ },
+ {
+ "Table",
+ withBlockAttributes,
+ `| A | B |
+| ------------- |:-------------:| -----:|
+| AV | BV |
+{.myclass }`,
+ "<table class=\"myclass\">\n<thead>",
+ },
+ {
+ "Title and Blockquote",
+ withTitleAndBlockAttributes,
+ "## heading {#id .className attrName=attrValue class=\"class1 class2\"}\n> foo\n> bar\n{.myclass}",
+ "<h2 id=\"id\" class=\"className class1 class2\" attrName=\"attrValue\">heading</h2>\n<blockquote class=\"myclass\"><p>foo\nbar</p>\n</blockquote>\n",
+ },
+ } {
+ c.Run(test.name, func(c *qt.C) {
+ mconf := markup_config.Default
+ if test.withConfig != nil {
+ test.withConfig(&mconf)
+ }
+ b := convert(c, mconf, test.input)
+ got := string(b.Bytes())
+
+ for _, s := range cast.ToStringSlice(test.expect) {
+ c.Assert(got, qt.Contains, s)
+ }
+ })
+ }
+}
+
+func TestConvertIssues(t *testing.T) {
+ c := qt.New(t)
+
+ // https://github.com/gohugoio/hugo/issues/7619
+ c.Run("Hyphen in HTML attributes", func(c *qt.C) {
+ mconf := markup_config.Default
+ mconf.Goldmark.Renderer.Unsafe = true
+ input := `<custom-element>
+ <div>This will be "slotted" into the custom element.</div>
+</custom-element>
+`
+
+ b := convert(c, mconf, input)
+ got := string(b.Bytes())
+
+ c.Assert(got, qt.Contains, "<custom-element>\n <div>This will be \"slotted\" into the custom element.</div>\n</custom-element>\n")
+ })
+}
+
+func TestCodeFence(t *testing.T) {
+ c := qt.New(t)
+
+ lines := `LINE1
+LINE2
+LINE3
+LINE4
+LINE5
+`
+
+ convertForConfig := func(c *qt.C, conf highlight.Config, code, language string) string {
+ mconf := markup_config.Default
+ mconf.Highlight = conf
+
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ MarkupConfig: mconf,
+ Logger: loggers.NewErrorLogger(),
+ },
+ )
+
+ h := highlight.New(conf)
+
+ getRenderer := func(t hooks.RendererType, id any) any {
+ if t == hooks.CodeBlockRendererType {
+ return h
+ }
+ return nil
+ }
+
+ content := "```" + language + "\n" + code + "\n```"
+
+ c.Assert(err, qt.IsNil)
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ b, err := conv.Convert(converter.RenderContext{Src: []byte(content), GetRenderer: getRenderer})
+ c.Assert(err, qt.IsNil)
+
+ return string(b.Bytes())
+ }
+
+ c.Run("Basic", func(c *qt.C) {
+ cfg := highlight.DefaultConfig
+ cfg.NoClasses = false
+
+ result := convertForConfig(c, cfg, `echo "Hugo Rocks!"`, "bash")
+ // TODO(bep) there is a whitespace mismatch (\n) between this and the highlight template func.
+ c.Assert(result, qt.Equals, "<div class=\"highlight\"><pre tabindex=\"0\" class=\"chroma\"><code class=\"language-bash\" data-lang=\"bash\"><span class=\"line\"><span class=\"cl\"><span class=\"nb\">echo</span> <span class=\"s2\">&#34;Hugo Rocks!&#34;</span>\n</span></span></code></pre></div>")
+ result = convertForConfig(c, cfg, `echo "Hugo Rocks!"`, "unknown")
+ c.Assert(result, qt.Equals, "<pre tabindex=\"0\"><code class=\"language-unknown\" data-lang=\"unknown\">echo &#34;Hugo Rocks!&#34;\n</code></pre>")
+ })
+
+ c.Run("Highlight lines, default config", func(c *qt.C) {
+ cfg := highlight.DefaultConfig
+ cfg.NoClasses = false
+
+ result := convertForConfig(c, cfg, lines, `bash {linenos=table,hl_lines=[2 "4-5"],linenostart=3}`)
+ c.Assert(result, qt.Contains, "<div class=\"highlight\"><div class=\"chroma\">\n<table class=\"lntable\"><tr><td class=\"lntd\">\n<pre tabindex=\"0\" class=\"chroma\"><code><span class")
+ c.Assert(result, qt.Contains, "<span class=\"hl\"><span class=\"lnt\">4")
+
+ result = convertForConfig(c, cfg, lines, "bash {linenos=inline,hl_lines=[2]}")
+ c.Assert(result, qt.Contains, "<span class=\"ln\">2</span><span class=\"cl\">LINE2\n</span></span>")
+ c.Assert(result, qt.Not(qt.Contains), "<table")
+
+ result = convertForConfig(c, cfg, lines, "bash {linenos=true,hl_lines=[2]}")
+ c.Assert(result, qt.Contains, "<table")
+ c.Assert(result, qt.Contains, "<span class=\"hl\"><span class=\"lnt\">2\n</span>")
+ })
+
+ c.Run("Highlight lines, linenumbers default on", func(c *qt.C) {
+ cfg := highlight.DefaultConfig
+ cfg.NoClasses = false
+ cfg.LineNos = true
+
+ result := convertForConfig(c, cfg, lines, "bash")
+ c.Assert(result, qt.Contains, "<span class=\"lnt\">2\n</span>")
+
+ result = convertForConfig(c, cfg, lines, "bash {linenos=false,hl_lines=[2]}")
+ c.Assert(result, qt.Not(qt.Contains), "class=\"lnt\"")
+ })
+
+ c.Run("Highlight lines, linenumbers default on, linenumbers in table default off", func(c *qt.C) {
+ cfg := highlight.DefaultConfig
+ cfg.NoClasses = false
+ cfg.LineNos = true
+ cfg.LineNumbersInTable = false
+
+ result := convertForConfig(c, cfg, lines, "bash")
+ c.Assert(result, qt.Contains, "<span class=\"ln\">2</span><span class=\"cl\">LINE2\n</span>")
+ result = convertForConfig(c, cfg, lines, "bash {linenos=table}")
+ c.Assert(result, qt.Contains, "<span class=\"lnt\">1\n</span>")
+ })
+
+ c.Run("No language", func(c *qt.C) {
+ cfg := highlight.DefaultConfig
+ cfg.NoClasses = false
+ cfg.LineNos = true
+ cfg.LineNumbersInTable = false
+
+ result := convertForConfig(c, cfg, lines, "")
+ c.Assert(result, qt.Contains, "<pre tabindex=\"0\"><code>LINE1\n")
+ })
+
+ c.Run("No language, guess syntax", func(c *qt.C) {
+ cfg := highlight.DefaultConfig
+ cfg.NoClasses = false
+ cfg.GuessSyntax = true
+ cfg.LineNos = true
+ cfg.LineNumbersInTable = false
+
+ result := convertForConfig(c, cfg, lines, "")
+ c.Assert(result, qt.Contains, "<span class=\"ln\">2</span><span class=\"cl\">LINE2\n</span></span>")
+ })
+}
diff --git a/markup/goldmark/goldmark_config/config.go b/markup/goldmark/goldmark_config/config.go
new file mode 100644
index 000000000..a3238091b
--- /dev/null
+++ b/markup/goldmark/goldmark_config/config.go
@@ -0,0 +1,98 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package goldmark_config holds Goldmark related configuration.
+package goldmark_config
+
+const (
+ AutoHeadingIDTypeGitHub = "github"
+ AutoHeadingIDTypeGitHubAscii = "github-ascii"
+ AutoHeadingIDTypeBlackfriday = "blackfriday"
+)
+
+// Default holds the default Goldmark configuration.
+var Default = Config{
+ Extensions: Extensions{
+ Typographer: true,
+ Footnote: true,
+ DefinitionList: true,
+ Table: true,
+ Strikethrough: true,
+ Linkify: true,
+ LinkifyProtocol: "https",
+ TaskList: true,
+ },
+ Renderer: Renderer{
+ Unsafe: false,
+ },
+ Parser: Parser{
+ AutoHeadingID: true,
+ AutoHeadingIDType: AutoHeadingIDTypeGitHub,
+ Attribute: ParserAttribute{
+ Title: true,
+ Block: false,
+ },
+ },
+}
+
+// Config configures Goldmark.
+type Config struct {
+ Renderer Renderer
+ Parser Parser
+ Extensions Extensions
+}
+
+type Extensions struct {
+ Typographer bool
+ Footnote bool
+ DefinitionList bool
+
+ // GitHub flavored markdown
+ Table bool
+ Strikethrough bool
+ Linkify bool
+ LinkifyProtocol string
+ TaskList bool
+}
+
+type Renderer struct {
+ // Whether softline breaks should be rendered as '<br>'
+ HardWraps bool
+
+ // XHTML instead of HTML5.
+ XHTML bool
+
+ // Allow raw HTML etc.
+ Unsafe bool
+}
+
+type Parser struct {
+ // Enables custom heading ids and
+ // auto generated heading ids.
+ AutoHeadingID bool
+
+ // The strategy to use when generating heading IDs.
+	// Available options are "github", "github-ascii" and "blackfriday".
+ // Default is "github", which will create GitHub-compatible anchor names.
+ AutoHeadingIDType string
+
+ // Enables custom attributes.
+ Attribute ParserAttribute
+}
+
+type ParserAttribute struct {
+ // Enables custom attributes for titles.
+ Title bool
+	// Enables custom attributes for blocks.
+ Block bool
+}
diff --git a/markup/goldmark/integration_test.go b/markup/goldmark/integration_test.go
new file mode 100644
index 000000000..e1d1445ee
--- /dev/null
+++ b/markup/goldmark/integration_test.go
@@ -0,0 +1,577 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package goldmark_test
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+// Issue 9463
+func TestAttributeExclusion(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup.goldmark.renderer]
+ unsafe = false
+[markup.goldmark.parser.attribute]
+ block = true
+ title = true
+-- content/p1.md --
+---
+title: "p1"
+---
+## Heading {class="a" onclick="alert('heading')"}
+
+> Blockquote
+{class="b" ondblclick="alert('blockquote')"}
+
+~~~bash {id="c" onmouseover="alert('code fence')" LINENOS=true}
+foo
+~~~
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+ <h2 class="a" id="heading">
+ <blockquote class="b">
+ <div class="highlight" id="c">
+ `)
+}
+
+// Issue 9511
+func TestAttributeExclusionWithRenderHook(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- content/p1.md --
+---
+title: "p1"
+---
+## Heading {onclick="alert('renderhook')" data-foo="bar"}
+-- layouts/_default/single.html --
+{{ .Content }}
+-- layouts/_default/_markup/render-heading.html --
+<h{{ .Level }}
+ {{- range $k, $v := .Attributes -}}
+ {{- printf " %s=%q" $k $v | safeHTMLAttr -}}
+ {{- end -}}
+>{{ .Text | safeHTML }}</h{{ .Level }}>
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+ <h2 data-foo="bar" id="heading">Heading</h2>
+ `)
+}
+
+func TestAttributesDefaultRenderer(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- content/p1.md --
+---
+title: "p1"
+---
+## Heading Attribute Which Needs Escaping { class="a < b" }
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+class="a &lt; b"
+ `)
+}
+
+// Issue 9558.
+func TestAttributesHookNoEscape(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- content/p1.md --
+---
+title: "p1"
+---
+## Heading Attribute Which Needs Escaping { class="Smith & Wesson" }
+-- layouts/_default/_markup/render-heading.html --
+plain: |{{- range $k, $v := .Attributes -}}{{ $k }}: {{ $v }}|{{ end }}|
+safeHTML: |{{- range $k, $v := .Attributes -}}{{ $k }}: {{ $v | safeHTML }}|{{ end }}|
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+plain: |class: Smith &amp; Wesson|id: heading-attribute-which-needs-escaping|
+safeHTML: |class: Smith & Wesson|id: heading-attribute-which-needs-escaping|
+ `)
+}
+
+// Issue 9504
+func TestLinkInTitle(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- content/p1.md --
+---
+title: "p1"
+---
+## Hello [Test](https://example.com)
+-- layouts/_default/single.html --
+{{ .Content }}
+-- layouts/_default/_markup/render-heading.html --
+<h{{ .Level }} id="{{ .Anchor | safeURL }}">
+ {{ .Text | safeHTML }}
+ <a class="anchor" href="#{{ .Anchor | safeURL }}">#</a>
+</h{{ .Level }}>
+-- layouts/_default/_markup/render-link.html --
+<a href="{{ .Destination | safeURL }}"{{ with .Title}} title="{{ . }}"{{ end }}>{{ .Text | safeHTML }}</a>
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: false,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html",
+ "<h2 id=\"hello-testhttpsexamplecom\">\n Hello <a href=\"https://example.com\">Test</a>\n\n <a class=\"anchor\" href=\"#hello-testhttpsexamplecom\">#</a>\n</h2>",
+ )
+}
+
+func TestHighlight(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup]
+[markup.highlight]
+anchorLineNos = false
+codeFences = true
+guessSyntax = false
+hl_Lines = ''
+lineAnchors = ''
+lineNoStart = 1
+lineNos = false
+lineNumbersInTable = true
+noClasses = false
+style = 'monokai'
+tabWidth = 4
+-- layouts/_default/single.html --
+{{ .Content }}
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Code Fences
+
+§§§bash
+LINE1
+§§§
+
+## Code Fences No Lexer
+
+§§§moo
+LINE1
+§§§
+
+## Code Fences Simple Attributes
+
+§§A§bash { .myclass id="myid" }
+LINE1
+§§A§
+
+## Code Fences Line Numbers
+
+§§§bash {linenos=table,hl_lines=[8,"15-17"],linenostart=199}
+LINE1
+LINE2
+LINE3
+LINE4
+LINE5
+LINE6
+LINE7
+LINE8
+§§§
+
+
+
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html",
+ "<div class=\"highlight\"><pre tabindex=\"0\" class=\"chroma\"><code class=\"language-bash\" data-lang=\"bash\"><span class=\"line\"><span class=\"cl\">LINE1\n</span></span></code></pre></div>",
+ "Code Fences No Lexer</h2>\n<pre tabindex=\"0\"><code class=\"language-moo\" data-lang=\"moo\">LINE1\n</code></pre>",
+ "lnt",
+ )
+}
+
+func BenchmarkRenderHooks(b *testing.B) {
+ files := `
+-- config.toml --
+-- layouts/_default/_markup/render-heading.html --
+<h{{ .Level }} id="{{ .Anchor | safeURL }}">
+ {{ .Text | safeHTML }}
+ <a class="anchor" href="#{{ .Anchor | safeURL }}">#</a>
+</h{{ .Level }}>
+-- layouts/_default/_markup/render-link.html --
+<a href="{{ .Destination | safeURL }}"{{ with .Title}} title="{{ . }}"{{ end }}>{{ .Text | safeHTML }}</a>
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ content := `
+
+## Hello1 [Test](https://example.com)
+
+A.
+
+## Hello2 [Test](https://example.com)
+
+B.
+
+## Hello3 [Test](https://example.com)
+
+C.
+
+## Hello4 [Test](https://example.com)
+
+D.
+
+[Test](https://example.com)
+
+## Hello5
+
+
+`
+
+ for i := 1; i < 100; i++ {
+ files += fmt.Sprintf("\n-- content/posts/p%d.md --\n"+content, i+1)
+ }
+
+ cfg := hugolib.IntegrationTestConfig{
+ T: b,
+ TxtarString: files,
+ }
+ builders := make([]*hugolib.IntegrationTestBuilder, b.N)
+
+ for i := range builders {
+ builders[i] = hugolib.NewIntegrationTestBuilder(cfg)
+ }
+
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ builders[i].Build()
+ }
+}
+
+func BenchmarkCodeblocks(b *testing.B) {
+ files := `
+-- config.toml --
+[markup]
+ [markup.highlight]
+ anchorLineNos = false
+ codeFences = true
+ guessSyntax = false
+ hl_Lines = ''
+ lineAnchors = ''
+ lineNoStart = 1
+ lineNos = false
+ lineNumbersInTable = true
+ noClasses = true
+ style = 'monokai'
+ tabWidth = 4
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ content := `
+
+FENCEgo
+package main
+import "fmt"
+func main() {
+ fmt.Println("hello world")
+}
+FENCE
+
+FENCEbash
+#!/bin/bash
+# Usage: Hello World Bash Shell Script Using Variables
+# Author: Vivek Gite
+# -------------------------------------------------
+
+# Define bash shell variable called var
+# Avoid spaces around the assignment operator (=)
+var="Hello World"
+
+# print it
+echo "$var"
+
+# Another way of printing it
+printf "%s\n" "$var"
+FENCE
+`
+
+ content = strings.ReplaceAll(content, "FENCE", "```")
+
+ for i := 1; i < 100; i++ {
+ files += fmt.Sprintf("\n-- content/posts/p%d.md --\n"+content, i+1)
+ }
+
+ cfg := hugolib.IntegrationTestConfig{
+ T: b,
+ TxtarString: files,
+ }
+ builders := make([]*hugolib.IntegrationTestBuilder, b.N)
+
+ for i := range builders {
+ builders[i] = hugolib.NewIntegrationTestBuilder(cfg)
+ }
+
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ builders[i].Build()
+ }
+}
+
+// Issue #8959
+func TestHookInfiniteRecursion(t *testing.T) {
+ t.Parallel()
+
+ for _, renderFunc := range []string{"markdownify", ".Page.RenderString"} {
+ t.Run(renderFunc, func(t *testing.T) {
+
+ files := `
+-- config.toml --
+-- layouts/_default/_markup/render-link.html --
+<a href="{{ .Destination | safeURL }}">{{ .Text | RENDERFUNC }}</a>
+-- layouts/_default/single.html --
+{{ .Content }}
+-- content/p1.md --
+---
+title: "p1"
+---
+
+https://example.org
+
+a@b.com
+
+
+ `
+
+ files = strings.ReplaceAll(files, "RENDERFUNC", renderFunc)
+
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, "text is already rendered, repeating it may cause infinite recursion")
+
+ })
+
+ }
+
+}
+
+// Issue 9594
+func TestQuotesInImgAltAttr(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup.goldmark.extensions]
+ typographer = false
+-- content/p1.md --
+---
+title: "p1"
+---
+!["a"](b.jpg)
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+ <img src="b.jpg" alt="&quot;a&quot;">
+ `)
+}
+
+func TestLinkifyProtocol(t *testing.T) {
+ t.Parallel()
+
+ runTest := func(protocol string, withHook bool) *hugolib.IntegrationTestBuilder {
+
+ files := `
+-- config.toml --
+[markup.goldmark]
+[markup.goldmark.extensions]
+linkify = true
+linkifyProtocol = "PROTOCOL"
+-- content/p1.md --
+---
+title: "p1"
+---
+Link no procol: www.example.org
+Link http procol: http://www.example.org
+Link https procol: https://www.example.org
+
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+ files = strings.ReplaceAll(files, "PROTOCOL", protocol)
+
+ if withHook {
+ files += `-- layouts/_default/_markup/render-link.html --
+<a href="{{ .Destination | safeURL }}">{{ .Text | safeHTML }}</a>`
+ }
+
+ return hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ }
+
+ for _, withHook := range []bool{false, true} {
+
+ b := runTest("https", withHook)
+
+ b.AssertFileContent("public/p1/index.html",
+ "Link no procol: <a href=\"https://www.example.org\">www.example.org</a>",
+ "Link http procol: <a href=\"http://www.example.org\">http://www.example.org</a>",
+ "Link https procol: <a href=\"https://www.example.org\">https://www.example.org</a></p>",
+ )
+
+ b = runTest("http", withHook)
+
+ b.AssertFileContent("public/p1/index.html",
+ "Link no procol: <a href=\"http://www.example.org\">www.example.org</a>",
+ "Link http procol: <a href=\"http://www.example.org\">http://www.example.org</a>",
+ "Link https procol: <a href=\"https://www.example.org\">https://www.example.org</a></p>",
+ )
+
+ b = runTest("gopher", withHook)
+
+ b.AssertFileContent("public/p1/index.html",
+ "Link no procol: <a href=\"gopher://www.example.org\">www.example.org</a>",
+ "Link http procol: <a href=\"http://www.example.org\">http://www.example.org</a>",
+ "Link https procol: <a href=\"https://www.example.org\">https://www.example.org</a></p>",
+ )
+
+ }
+}
+
+func TestGoldmarkBugs(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+[markup.goldmark.renderer]
+unsafe = true
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Issue 9650
+
+a <!-- b --> c
+
+## Issue 9658
+
+- This is a list item <!-- Comment: an innocent-looking comment -->
+
+
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContentExact("public/p1/index.html",
+ // Issue 9650
+ "<p>a <!-- b --> c</p>",
+ // Issue 9658 (crash)
+ "<li>This is a list item <!-- Comment: an innocent-looking comment --></li>",
+ )
+}
diff --git a/markup/goldmark/internal/extensions/attributes/attributes.go b/markup/goldmark/internal/extensions/attributes/attributes.go
new file mode 100644
index 000000000..60ae609ec
--- /dev/null
+++ b/markup/goldmark/internal/extensions/attributes/attributes.go
@@ -0,0 +1,125 @@
+package attributes
+
+import (
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/text"
+ "github.com/yuin/goldmark/util"
+)
+
+// This extension is based on/inspired by https://github.com/mdigger/goldmark-attributes
+// MIT License
+// Copyright (c) 2019 Dmitry Sedykh
+
+var (
+ kindAttributesBlock = ast.NewNodeKind("AttributesBlock")
+
+ defaultParser = new(attrParser)
+ defaultTransformer = new(transformer)
+ attributes goldmark.Extender = new(attrExtension)
+)
+
+func New() goldmark.Extender {
+ return attributes
+}
+
+type attrExtension struct{}
+
+func (a *attrExtension) Extend(m goldmark.Markdown) {
+ m.Parser().AddOptions(
+ parser.WithBlockParsers(
+ util.Prioritized(defaultParser, 100)),
+ parser.WithASTTransformers(
+ util.Prioritized(defaultTransformer, 100),
+ ),
+ )
+}
+
+type attrParser struct{}
+
+func (a *attrParser) CanAcceptIndentedLine() bool {
+ return false
+}
+
+func (a *attrParser) CanInterruptParagraph() bool {
+ return true
+}
+
+func (a *attrParser) Close(node ast.Node, reader text.Reader, pc parser.Context) {
+}
+
+func (a *attrParser) Continue(node ast.Node, reader text.Reader, pc parser.Context) parser.State {
+ return parser.Close
+}
+
+func (a *attrParser) Open(parent ast.Node, reader text.Reader, pc parser.Context) (ast.Node, parser.State) {
+ if attrs, ok := parser.ParseAttributes(reader); ok {
+ // add attributes
+ var node = &attributesBlock{
+ BaseBlock: ast.BaseBlock{},
+ }
+ for _, attr := range attrs {
+ node.SetAttribute(attr.Name, attr.Value)
+ }
+ return node, parser.NoChildren
+ }
+ return nil, parser.RequireParagraph
+}
+
+func (a *attrParser) Trigger() []byte {
+ return []byte{'{'}
+}
+
+type attributesBlock struct {
+ ast.BaseBlock
+}
+
+func (a *attributesBlock) Dump(source []byte, level int) {
+ attrs := a.Attributes()
+ list := make(map[string]string, len(attrs))
+ for _, attr := range attrs {
+ var (
+ name = util.BytesToReadOnlyString(attr.Name)
+ value = util.BytesToReadOnlyString(util.EscapeHTML(attr.Value.([]byte)))
+ )
+ list[name] = value
+ }
+ ast.DumpHelper(a, source, level, list, nil)
+}
+
+func (a *attributesBlock) Kind() ast.NodeKind {
+ return kindAttributesBlock
+}
+
+type transformer struct{}
+
+func (a *transformer) Transform(node *ast.Document, reader text.Reader, pc parser.Context) {
+ var attributes = make([]ast.Node, 0, 500)
+ ast.Walk(node, func(node ast.Node, entering bool) (ast.WalkStatus, error) {
+ if entering && node.Kind() == kindAttributesBlock {
+ // Attributes for fenced code blocks are handled in their own extension,
+ // but note that we currently only support code block attributes when
+ // CodeFences=true.
+ if node.PreviousSibling() != nil && node.PreviousSibling().Kind() != ast.KindFencedCodeBlock && !node.HasBlankPreviousLines() {
+ attributes = append(attributes, node)
+ return ast.WalkSkipChildren, nil
+ }
+ }
+
+ return ast.WalkContinue, nil
+ })
+
+ for _, attr := range attributes {
+ if prev := attr.PreviousSibling(); prev != nil &&
+ prev.Type() == ast.TypeBlock {
+ for _, attr := range attr.Attributes() {
+ if _, found := prev.Attribute(attr.Name); !found {
+ prev.SetAttribute(attr.Name, attr.Value)
+ }
+ }
+ }
+ // remove attributes node
+ attr.Parent().RemoveChild(attr.Parent(), attr)
+ }
+}
diff --git a/markup/goldmark/internal/render/context.go b/markup/goldmark/internal/render/context.go
new file mode 100644
index 000000000..b18983ef3
--- /dev/null
+++ b/markup/goldmark/internal/render/context.go
@@ -0,0 +1,81 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package render
+
+import (
+ "bytes"
+ "math/bits"
+
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/converter"
+)
+
+type BufWriter struct {
+ *bytes.Buffer
+}
+
+const maxInt = 1<<(bits.UintSize-1) - 1
+
+func (b *BufWriter) Available() int {
+ return maxInt
+}
+
+func (b *BufWriter) Buffered() int {
+ return b.Len()
+}
+
+func (b *BufWriter) Flush() error {
+ return nil
+}
+
+type Context struct {
+ *BufWriter
+ positions []int
+ ContextData
+}
+
+func (ctx *Context) PushPos(n int) {
+ ctx.positions = append(ctx.positions, n)
+}
+
+func (ctx *Context) PopPos() int {
+ i := len(ctx.positions) - 1
+ p := ctx.positions[i]
+ ctx.positions = ctx.positions[:i]
+ return p
+}
+
+type ContextData interface {
+ RenderContext() converter.RenderContext
+ DocumentContext() converter.DocumentContext
+ AddIdentity(id identity.Provider)
+}
+
+type RenderContextDataHolder struct {
+ Rctx converter.RenderContext
+ Dctx converter.DocumentContext
+ IDs identity.Manager
+}
+
+func (ctx *RenderContextDataHolder) RenderContext() converter.RenderContext {
+ return ctx.Rctx
+}
+
+func (ctx *RenderContextDataHolder) DocumentContext() converter.DocumentContext {
+ return ctx.Dctx
+}
+
+func (ctx *RenderContextDataHolder) AddIdentity(id identity.Provider) {
+ ctx.IDs.Add(id)
+}
diff --git a/markup/goldmark/render_hooks.go b/markup/goldmark/render_hooks.go
new file mode 100644
index 000000000..e28f816d6
--- /dev/null
+++ b/markup/goldmark/render_hooks.go
@@ -0,0 +1,422 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package goldmark
+
+import (
+ "bytes"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/types/hstring"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/goldmark/goldmark_config"
+ "github.com/gohugoio/hugo/markup/goldmark/internal/render"
+ "github.com/gohugoio/hugo/markup/internal/attributes"
+
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/renderer"
+ "github.com/yuin/goldmark/renderer/html"
+ "github.com/yuin/goldmark/util"
+)
+
+var _ renderer.SetOptioner = (*hookedRenderer)(nil)
+
+func newLinkRenderer(cfg goldmark_config.Config) renderer.NodeRenderer {
+ r := &hookedRenderer{
+ linkifyProtocol: []byte(cfg.Extensions.LinkifyProtocol),
+ Config: html.Config{
+ Writer: html.DefaultWriter,
+ },
+ }
+ return r
+}
+
+func newLinks(cfg goldmark_config.Config) goldmark.Extender {
+ return &links{cfg: cfg}
+}
+
+type linkContext struct {
+ page any
+ destination string
+ title string
+ text hstring.RenderedString
+ plainText string
+}
+
+func (ctx linkContext) Destination() string {
+ return ctx.destination
+}
+
+func (ctx linkContext) Resolved() bool {
+ return false
+}
+
+func (ctx linkContext) Page() any {
+ return ctx.page
+}
+
+func (ctx linkContext) Text() hstring.RenderedString {
+ return ctx.text
+}
+
+func (ctx linkContext) PlainText() string {
+ return ctx.plainText
+}
+
+func (ctx linkContext) Title() string {
+ return ctx.title
+}
+
+type headingContext struct {
+ page any
+ level int
+ anchor string
+ text hstring.RenderedString
+ plainText string
+ *attributes.AttributesHolder
+}
+
+func (ctx headingContext) Page() any {
+ return ctx.page
+}
+
+func (ctx headingContext) Level() int {
+ return ctx.level
+}
+
+func (ctx headingContext) Anchor() string {
+ return ctx.anchor
+}
+
+func (ctx headingContext) Text() hstring.RenderedString {
+ return ctx.text
+}
+
+func (ctx headingContext) PlainText() string {
+ return ctx.plainText
+}
+
+type hookedRenderer struct {
+ linkifyProtocol []byte
+ html.Config
+}
+
+func (r *hookedRenderer) SetOption(name renderer.OptionName, value any) {
+ r.Config.SetOption(name, value)
+}
+
+// RegisterFuncs implements NodeRenderer.RegisterFuncs.
+func (r *hookedRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegisterer) {
+ reg.Register(ast.KindLink, r.renderLink)
+ reg.Register(ast.KindAutoLink, r.renderAutoLink)
+ reg.Register(ast.KindImage, r.renderImage)
+ reg.Register(ast.KindHeading, r.renderHeading)
+}
+
+func (r *hookedRenderer) renderImage(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ n := node.(*ast.Image)
+ var lr hooks.LinkRenderer
+
+ ctx, ok := w.(*render.Context)
+ if ok {
+ h := ctx.RenderContext().GetRenderer(hooks.ImageRendererType, nil)
+ ok = h != nil
+ if ok {
+ lr = h.(hooks.LinkRenderer)
+ }
+ }
+
+ if !ok {
+ return r.renderImageDefault(w, source, node, entering)
+ }
+
+ if entering {
+ // Store the current pos so we can capture the rendered text.
+ ctx.PushPos(ctx.Buffer.Len())
+ return ast.WalkContinue, nil
+ }
+
+ pos := ctx.PopPos()
+ text := ctx.Buffer.Bytes()[pos:]
+ ctx.Buffer.Truncate(pos)
+
+ err := lr.RenderLink(
+ w,
+ linkContext{
+ page: ctx.DocumentContext().Document,
+ destination: string(n.Destination),
+ title: string(n.Title),
+ text: hstring.RenderedString(text),
+ plainText: string(n.Text(source)),
+ },
+ )
+
+ ctx.AddIdentity(lr)
+
+ return ast.WalkContinue, err
+}
+
+// Fall back to the default Goldmark render funcs. Method below borrowed from:
+// https://github.com/yuin/goldmark/blob/b611cd333a492416b56aa8d94b04a67bf0096ab2/renderer/html/html.go#L404
+func (r *hookedRenderer) renderImageDefault(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ if !entering {
+ return ast.WalkContinue, nil
+ }
+ n := node.(*ast.Image)
+ _, _ = w.WriteString("<img src=\"")
+ if r.Unsafe || !html.IsDangerousURL(n.Destination) {
+ _, _ = w.Write(util.EscapeHTML(util.URLEscape(n.Destination, true)))
+ }
+ _, _ = w.WriteString(`" alt="`)
+ _, _ = w.Write(util.EscapeHTML(n.Text(source)))
+ _ = w.WriteByte('"')
+ if n.Title != nil {
+ _, _ = w.WriteString(` title="`)
+ r.Writer.Write(w, n.Title)
+ _ = w.WriteByte('"')
+ }
+ if r.XHTML {
+ _, _ = w.WriteString(" />")
+ } else {
+ _, _ = w.WriteString(">")
+ }
+ return ast.WalkSkipChildren, nil
+}
+
+func (r *hookedRenderer) renderLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ n := node.(*ast.Link)
+ var lr hooks.LinkRenderer
+
+ ctx, ok := w.(*render.Context)
+ if ok {
+ h := ctx.RenderContext().GetRenderer(hooks.LinkRendererType, nil)
+ ok = h != nil
+ if ok {
+ lr = h.(hooks.LinkRenderer)
+ }
+ }
+
+ if !ok {
+ return r.renderLinkDefault(w, source, node, entering)
+ }
+
+ if entering {
+ // Store the current pos so we can capture the rendered text.
+ ctx.PushPos(ctx.Buffer.Len())
+ return ast.WalkContinue, nil
+ }
+
+ pos := ctx.PopPos()
+ text := ctx.Buffer.Bytes()[pos:]
+ ctx.Buffer.Truncate(pos)
+
+ err := lr.RenderLink(
+ w,
+ linkContext{
+ page: ctx.DocumentContext().Document,
+ destination: string(n.Destination),
+ title: string(n.Title),
+ text: hstring.RenderedString(text),
+ plainText: string(n.Text(source)),
+ },
+ )
+
+ // TODO(bep) I have a working branch that fixes these rather confusing identity types,
+ // but for now it's important that it's not .GetIdentity() that's added here,
+ // to make sure we search the entire chain on changes.
+ ctx.AddIdentity(lr)
+
+ return ast.WalkContinue, err
+}
+
+// Fall back to the default Goldmark render funcs. Method below borrowed from:
+// https://github.com/yuin/goldmark/blob/b611cd333a492416b56aa8d94b04a67bf0096ab2/renderer/html/html.go#L404
+func (r *hookedRenderer) renderLinkDefault(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ n := node.(*ast.Link)
+ if entering {
+ _, _ = w.WriteString("<a href=\"")
+ if r.Unsafe || !html.IsDangerousURL(n.Destination) {
+ _, _ = w.Write(util.EscapeHTML(util.URLEscape(n.Destination, true)))
+ }
+ _ = w.WriteByte('"')
+ if n.Title != nil {
+ _, _ = w.WriteString(` title="`)
+ r.Writer.Write(w, n.Title)
+ _ = w.WriteByte('"')
+ }
+ _ = w.WriteByte('>')
+ } else {
+ _, _ = w.WriteString("</a>")
+ }
+ return ast.WalkContinue, nil
+}
+
+func (r *hookedRenderer) renderAutoLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ if !entering {
+ return ast.WalkContinue, nil
+ }
+
+ n := node.(*ast.AutoLink)
+ var lr hooks.LinkRenderer
+
+ ctx, ok := w.(*render.Context)
+ if ok {
+ h := ctx.RenderContext().GetRenderer(hooks.LinkRendererType, nil)
+ ok = h != nil
+ if ok {
+ lr = h.(hooks.LinkRenderer)
+ }
+ }
+
+ if !ok {
+ return r.renderAutoLinkDefault(w, source, node, entering)
+ }
+
+ url := string(r.autoLinkURL(n, source))
+ label := string(n.Label(source))
+ if n.AutoLinkType == ast.AutoLinkEmail && !strings.HasPrefix(strings.ToLower(url), "mailto:") {
+ url = "mailto:" + url
+ }
+
+ err := lr.RenderLink(
+ w,
+ linkContext{
+ page: ctx.DocumentContext().Document,
+ destination: url,
+ text: hstring.RenderedString(label),
+ plainText: label,
+ },
+ )
+
+ // TODO(bep) I have a working branch that fixes these rather confusing identity types,
+ // but for now it's important that it's not .GetIdentity() that's added here,
+ // to make sure we search the entire chain on changes.
+ ctx.AddIdentity(lr)
+
+ return ast.WalkContinue, err
+}
+
+// Fall back to the default Goldmark render funcs. Method below borrowed from:
+// https://github.com/yuin/goldmark/blob/5588d92a56fe1642791cf4aa8e9eae8227cfeecd/renderer/html/html.go#L439
+func (r *hookedRenderer) renderAutoLinkDefault(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ n := node.(*ast.AutoLink)
+ if !entering {
+ return ast.WalkContinue, nil
+ }
+
+ _, _ = w.WriteString(`<a href="`)
+ url := r.autoLinkURL(n, source)
+ label := n.Label(source)
+ if n.AutoLinkType == ast.AutoLinkEmail && !bytes.HasPrefix(bytes.ToLower(url), []byte("mailto:")) {
+ _, _ = w.WriteString("mailto:")
+ }
+ _, _ = w.Write(util.EscapeHTML(util.URLEscape(url, false)))
+ if n.Attributes() != nil {
+ _ = w.WriteByte('"')
+ html.RenderAttributes(w, n, html.LinkAttributeFilter)
+ _ = w.WriteByte('>')
+ } else {
+ _, _ = w.WriteString(`">`)
+ }
+ _, _ = w.Write(util.EscapeHTML(label))
+ _, _ = w.WriteString(`</a>`)
+ return ast.WalkContinue, nil
+}
+
+func (r *hookedRenderer) autoLinkURL(n *ast.AutoLink, source []byte) []byte {
+ url := n.URL(source)
+ if len(n.Protocol) > 0 && !bytes.Equal(n.Protocol, r.linkifyProtocol) {
+ // The CommonMark spec says "http" is the correct protocol for links,
+ // but this doesn't make much sense (the fact that they should care about the rendered output).
+ // Note that n.Protocol is not set if protocol is provided by user.
+ url = append(r.linkifyProtocol, url[len(n.Protocol):]...)
+ }
+ return url
+}
+
+func (r *hookedRenderer) renderHeading(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ n := node.(*ast.Heading)
+ var hr hooks.HeadingRenderer
+
+ ctx, ok := w.(*render.Context)
+ if ok {
+ h := ctx.RenderContext().GetRenderer(hooks.HeadingRendererType, nil)
+ ok = h != nil
+ if ok {
+ hr = h.(hooks.HeadingRenderer)
+ }
+ }
+
+ if !ok {
+ return r.renderHeadingDefault(w, source, node, entering)
+ }
+
+ if entering {
+ // Store the current pos so we can capture the rendered text.
+ ctx.PushPos(ctx.Buffer.Len())
+ return ast.WalkContinue, nil
+ }
+
+ pos := ctx.PopPos()
+ text := ctx.Buffer.Bytes()[pos:]
+ ctx.Buffer.Truncate(pos)
+ // All ast.Heading nodes are guaranteed to have an attribute called "id"
+ // that is an array of bytes that encode a valid string.
+ anchori, _ := n.AttributeString("id")
+ anchor := anchori.([]byte)
+
+ err := hr.RenderHeading(
+ w,
+ headingContext{
+ page: ctx.DocumentContext().Document,
+ level: n.Level,
+ anchor: string(anchor),
+ text: hstring.RenderedString(text),
+ plainText: string(n.Text(source)),
+ AttributesHolder: attributes.New(n.Attributes(), attributes.AttributesOwnerGeneral),
+ },
+ )
+
+ ctx.AddIdentity(hr)
+
+ return ast.WalkContinue, err
+}
+
+func (r *hookedRenderer) renderHeadingDefault(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
+ n := node.(*ast.Heading)
+ if entering {
+ _, _ = w.WriteString("<h")
+ _ = w.WriteByte("0123456"[n.Level])
+ if n.Attributes() != nil {
+ attributes.RenderASTAttributes(w, node.Attributes()...)
+ }
+ _ = w.WriteByte('>')
+ } else {
+ _, _ = w.WriteString("</h")
+ _ = w.WriteByte("0123456"[n.Level])
+ _, _ = w.WriteString(">\n")
+ }
+ return ast.WalkContinue, nil
+}
+
+type links struct {
+ cfg goldmark_config.Config
+}
+
+// Extend implements goldmark.Extender.
+func (e *links) Extend(m goldmark.Markdown) {
+ m.Renderer().AddOptions(renderer.WithNodeRenderers(
+ util.Prioritized(newLinkRenderer(e.cfg), 100),
+ ))
+}
diff --git a/markup/goldmark/toc.go b/markup/goldmark/toc.go
new file mode 100644
index 000000000..396c1d071
--- /dev/null
+++ b/markup/goldmark/toc.go
@@ -0,0 +1,128 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package goldmark
+
+import (
+ "bytes"
+
+ "github.com/gohugoio/hugo/markup/tableofcontents"
+
+ "github.com/yuin/goldmark"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/renderer"
+ "github.com/yuin/goldmark/text"
+ "github.com/yuin/goldmark/util"
+)
+
+var (
+ tocResultKey = parser.NewContextKey()
+ tocEnableKey = parser.NewContextKey()
+)
+
+type tocTransformer struct {
+ r renderer.Renderer
+}
+
+func (t *tocTransformer) Transform(n *ast.Document, reader text.Reader, pc parser.Context) {
+ if b, ok := pc.Get(tocEnableKey).(bool); !ok || !b {
+ return
+ }
+
+ var (
+ toc tableofcontents.Root
+ tocHeading tableofcontents.Heading
+ level int
+ row = -1
+ inHeading bool
+ headingText bytes.Buffer
+ )
+
+ ast.Walk(n, func(n ast.Node, entering bool) (ast.WalkStatus, error) {
+ s := ast.WalkStatus(ast.WalkContinue)
+ if n.Kind() == ast.KindHeading {
+ if inHeading && !entering {
+ tocHeading.Text = headingText.String()
+ headingText.Reset()
+ toc.AddAt(tocHeading, row, level-1)
+ tocHeading = tableofcontents.Heading{}
+ inHeading = false
+ return s, nil
+ }
+
+ inHeading = true
+ }
+
+ if !(inHeading && entering) {
+ return s, nil
+ }
+
+ switch n.Kind() {
+ case ast.KindHeading:
+ heading := n.(*ast.Heading)
+ level = heading.Level
+
+ if level == 1 || row == -1 {
+ row++
+ }
+
+ id, found := heading.AttributeString("id")
+ if found {
+ tocHeading.ID = string(id.([]byte))
+ }
+ case
+ ast.KindCodeSpan,
+ ast.KindLink,
+ ast.KindImage,
+ ast.KindEmphasis:
+ err := t.r.Render(&headingText, reader.Source(), n)
+ if err != nil {
+ return s, err
+ }
+
+ return ast.WalkSkipChildren, nil
+ case
+ ast.KindAutoLink,
+ ast.KindRawHTML,
+ ast.KindText,
+ ast.KindString:
+ err := t.r.Render(&headingText, reader.Source(), n)
+ if err != nil {
+ return s, err
+ }
+ }
+
+ return s, nil
+ })
+
+ pc.Set(tocResultKey, toc)
+}
+
+type tocExtension struct {
+ options []renderer.Option
+}
+
+func newTocExtension(options []renderer.Option) goldmark.Extender {
+ return &tocExtension{
+ options: options,
+ }
+}
+
+func (e *tocExtension) Extend(m goldmark.Markdown) {
+ r := goldmark.DefaultRenderer()
+ r.AddOptions(e.options...)
+ m.Parser().AddOptions(parser.WithASTTransformers(util.Prioritized(&tocTransformer{
+ r: r,
+ }, 10)))
+}
diff --git a/markup/goldmark/toc_test.go b/markup/goldmark/toc_test.go
new file mode 100644
index 000000000..947f58a36
--- /dev/null
+++ b/markup/goldmark/toc_test.go
@@ -0,0 +1,137 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package goldmark converts Markdown to HTML using Goldmark.
+package goldmark
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/markup_config"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var nopGetRenderer = func(t hooks.RendererType, id any) any { return nil }
+
+func TestToc(t *testing.T) {
+ c := qt.New(t)
+
+ content := `
+# Header 1
+
+## First h2---now with typography!
+
+Some text.
+
+### H3
+
+Some more text.
+
+## Second h2
+
+And then some.
+
+### Second H3
+
+#### First H4
+
+`
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ MarkupConfig: markup_config.Default,
+ Logger: loggers.NewErrorLogger(),
+ })
+ c.Assert(err, qt.IsNil)
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ b, err := conv.Convert(converter.RenderContext{Src: []byte(content), RenderTOC: true, GetRenderer: nopGetRenderer})
+ c.Assert(err, qt.IsNil)
+ got := b.(converter.TableOfContentsProvider).TableOfContents().ToHTML(2, 3, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#first-h2---now-with-typography">First h2&mdash;now with typography!</a>
+ <ul>
+ <li><a href="#h3">H3</a></li>
+ </ul>
+ </li>
+ <li><a href="#second-h2">Second h2</a>
+ <ul>
+ <li><a href="#second-h3">Second H3</a></li>
+ </ul>
+ </li>
+ </ul>
+</nav>`, qt.Commentf(got))
+}
+
+func TestEscapeToc(t *testing.T) {
+ c := qt.New(t)
+
+ defaultConfig := markup_config.Default
+
+ safeConfig := defaultConfig
+ unsafeConfig := defaultConfig
+
+ safeConfig.Goldmark.Renderer.Unsafe = false
+ unsafeConfig.Goldmark.Renderer.Unsafe = true
+
+ safeP, _ := Provider.New(
+ converter.ProviderConfig{
+ MarkupConfig: safeConfig,
+ Logger: loggers.NewErrorLogger(),
+ })
+ unsafeP, _ := Provider.New(
+ converter.ProviderConfig{
+ MarkupConfig: unsafeConfig,
+ Logger: loggers.NewErrorLogger(),
+ })
+ safeConv, _ := safeP.New(converter.DocumentContext{})
+ unsafeConv, _ := unsafeP.New(converter.DocumentContext{})
+
+ content := strings.Join([]string{
+ "# A < B & C > D",
+ "# A < B & C > D <div>foo</div>",
+ "# *EMPHASIS*",
+ "# `echo codeblock`",
+ }, "\n")
+ // content := ""
+ b, err := safeConv.Convert(converter.RenderContext{Src: []byte(content), RenderTOC: true, GetRenderer: nopGetRenderer})
+ c.Assert(err, qt.IsNil)
+ got := b.(converter.TableOfContentsProvider).TableOfContents().ToHTML(1, 2, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#a--b--c--d">A &lt; B &amp; C &gt; D</a></li>
+ <li><a href="#a--b--c--d-divfoodiv">A &lt; B &amp; C &gt; D <!-- raw HTML omitted -->foo<!-- raw HTML omitted --></a></li>
+ <li><a href="#emphasis"><em>EMPHASIS</em></a></li>
+ <li><a href="#echo-codeblock"><code>echo codeblock</code></a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ b, err = unsafeConv.Convert(converter.RenderContext{Src: []byte(content), RenderTOC: true, GetRenderer: nopGetRenderer})
+ c.Assert(err, qt.IsNil)
+ got = b.(converter.TableOfContentsProvider).TableOfContents().ToHTML(1, 2, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#a--b--c--d">A &lt; B &amp; C &gt; D</a></li>
+ <li><a href="#a--b--c--d-divfoodiv">A &lt; B &amp; C &gt; D <div>foo</div></a></li>
+ <li><a href="#emphasis"><em>EMPHASIS</em></a></li>
+ <li><a href="#echo-codeblock"><code>echo codeblock</code></a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+}
diff --git a/markup/highlight/config.go b/markup/highlight/config.go
new file mode 100644
index 000000000..d55958d35
--- /dev/null
+++ b/markup/highlight/config.go
@@ -0,0 +1,292 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package highlight provides code highlighting.
+package highlight
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+
+ "github.com/mitchellh/mapstructure"
+)
+
+const (
+ lineanchorsKey = "lineanchors"
+ lineNosKey = "linenos"
+ hlLinesKey = "hl_lines"
+ linosStartKey = "linenostart"
+ noHlKey = "nohl"
+)
+
+var DefaultConfig = Config{
+ // The highlighter style to use.
+ // See https://xyproto.github.io/splash/docs/all.html
+ Style: "monokai",
+ LineNoStart: 1,
+ CodeFences: true,
+ NoClasses: true,
+ LineNumbersInTable: true,
+ TabWidth: 4,
+}
+
+type Config struct {
+ Style string
+
+ CodeFences bool
+
+ // Use inline CSS styles.
+ NoClasses bool
+
+ // No highlighting.
+ NoHl bool
+
+ // When set, line numbers will be printed.
+ LineNos bool
+ LineNumbersInTable bool
+
+ // When set, add links to line numbers
+ AnchorLineNos bool
+ LineAnchors string
+
+ // Start the line numbers from this value (default is 1).
+ LineNoStart int
+
+ // A space separated list of line numbers, e.g. “3-8 10-20”.
+ Hl_Lines string
+
+ // If set, the markup will not be wrapped in any container.
+ Hl_inline bool
+
+ // A parsed and ready to use list of line ranges.
+ HL_lines_parsed [][2]int `json:"-"`
+
+ // TabWidth sets the number of characters for a tab. Defaults to 4.
+ TabWidth int
+
+ GuessSyntax bool
+}
+
+func (cfg Config) ToHTMLOptions() []html.Option {
+ var lineAnchors string
+ if cfg.LineAnchors != "" {
+ lineAnchors = cfg.LineAnchors + "-"
+ }
+ options := []html.Option{
+ html.TabWidth(cfg.TabWidth),
+ html.WithLineNumbers(cfg.LineNos),
+ html.BaseLineNumber(cfg.LineNoStart),
+ html.LineNumbersInTable(cfg.LineNumbersInTable),
+ html.WithClasses(!cfg.NoClasses),
+ html.LinkableLineNumbers(cfg.AnchorLineNos, lineAnchors),
+ html.InlineCode(cfg.Hl_inline),
+ }
+
+ if cfg.Hl_Lines != "" || cfg.HL_lines_parsed != nil {
+ var ranges [][2]int
+ if cfg.HL_lines_parsed != nil {
+ ranges = cfg.HL_lines_parsed
+ } else {
+ var err error
+ ranges, err = hlLinesToRanges(cfg.LineNoStart, cfg.Hl_Lines)
+ if err != nil {
+ ranges = nil
+ }
+ }
+
+ if ranges != nil {
+ options = append(options, html.HighlightLines(ranges))
+ }
+ }
+
+ return options
+}
+
+func applyOptions(opts any, cfg *Config) error {
+ if opts == nil {
+ return nil
+ }
+ switch vv := opts.(type) {
+ case map[string]any:
+ return applyOptionsFromMap(vv, cfg)
+ default:
+ s, err := cast.ToStringE(opts)
+ if err != nil {
+ return err
+ }
+ return applyOptionsFromString(s, cfg)
+ }
+}
+
+func applyOptionsFromString(opts string, cfg *Config) error {
+ optsm, err := parseHightlightOptions(opts)
+ if err != nil {
+ return err
+ }
+ return mapstructure.WeakDecode(optsm, cfg)
+}
+
+func applyOptionsFromMap(optsm map[string]any, cfg *Config) error {
+ normalizeHighlightOptions(optsm)
+ return mapstructure.WeakDecode(optsm, cfg)
+}
+
+func applyOptionsFromCodeBlockContext(ctx hooks.CodeblockContext, cfg *Config) error {
+ if cfg.LineAnchors == "" {
+ const lineAnchorPrefix = "hl-"
+ // Set it to the ordinal with a prefix.
+ cfg.LineAnchors = fmt.Sprintf("%s%d", lineAnchorPrefix, ctx.Ordinal())
+ }
+
+ return nil
+}
+
+// ApplyLegacyConfig applies legacy config from back when we had
+// Pygments.
+func ApplyLegacyConfig(cfg config.Provider, conf *Config) error {
+ if conf.Style == DefaultConfig.Style {
+ if s := cfg.GetString("pygmentsStyle"); s != "" {
+ conf.Style = s
+ }
+ }
+
+ if conf.NoClasses == DefaultConfig.NoClasses && cfg.IsSet("pygmentsUseClasses") {
+ conf.NoClasses = !cfg.GetBool("pygmentsUseClasses")
+ }
+
+ if conf.CodeFences == DefaultConfig.CodeFences && cfg.IsSet("pygmentsCodeFences") {
+ conf.CodeFences = cfg.GetBool("pygmentsCodeFences")
+ }
+
+ if conf.GuessSyntax == DefaultConfig.GuessSyntax && cfg.IsSet("pygmentsCodefencesGuessSyntax") {
+ conf.GuessSyntax = cfg.GetBool("pygmentsCodefencesGuessSyntax")
+ }
+
+ if cfg.IsSet("pygmentsOptions") {
+ if err := applyOptionsFromString(cfg.GetString("pygmentsOptions"), conf); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func parseHightlightOptions(in string) (map[string]any, error) {
+ in = strings.Trim(in, " ")
+ opts := make(map[string]any)
+
+ if in == "" {
+ return opts, nil
+ }
+
+ for _, v := range strings.Split(in, ",") {
+ keyVal := strings.Split(v, "=")
+ key := strings.ToLower(strings.Trim(keyVal[0], " "))
+ if len(keyVal) != 2 {
+ return opts, fmt.Errorf("invalid Highlight option: %s", key)
+ }
+ opts[key] = keyVal[1]
+
+ }
+
+ normalizeHighlightOptions(opts)
+
+ return opts, nil
+}
+
+func normalizeHighlightOptions(m map[string]any) {
+ if m == nil {
+ return
+ }
+
+ baseLineNumber := 1
+ if v, ok := m[linosStartKey]; ok {
+ baseLineNumber = cast.ToInt(v)
+ }
+
+ for k, v := range m {
+ switch k {
+ case noHlKey:
+ m[noHlKey] = cast.ToBool(v)
+ case lineNosKey:
+ if v == "table" || v == "inline" {
+ m["lineNumbersInTable"] = v == "table"
+ }
+ if vs, ok := v.(string); ok {
+ m[k] = vs != "false"
+ }
+
+ case hlLinesKey:
+ if hlRanges, ok := v.([][2]int); ok {
+ for i := range hlRanges {
+ hlRanges[i][0] += baseLineNumber
+ hlRanges[i][1] += baseLineNumber
+ }
+ delete(m, k)
+ m[k+"_parsed"] = hlRanges
+ }
+ }
+ }
+}
+
+// startLine compensates for https://github.com/alecthomas/chroma/issues/30
+func hlLinesToRanges(startLine int, s string) ([][2]int, error) {
+ var ranges [][2]int
+ s = strings.TrimSpace(s)
+
+ if s == "" {
+ return ranges, nil
+ }
+
+ // Variants:
+ // 1 2 3 4
+ // 1-2 3-4
+ // 1-2 3
+ // 1 3-4
+ // 1 3-4
+ fields := strings.Split(s, " ")
+ for _, field := range fields {
+ field = strings.TrimSpace(field)
+ if field == "" {
+ continue
+ }
+ numbers := strings.Split(field, "-")
+ var r [2]int
+ first, err := strconv.Atoi(numbers[0])
+ if err != nil {
+ return ranges, err
+ }
+ first = first + startLine - 1
+ r[0] = first
+ if len(numbers) > 1 {
+ second, err := strconv.Atoi(numbers[1])
+ if err != nil {
+ return ranges, err
+ }
+ second = second + startLine - 1
+ r[1] = second
+ } else {
+ r[1] = first
+ }
+
+ ranges = append(ranges, r)
+ }
+ return ranges, nil
+}
diff --git a/markup/highlight/config_test.go b/markup/highlight/config_test.go
new file mode 100644
index 000000000..ab92ecf36
--- /dev/null
+++ b/markup/highlight/config_test.go
@@ -0,0 +1,56 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package highlight provides code highlighting.
+package highlight
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+)
+
+// TestConfig covers decoding of the legacy pygments* settings and parsing of
+// Chroma option strings into a highlight Config.
+func TestConfig(t *testing.T) {
+	c := qt.New(t)
+
+	c.Run("applyLegacyConfig", func(c *qt.C) {
+		v := config.New()
+		v.Set("pygmentsStyle", "hugo")
+		v.Set("pygmentsUseClasses", false)
+		v.Set("pygmentsCodeFences", false)
+		v.Set("pygmentsOptions", "linenos=inline")
+
+		cfg := DefaultConfig
+		err := ApplyLegacyConfig(v, &cfg)
+		c.Assert(err, qt.IsNil)
+		c.Assert(cfg.Style, qt.Equals, "hugo")
+		// pygmentsUseClasses=false maps to NoClasses=true (inverted meaning).
+		c.Assert(cfg.NoClasses, qt.Equals, true)
+		c.Assert(cfg.CodeFences, qt.Equals, false)
+		c.Assert(cfg.LineNos, qt.Equals, true)
+		c.Assert(cfg.LineNumbersInTable, qt.Equals, false)
+	})
+
+	c.Run("parseOptions", func(c *qt.C) {
+		cfg := DefaultConfig
+		opts := "noclasses=true,linenos=inline,linenostart=32,hl_lines=3-8 10-20"
+		err := applyOptionsFromString(opts, &cfg)
+
+		c.Assert(err, qt.IsNil)
+		c.Assert(cfg.NoClasses, qt.Equals, true)
+		c.Assert(cfg.LineNos, qt.Equals, true)
+		c.Assert(cfg.LineNumbersInTable, qt.Equals, false)
+		c.Assert(cfg.LineNoStart, qt.Equals, 32)
+		c.Assert(cfg.Hl_Lines, qt.Equals, "3-8 10-20")
+	})
+}
diff --git a/markup/highlight/highlight.go b/markup/highlight/highlight.go
new file mode 100644
index 000000000..5b19d6e8e
--- /dev/null
+++ b/markup/highlight/highlight.go
@@ -0,0 +1,363 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package highlight
+
+import (
+ "fmt"
+ gohtml "html"
+ "html/template"
+ "io"
+ "strings"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/alecthomas/chroma/v2/styles"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/internal/attributes"
+)
+
+// Markdown attributes used by the Chroma highlighter.
+var chromaHightlightProcessingAttributes = map[string]bool{
+	"anchorLineNos":      true,
+	"guessSyntax":        true,
+	"hl_Lines":           true,
+	"lineAnchors":        true,
+	"lineNos":            true,
+	"lineNoStart":        true,
+	"lineNumbersInTable": true,
+	"noClasses":          true,
+	"style":              true,
+	"tabWidth":           true,
+}
+
+// Also register the lowercase form of every attribute name, so membership
+// checks can be done on lowercased input.
+func init() {
+	for k, v := range chromaHightlightProcessingAttributes {
+		chromaHightlightProcessingAttributes[strings.ToLower(k)] = v
+	}
+}
+
+// New creates a Highlighter with the given configuration.
+func New(cfg Config) Highlighter {
+	return chromaHighlighter{
+		cfg: cfg,
+	}
+}
+
+// Highlighter is implemented by the Chroma based code highlighter. It
+// highlights standalone code snippets and fenced code blocks, and also acts
+// as the default code block renderer.
+type Highlighter interface {
+	Highlight(code, lang string, opts any) (string, error)
+	HighlightCodeBlock(ctx hooks.CodeblockContext, opts any) (HightlightResult, error)
+	hooks.CodeBlockRenderer
+	hooks.IsDefaultCodeBlockRendererProvider
+}
+
+// chromaHighlighter implements Highlighter on top of the Chroma library.
+type chromaHighlighter struct {
+	cfg Config
+}
+
+// Highlight returns code highlighted as lang, with opts applied on top of
+// the configured defaults via applyOptions.
+func (h chromaHighlighter) Highlight(code, lang string, opts any) (string, error) {
+	// Work on a copy so per-call options don't mutate the shared config.
+	cfg := h.cfg
+	if err := applyOptions(opts, &cfg); err != nil {
+		return "", err
+	}
+	var b strings.Builder
+
+	if _, _, err := highlight(&b, code, lang, nil, cfg); err != nil {
+		return "", err
+	}
+
+	return b.String(), nil
+}
+
+// HighlightCodeBlock highlights a fenced code block and returns the result
+// together with the byte offsets of the inner (unwrapped) content. Option
+// sources are applied in order: the code block's option map, then opts, then
+// the code block context.
+func (h chromaHighlighter) HighlightCodeBlock(ctx hooks.CodeblockContext, opts any) (HightlightResult, error) {
+	cfg := h.cfg
+
+	var b strings.Builder
+
+	attributes := ctx.(hooks.AttributesOptionsSliceProvider).AttributesSlice()
+
+	options := ctx.Options()
+
+	if err := applyOptionsFromMap(options, &cfg); err != nil {
+		return HightlightResult{}, err
+	}
+
+	// Apply these last so the user can override them.
+	if err := applyOptions(opts, &cfg); err != nil {
+		return HightlightResult{}, err
+	}
+
+	if err := applyOptionsFromCodeBlockContext(ctx, &cfg); err != nil {
+		return HightlightResult{}, err
+	}
+
+	low, high, err := highlight(&b, ctx.Inner(), ctx.Type(), attributes, cfg)
+	if err != nil {
+		return HightlightResult{}, err
+	}
+
+	highlighted := b.String()
+	if high == 0 {
+		// No inner offsets were tracked; treat the whole output as inner.
+		high = len(highlighted)
+	}
+
+	return HightlightResult{
+		highlighted: template.HTML(highlighted),
+		innerLow:    low,
+		innerHigh:   high,
+	}, nil
+}
+
+// RenderCodeblock implements hooks.CodeBlockRenderer, writing the highlighted
+// code block directly to w.
+func (h chromaHighlighter) RenderCodeblock(w hugio.FlexiWriter, ctx hooks.CodeblockContext) error {
+	cfg := h.cfg
+
+	attributes := ctx.(hooks.AttributesOptionsSliceProvider).AttributesSlice()
+
+	if err := applyOptionsFromMap(ctx.Options(), &cfg); err != nil {
+		return err
+	}
+
+	if err := applyOptionsFromCodeBlockContext(ctx, &cfg); err != nil {
+		return err
+	}
+
+	code := text.Puts(ctx.Inner())
+
+	_, _, err := highlight(w, code, ctx.Type(), attributes, cfg)
+	return err
+}
+
+// IsDefaultCodeBlockRenderer marks this highlighter as the fallback code
+// block renderer.
+func (h chromaHighlighter) IsDefaultCodeBlockRenderer() bool {
+	return true
+}
+
+// id is the identity of the Chroma highlighter.
+var id = identity.NewPathIdentity("chroma", "highlight")
+
+// GetIdentity returns the highlighter's identity.
+func (h chromaHighlighter) GetIdentity() identity.Identity {
+	return id
+}
+
+// HightlightResult holds the result of a code block highlighting, with
+// [innerLow, innerHigh) byte offsets delimiting the inner (unwrapped) part.
+// (Note: the "Hightlight" spelling is part of the public API.)
+type HightlightResult struct {
+	innerLow    int
+	innerHigh   int
+	highlighted template.HTML
+}
+
+// Wrapped returns the highlighted code including its wrapping markup.
+func (h HightlightResult) Wrapped() template.HTML {
+	return h.highlighted
+}
+
+// Inner returns only the highlighted code between the wrapper boundaries.
+func (h HightlightResult) Inner() template.HTML {
+	return h.highlighted[h.innerLow:h.innerHigh]
+}
+
+// highlight writes code highlighted as lang to fw, rendering attributes on
+// the wrapping div. It returns the [low, high) byte offsets of the inner
+// content when a preWrapper was used, otherwise (0, 0).
+func highlight(fw hugio.FlexiWriter, code, lang string, attributes []attributes.Attribute, cfg Config) (int, int, error) {
+	var lexer chroma.Lexer
+	if lang != "" {
+		lexer = lexers.Get(lang)
+	}
+
+	// Optionally guess the language when none was given or recognized.
+	if lexer == nil && (cfg.GuessSyntax && !cfg.NoHl) {
+		lexer = lexers.Analyse(code)
+		if lexer == nil {
+			lexer = lexers.Fallback
+		}
+		lang = strings.ToLower(lexer.Config().Name)
+	}
+
+	w := &byteCountFlexiWriter{delegate: fw}
+
+	if lexer == nil {
+		// No lexer: emit HTML-escaped plain code without highlighting.
+		if cfg.Hl_inline {
+			// Was fmt.Fprint(w, fmt.Sprintf(...)); Fprintf avoids the
+			// intermediate string (gosimple S1038).
+			fmt.Fprintf(w, "<code%s>%s</code>", inlineCodeAttrs(lang), gohtml.EscapeString(code))
+		} else {
+			preWrapper := getPreWrapper(lang, w)
+			fmt.Fprint(w, preWrapper.Start(true, ""))
+			fmt.Fprint(w, gohtml.EscapeString(code))
+			fmt.Fprint(w, preWrapper.End(true))
+		}
+		return 0, 0, nil
+	}
+
+	style := styles.Get(cfg.Style)
+	if style == nil {
+		style = styles.Fallback
+	}
+	lexer = chroma.Coalesce(lexer)
+
+	iterator, err := lexer.Tokenise(nil, code)
+	if err != nil {
+		return 0, 0, err
+	}
+
+	if !cfg.Hl_inline {
+		writeDivStart(w, attributes)
+	}
+
+	options := cfg.ToHTMLOptions()
+	var wrapper html.PreWrapper
+
+	if cfg.Hl_inline {
+		// Inline mode wraps in a bare <code> element instead of <pre><code>.
+		wrapper = startEnd{
+			start: func(code bool, styleAttr string) string {
+				if code {
+					return fmt.Sprintf(`<code%s>`, inlineCodeAttrs(lang))
+				}
+				return ``
+			},
+			end: func(code bool) string {
+				if code {
+					return `</code>`
+				}
+
+				return ``
+			},
+		}
+
+	} else {
+		wrapper = getPreWrapper(lang, w)
+	}
+
+	options = append(options, html.WithPreWrapper(wrapper))
+
+	formatter := html.New(options...)
+
+	if err := formatter.Format(w, style, iterator); err != nil {
+		return 0, 0, err
+	}
+
+	if !cfg.Hl_inline {
+		writeDivEnd(w)
+	}
+
+	// Only the preWrapper tracks inner content offsets.
+	if p, ok := wrapper.(*preWrapper); ok {
+		return p.low, p.high, nil
+	}
+
+	return 0, 0, nil
+}
+
+// getPreWrapper returns a preWrapper that records, via writeCounter, the byte
+// offsets of the content between the <pre><code> start and end tags.
+func getPreWrapper(language string, writeCounter *byteCountFlexiWriter) *preWrapper {
+	return &preWrapper{language: language, writeCounter: writeCounter}
+}
+
+// preWrapper is a Chroma html.PreWrapper emitting Hugo's <pre><code> markup
+// while tracking the low/high byte offsets of the inner content.
+type preWrapper struct {
+	low          int
+	high         int
+	writeCounter *byteCountFlexiWriter
+	language     string
+}
+
+// Start returns the opening <pre><code> markup and records the offset at
+// which the inner content will begin.
+func (p *preWrapper) Start(code bool, styleAttr string) string {
+	var language string
+	if code {
+		language = p.language
+	}
+	w := &strings.Builder{}
+	WritePreStart(w, language, styleAttr)
+	// Inner content starts after what has been written so far plus this tag.
+	p.low = p.writeCounter.counter + w.Len()
+	return w.String()
+}
+
+// inlineCodeAttrs returns the class attribute for an inline highlighted
+// <code> element. The original contained an empty `if lang == "" {}` branch
+// (dead code, staticcheck SA9003); it has been removed with no behavior
+// change. If an empty language needs special-casing, that branch must be
+// reintroduced with a real body.
+func inlineCodeAttrs(lang string) string {
+	return fmt.Sprintf(` class="code-inline language-%s"`, lang)
+}
+
+// WritePreStart writes the opening <pre tabindex="0"><code ...> markup to w,
+// adding language class and data-lang attributes when language is non-empty.
+func WritePreStart(w io.Writer, language, styleAttr string) {
+	fmt.Fprintf(w, `<pre tabindex="0"%s>`, styleAttr)
+	fmt.Fprint(w, "<code")
+	if language != "" {
+		fmt.Fprint(w, ` class="language-`+language+`"`)
+		fmt.Fprint(w, ` data-lang="`+language+`"`)
+	}
+	fmt.Fprint(w, ">")
+}
+
+// preEnd closes the markup opened by WritePreStart.
+const preEnd = "</code></pre>"
+
+// End records the offset where the inner content ends and returns the
+// closing markup.
+func (p *preWrapper) End(code bool) string {
+	p.high = p.writeCounter.counter
+	return preEnd
+}
+
+// startEnd is an html.PreWrapper defined by two callbacks; used for the
+// hl_inline rendering where no <pre> wrapper is wanted.
+type startEnd struct {
+	start func(code bool, styleAttr string) string
+	end   func(code bool) string
+}
+
+// Start delegates to the start callback.
+func (s startEnd) Start(code bool, styleAttr string) string {
+	return s.start(code, styleAttr)
+}
+
+// End delegates to the end callback.
+func (s startEnd) End(code bool) string {
+	return s.end(code)
+}
+
+// WritePreEnd writes the closing </code></pre> markup to w.
+func WritePreEnd(w io.Writer) {
+	fmt.Fprint(w, preEnd)
+}
+
+// writeDivStart writes the opening <div class="highlight ..."> wrapper to w,
+// merging any user supplied class attribute into the class list and rendering
+// the remaining attributes (class skipped) after it. Write errors are
+// deliberately discarded, now uniformly with `_, _ =` (the original mixed
+// bare and explicitly-discarded calls).
+func writeDivStart(w hugio.FlexiWriter, attrs []attributes.Attribute) {
+	_, _ = w.WriteString(`<div class="highlight`)
+	if attrs != nil {
+		// Append the first user provided class, if any, to the class list.
+		for _, attr := range attrs {
+			if attr.Name == "class" {
+				_, _ = w.WriteString(" " + attr.ValueString())
+				break
+			}
+		}
+		_, _ = w.WriteString("\"")
+		attributes.RenderAttributes(w, true, attrs...)
+	} else {
+		_, _ = w.WriteString("\"")
+	}
+
+	_, _ = w.WriteString(">")
+}
+
+// writeDivEnd closes the wrapper opened by writeDivStart.
+func writeDivEnd(w hugio.FlexiWriter) {
+	_, _ = w.WriteString("</div>")
+}
+
+// byteCountFlexiWriter wraps a FlexiWriter and counts the bytes written, so
+// preWrapper can compute byte offsets into the produced output.
+type byteCountFlexiWriter struct {
+	delegate hugio.FlexiWriter
+	counter  int
+}
+
+func (w *byteCountFlexiWriter) Write(p []byte) (int, error) {
+	n, err := w.delegate.Write(p)
+	w.counter += n
+	return n, err
+}
+
+// WriteByte counts the byte even if the delegate write fails.
+func (w *byteCountFlexiWriter) WriteByte(c byte) error {
+	w.counter++
+	return w.delegate.WriteByte(c)
+}
+
+func (w *byteCountFlexiWriter) WriteString(s string) (int, error) {
+	n, err := w.delegate.WriteString(s)
+	w.counter += n
+	return n, err
+}
+
+func (w *byteCountFlexiWriter) WriteRune(r rune) (int, error) {
+	n, err := w.delegate.WriteRune(r)
+	w.counter += n
+	return n, err
+}
diff --git a/markup/highlight/highlight_test.go b/markup/highlight/highlight_test.go
new file mode 100644
index 000000000..53e53b1d9
--- /dev/null
+++ b/markup/highlight/highlight_test.go
@@ -0,0 +1,149 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package highlight provides code highlighting.
+package highlight
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestHighlight exercises the Chroma highlighter: basic output, line number
+// modes, highlighted line ranges, syntax guessing and HTML escaping.
+func TestHighlight(t *testing.T) {
+	c := qt.New(t)
+
+	lines := `LINE1
+LINE2
+LINE3
+LINE4
+LINE5
+`
+	coalesceNeeded := `GET /foo HTTP/1.1
+Content-Type: application/json
+User-Agent: foo
+
+{
+	"hello": "world"
+}`
+
+	c.Run("Basic", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		h := New(cfg)
+
+		result, _ := h.Highlight(`echo "Hugo Rocks!"`, "bash", "")
+		c.Assert(result, qt.Equals, `<div class="highlight"><pre tabindex="0" class="chroma"><code class="language-bash" data-lang="bash"><span class="line"><span class="cl"><span class="nb">echo</span> <span class="s2">&#34;Hugo Rocks!&#34;</span></span></span></code></pre></div>`)
+		result, _ = h.Highlight(`echo "Hugo Rocks!"`, "unknown", "")
+		c.Assert(result, qt.Equals, `<pre tabindex="0"><code class="language-unknown" data-lang="unknown">echo &#34;Hugo Rocks!&#34;</code></pre>`)
+	})
+
+	c.Run("Highlight lines, default config", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		h := New(cfg)
+
+		result, _ := h.Highlight(lines, "bash", "linenos=table,hl_lines=2 4-5,linenostart=3")
+		c.Assert(result, qt.Contains, "<div class=\"highlight\"><div class=\"chroma\">\n<table class=\"lntable\"><tr><td class=\"lntd\">\n<pre tabindex=\"0\" class=\"chroma\"><code><span class")
+		c.Assert(result, qt.Contains, "<span class=\"hl\"><span class=\"lnt\">4")
+
+		result, _ = h.Highlight(lines, "bash", "linenos=inline,hl_lines=2")
+		c.Assert(result, qt.Contains, "<span class=\"ln\">2</span><span class=\"cl\">LINE2\n</span></span>")
+		c.Assert(result, qt.Not(qt.Contains), "<table")
+
+		result, _ = h.Highlight(lines, "bash", "linenos=true,hl_lines=2")
+		c.Assert(result, qt.Contains, "<table")
+		c.Assert(result, qt.Contains, "<span class=\"hl\"><span class=\"lnt\">2\n</span>")
+	})
+
+	c.Run("Highlight lines, linenumbers default on", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		cfg.LineNos = true
+		h := New(cfg)
+
+		result, _ := h.Highlight(lines, "bash", "")
+		c.Assert(result, qt.Contains, "<span class=\"lnt\">2\n</span>")
+		result, _ = h.Highlight(lines, "bash", "linenos=false,hl_lines=2")
+		c.Assert(result, qt.Not(qt.Contains), "class=\"lnt\"")
+	})
+
+	c.Run("Highlight lines, linenumbers default on, anchorlinenumbers default on", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		cfg.LineNos = true
+		cfg.AnchorLineNos = true
+		h := New(cfg)
+
+		result, _ := h.Highlight(lines, "bash", "")
+		// From Chroma v0.8.2 this is linkable: https://github.com/alecthomas/chroma/commit/ab61726cdb54d5a98b6efe7ed76af6aa0698ab4a
+		c.Assert(result, qt.Contains, "<span class=\"lnt\" id=\"2\"><a style=\"outline: none; text-decoration:none; color:inherit\" href=\"#2\">2</a>\n</span>")
+		result, _ = h.Highlight(lines, "bash", "lineanchors=test")
+		result, _ = h.Highlight(lines, "bash", "anchorlinenos=false,hl_lines=2")
+		c.Assert(result, qt.Not(qt.Contains), "id=\"2\"")
+	})
+
+	c.Run("Highlight lines, linenumbers default on, linenumbers in table default off", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		cfg.LineNos = true
+		cfg.LineNumbersInTable = false
+		h := New(cfg)
+
+		result, _ := h.Highlight(lines, "bash", "")
+		c.Assert(result, qt.Contains, "<span class=\"cl\">LINE2\n</span></span>")
+		result, _ = h.Highlight(lines, "bash", "linenos=table")
+		c.Assert(result, qt.Contains, "<span class=\"lnt\">1\n</span>")
+	})
+
+	c.Run("No language", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		cfg.LineNos = true
+		h := New(cfg)
+
+		result, _ := h.Highlight(lines, "", "")
+		c.Assert(result, qt.Equals, "<pre tabindex=\"0\"><code>LINE1\nLINE2\nLINE3\nLINE4\nLINE5\n</code></pre>")
+	})
+
+	c.Run("No language, guess syntax", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		cfg.GuessSyntax = true
+		cfg.LineNos = true
+		cfg.LineNumbersInTable = false
+		h := New(cfg)
+
+		result, _ := h.Highlight(lines, "", "")
+		c.Assert(result, qt.Contains, "<span class=\"cl\">LINE2\n</span></span>")
+	})
+
+	c.Run("No language, Escape HTML string", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		h := New(cfg)
+
+		result, _ := h.Highlight("Escaping less-than in code block? <fail>", "", "")
+		c.Assert(result, qt.Contains, "&lt;fail&gt;")
+	})
+
+	// Renamed from "Highlight lines, default config" — it duplicated the
+	// subtest name above, making failure output ambiguous.
+	c.Run("Highlight lines, default config, coalesce needed", func(c *qt.C) {
+		cfg := DefaultConfig
+		cfg.NoClasses = false
+		h := New(cfg)
+
+		result, _ := h.Highlight(coalesceNeeded, "http", "linenos=true,hl_lines=2")
+		c.Assert(result, qt.Contains, "hello")
+		c.Assert(result, qt.Contains, "}")
+	})
+}
diff --git a/markup/highlight/integration_test.go b/markup/highlight/integration_test.go
new file mode 100644
index 000000000..2b4379bc2
--- /dev/null
+++ b/markup/highlight/integration_test.go
@@ -0,0 +1,85 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package highlight_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+// TestHighlightInline tests inline highlighting (hl_inline) via the
+// highlight shortcode, code block attributes, a render-codeblock hook using
+// transform.HighlightCodeBlock, and an unknown lexer.
+func TestHighlightInline(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- config.toml --
+[markup]
+[markup.highlight]
+codeFences = true
+noClasses = false
+-- content/p1.md --
+---
+title: "p1"
+---
+
+## Inline in Shortcode
+
+Inline:{{< highlight emacs "hl_inline=true" >}}(message "this highlight shortcode"){{< /highlight >}}:End.
+Inline Unknown:{{< highlight foo "hl_inline=true" >}}(message "this highlight shortcode"){{< /highlight >}}:End.
+
+## Inline in code block
+
+Not sure if this makes sense, but add a test for it:
+
+§§§bash {hl_inline=true}
+(message "highlight me")
+§§§
+
+## HighlightCodeBlock in hook
+
+§§§html
+(message "highlight me 2")
+§§§
+
+## Unknown lexer
+
+§§§foo {hl_inline=true}
+(message "highlight me 3")
+§§§
+
+
+-- layouts/_default/_markup/render-codeblock-html.html --
+{{ $opts := dict "hl_inline" true }}
+{{ $result := transform.HighlightCodeBlock . $opts }}
+HighlightCodeBlock: Wrapped:{{ $result.Wrapped }}|Inner:{{ $result.Inner }}
+-- layouts/_default/single.html --
+{{ .Content }}
+`
+
+	b := hugolib.NewIntegrationTestBuilder(
+		hugolib.IntegrationTestConfig{
+			T:           t,
+			TxtarString: files,
+			NeedsOsFS:   false,
+		},
+	).Build()
+
+	b.AssertFileContent("public/p1/index.html",
+		"Inline:<code class=\"code-inline language-emacs\"><span class=\"p\">(</span><span class=\"nf\">message</span> <span class=\"s\">&#34;this highlight shortcode&#34;</span><span class=\"p\">)</span></code>:End.",
+		"Inline Unknown:<code class=\"code-inline language-foo\">(message &#34;this highlight shortcode&#34;)</code>:End.",
+		"Not sure if this makes sense, but add a test for it:</p>\n<code class=\"code-inline language-bash\"><span class=\"o\">(</span>message <span class=\"s2\">&#34;highlight me&#34;</span><span class=\"o\">)</span>\n</code>",
+		"HighlightCodeBlock: Wrapped:<code class=\"code-inline language-html\">(message &#34;highlight me 2&#34;)</code>|Inner:<code class=\"code-inline language-html\">(message &#34;highlight me 2&#34;)</code>",
+		"<code class=\"code-inline language-foo\">(message &#34;highlight me 3&#34;)\n</code>",
+	)
+}
diff --git a/markup/internal/attributes/attributes.go b/markup/internal/attributes/attributes.go
new file mode 100644
index 000000000..688740983
--- /dev/null
+++ b/markup/internal/attributes/attributes.go
@@ -0,0 +1,221 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package attributes
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/spf13/cast"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/util"
+)
+
+// Markdown attributes used as options by the Chroma highlighter.
+var chromaHightlightProcessingAttributes = map[string]bool{
+	"anchorLineNos":      true,
+	"guessSyntax":        true,
+	"hl_Lines":           true,
+	"hl_inline":          true,
+	"lineAnchors":        true,
+	"lineNos":            true,
+	"lineNoStart":        true,
+	"lineNumbersInTable": true,
+	"noClasses":          true,
+	"nohl":               true,
+	"style":              true,
+	"tabWidth":           true,
+}
+
+// Also register the lowercase form of every attribute name, so membership
+// checks can be done on lowercased input.
+func init() {
+	for k, v := range chromaHightlightProcessingAttributes {
+		chromaHightlightProcessingAttributes[strings.ToLower(k)] = v
+	}
+}
+
+// AttributesOwnerType identifies what kind of element a set of attributes
+// belongs to; New uses it to decide whether Chroma option attributes are
+// split out from the plain attributes.
+type AttributesOwnerType int
+
+const (
+	// AttributesOwnerGeneral is for attributes on general elements.
+	AttributesOwnerGeneral AttributesOwnerType = iota
+	// AttributesOwnerCodeBlockChroma is for code blocks rendered by Chroma.
+	AttributesOwnerCodeBlockChroma
+	// AttributesOwnerCodeBlockCustom is for code blocks with a custom renderer.
+	AttributesOwnerCodeBlockCustom
+)
+
+// New creates an AttributesHolder from the given Goldmark AST attributes.
+// Names prefixed with "on" (event handlers) are dropped. When ownerType is
+// AttributesOwnerCodeBlockChroma, names recognized as Chroma options are
+// collected separately from the plain attributes.
+func New(astAttributes []ast.Attribute, ownerType AttributesOwnerType) *AttributesHolder {
+	var (
+		attrs []Attribute
+		opts  []Attribute
+	)
+	for _, v := range astAttributes {
+		nameLower := strings.ToLower(string(v.Name))
+		if strings.HasPrefix(string(nameLower), "on") {
+			continue
+		}
+		var vv any
+		switch vvv := v.Value.(type) {
+		case bool, float64:
+			vv = vvv
+		case []any:
+			// Highlight line number hlRanges. Entries are either single line
+			// numbers (float64) or "lhs-rhs" byte strings; both are converted
+			// to 0-based [lhs, rhs] pairs. Unparsable entries are skipped.
+			var hlRanges [][2]int
+			for _, l := range vvv {
+				if ln, ok := l.(float64); ok {
+					hlRanges = append(hlRanges, [2]int{int(ln) - 1, int(ln) - 1})
+				} else if rng, ok := l.([]uint8); ok {
+					// NOTE(review): the []byte(rng) conversion is redundant
+					// ([]uint8 and []byte are identical types).
+					slices := strings.Split(string([]byte(rng)), "-")
+					lhs, err := strconv.Atoi(slices[0])
+					if err != nil {
+						continue
+					}
+					rhs := lhs
+					if len(slices) > 1 {
+						rhs, err = strconv.Atoi(slices[1])
+						if err != nil {
+							continue
+						}
+					}
+					hlRanges = append(hlRanges, [2]int{lhs - 1, rhs - 1})
+				}
+			}
+			vv = hlRanges
+		case []byte:
+			// Note that we don't do any HTML escaping here.
+			// We used to do that, but that changed in #9558.
+			// Now it's up to the templates to decide.
+			vv = string(vvv)
+		default:
+			panic(fmt.Sprintf("not implemented: %T", vvv))
+		}
+
+		if ownerType == AttributesOwnerCodeBlockChroma && chromaHightlightProcessingAttributes[nameLower] {
+			// Chroma option: keep the original (possibly mixed case) name.
+			attr := Attribute{Name: string(v.Name), Value: vv}
+			opts = append(opts, attr)
+		} else {
+			attr := Attribute{Name: nameLower, Value: vv}
+			attrs = append(attrs, attr)
+		}
+
+	}
+
+	return &AttributesHolder{
+		attributes: attrs,
+		options:    opts,
+	}
+}
+
+// Attribute is a single name/value attribute parsed from markdown.
+type Attribute struct {
+	Name  string
+	Value any
+}
+
+// ValueString returns the attribute value coerced to a string.
+func (a Attribute) ValueString() string {
+	return cast.ToString(a.Value)
+}
+
+// AttributesHolder holds a node's attributes, split into plain attributes
+// and (for Chroma code blocks) highlighting options, in both slice and
+// lazily-built map form.
+type AttributesHolder struct {
+	// What we get from Goldmark.
+	attributes []Attribute
+
+	// Attributes considered to be an option (code blocks)
+	options []Attribute
+
+	// What we send to the render hooks.
+	attributesMapInit sync.Once
+	attributesMap     map[string]any
+	optionsMapInit    sync.Once
+	optionsMap        map[string]any
+}
+
+// Attributes is the map form of attributes sent to the render hooks.
+type Attributes map[string]any
+
+// Attributes returns the plain attributes as a map, built once on first use.
+func (a *AttributesHolder) Attributes() map[string]any {
+	a.attributesMapInit.Do(func() {
+		a.attributesMap = make(map[string]any)
+		for _, v := range a.attributes {
+			a.attributesMap[v.Name] = v.Value
+		}
+	})
+	return a.attributesMap
+}
+
+// Options returns the option attributes as a map, built once on first use.
+func (a *AttributesHolder) Options() map[string]any {
+	a.optionsMapInit.Do(func() {
+		a.optionsMap = make(map[string]any)
+		for _, v := range a.options {
+			a.optionsMap[v.Name] = v.Value
+		}
+	})
+	return a.optionsMap
+}
+
+// AttributesSlice returns the plain attributes in their original order.
+func (a *AttributesHolder) AttributesSlice() []Attribute {
+	return a.attributes
+}
+
+// OptionsSlice returns the option attributes in their original order.
+func (a *AttributesHolder) OptionsSlice() []Attribute {
+	return a.options
+}
+
+// RenderASTAttributes writes the AST attributes to the given writer as
+// attributes of an HTML element. This is used by the default HTML renderers,
+// e.g. for headings etc. where no hook template could be found.
+// []byte values are HTML escaped; other value types are written via
+// cast.ToString without escaping. Names prefixed "on" are skipped.
+func RenderASTAttributes(w hugio.FlexiWriter, attributes ...ast.Attribute) {
+	for _, attr := range attributes {
+
+		a := strings.ToLower(string(attr.Name))
+		if strings.HasPrefix(a, "on") {
+			continue
+		}
+
+		_, _ = w.WriteString(" ")
+		_, _ = w.Write(attr.Name)
+		_, _ = w.WriteString(`="`)
+
+		switch v := attr.Value.(type) {
+		case []byte:
+			_, _ = w.Write(util.EscapeHTML(v))
+		default:
+			w.WriteString(cast.ToString(v))
+		}
+
+		_ = w.WriteByte('"')
+	}
+}
+
+// RenderAttributes writes the attributes to the given writer as attributes
+// of an HTML element. This is used for the default codeblock rendering.
+// []byte values are HTML escaped; other value types are written via
+// cast.ToString without escaping. When skipClass is true the "class"
+// attribute is skipped (the caller merges it into its own class list).
+func RenderAttributes(w hugio.FlexiWriter, skipClass bool, attributes ...Attribute) {
+	for _, attr := range attributes {
+		// Attribute.Name is already a string; the original's string(...)
+		// conversion was redundant.
+		a := strings.ToLower(attr.Name)
+		if skipClass && a == "class" {
+			continue
+		}
+		_, _ = w.WriteString(" ")
+		_, _ = w.WriteString(attr.Name)
+		_, _ = w.WriteString(`="`)
+
+		switch v := attr.Value.(type) {
+		case []byte:
+			_, _ = w.Write(util.EscapeHTML(v))
+		default:
+			_, _ = w.WriteString(cast.ToString(v))
+		}
+
+		_ = w.WriteByte('"')
+	}
+}
diff --git a/markup/internal/external.go b/markup/internal/external.go
new file mode 100644
index 000000000..97cf5cc7d
--- /dev/null
+++ b/markup/internal/external.go
@@ -0,0 +1,71 @@
+package internal
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/markup/converter"
+)
+
+// ExternallyRenderContent renders content by piping it on stdin to the
+// external helper binaryName invoked with args, returning the helper's
+// stdout with carriage returns stripped. stderr lines are logged (warnings
+// on success, errors on failure). Note that a run error is only logged, not
+// returned: the (possibly partial) output is always returned with nil error.
+func ExternallyRenderContent(
+	cfg converter.ProviderConfig,
+	ctx converter.DocumentContext,
+	content []byte, binaryName string, args []string) ([]byte, error) {
+	logger := cfg.Logger
+
+	// binaryName must be a bare executable name, not a path.
+	if strings.Contains(binaryName, "/") {
+		panic(fmt.Sprintf("should be no slash in %q", binaryName))
+	}
+
+	argsv := collections.StringSliceToInterfaceSlice(args)
+
+	var out, cmderr bytes.Buffer
+	argsv = append(argsv, hexec.WithStdout(&out))
+	argsv = append(argsv, hexec.WithStderr(&cmderr))
+	argsv = append(argsv, hexec.WithStdin(bytes.NewReader(content)))
+
+	cmd, err := cfg.Exec.New(binaryName, argsv...)
+	if err != nil {
+		return nil, err
+	}
+
+	err = cmd.Run()
+
+	// Most external helpers exit w/ non-zero exit code only if severe, i.e.
+	// halting errors occurred. -> log stderr output regardless of state of err
+	for _, item := range strings.Split(cmderr.String(), "\n") {
+		item := strings.TrimSpace(item)
+		if item != "" {
+			if err == nil {
+				logger.Warnf("%s: %s", ctx.DocumentName, item)
+			} else {
+				logger.Errorf("%s: %s", ctx.DocumentName, item)
+			}
+		}
+	}
+
+	if err != nil {
+		logger.Errorf("%s rendering %s: %v", binaryName, ctx.DocumentName, err)
+	}
+
+	return normalizeExternalHelperLineFeeds(out.Bytes()), nil
+}
+
+// normalizeExternalHelperLineFeeds strips carriage returns emitted by
+// third-party / external processes (useful for Windows).
+func normalizeExternalHelperLineFeeds(content []byte) []byte {
+	// ReplaceAll is the documented equivalent of Replace with n == -1.
+	return bytes.ReplaceAll(content, []byte("\r"), []byte(""))
+}
+
+// pythonBinaryCandidates lists the Python executable names probed, in order.
+var pythonBinaryCandidates = []string{"python", "python.exe"}
+
+// GetPythonBinaryAndExecPath returns the first Python binary name found on
+// the PATH together with its resolved path, or two empty strings if none is
+// available.
+func GetPythonBinaryAndExecPath() (string, string) {
+	for _, p := range pythonBinaryCandidates {
+		if pth := hexec.LookPath(p); pth != "" {
+			return p, pth
+		}
+	}
+	return "", ""
+}
diff --git a/markup/markup.go b/markup/markup.go
new file mode 100644
index 000000000..1345867f9
--- /dev/null
+++ b/markup/markup.go
@@ -0,0 +1,132 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package markup
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/markup/highlight"
+
+ "github.com/gohugoio/hugo/markup/markup_config"
+
+ "github.com/gohugoio/hugo/markup/goldmark"
+
+ "github.com/gohugoio/hugo/markup/org"
+
+ "github.com/gohugoio/hugo/markup/asciidocext"
+ "github.com/gohugoio/hugo/markup/converter"
+ "github.com/gohugoio/hugo/markup/pandoc"
+ "github.com/gohugoio/hugo/markup/rst"
+)
+
+// NewConverterProvider creates a ConverterProvider holding all the supported
+// markup converters (Goldmark, AsciiDoc, reST, Pandoc, Org) keyed by name and
+// aliases. The configured DefaultMarkdownHandler additionally receives the
+// "markdown" alias; an unknown default handler is an error.
+func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, error) {
+	converters := make(map[string]converter.Provider)
+
+	markupConfig, err := markup_config.Decode(cfg.Cfg)
+	if err != nil {
+		return nil, err
+	}
+
+	if cfg.Highlighter == nil {
+		cfg.Highlighter = highlight.New(markupConfig.Highlight)
+	}
+
+	cfg.MarkupConfig = markupConfig
+	defaultHandler := cfg.MarkupConfig.DefaultMarkdownHandler
+	var defaultFound bool
+
+	// add registers a converter under its own name plus the given aliases.
+	add := func(p converter.ProviderProvider, aliases ...string) error {
+		c, err := p.New(cfg)
+		if err != nil {
+			return err
+		}
+
+		name := c.Name()
+
+		aliases = append(aliases, name)
+
+		if strings.EqualFold(name, defaultHandler) {
+			aliases = append(aliases, "markdown")
+			defaultFound = true
+		}
+
+		addConverter(converters, c, aliases...)
+		return nil
+	}
+
+	if err := add(goldmark.Provider); err != nil {
+		return nil, err
+	}
+	if err := add(asciidocext.Provider, "ad", "adoc"); err != nil {
+		return nil, err
+	}
+	if err := add(rst.Provider); err != nil {
+		return nil, err
+	}
+	if err := add(pandoc.Provider, "pdc"); err != nil {
+		return nil, err
+	}
+	if err := add(org.Provider); err != nil {
+		return nil, err
+	}
+
+	if !defaultFound {
+		msg := "markup: Configured defaultMarkdownHandler %q not found."
+		if defaultHandler == "blackfriday" {
+			msg += " Did you mean to use goldmark? Blackfriday was removed in Hugo v0.100.0."
+		}
+		return nil, fmt.Errorf(msg, defaultHandler)
+	}
+
+	return &converterRegistry{
+		config:     cfg,
+		converters: converters,
+	}, nil
+}
+
+// ConverterProvider looks up markup converters by name and exposes the
+// shared markup configuration and code highlighter.
+type ConverterProvider interface {
+	Get(name string) converter.Provider
+	// Default() converter.Provider
+	GetMarkupConfig() markup_config.Config
+	GetHighlighter() highlight.Highlighter
+}
+
+// converterRegistry is the default ConverterProvider implementation.
+type converterRegistry struct {
+	// Maps name (md, markdown, goldmark etc.) to a converter provider.
+	// Note that this is also used for aliasing, so the same converter
+	// may be registered multiple times.
+	// All names are lower case.
+	converters map[string]converter.Provider
+
+	config converter.ProviderConfig
+}
+
+// Get returns the converter registered under name (case-insensitive), or nil.
+func (r *converterRegistry) Get(name string) converter.Provider {
+	return r.converters[strings.ToLower(name)]
+}
+
+// GetHighlighter returns the shared code highlighter.
+func (r *converterRegistry) GetHighlighter() highlight.Highlighter {
+	return r.config.Highlighter
+}
+
+// GetMarkupConfig returns the decoded markup configuration.
+func (r *converterRegistry) GetMarkupConfig() markup_config.Config {
+	return r.config.MarkupConfig
+}
+
+// addConverter registers c in m under every given alias.
+func addConverter(m map[string]converter.Provider, c converter.Provider, aliases ...string) {
+	for _, alias := range aliases {
+		m[alias] = c
+	}
+}
diff --git a/markup/markup_config/config.go b/markup/markup_config/config.go
new file mode 100644
index 000000000..e254ba7a0
--- /dev/null
+++ b/markup/markup_config/config.go
@@ -0,0 +1,94 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package markup_config
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/docshelper"
+ "github.com/gohugoio/hugo/markup/asciidocext/asciidocext_config"
+ "github.com/gohugoio/hugo/markup/goldmark/goldmark_config"
+ "github.com/gohugoio/hugo/markup/highlight"
+ "github.com/gohugoio/hugo/markup/tableofcontents"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/mitchellh/mapstructure"
+)
+
+type Config struct {
+ // Default markdown handler for md/markdown extensions.
+ // Default is "goldmark".
+ // Before Hugo 0.60 this was "blackfriday".
+ DefaultMarkdownHandler string
+
+ Highlight highlight.Config
+ TableOfContents tableofcontents.Config
+
+ // Content renderers
+ Goldmark goldmark_config.Config
+ AsciidocExt asciidocext_config.Config
+}
+
+func Decode(cfg config.Provider) (conf Config, err error) {
+ conf = Default
+
+ m := cfg.GetStringMap("markup")
+ if m == nil {
+ return
+ }
+ normalizeConfig(m)
+
+ err = mapstructure.WeakDecode(m, &conf)
+ if err != nil {
+ return
+ }
+
+ if err = highlight.ApplyLegacyConfig(cfg, &conf.Highlight); err != nil {
+ return
+ }
+
+ return
+}
+
+func normalizeConfig(m map[string]any) {
+ v, err := maps.GetNestedParam("goldmark.parser", ".", m)
+ if err != nil {
+ return
+ }
+ vm := maps.ToStringMap(v)
+ // Changed from a bool in 0.81.0
+ if vv, found := vm["attribute"]; found {
+ if vvb, ok := vv.(bool); ok {
+ vm["attribute"] = goldmark_config.ParserAttribute{
+ Title: vvb,
+ }
+ }
+ }
+}
+
+var Default = Config{
+ DefaultMarkdownHandler: "goldmark",
+
+ TableOfContents: tableofcontents.DefaultConfig,
+ Highlight: highlight.DefaultConfig,
+
+ Goldmark: goldmark_config.Default,
+ AsciidocExt: asciidocext_config.Default,
+}
+
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+ return docshelper.DocProvider{"config": map[string]any{"markup": parser.LowerCaseCamelJSONMarshaller{Value: Default}}}
+ }
+ docshelper.AddDocProviderFunc(docsProvider)
+}
diff --git a/markup/markup_config/config_test.go b/markup/markup_config/config_test.go
new file mode 100644
index 000000000..a320e6912
--- /dev/null
+++ b/markup/markup_config/config_test.go
@@ -0,0 +1,55 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package markup_config
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestConfig(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("Decode", func(c *qt.C) {
+ c.Parallel()
+ v := config.New()
+
+ v.Set("markup", map[string]any{
+ "goldmark": map[string]any{
+ "renderer": map[string]any{
+ "unsafe": true,
+ },
+ },
+ "asciidocext": map[string]any{
+ "workingFolderCurrent": true,
+ "safeMode": "save",
+ "extensions": []string{"asciidoctor-html5s"},
+ },
+ })
+
+ conf, err := Decode(v)
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(conf.Goldmark.Renderer.Unsafe, qt.Equals, true)
+ c.Assert(conf.Goldmark.Parser.Attribute.Title, qt.Equals, true)
+ c.Assert(conf.Goldmark.Parser.Attribute.Block, qt.Equals, false)
+
+ c.Assert(conf.AsciidocExt.WorkingFolderCurrent, qt.Equals, true)
+ c.Assert(conf.AsciidocExt.Extensions[0], qt.Equals, "asciidoctor-html5s")
+ })
+
+}
diff --git a/markup/markup_test.go b/markup/markup_test.go
new file mode 100644
index 000000000..5ec27c45c
--- /dev/null
+++ b/markup/markup_test.go
@@ -0,0 +1,46 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package markup
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/markup/converter"
+)
+
+func TestConverterRegistry(t *testing.T) {
+ c := qt.New(t)
+
+ r, err := NewConverterProvider(converter.ProviderConfig{Cfg: config.New()})
+
+ c.Assert(err, qt.IsNil)
+ c.Assert("goldmark", qt.Equals, r.GetMarkupConfig().DefaultMarkdownHandler)
+
+ checkName := func(name string) {
+ p := r.Get(name)
+ c.Assert(p, qt.Not(qt.IsNil))
+ c.Assert(p.Name(), qt.Equals, name)
+ }
+
+ c.Assert(r.Get("foo"), qt.IsNil)
+ c.Assert(r.Get("markdown").Name(), qt.Equals, "goldmark")
+
+ checkName("goldmark")
+ checkName("asciidocext")
+ checkName("rst")
+ checkName("pandoc")
+ checkName("org")
+}
diff --git a/markup/org/convert.go b/markup/org/convert.go
new file mode 100644
index 000000000..603ec8f19
--- /dev/null
+++ b/markup/org/convert.go
@@ -0,0 +1,73 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package org converts Emacs Org-Mode to HTML.
+package org
+
+import (
+ "bytes"
+
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/gohugoio/hugo/markup/converter"
+ "github.com/niklasfasching/go-org/org"
+ "github.com/spf13/afero"
+)
+
+// Provider is the package entry point.
+var Provider converter.ProviderProvider = provide{}
+
+type provide struct{}
+
+func (p provide) New(cfg converter.ProviderConfig) (converter.Provider, error) {
+ return converter.NewProvider("org", func(ctx converter.DocumentContext) (converter.Converter, error) {
+ return &orgConverter{
+ ctx: ctx,
+ cfg: cfg,
+ }, nil
+ }), nil
+}
+
+type orgConverter struct {
+ ctx converter.DocumentContext
+ cfg converter.ProviderConfig
+}
+
+func (c *orgConverter) Convert(ctx converter.RenderContext) (converter.Result, error) {
+ logger := c.cfg.Logger
+ config := org.New()
+ config.Log = logger.Warn()
+ config.ReadFile = func(filename string) ([]byte, error) {
+ return afero.ReadFile(c.cfg.ContentFs, filename)
+ }
+ writer := org.NewHTMLWriter()
+ writer.HighlightCodeBlock = func(source, lang string, inline bool) string {
+ highlightedSource, err := c.cfg.Highlight(source, lang, "")
+ if err != nil {
+ logger.Errorf("Could not highlight source as lang %s. Using raw source.", lang)
+ return source
+ }
+ return highlightedSource
+ }
+
+ html, err := config.Parse(bytes.NewReader(ctx.Src), c.ctx.DocumentName).Write(writer)
+ if err != nil {
+ logger.Errorf("Could not render org: %s. Using unrendered content.", err)
+ return converter.Bytes(ctx.Src), nil
+ }
+ return converter.Bytes([]byte(html)), nil
+}
+
+func (c *orgConverter) Supports(feature identity.Identity) bool {
+ return false
+}
diff --git a/markup/org/convert_test.go b/markup/org/convert_test.go
new file mode 100644
index 000000000..e3676fc34
--- /dev/null
+++ b/markup/org/convert_test.go
@@ -0,0 +1,40 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestConvert(t *testing.T) {
+ c := qt.New(t)
+ p, err := Provider.New(converter.ProviderConfig{
+ Logger: loggers.NewErrorLogger(),
+ Cfg: config.New(),
+ })
+ c.Assert(err, qt.IsNil)
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ b, err := conv.Convert(converter.RenderContext{Src: []byte("testContent")})
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(b.Bytes()), qt.Equals, "<p>testContent</p>\n")
+}
diff --git a/markup/pandoc/convert.go b/markup/pandoc/convert.go
new file mode 100644
index 000000000..ae90cf417
--- /dev/null
+++ b/markup/pandoc/convert.go
@@ -0,0 +1,90 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package pandoc converts content to HTML using Pandoc as an external helper.
+package pandoc
+
+import (
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/internal"
+
+ "github.com/gohugoio/hugo/markup/converter"
+)
+
+// Provider is the package entry point.
+var Provider converter.ProviderProvider = provider{}
+
+type provider struct {
+}
+
+func (p provider) New(cfg converter.ProviderConfig) (converter.Provider, error) {
+ return converter.NewProvider("pandoc", func(ctx converter.DocumentContext) (converter.Converter, error) {
+ return &pandocConverter{
+ ctx: ctx,
+ cfg: cfg,
+ }, nil
+ }), nil
+}
+
+type pandocConverter struct {
+ ctx converter.DocumentContext
+ cfg converter.ProviderConfig
+}
+
+func (c *pandocConverter) Convert(ctx converter.RenderContext) (converter.Result, error) {
+ b, err := c.getPandocContent(ctx.Src, c.ctx)
+ if err != nil {
+ return nil, err
+ }
+ return converter.Bytes(b), nil
+}
+
+func (c *pandocConverter) Supports(feature identity.Identity) bool {
+ return false
+}
+
+// getPandocContent calls pandoc as an external helper to convert pandoc markdown to HTML.
+func (c *pandocConverter) getPandocContent(src []byte, ctx converter.DocumentContext) ([]byte, error) {
+ logger := c.cfg.Logger
+ binaryName := getPandocBinaryName()
+ if binaryName == "" {
+ logger.Println("pandoc not found in $PATH: Please install.\n",
+ " Leaving pandoc content unrendered.")
+ return src, nil
+ }
+ args := []string{"--mathjax"}
+ return internal.ExternallyRenderContent(c.cfg, ctx, src, binaryName, args)
+}
+
+const pandocBinary = "pandoc"
+
+func getPandocBinaryName() string {
+ if hexec.InPath(pandocBinary) {
+ return pandocBinary
+ }
+ return ""
+}
+
+// Supports returns whether Pandoc is installed on this computer.
+func Supports() bool {
+ hasBin := getPandocBinaryName() != ""
+ if htesting.SupportsAll() {
+ if !hasBin {
+ panic("pandoc not installed")
+ }
+ return true
+ }
+ return hasBin
+}
diff --git a/markup/pandoc/convert_test.go b/markup/pandoc/convert_test.go
new file mode 100644
index 000000000..f549d5f4f
--- /dev/null
+++ b/markup/pandoc/convert_test.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pandoc
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config/security"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestConvert(t *testing.T) {
+ if !Supports() {
+ t.Skip("pandoc not installed")
+ }
+ c := qt.New(t)
+ sc := security.DefaultConfig
+ sc.Exec.Allow = security.NewWhitelist("pandoc")
+ p, err := Provider.New(converter.ProviderConfig{Exec: hexec.New(sc), Logger: loggers.NewErrorLogger()})
+ c.Assert(err, qt.IsNil)
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ b, err := conv.Convert(converter.RenderContext{Src: []byte("testContent")})
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(b.Bytes()), qt.Equals, "<p>testContent</p>\n")
+}
diff --git a/markup/rst/convert.go b/markup/rst/convert.go
new file mode 100644
index 000000000..b86b35f1b
--- /dev/null
+++ b/markup/rst/convert.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package rst converts content to HTML using the RST external helper.
+package rst
+
+import (
+ "bytes"
+ "runtime"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/markup/internal"
+
+ "github.com/gohugoio/hugo/markup/converter"
+)
+
+// Provider is the package entry point.
+var Provider converter.ProviderProvider = provider{}
+
+type provider struct {
+}
+
+func (p provider) New(cfg converter.ProviderConfig) (converter.Provider, error) {
+ return converter.NewProvider("rst", func(ctx converter.DocumentContext) (converter.Converter, error) {
+ return &rstConverter{
+ ctx: ctx,
+ cfg: cfg,
+ }, nil
+ }), nil
+}
+
+type rstConverter struct {
+ ctx converter.DocumentContext
+ cfg converter.ProviderConfig
+}
+
+func (c *rstConverter) Convert(ctx converter.RenderContext) (converter.Result, error) {
+ b, err := c.getRstContent(ctx.Src, c.ctx)
+ if err != nil {
+ return nil, err
+ }
+ return converter.Bytes(b), nil
+}
+
+func (c *rstConverter) Supports(feature identity.Identity) bool {
+ return false
+}
+
+// getRstContent calls the Python script rst2html as an external helper
+// to convert reStructuredText content to HTML.
+func (c *rstConverter) getRstContent(src []byte, ctx converter.DocumentContext) ([]byte, error) {
+ logger := c.cfg.Logger
+ binaryName, binaryPath := getRstBinaryNameAndPath()
+
+ if binaryName == "" {
+ logger.Println("rst2html / rst2html.py not found in $PATH: Please install.\n",
+ " Leaving reStructuredText content unrendered.")
+ return src, nil
+ }
+
+ logger.Infoln("Rendering", ctx.DocumentName, "with", binaryName, "...")
+
+ var result []byte
+ var err error
+
+ // certain *nix based OSs wrap executables in scripted launchers
+ // invoking binaries on these OSs via python interpreter causes SyntaxError
+ // invoke directly so that shebangs work as expected
+ // handle Windows manually because it doesn't do shebangs
+ if runtime.GOOS == "windows" {
+ pythonBinary, _ := internal.GetPythonBinaryAndExecPath()
+ args := []string{binaryPath, "--leave-comments", "--initial-header-level=2"}
+ result, err = internal.ExternallyRenderContent(c.cfg, ctx, src, pythonBinary, args)
+ } else {
+ args := []string{"--leave-comments", "--initial-header-level=2"}
+ result, err = internal.ExternallyRenderContent(c.cfg, ctx, src, binaryName, args)
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ // TODO(bep) check if rst2html has a body only option.
+ bodyStart := bytes.Index(result, []byte("<body>\n"))
+ if bodyStart < 0 {
+ bodyStart = -7 // compensate for length
+ }
+
+ bodyEnd := bytes.Index(result, []byte("\n</body>"))
+ if bodyEnd < 0 || bodyEnd >= len(result) {
+ bodyEnd = len(result) - 1
+ if bodyEnd < 0 {
+ bodyEnd = 0
+ }
+ }
+
+ return result[bodyStart+7 : bodyEnd], err
+}
+
+var rst2Binaries = []string{"rst2html", "rst2html.py"}
+
+func getRstBinaryNameAndPath() (string, string) {
+ for _, candidate := range rst2Binaries {
+ if pth := hexec.LookPath(candidate); pth != "" {
+ return candidate, pth
+ }
+ }
+ return "", ""
+}
+
+// Supports returns whether rst is (or should be) installed on this computer.
+func Supports() bool {
+ name, _ := getRstBinaryNameAndPath()
+ hasBin := name != ""
+ if htesting.SupportsAll() {
+ if !hasBin {
+ panic("rst not installed")
+ }
+ return true
+ }
+ return hasBin
+}
diff --git a/markup/rst/convert_test.go b/markup/rst/convert_test.go
new file mode 100644
index 000000000..5d2882de1
--- /dev/null
+++ b/markup/rst/convert_test.go
@@ -0,0 +1,47 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package rst
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config/security"
+
+ "github.com/gohugoio/hugo/markup/converter"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestConvert(t *testing.T) {
+ if !Supports() {
+ t.Skip("rst not installed")
+ }
+ c := qt.New(t)
+ sc := security.DefaultConfig
+ sc.Exec.Allow = security.NewWhitelist("rst", "python")
+
+ p, err := Provider.New(
+ converter.ProviderConfig{
+ Logger: loggers.NewErrorLogger(),
+ Exec: hexec.New(sc),
+ })
+ c.Assert(err, qt.IsNil)
+ conv, err := p.New(converter.DocumentContext{})
+ c.Assert(err, qt.IsNil)
+ b, err := conv.Convert(converter.RenderContext{Src: []byte("testContent")})
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(b.Bytes()), qt.Equals, "<div class=\"document\">\n\n\n<p>testContent</p>\n</div>")
+}
diff --git a/markup/tableofcontents/tableofcontents.go b/markup/tableofcontents/tableofcontents.go
new file mode 100644
index 000000000..2e7f47d20
--- /dev/null
+++ b/markup/tableofcontents/tableofcontents.go
@@ -0,0 +1,170 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tableofcontents
+
+import (
+ "strings"
+)
+
+// Headings holds the top level headings.
+type Headings []Heading
+
+// Heading holds the data about a heading and its children.
+type Heading struct {
+ ID string
+ Text string
+
+ Headings Headings
+}
+
+// IsZero is true when no ID or Text is set.
+func (h Heading) IsZero() bool {
+ return h.ID == "" && h.Text == ""
+}
+
+// Root implements AddAt, which can be used to build the
+// data structure for the ToC.
+type Root struct {
+ Headings Headings
+}
+
+// AddAt adds the heading into the given location.
+func (toc *Root) AddAt(h Heading, row, level int) {
+ for i := len(toc.Headings); i <= row; i++ {
+ toc.Headings = append(toc.Headings, Heading{})
+ }
+
+ if level == 0 {
+ toc.Headings[row] = h
+ return
+ }
+
+ heading := &toc.Headings[row]
+
+ for i := 1; i < level; i++ {
+ if len(heading.Headings) == 0 {
+ heading.Headings = append(heading.Headings, Heading{})
+ }
+ heading = &heading.Headings[len(heading.Headings)-1]
+ }
+ heading.Headings = append(heading.Headings, h)
+}
+
+// ToHTML renders the ToC as HTML.
+func (toc Root) ToHTML(startLevel, stopLevel int, ordered bool) string {
+ b := &tocBuilder{
+ s: strings.Builder{},
+ h: toc.Headings,
+ startLevel: startLevel,
+ stopLevel: stopLevel,
+ ordered: ordered,
+ }
+ b.Build()
+ return b.s.String()
+}
+
+type tocBuilder struct {
+ s strings.Builder
+ h Headings
+
+ startLevel int
+ stopLevel int
+ ordered bool
+}
+
+func (b *tocBuilder) Build() {
+ b.writeNav(b.h)
+}
+
+func (b *tocBuilder) writeNav(h Headings) {
+ b.s.WriteString("<nav id=\"TableOfContents\">")
+ b.writeHeadings(1, 0, b.h)
+ b.s.WriteString("</nav>")
+}
+
+func (b *tocBuilder) writeHeadings(level, indent int, h Headings) {
+ if level < b.startLevel {
+ for _, h := range h {
+ b.writeHeadings(level+1, indent, h.Headings)
+ }
+ return
+ }
+
+ if b.stopLevel != -1 && level > b.stopLevel {
+ return
+ }
+
+ hasChildren := len(h) > 0
+
+ if hasChildren {
+ b.s.WriteString("\n")
+ b.indent(indent + 1)
+ if b.ordered {
+ b.s.WriteString("<ol>\n")
+ } else {
+ b.s.WriteString("<ul>\n")
+ }
+ }
+
+ for _, h := range h {
+ b.writeHeading(level+1, indent+2, h)
+ }
+
+ if hasChildren {
+ b.indent(indent + 1)
+ if b.ordered {
+ b.s.WriteString("</ol>")
+ } else {
+ b.s.WriteString("</ul>")
+ }
+ b.s.WriteString("\n")
+ b.indent(indent)
+ }
+}
+
+func (b *tocBuilder) writeHeading(level, indent int, h Heading) {
+ b.indent(indent)
+ b.s.WriteString("<li>")
+ if !h.IsZero() {
+ b.s.WriteString("<a href=\"#" + h.ID + "\">" + h.Text + "</a>")
+ }
+ b.writeHeadings(level, indent, h.Headings)
+ b.s.WriteString("</li>\n")
+}
+
+func (b *tocBuilder) indent(n int) {
+ for i := 0; i < n; i++ {
+ b.s.WriteString(" ")
+ }
+}
+
+// DefaultConfig is the default ToC configuration.
+var DefaultConfig = Config{
+ StartLevel: 2,
+ EndLevel: 3,
+ Ordered: false,
+}
+
+type Config struct {
+ // Heading start level to include in the table of contents, starting
+ // at h1 (inclusive).
+ StartLevel int
+
+ // Heading end level, inclusive, to include in the table of contents.
+ // Default is 3, a value of -1 will include everything.
+ EndLevel int
+
+ // Whether to produce a ordered list or not.
+ Ordered bool
+}
diff --git a/markup/tableofcontents/tableofcontents_test.go b/markup/tableofcontents/tableofcontents_test.go
new file mode 100644
index 000000000..daeb9f991
--- /dev/null
+++ b/markup/tableofcontents/tableofcontents_test.go
@@ -0,0 +1,155 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tableofcontents
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestToc(t *testing.T) {
+ c := qt.New(t)
+
+ toc := &Root{}
+
+ toc.AddAt(Heading{Text: "Heading 1", ID: "h1-1"}, 0, 0)
+ toc.AddAt(Heading{Text: "1-H2-1", ID: "1-h2-1"}, 0, 1)
+ toc.AddAt(Heading{Text: "1-H2-2", ID: "1-h2-2"}, 0, 1)
+ toc.AddAt(Heading{Text: "1-H3-1", ID: "1-h2-2"}, 0, 2)
+ toc.AddAt(Heading{Text: "Heading 2", ID: "h1-2"}, 1, 0)
+
+ got := toc.ToHTML(1, -1, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#h1-1">Heading 1</a>
+ <ul>
+ <li><a href="#1-h2-1">1-H2-1</a></li>
+ <li><a href="#1-h2-2">1-H2-2</a>
+ <ul>
+ <li><a href="#1-h2-2">1-H3-1</a></li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ <li><a href="#h1-2">Heading 2</a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ got = toc.ToHTML(1, 1, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#h1-1">Heading 1</a></li>
+ <li><a href="#h1-2">Heading 2</a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ got = toc.ToHTML(1, 2, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#h1-1">Heading 1</a>
+ <ul>
+ <li><a href="#1-h2-1">1-H2-1</a></li>
+ <li><a href="#1-h2-2">1-H2-2</a></li>
+ </ul>
+ </li>
+ <li><a href="#h1-2">Heading 2</a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ got = toc.ToHTML(2, 2, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#1-h2-1">1-H2-1</a></li>
+ <li><a href="#1-h2-2">1-H2-2</a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ got = toc.ToHTML(1, -1, true)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ol>
+ <li><a href="#h1-1">Heading 1</a>
+ <ol>
+ <li><a href="#1-h2-1">1-H2-1</a></li>
+ <li><a href="#1-h2-2">1-H2-2</a>
+ <ol>
+ <li><a href="#1-h2-2">1-H3-1</a></li>
+ </ol>
+ </li>
+ </ol>
+ </li>
+ <li><a href="#h1-2">Heading 2</a></li>
+ </ol>
+</nav>`, qt.Commentf(got))
+}
+
+func TestTocMissingParent(t *testing.T) {
+ c := qt.New(t)
+
+ toc := &Root{}
+
+ toc.AddAt(Heading{Text: "H2", ID: "h2"}, 0, 1)
+ toc.AddAt(Heading{Text: "H3", ID: "h3"}, 1, 2)
+ toc.AddAt(Heading{Text: "H3", ID: "h3"}, 1, 2)
+
+ got := toc.ToHTML(1, -1, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li>
+ <ul>
+ <li><a href="#h2">H2</a></li>
+ </ul>
+ </li>
+ <li>
+ <ul>
+ <li>
+ <ul>
+ <li><a href="#h3">H3</a></li>
+ <li><a href="#h3">H3</a></li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ got = toc.ToHTML(3, 3, false)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ul>
+ <li><a href="#h3">H3</a></li>
+ <li><a href="#h3">H3</a></li>
+ </ul>
+</nav>`, qt.Commentf(got))
+
+ got = toc.ToHTML(1, -1, true)
+ c.Assert(got, qt.Equals, `<nav id="TableOfContents">
+ <ol>
+ <li>
+ <ol>
+ <li><a href="#h2">H2</a></li>
+ </ol>
+ </li>
+ <li>
+ <ol>
+ <li>
+ <ol>
+ <li><a href="#h3">H3</a></li>
+ <li><a href="#h3">H3</a></li>
+ </ol>
+ </li>
+ </ol>
+ </li>
+ </ol>
+</nav>`, qt.Commentf(got))
+}
diff --git a/media/docshelper.go b/media/docshelper.go
new file mode 100644
index 000000000..d37c08eb3
--- /dev/null
+++ b/media/docshelper.go
@@ -0,0 +1,13 @@
+package media
+
+import (
+ "github.com/gohugoio/hugo/docshelper"
+)
+
+// This is is just some helpers used to create some JSON used in the Hugo docs.
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+ return docshelper.DocProvider{"media": map[string]any{"types": DefaultTypes}}
+ }
+ docshelper.AddDocProviderFunc(docsProvider)
+}
diff --git a/media/mediaType.go b/media/mediaType.go
new file mode 100644
index 000000000..3ac3123ac
--- /dev/null
+++ b/media/mediaType.go
@@ -0,0 +1,536 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package media
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "sort"
+ "strings"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/mitchellh/mapstructure"
+)
+
+var zero Type
+
+const (
+ defaultDelimiter = "."
+)
+
+// Type (also known as MIME type and content type) is a two-part identifier for
+// file formats and format contents transmitted on the Internet.
+// For Hugo's use case, we use the top-level type name / subtype name + suffix.
+// One example would be application/svg+xml
+// If suffix is not provided, the sub type will be used.
+// See https://en.wikipedia.org/wiki/Media_type
+type Type struct {
+ MainType string `json:"mainType"` // i.e. text
+ SubType string `json:"subType"` // i.e. html
+ Delimiter string `json:"delimiter"` // e.g. "."
+
+ // FirstSuffix holds the first suffix defined for this Type.
+ FirstSuffix SuffixInfo `json:"firstSuffix"`
+
+ // This is the optional suffix after the "+" in the MIME type,
+ // e.g. "xml" in "application/rss+xml".
+ mimeSuffix string
+
+ // E.g. "jpg,jpeg"
+ // Stored as a string to make Type comparable.
+ suffixesCSV string
+}
+
+// SuffixInfo holds information about a Type's suffix.
+type SuffixInfo struct {
+ Suffix string `json:"suffix"`
+ FullSuffix string `json:"fullSuffix"`
+}
+
+// FromContent resolves the Type primarily using http.DetectContentType.
+// If http.DetectContentType resolves to application/octet-stream, a zero Type is returned.
+// If http.DetectContentType resolves to text/plain or application/xml, we try to get more specific using types and ext.
+func FromContent(types Types, extensionHints []string, content []byte) Type {
+ t := strings.Split(http.DetectContentType(content), ";")[0]
+ if t == "application/octet-stream" {
+ return zero
+ }
+
+ var found bool
+ m, found := types.GetByType(t)
+ if !found {
+ if t == "text/xml" {
+ // This is how it's configured in Hugo by default.
+ m, found = types.GetByType("application/xml")
+ }
+ }
+
+ if !found {
+ return zero
+ }
+
+ var mm Type
+
+ for _, extension := range extensionHints {
+ extension = strings.TrimPrefix(extension, ".")
+ mm, _, found = types.GetFirstBySuffix(extension)
+ if found {
+ break
+ }
+ }
+
+ if found {
+ if m == mm {
+ return m
+ }
+
+ if m.IsText() && mm.IsText() {
+ // http.DetectContentType isn't brilliant when it comes to common text formats, so we need to do better.
+ // For now we say that if it's detected to be a text format and the extension/content type in header reports
+ // it to be a text format, then we use that.
+ return mm
+ }
+
+ // E.g. an image with a *.js extension.
+ return zero
+ }
+
+ return m
+}
+
+// FromStringAndExt creates a Type from a MIME string and a given extension.
+func FromStringAndExt(t, ext string) (Type, error) {
+ tp, err := fromString(t)
+ if err != nil {
+ return tp, err
+ }
+ tp.suffixesCSV = strings.TrimPrefix(ext, ".")
+ tp.Delimiter = defaultDelimiter
+ tp.init()
+ return tp, nil
+}
+
+// fromString creates a new Type given a type string of the form MainType/SubType and
+// an optional suffix, e.g. "text/html" or "text/html+html".
+func fromString(t string) (Type, error) {
+ t = strings.ToLower(t)
+ parts := strings.Split(t, "/")
+ if len(parts) != 2 {
+ return Type{}, fmt.Errorf("cannot parse %q as a media type", t)
+ }
+ mainType := parts[0]
+ subParts := strings.Split(parts[1], "+")
+
+ subType := strings.Split(subParts[0], ";")[0]
+
+ var suffix string
+
+ if len(subParts) > 1 {
+ suffix = subParts[1]
+ }
+
+ return Type{MainType: mainType, SubType: subType, mimeSuffix: suffix}, nil
+}
+
+// Type returns a string representing the main- and sub-type of a media type, e.g. "text/css".
+// A suffix identifier will be appended after a "+" if set, e.g. "image/svg+xml".
+// Hugo will register a set of default media types.
+// These can be overridden by the user in the configuration,
+// by defining a media type with the same Type.
+func (m Type) Type() string {
+ // Examples are
+ // image/svg+xml
+ // text/css
+ if m.mimeSuffix != "" {
+ return m.MainType + "/" + m.SubType + "+" + m.mimeSuffix
+ }
+ return m.MainType + "/" + m.SubType
+}
+
+// For internal use.
+func (m Type) String() string {
+ return m.Type()
+}
+
+// Suffixes returns all valid file suffixes for this type.
+func (m Type) Suffixes() []string {
+ if m.suffixesCSV == "" {
+ return nil
+ }
+
+ return strings.Split(m.suffixesCSV, ",")
+}
+
+// IsText returns whether this Type is a text format.
+// Note that this may currently return false negatives.
+// TODO(bep) improve
+func (m Type) IsText() bool {
+ if m.MainType == "text" {
+ return true
+ }
+ switch m.SubType {
+ case "javascript", "json", "rss", "xml", "svg", TOMLType.SubType, YAMLType.SubType:
+ return true
+ }
+ return false
+}
+
+func (m *Type) init() {
+ m.FirstSuffix.FullSuffix = ""
+ m.FirstSuffix.Suffix = ""
+ if suffixes := m.Suffixes(); suffixes != nil {
+ m.FirstSuffix.Suffix = suffixes[0]
+ m.FirstSuffix.FullSuffix = m.Delimiter + m.FirstSuffix.Suffix
+ }
+}
+
+// WithDelimiterAndSuffixes is used in tests.
+func WithDelimiterAndSuffixes(t Type, delimiter, suffixesCSV string) Type {
+ t.Delimiter = delimiter
+ t.suffixesCSV = suffixesCSV
+ t.init()
+ return t
+}
+
+func newMediaType(main, sub string, suffixes []string) Type {
+ t := Type{MainType: main, SubType: sub, suffixesCSV: strings.Join(suffixes, ","), Delimiter: defaultDelimiter}
+ t.init()
+ return t
+}
+
+func newMediaTypeWithMimeSuffix(main, sub, mimeSuffix string, suffixes []string) Type {
+ mt := newMediaType(main, sub, suffixes)
+ mt.mimeSuffix = mimeSuffix
+ mt.init()
+ return mt
+}
+
+// Definitions from https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types etc.
+// Note that from Hugo 0.44 we only set Suffix if it is part of the MIME type.
+var (
+ CalendarType = newMediaType("text", "calendar", []string{"ics"})
+ CSSType = newMediaType("text", "css", []string{"css"})
+ SCSSType = newMediaType("text", "x-scss", []string{"scss"})
+ SASSType = newMediaType("text", "x-sass", []string{"sass"})
+ CSVType = newMediaType("text", "csv", []string{"csv"})
+ HTMLType = newMediaType("text", "html", []string{"html"})
+ JavascriptType = newMediaType("application", "javascript", []string{"js", "jsm", "mjs"})
+ TypeScriptType = newMediaType("application", "typescript", []string{"ts"})
+ TSXType = newMediaType("text", "tsx", []string{"tsx"})
+ JSXType = newMediaType("text", "jsx", []string{"jsx"})
+
+ JSONType = newMediaType("application", "json", []string{"json"})
+ WebAppManifestType = newMediaTypeWithMimeSuffix("application", "manifest", "json", []string{"webmanifest"})
+ RSSType = newMediaTypeWithMimeSuffix("application", "rss", "xml", []string{"xml", "rss"})
+ XMLType = newMediaType("application", "xml", []string{"xml"})
+ SVGType = newMediaTypeWithMimeSuffix("image", "svg", "xml", []string{"svg"})
+ TextType = newMediaType("text", "plain", []string{"txt"})
+ TOMLType = newMediaType("application", "toml", []string{"toml"})
+ YAMLType = newMediaType("application", "yaml", []string{"yaml", "yml"})
+
+ // Common image types
+ PNGType = newMediaType("image", "png", []string{"png"})
+ JPEGType = newMediaType("image", "jpeg", []string{"jpg", "jpeg", "jpe", "jif", "jfif"})
+ GIFType = newMediaType("image", "gif", []string{"gif"})
+ TIFFType = newMediaType("image", "tiff", []string{"tif", "tiff"})
+ BMPType = newMediaType("image", "bmp", []string{"bmp"})
+ WEBPType = newMediaType("image", "webp", []string{"webp"})
+
+ // Common font types
+ TrueTypeFontType = newMediaType("font", "ttf", []string{"ttf"})
+ OpenTypeFontType = newMediaType("font", "otf", []string{"otf"})
+
+ // Common document types
+ PDFType = newMediaType("application", "pdf", []string{"pdf"})
+ MarkdownType = newMediaType("text", "markdown", []string{"md", "markdown"})
+
+ // Common video types
+ AVIType = newMediaType("video", "x-msvideo", []string{"avi"})
+ MPEGType = newMediaType("video", "mpeg", []string{"mpg", "mpeg"})
+ MP4Type = newMediaType("video", "mp4", []string{"mp4"})
+ OGGType = newMediaType("video", "ogg", []string{"ogv"})
+ WEBMType = newMediaType("video", "webm", []string{"webm"})
+ GPPType = newMediaType("video", "3gpp", []string{"3gpp", "3gp"})
+
+ OctetType = newMediaType("application", "octet-stream", nil)
+)
+
+// DefaultTypes is the default media types supported by Hugo.
+var DefaultTypes = Types{
+ CalendarType,
+ CSSType,
+ CSVType,
+ SCSSType,
+ SASSType,
+ HTMLType,
+ MarkdownType,
+ JavascriptType,
+ TypeScriptType,
+ TSXType,
+ JSXType,
+ JSONType,
+ WebAppManifestType,
+ RSSType,
+ XMLType,
+ SVGType,
+ TextType,
+ OctetType,
+ YAMLType,
+ TOMLType,
+ PNGType,
+ GIFType,
+ BMPType,
+ JPEGType,
+ WEBPType,
+ AVIType,
+ MPEGType,
+ MP4Type,
+ OGGType,
+ WEBMType,
+ GPPType,
+ OpenTypeFontType,
+ TrueTypeFontType,
+ PDFType,
+}
+
+func init() {
+ sort.Sort(DefaultTypes)
+
+ // Sanity check.
+ seen := make(map[Type]bool)
+ for _, t := range DefaultTypes {
+ if seen[t] {
+ panic(fmt.Sprintf("MediaType %s duplicated in list", t))
+ }
+ seen[t] = true
+ }
+}
+
+// Types is a slice of media types.
+type Types []Type
+
+func (t Types) Len() int { return len(t) }
+func (t Types) Swap(i, j int) { t[i], t[j] = t[j], t[i] }
+func (t Types) Less(i, j int) bool { return t[i].Type() < t[j].Type() }
+
+// GetByType returns a media type for tp.
+func (t Types) GetByType(tp string) (Type, bool) {
+ for _, tt := range t {
+ if strings.EqualFold(tt.Type(), tp) {
+ return tt, true
+ }
+ }
+
+ if !strings.Contains(tp, "+") {
+ // Try with the main and sub type
+ parts := strings.Split(tp, "/")
+ if len(parts) == 2 {
+ return t.GetByMainSubType(parts[0], parts[1])
+ }
+ }
+
+ return Type{}, false
+}
+
+// BySuffix will return all media types matching a suffix.
+func (t Types) BySuffix(suffix string) []Type {
+ suffix = strings.ToLower(suffix)
+ var types []Type
+ for _, tt := range t {
+ if tt.hasSuffix(suffix) {
+ types = append(types, tt)
+ }
+ }
+ return types
+}
+
+// GetFirstBySuffix will return the first type matching the given suffix.
+func (t Types) GetFirstBySuffix(suffix string) (Type, SuffixInfo, bool) {
+ suffix = strings.ToLower(suffix)
+ for _, tt := range t {
+ if tt.hasSuffix(suffix) {
+ return tt, SuffixInfo{
+ FullSuffix: tt.Delimiter + suffix,
+ Suffix: suffix,
+ }, true
+ }
+ }
+ return Type{}, SuffixInfo{}, false
+}
+
+// GetBySuffix gets a media type given a suffix, e.g. "html".
+// It will return false if no format could be found, or if the suffix given
+// is ambiguous.
+// The lookup is case insensitive.
+func (t Types) GetBySuffix(suffix string) (tp Type, si SuffixInfo, found bool) {
+ suffix = strings.ToLower(suffix)
+ for _, tt := range t {
+ if tt.hasSuffix(suffix) {
+ if found {
+ // ambiguous
+ found = false
+ return
+ }
+ tp = tt
+ si = SuffixInfo{
+ FullSuffix: tt.Delimiter + suffix,
+ Suffix: suffix,
+ }
+ found = true
+ }
+ }
+ return
+}
+
+func (m Type) hasSuffix(suffix string) bool {
+ return strings.Contains(","+m.suffixesCSV+",", ","+suffix+",")
+}
+
+// GetByMainSubType gets a media type given a main and a sub type e.g. "text" and "plain".
+// It will return false if no format could be found, or if the combination given
+// is ambiguous.
+// The lookup is case insensitive.
+func (t Types) GetByMainSubType(mainType, subType string) (tp Type, found bool) {
+ for _, tt := range t {
+ if strings.EqualFold(mainType, tt.MainType) && strings.EqualFold(subType, tt.SubType) {
+ if found {
+ // ambiguous
+ found = false
+ return
+ }
+
+ tp = tt
+ found = true
+ }
+ }
+ return
+}
+
+func suffixIsRemoved() error {
+ return errors.New(`MediaType.Suffix is removed. Before Hugo 0.44 this was used both to set a custom file suffix and as way
+to augment the mediatype definition (what you see after the "+", e.g. "image/svg+xml").
+
+This had its limitations. For one, it was only possible with one file extension per MIME type.
+
+Now you can specify multiple file suffixes using "suffixes", but you need to specify the full MIME type
+identifier:
+
+[mediaTypes]
+[mediaTypes."image/svg+xml"]
+suffixes = ["svg", "abc" ]
+
+In most cases, it will be enough to just change:
+
+[mediaTypes]
+[mediaTypes."my/custom-mediatype"]
+suffix = "txt"
+
+To:
+
+[mediaTypes]
+[mediaTypes."my/custom-mediatype"]
+suffixes = ["txt"]
+
+Note that you can still get the Media Type's suffix from a template: {{ $mediaType.Suffix }}. But this will now map to the MIME type filename.
+`)
+}
+
+// DecodeTypes takes a list of media type configurations and merges those,
+// in the order given, with the Hugo defaults as the last resort.
+func DecodeTypes(mms ...map[string]any) (Types, error) {
+ var m Types
+
+ // Maps type string to Type. Type string is the full application/svg+xml.
+ mmm := make(map[string]Type)
+ for _, dt := range DefaultTypes {
+ mmm[dt.Type()] = dt
+ }
+
+ for _, mm := range mms {
+ for k, v := range mm {
+ var mediaType Type
+
+ mediaType, found := mmm[k]
+ if !found {
+ var err error
+ mediaType, err = fromString(k)
+ if err != nil {
+ return m, err
+ }
+ }
+
+ if err := mapstructure.WeakDecode(v, &mediaType); err != nil {
+ return m, err
+ }
+
+ vm := maps.ToStringMap(v)
+ maps.PrepareParams(vm)
+ _, delimiterSet := vm["delimiter"]
+ _, suffixSet := vm["suffix"]
+
+ if suffixSet {
+ return Types{}, suffixIsRemoved()
+ }
+
+ if suffixes, found := vm["suffixes"]; found {
+ mediaType.suffixesCSV = strings.TrimSpace(strings.ToLower(strings.Join(cast.ToStringSlice(suffixes), ",")))
+ }
+
+ // The user may set the delimiter as an empty string.
+ if !delimiterSet && mediaType.suffixesCSV != "" {
+ mediaType.Delimiter = defaultDelimiter
+ }
+
+ mediaType.init()
+
+ mmm[k] = mediaType
+
+ }
+ }
+
+ for _, v := range mmm {
+ m = append(m, v)
+ }
+ sort.Sort(m)
+
+ return m, nil
+}
+
+// IsZero reports whether this Type represents a zero value.
+// For internal use.
+func (m Type) IsZero() bool {
+ return m.SubType == ""
+}
+
+// MarshalJSON returns the JSON encoding of m.
+// For internal use.
+func (m Type) MarshalJSON() ([]byte, error) {
+ type Alias Type
+ return json.Marshal(&struct {
+ Alias
+ Type string `json:"type"`
+ String string `json:"string"`
+ Suffixes []string `json:"suffixes"`
+ }{
+ Alias: (Alias)(m),
+ Type: m.Type(),
+ String: m.String(),
+ Suffixes: strings.Split(m.suffixesCSV, ","),
+ })
+}
diff --git a/media/mediaType_test.go b/media/mediaType_test.go
new file mode 100644
index 000000000..af7123cb5
--- /dev/null
+++ b/media/mediaType_test.go
@@ -0,0 +1,346 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package media
+
+import (
+ "encoding/json"
+ "io/ioutil"
+ "path/filepath"
+ "sort"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/paths"
+)
+
+func TestDefaultTypes(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ tp Type
+ expectedMainType string
+ expectedSubType string
+ expectedSuffix string
+ expectedType string
+ expectedString string
+ }{
+ {CalendarType, "text", "calendar", "ics", "text/calendar", "text/calendar"},
+ {CSSType, "text", "css", "css", "text/css", "text/css"},
+ {SCSSType, "text", "x-scss", "scss", "text/x-scss", "text/x-scss"},
+ {CSVType, "text", "csv", "csv", "text/csv", "text/csv"},
+ {HTMLType, "text", "html", "html", "text/html", "text/html"},
+ {JavascriptType, "application", "javascript", "js", "application/javascript", "application/javascript"},
+ {TypeScriptType, "application", "typescript", "ts", "application/typescript", "application/typescript"},
+ {TSXType, "text", "tsx", "tsx", "text/tsx", "text/tsx"},
+ {JSXType, "text", "jsx", "jsx", "text/jsx", "text/jsx"},
+ {JSONType, "application", "json", "json", "application/json", "application/json"},
+ {RSSType, "application", "rss", "xml", "application/rss+xml", "application/rss+xml"},
+ {SVGType, "image", "svg", "svg", "image/svg+xml", "image/svg+xml"},
+ {TextType, "text", "plain", "txt", "text/plain", "text/plain"},
+ {XMLType, "application", "xml", "xml", "application/xml", "application/xml"},
+ {TOMLType, "application", "toml", "toml", "application/toml", "application/toml"},
+ {YAMLType, "application", "yaml", "yaml", "application/yaml", "application/yaml"},
+ {PDFType, "application", "pdf", "pdf", "application/pdf", "application/pdf"},
+ {TrueTypeFontType, "font", "ttf", "ttf", "font/ttf", "font/ttf"},
+ {OpenTypeFontType, "font", "otf", "otf", "font/otf", "font/otf"},
+ } {
+ c.Assert(test.tp.MainType, qt.Equals, test.expectedMainType)
+ c.Assert(test.tp.SubType, qt.Equals, test.expectedSubType)
+
+ c.Assert(test.tp.Type(), qt.Equals, test.expectedType)
+ c.Assert(test.tp.String(), qt.Equals, test.expectedString)
+
+ }
+
+ c.Assert(len(DefaultTypes), qt.Equals, 34)
+}
+
+func TestGetByType(t *testing.T) {
+ c := qt.New(t)
+
+ types := Types{HTMLType, RSSType}
+
+ mt, found := types.GetByType("text/HTML")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(HTMLType, qt.Equals, mt)
+
+ _, found = types.GetByType("text/nono")
+ c.Assert(found, qt.Equals, false)
+
+ mt, found = types.GetByType("application/rss+xml")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(RSSType, qt.Equals, mt)
+
+ mt, found = types.GetByType("application/rss")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(RSSType, qt.Equals, mt)
+}
+
+func TestGetByMainSubType(t *testing.T) {
+ c := qt.New(t)
+ f, found := DefaultTypes.GetByMainSubType("text", "plain")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(f, qt.Equals, TextType)
+ _, found = DefaultTypes.GetByMainSubType("foo", "plain")
+ c.Assert(found, qt.Equals, false)
+}
+
+func TestBySuffix(t *testing.T) {
+ c := qt.New(t)
+ formats := DefaultTypes.BySuffix("xml")
+ c.Assert(len(formats), qt.Equals, 2)
+ c.Assert(formats[0].SubType, qt.Equals, "rss")
+ c.Assert(formats[1].SubType, qt.Equals, "xml")
+}
+
+func TestGetFirstBySuffix(t *testing.T) {
+ c := qt.New(t)
+
+ types := DefaultTypes
+
+ // Issue #8406
+ geoJSON := newMediaTypeWithMimeSuffix("application", "geo", "json", []string{"geojson", "gjson"})
+ types = append(types, geoJSON)
+ sort.Sort(types)
+
+ check := func(suffix string, expectedType Type) {
+ t, f, found := types.GetFirstBySuffix(suffix)
+ c.Assert(found, qt.Equals, true)
+ c.Assert(f, qt.Equals, SuffixInfo{
+ Suffix: suffix,
+ FullSuffix: "." + suffix,
+ })
+ c.Assert(t, qt.Equals, expectedType)
+ }
+
+ check("js", JavascriptType)
+ check("json", JSONType)
+ check("geojson", geoJSON)
+ check("gjson", geoJSON)
+}
+
+func TestFromTypeString(t *testing.T) {
+ c := qt.New(t)
+ f, err := fromString("text/html")
+ c.Assert(err, qt.IsNil)
+ c.Assert(f.Type(), qt.Equals, HTMLType.Type())
+
+ f, err = fromString("application/custom")
+ c.Assert(err, qt.IsNil)
+ c.Assert(f, qt.Equals, Type{MainType: "application", SubType: "custom", mimeSuffix: ""})
+
+ f, err = fromString("application/custom+sfx")
+ c.Assert(err, qt.IsNil)
+ c.Assert(f, qt.Equals, Type{MainType: "application", SubType: "custom", mimeSuffix: "sfx"})
+
+ _, err = fromString("noslash")
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ f, err = fromString("text/xml; charset=utf-8")
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(f, qt.Equals, Type{MainType: "text", SubType: "xml", mimeSuffix: ""})
+}
+
+func TestFromStringAndExt(t *testing.T) {
+ c := qt.New(t)
+ f, err := FromStringAndExt("text/html", "html")
+ c.Assert(err, qt.IsNil)
+ c.Assert(f, qt.Equals, HTMLType)
+ f, err = FromStringAndExt("text/html", ".html")
+ c.Assert(err, qt.IsNil)
+ c.Assert(f, qt.Equals, HTMLType)
+}
+
+// Add a test for the SVG case
+// https://github.com/gohugoio/hugo/issues/4920
+func TestFromExtensionMultipleSuffixes(t *testing.T) {
+ c := qt.New(t)
+ tp, si, found := DefaultTypes.GetBySuffix("svg")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(tp.String(), qt.Equals, "image/svg+xml")
+ c.Assert(si.Suffix, qt.Equals, "svg")
+ c.Assert(si.FullSuffix, qt.Equals, ".svg")
+ c.Assert(tp.FirstSuffix.Suffix, qt.Equals, si.Suffix)
+ c.Assert(tp.FirstSuffix.FullSuffix, qt.Equals, si.FullSuffix)
+ ftp, found := DefaultTypes.GetByType("image/svg+xml")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(ftp.String(), qt.Equals, "image/svg+xml")
+ c.Assert(found, qt.Equals, true)
+}
+
+func TestFromContent(t *testing.T) {
+ c := qt.New(t)
+
+ files, err := filepath.Glob("./testdata/resource.*")
+ c.Assert(err, qt.IsNil)
+ mtypes := DefaultTypes
+
+ for _, filename := range files {
+ name := filepath.Base(filename)
+ c.Run(name, func(c *qt.C) {
+ content, err := ioutil.ReadFile(filename)
+ c.Assert(err, qt.IsNil)
+ ext := strings.TrimPrefix(paths.Ext(filename), ".")
+ var exts []string
+ if ext == "jpg" {
+ exts = append(exts, "foo", "bar", "jpg")
+ } else {
+ exts = []string{ext}
+ }
+ expected, _, found := mtypes.GetFirstBySuffix(ext)
+ c.Assert(found, qt.IsTrue)
+ got := FromContent(mtypes, exts, content)
+ c.Assert(got, qt.Equals, expected)
+ })
+ }
+}
+
+func TestFromContentFakes(t *testing.T) {
+ c := qt.New(t)
+
+ files, err := filepath.Glob("./testdata/fake.*")
+ c.Assert(err, qt.IsNil)
+ mtypes := DefaultTypes
+
+ for _, filename := range files {
+ name := filepath.Base(filename)
+ c.Run(name, func(c *qt.C) {
+ content, err := ioutil.ReadFile(filename)
+ c.Assert(err, qt.IsNil)
+ ext := strings.TrimPrefix(paths.Ext(filename), ".")
+ got := FromContent(mtypes, []string{ext}, content)
+ c.Assert(got, qt.Equals, zero)
+ })
+ }
+}
+
+func TestDecodeTypes(t *testing.T) {
+ c := qt.New(t)
+
+ tests := []struct {
+ name string
+ maps []map[string]any
+ shouldError bool
+ assert func(t *testing.T, name string, tt Types)
+ }{
+ {
+ "Redefine JSON",
+ []map[string]any{
+ {
+ "application/json": map[string]any{
+ "suffixes": []string{"jasn"},
+ },
+ },
+ },
+ false,
+ func(t *testing.T, name string, tt Types) {
+ c.Assert(len(tt), qt.Equals, len(DefaultTypes))
+ json, si, found := tt.GetBySuffix("jasn")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(json.String(), qt.Equals, "application/json")
+ c.Assert(si.FullSuffix, qt.Equals, ".jasn")
+ },
+ },
+ {
+ "MIME suffix in key, multiple file suffixes, custom delimiter",
+ []map[string]any{
+ {
+ "application/hugo+hg": map[string]any{
+ "suffixes": []string{"hg1", "hG2"},
+ "Delimiter": "_",
+ },
+ },
+ },
+ false,
+ func(t *testing.T, name string, tt Types) {
+ c.Assert(len(tt), qt.Equals, len(DefaultTypes)+1)
+ hg, si, found := tt.GetBySuffix("hg2")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(hg.mimeSuffix, qt.Equals, "hg")
+ c.Assert(hg.FirstSuffix.Suffix, qt.Equals, "hg1")
+ c.Assert(hg.FirstSuffix.FullSuffix, qt.Equals, "_hg1")
+ c.Assert(si.Suffix, qt.Equals, "hg2")
+ c.Assert(si.FullSuffix, qt.Equals, "_hg2")
+ c.Assert(hg.String(), qt.Equals, "application/hugo+hg")
+
+ _, found = tt.GetByType("application/hugo+hg")
+ c.Assert(found, qt.Equals, true)
+ },
+ },
+ {
+ "Add custom media type",
+ []map[string]any{
+ {
+ "text/hugo+hgo": map[string]any{
+ "Suffixes": []string{"hgo2"},
+ },
+ },
+ },
+ false,
+ func(t *testing.T, name string, tp Types) {
+ c.Assert(len(tp), qt.Equals, len(DefaultTypes)+1)
+ // Make sure we have not broken the default config.
+
+ _, _, found := tp.GetBySuffix("json")
+ c.Assert(found, qt.Equals, true)
+
+ hugo, _, found := tp.GetBySuffix("hgo2")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(hugo.String(), qt.Equals, "text/hugo+hgo")
+ },
+ },
+ }
+
+ for _, test := range tests {
+ result, err := DecodeTypes(test.maps...)
+ if test.shouldError {
+ c.Assert(err, qt.Not(qt.IsNil))
+ } else {
+ c.Assert(err, qt.IsNil)
+ test.assert(t, test.name, result)
+ }
+ }
+}
+
+func TestToJSON(t *testing.T) {
+ c := qt.New(t)
+ b, err := json.Marshal(MPEGType)
+ c.Assert(err, qt.IsNil)
+ c.Assert(string(b), qt.Equals, `{"mainType":"video","subType":"mpeg","delimiter":".","firstSuffix":{"suffix":"mpg","fullSuffix":".mpg"},"type":"video/mpeg","string":"video/mpeg","suffixes":["mpg","mpeg"]}`)
+}
+
+func BenchmarkTypeOps(b *testing.B) {
+ mt := MPEGType
+ mts := DefaultTypes
+ for i := 0; i < b.N; i++ {
+ ff := mt.FirstSuffix
+ _ = ff.FullSuffix
+ _ = mt.IsZero()
+ c, err := mt.MarshalJSON()
+ if c == nil || err != nil {
+ b.Fatal("failed")
+ }
+ _ = mt.String()
+ _ = ff.Suffix
+ _ = mt.Suffixes
+ _ = mt.Type()
+ _ = mts.BySuffix("xml")
+ _, _ = mts.GetByMainSubType("application", "xml")
+ _, _, _ = mts.GetBySuffix("xml")
+ _, _ = mts.GetByType("application")
+ _, _, _ = mts.GetFirstBySuffix("xml")
+
+ }
+}
diff --git a/media/testdata/fake.js b/media/testdata/fake.js
new file mode 100644
index 000000000..08ae570d2
--- /dev/null
+++ b/media/testdata/fake.js
Binary files differ
diff --git a/media/testdata/fake.png b/media/testdata/fake.png
new file mode 100644
index 000000000..75ba3b7fe
--- /dev/null
+++ b/media/testdata/fake.png
@@ -0,0 +1,3 @@
+function foo() {
+ return "foo";
+} \ No newline at end of file
diff --git a/media/testdata/reosurce.otf b/media/testdata/reosurce.otf
new file mode 100644
index 000000000..99034a2de
--- /dev/null
+++ b/media/testdata/reosurce.otf
Binary files differ
diff --git a/media/testdata/resource.bmp b/media/testdata/resource.bmp
new file mode 100644
index 000000000..19759b33d
--- /dev/null
+++ b/media/testdata/resource.bmp
Binary files differ
diff --git a/media/testdata/resource.css b/media/testdata/resource.css
new file mode 100644
index 000000000..a267873b5
--- /dev/null
+++ b/media/testdata/resource.css
@@ -0,0 +1,8 @@
+body {
+ background-color: lightblue;
+ }
+
+ h1 {
+ color: navy;
+ margin-left: 20px;
+ } \ No newline at end of file
diff --git a/media/testdata/resource.csv b/media/testdata/resource.csv
new file mode 100644
index 000000000..ee6b058b6
--- /dev/null
+++ b/media/testdata/resource.csv
@@ -0,0 +1,130 @@
+"LatD", "LatM", "LatS", "NS", "LonD", "LonM", "LonS", "EW", "City", "State"
+ 41, 5, 59, "N", 80, 39, 0, "W", "Youngstown", OH
+ 42, 52, 48, "N", 97, 23, 23, "W", "Yankton", SD
+ 46, 35, 59, "N", 120, 30, 36, "W", "Yakima", WA
+ 42, 16, 12, "N", 71, 48, 0, "W", "Worcester", MA
+ 43, 37, 48, "N", 89, 46, 11, "W", "Wisconsin Dells", WI
+ 36, 5, 59, "N", 80, 15, 0, "W", "Winston-Salem", NC
+ 49, 52, 48, "N", 97, 9, 0, "W", "Winnipeg", MB
+ 39, 11, 23, "N", 78, 9, 36, "W", "Winchester", VA
+ 34, 14, 24, "N", 77, 55, 11, "W", "Wilmington", NC
+ 39, 45, 0, "N", 75, 33, 0, "W", "Wilmington", DE
+ 48, 9, 0, "N", 103, 37, 12, "W", "Williston", ND
+ 41, 15, 0, "N", 77, 0, 0, "W", "Williamsport", PA
+ 37, 40, 48, "N", 82, 16, 47, "W", "Williamson", WV
+ 33, 54, 0, "N", 98, 29, 23, "W", "Wichita Falls", TX
+ 37, 41, 23, "N", 97, 20, 23, "W", "Wichita", KS
+ 40, 4, 11, "N", 80, 43, 12, "W", "Wheeling", WV
+ 26, 43, 11, "N", 80, 3, 0, "W", "West Palm Beach", FL
+ 47, 25, 11, "N", 120, 19, 11, "W", "Wenatchee", WA
+ 41, 25, 11, "N", 122, 23, 23, "W", "Weed", CA
+ 31, 13, 11, "N", 82, 20, 59, "W", "Waycross", GA
+ 44, 57, 35, "N", 89, 38, 23, "W", "Wausau", WI
+ 42, 21, 36, "N", 87, 49, 48, "W", "Waukegan", IL
+ 44, 54, 0, "N", 97, 6, 36, "W", "Watertown", SD
+ 43, 58, 47, "N", 75, 55, 11, "W", "Watertown", NY
+ 42, 30, 0, "N", 92, 20, 23, "W", "Waterloo", IA
+ 41, 32, 59, "N", 73, 3, 0, "W", "Waterbury", CT
+ 38, 53, 23, "N", 77, 1, 47, "W", "Washington", DC
+ 41, 50, 59, "N", 79, 8, 23, "W", "Warren", PA
+ 46, 4, 11, "N", 118, 19, 48, "W", "Walla Walla", WA
+ 31, 32, 59, "N", 97, 8, 23, "W", "Waco", TX
+ 38, 40, 48, "N", 87, 31, 47, "W", "Vincennes", IN
+ 28, 48, 35, "N", 97, 0, 36, "W", "Victoria", TX
+ 32, 20, 59, "N", 90, 52, 47, "W", "Vicksburg", MS
+ 49, 16, 12, "N", 123, 7, 12, "W", "Vancouver", BC
+ 46, 55, 11, "N", 98, 0, 36, "W", "Valley City", ND
+ 30, 49, 47, "N", 83, 16, 47, "W", "Valdosta", GA
+ 43, 6, 36, "N", 75, 13, 48, "W", "Utica", NY
+ 39, 54, 0, "N", 79, 43, 48, "W", "Uniontown", PA
+ 32, 20, 59, "N", 95, 18, 0, "W", "Tyler", TX
+ 42, 33, 36, "N", 114, 28, 12, "W", "Twin Falls", ID
+ 33, 12, 35, "N", 87, 34, 11, "W", "Tuscaloosa", AL
+ 34, 15, 35, "N", 88, 42, 35, "W", "Tupelo", MS
+ 36, 9, 35, "N", 95, 54, 36, "W", "Tulsa", OK
+ 32, 13, 12, "N", 110, 58, 12, "W", "Tucson", AZ
+ 37, 10, 11, "N", 104, 30, 36, "W", "Trinidad", CO
+ 40, 13, 47, "N", 74, 46, 11, "W", "Trenton", NJ
+ 44, 45, 35, "N", 85, 37, 47, "W", "Traverse City", MI
+ 43, 39, 0, "N", 79, 22, 47, "W", "Toronto", ON
+ 39, 2, 59, "N", 95, 40, 11, "W", "Topeka", KS
+ 41, 39, 0, "N", 83, 32, 24, "W", "Toledo", OH
+ 33, 25, 48, "N", 94, 3, 0, "W", "Texarkana", TX
+ 39, 28, 12, "N", 87, 24, 36, "W", "Terre Haute", IN
+ 27, 57, 0, "N", 82, 26, 59, "W", "Tampa", FL
+ 30, 27, 0, "N", 84, 16, 47, "W", "Tallahassee", FL
+ 47, 14, 24, "N", 122, 25, 48, "W", "Tacoma", WA
+ 43, 2, 59, "N", 76, 9, 0, "W", "Syracuse", NY
+ 32, 35, 59, "N", 82, 20, 23, "W", "Swainsboro", GA
+ 33, 55, 11, "N", 80, 20, 59, "W", "Sumter", SC
+ 40, 59, 24, "N", 75, 11, 24, "W", "Stroudsburg", PA
+ 37, 57, 35, "N", 121, 17, 24, "W", "Stockton", CA
+ 44, 31, 12, "N", 89, 34, 11, "W", "Stevens Point", WI
+ 40, 21, 36, "N", 80, 37, 12, "W", "Steubenville", OH
+ 40, 37, 11, "N", 103, 13, 12, "W", "Sterling", CO
+ 38, 9, 0, "N", 79, 4, 11, "W", "Staunton", VA
+ 39, 55, 11, "N", 83, 48, 35, "W", "Springfield", OH
+ 37, 13, 12, "N", 93, 17, 24, "W", "Springfield", MO
+ 42, 5, 59, "N", 72, 35, 23, "W", "Springfield", MA
+ 39, 47, 59, "N", 89, 39, 0, "W", "Springfield", IL
+ 47, 40, 11, "N", 117, 24, 36, "W", "Spokane", WA
+ 41, 40, 48, "N", 86, 15, 0, "W", "South Bend", IN
+ 43, 32, 24, "N", 96, 43, 48, "W", "Sioux Falls", SD
+ 42, 29, 24, "N", 96, 23, 23, "W", "Sioux City", IA
+ 32, 30, 35, "N", 93, 45, 0, "W", "Shreveport", LA
+ 33, 38, 23, "N", 96, 36, 36, "W", "Sherman", TX
+ 44, 47, 59, "N", 106, 57, 35, "W", "Sheridan", WY
+ 35, 13, 47, "N", 96, 40, 48, "W", "Seminole", OK
+ 32, 25, 11, "N", 87, 1, 11, "W", "Selma", AL
+ 38, 42, 35, "N", 93, 13, 48, "W", "Sedalia", MO
+ 47, 35, 59, "N", 122, 19, 48, "W", "Seattle", WA
+ 41, 24, 35, "N", 75, 40, 11, "W", "Scranton", PA
+ 41, 52, 11, "N", 103, 39, 36, "W", "Scottsbluff", NB
+ 42, 49, 11, "N", 73, 56, 59, "W", "Schenectady", NY
+ 32, 4, 48, "N", 81, 5, 23, "W", "Savannah", GA
+ 46, 29, 24, "N", 84, 20, 59, "W", "Sault Sainte Marie", MI
+ 27, 20, 24, "N", 82, 31, 47, "W", "Sarasota", FL
+ 38, 26, 23, "N", 122, 43, 12, "W", "Santa Rosa", CA
+ 35, 40, 48, "N", 105, 56, 59, "W", "Santa Fe", NM
+ 34, 25, 11, "N", 119, 41, 59, "W", "Santa Barbara", CA
+ 33, 45, 35, "N", 117, 52, 12, "W", "Santa Ana", CA
+ 37, 20, 24, "N", 121, 52, 47, "W", "San Jose", CA
+ 37, 46, 47, "N", 122, 25, 11, "W", "San Francisco", CA
+ 41, 27, 0, "N", 82, 42, 35, "W", "Sandusky", OH
+ 32, 42, 35, "N", 117, 9, 0, "W", "San Diego", CA
+ 34, 6, 36, "N", 117, 18, 35, "W", "San Bernardino", CA
+ 29, 25, 12, "N", 98, 30, 0, "W", "San Antonio", TX
+ 31, 27, 35, "N", 100, 26, 24, "W", "San Angelo", TX
+ 40, 45, 35, "N", 111, 52, 47, "W", "Salt Lake City", UT
+ 38, 22, 11, "N", 75, 35, 59, "W", "Salisbury", MD
+ 36, 40, 11, "N", 121, 39, 0, "W", "Salinas", CA
+ 38, 50, 24, "N", 97, 36, 36, "W", "Salina", KS
+ 38, 31, 47, "N", 106, 0, 0, "W", "Salida", CO
+ 44, 56, 23, "N", 123, 1, 47, "W", "Salem", OR
+ 44, 57, 0, "N", 93, 5, 59, "W", "Saint Paul", MN
+ 38, 37, 11, "N", 90, 11, 24, "W", "Saint Louis", MO
+ 39, 46, 12, "N", 94, 50, 23, "W", "Saint Joseph", MO
+ 42, 5, 59, "N", 86, 28, 48, "W", "Saint Joseph", MI
+ 44, 25, 11, "N", 72, 1, 11, "W", "Saint Johnsbury", VT
+ 45, 34, 11, "N", 94, 10, 11, "W", "Saint Cloud", MN
+ 29, 53, 23, "N", 81, 19, 11, "W", "Saint Augustine", FL
+ 43, 25, 48, "N", 83, 56, 24, "W", "Saginaw", MI
+ 38, 35, 24, "N", 121, 29, 23, "W", "Sacramento", CA
+ 43, 36, 36, "N", 72, 58, 12, "W", "Rutland", VT
+ 33, 24, 0, "N", 104, 31, 47, "W", "Roswell", NM
+ 35, 56, 23, "N", 77, 48, 0, "W", "Rocky Mount", NC
+ 41, 35, 24, "N", 109, 13, 48, "W", "Rock Springs", WY
+ 42, 16, 12, "N", 89, 5, 59, "W", "Rockford", IL
+ 43, 9, 35, "N", 77, 36, 36, "W", "Rochester", NY
+ 44, 1, 12, "N", 92, 27, 35, "W", "Rochester", MN
+ 37, 16, 12, "N", 79, 56, 24, "W", "Roanoke", VA
+ 37, 32, 24, "N", 77, 26, 59, "W", "Richmond", VA
+ 39, 49, 48, "N", 84, 53, 23, "W", "Richmond", IN
+ 38, 46, 12, "N", 112, 5, 23, "W", "Richfield", UT
+ 45, 38, 23, "N", 89, 25, 11, "W", "Rhinelander", WI
+ 39, 31, 12, "N", 119, 48, 35, "W", "Reno", NV
+ 50, 25, 11, "N", 104, 39, 0, "W", "Regina", SA
+ 40, 10, 48, "N", 122, 14, 23, "W", "Red Bluff", CA
+ 40, 19, 48, "N", 75, 55, 48, "W", "Reading", PA
+ 41, 9, 35, "N", 81, 14, 23, "W", "Ravenna", OH
+
diff --git a/media/testdata/resource.gif b/media/testdata/resource.gif
new file mode 100644
index 000000000..9549c0b9d
--- /dev/null
+++ b/media/testdata/resource.gif
Binary files differ
diff --git a/media/testdata/resource.ics b/media/testdata/resource.ics
new file mode 100644
index 000000000..b9a263e93
--- /dev/null
+++ b/media/testdata/resource.ics
@@ -0,0 +1,24 @@
+BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//ZContent.net//Zap Calendar 1.0//EN
+CALSCALE:GREGORIAN
+METHOD:PUBLISH
+BEGIN:VEVENT
+SUMMARY:Abraham Lincoln
+UID:c7614cff-3549-4a00-9152-d25cc1fe077d
+SEQUENCE:0
+STATUS:CONFIRMED
+TRANSP:TRANSPARENT
+RRULE:FREQ=YEARLY;INTERVAL=1;BYMONTH=2;BYMONTHDAY=12
+DTSTART:20080212
+DTEND:20080213
+DTSTAMP:20150421T141403
+CATEGORIES:U.S. Presidents,Civil War People
+LOCATION:Hodgenville\, Kentucky
+GEO:37.5739497;-85.7399606
+DESCRIPTION:Born February 12\, 1809\nSixteenth President (1861-1865)\n\n\n
+ \nhttp://AmericanHistoryCalendar.com
+URL:http://americanhistorycalendar.com/peoplecalendar/1,328-abraham-lincol
+ n
+END:VEVENT
+END:VCALENDAR \ No newline at end of file
diff --git a/media/testdata/resource.jpe b/media/testdata/resource.jpe
new file mode 100644
index 000000000..a9049e81b
--- /dev/null
+++ b/media/testdata/resource.jpe
Binary files differ
diff --git a/media/testdata/resource.jpg b/media/testdata/resource.jpg
new file mode 100644
index 000000000..a9049e81b
--- /dev/null
+++ b/media/testdata/resource.jpg
Binary files differ
diff --git a/media/testdata/resource.js b/media/testdata/resource.js
new file mode 100644
index 000000000..75ba3b7fe
--- /dev/null
+++ b/media/testdata/resource.js
@@ -0,0 +1,3 @@
+function foo() {
+ return "foo";
+} \ No newline at end of file
diff --git a/media/testdata/resource.json b/media/testdata/resource.json
new file mode 100644
index 000000000..446899897
--- /dev/null
+++ b/media/testdata/resource.json
@@ -0,0 +1,14 @@
+{
+ "firstName": "Joe",
+ "lastName": "Jackson",
+ "gender": "male",
+ "age": 28,
+ "address": {
+ "streetAddress": "101",
+ "city": "San Diego",
+ "state": "CA"
+ },
+ "phoneNumbers": [
+ { "type": "home", "number": "7349282382" }
+ ]
+} \ No newline at end of file
diff --git a/media/testdata/resource.pdf b/media/testdata/resource.pdf
new file mode 100644
index 000000000..c0e31a076
--- /dev/null
+++ b/media/testdata/resource.pdf
@@ -0,0 +1,198 @@
+%PDF-1.3
+%
+
+1 0 obj
+<<
+/Type /Catalog
+/Outlines 2 0 R
+/Pages 3 0 R
+>>
+endobj
+
+2 0 obj
+<<
+/Type /Outlines
+/Count 0
+>>
+endobj
+
+3 0 obj
+<<
+/Type /Pages
+/Count 2
+/Kids [ 4 0 R 6 0 R ]
+>>
+endobj
+
+4 0 obj
+<<
+/Type /Page
+/Parent 3 0 R
+/Resources <<
+/Font <<
+/F1 9 0 R
+>>
+/ProcSet 8 0 R
+>>
+/MediaBox [0 0 612.0000 792.0000]
+/Contents 5 0 R
+>>
+endobj
+
+5 0 obj
+<< /Length 1074 >>
+stream
+2 J
+BT
+0 0 0 rg
+/F1 0027 Tf
+57.3750 722.2800 Td
+( A Simple PDF File ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 688.6080 Td
+( This is a small demonstration .pdf file - ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 664.7040 Td
+( just for use in the Virtual Mechanics tutorials. More text. And more ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 652.7520 Td
+( text. And more text. And more text. And more text. ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 628.8480 Td
+( And more text. And more text. And more text. And more text. And more ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 616.8960 Td
+( text. And more text. Boring, zzzzz. And more text. And more text. And ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 604.9440 Td
+( more text. And more text. And more text. And more text. And more text. ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 592.9920 Td
+( And more text. And more text. ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 569.0880 Td
+( And more text. And more text. And more text. And more text. And more ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 557.1360 Td
+( text. And more text. And more text. Even more. Continued on page 2 ...) Tj
+ET
+endstream
+endobj
+
+6 0 obj
+<<
+/Type /Page
+/Parent 3 0 R
+/Resources <<
+/Font <<
+/F1 9 0 R
+>>
+/ProcSet 8 0 R
+>>
+/MediaBox [0 0 612.0000 792.0000]
+/Contents 7 0 R
+>>
+endobj
+
+7 0 obj
+<< /Length 676 >>
+stream
+2 J
+BT
+0 0 0 rg
+/F1 0027 Tf
+57.3750 722.2800 Td
+( Simple PDF File 2 ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 688.6080 Td
+( ...continued from page 1. Yet more text. And more text. And more text. ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 676.6560 Td
+( And more text. And more text. And more text. And more text. And more ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 664.7040 Td
+( text. Oh, how boring typing this stuff. But not as boring as watching ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 652.7520 Td
+( paint dry. And more text. And more text. And more text. And more text. ) Tj
+ET
+BT
+/F1 0010 Tf
+69.2500 640.8000 Td
+( Boring. More, a little more text. The end, and just as well. ) Tj
+ET
+endstream
+endobj
+
+8 0 obj
+[/PDF /Text]
+endobj
+
+9 0 obj
+<<
+/Type /Font
+/Subtype /Type1
+/Name /F1
+/BaseFont /Helvetica
+/Encoding /WinAnsiEncoding
+>>
+endobj
+
+10 0 obj
+<<
+/Creator (Rave \(http://www.nevrona.com/rave\))
+/Producer (Nevrona Designs)
+/CreationDate (D:20060301072826)
+>>
+endobj
+
+xref
+0 11
+0000000000 65535 f
+0000000019 00000 n
+0000000093 00000 n
+0000000147 00000 n
+0000000222 00000 n
+0000000390 00000 n
+0000001522 00000 n
+0000001690 00000 n
+0000002423 00000 n
+0000002456 00000 n
+0000002574 00000 n
+
+trailer
+<<
+/Size 11
+/Root 1 0 R
+/Info 10 0 R
+>>
+
+startxref
+2714
+%%EOF
diff --git a/media/testdata/resource.png b/media/testdata/resource.png
new file mode 100644
index 000000000..08ae570d2
--- /dev/null
+++ b/media/testdata/resource.png
Binary files differ
diff --git a/media/testdata/resource.rss b/media/testdata/resource.rss
new file mode 100644
index 000000000..b20b0fcca
--- /dev/null
+++ b/media/testdata/resource.rss
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<rss version="2.0">
+
+<channel>
+ <title>W3Schools Home Page</title>
+ <link>https://www.w3schools.com</link>
+ <description>Free web building tutorials</description>
+ <item>
+ <title>RSS Tutorial</title>
+ <link>https://www.w3schools.com/xml/xml_rss.asp</link>
+ <description>New RSS tutorial on W3Schools</description>
+ </item>
+ <item>
+ <title>XML Tutorial</title>
+ <link>https://www.w3schools.com/xml</link>
+ <description>New XML tutorial on W3Schools</description>
+ </item>
+</channel>
+
+</rss> \ No newline at end of file
diff --git a/media/testdata/resource.sass b/media/testdata/resource.sass
new file mode 100644
index 000000000..ad857fac7
--- /dev/null
+++ b/media/testdata/resource.sass
@@ -0,0 +1,6 @@
+$font-stack: Helvetica, sans-serif
+$primary-color: #333
+
+body
+ font: 100% $font-stack
+ color: $primary-color \ No newline at end of file
diff --git a/media/testdata/resource.scss b/media/testdata/resource.scss
new file mode 100644
index 000000000..d63e420f6
--- /dev/null
+++ b/media/testdata/resource.scss
@@ -0,0 +1,7 @@
+$font-stack: Helvetica, sans-serif;
+$primary-color: #333;
+
+body {
+ font: 100% $font-stack;
+ color: $primary-color;
+} \ No newline at end of file
diff --git a/media/testdata/resource.svg b/media/testdata/resource.svg
new file mode 100644
index 000000000..2759ae703
--- /dev/null
+++ b/media/testdata/resource.svg
@@ -0,0 +1,5 @@
+<svg height="100" width="100">
+ <circle cx="50" cy="50" r="40" stroke="black" stroke-width="3" fill="red" />
+ Sorry, your browser does not support inline SVG.
+</svg>
+ \ No newline at end of file
diff --git a/media/testdata/resource.ttf b/media/testdata/resource.ttf
new file mode 100644
index 000000000..8bc614d06
--- /dev/null
+++ b/media/testdata/resource.ttf
Binary files differ
diff --git a/media/testdata/resource.webp b/media/testdata/resource.webp
new file mode 100644
index 000000000..4365e7b9f
--- /dev/null
+++ b/media/testdata/resource.webp
Binary files differ
diff --git a/media/testdata/resource.xml b/media/testdata/resource.xml
new file mode 100644
index 000000000..fa0c0a5b6
--- /dev/null
+++ b/media/testdata/resource.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<note>
+ <to>Tove</to>
+ <from>Jani</from>
+ <heading>Reminder</heading>
+ <body>Don't forget me this weekend!</body>
+</note> \ No newline at end of file
diff --git a/merge-release.sh b/merge-release.sh
new file mode 100755
index 000000000..a87f9f4a1
--- /dev/null
+++ b/merge-release.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+if (( $# < 1 ));
+ then
+ echo "USAGE: ./merge-release.sh 0.76.0"
+ exit 1
+fi
+
+die() { echo "$*" 1>&2 ; exit 1; }
+
+v=$1
+git merge "release-${v}" || die;
+git push || die;
+
+git checkout stable || die;
+git reset --hard "v${v}" || die;
+git push -f || die;
+
+git checkout master || die;
+
+ git subtree push --prefix=docs/ docs-local "tempv${v}";
+
+
diff --git a/metrics/metrics.go b/metrics/metrics.go
new file mode 100644
index 000000000..c57b1177d
--- /dev/null
+++ b/metrics/metrics.go
@@ -0,0 +1,293 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package metrics provides simple metrics tracking features.
+package metrics
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/compare"
+ "github.com/gohugoio/hugo/helpers"
+)
+
+// The Provider interface defines an interface for measuring metrics.
+type Provider interface {
+ // MeasureSince adds a measurement for key to the metric store.
+ // Used with defer and time.Now().
+ MeasureSince(key string, start time.Time)
+
+ // WriteMetrics will write a summary of the metrics to w.
+ WriteMetrics(w io.Writer)
+
+ // TrackValue tracks the value for diff calculations etc.
+ TrackValue(key string, value any, cached bool)
+
+ // Reset clears the metric store.
+ Reset()
+}
+
+type diff struct {
+ baseline any
+ count int
+ simSum int
+}
+
+func (d *diff) add(v any) *diff {
+ if types.IsNil(d.baseline) {
+ d.baseline = v
+ d.count = 1
+ d.simSum = 100 // If we get only one it is very cache friendly.
+ return d
+ }
+ adder := howSimilar(v, d.baseline)
+ d.simSum += adder
+ d.count++
+
+ return d
+}
+
+// Store provides storage for a set of metrics.
+type Store struct {
+ calculateHints bool
+ metrics map[string][]time.Duration
+ mu sync.Mutex
+ diffs map[string]*diff
+ diffmu sync.Mutex
+ cached map[string]int
+ cachedmu sync.Mutex
+}
+
+// NewProvider returns a new instance of a metric store.
+func NewProvider(calculateHints bool) Provider {
+ return &Store{
+ calculateHints: calculateHints,
+ metrics: make(map[string][]time.Duration),
+ diffs: make(map[string]*diff),
+ cached: make(map[string]int),
+ }
+}
+
+// Reset clears the metrics store.
+func (s *Store) Reset() {
+ s.mu.Lock()
+ s.metrics = make(map[string][]time.Duration)
+ s.mu.Unlock()
+
+ s.diffmu.Lock()
+ s.diffs = make(map[string]*diff)
+ s.diffmu.Unlock()
+
+ s.cachedmu.Lock()
+ s.cached = make(map[string]int)
+ s.cachedmu.Unlock()
+}
+
+// TrackValue tracks the value for diff calculations etc.
+func (s *Store) TrackValue(key string, value any, cached bool) {
+ if !s.calculateHints {
+ return
+ }
+
+ s.diffmu.Lock()
+ d, found := s.diffs[key]
+
+ if !found {
+ d = &diff{}
+ s.diffs[key] = d
+ }
+
+ d.add(value)
+ s.diffmu.Unlock()
+
+ if cached {
+ s.cachedmu.Lock()
+ s.cached[key] = s.cached[key] + 1
+ s.cachedmu.Unlock()
+ }
+}
+
+// MeasureSince adds a measurement for key to the metric store.
+func (s *Store) MeasureSince(key string, start time.Time) {
+ s.mu.Lock()
+ s.metrics[key] = append(s.metrics[key], time.Since(start))
+ s.mu.Unlock()
+}
+
+// WriteMetrics writes a summary of the metrics to w.
+func (s *Store) WriteMetrics(w io.Writer) {
+ s.mu.Lock()
+
+ results := make([]result, len(s.metrics))
+
+ var i int
+ for k, v := range s.metrics {
+ var sum time.Duration
+ var max time.Duration
+
+ diff, found := s.diffs[k]
+
+ cacheFactor := 0
+ if found {
+ cacheFactor = int(math.Floor(float64(diff.simSum) / float64(diff.count)))
+ }
+
+ for _, d := range v {
+ sum += d
+ if d > max {
+ max = d
+ }
+ }
+
+ avg := time.Duration(int(sum) / len(v))
+ cacheCount := s.cached[k]
+
+ results[i] = result{key: k, count: len(v), max: max, sum: sum, avg: avg, cacheCount: cacheCount, cacheFactor: cacheFactor}
+ i++
+ }
+
+ s.mu.Unlock()
+
+ if s.calculateHints {
+ fmt.Fprintf(w, " %13s %12s %12s %9s %7s %6s %5s %s\n", "cumulative", "average", "maximum", "cache", "percent", "cached", "total", "")
+ fmt.Fprintf(w, " %13s %12s %12s %9s %7s %6s %5s %s\n", "duration", "duration", "duration", "potential", "cached", "count", "count", "template")
+ fmt.Fprintf(w, " %13s %12s %12s %9s %7s %6s %5s %s\n", "----------", "--------", "--------", "---------", "-------", "------", "-----", "--------")
+ } else {
+ fmt.Fprintf(w, " %13s %12s %12s %5s %s\n", "cumulative", "average", "maximum", "", "")
+ fmt.Fprintf(w, " %13s %12s %12s %5s %s\n", "duration", "duration", "duration", "count", "template")
+ fmt.Fprintf(w, " %13s %12s %12s %5s %s\n", "----------", "--------", "--------", "-----", "--------")
+
+ }
+
+ sort.Sort(bySum(results))
+ for _, v := range results {
+ if s.calculateHints {
+ fmt.Fprintf(w, " %13s %12s %12s %9d %7.f %6d %5d %s\n", v.sum, v.avg, v.max, v.cacheFactor, float64(v.cacheCount)/float64(v.count)*100, v.cacheCount, v.count, v.key)
+ } else {
+ fmt.Fprintf(w, " %13s %12s %12s %5d %s\n", v.sum, v.avg, v.max, v.count, v.key)
+ }
+ }
+}
+
+// A result represents the calculated results for a given metric.
+type result struct {
+ key string
+ count int
+ cacheCount int
+ cacheFactor int
+ sum time.Duration
+ max time.Duration
+ avg time.Duration
+}
+
+type bySum []result
+
+func (b bySum) Len() int { return len(b) }
+func (b bySum) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+func (b bySum) Less(i, j int) bool { return b[i].sum > b[j].sum }
+
+// howSimilar is a naive diff implementation that returns
+// a number between 0-100 indicating how similar a and b are.
+func howSimilar(a, b any) int {
+ t1, t2 := reflect.TypeOf(a), reflect.TypeOf(b)
+ if t1 != t2 {
+ return 0
+ }
+
+ if t1.Comparable() && t2.Comparable() {
+ if a == b {
+ return 100
+ }
+ }
+
+ as, ok1 := types.TypeToString(a)
+ bs, ok2 := types.TypeToString(b)
+
+ if ok1 && ok2 {
+ return howSimilarStrings(as, bs)
+ }
+
+ if ok1 != ok2 {
+ return 0
+ }
+
+ e1, ok1 := a.(compare.Eqer)
+ e2, ok2 := b.(compare.Eqer)
+ if ok1 && ok2 && e1.Eq(e2) {
+ return 100
+ }
+
+ pe1, pok1 := a.(compare.ProbablyEqer)
+ pe2, pok2 := b.(compare.ProbablyEqer)
+ if pok1 && pok2 && pe1.ProbablyEq(pe2) {
+ return 90
+ }
+
+ h1, h2 := helpers.HashString(a), helpers.HashString(b)
+ if h1 == h2 {
+ return 100
+ }
+ return 0
+}
+
+// howSimilar is a naive diff implementation that returns
+// a number between 0-100 indicating how similar a and b are.
+// 100 is when all words in a also exists in b.
+func howSimilarStrings(a, b string) int {
+ if a == b {
+ return 100
+ }
+
+ // Give some weight to the word positions.
+ const partitionSize = 4
+
+ af, bf := strings.Fields(a), strings.Fields(b)
+ if len(bf) > len(af) {
+ af, bf = bf, af
+ }
+
+ m1 := make(map[string]bool)
+ for i, x := range bf {
+ partition := partition(i, partitionSize)
+ key := x + "/" + strconv.Itoa(partition)
+ m1[key] = true
+ }
+
+ common := 0
+ for i, x := range af {
+ partition := partition(i, partitionSize)
+ key := x + "/" + strconv.Itoa(partition)
+ if m1[key] {
+ common++
+ }
+ }
+
+ if common == 0 && common == len(af) {
+ return 100
+ }
+
+ return int(math.Floor((float64(common) / float64(len(af)) * 100)))
+}
+
+func partition(d, scale int) int {
+ return int(math.Floor((float64(d) / float64(scale)))) * scale
+}
diff --git a/metrics/metrics_test.go b/metrics/metrics_test.go
new file mode 100644
index 000000000..6e799a393
--- /dev/null
+++ b/metrics/metrics_test.go
@@ -0,0 +1,69 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metrics
+
+import (
+ "html/template"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/page"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestSimilarPercentage(t *testing.T) {
+ c := qt.New(t)
+
+ sentence := "this is some words about nothing, Hugo!"
+ words := strings.Fields(sentence)
+ for i, j := 0, len(words)-1; i < j; i, j = i+1, j-1 {
+ words[i], words[j] = words[j], words[i]
+ }
+ sentenceReversed := strings.Join(words, " ")
+
+ c.Assert(howSimilar("Hugo Rules", "Hugo Rules"), qt.Equals, 100)
+ c.Assert(howSimilar("Hugo Rules", "Hugo Rocks"), qt.Equals, 50)
+ c.Assert(howSimilar("The Hugo Rules", "The Hugo Rocks"), qt.Equals, 66)
+ c.Assert(howSimilar("The Hugo Rules", "The Hugo"), qt.Equals, 66)
+ c.Assert(howSimilar("The Hugo", "The Hugo Rules"), qt.Equals, 66)
+ c.Assert(howSimilar("Totally different", "Not Same"), qt.Equals, 0)
+ c.Assert(howSimilar(sentence, sentenceReversed), qt.Equals, 14)
+ c.Assert(howSimilar(template.HTML("Hugo Rules"), template.HTML("Hugo Rules")), qt.Equals, 100)
+ c.Assert(howSimilar(map[string]any{"a": 32, "b": 33}, map[string]any{"a": 32, "b": 33}), qt.Equals, 100)
+ c.Assert(howSimilar(map[string]any{"a": 32, "b": 33}, map[string]any{"a": 32, "b": 34}), qt.Equals, 0)
+ c.Assert(howSimilar("\n", ""), qt.Equals, 100)
+}
+
+type testStruct struct {
+ Name string
+}
+
+func TestSimilarPercentageNonString(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(howSimilar(page.NopPage, page.NopPage), qt.Equals, 100)
+ c.Assert(howSimilar(page.Pages{}, page.Pages{}), qt.Equals, 90)
+ c.Assert(howSimilar(testStruct{Name: "A"}, testStruct{Name: "B"}), qt.Equals, 0)
+ c.Assert(howSimilar(testStruct{Name: "A"}, testStruct{Name: "A"}), qt.Equals, 100)
+}
+
+func BenchmarkHowSimilar(b *testing.B) {
+ s1 := "Hugo is cool and " + strings.Repeat("fun ", 10) + "!"
+ s2 := "Hugo is cool and " + strings.Repeat("cool ", 10) + "!"
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ howSimilar(s1, s2)
+ }
+}
diff --git a/minifiers/config.go b/minifiers/config.go
new file mode 100644
index 000000000..233f53c27
--- /dev/null
+++ b/minifiers/config.go
@@ -0,0 +1,131 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package minifiers
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/docshelper"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/spf13/cast"
+
+ "github.com/mitchellh/mapstructure"
+ "github.com/tdewolff/minify/v2/css"
+ "github.com/tdewolff/minify/v2/html"
+ "github.com/tdewolff/minify/v2/js"
+ "github.com/tdewolff/minify/v2/json"
+ "github.com/tdewolff/minify/v2/svg"
+ "github.com/tdewolff/minify/v2/xml"
+)
+
+var defaultTdewolffConfig = tdewolffConfig{
+ HTML: html.Minifier{
+ KeepDocumentTags: true,
+ KeepConditionalComments: true,
+ KeepEndTags: true,
+ KeepDefaultAttrVals: true,
+ KeepWhitespace: false,
+ },
+ CSS: css.Minifier{
+ Precision: 0,
+ KeepCSS2: true,
+ },
+ JS: js.Minifier{},
+ JSON: json.Minifier{},
+ SVG: svg.Minifier{
+ KeepComments: false,
+ Precision: 0,
+ },
+ XML: xml.Minifier{
+ KeepWhitespace: false,
+ },
+}
+
+type tdewolffConfig struct {
+ HTML html.Minifier
+ CSS css.Minifier
+ JS js.Minifier
+ JSON json.Minifier
+ SVG svg.Minifier
+ XML xml.Minifier
+}
+
+type minifyConfig struct {
+ // Whether to minify the published output (the HTML written to /public).
+ MinifyOutput bool
+
+ DisableHTML bool
+ DisableCSS bool
+ DisableJS bool
+ DisableJSON bool
+ DisableSVG bool
+ DisableXML bool
+
+ Tdewolff tdewolffConfig
+}
+
+var defaultConfig = minifyConfig{
+ Tdewolff: defaultTdewolffConfig,
+}
+
+func decodeConfig(cfg config.Provider) (conf minifyConfig, err error) {
+ conf = defaultConfig
+
+ // May be set by CLI.
+ conf.MinifyOutput = cfg.GetBool("minifyOutput")
+
+ v := cfg.Get("minify")
+ if v == nil {
+ return
+ }
+
+ // Legacy.
+ if b, ok := v.(bool); ok {
+ conf.MinifyOutput = b
+ return
+ }
+
+ m := maps.ToStringMap(v)
+
+ // Handle upstream renames.
+ if td, found := m["tdewolff"]; found {
+ tdm := maps.ToStringMap(td)
+ for _, key := range []string{"css", "svg"} {
+ if v, found := tdm[key]; found {
+ vm := maps.ToStringMap(v)
+ if vv, found := vm["decimal"]; found {
+ vvi := cast.ToInt(vv)
+ if vvi > 0 {
+ vm["precision"] = vvi
+ }
+ }
+ }
+ }
+ }
+
+ err = mapstructure.WeakDecode(m, &conf)
+
+ if err != nil {
+ return
+ }
+
+ return
+}
+
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+ return docshelper.DocProvider{"config": map[string]any{"minify": parser.LowerCaseCamelJSONMarshaller{Value: defaultConfig}}}
+ }
+ docshelper.AddDocProviderFunc(docsProvider)
+}
diff --git a/minifiers/config_test.go b/minifiers/config_test.go
new file mode 100644
index 000000000..57f2e5659
--- /dev/null
+++ b/minifiers/config_test.go
@@ -0,0 +1,63 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package minifiers
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+)
+
+func TestConfig(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+
+ v.Set("minify", map[string]any{
+ "disablexml": true,
+ "tdewolff": map[string]any{
+ "html": map[string]any{
+ "keepwhitespace": false,
+ },
+ },
+ })
+
+ conf, err := decodeConfig(v)
+
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(conf.MinifyOutput, qt.Equals, false)
+
+ // explicitly set value
+ c.Assert(conf.Tdewolff.HTML.KeepWhitespace, qt.Equals, false)
+ // default value
+ c.Assert(conf.Tdewolff.HTML.KeepEndTags, qt.Equals, true)
+ c.Assert(conf.Tdewolff.CSS.KeepCSS2, qt.Equals, true)
+
+ // `enable` flags
+ c.Assert(conf.DisableHTML, qt.Equals, false)
+ c.Assert(conf.DisableXML, qt.Equals, true)
+}
+
+func TestConfigLegacy(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+
+ // This was a bool < Hugo v0.58.
+ v.Set("minify", true)
+
+ conf, err := decodeConfig(v)
+ c.Assert(err, qt.IsNil)
+ c.Assert(conf.MinifyOutput, qt.Equals, true)
+}
diff --git a/minifiers/minifiers.go b/minifiers/minifiers.go
new file mode 100644
index 000000000..5a5cec121
--- /dev/null
+++ b/minifiers/minifiers.go
@@ -0,0 +1,135 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package minifiers contains minifiers mapped to MIME types. This package is used
+// in both the resource transformation, i.e. resources.Minify, and in the publishing
+// chain.
+package minifiers
+
+import (
+ "io"
+ "regexp"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/transform"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/tdewolff/minify/v2"
+)
+
+// Client wraps a minifier.
+type Client struct {
+ // Whether output minification is enabled (HTML in /public)
+ MinifyOutput bool
+
+ m *minify.M
+}
+
+// Transformer returns a func that can be used in the transformer publishing chain.
+// TODO(bep) minify config etc
+func (m Client) Transformer(mediatype media.Type) transform.Transformer {
+ _, params, min := m.m.Match(mediatype.Type())
+ if min == nil {
+ // No minifier for this MIME type
+ return nil
+ }
+
+ return func(ft transform.FromTo) error {
+ // Note that the source io.Reader will already be buffered, but it implements
+ // the Bytes() method, which is recognized by the Minify library.
+ return min.Minify(m.m, ft.To(), ft.From(), params)
+ }
+}
+
+// Minify tries to minify the src into dst given a MIME type.
+func (m Client) Minify(mediatype media.Type, dst io.Writer, src io.Reader) error {
+ return m.m.Minify(mediatype.Type(), dst, src)
+}
+
+// noopMinifier implements minify.Minifier [1], but doesn't minify content. This means
+// that we can avoid missing minifiers for any MIME types in our minify.M, which
+// causes minify to return errors, while still allowing minification to be
+// disabled for specific types.
+//
+// [1]: https://pkg.go.dev/github.com/tdewolff/minify#Minifier
+type noopMinifier struct{}
+
+// Minify copies r into w without transformation.
+func (m noopMinifier) Minify(_ *minify.M, w io.Writer, r io.Reader, _ map[string]string) error {
+ _, err := io.Copy(w, r)
+ return err
+}
+
+// New creates a new Client with the provided MIME types as the mapping foundation.
+// The HTML minifier is also registered for additional HTML types (AMP etc.) in the
+// provided list of output formats.
+func New(mediaTypes media.Types, outputFormats output.Formats, cfg config.Provider) (Client, error) {
+ conf, err := decodeConfig(cfg)
+
+ m := minify.New()
+ if err != nil {
+ return Client{}, err
+ }
+
+ // We use the Type definition of the media types defined in the site if found.
+ addMinifier(m, mediaTypes, "css", getMinifier(conf, "css"))
+
+ addMinifier(m, mediaTypes, "js", getMinifier(conf, "js"))
+ m.AddRegexp(regexp.MustCompile("^(application|text)/(x-)?(java|ecma)script$"), getMinifier(conf, "js"))
+
+ addMinifier(m, mediaTypes, "json", getMinifier(conf, "json"))
+ m.AddRegexp(regexp.MustCompile(`^(application|text)/(x-|(ld|manifest)\+)?json$`), getMinifier(conf, "json"))
+
+ addMinifier(m, mediaTypes, "svg", getMinifier(conf, "svg"))
+
+ addMinifier(m, mediaTypes, "xml", getMinifier(conf, "xml"))
+
+ // HTML
+ addMinifier(m, mediaTypes, "html", getMinifier(conf, "html"))
+ for _, of := range outputFormats {
+ if of.IsHTML {
+ m.Add(of.MediaType.Type(), getMinifier(conf, "html"))
+ }
+ }
+
+ return Client{m: m, MinifyOutput: conf.MinifyOutput}, nil
+}
+
+// getMinifier returns the appropriate minify.MinifierFunc for the MIME
+// type suffix s, given the config c.
+func getMinifier(c minifyConfig, s string) minify.Minifier {
+ switch {
+ case s == "css" && !c.DisableCSS:
+ return &c.Tdewolff.CSS
+ case s == "js" && !c.DisableJS:
+ return &c.Tdewolff.JS
+ case s == "json" && !c.DisableJSON:
+ return &c.Tdewolff.JSON
+ case s == "svg" && !c.DisableSVG:
+ return &c.Tdewolff.SVG
+ case s == "xml" && !c.DisableXML:
+ return &c.Tdewolff.XML
+ case s == "html" && !c.DisableHTML:
+ return &c.Tdewolff.HTML
+ default:
+ return noopMinifier{}
+ }
+}
+
+func addMinifier(m *minify.M, mt media.Types, suffix string, min minify.Minifier) {
+ types := mt.BySuffix(suffix)
+ for _, t := range types {
+ m.Add(t.Type(), min)
+ }
+}
diff --git a/minifiers/minifiers_test.go b/minifiers/minifiers_test.go
new file mode 100644
index 000000000..1096ca2d1
--- /dev/null
+++ b/minifiers/minifiers_test.go
@@ -0,0 +1,220 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package minifiers
+
+import (
+ "bytes"
+ "encoding/json"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/output"
+ "github.com/tdewolff/minify/v2/html"
+)
+
+// TestNew verifies that a default-configured Client minifies each of the
+// registered media types, including the regexp-registered JS/JSON aliases.
+func TestNew(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ m, _ := New(media.DefaultTypes, output.DefaultFormats, v)
+
+ var rawJS string
+ var minJS string
+ rawJS = " var foo =1 ; foo ++ ; "
+ minJS = "var foo=1;foo++"
+
+ var rawJSON string
+ var minJSON string
+ rawJSON = " { \"a\" : 123 , \"b\":2, \"c\": 5 } "
+ minJSON = "{\"a\":123,\"b\":2,\"c\":5}"
+
+ for _, test := range []struct {
+ tp media.Type
+ rawString string
+ expectedMinString string
+ }{
+ {media.CSSType, " body { color: blue; } ", "body{color:blue}"},
+ {media.RSSType, " <hello> Hugo! </hello> ", "<hello>Hugo!</hello>"}, // RSS should be handled as XML
+ {media.JSONType, rawJSON, minJSON},
+ {media.JavascriptType, rawJS, minJS},
+ // JS Regex minifiers
+ {media.Type{MainType: "application", SubType: "ecmascript"}, rawJS, minJS},
+ {media.Type{MainType: "application", SubType: "javascript"}, rawJS, minJS},
+ {media.Type{MainType: "application", SubType: "x-javascript"}, rawJS, minJS},
+ {media.Type{MainType: "application", SubType: "x-ecmascript"}, rawJS, minJS},
+ {media.Type{MainType: "text", SubType: "ecmascript"}, rawJS, minJS},
+ {media.Type{MainType: "text", SubType: "javascript"}, rawJS, minJS},
+ {media.Type{MainType: "text", SubType: "x-javascript"}, rawJS, minJS},
+ {media.Type{MainType: "text", SubType: "x-ecmascript"}, rawJS, minJS},
+ // JSON Regex minifiers
+ {media.Type{MainType: "application", SubType: "json"}, rawJSON, minJSON},
+ {media.Type{MainType: "application", SubType: "x-json"}, rawJSON, minJSON},
+ {media.Type{MainType: "application", SubType: "ld+json"}, rawJSON, minJSON},
+ {media.Type{MainType: "text", SubType: "json"}, rawJSON, minJSON},
+ {media.Type{MainType: "text", SubType: "x-json"}, rawJSON, minJSON},
+ {media.Type{MainType: "text", SubType: "ld+json"}, rawJSON, minJSON},
+ } {
+ var b bytes.Buffer
+
+ c.Assert(m.Minify(test.tp, &b, strings.NewReader(test.rawString)), qt.IsNil)
+ c.Assert(b.String(), qt.Equals, test.expectedMinString)
+ }
+}
+
+// TestConfigureMinify verifies that per-type minify settings are honoured:
+// a tdewolff HTML option (keepwhitespace), a default minifier (CSS), and a
+// disabled type (XML) that must pass content through untouched.
+func TestConfigureMinify(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ v.Set("minify", map[string]any{
+ "disablexml": true,
+ "tdewolff": map[string]any{
+ "html": map[string]any{
+ "keepwhitespace": true,
+ },
+ },
+ })
+ m, _ := New(media.DefaultTypes, output.DefaultFormats, v)
+
+ for _, test := range []struct {
+ tp media.Type
+ rawString string
+ expectedMinString string
+ errorExpected bool
+ }{
+ {media.HTMLType, "<hello> Hugo! </hello>", "<hello> Hugo! </hello>", false}, // configured minifier
+ {media.CSSType, " body { color: blue; } ", "body{color:blue}", false}, // default minifier
+ {media.XMLType, " <hello> Hugo! </hello> ", " <hello> Hugo! </hello> ", false}, // disable Xml minification
+ } {
+ var b bytes.Buffer
+ // NOTE(review): no test case currently sets errorExpected, so the
+ // error branch below is dead; either add such a case or drop the flag.
+ if !test.errorExpected {
+ c.Assert(m.Minify(test.tp, &b, strings.NewReader(test.rawString)), qt.IsNil)
+ c.Assert(b.String(), qt.Equals, test.expectedMinString)
+ } else {
+ err := m.Minify(test.tp, &b, strings.NewReader(test.rawString))
+ c.Assert(err, qt.ErrorMatches, "minifier does not exist for mimetype")
+ }
+ }
+}
+
+// TestJSONRoundTrip checks that JSON minification is lossless: the
+// minified output must unmarshal to the same value as the input.
+func TestJSONRoundTrip(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ m, _ := New(media.DefaultTypes, output.DefaultFormats, v)
+
+ for _, test := range []string{`{
+ "glossary": {
+ "title": "example glossary",
+ "GlossDiv": {
+ "title": "S",
+ "GlossList": {
+ "GlossEntry": {
+ "ID": "SGML",
+ "SortAs": "SGML",
+ "GlossTerm": "Standard Generalized Markup Language",
+ "Acronym": "SGML",
+ "Abbrev": "ISO 8879:1986",
+ "GlossDef": {
+ "para": "A meta-markup language, used to create markup languages such as DocBook.",
+ "GlossSeeAlso": ["GML", "XML"]
+ },
+ "GlossSee": "markup"
+ }
+ }
+ }
+ }
+}`} {
+
+ var b bytes.Buffer
+ m1 := make(map[string]any)
+ m2 := make(map[string]any)
+ c.Assert(json.Unmarshal([]byte(test), &m1), qt.IsNil)
+ c.Assert(m.Minify(media.JSONType, &b, strings.NewReader(test)), qt.IsNil)
+ c.Assert(json.Unmarshal(b.Bytes(), &m2), qt.IsNil)
+ c.Assert(m1, qt.DeepEquals, m2)
+ }
+}
+
+// TestBugs pins regression cases for previously reported minifier issues
+// (see the linked GitHub issues inline).
+func TestBugs(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ m, _ := New(media.DefaultTypes, output.DefaultFormats, v)
+
+ for _, test := range []struct {
+ tp media.Type
+ rawString string
+ expectedMinString string
+ }{
+ // https://github.com/gohugoio/hugo/issues/5506
+ {media.CSSType, " body { color: rgba(000, 000, 000, 0.7); }", "body{color:rgba(0,0,0,.7)}"},
+ // https://github.com/gohugoio/hugo/issues/8332
+ {media.HTMLType, "<i class='fas fa-tags fa-fw'></i> Tags", `<i class='fas fa-tags fa-fw'></i> Tags`},
+ } {
+ var b bytes.Buffer
+
+ c.Assert(m.Minify(test.tp, &b, strings.NewReader(test.rawString)), qt.IsNil)
+ c.Assert(b.String(), qt.Equals, test.expectedMinString)
+ }
+}
+
+// Renamed to Precision in tdewolff/minify v2.7.0. Check that we still
+// accept the legacy "decimal" key and map it onto Precision.
+func TestDecodeConfigDecimalIsNowPrecision(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ v.Set("minify", map[string]any{
+ "disablexml": true,
+ "tdewolff": map[string]any{
+ "css": map[string]any{
+ "decimal": 3,
+ },
+ "svg": map[string]any{
+ "decimal": 3,
+ },
+ },
+ })
+
+ conf, err := decodeConfig(v)
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(conf.Tdewolff.CSS.Precision, qt.Equals, 3)
+
+}
+
+// Issue 9456: a user-set tdewolff HTML option must be merged with (not
+// replace) the other HTML defaults.
+func TestDecodeConfigKeepWhitespace(t *testing.T) {
+ c := qt.New(t)
+ v := config.NewWithTestDefaults()
+ v.Set("minify", map[string]any{
+ "tdewolff": map[string]any{
+ "html": map[string]any{
+ "keepEndTags": false,
+ },
+ },
+ })
+
+ conf, err := decodeConfig(v)
+
+ c.Assert(err, qt.IsNil)
+ // Only KeepEndTags was overridden; all other fields keep their defaults.
+ c.Assert(conf.Tdewolff.HTML, qt.DeepEquals,
+ html.Minifier{
+ KeepComments: false,
+ KeepConditionalComments: true,
+ KeepDefaultAttrVals: true,
+ KeepDocumentTags: true,
+ KeepEndTags: false,
+ KeepQuotes: false,
+ KeepWhitespace: false},
+ )
+
+}
diff --git a/modules/client.go b/modules/client.go
new file mode 100644
index 000000000..78ba9f5ae
--- /dev/null
+++ b/modules/client.go
@@ -0,0 +1,836 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package modules
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/hexec"
+
+ hglob "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gobwas/glob"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/rogpeppe/go-internal/module"
+
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "errors"
+
+ "github.com/spf13/afero"
+)
+
+// fileSeparator is the OS-specific path separator as a string.
+var fileSeparator = string(os.PathSeparator)
+
+// Status of the locally installed Go binary, recorded so resolution
+// failures can produce an instructional message.
+const (
+ goBinaryStatusOK goBinaryStatus = iota
+ goBinaryStatusNotFound
+ goBinaryStatusTooOld
+)
+
+// The "vendor" dir is reserved for Go Modules.
+const vendord = "_vendor"
+
+// Standard Go module metadata filenames in the working directory.
+const (
+ goModFilename = "go.mod"
+ goSumFilename = "go.sum"
+)
+
+// NewClient creates a new Client that can be used to manage the Hugo Components
+// in a given workingDir.
+// The Client will resolve the dependencies recursively, but needs the top
+// level imports to start out.
+func NewClient(cfg ClientConfig) *Client {
+ fs := cfg.Fs
+ // Go Modules mode is enabled iff a go.mod file exists in the working dir.
+ n := filepath.Join(cfg.WorkingDir, goModFilename)
+ goModEnabled, _ := afero.Exists(fs, n)
+ var goModFilename string
+ if goModEnabled {
+ goModFilename = n
+ }
+
+ // Environment for the "go" subcommands run by this client.
+ var env []string
+ mcfg := cfg.ModuleConfig
+
+ config.SetEnvVars(&env,
+ "PWD", cfg.WorkingDir,
+ "GO111MODULE", "on",
+ "GOPROXY", mcfg.Proxy,
+ "GOPRIVATE", mcfg.Private,
+ "GONOPROXY", mcfg.NoProxy,
+ "GOPATH", cfg.CacheDir,
+ "GOWORK", mcfg.Workspace, // Requires Go 1.18, see https://tip.golang.org/doc/go1.18
+ // GOCACHE was introduced in Go 1.15. This matches the location derived from GOPATH above.
+ "GOCACHE", filepath.Join(cfg.CacheDir, "pkg", "mod"),
+ )
+
+ logger := cfg.Logger
+ if logger == nil {
+ logger = loggers.NewWarningLogger()
+ }
+
+ // Glob of module paths that must never be vendored; errors are ignored
+ // here, leaving noVendor nil (i.e. everything may be vendored).
+ var noVendor glob.Glob
+ if cfg.ModuleConfig.NoVendor != "" {
+ noVendor, _ = hglob.GetGlob(hglob.NormalizePath(cfg.ModuleConfig.NoVendor))
+ }
+
+ return &Client{
+ fs: fs,
+ ccfg: cfg,
+ logger: logger,
+ noVendor: noVendor,
+ moduleConfig: mcfg,
+ environ: env,
+ GoModulesFilename: goModFilename,
+ }
+}
+
+// Client contains most of the API provided by this package.
+// It wraps the "go" command to manage Hugo module dependencies.
+type Client struct {
+ fs afero.Fs
+ logger loggers.Logger
+
+ // Module paths matching this glob are excluded from vendoring. May be nil.
+ noVendor glob.Glob
+
+ ccfg ClientConfig
+
+ // The top level module config
+ moduleConfig Config
+
+ // Environment variables used in "go get" etc.
+ environ []string
+
+ // Set when Go modules are initialized in the current repo, that is:
+ // a go.mod file exists.
+ GoModulesFilename string
+
+ // Set if we get a exec.ErrNotFound when running Go, which is most likely
+ // due to being run on a system without Go installed. We record it here
+ // so we can give an instructional error at the end if module/theme
+ // resolution fails.
+ goBinaryStatus goBinaryStatus
+}
+
+// Graph writes a module dependency graph to the given writer, one
+// "owner dependency" pair per line, with replacements and disabled
+// modules annotated.
+func (c *Client) Graph(w io.Writer) error {
+ mc, coll := c.collect(true)
+ if coll.err != nil {
+ return coll.err
+ }
+ for _, module := range mc.AllModules {
+ // The project module has no owner; skip it.
+ if module.Owner() == nil {
+ continue
+ }
+
+ prefix := ""
+ if module.Disabled() {
+ prefix = "DISABLED "
+ }
+ dep := pathVersion(module.Owner()) + " " + pathVersion(module)
+ if replace := module.Replace(); replace != nil {
+ if replace.Version() != "" {
+ dep += " => " + pathVersion(replace)
+ } else {
+ // Local dir.
+ dep += " => " + replace.Dir()
+ }
+ }
+ fmt.Fprintln(w, prefix+dep)
+ }
+
+ return nil
+}
+
+// Tidy can be used to remove unused dependencies from go.mod and go.sum.
+// It is a no-op when the collector signals that tidying should be skipped.
+func (c *Client) Tidy() error {
+ tc, coll := c.collect(false)
+ if coll.err != nil {
+ return coll.err
+ }
+
+ if coll.skipTidy {
+ return nil
+ }
+
+ return c.tidy(tc.AllModules, false)
+}
+
+// Vendor writes all the module dependencies to a _vendor folder.
+//
+// Unlike Go, we support it for any level.
+//
+// We, by default, use the /_vendor folder first, if found. To disable,
+// run with
+// hugo --ignoreVendorPaths=".*"
+//
+// Given a module tree, Hugo will pick the first module for a given path,
+// meaning that if the top-level module is vendored, that will be the full
+// set of dependencies.
+func (c *Client) Vendor() error {
+ vendorDir := filepath.Join(c.ccfg.WorkingDir, vendord)
+ // Remove any previous vendor dir (only if it was created by us).
+ if err := c.rmVendorDir(vendorDir); err != nil {
+ return err
+ }
+ if err := c.fs.MkdirAll(vendorDir, 0755); err != nil {
+ return err
+ }
+
+ // Write the modules list to modules.txt.
+ //
+ // On the form:
+ //
+ // # github.com/alecthomas/chroma v0.6.3
+ //
+ // This is how "go mod vendor" does it. Go also lists
+ // the packages below it, but that is currently not applicable to us.
+ //
+ var modulesContent bytes.Buffer
+
+ tc, coll := c.collect(true)
+ if coll.err != nil {
+ return coll.err
+ }
+
+ for _, t := range tc.AllModules {
+ if t.Owner() == nil {
+ // This is the project.
+ continue
+ }
+
+ // Honour the noVendor glob from module config.
+ if !c.shouldVendor(t.Path()) {
+ continue
+ }
+
+ if !t.IsGoMod() && !t.Vendor() {
+ // We currently do not vendor components living in the
+ // theme directory, see https://github.com/gohugoio/hugo/issues/5993
+ continue
+ }
+
+ // See https://github.com/gohugoio/hugo/issues/8239
+ // This is an error situation. We need something to vendor.
+ if t.Mounts() == nil {
+ return fmt.Errorf("cannot vendor module %q, need at least one mount", t.Path())
+ }
+
+ fmt.Fprintln(&modulesContent, "# "+t.Path()+" "+t.Version())
+
+ dir := t.Dir()
+
+ // Copy each mounted source (file or directory) into the vendor tree.
+ for _, mount := range t.Mounts() {
+ sourceFilename := filepath.Join(dir, mount.Source)
+ targetFilename := filepath.Join(vendorDir, t.Path(), mount.Source)
+ fi, err := c.fs.Stat(sourceFilename)
+ if err != nil {
+ return fmt.Errorf("failed to vendor module: %w", err)
+ }
+
+ if fi.IsDir() {
+ if err := hugio.CopyDir(c.fs, sourceFilename, targetFilename, nil); err != nil {
+ return fmt.Errorf("failed to copy module to vendor dir: %w", err)
+ }
+ } else {
+ targetDir := filepath.Dir(targetFilename)
+
+ if err := c.fs.MkdirAll(targetDir, 0755); err != nil {
+ return fmt.Errorf("failed to make target dir: %w", err)
+ }
+
+ if err := hugio.CopyFile(c.fs, sourceFilename, targetFilename); err != nil {
+ return fmt.Errorf("failed to copy module file to vendor: %w", err)
+ }
+ }
+ }
+
+ // Include the resource cache if present.
+ resourcesDir := filepath.Join(dir, files.FolderResources)
+ _, err := c.fs.Stat(resourcesDir)
+ if err == nil {
+ if err := hugio.CopyDir(c.fs, resourcesDir, filepath.Join(vendorDir, t.Path(), files.FolderResources), nil); err != nil {
+ return fmt.Errorf("failed to copy resources to vendor dir: %w", err)
+ }
+ }
+
+ // Include the config directory if present.
+ configDir := filepath.Join(dir, "config")
+ _, err = c.fs.Stat(configDir)
+ if err == nil {
+ if err := hugio.CopyDir(c.fs, configDir, filepath.Join(vendorDir, t.Path(), "config"), nil); err != nil {
+ return fmt.Errorf("failed to copy config dir to vendor dir: %w", err)
+ }
+ }
+
+ // Also include any theme.toml or config.* files in the root.
+ // Missing files (e.g. no theme.toml) are silently skipped.
+ configFiles, _ := afero.Glob(c.fs, filepath.Join(dir, "config.*"))
+ configFiles = append(configFiles, filepath.Join(dir, "theme.toml"))
+ for _, configFile := range configFiles {
+ if err := hugio.CopyFile(c.fs, configFile, filepath.Join(vendorDir, t.Path(), filepath.Base(configFile))); err != nil {
+ if !os.IsNotExist(err) {
+ return err
+ }
+ }
+ }
+ }
+
+ // Only write modules.txt if something was actually vendored.
+ if modulesContent.Len() > 0 {
+ if err := afero.WriteFile(c.fs, filepath.Join(vendorDir, vendorModulesFilename), modulesContent.Bytes(), 0666); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// Get runs "go get" with the supplied arguments.
+//
+// With no args, or with a single "-u"/"-u=patch" flag, it expands to one
+// "go get" per configured module import (Go needs explicit module paths);
+// otherwise the args are passed through unchanged.
+func (c *Client) Get(args ...string) error {
+ if len(args) == 0 || (len(args) == 1 && strings.Contains(args[0], "-u")) {
+ update := len(args) != 0
+ patch := update && (args[0] == "-u=patch") //
+
+ // We need to be explicit about the modules to get.
+ for _, m := range c.moduleConfig.Imports {
+ if !isProbablyModule(m.Path) {
+ // Skip themes/components stored below /themes etc.
+ // There may be false positives in the above, but those
+ // should be rare, and they will fail below with an
+ // "cannot find module providing ..." message.
+ continue
+ }
+ // NOTE(review): this intentionally shadows the outer args with a
+ // fresh per-module argument list.
+ var args []string
+
+ if update && !patch {
+ args = append(args, "-u")
+ } else if update && patch {
+ args = append(args, "-u=patch")
+ }
+ args = append(args, m.Path)
+
+ if err := c.get(args...); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ }
+
+ return c.get(args...)
+}
+
+// get runs a single "go get", forcing the -d (download only) flag so Go
+// never tries to build or install packages on our behalf.
+func (c *Client) get(args ...string) error {
+ var hasD bool
+ for _, arg := range args {
+ if arg == "-d" {
+ hasD = true
+ break
+ }
+ }
+ if !hasD {
+ // go get without the -d flag does not make sense to us, as
+ // it will try to build and install go packages.
+ args = append([]string{"-d"}, args...)
+ }
+ if err := c.runGo(context.Background(), c.logger.Out(), append([]string{"get"}, args...)...); err != nil {
+ return fmt.Errorf("failed to get %q: %w", args, err)
+ }
+ return nil
+}
+
+// Init initializes this as a Go Module with the given path.
+// If path is empty, Go will try to guess.
+// If this succeeds, this project will be marked as Go Module.
+func (c *Client) Init(path string) error {
+ err := c.runGo(context.Background(), c.logger.Out(), "mod", "init", path)
+ if err != nil {
+ return fmt.Errorf("failed to init modules: %w", err)
+ }
+
+ // Record that Go modules are now enabled for this working dir.
+ c.GoModulesFilename = filepath.Join(c.ccfg.WorkingDir, goModFilename)
+
+ return nil
+}
+
+// verifyErrorDirRe extracts the offending directory from a
+// "dir has been modified (...)" message emitted by "go mod verify".
+var verifyErrorDirRe = regexp.MustCompile(`dir has been modified \((.*?)\)`)
+
+// Verify checks that the dependencies of the current module,
+// which are stored in a local downloaded source cache, have not been
+// modified since being downloaded.
+// If clean is set, modified module dirs reported by the first verify run
+// are removed from the cache and verification is attempted once more.
+func (c *Client) Verify(clean bool) error {
+ // TODO(bep) add path to mod clean
+ err := c.runVerify()
+ if err != nil {
+ if clean {
+ m := verifyErrorDirRe.FindAllStringSubmatch(err.Error(), -1)
+ if m != nil {
+ for i := 0; i < len(m); i++ {
+ c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1])
+ if err != nil {
+ return err
+ }
+ fmt.Println("Cleaned", c)
+ }
+ }
+ // Try to verify it again.
+ err = c.runVerify()
+ }
+ }
+ return err
+}
+
+// Clean removes downloaded module cache dirs for modules matching the
+// given glob pattern (all modules when pattern is empty). The main module
+// and replaced modules are never removed.
+func (c *Client) Clean(pattern string) error {
+ mods, err := c.listGoMods()
+ if err != nil {
+ return err
+ }
+
+ var g glob.Glob
+
+ if pattern != "" {
+ var err error
+ g, err = hglob.GetGlob(pattern)
+ if err != nil {
+ return err
+ }
+ }
+
+ for _, m := range mods {
+ if m.Replace != nil || m.Main {
+ continue
+ }
+
+ if g != nil && !g.Match(m.Path) {
+ continue
+ }
+ // NOTE(review): only the err from the last iteration survives the
+ // loop; earlier per-module failures are silently dropped.
+ _, err = hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m.Dir)
+ if err == nil {
+ c.logger.Printf("hugo: cleaned module cache for %q", m.Path)
+ }
+ }
+ return err
+}
+
+// runVerify runs "go mod verify", discarding its stdout.
+func (c *Client) runVerify() error {
+ return c.runGo(context.Background(), ioutil.Discard, "mod", "verify")
+}
+
+// isProbablyModule reports whether path looks like a valid Go module path.
+func isProbablyModule(path string) bool {
+ return module.CheckPath(path) == nil
+}
+
+// listGoMods downloads and lists all Go modules in the current module
+// graph via "go mod download" and "go list -m -json all". Returns nil
+// when Go modules are not enabled or no module imports are configured.
+func (c *Client) listGoMods() (goModules, error) {
+ if c.GoModulesFilename == "" || !c.moduleConfig.hasModuleImport() {
+ return nil, nil
+ }
+
+ downloadModules := func(modules ...string) error {
+ args := []string{"mod", "download"}
+ args = append(args, modules...)
+ out := ioutil.Discard
+ err := c.runGo(context.Background(), out, args...)
+ if err != nil {
+ return fmt.Errorf("failed to download modules: %w", err)
+ }
+ return nil
+ }
+
+ if err := downloadModules(); err != nil {
+ return nil, err
+ }
+
+ // listAndDecodeModules streams the JSON objects emitted by
+ // "go list -m -json" and passes each decoded module to handle.
+ listAndDecodeModules := func(handle func(m *goModule) error, modules ...string) error {
+ b := &bytes.Buffer{}
+ args := []string{"list", "-m", "-json"}
+ if len(modules) > 0 {
+ args = append(args, modules...)
+ } else {
+ args = append(args, "all")
+ }
+ err := c.runGo(context.Background(), b, args...)
+ if err != nil {
+ return fmt.Errorf("failed to list modules: %w", err)
+ }
+
+ dec := json.NewDecoder(b)
+ for {
+ m := &goModule{}
+ if err := dec.Decode(m); err != nil {
+ if err == io.EOF {
+ break
+ }
+ return fmt.Errorf("failed to decode modules list: %w", err)
+ }
+
+ if err := handle(m); err != nil {
+ return err
+ }
+ }
+ return nil
+ }
+
+ var modules goModules
+ err := listAndDecodeModules(func(m *goModule) error {
+ modules = append(modules, m)
+ return nil
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ // From Go 1.17, go lazy loads transitive dependencies.
+ // That does not work for us.
+ // So, download these modules and update the Dir in the modules list.
+ var modulesToDownload []string
+ for _, m := range modules {
+ if m.Dir == "" {
+ modulesToDownload = append(modulesToDownload, fmt.Sprintf("%s@%s", m.Path, m.Version))
+ }
+ }
+
+ if len(modulesToDownload) > 0 {
+ if err := downloadModules(modulesToDownload...); err != nil {
+ return nil, err
+ }
+ err := listAndDecodeModules(func(m *goModule) error {
+ if mm := modules.GetByPath(m.Path); mm != nil {
+ mm.Dir = m.Dir
+ }
+ return nil
+ }, modulesToDownload...)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return modules, err
+}
+
+// rewriteGoMod rewrites the named module metadata file (go.mod or go.sum),
+// keeping only entries present in isGoMod. A nil rewrite result means no
+// change was needed and the file is left untouched.
+func (c *Client) rewriteGoMod(name string, isGoMod map[string]bool) error {
+ data, err := c.rewriteGoModRewrite(name, isGoMod)
+ if err != nil {
+ return err
+ }
+ if data != nil {
+ if err := afero.WriteFile(c.fs, filepath.Join(c.ccfg.WorkingDir, name), data, 0666); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// rewriteGoModRewrite computes the filtered contents of the named file
+// (go.mod or go.sum), dropping module lines not present in isGoMod
+// (keyed "path version"). It returns (nil, nil) when nothing changed or
+// the file does not exist.
+func (c *Client) rewriteGoModRewrite(name string, isGoMod map[string]bool) ([]byte, error) {
+ if name == goModFilename && c.GoModulesFilename == "" {
+ // Already checked.
+ return nil, nil
+ }
+
+ // go.mod and go.sum have different line formats; pick the right splitter.
+ modlineSplitter := getModlineSplitter(name == goModFilename)
+
+ b := &bytes.Buffer{}
+ f, err := c.fs.Open(filepath.Join(c.ccfg.WorkingDir, name))
+ if err != nil {
+ if os.IsNotExist(err) {
+ // It's been deleted.
+ return nil, nil
+ }
+ return nil, err
+ }
+ defer f.Close()
+
+ scanner := bufio.NewScanner(f)
+ var dirty bool
+
+ for scanner.Scan() {
+ line := scanner.Text()
+ var doWrite bool
+
+ if parts := modlineSplitter(line); parts != nil {
+ modname, modver := parts[0], parts[1]
+ // go.sum lines may reference the module's go.mod hash entry.
+ modver = strings.TrimSuffix(modver, "/"+goModFilename)
+ modnameVer := modname + " " + modver
+ doWrite = isGoMod[modnameVer]
+ } else {
+ // Not a module line (comment, require block marker, etc.): keep it.
+ doWrite = true
+ }
+
+ if doWrite {
+ fmt.Fprintln(b, line)
+ } else {
+ dirty = true
+ }
+ }
+
+ if !dirty {
+ // Nothing changed
+ return nil, nil
+ }
+
+ return b.Bytes(), nil
+}
+
+// rmVendorDir removes vendorDir, but only if it contains the modules.txt
+// marker we write ourselves — refusing to delete a _vendor dir created by
+// something else.
+func (c *Client) rmVendorDir(vendorDir string) error {
+ modulestxt := filepath.Join(vendorDir, vendorModulesFilename)
+
+ if _, err := c.fs.Stat(vendorDir); err != nil {
+ // Nothing to remove.
+ return nil
+ }
+
+ _, err := c.fs.Stat(modulestxt)
+ if err != nil {
+ // If we have a _vendor dir without modules.txt it sounds like
+ // a _vendor dir created by others.
+ return errors.New("found _vendor dir without modules.txt, skip delete")
+ }
+
+ return c.fs.RemoveAll(vendorDir)
+}
+
+// runGo executes the "go" binary with the given args in the client's
+// working dir and environment, writing stdout to the given writer.
+// A missing or too-old Go binary is recorded in goBinaryStatus and
+// reported as success so resolution can fail later with a better message.
+func (c *Client) runGo(
+ ctx context.Context,
+ stdout io.Writer,
+ args ...string) error {
+ // A previous run already determined Go is unusable; don't retry.
+ if c.goBinaryStatus != 0 {
+ return nil
+ }
+
+ stderr := new(bytes.Buffer)
+
+ argsv := collections.StringSliceToInterfaceSlice(args)
+ argsv = append(argsv, hexec.WithEnviron(c.environ))
+ argsv = append(argsv, hexec.WithStderr(io.MultiWriter(stderr, os.Stderr)))
+ argsv = append(argsv, hexec.WithStdout(stdout))
+ argsv = append(argsv, hexec.WithDir(c.ccfg.WorkingDir))
+ argsv = append(argsv, hexec.WithContext(ctx))
+
+ cmd, err := c.ccfg.Exec.New("go", argsv...)
+ if err != nil {
+ return err
+ }
+
+ if err := cmd.Run(); err != nil {
+ if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
+ c.goBinaryStatus = goBinaryStatusNotFound
+ return nil
+ }
+
+ if strings.Contains(stderr.String(), "invalid version: unknown revision") {
+ // See https://github.com/gohugoio/hugo/issues/6825
+ c.logger.Println(`An unknown revision most likely means that someone has deleted the remote ref (e.g. with a force push to GitHub).
+To resolve this, you need to manually edit your go.mod file and replace the version for the module in question with a valid ref.
+
+The easiest is to just enter a valid branch name there, e.g. master, which would be what you put in place of 'v0.5.1' in the example below.
+
+require github.com/gohugoio/hugo-mod-jslibs/instantpage v0.5.1
+
+If you then run 'hugo mod graph' it should resolve itself to the most recent version (or commit if no semver versions are available).`)
+ }
+
+ _, ok := err.(*exec.ExitError)
+ if !ok {
+ return fmt.Errorf("failed to execute 'go %v': %s %T", args, err, err)
+ }
+
+ // Too old Go version
+ if strings.Contains(stderr.String(), "flag provided but not defined") {
+ c.goBinaryStatus = goBinaryStatusTooOld
+ return nil
+ }
+
+ return fmt.Errorf("go command failed: %s", stderr)
+
+ }
+
+ return nil
+}
+
+// tidy rewrites go.mod (and, unless goModOnly is set, go.sum) to keep
+// only entries for modules in mods that are real Go modules.
+func (c *Client) tidy(mods Modules, goModOnly bool) error {
+ isGoMod := make(map[string]bool)
+ for _, m := range mods {
+ // The project module itself is never listed in go.mod.
+ if m.Owner() == nil {
+ continue
+ }
+ if m.IsGoMod() {
+ // Matching the format in go.mod
+ pathVer := m.Path() + " " + m.Version()
+ isGoMod[pathVer] = true
+ }
+ }
+
+ if err := c.rewriteGoMod(goModFilename, isGoMod); err != nil {
+ return err
+ }
+
+ if goModOnly {
+ return nil
+ }
+
+ if err := c.rewriteGoMod(goSumFilename, isGoMod); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// shouldVendor reports whether the module at path may be vendored,
+// i.e. it is not excluded by the configured noVendor glob.
+func (c *Client) shouldVendor(path string) bool {
+ return c.noVendor == nil || !c.noVendor.Match(path)
+}
+
+// createThemeDirname resolves modulePath to a directory below themesDir.
+// Absolute paths and paths escaping themesDir are only allowed for the
+// project module itself (isProjectMod).
+func (c *Client) createThemeDirname(modulePath string, isProjectMod bool) (string, error) {
+ invalid := fmt.Errorf("invalid module path %q; must be relative to themesDir when defined outside of the project", modulePath)
+
+ modulePath = filepath.Clean(modulePath)
+ if filepath.IsAbs(modulePath) {
+ if isProjectMod {
+ return modulePath, nil
+ }
+ return "", invalid
+ }
+
+ // Reject relative paths (e.g. "../..") that would escape themesDir.
+ moduleDir := filepath.Join(c.ccfg.ThemesDir, modulePath)
+ if !isProjectMod && !strings.HasPrefix(moduleDir, c.ccfg.ThemesDir) {
+ return "", invalid
+ }
+ return moduleDir, nil
+}
+
+// ClientConfig configures the module Client.
+type ClientConfig struct {
+ Fs afero.Fs
+ // Logger used by the Client; a warning logger is created when nil.
+ Logger loggers.Logger
+
+ // If set, it will be run before we do any duplicate checks for modules
+ // etc.
+ HookBeforeFinalize func(m *ModulesConfig) error
+
+ // Ignore any _vendor directory for module paths matching the given pattern.
+ // This can be nil.
+ IgnoreVendor glob.Glob
+
+ // Absolute path to the project dir.
+ WorkingDir string
+
+ // Absolute path to the project's themes dir.
+ ThemesDir string
+
+ // Eg. "production"
+ Environment string
+
+ // Exec runs the external "go" commands.
+ Exec *hexec.Exec
+
+ CacheDir string // Module cache
+ ModuleConfig Config
+}
+
+// shouldIgnoreVendor reports whether any _vendor dir for the module at
+// path should be ignored, per the IgnoreVendor glob.
+func (c ClientConfig) shouldIgnoreVendor(path string) bool {
+ return c.IgnoreVendor != nil && c.IgnoreVendor.Match(path)
+}
+
+// goBinaryStatus describes whether a usable Go binary was found.
+type goBinaryStatus int
+
+// goModule mirrors the JSON emitted by "go list -m -json".
+type goModule struct {
+ Path string // module path
+ Version string // module version
+ Versions []string // available module versions (with -versions)
+ Replace *goModule // replaced by this module
+ Time *time.Time // time version was created
+ Update *goModule // available update, if any (with -u)
+ Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
+ Dir string // directory holding files for this module, if any
+ GoMod string // path to go.mod file for this module, if any
+ Error *goModuleError // error loading module
+}
+
+// goModuleError is the error payload of a goModule listing.
+type goModuleError struct {
+ Err string // the error itself
+}
+
+// goModules is a list of modules as reported by the go tool.
+type goModules []*goModule
+
+// GetByPath returns the module with the given path (case-insensitive),
+// or nil if not found.
+func (modules goModules) GetByPath(p string) *goModule {
+ if modules == nil {
+ return nil
+ }
+
+ for _, m := range modules {
+ if strings.EqualFold(p, m.Path) {
+ return m
+ }
+ }
+
+ return nil
+}
+
+// GetMain returns the main module, or nil if none is marked as such.
+func (modules goModules) GetMain() *goModule {
+ for _, m := range modules {
+ if m.Main {
+ return m
+ }
+ }
+
+ return nil
+}
+
+// getModlineSplitter returns a function that splits a go.mod (isGoMod) or
+// go.sum line into [path, version] fields, or nil when the line is not a
+// module entry.
+func getModlineSplitter(isGoMod bool) func(line string) []string {
+ if isGoMod {
+ return func(line string) []string {
+ // The "require (" block opener itself is not a module entry.
+ if strings.HasPrefix(line, "require (") {
+ return nil
+ }
+ // Module lines are either "require path ver" or tab-indented
+ // entries within a require block.
+ if !strings.HasPrefix(line, "require") && !strings.HasPrefix(line, "\t") {
+ return nil
+ }
+ line = strings.TrimPrefix(line, "require")
+ line = strings.TrimSpace(line)
+ line = strings.TrimSuffix(line, "// indirect")
+
+ return strings.Fields(line)
+ }
+ }
+
+ // go.sum: every line is whitespace-separated fields.
+ return func(line string) []string {
+ return strings.Fields(line)
+ }
+}
+
+// pathVersion formats a module as "path@version", appending "+vendor"
+// for vendored modules; modules without a version yield just the path.
+func pathVersion(m Module) string {
+ versionStr := m.Version()
+ if m.Vendor() {
+ versionStr += "+vendor"
+ }
+ if versionStr == "" {
+ return m.Path()
+ }
+ return fmt.Sprintf("%s@%s", m.Path(), versionStr)
+}
diff --git a/modules/client_test.go b/modules/client_test.go
new file mode 100644
index 000000000..75e3c2b08
--- /dev/null
+++ b/modules/client_test.go
@@ -0,0 +1,213 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package modules
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync/atomic"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestClient exercises the module Client end to end (Init, Collect,
+// Graph, Vendor, Tidy) against real test modules; each subtest gets its
+// own temp working dir. Requires network access and an installed Go.
+func TestClient(t *testing.T) {
+ modName := "hugo-modules-basic-test"
+ modPath := "github.com/gohugoio/tests/" + modName
+ defaultImport := "modh2_2"
+ expect := `github.com/gohugoio/tests/hugo-modules-basic-test github.com/gohugoio/hugoTestModules1_darwin/modh2_2@v1.4.0
+github.com/gohugoio/hugoTestModules1_darwin/modh2_2@v1.4.0 github.com/gohugoio/hugoTestModules1_darwin/modh2_2_1v@v1.3.0
+github.com/gohugoio/hugoTestModules1_darwin/modh2_2@v1.4.0 github.com/gohugoio/hugoTestModules1_darwin/modh2_2_2@v1.3.0
+`
+
+ c := qt.New(t)
+ var clientID uint64 // we increment this to get each test in its own directory.
+
+ // newClient builds a Client in a fresh temp dir, letting withConfig
+ // mutate the ClientConfig before the module import imp is added.
+ newClient := func(c *qt.C, withConfig func(cfg *ClientConfig), imp string) (*Client, func()) {
+ atomic.AddUint64(&clientID, uint64(1))
+ workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, fmt.Sprintf("%s-%d", modName, clientID))
+ c.Assert(err, qt.IsNil)
+ themesDir := filepath.Join(workingDir, "themes")
+ err = os.Mkdir(themesDir, 0777)
+ c.Assert(err, qt.IsNil)
+
+ ccfg := ClientConfig{
+ Fs: hugofs.Os,
+ WorkingDir: workingDir,
+ CacheDir: filepath.Join(workingDir, "modcache"),
+ ThemesDir: themesDir,
+ Exec: hexec.New(security.DefaultConfig),
+ }
+
+ withConfig(&ccfg)
+ ccfg.ModuleConfig.Imports = []Import{{Path: "github.com/gohugoio/hugoTestModules1_darwin/" + imp}}
+ client := NewClient(ccfg)
+
+ return client, clean
+ }
+
+ c.Run("All", func(c *qt.C) {
+ client, clean := newClient(c, func(cfg *ClientConfig) {
+ cfg.ModuleConfig = DefaultModuleConfig
+ }, defaultImport)
+ defer clean()
+
+ // Test Init
+ c.Assert(client.Init(modPath), qt.IsNil)
+
+ // Test Collect
+ mc, err := client.Collect()
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(mc.AllModules), qt.Equals, 4)
+ for _, m := range mc.AllModules {
+ c.Assert(m, qt.Not(qt.IsNil))
+ }
+
+ // Test Graph
+ var graphb bytes.Buffer
+ c.Assert(client.Graph(&graphb), qt.IsNil)
+
+ c.Assert(graphb.String(), qt.Equals, expect)
+
+ // Test Vendor
+ c.Assert(client.Vendor(), qt.IsNil)
+ graphb.Reset()
+ c.Assert(client.Graph(&graphb), qt.IsNil)
+
+ expectVendored := `project github.com/gohugoio/hugoTestModules1_darwin/modh2_2@v1.4.0+vendor
+project github.com/gohugoio/hugoTestModules1_darwin/modh2_2_1v@v1.3.0+vendor
+project github.com/gohugoio/hugoTestModules1_darwin/modh2_2_2@v1.3.0+vendor
+`
+
+ c.Assert(graphb.String(), qt.Equals, expectVendored)
+
+ // Test Tidy
+ c.Assert(client.Tidy(), qt.IsNil)
+ })
+
+ c.Run("IgnoreVendor", func(c *qt.C) {
+ client, clean := newClient(
+ c, func(cfg *ClientConfig) {
+ cfg.ModuleConfig = DefaultModuleConfig
+ cfg.IgnoreVendor = globAll
+ }, defaultImport)
+ defer clean()
+
+ c.Assert(client.Init(modPath), qt.IsNil)
+ _, err := client.Collect()
+ c.Assert(err, qt.IsNil)
+ c.Assert(client.Vendor(), qt.IsNil)
+
+ // With vendoring ignored, the graph matches the unvendored expectation.
+ var graphb bytes.Buffer
+ c.Assert(client.Graph(&graphb), qt.IsNil)
+ c.Assert(graphb.String(), qt.Equals, expect)
+ })
+
+ c.Run("NoVendor", func(c *qt.C) {
+ mcfg := DefaultModuleConfig
+ mcfg.NoVendor = "**"
+ client, clean := newClient(
+ c, func(cfg *ClientConfig) {
+ cfg.ModuleConfig = mcfg
+ }, defaultImport)
+ defer clean()
+
+ c.Assert(client.Init(modPath), qt.IsNil)
+ _, err := client.Collect()
+ c.Assert(err, qt.IsNil)
+ c.Assert(client.Vendor(), qt.IsNil)
+
+ var graphb bytes.Buffer
+ c.Assert(client.Graph(&graphb), qt.IsNil)
+ c.Assert(graphb.String(), qt.Equals, expect)
+ })
+
+ c.Run("VendorClosest", func(c *qt.C) {
+ mcfg := DefaultModuleConfig
+ mcfg.VendorClosest = true
+
+ client, clean := newClient(
+ c, func(cfg *ClientConfig) {
+ cfg.ModuleConfig = mcfg
+ s := "github.com/gohugoio/hugoTestModules1_darwin/modh1_1v"
+ g, _ := glob.GetGlob(s)
+ cfg.IgnoreVendor = g
+ }, "modh1v")
+ defer clean()
+
+ c.Assert(client.Init(modPath), qt.IsNil)
+ _, err := client.Collect()
+ c.Assert(err, qt.IsNil)
+ c.Assert(client.Vendor(), qt.IsNil)
+
+ var graphb bytes.Buffer
+ c.Assert(client.Graph(&graphb), qt.IsNil)
+
+ c.Assert(graphb.String(), qt.Contains, "github.com/gohugoio/hugoTestModules1_darwin/modh1_1v@v1.3.0 github.com/gohugoio/hugoTestModules1_darwin/modh1_1_1v@v1.1.0+vendor")
+ })
+
+ // https://github.com/gohugoio/hugo/issues/7908
+ c.Run("createThemeDirname", func(c *qt.C) {
+ mcfg := DefaultModuleConfig
+ client, clean := newClient(
+ c, func(cfg *ClientConfig) {
+ cfg.ModuleConfig = mcfg
+ }, defaultImport)
+ defer clean()
+
+ dirname, err := client.createThemeDirname("foo", false)
+ c.Assert(err, qt.IsNil)
+ c.Assert(dirname, qt.Equals, filepath.Join(client.ccfg.ThemesDir, "foo"))
+
+ dirname, err = client.createThemeDirname("../../foo", true)
+ c.Assert(err, qt.IsNil)
+ c.Assert(dirname, qt.Equals, filepath.Join(client.ccfg.ThemesDir, "../../foo"))
+
+ dirname, err = client.createThemeDirname("../../foo", false)
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ absDir := filepath.Join(client.ccfg.WorkingDir, "..", "..")
+ dirname, err = client.createThemeDirname(absDir, true)
+ c.Assert(err, qt.IsNil)
+ c.Assert(dirname, qt.Equals, absDir)
+ dirname, err = client.createThemeDirname(absDir, false)
+ fmt.Println(dirname)
+ c.Assert(err, qt.Not(qt.IsNil))
+ })
+}
+
+// globAll matches every module path; used to ignore all vendor dirs in tests.
+var globAll, _ = glob.GetGlob("**")
+
+// TestGetModlineSplitter checks line parsing for both go.mod entries
+// (require prefix, tab-indented block entries, "// indirect" suffix)
+// and plain go.sum lines.
+func TestGetModlineSplitter(t *testing.T) {
+ c := qt.New(t)
+
+ gomodSplitter := getModlineSplitter(true)
+
+ c.Assert(gomodSplitter("\tgithub.com/BurntSushi/toml v0.3.1"), qt.DeepEquals, []string{"github.com/BurntSushi/toml", "v0.3.1"})
+ c.Assert(gomodSplitter("\tgithub.com/cpuguy83/go-md2man v1.0.8 // indirect"), qt.DeepEquals, []string{"github.com/cpuguy83/go-md2man", "v1.0.8"})
+ c.Assert(gomodSplitter("require ("), qt.IsNil)
+
+ gosumSplitter := getModlineSplitter(false)
+ c.Assert(gosumSplitter("github.com/BurntSushi/toml v0.3.1"), qt.DeepEquals, []string{"github.com/BurntSushi/toml", "v0.3.1"})
+}
diff --git a/modules/collect.go b/modules/collect.go
new file mode 100644
index 000000000..ff83f9ecc
--- /dev/null
+++ b/modules/collect.go
@@ -0,0 +1,722 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package modules
+
+import (
+ "bufio"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "time"
+
+ "github.com/bep/debounce"
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/rogpeppe/go-internal/module"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/spf13/afero"
+)
+
+// ErrNotExist is used to tag "module not found" errors (see wrapModuleNotFound).
+var ErrNotExist = errors.New("module does not exist")
+
+// vendorModulesFilename is the manifest stored below _vendor, mirroring
+// Go's vendor/modules.txt convention.
+const vendorModulesFilename = "modules.txt"
+
+// IsNotExist returns whether an error means that a module could not be found.
+// NOTE(review): this checks os.ErrNotExist, not the package-level ErrNotExist
+// above, so errors tagged via wrapModuleNotFound are NOT matched here —
+// confirm this asymmetry is intentional.
+func IsNotExist(err error) bool {
+ return errors.Is(err, os.ErrNotExist)
+}
+
+// CreateProjectModule creates modules from the given config.
+// This is used in tests only.
+func CreateProjectModule(cfg config.Provider) (Module, error) {
+ workingDir := cfg.GetString("workingDir")
+ var modConfig Config
+
+ // Build a pseudo-module for the project root, then fill in the default
+ // mounts (content, data, layouts, ...) from cfg.
+ mod := createProjectModule(nil, workingDir, modConfig)
+ if err := ApplyProjectConfigDefaults(cfg, mod); err != nil {
+ return nil, err
+ }
+
+ return mod, nil
+}
+
+// Collect collects the full module tree for this project: it resolves all
+// imports (Go modules, vendored modules and /themes components), marks the
+// active (non-disabled) subset, and runs the optional HookBeforeFinalize
+// callback between collection and finalization.
+func (h *Client) Collect() (ModulesConfig, error) {
+ mc, coll := h.collect(true)
+ if coll.err != nil {
+ return mc, coll.err
+ }
+
+ if err := (&mc).setActiveMods(h.logger); err != nil {
+ return mc, err
+ }
+
+ if h.ccfg.HookBeforeFinalize != nil {
+ if err := h.ccfg.HookBeforeFinalize(&mc); err != nil {
+ return mc, err
+ }
+ }
+
+ if err := (&mc).finalize(h.logger); err != nil {
+ return mc, err
+ }
+
+ return mc, nil
+}
+
+// collect runs the collector and packages the result. The tidy parameter
+// is currently unused: the tidy step is disabled (see issue 6115 below),
+// but the parameter is kept so callers need not change when it returns.
+// Any collection error is stored on the returned collector.
+func (h *Client) collect(tidy bool) (ModulesConfig, *collector) {
+ c := &collector{
+ Client: h,
+ }
+
+ c.collect()
+ if c.err != nil {
+ return ModulesConfig{}, c
+ }
+
+ // https://github.com/gohugoio/hugo/issues/6115
+ /*if !c.skipTidy && tidy {
+ if err := h.tidy(c.modules, true); err != nil {
+ c.err = err
+ return ModulesConfig{}, c
+ }
+ }*/
+
+ return ModulesConfig{
+ AllModules: c.modules,
+ GoModulesFilename: c.GoModulesFilename,
+ }, c
+}
+
+// ModulesConfig is the result of collecting the module tree.
+type ModulesConfig struct {
+ // All modules, including any disabled.
+ AllModules Modules
+
+ // All active modules.
+ ActiveModules Modules
+
+ // Set if this is a Go modules enabled project.
+ GoModulesFilename string
+}
+
+// setActiveMods populates ActiveModules with the non-disabled subset of
+// AllModules. Modules whose declared Hugo version constraint does not
+// match the running binary produce a warning but are not excluded.
+func (m *ModulesConfig) setActiveMods(logger loggers.Logger) error {
+ var activeMods Modules
+ for _, mod := range m.AllModules {
+ if !mod.Config().HugoVersion.IsValid() {
+ logger.Warnf(`Module %q is not compatible with this Hugo version; run "hugo mod graph" for more information.`, mod.Path())
+ }
+ if !mod.Disabled() {
+ activeMods = append(activeMods, mod)
+ }
+ }
+
+ m.ActiveModules = activeMods
+
+ return nil
+}
+
+// finalize post-processes every collected module; currently it only
+// removes duplicate mounts. The logger parameter is unused for now.
+func (m *ModulesConfig) finalize(logger loggers.Logger) error {
+ for _, mod := range m.AllModules {
+ m := mod.(*moduleAdapter)
+ m.mounts = filterUnwantedMounts(m.mounts)
+ }
+ return nil
+}
+
+// filterUnwantedMounts removes duplicate mounts (same lang/source/target
+// key), keeping the first occurrence. The input's backing array is reused
+// in place, so the argument must not be used after the call.
+func filterUnwantedMounts(mounts []Mount) []Mount {
+ // Remove duplicates
+ seen := make(map[string]bool)
+ tmp := mounts[:0]
+ for _, m := range mounts {
+ if !seen[m.key()] {
+ tmp = append(tmp, m)
+ }
+ seen[m.key()] = true
+ }
+ return tmp
+}
+
+// collected holds the mutable state accumulated during one collection run.
+type collected struct {
+ // Pick the first and prevent circular loops.
+ seen map[string]bool
+
+ // Maps module path to a _vendor dir. These values are fetched from
+ // _vendor/modules.txt, and the first (top-most) will win.
+ vendored map[string]vendoredModule
+
+ // Set if a Go modules enabled project.
+ gomods goModules
+
+ // Ordered list of collected modules, including Go Modules and theme
+ // components stored below /themes.
+ modules Modules
+}
+
+// Collects and creates a module tree.
+type collector struct {
+ *Client
+
+ // Store away any non-fatal error and return at the end.
+ err error
+
+ // Set to disable any Tidy operation in the end.
+ skipTidy bool
+
+ *collected
+}
+
+// initModules resets the collected state and, unless the project is fully
+// vendored (so Go itself is not needed), loads the Go module list.
+func (c *collector) initModules() error {
+ c.collected = &collected{
+ seen: make(map[string]bool),
+ vendored: make(map[string]vendoredModule),
+ gomods: goModules{},
+ }
+
+ // If both these are true, we don't even need Go installed to build.
+ if c.ccfg.IgnoreVendor == nil && c.isVendored(c.ccfg.WorkingDir) {
+ return nil
+ }
+
+ // We may fail later if we don't find the mods.
+ return c.loadModules()
+}
+
+// isSeen reports whether path was already visited, marking it as seen.
+// Paths are keyed without their /vN major-version suffix (see pathKey),
+// so only the first major version encountered wins.
+func (c *collector) isSeen(path string) bool {
+ key := pathKey(path)
+ if c.seen[key] {
+ return true
+ }
+ c.seen[key] = true
+ return false
+}
+
+// getVendoredDir looks up a module's vendored location, if any.
+func (c *collector) getVendoredDir(path string) (vendoredModule, bool) {
+ v, found := c.vendored[path]
+ return v, found
+}
+
+// add resolves a single module import into a moduleAdapter and appends it
+// to c.modules. Resolution order: the owner's _vendor dir (unless vendoring
+// is ignored for this path), the Go module list (optionally "go get"-ing
+// the module first), then project/themes/<module> as a classic theme.
+// On a "module not found" condition the error is stored in c.err and
+// (nil, nil) is returned, so callers must check for a nil result.
+func (c *collector) add(owner *moduleAdapter, moduleImport Import, disabled bool) (*moduleAdapter, error) {
+ var (
+ mod *goModule
+ moduleDir string
+ version string
+ vendored bool
+ )
+
+ modulePath := moduleImport.Path
+ var realOwner Module = owner
+
+ if !c.ccfg.shouldIgnoreVendor(modulePath) {
+ if err := c.collectModulesTXT(owner); err != nil {
+ return nil, err
+ }
+
+ // Try _vendor first.
+ var vm vendoredModule
+ vm, vendored = c.getVendoredDir(modulePath)
+ if vendored {
+ moduleDir = vm.Dir
+ realOwner = vm.Owner
+ version = vm.Version
+
+ if owner.projectMod {
+ // We want to keep the go.mod intact with the versions and all.
+ c.skipTidy = true
+ }
+
+ }
+ }
+
+ if moduleDir == "" {
+ var versionQuery string
+ mod = c.gomods.GetByPath(modulePath)
+ if mod != nil {
+ moduleDir = mod.Dir
+ versionQuery = mod.Version
+ }
+
+ if moduleDir == "" {
+ if c.GoModulesFilename != "" && isProbablyModule(modulePath) {
+ // Try to "go get" it and reload the module configuration.
+ if versionQuery == "" {
+ // See https://golang.org/ref/mod#version-queries
+ // This will select the latest release-version (not beta etc.).
+ versionQuery = "upgrade"
+ }
+ if err := c.Get(fmt.Sprintf("%s@%s", modulePath, versionQuery)); err != nil {
+ return nil, err
+ }
+ if err := c.loadModules(); err != nil {
+ return nil, err
+ }
+
+ mod = c.gomods.GetByPath(modulePath)
+ if mod != nil {
+ moduleDir = mod.Dir
+ }
+ }
+
+ // Fall back to project/themes/<mymodule>
+ if moduleDir == "" {
+ var err error
+ moduleDir, err = c.createThemeDirname(modulePath, owner.projectMod || moduleImport.pathProjectReplaced)
+ if err != nil {
+ c.err = err
+ return nil, nil
+ }
+ if found, _ := afero.Exists(c.fs, moduleDir); !found {
+ c.err = c.wrapModuleNotFound(fmt.Errorf(`module %q not found; either add it as a Hugo Module or store it in %q.`, modulePath, c.ccfg.ThemesDir))
+ return nil, nil
+ }
+ }
+ }
+ }
+
+ // Sanity check: whichever path we resolved must actually exist.
+ if found, _ := afero.Exists(c.fs, moduleDir); !found {
+ c.err = c.wrapModuleNotFound(fmt.Errorf("%q not found", moduleDir))
+ return nil, nil
+ }
+
+ if !strings.HasSuffix(moduleDir, fileSeparator) {
+ moduleDir += fileSeparator
+ }
+
+ ma := &moduleAdapter{
+ dir: moduleDir,
+ vendor: vendored,
+ disabled: disabled,
+ gomod: mod,
+ version: version,
+ // This may be the owner of the _vendor dir
+ owner: realOwner,
+ }
+
+ if mod == nil {
+ // Not a Go module; identify it by its import path.
+ ma.path = modulePath
+ }
+
+ if !moduleImport.IgnoreConfig {
+ if err := c.applyThemeConfig(ma); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := c.applyMounts(moduleImport, ma); err != nil {
+ return nil, err
+ }
+
+ c.modules = append(c.modules, ma)
+ return ma, nil
+}
+
+// addAndRecurse adds every import of owner depth-first, propagating the
+// disabled flag to transitive imports. Each module path is only visited
+// once (see isSeen).
+func (c *collector) addAndRecurse(owner *moduleAdapter, disabled bool) error {
+ moduleConfig := owner.Config()
+ if owner.projectMod {
+ // The project's own mounts must be applied before any imports.
+ if err := c.applyMounts(Import{}, owner); err != nil {
+ return err
+ }
+ }
+
+ for _, moduleImport := range moduleConfig.Imports {
+ disabled := disabled || moduleImport.Disable
+
+ if !c.isSeen(moduleImport.Path) {
+ tc, err := c.add(owner, moduleImport, disabled)
+ if err != nil {
+ return err
+ }
+ // tc is nil when add stored a soft "not found" error in c.err.
+ if tc == nil || moduleImport.IgnoreImports {
+ continue
+ }
+ if err := c.addAndRecurse(tc, disabled); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// applyMounts decides and stores the mount set for mod. Precedence: the
+// import's explicit mounts, then the module's own config, then (for
+// non-project modules) one default mount per component folder that exists
+// on disk. The result is normalized and augmented with common JS config
+// file mounts.
+func (c *collector) applyMounts(moduleImport Import, mod *moduleAdapter) error {
+ if moduleImport.NoMounts {
+ mod.mounts = nil
+ return nil
+ }
+
+ mounts := moduleImport.Mounts
+
+ modConfig := mod.Config()
+
+ if len(mounts) == 0 {
+ // Mounts not defined by the import.
+ mounts = modConfig.Mounts
+ }
+
+ if !mod.projectMod && len(mounts) == 0 {
+ // Create default mount points for every component folder that
+ // exists in the module.
+ for _, componentFolder := range files.ComponentFolders {
+ sourceDir := filepath.Join(mod.Dir(), componentFolder)
+ _, err := c.fs.Stat(sourceDir)
+ if err == nil {
+ mounts = append(mounts, Mount{
+ Source: componentFolder,
+ Target: componentFolder,
+ })
+ }
+ }
+ }
+
+ var err error
+ mounts, err = c.normalizeMounts(mod, mounts)
+ if err != nil {
+ return err
+ }
+
+ mounts, err = c.mountCommonJSConfig(mod, mounts)
+ if err != nil {
+ return err
+ }
+
+ mod.mounts = mounts
+ return nil
+}
+
+// applyThemeConfig reads a module's configuration and stores it on tc.
+// Sources, in increasing precedence: the legacy theme.toml information
+// file, a single config.<ext> file, and a config/ directory overlay.
+func (c *collector) applyThemeConfig(tc *moduleAdapter) error {
+ var (
+ configFilename string
+ themeCfg map[string]any
+ hasConfigFile bool
+ err error
+ )
+
+ // Viper supports more, but this is the sub-set supported by Hugo.
+ for _, configFormats := range config.ValidConfigFileExtensions {
+ configFilename = filepath.Join(tc.Dir(), "config."+configFormats)
+ hasConfigFile, _ = afero.Exists(c.fs, configFilename)
+ if hasConfigFile {
+ break
+ }
+ }
+
+ // The old theme information file.
+ themeTOML := filepath.Join(tc.Dir(), "theme.toml")
+
+ hasThemeTOML, _ := afero.Exists(c.fs, themeTOML)
+ if hasThemeTOML {
+ data, err := afero.ReadFile(c.fs, themeTOML)
+ if err != nil {
+ return err
+ }
+ // A malformed theme.toml is only a warning, not a failure.
+ themeCfg, err = metadecoders.Default.UnmarshalToMap(data, metadecoders.TOML)
+ if err != nil {
+ c.logger.Warnf("Failed to read module config for %q in %q: %s", tc.Path(), themeTOML, err)
+ } else {
+ maps.PrepareParams(themeCfg)
+ }
+ }
+
+ if hasConfigFile {
+ if configFilename != "" {
+ var err error
+ tc.cfg, err = config.FromFile(c.fs, configFilename)
+ if err != nil {
+ return err
+ }
+ }
+
+ tc.configFilenames = append(tc.configFilenames, configFilename)
+
+ }
+
+ // Also check for a config dir, which we overlay on top of the file configuration.
+ configDir := filepath.Join(tc.Dir(), "config")
+ dcfg, dirnames, err := config.LoadConfigFromDir(c.fs, configDir, c.ccfg.Environment)
+ if err != nil {
+ return err
+ }
+
+ if len(dirnames) > 0 {
+ tc.configFilenames = append(tc.configFilenames, dirnames...)
+
+ if hasConfigFile {
+ // Set will overwrite existing keys.
+ tc.cfg.Set("", dcfg.Get(""))
+ } else {
+ tc.cfg = dcfg
+ }
+ }
+
+ // NOTE: this local shadows the imported config package for the rest
+ // of the function.
+ config, err := decodeConfig(tc.cfg, c.moduleConfig.replacementsMap)
+ if err != nil {
+ return err
+ }
+
+ const oldVersionKey = "min_version"
+
+ if hasThemeTOML {
+
+ // Merge old with new
+ if minVersion, found := themeCfg[oldVersionKey]; found {
+ if config.HugoVersion.Min == "" {
+ config.HugoVersion.Min = hugo.VersionString(cast.ToString(minVersion))
+ }
+ }
+
+ if config.Params == nil {
+ config.Params = make(map[string]any)
+ }
+
+ // Legacy theme.toml keys become params; min_version was handled above.
+ for k, v := range themeCfg {
+ if k == oldVersionKey {
+ continue
+ }
+ config.Params[k] = v
+ }
+
+ }
+
+ tc.config = config
+
+ return nil
+}
+
+// collect drives the whole collection: init, recurse from the project
+// module's imports, then prepend the project module itself. Errors are
+// stored in c.err. A debounced "downloading modules" message is printed
+// if collection takes longer than 2 seconds.
+func (c *collector) collect() {
+ defer c.logger.PrintTimerIfDelayed(time.Now(), "hugo: collected modules")
+ d := debounce.New(2 * time.Second)
+ d(func() {
+ c.logger.Println("hugo: downloading modules …")
+ })
+ defer d(func() {})
+
+ if err := c.initModules(); err != nil {
+ c.err = err
+ return
+ }
+
+ projectMod := createProjectModule(c.gomods.GetMain(), c.ccfg.WorkingDir, c.moduleConfig)
+
+ if err := c.addAndRecurse(projectMod, false); err != nil {
+ c.err = err
+ return
+ }
+
+ // Add the project mod on top.
+ c.modules = append(Modules{projectMod}, c.modules...)
+}
+
+// isVendored reports whether dir contains a _vendor/modules.txt manifest.
+func (c *collector) isVendored(dir string) bool {
+ _, err := c.fs.Stat(filepath.Join(dir, vendord, vendorModulesFilename))
+ return err == nil
+}
+
+// collectModulesTXT parses owner's _vendor/modules.txt ("# path version"
+// per line) and records each vendored module in c.vendored. By default the
+// first (top-most) entry for a path wins; with VendorClosest the entry
+// nearest to the module using it wins instead.
+func (c *collector) collectModulesTXT(owner Module) error {
+ vendorDir := filepath.Join(owner.Dir(), vendord)
+ filename := filepath.Join(vendorDir, vendorModulesFilename)
+
+ f, err := c.fs.Open(filename)
+ if err != nil {
+ if os.IsNotExist(err) {
+ // A missing vendor manifest simply means "not vendored".
+ return nil
+ }
+
+ return err
+ }
+
+ defer f.Close()
+
+ scanner := bufio.NewScanner(f)
+
+ // NOTE(review): scanner.Err() is not checked after the loop, so a
+ // read error would be silently treated as end-of-file.
+ for scanner.Scan() {
+ // # github.com/alecthomas/chroma v0.6.3
+ line := scanner.Text()
+ line = strings.Trim(line, "# ")
+ line = strings.TrimSpace(line)
+ parts := strings.Fields(line)
+ if len(parts) != 2 {
+ return fmt.Errorf("invalid modules list: %q", filename)
+ }
+ path := parts[0]
+
+ shouldAdd := c.Client.moduleConfig.VendorClosest
+
+ if !shouldAdd {
+ if _, found := c.vendored[path]; !found {
+ shouldAdd = true
+ }
+ }
+
+ if shouldAdd {
+ c.vendored[path] = vendoredModule{
+ Owner: owner,
+ Dir: filepath.Join(vendorDir, path),
+ Version: parts[1],
+ }
+ }
+
+ }
+ return nil
+}
+
+// loadModules refreshes the cached Go module list.
+func (c *collector) loadModules() error {
+ modules, err := c.listGoMods()
+ if err != nil {
+ return err
+ }
+ c.gomods = modules
+ return nil
+}
+
+// Matches postcss.config.js etc.
+var commonJSConfigs = regexp.MustCompile(`(babel|postcss|tailwind)\.config\.js`)
+
+// mountCommonJSConfig adds mounts for package.json (and its Hugo variant)
+// plus common JS tool config files found in the module root — unless the
+// user already mounted something below the JS config folder, in which
+// case the user's mounts win and no defaults are added.
+func (c *collector) mountCommonJSConfig(owner *moduleAdapter, mounts []Mount) ([]Mount, error) {
+ for _, m := range mounts {
+ if strings.HasPrefix(m.Target, files.JsConfigFolderMountPrefix) {
+ // This follows the convention of the other component types (assets, content, etc.),
+ // if one or more is specified by the user, we skip the defaults.
+ // These mounts were added to Hugo in 0.75.
+ return mounts, nil
+ }
+ }
+
+ // Mount the common JS config files.
+ fis, err := afero.ReadDir(c.fs, owner.Dir())
+ if err != nil {
+ return mounts, err
+ }
+
+ for _, fi := range fis {
+ n := fi.Name()
+
+ should := n == files.FilenamePackageHugoJSON || n == files.FilenamePackageJSON
+ should = should || commonJSConfigs.MatchString(n)
+
+ if should {
+ mounts = append(mounts, Mount{
+ Source: n,
+ Target: filepath.Join(files.ComponentFolderAssets, files.FolderJSConfig, n),
+ })
+ }
+
+ }
+
+ return mounts, nil
+}
+
+// normalizeMounts validates and cleans the mounts for owner: both source
+// and target must be set, the source must exist on disk (mounts with a
+// missing source are silently dropped), and the target must land inside
+// one of the predefined component folders.
+func (c *collector) normalizeMounts(owner *moduleAdapter, mounts []Mount) ([]Mount, error) {
+ var out []Mount
+ dir := owner.Dir()
+
+ for _, mnt := range mounts {
+ errMsg := fmt.Sprintf("invalid module config for %q", owner.Path())
+
+ if mnt.Source == "" || mnt.Target == "" {
+ return nil, errors.New(errMsg + ": both source and target must be set")
+ }
+
+ mnt.Source = filepath.Clean(mnt.Source)
+ mnt.Target = filepath.Clean(mnt.Target)
+ var sourceDir string
+
+ if owner.projectMod && filepath.IsAbs(mnt.Source) {
+ // Abs paths in the main project is allowed.
+ sourceDir = mnt.Source
+ } else {
+ sourceDir = filepath.Join(dir, mnt.Source)
+ }
+
+ // Verify that Source exists
+ _, err := c.fs.Stat(sourceDir)
+ if err != nil {
+ // Missing source: skip the mount rather than failing.
+ continue
+ }
+
+ // Verify that target points to one of the predefined component dirs
+ targetBase := mnt.Target
+ idxPathSep := strings.Index(mnt.Target, string(os.PathSeparator))
+ if idxPathSep != -1 {
+ targetBase = mnt.Target[0:idxPathSep]
+ }
+ if !files.IsComponentFolder(targetBase) {
+ return nil, fmt.Errorf("%s: mount target must be one of: %v", errMsg, files.ComponentFolders)
+ }
+
+ out = append(out, mnt)
+ }
+
+ return out, nil
+}
+
+// wrapModuleNotFound tags err with ErrNotExist so callers can detect a
+// missing module, and — when the project is Go modules enabled — adds a
+// hint about the Go toolchain state (not installed / too old).
+func (c *collector) wrapModuleNotFound(err error) error {
+ // Use an explicit %s verb for the message: the previous form
+ // fmt.Errorf(err.Error()+": %w", ...) would misinterpret any '%' in
+ // the underlying error message as a format directive.
+ err = fmt.Errorf("%s: %w", err, ErrNotExist)
+ if c.GoModulesFilename == "" {
+ return err
+ }
+
+ baseMsg := "we found a go.mod file in your project, but"
+
+ switch c.goBinaryStatus {
+ case goBinaryStatusNotFound:
+ // %w (not %q) keeps the ErrNotExist chain intact for errors.Is.
+ return fmt.Errorf(baseMsg+" you need to install Go to use it. See https://golang.org/dl/ : %w", err)
+ case goBinaryStatusTooOld:
+ // Fixed typo: "need to a newer" -> "need a newer".
+ return fmt.Errorf(baseMsg+" you need a newer version of Go to use it. See https://golang.org/dl/ : %w", err)
+ }
+
+ return err
+}
+
+// vendoredModule describes one entry from a _vendor/modules.txt manifest.
+type vendoredModule struct {
+ Owner Module
+ Dir string
+ Version string
+}
+
+// createProjectModule builds the pseudo-module representing the project
+// itself. gomod may be nil when the project is not Go modules enabled, in
+// which case the fixed placeholder path "project" is used.
+func createProjectModule(gomod *goModule, workingDir string, conf Config) *moduleAdapter {
+ // Create a pseudo module for the main project.
+ var path string
+ if gomod == nil {
+ path = "project"
+ }
+
+ return &moduleAdapter{
+ path: path,
+ dir: workingDir,
+ gomod: gomod,
+ projectMod: true,
+ config: conf,
+ }
+}
+
+// In the first iteration of Hugo Modules, we do not support multiple
+// major versions running at the same time, so we pick the first (upper most).
+// We will investigate namespaces in future versions.
+// TODO(bep) add a warning when the above happens.
+func pathKey(p string) string {
+ // Strip any /vN major-version suffix and lower-case the result so
+ // deduplication is case-insensitive.
+ prefix, _, _ := module.SplitPathVersion(p)
+
+ return strings.ToLower(prefix)
+}
diff --git a/modules/collect_test.go b/modules/collect_test.go
new file mode 100644
index 000000000..9487c0a0e
--- /dev/null
+++ b/modules/collect_test.go
@@ -0,0 +1,51 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package modules
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestPathKey verifies major-version suffix stripping and lower-casing.
+func TestPathKey(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ in string
+ expect string
+ }{
+ {"github.com/foo", "github.com/foo"},
+ {"github.com/foo/v2", "github.com/foo"},
+ {"github.com/foo/v12", "github.com/foo"},
+ {"github.com/foo/v3d", "github.com/foo/v3d"},
+ {"MyTheme", "mytheme"},
+ } {
+ c.Assert(pathKey(test.in), qt.Equals, test.expect)
+ }
+}
+
+// TestFilterUnwantedMounts verifies that duplicate mounts are dropped,
+// keeping the first occurrence.
+func TestFilterUnwantedMounts(t *testing.T) {
+ mounts := []Mount{
+ {Source: "a", Target: "b", Lang: "en"},
+ {Source: "a", Target: "b", Lang: "en"},
+ {Source: "b", Target: "c", Lang: "en"},
+ }
+
+ filtered := filterUnwantedMounts(mounts)
+
+ c := qt.New(t)
+ c.Assert(len(filtered), qt.Equals, 2)
+ c.Assert(filtered, qt.DeepEquals, []Mount{{Source: "a", Target: "b", Lang: "en"}, {Source: "b", Target: "c", Lang: "en"}})
+}
diff --git a/modules/config.go b/modules/config.go
new file mode 100644
index 000000000..08154bc11
--- /dev/null
+++ b/modules/config.go
@@ -0,0 +1,421 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package modules
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/mitchellh/mapstructure"
+)
+
+// DefaultModuleConfig holds the default module configuration, used as the
+// base when decoding the user's config (see decodeConfig).
+var DefaultModuleConfig = Config{
+
+ // Default to direct, which means "git clone" and similar. We
+ // will investigate proxy settings in more depth later.
+ // See https://github.com/golang/go/issues/26334
+ Proxy: "direct",
+
+ // Comma separated glob list matching paths that should not use the
+ // proxy configured above.
+ NoProxy: "none",
+
+ // Comma separated glob list matching paths that should be
+ // treated as private.
+ Private: "*.*",
+
+ // A list of replacement directives mapping a module path to a directory
+ // or a theme component in the themes folder.
+ // Note that this will turn the component into a traditional theme component
+ // that does not partake in vendoring etc.
+ // The syntax is the similar to the replacement directives used in go.mod, e.g:
+ // github.com/mod1 -> ../mod1,github.com/mod2 -> ../mod2
+ Replacements: nil,
+}
+
+// ApplyProjectConfigDefaults applies default/missing module configuration for
+// the main project. It bridges the legacy directory settings (contentDir,
+// staticDir, layoutDir, ...) into mounts on the project module, then adds a
+// default mount for every component that is still unconfigured.
+func ApplyProjectConfigDefaults(cfg config.Provider, mod Module) error {
+ moda := mod.(*moduleAdapter)
+
+ // Map legacy directory config into the new module.
+ languages := cfg.Get("languagesSortedDefaultFirst").(langs.Languages)
+ isMultiHost := languages.IsMultihost()
+
+ // To bridge between old and new configuration format we need
+ // a way to make sure all of the core components are configured on
+ // the basic level.
+ componentsConfigured := make(map[string]bool)
+ for _, mnt := range moda.mounts {
+ if !strings.HasPrefix(mnt.Target, files.JsConfigFolderMountPrefix) {
+ componentsConfigured[mnt.Component()] = true
+ }
+ }
+
+ type dirKeyComponent struct {
+ key string
+ component string
+ multilingual bool
+ }
+
+ dirKeys := []dirKeyComponent{
+ {"contentDir", files.ComponentFolderContent, true},
+ {"dataDir", files.ComponentFolderData, false},
+ {"layoutDir", files.ComponentFolderLayouts, false},
+ {"i18nDir", files.ComponentFolderI18n, false},
+ {"archetypeDir", files.ComponentFolderArchetypes, false},
+ {"assetDir", files.ComponentFolderAssets, false},
+ {"", files.ComponentFolderStatic, isMultiHost},
+ }
+
+ // createMountsFor maps one legacy dir key (possibly per-language) to
+ // mounts, marking the component as configured when a value is found.
+ createMountsFor := func(d dirKeyComponent, cfg config.Provider) []Mount {
+ var lang string
+ if language, ok := cfg.(*langs.Language); ok {
+ lang = language.Lang
+ }
+
+ // Static mounts are a little special.
+ if d.component == files.ComponentFolderStatic {
+ var mounts []Mount
+ staticDirs := getStaticDirs(cfg)
+ if len(staticDirs) > 0 {
+ componentsConfigured[d.component] = true
+ }
+
+ for _, dir := range staticDirs {
+ mounts = append(mounts, Mount{Lang: lang, Source: dir, Target: d.component})
+ }
+
+ return mounts
+
+ }
+
+ if cfg.IsSet(d.key) {
+ source := cfg.GetString(d.key)
+ componentsConfigured[d.component] = true
+
+ return []Mount{{
+ // No lang set for layouts etc.
+ Source: source,
+ Target: d.component,
+ }}
+ }
+
+ return nil
+ }
+
+ // createMounts handles the multilingual fan-out (contentDir per
+ // language; per-language static dirs in multihost setups).
+ createMounts := func(d dirKeyComponent) []Mount {
+ var mounts []Mount
+ if d.multilingual {
+ if d.component == files.ComponentFolderContent {
+ seen := make(map[string]bool)
+ hasContentDir := false
+ for _, language := range languages {
+ if language.ContentDir != "" {
+ hasContentDir = true
+ break
+ }
+ }
+
+ if hasContentDir {
+ for _, language := range languages {
+ contentDir := language.ContentDir
+ if contentDir == "" {
+ contentDir = files.ComponentFolderContent
+ }
+ if contentDir == "" || seen[contentDir] {
+ continue
+ }
+ seen[contentDir] = true
+ mounts = append(mounts, Mount{Lang: language.Lang, Source: contentDir, Target: d.component})
+ }
+ }
+
+ componentsConfigured[d.component] = len(seen) > 0
+
+ } else {
+ for _, language := range languages {
+ mounts = append(mounts, createMountsFor(d, language)...)
+ }
+ }
+ } else {
+ mounts = append(mounts, createMountsFor(d, cfg)...)
+ }
+
+ return mounts
+ }
+
+ var mounts []Mount
+ for _, dirKey := range dirKeys {
+ if componentsConfigured[dirKey.component] {
+ continue
+ }
+
+ mounts = append(mounts, createMounts(dirKey)...)
+
+ }
+
+ // Add default configuration
+ for _, dirKey := range dirKeys {
+ if componentsConfigured[dirKey.component] {
+ continue
+ }
+ mounts = append(mounts, Mount{Source: dirKey.component, Target: dirKey.component})
+ }
+
+ // Prepend the mounts from configuration.
+ mounts = append(moda.mounts, mounts...)
+
+ moda.mounts = mounts
+
+ return nil
+}
+
+// DecodeConfig creates a modules Config from a given Hugo configuration.
+func DecodeConfig(cfg config.Provider) (Config, error) {
+ return decodeConfig(cfg, nil)
+}
+
+// decodeConfig decodes the "module" section (and the legacy "theme" key,
+// which is appended as plain imports) on top of DefaultModuleConfig.
+// pathReplacements, when non-nil, is an already-parsed replacement map
+// (used for nested modules); otherwise it is built from the Replacements
+// directives.
+func decodeConfig(cfg config.Provider, pathReplacements map[string]string) (Config, error) {
+ c := DefaultModuleConfig
+ c.replacementsMap = pathReplacements
+
+ if cfg == nil {
+ return c, nil
+ }
+
+ themeSet := cfg.IsSet("theme")
+ moduleSet := cfg.IsSet("module")
+
+ if moduleSet {
+ m := cfg.GetStringMap("module")
+ if err := mapstructure.WeakDecode(m, &c); err != nil {
+ return c, err
+ }
+
+ if c.replacementsMap == nil {
+
+ // A single entry may hold a comma-separated list of directives.
+ if len(c.Replacements) == 1 {
+ c.Replacements = strings.Split(c.Replacements[0], ",")
+ }
+
+ for i, repl := range c.Replacements {
+ c.Replacements[i] = strings.TrimSpace(repl)
+ }
+
+ c.replacementsMap = make(map[string]string)
+ for _, repl := range c.Replacements {
+ parts := strings.Split(repl, "->")
+ if len(parts) != 2 {
+ return c, fmt.Errorf(`invalid module.replacements: %q; configure replacement pairs on the form "oldpath->newpath" `, repl)
+ }
+
+ c.replacementsMap[strings.TrimSpace(parts[0])] = strings.TrimSpace(parts[1])
+ }
+ }
+
+ // Rewrite import paths per the replacement directives.
+ if c.replacementsMap != nil && c.Imports != nil {
+ for i, imp := range c.Imports {
+ if newImp, found := c.replacementsMap[imp.Path]; found {
+ imp.Path = newImp
+ imp.pathProjectReplaced = true
+ c.Imports[i] = imp
+ }
+ }
+ }
+
+ for i, mnt := range c.Mounts {
+ mnt.Source = filepath.Clean(mnt.Source)
+ mnt.Target = filepath.Clean(mnt.Target)
+ c.Mounts[i] = mnt
+ }
+
+ }
+
+ if themeSet {
+ // The legacy "theme" key becomes a list of plain imports.
+ imports := config.GetStringSlicePreserveString(cfg, "theme")
+ for _, imp := range imports {
+ c.Imports = append(c.Imports, Import{
+ Path: imp,
+ })
+ }
+
+ }
+
+ return c, nil
+}
+
+// Config holds a module config.
+type Config struct {
+ // File system mounts defined for this module.
+ Mounts []Mount
+ // Modules (or themes) imported by this module.
+ Imports []Import
+
+ // Meta info about this module (license information etc.).
+ Params map[string]any
+
+ // Will be validated against the running Hugo version.
+ HugoVersion HugoVersion
+
+ // A optional Glob pattern matching module paths to skip when vendoring, e.g.
+ // "github.com/**".
+ NoVendor string
+
+ // When enabled, we will pick the vendored module closest to the module
+ // using it.
+ // The default behaviour is to pick the first.
+ // Note that there can still be only one dependency of a given module path,
+ // so once it is in use it cannot be redefined.
+ VendorClosest bool
+
+ // Replacement directives on the form "oldpath -> newpath"; parsed into
+ // replacementsMap by decodeConfig.
+ Replacements []string
+ replacementsMap map[string]string
+
+ // Configures GOPROXY.
+ Proxy string
+ // Configures GONOPROXY.
+ NoProxy string
+ // Configures GOPRIVATE.
+ Private string
+
+ // Set the workspace file to use, e.g. hugo.work.
+ // Enables Go "Workspace" mode.
+ // Requires Go 1.18+
+ // See https://tip.golang.org/doc/go1.18
+ Workspace string
+}
+
+// hasModuleImport reports whether the project config have one or more
+// modules imports, e.g. github.com/bep/myshortcodes.
+func (c Config) hasModuleImport() bool {
+ for _, imp := range c.Imports {
+ if isProbablyModule(imp.Path) {
+ return true
+ }
+ }
+ return false
+}
+
+// HugoVersion holds Hugo binary version requirements for a module.
+type HugoVersion struct {
+ // The minimum Hugo version that this module works with.
+ Min hugo.VersionString
+
+ // The maximum Hugo version that this module works with.
+ Max hugo.VersionString
+
+ // Set if the extended version is needed.
+ Extended bool
+}
+
+// String renders the constraint for display, e.g. "Min 0.54.0 extended".
+func (v HugoVersion) String() string {
+ extended := ""
+ if v.Extended {
+ extended = " extended"
+ }
+
+ if v.Min != "" && v.Max != "" {
+ return fmt.Sprintf("%s/%s%s", v.Min, v.Max, extended)
+ }
+
+ if v.Min != "" {
+ return fmt.Sprintf("Min %s%s", v.Min, extended)
+ }
+
+ if v.Max != "" {
+ return fmt.Sprintf("Max %s%s", v.Max, extended)
+ }
+
+ return extended
+}
+
+// IsValid reports whether this version is valid compared to the running
+// Hugo binary.
+func (v HugoVersion) IsValid() bool {
+ current := hugo.CurrentVersion.Version()
+ if v.Extended && !hugo.IsExtended {
+ return false
+ }
+
+ isValid := true
+
+ // NOTE: per TestDecodeConfig, VersionString.Compare returns a negative
+ // value when the receiver is greater, so Compare(v.Min) > 0 means the
+ // current version is below the minimum.
+ if v.Min != "" && current.Compare(v.Min) > 0 {
+ isValid = false
+ }
+
+ if v.Max != "" && current.Compare(v.Max) < 0 {
+ isValid = false
+ }
+
+ return isValid
+}
+
+// Import describes one module import with its per-import options.
+type Import struct {
+ Path string // Module path
+ pathProjectReplaced bool // Set when Path is replaced in project config.
+ IgnoreConfig bool // Ignore any config in config.toml (will still follow imports).
+ IgnoreImports bool // Do not follow any configured imports.
+ NoMounts bool // Do not mount any folder in this import.
+ NoVendor bool // Never vendor this import (only allowed in main project).
+ Disable bool // Turn off this module.
+ Mounts []Mount
+}
+
+// Mount maps a source dir in a module to a target component path.
+type Mount struct {
+ Source string // relative path in source repo, e.g. "scss"
+ Target string // relative target path, e.g. "assets/bootstrap/scss"
+
+ Lang string // any language code associated with this mount.
+
+ // Include only files matching the given Glob patterns (string or slice).
+ IncludeFiles any
+
+ // Exclude all files matching the given Glob patterns (string or slice).
+ ExcludeFiles any
+}
+
+// Used as key to remove duplicates.
+func (m Mount) key() string {
+ return strings.Join([]string{m.Lang, m.Source, m.Target}, "/")
+}
+
+// Component returns the first element of Target, e.g. "content".
+func (m Mount) Component() string {
+ return strings.Split(m.Target, fileSeparator)[0]
+}
+
+// ComponentAndName splits Target into its component and the remainder.
+func (m Mount) ComponentAndName() (string, string) {
+ c, n, _ := strings.Cut(m.Target, fileSeparator)
+ return c, n
+}
+
+// getStaticDirs collects staticDir plus the legacy numbered variants
+// staticDir0 ... staticDir10 (i == -1 selects the un-numbered key).
+func getStaticDirs(cfg config.Provider) []string {
+ var staticDirs []string
+ for i := -1; i <= 10; i++ {
+ staticDirs = append(staticDirs, getStringOrStringSlice(cfg, "staticDir", i)...)
+ }
+ return staticDirs
+}
+
+// getStringOrStringSlice reads key (id < 0) or key<id> as a string slice.
+func getStringOrStringSlice(cfg config.Provider, key string, id int) []string {
+ if id >= 0 {
+ key = fmt.Sprintf("%s%d", key, id)
+ }
+
+ return config.GetStringSlicePreserveString(cfg, key)
+}
diff --git a/modules/config_test.go b/modules/config_test.go
new file mode 100644
index 000000000..371aab056
--- /dev/null
+++ b/modules/config_test.go
@@ -0,0 +1,161 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package modules
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestConfigHugoVersionIsValid(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ in HugoVersion
+ expect bool
+ }{
+ {HugoVersion{Min: "0.33.0"}, true},
+ {HugoVersion{Min: "0.56.0-DEV"}, true},
+ {HugoVersion{Min: "0.33.0", Max: "0.55.0"}, false},
+ {HugoVersion{Min: "0.33.0", Max: "0.199.0"}, true},
+ } {
+ c.Assert(test.in.IsValid(), qt.Equals, test.expect, qt.Commentf("%#v", test.in))
+ }
+}
+
+func TestDecodeConfig(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("Basic", func(c *qt.C) {
+ tomlConfig := `
+[module]
+
+[module.hugoVersion]
+min = "0.54.2"
+max = "0.199.0"
+extended = true
+
+[[module.mounts]]
+source="src/project/blog"
+target="content/blog"
+lang="en"
+[[module.imports]]
+path="github.com/bep/mycomponent"
+[[module.imports.mounts]]
+source="scss"
+target="assets/bootstrap/scss"
+[[module.imports.mounts]]
+source="src/markdown/blog"
+target="content/blog"
+lang="en"
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ mcfg, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+
+ v056 := hugo.VersionString("0.56.0")
+
+ hv := mcfg.HugoVersion
+
+ c.Assert(v056.Compare(hv.Min), qt.Equals, -1)
+ c.Assert(v056.Compare(hv.Max), qt.Equals, 1)
+ c.Assert(hv.Extended, qt.Equals, true)
+
+ if hugo.IsExtended {
+ c.Assert(hv.IsValid(), qt.Equals, true)
+ }
+
+ c.Assert(len(mcfg.Mounts), qt.Equals, 1)
+ c.Assert(len(mcfg.Imports), qt.Equals, 1)
+ imp := mcfg.Imports[0]
+ imp.Path = "github.com/bep/mycomponent"
+ c.Assert(imp.Mounts[1].Source, qt.Equals, "src/markdown/blog")
+ c.Assert(imp.Mounts[1].Target, qt.Equals, "content/blog")
+ c.Assert(imp.Mounts[1].Lang, qt.Equals, "en")
+ })
+
+ c.Run("Replacements", func(c *qt.C) {
+ for _, tomlConfig := range []string{`
+[module]
+replacements="a->b,github.com/bep/mycomponent->c"
+[[module.imports]]
+path="github.com/bep/mycomponent"
+`, `
+[module]
+replacements=["a->b","github.com/bep/mycomponent->c"]
+[[module.imports]]
+path="github.com/bep/mycomponent"
+`} {
+
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ mcfg, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(mcfg.Replacements, qt.DeepEquals, []string{"a->b", "github.com/bep/mycomponent->c"})
+ c.Assert(mcfg.replacementsMap, qt.DeepEquals, map[string]string{
+ "a": "b",
+ "github.com/bep/mycomponent": "c",
+ })
+
+ c.Assert(mcfg.Imports[0].Path, qt.Equals, "c")
+
+ }
+ })
+}
+
+func TestDecodeConfigBothOldAndNewProvided(t *testing.T) {
+ c := qt.New(t)
+ tomlConfig := `
+
+theme = ["b", "c"]
+
+[module]
+[[module.imports]]
+path="a"
+
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ modCfg, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(modCfg.Imports), qt.Equals, 3)
+ c.Assert(modCfg.Imports[0].Path, qt.Equals, "a")
+}
+
+// Test old style theme import.
+func TestDecodeConfigTheme(t *testing.T) {
+ c := qt.New(t)
+ tomlConfig := `
+
+theme = ["a", "b"]
+`
+ cfg, err := config.FromConfigString(tomlConfig, "toml")
+ c.Assert(err, qt.IsNil)
+
+ mcfg, err := DecodeConfig(cfg)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(len(mcfg.Imports), qt.Equals, 2)
+ c.Assert(mcfg.Imports[0].Path, qt.Equals, "a")
+ c.Assert(mcfg.Imports[1].Path, qt.Equals, "b")
+}
diff --git a/modules/module.go b/modules/module.go
new file mode 100644
index 000000000..0d094fe87
--- /dev/null
+++ b/modules/module.go
@@ -0,0 +1,188 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package modules provides a client that can be used to manage Hugo Components,
+// what's referred to as Hugo Modules. Hugo Modules is built on top of Go Modules,
+// but also supports vendoring and components stored directly in the themes dir.
+package modules
+
+import (
+ "time"
+
+ "github.com/gohugoio/hugo/config"
+)
+
+var _ Module = (*moduleAdapter)(nil)
+
+type Module interface {
+
+ // Optional config read from the configFilename above.
+ Cfg() config.Provider
+
+ // The decoded module config and mounts.
+ Config() Config
+
+ // Optional configuration filenames (e.g. "/themes/mytheme/config.json").
+ // This will be added to the special configuration watch list when in
+ // server mode.
+ ConfigFilenames() []string
+
+ // Directory holding files for this module.
+ Dir() string
+
+ // This module is disabled.
+ Disabled() bool
+
+ // Returns whether this is a Go Module.
+ IsGoMod() bool
+
+ // Any directory remappings.
+ Mounts() []Mount
+
+ // In the dependency tree, this is the first module that defines this module
+ // as a dependency.
+ Owner() Module
+
+ // Returns the path to this module.
+ // This will either be the module path, e.g. "github.com/gohugoio/myshortcodes",
+ // or the path below your /theme folder, e.g. "mytheme".
+ Path() string
+
+ // Replaced by this module.
+ Replace() Module
+
+ // Returns whether Dir points below the _vendor dir.
+ Vendor() bool
+
+ // The module version.
+ Version() string
+
+ // Time version was created.
+ Time() time.Time
+
+ // Whether this module's dir is a watch candidate.
+ Watch() bool
+}
+
+type Modules []Module
+
+type moduleAdapter struct {
+ path string
+ dir string
+ version string
+ vendor bool
+ disabled bool
+ projectMod bool
+ owner Module
+
+ mounts []Mount
+
+ configFilenames []string
+ cfg config.Provider
+ config Config
+
+ // Set if a Go module.
+ gomod *goModule
+}
+
+func (m *moduleAdapter) Cfg() config.Provider {
+ return m.cfg
+}
+
+func (m *moduleAdapter) Config() Config {
+ return m.config
+}
+
+func (m *moduleAdapter) ConfigFilenames() []string {
+ return m.configFilenames
+}
+
+func (m *moduleAdapter) Dir() string {
+ // This may point to the _vendor dir.
+ if !m.IsGoMod() || m.dir != "" {
+ return m.dir
+ }
+ return m.gomod.Dir
+}
+
+func (m *moduleAdapter) Disabled() bool {
+ return m.disabled
+}
+
+func (m *moduleAdapter) IsGoMod() bool {
+ return m.gomod != nil
+}
+
+func (m *moduleAdapter) Mounts() []Mount {
+ return m.mounts
+}
+
+func (m *moduleAdapter) Owner() Module {
+ return m.owner
+}
+
+func (m *moduleAdapter) Path() string {
+ if !m.IsGoMod() || m.path != "" {
+ return m.path
+ }
+ return m.gomod.Path
+}
+
+func (m *moduleAdapter) Replace() Module {
+ if m.IsGoMod() && !m.Vendor() && m.gomod.Replace != nil {
+ return &moduleAdapter{
+ gomod: m.gomod.Replace,
+ owner: m.owner,
+ }
+ }
+ return nil
+}
+
+func (m *moduleAdapter) Vendor() bool {
+ return m.vendor
+}
+
+func (m *moduleAdapter) Version() string {
+ if !m.IsGoMod() || m.version != "" {
+ return m.version
+ }
+ return m.gomod.Version
+}
+
+func (m *moduleAdapter) Time() time.Time {
+ if !m.IsGoMod() || m.gomod.Time == nil {
+ return time.Time{}
+ }
+
+ return *m.gomod.Time
+
+}
+
+func (m *moduleAdapter) Watch() bool {
+ if m.Owner() == nil {
+ // Main project
+ return true
+ }
+
+ if !m.IsGoMod() {
+ // Module inside /themes
+ return true
+ }
+
+ if m.Replace() != nil {
+ // Version is not set when replaced by a local folder.
+ return m.Replace().Version() == ""
+ }
+
+ return false
+}
diff --git a/modules/npm/package_builder.go b/modules/npm/package_builder.go
new file mode 100644
index 000000000..9bdc7eb78
--- /dev/null
+++ b/modules/npm/package_builder.go
@@ -0,0 +1,237 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package npm
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
+const (
+ dependenciesKey = "dependencies"
+ devDependenciesKey = "devDependencies"
+
+ packageJSONName = "package.json"
+
+ packageJSONTemplate = `{
+ "name": "%s",
+ "version": "%s"
+}`
+)
+
+func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error {
+ var b *packageBuilder
+
+ // Have a package.hugo.json?
+ fi, err := fs.Stat(files.FilenamePackageHugoJSON)
+ if err != nil {
+ // Have a package.json?
+ fi, err = fs.Stat(packageJSONName)
+ if err == nil {
+ // Preserve the original in package.hugo.json.
+ if err = hugio.CopyFile(fs, packageJSONName, files.FilenamePackageHugoJSON); err != nil {
+ return fmt.Errorf("npm pack: failed to copy package file: %w", err)
+ }
+ } else {
+ // Create one.
+ name := "project"
+ // Use the Hugo site's folder name as the default name.
+ // The owner can change it later.
+ rfi, err := fs.Stat("")
+ if err == nil {
+ name = rfi.Name()
+ }
+ packageJSONContent := fmt.Sprintf(packageJSONTemplate, name, "0.1.0")
+ if err = afero.WriteFile(fs, files.FilenamePackageHugoJSON, []byte(packageJSONContent), 0666); err != nil {
+ return err
+ }
+ fi, err = fs.Stat(files.FilenamePackageHugoJSON)
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ meta := fi.(hugofs.FileMetaInfo).Meta()
+ masterFilename := meta.Filename
+ f, err := meta.Open()
+ if err != nil {
+ return fmt.Errorf("npm pack: failed to open package file: %w", err)
+ }
+ b = newPackageBuilder(meta.Module, f)
+ f.Close()
+
+ for _, fi := range fis {
+ if fi.IsDir() {
+ // We only care about the files in the root.
+ continue
+ }
+
+ if fi.Name() != files.FilenamePackageHugoJSON {
+ continue
+ }
+
+ meta := fi.(hugofs.FileMetaInfo).Meta()
+
+ if meta.Filename == masterFilename {
+ continue
+ }
+
+ f, err := meta.Open()
+ if err != nil {
+ return fmt.Errorf("npm pack: failed to open package file: %w", err)
+ }
+ b.Add(meta.Module, f)
+ f.Close()
+ }
+
+ if b.Err() != nil {
+ return fmt.Errorf("npm pack: failed to build: %w", b.Err())
+ }
+
+ // Replace the dependencies in the original template with the merged set.
+ b.originalPackageJSON[dependenciesKey] = b.dependencies
+ b.originalPackageJSON[devDependenciesKey] = b.devDependencies
+ var commentsm map[string]any
+ comments, found := b.originalPackageJSON["comments"]
+ if found {
+ commentsm = maps.ToStringMap(comments)
+ } else {
+ commentsm = make(map[string]any)
+ }
+ commentsm[dependenciesKey] = b.dependenciesComments
+ commentsm[devDependenciesKey] = b.devDependenciesComments
+ b.originalPackageJSON["comments"] = commentsm
+
+ // Write it out to the project package.json
+ packageJSONData := new(bytes.Buffer)
+ encoder := json.NewEncoder(packageJSONData)
+ encoder.SetEscapeHTML(false)
+ encoder.SetIndent("", strings.Repeat(" ", 2))
+ if err := encoder.Encode(b.originalPackageJSON); err != nil {
+ return fmt.Errorf("npm pack: failed to marshal JSON: %w", err)
+ }
+
+ if err := afero.WriteFile(fs, packageJSONName, packageJSONData.Bytes(), 0666); err != nil {
+ return fmt.Errorf("npm pack: failed to write package.json: %w", err)
+ }
+
+ return nil
+}
+
+func newPackageBuilder(source string, first io.Reader) *packageBuilder {
+ b := &packageBuilder{
+ devDependencies: make(map[string]any),
+ devDependenciesComments: make(map[string]any),
+ dependencies: make(map[string]any),
+ dependenciesComments: make(map[string]any),
+ }
+
+ m := b.unmarshal(first)
+ if b.err != nil {
+ return b
+ }
+
+ b.addm(source, m)
+ b.originalPackageJSON = m
+
+ return b
+}
+
+type packageBuilder struct {
+ err error
+
+ // The original package.hugo.json.
+ originalPackageJSON map[string]any
+
+ devDependencies map[string]any
+ devDependenciesComments map[string]any
+ dependencies map[string]any
+ dependenciesComments map[string]any
+}
+
+func (b *packageBuilder) Add(source string, r io.Reader) *packageBuilder {
+ if b.err != nil {
+ return b
+ }
+
+ m := b.unmarshal(r)
+ if b.err != nil {
+ return b
+ }
+
+ b.addm(source, m)
+
+ return b
+}
+
+func (b *packageBuilder) addm(source string, m map[string]any) {
+ if source == "" {
+ source = "project"
+ }
+
+ // The version selection is currently very simple.
+ // We may consider minimal version selection or something
+ // after testing this out.
+ //
+ // But for now, the first version string for a given dependency wins.
+ // These packages will be added by order of import (project, module1, module2...),
+ // so that should at least give the project control over the situation.
+ if devDeps, found := m[devDependenciesKey]; found {
+ mm := maps.ToStringMapString(devDeps)
+ for k, v := range mm {
+ if _, added := b.devDependencies[k]; !added {
+ b.devDependencies[k] = v
+ b.devDependenciesComments[k] = source
+ }
+ }
+ }
+
+ if deps, found := m[dependenciesKey]; found {
+ mm := maps.ToStringMapString(deps)
+ for k, v := range mm {
+ if _, added := b.dependencies[k]; !added {
+ b.dependencies[k] = v
+ b.dependenciesComments[k] = source
+ }
+ }
+ }
+}
+
+func (b *packageBuilder) unmarshal(r io.Reader) map[string]any {
+ m := make(map[string]any)
+ err := json.Unmarshal(helpers.ReaderToBytes(r), &m)
+ if err != nil {
+ b.err = err
+ }
+ return m
+}
+
+func (b *packageBuilder) Err() error {
+ return b.err
+}
diff --git a/modules/npm/package_builder_test.go b/modules/npm/package_builder_test.go
new file mode 100644
index 000000000..2523292ee
--- /dev/null
+++ b/modules/npm/package_builder_test.go
@@ -0,0 +1,95 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package npm
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const templ = `{
+ "name": "foo",
+ "version": "0.1.1",
+ "scripts": {},
+ "dependencies": {
+ "react-dom": "1.1.1",
+ "tailwindcss": "1.2.0",
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5"
+ },
+ "devDependencies": {
+ "postcss-cli": "7.1.0",
+ "tailwindcss": "1.2.0",
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5"
+ }
+}`
+
+func TestPackageBuilder(t *testing.T) {
+ c := qt.New(t)
+
+ b := newPackageBuilder("", strings.NewReader(templ))
+ c.Assert(b.Err(), qt.IsNil)
+
+ b.Add("mymod", strings.NewReader(`{
+"dependencies": {
+ "react-dom": "9.1.1",
+ "add1": "1.1.1"
+},
+"devDependencies": {
+ "tailwindcss": "error",
+ "add2": "2.1.1"
+}
+}`))
+
+ b.Add("mymod", strings.NewReader(`{
+"dependencies": {
+ "react-dom": "error",
+ "add1": "error",
+ "add3": "3.1.1"
+},
+"devDependencies": {
+ "tailwindcss": "error",
+ "add2": "error",
+ "add4": "4.1.1"
+
+}
+}`))
+
+ c.Assert(b.Err(), qt.IsNil)
+
+ c.Assert(b.dependencies, qt.DeepEquals, map[string]any{
+ "@babel/cli": "7.8.4",
+ "add1": "1.1.1",
+ "add3": "3.1.1",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5",
+ "react-dom": "1.1.1",
+ "tailwindcss": "1.2.0",
+ })
+
+ c.Assert(b.devDependencies, qt.DeepEquals, map[string]any{
+ "tailwindcss": "1.2.0",
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "add2": "2.1.1",
+ "add4": "4.1.1",
+ "@babel/preset-env": "7.9.5",
+ "postcss-cli": "7.1.0",
+ })
+}
diff --git a/navigation/menu.go b/navigation/menu.go
new file mode 100644
index 000000000..5e4996f39
--- /dev/null
+++ b/navigation/menu.go
@@ -0,0 +1,315 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package navigation
+
+import (
+ "fmt"
+ "html/template"
+ "sort"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/compare"
+
+ "github.com/spf13/cast"
+)
+
+var smc = newMenuCache()
+
+// MenuEntry represents a menu item defined in either Page front matter
+// or in the site config.
+type MenuEntry struct {
+ // The URL value from front matter / config.
+ ConfiguredURL string
+
+ // The Page connected to this menu entry.
+ Page Page
+
+ // The path to the page, only relevant for menus defined in site config.
+ PageRef string
+
+ // The name of the menu entry.
+ Name string
+
+ // The menu containing this menu entry.
+ Menu string
+
+ // Used to identify this menu entry.
+ Identifier string
+
+ title string
+
+ // If set, will be rendered before this menu entry.
+ Pre template.HTML
+
+ // If set, will be rendered after this menu entry.
+ Post template.HTML
+
+ // The weight of this menu entry, used for sorting.
+ // Set to a non-zero value, negative or positive.
+ Weight int
+
+ // Identifier of the parent menu entry.
+ Parent string
+
+ // Child entries.
+ Children Menu
+
+ // User defined params.
+ Params maps.Params
+}
+
+func (m *MenuEntry) URL() string {
+
+ // Check page first.
+ // In Hugo 0.86.0 we added `pageRef`,
+ // a way to connect menu items in site config to pages.
+ // This means that you now can have both a Page
+ // and a configured URL.
+ // Having the configured URL as a fallback if the Page isn't found
+ // is obviously more useful, especially in multilingual sites.
+ if !types.IsNil(m.Page) {
+ return m.Page.RelPermalink()
+ }
+
+ return m.ConfiguredURL
+}
+
+// A narrow version of page.Page.
+type Page interface {
+ LinkTitle() string
+ RelPermalink() string
+ Path() string
+ Section() string
+ Weight() int
+ IsPage() bool
+ IsSection() bool
+ IsAncestor(other any) (bool, error)
+ Params() maps.Params
+}
+
+// Menu is a collection of menu entries.
+type Menu []*MenuEntry
+
+// Menus is a dictionary of menus.
+type Menus map[string]Menu
+
+// PageMenus is a dictionary of menus defined in the Pages.
+type PageMenus map[string]*MenuEntry
+
+// HasChildren returns whether this menu item has any children.
+func (m *MenuEntry) HasChildren() bool {
+ return m.Children != nil
+}
+
+// KeyName returns the key used to identify this menu entry.
+func (m *MenuEntry) KeyName() string {
+ if m.Identifier != "" {
+ return m.Identifier
+ }
+ return m.Name
+}
+
+func (m *MenuEntry) hopefullyUniqueID() string {
+ if m.Identifier != "" {
+ return m.Identifier
+ } else if m.URL() != "" {
+ return m.URL()
+ } else {
+ return m.Name
+ }
+}
+
+// IsEqual returns whether the two menu entries represents the same menu entry.
+func (m *MenuEntry) IsEqual(inme *MenuEntry) bool {
+ return m.hopefullyUniqueID() == inme.hopefullyUniqueID() && m.Parent == inme.Parent
+}
+
+// IsSameResource returns whether the two menu entries points to the same
+// resource (URL).
+func (m *MenuEntry) IsSameResource(inme *MenuEntry) bool {
+ if m.isSamePage(inme.Page) {
+ return m.Page == inme.Page
+ }
+ murl, inmeurl := m.URL(), inme.URL()
+ return murl != "" && inmeurl != "" && murl == inmeurl
+}
+
+func (m *MenuEntry) isSamePage(p Page) bool {
+ if !types.IsNil(m.Page) && !types.IsNil(p) {
+ return m.Page == p
+ }
+ return false
+}
+
+// For internal use.
+func (m *MenuEntry) MarshallMap(ime map[string]any) error {
+ var err error
+ for k, v := range ime {
+ loki := strings.ToLower(k)
+ switch loki {
+ case "url":
+ m.ConfiguredURL = cast.ToString(v)
+ case "pageref":
+ m.PageRef = cast.ToString(v)
+ case "weight":
+ m.Weight = cast.ToInt(v)
+ case "name":
+ m.Name = cast.ToString(v)
+ case "title":
+ m.title = cast.ToString(v)
+ case "pre":
+ m.Pre = template.HTML(cast.ToString(v))
+ case "post":
+ m.Post = template.HTML(cast.ToString(v))
+ case "identifier":
+ m.Identifier = cast.ToString(v)
+ case "parent":
+ m.Parent = cast.ToString(v)
+ case "params":
+ var ok bool
+ m.Params, ok = maps.ToParamsAndPrepare(v)
+ if !ok {
+ err = fmt.Errorf("cannot convert %T to Params", v)
+ }
+ }
+ }
+
+ if err != nil {
+ return fmt.Errorf("failed to marshal menu entry %q: %w", m.KeyName(), err)
+ }
+
+ return nil
+}
+
+// This is for internal use only.
+func (m Menu) Add(me *MenuEntry) Menu {
+ m = append(m, me)
+ // TODO(bep)
+ m.Sort()
+ return m
+}
+
+/*
+ * Implementation of a custom sorter for Menu
+ */
+
+// A type to implement the sort interface for Menu
+type menuSorter struct {
+ menu Menu
+ by menuEntryBy
+}
+
+// Closure used in the Sort.Less method.
+type menuEntryBy func(m1, m2 *MenuEntry) bool
+
+func (by menuEntryBy) Sort(menu Menu) {
+ ms := &menuSorter{
+ menu: menu,
+ by: by, // The Sort method's receiver is the function (closure) that defines the sort order.
+ }
+ sort.Stable(ms)
+}
+
+var defaultMenuEntrySort = func(m1, m2 *MenuEntry) bool {
+ if m1.Weight == m2.Weight {
+ c := compare.Strings(m1.Name, m2.Name)
+ if c == 0 {
+ return m1.Identifier < m2.Identifier
+ }
+ return c < 0
+ }
+
+ if m2.Weight == 0 {
+ return true
+ }
+
+ if m1.Weight == 0 {
+ return false
+ }
+
+ return m1.Weight < m2.Weight
+}
+
+func (ms *menuSorter) Len() int { return len(ms.menu) }
+func (ms *menuSorter) Swap(i, j int) { ms.menu[i], ms.menu[j] = ms.menu[j], ms.menu[i] }
+
+// Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter.
+func (ms *menuSorter) Less(i, j int) bool { return ms.by(ms.menu[i], ms.menu[j]) }
+
+// Sort sorts the menu by weight, name and then by identifier.
+func (m Menu) Sort() Menu {
+ menuEntryBy(defaultMenuEntrySort).Sort(m)
+ return m
+}
+
+// Limit limits the returned menu to n entries.
+func (m Menu) Limit(n int) Menu {
+ if len(m) > n {
+ return m[0:n]
+ }
+ return m
+}
+
+// ByWeight sorts the menu by the weight defined in the menu configuration.
+func (m Menu) ByWeight() Menu {
+ const key = "menuSort.ByWeight"
+ menus, _ := smc.get(key, menuEntryBy(defaultMenuEntrySort).Sort, m)
+
+ return menus
+}
+
+// ByName sorts the menu by the name defined in the menu configuration.
+func (m Menu) ByName() Menu {
+ const key = "menuSort.ByName"
+ title := func(m1, m2 *MenuEntry) bool {
+ return compare.LessStrings(m1.Name, m2.Name)
+ }
+
+ menus, _ := smc.get(key, menuEntryBy(title).Sort, m)
+
+ return menus
+}
+
+// Reverse reverses the order of the menu entries.
+func (m Menu) Reverse() Menu {
+ const key = "menuSort.Reverse"
+ reverseFunc := func(menu Menu) {
+ for i, j := 0, len(menu)-1; i < j; i, j = i+1, j-1 {
+ menu[i], menu[j] = menu[j], menu[i]
+ }
+ }
+ menus, _ := smc.get(key, reverseFunc, m)
+
+ return menus
+}
+
+// Clone clones the menu entries.
+// This is for internal use only.
+func (m Menu) Clone() Menu {
+ return append(Menu(nil), m...)
+}
+
+func (m *MenuEntry) Title() string {
+ if m.title != "" {
+ return m.title
+ }
+
+ if m.Page != nil {
+ return m.Page.LinkTitle()
+ }
+
+ return ""
+}
diff --git a/navigation/menu_cache.go b/navigation/menu_cache.go
new file mode 100644
index 000000000..6a3266431
--- /dev/null
+++ b/navigation/menu_cache.go
@@ -0,0 +1,113 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+package navigation
+
+import (
+ "sync"
+)
+
+type menuCacheEntry struct {
+ in []Menu
+ out Menu
+}
+
+func (entry menuCacheEntry) matches(menuList []Menu) bool {
+ if len(entry.in) != len(menuList) {
+ return false
+ }
+ for i, m := range menuList {
+ if !menuEqual(m, entry.in[i]) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func newMenuCache() *menuCache {
+ return &menuCache{m: make(map[string][]menuCacheEntry)}
+}
+
+func (c *menuCache) clear() {
+ c.Lock()
+ defer c.Unlock()
+ c.m = make(map[string][]menuCacheEntry)
+}
+
+type menuCache struct {
+ sync.RWMutex
+ m map[string][]menuCacheEntry
+}
+
+func menuEqual(m1, m2 Menu) bool {
+ if m1 == nil && m2 == nil {
+ return true
+ }
+
+ if m1 == nil || m2 == nil {
+ return false
+ }
+
+ if len(m1) != len(m2) {
+ return false
+ }
+
+ if len(m1) == 0 {
+ return true
+ }
+
+ for i := 0; i < len(m1); i++ {
+ if m1[i] != m2[i] {
+ return false
+ }
+ }
+ return true
+}
+
+func (c *menuCache) get(key string, apply func(m Menu), menuLists ...Menu) (Menu, bool) {
+ return c.getP(key, func(m *Menu) {
+ if apply != nil {
+ apply(*m)
+ }
+ }, menuLists...)
+}
+
+func (c *menuCache) getP(key string, apply func(m *Menu), menuLists ...Menu) (Menu, bool) {
+ c.Lock()
+ defer c.Unlock()
+
+ if cached, ok := c.m[key]; ok {
+ for _, entry := range cached {
+ if entry.matches(menuLists) {
+ return entry.out, true
+ }
+ }
+ }
+
+ m := menuLists[0]
+ menuCopy := append(Menu(nil), m...)
+
+ if apply != nil {
+ apply(&menuCopy)
+ }
+
+ entry := menuCacheEntry{in: menuLists, out: menuCopy}
+ if v, ok := c.m[key]; ok {
+ c.m[key] = append(v, entry)
+ } else {
+ c.m[key] = []menuCacheEntry{entry}
+ }
+
+ return menuCopy, false
+}
diff --git a/navigation/menu_cache_test.go b/navigation/menu_cache_test.go
new file mode 100644
index 000000000..4bb8921bf
--- /dev/null
+++ b/navigation/menu_cache_test.go
@@ -0,0 +1,81 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package navigation
+
+import (
+ "sync"
+ "sync/atomic"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func createSortTestMenu(num int) Menu {
+ menu := make(Menu, num)
+ for i := 0; i < num; i++ {
+ m := &MenuEntry{}
+ menu[i] = m
+ }
+ return menu
+}
+
+func TestMenuCache(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ c1 := newMenuCache()
+
+ changeFirst := func(m Menu) {
+ m[0].title = "changed"
+ }
+
+ var o1 uint64
+ var o2 uint64
+
+ var wg sync.WaitGroup
+
+ var l1 sync.Mutex
+ var l2 sync.Mutex
+
+ var testMenuSets []Menu
+
+ for i := 0; i < 50; i++ {
+ testMenuSets = append(testMenuSets, createSortTestMenu(i+1))
+ }
+
+ for j := 0; j < 100; j++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for k, menu := range testMenuSets {
+ l1.Lock()
+ m, ca := c1.get("k1", nil, menu)
+ c.Assert(ca, qt.Equals, !atomic.CompareAndSwapUint64(&o1, uint64(k), uint64(k+1)))
+ l1.Unlock()
+ m2, c2 := c1.get("k1", nil, m)
+ c.Assert(c2, qt.Equals, true)
+ c.Assert(menuEqual(m, m2), qt.Equals, true)
+ c.Assert(menuEqual(m, menu), qt.Equals, true)
+ c.Assert(m, qt.Not(qt.IsNil))
+
+ l2.Lock()
+ m3, c3 := c1.get("k2", changeFirst, menu)
+ c.Assert(c3, qt.Equals, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)))
+ l2.Unlock()
+ c.Assert(m3, qt.Not(qt.IsNil))
+ c.Assert("changed", qt.Equals, m3[0].title)
+ }
+ }()
+ }
+ wg.Wait()
+}
diff --git a/navigation/pagemenus.go b/navigation/pagemenus.go
new file mode 100644
index 000000000..7b4f6f648
--- /dev/null
+++ b/navigation/pagemenus.go
@@ -0,0 +1,230 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package navigation
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/spf13/cast"
+)
+
+type PageMenusProvider interface {
+ PageMenusGetter
+ MenuQueryProvider
+}
+
+type PageMenusGetter interface {
+ Menus() PageMenus
+}
+
+type MenusGetter interface {
+ Menus() Menus
+}
+
+type MenuQueryProvider interface {
+ HasMenuCurrent(menuID string, me *MenuEntry) bool
+ IsMenuCurrent(menuID string, inme *MenuEntry) bool
+}
+
+func PageMenusFromPage(p Page) (PageMenus, error) {
+ params := p.Params()
+
+ ms, ok := params["menus"]
+ if !ok {
+ ms, ok = params["menu"]
+ }
+
+ pm := PageMenus{}
+
+ if !ok {
+ return nil, nil
+ }
+
+ me := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.Weight()}
+
+ // Could be the name of the menu to attach it to
+ mname, err := cast.ToStringE(ms)
+
+ if err == nil {
+ me.Menu = mname
+ pm[mname] = &me
+ return pm, nil
+ }
+
+ // Could be a slice of strings
+ mnames, err := cast.ToStringSliceE(ms)
+
+ if err == nil {
+ for _, mname := range mnames {
+ me.Menu = mname
+ pm[mname] = &me
+ }
+ return pm, nil
+ }
+
+ var wrapErr = func(err error) error {
+ return fmt.Errorf("unable to process menus for page %q: %w", p.Path(), err)
+ }
+
+ // Could be a structured menu entry
+ menus, err := maps.ToStringMapE(ms)
+ if err != nil {
+ return pm, wrapErr(err)
+ }
+
+ for name, menu := range menus {
+ menuEntry := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.Weight(), Menu: name}
+ if menu != nil {
+ ime, err := maps.ToStringMapE(menu)
+ if err != nil {
+ return pm, wrapErr(err)
+ }
+
+ if err = menuEntry.MarshallMap(ime); err != nil {
+ return pm, wrapErr(err)
+ }
+ }
+ pm[name] = &menuEntry
+ }
+
+ return pm, nil
+}
+
+func NewMenuQueryProvider(
+ pagem PageMenusGetter,
+ sitem MenusGetter,
+ p Page) MenuQueryProvider {
+ return &pageMenus{
+ p: p,
+ pagem: pagem,
+ sitem: sitem,
+ }
+}
+
+type pageMenus struct {
+ pagem PageMenusGetter
+ sitem MenusGetter
+ p Page
+}
+
+func (pm *pageMenus) HasMenuCurrent(menuID string, me *MenuEntry) bool {
+ if !types.IsNil(me.Page) && me.Page.IsSection() {
+ if ok, _ := me.Page.IsAncestor(pm.p); ok {
+ return true
+ }
+ }
+
+ if !me.HasChildren() {
+ return false
+ }
+
+ menus := pm.pagem.Menus()
+
+ if m, ok := menus[menuID]; ok {
+ for _, child := range me.Children {
+ if child.IsEqual(m) {
+ return true
+ }
+ if pm.HasMenuCurrent(menuID, child) {
+ return true
+ }
+ }
+ }
+
+ if pm.p == nil {
+ return false
+ }
+
+ for _, child := range me.Children {
+ if child.isSamePage(pm.p) {
+ return true
+ }
+
+ if pm.HasMenuCurrent(menuID, child) {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (pm *pageMenus) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
+ menus := pm.pagem.Menus()
+
+ if me, ok := menus[menuID]; ok {
+ if me.IsEqual(inme) {
+ return true
+ }
+ }
+
+ if pm.p == nil {
+ return false
+ }
+
+ if !inme.isSamePage(pm.p) {
+ return false
+ }
+
+ // This resource may be included in several menus.
+ // Search for it to make sure that it is in the menu with the given menuId.
+ if menu, ok := pm.sitem.Menus()[menuID]; ok {
+ for _, menuEntry := range menu {
+ if menuEntry.IsSameResource(inme) {
+ return true
+ }
+
+ descendantFound := pm.isSameAsDescendantMenu(inme, menuEntry)
+ if descendantFound {
+ return descendantFound
+ }
+
+ }
+ }
+
+ return false
+}
+
+func (pm *pageMenus) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
+ if parent.HasChildren() {
+ for _, child := range parent.Children {
+ if child.IsSameResource(inme) {
+ return true
+ }
+ descendantFound := pm.isSameAsDescendantMenu(inme, child)
+ if descendantFound {
+ return descendantFound
+ }
+ }
+ }
+ return false
+}
+
+var NopPageMenus = new(nopPageMenus)
+
+type nopPageMenus int
+
+func (m nopPageMenus) Menus() PageMenus {
+ return PageMenus{}
+}
+
+func (m nopPageMenus) HasMenuCurrent(menuID string, me *MenuEntry) bool {
+ return false
+}
+
+func (m nopPageMenus) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
+ return false
+}
diff --git a/output/docshelper.go b/output/docshelper.go
new file mode 100644
index 000000000..abfedd148
--- /dev/null
+++ b/output/docshelper.go
@@ -0,0 +1,102 @@
+package output
+
+import (
+ "strings"
+
+ // "fmt"
+
+ "github.com/gohugoio/hugo/docshelper"
+)
+
+// This is just some helpers used to create some JSON used in the Hugo docs.
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+ return docshelper.DocProvider{
+ "output": map[string]any{
+ "formats": DefaultFormats,
+ "layouts": createLayoutExamples(),
+ },
+ }
+ }
+
+ docshelper.AddDocProviderFunc(docsProvider)
+}
+
+func createLayoutExamples() any {
+ type Example struct {
+ Example string
+ Kind string
+ OutputFormat string
+ Suffix string
+ Layouts []string `json:"Template Lookup Order"`
+ }
+
+ var (
+ basicExamples []Example
+ demoLayout = "demolayout"
+ demoType = "demotype"
+ )
+
+ for _, example := range []struct {
+ name string
+ d LayoutDescriptor
+ f Format
+ }{
+ // Taxonomy output.LayoutDescriptor={categories category taxonomy en false Type Section
+ {"Single page in \"posts\" section", LayoutDescriptor{Kind: "page", Type: "posts"}, HTMLFormat},
+ {"Base template for single page in \"posts\" section", LayoutDescriptor{Baseof: true, Kind: "page", Type: "posts"}, HTMLFormat},
+ {"Single page in \"posts\" section with layout set", LayoutDescriptor{Kind: "page", Type: "posts", Layout: demoLayout}, HTMLFormat},
+ {"Base template for single page in \"posts\" section with layout set", LayoutDescriptor{Baseof: true, Kind: "page", Type: "posts", Layout: demoLayout}, HTMLFormat},
+ {"AMP single page", LayoutDescriptor{Kind: "page", Type: "posts"}, AMPFormat},
+ {"AMP single page, French language", LayoutDescriptor{Kind: "page", Type: "posts", Lang: "fr"}, AMPFormat},
+ // All section or typeless pages gets "page" as type
+ {"Home page", LayoutDescriptor{Kind: "home", Type: "page"}, HTMLFormat},
+ {"Base template for home page", LayoutDescriptor{Baseof: true, Kind: "home", Type: "page"}, HTMLFormat},
+ {"Home page with type set", LayoutDescriptor{Kind: "home", Type: demoType}, HTMLFormat},
+ {"Base template for home page with type set", LayoutDescriptor{Baseof: true, Kind: "home", Type: demoType}, HTMLFormat},
+ {"Home page with layout set", LayoutDescriptor{Kind: "home", Type: "page", Layout: demoLayout}, HTMLFormat},
+ {"AMP home, French language", LayoutDescriptor{Kind: "home", Type: "page", Lang: "fr"}, AMPFormat},
+ {"JSON home", LayoutDescriptor{Kind: "home", Type: "page"}, JSONFormat},
+ {"RSS home", LayoutDescriptor{Kind: "home", Type: "page"}, RSSFormat},
+ {"RSS section posts", LayoutDescriptor{Kind: "section", Type: "posts"}, RSSFormat},
+ {"Taxonomy in categories", LayoutDescriptor{Kind: "taxonomy", Type: "categories", Section: "category"}, RSSFormat},
+ {"Term in categories", LayoutDescriptor{Kind: "term", Type: "categories", Section: "category"}, RSSFormat},
+ {"Section list for \"posts\" section", LayoutDescriptor{Kind: "section", Type: "posts", Section: "posts"}, HTMLFormat},
+ {"Section list for \"posts\" section with type set to \"blog\"", LayoutDescriptor{Kind: "section", Type: "blog", Section: "posts"}, HTMLFormat},
+ {"Section list for \"posts\" section with layout set to \"demoLayout\"", LayoutDescriptor{Kind: "section", Layout: demoLayout, Section: "posts"}, HTMLFormat},
+
+ {"Taxonomy list in categories", LayoutDescriptor{Kind: "taxonomy", Type: "categories", Section: "category"}, HTMLFormat},
+ {"Taxonomy term in categories", LayoutDescriptor{Kind: "term", Type: "categories", Section: "category"}, HTMLFormat},
+ } {
+
+ l := NewLayoutHandler()
+ layouts, _ := l.For(example.d, example.f)
+
+ basicExamples = append(basicExamples, Example{
+ Example: example.name,
+ Kind: example.d.Kind,
+ OutputFormat: example.f.Name,
+ Suffix: example.f.MediaType.FirstSuffix.Suffix,
+ Layouts: makeLayoutsPresentable(layouts),
+ })
+ }
+
+ return basicExamples
+}
+
+func makeLayoutsPresentable(l []string) []string {
+ var filtered []string
+ for _, ll := range l {
+ if strings.Contains(ll, "page/") {
+ // This is a valid lookup, but it's more confusing than useful.
+ continue
+ }
+ ll = "layouts/" + strings.TrimPrefix(ll, "_text/")
+
+ if !strings.Contains(ll, "indexes") {
+ filtered = append(filtered, ll)
+ }
+ }
+
+ return filtered
+}
diff --git a/output/layout.go b/output/layout.go
new file mode 100644
index 000000000..dcbdf461a
--- /dev/null
+++ b/output/layout.go
@@ -0,0 +1,302 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package output
+
+import (
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
+// These may be used as content sections with potential conflicts. Avoid that.
+var reservedSections = map[string]bool{
+ "shortcodes": true,
+ "partials": true,
+}
+
+// LayoutDescriptor describes how a layout should be chosen. This is
+// typically built from a Page.
+type LayoutDescriptor struct {
+ Type string
+ Section string
+
+ // E.g. "page", but also used for the _markup render kinds, e.g. "render-image".
+ Kind string
+
+ // Comma-separated list of kind variants, e.g. "go,json" as variants which would find "render-codeblock-go.html"
+ KindVariants string
+
+ Lang string
+ Layout string
+	// LayoutOverride indicates that we should only look for the above layout.
+ LayoutOverride bool
+
+ RenderingHook bool
+ Baseof bool
+}
+
+func (d LayoutDescriptor) isList() bool {
+ return !d.RenderingHook && d.Kind != "page" && d.Kind != "404"
+}
+
+// LayoutHandler calculates the layout template to use to render a given output type.
+type LayoutHandler struct {
+ mu sync.RWMutex
+ cache map[layoutCacheKey][]string
+}
+
+type layoutCacheKey struct {
+ d LayoutDescriptor
+ f string
+}
+
+// NewLayoutHandler creates a new LayoutHandler.
+func NewLayoutHandler() *LayoutHandler {
+ return &LayoutHandler{cache: make(map[layoutCacheKey][]string)}
+}
+
+// For returns a layout for the given LayoutDescriptor and options.
+// Layouts are resolved and cached internally.
+func (l *LayoutHandler) For(d LayoutDescriptor, f Format) ([]string, error) {
+ // We will get lots of requests for the same layouts, so avoid recalculations.
+ key := layoutCacheKey{d, f.Name}
+ l.mu.RLock()
+ if cacheVal, found := l.cache[key]; found {
+ l.mu.RUnlock()
+ return cacheVal, nil
+ }
+ l.mu.RUnlock()
+
+ layouts := resolvePageTemplate(d, f)
+
+ layouts = helpers.UniqueStringsReuse(layouts)
+
+ l.mu.Lock()
+ l.cache[key] = layouts
+ l.mu.Unlock()
+
+ return layouts, nil
+}
+
+type layoutBuilder struct {
+ layoutVariations []string
+ typeVariations []string
+ d LayoutDescriptor
+ f Format
+}
+
+func (l *layoutBuilder) addLayoutVariations(vars ...string) {
+ for _, layoutVar := range vars {
+ if l.d.Baseof && layoutVar != "baseof" {
+ l.layoutVariations = append(l.layoutVariations, layoutVar+"-baseof")
+ continue
+ }
+ if !l.d.RenderingHook && !l.d.Baseof && l.d.LayoutOverride && layoutVar != l.d.Layout {
+ continue
+ }
+ l.layoutVariations = append(l.layoutVariations, layoutVar)
+ }
+}
+
+func (l *layoutBuilder) addTypeVariations(vars ...string) {
+ for _, typeVar := range vars {
+ if !reservedSections[typeVar] {
+ if l.d.RenderingHook {
+ typeVar = typeVar + renderingHookRoot
+ }
+ l.typeVariations = append(l.typeVariations, typeVar)
+ }
+ }
+}
+
+func (l *layoutBuilder) addSectionType() {
+ if l.d.Section != "" {
+ l.addTypeVariations(l.d.Section)
+ }
+}
+
+func (l *layoutBuilder) addKind() {
+ l.addLayoutVariations(l.d.Kind)
+ l.addTypeVariations(l.d.Kind)
+}
+
+const renderingHookRoot = "/_markup"
+
+func resolvePageTemplate(d LayoutDescriptor, f Format) []string {
+ b := &layoutBuilder{d: d, f: f}
+
+ if !d.RenderingHook && d.Layout != "" {
+ b.addLayoutVariations(d.Layout)
+ }
+ if d.Type != "" {
+ b.addTypeVariations(d.Type)
+ }
+
+ if d.RenderingHook {
+ if d.KindVariants != "" {
+ // Add the more specific variants first.
+ for _, variant := range strings.Split(d.KindVariants, ",") {
+ b.addLayoutVariations(d.Kind + "-" + variant)
+ }
+ }
+ b.addLayoutVariations(d.Kind)
+ b.addSectionType()
+ }
+
+ switch d.Kind {
+ case "page":
+ b.addLayoutVariations("single")
+ b.addSectionType()
+ case "home":
+ b.addLayoutVariations("index", "home")
+ // Also look in the root
+ b.addTypeVariations("")
+ case "section":
+ if d.Section != "" {
+ b.addLayoutVariations(d.Section)
+ }
+ b.addSectionType()
+ b.addKind()
+ case "term":
+ b.addKind()
+ if d.Section != "" {
+ b.addLayoutVariations(d.Section)
+ }
+ b.addLayoutVariations("taxonomy")
+ b.addTypeVariations("taxonomy")
+ b.addSectionType()
+ case "taxonomy":
+ if d.Section != "" {
+ b.addLayoutVariations(d.Section + ".terms")
+ }
+ b.addSectionType()
+ b.addLayoutVariations("terms")
+ // For legacy reasons this is deliberately put last.
+ b.addKind()
+ case "404":
+ b.addLayoutVariations("404")
+ b.addTypeVariations("")
+ }
+
+ isRSS := f.Name == RSSFormat.Name
+ if !d.RenderingHook && !d.Baseof && isRSS {
+ // The historic and common rss.xml case
+ b.addLayoutVariations("")
+ }
+
+ if d.Baseof || d.Kind != "404" {
+ // Most have _default in their lookup path
+ b.addTypeVariations("_default")
+ }
+
+ if d.isList() {
+ // Add the common list type
+ b.addLayoutVariations("list")
+ }
+
+ if d.Baseof {
+ b.addLayoutVariations("baseof")
+ }
+
+ layouts := b.resolveVariations()
+
+ if !d.RenderingHook && !d.Baseof && isRSS {
+ layouts = append(layouts, "_internal/_default/rss.xml")
+ }
+
+ return layouts
+}
+
+func (l *layoutBuilder) resolveVariations() []string {
+ var layouts []string
+
+ var variations []string
+ name := strings.ToLower(l.f.Name)
+
+ if l.d.Lang != "" {
+ // We prefer the most specific type before language.
+ variations = append(variations, []string{l.d.Lang + "." + name, name, l.d.Lang}...)
+ } else {
+ variations = append(variations, name)
+ }
+
+ variations = append(variations, "")
+
+ for _, typeVar := range l.typeVariations {
+ for _, variation := range variations {
+ for _, layoutVar := range l.layoutVariations {
+ if variation == "" && layoutVar == "" {
+ continue
+ }
+
+ s := constructLayoutPath(typeVar, layoutVar, variation, l.f.MediaType.FirstSuffix.Suffix)
+ if s != "" {
+ layouts = append(layouts, s)
+ }
+ }
+ }
+ }
+
+ return layouts
+}
+
+// constructLayoutPath constructs a layout path given a type, layout,
+// variations, and extension. The path constructed follows the pattern of
+// type/layout.variations.extension. If any value is empty, it will be left out
+// of the path construction.
+//
+// Path construction requires at least 2 of 3 out of layout, variations, and extension.
+// If more than one of those is empty, an empty string is returned.
+func constructLayoutPath(typ, layout, variations, extension string) string {
+ // we already know that layout and variations are not both empty because of
+// checks in resolveVariations().
+ if extension == "" && (layout == "" || variations == "") {
+ return ""
+ }
+
+ // Commence valid path construction...
+
+ var (
+ p strings.Builder
+ needDot bool
+ )
+
+ if typ != "" {
+ p.WriteString(typ)
+ p.WriteString("/")
+ }
+
+ if layout != "" {
+ p.WriteString(layout)
+ needDot = true
+ }
+
+ if variations != "" {
+ if needDot {
+ p.WriteString(".")
+ }
+ p.WriteString(variations)
+ needDot = true
+ }
+
+ if extension != "" {
+ if needDot {
+ p.WriteString(".")
+ }
+ p.WriteString(extension)
+ }
+
+ return p.String()
+}
diff --git a/output/layout_test.go b/output/layout_test.go
new file mode 100644
index 000000000..8b7a2b541
--- /dev/null
+++ b/output/layout_test.go
@@ -0,0 +1,1008 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package output
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/kylelemons/godebug/diff"
+)
+
+func TestLayout(t *testing.T) {
+ c := qt.New(t)
+
+ noExtNoDelimMediaType := media.WithDelimiterAndSuffixes(media.TextType, "", "")
+ noExtMediaType := media.WithDelimiterAndSuffixes(media.TextType, ".", "")
+
+ var (
+ ampType = Format{
+ Name: "AMP",
+ MediaType: media.HTMLType,
+ BaseName: "index",
+ }
+
+ htmlFormat = HTMLFormat
+
+ noExtDelimFormat = Format{
+ Name: "NEM",
+ MediaType: noExtNoDelimMediaType,
+ BaseName: "_redirects",
+ }
+
+ noExt = Format{
+ Name: "NEX",
+ MediaType: noExtMediaType,
+ BaseName: "next",
+ }
+ )
+
+ for _, this := range []struct {
+ name string
+ layoutDescriptor LayoutDescriptor
+ layoutOverride string
+ format Format
+ expect []string
+ }{
+ {
+ "Home",
+ LayoutDescriptor{Kind: "home"},
+ "", ampType,
+ []string{
+ "index.amp.html",
+ "home.amp.html",
+ "list.amp.html",
+ "index.html",
+ "home.html",
+ "list.html",
+ "_default/index.amp.html",
+ "_default/home.amp.html",
+ "_default/list.amp.html",
+ "_default/index.html",
+ "_default/home.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Home baseof",
+ LayoutDescriptor{Kind: "home", Baseof: true},
+ "", ampType,
+ []string{
+ "index-baseof.amp.html",
+ "home-baseof.amp.html",
+ "list-baseof.amp.html",
+ "baseof.amp.html",
+ "index-baseof.html",
+ "home-baseof.html",
+ "list-baseof.html",
+ "baseof.html",
+ "_default/index-baseof.amp.html",
+ "_default/home-baseof.amp.html",
+ "_default/list-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/index-baseof.html",
+ "_default/home-baseof.html",
+ "_default/list-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Home, HTML",
+ LayoutDescriptor{Kind: "home"},
+ "", htmlFormat,
+ // We will eventually get to index.html. This looks stuttery, but makes the lookup logic easy to understand.
+ []string{
+ "index.html.html",
+ "home.html.html",
+ "list.html.html",
+ "index.html",
+ "home.html",
+ "list.html",
+ "_default/index.html.html",
+ "_default/home.html.html",
+ "_default/list.html.html",
+ "_default/index.html",
+ "_default/home.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Home, HTML, baseof",
+ LayoutDescriptor{Kind: "home", Baseof: true},
+ "", htmlFormat,
+ []string{
+ "index-baseof.html.html",
+ "home-baseof.html.html",
+ "list-baseof.html.html",
+ "baseof.html.html",
+ "index-baseof.html",
+ "home-baseof.html",
+ "list-baseof.html",
+ "baseof.html",
+ "_default/index-baseof.html.html",
+ "_default/home-baseof.html.html",
+ "_default/list-baseof.html.html",
+ "_default/baseof.html.html",
+ "_default/index-baseof.html",
+ "_default/home-baseof.html",
+ "_default/list-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Home, french language",
+ LayoutDescriptor{Kind: "home", Lang: "fr"},
+ "", ampType,
+ []string{
+ "index.fr.amp.html",
+ "home.fr.amp.html",
+ "list.fr.amp.html",
+ "index.amp.html",
+ "home.amp.html",
+ "list.amp.html",
+ "index.fr.html",
+ "home.fr.html",
+ "list.fr.html",
+ "index.html",
+ "home.html",
+ "list.html",
+ "_default/index.fr.amp.html",
+ "_default/home.fr.amp.html",
+ "_default/list.fr.amp.html",
+ "_default/index.amp.html",
+ "_default/home.amp.html",
+ "_default/list.amp.html",
+ "_default/index.fr.html",
+ "_default/home.fr.html",
+ "_default/list.fr.html",
+ "_default/index.html",
+ "_default/home.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Home, no ext or delim",
+ LayoutDescriptor{Kind: "home"},
+ "", noExtDelimFormat,
+ []string{
+ "index.nem",
+ "home.nem",
+ "list.nem",
+ "_default/index.nem",
+ "_default/home.nem",
+ "_default/list.nem",
+ },
+ },
+ {
+ "Home, no ext",
+ LayoutDescriptor{Kind: "home"},
+ "", noExt,
+ []string{
+ "index.nex",
+ "home.nex",
+ "list.nex",
+ "_default/index.nex",
+ "_default/home.nex",
+ "_default/list.nex",
+ },
+ },
+ {
+ "Page, no ext or delim",
+ LayoutDescriptor{Kind: "page"},
+ "", noExtDelimFormat,
+ []string{"_default/single.nem"},
+ },
+ {
+ "Section",
+ LayoutDescriptor{Kind: "section", Section: "sect1"},
+ "", ampType,
+ []string{
+ "sect1/sect1.amp.html",
+ "sect1/section.amp.html",
+ "sect1/list.amp.html",
+ "sect1/sect1.html",
+ "sect1/section.html",
+ "sect1/list.html",
+ "section/sect1.amp.html",
+ "section/section.amp.html",
+ "section/list.amp.html",
+ "section/sect1.html",
+ "section/section.html",
+ "section/list.html",
+ "_default/sect1.amp.html",
+ "_default/section.amp.html",
+ "_default/list.amp.html",
+ "_default/sect1.html",
+ "_default/section.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Section, baseof",
+ LayoutDescriptor{Kind: "section", Section: "sect1", Baseof: true},
+ "", ampType,
+ []string{
+ "sect1/sect1-baseof.amp.html",
+ "sect1/section-baseof.amp.html",
+ "sect1/list-baseof.amp.html",
+ "sect1/baseof.amp.html",
+ "sect1/sect1-baseof.html",
+ "sect1/section-baseof.html",
+ "sect1/list-baseof.html",
+ "sect1/baseof.html",
+ "section/sect1-baseof.amp.html",
+ "section/section-baseof.amp.html",
+ "section/list-baseof.amp.html",
+ "section/baseof.amp.html",
+ "section/sect1-baseof.html",
+ "section/section-baseof.html",
+ "section/list-baseof.html",
+ "section/baseof.html",
+ "_default/sect1-baseof.amp.html",
+ "_default/section-baseof.amp.html",
+ "_default/list-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/sect1-baseof.html",
+ "_default/section-baseof.html",
+ "_default/list-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Section, baseof, French, AMP",
+ LayoutDescriptor{Kind: "section", Section: "sect1", Lang: "fr", Baseof: true},
+ "", ampType,
+ []string{
+ "sect1/sect1-baseof.fr.amp.html",
+ "sect1/section-baseof.fr.amp.html",
+ "sect1/list-baseof.fr.amp.html",
+ "sect1/baseof.fr.amp.html",
+ "sect1/sect1-baseof.amp.html",
+ "sect1/section-baseof.amp.html",
+ "sect1/list-baseof.amp.html",
+ "sect1/baseof.amp.html",
+ "sect1/sect1-baseof.fr.html",
+ "sect1/section-baseof.fr.html",
+ "sect1/list-baseof.fr.html",
+ "sect1/baseof.fr.html",
+ "sect1/sect1-baseof.html",
+ "sect1/section-baseof.html",
+ "sect1/list-baseof.html",
+ "sect1/baseof.html",
+ "section/sect1-baseof.fr.amp.html",
+ "section/section-baseof.fr.amp.html",
+ "section/list-baseof.fr.amp.html",
+ "section/baseof.fr.amp.html",
+ "section/sect1-baseof.amp.html",
+ "section/section-baseof.amp.html",
+ "section/list-baseof.amp.html",
+ "section/baseof.amp.html",
+ "section/sect1-baseof.fr.html",
+ "section/section-baseof.fr.html",
+ "section/list-baseof.fr.html",
+ "section/baseof.fr.html",
+ "section/sect1-baseof.html",
+ "section/section-baseof.html",
+ "section/list-baseof.html",
+ "section/baseof.html",
+ "_default/sect1-baseof.fr.amp.html",
+ "_default/section-baseof.fr.amp.html",
+ "_default/list-baseof.fr.amp.html",
+ "_default/baseof.fr.amp.html",
+ "_default/sect1-baseof.amp.html",
+ "_default/section-baseof.amp.html",
+ "_default/list-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/sect1-baseof.fr.html",
+ "_default/section-baseof.fr.html",
+ "_default/list-baseof.fr.html",
+ "_default/baseof.fr.html",
+ "_default/sect1-baseof.html",
+ "_default/section-baseof.html",
+ "_default/list-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Section with layout",
+ LayoutDescriptor{Kind: "section", Section: "sect1", Layout: "mylayout"},
+ "", ampType,
+ []string{
+ "sect1/mylayout.amp.html",
+ "sect1/sect1.amp.html",
+ "sect1/section.amp.html",
+ "sect1/list.amp.html",
+ "sect1/mylayout.html",
+ "sect1/sect1.html",
+ "sect1/section.html",
+ "sect1/list.html",
+ "section/mylayout.amp.html",
+ "section/sect1.amp.html",
+ "section/section.amp.html",
+ "section/list.amp.html",
+ "section/mylayout.html",
+ "section/sect1.html",
+ "section/section.html",
+ "section/list.html",
+ "_default/mylayout.amp.html",
+ "_default/sect1.amp.html",
+ "_default/section.amp.html",
+ "_default/list.amp.html",
+ "_default/mylayout.html",
+ "_default/sect1.html",
+ "_default/section.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Term, French, AMP",
+ LayoutDescriptor{Kind: "term", Section: "tags", Lang: "fr"},
+ "", ampType,
+ []string{
+ "term/term.fr.amp.html",
+ "term/tags.fr.amp.html",
+ "term/taxonomy.fr.amp.html",
+ "term/list.fr.amp.html",
+ "term/term.amp.html",
+ "term/tags.amp.html",
+ "term/taxonomy.amp.html",
+ "term/list.amp.html",
+ "term/term.fr.html",
+ "term/tags.fr.html",
+ "term/taxonomy.fr.html",
+ "term/list.fr.html",
+ "term/term.html",
+ "term/tags.html",
+ "term/taxonomy.html",
+ "term/list.html",
+ "taxonomy/term.fr.amp.html",
+ "taxonomy/tags.fr.amp.html",
+ "taxonomy/taxonomy.fr.amp.html",
+ "taxonomy/list.fr.amp.html",
+ "taxonomy/term.amp.html",
+ "taxonomy/tags.amp.html",
+ "taxonomy/taxonomy.amp.html",
+ "taxonomy/list.amp.html",
+ "taxonomy/term.fr.html",
+ "taxonomy/tags.fr.html",
+ "taxonomy/taxonomy.fr.html",
+ "taxonomy/list.fr.html",
+ "taxonomy/term.html",
+ "taxonomy/tags.html",
+ "taxonomy/taxonomy.html",
+ "taxonomy/list.html",
+ "tags/term.fr.amp.html",
+ "tags/tags.fr.amp.html",
+ "tags/taxonomy.fr.amp.html",
+ "tags/list.fr.amp.html",
+ "tags/term.amp.html",
+ "tags/tags.amp.html",
+ "tags/taxonomy.amp.html",
+ "tags/list.amp.html",
+ "tags/term.fr.html",
+ "tags/tags.fr.html",
+ "tags/taxonomy.fr.html",
+ "tags/list.fr.html",
+ "tags/term.html",
+ "tags/tags.html",
+ "tags/taxonomy.html",
+ "tags/list.html",
+ "_default/term.fr.amp.html",
+ "_default/tags.fr.amp.html",
+ "_default/taxonomy.fr.amp.html",
+ "_default/list.fr.amp.html",
+ "_default/term.amp.html",
+ "_default/tags.amp.html",
+ "_default/taxonomy.amp.html",
+ "_default/list.amp.html",
+ "_default/term.fr.html",
+ "_default/tags.fr.html",
+ "_default/taxonomy.fr.html",
+ "_default/list.fr.html",
+ "_default/term.html",
+ "_default/tags.html",
+ "_default/taxonomy.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Term, baseof, French, AMP",
+ LayoutDescriptor{Kind: "term", Section: "tags", Lang: "fr", Baseof: true},
+ "", ampType,
+ []string{
+ "term/term-baseof.fr.amp.html",
+ "term/tags-baseof.fr.amp.html",
+ "term/taxonomy-baseof.fr.amp.html",
+ "term/list-baseof.fr.amp.html",
+ "term/baseof.fr.amp.html",
+ "term/term-baseof.amp.html",
+ "term/tags-baseof.amp.html",
+ "term/taxonomy-baseof.amp.html",
+ "term/list-baseof.amp.html",
+ "term/baseof.amp.html",
+ "term/term-baseof.fr.html",
+ "term/tags-baseof.fr.html",
+ "term/taxonomy-baseof.fr.html",
+ "term/list-baseof.fr.html",
+ "term/baseof.fr.html",
+ "term/term-baseof.html",
+ "term/tags-baseof.html",
+ "term/taxonomy-baseof.html",
+ "term/list-baseof.html",
+ "term/baseof.html",
+ "taxonomy/term-baseof.fr.amp.html",
+ "taxonomy/tags-baseof.fr.amp.html",
+ "taxonomy/taxonomy-baseof.fr.amp.html",
+ "taxonomy/list-baseof.fr.amp.html",
+ "taxonomy/baseof.fr.amp.html",
+ "taxonomy/term-baseof.amp.html",
+ "taxonomy/tags-baseof.amp.html",
+ "taxonomy/taxonomy-baseof.amp.html",
+ "taxonomy/list-baseof.amp.html",
+ "taxonomy/baseof.amp.html",
+ "taxonomy/term-baseof.fr.html",
+ "taxonomy/tags-baseof.fr.html",
+ "taxonomy/taxonomy-baseof.fr.html",
+ "taxonomy/list-baseof.fr.html",
+ "taxonomy/baseof.fr.html",
+ "taxonomy/term-baseof.html",
+ "taxonomy/tags-baseof.html",
+ "taxonomy/taxonomy-baseof.html",
+ "taxonomy/list-baseof.html",
+ "taxonomy/baseof.html",
+ "tags/term-baseof.fr.amp.html",
+ "tags/tags-baseof.fr.amp.html",
+ "tags/taxonomy-baseof.fr.amp.html",
+ "tags/list-baseof.fr.amp.html",
+ "tags/baseof.fr.amp.html",
+ "tags/term-baseof.amp.html",
+ "tags/tags-baseof.amp.html",
+ "tags/taxonomy-baseof.amp.html",
+ "tags/list-baseof.amp.html",
+ "tags/baseof.amp.html",
+ "tags/term-baseof.fr.html",
+ "tags/tags-baseof.fr.html",
+ "tags/taxonomy-baseof.fr.html",
+ "tags/list-baseof.fr.html",
+ "tags/baseof.fr.html",
+ "tags/term-baseof.html",
+ "tags/tags-baseof.html",
+ "tags/taxonomy-baseof.html",
+ "tags/list-baseof.html",
+ "tags/baseof.html",
+ "_default/term-baseof.fr.amp.html",
+ "_default/tags-baseof.fr.amp.html",
+ "_default/taxonomy-baseof.fr.amp.html",
+ "_default/list-baseof.fr.amp.html",
+ "_default/baseof.fr.amp.html",
+ "_default/term-baseof.amp.html",
+ "_default/tags-baseof.amp.html",
+ "_default/taxonomy-baseof.amp.html",
+ "_default/list-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/term-baseof.fr.html",
+ "_default/tags-baseof.fr.html",
+ "_default/taxonomy-baseof.fr.html",
+ "_default/list-baseof.fr.html",
+ "_default/baseof.fr.html",
+ "_default/term-baseof.html",
+ "_default/tags-baseof.html",
+ "_default/taxonomy-baseof.html",
+ "_default/list-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Term",
+ LayoutDescriptor{Kind: "term", Section: "tags"},
+ "", ampType,
+ []string{
+ "term/term.amp.html",
+ "term/tags.amp.html",
+ "term/taxonomy.amp.html",
+ "term/list.amp.html",
+ "term/term.html",
+ "term/tags.html",
+ "term/taxonomy.html",
+ "term/list.html",
+ "taxonomy/term.amp.html",
+ "taxonomy/tags.amp.html",
+ "taxonomy/taxonomy.amp.html",
+ "taxonomy/list.amp.html",
+ "taxonomy/term.html",
+ "taxonomy/tags.html",
+ "taxonomy/taxonomy.html",
+ "taxonomy/list.html",
+ "tags/term.amp.html",
+ "tags/tags.amp.html",
+ "tags/taxonomy.amp.html",
+ "tags/list.amp.html",
+ "tags/term.html",
+ "tags/tags.html",
+ "tags/taxonomy.html",
+ "tags/list.html",
+ "_default/term.amp.html",
+ "_default/tags.amp.html",
+ "_default/taxonomy.amp.html",
+ "_default/list.amp.html",
+ "_default/term.html",
+ "_default/tags.html",
+ "_default/taxonomy.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Taxonomy",
+ LayoutDescriptor{Kind: "taxonomy", Section: "categories"},
+ "", ampType,
+ []string{
+ "categories/categories.terms.amp.html",
+ "categories/terms.amp.html",
+ "categories/taxonomy.amp.html",
+ "categories/list.amp.html",
+ "categories/categories.terms.html",
+ "categories/terms.html",
+ "categories/taxonomy.html",
+ "categories/list.html",
+ "taxonomy/categories.terms.amp.html",
+ "taxonomy/terms.amp.html",
+ "taxonomy/taxonomy.amp.html",
+ "taxonomy/list.amp.html",
+ "taxonomy/categories.terms.html",
+ "taxonomy/terms.html",
+ "taxonomy/taxonomy.html",
+ "taxonomy/list.html",
+ "_default/categories.terms.amp.html",
+ "_default/terms.amp.html",
+ "_default/taxonomy.amp.html",
+ "_default/list.amp.html",
+ "_default/categories.terms.html",
+ "_default/terms.html",
+ "_default/taxonomy.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Page",
+ LayoutDescriptor{Kind: "page"},
+ "", ampType,
+ []string{
+ "_default/single.amp.html",
+ "_default/single.html",
+ },
+ },
+ {
+ "Page, baseof",
+ LayoutDescriptor{Kind: "page", Baseof: true},
+ "", ampType,
+ []string{
+ "_default/single-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/single-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Page with layout",
+ LayoutDescriptor{Kind: "page", Layout: "mylayout"},
+ "", ampType,
+ []string{
+ "_default/mylayout.amp.html",
+ "_default/single.amp.html",
+ "_default/mylayout.html",
+ "_default/single.html",
+ },
+ },
+ {
+ "Page with layout, baseof",
+ LayoutDescriptor{Kind: "page", Layout: "mylayout", Baseof: true},
+ "", ampType,
+ []string{
+ "_default/mylayout-baseof.amp.html",
+ "_default/single-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/mylayout-baseof.html",
+ "_default/single-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Page with layout and type",
+ LayoutDescriptor{Kind: "page", Layout: "mylayout", Type: "myttype"},
+ "", ampType,
+ []string{
+ "myttype/mylayout.amp.html",
+ "myttype/single.amp.html",
+ "myttype/mylayout.html",
+ "myttype/single.html",
+ "_default/mylayout.amp.html",
+ "_default/single.amp.html",
+ "_default/mylayout.html",
+ "_default/single.html",
+ },
+ },
+ {
+ "Page baseof with layout and type",
+ LayoutDescriptor{Kind: "page", Layout: "mylayout", Type: "myttype", Baseof: true},
+ "", ampType,
+ []string{
+ "myttype/mylayout-baseof.amp.html",
+ "myttype/single-baseof.amp.html",
+ "myttype/baseof.amp.html",
+ "myttype/mylayout-baseof.html",
+ "myttype/single-baseof.html",
+ "myttype/baseof.html",
+ "_default/mylayout-baseof.amp.html",
+ "_default/single-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/mylayout-baseof.html",
+ "_default/single-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Page baseof with layout and type in French",
+ LayoutDescriptor{Kind: "page", Layout: "mylayout", Type: "myttype", Lang: "fr", Baseof: true},
+ "", ampType,
+ []string{
+ "myttype/mylayout-baseof.fr.amp.html",
+ "myttype/single-baseof.fr.amp.html",
+ "myttype/baseof.fr.amp.html",
+ "myttype/mylayout-baseof.amp.html",
+ "myttype/single-baseof.amp.html",
+ "myttype/baseof.amp.html",
+ "myttype/mylayout-baseof.fr.html",
+ "myttype/single-baseof.fr.html",
+ "myttype/baseof.fr.html",
+ "myttype/mylayout-baseof.html",
+ "myttype/single-baseof.html",
+ "myttype/baseof.html",
+ "_default/mylayout-baseof.fr.amp.html",
+ "_default/single-baseof.fr.amp.html",
+ "_default/baseof.fr.amp.html",
+ "_default/mylayout-baseof.amp.html",
+ "_default/single-baseof.amp.html",
+ "_default/baseof.amp.html",
+ "_default/mylayout-baseof.fr.html",
+ "_default/single-baseof.fr.html",
+ "_default/baseof.fr.html",
+ "_default/mylayout-baseof.html",
+ "_default/single-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Page with layout and type with subtype",
+ LayoutDescriptor{Kind: "page", Layout: "mylayout", Type: "myttype/mysubtype"},
+ "", ampType,
+ []string{
+ "myttype/mysubtype/mylayout.amp.html",
+ "myttype/mysubtype/single.amp.html",
+ "myttype/mysubtype/mylayout.html",
+ "myttype/mysubtype/single.html",
+ "_default/mylayout.amp.html",
+ "_default/single.amp.html",
+ "_default/mylayout.html",
+ "_default/single.html",
+ },
+ },
+ // RSS
+ {
+ "RSS Home",
+ LayoutDescriptor{Kind: "home"},
+ "", RSSFormat,
+ []string{
+ "index.rss.xml",
+ "home.rss.xml",
+ "rss.xml",
+ "list.rss.xml",
+ "index.xml",
+ "home.xml",
+ "list.xml",
+ "_default/index.rss.xml",
+ "_default/home.rss.xml",
+ "_default/rss.xml",
+ "_default/list.rss.xml",
+ "_default/index.xml",
+ "_default/home.xml",
+ "_default/list.xml",
+ "_internal/_default/rss.xml",
+ },
+ },
+ {
+ "RSS Home, baseof",
+ LayoutDescriptor{Kind: "home", Baseof: true},
+ "", RSSFormat,
+ []string{
+ "index-baseof.rss.xml",
+ "home-baseof.rss.xml",
+ "list-baseof.rss.xml",
+ "baseof.rss.xml",
+ "index-baseof.xml",
+ "home-baseof.xml",
+ "list-baseof.xml",
+ "baseof.xml",
+ "_default/index-baseof.rss.xml",
+ "_default/home-baseof.rss.xml",
+ "_default/list-baseof.rss.xml",
+ "_default/baseof.rss.xml",
+ "_default/index-baseof.xml",
+ "_default/home-baseof.xml",
+ "_default/list-baseof.xml",
+ "_default/baseof.xml",
+ },
+ },
+ {
+ "RSS Section",
+ LayoutDescriptor{Kind: "section", Section: "sect1"},
+ "", RSSFormat,
+ []string{
+ "sect1/sect1.rss.xml",
+ "sect1/section.rss.xml",
+ "sect1/rss.xml",
+ "sect1/list.rss.xml",
+ "sect1/sect1.xml",
+ "sect1/section.xml",
+ "sect1/list.xml",
+ "section/sect1.rss.xml",
+ "section/section.rss.xml",
+ "section/rss.xml",
+ "section/list.rss.xml",
+ "section/sect1.xml",
+ "section/section.xml",
+ "section/list.xml",
+ "_default/sect1.rss.xml",
+ "_default/section.rss.xml",
+ "_default/rss.xml",
+ "_default/list.rss.xml",
+ "_default/sect1.xml",
+ "_default/section.xml",
+ "_default/list.xml",
+ "_internal/_default/rss.xml",
+ },
+ },
+ {
+ "RSS Term",
+ LayoutDescriptor{Kind: "term", Section: "tag"},
+ "", RSSFormat,
+ []string{
+ "term/term.rss.xml",
+ "term/tag.rss.xml",
+ "term/taxonomy.rss.xml",
+ "term/rss.xml",
+ "term/list.rss.xml",
+ "term/term.xml",
+ "term/tag.xml",
+ "term/taxonomy.xml",
+ "term/list.xml",
+ "taxonomy/term.rss.xml",
+ "taxonomy/tag.rss.xml",
+ "taxonomy/taxonomy.rss.xml",
+ "taxonomy/rss.xml",
+ "taxonomy/list.rss.xml",
+ "taxonomy/term.xml",
+ "taxonomy/tag.xml",
+ "taxonomy/taxonomy.xml",
+ "taxonomy/list.xml",
+ "tag/term.rss.xml",
+ "tag/tag.rss.xml",
+ "tag/taxonomy.rss.xml",
+ "tag/rss.xml",
+ "tag/list.rss.xml",
+ "tag/term.xml",
+ "tag/tag.xml",
+ "tag/taxonomy.xml",
+ "tag/list.xml",
+ "_default/term.rss.xml",
+ "_default/tag.rss.xml",
+ "_default/taxonomy.rss.xml",
+ "_default/rss.xml",
+ "_default/list.rss.xml",
+ "_default/term.xml",
+ "_default/tag.xml",
+ "_default/taxonomy.xml",
+ "_default/list.xml",
+ "_internal/_default/rss.xml",
+ },
+ },
+ {
+ "RSS Taxonomy",
+ LayoutDescriptor{Kind: "taxonomy", Section: "tag"},
+ "", RSSFormat,
+ []string{
+ "tag/tag.terms.rss.xml",
+ "tag/terms.rss.xml",
+ "tag/taxonomy.rss.xml",
+ "tag/rss.xml",
+ "tag/list.rss.xml",
+ "tag/tag.terms.xml",
+ "tag/terms.xml",
+ "tag/taxonomy.xml",
+ "tag/list.xml",
+ "taxonomy/tag.terms.rss.xml",
+ "taxonomy/terms.rss.xml",
+ "taxonomy/taxonomy.rss.xml",
+ "taxonomy/rss.xml",
+ "taxonomy/list.rss.xml",
+ "taxonomy/tag.terms.xml",
+ "taxonomy/terms.xml",
+ "taxonomy/taxonomy.xml",
+ "taxonomy/list.xml",
+ "_default/tag.terms.rss.xml",
+ "_default/terms.rss.xml",
+ "_default/taxonomy.rss.xml",
+ "_default/rss.xml",
+ "_default/list.rss.xml",
+ "_default/tag.terms.xml",
+ "_default/terms.xml",
+ "_default/taxonomy.xml",
+ "_default/list.xml",
+ "_internal/_default/rss.xml",
+ },
+ },
+ {
+ "Home plain text",
+ LayoutDescriptor{Kind: "home"},
+ "", JSONFormat,
+ []string{
+ "index.json.json",
+ "home.json.json",
+ "list.json.json",
+ "index.json",
+ "home.json",
+ "list.json",
+ "_default/index.json.json",
+ "_default/home.json.json",
+ "_default/list.json.json",
+ "_default/index.json",
+ "_default/home.json",
+ "_default/list.json",
+ },
+ },
+ {
+ "Page plain text",
+ LayoutDescriptor{Kind: "page"},
+ "", JSONFormat,
+ []string{
+ "_default/single.json.json",
+ "_default/single.json",
+ },
+ },
+ {
+ "Reserved section, shortcodes",
+ LayoutDescriptor{Kind: "section", Section: "shortcodes", Type: "shortcodes"},
+ "", ampType,
+ []string{
+ "section/shortcodes.amp.html",
+ "section/section.amp.html",
+ "section/list.amp.html",
+ "section/shortcodes.html",
+ "section/section.html",
+ "section/list.html",
+ "_default/shortcodes.amp.html",
+ "_default/section.amp.html",
+ "_default/list.amp.html",
+ "_default/shortcodes.html",
+ "_default/section.html",
+ "_default/list.html",
+ },
+ },
+ {
+ "Reserved section, partials",
+ LayoutDescriptor{Kind: "section", Section: "partials", Type: "partials"},
+ "", ampType,
+ []string{
+ "section/partials.amp.html",
+ "section/section.amp.html",
+ "section/list.amp.html",
+ "section/partials.html",
+ "section/section.html",
+ "section/list.html",
+ "_default/partials.amp.html",
+ "_default/section.amp.html",
+ "_default/list.amp.html",
+ "_default/partials.html",
+ "_default/section.html",
+ "_default/list.html",
+ },
+ },
+ // This is currently always HTML only
+ {
+ "404, HTML",
+ LayoutDescriptor{Kind: "404"},
+ "", htmlFormat,
+ []string{
+ "404.html.html",
+ "404.html",
+ },
+ },
+ {
+ "404, HTML baseof",
+ LayoutDescriptor{Kind: "404", Baseof: true},
+ "", htmlFormat,
+ []string{
+ "404-baseof.html.html",
+ "baseof.html.html",
+ "404-baseof.html",
+ "baseof.html",
+ "_default/404-baseof.html.html",
+ "_default/baseof.html.html",
+ "_default/404-baseof.html",
+ "_default/baseof.html",
+ },
+ },
+ {
+ "Content hook",
+ LayoutDescriptor{Kind: "render-link", RenderingHook: true, Layout: "mylayout", Section: "blog"},
+ "", ampType,
+ []string{
+ "blog/_markup/render-link.amp.html",
+ "blog/_markup/render-link.html",
+ "_default/_markup/render-link.amp.html",
+ "_default/_markup/render-link.html",
+ },
+ },
+ } {
+ c.Run(this.name, func(c *qt.C) {
+ l := NewLayoutHandler()
+
+ layouts, err := l.For(this.layoutDescriptor, this.format)
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(layouts, qt.Not(qt.IsNil), qt.Commentf(this.layoutDescriptor.Kind))
+
+ if !reflect.DeepEqual(layouts, this.expect) {
+ r := strings.NewReplacer(
+ "[", "\t\"",
+ "]", "\",",
+ " ", "\",\n\t\"",
+ )
+ fmtGot := r.Replace(fmt.Sprintf("%v", layouts))
+ fmtExp := r.Replace(fmt.Sprintf("%v", this.expect))
+
+ c.Fatalf("got %d items, expected %d:\nGot:\n\t%v\nExpected:\n\t%v\nDiff:\n%s", len(layouts), len(this.expect), layouts, this.expect, diff.Diff(fmtExp, fmtGot))
+
+ }
+ })
+ }
+}
+
+func BenchmarkLayout(b *testing.B) {
+ descriptor := LayoutDescriptor{Kind: "taxonomy", Section: "categories"}
+ l := NewLayoutHandler()
+
+ for i := 0; i < b.N; i++ {
+ _, err := l.For(descriptor, HTMLFormat)
+ if err != nil {
+ panic(err)
+ }
+ }
+}
+
+func BenchmarkLayoutUncached(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ descriptor := LayoutDescriptor{Kind: "taxonomy", Section: "categories"}
+ l := NewLayoutHandler()
+
+ _, err := l.For(descriptor, HTMLFormat)
+ if err != nil {
+ panic(err)
+ }
+ }
+}
diff --git a/output/outputFormat.go b/output/outputFormat.go
new file mode 100644
index 000000000..722079df9
--- /dev/null
+++ b/output/outputFormat.go
@@ -0,0 +1,412 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package output
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "sort"
+ "strings"
+
+ "github.com/mitchellh/mapstructure"
+
+ "github.com/gohugoio/hugo/media"
+)
+
+// Format represents an output representation, usually to a file on disk.
+type Format struct {
+ // The Name is used as an identifier. Internal output formats (i.e. HTML and RSS)
+ // can be overridden by providing a new definition for those types.
+ Name string `json:"name"`
+
+ MediaType media.Type `json:"-"`
+
+	// Must be set to a value when there are two or more conflicting media types for the same resource.
+ Path string `json:"path"`
+
+ // The base output file name used when not using "ugly URLs", defaults to "index".
+ BaseName string `json:"baseName"`
+
+ // The value to use for rel links
+ //
+ // See https://www.w3schools.com/tags/att_link_rel.asp
+ //
+ // AMP has a special requirement in this department, see:
+ // https://www.ampproject.org/docs/guides/deploy/discovery
+ // I.e.:
+ // <link rel="amphtml" href="https://www.example.com/url/to/amp/document.html">
+ Rel string `json:"rel"`
+
+ // The protocol to use, i.e. "webcal://". Defaults to the protocol of the baseURL.
+ Protocol string `json:"protocol"`
+
+ // IsPlainText decides whether to use text/template or html/template
+ // as template parser.
+ IsPlainText bool `json:"isPlainText"`
+
+	// IsHTML returns whether this format is in the HTML family. This includes
+ // HTML, AMP etc. This is used to decide when to create alias redirects etc.
+ IsHTML bool `json:"isHTML"`
+
+ // Enable to ignore the global uglyURLs setting.
+ NoUgly bool `json:"noUgly"`
+
+ // Enable if it doesn't make sense to include this format in an alternative
+ // format listing, CSS being one good example.
+ // Note that we use the term "alternative" and not "alternate" here, as it
+ // does not necessarily replace the other format, it is an alternative representation.
+ NotAlternative bool `json:"notAlternative"`
+
+ // Setting this will make this output format control the value of
+ // .Permalink and .RelPermalink for a rendered Page.
+ // If not set, these values will point to the main (first) output format
+ // configured. That is probably the behaviour you want in most situations,
+ // as you probably don't want to link back to the RSS version of a page, as an
+ // example. AMP would, however, be a good example of an output format where this
+ // behaviour is wanted.
+ Permalinkable bool `json:"permalinkable"`
+
+ // Setting this to a non-zero value will be used as the first sort criteria.
+ Weight int `json:"weight"`
+}
+
+// An ordered list of built-in output formats.
+var (
+ AMPFormat = Format{
+ Name: "AMP",
+ MediaType: media.HTMLType,
+ BaseName: "index",
+ Path: "amp",
+ Rel: "amphtml",
+ IsHTML: true,
+ Permalinkable: true,
+ // See https://www.ampproject.org/learn/overview/
+ }
+
+ CalendarFormat = Format{
+ Name: "Calendar",
+ MediaType: media.CalendarType,
+ IsPlainText: true,
+ Protocol: "webcal://",
+ BaseName: "index",
+ Rel: "alternate",
+ }
+
+ CSSFormat = Format{
+ Name: "CSS",
+ MediaType: media.CSSType,
+ BaseName: "styles",
+ IsPlainText: true,
+ Rel: "stylesheet",
+ NotAlternative: true,
+ }
+ CSVFormat = Format{
+ Name: "CSV",
+ MediaType: media.CSVType,
+ BaseName: "index",
+ IsPlainText: true,
+ Rel: "alternate",
+ }
+
+ HTMLFormat = Format{
+ Name: "HTML",
+ MediaType: media.HTMLType,
+ BaseName: "index",
+ Rel: "canonical",
+ IsHTML: true,
+ Permalinkable: true,
+
+ // Weight will be used as first sort criteria. HTML will, by default,
+ // be rendered first, but set it to 10 so it's easy to put one above it.
+ Weight: 10,
+ }
+
+ MarkdownFormat = Format{
+ Name: "MARKDOWN",
+ MediaType: media.MarkdownType,
+ BaseName: "index",
+ Rel: "alternate",
+ IsPlainText: true,
+ }
+
+ JSONFormat = Format{
+ Name: "JSON",
+ MediaType: media.JSONType,
+ BaseName: "index",
+ IsPlainText: true,
+ Rel: "alternate",
+ }
+
+ WebAppManifestFormat = Format{
+ Name: "WebAppManifest",
+ MediaType: media.WebAppManifestType,
+ BaseName: "manifest",
+ IsPlainText: true,
+ NotAlternative: true,
+ Rel: "manifest",
+ }
+
+ RobotsTxtFormat = Format{
+ Name: "ROBOTS",
+ MediaType: media.TextType,
+ BaseName: "robots",
+ IsPlainText: true,
+ Rel: "alternate",
+ }
+
+ RSSFormat = Format{
+ Name: "RSS",
+ MediaType: media.RSSType,
+ BaseName: "index",
+ NoUgly: true,
+ Rel: "alternate",
+ }
+
+ SitemapFormat = Format{
+ Name: "Sitemap",
+ MediaType: media.XMLType,
+ BaseName: "sitemap",
+ NoUgly: true,
+ Rel: "sitemap",
+ }
+)
+
+// DefaultFormats contains the default output formats supported by Hugo.
+var DefaultFormats = Formats{
+ AMPFormat,
+ CalendarFormat,
+ CSSFormat,
+ CSVFormat,
+ HTMLFormat,
+ JSONFormat,
+ MarkdownFormat,
+ WebAppManifestFormat,
+ RobotsTxtFormat,
+ RSSFormat,
+ SitemapFormat,
+}
+
+func init() {
+ sort.Sort(DefaultFormats)
+}
+
+// Formats is a slice of Format.
+type Formats []Format
+
+func (formats Formats) Len() int { return len(formats) }
+func (formats Formats) Swap(i, j int) { formats[i], formats[j] = formats[j], formats[i] }
+func (formats Formats) Less(i, j int) bool {
+ fi, fj := formats[i], formats[j]
+ if fi.Weight == fj.Weight {
+ return fi.Name < fj.Name
+ }
+
+ if fj.Weight == 0 {
+ return true
+ }
+
+ return fi.Weight > 0 && fi.Weight < fj.Weight
+}
+
+// GetBySuffix gets an output format given as suffix, e.g. "html".
+// It will return false if no format could be found, or if the suffix given
+// is ambiguous.
+// The lookup is case insensitive.
+func (formats Formats) GetBySuffix(suffix string) (f Format, found bool) {
+ for _, ff := range formats {
+ for _, suffix2 := range ff.MediaType.Suffixes() {
+ if strings.EqualFold(suffix, suffix2) {
+ if found {
+ // ambiguous
+ found = false
+ return
+ }
+ f = ff
+ found = true
+ }
+ }
+ }
+ return
+}
+
+// GetByName gets a format by its identifier name.
+func (formats Formats) GetByName(name string) (f Format, found bool) {
+ for _, ff := range formats {
+ if strings.EqualFold(name, ff.Name) {
+ f = ff
+ found = true
+ return
+ }
+ }
+ return
+}
+
+// GetByNames gets a list of formats given a list of identifiers.
+func (formats Formats) GetByNames(names ...string) (Formats, error) {
+ var types []Format
+
+ for _, name := range names {
+ tpe, ok := formats.GetByName(name)
+ if !ok {
+ return types, fmt.Errorf("OutputFormat with key %q not found", name)
+ }
+ types = append(types, tpe)
+ }
+ return types, nil
+}
+
+// FromFilename gets a Format given a filename.
+func (formats Formats) FromFilename(filename string) (f Format, found bool) {
+ // mytemplate.amp.html
+ // mytemplate.html
+ // mytemplate
+ var ext, outFormat string
+
+ parts := strings.Split(filename, ".")
+ if len(parts) > 2 {
+ outFormat = parts[1]
+ ext = parts[2]
+ } else if len(parts) > 1 {
+ ext = parts[1]
+ }
+
+ if outFormat != "" {
+ return formats.GetByName(outFormat)
+ }
+
+ if ext != "" {
+ f, found = formats.GetBySuffix(ext)
+ if !found && len(parts) == 2 {
+ // For extensionless output formats (e.g. Netlify's _redirects)
+ // we must fall back to using the extension as format lookup.
+ f, found = formats.GetByName(ext)
+ }
+ }
+ return
+}
+
+// DecodeFormats takes a list of output format configurations and merges those,
+// in the order given, with the Hugo defaults as the last resort.
+func DecodeFormats(mediaTypes media.Types, maps ...map[string]any) (Formats, error) {
+ f := make(Formats, len(DefaultFormats))
+ copy(f, DefaultFormats)
+
+ for _, m := range maps {
+ for k, v := range m {
+ found := false
+ for i, vv := range f {
+ if strings.EqualFold(k, vv.Name) {
+ // Merge it with the existing
+ if err := decode(mediaTypes, v, &f[i]); err != nil {
+ return f, err
+ }
+ found = true
+ }
+ }
+ if !found {
+ var newOutFormat Format
+ newOutFormat.Name = k
+ if err := decode(mediaTypes, v, &newOutFormat); err != nil {
+ return f, err
+ }
+
+ // We need values for these
+ if newOutFormat.BaseName == "" {
+ newOutFormat.BaseName = "index"
+ }
+ if newOutFormat.Rel == "" {
+ newOutFormat.Rel = "alternate"
+ }
+
+ f = append(f, newOutFormat)
+
+ }
+ }
+ }
+
+ sort.Sort(f)
+
+ return f, nil
+}
+
+func decode(mediaTypes media.Types, input any, output *Format) error {
+ config := &mapstructure.DecoderConfig{
+ Metadata: nil,
+ Result: output,
+ WeaklyTypedInput: true,
+ DecodeHook: func(a reflect.Type, b reflect.Type, c any) (any, error) {
+ if a.Kind() == reflect.Map {
+ dataVal := reflect.Indirect(reflect.ValueOf(c))
+ for _, key := range dataVal.MapKeys() {
+ keyStr, ok := key.Interface().(string)
+ if !ok {
+ // Not a string key
+ continue
+ }
+ if strings.EqualFold(keyStr, "mediaType") {
+ // If mediaType is a string, look it up and replace it
+ // in the map.
+ vv := dataVal.MapIndex(key)
+ vvi := vv.Interface()
+
+ switch vviv := vvi.(type) {
+ case media.Type:
+ // OK
+ case string:
+ mediaType, found := mediaTypes.GetByType(vviv)
+ if !found {
+ return c, fmt.Errorf("media type %q not found", vviv)
+ }
+ dataVal.SetMapIndex(key, reflect.ValueOf(mediaType))
+ default:
+ return nil, fmt.Errorf("invalid output format configuration; wrong type for media type, expected string (e.g. text/html), got %T", vvi)
+ }
+ }
+ }
+ }
+ return c, nil
+ },
+ }
+
+ decoder, err := mapstructure.NewDecoder(config)
+ if err != nil {
+ return err
+ }
+
+ if err = decoder.Decode(input); err != nil {
+ return fmt.Errorf("failed to decode output format configuration: %w", err)
+ }
+
+ return nil
+
+}
+
+// BaseFilename returns the base filename of f including an extension (ie.
+// "index.xml").
+func (f Format) BaseFilename() string {
+ return f.BaseName + f.MediaType.FirstSuffix.FullSuffix
+}
+
+// MarshalJSON returns the JSON encoding of f.
+func (f Format) MarshalJSON() ([]byte, error) {
+ type Alias Format
+ return json.Marshal(&struct {
+ MediaType string `json:"mediaType"`
+ Alias
+ }{
+ MediaType: f.MediaType.String(),
+ Alias: (Alias)(f),
+ })
+}
diff --git a/output/outputFormat_test.go b/output/outputFormat_test.go
new file mode 100644
index 000000000..c5c4534bf
--- /dev/null
+++ b/output/outputFormat_test.go
@@ -0,0 +1,267 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package output
+
+import (
+ "sort"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/media"
+)
+
+func TestDefaultTypes(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(CalendarFormat.Name, qt.Equals, "Calendar")
+ c.Assert(CalendarFormat.MediaType, qt.Equals, media.CalendarType)
+ c.Assert(CalendarFormat.Protocol, qt.Equals, "webcal://")
+ c.Assert(CalendarFormat.Path, qt.HasLen, 0)
+ c.Assert(CalendarFormat.IsPlainText, qt.Equals, true)
+ c.Assert(CalendarFormat.IsHTML, qt.Equals, false)
+
+ c.Assert(CSSFormat.Name, qt.Equals, "CSS")
+ c.Assert(CSSFormat.MediaType, qt.Equals, media.CSSType)
+ c.Assert(CSSFormat.Path, qt.HasLen, 0)
+ c.Assert(CSSFormat.Protocol, qt.HasLen, 0) // Will inherit the BaseURL protocol.
+ c.Assert(CSSFormat.IsPlainText, qt.Equals, true)
+ c.Assert(CSSFormat.IsHTML, qt.Equals, false)
+
+ c.Assert(CSVFormat.Name, qt.Equals, "CSV")
+ c.Assert(CSVFormat.MediaType, qt.Equals, media.CSVType)
+ c.Assert(CSVFormat.Path, qt.HasLen, 0)
+ c.Assert(CSVFormat.Protocol, qt.HasLen, 0)
+ c.Assert(CSVFormat.IsPlainText, qt.Equals, true)
+ c.Assert(CSVFormat.IsHTML, qt.Equals, false)
+ c.Assert(CSVFormat.Permalinkable, qt.Equals, false)
+
+ c.Assert(HTMLFormat.Name, qt.Equals, "HTML")
+ c.Assert(HTMLFormat.MediaType, qt.Equals, media.HTMLType)
+ c.Assert(HTMLFormat.Path, qt.HasLen, 0)
+ c.Assert(HTMLFormat.Protocol, qt.HasLen, 0)
+ c.Assert(HTMLFormat.IsPlainText, qt.Equals, false)
+ c.Assert(HTMLFormat.IsHTML, qt.Equals, true)
+ c.Assert(AMPFormat.Permalinkable, qt.Equals, true)
+
+ c.Assert(AMPFormat.Name, qt.Equals, "AMP")
+ c.Assert(AMPFormat.MediaType, qt.Equals, media.HTMLType)
+ c.Assert(AMPFormat.Path, qt.Equals, "amp")
+ c.Assert(AMPFormat.Protocol, qt.HasLen, 0)
+ c.Assert(AMPFormat.IsPlainText, qt.Equals, false)
+ c.Assert(AMPFormat.IsHTML, qt.Equals, true)
+ c.Assert(AMPFormat.Permalinkable, qt.Equals, true)
+
+ c.Assert(RSSFormat.Name, qt.Equals, "RSS")
+ c.Assert(RSSFormat.MediaType, qt.Equals, media.RSSType)
+ c.Assert(RSSFormat.Path, qt.HasLen, 0)
+ c.Assert(RSSFormat.IsPlainText, qt.Equals, false)
+ c.Assert(RSSFormat.NoUgly, qt.Equals, true)
+ c.Assert(CalendarFormat.IsHTML, qt.Equals, false)
+
+ c.Assert(len(DefaultFormats), qt.Equals, 11)
+
+}
+
+func TestGetFormatByName(t *testing.T) {
+ c := qt.New(t)
+ formats := Formats{AMPFormat, CalendarFormat}
+ tp, _ := formats.GetByName("AMp")
+ c.Assert(tp, qt.Equals, AMPFormat)
+ _, found := formats.GetByName("HTML")
+ c.Assert(found, qt.Equals, false)
+ _, found = formats.GetByName("FOO")
+ c.Assert(found, qt.Equals, false)
+}
+
+func TestGetFormatByExt(t *testing.T) {
+ c := qt.New(t)
+ formats1 := Formats{AMPFormat, CalendarFormat}
+ formats2 := Formats{AMPFormat, HTMLFormat, CalendarFormat}
+ tp, _ := formats1.GetBySuffix("html")
+ c.Assert(tp, qt.Equals, AMPFormat)
+ tp, _ = formats1.GetBySuffix("ics")
+ c.Assert(tp, qt.Equals, CalendarFormat)
+ _, found := formats1.GetBySuffix("not")
+ c.Assert(found, qt.Equals, false)
+
+ // ambiguous
+ _, found = formats2.GetBySuffix("html")
+ c.Assert(found, qt.Equals, false)
+}
+
+func TestGetFormatByFilename(t *testing.T) {
+ c := qt.New(t)
+ noExtNoDelimMediaType := media.TextType
+ noExtNoDelimMediaType.Delimiter = ""
+
+ noExtMediaType := media.TextType
+
+ var (
+ noExtDelimFormat = Format{
+ Name: "NEM",
+ MediaType: noExtNoDelimMediaType,
+ BaseName: "_redirects",
+ }
+ noExt = Format{
+ Name: "NEX",
+ MediaType: noExtMediaType,
+ BaseName: "next",
+ }
+ )
+
+ formats := Formats{AMPFormat, HTMLFormat, noExtDelimFormat, noExt, CalendarFormat}
+ f, found := formats.FromFilename("my.amp.html")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(f, qt.Equals, AMPFormat)
+ _, found = formats.FromFilename("my.ics")
+ c.Assert(found, qt.Equals, true)
+ f, found = formats.FromFilename("my.html")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(f, qt.Equals, HTMLFormat)
+ f, found = formats.FromFilename("my.nem")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(f, qt.Equals, noExtDelimFormat)
+ f, found = formats.FromFilename("my.nex")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(f, qt.Equals, noExt)
+ _, found = formats.FromFilename("my.css")
+ c.Assert(found, qt.Equals, false)
+}
+
+func TestDecodeFormats(t *testing.T) {
+ c := qt.New(t)
+
+ mediaTypes := media.Types{media.JSONType, media.XMLType}
+
+ tests := []struct {
+ name string
+ maps []map[string]any
+ shouldError bool
+ assert func(t *testing.T, name string, f Formats)
+ }{
+ {
+ "Redefine JSON",
+ []map[string]any{
+ {
+ "JsON": map[string]any{
+ "baseName": "myindex",
+ "isPlainText": "false",
+ },
+ },
+ },
+ false,
+ func(t *testing.T, name string, f Formats) {
+ msg := qt.Commentf(name)
+ c.Assert(len(f), qt.Equals, len(DefaultFormats), msg)
+ json, _ := f.GetByName("JSON")
+ c.Assert(json.BaseName, qt.Equals, "myindex")
+ c.Assert(json.MediaType, qt.Equals, media.JSONType)
+ c.Assert(json.IsPlainText, qt.Equals, false)
+ },
+ },
+ {
+ "Add XML format with string as mediatype",
+ []map[string]any{
+ {
+ "MYXMLFORMAT": map[string]any{
+ "baseName": "myxml",
+ "mediaType": "application/xml",
+ },
+ },
+ },
+ false,
+ func(t *testing.T, name string, f Formats) {
+ c.Assert(len(f), qt.Equals, len(DefaultFormats)+1)
+ xml, found := f.GetByName("MYXMLFORMAT")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(xml.BaseName, qt.Equals, "myxml")
+ c.Assert(xml.MediaType, qt.Equals, media.XMLType)
+
+ // Verify that we haven't changed the DefaultFormats slice.
+ json, _ := f.GetByName("JSON")
+ c.Assert(json.BaseName, qt.Equals, "index")
+ },
+ },
+ {
+ "Add format unknown mediatype",
+ []map[string]any{
+ {
+ "MYINVALID": map[string]any{
+ "baseName": "mymy",
+ "mediaType": "application/hugo",
+ },
+ },
+ },
+ true,
+ func(t *testing.T, name string, f Formats) {
+ },
+ },
+ {
+ "Add and redefine XML format",
+ []map[string]any{
+ {
+ "MYOTHERXMLFORMAT": map[string]any{
+ "baseName": "myotherxml",
+ "mediaType": media.XMLType,
+ },
+ },
+ {
+ "MYOTHERXMLFORMAT": map[string]any{
+ "baseName": "myredefined",
+ },
+ },
+ },
+ false,
+ func(t *testing.T, name string, f Formats) {
+ c.Assert(len(f), qt.Equals, len(DefaultFormats)+1)
+ xml, found := f.GetByName("MYOTHERXMLFORMAT")
+ c.Assert(found, qt.Equals, true)
+ c.Assert(xml.BaseName, qt.Equals, "myredefined")
+ c.Assert(xml.MediaType, qt.Equals, media.XMLType)
+ },
+ },
+ }
+
+ for _, test := range tests {
+ result, err := DecodeFormats(mediaTypes, test.maps...)
+ msg := qt.Commentf(test.name)
+
+ if test.shouldError {
+ c.Assert(err, qt.Not(qt.IsNil), msg)
+ } else {
+ c.Assert(err, qt.IsNil, msg)
+ test.assert(t, test.name, result)
+ }
+ }
+}
+
+func TestSort(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(DefaultFormats[0].Name, qt.Equals, "HTML")
+ c.Assert(DefaultFormats[1].Name, qt.Equals, "AMP")
+
+ json := JSONFormat
+ json.Weight = 1
+
+ formats := Formats{
+ AMPFormat,
+ HTMLFormat,
+ json,
+ }
+
+ sort.Sort(formats)
+
+ c.Assert(formats[0].Name, qt.Equals, "JSON")
+ c.Assert(formats[1].Name, qt.Equals, "HTML")
+ c.Assert(formats[2].Name, qt.Equals, "AMP")
+}
diff --git a/parser/frontmatter.go b/parser/frontmatter.go
new file mode 100644
index 000000000..150ce038f
--- /dev/null
+++ b/parser/frontmatter.go
@@ -0,0 +1,118 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "encoding/json"
+ "errors"
+ "io"
+
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ toml "github.com/pelletier/go-toml/v2"
+
+ yaml "gopkg.in/yaml.v2"
+
+ xml "github.com/clbanning/mxj/v2"
+)
+
+const (
+ yamlDelimLf = "---\n"
+ tomlDelimLf = "+++\n"
+)
+
+func InterfaceToConfig(in any, format metadecoders.Format, w io.Writer) error {
+ if in == nil {
+ return errors.New("input was nil")
+ }
+
+ switch format {
+ case metadecoders.YAML:
+ b, err := yaml.Marshal(in)
+ if err != nil {
+ return err
+ }
+
+ _, err = w.Write(b)
+ return err
+
+ case metadecoders.TOML:
+ enc := toml.NewEncoder(w)
+ enc.SetIndentTables(true)
+ return enc.Encode(in)
+ case metadecoders.JSON:
+ b, err := json.MarshalIndent(in, "", " ")
+ if err != nil {
+ return err
+ }
+
+ _, err = w.Write(b)
+ if err != nil {
+ return err
+ }
+
+ _, err = w.Write([]byte{'\n'})
+ return err
+ case metadecoders.XML:
+ b, err := xml.AnyXmlIndent(in, "", "\t", "root")
+ if err != nil {
+ return err
+ }
+
+ _, err = w.Write(b)
+ return err
+ default:
+ return errors.New("unsupported Format provided")
+ }
+}
+
+func InterfaceToFrontMatter(in any, format metadecoders.Format, w io.Writer) error {
+ if in == nil {
+ return errors.New("input was nil")
+ }
+
+ switch format {
+ case metadecoders.YAML:
+ _, err := w.Write([]byte(yamlDelimLf))
+ if err != nil {
+ return err
+ }
+
+ err = InterfaceToConfig(in, format, w)
+ if err != nil {
+ return err
+ }
+
+ _, err = w.Write([]byte(yamlDelimLf))
+ return err
+
+ case metadecoders.TOML:
+ _, err := w.Write([]byte(tomlDelimLf))
+ if err != nil {
+ return err
+ }
+
+ err = InterfaceToConfig(in, format, w)
+
+ if err != nil {
+ return err
+ }
+
+ _, err = w.Write([]byte("\n" + tomlDelimLf))
+ return err
+
+ default:
+ return InterfaceToConfig(in, format, w)
+ }
+}
diff --git a/parser/frontmatter_test.go b/parser/frontmatter_test.go
new file mode 100644
index 000000000..7b3ffc100
--- /dev/null
+++ b/parser/frontmatter_test.go
@@ -0,0 +1,78 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "bytes"
+ "reflect"
+ "testing"
+
+ "github.com/gohugoio/hugo/parser/metadecoders"
+)
+
+func TestInterfaceToConfig(t *testing.T) {
+ cases := []struct {
+ input any
+ format metadecoders.Format
+ want []byte
+ isErr bool
+ }{
+ // TOML
+ {map[string]any{}, metadecoders.TOML, nil, false},
+ {
+ map[string]any{"title": "test' 1"},
+ metadecoders.TOML,
+ []byte("title = \"test' 1\"\n"),
+ false,
+ },
+
+ // YAML
+ {map[string]any{}, metadecoders.YAML, []byte("{}\n"), false},
+ {
+ map[string]any{"title": "test 1"},
+ metadecoders.YAML,
+ []byte("title: test 1\n"),
+ false,
+ },
+
+ // JSON
+ {map[string]any{}, metadecoders.JSON, []byte("{}\n"), false},
+ {
+ map[string]any{"title": "test 1"},
+ metadecoders.JSON,
+ []byte("{\n \"title\": \"test 1\"\n}\n"),
+ false,
+ },
+
+ // Errors
+ {nil, metadecoders.TOML, nil, true},
+ {map[string]any{}, "foo", nil, true},
+ }
+
+ for i, c := range cases {
+ var buf bytes.Buffer
+
+ err := InterfaceToConfig(c.input, c.format, &buf)
+ if err != nil {
+ if c.isErr {
+ continue
+ }
+ t.Fatalf("[%d] unexpected error value: %v", i, err)
+ }
+
+ if !reflect.DeepEqual(buf.Bytes(), c.want) {
+ t.Errorf("[%d] not equal:\nwant %q,\n got %q", i, c.want, buf.Bytes())
+ }
+ }
+}
diff --git a/parser/lowercase_camel_json.go b/parser/lowercase_camel_json.go
new file mode 100644
index 000000000..e6605c803
--- /dev/null
+++ b/parser/lowercase_camel_json.go
@@ -0,0 +1,59 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package parser
+
+import (
+ "bytes"
+ "encoding/json"
+ "regexp"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Regexp definitions
+var (
+ keyMatchRegex = regexp.MustCompile(`\"(\w+)\":`)
+ wordBarrierRegex = regexp.MustCompile(`(\w)([A-Z])`)
+)
+
+// Code adapted from https://gist.github.com/piersy/b9934790a8892db1a603820c0c23e4a7
+type LowerCaseCamelJSONMarshaller struct {
+ Value any
+}
+
+func (c LowerCaseCamelJSONMarshaller) MarshalJSON() ([]byte, error) {
+ marshalled, err := json.Marshal(c.Value)
+
+ converted := keyMatchRegex.ReplaceAllFunc(
+ marshalled,
+ func(match []byte) []byte {
+ // Attributes of the form XML, JSON etc.
+ if bytes.Equal(match, bytes.ToUpper(match)) {
+ return bytes.ToLower(match)
+ }
+
+ // Empty keys are valid JSON, only lowercase if we do not have an
+ // empty key.
+ if len(match) > 2 {
+ // Decode first rune after the double quotes
+ r, width := utf8.DecodeRune(match[1:])
+ r = unicode.ToLower(r)
+ utf8.EncodeRune(match[1:width+1], r)
+ }
+ return match
+ },
+ )
+
+ return converted, err
+}
diff --git a/parser/metadecoders/decoder.go b/parser/metadecoders/decoder.go
new file mode 100644
index 000000000..7a76b8eea
--- /dev/null
+++ b/parser/metadecoders/decoder.go
@@ -0,0 +1,311 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metadecoders
+
+import (
+ "bytes"
+ "encoding/csv"
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/niklasfasching/go-org/org"
+
+ xml "github.com/clbanning/mxj/v2"
+ toml "github.com/pelletier/go-toml/v2"
+ "github.com/spf13/afero"
+ "github.com/spf13/cast"
+ jww "github.com/spf13/jwalterweatherman"
+ yaml "gopkg.in/yaml.v2"
+)
+
+// Decoder provides some configuration options for the decoders.
+type Decoder struct {
+ // Delimiter is the field delimiter used in the CSV decoder. It defaults to ','.
+ Delimiter rune
+
+ // Comment, if not 0, is the comment character used in the CSV decoder. Lines beginning with the
+ // Comment character without preceding whitespace are ignored.
+ Comment rune
+}
+
+// OptionsKey is used in cache keys.
+func (d Decoder) OptionsKey() string {
+ var sb strings.Builder
+ sb.WriteRune(d.Delimiter)
+ sb.WriteRune(d.Comment)
+ return sb.String()
+}
+
+// Default is a Decoder in its default configuration.
+var Default = Decoder{
+ Delimiter: ',',
+}
+
+// UnmarshalToMap will unmarshal data in format f into a new map. This is
+// what's needed for Hugo's front matter decoding.
+func (d Decoder) UnmarshalToMap(data []byte, f Format) (map[string]any, error) {
+ m := make(map[string]any)
+ if data == nil {
+ return m, nil
+ }
+
+ err := d.UnmarshalTo(data, f, &m)
+
+ return m, err
+}
+
+// UnmarshalFileToMap is the same as UnmarshalToMap, but reads the data from
+// the given filename.
+func (d Decoder) UnmarshalFileToMap(fs afero.Fs, filename string) (map[string]any, error) {
+ format := FormatFromString(filename)
+ if format == "" {
+ return nil, fmt.Errorf("%q is not a valid configuration format", filename)
+ }
+
+ data, err := afero.ReadFile(fs, filename)
+ if err != nil {
+ return nil, err
+ }
+ return d.UnmarshalToMap(data, format)
+}
+
+// UnmarshalStringTo tries to unmarshal data to a new instance of type typ.
+func (d Decoder) UnmarshalStringTo(data string, typ any) (any, error) {
+ data = strings.TrimSpace(data)
+ // We only check for the possible types in YAML, JSON and TOML.
+ switch typ.(type) {
+ case string:
+ return data, nil
+ case map[string]any:
+ format := d.FormatFromContentString(data)
+ return d.UnmarshalToMap([]byte(data), format)
+ case []any:
+ // A standalone slice. Let YAML handle it.
+ return d.Unmarshal([]byte(data), YAML)
+ case bool:
+ return cast.ToBoolE(data)
+ case int:
+ return cast.ToIntE(data)
+ case int64:
+ return cast.ToInt64E(data)
+ case float64:
+ return cast.ToFloat64E(data)
+ default:
+ return nil, fmt.Errorf("unmarshal: %T not supported", typ)
+ }
+}
+
+// Unmarshal will unmarshal data in format f into an interface{}.
+// This is what's needed for Hugo's /data handling.
+func (d Decoder) Unmarshal(data []byte, f Format) (any, error) {
+ if data == nil {
+ switch f {
+ case CSV:
+ return make([][]string, 0), nil
+ default:
+ return make(map[string]any), nil
+ }
+ }
+ var v any
+ err := d.UnmarshalTo(data, f, &v)
+
+ return v, err
+}
+
+// UnmarshalTo unmarshals data in format f into v.
+func (d Decoder) UnmarshalTo(data []byte, f Format, v any) error {
+ var err error
+
+ switch f {
+ case ORG:
+ err = d.unmarshalORG(data, v)
+ case JSON:
+ err = json.Unmarshal(data, v)
+ case XML:
+ var xmlRoot xml.Map
+ xmlRoot, err = xml.NewMapXml(data)
+
+ var xmlValue map[string]any
+ if err == nil {
+ xmlRootName, err := xmlRoot.Root()
+ if err != nil {
+ return toFileError(f, data, fmt.Errorf("failed to unmarshal XML: %w", err))
+ }
+ xmlValue = xmlRoot[xmlRootName].(map[string]any)
+ }
+
+ switch v := v.(type) {
+ case *map[string]any:
+ *v = xmlValue
+ case *any:
+ *v = xmlValue
+ }
+ case TOML:
+ err = toml.Unmarshal(data, v)
+ case YAML:
+ err = yaml.Unmarshal(data, v)
+ if err != nil {
+ return toFileError(f, data, fmt.Errorf("failed to unmarshal YAML: %w", err))
+ }
+
+ // To support boolean keys, the YAML package unmarshals maps to
+ // map[interface{}]interface{}. Here we recurse through the result
+ // and change all maps to map[string]interface{} like we would've
+ // gotten from `json`.
+ var ptr any
+ switch v.(type) {
+ case *map[string]any:
+ ptr = *v.(*map[string]any)
+ case *any:
+ ptr = *v.(*any)
+ default:
+ // Not a map.
+ }
+
+ if ptr != nil {
+ if mm, changed := stringifyMapKeys(ptr); changed {
+ switch v.(type) {
+ case *map[string]any:
+ *v.(*map[string]any) = mm.(map[string]any)
+ case *any:
+ *v.(*any) = mm
+ }
+ }
+ }
+ case CSV:
+ return d.unmarshalCSV(data, v)
+
+ default:
+ return fmt.Errorf("unmarshal of format %q is not supported", f)
+ }
+
+ if err == nil {
+ return nil
+ }
+
+ return toFileError(f, data, fmt.Errorf("unmarshal failed: %w", err))
+}
+
+func (d Decoder) unmarshalCSV(data []byte, v any) error {
+ r := csv.NewReader(bytes.NewReader(data))
+ r.Comma = d.Delimiter
+ r.Comment = d.Comment
+
+ records, err := r.ReadAll()
+ if err != nil {
+ return err
+ }
+
+ switch v.(type) {
+ case *any:
+ *v.(*any) = records
+ default:
+ return fmt.Errorf("CSV cannot be unmarshaled into %T", v)
+
+ }
+
+ return nil
+}
+
+func parseORGDate(s string) string {
+ r := regexp.MustCompile(`[<\[](\d{4}-\d{2}-\d{2}) .*[>\]]`)
+ if m := r.FindStringSubmatch(s); m != nil {
+ return m[1]
+ }
+ return s
+}
+
+func (d Decoder) unmarshalORG(data []byte, v any) error {
+ config := org.New()
+ config.Log = jww.WARN
+ document := config.Parse(bytes.NewReader(data), "")
+ if document.Error != nil {
+ return document.Error
+ }
+ frontMatter := make(map[string]any, len(document.BufferSettings))
+ for k, v := range document.BufferSettings {
+ k = strings.ToLower(k)
+ if strings.HasSuffix(k, "[]") {
+ frontMatter[k[:len(k)-2]] = strings.Fields(v)
+ } else if k == "tags" || k == "categories" || k == "aliases" {
+ jww.WARN.Printf("Please use '#+%s[]:' notation, automatic conversion is deprecated.", k)
+ frontMatter[k] = strings.Fields(v)
+ } else if k == "date" {
+ frontMatter[k] = parseORGDate(v)
+ } else {
+ frontMatter[k] = v
+ }
+ }
+ switch v.(type) {
+ case *map[string]any:
+ *v.(*map[string]any) = frontMatter
+ default:
+ *v.(*any) = frontMatter
+ }
+ return nil
+}
+
+func toFileError(f Format, data []byte, err error) error {
+ return herrors.NewFileErrorFromName(err, fmt.Sprintf("_stream.%s", f)).UpdateContent(bytes.NewReader(data), nil)
+}
+
+// stringifyMapKeys recurses into in and changes all instances of
+// map[interface{}]interface{} to map[string]interface{}. This is useful to
+// work around the impedance mismatch between JSON and YAML unmarshaling that's
+// described here: https://github.com/go-yaml/yaml/issues/139
+//
+// Inspired by https://github.com/stripe/stripe-mock, MIT licensed
+func stringifyMapKeys(in any) (any, bool) {
+ switch in := in.(type) {
+ case []any:
+ for i, v := range in {
+ if vv, replaced := stringifyMapKeys(v); replaced {
+ in[i] = vv
+ }
+ }
+ case map[string]any:
+ for k, v := range in {
+ if vv, changed := stringifyMapKeys(v); changed {
+ in[k] = vv
+ }
+ }
+ case map[any]any:
+ res := make(map[string]any)
+ var (
+ ok bool
+ err error
+ )
+ for k, v := range in {
+ var ks string
+
+ if ks, ok = k.(string); !ok {
+ ks, err = cast.ToStringE(k)
+ if err != nil {
+ ks = fmt.Sprintf("%v", k)
+ }
+ }
+ if vv, replaced := stringifyMapKeys(v); replaced {
+ res[ks] = vv
+ } else {
+ res[ks] = v
+ }
+ }
+ return res, true
+ }
+
+ return nil, false
+}
diff --git a/parser/metadecoders/decoder_test.go b/parser/metadecoders/decoder_test.go
new file mode 100644
index 000000000..7b762667c
--- /dev/null
+++ b/parser/metadecoders/decoder_test.go
@@ -0,0 +1,299 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metadecoders
+
+import (
+ "reflect"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestUnmarshalXML(t *testing.T) {
+ c := qt.New(t)
+
+ xmlDoc := `<?xml version="1.0" encoding="utf-8" standalone="yes"?>
+ <rss version="2.0"
+ xmlns:atom="http://www.w3.org/2005/Atom">
+ <channel>
+ <title>Example feed</title>
+ <link>https://example.com/</link>
+ <description>Example feed</description>
+ <generator>Hugo -- gohugo.io</generator>
+ <language>en-us</language>
+ <copyright>Example</copyright>
+ <lastBuildDate>Fri, 08 Jan 2021 14:44:10 +0000</lastBuildDate>
+ <atom:link href="https://example.com/feed.xml" rel="self" type="application/rss+xml"/>
+ <item>
+ <title>Example title</title>
+ <link>https://example.com/2021/11/30/example-title/</link>
+ <pubDate>Tue, 30 Nov 2021 15:00:00 +0000</pubDate>
+ <guid>https://example.com/2021/11/30/example-title/</guid>
+ <description>Example description</description>
+ </item>
+ </channel>
+ </rss>`
+
+ expect := map[string]any{
+ "-atom": "http://www.w3.org/2005/Atom", "-version": "2.0",
+ "channel": map[string]any{
+ "copyright": "Example",
+ "description": "Example feed",
+ "generator": "Hugo -- gohugo.io",
+ "item": map[string]any{
+ "description": "Example description",
+ "guid": "https://example.com/2021/11/30/example-title/",
+ "link": "https://example.com/2021/11/30/example-title/",
+ "pubDate": "Tue, 30 Nov 2021 15:00:00 +0000",
+ "title": "Example title"},
+ "language": "en-us",
+ "lastBuildDate": "Fri, 08 Jan 2021 14:44:10 +0000",
+ "link": []any{"https://example.com/", map[string]any{
+ "-href": "https://example.com/feed.xml",
+ "-rel": "self",
+ "-type": "application/rss+xml"}},
+ "title": "Example feed",
+ }}
+
+ d := Default
+
+ m, err := d.Unmarshal([]byte(xmlDoc), XML)
+ c.Assert(err, qt.IsNil)
+ c.Assert(m, qt.DeepEquals, expect)
+
+}
+func TestUnmarshalToMap(t *testing.T) {
+ c := qt.New(t)
+
+ expect := map[string]any{"a": "b"}
+
+ d := Default
+
+ for i, test := range []struct {
+ data string
+ format Format
+ expect any
+ }{
+ {`a = "b"`, TOML, expect},
+ {`a: "b"`, YAML, expect},
+ // Make sure we get all string keys, even for YAML
+ {"a: Easy!\nb:\n c: 2\n d: [3, 4]", YAML, map[string]any{"a": "Easy!", "b": map[string]any{"c": 2, "d": []any{3, 4}}}},
+ {"a:\n true: 1\n false: 2", YAML, map[string]any{"a": map[string]any{"true": 1, "false": 2}}},
+ {`{ "a": "b" }`, JSON, expect},
+ {`<root><a>b</a></root>`, XML, expect},
+ {`#+a: b`, ORG, expect},
+ // errors
+ {`a = b`, TOML, false},
+ {`a,b,c`, CSV, false}, // Use Unmarshal for CSV
+ } {
+ msg := qt.Commentf("%d: %s", i, test.format)
+ m, err := d.UnmarshalToMap([]byte(test.data), test.format)
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), msg)
+ } else {
+ c.Assert(err, qt.IsNil, msg)
+ c.Assert(m, qt.DeepEquals, test.expect, msg)
+ }
+ }
+}
+
+func TestUnmarshalToInterface(t *testing.T) {
+ c := qt.New(t)
+
+ expect := map[string]any{"a": "b"}
+
+ d := Default
+
+ for i, test := range []struct {
+ data string
+ format Format
+ expect any
+ }{
+ {`[ "Brecker", "Blake", "Redman" ]`, JSON, []any{"Brecker", "Blake", "Redman"}},
+ {`{ "a": "b" }`, JSON, expect},
+ {`#+a: b`, ORG, expect},
+ {`#+DATE: <2020-06-26 Fri>`, ORG, map[string]any{"date": "2020-06-26"}},
+ {`a = "b"`, TOML, expect},
+ {`a: "b"`, YAML, expect},
+ {`<root><a>b</a></root>`, XML, expect},
+ {`a,b,c`, CSV, [][]string{{"a", "b", "c"}}},
+ {"a: Easy!\nb:\n c: 2\n d: [3, 4]", YAML, map[string]any{"a": "Easy!", "b": map[string]any{"c": 2, "d": []any{3, 4}}}},
+ // errors
+ {`a = "`, TOML, false},
+ } {
+ msg := qt.Commentf("%d: %s", i, test.format)
+ m, err := d.Unmarshal([]byte(test.data), test.format)
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), msg)
+ } else {
+ c.Assert(err, qt.IsNil, msg)
+ c.Assert(m, qt.DeepEquals, test.expect, msg)
+ }
+
+ }
+}
+
+func TestUnmarshalStringTo(t *testing.T) {
+ c := qt.New(t)
+
+ d := Default
+
+ expectMap := map[string]any{"a": "b"}
+
+ for i, test := range []struct {
+ data string
+ to any
+ expect any
+ }{
+ {"a string", "string", "a string"},
+ {`{ "a": "b" }`, make(map[string]any), expectMap},
+ {"32", int64(1234), int64(32)},
+ {"32", int(1234), int(32)},
+ {"3.14159", float64(1), float64(3.14159)},
+ {"[3,7,9]", []any{}, []any{3, 7, 9}},
+ {"[3.1,7.2,9.3]", []any{}, []any{3.1, 7.2, 9.3}},
+ } {
+ msg := qt.Commentf("%d: %T", i, test.to)
+ m, err := d.UnmarshalStringTo(test.data, test.to)
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), msg)
+ } else {
+ c.Assert(err, qt.IsNil, msg)
+ c.Assert(m, qt.DeepEquals, test.expect, msg)
+ }
+
+ }
+}
+
+func TestStringifyYAMLMapKeys(t *testing.T) {
+ cases := []struct {
+ input any
+ want any
+ replaced bool
+ }{
+ {
+ map[any]any{"a": 1, "b": 2},
+ map[string]any{"a": 1, "b": 2},
+ true,
+ },
+ {
+ map[any]any{"a": []any{1, map[any]any{"b": 2}}},
+ map[string]any{"a": []any{1, map[string]any{"b": 2}}},
+ true,
+ },
+ {
+ map[any]any{true: 1, "b": false},
+ map[string]any{"true": 1, "b": false},
+ true,
+ },
+ {
+ map[any]any{1: "a", 2: "b"},
+ map[string]any{"1": "a", "2": "b"},
+ true,
+ },
+ {
+ map[any]any{"a": map[any]any{"b": 1}},
+ map[string]any{"a": map[string]any{"b": 1}},
+ true,
+ },
+ {
+ map[string]any{"a": map[string]any{"b": 1}},
+ map[string]any{"a": map[string]any{"b": 1}},
+ false,
+ },
+ {
+ []any{map[any]any{1: "a", 2: "b"}},
+ []any{map[string]any{"1": "a", "2": "b"}},
+ false,
+ },
+ }
+
+ for i, c := range cases {
+ res, replaced := stringifyMapKeys(c.input)
+
+ if c.replaced != replaced {
+ t.Fatalf("[%d] Replaced mismatch: %t", i, replaced)
+ }
+ if !c.replaced {
+ res = c.input
+ }
+ if !reflect.DeepEqual(res, c.want) {
+ t.Errorf("[%d] given %q\nwant: %q\n got: %q", i, c.input, c.want, res)
+ }
+ }
+}
+
+func BenchmarkStringifyMapKeysStringsOnlyInterfaceMaps(b *testing.B) {
+ maps := make([]map[any]any, b.N)
+ for i := 0; i < b.N; i++ {
+ maps[i] = map[any]any{
+ "a": map[any]any{
+ "b": 32,
+ "c": 43,
+ "d": map[any]any{
+ "b": 32,
+ "c": 43,
+ },
+ },
+ "b": []any{"a", "b"},
+ "c": "d",
+ }
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ stringifyMapKeys(maps[i])
+ }
+}
+
+func BenchmarkStringifyMapKeysStringsOnlyStringMaps(b *testing.B) {
+ m := map[string]any{
+ "a": map[string]any{
+ "b": 32,
+ "c": 43,
+ "d": map[string]any{
+ "b": 32,
+ "c": 43,
+ },
+ },
+ "b": []any{"a", "b"},
+ "c": "d",
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ stringifyMapKeys(m)
+ }
+}
+
+func BenchmarkStringifyMapKeysIntegers(b *testing.B) {
+ maps := make([]map[any]any, b.N)
+ for i := 0; i < b.N; i++ {
+ maps[i] = map[any]any{
+ 1: map[any]any{
+ 4: 32,
+ 5: 43,
+ 6: map[any]any{
+ 7: 32,
+ 8: 43,
+ },
+ },
+ 2: []any{"a", "b"},
+ 3: "d",
+ }
+ }
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ stringifyMapKeys(maps[i])
+ }
+}
diff --git a/parser/metadecoders/format.go b/parser/metadecoders/format.go
new file mode 100644
index 000000000..d34a261bf
--- /dev/null
+++ b/parser/metadecoders/format.go
@@ -0,0 +1,118 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metadecoders
+
+import (
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/media"
+)
+
+type Format string
+
+const (
+ // These are the supported metadata formats in Hugo. Most of these are also
+ // supported as /data formats.
+ ORG Format = "org"
+ JSON Format = "json"
+ TOML Format = "toml"
+ YAML Format = "yaml"
+ CSV Format = "csv"
+ XML Format = "xml"
+)
+
+// FormatFromString turns formatStr, typically a file extension without any ".",
+// into a Format. It returns an empty string for unknown formats.
+func FormatFromString(formatStr string) Format {
+ formatStr = strings.ToLower(formatStr)
+ if strings.Contains(formatStr, ".") {
+ // Assume a filename
+ formatStr = strings.TrimPrefix(filepath.Ext(formatStr), ".")
+ }
+ switch formatStr {
+ case "yaml", "yml":
+ return YAML
+ case "json":
+ return JSON
+ case "toml":
+ return TOML
+ case "org":
+ return ORG
+ case "csv":
+ return CSV
+ case "xml":
+ return XML
+ }
+
+ return ""
+}
+
+// FormatFromMediaType gets the Format given a MIME type, empty string
+// if unknown.
+func FormatFromMediaType(m media.Type) Format {
+ for _, suffix := range m.Suffixes() {
+ if f := FormatFromString(suffix); f != "" {
+ return f
+ }
+ }
+
+ return ""
+}
+
+// FormatFromContentString tries to detect the format (JSON, YAML, TOML or XML)
+// in the given string.
+// It returns an empty string if no format could be detected.
+func (d Decoder) FormatFromContentString(data string) Format {
+ csvIdx := strings.IndexRune(data, d.Delimiter)
+ jsonIdx := strings.Index(data, "{")
+ yamlIdx := strings.Index(data, ":")
+ xmlIdx := strings.Index(data, "<")
+ tomlIdx := strings.Index(data, "=")
+
+ if isLowerIndexThan(csvIdx, jsonIdx, yamlIdx, xmlIdx, tomlIdx) {
+ return CSV
+ }
+
+ if isLowerIndexThan(jsonIdx, yamlIdx, xmlIdx, tomlIdx) {
+ return JSON
+ }
+
+ if isLowerIndexThan(yamlIdx, xmlIdx, tomlIdx) {
+ return YAML
+ }
+
+ if isLowerIndexThan(xmlIdx, tomlIdx) {
+ return XML
+ }
+
+ if tomlIdx != -1 {
+ return TOML
+ }
+
+ return ""
+}
+
+func isLowerIndexThan(first int, others ...int) bool {
+ if first == -1 {
+ return false
+ }
+ for _, other := range others {
+ if other != -1 && other < first {
+ return false
+ }
+ }
+
+ return true
+}
diff --git a/parser/metadecoders/format_test.go b/parser/metadecoders/format_test.go
new file mode 100644
index 000000000..db33a7d8c
--- /dev/null
+++ b/parser/metadecoders/format_test.go
@@ -0,0 +1,86 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metadecoders
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestFormatFromString(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ s string
+ expect Format
+ }{
+ {"json", JSON},
+ {"yaml", YAML},
+ {"yml", YAML},
+ {"xml", XML},
+ {"toml", TOML},
+ {"config.toml", TOML},
+ {"tOMl", TOML},
+ {"org", ORG},
+ {"foo", ""},
+ } {
+ c.Assert(FormatFromString(test.s), qt.Equals, test.expect)
+ }
+}
+
+func TestFormatFromMediaType(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ m media.Type
+ expect Format
+ }{
+ {media.JSONType, JSON},
+ {media.YAMLType, YAML},
+ {media.XMLType, XML},
+ {media.RSSType, XML},
+ {media.TOMLType, TOML},
+ {media.CalendarType, ""},
+ } {
+ c.Assert(FormatFromMediaType(test.m), qt.Equals, test.expect)
+ }
+}
+
+func TestFormatFromContentString(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for i, test := range []struct {
+ data string
+ expect any
+ }{
+ {`foo = "bar"`, TOML},
+ {` foo = "bar"`, TOML},
+ {`foo="bar"`, TOML},
+ {`foo: "bar"`, YAML},
+ {`foo:"bar"`, YAML},
+ {`{ "foo": "bar"`, JSON},
+ {`a,b,c"`, CSV},
+ {`<foo>bar</foo>"`, XML},
+ {`asdfasdf`, Format("")},
+ {``, Format("")},
+ } {
+ errMsg := qt.Commentf("[%d] %s", i, test.data)
+
+ result := Default.FormatFromContentString(test.data)
+
+ c.Assert(result, qt.Equals, test.expect, errMsg)
+ }
+}
diff --git a/parser/pageparser/doc.go b/parser/pageparser/doc.go
new file mode 100644
index 000000000..43907f3cd
--- /dev/null
+++ b/parser/pageparser/doc.go
@@ -0,0 +1,18 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package pageparser provides a parser for Hugo content files (Markdown, HTML etc.).
+// This implementation is highly inspired by the great talk given by Rob Pike called "Lexical Scanning in Go".
+// It's on YouTube; Google it!
+// See slides here: http://cuddle.googlecode.com/hg/talk/lex.html
+package pageparser
diff --git a/parser/pageparser/item.go b/parser/pageparser/item.go
new file mode 100644
index 000000000..52546be41
--- /dev/null
+++ b/parser/pageparser/item.go
@@ -0,0 +1,182 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "bytes"
+ "fmt"
+ "regexp"
+ "strconv"
+
+ "github.com/yuin/goldmark/util"
+)
+
+type Item struct {
+ Type ItemType
+ Pos int
+ Val []byte
+ isString bool
+}
+
+type Items []Item
+
+func (i Item) ValStr() string {
+ return string(i.Val)
+}
+
+func (i Item) ValTyped() any {
+ str := i.ValStr()
+ if i.isString {
+ // A quoted value that is a string even if it looks like a number etc.
+ return str
+ }
+
+ if boolRe.MatchString(str) {
+ return str == "true"
+ }
+
+ if intRe.MatchString(str) {
+ num, err := strconv.Atoi(str)
+ if err != nil {
+ return str
+ }
+ return num
+ }
+
+ if floatRe.MatchString(str) {
+ num, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ return str
+ }
+ return num
+ }
+
+ return str
+}
+
+func (i Item) IsText() bool {
+ return i.Type == tText || i.Type == tIndentation
+}
+
+func (i Item) IsIndentation() bool {
+ return i.Type == tIndentation
+}
+
+func (i Item) IsNonWhitespace() bool {
+ return len(bytes.TrimSpace(i.Val)) > 0
+}
+
+func (i Item) IsShortcodeName() bool {
+ return i.Type == tScName
+}
+
+func (i Item) IsInlineShortcodeName() bool {
+ return i.Type == tScNameInline
+}
+
+func (i Item) IsLeftShortcodeDelim() bool {
+ return i.Type == tLeftDelimScWithMarkup || i.Type == tLeftDelimScNoMarkup
+}
+
+func (i Item) IsRightShortcodeDelim() bool {
+ return i.Type == tRightDelimScWithMarkup || i.Type == tRightDelimScNoMarkup
+}
+
+func (i Item) IsShortcodeClose() bool {
+ return i.Type == tScClose
+}
+
+func (i Item) IsShortcodeParam() bool {
+ return i.Type == tScParam
+}
+
+func (i Item) IsShortcodeParamVal() bool {
+ return i.Type == tScParamVal
+}
+
+func (i Item) IsShortcodeMarkupDelimiter() bool {
+ return i.Type == tLeftDelimScWithMarkup || i.Type == tRightDelimScWithMarkup
+}
+
+func (i Item) IsFrontMatter() bool {
+ return i.Type >= TypeFrontMatterYAML && i.Type <= TypeFrontMatterORG
+}
+
+func (i Item) IsDone() bool {
+ return i.Type == tError || i.Type == tEOF
+}
+
+func (i Item) IsEOF() bool {
+ return i.Type == tEOF
+}
+
+func (i Item) IsError() bool {
+ return i.Type == tError
+}
+
+func (i Item) String() string {
+ switch {
+ case i.Type == tEOF:
+ return "EOF"
+ case i.Type == tError:
+ return string(i.Val)
+ case i.Type == tIndentation:
+ return fmt.Sprintf("%s:[%s]", i.Type, util.VisualizeSpaces(i.Val))
+ case i.Type > tKeywordMarker:
+ return fmt.Sprintf("<%s>", i.Val)
+ case len(i.Val) > 50:
+ return fmt.Sprintf("%v:%.20q...", i.Type, i.Val)
+ }
+ return fmt.Sprintf("%v:[%s]", i.Type, i.Val)
+}
+
+type ItemType int
+
+const (
+ tError ItemType = iota
+ tEOF
+
+ // page items
+ TypeLeadSummaryDivider // <!--more-->, # more
+ TypeFrontMatterYAML
+ TypeFrontMatterTOML
+ TypeFrontMatterJSON
+ TypeFrontMatterORG
+ TypeEmoji
+ TypeIgnore // The BOM Unicode byte order marker and possibly others
+
+ // shortcode items
+ tLeftDelimScNoMarkup
+ tRightDelimScNoMarkup
+ tLeftDelimScWithMarkup
+ tRightDelimScWithMarkup
+ tScClose
+ tScName
+ tScNameInline
+ tScParam
+ tScParamVal
+
+ tIndentation
+
+ tText // plain text
+
+ // preserved for later - keywords come after this
+ tKeywordMarker
+)
+
+var (
+ boolRe = regexp.MustCompile(`^(true$)|(false$)`)
+ intRe = regexp.MustCompile(`^[-+]?\d+$`)
+ floatRe = regexp.MustCompile(`^[-+]?\d*\.\d+$`)
+)
diff --git a/parser/pageparser/item_test.go b/parser/pageparser/item_test.go
new file mode 100644
index 000000000..cd01202c6
--- /dev/null
+++ b/parser/pageparser/item_test.go
@@ -0,0 +1,34 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestItemValTyped(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(Item{Val: []byte("3.14")}.ValTyped(), qt.Equals, float64(3.14))
+ c.Assert(Item{Val: []byte(".14")}.ValTyped(), qt.Equals, float64(.14))
+ c.Assert(Item{Val: []byte("314")}.ValTyped(), qt.Equals, 314)
+ c.Assert(Item{Val: []byte("314x")}.ValTyped(), qt.Equals, "314x")
+ c.Assert(Item{Val: []byte("314 ")}.ValTyped(), qt.Equals, "314 ")
+ c.Assert(Item{Val: []byte("314"), isString: true}.ValTyped(), qt.Equals, "314")
+ c.Assert(Item{Val: []byte("true")}.ValTyped(), qt.Equals, true)
+ c.Assert(Item{Val: []byte("false")}.ValTyped(), qt.Equals, false)
+ c.Assert(Item{Val: []byte("trues")}.ValTyped(), qt.Equals, "trues")
+}
diff --git a/parser/pageparser/itemtype_string.go b/parser/pageparser/itemtype_string.go
new file mode 100644
index 000000000..b0b849ade
--- /dev/null
+++ b/parser/pageparser/itemtype_string.go
@@ -0,0 +1,43 @@
+// Code generated by "stringer -type ItemType"; DO NOT EDIT.
+
+package pageparser
+
+import "strconv"
+
func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[tError-0]
	_ = x[tEOF-1]
	_ = x[TypeLeadSummaryDivider-2]
	_ = x[TypeFrontMatterYAML-3]
	_ = x[TypeFrontMatterTOML-4]
	_ = x[TypeFrontMatterJSON-5]
	_ = x[TypeFrontMatterORG-6]
	_ = x[TypeEmoji-7]
	_ = x[TypeIgnore-8]
	_ = x[tLeftDelimScNoMarkup-9]
	_ = x[tRightDelimScNoMarkup-10]
	_ = x[tLeftDelimScWithMarkup-11]
	_ = x[tRightDelimScWithMarkup-12]
	_ = x[tScClose-13]
	_ = x[tScName-14]
	_ = x[tScNameInline-15]
	_ = x[tScParam-16]
	_ = x[tScParamVal-17]
	_ = x[tIndentation-18]
	_ = x[tText-19]
	_ = x[tKeywordMarker-20]
}

// _ItemType_name concatenates all ItemType names; _ItemType_index holds the
// start offset of each name within it. Generated by stringer — regenerate
// rather than hand-edit.
const _ItemType_name = "tErrortEOFTypeLeadSummaryDividerTypeFrontMatterYAMLTypeFrontMatterTOMLTypeFrontMatterJSONTypeFrontMatterORGTypeEmojiTypeIgnoretLeftDelimScNoMarkuptRightDelimScNoMarkuptLeftDelimScWithMarkuptRightDelimScWithMarkuptScClosetScNametScNameInlinetScParamtScParamValtIndentationtTexttKeywordMarker"

var _ItemType_index = [...]uint16{0, 6, 10, 32, 51, 70, 89, 107, 116, 126, 146, 167, 189, 212, 220, 227, 240, 248, 259, 271, 276, 290}

// String returns the name of the ItemType, or "ItemType(n)" for values
// outside the generated range.
func (i ItemType) String() string {
	if i < 0 || i >= ItemType(len(_ItemType_index)-1) {
		return "ItemType(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _ItemType_name[_ItemType_index[i]:_ItemType_index[i+1]]
}
diff --git a/parser/pageparser/pagelexer.go b/parser/pageparser/pagelexer.go
new file mode 100644
index 000000000..770f26eb9
--- /dev/null
+++ b/parser/pageparser/pagelexer.go
@@ -0,0 +1,557 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "bytes"
+ "fmt"
+ "unicode"
+ "unicode/utf8"
+)
+
+const eof = -1
+
// stateFunc is one state of the lexer state machine: it lexes some input
// and returns the next state, or nil to terminate the run.
type stateFunc func(*pageLexer) stateFunc

// pageLexer holds the state of a single page lexing run.
type pageLexer struct {
	input      []byte
	stateStart stateFunc
	state      stateFunc
	pos        int // input position
	start      int // item start position
	width      int // width of last element

	// Contains lexers for shortcodes and other main section
	// elements.
	sectionHandlers *sectionHandlers

	cfg Config

	// The summary divider to look for.
	summaryDivider []byte
	// Set when we have parsed any summary divider
	summaryDividerChecked bool
	// Whether we're in a HTML comment.
	isInHTMLComment bool

	lexerShortcodeState

	// items delivered to client
	items Items
}

// Iterator implements the Result interface.
func (l *pageLexer) Iterator() *Iterator {
	return l.newIterator()
}

// Input implements the Result interface.
func (l *pageLexer) Input() []byte {
	return l.input
}

// Config configures the page lexer.
type Config struct {
	EnableEmoji bool
}
+
+// note: the input position here is normally 0 (start), but
+// can be set if position of first shortcode is known
+func newPageLexer(input []byte, stateStart stateFunc, cfg Config) *pageLexer {
+ lexer := &pageLexer{
+ input: input,
+ stateStart: stateStart,
+ cfg: cfg,
+ lexerShortcodeState: lexerShortcodeState{
+ currLeftDelimItem: tLeftDelimScNoMarkup,
+ currRightDelimItem: tRightDelimScNoMarkup,
+ openShortcodes: make(map[string]bool),
+ },
+ items: make([]Item, 0, 5),
+ }
+
+ lexer.sectionHandlers = createSectionHandlers(lexer)
+
+ return lexer
+}
+
// newIterator returns an iterator positioned just before the first item.
func (l *pageLexer) newIterator() *Iterator {
	return &Iterator{l: l, lastPos: -1}
}

// run is the main loop: it steps the state machine until a state returns nil.
func (l *pageLexer) run() *pageLexer {
	for l.state = l.stateStart; l.state != nil; {
		l.state = l.state(l)
	}
	return l
}

// Page syntax
var (
	byteOrderMark     = '\ufeff' // note: a rune, not a []byte
	summaryDivider    = []byte("<!--more-->")
	summaryDividerOrg = []byte("# more")
	delimTOML         = []byte("+++")
	delimYAML         = []byte("---")
	delimOrg          = []byte("#+")
	htmlCommentStart  = []byte("<!--")
	htmlCommentEnd    = []byte("-->")

	emojiDelim = byte(':')
)

// next consumes and returns the next rune, or eof at end of input.
func (l *pageLexer) next() rune {
	if l.pos >= len(l.input) {
		l.width = 0
		return eof
	}

	runeValue, runeWidth := utf8.DecodeRune(l.input[l.pos:])
	l.width = runeWidth
	l.pos += l.width

	return runeValue
}

// peek returns but does not consume the next rune.
func (l *pageLexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}

// backup steps back one rune; only valid once per call of next, since
// width tracks the last rune only.
func (l *pageLexer) backup() {
	l.pos -= l.width
}
+
// emit sends an item of type t, spanning [start, pos), back to the client
// and advances start past it. For text items, a trailing run of whitespace
// is split off into a separate tIndentation item — unless the text ends
// with a newline, in which case nothing is split.
func (l *pageLexer) emit(t ItemType) {
	defer func() {
		l.start = l.pos
	}()

	if t == tText {
		// Identify any trailing whitespace/indentation.
		// We currently only care about the last one.
		for i := l.pos - 1; i >= l.start; i-- {
			b := l.input[i]
			if b != ' ' && b != '\t' && b != '\r' && b != '\n' {
				break
			}
			if i == l.start && b != '\n' {
				// The whole item is whitespace (and not a lone newline).
				l.items = append(l.items, Item{tIndentation, l.start, l.input[l.start:l.pos], false})
				return
			} else if b == '\n' && i < l.pos-1 {
				// Emit the text up to and including the newline, then the
				// indentation that follows it.
				l.items = append(l.items, Item{t, l.start, l.input[l.start : i+1], false})
				l.items = append(l.items, Item{tIndentation, i + 1, l.input[i+1 : l.pos], false})
				return
			} else if b == '\n' && i == l.pos-1 {
				// Ends with a newline: no trailing indentation to split off.
				break
			}

		}
	}

	l.items = append(l.items, Item{t, l.start, l.input[l.start:l.pos], false})

}
+
// emitString sends the current value back to the client, flagged as a
// string value.
func (l *pageLexer) emitString(t ItemType) {
	l.items = append(l.items, Item{t, l.start, l.input[l.start:l.pos], true})
	l.start = l.pos
}

// isEOF reports whether the read position has reached the end of the input.
func (l *pageLexer) isEOF() bool {
	return l.pos >= len(l.input)
}
+
+// special case, do not send '\\' back to client
+func (l *pageLexer) ignoreEscapesAndEmit(t ItemType, isString bool) {
+ val := bytes.Map(func(r rune) rune {
+ if r == '\\' {
+ return -1
+ }
+ return r
+ }, l.input[l.start:l.pos])
+ l.items = append(l.items, Item{t, l.start, val, isString})
+ l.start = l.pos
+}
+
// current returns the not-yet-emitted value (for debugging and error handling).
func (l *pageLexer) current() []byte {
	return l.input[l.start:l.pos]
}

// ignore drops the current element.
func (l *pageLexer) ignore() {
	l.start = l.pos
}

var lf = []byte("\n")

// errorf emits an error item and returns nil, which terminates the parser.
func (l *pageLexer) errorf(format string, args ...any) stateFunc {
	l.items = append(l.items, Item{tError, l.start, []byte(fmt.Sprintf(format, args...)), true})
	return nil
}

// consumeCRLF consumes an optional line ending ("\r\n", "\r" or "\n") and
// reports whether anything was consumed.
func (l *pageLexer) consumeCRLF() bool {
	var consumed bool
	for _, r := range crLf {
		if l.next() != r {
			// Not this line-ending rune; un-read it.
			l.backup()
		} else {
			consumed = true
		}
	}
	return consumed
}

// consumeToNextLine consumes up to and including the next line ending (or EOF).
func (l *pageLexer) consumeToNextLine() {
	for {
		r := l.next()
		if r == eof || isEndOfLine(r) {
			return
		}
	}
}
+
+func (l *pageLexer) consumeToSpace() {
+ for {
+ r := l.next()
+ if r == eof || unicode.IsSpace(r) {
+ l.backup()
+ return
+ }
+ }
+}
+
+func (l *pageLexer) consumeSpace() {
+ for {
+ r := l.next()
+ if r == eof || !unicode.IsSpace(r) {
+ l.backup()
+ return
+ }
+ }
+}
+
+// lex a string starting at ":"
+func lexEmoji(l *pageLexer) stateFunc {
+ pos := l.pos + 1
+ valid := false
+
+ for i := pos; i < len(l.input); i++ {
+ if i > pos && l.input[i] == emojiDelim {
+ pos = i + 1
+ valid = true
+ break
+ }
+ r, _ := utf8.DecodeRune(l.input[i:])
+ if !(isAlphaNumericOrHyphen(r) || r == '+') {
+ break
+ }
+ }
+
+ if valid {
+ l.pos = pos
+ l.emit(TypeEmoji)
+ } else {
+ l.pos++
+ l.emit(tText)
+ }
+
+ return lexMainSection
+}
+
// sectionHandlers aggregates the handlers for the constructs that can
// appear in the main section (shortcodes, the summary divider and,
// optionally, emojis) and coordinates fast-forwarding between them.
type sectionHandlers struct {
	l *pageLexer

	// Set when none of the sections are found so we
	// can safely stop looking and skip to the end.
	skipAll bool

	handlers    []*sectionHandler
	skipIndexes []int
}
+
+func (s *sectionHandlers) skip() int {
+ if s.skipAll {
+ return -1
+ }
+
+ s.skipIndexes = s.skipIndexes[:0]
+ var shouldSkip bool
+ for _, skipper := range s.handlers {
+ idx := skipper.skip()
+ if idx != -1 {
+ shouldSkip = true
+ s.skipIndexes = append(s.skipIndexes, idx)
+ }
+ }
+
+ if !shouldSkip {
+ s.skipAll = true
+ return -1
+ }
+
+ return minIndex(s.skipIndexes...)
+}
+
// createSectionHandlers wires up the handlers for the main-section
// constructs: shortcodes, the summary divider and (when enabled) emojis.
func createSectionHandlers(l *pageLexer) *sectionHandlers {
	shortCodeHandler := &sectionHandler{
		l: l,
		skipFunc: func(l *pageLexer) int {
			return l.index(leftDelimSc)
		},
		lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
			if !l.isShortCodeStart() {
				return origin, false
			}

			if l.isInline {
				// If we're inside an inline shortcode, the only valid shortcode markup is
				// the markup which closes it.
				b := l.input[l.pos+3:]
				end := indexNonWhiteSpace(b, '/')
				if end != len(l.input)-1 {
					b = bytes.TrimSpace(b[end+1:])
					if end == -1 || !bytes.HasPrefix(b, []byte(l.currShortcodeName+" ")) {
						return l.errorf("inline shortcodes do not support nesting"), true
					}
				}
			}

			// Select the delimiter pair ({{< >}} vs {{% %}}) for this shortcode.
			if l.hasPrefix(leftDelimScWithMarkup) {
				l.currLeftDelimItem = tLeftDelimScWithMarkup
				l.currRightDelimItem = tRightDelimScWithMarkup
			} else {
				l.currLeftDelimItem = tLeftDelimScNoMarkup
				l.currRightDelimItem = tRightDelimScNoMarkup
			}

			return lexShortcodeLeftDelim, true
		},
	}

	summaryDividerHandler := &sectionHandler{
		l: l,
		skipFunc: func(l *pageLexer) int {
			// Only the first summary divider is significant.
			if l.summaryDividerChecked || l.summaryDivider == nil {
				return -1
			}
			return l.index(l.summaryDivider)
		},
		lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
			if !l.hasPrefix(l.summaryDivider) {
				return origin, false
			}

			l.summaryDividerChecked = true
			l.pos += len(l.summaryDivider)
			// This makes it a little easier to reason about later.
			l.consumeSpace()
			l.emit(TypeLeadSummaryDivider)

			return origin, true
		},
	}

	handlers := []*sectionHandler{shortCodeHandler, summaryDividerHandler}

	if l.cfg.EnableEmoji {
		emojiHandler := &sectionHandler{
			l: l,
			skipFunc: func(l *pageLexer) int {
				return l.indexByte(emojiDelim)
			},
			lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
				return lexEmoji, true
			},
		}

		handlers = append(handlers, emojiHandler)
	}

	return &sectionHandlers{
		l:           l,
		handlers:    handlers,
		skipIndexes: make([]int, len(handlers)),
	}
}
+
// lex emits any pending text, then hands control to the first handler
// that recognizes the input at the current position. If no handler claims
// it, the position is advanced one byte and origin is resumed.
func (s *sectionHandlers) lex(origin stateFunc) stateFunc {
	if s.skipAll {
		return nil
	}

	if s.l.pos > s.l.start {
		s.l.emit(tText)
	}

	for _, handler := range s.handlers {
		if handler.skipAll {
			continue
		}

		next, handled := handler.lexFunc(origin, handler.l)
		if next == nil || handled {
			return next
		}
	}

	// Not handled by the above.
	s.l.pos++

	return origin
}
+
// sectionHandler lexes one kind of main-section construct.
type sectionHandler struct {
	l *pageLexer

	// No more sections of this type.
	skipAll bool

	// Returns the index of the next match, -1 if none found.
	skipFunc func(l *pageLexer) int

	// Lex lexes the current section and returns the next state func and
	// a bool telling if this section was handled.
	// Note that returning nil as the next state will terminate the
	// lexer.
	lexFunc func(origin stateFunc, l *pageLexer) (stateFunc, bool)
}
+
+func (s *sectionHandler) skip() int {
+ if s.skipAll {
+ return -1
+ }
+
+ idx := s.skipFunc(s.l)
+ if idx == -1 {
+ s.skipAll = true
+ }
+ return idx
+}
+
// lexMainSection lexes everything after the front matter, fast-forwarding
// directly to the positions the section handlers are interested in.
func lexMainSection(l *pageLexer) stateFunc {
	if l.isEOF() {
		return lexDone
	}

	if l.isInHTMLComment {
		return lexEndFrontMatterHTMLComment
	}

	// Fast forward as far as possible.
	skip := l.sectionHandlers.skip()

	if skip == -1 {
		// Nothing left to handle; the rest is plain text.
		l.pos = len(l.input)
		return lexDone
	} else if skip > 0 {
		l.pos += skip
	}

	next := l.sectionHandlers.lex(lexMainSection)
	if next != nil {
		return next
	}

	l.pos = len(l.input)
	return lexDone
}

// lexDone emits any remaining text plus the final EOF item, then stops
// the state machine.
func lexDone(l *pageLexer) stateFunc {
	// Done!
	if l.pos > l.start {
		l.emit(tText)
	}
	l.emit(tEOF)
	return nil
}

// printCurrentInput dumps the unconsumed input (debugging helper only).
func (l *pageLexer) printCurrentInput() {
	fmt.Printf("input[%d:]: %q", l.pos, string(l.input[l.pos:]))
}
+
// state helpers

// index returns the offset of sep relative to the current position, or -1.
func (l *pageLexer) index(sep []byte) int {
	return bytes.Index(l.input[l.pos:], sep)
}

// indexByte returns the offset of sep relative to the current position, or -1.
func (l *pageLexer) indexByte(sep byte) int {
	return bytes.IndexByte(l.input[l.pos:], sep)
}

// hasPrefix reports whether the unconsumed input starts with prefix.
func (l *pageLexer) hasPrefix(prefix []byte) bool {
	return bytes.HasPrefix(l.input[l.pos:], prefix)
}
+
+// helper functions
+
// minIndex returns the smallest non-negative value among indices,
// or -1 when every value is negative (or none are given).
func minIndex(indices ...int) int {
	best := -1
	for _, idx := range indices {
		if idx < 0 {
			continue
		}
		if best < 0 || idx < best {
			best = idx
		}
	}
	return best
}
+
// indexNonWhiteSpace returns the index of the first non-whitespace rune in
// s, but only when that rune equals in; in every other case it returns -1.
func indexNonWhiteSpace(s []byte, in rune) int {
	nonSpace := func(r rune) bool { return !unicode.IsSpace(r) }

	idx := bytes.IndexFunc(s, nonSpace)
	if idx < 0 {
		return -1
	}

	if r, _ := utf8.DecodeRune(s[idx:]); r != in {
		return -1
	}
	return idx
}
+
// isSpace reports whether r is a plain space or tab (newlines excluded).
func isSpace(r rune) bool {
	switch r {
	case ' ', '\t':
		return true
	default:
		return false
	}
}
+
// isAlphaNumericOrHyphen reports whether r is alphanumeric (per
// isAlphaNumeric, which includes '_') or a hyphen.
func isAlphaNumericOrHyphen(r rune) bool {
	// let unquoted YouTube ids as positional params slip through (they contain hyphens)
	return isAlphaNumeric(r) || r == '-'
}
+
// crLf holds the line-ending runes in the order consumeCRLF probes them.
var crLf = []rune{'\r', '\n'}

// isEndOfLine reports whether r terminates a line (CR or LF).
func isEndOfLine(r rune) bool {
	switch r {
	case '\r', '\n':
		return true
	default:
		return false
	}
}
+
// isAlphaNumeric reports whether r is a Unicode letter, a digit or an
// underscore.
func isAlphaNumeric(r rune) bool {
	if r == '_' {
		return true
	}
	return unicode.IsLetter(r) || unicode.IsDigit(r)
}
diff --git a/parser/pageparser/pagelexer_intro.go b/parser/pageparser/pagelexer_intro.go
new file mode 100644
index 000000000..6e4617998
--- /dev/null
+++ b/parser/pageparser/pagelexer_intro.go
@@ -0,0 +1,189 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
// lexIntroSection scans from the very start of the page: it skips a byte
// order mark, detects front matter by its first delimiter character
// ('+' TOML, '-' YAML, '{' JSON, '#' org-mode), tolerates front matter
// wrapped in an HTML comment, and rejects plain HTML documents.
func lexIntroSection(l *pageLexer) stateFunc {
	l.summaryDivider = summaryDivider

LOOP:
	for {
		r := l.next()
		if r == eof {
			break
		}

		switch {
		case r == '+':
			return l.lexFrontMatterSection(TypeFrontMatterTOML, r, "TOML", delimTOML)
		case r == '-':
			return l.lexFrontMatterSection(TypeFrontMatterYAML, r, "YAML", delimYAML)
		case r == '{':
			return lexFrontMatterJSON
		case r == '#':
			return lexFrontMatterOrgMode
		case r == byteOrderMark:
			l.emit(TypeIgnore)
		case !isSpace(r) && !isEndOfLine(r):
			if r == '<' {
				l.backup()
				if l.hasPrefix(htmlCommentStart) {
					// This may be commented out front matter, which should
					// still be read.
					l.consumeToNextLine()
					l.isInHTMLComment = true
					l.emit(TypeIgnore)
					continue LOOP
				} else {
					return l.errorf("plain HTML documents not supported")
				}
			}
			break LOOP
		}
	}

	// Now move on to the shortcodes.
	return lexMainSection
}
+
// lexEndFrontMatterHTMLComment skips to the end of the HTML comment that
// wrapped the front matter, emitting the skipped span as TypeIgnore.
func lexEndFrontMatterHTMLComment(l *pageLexer) stateFunc {
	l.isInHTMLComment = false
	right := l.index(htmlCommentEnd)
	if right == -1 {
		return l.errorf("starting HTML comment with no end")
	}
	l.pos += right + len(htmlCommentEnd)
	l.emit(TypeIgnore)

	// Now move on to the shortcodes.
	return lexMainSection
}
+
// lexFrontMatterJSON lexes a JSON front matter block by tracking brace
// depth, ignoring braces that appear inside quoted strings.
func lexFrontMatterJSON(l *pageLexer) stateFunc {
	// Include the left delimiter
	l.backup()

	var (
		inQuote bool
		level   int
	)

	for {

		r := l.next()

		switch {
		case r == eof:
			return l.errorf("unexpected EOF parsing JSON front matter")
		case r == '{':
			if !inQuote {
				level++
			}
		case r == '}':
			if !inQuote {
				level--
			}
		case r == '"':
			inQuote = !inQuote
		case r == '\\':
			// This may be an escaped quote. Make sure it's not marked as a
			// real one.
			l.next()
		}

		if level == 0 {
			break
		}
	}

	// Include the trailing line ending in the emitted block.
	l.consumeCRLF()
	l.emit(TypeFrontMatterJSON)

	return lexMainSection
}
+
// lexFrontMatterOrgMode lexes org-mode front matter: a run of leading
// lines that all start with "#+", e.g.:
//
//	#+TITLE: Test File For chaseadamsio/goorgeous
//	#+AUTHOR: Chase Adams
//	#+DESCRIPTION: Just another golang parser for org content!
func lexFrontMatterOrgMode(l *pageLexer) stateFunc {
	l.summaryDivider = summaryDividerOrg

	l.backup()

	if !l.hasPrefix(delimOrg) {
		return lexMainSection
	}

	// Read lines until we no longer see a #+ prefix
LOOP:
	for {

		r := l.next()

		switch {
		case r == '\n':
			if !l.hasPrefix(delimOrg) {
				break LOOP
			}
		case r == eof:
			break LOOP

		}
	}

	l.emit(TypeFrontMatterORG)

	return lexMainSection
}
+
+// Handle YAML or TOML front matter.
+func (l *pageLexer) lexFrontMatterSection(tp ItemType, delimr rune, name string, delim []byte) stateFunc {
+ for i := 0; i < 2; i++ {
+ if r := l.next(); r != delimr {
+ return l.errorf("invalid %s delimiter", name)
+ }
+ }
+
+ // Let front matter start at line 1
+ wasEndOfLine := l.consumeCRLF()
+ // We don't care about the delimiters.
+ l.ignore()
+
+ var r rune
+
+ for {
+ if !wasEndOfLine {
+ r = l.next()
+ if r == eof {
+ return l.errorf("EOF looking for end %s front matter delimiter", name)
+ }
+ }
+
+ if wasEndOfLine || isEndOfLine(r) {
+ if l.hasPrefix(delim) {
+ l.emit(tp)
+ l.pos += 3
+ l.consumeCRLF()
+ l.ignore()
+ break
+ }
+ }
+
+ wasEndOfLine = false
+ }
+
+ return lexMainSection
+}
diff --git a/parser/pageparser/pagelexer_shortcode.go b/parser/pageparser/pagelexer_shortcode.go
new file mode 100644
index 000000000..5b4e0ae55
--- /dev/null
+++ b/parser/pageparser/pagelexer_shortcode.go
@@ -0,0 +1,364 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
// lexerShortcodeState holds the shortcode-specific part of the lexer state.
type lexerShortcodeState struct {
	currLeftDelimItem  ItemType
	currRightDelimItem ItemType
	isInline           bool
	currShortcodeName  string          // is only set when a shortcode is in opened state
	closingState       int             // > 0 = on its way to be closed
	elementStepNum     int             // step number in element
	paramElements      int             // number of elements (name + value = 2) found first
	openShortcodes     map[string]bool // set of shortcodes in open state

}

// Shortcode syntax
var (
	leftDelimSc            = []byte("{{")
	leftDelimScNoMarkup    = []byte("{{<")
	rightDelimScNoMarkup   = []byte(">}}")
	leftDelimScWithMarkup  = []byte("{{%")
	rightDelimScWithMarkup = []byte("%}}")
	leftComment            = []byte("/*") // comments in this context are used to mark shortcodes as "not really a shortcode"
	rightComment           = []byte("*/")
)
+
// isShortCodeStart reports whether the input at the current position
// begins a shortcode ({{< or {{%).
func (l *pageLexer) isShortCodeStart() bool {
	return l.hasPrefix(leftDelimScWithMarkup) || l.hasPrefix(leftDelimScNoMarkup)
}

// lexShortcodeLeftDelim emits the left delimiter (or routes to the comment
// lexer for {{</* ... */>}}) and resets the per-shortcode counters.
func lexShortcodeLeftDelim(l *pageLexer) stateFunc {
	l.pos += len(l.currentLeftShortcodeDelim())
	if l.hasPrefix(leftComment) {
		return lexShortcodeComment
	}
	l.emit(l.currentLeftShortcodeDelimItem())
	l.elementStepNum = 0
	l.paramElements = 0
	return lexInsideShortcode
}

// lexShortcodeComment lexes a commented-out shortcode, {{</* ... */>}}.
func lexShortcodeComment(l *pageLexer) stateFunc {
	posRightComment := l.index(append(rightComment, l.currentRightShortcodeDelim()...))
	if posRightComment <= 1 {
		return l.errorf("comment must be closed")
	}
	// we emit all as text, except the comment markers
	l.emit(tText)
	l.pos += len(leftComment)
	l.ignore()
	l.pos += posRightComment - len(leftComment)
	l.emit(tText)
	l.pos += len(rightComment)
	l.ignore()
	l.pos += len(l.currentRightShortcodeDelim())
	l.emit(tText)
	return lexMainSection
}

// lexShortcodeRightDelim emits the right delimiter and returns to the
// main section.
func lexShortcodeRightDelim(l *pageLexer) stateFunc {
	l.closingState = 0
	l.pos += len(l.currentRightShortcodeDelim())
	l.emit(l.currentRightShortcodeDelimItem())
	return lexMainSection
}
+
// lexShortcodeParam lexes one parameter element, which is either:
// 1. param
// 2. "param" or "param\"
// 3. param="123" or param="123\"
// 4. param="Some \"escaped\" text"
// 5. `param`
// 6. param=`123`
// escapedQuoteStart is set when the caller has already consumed a
// backslash, i.e. the opening quote is escaped.
func lexShortcodeParam(l *pageLexer, escapedQuoteStart bool) stateFunc {
	first := true
	nextEq := false

	var r rune

	for {
		r = l.next()
		if first {
			if r == '"' || (r == '`' && !escapedQuoteStart) {
				// a positional param with quotes
				if l.paramElements == 2 {
					return l.errorf("got quoted positional parameter. Cannot mix named and positional parameters")
				}
				l.paramElements = 1
				l.backup()
				if r == '"' {
					return lexShortcodeQuotedParamVal(l, !escapedQuoteStart, tScParam)
				}
				return lexShortCodeParamRawStringVal(l, tScParam)

			} else if r == '`' && escapedQuoteStart {
				return l.errorf("unrecognized escape character")
			}
			first = false
		} else if r == '=' {
			// a named param
			l.backup()
			nextEq = true
			break
		}

		if !isAlphaNumericOrHyphen(r) && r != '.' { // Floats have period
			l.backup()
			break
		}
	}

	// paramElements records whether this shortcode uses positional (1) or
	// named (2) parameters; the two styles cannot be mixed.
	if l.paramElements == 0 {
		l.paramElements++

		if nextEq {
			l.paramElements++
		}
	} else {
		if nextEq && l.paramElements == 1 {
			return l.errorf("got named parameter '%s'. Cannot mix named and positional parameters", l.current())
		} else if !nextEq && l.paramElements == 2 {
			return l.errorf("got positional parameter '%s'. Cannot mix named and positional parameters", l.current())
		}
	}

	l.emit(tScParam)
	return lexInsideShortcode
}
+
// lexShortcodeParamVal lexes an unquoted parameter value, which runs to
// the next whitespace.
func lexShortcodeParamVal(l *pageLexer) stateFunc {
	l.consumeToSpace()
	l.emit(tScParamVal)
	return lexInsideShortcode
}

// lexShortCodeParamRawStringVal lexes a backtick-quoted raw string value.
// The backticks themselves are not part of the emitted value.
func lexShortCodeParamRawStringVal(l *pageLexer, typ ItemType) stateFunc {
	openBacktickFound := false

Loop:
	for {
		switch r := l.next(); {
		case r == '`':
			if openBacktickFound {
				l.backup()
				break Loop
			} else {
				openBacktickFound = true
				l.ignore()
			}
		case r == eof:
			return l.errorf("unterminated raw string in shortcode parameter-argument: '%s'", l.current())
		}
	}

	l.emitString(typ)
	// Skip past the closing backtick.
	l.next()
	l.ignore()

	return lexInsideShortcode
}
+
// lexShortcodeQuotedParamVal lexes a double-quoted value. When
// escapedQuotedValuesAllowed is set, inner \" sequences are treated as
// escaped quotes inside the value (the backslashes are stripped before
// emitting); otherwise an escaped quote terminates the value. The
// surrounding quotes are not part of the emitted value.
func lexShortcodeQuotedParamVal(l *pageLexer, escapedQuotedValuesAllowed bool, typ ItemType) stateFunc {
	openQuoteFound := false
	escapedInnerQuoteFound := false
	escapedQuoteState := 0

Loop:
	for {
		switch r := l.next(); {
		case r == '\\':
			if l.peek() == '"' {
				if openQuoteFound && !escapedQuotedValuesAllowed {
					l.backup()
					break Loop
				} else if openQuoteFound {
					// the coming quote is inside
					escapedInnerQuoteFound = true
					escapedQuoteState = 1
				}
			} else if l.peek() == '`' {
				return l.errorf("unrecognized escape character")
			}
		case r == eof, r == '\n':
			return l.errorf("unterminated quoted string in shortcode parameter-argument: '%s'", l.current())
		case r == '"':
			if escapedQuoteState == 0 {
				if openQuoteFound {
					l.backup()
					break Loop

				} else {
					openQuoteFound = true
					l.ignore()
				}
			} else {
				// This quote was preceded by a backslash; it does not
				// close the value.
				escapedQuoteState = 0
			}
		}
	}

	if escapedInnerQuoteFound {
		// Strip the escape backslashes before delivering the value.
		l.ignoreEscapesAndEmit(typ, true)
	} else {
		l.emitString(typ)
	}

	r := l.next()

	if r == '\\' {
		if l.peek() == '"' {
			// ignore the escaped closing quote
			l.ignore()
			l.next()
			l.ignore()
		}
	} else if r == '"' {
		// ignore closing quote
		l.ignore()
	} else {
		// handled by next state
		l.backup()
	}

	return lexInsideShortcode
}
+
// Inline shortcodes have the form {{< myshortcode.inline >}}
var inlineIdentifier = []byte("inline ")

// lexIdentifierInShortcode scans an identifier inside a shortcode: the
// shortcode name (possibly namespaced with '/', or marked ".inline") and
// validates open/close tag pairing via openShortcodes.
func lexIdentifierInShortcode(l *pageLexer) stateFunc {
	lookForEnd := false
Loop:
	for {
		switch r := l.next(); {
		case isAlphaNumericOrHyphen(r):
		// Allow forward slash inside names to make it possible to create namespaces.
		case r == '/':
		case r == '.':
			l.isInline = l.hasPrefix(inlineIdentifier)
			if !l.isInline {
				return l.errorf("period in shortcode name only allowed for inline identifiers")
			}
		default:
			l.backup()
			word := string(l.input[l.start:l.pos])
			if l.closingState > 0 && !l.openShortcodes[word] {
				return l.errorf("closing tag for shortcode '%s' does not match start tag", word)
			} else if l.closingState > 0 {
				l.openShortcodes[word] = false
				lookForEnd = true
			}

			l.closingState = 0
			l.currShortcodeName = word
			l.openShortcodes[word] = true
			l.elementStepNum++
			if l.isInline {
				l.emit(tScNameInline)
			} else {
				l.emit(tScName)
			}
			break Loop
		}
	}

	if lookForEnd {
		return lexEndOfShortcode
	}
	return lexInsideShortcode
}

// lexEndOfShortcode consumes trailing space after a closing tag and
// expects the right delimiter to follow.
func lexEndOfShortcode(l *pageLexer) stateFunc {
	l.isInline = false
	if l.hasPrefix(l.currentRightShortcodeDelim()) {
		return lexShortcodeRightDelim
	}
	switch r := l.next(); {
	case isSpace(r):
		l.ignore()
	default:
		return l.errorf("unclosed shortcode")
	}
	return lexEndOfShortcode
}
+
// lexInsideShortcode scans the elements between the shortcode delimiters:
// the name, parameters (named or positional, quoted or raw), the closing
// '/', and finally the right delimiter.
func lexInsideShortcode(l *pageLexer) stateFunc {
	if l.hasPrefix(l.currentRightShortcodeDelim()) {
		return lexShortcodeRightDelim
	}
	switch r := l.next(); {
	case r == eof:
		// eol is allowed inside shortcodes; this may go to end of document before it fails
		return l.errorf("unclosed shortcode action")
	case isSpace(r), isEndOfLine(r):
		l.ignore()
	case r == '=':
		// A named parameter's value follows.
		l.consumeSpace()
		l.ignore()
		peek := l.peek()
		if peek == '"' || peek == '\\' {
			return lexShortcodeQuotedParamVal(l, peek != '\\', tScParamVal)
		} else if peek == '`' {
			return lexShortCodeParamRawStringVal(l, tScParamVal)
		}
		return lexShortcodeParamVal
	case r == '/':
		if l.currShortcodeName == "" {
			return l.errorf("got closing shortcode, but none is open")
		}
		l.closingState++
		l.isInline = false
		l.emit(tScClose)
	case r == '\\':
		// Escaped opening quote for a parameter.
		l.ignore()
		if l.peek() == '"' || l.peek() == '`' {
			return lexShortcodeParam(l, true)
		}
	case l.elementStepNum > 0 && (isAlphaNumericOrHyphen(r) || r == '"' || r == '`'): // positional params can have quotes
		l.backup()
		return lexShortcodeParam(l, false)
	case isAlphaNumeric(r):
		l.backup()
		return lexIdentifierInShortcode
	default:
		return l.errorf("unrecognized character in shortcode action: %#U. Note: Parameters with non-alphanumeric args must be quoted", r)
	}
	return lexInsideShortcode
}
+
// currentLeftShortcodeDelimItem returns the item type of the active left delimiter.
func (l *pageLexer) currentLeftShortcodeDelimItem() ItemType {
	return l.currLeftDelimItem
}

// currentRightShortcodeDelimItem returns the item type of the active right delimiter.
func (l *pageLexer) currentRightShortcodeDelimItem() ItemType {
	return l.currRightDelimItem
}

// currentLeftShortcodeDelim returns the byte form of the active left delimiter.
func (l *pageLexer) currentLeftShortcodeDelim() []byte {
	if l.currLeftDelimItem == tLeftDelimScWithMarkup {
		return leftDelimScWithMarkup
	}
	return leftDelimScNoMarkup
}

// currentRightShortcodeDelim returns the byte form of the active right delimiter.
func (l *pageLexer) currentRightShortcodeDelim() []byte {
	if l.currRightDelimItem == tRightDelimScWithMarkup {
		return rightDelimScWithMarkup
	}
	return rightDelimScNoMarkup
}
diff --git a/parser/pageparser/pagelexer_test.go b/parser/pageparser/pagelexer_test.go
new file mode 100644
index 000000000..00669c27b
--- /dev/null
+++ b/parser/pageparser/pagelexer_test.go
@@ -0,0 +1,28 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestMinIndex covers the mixed, zero-containing, empty and all-negative
// argument cases.
func TestMinIndex(t *testing.T) {
	c := qt.New(t)
	c.Assert(minIndex(4, 1, 2, 3), qt.Equals, 1)
	c.Assert(minIndex(4, 0, -2, 2, 5), qt.Equals, 0)
	c.Assert(minIndex(), qt.Equals, -1)
	c.Assert(minIndex(-2, -3), qt.Equals, -1)
}
diff --git a/parser/pageparser/pageparser.go b/parser/pageparser/pageparser.go
new file mode 100644
index 000000000..67abefc30
--- /dev/null
+++ b/parser/pageparser/pageparser.go
@@ -0,0 +1,195 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+
+ "github.com/gohugoio/hugo/parser/metadecoders"
+)
+
// Result holds the parse result.
type Result interface {
	// Iterator returns a new Iterator positioned at the beginning of the parse tree.
	Iterator() *Iterator
	// Input returns the input to Parse.
	Input() []byte
}

// Compile-time check that pageLexer satisfies Result.
var _ Result = (*pageLexer)(nil)

// Parse parses the page in the given reader according to the given Config.
// TODO(bep) now that we have improved the "lazy order" init, it *may* be
// some potential saving in doing a buffered approach where the first pass does
// the frontmatter only.
func Parse(r io.Reader, cfg Config) (Result, error) {
	return parseSection(r, cfg, lexIntroSection)
}

// ContentFrontMatter bundles a page's raw content with its decoded front
// matter and the format the front matter was written in.
type ContentFrontMatter struct {
	Content           []byte
	FrontMatter       map[string]any
	FrontMatterFormat metadecoders.Format
}
+
// ParseFrontMatterAndContent is a convenience method to extract front matter
// and content from a content page.
func ParseFrontMatterAndContent(r io.Reader) (ContentFrontMatter, error) {
	var cf ContentFrontMatter

	psr, err := Parse(r, Config{})
	if err != nil {
		return cf, err
	}

	var frontMatterSource []byte

	iter := psr.Iterator()

	// Walk the item stream: capture the front matter item, then treat
	// everything from the following item onwards as content.
	walkFn := func(item Item) bool {
		if frontMatterSource != nil {
			// The rest is content.
			cf.Content = psr.Input()[item.Pos:]
			// Done
			return false
		} else if item.IsFrontMatter() {
			cf.FrontMatterFormat = FormatFromFrontMatterType(item.Type)
			frontMatterSource = item.Val
		}
		return true
	}

	iter.PeekWalk(walkFn)

	cf.FrontMatter, err = metadecoders.Default.UnmarshalToMap(frontMatterSource, cf.FrontMatterFormat)
	return cf, err
}

// FormatFromFrontMatterType maps a front matter item type to the
// corresponding metadecoders format; non-front-matter types yield "".
func FormatFromFrontMatterType(typ ItemType) metadecoders.Format {
	switch typ {
	case TypeFrontMatterJSON:
		return metadecoders.JSON
	case TypeFrontMatterORG:
		return metadecoders.ORG
	case TypeFrontMatterTOML:
		return metadecoders.TOML
	case TypeFrontMatterYAML:
		return metadecoders.YAML
	default:
		return ""
	}
}
+
// ParseMain parses starting with the main section. Used in tests.
func ParseMain(r io.Reader, cfg Config) (Result, error) {
	return parseSection(r, cfg, lexMainSection)
}

// parseSection reads everything from r and lexes it starting in the given
// state.
// NOTE(review): ioutil.ReadAll has been deprecated in favor of io.ReadAll
// since Go 1.16; switching here would leave the io/ioutil import unused,
// so it is kept as-is.
func parseSection(r io.Reader, cfg Config, start stateFunc) (Result, error) {
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, fmt.Errorf("failed to read page content: %w", err)
	}
	return parseBytes(b, cfg, start)
}

// parseBytes runs the lexer over b to completion.
func parseBytes(b []byte, cfg Config, start stateFunc) (Result, error) {
	lexer := newPageLexer(b, start, cfg)
	lexer.run()
	return lexer, nil
}
+
// An Iterator has methods to iterate a parsed page with support going back
// if needed.
type Iterator struct {
	l       *pageLexer
	lastPos int // index of the last item returned by Next; -1 before the first call
}

// Next consumes and returns the next item.
func (t *Iterator) Next() Item {
	t.lastPos++
	return t.Current()
}

// Input returns the input source.
func (t *Iterator) Input() []byte {
	return t.l.Input()
}

// errIndexOutOfBounds is returned by Current when iterating past the last item.
var errIndexOutOfBounds = Item{tError, 0, []byte("no more tokens"), true}

// Current will repeatably return the current item.
func (t *Iterator) Current() Item {
	if t.lastPos >= len(t.l.items) {
		return errIndexOutOfBounds
	}
	return t.l.items[t.lastPos]
}

// Backup backs up one token.
func (t *Iterator) Backup() {
	if t.lastPos < 0 {
		panic("need to go forward before going back")
	}
	t.lastPos--
}

// Pos returns the index of the current item in the item list — note that
// this is an item index, not a byte offset into the input.
func (t *Iterator) Pos() int {
	return t.lastPos
}
+
// IsValueNext checks for non-error and non-EOF types coming next.
func (t *Iterator) IsValueNext() bool {
	i := t.Peek()
	return i.Type != tError && i.Type != tEOF
}

// Peek looks at, but does not consume, the next item.
// Repeated, sequential calls will return the same item.
// Note: unlike Current, this indexes the item list without a bounds
// check and will panic when peeking past the last item.
func (t *Iterator) Peek() Item {
	return t.l.items[t.lastPos+1]
}
+
+// PeekWalk will feed the next items in the iterator to walkFn
+// until it returns false.
+func (t *Iterator) PeekWalk(walkFn func(item Item) bool) {
+ for i := t.lastPos + 1; i < len(t.l.items); i++ {
+ item := t.l.items[i]
+ if !walkFn(item) {
+ break
+ }
+ }
+}
+
// Consume is a convenience method to consume the next n tokens,
// but back off Errors and EOF.
func (t *Iterator) Consume(cnt int) {
	for i := 0; i < cnt; i++ {
		token := t.Next()
		if token.Type == tError || token.Type == tEOF {
			t.Backup()
			break
		}
	}
}

// LineNumber returns the current line number (1-based), computed by
// counting newlines up to the current item's byte position. Used for logging.
func (t *Iterator) LineNumber() int {
	return bytes.Count(t.l.input[:t.Current().Pos], lf) + 1
}
diff --git a/parser/pageparser/pageparser_intro_test.go b/parser/pageparser/pageparser_intro_test.go
new file mode 100644
index 000000000..1b903d546
--- /dev/null
+++ b/parser/pageparser/pageparser_intro_test.go
@@ -0,0 +1,126 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+type lexerTest struct {
+ name string
+ input string
+ items []Item
+}
+
+func nti(tp ItemType, val string) Item {
+ return Item{tp, 0, []byte(val), false}
+}
+
+var (
+ tstJSON = `{ "a": { "b": "\"Hugo\"}" } }`
+ tstFrontMatterTOML = nti(TypeFrontMatterTOML, "foo = \"bar\"\n")
+ tstFrontMatterYAML = nti(TypeFrontMatterYAML, "foo: \"bar\"\n")
+ tstFrontMatterYAMLCRLF = nti(TypeFrontMatterYAML, "foo: \"bar\"\r\n")
+ tstFrontMatterJSON = nti(TypeFrontMatterJSON, tstJSON+"\r\n")
+ tstSomeText = nti(tText, "\nSome text.\n")
+ tstSummaryDivider = nti(TypeLeadSummaryDivider, "<!--more-->\n")
+ tstNewline = nti(tText, "\n")
+
+ tstORG = `
+#+TITLE: T1
+#+AUTHOR: A1
+#+DESCRIPTION: D1
+`
+ tstFrontMatterORG = nti(TypeFrontMatterORG, tstORG)
+)
+
+var crLfReplacer = strings.NewReplacer("\r", "#", "\n", "$")
+
+// TODO(bep) a way to toggle ORG mode vs the rest.
+var frontMatterTests = []lexerTest{
+ {"empty", "", []Item{tstEOF}},
+ {"Byte order mark", "\ufeff\nSome text.\n", []Item{nti(TypeIgnore, "\ufeff"), tstSomeText, tstEOF}},
+ {"HTML Document", ` <html> `, []Item{nti(tError, "plain HTML documents not supported")}},
+ {"HTML Document with shortcode", `<html>{{< sc1 >}}</html>`, []Item{nti(tError, "plain HTML documents not supported")}},
+ {"No front matter", "\nSome text.\n", []Item{tstSomeText, tstEOF}},
+ {"YAML front matter", "---\nfoo: \"bar\"\n---\n\nSome text.\n", []Item{tstFrontMatterYAML, tstSomeText, tstEOF}},
+ {"YAML empty front matter", "---\n---\n\nSome text.\n", []Item{nti(TypeFrontMatterYAML, ""), tstSomeText, tstEOF}},
+ {"YAML commented out front matter", "<!--\n---\nfoo: \"bar\"\n---\n-->\nSome text.\n", []Item{nti(TypeIgnore, "<!--\n"), tstFrontMatterYAML, nti(TypeIgnore, "-->"), tstSomeText, tstEOF}},
+ {"YAML commented out front matter, no end", "<!--\n---\nfoo: \"bar\"\n---\nSome text.\n", []Item{nti(TypeIgnore, "<!--\n"), tstFrontMatterYAML, nti(tError, "starting HTML comment with no end")}},
+ // Note that we keep all bytes as they are, but we need to handle CRLF
+ {"YAML front matter CRLF", "---\r\nfoo: \"bar\"\r\n---\n\nSome text.\n", []Item{tstFrontMatterYAMLCRLF, tstSomeText, tstEOF}},
+ {"TOML front matter", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n", []Item{tstFrontMatterTOML, tstSomeText, tstEOF}},
+ {"JSON front matter", tstJSON + "\r\n\nSome text.\n", []Item{tstFrontMatterJSON, tstSomeText, tstEOF}},
+ {"ORG front matter", tstORG + "\nSome text.\n", []Item{tstFrontMatterORG, tstSomeText, tstEOF}},
+ {"Summary divider ORG", tstORG + "\nSome text.\n# more\nSome text.\n", []Item{tstFrontMatterORG, tstSomeText, nti(TypeLeadSummaryDivider, "# more\n"), nti(tText, "Some text.\n"), tstEOF}},
+ {"Summary divider", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->\nSome text.\n", []Item{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, nti(tText, "Some text.\n"), tstEOF}},
+ {"Summary divider same line", "+++\nfoo = \"bar\"\n+++\n\nSome text.<!--more-->Some text.\n", []Item{tstFrontMatterTOML, nti(tText, "\nSome text."), nti(TypeLeadSummaryDivider, "<!--more-->"), nti(tText, "Some text.\n"), tstEOF}},
+ // https://github.com/gohugoio/hugo/issues/5402
+ {"Summary and shortcode, no space", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->{{< sc1 >}}\nSome text.\n", []Item{tstFrontMatterTOML, tstSomeText, nti(TypeLeadSummaryDivider, "<!--more-->"), tstLeftNoMD, tstSC1, tstRightNoMD, tstSomeText, tstEOF}},
+ // https://github.com/gohugoio/hugo/issues/5464
+ {"Summary and shortcode only", "+++\nfoo = \"bar\"\n+++\n{{< sc1 >}}\n<!--more-->\n{{< sc2 >}}", []Item{tstFrontMatterTOML, tstLeftNoMD, tstSC1, tstRightNoMD, tstNewline, tstSummaryDivider, tstLeftNoMD, tstSC2, tstRightNoMD, tstEOF}},
+}
+
+func TestFrontMatter(t *testing.T) {
+ t.Parallel()
+ for i, test := range frontMatterTests {
+ items := collect([]byte(test.input), false, lexIntroSection)
+ if !equal(items, test.items) {
+ got := crLfReplacer.Replace(fmt.Sprint(items))
+ expected := crLfReplacer.Replace(fmt.Sprint(test.items))
+ t.Errorf("[%d] %s: got\n\t%v\nexpected\n\t%v", i, test.name, got, expected)
+ }
+ }
+}
+
+func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, cfg Config) (items []Item) {
+ l := newPageLexer(input, stateStart, cfg)
+ l.run()
+ t := l.newIterator()
+
+ for {
+ item := t.Next()
+ items = append(items, item)
+ if item.Type == tEOF || item.Type == tError {
+ break
+ }
+ }
+ return
+}
+
+func collect(input []byte, skipFrontMatter bool, stateStart stateFunc) (items []Item) {
+ var cfg Config
+
+ return collectWithConfig(input, skipFrontMatter, stateStart, cfg)
+}
+
+// no positional checking, for now ...
+func equal(i1, i2 []Item) bool {
+ if len(i1) != len(i2) {
+ return false
+ }
+ for k := range i1 {
+ if i1[k].Type != i2[k].Type {
+ return false
+ }
+
+ if !reflect.DeepEqual(i1[k].Val, i2[k].Val) {
+ return false
+ }
+ }
+ return true
+}
diff --git a/parser/pageparser/pageparser_main_test.go b/parser/pageparser/pageparser_main_test.go
new file mode 100644
index 000000000..8fed2bffa
--- /dev/null
+++ b/parser/pageparser/pageparser_main_test.go
@@ -0,0 +1,40 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "fmt"
+ "testing"
+)
+
+func TestMain(t *testing.T) {
+ t.Parallel()
+
+ mainTests := []lexerTest{
+ {"emoji #1", "Some text with :emoji:", []Item{nti(tText, "Some text with "), nti(TypeEmoji, ":emoji:"), tstEOF}},
+ {"emoji #2", "Some text with :emoji: and some text.", []Item{nti(tText, "Some text with "), nti(TypeEmoji, ":emoji:"), nti(tText, " and some text."), tstEOF}},
+ {"looks like an emoji #1", "Some text and then :emoji", []Item{nti(tText, "Some text and then "), nti(tText, ":"), nti(tText, "emoji"), tstEOF}},
+ {"looks like an emoji #2", "Some text and then ::", []Item{nti(tText, "Some text and then "), nti(tText, ":"), nti(tText, ":"), tstEOF}},
+ {"looks like an emoji #3", ":Some :text", []Item{nti(tText, ":"), nti(tText, "Some "), nti(tText, ":"), nti(tText, "text"), tstEOF}},
+ }
+
+ for i, test := range mainTests {
+ items := collectWithConfig([]byte(test.input), false, lexMainSection, Config{EnableEmoji: true})
+ if !equal(items, test.items) {
+ got := crLfReplacer.Replace(fmt.Sprint(items))
+ expected := crLfReplacer.Replace(fmt.Sprint(test.items))
+ t.Errorf("[%d] %s: got\n\t%v\nexpected\n\t%v", i, test.name, got, expected)
+ }
+ }
+}
diff --git a/parser/pageparser/pageparser_shortcode_test.go b/parser/pageparser/pageparser_shortcode_test.go
new file mode 100644
index 000000000..ce1297573
--- /dev/null
+++ b/parser/pageparser/pageparser_shortcode_test.go
@@ -0,0 +1,279 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "testing"
+)
+
+var (
+ tstEOF = nti(tEOF, "")
+ tstLeftNoMD = nti(tLeftDelimScNoMarkup, "{{<")
+ tstRightNoMD = nti(tRightDelimScNoMarkup, ">}}")
+ tstLeftMD = nti(tLeftDelimScWithMarkup, "{{%")
+ tstRightMD = nti(tRightDelimScWithMarkup, "%}}")
+ tstSCClose = nti(tScClose, "/")
+ tstSC1 = nti(tScName, "sc1")
+ tstSC1Inline = nti(tScNameInline, "sc1.inline")
+ tstSC2Inline = nti(tScNameInline, "sc2.inline")
+ tstSC2 = nti(tScName, "sc2")
+ tstSC3 = nti(tScName, "sc3")
+ tstSCSlash = nti(tScName, "sc/sub")
+ tstParam1 = nti(tScParam, "param1")
+ tstParam2 = nti(tScParam, "param2")
+ tstParamBoolTrue = nti(tScParam, "true")
+ tstParamBoolFalse = nti(tScParam, "false")
+ tstParamInt = nti(tScParam, "32")
+ tstParamFloat = nti(tScParam, "3.14")
+ tstVal = nti(tScParamVal, "Hello World")
+ tstText = nti(tText, "Hello World")
+)
+
+var shortCodeLexerTests = []lexerTest{
+ {"empty", "", []Item{tstEOF}},
+ {"spaces", " \t\n", []Item{nti(tText, " \t\n"), tstEOF}},
+ {"text", `to be or not`, []Item{nti(tText, "to be or not"), tstEOF}},
+ {"no markup", `{{< sc1 >}}`, []Item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
+ {"with EOL", "{{< sc1 \n >}}", []Item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
+
+ {"forward slash inside name", `{{< sc/sub >}}`, []Item{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}},
+
+ {"simple with markup", `{{% sc1 %}}`, []Item{tstLeftMD, tstSC1, tstRightMD, tstEOF}},
+ {"with spaces", `{{< sc1 >}}`, []Item{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}},
+ {"indented on new line", "Hello\n {{% sc1 %}}", []Item{nti(tText, "Hello\n"), nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}},
+ {"indented on new line tab", "Hello\n\t{{% sc1 %}}", []Item{nti(tText, "Hello\n"), nti(tIndentation, "\t"), tstLeftMD, tstSC1, tstRightMD, tstEOF}},
+ {"indented on first line", " {{% sc1 %}}", []Item{nti(tIndentation, " "), tstLeftMD, tstSC1, tstRightMD, tstEOF}},
+ {"mismatched rightDelim", `{{< sc1 %}}`, []Item{
+ tstLeftNoMD, tstSC1,
+ nti(tError, "unrecognized character in shortcode action: U+0025 '%'. Note: Parameters with non-alphanumeric args must be quoted"),
+ }},
+ {"inner, markup", `{{% sc1 %}} inner {{% /sc1 %}}`, []Item{
+ tstLeftMD,
+ tstSC1,
+ tstRightMD,
+ nti(tText, " inner "),
+ tstLeftMD,
+ tstSCClose,
+ tstSC1,
+ tstRightMD,
+ tstEOF,
+ }},
+ {"close, but no open", `{{< /sc1 >}}`, []Item{
+ tstLeftNoMD, nti(tError, "got closing shortcode, but none is open"),
+ }},
+ {"close wrong", `{{< sc1 >}}{{< /another >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose,
+ nti(tError, "closing tag for shortcode 'another' does not match start tag"),
+ }},
+ {"close, but no open, more", `{{< sc1 >}}{{< /sc1 >}}{{< /another >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose,
+ nti(tError, "closing tag for shortcode 'another' does not match start tag"),
+ }},
+ {"close with extra keyword", `{{< sc1 >}}{{< /sc1 keyword>}}`, []Item{
+ tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1,
+ nti(tError, "unclosed shortcode"),
+ }},
+ {"float param, positional", `{{< sc1 3.14 >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "3.14"), tstRightNoMD, tstEOF,
+ }},
+ {"float param, named", `{{< sc1 param1=3.14 >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF,
+ }},
+ {"named param, raw string", `{{< sc1 param1=` + "`" + "Hello World" + "`" + " >}}", []Item{
+ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "Hello World"), tstRightNoMD, tstEOF,
+ }},
+ {"float param, named, space before", `{{< sc1 param1= 3.14 >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF,
+ }},
+ {"Youtube id", `{{< sc1 -ziL-Q_456igdO-4 >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-Q_456igdO-4"), tstRightNoMD, tstEOF,
+ }},
+ {"non-alphanumerics param quoted", `{{< sc1 "-ziL-.%QigdO-4" >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-.%QigdO-4"), tstRightNoMD, tstEOF,
+ }},
+ {"raw string", `{{< sc1` + "`" + "Hello World" + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), tstRightNoMD, tstEOF,
+ }},
+ {"raw string with newline", `{{< sc1` + "`" + `Hello
+ World` + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, `Hello
+ World`), tstRightNoMD, tstEOF,
+ }},
+ {"raw string with escape character", `{{< sc1` + "`" + `Hello \b World` + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, `Hello \b World`), tstRightNoMD, tstEOF,
+ }},
+ {"two params", `{{< sc1 param1 param2 >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstParam2, tstRightNoMD, tstEOF,
+ }},
+ // issue #934
+ {"self-closing", `{{< sc1 />}}`, []Item{
+ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF,
+ }},
+ // Issue 2498
+ {"multiple self-closing", `{{< sc1 />}}{{< sc1 />}}`, []Item{
+ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD,
+ tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF,
+ }},
+ {"self-closing with param", `{{< sc1 param1 />}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF,
+ }},
+ {"multiple self-closing with param", `{{< sc1 param1 />}}{{< sc1 param1 />}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD,
+ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF,
+ }},
+ {"multiple different self-closing with param", `{{< sc1 param1 />}}{{< sc2 param1 />}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD,
+ tstLeftNoMD, tstSC2, tstParam1, tstSCClose, tstRightNoMD, tstEOF,
+ }},
+ {"nested simple", `{{< sc1 >}}{{< sc2 >}}{{< /sc1 >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstRightNoMD,
+ tstLeftNoMD, tstSC2, tstRightNoMD,
+ tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstEOF,
+ }},
+ {"nested complex", `{{< sc1 >}}ab{{% sc2 param1 %}}cd{{< sc3 >}}ef{{< /sc3 >}}gh{{% /sc2 %}}ij{{< /sc1 >}}kl`, []Item{
+ tstLeftNoMD, tstSC1, tstRightNoMD,
+ nti(tText, "ab"),
+ tstLeftMD, tstSC2, tstParam1, tstRightMD,
+ nti(tText, "cd"),
+ tstLeftNoMD, tstSC3, tstRightNoMD,
+ nti(tText, "ef"),
+ tstLeftNoMD, tstSCClose, tstSC3, tstRightNoMD,
+ nti(tText, "gh"),
+ tstLeftMD, tstSCClose, tstSC2, tstRightMD,
+ nti(tText, "ij"),
+ tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD,
+ nti(tText, "kl"), tstEOF,
+ }},
+
+ {"two quoted params", `{{< sc1 "param nr. 1" "param nr. 2" >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "param nr. 1"), nti(tScParam, "param nr. 2"), tstRightNoMD, tstEOF,
+ }},
+ {"two named params", `{{< sc1 param1="Hello World" param2="p2Val">}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstVal, tstParam2, nti(tScParamVal, "p2Val"), tstRightNoMD, tstEOF,
+ }},
+ {"escaped quotes", `{{< sc1 param1=\"Hello World\" >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstVal, tstRightNoMD, tstEOF,
+ }},
+ {"escaped quotes, positional param", `{{< sc1 \"param1\" >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstRightNoMD, tstEOF,
+ }},
+ {"escaped quotes inside escaped quotes", `{{< sc1 param1=\"Hello \"escaped\" World\" >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1,
+ nti(tScParamVal, `Hello `), nti(tError, `got positional parameter 'escaped'. Cannot mix named and positional parameters`),
+ }},
+ {
+ "escaped quotes inside nonescaped quotes",
+ `{{< sc1 param1="Hello \"escaped\" World" >}}`,
+ []Item{
+ tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, `Hello "escaped" World`), tstRightNoMD, tstEOF,
+ },
+ },
+ {
+ "escaped quotes inside nonescaped quotes in positional param",
+ `{{< sc1 "Hello \"escaped\" World" >}}`,
+ []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, `Hello "escaped" World`), tstRightNoMD, tstEOF,
+ },
+ },
+ {"escaped raw string, named param", `{{< sc1 param1=` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"),
+ }},
+ {"escaped raw string, positional param", `{{< sc1 param1 ` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"),
+ }},
+ {"two raw string params", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tScParam, "Second Param"), tstRightNoMD, tstEOF,
+ }},
+ {"unterminated quote", `{{< sc1 param2="Hello World>}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam2, nti(tError, "unterminated quoted string in shortcode parameter-argument: 'Hello World>}}'"),
+ }},
+ {"unterminated raw string", `{{< sc1` + "`" + "Hello World" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tError, "unterminated raw string in shortcode parameter-argument: 'Hello World >}}'"),
+ }},
+ {"unterminated raw string in second argument", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tError, "unterminated raw string in shortcode parameter-argument: 'Second Param >}}'"),
+ }},
+ {"one named param, one not", `{{< sc1 param1="Hello World" p2 >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstVal,
+ nti(tError, "got positional parameter 'p2'. Cannot mix named and positional parameters"),
+ }},
+ {"one named param, one quoted positional param, both raw strings", `{{< sc1 param1=` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstVal,
+ nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"),
+ }},
+ {"one named param, one quoted positional param", `{{< sc1 param1="Hello World" "And Universe" >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1, tstVal,
+ nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"),
+ }},
+ {"one quoted positional param, one named param", `{{< sc1 "param1" param2="And Universe" >}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1,
+ nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"),
+ }},
+ {"ono positional param, one not", `{{< sc1 param1 param2="Hello World">}}`, []Item{
+ tstLeftNoMD, tstSC1, tstParam1,
+ nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"),
+ }},
+ {"commented out", `{{</* sc1 */>}}`, []Item{
+ nti(tText, "{{<"), nti(tText, " sc1 "), nti(tText, ">}}"), tstEOF,
+ }},
+ {"commented out, with asterisk inside", `{{</* sc1 "**/*.pdf" */>}}`, []Item{
+ nti(tText, "{{<"), nti(tText, " sc1 \"**/*.pdf\" "), nti(tText, ">}}"), tstEOF,
+ }},
+ {"commented out, missing close", `{{</* sc1 >}}`, []Item{
+ nti(tError, "comment must be closed"),
+ }},
+ {"commented out, misplaced close", `{{</* sc1 >}}*/`, []Item{
+ nti(tError, "comment must be closed"),
+ }},
+ // Inline shortcodes
+ {"basic inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}`, []Item{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}},
+ {"basic inline with space", `{{< sc1.inline >}}Hello World{{< / sc1.inline >}}`, []Item{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}},
+ {"inline self closing", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}`, []Item{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstEOF}},
+ {"inline self closing, then a new inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}{{< sc2.inline >}}Hello World{{< /sc2.inline >}}`, []Item{
+ tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD,
+ tstLeftNoMD, tstSC2Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC2Inline, tstRightNoMD, tstEOF,
+ }},
+ {"inline with template syntax", `{{< sc1.inline >}}{{ .Get 0 }}{{ .Get 1 }}{{< /sc1.inline >}}`, []Item{tstLeftNoMD, tstSC1Inline, tstRightNoMD, nti(tText, "{{ .Get 0 }}"), nti(tText, "{{ .Get 1 }}"), tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}},
+ {"inline with nested shortcode (not supported)", `{{< sc1.inline >}}Hello World{{< sc1 >}}{{< /sc1.inline >}}`, []Item{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, nti(tError, "inline shortcodes do not support nesting")}},
+ {"inline case mismatch", `{{< sc1.Inline >}}Hello World{{< /sc1.Inline >}}`, []Item{tstLeftNoMD, nti(tError, "period in shortcode name only allowed for inline identifiers")}},
+}
+
+func TestShortcodeLexer(t *testing.T) {
+ t.Parallel()
+ for i, test := range shortCodeLexerTests {
+ t.Run(test.name, func(t *testing.T) {
+ items := collect([]byte(test.input), true, lexMainSection)
+ if !equal(items, test.items) {
+ t.Errorf("[%d] %s: got\n\t%v\nexpected\n\t%v", i, test.name, items, test.items)
+ }
+ })
+ }
+}
+
+func BenchmarkShortcodeLexer(b *testing.B) {
+ testInputs := make([][]byte, len(shortCodeLexerTests))
+ for i, input := range shortCodeLexerTests {
+ testInputs[i] = []byte(input.input)
+ }
+ var cfg Config
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ for _, input := range testInputs {
+ items := collectWithConfig(input, true, lexMainSection, cfg)
+ if len(items) == 0 {
+ }
+
+ }
+ }
+}
diff --git a/parser/pageparser/pageparser_test.go b/parser/pageparser/pageparser_test.go
new file mode 100644
index 000000000..f7f719938
--- /dev/null
+++ b/parser/pageparser/pageparser_test.go
@@ -0,0 +1,90 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pageparser
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+)
+
+func BenchmarkParse(b *testing.B) {
+ start := `
+
+
+---
+title: "Front Matters"
+description: "It really does"
+---
+
+This is some summary. This is some summary. This is some summary. This is some summary.
+
+ <!--more-->
+
+
+`
+ input := []byte(start + strings.Repeat(strings.Repeat("this is text", 30)+"{{< myshortcode >}}This is some inner content.{{< /myshortcode >}}", 10))
+ cfg := Config{EnableEmoji: false}
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ if _, err := parseBytes(input, cfg, lexIntroSection); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+func BenchmarkParseWithEmoji(b *testing.B) {
+ start := `
+
+
+---
+title: "Front Matters"
+description: "It really does"
+---
+
+This is some summary. This is some summary. This is some summary. This is some summary.
+
+ <!--more-->
+
+
+`
+ input := []byte(start + strings.Repeat("this is not emoji: ", 50) + strings.Repeat("some text ", 70) + strings.Repeat("this is not: ", 50) + strings.Repeat("but this is a :smile: ", 3) + strings.Repeat("some text ", 70))
+ cfg := Config{EnableEmoji: true}
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ if _, err := parseBytes(input, cfg, lexIntroSection); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+func TestFormatFromFrontMatterType(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ typ ItemType
+ expect metadecoders.Format
+ }{
+ {TypeFrontMatterJSON, metadecoders.JSON},
+ {TypeFrontMatterTOML, metadecoders.TOML},
+ {TypeFrontMatterYAML, metadecoders.YAML},
+ {TypeFrontMatterORG, metadecoders.ORG},
+ {TypeIgnore, ""},
+ } {
+ c.Assert(FormatFromFrontMatterType(test.typ), qt.Equals, test.expect)
+ }
+}
diff --git a/publisher/htmlElementsCollector.go b/publisher/htmlElementsCollector.go
new file mode 100644
index 000000000..ca6e2d940
--- /dev/null
+++ b/publisher/htmlElementsCollector.go
@@ -0,0 +1,443 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package publisher
+
+import (
+ "bytes"
+ "regexp"
+ "sort"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/net/html"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
+const eof = -1
+
+var (
+ htmlJsonFixer = strings.NewReplacer(", ", "\n")
+ jsonAttrRe = regexp.MustCompile(`'?(.*?)'?:.*`)
+ classAttrRe = regexp.MustCompile(`(?i)^class$|transition`)
+
+ skipInnerElementRe = regexp.MustCompile(`(?i)^(pre|textarea|script|style)`)
+ skipAllElementRe = regexp.MustCompile(`(?i)^!DOCTYPE`)
+ endTagRe = regexp.MustCompile(`(?i)<\/\s*([a-zA-Z]+)\s*>$`)
+
+ exceptionList = map[string]bool{
+ "thead": true,
+ "tbody": true,
+ "tfoot": true,
+ "td": true,
+ "tr": true,
+ }
+)
+
+func newHTMLElementsCollector() *htmlElementsCollector {
+ return &htmlElementsCollector{
+ elementSet: make(map[string]bool),
+ }
+}
+
+func newHTMLElementsCollectorWriter(collector *htmlElementsCollector) *htmlElementsCollectorWriter {
+ w := &htmlElementsCollectorWriter{
+ collector: collector,
+ state: htmlLexStart,
+ }
+
+ w.defaultLexElementInside = w.lexElementInside(htmlLexStart)
+
+ return w
+}
+
+// HTMLElements holds lists of tags and attribute values for classes and id.
+type HTMLElements struct {
+ Tags []string `json:"tags"`
+ Classes []string `json:"classes"`
+ IDs []string `json:"ids"`
+}
+
+func (h *HTMLElements) Merge(other HTMLElements) {
+ h.Tags = append(h.Tags, other.Tags...)
+ h.Classes = append(h.Classes, other.Classes...)
+ h.IDs = append(h.IDs, other.IDs...)
+
+ h.Tags = helpers.UniqueStringsReuse(h.Tags)
+ h.Classes = helpers.UniqueStringsReuse(h.Classes)
+ h.IDs = helpers.UniqueStringsReuse(h.IDs)
+}
+
+func (h *HTMLElements) Sort() {
+ sort.Strings(h.Tags)
+ sort.Strings(h.Classes)
+ sort.Strings(h.IDs)
+}
+
+type htmlElement struct {
+ Tag string
+ Classes []string
+ IDs []string
+}
+
+type htmlElementsCollector struct {
+ // Contains the raw HTML string. We will get the same element
+ // several times, and want to avoid costly reparsing when this
+ // is used for aggregated data only.
+ elementSet map[string]bool
+
+ elements []htmlElement
+
+ mu sync.RWMutex
+}
+
+func (c *htmlElementsCollector) getHTMLElements() HTMLElements {
+ var (
+ classes []string
+ ids []string
+ tags []string
+ )
+
+ for _, el := range c.elements {
+ classes = append(classes, el.Classes...)
+ ids = append(ids, el.IDs...)
+ tags = append(tags, el.Tag)
+ }
+
+ classes = helpers.UniqueStringsSorted(classes)
+ ids = helpers.UniqueStringsSorted(ids)
+ tags = helpers.UniqueStringsSorted(tags)
+
+ els := HTMLElements{
+ Classes: classes,
+ IDs: ids,
+ Tags: tags,
+ }
+
+ return els
+}
+
+type htmlElementsCollectorWriter struct {
+ collector *htmlElementsCollector
+
+ r rune // Current rune
+ width int // The width in bytes of r
+ input []byte // The current slice written to Write
+ pos int // The current position in input
+
+ err error
+
+ inQuote rune
+
+ buff bytes.Buffer
+
+ // Current state
+ state htmlCollectorStateFunc
+
+ // Precompiled state funcs
+ defaultLexElementInside htmlCollectorStateFunc
+}
+
+// Write collects HTML elements from p, which must contain complete runes.
+func (w *htmlElementsCollectorWriter) Write(p []byte) (int, error) {
+ if p == nil {
+ return 0, nil
+ }
+
+ w.input = p
+
+ for {
+ w.r = w.next()
+ if w.r == eof || w.r == utf8.RuneError {
+ break
+ }
+ w.state = w.state(w)
+ }
+
+ w.pos = 0
+ w.input = nil
+
+ return len(p), nil
+}
+
+func (l *htmlElementsCollectorWriter) backup() {
+ l.pos -= l.width
+ l.r, _ = utf8.DecodeRune(l.input[l.pos:])
+}
+
+func (w *htmlElementsCollectorWriter) consumeBuffUntil(condition func() bool, resolve htmlCollectorStateFunc) htmlCollectorStateFunc {
+ var s htmlCollectorStateFunc
+ s = func(*htmlElementsCollectorWriter) htmlCollectorStateFunc {
+ w.buff.WriteRune(w.r)
+ if condition() {
+ w.buff.Reset()
+ return resolve
+ }
+ return s
+ }
+ return s
+}
+
+func (w *htmlElementsCollectorWriter) consumeRuneUntil(condition func(r rune) bool, resolve htmlCollectorStateFunc) htmlCollectorStateFunc {
+ var s htmlCollectorStateFunc
+ s = func(*htmlElementsCollectorWriter) htmlCollectorStateFunc {
+ if condition(w.r) {
+ return resolve
+ }
+ return s
+ }
+ return s
+}
+
+// Starts with e.g. "<body " or "<div"
+func (w *htmlElementsCollectorWriter) lexElementInside(resolve htmlCollectorStateFunc) htmlCollectorStateFunc {
+ var s htmlCollectorStateFunc
+ s = func(w *htmlElementsCollectorWriter) htmlCollectorStateFunc {
+ w.buff.WriteRune(w.r)
+
+ // Skip any text inside a quote.
+ if w.r == '\'' || w.r == '"' {
+ if w.inQuote == w.r {
+ w.inQuote = 0
+ } else if w.inQuote == 0 {
+ w.inQuote = w.r
+ }
+ }
+
+ if w.inQuote != 0 {
+ return s
+ }
+
+ if w.r == '>' {
+
+ // Work with the bytes slice as long as it's practical,
+ // to save memory allocations.
+ b := w.buff.Bytes()
+
+ defer func() {
+ w.buff.Reset()
+ }()
+
+ // First check if we have processed this element before.
+ w.collector.mu.RLock()
+
+ seen := w.collector.elementSet[string(b)]
+ w.collector.mu.RUnlock()
+ if seen {
+ return resolve
+ }
+
+ s := w.buff.String()
+
+ if s == "" {
+ return resolve
+ }
+
+ // Parse each collected element.
+ el, err := parseHTMLElement(s)
+ if err != nil {
+ w.err = err
+ return resolve
+ }
+
+ // Write this tag to the element set.
+ w.collector.mu.Lock()
+ w.collector.elementSet[s] = true
+ w.collector.elements = append(w.collector.elements, el)
+ w.collector.mu.Unlock()
+
+ return resolve
+
+ }
+
+ return s
+ }
+
+ return s
+}
+
+func (l *htmlElementsCollectorWriter) next() rune {
+ if l.pos >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+
+ runeValue, runeWidth := utf8.DecodeRune(l.input[l.pos:])
+
+ l.width = runeWidth
+ l.pos += l.width
+ return runeValue
+}
+
+// returns the next state in HTML element scanner.
+type htmlCollectorStateFunc func(*htmlElementsCollectorWriter) htmlCollectorStateFunc
+
+// At "<", buffer empty.
+// Potentially starting a HTML element.
+func htmlLexElementStart(w *htmlElementsCollectorWriter) htmlCollectorStateFunc {
+ if w.r == '>' || unicode.IsSpace(w.r) {
+ if w.buff.Len() < 2 || bytes.HasPrefix(w.buff.Bytes(), []byte("</")) {
+ w.buff.Reset()
+ return htmlLexStart
+ }
+
+ tagName := w.buff.Bytes()[1:]
+
+ switch {
+ case skipInnerElementRe.Match(tagName):
+ // pre, script etc. We collect classes etc. on the surrounding
+ // element, but skip the inner content.
+ w.backup()
+
+ // tagName will be overwritten, so make a copy.
+ tagNameCopy := make([]byte, len(tagName))
+ copy(tagNameCopy, tagName)
+
+ return w.lexElementInside(
+ w.consumeBuffUntil(
+ func() bool {
+ if w.r != '>' {
+ return false
+ }
+ m := endTagRe.FindSubmatch(w.buff.Bytes())
+ if m == nil {
+ return false
+ }
+ return bytes.EqualFold(m[1], tagNameCopy)
+ },
+ htmlLexStart,
+ ))
+ case skipAllElementRe.Match(tagName):
+ // E.g. "<!DOCTYPE ..."
+ w.buff.Reset()
+ return w.consumeRuneUntil(func(r rune) bool {
+ return r == '>'
+ }, htmlLexStart)
+ default:
+ w.backup()
+ return w.defaultLexElementInside
+ }
+ }
+
+ w.buff.WriteRune(w.r)
+
+ // If it's a comment, skip to its end.
+ if w.r == '-' && bytes.Equal(w.buff.Bytes(), []byte("<!--")) {
+ w.buff.Reset()
+ return htmlLexToEndOfComment
+ }
+
+ return htmlLexElementStart
+}
+
+// Entry state func.
+// Looks for a opening bracket, '<'.
+func htmlLexStart(w *htmlElementsCollectorWriter) htmlCollectorStateFunc {
+ if w.r == '<' {
+ w.backup()
+ w.buff.Reset()
+ return htmlLexElementStart
+ }
+
+ return htmlLexStart
+}
+
+// After "<!--", buff empty.
+func htmlLexToEndOfComment(w *htmlElementsCollectorWriter) htmlCollectorStateFunc {
+ w.buff.WriteRune(w.r)
+
+ if w.r == '>' && bytes.HasSuffix(w.buff.Bytes(), []byte("-->")) {
+ // Done, start looking for HTML elements again.
+ return htmlLexStart
+ }
+
+ return htmlLexToEndOfComment
+}
+
+func parseHTMLElement(elStr string) (el htmlElement, err error) {
+
+ tagName := parseStartTag(elStr)
+
+ el.Tag = strings.ToLower(tagName)
+ tagNameToParse := el.Tag
+
+ // The net/html parser does not handle single table elements as input, e.g. tbody.
+ // We only care about the element/class/ids, so just store away the original tag name
+ // and pretend it's a <div>.
+ if exceptionList[el.Tag] {
+ elStr = strings.Replace(elStr, tagName, "div", 1)
+ tagNameToParse = "div"
+ }
+
+ n, err := html.Parse(strings.NewReader(elStr))
+ if err != nil {
+ return
+ }
+
+ var walk func(*html.Node)
+ walk = func(n *html.Node) {
+ if n.Type == html.ElementNode && n.Data == tagNameToParse {
+ for _, a := range n.Attr {
+ switch {
+ case strings.EqualFold(a.Key, "id"):
+ // There should be only one, but one never knows...
+ el.IDs = append(el.IDs, a.Val)
+ default:
+ if classAttrRe.MatchString(a.Key) {
+ el.Classes = append(el.Classes, strings.Fields(a.Val)...)
+ } else {
+ key := strings.ToLower(a.Key)
+ val := strings.TrimSpace(a.Val)
+ if strings.Contains(key, "class") && strings.HasPrefix(val, "{") {
+ // This looks like a Vue or AlpineJS class binding.
+ val = htmlJsonFixer.Replace(strings.Trim(val, "{}"))
+ lines := strings.Split(val, "\n")
+ for i, l := range lines {
+ lines[i] = strings.TrimSpace(l)
+ }
+ val = strings.Join(lines, "\n")
+ val = jsonAttrRe.ReplaceAllString(val, "$1")
+ el.Classes = append(el.Classes, strings.Fields(val)...)
+ }
+ }
+ }
+ }
+ }
+
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ walk(c)
+ }
+ }
+
+ walk(n)
+
+ return
+}
+
+// Variants of s
+// <body class="b a">
+// <div>
+func parseStartTag(s string) string {
+ spaceIndex := strings.IndexFunc(s, func(r rune) bool {
+ return unicode.IsSpace(r)
+ })
+
+ if spaceIndex == -1 {
+ return s[1 : len(s)-1]
+ }
+
+ return s[1:spaceIndex]
+}
diff --git a/publisher/htmlElementsCollector_test.go b/publisher/htmlElementsCollector_test.go
new file mode 100644
index 000000000..1d9da6cb3
--- /dev/null
+++ b/publisher/htmlElementsCollector_test.go
@@ -0,0 +1,216 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package publisher
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "math/rand"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/minifiers"
+ "github.com/gohugoio/hugo/output"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestClassCollector(t *testing.T) {
+ c := qt.New((t))
+ rnd := rand.New(rand.NewSource(time.Now().Unix()))
+
+ f := func(tags, classes, ids string) HTMLElements {
+ var tagss, classess, idss []string
+ if tags != "" {
+ tagss = strings.Split(tags, " ")
+ }
+ if classes != "" {
+ classess = strings.Split(classes, " ")
+ }
+ if ids != "" {
+ idss = strings.Split(ids, " ")
+ }
+ return HTMLElements{
+ Tags: tagss,
+ Classes: classess,
+ IDs: idss,
+ }
+ }
+
+ skipMinifyTest := map[string]bool{
+ "Script tags content should be skipped": true, // https://github.com/tdewolff/minify/issues/396
+ }
+
+ for _, test := range []struct {
+ name string
+ html string
+ expect HTMLElements
+ }{
+ {"basic", `<body class="b a"></body>`, f("body", "a b", "")},
+ {"duplicates", `<div class="b a b"></div><div class="b a b"></div>x'`, f("div", "a b", "")},
+ {"single quote", `<body class='b a'></body>`, f("body", "a b", "")},
+ {"no quote", `<body class=b id=myelement></body>`, f("body", "b", "myelement")},
+ {"short", `<i>`, f("i", "", "")},
+ {"invalid", `< body class="b a"></body><div></div>`, f("div", "", "")},
+ // https://github.com/gohugoio/hugo/issues/7318
+ {"thead", `<table class="cl1">
+ <thead class="cl2"><tr class="cl3"><td class="cl4"></td></tr></thead>
+ <tbody class="cl5"><tr class="cl6"><td class="cl7"></td></tr></tbody>
+</table>`, f("table tbody td thead tr", "cl1 cl2 cl3 cl4 cl5 cl6 cl7", "")},
+ {"thead uppercase", `<TABLE class="CL1">
+ <THEAD class="CL2"><TR class="CL3"><TD class="CL4"></TD></TR></THEAD>
+ <TBODY class="CL5"><TR class="CL6"><TD class="CL7"></TD></TR></TBODY>
+</TABLE>`, f("table tbody td thead tr", "CL1 CL2 CL3 CL4 CL5 CL6 CL7", "")},
+ // https://github.com/gohugoio/hugo/issues/7161
+ {"minified a href", `<a class="b a" href=/></a>`, f("a", "a b", "")},
+ {"AlpineJS bind 1", `<body>
+ <div x-bind:class="{
+ 'class1': data.open,
+ 'class2 class3': data.foo == 'bar'
+ }">
+ </div>
+</body>`, f("body div", "class1 class2 class3", "")},
+ {"AlpineJS bind 2", `<div x-bind:class="{ 'bg-black': filter.checked }" class="inline-block mr-1 mb-2 rounded bg-gray-300 px-2 py-2">FOO</div>`,
+ f("div", "bg-black bg-gray-300 inline-block mb-2 mr-1 px-2 py-2 rounded", ""),
+ },
+ {"AlpineJS bind 3", `<div x-bind:class="{ 'text-gray-800': !checked, 'text-white': checked }"></div>`, f("div", "text-gray-800 text-white", "")},
+ {"AlpineJS bind 4", `<div x-bind:class="{ 'text-gray-800': !checked,
+ 'text-white': checked }"></div>`, f("div", "text-gray-800 text-white", "")},
+ {"AlpineJS bind 5", `<a x-bind:class="{
+ 'text-a': a && b,
+ 'text-b': !a && b || c,
+ 'pl-3': a === 1,
+ pl-2: b == 3,
+ 'text-gray-600': (a > 1)
+ }" class="block w-36 cursor-pointer pr-3 no-underline capitalize"></a>`, f("a", "block capitalize cursor-pointer no-underline pl-2 pl-3 pr-3 text-a text-b text-gray-600 w-36", "")},
+ {"AlpineJS transition 1", `<div x-transition:enter-start="opacity-0 transform mobile:-translate-x-8 sm:-translate-y-8">`, f("div", "mobile:-translate-x-8 opacity-0 sm:-translate-y-8 transform", "")},
+ {"Vue bind", `<div v-bind:class="{ active: isActive }"></div>`, f("div", "active", "")},
+ // Issue #7746
+ {"Apostrophe inside attribute value", `<a class="missingclass" title="Plus d'information">my text</a><div></div>`, f("a div", "missingclass", "")},
+ // Issue #7567
+ {"Script tags content should be skipped", `<script><span>foo</span><span>bar</span></script><div class="foo"></div>`, f("div script", "foo", "")},
+ {"Style tags content should be skipped", `<style>p{color: red;font-size: 20px;}</style><div class="foo"></div>`, f("div style", "foo", "")},
+ {"Pre tags content should be skipped", `<pre class="preclass"><span>foo</span><span>bar</span></pre><div class="foo"></div>`, f("div pre", "foo preclass", "")},
+ {"Textarea tags content should be skipped", `<textarea class="textareaclass"><span>foo</span><span>bar</span></textarea><div class="foo"></div>`, f("div textarea", "foo textareaclass", "")},
+ {"DOCTYPE should beskipped", `<!DOCTYPE html>`, f("", "", "")},
+ {"Comments should be skipped", `<!-- example comment -->`, f("", "", "")},
+ {"Comments with elements before and after", `<div></div><!-- example comment --><span><span>`, f("div span", "", "")},
+ // Issue #8530
+ {"Comment with single quote", `<!-- Hero Area Image d'accueil --><i class="foo">`, f("i", "foo", "")},
+ {"Uppercase tags", `<DIV></DIV>`, f("div", "", "")},
+ {"Predefined tags with distinct casing", `<script>if (a < b) { nothing(); }</SCRIPT><div></div>`, f("div script", "", "")},
+ // Issue #8417
+ {"Tabs inline", `<hr id="a" class="foo"><div class="bar">d</div>`, f("div hr", "bar foo", "a")},
+ {"Tabs on multiple rows", `<form
+ id="a"
+ action="www.example.com"
+ method="post"
+></form>
+<div id="b" class="foo">d</div>`, f("div form", "foo", "a b")},
+ {"Big input, multibyte runes", strings.Repeat(`神真美好 `, rnd.Intn(500)+1) + "<div id=\"神真美好\" class=\"foo\">" + strings.Repeat(`神真美好 `, rnd.Intn(100)+1) + " <span>神真美好</span>", f("div span", "foo", "神真美好")},
+ } {
+
+ for _, variant := range []struct {
+ minify bool
+ }{
+ {minify: false},
+ {minify: true},
+ } {
+
+ c.Run(fmt.Sprintf("%s--minify-%t", test.name, variant.minify), func(c *qt.C) {
+ w := newHTMLElementsCollectorWriter(newHTMLElementsCollector())
+ if variant.minify {
+ if skipMinifyTest[test.name] {
+ c.Skip("skip minify test")
+ }
+ v := config.NewWithTestDefaults()
+ m, _ := minifiers.New(media.DefaultTypes, output.DefaultFormats, v)
+ m.Minify(media.HTMLType, w, strings.NewReader(test.html))
+
+ } else {
+ var buff bytes.Buffer
+ buff.WriteString(test.html)
+ io.Copy(w, &buff)
+ }
+ got := w.collector.getHTMLElements()
+ c.Assert(got, qt.DeepEquals, test.expect)
+ })
+ }
+ }
+
+}
+
+func BenchmarkElementsCollectorWriter(b *testing.B) {
+ const benchHTML = `
+<!DOCTYPE html>
+<html>
+<head>
+<title>title</title>
+<style>
+ a {color: red;}
+ .c {color: blue;}
+</style>
+</head>
+<body id="i1" class="a b c d">
+<a class="c d e"></a>
+<hr>
+<a class="c d e"></a>
+<a class="c d e"></a>
+<hr>
+<a id="i2" class="c d e f"></a>
+<a id="i3" class="c d e"></a>
+<a class="c d e"></a>
+<p>To force<br> line breaks<br> in a text,<br> use the br<br> element.</p>
+<hr>
+<a class="c d e"></a>
+<a class="c d e"></a>
+<a class="c d e"></a>
+<a class="c d e"></a>
+<table>
+ <thead class="ch">
+ <tr>
+ <th>Month</th>
+ <th>Savings</th>
+ </tr>
+ </thead>
+ <tbody class="cb">
+ <tr>
+ <td>January</td>
+ <td>$100</td>
+ </tr>
+ <tr>
+ <td>February</td>
+ <td>$200</td>
+ </tr>
+ </tbody>
+ <tfoot class="cf">
+ <tr>
+ <td></td>
+ <td>$300</td>
+ </tr>
+ </tfoot>
+</table>
+</body>
+</html>
+`
+ for i := 0; i < b.N; i++ {
+ w := newHTMLElementsCollectorWriter(newHTMLElementsCollector())
+ fmt.Fprint(w, benchHTML)
+
+ }
+}
diff --git a/publisher/publisher.go b/publisher/publisher.go
new file mode 100644
index 000000000..63eb1011f
--- /dev/null
+++ b/publisher/publisher.go
@@ -0,0 +1,190 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package publisher
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net/url"
+ "sync/atomic"
+
+ "github.com/gohugoio/hugo/resources"
+
+ "github.com/gohugoio/hugo/media"
+
+ "github.com/gohugoio/hugo/minifiers"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/transform"
+ "github.com/gohugoio/hugo/transform/livereloadinject"
+ "github.com/gohugoio/hugo/transform/metainject"
+ "github.com/gohugoio/hugo/transform/urlreplacers"
+)
+
+// Descriptor describes the needed publishing chain for an item.
+type Descriptor struct {
+ // The content to publish.
+ Src io.Reader
+
 + // The OutputFormat of this content.
+ OutputFormat output.Format
+
+ // Where to publish this content. This is a filesystem-relative path.
+ TargetPath string
+
+ // Counter for the end build summary.
+ StatCounter *uint64
+
 + // Configuration that triggers pre-processing.
+ // LiveReload script will be injected if this is != nil
+ LiveReloadBaseURL *url.URL
+
+ // Enable to inject the Hugo generated tag in the header. Is currently only
+ // injected on the home page for HTML type of output formats.
+ AddHugoGeneratorTag bool
+
+ // If set, will replace all relative URLs with this one.
+ AbsURLPath string
+
+ // Enable to minify the output using the OutputFormat defined above to
+ // pick the correct minifier configuration.
+ Minify bool
+}
+
+// DestinationPublisher is the default and currently only publisher in Hugo. This
+// publisher prepares and publishes an item to the defined destination, e.g. /public.
+type DestinationPublisher struct {
+ fs afero.Fs
+ min minifiers.Client
+ htmlElementsCollector *htmlElementsCollector
+}
+
+// NewDestinationPublisher creates a new DestinationPublisher.
+func NewDestinationPublisher(rs *resources.Spec, outputFormats output.Formats, mediaTypes media.Types) (pub DestinationPublisher, err error) {
+ fs := rs.BaseFs.PublishFs
+ cfg := rs.Cfg
+ var classCollector *htmlElementsCollector
+ if rs.BuildConfig.WriteStats {
+ classCollector = newHTMLElementsCollector()
+ }
+ pub = DestinationPublisher{fs: fs, htmlElementsCollector: classCollector}
+ pub.min, err = minifiers.New(mediaTypes, outputFormats, cfg)
+ return
+}
+
+// Publish applies any relevant transformations and writes the file
+// to its destination, e.g. /public.
+func (p DestinationPublisher) Publish(d Descriptor) error {
+ if d.TargetPath == "" {
+ return errors.New("Publish: must provide a TargetPath")
+ }
+
+ src := d.Src
+
+ transformers := p.createTransformerChain(d)
+
+ if len(transformers) != 0 {
+ b := bp.GetBuffer()
+ defer bp.PutBuffer(b)
+
+ if err := transformers.Apply(b, d.Src); err != nil {
+ return fmt.Errorf("failed to process %q: %w", d.TargetPath, err)
+ }
+
+ // This is now what we write to disk.
+ src = b
+ }
+
+ f, err := helpers.OpenFileForWriting(p.fs, d.TargetPath)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ var w io.Writer = f
+
+ if p.htmlElementsCollector != nil && d.OutputFormat.IsHTML {
+ w = io.MultiWriter(w, newHTMLElementsCollectorWriter(p.htmlElementsCollector))
+ }
+
+ _, err = io.Copy(w, src)
+ if err == nil && d.StatCounter != nil {
+ atomic.AddUint64(d.StatCounter, uint64(1))
+ }
+
+ return err
+}
+
+func (p DestinationPublisher) PublishStats() PublishStats {
+ if p.htmlElementsCollector == nil {
+ return PublishStats{}
+ }
+
+ return PublishStats{
+ HTMLElements: p.htmlElementsCollector.getHTMLElements(),
+ }
+}
+
+type PublishStats struct {
+ HTMLElements HTMLElements `json:"htmlElements"`
+}
+
+// Publisher publishes a result file.
+type Publisher interface {
+ Publish(d Descriptor) error
+ PublishStats() PublishStats
+}
+
+// XML transformer := transform.New(urlreplacers.NewAbsURLInXMLTransformer(path))
+func (p DestinationPublisher) createTransformerChain(f Descriptor) transform.Chain {
+ transformers := transform.NewEmpty()
+
+ isHTML := f.OutputFormat.IsHTML
+
+ if f.AbsURLPath != "" {
+ if isHTML {
+ transformers = append(transformers, urlreplacers.NewAbsURLTransformer(f.AbsURLPath))
+ } else {
+ // Assume XML.
+ transformers = append(transformers, urlreplacers.NewAbsURLInXMLTransformer(f.AbsURLPath))
+ }
+ }
+
+ if isHTML {
+ if f.LiveReloadBaseURL != nil {
+ transformers = append(transformers, livereloadinject.New(*f.LiveReloadBaseURL))
+ }
+
+ // This is only injected on the home page.
+ if f.AddHugoGeneratorTag {
+ transformers = append(transformers, metainject.HugoGenerator)
+ }
+
+ }
+
+ if p.min.MinifyOutput {
+ minifyTransformer := p.min.Transformer(f.OutputFormat.MediaType)
+ if minifyTransformer != nil {
+ transformers = append(transformers, minifyTransformer)
+ }
+ }
+
+ return transformers
+}
diff --git a/pull-docs.sh b/pull-docs.sh
new file mode 100755
index 000000000..b8850530a
--- /dev/null
+++ b/pull-docs.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+HUGO_DOCS_BRANCH="${HUGO_DOCS_BRANCH-master}"
+
+# We may extend this to also push changes in the other direction, but this is the most important step.
+git subtree pull --prefix=docs/ https://github.com/gohugoio/hugoDocs.git ${HUGO_DOCS_BRANCH} --squash
+
diff --git a/related/inverted_index.go b/related/inverted_index.go
new file mode 100644
index 000000000..1b1f69e3e
--- /dev/null
+++ b/related/inverted_index.go
@@ -0,0 +1,458 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package related holds code to help finding related content.
+package related
+
+import (
+ "errors"
+ "fmt"
+ "math"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/mitchellh/mapstructure"
+)
+
+var (
+ _ Keyword = (*StringKeyword)(nil)
+ zeroDate = time.Time{}
+
+ // DefaultConfig is the default related config.
+ DefaultConfig = Config{
+ Threshold: 80,
+ Indices: IndexConfigs{
+ IndexConfig{Name: "keywords", Weight: 100},
+ IndexConfig{Name: "date", Weight: 10},
+ },
+ }
+)
+
+/*
+Config is the top level configuration element used to configure how to retrieve
+related content in Hugo.
+
+An example site config.toml:
+
+ [related]
+ threshold = 1
+ [[related.indices]]
+ name = "keywords"
+ weight = 200
+ [[related.indices]]
+ name = "tags"
+ weight = 100
+ [[related.indices]]
+ name = "date"
+ weight = 1
+ pattern = "2006"
+*/
+type Config struct {
+ // Only include matches >= threshold, a normalized rank between 0 and 100.
+ Threshold int
+
+ // To get stable "See also" sections we, by default, exclude newer related pages.
+ IncludeNewer bool
+
+ // Will lower case all string values and queries to the indices.
+ // May get better results, but at a slight performance cost.
+ ToLower bool
+
+ Indices IndexConfigs
+}
+
+// Add adds a given index.
+func (c *Config) Add(index IndexConfig) {
+ if c.ToLower {
+ index.ToLower = true
+ }
+ c.Indices = append(c.Indices, index)
+}
+
+// IndexConfigs holds a set of index configurations.
+type IndexConfigs []IndexConfig
+
+// IndexConfig configures an index.
+type IndexConfig struct {
+ // The index name. This directly maps to a field or Param name.
+ Name string
+
+ // Contextual pattern used to convert the Param value into a string.
+ // Currently only used for dates. Can be used to, say, bump posts in the same
+ // time frame when searching for related documents.
+ // For dates it follows Go's time.Format patterns, i.e.
+ // "2006" for YYYY and "200601" for YYYYMM.
+ Pattern string
+
+ // This field's weight when doing multi-index searches. Higher is "better".
+ Weight int
+
 + // Will lower case all string values in and queries to this index.
 + // May get more accurate results, but at a slight performance cost.
+ ToLower bool
+}
+
+// Document is the interface an indexable document in Hugo must fulfill.
+type Document interface {
+ // RelatedKeywords returns a list of keywords for the given index config.
+ RelatedKeywords(cfg IndexConfig) ([]Keyword, error)
+
+ // When this document was or will be published.
+ PublishDate() time.Time
+
 + // Name is used as a tiebreaker if both Weight and PublishDate are
+ // the same.
+ Name() string
+}
+
+// InvertedIndex holds an inverted index, also sometimes named posting list, which
+// lists, for every possible search term, the documents that contain that term.
+type InvertedIndex struct {
+ cfg Config
+ index map[string]map[Keyword][]Document
+
+ minWeight int
+ maxWeight int
+}
+
+func (idx *InvertedIndex) getIndexCfg(name string) (IndexConfig, bool) {
+ for _, conf := range idx.cfg.Indices {
+ if conf.Name == name {
+ return conf, true
+ }
+ }
+
+ return IndexConfig{}, false
+}
+
+// NewInvertedIndex creates a new InvertedIndex.
+// Documents to index must be added in Add.
+func NewInvertedIndex(cfg Config) *InvertedIndex {
+ idx := &InvertedIndex{index: make(map[string]map[Keyword][]Document), cfg: cfg}
+ for _, conf := range cfg.Indices {
+ idx.index[conf.Name] = make(map[Keyword][]Document)
+ if conf.Weight < idx.minWeight {
+ // By default, the weight scale starts at 0, but we allow
+ // negative weights.
+ idx.minWeight = conf.Weight
+ }
+ if conf.Weight > idx.maxWeight {
+ idx.maxWeight = conf.Weight
+ }
+ }
+ return idx
+}
+
+// Add documents to the inverted index.
+// The value must support == and !=.
+func (idx *InvertedIndex) Add(docs ...Document) error {
+ var err error
+ for _, config := range idx.cfg.Indices {
+ if config.Weight == 0 {
+ // Disabled
+ continue
+ }
+ setm := idx.index[config.Name]
+
+ for _, doc := range docs {
+ var words []Keyword
+ words, err = doc.RelatedKeywords(config)
+ if err != nil {
+ continue
+ }
+
+ for _, keyword := range words {
+ setm[keyword] = append(setm[keyword], doc)
+ }
+ }
+ }
+
+ return err
+}
+
+// queryElement holds the index name and keywords that can be used to compose a
+// search for related content.
+type queryElement struct {
+ Index string
+ Keywords []Keyword
+}
+
+func newQueryElement(index string, keywords ...Keyword) queryElement {
+ return queryElement{Index: index, Keywords: keywords}
+}
+
+type ranks []*rank
+
+type rank struct {
+ Doc Document
+ Weight int
+ Matches int
+}
+
+func (r *rank) addWeight(w int) {
+ r.Weight += w
+ r.Matches++
+}
+
+func newRank(doc Document, weight int) *rank {
+ return &rank{Doc: doc, Weight: weight, Matches: 1}
+}
+
+func (r ranks) Len() int { return len(r) }
+func (r ranks) Swap(i, j int) { r[i], r[j] = r[j], r[i] }
+func (r ranks) Less(i, j int) bool {
+ if r[i].Weight == r[j].Weight {
+ if r[i].Doc.PublishDate() == r[j].Doc.PublishDate() {
+ return r[i].Doc.Name() < r[j].Doc.Name()
+ }
+ return r[i].Doc.PublishDate().After(r[j].Doc.PublishDate())
+ }
+ return r[i].Weight > r[j].Weight
+}
+
+// SearchDoc finds the documents matching any of the keywords in the given indices
+// against the given document.
+// The resulting document set will be sorted according to number of matches
+// and the index weights, and any matches with a rank below the configured
+// threshold (normalize to 0..100) will be removed.
+// If an index name is provided, only that index will be queried.
+func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document, error) {
+ var q []queryElement
+
+ var configs IndexConfigs
+
+ if len(indices) == 0 {
+ configs = idx.cfg.Indices
+ } else {
+ configs = make(IndexConfigs, len(indices))
+ for i, indexName := range indices {
+ cfg, found := idx.getIndexCfg(indexName)
+ if !found {
+ return nil, fmt.Errorf("index %q not found", indexName)
+ }
+ configs[i] = cfg
+ }
+ }
+
+ for _, cfg := range configs {
+ keywords, err := doc.RelatedKeywords(cfg)
+ if err != nil {
+ return nil, err
+ }
+
+ q = append(q, newQueryElement(cfg.Name, keywords...))
+
+ }
+
+ return idx.searchDate(doc.PublishDate(), q...)
+}
+
+// ToKeywords returns a Keyword slice of the given input.
+func (cfg IndexConfig) ToKeywords(v any) ([]Keyword, error) {
+ var (
+ keywords []Keyword
+ toLower = cfg.ToLower
+ )
+ switch vv := v.(type) {
+ case string:
+ if toLower {
+ vv = strings.ToLower(vv)
+ }
+ keywords = append(keywords, StringKeyword(vv))
+ case []string:
+ if toLower {
+ vc := make([]string, len(vv))
+ copy(vc, vv)
+ for i := 0; i < len(vc); i++ {
+ vc[i] = strings.ToLower(vc[i])
+ }
+ vv = vc
+ }
+ keywords = append(keywords, StringsToKeywords(vv...)...)
+ case time.Time:
+ layout := "2006"
+ if cfg.Pattern != "" {
+ layout = cfg.Pattern
+ }
+ keywords = append(keywords, StringKeyword(vv.Format(layout)))
+ case nil:
+ return keywords, nil
+ default:
+ return keywords, fmt.Errorf("indexing currently not supported for index %q and type %T", cfg.Name, vv)
+ }
+
+ return keywords, nil
+}
+
+// SearchKeyValues finds the documents matching any of the keywords in the given indices.
+// The resulting document set will be sorted according to number of matches
+// and the index weights, and any matches with a rank below the configured
+// threshold (normalize to 0..100) will be removed.
+func (idx *InvertedIndex) SearchKeyValues(args ...types.KeyValues) ([]Document, error) {
+ q := make([]queryElement, len(args))
+
+ for i, arg := range args {
+ var keywords []Keyword
+ key := arg.KeyString()
+ if key == "" {
+ return nil, fmt.Errorf("index %q not valid", arg.Key)
+ }
+ conf, found := idx.getIndexCfg(key)
+ if !found {
+ return nil, fmt.Errorf("index %q not found", key)
+ }
+
+ for _, val := range arg.Values {
+ k, err := conf.ToKeywords(val)
+ if err != nil {
+ return nil, err
+ }
+ keywords = append(keywords, k...)
+ }
+
+ q[i] = newQueryElement(conf.Name, keywords...)
+
+ }
+
+ return idx.search(q...)
+}
+
+func (idx *InvertedIndex) search(query ...queryElement) ([]Document, error) {
+ return idx.searchDate(zeroDate, query...)
+}
+
+func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement) ([]Document, error) {
+ matchm := make(map[Document]*rank, 200)
+ applyDateFilter := !idx.cfg.IncludeNewer && !upperDate.IsZero()
+
+ for _, el := range query {
+ setm, found := idx.index[el.Index]
+ if !found {
+ return []Document{}, fmt.Errorf("index for %q not found", el.Index)
+ }
+
+ config, found := idx.getIndexCfg(el.Index)
+ if !found {
+ return []Document{}, fmt.Errorf("index config for %q not found", el.Index)
+ }
+
+ for _, kw := range el.Keywords {
+ if docs, found := setm[kw]; found {
+ for _, doc := range docs {
+ if applyDateFilter {
+ // Exclude newer than the limit given
+ if doc.PublishDate().After(upperDate) {
+ continue
+ }
+ }
+ r, found := matchm[doc]
+ if !found {
+ matchm[doc] = newRank(doc, config.Weight)
+ } else {
+ r.addWeight(config.Weight)
+ }
+ }
+ }
+ }
+ }
+
+ if len(matchm) == 0 {
+ return []Document{}, nil
+ }
+
+ matches := make(ranks, 0, 100)
+
+ for _, v := range matchm {
+ avgWeight := v.Weight / v.Matches
+ weight := norm(avgWeight, idx.minWeight, idx.maxWeight)
+ threshold := idx.cfg.Threshold / v.Matches
+
+ if weight >= threshold {
+ matches = append(matches, v)
+ }
+ }
+
+ sort.Stable(matches)
+
+ result := make([]Document, len(matches))
+
+ for i, m := range matches {
+ result[i] = m.Doc
+ }
+
+ return result, nil
+}
+
+// normalizes num to a number between 0 and 100.
+func norm(num, min, max int) int {
+ if min > max {
+ panic("min > max")
+ }
+ return int(math.Floor((float64(num-min) / float64(max-min) * 100) + 0.5))
+}
+
+// DecodeConfig decodes a slice of map into Config.
+func DecodeConfig(m maps.Params) (Config, error) {
+ if m == nil {
+ return Config{}, errors.New("no related config provided")
+ }
+
+ if len(m) == 0 {
+ return Config{}, errors.New("empty related config provided")
+ }
+
+ var c Config
+
+ if err := mapstructure.WeakDecode(m, &c); err != nil {
+ return c, err
+ }
+
+ if c.Threshold < 0 || c.Threshold > 100 {
+ return Config{}, errors.New("related threshold must be between 0 and 100")
+ }
+
+ if c.ToLower {
+ for i := range c.Indices {
+ c.Indices[i].ToLower = true
+ }
+ }
+
+ return c, nil
+}
+
+// StringKeyword is a string search keyword.
+type StringKeyword string
+
+func (s StringKeyword) String() string {
+ return string(s)
+}
+
+// Keyword is the interface a keyword in the search index must implement.
+type Keyword interface {
+ String() string
+}
+
+// StringsToKeywords converts the given slice of strings to a slice of Keyword.
+func StringsToKeywords(s ...string) []Keyword {
+ kw := make([]Keyword, len(s))
+
+ for i := 0; i < len(s); i++ {
+ kw[i] = StringKeyword(s[i])
+ }
+
+ return kw
+}
diff --git a/related/inverted_index_test.go b/related/inverted_index_test.go
new file mode 100644
index 000000000..fdef06b13
--- /dev/null
+++ b/related/inverted_index_test.go
@@ -0,0 +1,316 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package related
+
+import (
+ "fmt"
+ "math/rand"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+type testDoc struct {
+ keywords map[string][]Keyword
+ date time.Time
+ name string
+}
+
+func (d *testDoc) String() string {
+ s := "\n"
+ for k, v := range d.keywords {
+ s += k + ":\t\t"
+ for _, vv := range v {
+ s += " " + vv.String()
+ }
+ s += "\n"
+ }
+ return s
+}
+
+func (d *testDoc) Name() string {
+ return d.name
+}
+
+func newTestDoc(name string, keywords ...string) *testDoc {
+ time.Sleep(1 * time.Millisecond)
+ return newTestDocWithDate(name, time.Now(), keywords...)
+}
+
+func newTestDocWithDate(name string, date time.Time, keywords ...string) *testDoc {
+ km := make(map[string][]Keyword)
+
+ kw := &testDoc{keywords: km, date: date}
+
+ kw.addKeywords(name, keywords...)
+ return kw
+}
+
+func (d *testDoc) addKeywords(name string, keywords ...string) *testDoc {
+ keywordm := createTestKeywords(name, keywords...)
+
+ for k, v := range keywordm {
+ keywords := make([]Keyword, len(v))
+ for i := 0; i < len(v); i++ {
+ keywords[i] = StringKeyword(v[i])
+ }
+ d.keywords[k] = keywords
+ }
+ return d
+}
+
+func createTestKeywords(name string, keywords ...string) map[string][]string {
+ return map[string][]string{
+ name: keywords,
+ }
+}
+
+func (d *testDoc) RelatedKeywords(cfg IndexConfig) ([]Keyword, error) {
+ return d.keywords[cfg.Name], nil
+}
+
+func (d *testDoc) PublishDate() time.Time {
+ return d.date
+}
+
+func TestSearch(t *testing.T) {
+ config := Config{
+ Threshold: 90,
+ IncludeNewer: false,
+ Indices: IndexConfigs{
+ IndexConfig{Name: "tags", Weight: 50},
+ IndexConfig{Name: "keywords", Weight: 65},
+ },
+ }
+
+ idx := NewInvertedIndex(config)
+ // idx.debug = true
+
+ docs := []Document{
+ newTestDoc("tags", "a", "b", "c", "d"),
+ newTestDoc("tags", "b", "d", "g"),
+ newTestDoc("tags", "b", "h").addKeywords("keywords", "a"),
+ newTestDoc("tags", "g", "h").addKeywords("keywords", "a", "b"),
+ }
+
+ idx.Add(docs...)
+
+ t.Run("count", func(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(len(idx.index), qt.Equals, 2)
+ set1, found := idx.index["tags"]
+ c.Assert(found, qt.Equals, true)
+ // 6 tags
+ c.Assert(len(set1), qt.Equals, 6)
+
+ set2, found := idx.index["keywords"]
+ c.Assert(found, qt.Equals, true)
+ c.Assert(len(set2), qt.Equals, 2)
+ })
+
+ t.Run("search-tags", func(t *testing.T) {
+ c := qt.New(t)
+ m, err := idx.search(newQueryElement("tags", StringsToKeywords("a", "b", "d", "z")...))
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(m), qt.Equals, 2)
+ c.Assert(m[0], qt.Equals, docs[0])
+ c.Assert(m[1], qt.Equals, docs[1])
+ })
+
+ t.Run("search-tags-and-keywords", func(t *testing.T) {
+ c := qt.New(t)
+ m, err := idx.search(
+ newQueryElement("tags", StringsToKeywords("a", "b", "z")...),
+ newQueryElement("keywords", StringsToKeywords("a", "b")...))
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(m), qt.Equals, 3)
+ c.Assert(m[0], qt.Equals, docs[3])
+ c.Assert(m[1], qt.Equals, docs[2])
+ c.Assert(m[2], qt.Equals, docs[0])
+ })
+
+ t.Run("searchdoc-all", func(t *testing.T) {
+ c := qt.New(t)
+ doc := newTestDoc("tags", "a").addKeywords("keywords", "a")
+ m, err := idx.SearchDoc(doc)
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(m), qt.Equals, 2)
+ c.Assert(m[0], qt.Equals, docs[3])
+ c.Assert(m[1], qt.Equals, docs[2])
+ })
+
+ t.Run("searchdoc-tags", func(t *testing.T) {
+ c := qt.New(t)
+ doc := newTestDoc("tags", "a", "b", "d", "z").addKeywords("keywords", "a", "b")
+ m, err := idx.SearchDoc(doc, "tags")
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(m), qt.Equals, 2)
+ c.Assert(m[0], qt.Equals, docs[0])
+ c.Assert(m[1], qt.Equals, docs[1])
+ })
+
+ t.Run("searchdoc-keywords-date", func(t *testing.T) {
+ c := qt.New(t)
+ doc := newTestDoc("tags", "a", "b", "d", "z").addKeywords("keywords", "a", "b")
+ // This will get a date newer than the others.
+ newDoc := newTestDoc("keywords", "a", "b")
+ idx.Add(newDoc)
+
+ m, err := idx.SearchDoc(doc, "keywords")
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(m), qt.Equals, 2)
+ c.Assert(m[0], qt.Equals, docs[3])
+ })
+
+ t.Run("searchdoc-keywords-same-date", func(t *testing.T) {
+ c := qt.New(t)
+ idx := NewInvertedIndex(config)
+
+ date := time.Now()
+
+ doc := newTestDocWithDate("keywords", date, "a", "b")
+ doc.name = "thedoc"
+
+ for i := 0; i < 10; i++ {
+ docc := *doc
+ docc.name = fmt.Sprintf("doc%d", i)
+ idx.Add(&docc)
+ }
+
+ m, err := idx.SearchDoc(doc, "keywords")
+ c.Assert(err, qt.IsNil)
+ c.Assert(len(m), qt.Equals, 10)
+ for i := 0; i < 10; i++ {
+ c.Assert(m[i].Name(), qt.Equals, fmt.Sprintf("doc%d", i))
+ }
+ })
+}
+
+func TestToKeywordsToLower(t *testing.T) {
+ c := qt.New(t)
+ slice := []string{"A", "B", "C"}
+ config := IndexConfig{ToLower: true}
+ keywords, err := config.ToKeywords(slice)
+ c.Assert(err, qt.IsNil)
+ c.Assert(slice, qt.DeepEquals, []string{"A", "B", "C"})
+ c.Assert(keywords, qt.DeepEquals, []Keyword{
+ StringKeyword("a"),
+ StringKeyword("b"),
+ StringKeyword("c"),
+ })
+}
+
+func BenchmarkRelatedNewIndex(b *testing.B) {
+ pages := make([]*testDoc, 100)
+ numkeywords := 30
+ allKeywords := make([]string, numkeywords)
+ for i := 0; i < numkeywords; i++ {
+ allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
+ }
+
+ for i := 0; i < len(pages); i++ {
+ start := rand.Intn(len(allKeywords))
+ end := start + 3
+ if end >= len(allKeywords) {
+ end = start + 1
+ }
+
+ kw := newTestDoc("tags", allKeywords[start:end]...)
+ if i%5 == 0 {
+ start := rand.Intn(len(allKeywords))
+ end := start + 3
+ if end >= len(allKeywords) {
+ end = start + 1
+ }
+ kw.addKeywords("keywords", allKeywords[start:end]...)
+ }
+
+ pages[i] = kw
+ }
+
+ cfg := Config{
+ Threshold: 50,
+ Indices: IndexConfigs{
+ IndexConfig{Name: "tags", Weight: 100},
+ IndexConfig{Name: "keywords", Weight: 200},
+ },
+ }
+
+ b.Run("singles", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ idx := NewInvertedIndex(cfg)
+ for _, doc := range pages {
+ idx.Add(doc)
+ }
+ }
+ })
+
+ b.Run("all", func(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ idx := NewInvertedIndex(cfg)
+ docs := make([]Document, len(pages))
+ for i := 0; i < len(pages); i++ {
+ docs[i] = pages[i]
+ }
+ idx.Add(docs...)
+ }
+ })
+}
+
+func BenchmarkRelatedMatchesIn(b *testing.B) {
+ q1 := newQueryElement("tags", StringsToKeywords("keyword2", "keyword5", "keyword32", "asdf")...)
+ q2 := newQueryElement("keywords", StringsToKeywords("keyword3", "keyword4")...)
+
+ docs := make([]*testDoc, 1000)
+ numkeywords := 20
+ allKeywords := make([]string, numkeywords)
+ for i := 0; i < numkeywords; i++ {
+ allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
+ }
+
+ cfg := Config{
+ Threshold: 20,
+ Indices: IndexConfigs{
+ IndexConfig{Name: "tags", Weight: 100},
+ IndexConfig{Name: "keywords", Weight: 200},
+ },
+ }
+
+ idx := NewInvertedIndex(cfg)
+
+ for i := 0; i < len(docs); i++ {
+ start := rand.Intn(len(allKeywords))
+ end := start + 3
+ if end >= len(allKeywords) {
+ end = start + 1
+ }
+
+ index := "tags"
+ if i%5 == 0 {
+ index = "keywords"
+ }
+
+ idx.Add(newTestDoc(index, allKeywords[start:end]...))
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ if i%10 == 0 {
+ idx.search(q2)
+ } else {
+ idx.search(q1)
+ }
+ }
+}
diff --git a/releaser/git.go b/releaser/git.go
new file mode 100644
index 000000000..ced363a9d
--- /dev/null
+++ b/releaser/git.go
@@ -0,0 +1,253 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package releaser
+
+import (
+ "fmt"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hexec"
+)
+
+var issueRe = regexp.MustCompile(`(?i)(?:Updates?|Closes?|Fix.*|See) #(\d+)`)
+
+type changeLog struct {
+ Version string
+ Notes gitInfos
+ All gitInfos
+ Docs gitInfos
+
+ // Overall stats
+ Repo *gitHubRepo
+ ContributorCount int
+ ThemeCount int
+}
+
+func newChangeLog(infos, docInfos gitInfos) *changeLog {
+ log := &changeLog{
+ Docs: docInfos,
+ }
+
+ for _, info := range infos {
+ // TODO(bep) improve
+ if regexp.MustCompile("(?i)deprecate|note").MatchString(info.Subject) {
+ log.Notes = append(log.Notes, info)
+ }
+
+ log.All = append(log.All, info)
+ info.Subject = strings.TrimSpace(info.Subject)
+
+ }
+
+ return log
+}
+
// gitInfo holds the metadata for a single commit, optionally enriched with
// data fetched from the GitHub API.
type gitInfo struct {
	Hash    string
	Author  string
	Subject string
	Body    string

	GitHubCommit *gitHubCommit
}

// Issues returns the issue numbers referenced in the commit body
// (e.g. "Fixes #123").
func (g gitInfo) Issues() []int {
	return extractIssues(g.Body)
}

// AuthorID returns the GitHub login when available, falling back to the
// author email from the git log.
func (g gitInfo) AuthorID() string {
	if g.GitHubCommit != nil {
		return g.GitHubCommit.Author.Login
	}
	return g.Author
}
+
+func extractIssues(body string) []int {
+ var i []int
+ m := issueRe.FindAllStringSubmatch(body, -1)
+ for _, mm := range m {
+ issueID, err := strconv.Atoi(mm[1])
+ if err != nil {
+ continue
+ }
+ i = append(i, issueID)
+ }
+ return i
+}
+
+type gitInfos []gitInfo
+
+func git(args ...string) (string, error) {
+ cmd, _ := hexec.SafeCommand("git", args...)
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ return "", fmt.Errorf("git failed: %q: %q (%q)", err, out, args)
+ }
+ return string(out), nil
+}
+
// getGitInfos returns the commit log for repo from tag up to HEAD. When
// remote is true each commit is enriched with GitHub API data.
func getGitInfos(tag, repo, repoPath string, remote bool) (gitInfos, error) {
	return getGitInfosBefore("HEAD", tag, repo, repoPath, remote)
}
+
+type countribCount struct {
+ Author string
+ GitHubAuthor gitHubAuthor
+ Count int
+}
+
+func (c countribCount) AuthorLink() string {
+ if c.GitHubAuthor.HTMLURL != "" {
+ return fmt.Sprintf("[@%s](%s)", c.GitHubAuthor.Login, c.GitHubAuthor.HTMLURL)
+ }
+
+ if !strings.Contains(c.Author, "@") {
+ return c.Author
+ }
+
+ return c.Author[:strings.Index(c.Author, "@")]
+}
+
// contribCounts sorts by descending commit count (implements sort.Interface).
type contribCounts []countribCount

func (c contribCounts) Less(i, j int) bool { return c[i].Count > c[j].Count }
func (c contribCounts) Len() int { return len(c) }
func (c contribCounts) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
+
+func (g gitInfos) ContribCountPerAuthor() contribCounts {
+ var c contribCounts
+
+ counters := make(map[string]countribCount)
+
+ for _, gi := range g {
+ authorID := gi.AuthorID()
+ if count, ok := counters[authorID]; ok {
+ count.Count = count.Count + 1
+ counters[authorID] = count
+ } else {
+ var ghA gitHubAuthor
+ if gi.GitHubCommit != nil {
+ ghA = gi.GitHubCommit.Author
+ }
+ authorCount := countribCount{Count: 1, Author: gi.Author, GitHubAuthor: ghA}
+ counters[authorID] = authorCount
+ }
+ }
+
+ for _, v := range counters {
+ c = append(c, v)
+ }
+
+ sort.Sort(c)
+ return c
+}
+
// getGitInfosBefore returns the commits in repoPath between tag and ref.
// The git log is emitted with \x1e as the entry separator and \x1f as the
// field separator (hash, author email, subject, body). When remote is true,
// each commit is looked up in the GitHub API for author metadata; lookup
// failures are silently ignored (best effort).
func getGitInfosBefore(ref, tag, repo, repoPath string, remote bool) (gitInfos, error) {
	client := newGitHubAPI(repo)
	var g gitInfos

	log, err := gitLogBefore(ref, tag, repoPath)
	if err != nil {
		return g, err
	}

	log = strings.Trim(log, "\n\x1e'")
	entries := strings.Split(log, "\x1e")

	for _, entry := range entries {
		items := strings.Split(entry, "\x1f")
		gi := gitInfo{}

		// Fields are positional and optional: a commit without a body
		// yields fewer items.
		if len(items) > 0 {
			gi.Hash = items[0]
		}
		if len(items) > 1 {
			gi.Author = items[1]
		}
		if len(items) > 2 {
			gi.Subject = items[2]
		}
		if len(items) > 3 {
			gi.Body = items[3]
		}

		if remote && gi.Hash != "" {
			gc, err := client.fetchCommit(gi.Hash)
			if err == nil {
				gi.GitHubCommit = &gc
			}
		}
		g = append(g, gi)
	}

	return g, nil
}
+
+// Ignore autogenerated commits etc. in change log. This is a regexp.
+const ignoredCommits = "snapcraft:|Merge commit|Squashed"
+
+func gitLogBefore(ref, tag, repoPath string) (string, error) {
+ var prevTag string
+ var err error
+ if tag != "" {
+ prevTag = tag
+ } else {
+ prevTag, err = gitVersionTagBefore(ref)
+ if err != nil {
+ return "", err
+ }
+ }
+
+ defaultArgs := []string{"log", "-E", fmt.Sprintf("--grep=%s", ignoredCommits), "--invert-grep", "--pretty=format:%x1e%h%x1f%aE%x1f%s%x1f%b", "--abbrev-commit", prevTag + ".." + ref}
+
+ var args []string
+
+ if repoPath != "" {
+ args = append([]string{"-C", repoPath}, defaultArgs...)
+ } else {
+ args = defaultArgs
+ }
+
+ log, err := git(args...)
+ if err != nil {
+ return ",", err
+ }
+
+ return log, err
+}
+
// gitVersionTagBefore returns the closest v-prefixed version tag before ref.
func gitVersionTagBefore(ref string) (string, error) {
	return gitShort("describe", "--tags", "--abbrev=0", "--always", "--match", "v[0-9]*", ref+"^")
}
+
+func gitShort(args ...string) (output string, err error) {
+ output, err = git(args...)
+ return strings.Replace(strings.Split(output, "\n")[0], "'", "", -1), err
+}
+
+func tagExists(tag string) (bool, error) {
+ out, err := git("tag", "-l", tag)
+ if err != nil {
+ return false, err
+ }
+
+ if strings.Contains(out, tag) {
+ return true, nil
+ }
+
+ return false, nil
+}
diff --git a/releaser/git_test.go b/releaser/git_test.go
new file mode 100644
index 000000000..ff77eb8c6
--- /dev/null
+++ b/releaser/git_test.go
@@ -0,0 +1,86 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package releaser
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestGitInfos exercises getGitInfos against the local checkout's real git
// history, so it is skipped on CI.
func TestGitInfos(t *testing.T) {
	c := qt.New(t)
	skipIfCI(t)
	infos, err := getGitInfos("v0.20", "hugo", "", false)

	c.Assert(err, qt.IsNil)
	c.Assert(len(infos) > 0, qt.Equals, true)
}
+
// TestIssuesRe verifies that extractIssues finds the supported reference
// forms (Updates/Fix/closes/See #N) and ignores plain "#N" mentions.
func TestIssuesRe(t *testing.T) {
	c := qt.New(t)

	body := `
This is a commit message.

Updates #123
Fix #345
closes #543
See #456
	`

	issues := extractIssues(body)

	c.Assert(len(issues), qt.Equals, 4)
	c.Assert(issues[0], qt.Equals, 123)
	c.Assert(issues[2], qt.Equals, 543)

	bodyNoIssues := `
This is a commit message without issue refs.

But it has e #10 to make old regexp confused.
Streets #20.
	`

	emptyIssuesList := extractIssues(bodyNoIssues)
	c.Assert(len(emptyIssuesList), qt.Equals, 0)
}
+
// TestGitVersionTagBefore depends on the real tag history of this
// repository; skipped on CI.
func TestGitVersionTagBefore(t *testing.T) {
	skipIfCI(t)
	c := qt.New(t)
	v1, err := gitVersionTagBefore("v0.18")
	c.Assert(err, qt.IsNil)
	c.Assert(v1, qt.Equals, "v0.17")
}
+
// TestTagExists checks both a tag known to exist and a bogus one; it needs
// the real tag list, so it is skipped on CI.
func TestTagExists(t *testing.T) {
	skipIfCI(t)
	c := qt.New(t)
	b1, err := tagExists("v0.18")
	c.Assert(err, qt.IsNil)
	c.Assert(b1, qt.Equals, true)

	b2, err := tagExists("adfagdsfg")
	c.Assert(err, qt.IsNil)
	c.Assert(b2, qt.Equals, false)
}
+
// skipIfCI skips git-dependent tests on CI, where the clone is shallow
// and the git version may lack required flags.
func skipIfCI(t *testing.T) {
	if isCI() {
		// Travis has an ancient git with no --invert-grep: https://github.com/travis-ci/travis-ci/issues/6328
		// Also Travis clones very shallowly, making some of the tests above shaky.
		t.Skip("Skip git test on Linux to make Travis happy.")
	}
}
diff --git a/releaser/github.go b/releaser/github.go
new file mode 100644
index 000000000..ffb880423
--- /dev/null
+++ b/releaser/github.go
@@ -0,0 +1,143 @@
+package releaser
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "strings"
+)
+
// API endpoint templates; the REPO placeholder is substituted with the
// concrete repository name (e.g. "hugo") in newGitHubAPI.
var (
	gitHubCommitsAPI      = "https://api.github.com/repos/gohugoio/REPO/commits/%s"
	gitHubRepoAPI         = "https://api.github.com/repos/gohugoio/REPO"
	gitHubContributorsAPI = "https://api.github.com/repos/gohugoio/REPO/contributors"
)

// gitHubAPI holds the resolved endpoint URLs for one GitHub repository.
type gitHubAPI struct {
	commitsAPITemplate      string
	repoAPI                 string
	contributorsAPITemplate string
}

// newGitHubAPI creates a gitHubAPI for the given repository name under the
// gohugoio organization.
func newGitHubAPI(repo string) *gitHubAPI {
	// strings.ReplaceAll is the idiomatic form of strings.Replace(..., -1).
	return &gitHubAPI{
		commitsAPITemplate:      strings.ReplaceAll(gitHubCommitsAPI, "REPO", repo),
		repoAPI:                 strings.ReplaceAll(gitHubRepoAPI, "REPO", repo),
		contributorsAPITemplate: strings.ReplaceAll(gitHubContributorsAPI, "REPO", repo),
	}
}
+
// gitHubCommit is the subset of the GitHub commits API response we use.
type gitHubCommit struct {
	Author  gitHubAuthor `json:"author"`
	HTMLURL string       `json:"html_url"`
}

// gitHubAuthor is a GitHub user as embedded in commit responses.
type gitHubAuthor struct {
	ID        int    `json:"id"`
	Login     string `json:"login"`
	HTMLURL   string `json:"html_url"`
	AvatarURL string `json:"avatar_url"`
}

// gitHubRepo is the subset of the GitHub repository API response we use.
// Contributors is not part of the repo payload; it is filled in separately
// by fetchRepo.
type gitHubRepo struct {
	ID           int    `json:"id"`
	Name         string `json:"name"`
	Description  string `json:"description"`
	HTMLURL      string `json:"html_url"`
	Stars        int    `json:"stargazers_count"`
	Contributors []gitHubContributor
}

// gitHubContributor is one entry from the contributors endpoint.
type gitHubContributor struct {
	ID            int    `json:"id"`
	Login         string `json:"login"`
	HTMLURL       string `json:"html_url"`
	Contributions int    `json:"contributions"`
}
+
// fetchCommit looks up a single commit by hash or ref via the GitHub API.
func (g *gitHubAPI) fetchCommit(ref string) (gitHubCommit, error) {
	var commit gitHubCommit

	u := fmt.Sprintf(g.commitsAPITemplate, ref)

	req, err := http.NewRequest("GET", u, nil)
	if err != nil {
		return commit, err
	}

	err = doGitHubRequest(req, &commit)

	return commit, err
}
+
// fetchRepo fetches the repository metadata plus the full contributor list,
// paging through the contributors endpoint until an empty page is returned.
func (g *gitHubAPI) fetchRepo() (gitHubRepo, error) {
	var repo gitHubRepo

	req, err := http.NewRequest("GET", g.repoAPI, nil)
	if err != nil {
		return repo, err
	}

	err = doGitHubRequest(req, &repo)
	if err != nil {
		return repo, err
	}

	var contributors []gitHubContributor
	page := 0
	for {
		page++
		var currPage []gitHubContributor
		url := fmt.Sprintf(g.contributorsAPITemplate+"?page=%d", page)

		req, err = http.NewRequest("GET", url, nil)
		if err != nil {
			return repo, err
		}

		err = doGitHubRequest(req, &currPage)
		if err != nil {
			return repo, err
		}
		// An empty page means we have paged past the last contributor.
		if len(currPage) == 0 {
			break
		}

		contributors = append(contributors, currPage...)

	}

	repo.Contributors = contributors

	return repo, err
}
+
+func doGitHubRequest(req *http.Request, v any) error {
+ addGitHubToken(req)
+
+ resp, err := http.DefaultClient.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+
+ if isError(resp) {
+ b, _ := ioutil.ReadAll(resp.Body)
+ return fmt.Errorf("GitHub lookup failed: %s", string(b))
+ }
+
+ return json.NewDecoder(resp.Body).Decode(v)
+}
+
+func isError(resp *http.Response) bool {
+ return resp.StatusCode < 200 || resp.StatusCode > 299
+}
+
+func addGitHubToken(req *http.Request) {
+ gitHubToken := os.Getenv("GITHUB_TOKEN")
+ if gitHubToken != "" {
+ req.Header.Add("Authorization", "token "+gitHubToken)
+ }
+}
diff --git a/releaser/github_test.go b/releaser/github_test.go
new file mode 100644
index 000000000..23331bf38
--- /dev/null
+++ b/releaser/github_test.go
@@ -0,0 +1,46 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package releaser
+
+import (
+ "fmt"
+ "os"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestGitHubLookupCommit hits the live GitHub API; it only runs when a
// GITHUB_TOKEN is configured.
func TestGitHubLookupCommit(t *testing.T) {
	skipIfNoToken(t)
	c := qt.New(t)
	client := newGitHubAPI("hugo")
	commit, err := client.fetchCommit("793554108763c0984f1a1b1a6ee5744b560d78d0")
	c.Assert(err, qt.IsNil)
	fmt.Println(commit)
}
+
// TestFetchRepo hits the live GitHub API (including contributor paging);
// it only runs when a GITHUB_TOKEN is configured.
func TestFetchRepo(t *testing.T) {
	skipIfNoToken(t)
	c := qt.New(t)
	client := newGitHubAPI("hugo")
	repo, err := client.fetchRepo()
	c.Assert(err, qt.IsNil)
	fmt.Println(">>", len(repo.Contributors))
}
+
// skipIfNoToken skips tests that require authenticated GitHub API access.
func skipIfNoToken(t *testing.T) {
	if os.Getenv("GITHUB_TOKEN") == "" {
		t.Skip("Skip test against GitHub as no GITHUB_TOKEN set.")
	}
}
diff --git a/releaser/releasenotes_writer.go b/releaser/releasenotes_writer.go
new file mode 100644
index 000000000..5c50e4de4
--- /dev/null
+++ b/releaser/releasenotes_writer.go
@@ -0,0 +1,191 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package releaser implements a set of utilities and a wrapper around Goreleaser
+// to help automate the Hugo release process.
+package releaser
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "path/filepath"
+ "strings"
+ "text/template"
+)
+
const (
	// issueLinkTemplate renders an issue reference, e.g. "#123".
	issueLinkTemplate = "#%d"
	// linkTemplate renders a Markdown link: [text](url).
	linkTemplate = "[%s](%s)"
	// releaseNotesMarkdownTemplatePatchRelease is the short template used
	// for patch releases: just the list of fixes.
	releaseNotesMarkdownTemplatePatchRelease = `
{{ if eq (len .All) 1 }}
This is a bug-fix release with one important fix.
{{ else }}
This is a bug-fix release with a couple of important fixes.
{{ end }}
{{ range .All }}
{{- if .GitHubCommit -}}
* {{ .Subject }} {{ .Hash }} {{ . | author }} {{ range .Issues }}{{ . | issue }} {{ end }}
{{ else -}}
* {{ .Subject }} {{ range .Issues }}{{ . | issue }} {{ end }}
{{ end -}}
{{- end }}


`
	// releaseNotesMarkdownTemplate is the full template used for main
	// releases: contributor shout-outs, repo stats, notes and changes.
	// It is rendered with a *changeLog and the funcs in templateFuncs.
	releaseNotesMarkdownTemplate = `
{{- $contribsPerAuthor := .All.ContribCountPerAuthor -}}
{{- $docsContribsPerAuthor := .Docs.ContribCountPerAuthor -}}

This release represents **{{ len .All }} contributions by {{ len $contribsPerAuthor }} contributors** to the main Hugo code base.

{{- if gt (len $contribsPerAuthor) 3 -}}
{{- $u1 := index $contribsPerAuthor 0 -}}
{{- $u2 := index $contribsPerAuthor 1 -}}
{{- $u3 := index $contribsPerAuthor 2 -}}
{{- $u4 := index $contribsPerAuthor 3 -}}
{{- $u1.AuthorLink }} leads the Hugo development with a significant amount of contributions, but also a big shoutout to {{ $u2.AuthorLink }}, {{ $u3.AuthorLink }}, and {{ $u4.AuthorLink }} for their ongoing contributions.
And thanks to [@digitalcraftsman](https://github.com/digitalcraftsman) for his ongoing work on keeping the themes site in pristine condition.
{{ end }}
Many have also been busy writing and fixing the documentation in [hugoDocs](https://github.com/gohugoio/hugoDocs),
which has received **{{ len .Docs }} contributions by {{ len $docsContribsPerAuthor }} contributors**.
{{- if gt (len $docsContribsPerAuthor) 3 -}}
{{- $u1 := index $docsContribsPerAuthor 0 -}}
{{- $u2 := index $docsContribsPerAuthor 1 -}}
{{- $u3 := index $docsContribsPerAuthor 2 -}}
{{- $u4 := index $docsContribsPerAuthor 3 }} A special thanks to {{ $u1.AuthorLink }}, {{ $u2.AuthorLink }}, {{ $u3.AuthorLink }}, and {{ $u4.AuthorLink }} for their work on the documentation site.
{{ end }}

Hugo now has:

{{ with .Repo -}}
* {{ .Stars }}+ [stars](https://github.com/gohugoio/hugo/stargazers)
* {{ len .Contributors }}+ [contributors](https://github.com/gohugoio/hugo/graphs/contributors)
{{- end -}}
{{ with .ThemeCount }}
* {{ . }}+ [themes](http://themes.gohugo.io/)
{{ end }}
{{ with .Notes }}
## Notes
{{ template "change-section" . }}
{{- end -}}
{{ with .All }}
## Changes
{{ template "change-section" . }}
{{ end }}

{{ define "change-section" }}
{{ range . }}
{{- if .GitHubCommit -}}
* {{ .Subject }} {{ .Hash }} {{ . | author }} {{ range .Issues }}{{ . | issue }} {{ end }}
{{ else -}}
* {{ .Subject }} {{ range .Issues }}{{ . | issue }} {{ end }}
{{ end -}}
{{- end }}
{{ end }}
`
)
+
+var templateFuncs = template.FuncMap{
+ "isPatch": func(c changeLog) bool {
+ return !strings.HasSuffix(c.Version, "0")
+ },
+ "issue": func(id int) string {
+ return fmt.Sprintf(issueLinkTemplate, id)
+ },
+ "commitURL": func(info gitInfo) string {
+ if info.GitHubCommit.HTMLURL == "" {
+ return ""
+ }
+ return fmt.Sprintf(linkTemplate, info.Hash, info.GitHubCommit.HTMLURL)
+ },
+ "author": func(info gitInfo) string {
+ return "@" + info.GitHubCommit.Author.Login
+ },
+}
+
+func writeReleaseNotes(version string, infosMain, infosDocs gitInfos, to io.Writer) error {
+ client := newGitHubAPI("hugo")
+ changes := newChangeLog(infosMain, infosDocs)
+ changes.Version = version
+ repo, err := client.fetchRepo()
+ if err == nil {
+ changes.Repo = &repo
+ }
+ themeCount, err := fetchThemeCount()
+ if err == nil {
+ changes.ThemeCount = themeCount
+ }
+
+ mtempl := releaseNotesMarkdownTemplate
+
+ if !strings.HasSuffix(version, "0") {
+ mtempl = releaseNotesMarkdownTemplatePatchRelease
+ }
+
+ tmpl, err := template.New("").Funcs(templateFuncs).Parse(mtempl)
+ if err != nil {
+ return err
+ }
+
+ err = tmpl.Execute(to, changes)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func fetchThemeCount() (int, error) {
+ resp, err := http.Get("https://raw.githubusercontent.com/gohugoio/hugoThemesSiteBuilder/main/themes.txt")
+ if err != nil {
+ return 0, err
+ }
+ defer resp.Body.Close()
+
+ b, _ := ioutil.ReadAll(resp.Body)
+ return bytes.Count(b, []byte("\n")) - bytes.Count(b, []byte("#")), nil
+}
+
+func getReleaseNotesFilename(version string) string {
+ return filepath.FromSlash(fmt.Sprintf("temp/%s-relnotes-ready.md", version))
+}
+
// writeReleaseNotesToTemp renders the release notes for version and writes
// them to the file named by getReleaseNotesFilename. In dry-run mode the
// notes go to stdout instead, but the filename is still returned.
// NOTE(review): isPatch is currently unused here; the template choice
// happens inside writeReleaseNotes.
func (r *ReleaseHandler) writeReleaseNotesToTemp(version string, isPatch bool, infosMain, infosDocs gitInfos) (string, error) {
	filename := getReleaseNotesFilename(version)

	var w io.WriteCloser

	if !r.try {
		f, err := os.Create(filename)
		if err != nil {
			return "", err
		}

		defer f.Close()

		w = f

	} else {
		w = os.Stdout
	}

	if err := writeReleaseNotes(version, infosMain, infosDocs, w); err != nil {
		return "", err
	}

	return filename, nil
}
diff --git a/releaser/releasenotes_writer_test.go b/releaser/releasenotes_writer_test.go
new file mode 100644
index 000000000..7dcd0ccaa
--- /dev/null
+++ b/releaser/releasenotes_writer_test.go
@@ -0,0 +1,46 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package releaser implements a set of utilities and a wrapper around Goreleaser
+// to help automate the Hugo release process.
+
+package releaser
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// _TestReleaseNotesWriter is deliberately disabled (the leading underscore
// keeps `go test` from picking it up); it renders real release notes
// against the live GitHub API for manual inspection.
func _TestReleaseNotesWriter(t *testing.T) {
	skipIfNoToken(t)
	if os.Getenv("CI") != "" {
		// Travis has an ancient git with no --invert-grep: https://github.com/travis-ci/travis-ci/issues/6328
		t.Skip("Skip git test on CI to make Travis happy..")
	}

	c := qt.New(t)

	var b bytes.Buffer

	// TODO(bep) consider to query GitHub directly for the gitlog with author info, probably faster.
	infos, err := getGitInfosBefore("HEAD", "v0.89.0", "hugo", "", false)
	c.Assert(err, qt.IsNil)

	c.Assert(writeReleaseNotes("0.89.0", infos, infos, &b), qt.IsNil)

	fmt.Println(b.String())
}
diff --git a/releaser/releaser.go b/releaser/releaser.go
new file mode 100644
index 000000000..ebc344e98
--- /dev/null
+++ b/releaser/releaser.go
@@ -0,0 +1,304 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package releaser implements a set of utilities and a wrapper around Goreleaser
+// to help automate the Hugo release process.
+package releaser
+
+import (
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hexec"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/hugo"
+)
+
// commitPrefix is prepended to every commit message created by the releaser.
const commitPrefix = "releaser:"

// ReleaseHandler provides functionality to release a new version of Hugo.
// Test this locally without doing an actual release:
// go run -tags release main.go release --skip-publish --try -r 0.90.0
// Or a variation of the above -- the skip-publish flag makes sure that any changes are performed to the local Git only.
type ReleaseHandler struct {
	// The version to release, as given on the command line.
	cliVersion string

	// Skip pushing and publishing; changes stay in the local Git only.
	skipPublish bool

	// Just simulate, no actual changes.
	try bool

	// git runs a git command; in dry-run mode it is replaced with a stub
	// that only prints the command.
	git func(args ...string) (string, error)
}
+
// calculateVersions derives the version to release and the next development
// version (next x.y.0 with a -DEV suffix) from the CLI-provided version.
func (r ReleaseHandler) calculateVersions() (hugo.Version, hugo.Version) {
	newVersion := hugo.MustParseVersion(r.cliVersion)
	finalVersion := newVersion.Next()
	finalVersion.PatchLevel = 0

	// A "-test" suffix is kept for test releases; any other suffix is stripped.
	if newVersion.Suffix != "-test" {
		newVersion.Suffix = ""
	}

	finalVersion.Suffix = "-DEV"

	return newVersion, finalVersion
}
+
+// New initialises a ReleaseHandler.
+func New(version string, skipPublish, try bool) *ReleaseHandler {
+ // When triggered from CI release branch
+ version = strings.TrimPrefix(version, "release-")
+ version = strings.TrimPrefix(version, "v")
+ rh := &ReleaseHandler{cliVersion: version, skipPublish: skipPublish, try: try}
+
+ if try {
+ rh.git = func(args ...string) (string, error) {
+ fmt.Println("git", strings.Join(args, " "))
+ return "", nil
+ }
+ } else {
+ rh.git = git
+ }
+
+ return rh
+}
+
// Run creates a new release: it renders the release notes, commits them,
// bumps the embedded versions, tags the release, runs goreleaser and then
// prepares the repository for the next development cycle. The final push
// happens in the deferred gitPush.
func (r *ReleaseHandler) Run() error {
	if os.Getenv("GITHUB_TOKEN") == "" {
		return errors.New("GITHUB_TOKEN not set, create one here with the repo scope selected: https://github.com/settings/tokens/new")
	}

	fmt.Printf("Start release from %q\n", wd())

	newVersion, finalVersion := r.calculateVersions()

	version := newVersion.String()
	tag := "v" + version
	isPatch := newVersion.PatchLevel > 0
	mainVersion := newVersion
	mainVersion.PatchLevel = 0

	// Exit early if tag already exists
	exists, err := tagExists(tag)
	if err != nil {
		return err
	}

	if exists {
		return fmt.Errorf("tag %q already exists", tag)
	}

	var changeLogFromTag string

	if newVersion.PatchLevel == 0 {
		// There may have been patch releases between, so set the tag explicitly.
		changeLogFromTag = "v" + newVersion.Prev().String()
		exists, _ := tagExists(changeLogFromTag)
		if !exists {
			// fall back to one that exists.
			changeLogFromTag = ""
		}
	}

	var (
		gitCommits     gitInfos
		gitCommitsDocs gitInfos
	)

	defer r.gitPush() // TODO(bep)

	// Collect the commit logs for the main repo and for hugoDocs (the
	// latter is expected to be checked out next to this repo).
	gitCommits, err = getGitInfos(changeLogFromTag, "hugo", "", !r.try)
	if err != nil {
		return err
	}

	// TODO(bep) explicit tag?
	gitCommitsDocs, err = getGitInfos("", "hugoDocs", "../hugoDocs", !r.try)
	if err != nil {
		return err
	}

	releaseNotesFile, err := r.writeReleaseNotesToTemp(version, isPatch, gitCommits, gitCommitsDocs)
	if err != nil {
		return err
	}

	// Commit the rendered release notes.
	if _, err := r.git("add", releaseNotesFile); err != nil {
		return err
	}

	commitMsg := fmt.Sprintf("%s Add release notes for %s", commitPrefix, newVersion)
	commitMsg += "\n[ci skip]"

	if _, err := r.git("commit", "-m", commitMsg); err != nil {
		return err
	}

	// Write the release version into the source tree and tag it.
	if err := r.bumpVersions(newVersion); err != nil {
		return err
	}

	if _, err := r.git("commit", "-a", "-m", fmt.Sprintf("%s Bump versions for release of %s\n\n[ci skip]", commitPrefix, newVersion)); err != nil {
		return err
	}

	if _, err := r.git("tag", "-a", tag, "-m", fmt.Sprintf("%s %s\n\n[ci skip]", commitPrefix, newVersion)); err != nil {
		return err
	}

	if !r.skipPublish {
		if _, err := r.git("push", "origin", tag); err != nil {
			return err
		}
	}

	// Build and (unless skipped) publish the release artifacts.
	if err := r.release(releaseNotesFile); err != nil {
		return err
	}

	// Move the tree back to a development version.
	if err := r.bumpVersions(finalVersion); err != nil {
		return err
	}

	if !r.try {
		// No longer needed.
		if err := os.Remove(releaseNotesFile); err != nil {
			return err
		}
	}

	if _, err := r.git("commit", "-a", "-m", fmt.Sprintf("%s Prepare repository for %s\n\n[ci skip]", commitPrefix, finalVersion)); err != nil {
		return err
	}

	return nil
}
+
// gitPush pushes the current branch to origin unless publishing is skipped.
// A push failure is fatal since the release commits would otherwise be lost.
func (r *ReleaseHandler) gitPush() {
	if r.skipPublish {
		return
	}
	if _, err := r.git("push", "origin", "HEAD"); err != nil {
		log.Fatal("push failed:", err)
	}
}
+
// release invokes goreleaser to build and (unless publishing is skipped)
// publish the artifacts, using the pre-rendered release notes file.
// In dry-run mode goreleaser is skipped entirely.
func (r *ReleaseHandler) release(releaseNotesFile string) error {
	if r.try {
		fmt.Println("Skip goreleaser...")
		return nil
	}

	args := []string{"--parallelism", "2", "--timeout", "120m", "--rm-dist", "--release-notes", releaseNotesFile}
	if r.skipPublish {
		args = append(args, "--skip-publish")
	}

	cmd, _ := hexec.SafeCommand("goreleaser", args...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	err := cmd.Run()
	if err != nil {
		return fmt.Errorf("goreleaser failed: %w", err)
	}
	return nil
}
+
+func (r *ReleaseHandler) bumpVersions(ver hugo.Version) error {
+ toDev := ""
+
+ if ver.Suffix != "" {
+ toDev = ver.Suffix
+ }
+
+ if err := r.replaceInFile("common/hugo/version_current.go",
+ `Minor:(\s*)(\d*),`, fmt.Sprintf(`Minor:${1}%d,`, ver.Minor),
+ `PatchLevel:(\s*)(\d*),`, fmt.Sprintf(`PatchLevel:${1}%d,`, ver.PatchLevel),
+ `Suffix:(\s*)".*",`, fmt.Sprintf(`Suffix:${1}"%s",`, toDev)); err != nil {
+ return err
+ }
+
+ snapcraftGrade := "stable"
+ if ver.Suffix != "" {
+ snapcraftGrade = "devel"
+ }
+ if err := r.replaceInFile("snap/snapcraft.yaml",
+ `version: "(.*)"`, fmt.Sprintf(`version: "%s"`, ver),
+ `grade: (.*) #`, fmt.Sprintf(`grade: %s #`, snapcraftGrade)); err != nil {
+ return err
+ }
+
+ var minVersion string
+ if ver.Suffix != "" {
+ // People use the DEV version in daily use, and we cannot create new themes
+ // with the next version before it is released.
+ minVersion = ver.Prev().String()
+ } else {
+ minVersion = ver.String()
+ }
+
+ if err := r.replaceInFile("commands/new.go",
+ `min_version = "(.*)"`, fmt.Sprintf(`min_version = "%s"`, minVersion)); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (r *ReleaseHandler) replaceInFile(filename string, oldNew ...string) error {
+ filename = filepath.FromSlash(filename)
+ fi, err := os.Stat(filename)
+ if err != nil {
+ return err
+ }
+
+ if r.try {
+ fmt.Printf("Replace in %q: %q\n", filename, oldNew)
+ return nil
+ }
+
+ b, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return err
+ }
+ newContent := string(b)
+
+ for i := 0; i < len(oldNew); i += 2 {
+ re := regexp.MustCompile(oldNew[i])
+ newContent = re.ReplaceAllString(newContent, oldNew[i+1])
+ }
+
+ return ioutil.WriteFile(filename, []byte(newContent), fi.Mode())
+}
+
+func isCI() bool {
+ return os.Getenv("CI") != ""
+}
+
// wd returns the current working directory, exiting the process on failure.
func wd() string {
	p, err := os.Getwd()
	if err != nil {
		log.Fatal(err)
	}
	return p

}
diff --git a/resources/errorResource.go b/resources/errorResource.go
new file mode 100644
index 000000000..81375cc48
--- /dev/null
+++ b/resources/errorResource.go
@@ -0,0 +1,132 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "image"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/images"
+ "github.com/gohugoio/hugo/resources/images/exif"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
// Compile-time checks that errorResource fails loudly wherever a Resource
// can be used.
var (
	_ error = (*errorResource)(nil)
	// ImageResource covers all current Resource implementations.
	_ images.ImageResource = (*errorResource)(nil)
	// The list of user facing and exported interfaces in resource.go
	// Note that if we're missing some interface here, the user will still
	// get an error, but not as pretty.
	_ resource.ContentResource        = (*errorResource)(nil)
	_ resource.ReadSeekCloserResource = (*errorResource)(nil)
	_ resource.ResourcesLanguageMerger = (*resource.Resources)(nil)
	// Make sure it also fails when passed to a pipe function.
	_ ResourceTransformer = (*errorResource)(nil)
)
+
// NewErrorResource wraps err in a Resource where all but the Err method will panic.
func NewErrorResource(err resource.ResourceError) resource.Resource {
	return &errorResource{ResourceError: err}
}

// errorResource embeds the error; every interface method except Err panics
// with it (see the method implementations below).
type errorResource struct {
	resource.ResourceError
}

// Err returns the wrapped resource error.
func (e *errorResource) Err() resource.ResourceError {
	return e.ResourceError
}
+
// The methods below satisfy the full Resource/ImageResource surface. Each
// panics with the wrapped ResourceError so that any use of a failed
// resource other than inspecting Err() surfaces the underlying error.

func (e *errorResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Content() (any, error) {
	panic(e.ResourceError)
}

func (e *errorResource) ResourceType() string {
	panic(e.ResourceError)
}

func (e *errorResource) MediaType() media.Type {
	panic(e.ResourceError)
}

func (e *errorResource) Permalink() string {
	panic(e.ResourceError)
}

func (e *errorResource) RelPermalink() string {
	panic(e.ResourceError)
}

func (e *errorResource) Name() string {
	panic(e.ResourceError)
}

func (e *errorResource) Title() string {
	panic(e.ResourceError)
}

func (e *errorResource) Params() maps.Params {
	panic(e.ResourceError)
}

func (e *errorResource) Data() any {
	panic(e.ResourceError)
}

func (e *errorResource) Height() int {
	panic(e.ResourceError)
}

func (e *errorResource) Width() int {
	panic(e.ResourceError)
}

func (e *errorResource) Crop(spec string) (images.ImageResource, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Fill(spec string) (images.ImageResource, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Fit(spec string) (images.ImageResource, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Resize(spec string) (images.ImageResource, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Filter(filters ...any) (images.ImageResource, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Exif() *exif.ExifInfo {
	panic(e.ResourceError)
}

func (e *errorResource) DecodeImage() (image.Image, error) {
	panic(e.ResourceError)
}

func (e *errorResource) Transform(...ResourceTransformation) (ResourceTransformer, error) {
	panic(e.ResourceError)
}
diff --git a/resources/image.go b/resources/image.go
new file mode 100644
index 000000000..8551cc2ab
--- /dev/null
+++ b/resources/image.go
@@ -0,0 +1,452 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "encoding/json"
+ "fmt"
+ "image"
+ "image/color"
+ "image/draw"
+ "image/gif"
+ _ "image/gif"
+ _ "image/png"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/disintegration/gift"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/resources/images/exif"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/images"
+
+ // Blind import for image.Decode
+ _ "golang.org/x/image/webp"
+)
+
+var (
+ _ images.ImageResource = (*imageResource)(nil)
+ _ resource.Source = (*imageResource)(nil)
+ _ resource.Cloner = (*imageResource)(nil)
+)
+
+// imageResource represents an image resource.
+type imageResource struct {
+ *images.Image
+
+ // When a image is processed in a chain, this holds the reference to the
+ // original (first).
+ root *imageResource
+
+ metaInit sync.Once
+ metaInitErr error
+ meta *imageMeta
+
+ baseResource
+}
+
+type imageMeta struct {
+ Exif *exif.ExifInfo
+}
+
+func (i *imageResource) Exif() *exif.ExifInfo {
+ return i.root.getExif()
+}
+
+func (i *imageResource) getExif() *exif.ExifInfo {
+ i.metaInit.Do(func() {
+ supportsExif := i.Format == images.JPEG || i.Format == images.TIFF
+ if !supportsExif {
+ return
+ }
+
+ key := i.getImageMetaCacheTargetPath()
+
+ read := func(info filecache.ItemInfo, r io.ReadSeeker) error {
+ meta := &imageMeta{}
+ data, err := ioutil.ReadAll(r)
+ if err != nil {
+ return err
+ }
+
+ if err = json.Unmarshal(data, &meta); err != nil {
+ return err
+ }
+
+ i.meta = meta
+
+ return nil
+ }
+
+ create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) {
+ f, err := i.root.ReadSeekCloser()
+ if err != nil {
+ i.metaInitErr = err
+ return
+ }
+ defer f.Close()
+
+ x, err := i.getSpec().imaging.DecodeExif(f)
+ if err != nil {
+ i.getSpec().Logger.Warnf("Unable to decode Exif metadata from image: %s", i.Key())
+ return nil
+ }
+
+ i.meta = &imageMeta{Exif: x}
+
+ // Also write it to cache
+ enc := json.NewEncoder(w)
+ return enc.Encode(i.meta)
+ }
+
+ _, i.metaInitErr = i.getSpec().imageCache.fileCache.ReadOrCreate(key, read, create)
+ })
+
+ if i.metaInitErr != nil {
+ panic(fmt.Sprintf("metadata init failed: %s", i.metaInitErr))
+ }
+
+ if i.meta == nil {
+ return nil
+ }
+
+ return i.meta.Exif
+}
+
+// Clone is for internal use.
+func (i *imageResource) Clone() resource.Resource {
+ gr := i.baseResource.Clone().(baseResource)
+ return &imageResource{
+ root: i.root,
+ Image: i.WithSpec(gr),
+ baseResource: gr,
+ }
+}
+
+func (i *imageResource) cloneTo(targetPath string) resource.Resource {
+ gr := i.baseResource.cloneTo(targetPath).(baseResource)
+ return &imageResource{
+ root: i.root,
+ Image: i.WithSpec(gr),
+ baseResource: gr,
+ }
+}
+
+func (i *imageResource) cloneWithUpdates(u *transformationUpdate) (baseResource, error) {
+ base, err := i.baseResource.cloneWithUpdates(u)
+ if err != nil {
+ return nil, err
+ }
+
+ var img *images.Image
+
+ if u.isContentChanged() {
+ img = i.WithSpec(base)
+ } else {
+ img = i.Image
+ }
+
+ return &imageResource{
+ root: i.root,
+ Image: img,
+ baseResource: base,
+ }, nil
+}
+
+// Resize resizes the image to the specified width and height using the specified resampling
+// filter and returns the transformed image. If one of width or height is 0, the image aspect
+// ratio is preserved.
+func (i *imageResource) Resize(spec string) (images.ImageResource, error) {
+ conf, err := i.decodeImageConfig("resize", spec)
+ if err != nil {
+ return nil, err
+ }
+
+ return i.doWithImageConfig(conf, func(src image.Image) (image.Image, error) {
+ return i.Proc.ApplyFiltersFromConfig(src, conf)
+ })
+}
+
+// Crop the image to the specified dimensions without resizing using the given anchor point.
+// Space delimited config, e.g. `200x300 TopLeft`.
+func (i *imageResource) Crop(spec string) (images.ImageResource, error) {
+ conf, err := i.decodeImageConfig("crop", spec)
+ if err != nil {
+ return nil, err
+ }
+
+ return i.doWithImageConfig(conf, func(src image.Image) (image.Image, error) {
+ return i.Proc.ApplyFiltersFromConfig(src, conf)
+ })
+}
+
+// Fit scales down the image using the specified resample filter to fit the specified
+// maximum width and height.
+func (i *imageResource) Fit(spec string) (images.ImageResource, error) {
+ conf, err := i.decodeImageConfig("fit", spec)
+ if err != nil {
+ return nil, err
+ }
+
+ return i.doWithImageConfig(conf, func(src image.Image) (image.Image, error) {
+ return i.Proc.ApplyFiltersFromConfig(src, conf)
+ })
+}
+
+// Fill scales the image to the smallest possible size that will cover the specified dimensions,
+// crops the resized image to the specified dimensions using the given anchor point.
+// Space delimited config, e.g. `200x300 TopLeft`.
+func (i *imageResource) Fill(spec string) (images.ImageResource, error) {
+ conf, err := i.decodeImageConfig("fill", spec)
+ if err != nil {
+ return nil, err
+ }
+
+ img, err := i.doWithImageConfig(conf, func(src image.Image) (image.Image, error) {
+ return i.Proc.ApplyFiltersFromConfig(src, conf)
+ })
+
+ if err != nil {
+ return nil, err
+ }
+
+ if conf.Anchor == 0 && img.Width() == 0 || img.Height() == 0 {
+ // See https://github.com/gohugoio/hugo/issues/7955
+ // Smartcrop fails silently in some rare cases.
+ // Fall back to a center fill.
+ conf.Anchor = gift.CenterAnchor
+ conf.AnchorStr = "center"
+ return i.doWithImageConfig(conf, func(src image.Image) (image.Image, error) {
+ return i.Proc.ApplyFiltersFromConfig(src, conf)
+ })
+ }
+
+ return img, err
+}
+
+func (i *imageResource) Filter(filters ...any) (images.ImageResource, error) {
+ conf := images.GetDefaultImageConfig("filter", i.Proc.Cfg)
+
+ var gfilters []gift.Filter
+
+ for _, f := range filters {
+ gfilters = append(gfilters, images.ToFilters(f)...)
+ }
+
+ conf.Key = helpers.HashString(gfilters)
+ conf.TargetFormat = i.Format
+
+ return i.doWithImageConfig(conf, func(src image.Image) (image.Image, error) {
+ return i.Proc.Filter(src, gfilters...)
+ })
+}
+
+// Serialize image processing. The imaging library spins up its own set of Go routines,
+// so there is not much to gain from adding more load to the mix. That
+// can even have negative effect in low resource scenarios.
+// Note that this only effects the non-cached scenario. Once the processed
+// image is written to disk, everything is fast, fast fast.
+const imageProcWorkers = 1
+
+var imageProcSem = make(chan bool, imageProcWorkers)
+
+func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src image.Image) (image.Image, error)) (images.ImageResource, error) {
+ img, err := i.getSpec().imageCache.getOrCreate(i, conf, func() (*imageResource, image.Image, error) {
+ imageProcSem <- true
+ defer func() {
+ <-imageProcSem
+ }()
+
+ errOp := conf.Action
+ errPath := i.getSourceFilename()
+
+ src, err := i.DecodeImage()
+ if err != nil {
+ return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err}
+ }
+
+ converted, err := f(src)
+ if err != nil {
+ return nil, nil, &os.PathError{Op: errOp, Path: errPath, Err: err}
+ }
+
+ hasAlpha := !images.IsOpaque(converted)
+ shouldFill := conf.BgColor != nil && hasAlpha
+ shouldFill = shouldFill || (!conf.TargetFormat.SupportsTransparency() && hasAlpha)
+ var bgColor color.Color
+
+ if shouldFill {
+ bgColor = conf.BgColor
+ if bgColor == nil {
+ bgColor = i.Proc.Cfg.BgColor
+ }
+ tmp := image.NewRGBA(converted.Bounds())
+ draw.Draw(tmp, tmp.Bounds(), image.NewUniform(bgColor), image.Point{}, draw.Src)
+ draw.Draw(tmp, tmp.Bounds(), converted, converted.Bounds().Min, draw.Over)
+ converted = tmp
+ }
+
+ if conf.TargetFormat == images.PNG {
+ // Apply the colour palette from the source
+ if paletted, ok := src.(*image.Paletted); ok {
+ palette := paletted.Palette
+ if bgColor != nil && len(palette) < 256 {
+ palette = images.AddColorToPalette(bgColor, palette)
+ } else if bgColor != nil {
+ images.ReplaceColorInPalette(bgColor, palette)
+ }
+ tmp := image.NewPaletted(converted.Bounds(), palette)
+ draw.FloydSteinberg.Draw(tmp, tmp.Bounds(), converted, converted.Bounds().Min)
+ converted = tmp
+ }
+ }
+
+ ci := i.clone(converted)
+ ci.setBasePath(conf)
+ ci.Format = conf.TargetFormat
+ ci.setMediaType(conf.TargetFormat.MediaType())
+
+ return ci, converted, nil
+ })
+ if err != nil {
+ if i.root != nil && i.root.getFileInfo() != nil {
+ return nil, fmt.Errorf("image %q: %w", i.root.getFileInfo().Meta().Filename, err)
+ }
+ }
+ return img, nil
+}
+
+func (i *imageResource) decodeImageConfig(action, spec string) (images.ImageConfig, error) {
+ conf, err := images.DecodeImageConfig(action, spec, i.Proc.Cfg, i.Format)
+ if err != nil {
+ return conf, err
+ }
+
+ return conf, nil
+}
+
+type giphy struct {
+ image.Image
+ gif *gif.GIF
+}
+
+func (g *giphy) GIF() *gif.GIF {
+ return g.gif
+}
+
+// DecodeImage decodes the image source into an Image.
+// This an internal method and may change.
+func (i *imageResource) DecodeImage() (image.Image, error) {
+ f, err := i.ReadSeekCloser()
+ if err != nil {
+ return nil, fmt.Errorf("failed to open image for decode: %w", err)
+ }
+ defer f.Close()
+
+ if i.Format == images.GIF {
+ g, err := gif.DecodeAll(f)
+ if err != nil {
+ return nil, fmt.Errorf("failed to decode gif: %w", err)
+ }
+ return &giphy{gif: g, Image: g.Image[0]}, nil
+ }
+ img, _, err := image.Decode(f)
+ return img, err
+}
+
+func (i *imageResource) clone(img image.Image) *imageResource {
+ spec := i.baseResource.Clone().(baseResource)
+
+ var image *images.Image
+ if img != nil {
+ image = i.WithImage(img)
+ } else {
+ image = i.WithSpec(spec)
+ }
+
+ return &imageResource{
+ Image: image,
+ root: i.root,
+ baseResource: spec,
+ }
+}
+
+func (i *imageResource) setBasePath(conf images.ImageConfig) {
+ i.getResourcePaths().relTargetDirFile = i.relTargetPathFromConfig(conf)
+}
+
+func (i *imageResource) getImageMetaCacheTargetPath() string {
+ const imageMetaVersionNumber = 1 // Increment to invalidate the meta cache
+
+ cfgHash := i.getSpec().imaging.Cfg.CfgHash
+ df := i.getResourcePaths().relTargetDirFile
+ if fi := i.getFileInfo(); fi != nil {
+ df.dir = filepath.Dir(fi.Meta().Path)
+ }
+ p1, _ := paths.FileAndExt(df.file)
+ h, _ := i.hash()
+ idStr := helpers.HashString(h, i.size(), imageMetaVersionNumber, cfgHash)
+ p := path.Join(df.dir, fmt.Sprintf("%s_%s.json", p1, idStr))
+ return p
+}
+
+func (i *imageResource) relTargetPathFromConfig(conf images.ImageConfig) dirFile {
+ p1, p2 := paths.FileAndExt(i.getResourcePaths().relTargetDirFile.file)
+ if conf.TargetFormat != i.Format {
+ p2 = conf.TargetFormat.DefaultExtension()
+ }
+
+ h, _ := i.hash()
+ idStr := fmt.Sprintf("_hu%s_%d", h, i.size())
+
+ // Do not change for no good reason.
+ const md5Threshold = 100
+
+ key := conf.GetKey(i.Format)
+
+ // It is useful to have the key in clear text, but when nesting transforms, it
+ // can easily be too long to read, and maybe even too long
+ // for the different OSes to handle.
+ if len(p1)+len(idStr)+len(p2) > md5Threshold {
+ key = helpers.MD5String(p1 + key + p2)
+ huIdx := strings.Index(p1, "_hu")
+ if huIdx != -1 {
+ p1 = p1[:huIdx]
+ } else {
+ // This started out as a very long file name. Making it even longer
+ // could melt ice in the Arctic.
+ p1 = ""
+ }
+ } else if strings.Contains(p1, idStr) {
+ // On scaling an already scaled image, we get the file info from the original.
+ // Repeating the same info in the filename makes it stuttery for no good reason.
+ idStr = ""
+ }
+
+ return dirFile{
+ dir: i.getResourcePaths().relTargetDirFile.dir,
+ file: fmt.Sprintf("%s%s_%s%s", p1, idStr, key, p2),
+ }
+}
diff --git a/resources/image_cache.go b/resources/image_cache.go
new file mode 100644
index 000000000..ca651fd5c
--- /dev/null
+++ b/resources/image_cache.go
@@ -0,0 +1,168 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "image"
+ "io"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/resources/images"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/helpers"
+)
+
+type imageCache struct {
+ pathSpec *helpers.PathSpec
+
+ fileCache *filecache.Cache
+
+ mu sync.RWMutex
+ store map[string]*resourceAdapter
+}
+
+func (c *imageCache) deleteIfContains(s string) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ s = c.normalizeKeyBase(s)
+ for k := range c.store {
+ if strings.Contains(k, s) {
+ delete(c.store, k)
+ }
+ }
+}
+
+// The cache key is a lowercase path with Unix style slashes and it always starts with
+// a leading slash.
+func (c *imageCache) normalizeKey(key string) string {
+ return "/" + c.normalizeKeyBase(key)
+}
+
+func (c *imageCache) normalizeKeyBase(key string) string {
+ return strings.Trim(strings.ToLower(filepath.ToSlash(key)), "/")
+}
+
+func (c *imageCache) clear() {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ c.store = make(map[string]*resourceAdapter)
+}
+
+func (c *imageCache) getOrCreate(
+ parent *imageResource, conf images.ImageConfig,
+ createImage func() (*imageResource, image.Image, error)) (*resourceAdapter, error) {
+ relTarget := parent.relTargetPathFromConfig(conf)
+ memKey := parent.relTargetPathForRel(relTarget.path(), false, false, false)
+ memKey = c.normalizeKey(memKey)
+
+ // For the file cache we want to generate and store it once if possible.
+ fileKeyPath := relTarget
+ if fi := parent.root.getFileInfo(); fi != nil {
+ fileKeyPath.dir = filepath.ToSlash(filepath.Dir(fi.Meta().Path))
+ }
+ fileKey := fileKeyPath.path()
+
+ // First check the in-memory store, then the disk.
+ c.mu.RLock()
+ cachedImage, found := c.store[memKey]
+ c.mu.RUnlock()
+
+ if found {
+ return cachedImage, nil
+ }
+
+ var img *imageResource
+
+ // These funcs are protected by a named lock.
+ // read clones the parent to its new name and copies
+ // the content to the destinations.
+ read := func(info filecache.ItemInfo, r io.ReadSeeker) error {
+ img = parent.clone(nil)
+ rp := img.getResourcePaths()
+ rp.relTargetDirFile.file = relTarget.file
+ img.setSourceFilename(info.Name)
+ img.setMediaType(conf.TargetFormat.MediaType())
+
+ if err := img.InitConfig(r); err != nil {
+ return err
+ }
+
+ r.Seek(0, 0)
+
+ w, err := img.openDestinationsForWriting()
+ if err != nil {
+ return err
+ }
+
+ if w == nil {
+ // Nothing to write.
+ return nil
+ }
+
+ defer w.Close()
+ _, err = io.Copy(w, r)
+
+ return err
+ }
+
+ // create creates the image and encodes it to the cache (w).
+ create := func(info filecache.ItemInfo, w io.WriteCloser) (err error) {
+ defer w.Close()
+
+ var conv image.Image
+ img, conv, err = createImage()
+ if err != nil {
+ return
+ }
+ rp := img.getResourcePaths()
+ rp.relTargetDirFile.file = relTarget.file
+ img.setSourceFilename(info.Name)
+
+ return img.EncodeTo(conf, conv, w)
+ }
+
+ // Now look in the file cache.
+
+ // The definition of this counter is not that we have processed that amount
+ // (e.g. resized etc.), it can be fetched from file cache,
+ // but the count of processed image variations for this site.
+ c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)
+
+ _, err := c.fileCache.ReadOrCreate(fileKey, read, create)
+ if err != nil {
+ return nil, err
+ }
+
+ // The file is now stored in this cache.
+ img.setSourceFs(c.fileCache.Fs)
+
+ c.mu.Lock()
+ if cachedImage, found = c.store[memKey]; found {
+ c.mu.Unlock()
+ return cachedImage, nil
+ }
+
+ imgAdapter := newResourceAdapter(parent.getSpec(), true, img)
+ c.store[memKey] = imgAdapter
+ c.mu.Unlock()
+
+ return imgAdapter, nil
+}
+
+func newImageCache(fileCache *filecache.Cache, ps *helpers.PathSpec) *imageCache {
+ return &imageCache{fileCache: fileCache, pathSpec: ps, store: make(map[string]*resourceAdapter)}
+}
diff --git a/resources/image_extended_test.go b/resources/image_extended_test.go
new file mode 100644
index 000000000..a0b274f3e
--- /dev/null
+++ b/resources/image_extended_test.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build extended
+// +build extended
+
+package resources
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestImageResizeWebP(t *testing.T) {
+ c := qt.New(t)
+
+ image := fetchImage(c, "sunset.webp")
+
+ c.Assert(image.MediaType(), qt.Equals, media.WEBPType)
+ c.Assert(image.RelPermalink(), qt.Equals, "/a/sunset.webp")
+ c.Assert(image.ResourceType(), qt.Equals, "image")
+ c.Assert(image.Exif(), qt.IsNil)
+
+ resized, err := image.Resize("123x")
+ c.Assert(err, qt.IsNil)
+ c.Assert(image.MediaType(), qt.Equals, media.WEBPType)
+ c.Assert(resized.RelPermalink(), qt.Equals, "/a/sunset_hu36ee0b61ba924719ad36da960c273f96_59826_123x0_resize_q68_h2_linear_2.webp")
+ c.Assert(resized.Width(), qt.Equals, 123)
+}
diff --git a/resources/image_test.go b/resources/image_test.go
new file mode 100644
index 000000000..153a4e8c4
--- /dev/null
+++ b/resources/image_test.go
@@ -0,0 +1,843 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "fmt"
+ "image"
+ "image/gif"
+ "io/ioutil"
+ "math/big"
+ "math/rand"
+ "os"
+ "path"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/resources/images/webp"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/spf13/afero"
+
+ "github.com/disintegration/gift"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/images"
+ "github.com/google/go-cmp/cmp"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var eq = qt.CmpEquals(
+ cmp.Comparer(func(p1, p2 *resourceAdapter) bool {
+ return p1.resourceAdapterInner == p2.resourceAdapterInner
+ }),
+ cmp.Comparer(func(p1, p2 os.FileInfo) bool {
+ return p1.Name() == p2.Name() && p1.Size() == p2.Size() && p1.IsDir() == p2.IsDir()
+ }),
+ cmp.Comparer(func(p1, p2 *genericResource) bool { return p1 == p2 }),
+ cmp.Comparer(func(m1, m2 media.Type) bool {
+ return m1.Type() == m2.Type()
+ }),
+ cmp.Comparer(
+ func(v1, v2 *big.Rat) bool {
+ return v1.RatString() == v2.RatString()
+ },
+ ),
+ cmp.Comparer(func(v1, v2 time.Time) bool {
+ return v1.Unix() == v2.Unix()
+ }),
+)
+
+func TestImageTransformBasic(t *testing.T) {
+ c := qt.New(t)
+
+ image := fetchSunset(c)
+
+ fileCache := image.(specProvider).getSpec().FileCaches.ImageCache().Fs
+
+ assertWidthHeight := func(img images.ImageResource, w, h int) {
+ c.Helper()
+ c.Assert(img, qt.Not(qt.IsNil))
+ c.Assert(img.Width(), qt.Equals, w)
+ c.Assert(img.Height(), qt.Equals, h)
+ }
+
+ c.Assert(image.RelPermalink(), qt.Equals, "/a/sunset.jpg")
+ c.Assert(image.ResourceType(), qt.Equals, "image")
+ assertWidthHeight(image, 900, 562)
+
+ resized, err := image.Resize("300x200")
+ c.Assert(err, qt.IsNil)
+ c.Assert(image != resized, qt.Equals, true)
+ c.Assert(image, qt.Not(eq), resized)
+ assertWidthHeight(resized, 300, 200)
+ assertWidthHeight(image, 900, 562)
+
+ resized0x, err := image.Resize("x200")
+ c.Assert(err, qt.IsNil)
+ assertWidthHeight(resized0x, 320, 200)
+ assertFileCache(c, fileCache, path.Base(resized0x.RelPermalink()), 320, 200)
+
+ resizedx0, err := image.Resize("200x")
+ c.Assert(err, qt.IsNil)
+ assertWidthHeight(resizedx0, 200, 125)
+ assertFileCache(c, fileCache, path.Base(resizedx0.RelPermalink()), 200, 125)
+
+ resizedAndRotated, err := image.Resize("x200 r90")
+ c.Assert(err, qt.IsNil)
+ assertWidthHeight(resizedAndRotated, 125, 200)
+
+ assertWidthHeight(resized, 300, 200)
+ c.Assert(resized.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_resize_q68_linear.jpg")
+
+ fitted, err := resized.Fit("50x50")
+ c.Assert(err, qt.IsNil)
+ c.Assert(fitted.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_625708021e2bb281c9f1002f88e4753f.jpg")
+ assertWidthHeight(fitted, 50, 33)
+
+ // Check the MD5 key threshold
+ fittedAgain, _ := fitted.Fit("10x20")
+ fittedAgain, err = fittedAgain.Fit("10x20")
+ c.Assert(err, qt.IsNil)
+ c.Assert(fittedAgain.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f65ba24dc2b7fba0f56d7f104519157.jpg")
+ assertWidthHeight(fittedAgain, 10, 7)
+
+ filled, err := image.Fill("200x100 bottomLeft")
+ c.Assert(err, qt.IsNil)
+ c.Assert(filled.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_bottomleft.jpg")
+ assertWidthHeight(filled, 200, 100)
+
+ smart, err := image.Fill("200x100 smart")
+ c.Assert(err, qt.IsNil)
+ c.Assert(smart.RelPermalink(), qt.Equals, fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_smart%d.jpg", 1))
+ assertWidthHeight(smart, 200, 100)
+
+ // Check cache
+ filledAgain, err := image.Fill("200x100 bottomLeft")
+ c.Assert(err, qt.IsNil)
+ c.Assert(filled, eq, filledAgain)
+
+ cropped, err := image.Crop("300x300 topRight")
+ c.Assert(err, qt.IsNil)
+ c.Assert(cropped.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x300_crop_q68_linear_topright.jpg")
+ assertWidthHeight(cropped, 300, 300)
+
+ smartcropped, err := image.Crop("200x200 smart")
+ c.Assert(err, qt.IsNil)
+ c.Assert(smartcropped.RelPermalink(), qt.Equals, fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x200_crop_q68_linear_smart%d.jpg", 1))
+ assertWidthHeight(smartcropped, 200, 200)
+
+ // Check cache
+ croppedAgain, err := image.Crop("300x300 topRight")
+ c.Assert(err, qt.IsNil)
+ c.Assert(cropped, eq, croppedAgain)
+
+}
+
+func TestImageTransformFormat(t *testing.T) {
+ c := qt.New(t)
+
+ image := fetchSunset(c)
+
+ fileCache := image.(specProvider).getSpec().FileCaches.ImageCache().Fs
+
+ assertExtWidthHeight := func(img images.ImageResource, ext string, w, h int) {
+ c.Helper()
+ c.Assert(img, qt.Not(qt.IsNil))
+ c.Assert(paths.Ext(img.RelPermalink()), qt.Equals, ext)
+ c.Assert(img.Width(), qt.Equals, w)
+ c.Assert(img.Height(), qt.Equals, h)
+ }
+
+ c.Assert(image.RelPermalink(), qt.Equals, "/a/sunset.jpg")
+ c.Assert(image.ResourceType(), qt.Equals, "image")
+ assertExtWidthHeight(image, ".jpg", 900, 562)
+
+ imagePng, err := image.Resize("450x png")
+ c.Assert(err, qt.IsNil)
+ c.Assert(imagePng.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_450x0_resize_linear.png")
+ c.Assert(imagePng.ResourceType(), qt.Equals, "image")
+ assertExtWidthHeight(imagePng, ".png", 450, 281)
+ c.Assert(imagePng.Name(), qt.Equals, "sunset.jpg")
+ c.Assert(imagePng.MediaType().String(), qt.Equals, "image/png")
+
+ assertFileCache(c, fileCache, path.Base(imagePng.RelPermalink()), 450, 281)
+
+ imageGif, err := image.Resize("225x gif")
+ c.Assert(err, qt.IsNil)
+ c.Assert(imageGif.RelPermalink(), qt.Equals, "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_225x0_resize_linear.gif")
+ c.Assert(imageGif.ResourceType(), qt.Equals, "image")
+ assertExtWidthHeight(imageGif, ".gif", 225, 141)
+ c.Assert(imageGif.Name(), qt.Equals, "sunset.jpg")
+ c.Assert(imageGif.MediaType().String(), qt.Equals, "image/gif")
+
+ assertFileCache(c, fileCache, path.Base(imageGif.RelPermalink()), 225, 141)
+}
+
+// https://github.com/gohugoio/hugo/issues/5730
+func TestImagePermalinkPublishOrder(t *testing.T) {
+ for _, checkOriginalFirst := range []bool{true, false} {
+ name := "OriginalFirst"
+ if !checkOriginalFirst {
+ name = "ResizedFirst"
+ }
+
+ t.Run(name, func(t *testing.T) {
+ c := qt.New(t)
+ spec, workDir := newTestResourceOsFs(c)
+ defer func() {
+ os.Remove(workDir)
+ }()
+
+ check1 := func(img images.ImageResource) {
+ resizedLink := "/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x50_resize_q75_box.jpg"
+ c.Assert(img.RelPermalink(), qt.Equals, resizedLink)
+ assertImageFile(c, spec.PublishFs, resizedLink, 100, 50)
+ }
+
+ check2 := func(img images.ImageResource) {
+ c.Assert(img.RelPermalink(), qt.Equals, "/a/sunset.jpg")
+ assertImageFile(c, spec.PublishFs, "a/sunset.jpg", 900, 562)
+ }
+
+ orignal := fetchImageForSpec(spec, c, "sunset.jpg")
+ c.Assert(orignal, qt.Not(qt.IsNil))
+
+ if checkOriginalFirst {
+ check2(orignal)
+ }
+
+ resized, err := orignal.Resize("100x50")
+ c.Assert(err, qt.IsNil)
+
+ check1(resized.(images.ImageResource))
+
+ if !checkOriginalFirst {
+ check2(orignal)
+ }
+ })
+ }
+}
+
+func TestImageBugs(t *testing.T) {
+ c := qt.New(t)
+
+ // Issue #4261
+ c.Run("Transform long filename", func(c *qt.C) {
+ image := fetchImage(c, "1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg")
+ c.Assert(image, qt.Not(qt.IsNil))
+
+ resized, err := image.Resize("200x")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized, qt.Not(qt.IsNil))
+ c.Assert(resized.Width(), qt.Equals, 200)
+ c.Assert(resized.RelPermalink(), qt.Equals, "/a/_hu59e56ffff1bc1d8d122b1403d34e039f_90587_65b757a6e14debeae720fe8831f0a9bc.jpg")
+ resized, err = resized.Resize("100x")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized, qt.Not(qt.IsNil))
+ c.Assert(resized.Width(), qt.Equals, 100)
+ c.Assert(resized.RelPermalink(), qt.Equals, "/a/_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c876768085288f41211f768147ba2647.jpg")
+
+ })
+
+ // Issue #6137
+ c.Run("Transform upper case extension", func(c *qt.C) {
+ image := fetchImage(c, "sunrise.JPG")
+
+ resized, err := image.Resize("200x")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized, qt.Not(qt.IsNil))
+ c.Assert(resized.Width(), qt.Equals, 200)
+
+ })
+
+ // Issue #7955
+ c.Run("Fill with smartcrop", func(c *qt.C) {
+ sunset := fetchImage(c, "sunset.jpg")
+
+ for _, test := range []struct {
+ originalDimensions string
+ targetWH int
+ }{
+ {"408x403", 400},
+ {"425x403", 400},
+ {"459x429", 400},
+ {"476x442", 400},
+ {"544x403", 400},
+ {"476x468", 400},
+ {"578x585", 550},
+ {"578x598", 550},
+ } {
+ c.Run(test.originalDimensions, func(c *qt.C) {
+ image, err := sunset.Resize(test.originalDimensions)
+ c.Assert(err, qt.IsNil)
+ resized, err := image.Fill(fmt.Sprintf("%dx%d smart", test.targetWH, test.targetWH))
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized, qt.Not(qt.IsNil))
+ c.Assert(resized.Width(), qt.Equals, test.targetWH)
+ c.Assert(resized.Height(), qt.Equals, test.targetWH)
+ })
+
+ }
+
+ })
+}
+
+func TestImageTransformConcurrent(t *testing.T) {
+ var wg sync.WaitGroup
+
+ c := qt.New(t)
+
+ spec, workDir := newTestResourceOsFs(c)
+ defer func() {
+ os.Remove(workDir)
+ }()
+
+ image := fetchImageForSpec(spec, c, "sunset.jpg")
+
+ for i := 0; i < 4; i++ {
+ wg.Add(1)
+ go func(id int) {
+ defer wg.Done()
+ for j := 0; j < 5; j++ {
+ img := image
+ for k := 0; k < 2; k++ {
+ r1, err := img.Resize(fmt.Sprintf("%dx", id-k))
+ if err != nil {
+ t.Error(err)
+ }
+
+ if r1.Width() != id-k {
+ t.Errorf("Width: %d:%d", r1.Width(), j)
+ }
+
+ r2, err := r1.Resize(fmt.Sprintf("%dx", id-k-1))
+ if err != nil {
+ t.Error(err)
+ }
+
+ img = r2
+ }
+ }
+ }(i + 20)
+ }
+
+ wg.Wait()
+}
+
+func TestImageWithMetadata(t *testing.T) {
+ c := qt.New(t)
+
+ image := fetchSunset(c)
+
+ meta := []map[string]any{
+ {
+ "title": "My Sunset",
+ "name": "Sunset #:counter",
+ "src": "*.jpg",
+ },
+ }
+
+ c.Assert(AssignMetadata(meta, image), qt.IsNil)
+ c.Assert(image.Name(), qt.Equals, "Sunset #1")
+
+ resized, err := image.Resize("200x")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized.Name(), qt.Equals, "Sunset #1")
+}
+
+func TestImageResize8BitPNG(t *testing.T) {
+ c := qt.New(t)
+
+ image := fetchImage(c, "gohugoio.png")
+
+ c.Assert(image.MediaType().Type(), qt.Equals, "image/png")
+ c.Assert(image.RelPermalink(), qt.Equals, "/a/gohugoio.png")
+ c.Assert(image.ResourceType(), qt.Equals, "image")
+ c.Assert(image.Exif(), qt.IsNil)
+
+ resized, err := image.Resize("800x")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized.MediaType().Type(), qt.Equals, "image/png")
+ c.Assert(resized.RelPermalink(), qt.Equals, "/a/gohugoio_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_800x0_resize_linear_3.png")
+ c.Assert(resized.Width(), qt.Equals, 800)
+}
+
+func TestImageResizeInSubPath(t *testing.T) {
+ c := qt.New(t)
+
+ image := fetchImage(c, "sub/gohugoio2.png")
+
+ c.Assert(image.MediaType(), eq, media.PNGType)
+ c.Assert(image.RelPermalink(), qt.Equals, "/a/sub/gohugoio2.png")
+ c.Assert(image.ResourceType(), qt.Equals, "image")
+ c.Assert(image.Exif(), qt.IsNil)
+
+ resized, err := image.Resize("101x101")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resized.MediaType().Type(), qt.Equals, "image/png")
+ c.Assert(resized.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png")
+ c.Assert(resized.Width(), qt.Equals, 101)
+ c.Assert(resized.Exif(), qt.IsNil)
+
+ publishedImageFilename := filepath.Clean(resized.RelPermalink())
+
+ spec := image.(specProvider).getSpec()
+
+ assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
+ c.Assert(spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil)
+
+ // Clear mem cache to simulate reading from the file cache.
+ spec.imageCache.clear()
+
+ resizedAgain, err := image.Resize("101x101")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resizedAgain.RelPermalink(), qt.Equals, "/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_3.png")
+ c.Assert(resizedAgain.Width(), qt.Equals, 101)
+ assertImageFile(c, image.(specProvider).getSpec().BaseFs.PublishFs, publishedImageFilename, 101, 101)
+}
+
+func TestSVGImage(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ svg := fetchResourceForSpec(spec, c, "circle.svg")
+ c.Assert(svg, qt.Not(qt.IsNil))
+}
+
// TestSVGImageContent verifies that Content() on an SVG resource returns
// the raw markup as a string.
func TestSVGImageContent(t *testing.T) {
	c := qt.New(t)
	spec := newTestResourceSpec(specDescriptor{c: c})
	svg := fetchResourceForSpec(spec, c, "circle.svg")
	c.Assert(svg, qt.Not(qt.IsNil))

	content, err := svg.Content()
	c.Assert(err, qt.IsNil)
	// Content should be the SVG source text, not a byte slice.
	c.Assert(content, hqt.IsSameType, "")
	c.Assert(content.(string), qt.Contains, `<svg height="100" width="100">`)
}
+
// TestImageExif verifies Exif extraction (date, GPS coordinates, tags),
// that a resized image shares the source's Exif data, and that a second
// fetch of the same image reads the Exif data from the file cache.
func TestImageExif(t *testing.T) {
	c := qt.New(t)
	fs := afero.NewMemMapFs()
	spec := newTestResourceSpec(specDescriptor{fs: fs, c: c})
	image := fetchResourceForSpec(spec, c, "sunset.jpg").(images.ImageResource)

	getAndCheckExif := func(c *qt.C, image images.ImageResource) {
		x := image.Exif()
		c.Assert(x, qt.Not(qt.IsNil))

		c.Assert(x.Date.Format("2006-01-02"), qt.Equals, "2017-10-27")

		// Malaga: https://goo.gl/taazZy
		c.Assert(x.Lat, qt.Equals, float64(36.59744166666667))
		c.Assert(x.Long, qt.Equals, float64(-4.50846))

		v, found := x.Tags["LensModel"]
		c.Assert(found, qt.Equals, true)
		lensModel, ok := v.(string)
		c.Assert(ok, qt.Equals, true)
		c.Assert(lensModel, qt.Equals, "smc PENTAX-DA* 16-50mm F2.8 ED AL [IF] SDM")
		// Derived images must expose the same Exif data as the source.
		resized, _ := image.Resize("300x200")
		x2 := resized.Exif()
		c.Assert(x2, eq, x)
	}

	getAndCheckExif(c, image)
	image = fetchResourceForSpec(spec, c, "sunset.jpg").(images.ImageResource)
	// This will read from file cache.
	getAndCheckExif(c, image)
}
+
// BenchmarkImageExif measures Exif decoding in three scenarios: a cold
// cache (every decode hits the source), a cold cache with 10 repeated
// reads per image, and a warm cache (the images were decoded once before
// the timed loop).
func BenchmarkImageExif(b *testing.B) {
	// Each benchmark iteration gets its own copy of sunset.jpg so cold-cache
	// runs cannot share decoded state between iterations.
	getImages := func(c *qt.C, b *testing.B, fs afero.Fs) []images.ImageResource {
		spec := newTestResourceSpec(specDescriptor{fs: fs, c: c})
		imgs := make([]images.ImageResource, b.N)
		for i := 0; i < b.N; i++ {
			imgs[i] = fetchResourceForSpec(spec, c, "sunset.jpg", strconv.Itoa(i)).(images.ImageResource)
		}
		return imgs
	}

	getAndCheckExif := func(c *qt.C, image images.ImageResource) {
		x := image.Exif()
		c.Assert(x, qt.Not(qt.IsNil))
		c.Assert(x.Long, qt.Equals, float64(-4.50846))
	}

	b.Run("Cold cache", func(b *testing.B) {
		b.StopTimer()
		c := qt.New(b)
		images := getImages(c, b, afero.NewMemMapFs())

		b.StartTimer()
		for i := 0; i < b.N; i++ {
			getAndCheckExif(c, images[i])
		}
	})

	b.Run("Cold cache, 10", func(b *testing.B) {
		b.StopTimer()
		c := qt.New(b)
		images := getImages(c, b, afero.NewMemMapFs())

		b.StartTimer()
		for i := 0; i < b.N; i++ {
			for j := 0; j < 10; j++ {
				getAndCheckExif(c, images[i])
			}
		}
	})

	b.Run("Warm cache", func(b *testing.B) {
		b.StopTimer()
		c := qt.New(b)
		fs := afero.NewMemMapFs()
		images := getImages(c, b, fs)
		// Prime the cache outside the timed region.
		for i := 0; i < b.N; i++ {
			getAndCheckExif(c, images[i])
		}

		images = getImages(c, b, fs)

		b.StartTimer()
		for i := 0; i < b.N; i++ {
			getAndCheckExif(c, images[i])
		}
	})
}
+
// usesFMA indicates whether "fused multiply and add" (FMA) instruction is
// used. The command "grep FMADD go/test/codegen/floats.go" can help keep
// the FMA-using architecture list updated.
var usesFMA = runtime.GOARCH == "s390x" ||
	runtime.GOARCH == "ppc64" ||
	runtime.GOARCH == "ppc64le" ||
	runtime.GOARCH == "arm64"

// goldenEqual compares two NRGBA images. It is used in golden tests only.
// A small tolerance is allowed on architectures using "fused multiply and add"
// (FMA) instruction to accommodate for floating-point rounding differences
// with control golden images that were generated on amd64 architecture.
// See https://golang.org/ref/spec#Floating_point_operators
// and https://github.com/gohugoio/hugo/issues/6387 for more information.
//
// Borrowed from https://github.com/disintegration/gift/blob/a999ff8d5226e5ab14b64a94fca07c4ac3f357cf/gift_test.go#L598-L625
// Copyright (c) 2014-2019 Grigory Dryapak
// Licensed under the MIT License.
func goldenEqual(img1, img2 *image.NRGBA) bool {
	// Allow a per-channel difference of 1 on FMA architectures, 0 elsewhere.
	tolerance := 0
	if usesFMA {
		tolerance = 1
	}

	if !img1.Rect.Eq(img2.Rect) || len(img1.Pix) != len(img2.Pix) {
		return false
	}

	for i, p := range img1.Pix {
		delta := int(p) - int(img2.Pix[i])
		if delta < 0 {
			delta = -delta
		}
		if delta > tolerance {
			return false
		}
	}

	return true
}
+
+// Issue #8729
+func TestImageOperationsGoldenWebp(t *testing.T) {
+ if !webp.Supports() {
+ t.Skip("skip webp test")
+ }
+ c := qt.New(t)
+ c.Parallel()
+
+ devMode := false
+
+ testImages := []string{"fuzzy-cirlcle.png"}
+
+ spec, workDir := newTestResourceOsFs(c)
+ defer func() {
+ if !devMode {
+ os.Remove(workDir)
+ }
+ }()
+
+ if devMode {
+ fmt.Println(workDir)
+ }
+
+ for _, imageName := range testImages {
+ image := fetchImageForSpec(spec, c, imageName)
+ imageWebp, err := image.Resize("200x webp")
+ c.Assert(err, qt.IsNil)
+ c.Assert(imageWebp.Width(), qt.Equals, 200)
+ }
+
+ if devMode {
+ return
+ }
+
+ dir1 := filepath.Join(workDir, "resources/_gen/images")
+ dir2 := filepath.FromSlash("testdata/golden_webp")
+
+ assetGoldenDirs(c, dir1, dir2)
+
+}
+
+func TestImageOperationsGolden(t *testing.T) {
+ c := qt.New(t)
+ c.Parallel()
+
+ // Note, if you're enabling this on a MacOS M1 (ARM) you need to run the test with GOARCH=amd64.
+ // GOARCH=amd64 go test -timeout 30s -run "^TestImageOperationsGolden$" ./resources -v
+ devMode := false
+
+ testImages := []string{"sunset.jpg", "gohugoio8.png", "gohugoio24.png"}
+
+ spec, workDir := newTestResourceOsFs(c)
+ defer func() {
+ if !devMode {
+ os.Remove(workDir)
+ }
+ }()
+
+ if devMode {
+ fmt.Println(workDir)
+ }
+
+ gopher := fetchImageForSpec(spec, c, "gopher-hero8.png")
+ var err error
+ gopher, err = gopher.Resize("30x")
+ c.Assert(err, qt.IsNil)
+
+ // Test PNGs with alpha channel.
+ for _, img := range []string{"gopher-hero8.png", "gradient-circle.png"} {
+ orig := fetchImageForSpec(spec, c, img)
+ for _, resizeSpec := range []string{"200x #e3e615", "200x jpg #e3e615"} {
+ resized, err := orig.Resize(resizeSpec)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+ }
+
+ // A simple Gif file (no animation).
+ orig := fetchImageForSpec(spec, c, "gohugoio-card.gif")
+ for _, resizeSpec := range []string{"100x", "220x"} {
+ resized, err := orig.Resize(resizeSpec)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ // Animated GIF
+ orig = fetchImageForSpec(spec, c, "giphy.gif")
+ for _, resizeSpec := range []string{"200x", "512x"} {
+ resized, err := orig.Resize(resizeSpec)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ for _, img := range testImages {
+
+ orig := fetchImageForSpec(spec, c, img)
+ for _, resizeSpec := range []string{"200x100", "600x", "200x r90 q50 Box"} {
+ resized, err := orig.Resize(resizeSpec)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ for _, fillSpec := range []string{"300x200 Gaussian Smart", "100x100 Center", "300x100 TopLeft NearestNeighbor", "400x200 BottomLeft"} {
+ resized, err := orig.Fill(fillSpec)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ for _, fitSpec := range []string{"300x200 Linear"} {
+ resized, err := orig.Fit(fitSpec)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ f := &images.Filters{}
+
+ filters := []gift.Filter{
+ f.Grayscale(),
+ f.GaussianBlur(6),
+ f.Saturation(50),
+ f.Sepia(100),
+ f.Brightness(30),
+ f.ColorBalance(10, -10, -10),
+ f.Colorize(240, 50, 100),
+ f.Gamma(1.5),
+ f.UnsharpMask(1, 1, 0),
+ f.Sigmoid(0.5, 7),
+ f.Pixelate(5),
+ f.Invert(),
+ f.Hue(22),
+ f.Contrast(32.5),
+ f.Overlay(gopher.(images.ImageSource), 20, 30),
+ f.Text("No options"),
+ f.Text("This long text is to test line breaks. Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."),
+ f.Text("Hugo rocks!", map[string]any{"x": 3, "y": 3, "size": 20, "color": "#fc03b1"}),
+ }
+
+ resized, err := orig.Fill("400x200 center")
+ c.Assert(err, qt.IsNil)
+
+ for _, filter := range filters {
+ resized, err := resized.Filter(filter)
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ resized, err = resized.Filter(filters[0:4])
+ c.Assert(err, qt.IsNil)
+ rel := resized.RelPermalink()
+ c.Assert(rel, qt.Not(qt.Equals), "")
+ }
+
+ if devMode {
+ return
+ }
+
+ dir1 := filepath.Join(workDir, "resources/_gen/images")
+ dir2 := filepath.FromSlash("testdata/golden")
+
+ assetGoldenDirs(c, dir1, dir2)
+
+}
+
// assetGoldenDirs asserts that the generated images in dir1 match the
// checked-in golden directory dir2: same file names, and pixel-equal
// content within the tolerance of goldenEqual. On non-FMA architectures
// the file metadata and MD5 hashes must match exactly as well.
// NOTE(review): the name reads like a typo for assertGoldenDirs.
func assetGoldenDirs(c *qt.C, dir1, dir2 string) {

	// The two dirs above should now be the same.
	dirinfos1, err := ioutil.ReadDir(dir1)
	c.Assert(err, qt.IsNil)
	dirinfos2, err := ioutil.ReadDir(dir2)
	c.Assert(err, qt.IsNil)
	c.Assert(len(dirinfos1), qt.Equals, len(dirinfos2))

	for i, fi1 := range dirinfos1 {
		// ReadDir returns entries sorted by name, so files pair up by index.
		fi2 := dirinfos2[i]
		c.Assert(fi1.Name(), qt.Equals, fi2.Name())

		f1, err := os.Open(filepath.Join(dir1, fi1.Name()))
		c.Assert(err, qt.IsNil)
		f2, err := os.Open(filepath.Join(dir2, fi2.Name()))
		c.Assert(err, qt.IsNil)

		// decodeAll returns one image per GIF frame, or a single image for
		// any other format.
		decodeAll := func(f *os.File) []image.Image {
			var images []image.Image

			if strings.HasSuffix(f.Name(), ".gif") {
				gif, err := gif.DecodeAll(f)
				c.Assert(err, qt.IsNil)
				images = make([]image.Image, len(gif.Image))
				for i, img := range gif.Image {
					images[i] = img
				}
			} else {
				img, _, err := image.Decode(f)
				c.Assert(err, qt.IsNil)
				images = append(images, img)
			}
			return images
		}

		imgs1 := decodeAll(f1)
		imgs2 := decodeAll(f2)
		c.Assert(len(imgs1), qt.Equals, len(imgs2))

	LOOP:
		for i, img1 := range imgs1 {
			img2 := imgs2[i]
			// Normalize both images to NRGBA before comparing.
			nrgba1 := image.NewNRGBA(img1.Bounds())
			gift.New().Draw(nrgba1, img1)
			nrgba2 := image.NewNRGBA(img2.Bounds())
			gift.New().Draw(nrgba2, img2)

			if !goldenEqual(nrgba1, nrgba2) {
				switch fi1.Name() {
				case "gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_73c19c5f80881858a85aa23cd0ca400d.png",
					"gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_ae631e5252bb5d7b92bc766ad1a89069.png",
					"gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d1bbfa2629bffb90118cacce3fcfb924.png",
					"giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_200x0_resize_box.gif":
					c.Log("expectedly differs from golden due to dithering:", fi1.Name())
				default:
					c.Errorf("resulting image differs from golden: %s", fi1.Name())
					break LOOP
				}
			}
		}

		if !usesFMA {
			c.Assert(fi1, eq, fi2)

			// Rewind both files before hashing; they were consumed by
			// decodeAll above.
			_, err = f1.Seek(0, 0)
			c.Assert(err, qt.IsNil)
			_, err = f2.Seek(0, 0)
			c.Assert(err, qt.IsNil)

			hash1, err := helpers.MD5FromReader(f1)
			c.Assert(err, qt.IsNil)
			hash2, err := helpers.MD5FromReader(f2)
			c.Assert(err, qt.IsNil)

			c.Assert(hash1, qt.Equals, hash2)
		}

		f1.Close()
		f2.Close()
	}
}
+
// BenchmarkResizeParallel measures concurrent resizing of the same source
// image, including a second resize of each result. Random widths are used
// so iterations hit a mix of cache keys.
func BenchmarkResizeParallel(b *testing.B) {
	c := qt.New(b)
	img := fetchSunset(c)

	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			w := rand.Intn(10) + 10
			resized, err := img.Resize(strconv.Itoa(w) + "x")
			if err != nil {
				b.Fatal(err)
			}
			_, err = resized.Resize(strconv.Itoa(w-1) + "x")
			if err != nil {
				b.Fatal(err)
			}
		}
	})
}
diff --git a/resources/images/color.go b/resources/images/color.go
new file mode 100644
index 000000000..057a9fb71
--- /dev/null
+++ b/resources/images/color.go
@@ -0,0 +1,83 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "encoding/hex"
+ "fmt"
+ "image/color"
+ "strings"
+)
+
// AddColorToPalette adds c as the first color in p if not already there.
// Note that it does no additional checks, so callers must make sure
// that the palette is valid for the relevant format.
func AddColorToPalette(c color.Color, p color.Palette) color.Palette {
	// Return the palette untouched if an identical color is already present.
	for _, existing := range p {
		if existing == c {
			return p
		}
	}

	// Prepend c; a new slice is returned, the input palette is not mutated.
	return append(color.Palette{c}, p...)
}
+
// ReplaceColorInPalette will replace the color in palette p closest to c in Euclidean
// R,G,B,A space with c.
// The palette is modified in place; p.Index performs the nearest-color search.
func ReplaceColorInPalette(c color.Color, p color.Palette) {
	p[p.Index(c)] = c
}
+
// hexStringToColor parses a 3- or 6-digit hexadecimal color code, with an
// optional leading '#', into a color.Color. Pure white and pure black are
// returned as the stdlib color.White and color.Black values.
func hexStringToColor(s string) (color.Color, error) {
	s = strings.TrimPrefix(s, "#")

	switch len(s) {
	case 3, 6:
		// OK.
	default:
		return nil, fmt.Errorf("invalid color code: %q", s)
	}

	s = strings.ToLower(s)

	if len(s) == 3 {
		// Expand shorthand notation, e.g. "abc" => "aabbcc".
		var sb strings.Builder
		for _, r := range s {
			sb.WriteRune(r)
			sb.WriteRune(r)
		}
		s = sb.String()
	}

	// Standard colors.
	switch s {
	case "ffffff":
		return color.White, nil
	case "000000":
		return color.Black, nil
	}

	// Append an alpha byte so the resulting color is fully opaque.
	s += "ff"

	b, err := hex.DecodeString(s)
	if err != nil {
		return nil, err
	}

	return color.RGBA{b[0], b[1], b[2], b[3]}, nil
}
diff --git a/resources/images/color_test.go b/resources/images/color_test.go
new file mode 100644
index 000000000..52871e691
--- /dev/null
+++ b/resources/images/color_test.go
@@ -0,0 +1,89 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "image/color"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestHexStringToColor covers valid 3- and 6-digit hex codes (with and
// without '#', in any letter case) plus a handful of invalid inputs.
func TestHexStringToColor(t *testing.T) {
	c := qt.New(t)

	for _, test := range []struct {
		arg    string
		expect any
	}{
		// expect == false marks an input that must fail to parse.
		{"f", false},
		{"#f", false},
		{"#fffffff", false},
		{"fffffff", false},
		{"#fff", color.White},
		{"fff", color.White},
		{"FFF", color.White},
		{"FfF", color.White},
		{"#ffffff", color.White},
		{"ffffff", color.White},
		{"#000", color.Black},
		{"#4287f5", color.RGBA{R: 0x42, G: 0x87, B: 0xf5, A: 0xff}},
		{"777", color.RGBA{R: 0x77, G: 0x77, B: 0x77, A: 0xff}},
	} {

		test := test
		c.Run(test.arg, func(c *qt.C) {
			c.Parallel()

			result, err := hexStringToColor(test.arg)

			if b, ok := test.expect.(bool); ok && !b {
				c.Assert(err, qt.Not(qt.IsNil))
				return
			}

			c.Assert(err, qt.IsNil)
			c.Assert(result, qt.DeepEquals, test.expect)
		})

	}
}
+
// TestAddColorToPalette checks that colors equal to an existing entry are
// not added twice, while new colors grow the palette by one.
func TestAddColorToPalette(t *testing.T) {
	c := qt.New(t)

	palette := color.Palette{color.White, color.Black}

	c.Assert(AddColorToPalette(color.White, palette), qt.HasLen, 2)

	blue1, _ := hexStringToColor("34c3eb")
	blue2, _ := hexStringToColor("34c3eb")
	white, _ := hexStringToColor("fff")

	// white parses to the same value as color.White, so no growth.
	c.Assert(AddColorToPalette(white, palette), qt.HasLen, 2)
	c.Assert(AddColorToPalette(blue1, palette), qt.HasLen, 3)
	c.Assert(AddColorToPalette(blue2, palette), qt.HasLen, 3)
}
+
// TestReplaceColorInPalette checks that the palette entry closest to the
// given color (here: white) is replaced in place.
func TestReplaceColorInPalette(t *testing.T) {
	c := qt.New(t)

	palette := color.Palette{color.White, color.Black}
	offWhite, _ := hexStringToColor("fcfcfc")

	ReplaceColorInPalette(offWhite, palette)

	c.Assert(palette, qt.HasLen, 2)
	c.Assert(palette[0], qt.Equals, offWhite)
}
diff --git a/resources/images/config.go b/resources/images/config.go
new file mode 100644
index 000000000..62b5c72d8
--- /dev/null
+++ b/resources/images/config.go
@@ -0,0 +1,462 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "fmt"
+ "image/color"
+ "strconv"
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/media"
+
+ "errors"
+
+ "github.com/bep/gowebp/libwebp/webpoptions"
+
+ "github.com/disintegration/gift"
+
+ "github.com/mitchellh/mapstructure"
+)
+
var (
	// imageFormats maps lower-case file extensions (including the leading
	// dot) to the image Format used for processing.
	imageFormats = map[string]Format{
		".jpg":  JPEG,
		".jpeg": JPEG,
		".jpe":  JPEG,
		".jif":  JPEG,
		".jfif": JPEG,
		".png":  PNG,
		".tif":  TIFF,
		".tiff": TIFF,
		".bmp":  BMP,
		".gif":  GIF,
		".webp": WEBP,
	}

	// imageFormatsBySubType maps media sub types (e.g. "jpeg") to Format.
	imageFormatsBySubType = map[string]Format{
		media.JPEGType.SubType: JPEG,
		media.PNGType.SubType:  PNG,
		media.TIFFType.SubType: TIFF,
		media.BMPType.SubType:  BMP,
		media.GIFType.SubType:  GIF,
		media.WEBPType.SubType: WEBP,
	}

	// Add or increment if changes to an image format's processing requires
	// re-generation.
	imageFormatsVersions = map[Format]int{
		PNG:  3, // Fix transparency issue with 32 bit images.
		WEBP: 2, // Fix transparency issue with 32 bit images.
	}

	// Increment to mark all processed images as stale. Only use when absolutely needed.
	// See the finer grained smartCropVersionNumber and imageFormatsVersions.
	mainImageVersionNumber = 0
)
+
// anchorPositions maps lower-cased anchor names, as they appear in image
// operation config strings, to gift anchors.
var anchorPositions = map[string]gift.Anchor{
	strings.ToLower("Center"):      gift.CenterAnchor,
	strings.ToLower("TopLeft"):     gift.TopLeftAnchor,
	strings.ToLower("Top"):         gift.TopAnchor,
	strings.ToLower("TopRight"):    gift.TopRightAnchor,
	strings.ToLower("Left"):        gift.LeftAnchor,
	strings.ToLower("Right"):       gift.RightAnchor,
	strings.ToLower("BottomLeft"):  gift.BottomLeftAnchor,
	strings.ToLower("Bottom"):      gift.BottomAnchor,
	strings.ToLower("BottomRight"): gift.BottomRightAnchor,
}
+
// These encoding hints are currently only relevant for Webp.
// The keys are the values accepted for the "hint" imaging config setting.
var hints = map[string]webpoptions.EncodingPreset{
	"picture": webpoptions.EncodingPresetPicture,
	"photo":   webpoptions.EncodingPresetPhoto,
	"drawing": webpoptions.EncodingPresetDrawing,
	"icon":    webpoptions.EncodingPresetIcon,
	"text":    webpoptions.EncodingPresetText,
}
+
// imageFilters maps lower-cased resample filter names, as they appear in
// image operation config strings, to gift resampling kernels.
var imageFilters = map[string]gift.Resampling{

	strings.ToLower("NearestNeighbor"):   gift.NearestNeighborResampling,
	strings.ToLower("Box"):               gift.BoxResampling,
	strings.ToLower("Linear"):            gift.LinearResampling,
	strings.ToLower("Hermite"):           hermiteResampling,
	strings.ToLower("MitchellNetravali"): mitchellNetravaliResampling,
	strings.ToLower("CatmullRom"):        catmullRomResampling,
	strings.ToLower("BSpline"):           bSplineResampling,
	strings.ToLower("Gaussian"):          gaussianResampling,
	strings.ToLower("Lanczos"):           gift.LanczosResampling,
	strings.ToLower("Hann"):              hannResampling,
	strings.ToLower("Hamming"):           hammingResampling,
	strings.ToLower("Blackman"):          blackmanResampling,
	strings.ToLower("Bartlett"):          bartlettResampling,
	strings.ToLower("Welch"):             welchResampling,
	strings.ToLower("Cosine"):            cosineResampling,
}
+
// ImageFormatFromExt returns the image Format for the given file extension
// (including the leading dot), and whether it is a supported format.
func ImageFormatFromExt(ext string) (Format, bool) {
	f, found := imageFormats[ext]
	return f, found
}
+
// ImageFormatFromMediaSubType returns the image Format for the given media
// sub type (e.g. "jpeg"), and whether it is a supported format.
func ImageFormatFromMediaSubType(sub string) (Format, bool) {
	f, found := imageFormatsBySubType[sub]
	return f, found
}
+
// Defaults applied when the corresponding imaging config settings are unset.
const (
	defaultJPEGQuality    = 75
	defaultResampleFilter = "box"
	defaultBgColor        = "ffffff"
	defaultHint           = "photo"
)

// defaultImaging is the base Imaging configuration that user-provided
// settings are decoded on top of in DecodeConfig.
var defaultImaging = Imaging{
	ResampleFilter: defaultResampleFilter,
	BgColor:        defaultBgColor,
	Hint:           defaultHint,
	Quality:        defaultJPEGQuality,
}
+
+func DecodeConfig(m map[string]any) (ImagingConfig, error) {
+ if m == nil {
+ m = make(map[string]any)
+ }
+
+ i := ImagingConfig{
+ Cfg: defaultImaging,
+ CfgHash: helpers.HashString(m),
+ }
+
+ if err := mapstructure.WeakDecode(m, &i.Cfg); err != nil {
+ return i, err
+ }
+
+ if err := i.Cfg.init(); err != nil {
+ return i, err
+ }
+
+ var err error
+ i.BgColor, err = hexStringToColor(i.Cfg.BgColor)
+ if err != nil {
+ return i, err
+ }
+
+ if i.Cfg.Anchor != "" && i.Cfg.Anchor != smartCropIdentifier {
+ anchor, found := anchorPositions[i.Cfg.Anchor]
+ if !found {
+ return i, fmt.Errorf("invalid anchor value %q in imaging config", i.Anchor)
+ }
+ i.Anchor = anchor
+ } else {
+ i.Cfg.Anchor = smartCropIdentifier
+ }
+
+ filter, found := imageFilters[i.Cfg.ResampleFilter]
+ if !found {
+ return i, fmt.Errorf("%q is not a valid resample filter", filter)
+ }
+ i.ResampleFilter = filter
+
+ if strings.TrimSpace(i.Cfg.Exif.IncludeFields) == "" && strings.TrimSpace(i.Cfg.Exif.ExcludeFields) == "" {
+ // Don't change this for no good reason. Please don't.
+ i.Cfg.Exif.ExcludeFields = "GPS|Exif|Exposure[M|P|B]|Contrast|Resolution|Sharp|JPEG|Metering|Sensing|Saturation|ColorSpace|Flash|WhiteBalance"
+ }
+
+ return i, nil
+}
+
// DecodeImageConfig parses a single image operation config string (e.g.
// "300x200 Gaussian q50 r90 #fff webp") into an ImageConfig for the given
// action ("resize", "crop", "fill" or "fit"), filling unset options from
// defaults and from the source image format.
func DecodeImageConfig(action, config string, defaults ImagingConfig, sourceFormat Format) (ImageConfig, error) {
	var (
		c   ImageConfig = GetDefaultImageConfig(action, defaults)
		err error
	)

	c.Action = action

	if config == "" {
		return c, errors.New("image config cannot be empty")
	}

	// The config string is a whitespace-separated list of case-insensitive
	// tokens; each token is classified by its shape below. Named tokens
	// (anchors, filters, hints) are matched before the single-letter
	// prefixes, so e.g. "right" is an anchor, not a rotation.
	parts := strings.Fields(config)
	for _, part := range parts {
		part = strings.ToLower(part)

		if part == smartCropIdentifier {
			c.AnchorStr = smartCropIdentifier
		} else if pos, ok := anchorPositions[part]; ok {
			c.Anchor = pos
			c.AnchorStr = part
		} else if filter, ok := imageFilters[part]; ok {
			c.Filter = filter
			c.FilterStr = part
		} else if hint, ok := hints[part]; ok {
			c.Hint = hint
		} else if part[0] == '#' {
			// Background color, e.g. "#fff"; stored without the '#'.
			c.BgColorStr = part[1:]
			c.BgColor, err = hexStringToColor(c.BgColorStr)
			if err != nil {
				return c, err
			}
		} else if part[0] == 'q' {
			// Quality, e.g. "q75".
			c.Quality, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
			if c.Quality < 1 || c.Quality > 100 {
				return c, errors.New("quality ranges from 1 to 100 inclusive")
			}
			c.qualitySetForImage = true
		} else if part[0] == 'r' {
			// Rotation in degrees, e.g. "r90".
			c.Rotate, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
		} else if strings.Contains(part, "x") {
			// Dimensions, e.g. "200x100", "200x" or "x100".
			widthHeight := strings.Split(part, "x")
			if len(widthHeight) <= 2 {
				first := widthHeight[0]
				if first != "" {
					c.Width, err = strconv.Atoi(first)
					if err != nil {
						return c, err
					}
				}

				if len(widthHeight) == 2 {
					second := widthHeight[1]
					if second != "" {
						c.Height, err = strconv.Atoi(second)
						if err != nil {
							return c, err
						}
					}
				}
			} else {
				return c, errors.New("invalid image dimensions")
			}
		} else if f, ok := ImageFormatFromExt("." + part); ok {
			// Target format, e.g. "jpg" or "webp".
			c.TargetFormat = f
		}
	}

	// Validate that the action got the dimensions it requires.
	switch c.Action {
	case "crop", "fill", "fit":
		if c.Width == 0 || c.Height == 0 {
			return c, errors.New("must provide Width and Height")
		}
	case "resize":
		if c.Width == 0 && c.Height == 0 {
			return c, errors.New("must provide Width or Height")
		}
	default:
		return c, fmt.Errorf("BUG: unknown action %q encountered while decoding image configuration", c.Action)
	}

	// Fill anything still unset from the site-level defaults.
	if c.FilterStr == "" {
		c.FilterStr = defaults.Cfg.ResampleFilter
		c.Filter = defaults.ResampleFilter
	}

	if c.Hint == 0 {
		c.Hint = webpoptions.EncodingPresetPhoto
	}

	if c.AnchorStr == "" {
		c.AnchorStr = defaults.Cfg.Anchor
		c.Anchor = defaults.Anchor
	}

	// default to the source format
	if c.TargetFormat == 0 {
		c.TargetFormat = sourceFormat
	}

	if c.Quality <= 0 && c.TargetFormat.RequiresDefaultQuality() {
		// We need a quality setting for all JPEGs and WEBPs.
		c.Quality = defaults.Cfg.Quality
	}

	if c.BgColor == nil && c.TargetFormat != sourceFormat {
		if sourceFormat.SupportsTransparency() && !c.TargetFormat.SupportsTransparency() {
			c.BgColor = defaults.BgColor
			c.BgColorStr = defaults.Cfg.BgColor
		}
	}

	return c, nil
}
+
// ImageConfig holds configuration to create a new image from an existing one, resize etc.
type ImageConfig struct {
	// This defines the output format of the output image. It defaults to the source format.
	TargetFormat Format

	// The operation to perform: "resize", "crop", "fill" or "fit".
	Action string

	// If set, this will be used as the key in filenames etc.
	Key string

	// Quality ranges from 1 to 100 inclusive, higher is better.
	// This is only relevant for JPEG and WEBP images.
	// Default is 75.
	Quality            int
	qualitySetForImage bool // Whether the above is set for this image.

	// Rotate rotates an image by the given angle counter-clockwise.
	// The rotation will be performed first.
	Rotate int

	// Used to fill any transparency.
	// When set in site config, it's used when converting to a format that does
	// not support transparency.
	// When set per image operation, it's used even for formats that does support
	// transparency.
	BgColor    color.Color
	BgColorStr string // BgColor's hex form without '#', used in cache keys.

	// Hint about what type of picture this is. Used to optimize encoding
	// when target is set to webp.
	Hint webpoptions.EncodingPreset

	// Target dimensions in pixels. crop/fill/fit require both; resize
	// requires at least one (see DecodeImageConfig).
	Width  int
	Height int

	// Resample filter plus its lower-cased name (used in cache keys).
	Filter    gift.Resampling
	FilterStr string

	// Anchor plus its lower-cased name (used in cache keys for fill/crop).
	Anchor    gift.Anchor
	AnchorStr string
}
+
// GetKey returns the cache/filename key for this image configuration in the
// given target format. The key layout must be kept stable: changing it
// invalidates (and regenerates) every previously processed image.
func (i ImageConfig) GetKey(format Format) string {
	if i.Key != "" {
		return i.Action + "_" + i.Key
	}

	k := strconv.Itoa(i.Width) + "x" + strconv.Itoa(i.Height)
	if i.Action != "" {
		k += "_" + i.Action
	}
	// This slightly odd construct is here to preserve the old image keys.
	if i.qualitySetForImage || i.TargetFormat.RequiresDefaultQuality() {
		k += "_q" + strconv.Itoa(i.Quality)
	}
	if i.Rotate != 0 {
		k += "_r" + strconv.Itoa(i.Rotate)
	}
	if i.BgColorStr != "" {
		k += "_bg" + i.BgColorStr
	}

	if i.TargetFormat == WEBP {
		k += "_h" + strconv.Itoa(int(i.Hint))
	}

	// Smart crop keys carry their own version so bumping the smart crop
	// algorithm only invalidates smart-cropped images.
	anchor := i.AnchorStr
	if anchor == smartCropIdentifier {
		anchor = anchor + strconv.Itoa(smartCropVersionNumber)
	}

	k += "_" + i.FilterStr

	// The anchor is only relevant for operations that position a crop box.
	if strings.EqualFold(i.Action, "fill") || strings.EqualFold(i.Action, "crop") {
		k += "_" + anchor
	}

	if v, ok := imageFormatsVersions[format]; ok {
		k += "_" + strconv.Itoa(v)
	}

	if mainImageVersionNumber > 0 {
		k += "_" + strconv.Itoa(mainImageVersionNumber)
	}

	return k
}
+
// ImagingConfig is the result of DecodeConfig: the user-provided Imaging
// settings together with their parsed, typed counterparts.
type ImagingConfig struct {
	// Typed versions of the corresponding Cfg string settings, resolved by
	// DecodeConfig.
	BgColor        color.Color
	Hint           webpoptions.EncodingPreset
	ResampleFilter gift.Resampling
	Anchor         gift.Anchor

	// Config as provided by the user.
	Cfg Imaging

	// Hash of the config map provided by the user.
	CfgHash string
}
+
// Imaging contains default image processing configuration. This will be fetched
// from site (or language) config.
type Imaging struct {
	// Default image quality setting (1-100). Only relevant for JPEG and
	// WEBP images (see DecodeImageConfig / RequiresDefaultQuality).
	Quality int

	// Resample filter to use in resize operations.
	ResampleFilter string

	// Hint about what type of image this is.
	// Currently only used when encoding to Webp.
	// Default is "photo".
	// Valid values are "picture", "photo", "drawing", "icon", or "text".
	Hint string

	// The anchor to use in Fill. Default is "smart", i.e. Smart Crop.
	Anchor string

	// Default color used in fill operations (e.g. "fff" for white).
	BgColor string

	Exif ExifConfig
}
+
// init validates the quality range and lowercases the string settings so
// they can be used as lookup keys (anchorPositions, imageFilters, hints).
// Note that Quality == 0 passes validation; DecodeImageConfig treats a
// non-positive quality as unset and substitutes the default.
func (cfg *Imaging) init() error {
	if cfg.Quality < 0 || cfg.Quality > 100 {
		return errors.New("image quality must be a number between 1 and 100")
	}

	cfg.BgColor = strings.ToLower(strings.TrimPrefix(cfg.BgColor, "#"))
	cfg.Anchor = strings.ToLower(cfg.Anchor)
	cfg.ResampleFilter = strings.ToLower(cfg.ResampleFilter)
	cfg.Hint = strings.ToLower(cfg.Hint)

	return nil
}
+
// ExifConfig holds the site configuration for Exif extraction.
type ExifConfig struct {

	// Regexp matching the Exif fields you want from the (massive) set of Exif info
	// available. As we cache this info to disk, this is for performance and
	// disk space reasons more than anything.
	// If you want it all, put ".*" in this config setting.
	// Note that if neither this or ExcludeFields is set, Hugo will return a small
	// default set.
	IncludeFields string

	// Regexp matching the Exif fields you want to exclude. This may be easier to use
	// than IncludeFields above, depending on what you want.
	ExcludeFields string

	// Hugo extracts the "photo taken" date/time into .Date by default.
	// Set this to true to turn it off.
	DisableDate bool

	// Hugo extracts the "photo taken where" (GPS latitude and longitude) into
	// .Long and .Lat. Set this to true to turn it off.
	DisableLatLong bool
}
diff --git a/resources/images/config_test.go b/resources/images/config_test.go
new file mode 100644
index 000000000..1b785f7ca
--- /dev/null
+++ b/resources/images/config_test.go
@@ -0,0 +1,158 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestDecodeConfig checks decoding of the site imaging config: defaults,
// lowercasing of string settings, validation errors, and the default Exif
// exclude list.
func TestDecodeConfig(t *testing.T) {
	c := qt.New(t)
	m := map[string]any{
		"quality":        42,
		"resampleFilter": "NearestNeighbor",
		"anchor":         "topLeft",
	}

	imagingConfig, err := DecodeConfig(m)

	c.Assert(err, qt.IsNil)
	imaging := imagingConfig.Cfg
	c.Assert(imaging.Quality, qt.Equals, 42)
	// String settings are normalized to lower case.
	c.Assert(imaging.ResampleFilter, qt.Equals, "nearestneighbor")
	c.Assert(imaging.Anchor, qt.Equals, "topleft")

	m = map[string]any{}

	imagingConfig, err = DecodeConfig(m)
	c.Assert(err, qt.IsNil)
	imaging = imagingConfig.Cfg
	c.Assert(imaging.ResampleFilter, qt.Equals, "box")
	c.Assert(imaging.Anchor, qt.Equals, "smart")

	// Out-of-range / unknown values must fail to decode.
	_, err = DecodeConfig(map[string]any{
		"quality": 123,
	})
	c.Assert(err, qt.Not(qt.IsNil))

	_, err = DecodeConfig(map[string]any{
		"resampleFilter": "asdf",
	})
	c.Assert(err, qt.Not(qt.IsNil))

	_, err = DecodeConfig(map[string]any{
		"anchor": "asdf",
	})
	c.Assert(err, qt.Not(qt.IsNil))

	imagingConfig, err = DecodeConfig(map[string]any{
		"anchor": "Smart",
	})
	imaging = imagingConfig.Cfg
	c.Assert(err, qt.IsNil)
	c.Assert(imaging.Anchor, qt.Equals, "smart")

	imagingConfig, err = DecodeConfig(map[string]any{
		"exif": map[string]any{
			"disableLatLong": true,
		},
	})
	c.Assert(err, qt.IsNil)
	imaging = imagingConfig.Cfg
	c.Assert(imaging.Exif.DisableLatLong, qt.Equals, true)
	c.Assert(imaging.Exif.ExcludeFields, qt.Equals, "GPS|Exif|Exposure[M|P|B]|Contrast|Resolution|Sharp|JPEG|Metering|Sensing|Saturation|ColorSpace|Flash|WhiteBalance")
}
+
// TestDecodeImageConfig is a table-driven test of the image operation
// config string parser; expect == false marks inputs that must error.
func TestDecodeImageConfig(t *testing.T) {
	for i, this := range []struct {
		action string
		in     string
		expect any
	}{
		{"resize", "300x400", newImageConfig("resize", 300, 400, 75, 0, "box", "smart", "")},
		{"resize", "300x400 #fff", newImageConfig("resize", 300, 400, 75, 0, "box", "smart", "fff")},
		{"resize", "100x200 bottomRight", newImageConfig("resize", 100, 200, 75, 0, "box", "BottomRight", "")},
		{"resize", "10x20 topleft Lanczos", newImageConfig("resize", 10, 20, 75, 0, "Lanczos", "topleft", "")},
		{"resize", "linear left 10x r180", newImageConfig("resize", 10, 0, 75, 180, "linear", "left", "")},
		{"resize", "x20 riGht Cosine q95", newImageConfig("resize", 0, 20, 95, 0, "cosine", "right", "")},
		{"crop", "300x400", newImageConfig("crop", 300, 400, 75, 0, "box", "smart", "")},
		{"fill", "300x400", newImageConfig("fill", 300, 400, 75, 0, "box", "smart", "")},
		{"fit", "300x400", newImageConfig("fit", 300, 400, 75, 0, "box", "smart", "")},

		{"resize", "", false},
		{"resize", "foo", false},
		{"crop", "100x", false},
		{"fill", "100x", false},
		{"fit", "100x", false},
		{"foo", "100x", false},
	} {

		cfg, err := DecodeConfig(nil)
		if err != nil {
			t.Fatal(err)
		}
		result, err := DecodeImageConfig(this.action, this.in, cfg, PNG)
		if b, ok := this.expect.(bool); ok && !b {
			if err == nil {
				t.Errorf("[%d] parseImageConfig didn't return an expected error", i)
			}
		} else {
			if err != nil {
				t.Fatalf("[%d] err: %s", i, err)
			}
			if fmt.Sprint(result) != fmt.Sprint(this.expect) {
				t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, this.expect)
			}
		}
	}
}
+
// newImageConfig builds the expected ImageConfig for the table-driven test
// above, mirroring the lower-casing and lookup rules of DecodeImageConfig.
func newImageConfig(action string, width, height, quality, rotate int, filter, anchor, bgColor string) ImageConfig {
	var c ImageConfig = GetDefaultImageConfig(action, ImagingConfig{})
	c.TargetFormat = PNG
	c.Hint = 2
	c.Width = width
	c.Height = height
	c.Quality = quality
	// 75 is the default, so only a non-default quality counts as "set".
	c.qualitySetForImage = quality != 75
	c.Rotate = rotate
	c.BgColorStr = bgColor
	c.BgColor, _ = hexStringToColor(bgColor)

	if filter != "" {
		filter = strings.ToLower(filter)
		if v, ok := imageFilters[filter]; ok {
			c.Filter = v
			c.FilterStr = filter
		}
	}

	if anchor != "" {
		if anchor == smartCropIdentifier {
			c.AnchorStr = anchor
		} else {
			anchor = strings.ToLower(anchor)
			if v, ok := anchorPositions[anchor]; ok {
				c.Anchor = v
				c.AnchorStr = anchor
			}
		}
	}

	return c
}
diff --git a/resources/images/exif/exif.go b/resources/images/exif/exif.go
new file mode 100644
index 000000000..487f250d5
--- /dev/null
+++ b/resources/images/exif/exif.go
@@ -0,0 +1,272 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package exif
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "math/big"
+ "regexp"
+ "strings"
+ "time"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/bep/tmc"
+
+ _exif "github.com/rwcarlsen/goexif/exif"
+ "github.com/rwcarlsen/goexif/tiff"
+)
+
+// exifTimeLayout is the date/time layout used by Exif DateTime tags.
+const exifTimeLayout = "2006:01:02 15:04:05"
+
+// ExifInfo holds the decoded Exif data for an Image.
+type ExifInfo struct {
+ // GPS latitude in degrees.
+ Lat float64
+
+ // GPS longitude in degrees.
+ Long float64
+
+ // Image creation date/time.
+ Date time.Time
+
+ // A collection of the available Exif tags for this Image.
+ Tags Tags
+}
+
+// Decoder decodes Exif metadata from an image stream, with optional
+// field filtering and date/lat-long extraction toggles.
+type Decoder struct {
+ // Only tags whose name matches this regexp are kept (nil = keep all).
+ includeFieldsRe *regexp.Regexp
+ // Tags whose name matches this regexp are dropped; takes precedence over include.
+ // NOTE(review): field name has a typo ("FieldsrRe"); renaming would also touch
+ // ExcludeFields and Decode — TODO fix in a follow-up.
+ excludeFieldsrRe *regexp.Regexp
+ // When true, skip extracting the creation date.
+ noDate bool
+ // When true, skip extracting GPS latitude/longitude.
+ noLatLong bool
+}
+
+// IncludeFields returns a Decoder option that keeps only the Exif fields
+// whose names match the given regular expression.
+func IncludeFields(expression string) func(*Decoder) error {
+ return func(d *Decoder) error {
+ re, err := compileRegexp(expression)
+ if err != nil {
+ return err
+ }
+ d.includeFieldsRe = re
+ return nil
+ }
+}
+
+// ExcludeFields returns a Decoder option that drops the Exif fields
+// whose names match the given regular expression.
+func ExcludeFields(expression string) func(*Decoder) error {
+ return func(d *Decoder) error {
+ re, err := compileRegexp(expression)
+ if err != nil {
+ return err
+ }
+ d.excludeFieldsrRe = re
+ return nil
+ }
+}
+
+// WithLatLongDisabled returns a Decoder option that disables GPS
+// latitude/longitude extraction when disabled is true.
+func WithLatLongDisabled(disabled bool) func(*Decoder) error {
+ return func(d *Decoder) error {
+ d.noLatLong = disabled
+ return nil
+ }
+}
+
+// WithDateDisabled returns a Decoder option that disables creation-date
+// extraction when disabled is true.
+func WithDateDisabled(disabled bool) func(*Decoder) error {
+ return func(d *Decoder) error {
+ d.noDate = disabled
+ return nil
+ }
+}
+
+// compileRegexp compiles a user-supplied field-matching expression.
+// An empty expression yields a nil regexp (meaning "no filter").
+func compileRegexp(expression string) (*regexp.Regexp, error) {
+ expression = strings.TrimSpace(expression)
+ if expression == "" {
+ return nil, nil
+ }
+ if !strings.HasPrefix(expression, "(") {
+ // Make it case insensitive
+ expression = "(?i)" + expression
+ }
+
+ return regexp.Compile(expression)
+}
+
+// NewDecoder creates a Decoder and applies the given options in order,
+// returning the first option error encountered.
+func NewDecoder(options ...func(*Decoder) error) (*Decoder, error) {
+ d := &Decoder{}
+ for _, opt := range options {
+ if err := opt(d); err != nil {
+ return nil, err
+ }
+ }
+
+ return d, nil
+}
+
+// Decode reads Exif metadata from r. It returns (nil, nil) when the stream
+// contains no Exif block at all. Any panic from the underlying Exif library
+// is converted into an error via the deferred recover.
+func (d *Decoder) Decode(r io.Reader) (ex *ExifInfo, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ // Go convention: error strings start lowercase and carry no punctuation.
+ err = fmt.Errorf("exif failed: %v", r)
+ }
+ }()
+
+ var x *_exif.Exif
+ x, err = _exif.Decode(r)
+ if err != nil {
+ if err.Error() == "EOF" {
+ // Found no Exif
+ return nil, nil
+ }
+ return
+ }
+
+ var tm time.Time
+ var lat, long float64
+
+ // Date and lat/long extraction errors are deliberately ignored: missing
+ // tags simply leave the zero values in place.
+ if !d.noDate {
+ tm, _ = x.DateTime()
+ }
+
+ if !d.noLatLong {
+ lat, long, _ = x.LatLong()
+ }
+
+ walker := &exifWalker{x: x, vals: make(map[string]any), includeMatcher: d.includeFieldsRe, excludeMatcher: d.excludeFieldsrRe}
+ if err = x.Walk(walker); err != nil {
+ return
+ }
+
+ ex = &ExifInfo{Lat: lat, Long: long, Date: tm, Tags: walker.vals}
+
+ return
+}
+
+// decodeTag converts a raw TIFF tag into a Go value suitable for templates:
+// strings (with DateTime-like fields parsed into time.Time), rationals,
+// floats and ints. Multi-valued tags become a slice.
+func decodeTag(x *_exif.Exif, f _exif.FieldName, t *tiff.Tag) (any, error) {
+ switch t.Format() {
+ case tiff.StringVal, tiff.UndefVal:
+ s := nullString(t.Val)
+ // Fields named like DateTime* are parsed into time.Time when possible.
+ if strings.Contains(string(f), "DateTime") {
+ if d, err := tryParseDate(x, s); err == nil {
+ return d, nil
+ }
+ }
+ return s, nil
+ case tiff.OtherVal:
+ return "unknown", nil
+ }
+
+ var rv []any
+
+ for i := 0; i < int(t.Count); i++ {
+ switch t.Format() {
+ case tiff.RatVal:
+ n, d, _ := t.Rat2(i)
+ rat := big.NewRat(n, d)
+ // Keep unit-numerator rationals (e.g. exposure 1/250) as *big.Rat so
+ // they render as fractions; everything else flattens to a float.
+ if n == 1 {
+ rv = append(rv, rat)
+ } else {
+ f, _ := rat.Float64()
+ rv = append(rv, f)
+ }
+
+ case tiff.FloatVal:
+ v, _ := t.Float(i)
+ rv = append(rv, v)
+ case tiff.IntVal:
+ v, _ := t.Int(i)
+ rv = append(rv, v)
+ }
+ }
+
+ // Single-valued tags are unwrapped from the slice.
+ if t.Count == 1 {
+ if len(rv) == 1 {
+ return rv[0], nil
+ }
+ }
+
+ return rv, nil
+}
+
+// Code borrowed from exif.DateTime and adjusted.
+// tryParseDate parses an Exif date string in the image's time zone when one
+// is recorded, falling back to the local time zone.
+func tryParseDate(x *_exif.Exif, s string) (time.Time, error) {
+ dateStr := strings.TrimRight(s, "\x00")
+ // TODO(bep): look for timezone offset, GPS time, etc.
+ timeZone := time.Local
+ if tz, _ := x.TimeZone(); tz != nil {
+ timeZone = tz
+ }
+ return time.ParseInLocation(exifTimeLayout, dateStr, timeZone)
+}
+
+// exifWalker collects decoded tag values into vals while walking all Exif
+// fields, applying the include/exclude name filters.
+type exifWalker struct {
+ x *_exif.Exif
+ vals map[string]any
+ includeMatcher *regexp.Regexp
+ excludeMatcher *regexp.Regexp
+}
+
+// Walk implements exif.Walker. The exclude filter wins over the include
+// filter; unmatched fields are silently skipped.
+func (e *exifWalker) Walk(f _exif.FieldName, tag *tiff.Tag) error {
+ name := string(f)
+ if e.excludeMatcher != nil && e.excludeMatcher.MatchString(name) {
+ return nil
+ }
+ if e.includeMatcher != nil && !e.includeMatcher.MatchString(name) {
+ return nil
+ }
+ val, err := decodeTag(e.x, f, tag)
+ if err != nil {
+ return err
+ }
+ e.vals[name] = val
+ return nil
+}
+
+// nullString converts a raw tag byte slice to a string, keeping only
+// graphic runes (this drops NUL padding and control characters).
+// NOTE(review): invalid UTF-8 bytes decode to U+FFFD, which is graphic and
+// therefore kept — confirm that is the intended behavior.
+func nullString(in []byte) string {
+ var rv bytes.Buffer
+ for len(in) > 0 {
+ r, size := utf8.DecodeRune(in)
+ if unicode.IsGraphic(r) {
+ rv.WriteRune(r)
+ }
+ in = in[size:]
+ }
+ return rv.String()
+}
+
+// tcodec is the tmc codec used to round-trip typed tag values (time.Time,
+// *big.Rat, ...) through JSON without losing their Go types.
+var tcodec *tmc.Codec
+
+func init() {
+ var err error
+ tcodec, err = tmc.New()
+ // A failure here is a programmer/configuration bug, so panic at init is acceptable.
+ if err != nil {
+ panic(err)
+ }
+}
+
+// Tags is the map of decoded Exif tag names to their values.
+type Tags map[string]any
+
+// UnmarshalJSON implements json.Unmarshaler using the type-preserving tmc codec.
+func (v *Tags) UnmarshalJSON(b []byte) error {
+ vv := make(map[string]any)
+ if err := tcodec.Unmarshal(b, &vv); err != nil {
+ return err
+ }
+
+ *v = vv
+
+ return nil
+}
+
+// MarshalJSON implements json.Marshaler using the type-preserving tmc codec.
+func (v Tags) MarshalJSON() ([]byte, error) {
+ return tcodec.Marshal(v)
+}
diff --git a/resources/images/exif/exif_test.go b/resources/images/exif/exif_test.go
new file mode 100644
index 000000000..cd5961404
--- /dev/null
+++ b/resources/images/exif/exif_test.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package exif
+
+import (
+ "encoding/json"
+ "math/big"
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+ "github.com/google/go-cmp/cmp"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestExif decodes a known JPEG and verifies date, GPS coordinates, selected
+// tags, and that the result survives a JSON round trip.
+func TestExif(t *testing.T) {
+ c := qt.New(t)
+ f, err := os.Open(filepath.FromSlash("../../testdata/sunset.jpg"))
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+
+ d, err := NewDecoder(IncludeFields("Lens|Date"))
+ c.Assert(err, qt.IsNil)
+ x, err := d.Decode(f)
+ c.Assert(err, qt.IsNil)
+ c.Assert(x.Date.Format("2006-01-02"), qt.Equals, "2017-10-27")
+
+ // Malaga: https://goo.gl/taazZy
+ c.Assert(x.Lat, qt.Equals, float64(36.59744166666667))
+ c.Assert(x.Long, qt.Equals, float64(-4.50846))
+
+ v, found := x.Tags["LensModel"]
+ c.Assert(found, qt.Equals, true)
+ lensModel, ok := v.(string)
+ c.Assert(ok, qt.Equals, true)
+ c.Assert(lensModel, qt.Equals, "smc PENTAX-DA* 16-50mm F2.8 ED AL [IF] SDM")
+
+ v, found = x.Tags["DateTime"]
+ c.Assert(found, qt.Equals, true)
+ c.Assert(v, hqt.IsSameType, time.Time{})
+
+ // Verify that it survives a round-trip to JSON and back.
+ data, err := json.Marshal(x)
+ c.Assert(err, qt.IsNil)
+ x2 := &ExifInfo{}
+ err = json.Unmarshal(data, x2)
+ // Fix: the Unmarshal error was previously assigned but never checked.
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(x2, eq, x)
+}
+
+// TestExifPNG verifies that decoding a PNG (no Exif support in this decoder)
+// returns an error rather than panicking or returning bogus data.
+func TestExifPNG(t *testing.T) {
+ c := qt.New(t)
+
+ f, err := os.Open(filepath.FromSlash("../../testdata/gohugoio.png"))
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+
+ d, err := NewDecoder()
+ c.Assert(err, qt.IsNil)
+ _, err = d.Decode(f)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+// TestIssue8079 is a regression test: multi-byte UTF-8 in string tags must
+// survive decoding (see github.com/gohugoio/hugo issue #8079).
+func TestIssue8079(t *testing.T) {
+ c := qt.New(t)
+
+ f, err := os.Open(filepath.FromSlash("../../testdata/iss8079.jpg"))
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+
+ d, err := NewDecoder()
+ c.Assert(err, qt.IsNil)
+ x, err := d.Decode(f)
+ c.Assert(err, qt.IsNil)
+ c.Assert(x.Tags["ImageDescription"], qt.Equals, "Città del Vaticano #nanoblock #vatican #vaticancity")
+}
+
+// TestNullString verifies that graphic ASCII and multi-byte UTF-8 runes
+// pass through nullString unchanged.
+func TestNullString(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ in string
+ expect string
+ }{
+ {"foo", "foo"},
+ {"\x20", "\x20"}, // plain space (graphic, kept)
+ {"\xc4\x81", "\xc4\x81"}, // "ā" (U+0101)
+ {"\u0160", "\u0160"}, // "Š" (U+0160); note: this is NOT a non-breaking space (that would be U+00A0)
+ } {
+ res := nullString([]byte(test.in))
+ c.Assert(res, qt.Equals, test.expect)
+ }
+}
+
+// BenchmarkDecodeExif measures full Exif decoding of a real JPEG, rewinding
+// the file between iterations.
+func BenchmarkDecodeExif(b *testing.B) {
+ c := qt.New(b)
+ f, err := os.Open(filepath.FromSlash("../../testdata/sunset.jpg"))
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+
+ d, err := NewDecoder()
+ c.Assert(err, qt.IsNil)
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ _, err = d.Decode(f)
+ c.Assert(err, qt.IsNil)
+ // NOTE(review): Seek error deliberately unchecked here; a failed rewind
+ // would surface as a Decode error on the next iteration anyway.
+ f.Seek(0, 0)
+ }
+}
+
+// eq compares ExifInfo values treating *big.Rat by its rational string form
+// and time.Time by Unix seconds (JSON round-trips drop sub-second precision
+// and monotonic clock data).
+var eq = qt.CmpEquals(
+ cmp.Comparer(
+ func(v1, v2 *big.Rat) bool {
+ return v1.RatString() == v2.RatString()
+ },
+ ),
+ cmp.Comparer(func(v1, v2 time.Time) bool {
+ return v1.Unix() == v2.Unix()
+ }),
+)
diff --git a/resources/images/filters.go b/resources/images/filters.go
new file mode 100644
index 000000000..90667af7c
--- /dev/null
+++ b/resources/images/filters.go
@@ -0,0 +1,236 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package images provides template functions for manipulating images.
+package images
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/disintegration/gift"
+ "github.com/spf13/cast"
+)
+
+// Increment for re-generation of images using these filters.
+const filterAPIVersion = 0
+
+// Filters is the namespace exposing the image filter constructors to templates.
+type Filters struct {
+}
+
+// Overlay creates a filter that overlays src at position x y.
+func (*Filters) Overlay(src ImageSource, x, y any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(src.Key(), x, y),
+ Filter: overlayFilter{src: src, x: cast.ToInt(x), y: cast.ToInt(y)},
+ }
+}
+
+// Text creates a filter that draws text with the given options.
+// Supported options: color, size, x, y, linespacing, font.
+func (*Filters) Text(text string, options ...any) gift.Filter {
+ // Defaults used when an option is not supplied.
+ tf := textFilter{
+ text: text,
+ color: "#ffffff",
+ size: 20,
+ x: 10,
+ y: 10,
+ linespacing: 2,
+ }
+
+ var opt maps.Params
+ if len(options) > 0 {
+ opt = maps.MustToParamsAndPrepare(options[0])
+ for option, v := range opt {
+ switch option {
+ case "color":
+ tf.color = cast.ToString(v)
+ case "size":
+ tf.size = cast.ToFloat64(v)
+ case "x":
+ tf.x = cast.ToInt(v)
+ case "y":
+ tf.y = cast.ToInt(v)
+ case "linespacing":
+ tf.linespacing = cast.ToInt(v)
+ case "font":
+ // A template error (e.g. resources.Get failure) may arrive here as an error value.
+ if err, ok := v.(error); ok {
+ panic(fmt.Sprintf("invalid font source: %s", err))
+ }
+ fontSource, ok1 := v.(hugio.ReadSeekCloserProvider)
+ identifier, ok2 := v.(resource.Identifier)
+
+ if !(ok1 && ok2) {
+ panic(fmt.Sprintf("invalid text font source: %T", v))
+ }
+
+ tf.fontSource = fontSource
+
+ // The input value isn't hashable and will not make a stable key.
+ // Replace it with a string in the map used as basis for the
+ // hash string.
+ opt["font"] = identifier.Key()
+
+ }
+ }
+ }
+
+ return filter{
+ Options: newFilterOpts(text, opt),
+ Filter: tf,
+ }
+}
+
+// Brightness creates a filter that changes the brightness of an image.
+// The percentage parameter must be in range (-100, 100).
+func (*Filters) Brightness(percentage any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(percentage),
+ Filter: gift.Brightness(cast.ToFloat32(percentage)),
+ }
+}
+
+// ColorBalance creates a filter that changes the color balance of an image.
+// The percentage parameters for each color channel (red, green, blue) must be in range (-100, 500).
+func (*Filters) ColorBalance(percentageRed, percentageGreen, percentageBlue any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(percentageRed, percentageGreen, percentageBlue),
+ Filter: gift.ColorBalance(cast.ToFloat32(percentageRed), cast.ToFloat32(percentageGreen), cast.ToFloat32(percentageBlue)),
+ }
+}
+
+// Colorize creates a filter that produces a colorized version of an image.
+// The hue parameter is the angle on the color wheel, typically in range (0, 360).
+// The saturation parameter must be in range (0, 100).
+// The percentage parameter specifies the strength of the effect, it must be in range (0, 100).
+func (*Filters) Colorize(hue, saturation, percentage any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(hue, saturation, percentage),
+ Filter: gift.Colorize(cast.ToFloat32(hue), cast.ToFloat32(saturation), cast.ToFloat32(percentage)),
+ }
+}
+
+// Contrast creates a filter that changes the contrast of an image.
+// The percentage parameter must be in range (-100, 100).
+func (*Filters) Contrast(percentage any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(percentage),
+ Filter: gift.Contrast(cast.ToFloat32(percentage)),
+ }
+}
+
+// Gamma creates a filter that performs a gamma correction on an image.
+// The gamma parameter must be positive. Gamma = 1 gives the original image.
+// Gamma less than 1 darkens the image and gamma greater than 1 lightens it.
+func (*Filters) Gamma(gamma any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(gamma),
+ Filter: gift.Gamma(cast.ToFloat32(gamma)),
+ }
+}
+
+// GaussianBlur creates a filter that applies a gaussian blur to an image.
+func (*Filters) GaussianBlur(sigma any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(sigma),
+ Filter: gift.GaussianBlur(cast.ToFloat32(sigma)),
+ }
+}
+
+// Grayscale creates a filter that produces a grayscale version of an image.
+// No Options are recorded: the filter is parameterless, so the zero filterOpts suffice for hashing.
+func (*Filters) Grayscale() gift.Filter {
+ return filter{
+ Filter: gift.Grayscale(),
+ }
+}
+
+// Hue creates a filter that rotates the hue of an image.
+// The hue angle shift is typically in range -180 to 180.
+func (*Filters) Hue(shift any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(shift),
+ Filter: gift.Hue(cast.ToFloat32(shift)),
+ }
+}
+
+// Invert creates a filter that negates the colors of an image.
+func (*Filters) Invert() gift.Filter {
+ return filter{
+ Filter: gift.Invert(),
+ }
+}
+
+// Pixelate creates a filter that applies a pixelation effect to an image.
+func (*Filters) Pixelate(size any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(size),
+ Filter: gift.Pixelate(cast.ToInt(size)),
+ }
+}
+
+// Saturation creates a filter that changes the saturation of an image.
+func (*Filters) Saturation(percentage any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(percentage),
+ Filter: gift.Saturation(cast.ToFloat32(percentage)),
+ }
+}
+
+// Sepia creates a filter that produces a sepia-toned version of an image.
+func (*Filters) Sepia(percentage any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(percentage),
+ Filter: gift.Sepia(cast.ToFloat32(percentage)),
+ }
+}
+
+// Sigmoid creates a filter that changes the contrast of an image using a sigmoidal function and returns the adjusted image.
+// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail.
+func (*Filters) Sigmoid(midpoint, factor any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(midpoint, factor),
+ Filter: gift.Sigmoid(cast.ToFloat32(midpoint), cast.ToFloat32(factor)),
+ }
+}
+
+// UnsharpMask creates a filter that sharpens an image.
+// The sigma parameter is used in a gaussian function and affects the radius of effect.
+// Sigma must be positive. Sharpen radius roughly equals 3 * sigma.
+// The amount parameter controls how much darker and how much lighter the edge borders become. Typically between 0.5 and 1.5.
+// The threshold parameter controls the minimum brightness change that will be sharpened. Typically between 0 and 0.05.
+func (*Filters) UnsharpMask(sigma, amount, threshold any) gift.Filter {
+ return filter{
+ Options: newFilterOpts(sigma, amount, threshold),
+ Filter: gift.UnsharpMask(cast.ToFloat32(sigma), cast.ToFloat32(amount), cast.ToFloat32(threshold)),
+ }
+}
+
+// filter wraps a gift.Filter together with the options it was built from;
+// the Options are hashed to produce stable cache keys.
+type filter struct {
+ Options filterOpts
+ gift.Filter
+}
+
+// For cache-busting.
+type filterOpts struct {
+ Version int
+ Vals any
+}
+
+// newFilterOpts records the filter's parameters plus the API version so that
+// bumping filterAPIVersion invalidates previously generated images.
+func newFilterOpts(vals ...any) filterOpts {
+ return filterOpts{
+ Version: filterAPIVersion,
+ Vals: vals,
+ }
+}
diff --git a/resources/images/filters_test.go b/resources/images/filters_test.go
new file mode 100644
index 000000000..84c8b540d
--- /dev/null
+++ b/resources/images/filters_test.go
@@ -0,0 +1,33 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestFilterHash verifies that filter hashes are stable for equal filters and
+// distinct across different filters/parameters (they drive image cache keys).
+func TestFilterHash(t *testing.T) {
+ c := qt.New(t)
+
+ f := &Filters{}
+
+ c.Assert(helpers.HashString(f.Grayscale()), qt.Equals, helpers.HashString(f.Grayscale()))
+ c.Assert(helpers.HashString(f.Grayscale()), qt.Not(qt.Equals), helpers.HashString(f.Invert()))
+ c.Assert(helpers.HashString(f.Gamma(32)), qt.Not(qt.Equals), helpers.HashString(f.Gamma(33)))
+ c.Assert(helpers.HashString(f.Gamma(32)), qt.Equals, helpers.HashString(f.Gamma(32)))
+}
diff --git a/resources/images/image.go b/resources/images/image.go
new file mode 100644
index 000000000..4ffbaa229
--- /dev/null
+++ b/resources/images/image.go
@@ -0,0 +1,410 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "fmt"
+ "image"
+ "image/color"
+ "image/draw"
+ "image/gif"
+ "image/jpeg"
+ "image/png"
+ "io"
+ "sync"
+
+ "github.com/bep/gowebp/libwebp/webpoptions"
+ "github.com/gohugoio/hugo/resources/images/webp"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/images/exif"
+
+ "github.com/disintegration/gift"
+ "golang.org/x/image/bmp"
+ "golang.org/x/image/tiff"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/hugio"
+)
+
+// NewImage creates an Image. When img is non-nil its dimensions are recorded
+// immediately; otherwise the config is lazily decoded from the Spec on first use.
+func NewImage(f Format, proc *ImageProcessor, img image.Image, s Spec) *Image {
+ if img != nil {
+ return &Image{
+ Format: f,
+ Proc: proc,
+ Spec: s,
+ imageConfig: &imageConfig{
+ config: imageConfigFromImage(img),
+ configLoaded: true,
+ },
+ }
+ }
+ return &Image{Format: f, Proc: proc, Spec: s, imageConfig: &imageConfig{}}
+}
+
+// Image ties an image format and source Spec to the shared processor and a
+// lazily loaded image config (dimensions).
+type Image struct {
+ Format Format
+ Proc *ImageProcessor
+ Spec Spec
+ *imageConfig
+}
+
+// EncodeTo encodes img to w in conf.TargetFormat, applying format-specific
+// options (JPEG/WEBP quality, WEBP hint, GIF palette/animation).
+func (i *Image) EncodeTo(conf ImageConfig, img image.Image, w io.Writer) error {
+ switch conf.TargetFormat {
+ case JPEG:
+
+ var rgba *image.RGBA
+ quality := conf.Quality
+
+ // An opaque NRGBA can be reinterpreted as RGBA without copying pixels;
+ // presumably this lets the JPEG encoder take a faster path — TODO confirm.
+ if nrgba, ok := img.(*image.NRGBA); ok {
+ if nrgba.Opaque() {
+ rgba = &image.RGBA{
+ Pix: nrgba.Pix,
+ Stride: nrgba.Stride,
+ Rect: nrgba.Rect,
+ }
+ }
+ }
+ if rgba != nil {
+ return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality})
+ }
+ return jpeg.Encode(w, img, &jpeg.Options{Quality: quality})
+ case PNG:
+ encoder := png.Encoder{CompressionLevel: png.DefaultCompression}
+ return encoder.Encode(w, img)
+
+ case GIF:
+ // Animated GIFs are written with all frames; plain images as a single frame.
+ if giphy, ok := img.(Giphy); ok {
+ g := giphy.GIF()
+ return gif.EncodeAll(w, g)
+ }
+ return gif.Encode(w, img, &gif.Options{
+ NumColors: 256,
+ })
+ case TIFF:
+ return tiff.Encode(w, img, &tiff.Options{Compression: tiff.Deflate, Predictor: true})
+
+ case BMP:
+ return bmp.Encode(w, img)
+ case WEBP:
+ return webp.Encode(
+ w,
+ img, webpoptions.EncodingOptions{
+ Quality: conf.Quality,
+ EncodingPreset: webpoptions.EncodingPreset(conf.Hint),
+ UseSharpYuv: true,
+ },
+ )
+ default:
+ return errors.New("format not supported")
+ }
+}
+
+// Height returns i's height.
+// NOTE(review): the error from initConfig is discarded here and in Width; a
+// failed decode silently yields 0 — TODO consider surfacing it.
+func (i *Image) Height() int {
+ i.initConfig()
+ return i.config.Height
+}
+
+// Width returns i's width.
+func (i *Image) Width() int {
+ i.initConfig()
+ return i.config.Width
+}
+
+// WithImage returns a shallow copy of i with its config taken from img and
+// the Spec cleared (the pixels are now authoritative). Value receiver: the
+// copy leaves the original Image untouched.
+func (i Image) WithImage(img image.Image) *Image {
+ i.Spec = nil
+ i.imageConfig = &imageConfig{
+ config: imageConfigFromImage(img),
+ configLoaded: true,
+ }
+
+ return &i
+}
+
+// WithSpec returns a shallow copy of i bound to a new source Spec, with a
+// fresh (not yet loaded) image config.
+func (i Image) WithSpec(s Spec) *Image {
+ i.Spec = s
+ i.imageConfig = &imageConfig{}
+ return &i
+}
+
+// InitConfig reads the image config from the given reader.
+// It shares the same sync.Once as initConfig, so whichever runs first wins.
+func (i *Image) InitConfig(r io.Reader) error {
+ var err error
+ i.configInit.Do(func() {
+ i.config, _, err = image.DecodeConfig(r)
+ })
+ return err
+}
+
+// initConfig lazily decodes the image dimensions from the Spec, exactly once.
+func (i *Image) initConfig() error {
+ var err error
+ i.configInit.Do(func() {
+ // Already populated from an in-memory image (see NewImage/WithImage).
+ if i.configLoaded {
+ return
+ }
+
+ var f hugio.ReadSeekCloser
+
+ f, err = i.Spec.ReadSeekCloser()
+ if err != nil {
+ return
+ }
+ defer f.Close()
+
+ i.config, _, err = image.DecodeConfig(f)
+ })
+
+ if err != nil {
+ return fmt.Errorf("failed to load image config: %w", err)
+ }
+
+ return nil
+}
+
+// NewImageProcessor creates an ImageProcessor with an Exif decoder configured
+// from the site's imaging.exif settings.
+func NewImageProcessor(cfg ImagingConfig) (*ImageProcessor, error) {
+ e := cfg.Cfg.Exif
+ exifDecoder, err := exif.NewDecoder(
+ exif.WithDateDisabled(e.DisableDate),
+ exif.WithLatLongDisabled(e.DisableLatLong),
+ exif.ExcludeFields(e.ExcludeFields),
+ exif.IncludeFields(e.IncludeFields),
+ )
+ if err != nil {
+ return nil, err
+ }
+
+ return &ImageProcessor{
+ Cfg: cfg,
+ exifDecoder: exifDecoder,
+ }, nil
+}
+
+// ImageProcessor performs image transformations and Exif decoding using the
+// site's imaging configuration.
+type ImageProcessor struct {
+ Cfg ImagingConfig
+ exifDecoder *exif.Decoder
+}
+
+// DecodeExif decodes Exif metadata from r using the configured decoder.
+func (p *ImageProcessor) DecodeExif(r io.Reader) (*exif.ExifInfo, error) {
+ return p.exifDecoder.Decode(r)
+}
+
+// ApplyFiltersFromConfig translates an ImageConfig (action, dimensions,
+// rotation, anchor, resample filter) into a gift filter chain and applies it
+// to src. Supported actions: resize, crop, fill, fit.
+func (p *ImageProcessor) ApplyFiltersFromConfig(src image.Image, conf ImageConfig) (image.Image, error) {
+ var filters []gift.Filter
+
+ if conf.Rotate != 0 {
+ // Apply any rotation before any resize.
+ filters = append(filters, gift.Rotate(float32(conf.Rotate), color.Transparent, gift.NearestNeighborInterpolation))
+ }
+
+ switch conf.Action {
+ case "resize":
+ filters = append(filters, gift.Resize(conf.Width, conf.Height, conf.Filter))
+ case "crop":
+ if conf.AnchorStr == smartCropIdentifier {
+ bounds, err := p.smartCrop(src, conf.Width, conf.Height, conf.Filter)
+ if err != nil {
+ return nil, err
+ }
+
+ // First crop using the bounds returned by smartCrop.
+ filters = append(filters, gift.Crop(bounds))
+ // Then center crop the image to get an image the desired size without resizing.
+ filters = append(filters, gift.CropToSize(conf.Width, conf.Height, gift.CenterAnchor))
+
+ } else {
+ filters = append(filters, gift.CropToSize(conf.Width, conf.Height, conf.Anchor))
+ }
+ case "fill":
+ if conf.AnchorStr == smartCropIdentifier {
+ bounds, err := p.smartCrop(src, conf.Width, conf.Height, conf.Filter)
+ if err != nil {
+ return nil, err
+ }
+
+ // First crop it, then resize it.
+ filters = append(filters, gift.Crop(bounds))
+ filters = append(filters, gift.Resize(conf.Width, conf.Height, conf.Filter))
+
+ } else {
+ filters = append(filters, gift.ResizeToFill(conf.Width, conf.Height, conf.Filter, conf.Anchor))
+ }
+ case "fit":
+ filters = append(filters, gift.ResizeToFit(conf.Width, conf.Height, conf.Filter))
+ default:
+ return nil, fmt.Errorf("unsupported action: %q", conf.Action)
+ }
+
+ img, err := p.Filter(src, filters...)
+ if err != nil {
+ return nil, err
+ }
+
+ return img, nil
+}
+
+// Filter applies the given filters to src. Animated GIFs (Giphy with more
+// than one frame) are processed frame by frame, mutating the GIF in place;
+// for plain images a destination of a type matching src is drawn and returned.
+func (p *ImageProcessor) Filter(src image.Image, filters ...gift.Filter) (image.Image, error) {
+
+ filter := gift.New(filters...)
+
+ if giph, ok := src.(Giphy); ok && len(giph.GIF().Image) > 1 {
+ g := giph.GIF()
+ var bounds image.Rectangle
+ firstFrame := g.Image[0]
+ tmp := image.NewNRGBA(firstFrame.Bounds())
+ for i := range g.Image {
+ // Composite each (possibly partial) frame onto the accumulated canvas,
+ // then filter the full canvas back into a paletted frame.
+ gift.New().DrawAt(tmp, g.Image[i], g.Image[i].Bounds().Min, gift.OverOperator)
+ bounds = filter.Bounds(tmp.Bounds())
+ dst := image.NewPaletted(bounds, g.Image[i].Palette)
+ filter.Draw(dst, tmp)
+ g.Image[i] = dst
+ }
+ // Record the (possibly resized) logical screen size.
+ g.Config.Width = bounds.Dx()
+ g.Config.Height = bounds.Dy()
+
+ return giph, nil
+ }
+
+ bounds := filter.Bounds(src.Bounds())
+
+ // Preserve the source's pixel representation where we can; everything
+ // else falls back to NRGBA.
+ var dst draw.Image
+ switch src.(type) {
+ case *image.RGBA:
+ dst = image.NewRGBA(bounds)
+ case *image.NRGBA:
+ dst = image.NewNRGBA(bounds)
+ case *image.Gray:
+ dst = image.NewGray(bounds)
+ default:
+ dst = image.NewNRGBA(bounds)
+ }
+ filter.Draw(dst, src)
+
+ return dst, nil
+}
+
+// GetDefaultImageConfig builds an ImageConfig for action seeded with the
+// configured default hint and quality.
+func GetDefaultImageConfig(action string, defaults ImagingConfig) ImageConfig {
+ return ImageConfig{
+ Action: action,
+ Hint: defaults.Hint,
+ Quality: defaults.Cfg.Quality,
+ }
+}
+
+// Spec provides access to the raw bytes of an image source.
+type Spec interface {
+ // Loads the image source.
+ ReadSeekCloser() (hugio.ReadSeekCloser, error)
+}
+
+// Format is an image file format.
+type Format int
+
+const (
+ JPEG Format = iota + 1
+ PNG
+ GIF
+ TIFF
+ BMP
+ WEBP
+)
+
+// RequiresDefaultQuality returns if the default quality needs to be applied to
+// images of this format.
+func (f Format) RequiresDefaultQuality() bool {
+ return f == JPEG || f == WEBP
+}
+
+// SupportsTransparency reports whether it supports transparency in any form.
+func (f Format) SupportsTransparency() bool {
+ return f != JPEG
+}
+
+// DefaultExtension returns the default file extension of this format, starting with a dot.
+// For example: .jpg for JPEG
+func (f Format) DefaultExtension() string {
+ return f.MediaType().FirstSuffix.FullSuffix
+}
+
+// MediaType returns the media type of this image, e.g. image/jpeg for JPEG
+func (f Format) MediaType() media.Type {
+ switch f {
+ case JPEG:
+ return media.JPEGType
+ case PNG:
+ return media.PNGType
+ case GIF:
+ return media.GIFType
+ case TIFF:
+ return media.TIFFType
+ case BMP:
+ return media.BMPType
+ case WEBP:
+ return media.WEBPType
+ default:
+ // A Format value outside the declared constants is a programmer error.
+ panic(fmt.Sprintf("%d is not a valid image format", f))
+ }
+}
+
+// imageConfig holds the lazily loaded image dimensions; configInit guards the
+// one-time decode. Must not be copied once used (contains a sync.Once).
+type imageConfig struct {
+ config image.Config
+ configInit sync.Once
+ configLoaded bool
+}
+
+// imageConfigFromImage derives an image.Config from an in-memory image.
+// NOTE(review): uses Bounds().Max rather than Dx/Dy, which assumes the bounds
+// start at (0,0); would be wrong for sub-images with a non-zero Min — TODO confirm.
+func imageConfigFromImage(img image.Image) image.Config {
+ b := img.Bounds()
+ return image.Config{Width: b.Max.X, Height: b.Max.Y}
+}
+
+// ToFilters normalizes template input (a single filter or a slice of filters)
+// into a []gift.Filter, panicking on any other type.
+func ToFilters(in any) []gift.Filter {
+ switch v := in.(type) {
+ case []gift.Filter:
+ return v
+ case []filter:
+ vv := make([]gift.Filter, len(v))
+ for i, f := range v {
+ vv[i] = f
+ }
+ return vv
+ case gift.Filter:
+ return []gift.Filter{v}
+ default:
+ panic(fmt.Sprintf("%T is not an image filter", in))
+ }
+}
+
+// IsOpaque reports whether img is fully opaque. It delegates to the image's
+// own Opaque method when available; images without one are conservatively
+// reported as not opaque, even if every pixel happens to be.
+func IsOpaque(img image.Image) bool {
+ if oim, ok := img.(interface {
+ Opaque() bool
+ }); ok {
+ return oim.Opaque()
+ }
+
+ return false
+}
+
+// ImageSource identifies and decodes an image.
+type ImageSource interface {
+ DecodeImage() (image.Image, error)
+ Key() string
+}
+
+// Giphy represents a GIF Image that may be animated.
+type Giphy interface {
+ image.Image // The first frame.
+ GIF() *gif.GIF // All frames.
+}
diff --git a/resources/images/image_resource.go b/resources/images/image_resource.go
new file mode 100644
index 000000000..e0fec15a0
--- /dev/null
+++ b/resources/images/image_resource.go
@@ -0,0 +1,53 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "image"
+
+ "github.com/gohugoio/hugo/resources/images/exif"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// ImageResource represents an image resource.
+type ImageResource interface {
+ resource.Resource
+ ImageResourceOps
+}
+
+// ImageResourceOps are the image-specific operations exposed to templates.
+type ImageResourceOps interface {
+ // Height returns the height of the Image.
+ Height() int
+ // Width returns the width of the Image.
+ Width() int
+
+ // Crop an image to match the given dimensions without resizing.
+ // You must provide both width and height.
+ // Use the anchor option to change the crop box anchor point.
+ // {{ $image := $image.Crop "600x400" }}
+ Crop(spec string) (ImageResource, error)
+ // Fill crops and resizes to exactly match the given dimensions.
+ Fill(spec string) (ImageResource, error)
+ // Fit scales down to fit within the given dimensions, keeping aspect ratio.
+ Fit(spec string) (ImageResource, error)
+ // Resize scales to the given dimensions.
+ Resize(spec string) (ImageResource, error)
+
+ // Filter applies one or more filters to an Image.
+ // {{ $image := $image.Filter (images.GaussianBlur 6) (images.Pixelate 8) }}
+ Filter(filters ...any) (ImageResource, error)
+
+ // Exif returns an ExifInfo object containing Image metadata.
+ Exif() *exif.ExifInfo
+
+ // Internal
+ DecodeImage() (image.Image, error)
+}
diff --git a/resources/images/overlay.go b/resources/images/overlay.go
new file mode 100644
index 000000000..780e28fd1
--- /dev/null
+++ b/resources/images/overlay.go
@@ -0,0 +1,43 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "fmt"
+ "image"
+ "image/draw"
+
+ "github.com/disintegration/gift"
+)
+
+var _ gift.Filter = (*overlayFilter)(nil)
+
+// overlayFilter draws the image from src on top of the filtered image at (x, y).
+type overlayFilter struct {
+ src ImageSource
+ x, y int
+}
+
+// Draw implements gift.Filter. A decode failure of the overlay source panics;
+// gift's Filter interface offers no error return, and the caller recovers.
+func (f overlayFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
+ overlaySrc, err := f.src.DecodeImage()
+ if err != nil {
+ panic(fmt.Sprintf("failed to decode image: %s", err))
+ }
+
+ gift.New().Draw(dst, src)
+ gift.New().DrawAt(dst, overlaySrc, image.Pt(f.x, f.y), gift.OverOperator)
+}
+
+// Bounds implements gift.Filter: the output keeps the source's size,
+// normalized to a zero origin.
+func (f overlayFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
+ return image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy())
+}
diff --git a/resources/images/resampling.go b/resources/images/resampling.go
new file mode 100644
index 000000000..0cb267684
--- /dev/null
+++ b/resources/images/resampling.go
@@ -0,0 +1,214 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import "math"
+
+// We moved from imaging to the gift package for image processing at some point.
+// That package had more, but also fewer, resampling filters. So we add the missing
+// ones here. They are fairly exotic, but someone may use them, so keep them here
+// for now.
+//
+// The filters below are ported from https://github.com/disintegration/imaging/blob/9aab30e6aa535fe3337b489b76759ef97dfaf362/resize.go#L369
+// MIT License.
+
+var (
+ // Hermite cubic spline filter (BC-spline; B=0; C=0).
+ hermiteResampling = resamp{
+ name: "Hermite",
+ support: 1.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 1.0 {
+ return bcspline(x, 0.0, 0.0)
+ }
+ return 0
+ },
+ }
+
+ // Mitchell-Netravali cubic filter (BC-spline; B=1/3; C=1/3).
+ mitchellNetravaliResampling = resamp{
+ name: "MitchellNetravali",
+ support: 2.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 2.0 {
+ return bcspline(x, 1.0/3.0, 1.0/3.0)
+ }
+ return 0
+ },
+ }
+
+ // Catmull-Rom - sharp cubic filter (BC-spline; B=0; C=0.5).
+ catmullRomResampling = resamp{
+ name: "CatmullRomResampling",
+ support: 2.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 2.0 {
+ return bcspline(x, 0.0, 0.5)
+ }
+ return 0
+ },
+ }
+
+ // BSpline is a smooth cubic filter (BC-spline; B=1; C=0).
+ bSplineResampling = resamp{
+ name: "BSplineResampling",
+ support: 2.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 2.0 {
+ return bcspline(x, 1.0, 0.0)
+ }
+ return 0
+ },
+ }
+
+ // Gaussian blurring filter.
+ gaussianResampling = resamp{
+ name: "GaussianResampling",
+ support: 2.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 2.0 {
+ return float32(math.Exp(float64(-2 * x * x)))
+ }
+ return 0
+ },
+ }
+
+ // Hann-windowed sinc filter (3 lobes).
+ hannResampling = resamp{
+ name: "HannResampling",
+ support: 3.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 3.0 {
+ return sinc(x) * float32(0.5+0.5*math.Cos(math.Pi*float64(x)/3.0))
+ }
+ return 0
+ },
+ }
+
+ hammingResampling = resamp{
+ name: "HammingResampling",
+ support: 3.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 3.0 {
+ return sinc(x) * float32(0.54+0.46*math.Cos(math.Pi*float64(x)/3.0))
+ }
+ return 0
+ },
+ }
+
+ // Blackman-windowed sinc filter (3 lobes).
+ blackmanResampling = resamp{
+ name: "BlackmanResampling",
+ support: 3.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 3.0 {
+ return sinc(x) * float32(0.42-0.5*math.Cos(math.Pi*float64(x)/3.0+math.Pi)+0.08*math.Cos(2.0*math.Pi*float64(x)/3.0))
+ }
+ return 0
+ },
+ }
+
+ bartlettResampling = resamp{
+ name: "BartlettResampling",
+ support: 3.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 3.0 {
+ return sinc(x) * (3.0 - x) / 3.0
+ }
+ return 0
+ },
+ }
+
+ // Welch-windowed sinc filter (parabolic window, 3 lobes).
+ welchResampling = resamp{
+ name: "WelchResampling",
+ support: 3.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 3.0 {
+ return sinc(x) * (1.0 - (x * x / 9.0))
+ }
+ return 0
+ },
+ }
+
+ // Cosine-windowed sinc filter (3 lobes).
+ cosineResampling = resamp{
+ name: "CosineResampling",
+ support: 3.0,
+ kernel: func(x float32) float32 {
+ x = absf32(x)
+ if x < 3.0 {
+ return sinc(x) * float32(math.Cos((math.Pi/2.0)*(float64(x)/3.0)))
+ }
+ return 0
+ },
+ }
+)
+
+// The following code is borrowed from https://raw.githubusercontent.com/disintegration/gift/master/resize.go
+// MIT licensed.
+type resamp struct {
+ name string
+ support float32
+ kernel func(float32) float32
+}
+
+func (r resamp) String() string {
+ return r.name
+}
+
+func (r resamp) Support() float32 {
+ return r.support
+}
+
+func (r resamp) Kernel(x float32) float32 {
+ return r.kernel(x)
+}
+
+func bcspline(x, b, c float32) float32 {
+ if x < 0 {
+ x = -x
+ }
+ if x < 1 {
+ return ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6
+ }
+ if x < 2 {
+ return ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6
+ }
+ return 0
+}
+
+func absf32(x float32) float32 {
+ if x < 0 {
+ return -x
+ }
+ return x
+}
+
+func sinc(x float32) float32 {
+ if x == 0 {
+ return 1
+ }
+ return float32(math.Sin(math.Pi*float64(x)) / (math.Pi * float64(x)))
+}
diff --git a/resources/images/smartcrop.go b/resources/images/smartcrop.go
new file mode 100644
index 000000000..864c6de0a
--- /dev/null
+++ b/resources/images/smartcrop.go
@@ -0,0 +1,104 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "image"
+ "math"
+
+ "github.com/disintegration/gift"
+
+ "github.com/muesli/smartcrop"
+)
+
+const (
+ // Do not change.
+ smartCropIdentifier = "smart"
+
+ // This is just an increment, starting at 1. If Smart Crop improves its cropping, we
+ // need a way to trigger a re-generation of the crops in the wild, so increment this.
+ smartCropVersionNumber = 1
+)
+
+func (p *ImageProcessor) newSmartCropAnalyzer(filter gift.Resampling) smartcrop.Analyzer {
+ return smartcrop.NewAnalyzer(imagingResizer{p: p, filter: filter})
+}
+
+// Needed by smartcrop
+type imagingResizer struct {
+ p *ImageProcessor
+ filter gift.Resampling
+}
+
+func (r imagingResizer) Resize(img image.Image, width, height uint) image.Image {
+ // See https://github.com/gohugoio/hugo/issues/7955#issuecomment-861710681
+ scaleX, scaleY := calcFactorsNfnt(width, height, float64(img.Bounds().Dx()), float64(img.Bounds().Dy()))
+ if width == 0 {
+ width = uint(math.Ceil(float64(img.Bounds().Dx()) / scaleX))
+ }
+ if height == 0 {
+ height = uint(math.Ceil(float64(img.Bounds().Dy()) / scaleY))
+ }
+ result, _ := r.p.Filter(img, gift.Resize(int(width), int(height), r.filter))
+ return result
+}
+
+func (p *ImageProcessor) smartCrop(img image.Image, width, height int, filter gift.Resampling) (image.Rectangle, error) {
+ if width <= 0 || height <= 0 {
+ return image.Rectangle{}, nil
+ }
+
+ srcBounds := img.Bounds()
+ srcW := srcBounds.Dx()
+ srcH := srcBounds.Dy()
+
+ if srcW <= 0 || srcH <= 0 {
+ return image.Rectangle{}, nil
+ }
+
+ if srcW == width && srcH == height {
+ return srcBounds, nil
+ }
+
+ smart := p.newSmartCropAnalyzer(filter)
+
+ rect, err := smart.FindBestCrop(img, width, height)
+ if err != nil {
+ return image.Rectangle{}, err
+ }
+
+ return img.Bounds().Intersect(rect), nil
+}
+
+// Calculates scaling factors using old and new image dimensions.
+// Code borrowed from https://github.com/nfnt/resize/blob/83c6a9932646f83e3267f353373d47347b6036b2/resize.go#L593
+func calcFactorsNfnt(width, height uint, oldWidth, oldHeight float64) (scaleX, scaleY float64) {
+ if width == 0 {
+ if height == 0 {
+ scaleX = 1.0
+ scaleY = 1.0
+ } else {
+ scaleY = oldHeight / float64(height)
+ scaleX = scaleY
+ }
+ } else {
+ scaleX = oldWidth / float64(width)
+ if height == 0 {
+ scaleY = scaleX
+ } else {
+ scaleY = oldHeight / float64(height)
+ }
+ }
+ return
+}
diff --git a/resources/images/text.go b/resources/images/text.go
new file mode 100644
index 000000000..cc67a5d1d
--- /dev/null
+++ b/resources/images/text.go
@@ -0,0 +1,108 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "image"
+ "image/draw"
+ "io"
+ "strings"
+
+ "github.com/disintegration/gift"
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "golang.org/x/image/font"
+ "golang.org/x/image/font/gofont/goregular"
+ "golang.org/x/image/font/opentype"
+ "golang.org/x/image/math/fixed"
+)
+
+var _ gift.Filter = (*textFilter)(nil)
+
+type textFilter struct {
+ text, color string
+ x, y int
+ size float64
+ linespacing int
+ fontSource hugio.ReadSeekCloserProvider
+}
+
+func (f textFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
+ color, err := hexStringToColor(f.color)
+ if err != nil {
+ panic(err)
+ }
+
+ // Load and parse font
+ ttf := goregular.TTF
+ if f.fontSource != nil {
+ rs, err := f.fontSource.ReadSeekCloser()
+ if err != nil {
+ panic(err)
+ }
+ defer rs.Close()
+ ttf, err = io.ReadAll(rs)
+ if err != nil {
+ panic(err)
+ }
+ }
+ otf, err := opentype.Parse(ttf)
+ if err != nil {
+ panic(err)
+ }
+
+ // Set font options
+ face, err := opentype.NewFace(otf, &opentype.FaceOptions{
+ Size: f.size,
+ DPI: 72,
+ Hinting: font.HintingNone,
+ })
+ if err != nil {
+ panic(err)
+ }
+
+ d := font.Drawer{
+ Dst: dst,
+ Src: image.NewUniform(color),
+ Face: face,
+ }
+
+ gift.New().Draw(dst, src)
+
+ // Draw text, consider and include linebreaks
+ maxWidth := dst.Bounds().Dx() - 20
+ fontHeight := face.Metrics().Ascent.Ceil()
+
+ // Correct y position based on font and size
+ f.y = f.y + fontHeight
+
+ // Start position
+ y := f.y
+ d.Dot = fixed.P(f.x, f.y)
+
+ // Draw text and break line at max width
+ parts := strings.Fields(f.text)
+ for _, str := range parts {
+ strWith := font.MeasureString(face, str)
+ if (d.Dot.X.Ceil() + strWith.Ceil()) >= maxWidth {
+ y = y + fontHeight + f.linespacing
+ d.Dot = fixed.P(f.x, y)
+ }
+ d.DrawString(str + " ")
+ }
+}
+
+func (f textFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
+ return image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy())
+}
diff --git a/resources/images/webp/webp.go b/resources/images/webp/webp.go
new file mode 100644
index 000000000..28336d2e0
--- /dev/null
+++ b/resources/images/webp/webp.go
@@ -0,0 +1,36 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build extended
+// +build extended
+
+package webp
+
+import (
+ "image"
+ "io"
+
+ "github.com/bep/gowebp/libwebp"
+ "github.com/bep/gowebp/libwebp/webpoptions"
+)
+
+// Encode writes the Image m to w in Webp format with the given
+// options.
+func Encode(w io.Writer, m image.Image, o webpoptions.EncodingOptions) error {
+ return libwebp.Encode(w, m, o)
+}
+
+// Supports returns whether webp encoding is supported in this build.
+func Supports() bool {
+ return true
+}
diff --git a/resources/images/webp/webp_notavailable.go b/resources/images/webp/webp_notavailable.go
new file mode 100644
index 000000000..70407f94e
--- /dev/null
+++ b/resources/images/webp/webp_notavailable.go
@@ -0,0 +1,36 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !extended
+// +build !extended
+
+package webp
+
+import (
+ "image"
+ "io"
+
+ "github.com/gohugoio/hugo/common/herrors"
+
+ "github.com/bep/gowebp/libwebp/webpoptions"
+)
+
+// Encode is only available in the extended version.
+func Encode(w io.Writer, m image.Image, o webpoptions.EncodingOptions) error {
+ return herrors.ErrFeatureNotAvailable
+}
+
+// Supports returns whether webp encoding is supported in this build.
+func Supports() bool {
+ return false
+}
diff --git a/resources/integration_test.go b/resources/integration_test.go
new file mode 100644
index 000000000..92abcb612
--- /dev/null
+++ b/resources/integration_test.go
@@ -0,0 +1,96 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources_test
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+// Issue 8931
+func TestImageCache(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = "https://example.org"
+-- content/mybundle/index.md --
+---
+title: "My Bundle"
+---
+-- content/mybundle/pixel.png --
+iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==
+-- layouts/foo.html --
+-- layouts/index.html --
+{{ $p := site.GetPage "mybundle"}}
+{{ $img := $p.Resources.Get "pixel.png" }}
+{{ $gif := $img.Resize "1x1 gif" }}
+{{ $bmp := $img.Resize "1x1 bmp" }}
+
+gif: {{ $gif.RelPermalink }}|{{ $gif.MediaType }}|
+bmp: {{ $bmp.RelPermalink }}|{{ $bmp.MediaType }}|
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ Running: true,
+ }).Build()
+
+ assertImages := func() {
+ b.AssertFileContent("public/index.html", `
+ gif: /mybundle/pixel_hu8aa3346827e49d756ff4e630147c42b5_70_1x1_resize_box_3.gif|image/gif|
+ bmp: /mybundle/pixel_hu8aa3346827e49d756ff4e630147c42b5_70_1x1_resize_box_3.bmp|image/bmp|
+
+ `)
+ }
+
+ assertImages()
+
+ b.EditFileReplace("content/mybundle/index.md", func(s string) string { return strings.ReplaceAll(s, "Bundle", "BUNDLE") })
+ b.Build()
+
+ assertImages()
+
+}
+
+func TestSVGError(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- assets/circle.svg --
+<svg height="100" width="100"><circle cx="50" cy="50" r="40" stroke="black" stroke-width="3" fill="red" /></svg>
+-- layouts/index.html --
+{{ $svg := resources.Get "circle.svg" }}
+Width: {{ $svg.Width }}
+`
+
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ Running: true,
+ }).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, `error calling Width: this method is only available for raster images. To determine if an image is SVG, you can do {{ if eq .MediaType.SubType "svg" }}{{ end }}`)
+
+}
diff --git a/resources/internal/key.go b/resources/internal/key.go
new file mode 100644
index 000000000..1b45d4cc4
--- /dev/null
+++ b/resources/internal/key.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import "github.com/gohugoio/hugo/helpers"
+
+// ResourceTransformationKey are provided by the different transformation implementations.
+// It identifies the transformation (name) and its configuration (elements).
+// We combine this in a chain with the rest of the transformations
+// with the target filename and a content hash of the origin to use as cache key.
+type ResourceTransformationKey struct {
+ Name string
+ elements []any
+}
+
+// NewResourceTransformationKey creates a new ResourceTransformationKey from the transformation
+// name and elements. We will create a 64 bit FNV hash from the elements, which when combined
+// with the other key elements should be unique for all practical applications.
+func NewResourceTransformationKey(name string, elements ...any) ResourceTransformationKey {
+ return ResourceTransformationKey{Name: name, elements: elements}
+}
+
+// Value returns the Key as a string.
+// Do not change this without good reasons.
+func (k ResourceTransformationKey) Value() string {
+ if len(k.elements) == 0 {
+ return k.Name
+ }
+
+ return k.Name + "_" + helpers.HashString(k.elements...)
+}
diff --git a/resources/internal/key_test.go b/resources/internal/key_test.go
new file mode 100644
index 000000000..38286333d
--- /dev/null
+++ b/resources/internal/key_test.go
@@ -0,0 +1,36 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+type testStruct struct {
+ Name string
+ V1 int64
+ V2 int32
+ V3 int
+ V4 uint64
+}
+
+func TestResourceTransformationKey(t *testing.T) {
+ // We really need this key to be portable across OSes.
+ key := NewResourceTransformationKey("testing",
+ testStruct{Name: "test", V1: int64(10), V2: int32(20), V3: 30, V4: uint64(40)})
+ c := qt.New(t)
+ c.Assert(key.Value(), qt.Equals, "testing_518996646957295636")
+}
diff --git a/resources/jsconfig/jsconfig.go b/resources/jsconfig/jsconfig.go
new file mode 100644
index 000000000..1fd6d6103
--- /dev/null
+++ b/resources/jsconfig/jsconfig.go
@@ -0,0 +1,92 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package jsconfig
+
+import (
+ "path/filepath"
+ "sort"
+ "sync"
+)
+
+// Builder builds a jsconfig.json file that, currently, is used only to assist
+// intellisense in editors.
+type Builder struct {
+ sourceRootsMu sync.RWMutex
+ sourceRoots map[string]bool
+}
+
+// NewBuilder creates a new Builder.
+func NewBuilder() *Builder {
+ return &Builder{sourceRoots: make(map[string]bool)}
+}
+
+// Build builds a new Config with paths relative to dir.
+// This method is thread safe.
+func (b *Builder) Build(dir string) *Config {
+ b.sourceRootsMu.RLock()
+ defer b.sourceRootsMu.RUnlock()
+
+ if len(b.sourceRoots) == 0 {
+ return nil
+ }
+ conf := newJSConfig()
+
+ var roots []string
+ for root := range b.sourceRoots {
+ rel, err := filepath.Rel(dir, filepath.Join(root, "*"))
+ if err == nil {
+ roots = append(roots, rel)
+ }
+ }
+ sort.Strings(roots)
+ conf.CompilerOptions.Paths["*"] = roots
+
+ return conf
+}
+
+// AddSourceRoot adds a new source root.
+// This method is thread safe.
+func (b *Builder) AddSourceRoot(root string) {
+ b.sourceRootsMu.RLock()
+ found := b.sourceRoots[root]
+ b.sourceRootsMu.RUnlock()
+
+ if found {
+ return
+ }
+
+ b.sourceRootsMu.Lock()
+ b.sourceRoots[root] = true
+ b.sourceRootsMu.Unlock()
+}
+
+// CompilerOptions holds compilerOptions for jsconfig.json.
+type CompilerOptions struct {
+ BaseURL string `json:"baseUrl"`
+ Paths map[string][]string `json:"paths"`
+}
+
+// Config holds the data for jsconfig.json.
+type Config struct {
+ CompilerOptions CompilerOptions `json:"compilerOptions"`
+}
+
+func newJSConfig() *Config {
+ return &Config{
+ CompilerOptions: CompilerOptions{
+ BaseURL: ".",
+ Paths: make(map[string][]string),
+ },
+ }
+}
diff --git a/resources/jsconfig/jsconfig_test.go b/resources/jsconfig/jsconfig_test.go
new file mode 100644
index 000000000..9a9657843
--- /dev/null
+++ b/resources/jsconfig/jsconfig_test.go
@@ -0,0 +1,35 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package jsconfig
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestJsConfigBuilder(t *testing.T) {
+ c := qt.New(t)
+
+ b := NewBuilder()
+ b.AddSourceRoot("/c/assets")
+ b.AddSourceRoot("/d/assets")
+
+ conf := b.Build("/a/b")
+ c.Assert(conf.CompilerOptions.BaseURL, qt.Equals, ".")
+ c.Assert(conf.CompilerOptions.Paths["*"], qt.DeepEquals, []string{filepath.FromSlash("../../c/assets/*"), filepath.FromSlash("../../d/assets/*")})
+
+ c.Assert(NewBuilder().Build("/a/b"), qt.IsNil)
+}
diff --git a/resources/page/integration_test.go b/resources/page/integration_test.go
new file mode 100644
index 000000000..9dc322b4a
--- /dev/null
+++ b/resources/page/integration_test.go
@@ -0,0 +1,138 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestGroupByLocalizedDate(t *testing.T) {
+
+ files := `
+-- config.toml --
+defaultContentLanguage = 'en'
+defaultContentLanguageInSubdir = true
+[languages]
+[languages.en]
+title = 'My blog'
+weight = 1
+[languages.fr]
+title = 'Mon blogue'
+weight = 2
+[languages.nn]
+title = 'Bloggen min'
+weight = 3
+-- content/p1.md --
+---
+title: "Post 1"
+date: "2020-01-01"
+---
+-- content/p2.md --
+---
+title: "Post 2"
+date: "2020-02-01"
+---
+-- content/p1.fr.md --
+---
+title: "Post 1"
+date: "2020-01-01"
+---
+-- content/p2.fr.md --
+---
+title: "Post 2"
+date: "2020-02-01"
+---
+-- layouts/index.html --
+{{ range $k, $v := site.RegularPages.GroupByDate "January, 2006" }}{{ $k }}|{{ $v.Key }}|{{ $v.Pages }}{{ end }}
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ }).Build()
+
+ b.AssertFileContent("public/en/index.html", "0|February, 2020|Pages(1)1|January, 2020|Pages(1)")
+ b.AssertFileContent("public/fr/index.html", "0|février, 2020|Pages(1)1|janvier, 2020|Pages(1)")
+}
+
+func TestPagesSortCollation(t *testing.T) {
+
+ files := `
+-- config.toml --
+defaultContentLanguage = 'en'
+defaultContentLanguageInSubdir = true
+[languages]
+[languages.en]
+title = 'My blog'
+weight = 1
+[languages.fr]
+title = 'Mon blogue'
+weight = 2
+[languages.nn]
+title = 'Bloggen min'
+weight = 3
+-- content/p1.md --
+---
+title: "zulu"
+date: "2020-01-01"
+param1: "xylophone"
+tags: ["xylophone", "éclair", "zulu", "emma"]
+---
+-- content/p2.md --
+---
+title: "émotion"
+date: "2020-01-01"
+param1: "violin"
+---
+-- content/p3.md --
+---
+title: "alpha"
+date: "2020-01-01"
+param1: "éclair"
+---
+-- layouts/index.html --
+ByTitle: {{ range site.RegularPages.ByTitle }}{{ .Title }}|{{ end }}
+ByLinkTitle: {{ range site.RegularPages.ByLinkTitle }}{{ .Title }}|{{ end }}
+ByParam: {{ range site.RegularPages.ByParam "param1" }}{{ .Params.param1 }}|{{ end }}
+Tags Alphabetical: {{ range site.Taxonomies.tags.Alphabetical }}{{ .Term }}|{{ end }}
+GroupBy: {{ range site.RegularPages.GroupBy "Title" }}{{ .Key }}|{{ end }}
+{{ with (site.GetPage "p1").Params.tags }}
+Sort: {{ sort . }}
+ByWeight: {{ range site.RegularPages.ByWeight }}{{ .Title }}|{{ end }}
+{{ end }}
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ }).Build()
+
+ b.AssertFileContent("public/en/index.html", `
+ByTitle: alpha|émotion|zulu|
+ByLinkTitle: alpha|émotion|zulu|
+ByParam: éclair|violin|xylophone
+Tags Alphabetical: éclair|emma|xylophone|zulu|
+GroupBy: alpha|émotion|zulu|
+Sort: [éclair emma xylophone zulu]
+ByWeight: alpha|émotion|zulu|
+`)
+}
diff --git a/resources/page/page.go b/resources/page/page.go
new file mode 100644
index 000000000..50459c465
--- /dev/null
+++ b/resources/page/page.go
@@ -0,0 +1,420 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "html/template"
+
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/compare"
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/navigation"
+ "github.com/gohugoio/hugo/related"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/source"
+)
+
+// Clear clears any global package state.
+func Clear() error {
+ spc.clear()
+ return nil
+}
+
+// AlternativeOutputFormatsProvider provides alternative output formats for a
+// Page.
+type AlternativeOutputFormatsProvider interface {
+ // AlternativeOutputFormats gives the alternative output formats for the
+ // current output.
+ // Note that we use the term "alternative" and not "alternate" here, as it
+ // does not necessarily replace the other format, it is an alternative representation.
+ AlternativeOutputFormats() OutputFormats
+}
+
+// AuthorProvider provides author information.
+type AuthorProvider interface {
+ // Deprecated.
+ Author() Author
+ // Deprecated.
+ Authors() AuthorList
+}
+
+// ChildCareProvider provides accessors to child resources.
+type ChildCareProvider interface {
+ Pages() Pages
+
+ // RegularPages returns a list of pages of kind 'Page'.
+ // In Hugo 0.57 we changed the Pages method so it returns all page
+ // kinds, even sections. If you want the old behaviour, you can
+ // use RegularPages.
+ RegularPages() Pages
+
+ // RegularPagesRecursive returns all regular pages below the current
+ // section.
+ RegularPagesRecursive() Pages
+
+ Resources() resource.Resources
+}
+
+// ContentProvider provides the content related values for a Page.
+type ContentProvider interface {
+ Content() (any, error)
+
+ // Plain returns the Page Content stripped of HTML markup.
+ Plain() string
+
+ // PlainWords returns a string slice from splitting Plain using https://pkg.go.dev/strings#Fields.
+ PlainWords() []string
+
+ // Summary returns a generated summary of the content.
+ // The breakpoint can be set manually by inserting a summary separator in the source file.
+ Summary() template.HTML
+
+ // Truncated returns whether the Summary is truncated or not.
+ Truncated() bool
+
+ // FuzzyWordCount returns the approximate number of words in the content.
+ FuzzyWordCount() int
+
+ // WordCount returns the number of words in the content.
+ WordCount() int
+
+ // ReadingTime returns the reading time based on the length of plain text.
+ ReadingTime() int
+
+ // Len returns the length of the content.
+ Len() int
+}
+
+// FileProvider provides the source file.
+type FileProvider interface {
+ File() source.File
+}
+
+// GetPageProvider provides the GetPage method.
+type GetPageProvider interface {
+ // GetPage looks up a page for the given ref.
+ // {{ with .GetPage "blog" }}{{ .Title }}{{ end }}
+ //
+ // This will return nil when no page could be found, and will return
+ // an error if the ref is ambiguous.
+ GetPage(ref string) (Page, error)
+
+ // GetPageWithTemplateInfo is for internal use only.
+ GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error)
+}
+
+// GitInfoProvider provides Git info.
+type GitInfoProvider interface {
+ GitInfo() *gitmap.GitInfo
+ CodeOwners() []string
+}
+
+// InSectionPositioner provides section navigation.
+type InSectionPositioner interface {
+ NextInSection() Page
+ PrevInSection() Page
+}
+
+// InternalDependencies is considered an internal interface.
+type InternalDependencies interface {
+ // GetRelatedDocsHandler is for internal use only.
+ GetRelatedDocsHandler() *RelatedDocsHandler
+}
+
+// OutputFormatsProvider provides the OutputFormats of a Page.
+type OutputFormatsProvider interface {
+ OutputFormats() OutputFormats
+}
+
+// Page is the core interface in Hugo.
+type Page interface {
+ ContentProvider
+ TableOfContentsProvider
+ PageWithoutContent
+}
+
+// PageMetaProvider provides page metadata, typically provided via front matter.
+type PageMetaProvider interface {
+ // The 4 page dates
+ resource.Dated
+
+ // Aliases forms the base for redirects generation.
+ Aliases() []string
+
+ // BundleType returns the bundle type: `leaf`, `branch` or an empty string.
+ BundleType() files.ContentClass
+
+ // A configured description.
+ Description() string
+
+ // Whether this is a draft. Will only be true if run with the --buildDrafts (-D) flag.
+ Draft() bool
+
+ // IsHome returns whether this is the home page.
+ IsHome() bool
+
+ // Configured keywords.
+ Keywords() []string
+
+ // The Page Kind. One of page, home, section, taxonomy, term.
+ Kind() string
+
+ // The configured layout to use to render this page. Typically set in front matter.
+ Layout() string
+
+ // The title used for links.
+ LinkTitle() string
+
+ // IsNode returns whether this is an item of one of the list types in Hugo,
+ // i.e. not a regular content page.
+ IsNode() bool
+
+ // IsPage returns whether this is a regular content page.
+ IsPage() bool
+
+ // Param looks for a param in Page and then in Site config.
+ Param(key any) (any, error)
+
+ // Path gets the relative path, including file name and extension if relevant,
+ // to the source of this Page. It will be relative to any content root.
+ Path() string
+
+ // This is just a temporary bridge method. Use Path in templates.
+ // Pathc is for internal usage only.
+ Pathc() string
+
+ // The slug, typically defined in front matter.
+ Slug() string
+
+ // This page's language code. Will be the same as the site's.
+ Lang() string
+
+ // IsSection returns whether this is a section
+ IsSection() bool
+
+ // Section returns the first path element below the content root.
+ Section() string
+
+ // Returns a slice of sections (directories if it's a file) to this
+ // Page.
+ SectionsEntries() []string
+
+ // SectionsPath is SectionsEntries joined with a /.
+ SectionsPath() string
+
+ // Sitemap returns the sitemap configuration for this page.
+ Sitemap() config.Sitemap
+
+ // Type is a discriminator used to select layouts etc. It is typically set
+ // in front matter, but will fall back to the root section.
+ Type() string
+
+ // The configured weight, used as the first sort value in the default
+ // page sort if non-zero.
+ Weight() int
+}
+
+// PageRenderProvider provides a way for a Page to render content.
+type PageRenderProvider interface {
+ Render(layout ...string) (template.HTML, error)
+ RenderString(args ...any) (template.HTML, error)
+}
+
+// PageWithoutContent is the Page without any of the content methods.
+type PageWithoutContent interface {
+ RawContentProvider
+ resource.Resource
+ PageMetaProvider
+ resource.LanguageProvider
+
+ // For pages backed by a file.
+ FileProvider
+
+ GitInfoProvider
+
+ // Output formats
+ OutputFormatsProvider
+ AlternativeOutputFormatsProvider
+
+ // Tree navigation
+ ChildCareProvider
+ TreeProvider
+
+ // Horizontal navigation
+ InSectionPositioner
+ PageRenderProvider
+ PaginatorProvider
+ Positioner
+ navigation.PageMenusProvider
+
+ // TODO(bep)
+ AuthorProvider
+
+ // Page lookups/refs
+ GetPageProvider
+ RefProvider
+
+ resource.TranslationKeyProvider
+ TranslationsProvider
+
+ SitesProvider
+
+ // Helper methods
+ ShortcodeInfoProvider
+ compare.Eqer
+
+ // Scratch returns a Scratch that can be used to store temporary state.
+ // Note that this Scratch gets reset on server rebuilds. See Store() for a variant that survives.
+ maps.Scratcher
+
+ // Store returns a Scratch that can be used to store temporary state.
+ // In contrast to Scratch(), this Scratch is not reset on server rebuilds.
+ Store() *maps.Scratch
+
+ RelatedKeywordsProvider
+
+ // GetTerms gets the terms of a given taxonomy,
+ // e.g. GetTerms("categories")
+ GetTerms(taxonomy string) Pages
+
+ // Used in change/dependency tracking.
+ identity.Provider
+
+ DeprecatedWarningPageMethods
+}
+
+// Positioner provides next/prev navigation.
+type Positioner interface {
+ Next() Page
+ Prev() Page
+
+ // Deprecated: Use Prev. Will be removed in Hugo 0.57
+ PrevPage() Page
+
+ // Deprecated: Use Next. Will be removed in Hugo 0.57
+ NextPage() Page
+}
+
+// RawContentProvider provides the raw, unprocessed content of the page.
+type RawContentProvider interface {
+ RawContent() string
+}
+
+// RefProvider provides the methods needed to create reflinks to pages.
+type RefProvider interface {
+ Ref(argsm map[string]any) (string, error)
+
+ // RefFrom is for internal use only.
+ RefFrom(argsm map[string]any, source any) (string, error)
+
+ RelRef(argsm map[string]any) (string, error)
+
+ // RelRefFrom is for internal use only.
+ RelRefFrom(argsm map[string]any, source any) (string, error)
+}
+
+// RelatedKeywordsProvider allows a Page to be indexed.
+type RelatedKeywordsProvider interface {
+ // Make it indexable as a related.Document
+ // RelatedKeywords is meant for internal usage only.
+ RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error)
+}
+
+// ShortcodeInfoProvider provides info about the shortcodes in a Page.
+type ShortcodeInfoProvider interface {
+ // HasShortcode return whether the page has a shortcode with the given name.
+ // This method is mainly motivated with the Hugo Docs site's need for a list
+ // of pages with the `todo` shortcode in it.
+ HasShortcode(name string) bool
+}
+
+// SitesProvider provide accessors to get sites.
+type SitesProvider interface {
+ Site() Site
+ Sites() Sites
+}
+
+// TableOfContentsProvider provides the table of contents for a Page.
+type TableOfContentsProvider interface {
+ TableOfContents() template.HTML
+}
+
+// TranslationsProvider provides access to any translations.
+type TranslationsProvider interface {
+
+ // IsTranslated returns whether this content file is translated to
+ // other language(s).
+ IsTranslated() bool
+
+ // AllTranslations returns all translations, including the current Page.
+ AllTranslations() Pages
+
+ // Translations returns the translations excluding the current Page.
+ Translations() Pages
+}
+
+// TreeProvider provides section tree navigation.
+type TreeProvider interface {
+
+ // IsAncestor returns whether the current page is an ancestor of the given page.
+ // Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
+ IsAncestor(other any) (bool, error)
+
+ // CurrentSection returns the page's current section or the page itself if home or a section.
+ // Note that this will return nil for pages that are not regular, home or section pages.
+ CurrentSection() Page
+
+ // IsDescendant returns whether the current page is a descendant of the given page.
+ // Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
+ IsDescendant(other any) (bool, error)
+
+ // FirstSection returns the section on level 1 below home, e.g. "/docs".
+ // For the home page, this will return itself.
+ FirstSection() Page
+
+ // InSection returns whether the given page is in the current section.
+ // Note that this will always return false for pages that are
+ // not either regular, home or section pages.
+ InSection(other any) (bool, error)
+
+ // Parent returns a section's parent section or a page's section.
+ // To get a section's subsections, see Page's Sections method.
+ Parent() Page
+
+ // Sections returns this section's subsections, if any.
+ // Note that for non-sections, this method will always return an empty list.
+ Sections() Pages
+
+ // Page returns a reference to the Page itself, kept here mostly
+ // for legacy reasons.
+ Page() Page
+}
+
+// DeprecatedWarningPageMethods lists deprecated Page methods that will trigger
+// a WARNING if invoked.
+// This was added in Hugo 0.55.
+type DeprecatedWarningPageMethods any // This was emptied in Hugo 0.93.0.
+
+// Move here to trigger ERROR instead of WARNING.
+// TODO(bep) create wrappers and put into the Page once it has some methods.
+type DeprecatedErrorPageMethods any
diff --git a/resources/page/page_author.go b/resources/page/page_author.go
new file mode 100644
index 000000000..58be20426
--- /dev/null
+++ b/resources/page/page_author.go
@@ -0,0 +1,44 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+// AuthorList is a list of all authors and their metadata.
+type AuthorList map[string]Author
+
+// Author contains details about the author of a page.
+type Author struct {
+ GivenName string
+ FamilyName string
+ DisplayName string
+ Thumbnail string
+ Image string
+ ShortBio string
+ LongBio string
+ Email string
+ Social AuthorSocial
+}
+
+// AuthorSocial is a place to put social details per author. These are the
+// standard keys that themes will expect to have available, but can be
+// expanded to any others on a per site basis
+// - website
+// - github
+// - facebook
+// - twitter
+// - pinterest
+// - instagram
+// - youtube
+// - linkedin
+// - skype
+type AuthorSocial map[string]string
diff --git a/resources/page/page_data.go b/resources/page/page_data.go
new file mode 100644
index 000000000..a7806438a
--- /dev/null
+++ b/resources/page/page_data.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "fmt"
+)
+
+// Data represents the .Data element in a Page in Hugo. We make this
+// a type so we can do lazy loading of .Data.Pages
+type Data map[string]any
+
+// Pages returns the pages stored with key "pages". If this is a func,
+// it will be invoked.
+func (d Data) Pages() Pages {
+ v, found := d["pages"]
+ if !found {
+ return nil
+ }
+
+ switch vv := v.(type) {
+ case Pages:
+ return vv
+ case func() Pages:
+ return vv()
+ default:
+ panic(fmt.Sprintf("%T is not Pages", v))
+ }
+}
diff --git a/resources/page/page_data_test.go b/resources/page/page_data_test.go
new file mode 100644
index 000000000..c7d764d8a
--- /dev/null
+++ b/resources/page/page_data_test.go
@@ -0,0 +1,55 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "bytes"
+ "testing"
+ "text/template"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPageData(t *testing.T) {
+ c := qt.New(t)
+
+ data := make(Data)
+
+ c.Assert(data.Pages(), qt.IsNil)
+
+ pages := Pages{
+ &testPage{title: "a1"},
+ &testPage{title: "a2"},
+ }
+
+ data["pages"] = pages
+
+ c.Assert(data.Pages(), eq, pages)
+
+ data["pages"] = func() Pages {
+ return pages
+ }
+
+ c.Assert(data.Pages(), eq, pages)
+
+ templ, err := template.New("").Parse(`Pages: {{ .Pages }}`)
+
+ c.Assert(err, qt.IsNil)
+
+ var buff bytes.Buffer
+
+ c.Assert(templ.Execute(&buff, data), qt.IsNil)
+
+ c.Assert(buff.String(), qt.Contains, "Pages(2)")
+}
diff --git a/resources/page/page_generate/.gitignore b/resources/page/page_generate/.gitignore
new file mode 100644
index 000000000..84fd70a9f
--- /dev/null
+++ b/resources/page/page_generate/.gitignore
@@ -0,0 +1 @@
+generate \ No newline at end of file
diff --git a/resources/page/page_generate/generate_page_wrappers.go b/resources/page/page_generate/generate_page_wrappers.go
new file mode 100644
index 000000000..f4b40f717
--- /dev/null
+++ b/resources/page/page_generate/generate_page_wrappers.go
@@ -0,0 +1,280 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page_generate
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+ "reflect"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/codegen"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/source"
+)
+
+const header = `// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+`
+
+var (
+ pageInterfaceDeprecated = reflect.TypeOf((*page.DeprecatedWarningPageMethods)(nil)).Elem()
+ pageInterface = reflect.TypeOf((*page.Page)(nil)).Elem()
+
+ packageDir = filepath.FromSlash("resources/page")
+)
+
+func Generate(c *codegen.Inspector) error {
+ if err := generateMarshalJSON(c); err != nil {
+ return fmt.Errorf("failed to generate JSON marshaler: %w", err)
+ }
+
+ if err := generateDeprecatedWrappers(c); err != nil {
+ return fmt.Errorf("failed to generate deprecate wrappers: %w", err)
+ }
+
+ if err := generateFileIsZeroWrappers(c); err != nil {
+ return fmt.Errorf("failed to generate file wrappers: %w", err)
+ }
+
+ return nil
+}
+
+func generateMarshalJSON(c *codegen.Inspector) error {
+ filename := filepath.Join(c.ProjectRootDir, packageDir, "page_marshaljson.autogen.go")
+ f, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ includes := []reflect.Type{pageInterface}
+
+ // Exclude these methods
+ excludes := []reflect.Type{
+ // We need to evaluate the deprecated vs JSON in the future,
+ // but leave them out for now.
+ pageInterfaceDeprecated,
+
+ // Leave this out for now. We need to revisit the author issue.
+ reflect.TypeOf((*page.AuthorProvider)(nil)).Elem(),
+
+ reflect.TypeOf((*resource.ErrProvider)(nil)).Elem(),
+
+ // navigation.PageMenus
+
+ // Prevent loops.
+ reflect.TypeOf((*page.SitesProvider)(nil)).Elem(),
+ reflect.TypeOf((*page.Positioner)(nil)).Elem(),
+
+ reflect.TypeOf((*page.ChildCareProvider)(nil)).Elem(),
+ reflect.TypeOf((*page.TreeProvider)(nil)).Elem(),
+ reflect.TypeOf((*page.InSectionPositioner)(nil)).Elem(),
+ reflect.TypeOf((*page.PaginatorProvider)(nil)).Elem(),
+ reflect.TypeOf((*maps.Scratcher)(nil)).Elem(),
+ }
+
+ methods := c.MethodsFromTypes(
+ includes,
+ excludes)
+
+ if len(methods) == 0 {
+ return errors.New("no methods found")
+ }
+
+ marshalJSON, pkgImports := methods.ToMarshalJSON(
+ "Page",
+ "github.com/gohugoio/hugo/resources/page",
+ // Exclusion regexps. Matches method names.
+ `\bPage\b`,
+ )
+
+ fmt.Fprintf(f, `%s
+
+package page
+
+%s
+
+
+%s
+
+
+`, header, importsString(pkgImports), marshalJSON)
+
+ return nil
+}
+
+func generateDeprecatedWrappers(c *codegen.Inspector) error {
+ filename := filepath.Join(c.ProjectRootDir, packageDir, "page_wrappers.autogen.go")
+ f, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Generate a wrapper for deprecated page methods
+
+ reasons := map[string]string{
+ "IsDraft": "Use .Draft.",
+ "Hugo": "Use the global hugo function.",
+ "LanguagePrefix": "Use .Site.LanguagePrefix.",
+ "GetParam": "Use .Param or .Params.myParam.",
+ "RSSLink": `Use the Output Format's link, e.g. something like:
+ {{ with .OutputFormats.Get "RSS" }}{{ .RelPermalink }}{{ end }}`,
+ "URL": "Use .Permalink or .RelPermalink. If what you want is the front matter URL value, use .Params.url",
+ }
+
+ deprecated := func(name string, tp reflect.Type) string {
+ alternative, found := reasons[name]
+ if !found {
+ panic(fmt.Sprintf("no deprecated reason found for %q", name))
+ }
+
+ return fmt.Sprintf("helpers.Deprecated(%q, %q, true)", "Page."+name, alternative)
+ }
+
+ var buff bytes.Buffer
+
+ methods := c.MethodsFromTypes([]reflect.Type{pageInterfaceDeprecated}, nil)
+
+ for _, m := range methods {
+ fmt.Fprint(&buff, m.Declaration("*pageDeprecated"))
+ fmt.Fprintln(&buff, " {")
+ fmt.Fprintf(&buff, "\t%s\n", deprecated(m.Name, m.Owner))
+ fmt.Fprintf(&buff, "\t%s\n}\n", m.Delegate("p", "p"))
+
+ }
+
+ pkgImports := methods.Imports()
+ // pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/helpers")
+
+ fmt.Fprintf(f, `%s
+
+package page
+
+%s
+// NewDeprecatedWarningPage adds deprecation warnings to the given implementation.
+func NewDeprecatedWarningPage(p DeprecatedWarningPageMethods) DeprecatedWarningPageMethods {
+ return &pageDeprecated{p: p}
+}
+
+type pageDeprecated struct {
+ p DeprecatedWarningPageMethods
+}
+
+%s
+
+`, header, importsString(pkgImports), buff.String())
+
+ return nil
+}
+
+func generateFileIsZeroWrappers(c *codegen.Inspector) error {
+ filename := filepath.Join(c.ProjectRootDir, packageDir, "zero_file.autogen.go")
+ f, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ // Generate warnings for zero file access
+
+ warning := func(name string, tp reflect.Type) string {
+ msg := fmt.Sprintf(".File.%s on zero object. Wrap it in if or with: {{ with .File }}{{ .%s }}{{ end }}", name, name)
+
+ // We made this a Warning in 0.92.0.
+ // When we remove this construct in 0.93.0, people will get a nil pointer.
+ return fmt.Sprintf("z.log.Warnln(%q)", msg)
+ }
+
+ var buff bytes.Buffer
+
+ methods := c.MethodsFromTypes([]reflect.Type{reflect.TypeOf((*source.File)(nil)).Elem()}, nil)
+
+ for _, m := range methods {
+ if m.Name == "IsZero" {
+ continue
+ }
+ fmt.Fprint(&buff, m.DeclarationNamed("zeroFile"))
+ fmt.Fprintln(&buff, " {")
+ fmt.Fprintf(&buff, "\t%s\n", warning(m.Name, m.Owner))
+ if len(m.Out) > 0 {
+ fmt.Fprintln(&buff, "\treturn")
+ }
+ fmt.Fprintln(&buff, "}")
+
+ }
+
+ pkgImports := append(methods.Imports(), "github.com/gohugoio/hugo/common/loggers", "github.com/gohugoio/hugo/source")
+
+ fmt.Fprintf(f, `%s
+
+package page
+
+%s
+
+// zeroFile represents a zero value of source.File with warnings if invoked.
+type zeroFile struct {
+ log loggers.Logger
+}
+
+func NewZeroFile(log loggers.Logger) source.File {
+ return zeroFile{log: log}
+}
+
+func (zeroFile) IsZero() bool {
+ return true
+}
+
+%s
+
+`, header, importsString(pkgImports), buff.String())
+
+ return nil
+}
+
+func importsString(imps []string) string {
+ if len(imps) == 0 {
+ return ""
+ }
+
+ if len(imps) == 1 {
+ return fmt.Sprintf("import %q", imps[0])
+ }
+
+ impsStr := "import (\n"
+ for _, imp := range imps {
+ impsStr += fmt.Sprintf("%q\n", imp)
+ }
+
+ return impsStr + ")"
+}
diff --git a/resources/page/page_kinds.go b/resources/page/page_kinds.go
new file mode 100644
index 000000000..719375f66
--- /dev/null
+++ b/resources/page/page_kinds.go
@@ -0,0 +1,47 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import "strings"
+
+const (
+ KindPage = "page"
+
+ // The rest are node types; home page, sections etc.
+
+ KindHome = "home"
+ KindSection = "section"
+
+ // Note that before Hugo 0.73 these were confusingly named
+ // taxonomy (now: term)
+ // taxonomyTerm (now: taxonomy)
+ KindTaxonomy = "taxonomy"
+ KindTerm = "term"
+)
+
+var kindMap = map[string]string{
+ strings.ToLower(KindPage): KindPage,
+ strings.ToLower(KindHome): KindHome,
+ strings.ToLower(KindSection): KindSection,
+ strings.ToLower(KindTaxonomy): KindTaxonomy,
+ strings.ToLower(KindTerm): KindTerm,
+
+ // Legacy, pre v0.53.0.
+ "taxonomyterm": KindTaxonomy,
+}
+
+// GetKind gets the page kind given a string, empty if not found.
+func GetKind(s string) string {
+ return kindMap[strings.ToLower(s)]
+}
diff --git a/resources/page/page_kinds_test.go b/resources/page/page_kinds_test.go
new file mode 100644
index 000000000..357be6739
--- /dev/null
+++ b/resources/page/page_kinds_test.go
@@ -0,0 +1,37 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestKind(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ // Add tests for these constants to make sure they don't change
+ c.Assert(KindPage, qt.Equals, "page")
+ c.Assert(KindHome, qt.Equals, "home")
+ c.Assert(KindSection, qt.Equals, "section")
+ c.Assert(KindTaxonomy, qt.Equals, "taxonomy")
+ c.Assert(KindTerm, qt.Equals, "term")
+
+ c.Assert(GetKind("TAXONOMYTERM"), qt.Equals, KindTaxonomy)
+ c.Assert(GetKind("Taxonomy"), qt.Equals, KindTaxonomy)
+ c.Assert(GetKind("Page"), qt.Equals, KindPage)
+ c.Assert(GetKind("Home"), qt.Equals, KindHome)
+ c.Assert(GetKind("SEction"), qt.Equals, KindSection)
+}
diff --git a/resources/page/page_lazy_contentprovider.go b/resources/page/page_lazy_contentprovider.go
new file mode 100644
index 000000000..ba4f8f8ef
--- /dev/null
+++ b/resources/page/page_lazy_contentprovider.go
@@ -0,0 +1,124 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "html/template"
+
+ "github.com/gohugoio/hugo/lazy"
+)
+
+// OutputFormatContentProvider represents the method set that is "outputFormat aware" and that we
+// provide lazy initialization for in case they get invoked outside of their normal rendering context, e.g. via .Translations.
+// Note that this set is currently not complete, but should cover the most common use cases.
+// For the others, the implementation will be from the page.NoopPage.
+type OutputFormatContentProvider interface {
+ ContentProvider
+ TableOfContentsProvider
+ PageRenderProvider
+}
+
+// LazyContentProvider initializes itself when read. Each method of the
+// ContentProvider interface initializes a content provider and shares it
+// with other methods.
+//
+// Used in cases where we cannot guarantee whether the content provider
+// will be needed. Must create via NewLazyContentProvider.
+type LazyContentProvider struct {
+ init *lazy.Init
+ cp OutputFormatContentProvider
+}
+
+// NewLazyContentProvider returns a LazyContentProvider initialized with
+// function f. The resulting LazyContentProvider calls f in order to
+// retrieve a ContentProvider
+func NewLazyContentProvider(f func() (OutputFormatContentProvider, error)) *LazyContentProvider {
+ lcp := LazyContentProvider{
+ init: lazy.New(),
+ cp: NopPage,
+ }
+ lcp.init.Add(func() (any, error) {
+ cp, err := f()
+ if err != nil {
+ return nil, err
+ }
+ lcp.cp = cp
+ return nil, nil
+ })
+ return &lcp
+}
+
+func (lcp *LazyContentProvider) Reset() {
+ lcp.init.Reset()
+}
+
+func (lcp *LazyContentProvider) Content() (any, error) {
+ lcp.init.Do()
+ return lcp.cp.Content()
+}
+
+func (lcp *LazyContentProvider) Plain() string {
+ lcp.init.Do()
+ return lcp.cp.Plain()
+}
+
+func (lcp *LazyContentProvider) PlainWords() []string {
+ lcp.init.Do()
+ return lcp.cp.PlainWords()
+}
+
+func (lcp *LazyContentProvider) Summary() template.HTML {
+ lcp.init.Do()
+ return lcp.cp.Summary()
+}
+
+func (lcp *LazyContentProvider) Truncated() bool {
+ lcp.init.Do()
+ return lcp.cp.Truncated()
+}
+
+func (lcp *LazyContentProvider) FuzzyWordCount() int {
+ lcp.init.Do()
+ return lcp.cp.FuzzyWordCount()
+}
+
+func (lcp *LazyContentProvider) WordCount() int {
+ lcp.init.Do()
+ return lcp.cp.WordCount()
+}
+
+func (lcp *LazyContentProvider) ReadingTime() int {
+ lcp.init.Do()
+ return lcp.cp.ReadingTime()
+}
+
+func (lcp *LazyContentProvider) Len() int {
+ lcp.init.Do()
+ return lcp.cp.Len()
+}
+
+func (lcp *LazyContentProvider) Render(layout ...string) (template.HTML, error) {
+ lcp.init.Do()
+ return lcp.cp.Render(layout...)
+}
+
+func (lcp *LazyContentProvider) RenderString(args ...any) (template.HTML, error) {
+ lcp.init.Do()
+ return lcp.cp.RenderString(args...)
+}
+
+func (lcp *LazyContentProvider) TableOfContents() template.HTML {
+ lcp.init.Do()
+ return lcp.cp.TableOfContents()
+}
diff --git a/resources/page/page_marshaljson.autogen.go b/resources/page/page_marshaljson.autogen.go
new file mode 100644
index 000000000..0f73d81ae
--- /dev/null
+++ b/resources/page/page_marshaljson.autogen.go
@@ -0,0 +1,211 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+import (
+ "encoding/json"
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/navigation"
+ "github.com/gohugoio/hugo/source"
+ "html/template"
+ "time"
+)
+
+func MarshalPageToJSON(p Page) ([]byte, error) {
+ content, err := p.Content()
+ if err != nil {
+ return nil, err
+ }
+ plain := p.Plain()
+ plainWords := p.PlainWords()
+ summary := p.Summary()
+ truncated := p.Truncated()
+ fuzzyWordCount := p.FuzzyWordCount()
+ wordCount := p.WordCount()
+ readingTime := p.ReadingTime()
+ length := p.Len()
+ tableOfContents := p.TableOfContents()
+ rawContent := p.RawContent()
+ resourceType := p.ResourceType()
+ mediaType := p.MediaType()
+ permalink := p.Permalink()
+ relPermalink := p.RelPermalink()
+ name := p.Name()
+ title := p.Title()
+ params := p.Params()
+ data := p.Data()
+ date := p.Date()
+ lastmod := p.Lastmod()
+ publishDate := p.PublishDate()
+ expiryDate := p.ExpiryDate()
+ aliases := p.Aliases()
+ bundleType := p.BundleType()
+ description := p.Description()
+ draft := p.Draft()
+ isHome := p.IsHome()
+ keywords := p.Keywords()
+ kind := p.Kind()
+ layout := p.Layout()
+ linkTitle := p.LinkTitle()
+ isNode := p.IsNode()
+ isPage := p.IsPage()
+ path := p.Path()
+ pathc := p.Pathc()
+ slug := p.Slug()
+ lang := p.Lang()
+ isSection := p.IsSection()
+ section := p.Section()
+ sectionsEntries := p.SectionsEntries()
+ sectionsPath := p.SectionsPath()
+ sitemap := p.Sitemap()
+ typ := p.Type()
+ weight := p.Weight()
+ language := p.Language()
+ file := p.File()
+ gitInfo := p.GitInfo()
+ outputFormats := p.OutputFormats()
+ alternativeOutputFormats := p.AlternativeOutputFormats()
+ menus := p.Menus()
+ translationKey := p.TranslationKey()
+ isTranslated := p.IsTranslated()
+ allTranslations := p.AllTranslations()
+ translations := p.Translations()
+ getIdentity := p.GetIdentity()
+
+ s := struct {
+ Content any
+ Plain string
+ PlainWords []string
+ Summary template.HTML
+ Truncated bool
+ FuzzyWordCount int
+ WordCount int
+ ReadingTime int
+ Len int
+ TableOfContents template.HTML
+ RawContent string
+ ResourceType string
+ MediaType media.Type
+ Permalink string
+ RelPermalink string
+ Name string
+ Title string
+ Params maps.Params
+ Data any
+ Date time.Time
+ Lastmod time.Time
+ PublishDate time.Time
+ ExpiryDate time.Time
+ Aliases []string
+ BundleType files.ContentClass
+ Description string
+ Draft bool
+ IsHome bool
+ Keywords []string
+ Kind string
+ Layout string
+ LinkTitle string
+ IsNode bool
+ IsPage bool
+ Path string
+ Pathc string
+ Slug string
+ Lang string
+ IsSection bool
+ Section string
+ SectionsEntries []string
+ SectionsPath string
+ Sitemap config.Sitemap
+ Type string
+ Weight int
+ Language *langs.Language
+ File source.File
+ GitInfo *gitmap.GitInfo
+ OutputFormats OutputFormats
+ AlternativeOutputFormats OutputFormats
+ Menus navigation.PageMenus
+ TranslationKey string
+ IsTranslated bool
+ AllTranslations Pages
+ Translations Pages
+ GetIdentity identity.Identity
+ }{
+ Content: content,
+ Plain: plain,
+ PlainWords: plainWords,
+ Summary: summary,
+ Truncated: truncated,
+ FuzzyWordCount: fuzzyWordCount,
+ WordCount: wordCount,
+ ReadingTime: readingTime,
+ Len: length,
+ TableOfContents: tableOfContents,
+ RawContent: rawContent,
+ ResourceType: resourceType,
+ MediaType: mediaType,
+ Permalink: permalink,
+ RelPermalink: relPermalink,
+ Name: name,
+ Title: title,
+ Params: params,
+ Data: data,
+ Date: date,
+ Lastmod: lastmod,
+ PublishDate: publishDate,
+ ExpiryDate: expiryDate,
+ Aliases: aliases,
+ BundleType: bundleType,
+ Description: description,
+ Draft: draft,
+ IsHome: isHome,
+ Keywords: keywords,
+ Kind: kind,
+ Layout: layout,
+ LinkTitle: linkTitle,
+ IsNode: isNode,
+ IsPage: isPage,
+ Path: path,
+ Pathc: pathc,
+ Slug: slug,
+ Lang: lang,
+ IsSection: isSection,
+ Section: section,
+ SectionsEntries: sectionsEntries,
+ SectionsPath: sectionsPath,
+ Sitemap: sitemap,
+ Type: typ,
+ Weight: weight,
+ Language: language,
+ File: file,
+ GitInfo: gitInfo,
+ OutputFormats: outputFormats,
+ AlternativeOutputFormats: alternativeOutputFormats,
+ Menus: menus,
+ TranslationKey: translationKey,
+ IsTranslated: isTranslated,
+ AllTranslations: allTranslations,
+ Translations: translations,
+ GetIdentity: getIdentity,
+ }
+
+ return json.Marshal(&s)
+}
diff --git a/resources/page/page_matcher.go b/resources/page/page_matcher.go
new file mode 100644
index 000000000..c302ff21a
--- /dev/null
+++ b/resources/page/page_matcher.go
@@ -0,0 +1,142 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/hugofs/glob"
+ "github.com/mitchellh/mapstructure"
+)
+
+// A PageMatcher can be used to match a Page with Glob patterns.
+// Note that the pattern matching is case insensitive.
+// An empty field matches everything for that dimension.
+type PageMatcher struct {
+	// A Glob pattern matching the content path below /content.
+	// Expects Unix-styled slashes.
+	// Note that this is the virtual path, so it starts at the mount root
+	// with a leading "/".
+	Path string
+
+	// A Glob pattern matching the Page's Kind(s), e.g. "{home,section}"
+	Kind string
+
+	// A Glob pattern matching the Page's language, e.g. "{en,sv}".
+	Lang string
+
+	// A Glob pattern matching the environment of the Page's site,
+	// e.g. "{production,development}".
+	Environment string
+}
+
+// Matches returns whether p matches this matcher.
+func (m PageMatcher) Matches(p Page) bool {
+ if m.Kind != "" {
+ g, err := glob.GetGlob(m.Kind)
+ if err == nil && !g.Match(p.Kind()) {
+ return false
+ }
+ }
+
+ if m.Lang != "" {
+ g, err := glob.GetGlob(m.Lang)
+ if err == nil && !g.Match(p.Lang()) {
+ return false
+ }
+ }
+
+ if m.Path != "" {
+ g, err := glob.GetGlob(m.Path)
+ // TODO(bep) Path() vs filepath vs leading slash.
+ p := strings.ToLower(filepath.ToSlash(p.Pathc()))
+ if !(strings.HasPrefix(p, "/")) {
+ p = "/" + p
+ }
+ if err == nil && !g.Match(p) {
+ return false
+ }
+ }
+
+ if m.Environment != "" {
+ g, err := glob.GetGlob(m.Environment)
+ if err == nil && !g.Match(p.Site().Hugo().Environment) {
+ return false
+ }
+ }
+
+ return true
+}
+
+// DecodeCascade decodes in which could be either a map or a slice of maps.
+func DecodeCascade(in any) (map[PageMatcher]maps.Params, error) {
+ m, err := maps.ToSliceStringMap(in)
+ if err != nil {
+ return map[PageMatcher]maps.Params{
+ {}: maps.ToStringMap(in),
+ }, nil
+ }
+
+ cascade := make(map[PageMatcher]maps.Params)
+
+ for _, vv := range m {
+ var m PageMatcher
+ if mv, found := vv["_target"]; found {
+ err := DecodePageMatcher(mv, &m)
+ if err != nil {
+ return nil, err
+ }
+ }
+ c, found := cascade[m]
+ if found {
+ // Merge
+ for k, v := range vv {
+ if _, found := c[k]; !found {
+ c[k] = v
+ }
+ }
+ } else {
+ cascade[m] = vv
+ }
+ }
+
+ return cascade, nil
+}
+
+// DecodePageMatcher decodes m into v.
+func DecodePageMatcher(m any, v *PageMatcher) error {
+ if err := mapstructure.WeakDecode(m, v); err != nil {
+ return err
+ }
+
+ v.Kind = strings.ToLower(v.Kind)
+ if v.Kind != "" {
+ g, _ := glob.GetGlob(v.Kind)
+ found := false
+ for _, k := range kindMap {
+ if g.Match(k) {
+ found = true
+ break
+ }
+ }
+ if !found {
+ return fmt.Errorf("%q did not match a valid Page Kind", v.Kind)
+ }
+ }
+
+ v.Path = filepath.ToSlash(strings.ToLower(v.Path))
+
+ return nil
+}
diff --git a/resources/page/page_matcher_test.go b/resources/page/page_matcher_test.go
new file mode 100644
index 000000000..4a59dc502
--- /dev/null
+++ b/resources/page/page_matcher_test.go
@@ -0,0 +1,83 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestPageMatcher(t *testing.T) {
+ c := qt.New(t)
+ developmentTestSite := testSite{h: hugo.NewInfo("development", nil)}
+ productionTestSite := testSite{h: hugo.NewInfo("production", nil)}
+
+ p1, p2, p3 :=
+ &testPage{path: "/p1", kind: "section", lang: "en", site: developmentTestSite},
+ &testPage{path: "p2", kind: "page", lang: "no", site: productionTestSite},
+ &testPage{path: "p3", kind: "page", lang: "en"}
+
+ c.Run("Matches", func(c *qt.C) {
+ m := PageMatcher{Kind: "section"}
+
+ c.Assert(m.Matches(p1), qt.Equals, true)
+ c.Assert(m.Matches(p2), qt.Equals, false)
+
+ m = PageMatcher{Kind: "page"}
+ c.Assert(m.Matches(p1), qt.Equals, false)
+ c.Assert(m.Matches(p2), qt.Equals, true)
+ c.Assert(m.Matches(p3), qt.Equals, true)
+
+ m = PageMatcher{Kind: "page", Path: "/p2"}
+ c.Assert(m.Matches(p1), qt.Equals, false)
+ c.Assert(m.Matches(p2), qt.Equals, true)
+ c.Assert(m.Matches(p3), qt.Equals, false)
+
+ m = PageMatcher{Path: "/p*"}
+ c.Assert(m.Matches(p1), qt.Equals, true)
+ c.Assert(m.Matches(p2), qt.Equals, true)
+ c.Assert(m.Matches(p3), qt.Equals, true)
+
+ m = PageMatcher{Lang: "en"}
+ c.Assert(m.Matches(p1), qt.Equals, true)
+ c.Assert(m.Matches(p2), qt.Equals, false)
+ c.Assert(m.Matches(p3), qt.Equals, true)
+
+ m = PageMatcher{Environment: "development"}
+ c.Assert(m.Matches(p1), qt.Equals, true)
+ c.Assert(m.Matches(p2), qt.Equals, false)
+ c.Assert(m.Matches(p3), qt.Equals, false)
+
+ m = PageMatcher{Environment: "production"}
+ c.Assert(m.Matches(p1), qt.Equals, false)
+ c.Assert(m.Matches(p2), qt.Equals, true)
+ c.Assert(m.Matches(p3), qt.Equals, false)
+ })
+
+ c.Run("Decode", func(c *qt.C) {
+ var v PageMatcher
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "foo"}, &v), qt.Not(qt.IsNil))
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "{foo,bar}"}, &v), qt.Not(qt.IsNil))
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "taxonomy"}, &v), qt.IsNil)
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "{taxonomy,foo}"}, &v), qt.IsNil)
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "{taxonomy,term}"}, &v), qt.IsNil)
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "*"}, &v), qt.IsNil)
+ c.Assert(DecodePageMatcher(map[string]any{"kind": "home", "path": filepath.FromSlash("/a/b/**")}, &v), qt.IsNil)
+ c.Assert(v, qt.Equals, PageMatcher{Kind: "home", Path: "/a/b/**"})
+ })
+}
diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go
new file mode 100644
index 000000000..cdc5fd8b1
--- /dev/null
+++ b/resources/page/page_nop.go
@@ -0,0 +1,515 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "html/template"
+ "time"
+
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/navigation"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/source"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/related"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+	// NopPage is a Page implementation where every method is a no-op,
+	// usable as a safe placeholder (see LazyContentProvider).
+	NopPage Page = new(nopPage)
+	// NilPage is a typed nil *nopPage.
+	NilPage *nopPage
+)
+
+// nopPage implements Page, but does nothing.
+type nopPage int
+
+func (p *nopPage) Err() resource.ResourceError {
+ return nil
+}
+
+func (p *nopPage) Aliases() []string {
+ return nil
+}
+
+func (p *nopPage) Sitemap() config.Sitemap {
+ return config.Sitemap{}
+}
+
+func (p *nopPage) Layout() string {
+ return ""
+}
+
+func (p *nopPage) RSSLink() template.URL {
+ return ""
+}
+
+func (p *nopPage) Author() Author {
+ return Author{}
+}
+
+func (p *nopPage) Authors() AuthorList {
+ return nil
+}
+
+func (p *nopPage) AllTranslations() Pages {
+ return nil
+}
+
+func (p *nopPage) LanguagePrefix() string {
+ return ""
+}
+
+func (p *nopPage) AlternativeOutputFormats() OutputFormats {
+ return nil
+}
+
+func (p *nopPage) BaseFileName() string {
+ return ""
+}
+
+func (p *nopPage) BundleType() files.ContentClass {
+ return ""
+}
+
+func (p *nopPage) Content() (any, error) {
+ return "", nil
+}
+
+func (p *nopPage) ContentBaseName() string {
+ return ""
+}
+
+func (p *nopPage) CurrentSection() Page {
+ return nil
+}
+
+func (p *nopPage) Data() any {
+ return nil
+}
+
+func (p *nopPage) Date() (t time.Time) {
+ return
+}
+
+func (p *nopPage) Description() string {
+ return ""
+}
+
+func (p *nopPage) RefFrom(argsm map[string]any, source any) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) RelRefFrom(argsm map[string]any, source any) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) Dir() string {
+ return ""
+}
+
+func (p *nopPage) Draft() bool {
+ return false
+}
+
+func (p *nopPage) Eq(other any) bool {
+ return p == other
+}
+
+func (p *nopPage) ExpiryDate() (t time.Time) {
+ return
+}
+
+func (p *nopPage) Ext() string {
+ return ""
+}
+
+func (p *nopPage) Extension() string {
+ return ""
+}
+
+var nilFile *source.FileInfo
+
+func (p *nopPage) File() source.File {
+ return nilFile
+}
+
+func (p *nopPage) FileInfo() hugofs.FileMetaInfo {
+ return nil
+}
+
+func (p *nopPage) Filename() string {
+ return ""
+}
+
+func (p *nopPage) FirstSection() Page {
+ return nil
+}
+
+func (p *nopPage) FuzzyWordCount() int {
+ return 0
+}
+
+func (p *nopPage) GetPage(ref string) (Page, error) {
+ return nil, nil
+}
+
+func (p *nopPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) {
+ return nil, nil
+}
+
+func (p *nopPage) GetParam(key string) any {
+ return nil
+}
+
+func (p *nopPage) GetTerms(taxonomy string) Pages {
+ return nil
+}
+
+func (p *nopPage) GitInfo() *gitmap.GitInfo {
+ return nil
+}
+
+func (p *nopPage) CodeOwners() []string {
+ return nil
+}
+
+func (p *nopPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+ return false
+}
+
+func (p *nopPage) HasShortcode(name string) bool {
+ return false
+}
+
+func (p *nopPage) Hugo() (h hugo.Info) {
+ return
+}
+
+func (p *nopPage) InSection(other any) (bool, error) {
+ return false, nil
+}
+
+func (p *nopPage) IsAncestor(other any) (bool, error) {
+ return false, nil
+}
+
+func (p *nopPage) IsDescendant(other any) (bool, error) {
+ return false, nil
+}
+
+func (p *nopPage) IsDraft() bool {
+ return false
+}
+
+func (p *nopPage) IsHome() bool {
+ return false
+}
+
+func (p *nopPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+ return false
+}
+
+func (p *nopPage) IsNode() bool {
+ return false
+}
+
+func (p *nopPage) IsPage() bool {
+ return false
+}
+
+func (p *nopPage) IsSection() bool {
+ return false
+}
+
+func (p *nopPage) IsTranslated() bool {
+ return false
+}
+
+func (p *nopPage) Keywords() []string {
+ return nil
+}
+
+func (p *nopPage) Kind() string {
+ return ""
+}
+
+func (p *nopPage) Lang() string {
+ return ""
+}
+
+func (p *nopPage) Language() *langs.Language {
+ return nil
+}
+
+func (p *nopPage) Lastmod() (t time.Time) {
+ return
+}
+
+func (p *nopPage) Len() int {
+ return 0
+}
+
+func (p *nopPage) LinkTitle() string {
+ return ""
+}
+
+func (p *nopPage) LogicalName() string {
+ return ""
+}
+
+func (p *nopPage) MediaType() (m media.Type) {
+ return
+}
+
+func (p *nopPage) Menus() (m navigation.PageMenus) {
+ return
+}
+
+func (p *nopPage) Name() string {
+ return ""
+}
+
+func (p *nopPage) Next() Page {
+ return nil
+}
+
+func (p *nopPage) OutputFormats() OutputFormats {
+ return nil
+}
+
+func (p *nopPage) Pages() Pages {
+ return nil
+}
+
+func (p *nopPage) RegularPages() Pages {
+ return nil
+}
+
+func (p *nopPage) RegularPagesRecursive() Pages {
+ return nil
+}
+
+func (p *nopPage) Paginate(seq any, options ...any) (*Pager, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Paginator(options ...any) (*Pager, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Param(key any) (any, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Params() maps.Params {
+ return nil
+}
+
+func (p *nopPage) Page() Page {
+ return p
+}
+
+func (p *nopPage) Parent() Page {
+ return nil
+}
+
+func (p *nopPage) Path() string {
+ return ""
+}
+
+func (p *nopPage) Pathc() string {
+ return ""
+}
+
+func (p *nopPage) Permalink() string {
+ return ""
+}
+
+func (p *nopPage) Plain() string {
+ return ""
+}
+
+func (p *nopPage) PlainWords() []string {
+ return nil
+}
+
+func (p *nopPage) Prev() Page {
+ return nil
+}
+
+func (p *nopPage) PublishDate() (t time.Time) {
+ return
+}
+
+func (p *nopPage) PrevInSection() Page {
+ return nil
+}
+
+func (p *nopPage) NextInSection() Page {
+ return nil
+}
+
+func (p *nopPage) PrevPage() Page {
+ return nil
+}
+
+func (p *nopPage) NextPage() Page {
+ return nil
+}
+
+func (p *nopPage) RawContent() string {
+ return ""
+}
+
+func (p *nopPage) ReadingTime() int {
+ return 0
+}
+
+func (p *nopPage) Ref(argsm map[string]any) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) RelPermalink() string {
+ return ""
+}
+
+func (p *nopPage) RelRef(argsm map[string]any) (string, error) {
+ return "", nil
+}
+
+func (p *nopPage) Render(layout ...string) (template.HTML, error) {
+ return "", nil
+}
+
+func (p *nopPage) RenderString(args ...any) (template.HTML, error) {
+ return "", nil
+}
+
+func (p *nopPage) ResourceType() string {
+ return ""
+}
+
+func (p *nopPage) Resources() resource.Resources {
+ return nil
+}
+
+func (p *nopPage) Scratch() *maps.Scratch {
+ return nil
+}
+
+func (p *nopPage) Store() *maps.Scratch {
+ return nil
+}
+
+func (p *nopPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+ return nil, nil
+}
+
+func (p *nopPage) Section() string {
+ return ""
+}
+
+func (p *nopPage) Sections() Pages {
+ return nil
+}
+
+func (p *nopPage) SectionsEntries() []string {
+ return nil
+}
+
+func (p *nopPage) SectionsPath() string {
+ return ""
+}
+
+func (p *nopPage) Site() Site {
+ return nil
+}
+
+func (p *nopPage) Sites() Sites {
+ return nil
+}
+
+func (p *nopPage) Slug() string {
+ return ""
+}
+
+func (p *nopPage) String() string {
+ return "nopPage"
+}
+
+func (p *nopPage) Summary() template.HTML {
+ return ""
+}
+
+func (p *nopPage) TableOfContents() template.HTML {
+ return ""
+}
+
+func (p *nopPage) Title() string {
+ return ""
+}
+
+func (p *nopPage) TranslationBaseName() string {
+ return ""
+}
+
+func (p *nopPage) TranslationKey() string {
+ return ""
+}
+
+func (p *nopPage) Translations() Pages {
+ return nil
+}
+
+func (p *nopPage) Truncated() bool {
+ return false
+}
+
+func (p *nopPage) Type() string {
+ return ""
+}
+
+func (p *nopPage) URL() string {
+ return ""
+}
+
+func (p *nopPage) UniqueID() string {
+ return ""
+}
+
+func (p *nopPage) Weight() int {
+ return 0
+}
+
+func (p *nopPage) WordCount() int {
+ return 0
+}
+
+func (p *nopPage) GetIdentity() identity.Identity {
+ return identity.NewPathIdentity("content", "foo/bar.md")
+}
diff --git a/resources/page/page_outputformat.go b/resources/page/page_outputformat.go
new file mode 100644
index 000000000..44f290025
--- /dev/null
+++ b/resources/page/page_outputformat.go
@@ -0,0 +1,95 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package page contains the core interfaces and types for the Page resource,
+// a core component in Hugo.
+package page
+
+import (
+ "strings"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/output"
+)
+
+// OutputFormats holds a list of the relevant output formats for a given page.
+type OutputFormats []OutputFormat
+
+// OutputFormat links to a representation of a resource.
+type OutputFormat struct {
+	// Rel contains a value that can be used to construct a rel link.
+	// This value is fetched from the output format definition.
+	// Note that for pages with only one output format,
+	// this method will always return "canonical".
+	// As an example, the AMP output format will, by default, return "amphtml".
+	//
+	// See:
+	// https://www.ampproject.org/docs/guides/deploy/discovery
+	//
+	// Most other output formats will have "alternate" as value for this.
+	Rel string
+
+	// Format is the output format definition (name, media type etc.).
+	Format output.Format
+
+	// Pre-computed links; see RelPermalink and Permalink.
+	relPermalink string
+	permalink string
+}
+
+// Name returns this OutputFormat's name, i.e. HTML, AMP, JSON etc.
+func (o OutputFormat) Name() string {
+	return o.Format.Name
+}
+
+// MediaType returns this OutputFormat's MediaType (MIME type).
+func (o OutputFormat) MediaType() media.Type {
+	return o.Format.MediaType
+}
+
+// Permalink returns the absolute permalink to this output format.
+func (o OutputFormat) Permalink() string {
+	return o.permalink
+}
+
+// RelPermalink returns the relative permalink to this output format.
+func (o OutputFormat) RelPermalink() string {
+	return o.relPermalink
+}
+
+func NewOutputFormat(relPermalink, permalink string, isCanonical bool, f output.Format) OutputFormat {
+ isUserConfigured := true
+ for _, d := range output.DefaultFormats {
+ if strings.EqualFold(d.Name, f.Name) {
+ isUserConfigured = false
+ }
+ }
+ rel := f.Rel
+ // If the output format is the canonical format for the content, we want
+ // to specify this in the "rel" attribute of an HTML "link" element.
+ // However, for custom output formats, we don't want to surprise users by
+ // overwriting "rel"
+ if isCanonical && !isUserConfigured {
+ rel = "canonical"
+ }
+ return OutputFormat{Rel: rel, Format: f, relPermalink: relPermalink, permalink: permalink}
+}
+
+// Get gets a OutputFormat given its name, i.e. json, html etc.
+// It returns nil if none found.
+func (o OutputFormats) Get(name string) *OutputFormat {
+ for _, f := range o {
+ if strings.EqualFold(f.Format.Name, name) {
+ return &f
+ }
+ }
+ return nil
+}
diff --git a/resources/page/page_paths.go b/resources/page/page_paths.go
new file mode 100644
index 000000000..3d34866d1
--- /dev/null
+++ b/resources/page/page_paths.go
@@ -0,0 +1,342 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/output"
+)
+
+const slash = "/"
+
+// TargetPathDescriptor describes how a file path for a given resource
+// should look like on the file system. The same descriptor is then later used to
+// create both the permalinks and the relative links, paginator URLs etc.
+//
+// The big motivation behind this is to have only one source of truth for URLs,
+// and by that also get rid of most of the fragile string parsing/encoding etc.
+//
+//
+type TargetPathDescriptor struct {
+	// PathSpec is used for path sanitizing and URLization.
+	PathSpec *helpers.PathSpec
+
+	// The output format this path is created for.
+	Type output.Format
+	// The Page Kind, e.g. KindHome or KindPage.
+	Kind string
+
+	// The section path elements the page lives in.
+	Sections []string
+
+	// For regular content pages this is either
+	// 1) the Slug, if set,
+	// 2) the file base name (TranslationBaseName).
+	BaseName string
+
+	// Source directory.
+	Dir string
+
+	// Typically a language prefix added to file paths.
+	PrefixFilePath string
+
+	// Typically a language prefix added to links.
+	PrefixLink string
+
+	// If in multihost mode etc., every link/path needs to be prefixed, even
+	// if set in URL.
+	ForcePrefix bool
+
+	// URL from front matter if set. Will override any Slug etc.
+	URL string
+
+	// Used to create paginator links.
+	Addends string
+
+	// The expanded permalink if defined for the section, ready to use.
+	ExpandedPermalink string
+
+	// Some types cannot have uglyURLs, even if globally enabled, RSS being one example.
+	UglyURLs bool
+}
+
+// TODO(bep) move this type.
+type TargetPaths struct {
+
+	// Where to store the file on disk relative to the publish dir. OS slashes.
+	TargetFilename string
+
+	// The directory to write sub-resources of the above.
+	SubResourceBaseTarget string
+
+	// The base for creating links to sub-resources of the above.
+	SubResourceBaseLink string
+
+	// The relative permalink to this resource. Unix slashes.
+	Link string
+}
+
+// RelPermalink returns the relative permalink for this target path, with
+// the base path from the given PathSpec prepended.
+func (p TargetPaths) RelPermalink(s *helpers.PathSpec) string {
+	return s.PrependBasePath(p.Link, false)
+}
+
+func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Format) string {
+ var baseURL string
+ var err error
+ if f.Protocol != "" {
+ baseURL, err = s.BaseURL.WithProtocol(f.Protocol)
+ if err != nil {
+ return ""
+ }
+ } else {
+ baseURL = s.BaseURL.String()
+ }
+
+ return s.PermalinkForBaseURL(p.Link, baseURL)
+}
+
// isHtmlIndex reports whether s names an HTML index file, i.e. whether it
// ends in "/index.html".
func isHtmlIndex(s string) bool {
	const htmlIndexSuffix = "/index.html"
	return strings.HasSuffix(s, htmlIndexSuffix)
}
+
+// CreateTargetPaths creates the target file paths (on-disk filename,
+// sub-resource directory) and the relative link for the resource
+// described by d. It panics if d.Type (the output format) is not set.
+func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) {
+	if d.Type.Name == "" {
+		panic("CreateTargetPath: missing type")
+	}
+
+	// Normalize all file Windows paths to simplify what's next.
+	if helpers.FilePathSeparator != slash {
+		d.Dir = filepath.ToSlash(d.Dir)
+		d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath)
+
+	}
+
+	if d.URL != "" && !strings.HasPrefix(d.URL, "/") {
+		// Treat this as a context relative URL
+		d.ForcePrefix = true
+	}
+
+	pagePath := slash
+	fullSuffix := d.Type.MediaType.FirstSuffix.FullSuffix
+
+	var (
+		pagePathDir string
+		link        string
+		linkDir     string
+	)
+
+	// The top level index files, i.e. the home page etc., needs
+	// the index base even when uglyURLs is enabled.
+	needsBase := true
+
+	isUgly := d.UglyURLs && !d.Type.NoUgly
+	baseNameSameAsType := d.BaseName != "" && d.BaseName == d.Type.BaseName
+
+	if d.ExpandedPermalink == "" && baseNameSameAsType {
+		isUgly = true
+	}
+
+	// List-like pages (sections, taxonomies etc.) without an explicit URL
+	// get their path from their section hierarchy.
+	if d.Kind != KindPage && d.URL == "" && len(d.Sections) > 0 {
+		if d.ExpandedPermalink != "" {
+			pagePath = pjoin(pagePath, d.ExpandedPermalink)
+		} else {
+			pagePath = pjoin(d.Sections...)
+		}
+		needsBase = false
+	}
+
+	if d.Type.Path != "" {
+		pagePath = pjoin(pagePath, d.Type.Path)
+	}
+
+	// First branch: a URL explicitly set in front matter (non-home pages).
+	if d.Kind != KindHome && d.URL != "" {
+		pagePath = pjoin(pagePath, d.URL)
+
+		if d.Addends != "" {
+			pagePath = pjoin(pagePath, d.Addends)
+		}
+
+		pagePathDir = pagePath
+		link = pagePath
+		hasDot := strings.Contains(d.URL, ".")
+		hasSlash := strings.HasSuffix(d.URL, slash)
+
+		if hasSlash || !hasDot {
+			// Directory-style URL: write the index file inside it.
+			pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix)
+		} else if hasDot {
+			// File-style URL: the sub-resource dir is its parent.
+			pagePathDir = path.Dir(pagePathDir)
+		}
+
+		if !isHtmlIndex(pagePath) {
+			link = pagePath
+		} else if !hasSlash {
+			link += slash
+		}
+
+		linkDir = pagePathDir
+
+		if d.ForcePrefix {
+
+			// Prepend language prefix if not already set in URL
+			if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, slash+d.PrefixFilePath) {
+				pagePath = pjoin(d.PrefixFilePath, pagePath)
+				pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+			}
+
+			if d.PrefixLink != "" && !strings.HasPrefix(d.URL, slash+d.PrefixLink) {
+				link = pjoin(d.PrefixLink, link)
+				linkDir = pjoin(d.PrefixLink, linkDir)
+			}
+		}
+
+	} else if d.Kind == KindPage {
+		// Second branch: regular content pages without an explicit URL.
+
+		if d.ExpandedPermalink != "" {
+			pagePath = pjoin(pagePath, d.ExpandedPermalink)
+		} else {
+			if d.Dir != "" {
+				pagePath = pjoin(pagePath, d.Dir)
+			}
+			if d.BaseName != "" {
+				pagePath = pjoin(pagePath, d.BaseName)
+			}
+		}
+
+		if d.Addends != "" {
+			pagePath = pjoin(pagePath, d.Addends)
+		}
+
+		link = pagePath
+
+		// TODO(bep) this should not happen after the fix in https://github.com/gohugoio/hugo/issues/4870
+		// but we may need some more testing before we can remove it.
+		if baseNameSameAsType {
+			link = strings.TrimSuffix(link, d.BaseName)
+		}
+
+		pagePathDir = link
+		link = link + slash
+		linkDir = pagePathDir
+
+		if isUgly {
+			pagePath = addSuffix(pagePath, fullSuffix)
+		} else {
+			pagePath = pjoin(pagePath, d.Type.BaseName+fullSuffix)
+		}
+
+		if !isHtmlIndex(pagePath) {
+			link = pagePath
+		}
+
+		if d.PrefixFilePath != "" {
+			pagePath = pjoin(d.PrefixFilePath, pagePath)
+			pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+		}
+
+		if d.PrefixLink != "" {
+			link = pjoin(d.PrefixLink, link)
+			linkDir = pjoin(d.PrefixLink, linkDir)
+		}
+
+	} else {
+		// Third branch: the remaining node pages (home etc.).
+		if d.Addends != "" {
+			pagePath = pjoin(pagePath, d.Addends)
+		}
+
+		needsBase = needsBase && d.Addends == ""
+
+		// No permalink expansion etc. for node type pages (for now)
+		base := ""
+
+		if needsBase || !isUgly {
+			base = d.Type.BaseName
+		}
+
+		pagePathDir = pagePath
+		link = pagePath
+		linkDir = pagePathDir
+
+		if base != "" {
+			pagePath = path.Join(pagePath, addSuffix(base, fullSuffix))
+		} else {
+			pagePath = addSuffix(pagePath, fullSuffix)
+		}
+
+		if !isHtmlIndex(pagePath) {
+			link = pagePath
+		} else {
+			link += slash
+		}
+
+		if d.PrefixFilePath != "" {
+			pagePath = pjoin(d.PrefixFilePath, pagePath)
+			pagePathDir = pjoin(d.PrefixFilePath, pagePathDir)
+		}
+
+		if d.PrefixLink != "" {
+			link = pjoin(d.PrefixLink, link)
+			linkDir = pjoin(d.PrefixLink, linkDir)
+		}
+	}
+
+	// Final normalization: ensure leading slashes, strip trailing slashes
+	// from the directory variants, and keep a trailing slash on link only
+	// if it already had one.
+	pagePath = pjoin(slash, pagePath)
+	pagePathDir = strings.TrimSuffix(path.Join(slash, pagePathDir), slash)
+
+	hadSlash := strings.HasSuffix(link, slash)
+	link = strings.Trim(link, slash)
+	if hadSlash {
+		link += slash
+	}
+
+	if !strings.HasPrefix(link, slash) {
+		link = slash + link
+	}
+
+	linkDir = strings.TrimSuffix(path.Join(slash, linkDir), slash)
+
+	// if page URL is explicitly set in frontmatter,
+	// preserve its value without sanitization
+	if d.Kind != KindPage || d.URL == "" {
+		// Note: MakePathSanitized will lower case the path if
+		// disablePathToLower isn't set.
+		pagePath = d.PathSpec.MakePathSanitized(pagePath)
+		pagePathDir = d.PathSpec.MakePathSanitized(pagePathDir)
+		link = d.PathSpec.MakePathSanitized(link)
+		linkDir = d.PathSpec.MakePathSanitized(linkDir)
+	}
+
+	tp.TargetFilename = filepath.FromSlash(pagePath)
+	tp.SubResourceBaseTarget = filepath.FromSlash(pagePathDir)
+	tp.SubResourceBaseLink = linkDir
+	tp.Link = d.PathSpec.URLizeFilename(link)
+	if tp.Link == "" {
+		tp.Link = slash
+	}
+
+	return
+}
+
+func addSuffix(s, suffix string) string {
+ return strings.Trim(s, slash) + suffix
+}
+
+// Like path.Join, but preserves one trailing slash if present.
+func pjoin(elem ...string) string {
+ hadSlash := strings.HasSuffix(elem[len(elem)-1], slash)
+ joined := path.Join(elem...)
+ if hadSlash && !strings.HasSuffix(joined, slash) {
+ return joined + slash
+ }
+ return joined
+}
diff --git a/resources/page/page_paths_test.go b/resources/page/page_paths_test.go
new file mode 100644
index 000000000..28937899f
--- /dev/null
+++ b/resources/page/page_paths_test.go
@@ -0,0 +1,293 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+
+ "github.com/gohugoio/hugo/output"
+)
+
+func TestPageTargetPath(t *testing.T) {
+ pathSpec := newTestPathSpec()
+
+ noExtNoDelimMediaType := media.WithDelimiterAndSuffixes(media.TextType, "", "")
+ noExtNoDelimMediaType.Delimiter = ""
+
+ // Netlify style _redirects
+ noExtDelimFormat := output.Format{
+ Name: "NER",
+ MediaType: noExtNoDelimMediaType,
+ BaseName: "_redirects",
+ }
+
+ for _, langPrefixPath := range []string{"", "no"} {
+ for _, langPrefixLink := range []string{"", "no"} {
+ for _, uglyURLs := range []bool{false, true} {
+
+ tests := []struct {
+ name string
+ d TargetPathDescriptor
+ expected TargetPaths
+ }{
+ {"JSON home", TargetPathDescriptor{Kind: KindHome, Type: output.JSONFormat}, TargetPaths{TargetFilename: "/index.json", SubResourceBaseTarget: "", Link: "/index.json"}},
+ {"AMP home", TargetPathDescriptor{Kind: KindHome, Type: output.AMPFormat}, TargetPaths{TargetFilename: "/amp/index.html", SubResourceBaseTarget: "/amp", Link: "/amp/"}},
+ {"HTML home", TargetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: output.HTMLFormat}, TargetPaths{TargetFilename: "/index.html", SubResourceBaseTarget: "", Link: "/"}},
+ {"Netlify redirects", TargetPathDescriptor{Kind: KindHome, BaseName: "_index", Type: noExtDelimFormat}, TargetPaths{TargetFilename: "/_redirects", SubResourceBaseTarget: "", Link: "/_redirects"}},
+ {"HTML section list", TargetPathDescriptor{
+ Kind: KindSection,
+ Sections: []string{"sect1"},
+ BaseName: "_index",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/sect1/index.html", SubResourceBaseTarget: "/sect1", Link: "/sect1/"}},
+ {"HTML taxonomy term", TargetPathDescriptor{
+ Kind: KindTerm,
+ Sections: []string{"tags", "hugo"},
+ BaseName: "_index",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/tags/hugo/index.html", SubResourceBaseTarget: "/tags/hugo", Link: "/tags/hugo/"}},
+ {"HTML taxonomy", TargetPathDescriptor{
+ Kind: KindTaxonomy,
+ Sections: []string{"tags"},
+ BaseName: "_index",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/tags/index.html", SubResourceBaseTarget: "/tags", Link: "/tags/"}},
+ {
+ "HTML page", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "mypage",
+ Sections: []string{"a"},
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/a/b/mypage/index.html", SubResourceBaseTarget: "/a/b/mypage", Link: "/a/b/mypage/"},
+ },
+
+ {
+ "HTML page with index as base", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "index",
+ Sections: []string{"a"},
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/a/b/index.html", SubResourceBaseTarget: "/a/b", Link: "/a/b/"},
+ },
+
+ {
+ "HTML page with special chars", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "My Page!",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/a/b/my-page/index.html", SubResourceBaseTarget: "/a/b/my-page", Link: "/a/b/my-page/"},
+ },
+ {"RSS home", TargetPathDescriptor{Kind: "rss", Type: output.RSSFormat}, TargetPaths{TargetFilename: "/index.xml", SubResourceBaseTarget: "", Link: "/index.xml"}},
+ {"RSS section list", TargetPathDescriptor{
+ Kind: "rss",
+ Sections: []string{"sect1"},
+ Type: output.RSSFormat,
+ }, TargetPaths{TargetFilename: "/sect1/index.xml", SubResourceBaseTarget: "/sect1", Link: "/sect1/index.xml"}},
+ {
+ "AMP page", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b/c",
+ BaseName: "myamp",
+ Type: output.AMPFormat,
+ }, TargetPaths{TargetFilename: "/amp/a/b/c/myamp/index.html", SubResourceBaseTarget: "/amp/a/b/c/myamp", Link: "/amp/a/b/c/myamp/"},
+ },
+ {
+ "AMP page with URL with suffix", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/url.xhtml",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/some/other/url.xhtml", SubResourceBaseTarget: "/some/other", Link: "/some/other/url.xhtml"},
+ },
+ {
+ "JSON page with URL without suffix", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/path/",
+ Type: output.JSONFormat,
+ }, TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"},
+ },
+ {
+ "JSON page with URL without suffix and no trailing slash", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/path",
+ Type: output.JSONFormat,
+ }, TargetPaths{TargetFilename: "/some/other/path/index.json", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/index.json"},
+ },
+ {
+ "HTML page with URL without suffix and no trailing slash", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other/path",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/some/other/path/index.html", SubResourceBaseTarget: "/some/other/path", Link: "/some/other/path/"},
+ },
+ {
+ "HTML page with URL containing double hyphen", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/sect/",
+ BaseName: "mypage",
+ URL: "/some/other--url/",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/some/other--url/index.html", SubResourceBaseTarget: "/some/other--url", Link: "/some/other--url/"},
+ },
+ {
+ "HTML page with expanded permalink", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "mypage",
+ ExpandedPermalink: "/2017/10/my-title/",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/2017/10/my-title/index.html", SubResourceBaseTarget: "/2017/10/my-title", Link: "/2017/10/my-title/"},
+ },
+ {
+ "Paginated HTML home", TargetPathDescriptor{
+ Kind: KindHome,
+ BaseName: "_index",
+ Type: output.HTMLFormat,
+ Addends: "page/3",
+ }, TargetPaths{TargetFilename: "/page/3/index.html", SubResourceBaseTarget: "/page/3", Link: "/page/3/"},
+ },
+ {
+ "Paginated Taxonomy terms list", TargetPathDescriptor{
+ Kind: KindTerm,
+ BaseName: "_index",
+ Sections: []string{"tags", "hugo"},
+ Type: output.HTMLFormat,
+ Addends: "page/3",
+ }, TargetPaths{TargetFilename: "/tags/hugo/page/3/index.html", SubResourceBaseTarget: "/tags/hugo/page/3", Link: "/tags/hugo/page/3/"},
+ },
+ {
+ "Regular page with addend", TargetPathDescriptor{
+ Kind: KindPage,
+ Dir: "/a/b",
+ BaseName: "mypage",
+ Addends: "c/d/e",
+ Type: output.HTMLFormat,
+ }, TargetPaths{TargetFilename: "/a/b/mypage/c/d/e/index.html", SubResourceBaseTarget: "/a/b/mypage/c/d/e", Link: "/a/b/mypage/c/d/e/"},
+ },
+ }
+
+ for i, test := range tests {
+ t.Run(fmt.Sprintf("langPrefixPath=%s,langPrefixLink=%s,uglyURLs=%t,name=%s", langPrefixPath, langPrefixLink, uglyURLs, test.name),
+ func(t *testing.T) {
+ test.d.ForcePrefix = true
+ test.d.PathSpec = pathSpec
+ test.d.UglyURLs = uglyURLs
+ test.d.PrefixFilePath = langPrefixPath
+ test.d.PrefixLink = langPrefixLink
+ test.d.Dir = filepath.FromSlash(test.d.Dir)
+ isUgly := uglyURLs && !test.d.Type.NoUgly
+
+ expected := test.expected
+
+ // TODO(bep) simplify
+ if test.d.Kind == KindPage && test.d.BaseName == test.d.Type.BaseName {
+ } else if test.d.Kind == KindHome && test.d.Type.Path != "" {
+ } else if test.d.Type.MediaType.FirstSuffix.Suffix != "" && (!strings.HasPrefix(expected.TargetFilename, "/index") || test.d.Addends != "") && test.d.URL == "" && isUgly {
+ expected.TargetFilename = strings.Replace(expected.TargetFilename,
+ "/"+test.d.Type.BaseName+"."+test.d.Type.MediaType.FirstSuffix.Suffix,
+ "."+test.d.Type.MediaType.FirstSuffix.Suffix, 1)
+ expected.Link = strings.TrimSuffix(expected.Link, "/") + "." + test.d.Type.MediaType.FirstSuffix.Suffix
+
+ }
+
+ if test.d.PrefixFilePath != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixFilePath) {
+ expected.TargetFilename = "/" + test.d.PrefixFilePath + expected.TargetFilename
+ expected.SubResourceBaseTarget = "/" + test.d.PrefixFilePath + expected.SubResourceBaseTarget
+ }
+
+ if test.d.PrefixLink != "" && !strings.HasPrefix(test.d.URL, "/"+test.d.PrefixLink) {
+ expected.Link = "/" + test.d.PrefixLink + expected.Link
+ }
+
+ expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
+ expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
+
+ pagePath := CreateTargetPaths(test.d)
+
+ if !eqTargetPaths(pagePath, expected) {
+ t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
+ }
+ })
+ }
+ }
+ }
+ }
+}
+
+func TestPageTargetPathPrefix(t *testing.T) {
+ pathSpec := newTestPathSpec()
+ tests := []struct {
+ name string
+ d TargetPathDescriptor
+ expected TargetPaths
+ }{
+ {
+ "URL set, prefix both, no force",
+ TargetPathDescriptor{Kind: KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: false, PrefixFilePath: "pf", PrefixLink: "pl"},
+ TargetPaths{TargetFilename: "/mydir/my.json", SubResourceBaseTarget: "/mydir", SubResourceBaseLink: "/mydir", Link: "/mydir/my.json"},
+ },
+ {
+ "URL set, prefix both, force",
+ TargetPathDescriptor{Kind: KindPage, Type: output.JSONFormat, URL: "/mydir/my.json", ForcePrefix: true, PrefixFilePath: "pf", PrefixLink: "pl"},
+ TargetPaths{TargetFilename: "/pf/mydir/my.json", SubResourceBaseTarget: "/pf/mydir", SubResourceBaseLink: "/pl/mydir", Link: "/pl/mydir/my.json"},
+ },
+ }
+
+ for i, test := range tests {
+ t.Run(fmt.Sprintf(test.name),
+ func(t *testing.T) {
+ test.d.PathSpec = pathSpec
+ expected := test.expected
+ expected.TargetFilename = filepath.FromSlash(expected.TargetFilename)
+ expected.SubResourceBaseTarget = filepath.FromSlash(expected.SubResourceBaseTarget)
+
+ pagePath := CreateTargetPaths(test.d)
+
+ if pagePath != expected {
+ t.Fatalf("[%d] [%s] targetPath expected\n%#v, got:\n%#v", i, test.name, expected, pagePath)
+ }
+ })
+ }
+}
+
+func eqTargetPaths(p1, p2 TargetPaths) bool {
+ if p1.Link != p2.Link {
+ return false
+ }
+
+ if p1.SubResourceBaseTarget != p2.SubResourceBaseTarget {
+ return false
+ }
+
+ if p1.TargetFilename != p2.TargetFilename {
+ return false
+ }
+
+ return true
+}
diff --git a/resources/page/page_wrappers.autogen.go b/resources/page/page_wrappers.autogen.go
new file mode 100644
index 000000000..55dff47d5
--- /dev/null
+++ b/resources/page/page_wrappers.autogen.go
@@ -0,0 +1,25 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+// NewDeprecatedWarningPage adds deprecation warnings to the given implementation.
+func NewDeprecatedWarningPage(p DeprecatedWarningPageMethods) DeprecatedWarningPageMethods {
+ return &pageDeprecated{p: p}
+}
+
+type pageDeprecated struct {
+ p DeprecatedWarningPageMethods
+}
diff --git a/resources/page/pagegroup.go b/resources/page/pagegroup.go
new file mode 100644
index 000000000..3b32a1fae
--- /dev/null
+++ b/resources/page/pagegroup.go
@@ -0,0 +1,460 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/compare"
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+var (
+ _ collections.Slicer = PageGroup{}
+ _ compare.ProbablyEqer = PageGroup{}
+ _ compare.ProbablyEqer = PagesGroup{}
+)
+
+// PageGroup represents a group of pages, grouped by the key.
+// The key is typically a year or similar.
+type PageGroup struct {
+ // The key, typically a year or similar.
+ Key any
+
+ // The Pages in this group.
+ Pages
+}
+
+type mapKeyValues []reflect.Value
+
+func (v mapKeyValues) Len() int { return len(v) }
+func (v mapKeyValues) Swap(i, j int) { v[i], v[j] = v[j], v[i] }
+
+type mapKeyByInt struct{ mapKeyValues }
+
+func (s mapKeyByInt) Less(i, j int) bool { return s.mapKeyValues[i].Int() < s.mapKeyValues[j].Int() }
+
+type mapKeyByStr struct {
+ less func(a, b string) bool
+ mapKeyValues
+}
+
+func (s mapKeyByStr) Less(i, j int) bool {
+ return s.less(s.mapKeyValues[i].String(), s.mapKeyValues[j].String())
+}
+
+func sortKeys(examplePage Page, v []reflect.Value, order string) []reflect.Value {
+ if len(v) <= 1 {
+ return v
+ }
+
+ switch v[0].Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ if order == "desc" {
+ sort.Sort(sort.Reverse(mapKeyByInt{v}))
+ } else {
+ sort.Sort(mapKeyByInt{v})
+ }
+ case reflect.String:
+ stringLess, close := collatorStringLess(examplePage)
+ defer close()
+ if order == "desc" {
+ sort.Sort(sort.Reverse(mapKeyByStr{stringLess, v}))
+ } else {
+ sort.Sort(mapKeyByStr{stringLess, v})
+ }
+ }
+ return v
+}
+
+// PagesGroup represents a list of page groups.
+// This is what you get when doing page grouping in the templates.
+type PagesGroup []PageGroup
+
+// Reverse reverses the order of this list of page groups.
+func (p PagesGroup) Reverse() PagesGroup {
+ for i, j := 0, len(p)-1; i < j; i, j = i+1, j-1 {
+ p[i], p[j] = p[j], p[i]
+ }
+
+ return p
+}
+
+var (
+ errorType = reflect.TypeOf((*error)(nil)).Elem()
+ pagePtrType = reflect.TypeOf((*Page)(nil)).Elem()
+ pagesType = reflect.TypeOf(Pages{})
+)
+
+// GroupBy groups by the value in the given field or method name and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
+ if len(p) < 1 {
+ return nil, nil
+ }
+
+ direction := "asc"
+
+ if len(order) > 0 && (strings.ToLower(order[0]) == "desc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse") {
+ direction = "desc"
+ }
+
+ var ft any
+ index := hreflect.GetMethodIndexByName(pagePtrType, key)
+ if index != -1 {
+ m := pagePtrType.Method(index)
+ if m.Type.NumOut() == 0 || m.Type.NumOut() > 2 {
+ return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
+ }
+ if m.Type.NumOut() == 1 && m.Type.Out(0).Implements(errorType) {
+ return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
+ }
+ if m.Type.NumOut() == 2 && !m.Type.Out(1).Implements(errorType) {
+ return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
+ }
+ ft = m
+ } else {
+ var ok bool
+ ft, ok = pagePtrType.Elem().FieldByName(key)
+ if !ok {
+ return nil, errors.New(key + " is neither a field nor a method of Page")
+ }
+ }
+
+ var tmp reflect.Value
+ switch e := ft.(type) {
+ case reflect.StructField:
+ tmp = reflect.MakeMap(reflect.MapOf(e.Type, pagesType))
+ case reflect.Method:
+ tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), pagesType))
+ }
+
+ for _, e := range p {
+ ppv := reflect.ValueOf(e)
+ var fv reflect.Value
+ switch ft.(type) {
+ case reflect.StructField:
+ fv = ppv.Elem().FieldByName(key)
+ case reflect.Method:
+ fv = hreflect.GetMethodByName(ppv, key).Call([]reflect.Value{})[0]
+ }
+ if !fv.IsValid() {
+ continue
+ }
+ if !tmp.MapIndex(fv).IsValid() {
+ tmp.SetMapIndex(fv, reflect.MakeSlice(pagesType, 0, 0))
+ }
+ tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv))
+ }
+
+ sortedKeys := sortKeys(p[0], tmp.MapKeys(), direction)
+ r := make([]PageGroup, len(sortedKeys))
+ for i, k := range sortedKeys {
+ r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)}
+ }
+
+ return r, nil
+}
+
+// GroupByParam groups by the given page parameter key's value and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+func (p Pages) GroupByParam(key string, order ...string) (PagesGroup, error) {
+ if len(p) < 1 {
+ return nil, nil
+ }
+
+ direction := "asc"
+
+ if len(order) > 0 && (strings.ToLower(order[0]) == "desc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse") {
+ direction = "desc"
+ }
+
+ var tmp reflect.Value
+ var keyt reflect.Type
+ for _, e := range p {
+ param := resource.GetParamToLower(e, key)
+ if param != nil {
+ if _, ok := param.([]string); !ok {
+ keyt = reflect.TypeOf(param)
+ tmp = reflect.MakeMap(reflect.MapOf(keyt, pagesType))
+ break
+ }
+ }
+ }
+ if !tmp.IsValid() {
+ return nil, errors.New("there is no such a param")
+ }
+
+ for _, e := range p {
+ param := resource.GetParam(e, key)
+
+ if param == nil || reflect.TypeOf(param) != keyt {
+ continue
+ }
+ v := reflect.ValueOf(param)
+ if !tmp.MapIndex(v).IsValid() {
+ tmp.SetMapIndex(v, reflect.MakeSlice(pagesType, 0, 0))
+ }
+ tmp.SetMapIndex(v, reflect.Append(tmp.MapIndex(v), reflect.ValueOf(e)))
+ }
+
+ var r []PageGroup
+ for _, k := range sortKeys(p[0], tmp.MapKeys(), direction) {
+ r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)})
+ }
+
+ return r, nil
+}
+
+func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDate func(p Page) time.Time, order ...string) (PagesGroup, error) {
+ if len(p) < 1 {
+ return nil, nil
+ }
+
+ sp := sorter(p)
+
+ if !(len(order) > 0 && (strings.ToLower(order[0]) == "asc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse")) {
+ sp = sp.Reverse()
+ }
+
+ if sp == nil {
+ return nil, nil
+ }
+
+ firstPage := sp[0].(Page)
+ date := getDate(firstPage)
+
+ // Pages may be a mix of multiple languages, so we need to use the language
+ // for the currently rendered Site.
+ currentSite := firstPage.Site().Current()
+ formatter := langs.GetTimeFormatter(currentSite.Language())
+ formatted := formatter.Format(date, format)
+ var r []PageGroup
+ r = append(r, PageGroup{Key: formatted, Pages: make(Pages, 0)})
+ r[0].Pages = append(r[0].Pages, sp[0])
+
+ i := 0
+ for _, e := range sp[1:] {
+ date = getDate(e.(Page))
+ formatted := formatter.Format(date, format)
+ if r[i].Key.(string) != formatted {
+ r = append(r, PageGroup{Key: formatted})
+ i++
+ }
+ r[i].Pages = append(r[i].Pages, e)
+ }
+ return r, nil
+}
+
+// GroupByDate groups by the given page's Date value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByDate(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByDate()
+ }
+ getDate := func(p Page) time.Time {
+ return p.Date()
+ }
+ return p.groupByDateField(format, sorter, getDate, order...)
+}
+
+// GroupByPublishDate groups by the given page's PublishDate value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByPublishDate(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByPublishDate()
+ }
+ getDate := func(p Page) time.Time {
+ return p.PublishDate()
+ }
+ return p.groupByDateField(format, sorter, getDate, order...)
+}
+
+// GroupByExpiryDate groups by the given page's ExpireDate value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByExpiryDate(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByExpiryDate()
+ }
+ getDate := func(p Page) time.Time {
+ return p.ExpiryDate()
+ }
+ return p.groupByDateField(format, sorter, getDate, order...)
+}
+
+// GroupByLastmod groups by the given page's Lastmod value in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByLastmod(format string, order ...string) (PagesGroup, error) {
+ sorter := func(p Pages) Pages {
+ return p.ByLastmod()
+ }
+ getDate := func(p Page) time.Time {
+ return p.Lastmod()
+ }
+ return p.groupByDateField(format, sorter, getDate, order...)
+}
+
+// GroupByParamDate groups by a date set as a param on the page in
+// the given format and with the given order.
+// Valid values for order is asc, desc, rev and reverse.
+// For valid format strings, see https://golang.org/pkg/time/#Time.Format
+func (p Pages) GroupByParamDate(key string, format string, order ...string) (PagesGroup, error) {
+ // Cache the dates.
+ dates := make(map[Page]time.Time)
+
+ sorter := func(pages Pages) Pages {
+ var r Pages
+
+ for _, p := range pages {
+ param := resource.GetParam(p, key)
+ var t time.Time
+
+ if param != nil {
+ var ok bool
+ if t, ok = param.(time.Time); !ok {
+ // Probably a string. Try to convert it to time.Time.
+ t = cast.ToTime(param)
+ }
+ }
+
+ dates[p] = t
+ r = append(r, p)
+ }
+
+ pdate := func(p1, p2 Page) bool {
+ return dates[p1].Unix() < dates[p2].Unix()
+ }
+ pageBy(pdate).Sort(r)
+ return r
+ }
+ getDate := func(p Page) time.Time {
+ return dates[p]
+ }
+ return p.groupByDateField(format, sorter, getDate, order...)
+}
+
+// ProbablyEq wraps compare.ProbablyEqer
+// For internal use.
+func (p PageGroup) ProbablyEq(other any) bool {
+ otherP, ok := other.(PageGroup)
+ if !ok {
+ return false
+ }
+
+ if p.Key != otherP.Key {
+ return false
+ }
+
+ return p.Pages.ProbablyEq(otherP.Pages)
+}
+
+// Slice is for internal use.
+// for the template functions. See collections.Slice.
+func (p PageGroup) Slice(in any) (any, error) {
+ switch items := in.(type) {
+ case PageGroup:
+ return items, nil
+ case []any:
+ groups := make(PagesGroup, len(items))
+ for i, v := range items {
+ g, ok := v.(PageGroup)
+ if !ok {
+ return nil, fmt.Errorf("type %T is not a PageGroup", v)
+ }
+ groups[i] = g
+ }
+ return groups, nil
+ default:
+ return nil, fmt.Errorf("invalid slice type %T", items)
+ }
+}
+
+// Len returns the number of pages in the page group.
+func (psg PagesGroup) Len() int {
+ l := 0
+ for _, pg := range psg {
+ l += len(pg.Pages)
+ }
+ return l
+}
+
+// ProbablyEq wraps compare.ProbablyEqer
+func (psg PagesGroup) ProbablyEq(other any) bool {
+ otherPsg, ok := other.(PagesGroup)
+ if !ok {
+ return false
+ }
+
+ if len(psg) != len(otherPsg) {
+ return false
+ }
+
+ for i := range psg {
+ if !psg[i].ProbablyEq(otherPsg[i]) {
+ return false
+ }
+ }
+
+ return true
+}
+
+// ToPagesGroup tries to convert seq into a PagesGroup.
+func ToPagesGroup(seq any) (PagesGroup, error) {
+ switch v := seq.(type) {
+ case nil:
+ return nil, nil
+ case PagesGroup:
+ return v, nil
+ case []PageGroup:
+ return PagesGroup(v), nil
+ case []any:
+ l := len(v)
+ if l == 0 {
+ break
+ }
+ switch v[0].(type) {
+ case PageGroup:
+ pagesGroup := make(PagesGroup, l)
+ for i, ipg := range v {
+ if pg, ok := ipg.(PageGroup); ok {
+ pagesGroup[i] = pg
+ } else {
+ return nil, fmt.Errorf("unsupported type in paginate from slice, got %T instead of PageGroup", ipg)
+ }
+ }
+ return pagesGroup, nil
+ }
+ }
+
+ return nil, nil
+}
diff --git a/resources/page/pagegroup_test.go b/resources/page/pagegroup_test.go
new file mode 100644
index 000000000..ef0d24471
--- /dev/null
+++ b/resources/page/pagegroup_test.go
@@ -0,0 +1,466 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "reflect"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/cast"
+)
+
+type pageGroupTestObject struct {
+ path string
+ weight int
+ date string
+ param string
+}
+
+var pageGroupTestSources = []pageGroupTestObject{
+ {"/section1/testpage1.md", 3, "2012-04-06", "foo"},
+ {"/section1/testpage2.md", 3, "2012-01-01", "bar"},
+ {"/section1/testpage3.md", 2, "2012-04-06", "foo"},
+ {"/section2/testpage4.md", 1, "2012-03-02", "bar"},
+ // date might also be a full datetime:
+ {"/section2/testpage5.md", 1, "2012-04-06T00:00:00Z", "baz"},
+}
+
+func preparePageGroupTestPages(t *testing.T) Pages {
+ var pages Pages
+ for _, src := range pageGroupTestSources {
+ p := newTestPage()
+ p.path = src.path
+ if p.path != "" {
+ p.section = strings.Split(strings.TrimPrefix(p.path, "/"), "/")[0]
+ }
+ p.weight = src.weight
+ p.date = cast.ToTime(src.date)
+ p.pubDate = cast.ToTime(src.date)
+ p.expiryDate = cast.ToTime(src.date)
+ p.lastMod = cast.ToTime(src.date).AddDate(3, 0, 0)
+ p.params["custom_param"] = src.param
+ p.params["custom_date"] = cast.ToTime(src.date)
+ p.params["custom_string_date"] = src.date
+ pages = append(pages, p)
+ }
+ return pages
+}
+
+func TestGroupByWithFieldNameArg(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: 1, Pages: Pages{pages[3], pages[4]}},
+ {Key: 2, Pages: Pages{pages[2]}},
+ {Key: 3, Pages: Pages{pages[0], pages[1]}},
+ }
+
+ groups, err := pages.GroupBy("Weight")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByWithMethodNameArg(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
+ {Key: "section2", Pages: Pages{pages[3], pages[4]}},
+ }
+
+ groups, err := pages.GroupBy("Type")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByWithSectionArg(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
+ {Key: "section2", Pages: Pages{pages[3], pages[4]}},
+ }
+
+ groups, err := pages.GroupBy("Section")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be\n%#v, got\n%#v", expect, groups)
+ }
+}
+
+func TestGroupByInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: 3, Pages: Pages{pages[0], pages[1]}},
+ {Key: 2, Pages: Pages{pages[2]}},
+ {Key: 1, Pages: Pages{pages[3], pages[4]}},
+ }
+
+ groups, err := pages.GroupBy("Weight", "desc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByCalledWithEmptyPages(t *testing.T) {
+ t.Parallel()
+ var pages Pages
+ groups, err := pages.GroupBy("Weight")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if groups != nil {
+ t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
+ }
+}
+
+func TestGroupByParamCalledWithUnavailableKey(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ _, err := pages.GroupByParam("UnavailableKey")
+ if err == nil {
+ t.Errorf("GroupByParam should return an error but didn't")
+ }
+}
+
+func TestReverse(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+
+ groups1, err := pages.GroupBy("Weight", "desc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+
+ groups2, err := pages.GroupBy("Weight")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ groups2 = groups2.Reverse()
+
+ if !reflect.DeepEqual(groups2, groups1) {
+ t.Errorf("PagesGroup is sorted in unexpected order. It should be %#v, got %#v", groups2, groups1)
+ }
+}
+
+func TestGroupByParam(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "bar", Pages: Pages{pages[1], pages[3]}},
+ {Key: "baz", Pages: Pages{pages[4]}},
+ {Key: "foo", Pages: Pages{pages[0], pages[2]}},
+ }
+
+ groups, err := pages.GroupByParam("custom_param")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByParamInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "foo", Pages: Pages{pages[0], pages[2]}},
+ {Key: "baz", Pages: Pages{pages[4]}},
+ {Key: "bar", Pages: Pages{pages[1], pages[3]}},
+ }
+
+ groups, err := pages.GroupByParam("custom_param", "desc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
+ c := qt.New(t)
+ testStr := "TestString"
+ p := newTestPage()
+ p.params["custom_param"] = testStr
+ pages := Pages{p}
+
+ groups, err := pages.GroupByParam("custom_param")
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(groups[0].Key, qt.Equals, testStr)
+}
+
+func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ delete(pages[1].Params(), "custom_param")
+ delete(pages[3].Params(), "custom_param")
+ delete(pages[4].Params(), "custom_param")
+
+ expect := PagesGroup{
+ {Key: "foo", Pages: Pages{pages[0], pages[2]}},
+ }
+
+ groups, err := pages.GroupByParam("custom_param")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByParamCalledWithEmptyPages(t *testing.T) {
+ t.Parallel()
+ var pages Pages
+ groups, err := pages.GroupByParam("custom_param")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if groups != nil {
+ t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
+ }
+}
+
+func TestGroupByParamCalledWithUnavailableParam(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ _, err := pages.GroupByParam("unavailable_param")
+ if err == nil {
+ t.Errorf("GroupByParam should return an error but didn't")
+ }
+}
+
+func TestGroupByDate(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ }
+
+ groups, err := pages.GroupByDate("2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByDateInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
+ }
+
+ groups, err := pages.GroupByDate("2006-01", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByPublishDate(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ }
+
+ groups, err := pages.GroupByPublishDate("2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByPublishDateInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
+ }
+
+ groups, err := pages.GroupByPublishDate("2006-01", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByPublishDateWithEmptyPages(t *testing.T) {
+ t.Parallel()
+ var pages Pages
+ groups, err := pages.GroupByPublishDate("2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if groups != nil {
+ t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
+ }
+}
+
+func TestGroupByExpiryDate(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ }
+
+ groups, err := pages.GroupByExpiryDate("2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByParamDate(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ }
+
+ groups, err := pages.GroupByParamDate("custom_date", "2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+// https://github.com/gohugoio/hugo/issues/3983
+func TestGroupByParamDateWithStringParams(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ }
+
+ groups, err := pages.GroupByParamDate("custom_string_date", "2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByLastmod(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2015-04", Pages: Pages{pages[4], pages[2], pages[0]}},
+ {Key: "2015-03", Pages: Pages{pages[3]}},
+ {Key: "2015-01", Pages: Pages{pages[1]}},
+ }
+
+ groups, err := pages.GroupByLastmod("2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByLastmodInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2015-01", Pages: Pages{pages[1]}},
+ {Key: "2015-03", Pages: Pages{pages[3]}},
+ {Key: "2015-04", Pages: Pages{pages[0], pages[2], pages[4]}},
+ }
+
+ groups, err := pages.GroupByLastmod("2006-01", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be\n%#v, got\n%#v", expect, groups)
+ }
+}
+
+func TestGroupByParamDateInReverseOrder(t *testing.T) {
+ t.Parallel()
+ pages := preparePageGroupTestPages(t)
+ expect := PagesGroup{
+ {Key: "2012-01", Pages: Pages{pages[1]}},
+ {Key: "2012-03", Pages: Pages{pages[3]}},
+ {Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
+ }
+
+ groups, err := pages.GroupByParamDate("custom_date", "2006-01", "asc")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if !reflect.DeepEqual(groups, expect) {
+ t.Errorf("PagesGroup has unexpected groups. It should be %#v, got %#v", expect, groups)
+ }
+}
+
+func TestGroupByParamDateWithEmptyPages(t *testing.T) {
+ t.Parallel()
+ var pages Pages
+ groups, err := pages.GroupByParamDate("custom_date", "2006-01")
+ if err != nil {
+ t.Fatalf("Unable to make PagesGroup array: %s", err)
+ }
+ if groups != nil {
+ t.Errorf("PagesGroup isn't empty. It should be %#v, got %#v", nil, groups)
+ }
+}
diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go
new file mode 100644
index 000000000..bc82773e8
--- /dev/null
+++ b/resources/page/pagemeta/page_frontmatter.go
@@ -0,0 +1,427 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+import (
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/spf13/cast"
+)
+
+// FrontMatterHandler maps front matter into Page fields and .Params.
+// Note that we currently have only extracted the date logic.
+type FrontMatterHandler struct {
+ fmConfig frontmatterConfig
+
+ dateHandler frontMatterFieldHandler
+ lastModHandler frontMatterFieldHandler
+ publishDateHandler frontMatterFieldHandler
+ expiryDateHandler frontMatterFieldHandler
+
+ // A map of all date keys configured, including any custom.
+ allDateKeys map[string]bool
+
+ logger loggers.Logger
+}
+
+// FrontMatterDescriptor describes how to handle front matter for a given Page.
+// It has pointers to values in the receiving page which gets updated.
+type FrontMatterDescriptor struct {
+
+ // This is the Page's front matter.
+ Frontmatter map[string]any
+
+ // This is the Page's base filename (BaseFilename), e.g. page.md., or
+ // if page is a leaf bundle, the bundle folder name (ContentBaseName).
+ BaseFilename string
+
+ // The content file's mod time.
+ ModTime time.Time
+
+ // May be set from the author date in Git.
+ GitAuthorDate time.Time
+
+ // The below are pointers to values on Page and will be modified.
+
+ // This is the Page's params.
+ Params map[string]any
+
+ // This is the Page's dates.
+ Dates *resource.Dates
+
+ // This is the Page's Slug etc.
+ PageURLs *URLPath
+
+ // The Location to use to parse dates without time zone info.
+ Location *time.Location
+}
+
+var dateFieldAliases = map[string][]string{
+ fmDate: {},
+ fmLastmod: {"modified"},
+ fmPubDate: {"pubdate", "published"},
+ fmExpiryDate: {"unpublishdate"},
+}
+
+// HandleDates updates all the dates given the current configuration and the
+// supplied front matter params. Note that this requires all lower-case keys
+// in the params map.
+func (f FrontMatterHandler) HandleDates(d *FrontMatterDescriptor) error {
+ if d.Dates == nil {
+ panic("missing dates")
+ }
+
+ if f.dateHandler == nil {
+ panic("missing date handler")
+ }
+
+ if _, err := f.dateHandler(d); err != nil {
+ return err
+ }
+
+ if _, err := f.lastModHandler(d); err != nil {
+ return err
+ }
+
+ if _, err := f.publishDateHandler(d); err != nil {
+ return err
+ }
+
+ if _, err := f.expiryDateHandler(d); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// IsDateKey returns whether the given front matter key is considered a date by the current
+// configuration.
+func (f FrontMatterHandler) IsDateKey(key string) bool {
+ return f.allDateKeys[key]
+}
+
+// A Zero date is a signal that the name can not be parsed.
+// This follows the format as outlined in Jekyll, https://jekyllrb.com/docs/posts/:
+// "Where YEAR is a four-digit number, MONTH and DAY are both two-digit numbers"
+func dateAndSlugFromBaseFilename(location *time.Location, name string) (time.Time, string) {
+ withoutExt, _ := paths.FileAndExt(name)
+
+ if len(withoutExt) < 10 {
+ // This can not be a date.
+ return time.Time{}, ""
+ }
+
+ d, err := htime.ToTimeInDefaultLocationE(withoutExt[:10], location)
+ if err != nil {
+ return time.Time{}, ""
+ }
+
+ // Be a little lenient with the format here.
+ slug := strings.Trim(withoutExt[10:], " -_")
+
+ return d, slug
+}
+
+type frontMatterFieldHandler func(d *FrontMatterDescriptor) (bool, error)
+
+func (f FrontMatterHandler) newChainedFrontMatterFieldHandler(handlers ...frontMatterFieldHandler) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ for _, h := range handlers {
+ // First successful handler wins.
+ success, err := h(d)
+ if err != nil {
+ f.logger.Errorln(err)
+ } else if success {
+ return true, nil
+ }
+ }
+ return false, nil
+ }
+}
+
+type frontmatterConfig struct {
+ date []string
+ lastmod []string
+ publishDate []string
+ expiryDate []string
+}
+
+const (
+ // These are all the date handler identifiers
+ // All identifiers not starting with a ":" maps to a front matter parameter.
+ fmDate = "date"
+ fmPubDate = "publishdate"
+ fmLastmod = "lastmod"
+ fmExpiryDate = "expirydate"
+
+ // Gets date from filename, e.g. 2018-02-22-mypage.md
+ fmFilename = ":filename"
+
+ // Gets date from file OS mod time.
+ fmModTime = ":filemodtime"
+
+ // Gets date from Git
+ fmGitAuthorDate = ":git"
+)
+
+// This is the config you get when doing nothing.
+func newDefaultFrontmatterConfig() frontmatterConfig {
+ return frontmatterConfig{
+ date: []string{fmDate, fmPubDate, fmLastmod},
+ lastmod: []string{fmGitAuthorDate, fmLastmod, fmDate, fmPubDate},
+ publishDate: []string{fmPubDate, fmDate},
+ expiryDate: []string{fmExpiryDate},
+ }
+}
+
+func newFrontmatterConfig(cfg config.Provider) (frontmatterConfig, error) {
+ c := newDefaultFrontmatterConfig()
+ defaultConfig := c
+
+ if cfg.IsSet("frontmatter") {
+ fm := cfg.GetStringMap("frontmatter")
+ for k, v := range fm {
+ loki := strings.ToLower(k)
+ switch loki {
+ case fmDate:
+ c.date = toLowerSlice(v)
+ case fmPubDate:
+ c.publishDate = toLowerSlice(v)
+ case fmLastmod:
+ c.lastmod = toLowerSlice(v)
+ case fmExpiryDate:
+ c.expiryDate = toLowerSlice(v)
+ }
+ }
+ }
+
+ expander := func(c, d []string) []string {
+ out := expandDefaultValues(c, d)
+ out = addDateFieldAliases(out)
+ return out
+ }
+
+ c.date = expander(c.date, defaultConfig.date)
+ c.publishDate = expander(c.publishDate, defaultConfig.publishDate)
+ c.lastmod = expander(c.lastmod, defaultConfig.lastmod)
+ c.expiryDate = expander(c.expiryDate, defaultConfig.expiryDate)
+
+ return c, nil
+}
+
+func addDateFieldAliases(values []string) []string {
+ var complete []string
+
+ for _, v := range values {
+ complete = append(complete, v)
+ if aliases, found := dateFieldAliases[v]; found {
+ complete = append(complete, aliases...)
+ }
+ }
+ return helpers.UniqueStringsReuse(complete)
+}
+
+func expandDefaultValues(values []string, defaults []string) []string {
+ var out []string
+ for _, v := range values {
+ if v == ":default" {
+ out = append(out, defaults...)
+ } else {
+ out = append(out, v)
+ }
+ }
+ return out
+}
+
+func toLowerSlice(in any) []string {
+ out := cast.ToStringSlice(in)
+ for i := 0; i < len(out); i++ {
+ out[i] = strings.ToLower(out[i])
+ }
+
+ return out
+}
+
+// NewFrontmatterHandler creates a new FrontMatterHandler with the given logger and configuration.
+// If no logger is provided, one will be created.
+func NewFrontmatterHandler(logger loggers.Logger, cfg config.Provider) (FrontMatterHandler, error) {
+ if logger == nil {
+ logger = loggers.NewErrorLogger()
+ }
+
+ frontMatterConfig, err := newFrontmatterConfig(cfg)
+ if err != nil {
+ return FrontMatterHandler{}, err
+ }
+
+ allDateKeys := make(map[string]bool)
+ addKeys := func(vals []string) {
+ for _, k := range vals {
+ if !strings.HasPrefix(k, ":") {
+ allDateKeys[k] = true
+ }
+ }
+ }
+
+ addKeys(frontMatterConfig.date)
+ addKeys(frontMatterConfig.expiryDate)
+ addKeys(frontMatterConfig.lastmod)
+ addKeys(frontMatterConfig.publishDate)
+
+ f := FrontMatterHandler{logger: logger, fmConfig: frontMatterConfig, allDateKeys: allDateKeys}
+
+ if err := f.createHandlers(); err != nil {
+ return f, err
+ }
+
+ return f, nil
+}
+
+func (f *FrontMatterHandler) createHandlers() error {
+ var err error
+
+ if f.dateHandler, err = f.createDateHandler(f.fmConfig.date,
+ func(d *FrontMatterDescriptor, t time.Time) {
+ d.Dates.FDate = t
+ setParamIfNotSet(fmDate, t, d)
+ }); err != nil {
+ return err
+ }
+
+ if f.lastModHandler, err = f.createDateHandler(f.fmConfig.lastmod,
+ func(d *FrontMatterDescriptor, t time.Time) {
+ setParamIfNotSet(fmLastmod, t, d)
+ d.Dates.FLastmod = t
+ }); err != nil {
+ return err
+ }
+
+ if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.publishDate,
+ func(d *FrontMatterDescriptor, t time.Time) {
+ setParamIfNotSet(fmPubDate, t, d)
+ d.Dates.FPublishDate = t
+ }); err != nil {
+ return err
+ }
+
+ if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.expiryDate,
+ func(d *FrontMatterDescriptor, t time.Time) {
+ setParamIfNotSet(fmExpiryDate, t, d)
+ d.Dates.FExpiryDate = t
+ }); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func setParamIfNotSet(key string, value any, d *FrontMatterDescriptor) {
+ if _, found := d.Params[key]; found {
+ return
+ }
+ d.Params[key] = value
+}
+
+func (f FrontMatterHandler) createDateHandler(identifiers []string, setter func(d *FrontMatterDescriptor, t time.Time)) (frontMatterFieldHandler, error) {
+ var h *frontmatterFieldHandlers
+ var handlers []frontMatterFieldHandler
+
+ for _, identifier := range identifiers {
+ switch identifier {
+ case fmFilename:
+ handlers = append(handlers, h.newDateFilenameHandler(setter))
+ case fmModTime:
+ handlers = append(handlers, h.newDateModTimeHandler(setter))
+ case fmGitAuthorDate:
+ handlers = append(handlers, h.newDateGitAuthorDateHandler(setter))
+ default:
+ handlers = append(handlers, h.newDateFieldHandler(identifier, setter))
+ }
+ }
+
+ return f.newChainedFrontMatterFieldHandler(handlers...), nil
+}
+
+type frontmatterFieldHandlers int
+
+func (f *frontmatterFieldHandlers) newDateFieldHandler(key string, setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ v, found := d.Frontmatter[key]
+
+ if !found {
+ return false, nil
+ }
+
+ date, err := htime.ToTimeInDefaultLocationE(v, d.Location)
+ if err != nil {
+ return false, nil
+ }
+
+ // We map several date keys to one, so, for example,
+ // "expirydate", "unpublishdate" will all set .ExpiryDate (first found).
+ setter(d, date)
+
+ // This is the params key as set in front matter.
+ d.Params[key] = date
+
+ return true, nil
+ }
+}
+
+func (f *frontmatterFieldHandlers) newDateFilenameHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ date, slug := dateAndSlugFromBaseFilename(d.Location, d.BaseFilename)
+ if date.IsZero() {
+ return false, nil
+ }
+
+ setter(d, date)
+
+ if _, found := d.Frontmatter["slug"]; !found {
+ // Use slug from filename
+ d.PageURLs.Slug = slug
+ }
+
+ return true, nil
+ }
+}
+
+func (f *frontmatterFieldHandlers) newDateModTimeHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ if d.ModTime.IsZero() {
+ return false, nil
+ }
+ setter(d, d.ModTime)
+ return true, nil
+ }
+}
+
+func (f *frontmatterFieldHandlers) newDateGitAuthorDateHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
+ return func(d *FrontMatterDescriptor) (bool, error) {
+ if d.GitAuthorDate.IsZero() {
+ return false, nil
+ }
+ setter(d, d.GitAuthorDate)
+ return true, nil
+ }
+}
diff --git a/resources/page/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go
new file mode 100644
index 000000000..c5c4ccf2d
--- /dev/null
+++ b/resources/page/pagemeta/page_frontmatter_test.go
@@ -0,0 +1,257 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+import (
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDateAndSlugFromBaseFilename(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ tests := []struct {
+ name string
+ date string
+ slug string
+ }{
+ {"page.md", "0001-01-01", ""},
+ {"2012-09-12-page.md", "2012-09-12", "page"},
+ {"2018-02-28-page.md", "2018-02-28", "page"},
+ {"2018-02-28_page.md", "2018-02-28", "page"},
+ {"2018-02-28 page.md", "2018-02-28", "page"},
+ {"2018-02-28page.md", "2018-02-28", "page"},
+ {"2018-02-28-.md", "2018-02-28", ""},
+ {"2018-02-28-.md", "2018-02-28", ""},
+ {"2018-02-28.md", "2018-02-28", ""},
+ {"2018-02-28-page", "2018-02-28", "page"},
+ {"2012-9-12-page.md", "0001-01-01", ""},
+ {"asdfasdf.md", "0001-01-01", ""},
+ }
+
+ for _, test := range tests {
+ expectedDate, err := time.Parse("2006-01-02", test.date)
+ c.Assert(err, qt.IsNil)
+
+ gotDate, gotSlug := dateAndSlugFromBaseFilename(time.UTC, test.name)
+
+ c.Assert(gotDate, qt.Equals, expectedDate)
+ c.Assert(gotSlug, qt.Equals, test.slug)
+
+ }
+}
+
+func newTestFd() *FrontMatterDescriptor {
+ return &FrontMatterDescriptor{
+ Frontmatter: make(map[string]any),
+ Params: make(map[string]any),
+ Dates: &resource.Dates{},
+ PageURLs: &URLPath{},
+ Location: time.UTC,
+ }
+}
+
+func TestFrontMatterNewConfig(t *testing.T) {
+ c := qt.New(t)
+
+ cfg := config.New()
+
+ cfg.Set("frontmatter", map[string]any{
+ "date": []string{"publishDate", "LastMod"},
+ "Lastmod": []string{"publishDate"},
+ "expiryDate": []string{"lastMod"},
+ "publishDate": []string{"date"},
+ })
+
+ fc, err := newFrontmatterConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(fc.date, qt.DeepEquals, []string{"publishdate", "pubdate", "published", "lastmod", "modified"})
+ c.Assert(fc.lastmod, qt.DeepEquals, []string{"publishdate", "pubdate", "published"})
+ c.Assert(fc.expiryDate, qt.DeepEquals, []string{"lastmod", "modified"})
+ c.Assert(fc.publishDate, qt.DeepEquals, []string{"date"})
+
+ // Default
+ cfg = config.New()
+ fc, err = newFrontmatterConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(fc.date, qt.DeepEquals, []string{"date", "publishdate", "pubdate", "published", "lastmod", "modified"})
+ c.Assert(fc.lastmod, qt.DeepEquals, []string{":git", "lastmod", "modified", "date", "publishdate", "pubdate", "published"})
+ c.Assert(fc.expiryDate, qt.DeepEquals, []string{"expirydate", "unpublishdate"})
+ c.Assert(fc.publishDate, qt.DeepEquals, []string{"publishdate", "pubdate", "published", "date"})
+
+ // :default keyword
+ cfg.Set("frontmatter", map[string]any{
+ "date": []string{"d1", ":default"},
+ "lastmod": []string{"d2", ":default"},
+ "expiryDate": []string{"d3", ":default"},
+ "publishDate": []string{"d4", ":default"},
+ })
+ fc, err = newFrontmatterConfig(cfg)
+ c.Assert(err, qt.IsNil)
+ c.Assert(fc.date, qt.DeepEquals, []string{"d1", "date", "publishdate", "pubdate", "published", "lastmod", "modified"})
+ c.Assert(fc.lastmod, qt.DeepEquals, []string{"d2", ":git", "lastmod", "modified", "date", "publishdate", "pubdate", "published"})
+ c.Assert(fc.expiryDate, qt.DeepEquals, []string{"d3", "expirydate", "unpublishdate"})
+ c.Assert(fc.publishDate, qt.DeepEquals, []string{"d4", "publishdate", "pubdate", "published", "date"})
+}
+
+func TestFrontMatterDatesHandlers(t *testing.T) {
+ c := qt.New(t)
+
+ for _, handlerID := range []string{":filename", ":fileModTime", ":git"} {
+
+ cfg := config.New()
+
+ cfg.Set("frontmatter", map[string]any{
+ "date": []string{handlerID, "date"},
+ })
+
+ handler, err := NewFrontmatterHandler(nil, cfg)
+ c.Assert(err, qt.IsNil)
+
+ d1, _ := time.Parse("2006-01-02", "2018-02-01")
+ d2, _ := time.Parse("2006-01-02", "2018-02-02")
+
+ d := newTestFd()
+ switch strings.ToLower(handlerID) {
+ case ":filename":
+ d.BaseFilename = "2018-02-01-page.md"
+ case ":filemodtime":
+ d.ModTime = d1
+ case ":git":
+ d.GitAuthorDate = d1
+ }
+ d.Frontmatter["date"] = d2
+ c.Assert(handler.HandleDates(d), qt.IsNil)
+ c.Assert(d.Dates.FDate, qt.Equals, d1)
+ c.Assert(d.Params["date"], qt.Equals, d2)
+
+ d = newTestFd()
+ d.Frontmatter["date"] = d2
+ c.Assert(handler.HandleDates(d), qt.IsNil)
+ c.Assert(d.Dates.FDate, qt.Equals, d2)
+ c.Assert(d.Params["date"], qt.Equals, d2)
+
+ }
+}
+
+func TestFrontMatterDatesCustomConfig(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ cfg := config.New()
+ cfg.Set("frontmatter", map[string]any{
+ "date": []string{"mydate"},
+ "lastmod": []string{"publishdate"},
+ "publishdate": []string{"publishdate"},
+ })
+
+ handler, err := NewFrontmatterHandler(nil, cfg)
+ c.Assert(err, qt.IsNil)
+
+ testDate, err := time.Parse("2006-01-02", "2018-02-01")
+ c.Assert(err, qt.IsNil)
+
+ d := newTestFd()
+ d.Frontmatter["mydate"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["date"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["lastmod"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["publishdate"] = testDate
+ testDate = testDate.Add(24 * time.Hour)
+ d.Frontmatter["expirydate"] = testDate
+
+ c.Assert(handler.HandleDates(d), qt.IsNil)
+
+ c.Assert(d.Dates.FDate.Day(), qt.Equals, 1)
+ c.Assert(d.Dates.FLastmod.Day(), qt.Equals, 4)
+ c.Assert(d.Dates.FPublishDate.Day(), qt.Equals, 4)
+ c.Assert(d.Dates.FExpiryDate.Day(), qt.Equals, 5)
+
+ c.Assert(d.Params["date"], qt.Equals, d.Dates.FDate)
+ c.Assert(d.Params["mydate"], qt.Equals, d.Dates.FDate)
+ c.Assert(d.Params["publishdate"], qt.Equals, d.Dates.FPublishDate)
+ c.Assert(d.Params["expirydate"], qt.Equals, d.Dates.FExpiryDate)
+
+ c.Assert(handler.IsDateKey("date"), qt.Equals, false) // This looks odd, but is configured like this.
+ c.Assert(handler.IsDateKey("mydate"), qt.Equals, true)
+ c.Assert(handler.IsDateKey("publishdate"), qt.Equals, true)
+ c.Assert(handler.IsDateKey("pubdate"), qt.Equals, true)
+}
+
+func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ cfg := config.New()
+
+ cfg.Set("frontmatter", map[string]any{
+ "date": []string{"mydate", ":default"},
+ "publishdate": []string{":default", "mypubdate"},
+ })
+
+ handler, err := NewFrontmatterHandler(nil, cfg)
+ c.Assert(err, qt.IsNil)
+
+ testDate, _ := time.Parse("2006-01-02", "2018-02-01")
+ d := newTestFd()
+ d.Frontmatter["mydate"] = testDate
+ d.Frontmatter["date"] = testDate.Add(1 * 24 * time.Hour)
+ d.Frontmatter["mypubdate"] = testDate.Add(2 * 24 * time.Hour)
+ d.Frontmatter["publishdate"] = testDate.Add(3 * 24 * time.Hour)
+
+ c.Assert(handler.HandleDates(d), qt.IsNil)
+
+ c.Assert(d.Dates.FDate.Day(), qt.Equals, 1)
+ c.Assert(d.Dates.FLastmod.Day(), qt.Equals, 2)
+ c.Assert(d.Dates.FPublishDate.Day(), qt.Equals, 4)
+ c.Assert(d.Dates.FExpiryDate.IsZero(), qt.Equals, true)
+}
+
+func TestExpandDefaultValues(t *testing.T) {
+ c := qt.New(t)
+ c.Assert(expandDefaultValues([]string{"a", ":default", "d"}, []string{"b", "c"}), qt.DeepEquals, []string{"a", "b", "c", "d"})
+ c.Assert(expandDefaultValues([]string{"a", "b", "c"}, []string{"a", "b", "c"}), qt.DeepEquals, []string{"a", "b", "c"})
+ c.Assert(expandDefaultValues([]string{":default", "a", ":default", "d"}, []string{"b", "c"}), qt.DeepEquals, []string{"b", "c", "a", "b", "c", "d"})
+}
+
+func TestFrontMatterDateFieldHandler(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ handlers := new(frontmatterFieldHandlers)
+
+ fd := newTestFd()
+ d, _ := time.Parse("2006-01-02", "2018-02-01")
+ fd.Frontmatter["date"] = d
+ h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.FDate = t })
+
+ handled, err := h(fd)
+ c.Assert(handled, qt.Equals, true)
+ c.Assert(err, qt.IsNil)
+ c.Assert(fd.Dates.FDate, qt.Equals, d)
+}
diff --git a/resources/page/pagemeta/pagemeta.go b/resources/page/pagemeta/pagemeta.go
new file mode 100644
index 000000000..94c6b00aa
--- /dev/null
+++ b/resources/page/pagemeta/pagemeta.go
@@ -0,0 +1,110 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+import (
+ "github.com/mitchellh/mapstructure"
+)
+
+type URLPath struct {
+ URL string
+ Permalink string
+ Slug string
+ Section string
+}
+
+const (
+ Never = "never"
+ Always = "always"
+ ListLocally = "local"
+ Link = "link"
+)
+
+var defaultBuildConfig = BuildConfig{
+ List: Always,
+ Render: Always,
+ PublishResources: true,
+ set: true,
+}
+
+// BuildConfig holds configuration options about how to handle a Page in Hugo's
+// build process.
+type BuildConfig struct {
+ // Whether to add it to any of the page collections.
+ // Note that the page can always be found with .Site.GetPage.
+ // Valid values: never, always, local.
+ // Setting it to 'local' means they will be available via the local
+ // page collections, e.g. $section.Pages.
+ // Note: before 0.57.2 this was a bool, so we accept those too.
+ List string
+
+ // Whether to render it.
+ // Valid values: never, always, link.
+ // The value link means it will not be rendered, but it will get a RelPermalink/Permalink.
+ // Note that before 0.76.0 this was a bool, so we accept those too.
+ Render string
+
+ // Whether to publish its resources. These will still be published on demand,
+ // but enabling this can be useful if the originals (e.g. images) are
+ // never used.
+ PublishResources bool
+
+ set bool // BuildConfig is non-zero if this is set to true.
+}
+
+// Disable sets all options to their off value.
+func (b *BuildConfig) Disable() {
+ b.List = Never
+ b.Render = Never
+ b.PublishResources = false
+ b.set = true
+}
+
+func (b BuildConfig) IsZero() bool {
+ return !b.set
+}
+
+func DecodeBuildConfig(m any) (BuildConfig, error) {
+ b := defaultBuildConfig
+ if m == nil {
+ return b, nil
+ }
+
+ err := mapstructure.WeakDecode(m, &b)
+
+ // In 0.67.1 we changed the list attribute from a bool to a string (enum).
+ // Bool values will become 0 or 1.
+ switch b.List {
+ case "0":
+ b.List = Never
+ case "1":
+ b.List = Always
+ case Always, Never, ListLocally:
+ default:
+ b.List = Always
+ }
+
+ // In 0.76.0 we changed the Render from bool to a string.
+ switch b.Render {
+ case "0":
+ b.Render = Never
+ case "1":
+ b.Render = Always
+ case Always, Never, Link:
+ default:
+ b.Render = Always
+ }
+
+ return b, err
+}
diff --git a/resources/page/pagemeta/pagemeta_test.go b/resources/page/pagemeta/pagemeta_test.go
new file mode 100644
index 000000000..288dc7e26
--- /dev/null
+++ b/resources/page/pagemeta/pagemeta_test.go
@@ -0,0 +1,92 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagemeta
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDecodeBuildConfig(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ configTempl := `
+[_build]
+render = %s
+list = %s
+publishResources = true`
+
+ for _, test := range []struct {
+ args []any
+ expect BuildConfig
+ }{
+ {
+ []any{"true", "true"},
+ BuildConfig{
+ Render: Always,
+ List: Always,
+ PublishResources: true,
+ set: true,
+ },
+ },
+ {[]any{"true", "false"}, BuildConfig{
+ Render: Always,
+ List: Never,
+ PublishResources: true,
+ set: true,
+ }},
+ {[]any{`"always"`, `"always"`}, BuildConfig{
+ Render: Always,
+ List: Always,
+ PublishResources: true,
+ set: true,
+ }},
+ {[]any{`"never"`, `"never"`}, BuildConfig{
+ Render: Never,
+ List: Never,
+ PublishResources: true,
+ set: true,
+ }},
+ {[]any{`"link"`, `"local"`}, BuildConfig{
+ Render: Link,
+ List: ListLocally,
+ PublishResources: true,
+ set: true,
+ }},
+ {[]any{`"always"`, `"asdfadf"`}, BuildConfig{
+ Render: Always,
+ List: Always,
+ PublishResources: true,
+ set: true,
+ }},
+ } {
+ cfg, err := config.FromConfigString(fmt.Sprintf(configTempl, test.args...), "toml")
+ c.Assert(err, qt.IsNil)
+ bcfg, err := DecodeBuildConfig(cfg.Get("_build"))
+ c.Assert(err, qt.IsNil)
+
+ eq := qt.CmpEquals(hqt.DeepAllowUnexported(BuildConfig{}))
+
+ c.Assert(bcfg, eq, test.expect)
+
+ }
+}
diff --git a/resources/page/pages.go b/resources/page/pages.go
new file mode 100644
index 000000000..f47af5114
--- /dev/null
+++ b/resources/page/pages.go
@@ -0,0 +1,157 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "math/rand"
+
+ "github.com/gohugoio/hugo/compare"
+
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// Pages is a slice of Page objects. This is the most common list type in Hugo.
+type Pages []Page
+
+// String returns a string representation of the list.
+// For internal use.
+func (ps Pages) String() string {
+	return fmt.Sprintf("Pages(%d)", len(ps))
+}
+
+// shuffle randomizes the order of ps in place (Fisher–Yates).
+// Used in tests.
+func (ps Pages) shuffle() {
+	for i := range ps {
+		j := rand.Intn(i + 1)
+		ps[i], ps[j] = ps[j], ps[i]
+	}
+}
+
+// ToResources wraps resource.ResourcesConverter.
+// It returns the pages as a resource.Resources slice; each Page is
+// used directly as a resource.Resource (no copying of page data).
+// For internal use.
+func (pages Pages) ToResources() resource.Resources {
+	r := make(resource.Resources, len(pages))
+	for i, p := range pages {
+		r[i] = p
+	}
+	return r
+}
+
+// ToPages tries to convert seq into Pages.
+// A nil seq converts to an empty (non-nil) Pages. Supported inputs are
+// Pages, *Pages, WeightedPages, PageGroup, []Page and []any whose
+// elements are all Page; anything else yields an error.
+func ToPages(seq any) (Pages, error) {
+	if seq == nil {
+		return Pages{}, nil
+	}
+
+	switch v := seq.(type) {
+	case Pages:
+		return v, nil
+	case *Pages:
+		return *(v), nil
+	case WeightedPages:
+		return v.Pages(), nil
+	case PageGroup:
+		return v.Pages, nil
+	case []Page:
+		pages := make(Pages, len(v))
+		for i, vv := range v {
+			pages[i] = vv
+		}
+		return pages, nil
+	case []any:
+		pages := make(Pages, len(v))
+		success := true
+		for i, vv := range v {
+			p, ok := vv.(Page)
+			if !ok {
+				// A single non-Page element makes the whole conversion
+				// fail; fall through to the error below.
+				success = false
+				break
+			}
+			pages[i] = p
+		}
+		if success {
+			return pages, nil
+		}
+	}
+
+	return nil, fmt.Errorf("cannot convert type %T to Pages", seq)
+}
+
+// Group groups the pages in `in` by the given key.
+// This implements collections.Grouper. Note that the receiver itself is
+// not used; the grouped pages come from converting `in` via ToPages.
+func (p Pages) Group(key any, in any) (any, error) {
+	pages, err := ToPages(in)
+	if err != nil {
+		return PageGroup{}, err
+	}
+	return PageGroup{Key: key, Pages: pages}, nil
+}
+
+// Len returns the number of pages in the list.
+func (p Pages) Len() int {
+	return len(p)
+}
+
+// ProbablyEq wraps compare.ProbablyEqer
+// It reports whether the two lists are probably equal: lengths must
+// match, the first 50 elements are compared pairwise, and after that
+// only every 50th element is sampled — hence "probably".
+// For internal use.
+func (pages Pages) ProbablyEq(other any) bool {
+	otherPages, ok := other.(Pages)
+	if !ok {
+		return false
+	}
+
+	if len(pages) != len(otherPages) {
+		return false
+	}
+
+	step := 1
+
+	for i := 0; i < len(pages); i += step {
+		if !pages[i].Eq(otherPages[i]) {
+			return false
+		}
+
+		if i > 50 {
+			// This is most likely the same.
+			step = 50
+		}
+	}
+
+	return true
+}
+
+// removeFirstIfFound returns ps with the first page equal (Page.Eq) to p
+// removed. NOTE(review): the append-based removal mutates the receiver's
+// backing array, so callers must use the returned slice and must not rely
+// on the original slice afterwards — confirm no caller aliases ps.
+func (ps Pages) removeFirstIfFound(p Page) Pages {
+	ii := -1
+	for i, pp := range ps {
+		if p.Eq(pp) {
+			ii = i
+			break
+		}
+	}
+
+	if ii != -1 {
+		ps = append(ps[:ii], ps[ii+1:]...)
+	}
+	return ps
+}
+
+// PagesFactory somehow creates some Pages.
+// We do a lot of lazy Pages initialization in Hugo, so we need a type.
+type PagesFactory func() Pages
+
+// Compile-time interface checks.
+var (
+	_ resource.ResourcesConverter = Pages{}
+	_ compare.ProbablyEqer        = Pages{}
+)
diff --git a/resources/page/pages_cache.go b/resources/page/pages_cache.go
new file mode 100644
index 000000000..9435cb308
--- /dev/null
+++ b/resources/page/pages_cache.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "sync"
+)
+
+// pageCacheEntry holds one cached result: the input page lists (in)
+// and the Pages produced from them (out).
+type pageCacheEntry struct {
+	in  []Pages
+	out Pages
+}
+
+// matches reports whether pageLists is element-wise identical
+// (same length, same underlying pages per pagesEqual) to entry.in.
+func (entry pageCacheEntry) matches(pageLists []Pages) bool {
+	if len(entry.in) != len(pageLists) {
+		return false
+	}
+	for i, p := range pageLists {
+		if !pagesEqual(p, entry.in[i]) {
+			return false
+		}
+	}
+
+	return true
+}
+
+// pageCache caches Pages results keyed by a string (typically the sort
+// or merge operation name); each key may hold several entries, one per
+// distinct input combination. Guarded by the embedded RWMutex.
+type pageCache struct {
+	sync.RWMutex
+	m map[string][]pageCacheEntry
+}
+
+// newPageCache returns an empty, ready-to-use pageCache.
+func newPageCache() *pageCache {
+	return &pageCache{m: make(map[string][]pageCacheEntry)}
+}
+
+// clear drops all cached entries.
+func (c *pageCache) clear() {
+	c.Lock()
+	defer c.Unlock()
+	c.m = make(map[string][]pageCacheEntry)
+}
+
+// get/getP gets a Pages slice from the cache matching the given key and
+// all the provided Pages slices.
+// If none found in cache, a copy of the first slice is created.
+//
+// If an apply func is provided, that func is applied to the newly created copy.
+//
+// The getP variant's apply func takes a pointer to Pages.
+//
+// The cache and the execution of the apply func are protected by a RWMutex.
+// The bool result is true on a cache hit, false when a new entry was created.
+func (c *pageCache) get(key string, apply func(p Pages), pageLists ...Pages) (Pages, bool) {
+	return c.getP(key, func(p *Pages) {
+		if apply != nil {
+			apply(*p)
+		}
+	}, pageLists...)
+}
+
+// getP implements the cache lookup using double-checked locking:
+// a fast read-locked probe, then a write-locked re-check before
+// computing and storing a new entry.
+func (c *pageCache) getP(key string, apply func(p *Pages), pageLists ...Pages) (Pages, bool) {
+	c.RLock()
+	if cached, ok := c.m[key]; ok {
+		for _, entry := range cached {
+			if entry.matches(pageLists) {
+				c.RUnlock()
+				return entry.out, true
+			}
+		}
+	}
+	c.RUnlock()
+
+	c.Lock()
+	defer c.Unlock()
+
+	// double-check: another goroutine may have inserted the entry
+	// between RUnlock and Lock.
+	if cached, ok := c.m[key]; ok {
+		for _, entry := range cached {
+			if entry.matches(pageLists) {
+				return entry.out, true
+			}
+		}
+	}
+
+	// Work on a copy so the caller's slice is never mutated by apply.
+	p := pageLists[0]
+	pagesCopy := append(Pages(nil), p...)
+
+	if apply != nil {
+		apply(&pagesCopy)
+	}
+
+	entry := pageCacheEntry{in: pageLists, out: pagesCopy}
+	if v, ok := c.m[key]; ok {
+		c.m[key] = append(v, entry)
+	} else {
+		c.m[key] = []pageCacheEntry{entry}
+	}
+
+	return pagesCopy, false
+}
+
+// pagesEqual returns whether p1 and p2 are equal.
+// Equality here is element-wise interface identity (==), not Page.Eq:
+// two lists holding the same Page values in the same order are equal.
+func pagesEqual(p1, p2 Pages) bool {
+	if p1 == nil && p2 == nil {
+		return true
+	}
+
+	if p1 == nil || p2 == nil {
+		return false
+	}
+
+	if p1.Len() != p2.Len() {
+		return false
+	}
+
+	if p1.Len() == 0 {
+		return true
+	}
+
+	for i := 0; i < len(p1); i++ {
+		if p1[i] != p2[i] {
+			return false
+		}
+	}
+	return true
+}
diff --git a/resources/page/pages_cache_test.go b/resources/page/pages_cache_test.go
new file mode 100644
index 000000000..825bdc31f
--- /dev/null
+++ b/resources/page/pages_cache_test.go
@@ -0,0 +1,87 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "strconv"
+ "sync"
+ "sync/atomic"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestPageCache exercises pageCache.get from 100 concurrent goroutines,
+// checking hit/miss accounting (via the atomic counters o1/o2), result
+// identity, and that the apply func runs exactly once per new entry.
+func TestPageCache(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	c1 := newPageCache()
+
+	changeFirst := func(p Pages) {
+		p[0].(*testPage).description = "changed"
+	}
+
+	var o1 uint64
+	var o2 uint64
+
+	var wg sync.WaitGroup
+
+	// l1/l2 serialize the get+CAS pairs so the hit/miss expectation
+	// below is well-defined despite the concurrent goroutines.
+	var l1 sync.Mutex
+	var l2 sync.Mutex
+
+	var testPageSets []Pages
+
+	for i := 0; i < 50; i++ {
+		testPageSets = append(testPageSets, createSortTestPages(i+1))
+	}
+
+	for j := 0; j < 100; j++ {
+		wg.Add(1)
+		go func() {
+			defer wg.Done()
+			for k, pages := range testPageSets {
+				l1.Lock()
+				p, ca := c1.get("k1", nil, pages)
+				// The CAS succeeds only for the first goroutine to reach
+				// set k, i.e. exactly when the cache missed.
+				c.Assert(ca, qt.Equals, !atomic.CompareAndSwapUint64(&o1, uint64(k), uint64(k+1)))
+				l1.Unlock()
+				p2, c2 := c1.get("k1", nil, p)
+				c.Assert(c2, qt.Equals, true)
+				c.Assert(pagesEqual(p, p2), qt.Equals, true)
+				c.Assert(pagesEqual(p, pages), qt.Equals, true)
+				c.Assert(p, qt.Not(qt.IsNil))
+
+				l2.Lock()
+				p3, c3 := c1.get("k2", changeFirst, pages)
+				c.Assert(c3, qt.Equals, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)))
+				l2.Unlock()
+				c.Assert(p3, qt.Not(qt.IsNil))
+				c.Assert("changed", qt.Equals, p3[0].(*testPage).description)
+			}
+		}()
+	}
+	wg.Wait()
+}
+
+// BenchmarkPageCache measures the steady-state (all-hits after the first
+// iteration) cost of pageCache.getP for a 30-page list.
+func BenchmarkPageCache(b *testing.B) {
+	cache := newPageCache()
+	pages := make(Pages, 30)
+	for i := 0; i < 30; i++ {
+		pages[i] = &testPage{title: "p" + strconv.Itoa(i)}
+	}
+	key := "key"
+
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		cache.getP(key, nil, pages)
+	}
+}
diff --git a/resources/page/pages_language_merge.go b/resources/page/pages_language_merge.go
new file mode 100644
index 000000000..4c5a926cf
--- /dev/null
+++ b/resources/page/pages_language_merge.go
@@ -0,0 +1,62 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+)
+
+var _ pagesLanguageMerger = (*Pages)(nil)
+
+// pagesLanguageMerger is the contract for merging page lists across
+// languages.
+type pagesLanguageMerger interface {
+	MergeByLanguage(other Pages) Pages
+	// Needed for integration with the tpl package.
+	MergeByLanguageInterface(other any) (any, error)
+}
+
+// MergeByLanguage supplies missing translations in p1 with values from p2.
+// The result is sorted by the default sort order for pages.
+// Results are cached (keyed on both input lists) via the package sort cache.
+func (p1 Pages) MergeByLanguage(p2 Pages) Pages {
+	merge := func(pages *Pages) {
+		// Index p1's translation keys, then append any page from p2
+		// whose key is not already present.
+		m := make(map[string]bool)
+		for _, p := range *pages {
+			m[p.TranslationKey()] = true
+		}
+
+		for _, p := range p2 {
+			if _, found := m[p.TranslationKey()]; !found {
+				*pages = append(*pages, p)
+			}
+		}
+
+		SortByDefault(*pages)
+	}
+
+	out, _ := spc.getP("pages.MergeByLanguage", merge, p1, p2)
+
+	return out
+}
+
+// MergeByLanguageInterface is the generic version of MergeByLanguage. It
+// is here just so it can be called from the tpl package.
+// A nil input returns p1 unchanged; a non-Pages input is an error.
+func (p1 Pages) MergeByLanguageInterface(in any) (any, error) {
+	if in == nil {
+		return p1, nil
+	}
+	p2, ok := in.(Pages)
+	if !ok {
+		return nil, fmt.Errorf("%T cannot be merged by language", in)
+	}
+	return p1.MergeByLanguage(p2), nil
+}
diff --git a/resources/page/pages_prev_next.go b/resources/page/pages_prev_next.go
new file mode 100644
index 000000000..753a3e415
--- /dev/null
+++ b/resources/page/pages_prev_next.go
@@ -0,0 +1,34 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+// Next returns the next page relative to the given page, or nil if cur
+// is not found or is the first element. Note that "next" here means the
+// element at the lower index: with the default (newest-first) sort this
+// is the newer page.
+func (p Pages) Next(cur Page) Page {
+	x := searchPage(cur, p)
+	if x <= 0 {
+		return nil
+	}
+	return p[x-1]
+}
+
+// Prev returns the previous page relative to the given page, or nil if
+// cur is not found or is the last element. "Previous" means the element
+// at the higher index (the older page under the default sort).
+func (p Pages) Prev(cur Page) Page {
+	x := searchPage(cur, p)
+
+	if x == -1 || len(p)-x < 2 {
+		return nil
+	}
+
+	return p[x+1]
+}
diff --git a/resources/page/pages_prev_next_test.go b/resources/page/pages_prev_next_test.go
new file mode 100644
index 000000000..0ee1564cd
--- /dev/null
+++ b/resources/page/pages_prev_next_test.go
@@ -0,0 +1,91 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/cast"
+)
+
+// pagePNTestObject is a minimal fixture describing a test page by path,
+// weight and date string.
+type pagePNTestObject struct {
+	path   string
+	weight int
+	date   string
+}
+
+// pagePNTestSources is the shared fixture for the Prev/Next tests below.
+var pagePNTestSources = []pagePNTestObject{
+	{"/section1/testpage1.md", 5, "2012-04-06"},
+	{"/section1/testpage2.md", 4, "2012-01-01"},
+	{"/section1/testpage3.md", 3, "2012-04-06"},
+	{"/section2/testpage4.md", 2, "2012-03-02"},
+	{"/section2/testpage5.md", 1, "2012-04-06"},
+}
+
+// TestPrev checks that Pages.Prev returns the element at the next higher
+// index and nil at the end of the list.
+func TestPrev(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	pages := preparePageGroupTestPages(t)
+
+	c.Assert(pages.Prev(pages[3]), qt.Equals, pages[4])
+	c.Assert(pages.Prev(pages[1]), qt.Equals, pages[2])
+	c.Assert(pages.Prev(pages[4]), qt.IsNil)
+}
+
+// TestNext checks that Pages.Next returns the element at the next lower
+// index and nil at the start of the list.
+func TestNext(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	pages := preparePageGroupTestPages(t)
+
+	c.Assert(pages.Next(pages[0]), qt.IsNil)
+	c.Assert(pages.Next(pages[1]), qt.Equals, pages[0])
+	c.Assert(pages.Next(pages[4]), qt.Equals, pages[3])
+}
+
+// prepareWeightedPagesPrevNext builds a sorted WeightedPages list from
+// the shared pagePNTestSources fixture.
+func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages {
+	w := WeightedPages{}
+
+	for _, src := range pagePNTestSources {
+		p := newTestPage()
+		p.path = src.path
+		p.weight = src.weight
+		p.date = cast.ToTime(src.date)
+		p.pubDate = cast.ToTime(src.date)
+		w = append(w, WeightedPage{Weight: p.weight, Page: p})
+	}
+
+	w.Sort()
+	return w
+}
+
+// TestWeightedPagesPrev mirrors TestPrev for WeightedPages.
+func TestWeightedPagesPrev(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	w := prepareWeightedPagesPrevNext(t)
+
+	c.Assert(w.Prev(w[0].Page), qt.Equals, w[1].Page)
+	c.Assert(w.Prev(w[1].Page), qt.Equals, w[2].Page)
+	c.Assert(w.Prev(w[4].Page), qt.IsNil)
+}
+
+// TestWeightedPagesNext mirrors TestNext for WeightedPages.
+func TestWeightedPagesNext(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	w := prepareWeightedPagesPrevNext(t)
+
+	c.Assert(w.Next(w[0].Page), qt.IsNil)
+	c.Assert(w.Next(w[1].Page), qt.Equals, w[0].Page)
+	c.Assert(w.Next(w[4].Page), qt.Equals, w[3].Page)
+}
diff --git a/resources/page/pages_related.go b/resources/page/pages_related.go
new file mode 100644
index 000000000..35bb2965a
--- /dev/null
+++ b/resources/page/pages_related.go
@@ -0,0 +1,195 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/related"
+ "github.com/spf13/cast"
+)
+
+var (
+	// Assert that Pages and PageGroup implements the PageGenealogist interface.
+	_ PageGenealogist = (Pages)(nil)
+	_ PageGenealogist = PageGroup{}
+)
+
+// A PageGenealogist finds related pages in a page collection. This interface is implemented
+// by Pages and PageGroup, which makes it available as `{{ .RegularRelated . }}` etc.
+type PageGenealogist interface {
+
+	// Related searches all configured indices with keywords from doc.
+	// Template example:
+	// {{ $related := .RegularPages.Related . }}
+	Related(doc related.Document) (Pages, error)
+
+	// RelatedIndices restricts the search to the named indices.
+	// Template example:
+	// {{ $related := .RegularPages.RelatedIndices . "tags" "date" }}
+	RelatedIndices(doc related.Document, indices ...any) (Pages, error)
+
+	// RelatedTo searches with explicit index/value pairs.
+	// Template example:
+	// {{ $related := .RegularPages.RelatedTo ( keyVals "tags" "hugo", "rocks") ( keyVals "date" .Date ) }}
+	RelatedTo(args ...types.KeyValues) (Pages, error)
+}
+
+// Related searches all the configured indices with the search keywords from the
+// supplied document. If doc is itself a Page, it is removed from the result.
+func (p Pages) Related(doc related.Document) (Pages, error) {
+	result, err := p.searchDoc(doc)
+	if err != nil {
+		return nil, err
+	}
+
+	if page, ok := doc.(Page); ok {
+		return result.removeFirstIfFound(page), nil
+	}
+
+	return result, nil
+}
+
+// RelatedIndices searches the given indices with the search keywords from the
+// supplied document. The variadic indices are coerced to strings; as with
+// Related, doc itself is excluded from the result if it is a Page.
+func (p Pages) RelatedIndices(doc related.Document, indices ...any) (Pages, error) {
+	indicesStr, err := cast.ToStringSliceE(indices)
+	if err != nil {
+		return nil, err
+	}
+
+	result, err := p.searchDoc(doc, indicesStr...)
+	if err != nil {
+		return nil, err
+	}
+
+	if page, ok := doc.(Page); ok {
+		return result.removeFirstIfFound(page), nil
+	}
+
+	return result, nil
+}
+
+// RelatedTo searches the given indices with the corresponding values.
+// An empty receiver yields (nil, nil).
+func (p Pages) RelatedTo(args ...types.KeyValues) (Pages, error) {
+	if len(p) == 0 {
+		return nil, nil
+	}
+
+	return p.search(args...)
+}
+
+// search runs a key/value search against the inverted index built for p.
+func (p Pages) search(args ...types.KeyValues) (Pages, error) {
+	return p.withInvertedIndex(func(idx *related.InvertedIndex) ([]related.Document, error) {
+		return idx.SearchKeyValues(args...)
+	})
+}
+
+// searchDoc runs a document-driven search against the inverted index,
+// optionally limited to the named indices.
+func (p Pages) searchDoc(doc related.Document, indices ...string) (Pages, error) {
+	return p.withInvertedIndex(func(idx *related.InvertedIndex) ([]related.Document, error) {
+		return idx.SearchDoc(doc, indices...)
+	})
+}
+
+// withInvertedIndex obtains (or lazily builds) the inverted index for p
+// via the first page's RelatedDocsHandler, runs the given search against
+// it, and converts the resulting documents back to Pages.
+// An empty receiver or an empty result yields (nil, nil).
+func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]related.Document, error)) (Pages, error) {
+	if len(p) == 0 {
+		return nil, nil
+	}
+
+	// The handler is fetched from the first page; all pages in the list
+	// are assumed to share the same dependencies.
+	d, ok := p[0].(InternalDependencies)
+	if !ok {
+		return nil, fmt.Errorf("invalid type %T in related search", p[0])
+	}
+
+	cache := d.GetRelatedDocsHandler()
+
+	searchIndex, err := cache.getOrCreateIndex(p)
+	if err != nil {
+		return nil, err
+	}
+
+	result, err := search(searchIndex)
+	if err != nil {
+		return nil, err
+	}
+
+	if len(result) > 0 {
+		mp := make(Pages, len(result))
+		for i, match := range result {
+			mp[i] = match.(Page)
+		}
+		return mp, nil
+	}
+
+	return nil, nil
+}
+
+// cachedPostingList pairs a page list with the inverted index built from it.
+type cachedPostingList struct {
+	p Pages
+
+	postingList *related.InvertedIndex
+}
+
+// RelatedDocsHandler caches one inverted index per distinct Pages list,
+// guarded by mu.
+type RelatedDocsHandler struct {
+	cfg related.Config
+
+	postingLists []*cachedPostingList
+	mu           sync.RWMutex
+}
+
+// NewRelatedDocsHandler creates a handler for the given related-content config.
+func NewRelatedDocsHandler(cfg related.Config) *RelatedDocsHandler {
+	return &RelatedDocsHandler{cfg: cfg}
+}
+
+// Clone returns a handler with the same config but no cached posting
+// lists. NOTE(review): dropping the cache on clone is presumably
+// intentional (fresh build state) — confirm with callers.
+func (s *RelatedDocsHandler) Clone() *RelatedDocsHandler {
+	return NewRelatedDocsHandler(s.cfg)
+}
+
+// This assumes that a lock has been acquired.
+func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
+	for _, ci := range s.postingLists {
+		if pagesEqual(p, ci.p) {
+			return ci.postingList
+		}
+	}
+	return nil
+}
+
+// getOrCreateIndex returns the cached inverted index for p, building and
+// caching one on first use. It uses double-checked locking: a read-locked
+// probe, then a write-locked re-check before building.
+func (s *RelatedDocsHandler) getOrCreateIndex(p Pages) (*related.InvertedIndex, error) {
+	s.mu.RLock()
+	cachedIndex := s.getIndex(p)
+	if cachedIndex != nil {
+		s.mu.RUnlock()
+		return cachedIndex, nil
+	}
+	s.mu.RUnlock()
+
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	// Another goroutine may have built the index while we waited for the
+	// write lock.
+	if cachedIndex := s.getIndex(p); cachedIndex != nil {
+		return cachedIndex, nil
+	}
+
+	searchIndex := related.NewInvertedIndex(s.cfg)
+
+	for _, page := range p {
+		if err := searchIndex.Add(page); err != nil {
+			return nil, err
+		}
+	}
+
+	s.postingLists = append(s.postingLists, &cachedPostingList{p: p, postingList: searchIndex})
+
+	return searchIndex, nil
+}
diff --git a/resources/page/pages_related_test.go b/resources/page/pages_related_test.go
new file mode 100644
index 000000000..3c5780a9a
--- /dev/null
+++ b/resources/page/pages_related_test.go
@@ -0,0 +1,86 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestRelated exercises RelatedTo, Related and RelatedIndices over three
+// keyword-tagged fixture pages, checking result counts and ranking order.
+func TestRelated(t *testing.T) {
+	c := qt.New(t)
+
+	t.Parallel()
+
+	pages := Pages{
+		&testPage{
+			title:   "Page 1",
+			pubDate: mustParseDate("2017-01-03"),
+			params: map[string]any{
+				"keywords": []string{"hugo", "says"},
+			},
+		},
+		&testPage{
+			title:   "Page 2",
+			pubDate: mustParseDate("2017-01-02"),
+			params: map[string]any{
+				"keywords": []string{"hugo", "rocks"},
+			},
+		},
+		&testPage{
+			title:   "Page 3",
+			pubDate: mustParseDate("2017-01-01"),
+			params: map[string]any{
+				"keywords": []string{"bep", "says"},
+			},
+		},
+	}
+
+	result, err := pages.RelatedTo(types.NewKeyValuesStrings("keywords", "hugo", "rocks"))
+
+	c.Assert(err, qt.IsNil)
+	c.Assert(len(result), qt.Equals, 2)
+	c.Assert(result[0].Title(), qt.Equals, "Page 2")
+	c.Assert(result[1].Title(), qt.Equals, "Page 1")
+
+	// Related(pages[0]) must not include pages[0] itself.
+	result, err = pages.Related(pages[0])
+	c.Assert(err, qt.IsNil)
+	c.Assert(len(result), qt.Equals, 2)
+	c.Assert(result[0].Title(), qt.Equals, "Page 2")
+	c.Assert(result[1].Title(), qt.Equals, "Page 3")
+
+	result, err = pages.RelatedIndices(pages[0], "keywords")
+	c.Assert(err, qt.IsNil)
+	c.Assert(len(result), qt.Equals, 2)
+	c.Assert(result[0].Title(), qt.Equals, "Page 2")
+	c.Assert(result[1].Title(), qt.Equals, "Page 3")
+
+	result, err = pages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
+	c.Assert(err, qt.IsNil)
+	c.Assert(len(result), qt.Equals, 2)
+	c.Assert(result[0].Title(), qt.Equals, "Page 2")
+	c.Assert(result[1].Title(), qt.Equals, "Page 3")
+}
+
+// mustParseDate parses a YYYY-MM-DD string, panicking on malformed input
+// (test fixture helper).
+func mustParseDate(s string) time.Time {
+	d, err := time.Parse("2006-01-02", s)
+	if err != nil {
+		panic(err)
+	}
+	return d
+}
diff --git a/resources/page/pages_sort.go b/resources/page/pages_sort.go
new file mode 100644
index 000000000..08cb34a32
--- /dev/null
+++ b/resources/page/pages_sort.go
@@ -0,0 +1,412 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "sort"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/compare"
+ "github.com/spf13/cast"
+)
+
+// spc is the package-level sort/merge result cache shared by the ByXxx
+// methods below.
+var spc = newPageCache()
+
+/*
+ * Implementation of a custom sorter for Pages
+ */
+
+// A pageSorter implements the sort interface for Pages
+type pageSorter struct {
+	pages Pages
+	by    pageBy
+}
+
+// pageBy is a closure used in the Sort.Less method.
+type pageBy func(p1, p2 Page) bool
+
+// getOrdinals returns the ordinals of p1 and p2 when both implement
+// collections.Order, and (-1, -1) otherwise.
+func getOrdinals(p1, p2 Page) (int, int) {
+	p1o, ok1 := p1.(collections.Order)
+	if !ok1 {
+		return -1, -1
+	}
+	p2o, ok2 := p2.(collections.Order)
+	if !ok2 {
+		return -1, -1
+	}
+
+	return p1o.Ordinal(), p2o.Ordinal()
+}
+
+// Sort stable sorts the pages given the receiver's sort order.
+func (by pageBy) Sort(pages Pages) {
+	ps := &pageSorter{
+		pages: pages,
+		by:    by, // The Sort method's receiver is the function (closure) that defines the sort order.
+	}
+	sort.Stable(ps)
+}
+
+var (
+
+	// DefaultPageSort is the default sort func for pages in Hugo:
+	// Order by Ordinal, Weight, Date, LinkTitle and then full file path.
+	// Dates compare descending (newer first); a weight of 0 means
+	// "unweighted" and sorts after any non-zero weight.
+	DefaultPageSort = func(p1, p2 Page) bool {
+		o1, o2 := getOrdinals(p1, p2)
+		if o1 != o2 && o1 != -1 && o2 != -1 {
+			return o1 < o2
+		}
+		if p1.Weight() == p2.Weight() {
+			if p1.Date().Unix() == p2.Date().Unix() {
+				c := collatorStringCompare(func(p Page) string { return p.LinkTitle() }, p1, p2)
+				if c == 0 {
+					// Pages without a backing file sort before those with one.
+					if p1.File().IsZero() || p2.File().IsZero() {
+						return p1.File().IsZero()
+					}
+					return compare.LessStrings(p1.File().Filename(), p2.File().Filename())
+				}
+				return c < 0
+			}
+			return p1.Date().Unix() > p2.Date().Unix()
+		}
+
+		if p2.Weight() == 0 {
+			return true
+		}
+
+		if p1.Weight() == 0 {
+			return false
+		}
+
+		return p1.Weight() < p2.Weight()
+	}
+
+	// lessPageLanguage orders by language weight, then descending date,
+	// link title, and filename.
+	// NOTE(review): unlike DefaultPageSort, the zero-file tie-break here
+	// falls through to `c < 0` (false) instead of ordering zero-file
+	// pages first — confirm this asymmetry is intentional.
+	lessPageLanguage = func(p1, p2 Page) bool {
+		if p1.Language().Weight == p2.Language().Weight {
+			if p1.Date().Unix() == p2.Date().Unix() {
+				c := compare.Strings(p1.LinkTitle(), p2.LinkTitle())
+				if c == 0 {
+					if !p1.File().IsZero() && !p2.File().IsZero() {
+						return compare.LessStrings(p1.File().Filename(), p2.File().Filename())
+					}
+				}
+				return c < 0
+			}
+			return p1.Date().Unix() > p2.Date().Unix()
+		}
+
+		if p2.Language().Weight == 0 {
+			return true
+		}
+
+		if p1.Language().Weight == 0 {
+			return false
+		}
+
+		return p1.Language().Weight < p2.Language().Weight
+	}
+
+	lessPageTitle = func(p1, p2 Page) bool {
+		return collatorStringCompare(func(p Page) string { return p.Title() }, p1, p2) < 0
+	}
+
+	lessPageLinkTitle = func(p1, p2 Page) bool {
+		return collatorStringCompare(func(p Page) string { return p.LinkTitle() }, p1, p2) < 0
+	}
+
+	lessPageDate = func(p1, p2 Page) bool {
+		return p1.Date().Unix() < p2.Date().Unix()
+	}
+
+	lessPagePubDate = func(p1, p2 Page) bool {
+		return p1.PublishDate().Unix() < p2.PublishDate().Unix()
+	}
+)
+
+// Len, Swap and Less implement sort.Interface for pageSorter.
+func (ps *pageSorter) Len() int      { return len(ps.pages) }
+func (ps *pageSorter) Swap(i, j int) { ps.pages[i], ps.pages[j] = ps.pages[j], ps.pages[i] }
+
+// Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter.
+func (ps *pageSorter) Less(i, j int) bool { return ps.by(ps.pages[i], ps.pages[j]) }
+
+// Limit limits the number of pages returned to n.
+// The result is a sub-slice sharing the receiver's backing array.
+func (p Pages) Limit(n int) Pages {
+	if len(p) > n {
+		return p[0:n]
+	}
+	return p
+}
+
+// collatorStringSort returns an in-place sort func that orders pages by
+// the string extracted by getString, using the collator for the
+// currently rendered site's language.
+var collatorStringSort = func(getString func(Page) string) func(p Pages) {
+	return func(p Pages) {
+		if len(p) == 0 {
+			return
+		}
+		// Pages may be a mix of multiple languages, so we need to use the language
+		// for the currently rendered Site.
+		currentSite := p[0].Site().Current()
+		coll := langs.GetCollator(currentSite.Language())
+		// The collator is not safe for concurrent use; hold its lock for
+		// the duration of the sort.
+		coll.Lock()
+		defer coll.Unlock()
+
+		sort.SliceStable(p, func(i, j int) bool {
+			return coll.CompareStrings(getString(p[i]), getString(p[j])) < 0
+		})
+	}
+}
+
+// collatorStringCompare compares the strings extracted from p1 and p2
+// with the collator for p1's current site language, returning <0, 0 or >0.
+var collatorStringCompare = func(getString func(Page) string, p1, p2 Page) int {
+	currentSite := p1.Site().Current()
+	coll := langs.GetCollator(currentSite.Language())
+	coll.Lock()
+	c := coll.CompareStrings(getString(p1), getString(p2))
+	coll.Unlock()
+	return c
+}
+
+// collatorStringLess returns a collation-aware less func for the
+// language of p's current site, plus a close func that MUST be called
+// once sorting is done to release the collator lock.
+var collatorStringLess = func(p Page) (less func(s1, s2 string) bool, close func()) {
+	currentSite := p.Site().Current()
+	coll := langs.GetCollator(currentSite.Language())
+	coll.Lock()
+	return func(s1, s2 string) bool {
+			// Strict "< 0": the previous "< 1" also returned true for
+			// equal strings, making less(a, b) and less(b, a) both true
+			// on ties — violating the strict weak ordering the sort
+			// package requires of Less.
+			return coll.CompareStrings(s1, s2) < 0
+		},
+		func() {
+			coll.Unlock()
+		}
+
+}
+
+// ByWeight sorts the Pages by weight and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByWeight() Pages {
+	const key = "pageSort.ByWeight"
+	pages, _ := spc.get(key, pageBy(DefaultPageSort).Sort, p)
+	return pages
+}
+
+// SortByDefault sorts pages by the default sort.
+// Unlike the ByXxx methods, this sorts in place and is not cached.
+func SortByDefault(pages Pages) {
+	pageBy(DefaultPageSort).Sort(pages)
+}
+
+// ByTitle sorts the Pages by title and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByTitle() Pages {
+	const key = "pageSort.ByTitle"
+
+	pages, _ := spc.get(key, collatorStringSort(func(p Page) string { return p.Title() }), p)
+
+	return pages
+}
+
+// ByLinkTitle sorts the Pages by link title and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLinkTitle() Pages {
+	const key = "pageSort.ByLinkTitle"
+
+	pages, _ := spc.get(key, collatorStringSort(func(p Page) string { return p.LinkTitle() }), p)
+
+	return pages
+}
+
+// ByDate sorts the Pages by date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByDate() Pages {
+	const key = "pageSort.ByDate"
+
+	pages, _ := spc.get(key, pageBy(lessPageDate).Sort, p)
+
+	return pages
+}
+
+// ByPublishDate sorts the Pages by publish date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByPublishDate() Pages {
+	const key = "pageSort.ByPublishDate"
+
+	pages, _ := spc.get(key, pageBy(lessPagePubDate).Sort, p)
+
+	return pages
+}
+
+// ByExpiryDate sorts the Pages by expiry date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByExpiryDate() Pages {
+	const key = "pageSort.ByExpiryDate"
+
+	expDate := func(p1, p2 Page) bool {
+		return p1.ExpiryDate().Unix() < p2.ExpiryDate().Unix()
+	}
+
+	pages, _ := spc.get(key, pageBy(expDate).Sort, p)
+
+	return pages
+}
+
+// ByLastmod sorts the Pages by the last modification date and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLastmod() Pages {
+	const key = "pageSort.ByLastmod"
+
+	date := func(p1, p2 Page) bool {
+		return p1.Lastmod().Unix() < p2.Lastmod().Unix()
+	}
+
+	pages, _ := spc.get(key, pageBy(date).Sort, p)
+
+	return pages
+}
+
+// ByLength sorts the Pages by length and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLength() Pages {
+	const key = "pageSort.ByLength"
+
+	// Pages not implementing resource.LengthProvider sort first.
+	// NOTE(review): if neither page implements it, less(a,b) and
+	// less(b,a) are both true — technically a strict-weak-ordering
+	// violation; confirm all Page implementations provide Len.
+	length := func(p1, p2 Page) bool {
+		p1l, ok1 := p1.(resource.LengthProvider)
+		p2l, ok2 := p2.(resource.LengthProvider)
+
+		if !ok1 {
+			return true
+		}
+
+		if !ok2 {
+			return false
+		}
+
+		return p1l.Len() < p2l.Len()
+	}
+
+	pages, _ := spc.get(key, pageBy(length).Sort, p)
+
+	return pages
+}
+
+// ByLanguage sorts the Pages by the language's Weight.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByLanguage() Pages {
+	const key = "pageSort.ByLanguage"
+
+	pages, _ := spc.get(key, pageBy(lessPageLanguage).Sort, p)
+
+	return pages
+}
+
+// SortByLanguage sorts the pages by language, in place and uncached.
+func SortByLanguage(pages Pages) {
+	pageBy(lessPageLanguage).Sort(pages)
+}
+
+// Reverse reverses the order in Pages and returns a copy.
+//
+// Adjacent invocations on the same receiver will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) Reverse() Pages {
+	const key = "pageSort.Reverse"
+
+	reverseFunc := func(pages Pages) {
+		for i, j := 0, len(pages)-1; i < j; i, j = i+1, j-1 {
+			pages[i], pages[j] = pages[j], pages[i]
+		}
+	}
+
+	pages, _ := spc.get(key, reverseFunc, p)
+
+	return pages
+}
+
+// ByParam sorts the pages according to the given page Params key.
+// Pages missing the param sort last; when both values are numeric they
+// compare as float64, otherwise as collated strings.
+//
+// Adjacent invocations on the same receiver with the same paramsKey will return a cached result.
+//
+// This may safely be executed in parallel.
+func (p Pages) ByParam(paramsKey any) Pages {
+	if len(p) < 2 {
+		return p
+	}
+	paramsKeyStr := cast.ToString(paramsKey)
+	key := "pageSort.ByParam." + paramsKeyStr
+
+	// The collator lock is held for the whole sort; close releases it.
+	stringLess, close := collatorStringLess(p[0])
+	defer close()
+
+	paramsKeyComparator := func(p1, p2 Page) bool {
+		v1, _ := p1.Param(paramsKeyStr)
+		v2, _ := p2.Param(paramsKeyStr)
+
+		if v1 == nil {
+			return false
+		}
+
+		if v2 == nil {
+			return true
+		}
+
+		isNumeric := func(v any) bool {
+			switch v.(type) {
+			case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64:
+				return true
+			default:
+				return false
+			}
+		}
+
+		if isNumeric(v1) && isNumeric(v2) {
+			return cast.ToFloat64(v1) < cast.ToFloat64(v2)
+		}
+
+		s1 := cast.ToString(v1)
+		s2 := cast.ToString(v2)
+
+		return stringLess(s1, s2)
+
+	}
+
+	pages, _ := spc.get(key, pageBy(paramsKeyComparator).Sort, p)
+
+	return pages
+}
diff --git a/resources/page/pages_sort_search.go b/resources/page/pages_sort_search.go
new file mode 100644
index 000000000..b400f61e8
--- /dev/null
+++ b/resources/page/pages_sort_search.go
@@ -0,0 +1,125 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import "sort"
+
+// pageLessFunctions holds the less functions tried during page binary search, ordered with the most common sort first.
+var pageLessFunctions = []func(p1, p2 Page) bool{
+ DefaultPageSort,
+ lessPageDate,
+ lessPagePubDate,
+ lessPageTitle,
+ lessPageLinkTitle,
+}
+
+func searchPage(p Page, pages Pages) int {
+ if len(pages) < 1000 {
+ // For smaller data sets, doing a linear search is faster.
+ return searchPageLinear(p, pages, 0)
+ }
+
+ less := isPagesProbablySorted(pages, pageLessFunctions...)
+ if less == nil {
+ return searchPageLinear(p, pages, 0)
+ }
+
+ i := searchPageBinary(p, pages, less)
+ if i != -1 {
+ return i
+ }
+
+ return searchPageLinear(p, pages, 0)
+}
+
+func searchPageLinear(p Page, pages Pages, start int) int {
+ for i := start; i < len(pages); i++ {
+ c := pages[i]
+ if c.Eq(p) {
+ return i
+ }
+ }
+ return -1
+}
+
+func searchPageBinary(p Page, pages Pages, less func(p1, p2 Page) bool) int {
+ n := len(pages)
+
+ f := func(i int) bool {
+ c := pages[i]
+ isLess := less(c, p)
+ return !isLess || c.Eq(p)
+ }
+
+ i := sort.Search(n, f)
+
+ if i == n {
+ return -1
+ }
+
+ return searchPageLinear(p, pages, i)
+}
+
+// isPagesProbablySorted tests if the pages slice is probably sorted.
+func isPagesProbablySorted(pages Pages, lessFuncs ...func(p1, p2 Page) bool) func(p1, p2 Page) bool {
+ n := len(pages)
+ step := 1
+ if n > 500 {
+ step = 50
+ }
+
+ is := func(less func(p1, p2 Page) bool) bool {
+ samples := 0
+
+ for i := n - 1; i > 0; i = i - step {
+ if less(pages[i], pages[i-1]) {
+ return false
+ }
+ samples++
+ if samples >= 15 {
+ return true
+ }
+ }
+ return samples > 0
+ }
+
+ isReverse := func(less func(p1, p2 Page) bool) bool {
+ samples := 0
+
+ for i := 0; i < n-1; i = i + step {
+ if less(pages[i], pages[i+1]) {
+ return false
+ }
+ samples++
+
+ if samples > 15 {
+ return true
+ }
+ }
+ return samples > 0
+ }
+
+ for _, less := range lessFuncs {
+ if is(less) {
+ return less
+ }
+ if isReverse(less) {
+ return func(p1, p2 Page) bool {
+ return less(p2, p1)
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/resources/page/pages_sort_search_test.go b/resources/page/pages_sort_search_test.go
new file mode 100644
index 000000000..8f115109c
--- /dev/null
+++ b/resources/page/pages_sort_search_test.go
@@ -0,0 +1,122 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "math/rand"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestSearchPage(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ pages := createSortTestPages(10)
+ for i, p := range pages {
+ p.(*testPage).title = fmt.Sprintf("Title %d", i%2)
+ }
+
+ for _, pages := range []Pages{pages.ByTitle(), pages.ByTitle().Reverse()} {
+ less := isPagesProbablySorted(pages, lessPageTitle)
+ c.Assert(less, qt.Not(qt.IsNil))
+ for i, p := range pages {
+ idx := searchPageBinary(p, pages, less)
+ c.Assert(idx, qt.Equals, i)
+ }
+ }
+}
+
+func BenchmarkSearchPage(b *testing.B) {
+ type Variant struct {
+ name string
+ preparePages func(pages Pages) Pages
+ search func(p Page, pages Pages) int
+ }
+
+ shufflePages := func(pages Pages) Pages {
+ rand.Shuffle(len(pages), func(i, j int) { pages[i], pages[j] = pages[j], pages[i] })
+ return pages
+ }
+
+ linearSearch := func(p Page, pages Pages) int {
+ return searchPageLinear(p, pages, 0)
+ }
+
+ createPages := func(num int) Pages {
+ pages := createSortTestPages(num)
+ for _, p := range pages {
+ tp := p.(*testPage)
+ tp.weight = rand.Intn(len(pages))
+ tp.title = fmt.Sprintf("Title %d", rand.Intn(len(pages)))
+
+ tp.pubDate = time.Now().Add(time.Duration(rand.Intn(len(pages)/5)) * time.Hour)
+ tp.date = time.Now().Add(time.Duration(rand.Intn(len(pages)/5)) * time.Hour)
+ }
+
+ return pages
+ }
+
+ for _, variant := range []Variant{
+ {"Shuffled", shufflePages, searchPage},
+ {"ByWeight", func(pages Pages) Pages {
+ return pages.ByWeight()
+ }, searchPage},
+ {"ByWeight.Reverse", func(pages Pages) Pages {
+ return pages.ByWeight().Reverse()
+ }, searchPage},
+ {"ByDate", func(pages Pages) Pages {
+ return pages.ByDate()
+ }, searchPage},
+ {"ByPublishDate", func(pages Pages) Pages {
+ return pages.ByPublishDate()
+ }, searchPage},
+ {"ByTitle", func(pages Pages) Pages {
+ return pages.ByTitle()
+ }, searchPage},
+ {"ByTitle Linear", func(pages Pages) Pages {
+ return pages.ByTitle()
+ }, linearSearch},
+ } {
+ for _, numPages := range []int{100, 500, 1000, 5000} {
+ b.Run(fmt.Sprintf("%s-%d", variant.name, numPages), func(b *testing.B) {
+ b.StopTimer()
+ pages := createPages(numPages)
+ if variant.preparePages != nil {
+ pages = variant.preparePages(pages)
+ }
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ j := rand.Intn(numPages)
+ k := variant.search(pages[j], pages)
+ if k != j {
+ b.Fatalf("%d != %d", k, j)
+ }
+ }
+ })
+ }
+ }
+}
+
+func TestIsPagesProbablySorted(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ c.Assert(isPagesProbablySorted(createSortTestPages(6).ByWeight(), DefaultPageSort), qt.Not(qt.IsNil))
+ c.Assert(isPagesProbablySorted(createSortTestPages(300).ByWeight(), DefaultPageSort), qt.Not(qt.IsNil))
+ c.Assert(isPagesProbablySorted(createSortTestPages(6), DefaultPageSort), qt.IsNil)
+ c.Assert(isPagesProbablySorted(createSortTestPages(300).ByTitle(), pageLessFunctions...), qt.Not(qt.IsNil))
+}
diff --git a/resources/page/pages_sort_test.go b/resources/page/pages_sort_test.go
new file mode 100644
index 000000000..cf4e339ee
--- /dev/null
+++ b/resources/page/pages_sort_test.go
@@ -0,0 +1,289 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/google/go-cmp/cmp"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var eq = qt.CmpEquals(
+ cmp.Comparer(func(p1, p2 testPage) bool {
+ return p1.path == p2.path && p1.weight == p2.weight
+ }),
+)
+
+func TestDefaultSort(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ d1 := time.Now()
+ d2 := d1.Add(-1 * time.Hour)
+ d3 := d1.Add(-2 * time.Hour)
+ d4 := d1.Add(-3 * time.Hour)
+
+ p := createSortTestPages(4)
+
+ // first by weight
+ setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p)
+ SortByDefault(p)
+
+ c.Assert(p[0].Weight(), qt.Equals, 1)
+
+ // Consider zero weight, issue #2673
+ setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "d", "c"}, [4]int{0, 0, 0, 1}, p)
+ SortByDefault(p)
+
+ c.Assert(p[0].Weight(), qt.Equals, 1)
+
+ // next by date
+ setSortVals([4]time.Time{d3, d4, d1, d2}, [4]string{"a", "b", "c", "d"}, [4]int{1, 1, 1, 1}, p)
+ SortByDefault(p)
+ c.Assert(p[0].Date(), qt.Equals, d1)
+
+ // finally by link title
+ setSortVals([4]time.Time{d3, d3, d3, d3}, [4]string{"b", "c", "a", "d"}, [4]int{1, 1, 1, 1}, p)
+ SortByDefault(p)
+ c.Assert(p[0].LinkTitle(), qt.Equals, "al")
+ c.Assert(p[1].LinkTitle(), qt.Equals, "bl")
+ c.Assert(p[2].LinkTitle(), qt.Equals, "cl")
+}
+
+// https://github.com/gohugoio/hugo/issues/4953
+func TestSortByLinkTitle(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ pages := createSortTestPages(6)
+
+ for i, p := range pages {
+ pp := p.(*testPage)
+ if i < 5 {
+ pp.title = fmt.Sprintf("title%d", i)
+ }
+
+ if i > 2 {
+ pp.linkTitle = fmt.Sprintf("linkTitle%d", i)
+ }
+
+ }
+
+ pages.shuffle()
+
+ bylt := pages.ByLinkTitle()
+
+ for i, p := range bylt {
+ if i < 3 {
+ c.Assert(p.LinkTitle(), qt.Equals, fmt.Sprintf("linkTitle%d", i+3))
+ } else {
+ c.Assert(p.LinkTitle(), qt.Equals, fmt.Sprintf("title%d", i-3))
+ }
+ }
+}
+
+func TestSortByN(t *testing.T) {
+ t.Parallel()
+ d1 := time.Now()
+ d2 := d1.Add(-2 * time.Hour)
+ d3 := d1.Add(-10 * time.Hour)
+ d4 := d1.Add(-20 * time.Hour)
+
+ p := createSortTestPages(4)
+
+ for i, this := range []struct {
+ sortFunc func(p Pages) Pages
+ assertFunc func(p Pages) bool
+ }{
+ {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight() == 1 }},
+ {(Pages).ByTitle, func(p Pages) bool { return p[0].Title() == "ab" }},
+ {(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }},
+ {(Pages).ByDate, func(p Pages) bool { return p[0].Date() == d4 }},
+ {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }},
+ {(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }},
+ {(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod() == d3 }},
+ {(Pages).ByLength, func(p Pages) bool { return p[0].(resource.LengthProvider).Len() == len(p[0].(*testPage).content) }},
+ } {
+ setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p)
+
+ sorted := this.sortFunc(p)
+ if !this.assertFunc(sorted) {
+ t.Errorf("[%d] sort error", i)
+ }
+ }
+}
+
+func TestLimit(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ p := createSortTestPages(10)
+ firstFive := p.Limit(5)
+ c.Assert(len(firstFive), qt.Equals, 5)
+ for i := 0; i < 5; i++ {
+ c.Assert(firstFive[i], qt.Equals, p[i])
+ }
+ c.Assert(p.Limit(10), eq, p)
+ c.Assert(p.Limit(11), eq, p)
+}
+
+func TestPageSortReverse(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ p1 := createSortTestPages(10)
+ c.Assert(p1[0].(*testPage).fuzzyWordCount, qt.Equals, 0)
+ c.Assert(p1[9].(*testPage).fuzzyWordCount, qt.Equals, 9)
+ p2 := p1.Reverse()
+ c.Assert(p2[0].(*testPage).fuzzyWordCount, qt.Equals, 9)
+ c.Assert(p2[9].(*testPage).fuzzyWordCount, qt.Equals, 0)
+ // cached
+ c.Assert(pagesEqual(p2, p1.Reverse()), qt.Equals, true)
+}
+
+func TestPageSortByParam(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ var k any = "arbitrarily.nested"
+
+ unsorted := createSortTestPages(10)
+ delete(unsorted[9].Params(), "arbitrarily")
+
+ firstSetValue, _ := unsorted[0].Param(k)
+ secondSetValue, _ := unsorted[1].Param(k)
+ lastSetValue, _ := unsorted[8].Param(k)
+ unsetValue, _ := unsorted[9].Param(k)
+
+ c.Assert(firstSetValue, qt.Equals, "xyz100")
+ c.Assert(secondSetValue, qt.Equals, "xyz99")
+ c.Assert(lastSetValue, qt.Equals, "xyz92")
+ c.Assert(unsetValue, qt.Equals, nil)
+
+ sorted := unsorted.ByParam("arbitrarily.nested")
+ firstSetSortedValue, _ := sorted[0].Param(k)
+ secondSetSortedValue, _ := sorted[1].Param(k)
+ lastSetSortedValue, _ := sorted[8].Param(k)
+ unsetSortedValue, _ := sorted[9].Param(k)
+
+ c.Assert(firstSetSortedValue, qt.Equals, firstSetValue)
+ c.Assert(lastSetSortedValue, qt.Equals, secondSetValue)
+ c.Assert(secondSetSortedValue, qt.Equals, lastSetValue)
+ c.Assert(unsetSortedValue, qt.Equals, unsetValue)
+}
+
+func TestPageSortByParamNumeric(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ var k any = "arbitrarily.nested"
+
+ n := 10
+ unsorted := createSortTestPages(n)
+ for i := 0; i < n; i++ {
+ v := 100 - i
+ if i%2 == 0 {
+ v = 100.0 - i
+ }
+
+ unsorted[i].(*testPage).params = map[string]any{
+ "arbitrarily": map[string]any{
+ "nested": v,
+ },
+ }
+ }
+ delete(unsorted[9].Params(), "arbitrarily")
+
+ firstSetValue, _ := unsorted[0].Param(k)
+ secondSetValue, _ := unsorted[1].Param(k)
+ lastSetValue, _ := unsorted[8].Param(k)
+ unsetValue, _ := unsorted[9].Param(k)
+
+ c.Assert(firstSetValue, qt.Equals, 100)
+ c.Assert(secondSetValue, qt.Equals, 99)
+ c.Assert(lastSetValue, qt.Equals, 92)
+ c.Assert(unsetValue, qt.Equals, nil)
+
+ sorted := unsorted.ByParam("arbitrarily.nested")
+ firstSetSortedValue, _ := sorted[0].Param(k)
+ secondSetSortedValue, _ := sorted[1].Param(k)
+ lastSetSortedValue, _ := sorted[8].Param(k)
+ unsetSortedValue, _ := sorted[9].Param(k)
+
+ c.Assert(firstSetSortedValue, qt.Equals, 92)
+ c.Assert(secondSetSortedValue, qt.Equals, 93)
+ c.Assert(lastSetSortedValue, qt.Equals, 100)
+ c.Assert(unsetSortedValue, qt.Equals, unsetValue)
+}
+
+func BenchmarkSortByWeightAndReverse(b *testing.B) {
+ p := createSortTestPages(300)
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ p = p.ByWeight().Reverse()
+ }
+}
+
+func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) {
+ for i := range dates {
+ this := pages[i].(*testPage)
+ other := pages[len(dates)-1-i].(*testPage)
+
+ this.date = dates[i]
+ this.lastMod = dates[i]
+ this.weight = weights[i]
+ this.title = titles[i]
+ // make sure we compare apples and ... apples ...
+ other.linkTitle = this.Title() + "l"
+ other.pubDate = dates[i]
+ other.expiryDate = dates[i]
+ other.content = titles[i] + "_content"
+ }
+ lastLastMod := pages[2].Lastmod()
+ pages[2].(*testPage).lastMod = pages[1].Lastmod()
+ pages[1].(*testPage).lastMod = lastLastMod
+
+ for _, p := range pages {
+ p.(*testPage).content = ""
+ }
+}
+
+func createSortTestPages(num int) Pages {
+ pages := make(Pages, num)
+
+ for i := 0; i < num; i++ {
+ p := newTestPage()
+ p.path = fmt.Sprintf("/x/y/p%d.md", i)
+ p.title = fmt.Sprintf("Title %d", i%(num+1/2))
+ p.params = map[string]any{
+ "arbitrarily": map[string]any{
+ "nested": ("xyz" + fmt.Sprintf("%v", 100-i)),
+ },
+ }
+
+ w := 5
+
+ if i%2 == 0 {
+ w = 10
+ }
+ p.fuzzyWordCount = i
+ p.weight = w
+ p.description = "initial"
+
+ pages[i] = p
+ }
+
+ return pages
+}
diff --git a/resources/page/pages_test.go b/resources/page/pages_test.go
new file mode 100644
index 000000000..22ee698da
--- /dev/null
+++ b/resources/page/pages_test.go
@@ -0,0 +1,72 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestProbablyEq(t *testing.T) {
+ p1, p2, p3 := &testPage{title: "p1"}, &testPage{title: "p2"}, &testPage{title: "p3"}
+ pages12 := Pages{p1, p2}
+ pages21 := Pages{p2, p1}
+ pages123 := Pages{p1, p2, p3}
+
+ t.Run("Pages", func(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(pages12.ProbablyEq(pages12), qt.Equals, true)
+ c.Assert(pages123.ProbablyEq(pages12), qt.Equals, false)
+ c.Assert(pages12.ProbablyEq(pages21), qt.Equals, false)
+ })
+
+ t.Run("PageGroup", func(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(PageGroup{Key: "a", Pages: pages12}.ProbablyEq(PageGroup{Key: "a", Pages: pages12}), qt.Equals, true)
+ c.Assert(PageGroup{Key: "a", Pages: pages12}.ProbablyEq(PageGroup{Key: "b", Pages: pages12}), qt.Equals, false)
+ })
+
+ t.Run("PagesGroup", func(t *testing.T) {
+ c := qt.New(t)
+
+ pg1, pg2 := PageGroup{Key: "a", Pages: pages12}, PageGroup{Key: "b", Pages: pages123}
+
+ c.Assert(PagesGroup{pg1, pg2}.ProbablyEq(PagesGroup{pg1, pg2}), qt.Equals, true)
+ c.Assert(PagesGroup{pg1, pg2}.ProbablyEq(PagesGroup{pg2, pg1}), qt.Equals, false)
+ })
+}
+
+func TestToPages(t *testing.T) {
+ c := qt.New(t)
+
+ p1, p2 := &testPage{title: "p1"}, &testPage{title: "p2"}
+ pages12 := Pages{p1, p2}
+
+ mustToPages := func(in any) Pages {
+ p, err := ToPages(in)
+ c.Assert(err, qt.IsNil)
+ return p
+ }
+
+ c.Assert(mustToPages(nil), eq, Pages{})
+ c.Assert(mustToPages(pages12), eq, pages12)
+ c.Assert(mustToPages([]Page{p1, p2}), eq, pages12)
+ c.Assert(mustToPages([]any{p1, p2}), eq, pages12)
+
+ _, err := ToPages("not a page")
+ c.Assert(err, qt.Not(qt.IsNil))
+}
diff --git a/resources/page/pagination.go b/resources/page/pagination.go
new file mode 100644
index 000000000..9f4bfcff5
--- /dev/null
+++ b/resources/page/pagination.go
@@ -0,0 +1,396 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "math"
+ "reflect"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/spf13/cast"
+)
+
+// PaginatorProvider provides two ways to create a page paginator.
+type PaginatorProvider interface {
+ Paginator(options ...any) (*Pager, error)
+ Paginate(seq any, options ...any) (*Pager, error)
+}
+
+// Pager represents one of the elements in a paginator.
+// The number, starting on 1, represents its place.
+type Pager struct {
+ number int
+ *Paginator
+}
+
+func (p Pager) String() string {
+ return fmt.Sprintf("Pager %d", p.number)
+}
+
+type paginatedElement interface {
+ Len() int
+}
+
+type pagers []*Pager
+
+var (
+ paginatorEmptyPages Pages
+ paginatorEmptyPageGroups PagesGroup
+)
+
+type Paginator struct {
+ paginatedElements []paginatedElement
+ pagers
+ paginationURLFactory
+ total int
+ size int
+}
+
+type paginationURLFactory func(int) string
+
+// PageNumber returns the current page's number in the pager sequence.
+func (p *Pager) PageNumber() int {
+ return p.number
+}
+
+// URL returns the URL to the current page.
+func (p *Pager) URL() template.HTML {
+ return template.HTML(p.paginationURLFactory(p.PageNumber()))
+}
+
+// Pages returns the Pages on this page.
+// Note: If this returns a non-empty result, then PageGroups() will return empty.
+func (p *Pager) Pages() Pages {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPages
+ }
+
+ if pages, ok := p.element().(Pages); ok {
+ return pages
+ }
+
+ return paginatorEmptyPages
+}
+
+// PageGroups return Page groups for this page.
+// Note: If this return non-empty result, then Pages() will return empty.
+func (p *Pager) PageGroups() PagesGroup {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPageGroups
+ }
+
+ if groups, ok := p.element().(PagesGroup); ok {
+ return groups
+ }
+
+ return paginatorEmptyPageGroups
+}
+
+func (p *Pager) element() paginatedElement {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPages
+ }
+ return p.paginatedElements[p.PageNumber()-1]
+}
+
+// page returns the Page with the given index
+func (p *Pager) page(index int) (Page, error) {
+ if pages, ok := p.element().(Pages); ok {
+ if pages != nil && len(pages) > index {
+ return pages[index], nil
+ }
+ return nil, nil
+ }
+
+ // must be PagesGroup
+ // this construction looks clumsy, but ...
+ // ... it is the difference between 99.5% and 100% test coverage :-)
+ groups := p.element().(PagesGroup)
+
+ i := 0
+ for _, v := range groups {
+ for _, page := range v.Pages {
+ if i == index {
+ return page, nil
+ }
+ i++
+ }
+ }
+ return nil, nil
+}
+
+// NumberOfElements gets the number of elements on this page.
+func (p *Pager) NumberOfElements() int {
+ return p.element().Len()
+}
+
+// HasPrev tests whether there are page(s) before the current.
+func (p *Pager) HasPrev() bool {
+ return p.PageNumber() > 1
+}
+
+// Prev returns the pager for the previous page.
+func (p *Pager) Prev() *Pager {
+ if !p.HasPrev() {
+ return nil
+ }
+ return p.pagers[p.PageNumber()-2]
+}
+
+// HasNext tests whether there are page(s) after the current.
+func (p *Pager) HasNext() bool {
+ return p.PageNumber() < len(p.paginatedElements)
+}
+
+// Next returns the pager for the next page.
+func (p *Pager) Next() *Pager {
+ if !p.HasNext() {
+ return nil
+ }
+ return p.pagers[p.PageNumber()]
+}
+
+// First returns the pager for the first page.
+func (p *Pager) First() *Pager {
+ return p.pagers[0]
+}
+
+// Last returns the pager for the last page.
+func (p *Pager) Last() *Pager {
+ return p.pagers[len(p.pagers)-1]
+}
+
+// Pagers returns a list of pagers that can be used to build a pagination menu.
+func (p *Paginator) Pagers() pagers {
+ return p.pagers
+}
+
+// PageSize returns the size of each paginator page.
+func (p *Paginator) PageSize() int {
+ return p.size
+}
+
+// TotalPages returns the number of pages in the paginator.
+func (p *Paginator) TotalPages() int {
+ return len(p.paginatedElements)
+}
+
+// TotalNumberOfElements returns the number of elements on all pages in this paginator.
+func (p *Paginator) TotalNumberOfElements() int {
+ return p.total
+}
+
+func splitPages(pages Pages, size int) []paginatedElement {
+ var split []paginatedElement
+ for low, j := 0, len(pages); low < j; low += size {
+ high := int(math.Min(float64(low+size), float64(len(pages))))
+ split = append(split, pages[low:high])
+ }
+
+ return split
+}
+
+func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
+ type keyPage struct {
+ key any
+ page Page
+ }
+
+ var (
+ split []paginatedElement
+ flattened []keyPage
+ )
+
+ for _, g := range pageGroups {
+ for _, p := range g.Pages {
+ flattened = append(flattened, keyPage{g.Key, p})
+ }
+ }
+
+ numPages := len(flattened)
+
+ for low, j := 0, numPages; low < j; low += size {
+ high := int(math.Min(float64(low+size), float64(numPages)))
+
+ var (
+ pg PagesGroup
+ key any
+ groupIndex = -1
+ )
+
+ for k := low; k < high; k++ {
+ kp := flattened[k]
+ if key == nil || key != kp.key {
+ key = kp.key
+ pg = append(pg, PageGroup{Key: key})
+ groupIndex++
+ }
+ pg[groupIndex].Pages = append(pg[groupIndex].Pages, kp.page)
+ }
+ split = append(split, pg)
+ }
+
+ return split
+}
+
+func ResolvePagerSize(cfg config.Provider, options ...any) (int, error) {
+ if len(options) == 0 {
+ return cfg.GetInt("paginate"), nil
+ }
+
+ if len(options) > 1 {
+ return -1, errors.New("too many arguments, 'pager size' is currently the only option")
+ }
+
+ pas, err := cast.ToIntE(options[0])
+
+ if err != nil || pas <= 0 {
+ return -1, errors.New(("'pager size' must be a positive integer"))
+ }
+
+ return pas, nil
+}
+
+func Paginate(td TargetPathDescriptor, seq any, pagerSize int) (*Paginator, error) {
+ if pagerSize <= 0 {
+ return nil, errors.New("'paginate' configuration setting must be positive to paginate")
+ }
+
+ urlFactory := newPaginationURLFactory(td)
+
+ var paginator *Paginator
+
+ groups, err := ToPagesGroup(seq)
+ if err != nil {
+ return nil, err
+ }
+ if groups != nil {
+ paginator, _ = newPaginatorFromPageGroups(groups, pagerSize, urlFactory)
+ } else {
+ pages, err := ToPages(seq)
+ if err != nil {
+ return nil, err
+ }
+ paginator, _ = newPaginatorFromPages(pages, pagerSize, urlFactory)
+ }
+
+ return paginator, nil
+}
+
+// probablyEqualPageLists checks page lists for probable equality.
+// It may return false positives.
+// The motivation behind this is to avoid potential costly reflect.DeepEqual
+// when "probably" is good enough.
+func probablyEqualPageLists(a1 any, a2 any) bool {
+ if a1 == nil || a2 == nil {
+ return a1 == a2
+ }
+
+ t1 := reflect.TypeOf(a1)
+ t2 := reflect.TypeOf(a2)
+
+ if t1 != t2 {
+ return false
+ }
+
+ if g1, ok := a1.(PagesGroup); ok {
+ g2 := a2.(PagesGroup)
+ if len(g1) != len(g2) {
+ return false
+ }
+ if len(g1) == 0 {
+ return true
+ }
+ if g1.Len() != g2.Len() {
+ return false
+ }
+
+ return g1[0].Pages[0] == g2[0].Pages[0]
+ }
+
+ p1, err1 := ToPages(a1)
+ p2, err2 := ToPages(a2)
+
+ // probably the same wrong type
+ if err1 != nil && err2 != nil {
+ return true
+ }
+
+ if len(p1) != len(p2) {
+ return false
+ }
+
+ if len(p1) == 0 {
+ return true
+ }
+
+ return p1[0] == p2[0]
+}
+
+func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+ if size <= 0 {
+ return nil, errors.New("Paginator size must be positive")
+ }
+
+ split := splitPages(pages, size)
+
+ return newPaginator(split, len(pages), size, urlFactory)
+}
+
+func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+ if size <= 0 {
+ return nil, errors.New("Paginator size must be positive")
+ }
+
+ split := splitPageGroups(pageGroups, size)
+
+ return newPaginator(split, pageGroups.Len(), size, urlFactory)
+}
+
+func newPaginator(elements []paginatedElement, total, size int, urlFactory paginationURLFactory) (*Paginator, error) {
+ p := &Paginator{total: total, paginatedElements: elements, size: size, paginationURLFactory: urlFactory}
+
+ var ps pagers
+
+ if len(elements) > 0 {
+ ps = make(pagers, len(elements))
+ for i := range p.paginatedElements {
+ ps[i] = &Pager{number: (i + 1), Paginator: p}
+ }
+ } else {
+ ps = make(pagers, 1)
+ ps[0] = &Pager{number: 1, Paginator: p}
+ }
+
+ p.pagers = ps
+
+ return p, nil
+}
+
+func newPaginationURLFactory(d TargetPathDescriptor) paginationURLFactory {
+ return func(pageNumber int) string {
+ pathDescriptor := d
+ var rel string
+ if pageNumber > 1 {
+ rel = fmt.Sprintf("/%s/%d/", d.PathSpec.PaginatePath, pageNumber)
+ pathDescriptor.Addends = rel
+ }
+
+ return CreateTargetPaths(pathDescriptor).RelPermalink(d.PathSpec)
+ }
+}
diff --git a/resources/page/pagination_test.go b/resources/page/pagination_test.go
new file mode 100644
index 000000000..e379f9b6b
--- /dev/null
+++ b/resources/page/pagination_test.go
@@ -0,0 +1,310 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "html/template"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/output"
+)
+
+func TestSplitPages(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ pages := createTestPages(21)
+ chunks := splitPages(pages, 5)
+ c.Assert(len(chunks), qt.Equals, 5)
+
+ for i := 0; i < 4; i++ {
+ c.Assert(chunks[i].Len(), qt.Equals, 5)
+ }
+
+ lastChunk := chunks[4]
+ c.Assert(lastChunk.Len(), qt.Equals, 1)
+}
+
+func TestSplitPageGroups(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ pages := createTestPages(21)
+ groups, _ := pages.GroupBy("Weight", "desc")
+ chunks := splitPageGroups(groups, 5)
+ c.Assert(len(chunks), qt.Equals, 5)
+
+ firstChunk := chunks[0]
+
+ // alternate weight 5 and 10
+ if groups, ok := firstChunk.(PagesGroup); ok {
+ c.Assert(groups.Len(), qt.Equals, 5)
+ for _, pg := range groups {
+ // first group 10 in weight
+ c.Assert(pg.Key, qt.Equals, 10)
+ for _, p := range pg.Pages {
+ c.Assert(p.FuzzyWordCount()%2 == 0, qt.Equals, true) // magic test
+ }
+ }
+ } else {
+ t.Fatal("Excepted PageGroup")
+ }
+
+ lastChunk := chunks[4]
+
+ if groups, ok := lastChunk.(PagesGroup); ok {
+ c.Assert(groups.Len(), qt.Equals, 1)
+ for _, pg := range groups {
+ // last should have 5 in weight
+ c.Assert(pg.Key, qt.Equals, 5)
+ for _, p := range pg.Pages {
+ c.Assert(p.FuzzyWordCount()%2 != 0, qt.Equals, true) // magic test
+ }
+ }
+ } else {
+ t.Fatal("Excepted PageGroup")
+ }
+}
+
+func TestPager(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ pages := createTestPages(21)
+ groups, _ := pages.GroupBy("Weight", "desc")
+
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ _, err := newPaginatorFromPages(pages, -1, urlFactory)
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ _, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ pag, err := newPaginatorFromPages(pages, 5, urlFactory)
+ c.Assert(err, qt.IsNil)
+ doTestPages(t, pag)
+ first := pag.Pagers()[0].First()
+ c.Assert(first.String(), qt.Equals, "Pager 1")
+ c.Assert(first.Pages(), qt.Not(qt.HasLen), 0)
+ c.Assert(first.PageGroups(), qt.HasLen, 0)
+
+ pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
+ c.Assert(err, qt.IsNil)
+ doTestPages(t, pag)
+ first = pag.Pagers()[0].First()
+ c.Assert(first.PageGroups(), qt.Not(qt.HasLen), 0)
+ c.Assert(first.Pages(), qt.HasLen, 0)
+}
+
+func doTestPages(t *testing.T, paginator *Paginator) {
+ c := qt.New(t)
+ paginatorPages := paginator.Pagers()
+
+ c.Assert(len(paginatorPages), qt.Equals, 5)
+ c.Assert(paginator.TotalNumberOfElements(), qt.Equals, 21)
+ c.Assert(paginator.PageSize(), qt.Equals, 5)
+ c.Assert(paginator.TotalPages(), qt.Equals, 5)
+
+ first := paginatorPages[0]
+ c.Assert(first.URL(), qt.Equals, template.HTML("page/1/"))
+ c.Assert(first.First(), qt.Equals, first)
+ c.Assert(first.HasNext(), qt.Equals, true)
+ c.Assert(first.Next(), qt.Equals, paginatorPages[1])
+ c.Assert(first.HasPrev(), qt.Equals, false)
+ c.Assert(first.Prev(), qt.IsNil)
+ c.Assert(first.NumberOfElements(), qt.Equals, 5)
+ c.Assert(first.PageNumber(), qt.Equals, 1)
+
+ third := paginatorPages[2]
+ c.Assert(third.HasNext(), qt.Equals, true)
+ c.Assert(third.HasPrev(), qt.Equals, true)
+ c.Assert(third.Prev(), qt.Equals, paginatorPages[1])
+
+ last := paginatorPages[4]
+ c.Assert(last.URL(), qt.Equals, template.HTML("page/5/"))
+ c.Assert(last.Last(), qt.Equals, last)
+ c.Assert(last.HasNext(), qt.Equals, false)
+ c.Assert(last.Next(), qt.IsNil)
+ c.Assert(last.HasPrev(), qt.Equals, true)
+ c.Assert(last.NumberOfElements(), qt.Equals, 1)
+ c.Assert(last.PageNumber(), qt.Equals, 5)
+}
+
+func TestPagerNoPages(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ pages := createTestPages(0)
+ groups, _ := pages.GroupBy("Weight", "desc")
+
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ paginator, _ := newPaginatorFromPages(pages, 5, urlFactory)
+ doTestPagerNoPages(t, paginator)
+
+ first := paginator.Pagers()[0].First()
+ c.Assert(first.PageGroups(), qt.HasLen, 0)
+ c.Assert(first.Pages(), qt.HasLen, 0)
+
+ paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
+ doTestPagerNoPages(t, paginator)
+
+ first = paginator.Pagers()[0].First()
+ c.Assert(first.PageGroups(), qt.HasLen, 0)
+ c.Assert(first.Pages(), qt.HasLen, 0)
+}
+
+func doTestPagerNoPages(t *testing.T, paginator *Paginator) {
+ paginatorPages := paginator.Pagers()
+ c := qt.New(t)
+ c.Assert(len(paginatorPages), qt.Equals, 1)
+ c.Assert(paginator.TotalNumberOfElements(), qt.Equals, 0)
+ c.Assert(paginator.PageSize(), qt.Equals, 5)
+ c.Assert(paginator.TotalPages(), qt.Equals, 0)
+
+ // pageOne should be nothing but the first
+ pageOne := paginatorPages[0]
+ c.Assert(pageOne.First(), qt.Not(qt.IsNil))
+ c.Assert(pageOne.HasNext(), qt.Equals, false)
+ c.Assert(pageOne.HasPrev(), qt.Equals, false)
+ c.Assert(pageOne.Next(), qt.IsNil)
+ c.Assert(len(pageOne.Pagers()), qt.Equals, 1)
+ c.Assert(pageOne.Pages().Len(), qt.Equals, 0)
+ c.Assert(pageOne.NumberOfElements(), qt.Equals, 0)
+ c.Assert(pageOne.TotalNumberOfElements(), qt.Equals, 0)
+ c.Assert(pageOne.TotalPages(), qt.Equals, 0)
+ c.Assert(pageOne.PageNumber(), qt.Equals, 1)
+ c.Assert(pageOne.PageSize(), qt.Equals, 5)
+}
+
+func TestPaginationURLFactory(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ cfg := config.New()
+ cfg.Set("paginatePath", "zoo")
+
+ for _, uglyURLs := range []bool{false, true} {
+ c.Run(fmt.Sprintf("uglyURLs=%t", uglyURLs), func(c *qt.C) {
+ tests := []struct {
+ name string
+ d TargetPathDescriptor
+ baseURL string
+ page int
+ expected string
+ expectedUgly string
+ }{
+ {
+ "HTML home page 32",
+ TargetPathDescriptor{Kind: KindHome, Type: output.HTMLFormat},
+ "http://example.com/", 32, "/zoo/32/", "/zoo/32.html",
+ },
+ {
+ "JSON home page 42",
+ TargetPathDescriptor{Kind: KindHome, Type: output.JSONFormat},
+ "http://example.com/", 42, "/zoo/42/index.json", "/zoo/42.json",
+ },
+ }
+
+ for _, test := range tests {
+ d := test.d
+ cfg.Set("baseURL", test.baseURL)
+ cfg.Set("uglyURLs", uglyURLs)
+ d.UglyURLs = uglyURLs
+
+ pathSpec := newTestPathSpecFor(cfg)
+ d.PathSpec = pathSpec
+
+ factory := newPaginationURLFactory(d)
+
+ got := factory(test.page)
+
+ if uglyURLs {
+ c.Assert(got, qt.Equals, test.expectedUgly)
+ } else {
+ c.Assert(got, qt.Equals, test.expected)
+ }
+
+ }
+ })
+ }
+}
+
+func TestProbablyEqualPageLists(t *testing.T) {
+ t.Parallel()
+ fivePages := createTestPages(5)
+ zeroPages := createTestPages(0)
+ zeroPagesByWeight, _ := createTestPages(0).GroupBy("Weight", "asc")
+ fivePagesByWeight, _ := createTestPages(5).GroupBy("Weight", "asc")
+ ninePagesByWeight, _ := createTestPages(9).GroupBy("Weight", "asc")
+
+ for i, this := range []struct {
+ v1 any
+ v2 any
+ expect bool
+ }{
+ {nil, nil, true},
+ {"a", "b", true},
+ {"a", fivePages, false},
+ {fivePages, "a", false},
+ {fivePages, createTestPages(2), false},
+ {fivePages, fivePages, true},
+ {zeroPages, zeroPages, true},
+ {fivePagesByWeight, fivePagesByWeight, true},
+ {zeroPagesByWeight, fivePagesByWeight, false},
+ {zeroPagesByWeight, zeroPagesByWeight, true},
+ {fivePagesByWeight, fivePages, false},
+ {fivePagesByWeight, ninePagesByWeight, false},
+ } {
+ result := probablyEqualPageLists(this.v1, this.v2)
+
+ if result != this.expect {
+ t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
+ }
+ }
+}
+
+func TestPaginationPage(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ fivePages := createTestPages(7)
+ fivePagesFuzzyWordCount, _ := createTestPages(7).GroupBy("FuzzyWordCount", "asc")
+
+ p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
+ p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
+
+ f1 := p1.pagers[0].First()
+ f2 := p2.pagers[0].First()
+
+ page11, _ := f1.page(1)
+ page1Nil, _ := f1.page(3)
+
+ page21, _ := f2.page(1)
+ page2Nil, _ := f2.page(3)
+
+ c.Assert(page11.FuzzyWordCount(), qt.Equals, 3)
+ c.Assert(page1Nil, qt.IsNil)
+
+ c.Assert(page21, qt.Not(qt.IsNil))
+ c.Assert(page21.FuzzyWordCount(), qt.Equals, 3)
+ c.Assert(page2Nil, qt.IsNil)
+}
diff --git a/resources/page/permalinks.go b/resources/page/permalinks.go
new file mode 100644
index 000000000..c31d22a3c
--- /dev/null
+++ b/resources/page/permalinks.go
@@ -0,0 +1,371 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
+// PermalinkExpander holds permalink mappings per section.
+type PermalinkExpander struct {
+ // knownPermalinkAttributes maps :tags in a permalink specification to a
+ // function which, given a page and the tag, returns the resulting string
+ // to be used to replace that tag.
+ knownPermalinkAttributes map[string]pageToPermaAttribute
+
+ expanders map[string]func(Page) (string, error)
+
+ ps *helpers.PathSpec
+}
+
+// Time for checking date formats. Every field is different than the
+// Go reference time for date formatting. This ensures that formatting this date
+// with a Go time format always has a different output than the format itself.
+var referenceTime = time.Date(2019, time.November, 9, 23, 1, 42, 1, time.UTC)
+
+// Return the callback for the given permalink attribute and a boolean indicating if the attribute is valid or not.
+func (p PermalinkExpander) callback(attr string) (pageToPermaAttribute, bool) {
+ if callback, ok := p.knownPermalinkAttributes[attr]; ok {
+ return callback, true
+ }
+
+ if strings.HasPrefix(attr, "sections[") {
+ fn := p.toSliceFunc(strings.TrimPrefix(attr, "sections"))
+ return func(p Page, s string) (string, error) {
+ return path.Join(fn(p.CurrentSection().SectionsEntries())...), nil
+ }, true
+ }
+
+ // Make sure this comes after all the other checks.
+ if referenceTime.Format(attr) != attr {
+ return p.pageToPermalinkDate, true
+ }
+
+ return nil, false
+}
+
+// NewPermalinkExpander creates a new PermalinkExpander configured by the given
+// PathSpec.
+func NewPermalinkExpander(ps *helpers.PathSpec) (PermalinkExpander, error) {
+ p := PermalinkExpander{ps: ps}
+
+ p.knownPermalinkAttributes = map[string]pageToPermaAttribute{
+ "year": p.pageToPermalinkDate,
+ "month": p.pageToPermalinkDate,
+ "monthname": p.pageToPermalinkDate,
+ "day": p.pageToPermalinkDate,
+ "weekday": p.pageToPermalinkDate,
+ "weekdayname": p.pageToPermalinkDate,
+ "yearday": p.pageToPermalinkDate,
+ "section": p.pageToPermalinkSection,
+ "sections": p.pageToPermalinkSections,
+ "title": p.pageToPermalinkTitle,
+ "slug": p.pageToPermalinkSlugElseTitle,
+ "slugorfilename": p.pageToPermalinkSlugElseFilename,
+ "filename": p.pageToPermalinkFilename,
+ }
+
+ patterns := ps.Cfg.GetStringMapString("permalinks")
+ if patterns == nil {
+ return p, nil
+ }
+
+ e, err := p.parse(patterns)
+ if err != nil {
+ return p, err
+ }
+
+ p.expanders = e
+
+ return p, nil
+}
+
+// Expand expands the path in p according to the rules defined for the given key.
+// If no rules are found for the given key, an empty string is returned.
+func (l PermalinkExpander) Expand(key string, p Page) (string, error) {
+ expand, found := l.expanders[key]
+
+ if !found {
+ return "", nil
+ }
+
+ return expand(p)
+}
+
+func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) {
+ expanders := make(map[string]func(Page) (string, error))
+
+ // Allow " " and / to represent the root section.
+ const sectionCutSet = " /" + string(os.PathSeparator)
+
+ for k, pattern := range patterns {
+ k = strings.Trim(k, sectionCutSet)
+
+ if !l.validate(pattern) {
+ return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkIllFormed}
+ }
+
+ pattern := pattern
+ matches := attributeRegexp.FindAllStringSubmatch(pattern, -1)
+
+ callbacks := make([]pageToPermaAttribute, len(matches))
+ replacements := make([]string, len(matches))
+ for i, m := range matches {
+ replacement := m[0]
+ attr := replacement[1:]
+ replacements[i] = replacement
+ callback, ok := l.callback(attr)
+
+ if !ok {
+ return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkAttributeUnknown}
+ }
+
+ callbacks[i] = callback
+ }
+
+ expanders[k] = func(p Page) (string, error) {
+ if matches == nil {
+ return pattern, nil
+ }
+
+ newField := pattern
+
+ for i, replacement := range replacements {
+ attr := replacement[1:]
+ callback := callbacks[i]
+ newAttr, err := callback(p, attr)
+ if err != nil {
+ return "", &permalinkExpandError{pattern: pattern, err: err}
+ }
+
+ newField = strings.Replace(newField, replacement, newAttr, 1)
+
+ }
+
+ return newField, nil
+ }
+
+ }
+
+ return expanders, nil
+}
+
+// pageToPermaAttribute is the type of a function which, given a page and a tag
+// and a tag, can return a string to go in that position in the page (or an error)
+type pageToPermaAttribute func(Page, string) (string, error)
+
+var attributeRegexp = regexp.MustCompile(`:\w+(\[.+\])?`)
+
+// validate determines if a PathPattern is well-formed
+func (l PermalinkExpander) validate(pp string) bool {
+ fragments := strings.Split(pp[1:], "/")
+ bail := false
+ for i := range fragments {
+ if bail {
+ return false
+ }
+ if len(fragments[i]) == 0 {
+ bail = true
+ continue
+ }
+
+ matches := attributeRegexp.FindAllStringSubmatch(fragments[i], -1)
+ if matches == nil {
+ continue
+ }
+
+ for _, match := range matches {
+ k := match[0][1:]
+ if _, ok := l.callback(k); !ok {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+type permalinkExpandError struct {
+ pattern string
+ err error
+}
+
+func (pee *permalinkExpandError) Error() string {
+ return fmt.Sprintf("error expanding %q: %s", pee.pattern, pee.err)
+}
+
+var (
+ errPermalinkIllFormed = errors.New("permalink ill-formed")
+ errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised")
+)
+
+func (l PermalinkExpander) pageToPermalinkDate(p Page, dateField string) (string, error) {
+ // a Page contains a Node which provides a field Date, time.Time
+ switch dateField {
+ case "year":
+ return strconv.Itoa(p.Date().Year()), nil
+ case "month":
+ return fmt.Sprintf("%02d", int(p.Date().Month())), nil
+ case "monthname":
+ return p.Date().Month().String(), nil
+ case "day":
+ return fmt.Sprintf("%02d", p.Date().Day()), nil
+ case "weekday":
+ return strconv.Itoa(int(p.Date().Weekday())), nil
+ case "weekdayname":
+ return p.Date().Weekday().String(), nil
+ case "yearday":
+ return strconv.Itoa(p.Date().YearDay()), nil
+ }
+
+ return p.Date().Format(dateField), nil
+}
+
+// pageToPermalinkTitle returns the URL-safe form of the title
+func (l PermalinkExpander) pageToPermalinkTitle(p Page, _ string) (string, error) {
+ return l.ps.URLize(p.Title()), nil
+}
+
+// pageToPermalinkFilename returns the URL-safe form of the filename
+func (l PermalinkExpander) pageToPermalinkFilename(p Page, _ string) (string, error) {
+ name := p.File().TranslationBaseName()
+ if name == "index" {
+ // Page bundles; the directory name will hopefully have a better name.
+ dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator)
+ _, name = filepath.Split(dir)
+ }
+
+ return l.ps.URLize(name), nil
+}
+
+// if the page has a slug, return the slug, else return the title
+func (l PermalinkExpander) pageToPermalinkSlugElseTitle(p Page, a string) (string, error) {
+ if p.Slug() != "" {
+ return l.ps.URLize(p.Slug()), nil
+ }
+ return l.pageToPermalinkTitle(p, a)
+}
+
+// if the page has a slug, return the slug, else return the filename
+func (l PermalinkExpander) pageToPermalinkSlugElseFilename(p Page, a string) (string, error) {
+ if p.Slug() != "" {
+ return l.ps.URLize(p.Slug()), nil
+ }
+ return l.pageToPermalinkFilename(p, a)
+}
+
+func (l PermalinkExpander) pageToPermalinkSection(p Page, _ string) (string, error) {
+ return p.Section(), nil
+}
+
+func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, error) {
+ return p.CurrentSection().SectionsPath(), nil
+}
+
+var (
+ nilSliceFunc = func(s []string) []string {
+ return nil
+ }
+ allSliceFunc = func(s []string) []string {
+ return s
+ }
+)
+
+// toSliceFunc returns a slice func that slices s according to the cut spec.
+// The cut spec must be of the form [low:high] (one or both can be omitted),
+// also allowing single slice indices (e.g. [2]) and the special [last] keyword
+// giving the last element of the slice.
+// The returned function will be lenient and not panic in out of bounds situation.
+//
+// The current use case for this is to use parts of the sections path in permalinks.
+func (l PermalinkExpander) toSliceFunc(cut string) func(s []string) []string {
+ cut = strings.ToLower(strings.TrimSpace(cut))
+ if cut == "" {
+ return allSliceFunc
+ }
+
+ if len(cut) < 3 || (cut[0] != '[' || cut[len(cut)-1] != ']') {
+ return nilSliceFunc
+ }
+
+ toNFunc := func(s string, low bool) func(ss []string) int {
+ if s == "" {
+ if low {
+ return func(ss []string) int {
+ return 0
+ }
+ } else {
+ return func(ss []string) int {
+ return len(ss)
+ }
+ }
+ }
+
+ if s == "last" {
+ return func(ss []string) int {
+ return len(ss) - 1
+ }
+ }
+
+ n, _ := strconv.Atoi(s)
+ if n < 0 {
+ n = 0
+ }
+ return func(ss []string) int {
+ // Prevent out of bound situations. It would not make
+ // much sense to panic here.
+ if n > len(ss) {
+ return len(ss)
+ }
+ return n
+ }
+ }
+
+ opsStr := cut[1 : len(cut)-1]
+ opts := strings.Split(opsStr, ":")
+
+ if !strings.Contains(opsStr, ":") {
+ toN := toNFunc(opts[0], true)
+ return func(s []string) []string {
+ if len(s) == 0 {
+ return nil
+ }
+ v := s[toN(s)]
+ if v == "" {
+ return nil
+ }
+ return []string{v}
+ }
+ }
+
+ toN1, toN2 := toNFunc(opts[0], true), toNFunc(opts[1], false)
+
+ return func(s []string) []string {
+ if len(s) == 0 {
+ return nil
+ }
+ return s[toN1(s):toN2(s)]
+ }
+
+}
diff --git a/resources/page/permalinks_test.go b/resources/page/permalinks_test.go
new file mode 100644
index 000000000..7baf16503
--- /dev/null
+++ b/resources/page/permalinks_test.go
@@ -0,0 +1,241 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "regexp"
+ "sync"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// testdataPermalinks is used by a couple of tests; the expandsTo content is
+// subject to the data in simplePageJSON.
+var testdataPermalinks = []struct {
+ spec string
+ valid bool
+ expandsTo string
+}{
+ {":title", true, "spf13-vim-3.0-release-and-new-website"},
+ {"/:year-:month-:title", true, "/2012-04-spf13-vim-3.0-release-and-new-website"},
+ {"/:year/:yearday/:month/:monthname/:day/:weekday/:weekdayname/", true, "/2012/97/04/April/06/5/Friday/"}, // Dates
+ {"/:section/", true, "/blue/"}, // Section
+ {"/:title/", true, "/spf13-vim-3.0-release-and-new-website/"}, // Title
+ {"/:slug/", true, "/the-slug/"}, // Slug
+ {"/:slugorfilename/", true, "/the-slug/"}, // Slug or filename
+ {"/:filename/", true, "/test-page/"}, // Filename
+ {"/:06-:1-:2-:Monday", true, "/12-4-6-Friday"}, // Dates with Go formatting
+ {"/:2006_01_02_15_04_05.000", true, "/2012_04_06_03_01_59.000"}, // Complicated custom date format
+ {"/:sections/", true, "/a/b/c/"}, // Sections
+ {"/:sections[last]/", true, "/c/"}, // Sections
+
+ // Failures
+ {"/blog/:fred", false, ""},
+ {"/:year//:title", false, ""},
+ {"/:TITLE", false, ""}, // case is not normalized
+ {"/:2017", false, ""}, // invalid date format
+ {"/:2006-01-02", false, ""}, // valid date format but invalid attribute name
+}
+
+func TestPermalinkExpansion(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ page := newTestPageWithFile("/test-page/index.md")
+ page.title = "Spf13 Vim 3.0 Release and new website"
+ d, _ := time.Parse("2006-01-02 15:04:05", "2012-04-06 03:01:59")
+ page.date = d
+ page.section = "blue"
+ page.slug = "The Slug"
+
+ for _, item := range testdataPermalinks {
+ if !item.valid {
+ continue
+ }
+
+ specNameCleaner := regexp.MustCompile(`[\:\/\[\]]`)
+ name := specNameCleaner.ReplaceAllString(item.spec, "")
+
+ c.Run(name, func(c *qt.C) {
+
+ permalinksConfig := map[string]string{
+ "posts": item.spec,
+ }
+
+ ps := newTestPathSpec()
+ ps.Cfg.Set("permalinks", permalinksConfig)
+
+ expander, err := NewPermalinkExpander(ps)
+ c.Assert(err, qt.IsNil)
+
+ expanded, err := expander.Expand("posts", page)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expanded, qt.Equals, item.expandsTo)
+ })
+
+ }
+}
+
+func TestPermalinkExpansionMultiSection(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ page := newTestPage()
+ page.title = "Page Title"
+ d, _ := time.Parse("2006-01-02", "2012-04-06")
+ page.date = d
+ page.section = "blue"
+ page.slug = "The Slug"
+
+ page_slug_fallback := newTestPageWithFile("/page-filename/index.md")
+ page_slug_fallback.title = "Page Title"
+
+ permalinksConfig := map[string]string{
+ "posts": "/:slug",
+ "blog": "/:section/:year",
+ "recipes": "/:slugorfilename",
+ }
+
+ ps := newTestPathSpec()
+ ps.Cfg.Set("permalinks", permalinksConfig)
+
+ expander, err := NewPermalinkExpander(ps)
+ c.Assert(err, qt.IsNil)
+
+ expanded, err := expander.Expand("posts", page)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expanded, qt.Equals, "/the-slug")
+
+ expanded, err = expander.Expand("blog", page)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expanded, qt.Equals, "/blue/2012")
+
+ expanded, err = expander.Expand("posts", page_slug_fallback)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expanded, qt.Equals, "/page-title")
+
+ expanded, err = expander.Expand("recipes", page_slug_fallback)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expanded, qt.Equals, "/page-filename")
+}
+
+func TestPermalinkExpansionConcurrent(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ permalinksConfig := map[string]string{
+ "posts": "/:slug/",
+ }
+
+ ps := newTestPathSpec()
+ ps.Cfg.Set("permalinks", permalinksConfig)
+
+ expander, err := NewPermalinkExpander(ps)
+ c.Assert(err, qt.IsNil)
+
+ var wg sync.WaitGroup
+
+ for i := 1; i < 20; i++ {
+ wg.Add(1)
+ go func(i int) {
+ defer wg.Done()
+ page := newTestPage()
+ for j := 1; j < 20; j++ {
+ page.slug = fmt.Sprintf("slug%d", i+j)
+ expanded, err := expander.Expand("posts", page)
+ c.Assert(err, qt.IsNil)
+ c.Assert(expanded, qt.Equals, fmt.Sprintf("/%s/", page.slug))
+ }
+ }(i)
+ }
+
+ wg.Wait()
+}
+
+func TestPermalinkExpansionSliceSyntax(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+ exp, _ := NewPermalinkExpander(newTestPathSpec())
+ slice := []string{"a", "b", "c", "d"}
+ fn := func(s string) []string {
+ return exp.toSliceFunc(s)(slice)
+ }
+
+ c.Run("Basic", func(c *qt.C) {
+ c.Assert(fn("[1:3]"), qt.DeepEquals, []string{"b", "c"})
+ c.Assert(fn("[1:]"), qt.DeepEquals, []string{"b", "c", "d"})
+ c.Assert(fn("[:2]"), qt.DeepEquals, []string{"a", "b"})
+ c.Assert(fn("[0:2]"), qt.DeepEquals, []string{"a", "b"})
+ c.Assert(fn("[:]"), qt.DeepEquals, []string{"a", "b", "c", "d"})
+ c.Assert(fn(""), qt.DeepEquals, []string{"a", "b", "c", "d"})
+ c.Assert(fn("[last]"), qt.DeepEquals, []string{"d"})
+ c.Assert(fn("[:last]"), qt.DeepEquals, []string{"a", "b", "c"})
+
+ })
+
+ c.Run("Out of bounds", func(c *qt.C) {
+ c.Assert(fn("[1:5]"), qt.DeepEquals, []string{"b", "c", "d"})
+ c.Assert(fn("[-1:5]"), qt.DeepEquals, []string{"a", "b", "c", "d"})
+ c.Assert(fn("[5:]"), qt.DeepEquals, []string{})
+ c.Assert(fn("[5:]"), qt.DeepEquals, []string{})
+ c.Assert(fn("[5:32]"), qt.DeepEquals, []string{})
+ c.Assert(exp.toSliceFunc("[:1]")(nil), qt.DeepEquals, []string(nil))
+ c.Assert(exp.toSliceFunc("[:1]")([]string{}), qt.DeepEquals, []string(nil))
+
+ // These all return nil
+ c.Assert(fn("[]"), qt.IsNil)
+ c.Assert(fn("[1:}"), qt.IsNil)
+ c.Assert(fn("foo"), qt.IsNil)
+
+ })
+
+}
+
+func BenchmarkPermalinkExpand(b *testing.B) {
+ page := newTestPage()
+ page.title = "Hugo Rocks"
+ d, _ := time.Parse("2006-01-02", "2019-02-28")
+ page.date = d
+
+ permalinksConfig := map[string]string{
+ "posts": "/:year-:month-:title",
+ }
+
+ ps := newTestPathSpec()
+ ps.Cfg.Set("permalinks", permalinksConfig)
+
+ expander, err := NewPermalinkExpander(ps)
+ if err != nil {
+ b.Fatal(err)
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ s, err := expander.Expand("posts", page)
+ if err != nil {
+ b.Fatal(err)
+ }
+ if s != "/2019-02-hugo-rocks" {
+ b.Fatal(s)
+ }
+
+ }
+}
diff --git a/resources/page/site.go b/resources/page/site.go
new file mode 100644
index 000000000..f5806280c
--- /dev/null
+++ b/resources/page/site.go
@@ -0,0 +1,167 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "html/template"
+ "time"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/navigation"
+)
+
+// Site represents a site in the build. This is currently a very narrow interface,
+// but the actual implementation will be richer, see hugolib.SiteInfo.
+type Site interface {
+ // Returns the Language configured for this Site.
+ Language() *langs.Language
+
+ // Returns all the regular Pages in this Site.
+ RegularPages() Pages
+
+ // Returns all Pages in this Site.
+ Pages() Pages
+
+ // A shortcut to the home page.
+ Home() Page
+
+ // Returns true if we're running in a server.
+ IsServer() bool
+
+ // Returns the server port.
+ ServerPort() int
+
+ // Returns the configured title for this Site.
+ Title() string
+
+ // Returns all Sites for all languages.
+ Sites() Sites
+
+ // Returns Site currently rendering.
+ Current() Site
+
+ // Returns a struct with some information about the build.
+ Hugo() hugo.Info
+
+ // Returns the BaseURL for this Site.
+ BaseURL() template.URL
+
+	// Returns a taxonomy map.
+ Taxonomies() any
+
+ // Returns the last modification date of the content.
+ LastChange() time.Time
+
+ // Returns the Menus for this site.
+ Menus() navigation.Menus
+
+ // Returns the Params configured for this site.
+ Params() maps.Params
+
+ // Returns a map of all the data inside /data.
+ Data() map[string]any
+}
+
+// Sites represents an ordered list of sites (languages).
+type Sites []Site
+
+// First is a convenience method to get the first Site, i.e. the main language.
+func (s Sites) First() Site {
+ if len(s) == 0 {
+ return nil
+ }
+ return s[0]
+}
+
+type testSite struct {
+ h hugo.Info
+ l *langs.Language
+}
+
+func (t testSite) Hugo() hugo.Info {
+ return t.h
+}
+
+func (t testSite) ServerPort() int {
+ return 1313
+}
+
+func (testSite) LastChange() (t time.Time) {
+ return
+}
+
+func (t testSite) Title() string {
+ return "foo"
+}
+
+func (t testSite) Sites() Sites {
+ return nil
+}
+
+func (t testSite) Current() Site {
+ return t
+}
+
+func (t testSite) IsServer() bool {
+ return false
+}
+
+func (t testSite) Language() *langs.Language {
+ return t.l
+}
+
+func (t testSite) Home() Page {
+ return nil
+}
+
+func (t testSite) Pages() Pages {
+ return nil
+}
+
+func (t testSite) RegularPages() Pages {
+ return nil
+}
+
+func (t testSite) Menus() navigation.Menus {
+ return nil
+}
+
+func (t testSite) Taxonomies() any {
+ return nil
+}
+
+func (t testSite) BaseURL() template.URL {
+ return ""
+}
+
+func (t testSite) Params() maps.Params {
+ return nil
+}
+
+func (t testSite) Data() map[string]any {
+ return nil
+}
+
+// NewDummyHugoSite creates a new minimal test site.
+func NewDummyHugoSite(cfg config.Provider) Site {
+ return testSite{
+ h: hugo.NewInfo(hugo.EnvironmentProduction, nil),
+ l: langs.NewLanguage("en", cfg),
+ }
+}
diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go
new file mode 100644
index 000000000..30b8e4dff
--- /dev/null
+++ b/resources/page/testhelpers_test.go
@@ -0,0 +1,622 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "html/template"
+ "path"
+ "path/filepath"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/bep/gitmap"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/navigation"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/related"
+
+ "github.com/gohugoio/hugo/source"
+)
+
+var (
+ _ resource.LengthProvider = (*testPage)(nil)
+ _ Page = (*testPage)(nil)
+)
+
+var relatedDocsHandler = NewRelatedDocsHandler(related.DefaultConfig)
+
+func newTestPage() *testPage {
+ return newTestPageWithFile("/a/b/c.md")
+}
+
+func newTestPageWithFile(filename string) *testPage {
+ filename = filepath.FromSlash(filename)
+ file := source.NewTestFile(filename)
+ return &testPage{
+ params: make(map[string]any),
+ data: make(map[string]any),
+ file: file,
+ currentSection: &testPage{
+ sectionEntries: []string{"a", "b", "c"},
+ },
+ site: testSite{l: langs.NewDefaultLanguage(config.New())},
+ }
+}
+
+func newTestPathSpec() *helpers.PathSpec {
+ return newTestPathSpecFor(config.New())
+}
+
+func newTestPathSpecFor(cfg config.Provider) *helpers.PathSpec {
+ config.SetBaseTestDefaults(cfg)
+ langs.LoadLanguageSettings(cfg, nil)
+ mod, err := modules.CreateProjectModule(cfg)
+ if err != nil {
+ panic(err)
+ }
+ cfg.Set("allModules", modules.Modules{mod})
+ fs := hugofs.NewMem(cfg)
+ s, err := helpers.NewPathSpec(fs, cfg, nil)
+ if err != nil {
+ panic(err)
+ }
+ return s
+}
+
+type testPage struct {
+ kind string
+ description string
+ title string
+ linkTitle string
+ lang string
+ section string
+ site testSite
+
+ content string
+
+ fuzzyWordCount int
+
+ path string
+
+ slug string
+
+ // Dates
+ date time.Time
+ lastMod time.Time
+ expiryDate time.Time
+ pubDate time.Time
+
+ weight int
+
+ params map[string]any
+ data map[string]any
+
+ file source.File
+
+ currentSection *testPage
+ sectionEntries []string
+}
+
+func (p *testPage) Err() resource.ResourceError {
+ return nil
+}
+
+func (p *testPage) Aliases() []string {
+ panic("not implemented")
+}
+
+func (p *testPage) AllTranslations() Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) AlternativeOutputFormats() OutputFormats {
+ panic("not implemented")
+}
+
+func (p *testPage) Author() Author {
+ return Author{}
+}
+
+func (p *testPage) Authors() AuthorList {
+ return nil
+}
+
+func (p *testPage) BaseFileName() string {
+ panic("not implemented")
+}
+
+func (p *testPage) BundleType() files.ContentClass {
+ panic("not implemented")
+}
+
+func (p *testPage) Content() (any, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) ContentBaseName() string {
+ panic("not implemented")
+}
+
+func (p *testPage) CurrentSection() Page {
+ return p.currentSection
+}
+
+func (p *testPage) Data() any {
+ return p.data
+}
+
+func (p *testPage) Sitemap() config.Sitemap {
+ return config.Sitemap{}
+}
+
+func (p *testPage) Layout() string {
+ return ""
+}
+
+func (p *testPage) Date() time.Time {
+ return p.date
+}
+
+func (p *testPage) Description() string {
+ return ""
+}
+
+func (p *testPage) Dir() string {
+ panic("not implemented")
+}
+
+func (p *testPage) Draft() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) Eq(other any) bool {
+ return p == other
+}
+
+func (p *testPage) ExpiryDate() time.Time {
+ return p.expiryDate
+}
+
+func (p *testPage) Ext() string {
+ panic("not implemented")
+}
+
+func (p *testPage) Extension() string {
+ panic("not implemented")
+}
+
+func (p *testPage) File() source.File {
+ return p.file
+}
+
+func (p *testPage) FileInfo() hugofs.FileMetaInfo {
+ panic("not implemented")
+}
+
+func (p *testPage) Filename() string {
+ panic("not implemented")
+}
+
+func (p *testPage) FirstSection() Page {
+ panic("not implemented")
+}
+
+func (p *testPage) FuzzyWordCount() int {
+ return p.fuzzyWordCount
+}
+
+func (p *testPage) GetPage(ref string) (Page, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) GetPageWithTemplateInfo(info tpl.Info, ref string) (Page, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) GetParam(key string) any {
+ panic("not implemented")
+}
+
+func (p *testPage) GetTerms(taxonomy string) Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) GetRelatedDocsHandler() *RelatedDocsHandler {
+ return relatedDocsHandler
+}
+
+func (p *testPage) GitInfo() *gitmap.GitInfo {
+ return nil
+}
+
+func (p *testPage) CodeOwners() []string {
+ return nil
+}
+
+func (p *testPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
+ panic("not implemented")
+}
+
+func (p *testPage) HasShortcode(name string) bool {
+ panic("not implemented")
+}
+
+func (p *testPage) Hugo() hugo.Info {
+ panic("not implemented")
+}
+
+func (p *testPage) InSection(other any) (bool, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) IsAncestor(other any) (bool, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) IsDescendant(other any) (bool, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) IsDraft() bool {
+ return false
+}
+
+func (p *testPage) IsHome() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
+ panic("not implemented")
+}
+
+func (p *testPage) IsNode() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) IsPage() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) IsSection() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) IsTranslated() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) Keywords() []string {
+ return nil
+}
+
+func (p *testPage) Kind() string {
+ return p.kind
+}
+
+func (p *testPage) Lang() string {
+ return p.lang
+}
+
+func (p *testPage) Language() *langs.Language {
+ panic("not implemented")
+}
+
+func (p *testPage) LanguagePrefix() string {
+ return ""
+}
+
+func (p *testPage) Lastmod() time.Time {
+ return p.lastMod
+}
+
+func (p *testPage) Len() int {
+ return len(p.content)
+}
+
+func (p *testPage) LinkTitle() string {
+ if p.linkTitle == "" {
+ if p.title == "" {
+ return p.path
+ }
+ return p.title
+ }
+ return p.linkTitle
+}
+
+func (p *testPage) LogicalName() string {
+ panic("not implemented")
+}
+
+func (p *testPage) MediaType() media.Type {
+ panic("not implemented")
+}
+
+func (p *testPage) Menus() navigation.PageMenus {
+ return navigation.PageMenus{}
+}
+
+func (p *testPage) Name() string {
+ panic("not implemented")
+}
+
+func (p *testPage) Next() Page {
+ panic("not implemented")
+}
+
+func (p *testPage) NextInSection() Page {
+ return nil
+}
+
+func (p *testPage) NextPage() Page {
+ return nil
+}
+
+func (p *testPage) OutputFormats() OutputFormats {
+ panic("not implemented")
+}
+
+func (p *testPage) Pages() Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) RegularPages() Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) RegularPagesRecursive() Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) Paginate(seq any, options ...any) (*Pager, error) {
+ return nil, nil
+}
+
+func (p *testPage) Paginator(options ...any) (*Pager, error) {
+ return nil, nil
+}
+
+func (p *testPage) Param(key any) (any, error) {
+ return resource.Param(p, nil, key)
+}
+
+func (p *testPage) Params() maps.Params {
+ return p.params
+}
+
+func (p *testPage) Page() Page {
+ return p
+}
+
+func (p *testPage) Parent() Page {
+ panic("not implemented")
+}
+
+func (p *testPage) Path() string {
+ return p.path
+}
+
+func (p *testPage) Pathc() string {
+ return p.path
+}
+
+func (p *testPage) Permalink() string {
+ panic("not implemented")
+}
+
+func (p *testPage) Plain() string {
+ panic("not implemented")
+}
+
+func (p *testPage) PlainWords() []string {
+ panic("not implemented")
+}
+
+func (p *testPage) Prev() Page {
+ panic("not implemented")
+}
+
+func (p *testPage) PrevInSection() Page {
+ return nil
+}
+
+func (p *testPage) PrevPage() Page {
+ return nil
+}
+
+func (p *testPage) PublishDate() time.Time {
+ return p.pubDate
+}
+
+func (p *testPage) RSSLink() template.URL {
+ return ""
+}
+
+func (p *testPage) RawContent() string {
+ panic("not implemented")
+}
+
+func (p *testPage) ReadingTime() int {
+ panic("not implemented")
+}
+
+func (p *testPage) Ref(argsm map[string]any) (string, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) RefFrom(argsm map[string]any, source any) (string, error) {
+ return "", nil
+}
+
+func (p *testPage) RelPermalink() string {
+ panic("not implemented")
+}
+
+func (p *testPage) RelRef(argsm map[string]any) (string, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) RelRefFrom(argsm map[string]any, source any) (string, error) {
+ return "", nil
+}
+
+func (p *testPage) Render(layout ...string) (template.HTML, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) RenderString(args ...any) (template.HTML, error) {
+ panic("not implemented")
+}
+
+func (p *testPage) ResourceType() string {
+ panic("not implemented")
+}
+
+func (p *testPage) Resources() resource.Resources {
+ panic("not implemented")
+}
+
+func (p *testPage) Scratch() *maps.Scratch {
+ panic("not implemented")
+}
+
+func (p *testPage) Store() *maps.Scratch {
+ panic("not implemented")
+}
+
+func (p *testPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
+ v, err := p.Param(cfg.Name)
+ if err != nil {
+ return nil, err
+ }
+
+ return cfg.ToKeywords(v)
+}
+
+func (p *testPage) Section() string {
+ return p.section
+}
+
+func (p *testPage) Sections() Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) SectionsEntries() []string {
+ return p.sectionEntries
+}
+
+func (p *testPage) SectionsPath() string {
+ return path.Join(p.sectionEntries...)
+}
+
+func (p *testPage) Site() Site {
+ return p.site
+}
+
+func (p *testPage) Sites() Sites {
+ panic("not implemented")
+}
+
+func (p *testPage) Slug() string {
+ return p.slug
+}
+
+func (p *testPage) String() string {
+ return p.path
+}
+
+func (p *testPage) Summary() template.HTML {
+ panic("not implemented")
+}
+
+func (p *testPage) TableOfContents() template.HTML {
+ panic("not implemented")
+}
+
+func (p *testPage) Title() string {
+ return p.title
+}
+
+func (p *testPage) TranslationBaseName() string {
+ panic("not implemented")
+}
+
+func (p *testPage) TranslationKey() string {
+ return p.path
+}
+
+func (p *testPage) Translations() Pages {
+ panic("not implemented")
+}
+
+func (p *testPage) Truncated() bool {
+ panic("not implemented")
+}
+
+func (p *testPage) Type() string {
+ return p.section
+}
+
+func (p *testPage) URL() string {
+ return ""
+}
+
+func (p *testPage) UniqueID() string {
+ panic("not implemented")
+}
+
+func (p *testPage) Weight() int {
+ return p.weight
+}
+
+func (p *testPage) WordCount() int {
+ panic("not implemented")
+}
+
+func (p *testPage) GetIdentity() identity.Identity {
+ panic("not implemented")
+}
+
+func createTestPages(num int) Pages {
+ pages := make(Pages, num)
+
+ for i := 0; i < num; i++ {
+ m := &testPage{
+ path: fmt.Sprintf("/x/y/z/p%d.md", i),
+ weight: 5,
+ fuzzyWordCount: i + 2, // magic
+ }
+
+ if i%2 == 0 {
+ m.weight = 10
+ }
+ pages[i] = m
+
+ }
+
+ return pages
+}
diff --git a/resources/page/weighted.go b/resources/page/weighted.go
new file mode 100644
index 000000000..39034d26c
--- /dev/null
+++ b/resources/page/weighted.go
@@ -0,0 +1,138 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package page
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/gohugoio/hugo/common/collections"
+)
+
+var _ collections.Slicer = WeightedPage{}
+
+// WeightedPages is a list of Pages with their corresponding (and relative) weight
+// [{Weight: 30, Page: *1}, {Weight: 40, Page: *2}]
+type WeightedPages []WeightedPage
+
+// Page will return the Page (of Kind taxonomyList) that represents this set
+// of pages. This method will panic if p is empty, as that should never happen.
+func (p WeightedPages) Page() Page {
+ if len(p) == 0 {
+ panic("WeightedPages is empty")
+ }
+
+ first := p[0]
+
+ // TODO(bep) fix tests
+ if first.owner == nil {
+ return nil
+ }
+
+ return first.owner
+}
+
+// A WeightedPage is a Page with a weight.
+type WeightedPage struct {
+ Weight int
+ Page
+
+ // Reference to the owning Page. This avoids having to do
+ // manual .Site.GetPage lookups. It is implemented in this roundabout way
+ // because we cannot add additional state to the WeightedPages slice
+ // without breaking lots of templates in the wild.
+ owner Page
+}
+
+func NewWeightedPage(weight int, p Page, owner Page) WeightedPage {
+ return WeightedPage{Weight: weight, Page: p, owner: owner}
+}
+
+func (w WeightedPage) String() string {
+ return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title())
+}
+
+// Slice is for internal use.
+// It is used by the template functions. See collections.Slice.
+func (p WeightedPage) Slice(in any) (any, error) {
+ switch items := in.(type) {
+ case WeightedPages:
+ return items, nil
+ case []any:
+ weighted := make(WeightedPages, len(items))
+ for i, v := range items {
+ g, ok := v.(WeightedPage)
+ if !ok {
+ return nil, fmt.Errorf("type %T is not a WeightedPage", v)
+ }
+ weighted[i] = g
+ }
+ return weighted, nil
+ default:
+ return nil, fmt.Errorf("invalid slice type %T", items)
+ }
+}
+
+// Pages returns the Pages in this weighted page set.
+func (wp WeightedPages) Pages() Pages {
+ pages := make(Pages, len(wp))
+ for i := range wp {
+ pages[i] = wp[i].Page
+ }
+ return pages
+}
+
+// Next returns the next Page relative to the given Page in
+// this weighted page set.
+func (wp WeightedPages) Next(cur Page) Page {
+ for x, c := range wp {
+ if c.Page.Eq(cur) {
+ if x == 0 {
+ return nil
+ }
+ return wp[x-1].Page
+ }
+ }
+ return nil
+}
+
+// Prev returns the previous Page relative to the given Page in
+// this weighted page set.
+func (wp WeightedPages) Prev(cur Page) Page {
+ for x, c := range wp {
+ if c.Page.Eq(cur) {
+ if x < len(wp)-1 {
+ return wp[x+1].Page
+ }
+ return nil
+ }
+ }
+ return nil
+}
+
+func (wp WeightedPages) Len() int { return len(wp) }
+func (wp WeightedPages) Swap(i, j int) { wp[i], wp[j] = wp[j], wp[i] }
+
+// Sort stable sorts this weighted page set.
+func (wp WeightedPages) Sort() { sort.Stable(wp) }
+
+// Count returns the number of pages in this weighted page set.
+func (wp WeightedPages) Count() int { return len(wp) }
+
+func (wp WeightedPages) Less(i, j int) bool {
+ if wp[i].Weight == wp[j].Weight {
+ return DefaultPageSort(wp[i].Page, wp[j].Page)
+ }
+ return wp[i].Weight < wp[j].Weight
+}
diff --git a/resources/page/zero_file.autogen.go b/resources/page/zero_file.autogen.go
new file mode 100644
index 000000000..72d98998e
--- /dev/null
+++ b/resources/page/zero_file.autogen.go
@@ -0,0 +1,88 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This file is autogenerated.
+
+package page
+
+import (
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/source"
+)
+
+// zeroFile represents a zero value of source.File with warnings if invoked.
+type zeroFile struct {
+ log loggers.Logger
+}
+
+func NewZeroFile(log loggers.Logger) source.File {
+ return zeroFile{log: log}
+}
+
+func (zeroFile) IsZero() bool {
+ return true
+}
+
+func (z zeroFile) Path() (o0 string) {
+ z.log.Warnln(".File.Path on zero object. Wrap it in if or with: {{ with .File }}{{ .Path }}{{ end }}")
+ return
+}
+func (z zeroFile) Section() (o0 string) {
+ z.log.Warnln(".File.Section on zero object. Wrap it in if or with: {{ with .File }}{{ .Section }}{{ end }}")
+ return
+}
+func (z zeroFile) Lang() (o0 string) {
+ z.log.Warnln(".File.Lang on zero object. Wrap it in if or with: {{ with .File }}{{ .Lang }}{{ end }}")
+ return
+}
+func (z zeroFile) Filename() (o0 string) {
+ z.log.Warnln(".File.Filename on zero object. Wrap it in if or with: {{ with .File }}{{ .Filename }}{{ end }}")
+ return
+}
+func (z zeroFile) Dir() (o0 string) {
+ z.log.Warnln(".File.Dir on zero object. Wrap it in if or with: {{ with .File }}{{ .Dir }}{{ end }}")
+ return
+}
+func (z zeroFile) Extension() (o0 string) {
+ z.log.Warnln(".File.Extension on zero object. Wrap it in if or with: {{ with .File }}{{ .Extension }}{{ end }}")
+ return
+}
+func (z zeroFile) Ext() (o0 string) {
+ z.log.Warnln(".File.Ext on zero object. Wrap it in if or with: {{ with .File }}{{ .Ext }}{{ end }}")
+ return
+}
+func (z zeroFile) LogicalName() (o0 string) {
+ z.log.Warnln(".File.LogicalName on zero object. Wrap it in if or with: {{ with .File }}{{ .LogicalName }}{{ end }}")
+ return
+}
+func (z zeroFile) BaseFileName() (o0 string) {
+ z.log.Warnln(".File.BaseFileName on zero object. Wrap it in if or with: {{ with .File }}{{ .BaseFileName }}{{ end }}")
+ return
+}
+func (z zeroFile) TranslationBaseName() (o0 string) {
+ z.log.Warnln(".File.TranslationBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .TranslationBaseName }}{{ end }}")
+ return
+}
+func (z zeroFile) ContentBaseName() (o0 string) {
+ z.log.Warnln(".File.ContentBaseName on zero object. Wrap it in if or with: {{ with .File }}{{ .ContentBaseName }}{{ end }}")
+ return
+}
+func (z zeroFile) UniqueID() (o0 string) {
+ z.log.Warnln(".File.UniqueID on zero object. Wrap it in if or with: {{ with .File }}{{ .UniqueID }}{{ end }}")
+ return
+}
+func (z zeroFile) FileInfo() (o0 hugofs.FileMetaInfo) {
+ z.log.Warnln(".File.FileInfo on zero object. Wrap it in if or with: {{ with .File }}{{ .FileInfo }}{{ end }}")
+ return
+}
diff --git a/resources/post_publish.go b/resources/post_publish.go
new file mode 100644
index 000000000..b2adfa5ce
--- /dev/null
+++ b/resources/post_publish.go
@@ -0,0 +1,51 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "github.com/gohugoio/hugo/resources/postpub"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+type transformationKeyer interface {
+ TransformationKey() string
+}
+
+// PostProcess wraps the given Resource for later processing.
+func (spec *Spec) PostProcess(r resource.Resource) (postpub.PostPublishedResource, error) {
+ key := r.(transformationKeyer).TransformationKey()
+ spec.postProcessMu.RLock()
+ result, found := spec.PostProcessResources[key]
+ spec.postProcessMu.RUnlock()
+ if found {
+ return result, nil
+ }
+
+ spec.postProcessMu.Lock()
+ defer spec.postProcessMu.Unlock()
+
+ // Double check
+ result, found = spec.PostProcessResources[key]
+ if found {
+ return result, nil
+ }
+
+ result = postpub.NewPostPublishResource(spec.incr.Incr(), r)
+ if result == nil {
+ panic("got nil result")
+ }
+ spec.PostProcessResources[key] = result
+
+ return result, nil
+}
diff --git a/resources/postpub/fields.go b/resources/postpub/fields.go
new file mode 100644
index 000000000..13b2963ce
--- /dev/null
+++ b/resources/postpub/fields.go
@@ -0,0 +1,59 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package postpub
+
+import (
+ "reflect"
+)
+
+const (
+ FieldNotSupported = "__field_not_supported"
+)
+
+func structToMapWithPlaceholders(root string, in any, createPlaceholder func(s string) string) map[string]any {
+ m := structToMap(in)
+ insertFieldPlaceholders(root, m, createPlaceholder)
+ return m
+}
+
+func structToMap(s any) map[string]any {
+ m := make(map[string]any)
+ t := reflect.TypeOf(s)
+
+ for i := 0; i < t.NumMethod(); i++ {
+ method := t.Method(i)
+ if method.PkgPath != "" {
+ continue
+ }
+ if method.Type.NumIn() == 1 {
+ m[method.Name] = ""
+ }
+ }
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ if field.PkgPath != "" {
+ continue
+ }
+ m[field.Name] = ""
+ }
+ return m
+}
+
+// insert placeholder for the templates. Do it very shallow for now.
+func insertFieldPlaceholders(root string, m map[string]any, createPlaceholder func(s string) string) {
+ for k := range m {
+ m[k] = createPlaceholder(root + "." + k)
+ }
+}
diff --git a/resources/postpub/fields_test.go b/resources/postpub/fields_test.go
new file mode 100644
index 000000000..8e80063f1
--- /dev/null
+++ b/resources/postpub/fields_test.go
@@ -0,0 +1,45 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package postpub
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/media"
+)
+
+func TestCreatePlaceholders(t *testing.T) {
+ c := qt.New(t)
+
+ m := structToMap(media.CSSType)
+
+ insertFieldPlaceholders("foo", m, func(s string) string {
+ return "pre_" + s + "_post"
+ })
+
+ c.Assert(m, qt.DeepEquals, map[string]any{
+ "IsZero": "pre_foo.IsZero_post",
+ "MarshalJSON": "pre_foo.MarshalJSON_post",
+ "Suffixes": "pre_foo.Suffixes_post",
+ "Delimiter": "pre_foo.Delimiter_post",
+ "FirstSuffix": "pre_foo.FirstSuffix_post",
+ "IsText": "pre_foo.IsText_post",
+ "String": "pre_foo.String_post",
+ "Type": "pre_foo.Type_post",
+ "MainType": "pre_foo.MainType_post",
+ "SubType": "pre_foo.SubType_post",
+ })
+}
diff --git a/resources/postpub/postpub.go b/resources/postpub/postpub.go
new file mode 100644
index 000000000..400e00aa4
--- /dev/null
+++ b/resources/postpub/postpub.go
@@ -0,0 +1,181 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package postpub
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+type PostPublishedResource interface {
+ resource.ResourceTypeProvider
+ resource.ResourceLinksProvider
+ resource.ResourceMetaProvider
+ resource.ResourceParamsProvider
+ resource.ResourceDataProvider
+ resource.OriginProvider
+
+ MediaType() map[string]any
+}
+
+const (
+ PostProcessPrefix = "__h_pp_l1"
+
+ // The suffix has an '=' in it to prevent the minifier from removing any
+ // enclosing quotes around the attribute values.
+ // See issue #8884.
+ PostProcessSuffix = "__e="
+)
+
+func NewPostPublishResource(id int, r resource.Resource) PostPublishedResource {
+ return &PostPublishResource{
+ prefix: PostProcessPrefix + "_" + strconv.Itoa(id) + "_",
+ delegate: r,
+ }
+}
+
+// PostPublishResource holds a Resource to be transformed post publishing.
+type PostPublishResource struct {
+ prefix string
+ delegate resource.Resource
+}
+
+func (r *PostPublishResource) field(name string) string {
+ return r.prefix + name + PostProcessSuffix
+}
+
+func (r *PostPublishResource) Permalink() string {
+ return r.field("Permalink")
+}
+
+func (r *PostPublishResource) RelPermalink() string {
+ return r.field("RelPermalink")
+}
+
+func (r *PostPublishResource) Origin() resource.Resource {
+ return r.delegate
+}
+
+func (r *PostPublishResource) GetFieldString(pattern string) (string, bool) {
+ if r == nil {
+ panic("resource is nil")
+ }
+ prefixIdx := strings.Index(pattern, r.prefix)
+ if prefixIdx == -1 {
+ // Not a method on this resource.
+ return "", false
+ }
+
+ fieldAccessor := pattern[prefixIdx+len(r.prefix) : strings.Index(pattern, PostProcessSuffix)]
+
+ d := r.delegate
+ switch {
+ case fieldAccessor == "RelPermalink":
+ return d.RelPermalink(), true
+ case fieldAccessor == "Permalink":
+ return d.Permalink(), true
+ case fieldAccessor == "Name":
+ return d.Name(), true
+ case fieldAccessor == "Title":
+ return d.Title(), true
+ case fieldAccessor == "ResourceType":
+ return d.ResourceType(), true
+ case fieldAccessor == "Content":
+ content, err := d.(resource.ContentProvider).Content()
+ if err != nil {
+ return "", true
+ }
+ return cast.ToString(content), true
+ case strings.HasPrefix(fieldAccessor, "MediaType"):
+ return r.fieldToString(d.MediaType(), fieldAccessor), true
+ case fieldAccessor == "Data.Integrity":
+ return cast.ToString((d.Data().(map[string]any)["Integrity"])), true
+ default:
+ panic(fmt.Sprintf("unknown field accessor %q", fieldAccessor))
+ }
+}
+
+func (r *PostPublishResource) fieldToString(receiver any, path string) string {
+ fieldname := strings.Split(path, ".")[1]
+
+ receiverv := reflect.ValueOf(receiver)
+ switch receiverv.Kind() {
+ case reflect.Map:
+ v := receiverv.MapIndex(reflect.ValueOf(fieldname))
+ return cast.ToString(v.Interface())
+ default:
+ v := receiverv.FieldByName(fieldname)
+ if !v.IsValid() {
+ method := hreflect.GetMethodByName(receiverv, fieldname)
+ if method.IsValid() {
+ vals := method.Call(nil)
+ if len(vals) > 0 {
+ v = vals[0]
+ }
+
+ }
+ }
+
+ if v.IsValid() {
+ return cast.ToString(v.Interface())
+ }
+ return ""
+ }
+}
+
+func (r *PostPublishResource) Data() any {
+ m := map[string]any{
+ "Integrity": "",
+ }
+ insertFieldPlaceholders("Data", m, r.field)
+ return m
+}
+
+func (r *PostPublishResource) MediaType() map[string]any {
+ m := structToMapWithPlaceholders("MediaType", media.Type{}, r.field)
+ return m
+}
+
+func (r *PostPublishResource) ResourceType() string {
+ return r.field("ResourceType")
+}
+
+func (r *PostPublishResource) Name() string {
+ return r.field("Name")
+}
+
+func (r *PostPublishResource) Title() string {
+ return r.field("Title")
+}
+
+func (r *PostPublishResource) Params() maps.Params {
+ panic(r.fieldNotSupported("Params"))
+}
+
+func (r *PostPublishResource) Content() (any, error) {
+ return r.field("Content"), nil
+}
+
+func (r *PostPublishResource) fieldNotSupported(name string) string {
+ return fmt.Sprintf("method .%s is currently not supported in post-publish transformations.", name)
+}
diff --git a/resources/resource.go b/resources/resource.go
new file mode 100644
index 000000000..fd60fd4f6
--- /dev/null
+++ b/resources/resource.go
@@ -0,0 +1,709 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/resources/internal"
+
+ "github.com/gohugoio/hugo/common/herrors"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/source"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
+var (
+ _ resource.ContentResource = (*genericResource)(nil)
+ _ resource.ReadSeekCloserResource = (*genericResource)(nil)
+ _ resource.Resource = (*genericResource)(nil)
+ _ resource.Source = (*genericResource)(nil)
+ _ resource.Cloner = (*genericResource)(nil)
+ _ resource.ResourcesLanguageMerger = (*resource.Resources)(nil)
+ _ permalinker = (*genericResource)(nil)
+ _ resource.Identifier = (*genericResource)(nil)
+ _ fileInfo = (*genericResource)(nil)
+)
+
+type ResourceSourceDescriptor struct {
+ // TargetPaths is a callback to fetch paths relative to its owner.
+ TargetPaths func() page.TargetPaths
+
+ // Need one of these to load the resource content.
+ SourceFile source.File
+ OpenReadSeekCloser resource.OpenReadSeekCloser
+
+ FileInfo os.FileInfo
+
+ // If OpenReadSeekCloser is not set, we use this to open the file.
+ SourceFilename string
+
+ Fs afero.Fs
+
+ // Set when it's known up front, else it's resolved from the target filename.
+ MediaType media.Type
+
+ // The relative target filename without any language code.
+ RelTargetFilename string
+
+ // Any base paths prepended to the target path. This will also typically be the
+ // language code, but setting it here means that it should not have any effect on
+ // the permalink.
+ // This may be several values. In multihost mode we may publish the same resources to
+ // multiple targets.
+ TargetBasePaths []string
+
+ // Delay publishing until either Permalink or RelPermalink is called. Maybe never.
+ LazyPublish bool
+}
+
+func (r ResourceSourceDescriptor) Filename() string {
+ if r.SourceFile != nil {
+ return r.SourceFile.Filename()
+ }
+ return r.SourceFilename
+}
+
+type ResourceTransformer interface {
+ resource.Resource
+ Transformer
+}
+
+type Transformer interface {
+ Transform(...ResourceTransformation) (ResourceTransformer, error)
+}
+
+func NewFeatureNotAvailableTransformer(key string, elements ...any) ResourceTransformation {
+ return transformerNotAvailable{
+ key: internal.NewResourceTransformationKey(key, elements...),
+ }
+}
+
+type transformerNotAvailable struct {
+ key internal.ResourceTransformationKey
+}
+
+func (t transformerNotAvailable) Transform(ctx *ResourceTransformationCtx) error {
+ return herrors.ErrFeatureNotAvailable
+}
+
+func (t transformerNotAvailable) Key() internal.ResourceTransformationKey {
+ return t.key
+}
+
+// resourceCopier is for internal use.
+type resourceCopier interface {
+ cloneTo(targetPath string) resource.Resource
+}
+
+// Copy copies r to the targetPath given.
+func Copy(r resource.Resource, targetPath string) resource.Resource {
+ if r.Err() != nil {
+ panic(fmt.Sprintf("Resource has an .Err: %s", r.Err()))
+ }
+ return r.(resourceCopier).cloneTo(targetPath)
+}
+
+type baseResourceResource interface {
+ resource.Cloner
+ resourceCopier
+ resource.ContentProvider
+ resource.Resource
+ resource.Identifier
+}
+
+type baseResourceInternal interface {
+ resource.Source
+
+ fileInfo
+ metaAssigner
+ targetPather
+
+ ReadSeekCloser() (hugio.ReadSeekCloser, error)
+
+ // Internal
+ cloneWithUpdates(*transformationUpdate) (baseResource, error)
+ tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser
+
+ specProvider
+ getResourcePaths() *resourcePathDescriptor
+ getTargetFilenames() []string
+ openDestinationsForWriting() (io.WriteCloser, error)
+ openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error)
+
+ relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string
+}
+
+type specProvider interface {
+ getSpec() *Spec
+}
+
+type baseResource interface {
+ baseResourceResource
+ baseResourceInternal
+}
+
+type commonResource struct {
+}
+
+// Slice is for internal use.
+// It is used by the template functions. See collections.Slice.
+func (commonResource) Slice(in any) (any, error) {
+ switch items := in.(type) {
+ case resource.Resources:
+ return items, nil
+ case []any:
+ groups := make(resource.Resources, len(items))
+ for i, v := range items {
+ g, ok := v.(resource.Resource)
+ if !ok {
+ return nil, fmt.Errorf("type %T is not a Resource", v)
+ }
+ groups[i] = g
+ {
+ }
+ }
+ return groups, nil
+ default:
+ return nil, fmt.Errorf("invalid slice type %T", items)
+ }
+}
+
+type dirFile struct {
+ // This is the directory component with Unix-style slashes.
+ dir string
+ // This is the file component.
+ file string
+}
+
+func (d dirFile) path() string {
+ return path.Join(d.dir, d.file)
+}
+
+type fileInfo interface {
+ getSourceFilename() string
+ setSourceFilename(string)
+ setSourceFs(afero.Fs)
+ getFileInfo() hugofs.FileMetaInfo
+ hash() (string, error)
+ size() int
+}
+
+// genericResource represents a generic linkable resource.
+type genericResource struct {
+ *resourcePathDescriptor
+ *resourceFileInfo
+ *resourceContent
+
+ spec *Spec
+
+ title string
+ name string
+ params map[string]any
+ data map[string]any
+
+ resourceType string
+ mediaType media.Type
+}
+
+func (l *genericResource) Clone() resource.Resource {
+ return l.clone()
+}
+
+func (l *genericResource) cloneTo(targetPath string) resource.Resource {
+ c := l.clone()
+
+ targetPath = helpers.ToSlashTrimLeading(targetPath)
+ dir, file := path.Split(targetPath)
+
+ c.resourcePathDescriptor = &resourcePathDescriptor{
+ relTargetDirFile: dirFile{dir: dir, file: file},
+ }
+
+ return c
+
+}
+
+func (l *genericResource) Content() (any, error) {
+ if err := l.initContent(); err != nil {
+ return nil, err
+ }
+
+ return l.content, nil
+}
+
+func (r *genericResource) Err() resource.ResourceError {
+ return nil
+}
+
+func (l *genericResource) Data() any {
+ return l.data
+}
+
+func (l *genericResource) Key() string {
+ if l.spec.BasePath == "" {
+ return l.RelPermalink()
+ }
+ return strings.TrimPrefix(l.RelPermalink(), l.spec.BasePath)
+}
+
+func (l *genericResource) MediaType() media.Type {
+ return l.mediaType
+}
+
+func (l *genericResource) setMediaType(mediaType media.Type) {
+ l.mediaType = mediaType
+}
+
+func (l *genericResource) Name() string {
+ return l.name
+}
+
+func (l *genericResource) Params() maps.Params {
+ return l.params
+}
+
+func (l *genericResource) Permalink() string {
+ return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetDirFile.path(), true), l.spec.BaseURL.HostURL())
+}
+
+func (l *genericResource) Publish() error {
+ var err error
+ l.publishInit.Do(func() {
+ var fr hugio.ReadSeekCloser
+ fr, err = l.ReadSeekCloser()
+ if err != nil {
+ return
+ }
+ defer fr.Close()
+
+ var fw io.WriteCloser
+ fw, err = helpers.OpenFilesForWriting(l.spec.BaseFs.PublishFs, l.getTargetFilenames()...)
+ if err != nil {
+ return
+ }
+ defer fw.Close()
+
+ _, err = io.Copy(fw, fr)
+ })
+
+ return err
+}
+
+func (l *genericResource) RelPermalink() string {
+ return l.relPermalinkFor(l.relTargetDirFile.path())
+}
+
+func (l *genericResource) ResourceType() string {
+ return l.resourceType
+}
+
+func (l *genericResource) String() string {
+ return fmt.Sprintf("Resource(%s: %s)", l.resourceType, l.name)
+}
+
+// Path is stored with Unix style slashes.
+func (l *genericResource) TargetPath() string {
+ return l.relTargetDirFile.path()
+}
+
+func (l *genericResource) Title() string {
+ return l.title
+}
+
+func (l *genericResource) createBasePath(rel string, isURL bool) string {
+ if l.targetPathBuilder == nil {
+ return rel
+ }
+ tp := l.targetPathBuilder()
+
+ if isURL {
+ return path.Join(tp.SubResourceBaseLink, rel)
+ }
+
+ // TODO(bep) path
+ return path.Join(filepath.ToSlash(tp.SubResourceBaseTarget), rel)
+}
+
+func (l *genericResource) initContent() error {
+ var err error
+ l.contentInit.Do(func() {
+ var r hugio.ReadSeekCloser
+ r, err = l.ReadSeekCloser()
+ if err != nil {
+ return
+ }
+ defer r.Close()
+
+ var b []byte
+ b, err = ioutil.ReadAll(r)
+ if err != nil {
+ return
+ }
+
+ l.content = string(b)
+ })
+
+ return err
+}
+
+func (l *genericResource) setName(name string) {
+ l.name = name
+}
+
+func (l *genericResource) getResourcePaths() *resourcePathDescriptor {
+ return l.resourcePathDescriptor
+}
+
+func (l *genericResource) getSpec() *Spec {
+ return l.spec
+}
+
+func (l *genericResource) getTargetFilenames() []string {
+ paths := l.relTargetPaths()
+ for i, p := range paths {
+ paths[i] = filepath.Clean(p)
+ }
+ return paths
+}
+
+func (l *genericResource) setTitle(title string) {
+ l.title = title
+}
+
+func (r *genericResource) tryTransformedFileCache(key string, u *transformationUpdate) io.ReadCloser {
+ fi, f, meta, found := r.spec.ResourceCache.getFromFile(key)
+ if !found {
+ return nil
+ }
+ u.sourceFilename = &fi.Name
+ mt, _ := r.spec.MediaTypes.GetByType(meta.MediaTypeV)
+ u.mediaType = mt
+ u.data = meta.MetaData
+ u.targetPath = meta.Target
+ return f
+}
+
+func (r *genericResource) mergeData(in map[string]any) {
+ if len(in) == 0 {
+ return
+ }
+ if r.data == nil {
+ r.data = make(map[string]any)
+ }
+ for k, v := range in {
+ if _, found := r.data[k]; !found {
+ r.data[k] = v
+ }
+ }
+}
+
+func (rc *genericResource) cloneWithUpdates(u *transformationUpdate) (baseResource, error) {
+ r := rc.clone()
+
+ if u.content != nil {
+ r.contentInit.Do(func() {
+ r.content = *u.content
+ r.openReadSeekerCloser = func() (hugio.ReadSeekCloser, error) {
+ return hugio.NewReadSeekerNoOpCloserFromString(r.content), nil
+ }
+ })
+ }
+
+ r.mediaType = u.mediaType
+
+ if u.sourceFilename != nil {
+ r.setSourceFilename(*u.sourceFilename)
+ }
+
+ if u.sourceFs != nil {
+ r.setSourceFs(u.sourceFs)
+ }
+
+ if u.targetPath == "" {
+ return nil, errors.New("missing targetPath")
+ }
+
+ fpath, fname := path.Split(u.targetPath)
+ r.resourcePathDescriptor.relTargetDirFile = dirFile{dir: fpath, file: fname}
+
+ r.mergeData(u.data)
+
+ return r, nil
+}
+
+func (l genericResource) clone() *genericResource {
+ gi := *l.resourceFileInfo
+ rp := *l.resourcePathDescriptor
+ l.resourceFileInfo = &gi
+ l.resourcePathDescriptor = &rp
+ l.resourceContent = &resourceContent{}
+ return &l
+}
+
+// returns an opened file or nil if nothing to write (it may already be published).
+func (l *genericResource) openDestinationsForWriting() (w io.WriteCloser, err error) {
+ l.publishInit.Do(func() {
+ targetFilenames := l.getTargetFilenames()
+ var changedFilenames []string
+
+ // Fast path:
+ // This is a processed version of the original;
+ // check if it already exists at the destination.
+ for _, targetFilename := range targetFilenames {
+ if _, err := l.getSpec().BaseFs.PublishFs.Stat(targetFilename); err == nil {
+ continue
+ }
+
+ changedFilenames = append(changedFilenames, targetFilename)
+ }
+
+ if len(changedFilenames) == 0 {
+ return
+ }
+
+ w, err = helpers.OpenFilesForWriting(l.getSpec().BaseFs.PublishFs, changedFilenames...)
+ })
+
+ return
+}
+
+func (r *genericResource) openPublishFileForWriting(relTargetPath string) (io.WriteCloser, error) {
+ return helpers.OpenFilesForWriting(r.spec.BaseFs.PublishFs, r.relTargetPathsFor(relTargetPath)...)
+}
+
+func (l *genericResource) permalinkFor(target string) string {
+ return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(target, true), l.spec.BaseURL.HostURL())
+}
+
+func (l *genericResource) relPermalinkFor(target string) string {
+ return l.relPermalinkForRel(target, false)
+}
+
+func (l *genericResource) relPermalinkForRel(rel string, isAbs bool) string {
+ return l.spec.PathSpec.URLizeFilename(l.relTargetPathForRel(rel, false, isAbs, true))
+}
+
+func (l *genericResource) relTargetPathForRel(rel string, addBaseTargetPath, isAbs, isURL bool) string {
+ if addBaseTargetPath && len(l.baseTargetPathDirs) > 1 {
+ panic("multiple baseTargetPathDirs")
+ }
+ var basePath string
+ if addBaseTargetPath && len(l.baseTargetPathDirs) > 0 {
+ basePath = l.baseTargetPathDirs[0]
+ }
+
+ return l.relTargetPathForRelAndBasePath(rel, basePath, isAbs, isURL)
+}
+
+func (l *genericResource) relTargetPathForRelAndBasePath(rel, basePath string, isAbs, isURL bool) string {
+ rel = l.createBasePath(rel, isURL)
+
+ if basePath != "" {
+ rel = path.Join(basePath, rel)
+ }
+
+ if l.baseOffset != "" {
+ rel = path.Join(l.baseOffset, rel)
+ }
+
+ if isURL {
+ bp := l.spec.PathSpec.GetBasePath(!isAbs)
+ if bp != "" {
+ rel = path.Join(bp, rel)
+ }
+ }
+
+ if len(rel) == 0 || rel[0] != '/' {
+ rel = "/" + rel
+ }
+
+ return rel
+}
+
+func (l *genericResource) relTargetPaths() []string {
+ return l.relTargetPathsForRel(l.TargetPath())
+}
+
+func (l *genericResource) relTargetPathsFor(target string) []string {
+ return l.relTargetPathsForRel(target)
+}
+
+func (l *genericResource) relTargetPathsForRel(rel string) []string {
+ if len(l.baseTargetPathDirs) == 0 {
+ return []string{l.relTargetPathForRelAndBasePath(rel, "", false, false)}
+ }
+
+ targetPaths := make([]string, len(l.baseTargetPathDirs))
+ for i, dir := range l.baseTargetPathDirs {
+ targetPaths[i] = l.relTargetPathForRelAndBasePath(rel, dir, false, false)
+ }
+ return targetPaths
+}
+
+func (l *genericResource) updateParams(params map[string]any) {
+ if l.params == nil {
+ l.params = params
+ return
+ }
+
+ // Sets the params not already set
+ for k, v := range params {
+ if _, found := l.params[k]; !found {
+ l.params[k] = v
+ }
+ }
+}
+
+type targetPather interface {
+ TargetPath() string
+}
+
+type permalinker interface {
+ targetPather
+ permalinkFor(target string) string
+ relPermalinkFor(target string) string
+ relTargetPaths() []string
+ relTargetPathsFor(target string) []string
+}
+
+type resourceContent struct {
+ content string
+ contentInit sync.Once
+
+ publishInit sync.Once
+}
+
+type resourceFileInfo struct {
+ // Will be set if this resource is backed by something other than a file.
+ openReadSeekerCloser resource.OpenReadSeekCloser
+
+ // This may be set to tell us to look in another filesystem for this resource.
+ // We, by default, use the sourceFs filesystem in the spec below.
+ sourceFs afero.Fs
+
+ // Absolute filename to the source, including any content folder path.
+ // Note that this is absolute in relation to the filesystem it is stored in.
+ // It can be a base path filesystem, and then this filename will not match
+ // the path to the file on the real filesystem.
+ sourceFilename string
+
+ fi hugofs.FileMetaInfo
+
+ // A hash of the source content. Is only calculated in caching situations.
+ h *resourceHash
+}
+
+func (fi *resourceFileInfo) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
+ if fi.openReadSeekerCloser != nil {
+ return fi.openReadSeekerCloser()
+ }
+
+ f, err := fi.getSourceFs().Open(fi.getSourceFilename())
+ if err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+func (fi *resourceFileInfo) getFileInfo() hugofs.FileMetaInfo {
+ return fi.fi
+}
+
+func (fi *resourceFileInfo) getSourceFilename() string {
+ return fi.sourceFilename
+}
+
+func (fi *resourceFileInfo) setSourceFilename(s string) {
+ // Make sure it's always loaded by sourceFilename.
+ fi.openReadSeekerCloser = nil
+ fi.sourceFilename = s
+}
+
+func (fi *resourceFileInfo) getSourceFs() afero.Fs {
+ return fi.sourceFs
+}
+
+func (fi *resourceFileInfo) setSourceFs(fs afero.Fs) {
+ fi.sourceFs = fs
+}
+
+func (fi *resourceFileInfo) hash() (string, error) {
+ var err error
+ fi.h.init.Do(func() {
+ var hash string
+ var f hugio.ReadSeekCloser
+ f, err = fi.ReadSeekCloser()
+ if err != nil {
+ err = fmt.Errorf("failed to open source file: %w", err)
+ return
+ }
+ defer f.Close()
+
+ hash, err = helpers.MD5FromFileFast(f)
+ if err != nil {
+ return
+ }
+ fi.h.value = hash
+ })
+
+ return fi.h.value, err
+}
+
+func (fi *resourceFileInfo) size() int {
+ if fi.fi == nil {
+ return 0
+ }
+
+ return int(fi.fi.Size())
+}
+
+type resourceHash struct {
+ value string
+ init sync.Once
+}
+
+type resourcePathDescriptor struct {
+ // The relative target directory and filename.
+ relTargetDirFile dirFile
+
+ // Callback used to construct a target path relative to its owner.
+ targetPathBuilder func() page.TargetPaths
+
+ // This will normally be the same as above, but this will only apply to publishing
+ // of resources. It may be multiple values when in multihost mode.
+ baseTargetPathDirs []string
+
+ // baseOffset is set when the output format's path has an offset, e.g. for AMP.
+ baseOffset string
+}
diff --git a/resources/resource/dates.go b/resources/resource/dates.go
new file mode 100644
index 000000000..6d19ca7b9
--- /dev/null
+++ b/resources/resource/dates.go
@@ -0,0 +1,93 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+ "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+)
+
// Compile-time check: Dates implements Dated.
var _ Dated = Dates{}

// Dated wraps a "dated resource". These are the 4 dates that make up
// the date logic in Hugo.
type Dated interface {
	// Date returns the date of the resource.
	Date() time.Time

	// Lastmod returns the last modification date of the resource.
	Lastmod() time.Time

	// PublishDate returns the publish date of the resource.
	PublishDate() time.Time

	// ExpiryDate returns the expiration date of the resource.
	ExpiryDate() time.Time
}

// Dates holds the 4 Hugo dates. The fields carry an "F" prefix so they
// do not collide with the accessor methods that implement Dated.
type Dates struct {
	FDate        time.Time
	FLastmod     time.Time
	FPublishDate time.Time
	FExpiryDate  time.Time
}
+
+func (d *Dates) UpdateDateAndLastmodIfAfter(in Dated) {
+ if in.Date().After(d.Date()) {
+ d.FDate = in.Date()
+ }
+ if in.Lastmod().After(d.Lastmod()) {
+ d.FLastmod = in.Lastmod()
+ }
+}
+
+// IsFuture returns whether the argument represents the future.
+func IsFuture(d Dated) bool {
+ if d.PublishDate().IsZero() {
+ return false
+ }
+
+ return d.PublishDate().After(htime.Now())
+}
+
+// IsExpired returns whether the argument is expired.
+func IsExpired(d Dated) bool {
+ if d.ExpiryDate().IsZero() {
+ return false
+ }
+ return d.ExpiryDate().Before(htime.Now())
+}
+
+// IsZeroDates returns true if all of the dates are zero.
+func IsZeroDates(d Dated) bool {
+ return d.Date().IsZero() && d.Lastmod().IsZero() && d.ExpiryDate().IsZero() && d.PublishDate().IsZero()
+}
+
// Date implements Dated.
func (p Dates) Date() time.Time {
	return p.FDate
}

// Lastmod implements Dated.
func (p Dates) Lastmod() time.Time {
	return p.FLastmod
}

// PublishDate implements Dated.
func (p Dates) PublishDate() time.Time {
	return p.FPublishDate
}

// ExpiryDate implements Dated.
func (p Dates) ExpiryDate() time.Time {
	return p.FExpiryDate
}
diff --git a/resources/resource/params.go b/resources/resource/params.go
new file mode 100644
index 000000000..d88424e9d
--- /dev/null
+++ b/resources/resource/params.go
@@ -0,0 +1,33 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/spf13/cast"
+)
+
+func Param(r ResourceParamsProvider, fallback maps.Params, key any) (any, error) {
+ keyStr, err := cast.ToStringE(key)
+ if err != nil {
+ return nil, err
+ }
+
+ if fallback == nil {
+ return maps.GetNestedParam(keyStr, ".", r.Params())
+ }
+
+ return maps.GetNestedParam(keyStr, ".", r.Params(), fallback)
+}
diff --git a/resources/resource/resource_helpers.go b/resources/resource/resource_helpers.go
new file mode 100644
index 000000000..29f783ce3
--- /dev/null
+++ b/resources/resource/resource_helpers.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/spf13/cast"
+)
+
// GetParam will return the param with the given key from the Resource,
// nil if not found.
func GetParam(r Resource, key string) any {
	return getParam(r, key, false)
}

// GetParamToLower is the same as GetParam but it will lower case any string
// result, including string slices.
func GetParamToLower(r Resource, key string) any {
	return getParam(r, key, true)
}

// getParam looks up key (lowercased) in r's params and normalizes the
// value: bools, strings, time.Time, string slices and maps pass through;
// any signed integer width becomes int; any float width becomes float64.
// Unknown types yield nil.
//
// NOTE(review): unlike Param in params.go, this is a flat (non-nested)
// lookup that lowercases the key — confirm callers expect both.
func getParam(r Resource, key string, stringToLower bool) any {
	v := r.Params()[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	switch val := v.(type) {
	case bool:
		return val
	case string:
		if stringToLower {
			return strings.ToLower(val)
		}
		return val
	case int64, int32, int16, int8, int:
		// Normalize all signed integer widths to int.
		return cast.ToInt(v)
	case float64, float32:
		// Normalize both float widths to float64.
		return cast.ToFloat64(v)
	case time.Time:
		return val
	case []string:
		if stringToLower {
			return helpers.SliceToLower(val)
		}
		return v
	case map[string]any:
		return v
	case map[any]any:
		return v
	}

	return nil
}
diff --git a/resources/resource/resources.go b/resources/resource/resources.go
new file mode 100644
index 000000000..a888d6fb4
--- /dev/null
+++ b/resources/resource/resources.go
@@ -0,0 +1,198 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+ "github.com/spf13/cast"
+)
+
// Compile-time check: Resources implements ResourceFinder.
var _ ResourceFinder = (*Resources)(nil)

// Resources represents a slice of resources, which can be a mix of different types.
// I.e. both pages and images etc.
type Resources []Resource

// ResourcesConverter converts a given slice of Resource objects to Resources.
type ResourcesConverter interface {
	// For internal use.
	ToResources() Resources
}
+
+// ByType returns resources of a given resource type (e.g. "image").
+func (r Resources) ByType(typ any) Resources {
+ tpstr, err := cast.ToStringE(typ)
+ if err != nil {
+ panic(err)
+ }
+ var filtered Resources
+
+ for _, resource := range r {
+ if resource.ResourceType() == tpstr {
+ filtered = append(filtered, resource)
+ }
+ }
+ return filtered
+}
+
+// Get locates the name given in Resources.
+// The search is case insensitive.
+func (r Resources) Get(name any) Resource {
+ namestr, err := cast.ToStringE(name)
+ if err != nil {
+ panic(err)
+ }
+ namestr = strings.ToLower(namestr)
+ for _, resource := range r {
+ if strings.EqualFold(namestr, resource.Name()) {
+ return resource
+ }
+ }
+ return nil
+}
+
+// GetMatch finds the first Resource matching the given pattern, or nil if none found.
+// See Match for a more complete explanation about the rules used.
+func (r Resources) GetMatch(pattern any) Resource {
+ patternstr, err := cast.ToStringE(pattern)
+ if err != nil {
+ panic(err)
+ }
+
+ g, err := glob.GetGlob(patternstr)
+ if err != nil {
+ panic(err)
+ }
+
+ for _, resource := range r {
+ if g.Match(strings.ToLower(resource.Name())) {
+ return resource
+ }
+ }
+
+ return nil
+}
+
+// Match gets all resources matching the given base filename prefix, e.g
+// "*.png" will match all png files. The "*" does not match path delimiters (/),
+// so if you organize your resources in sub-folders, you need to be explicit about it, e.g.:
+// "images/*.png". To match any PNG image anywhere in the bundle you can do "**.png", and
+// to match all PNG images below the images folder, use "images/**.jpg".
+// The matching is case insensitive.
+// Match matches by using the value of Resource.Name, which, by default, is a filename with
+// path relative to the bundle root with Unix style slashes (/) and no leading slash, e.g. "images/logo.png".
+// See https://github.com/gobwas/glob for the full rules set.
+func (r Resources) Match(pattern any) Resources {
+ patternstr, err := cast.ToStringE(pattern)
+ if err != nil {
+ panic(err)
+ }
+
+ g, err := glob.GetGlob(patternstr)
+ if err != nil {
+ panic(err)
+ }
+
+ var matches Resources
+ for _, resource := range r {
+ if g.Match(strings.ToLower(resource.Name())) {
+ matches = append(matches, resource)
+ }
+ }
+ return matches
+}
+
+type translatedResource interface {
+ TranslationKey() string
+}
+
+// MergeByLanguage adds missing translations in r1 from r2.
+func (r Resources) MergeByLanguage(r2 Resources) Resources {
+ result := append(Resources(nil), r...)
+ m := make(map[string]bool)
+ for _, rr := range r {
+ if translated, ok := rr.(translatedResource); ok {
+ m[translated.TranslationKey()] = true
+ }
+ }
+
+ for _, rr := range r2 {
+ if translated, ok := rr.(translatedResource); ok {
+ if _, found := m[translated.TranslationKey()]; !found {
+ result = append(result, rr)
+ }
+ }
+ }
+ return result
+}
+
+// MergeByLanguageInterface is the generic version of MergeByLanguage. It
+// is here just so it can be called from the tpl package.
+func (r Resources) MergeByLanguageInterface(in any) (any, error) {
+ r2, ok := in.(Resources)
+ if !ok {
+ return nil, fmt.Errorf("%T cannot be merged by language", in)
+ }
+ return r.MergeByLanguage(r2), nil
+}
+
// Source is an internal interface and not meant for use in the templates. It
// may change without notice.
type Source interface {
	Publish() error
}

// ResourceFinder provides methods to find Resources.
// Note that GetRemote (as found in resources.GetRemote) is
// not covered by this interface, as this is only available as a global template function.
type ResourceFinder interface {

	// Get locates the Resource with the given name in the current context (e.g. in .Page.Resources).
	//
	// It returns nil if no Resource could be found, panics if name is invalid.
	Get(name any) Resource

	// GetMatch finds the first Resource matching the given pattern, or nil if none found.
	//
	// See Match for a more complete explanation about the rules used.
	//
	// It returns nil if no Resource could be found, panics if pattern is invalid.
	GetMatch(pattern any) Resource

	// Match gets all resources matching the given base path prefix, e.g
	// "*.png" will match all png files. The "*" does not match path delimiters (/),
	// so if you organize your resources in sub-folders, you need to be explicit about it, e.g.:
	// "images/*.png". To match any PNG image anywhere in the bundle you can do "**.png", and
	// to match all PNG images below the images folder, use "images/**.jpg".
	//
	// The matching is case insensitive.
	//
	// Match matches by using a relative path with Unix style slashes (/) and no
	// leading slash, e.g. "images/logo.png".
	//
	// See https://github.com/gobwas/glob for the full rules set.
	//
	// It returns nil if no Resources could be found, panics if pattern is invalid.
	Match(pattern any) Resources

	// ByType returns resources of a given resource type (e.g. "image").
	// It returns nil if no Resources could be found, panics if typ is invalid.
	ByType(typ any) Resources
}
diff --git a/resources/resource/resourcetypes.go b/resources/resource/resourcetypes.go
new file mode 100644
index 000000000..4ba95c170
--- /dev/null
+++ b/resources/resource/resourcetypes.go
@@ -0,0 +1,224 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resource
+
+import (
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/media"
+
+ "github.com/gohugoio/hugo/common/hugio"
+)
+
var (
	// Compile-time interface checks.
	_ ResourceDataProvider = (*resourceError)(nil)
	_ ResourceError        = (*resourceError)(nil)
)

// Cloner is for internal use.
type Cloner interface {
	Clone() Resource
}

// OriginProvider provides the original Resource if this is wrapped.
// This is an internal Hugo interface and not meant for use in the templates.
type OriginProvider interface {
	Origin() Resource
	GetFieldString(pattern string) (string, bool)
}

// NewResourceError creates a new ResourceError wrapping err and carrying
// the given error data.
func NewResourceError(err error, data any) ResourceError {
	return &resourceError{
		error: err,
		data:  data,
	}
}

// resourceError implements ResourceError by embedding the underlying
// error and carrying arbitrary error data.
type resourceError struct {
	error
	data any
}

// Data returns the data associated with this error.
func (e *resourceError) Data() any {
	return e.data
}

// ResourceError is the error returned from .Err in Resource in error situations.
type ResourceError interface {
	error
	ResourceDataProvider
}

// ErrProvider provides an Err.
type ErrProvider interface {
	Err() ResourceError
}

// Resource represents a linkable resource, i.e. a content page, image etc.
type Resource interface {
	ResourceTypeProvider
	MediaTypeProvider
	ResourceLinksProvider
	ResourceMetaProvider
	ResourceParamsProvider
	ResourceDataProvider
	ErrProvider
}

type ResourceTypeProvider interface {
	// ResourceType is the resource type. For most file types, this is the main
	// part of the MIME type, e.g. "image", "application", "text" etc.
	// For content pages, this value is "page".
	ResourceType() string
}

// ResourceTypesProvider groups the resource type and media type providers.
type ResourceTypesProvider interface {
	ResourceTypeProvider
	MediaTypeProvider
}

type MediaTypeProvider interface {
	// MediaType is this resource's MIME type.
	MediaType() media.Type
}

type ResourceLinksProvider interface {
	// Permalink represents the absolute link to this resource.
	Permalink() string

	// RelPermalink represents the host relative link to this resource.
	RelPermalink() string
}

type ResourceMetaProvider interface {
	// Name is the logical name of this resource. This can be set in the front matter
	// metadata for this resource. If not set, Hugo will assign a value.
	// This will in most cases be the base filename.
	// So, for the image "/some/path/sunset.jpg" this will be "sunset.jpg".
	// The value returned by this method will be used in the GetByPrefix and ByPrefix methods
	// on Resources.
	Name() string

	// Title returns the title if set in front matter. For content pages, this will be the expected value.
	Title() string
}

type ResourceParamsProvider interface {
	// Params set in front matter for this resource.
	Params() maps.Params
}

type ResourceDataProvider interface {
	// Resource specific data set by Hugo.
	// One example would be .Data.Digest for fingerprinted resources.
	Data() any
}

// ResourcesLanguageMerger describes an interface for merging resources from a
// different language.
type ResourcesLanguageMerger interface {
	MergeByLanguage(other Resources) Resources

	// Needed for integration with the tpl package.
	// For internal use.
	MergeByLanguageInterface(other any) (any, error)
}

// Identifier identifies a resource.
type Identifier interface {
	Key() string
}

// ContentResource represents a Resource that provides a way to get to its content.
// Most Resource types in Hugo implement this interface, including Page.
type ContentResource interface {
	MediaType() media.Type
	ContentProvider
}

// ContentProvider provides Content.
// This should be used with care, as it will read the file content into memory, but it
// should be cached as effectively as possible by the implementation.
type ContentProvider interface {
	// Content returns this resource's content. It will be equivalent to reading the content
	// that RelPermalink points to in the published folder.
	// The return type will be contextual, and should be what you would expect:
	// * Page: template.HTML
	// * JSON: String
	// * Etc.
	Content() (any, error)
}

// OpenReadSeekCloser allows setting some other way (than reading from a filesystem)
// to open or create a ReadSeekCloser.
type OpenReadSeekCloser func() (hugio.ReadSeekCloser, error)

// ReadSeekCloserResource is a Resource that supports loading its content.
type ReadSeekCloserResource interface {
	MediaType() media.Type
	hugio.ReadSeekCloserProvider
}

// LengthProvider is a Resource that provides a length
// (typically the length of the content).
type LengthProvider interface {
	Len() int
}

// LanguageProvider is a Resource in a language.
type LanguageProvider interface {
	Language() *langs.Language
}

// TranslationKeyProvider connects translations of the same Resource.
type TranslationKeyProvider interface {
	TranslationKey() string
}

// UnmarshableResource represents a Resource that can be unmarshaled to some other format.
type UnmarshableResource interface {
	ReadSeekCloserResource
	Identifier
}

// resourceTypesHolder is a simple value implementation of ResourceTypesProvider.
type resourceTypesHolder struct {
	mediaType    media.Type
	resourceType string
}

// MediaType implements MediaTypeProvider.
func (r resourceTypesHolder) MediaType() media.Type {
	return r.mediaType
}

// ResourceType implements ResourceTypeProvider.
func (r resourceTypesHolder) ResourceType() string {
	return r.resourceType
}

// NewResourceTypesProvider returns a fixed-value ResourceTypesProvider.
func NewResourceTypesProvider(mediaType media.Type, resourceType string) ResourceTypesProvider {
	return resourceTypesHolder{mediaType: mediaType, resourceType: resourceType}
}

// languageHolder is a simple value implementation of LanguageProvider.
type languageHolder struct {
	lang *langs.Language
}

// Language implements LanguageProvider.
func (l languageHolder) Language() *langs.Language {
	return l.lang
}

// NewLanguageProvider returns a fixed-value LanguageProvider.
func NewLanguageProvider(lang *langs.Language) LanguageProvider {
	return languageHolder{lang: lang}
}
diff --git a/resources/resource_cache.go b/resources/resource_cache.go
new file mode 100644
index 000000000..52a48871e
--- /dev/null
+++ b/resources/resource_cache.go
@@ -0,0 +1,305 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "encoding/json"
+ "io"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+
+ "github.com/BurntSushi/locker"
+)
+
const (
	// CACHE_CLEAR_ALL is the pseudo partition that, when passed to
	// DeletePartitions, empties the whole cache.
	CACHE_CLEAR_ALL = "clear_all"
	// CACHE_OTHER is the fallback partition for keys that resolve to no
	// other partition (e.g. filenames without extension).
	// NOTE(review): ALL_CAPS constant names are non-idiomatic Go, but both
	// constants are exported and referenced elsewhere, so they are kept.
	CACHE_OTHER = "other"
)

// ResourceCache caches created/transformed resources, keyed by a
// partitioned, normalized path.
type ResourceCache struct {
	rs *Spec

	// Embedded mutex guarding the fields below.
	sync.RWMutex

	// Either resource.Resource or resource.Resources.
	cache map[string]any

	// Persistent cache for transformed resource metadata and content.
	fileCache *filecache.Cache

	// Provides named resource locks.
	nlocker *locker.Locker
}
+
+// ResourceCacheKey converts the filename into the format used in the resource
+// cache.
+func ResourceCacheKey(filename string) string {
+ filename = filepath.ToSlash(filename)
+ return path.Join(resourceKeyPartition(filename), filename)
+}
+
+func resourceKeyPartition(filename string) string {
+ ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(filename)), ".")
+ if ext == "" {
+ ext = CACHE_OTHER
+ }
+ return ext
+}
+
// extAliasKeywords maps a file extension to closely related extensions
// that should share cache invalidation (e.g. editing a .sass file must
// also invalidate .scss partitions and vice versa).
var extAliasKeywords = map[string][]string{
	"sass": {"scss"},
	"scss": {"sass"},
}
+
+// ResourceKeyPartitions resolves a ordered slice of partitions that is
+// used to do resource cache invalidations.
+//
+// We use the first directory path element and the extension, so:
+// a/b.json => "a", "json"
+// b.json => "json"
+//
+// For some of the extensions we will also map to closely related types,
+// e.g. "scss" will also return "sass".
+//
+func ResourceKeyPartitions(filename string) []string {
+ var partitions []string
+ filename = glob.NormalizePath(filename)
+ dir, name := path.Split(filename)
+ ext := strings.TrimPrefix(path.Ext(filepath.ToSlash(name)), ".")
+
+ if dir != "" {
+ partitions = append(partitions, strings.Split(dir, "/")[0])
+ }
+
+ if ext != "" {
+ partitions = append(partitions, ext)
+ }
+
+ if aliases, found := extAliasKeywords[ext]; found {
+ partitions = append(partitions, aliases...)
+ }
+
+ if len(partitions) == 0 {
+ partitions = []string{CACHE_OTHER}
+ }
+
+ return helpers.UniqueStringsSorted(partitions)
+}
+
// ResourceKeyContainsAny reports whether any "/"-separated element of
// key equals any of the given partitions.
//
// This is used for resource cache invalidation.
func ResourceKeyContainsAny(key string, partitions []string) bool {
	for _, part := range strings.Split(key, "/") {
		for _, p := range partitions {
			if part == p {
				return true
			}
		}
	}
	return false
}
+
// newResourceCache creates an empty ResourceCache backed by the spec's
// assets file cache.
func newResourceCache(rs *Spec) *ResourceCache {
	return &ResourceCache{
		rs:        rs,
		fileCache: rs.FileCaches.AssetsCache(),
		cache:     make(map[string]any),
		nlocker:   locker.NewLocker(),
	}
}

// clear drops the entire in-memory cache and all named locks.
func (c *ResourceCache) clear() {
	c.Lock()
	defer c.Unlock()

	c.cache = make(map[string]any)
	c.nlocker = locker.NewLocker()
}

// Contains reports whether key is present in the in-memory cache.
func (c *ResourceCache) Contains(key string) bool {
	key = c.cleanKey(filepath.ToSlash(key))
	_, found := c.get(key)
	return found
}

// cleanKey normalizes a cache key: lowercased, path-cleaned and with any
// leading slash removed.
func (c *ResourceCache) cleanKey(key string) string {
	return strings.TrimPrefix(path.Clean(strings.ToLower(key)), "/")
}

// get returns the cached value for key, guarded by the read lock.
func (c *ResourceCache) get(key string) (any, bool) {
	c.RLock()
	defer c.RUnlock()
	r, found := c.cache[key]
	return r, found
}
+
// GetOrCreate returns the cached Resource for key, creating and caching
// it with f on a miss. Concurrent callers for the same key create only once.
func (c *ResourceCache) GetOrCreate(key string, f func() (resource.Resource, error)) (resource.Resource, error) {
	r, err := c.getOrCreate(key, func() (any, error) { return f() })
	if r == nil || err != nil {
		return nil, err
	}
	return r.(resource.Resource), nil
}

// GetOrCreateResources is GetOrCreate for a Resources slice.
func (c *ResourceCache) GetOrCreateResources(key string, f func() (resource.Resources, error)) (resource.Resources, error) {
	r, err := c.getOrCreate(key, func() (any, error) { return f() })
	if r == nil || err != nil {
		return nil, err
	}
	return r.(resource.Resources), nil
}

// getOrCreate implements a double-checked create: a fast read-locked
// lookup, then a per-key named lock around the (potentially slow) f.
func (c *ResourceCache) getOrCreate(key string, f func() (any, error)) (any, error) {
	key = c.cleanKey(key)
	// First check in-memory cache.
	r, found := c.get(key)
	if found {
		return r, nil
	}
	// This is a potentially long running operation, so get a named lock.
	c.nlocker.Lock(key)

	// Double check in-memory cache.
	r, found = c.get(key)
	if found {
		// The defer below is not registered yet, so unlock explicitly.
		c.nlocker.Unlock(key)
		return r, nil
	}

	defer c.nlocker.Unlock(key)

	r, err := f()
	if err != nil {
		return nil, err
	}

	c.set(key, r)

	return r, nil
}
+
+func (c *ResourceCache) getFilenames(key string) (string, string) {
+ filenameMeta := key + ".json"
+ filenameContent := key + ".content"
+
+ return filenameMeta, filenameContent
+}
+
// getFromFile loads a transformed resource (metadata plus a content
// reader) from the file cache. The boolean result is false when either
// part is missing or the metadata cannot be unmarshaled.
//
// NOTE(review): this takes c's read lock although it only touches the
// file cache, not c.cache — confirm whether the lock is actually needed
// here or is guarding something implicit.
func (c *ResourceCache) getFromFile(key string) (filecache.ItemInfo, io.ReadCloser, transformedResourceMetadata, bool) {
	c.RLock()
	defer c.RUnlock()

	var meta transformedResourceMetadata
	filenameMeta, filenameContent := c.getFilenames(key)

	_, jsonContent, _ := c.fileCache.GetBytes(filenameMeta)
	if jsonContent == nil {
		return filecache.ItemInfo{}, nil, meta, false
	}

	if err := json.Unmarshal(jsonContent, &meta); err != nil {
		// Corrupt metadata is treated as a cache miss.
		return filecache.ItemInfo{}, nil, meta, false
	}

	fi, rc, _ := c.fileCache.Get(filenameContent)

	return fi, rc, meta, rc != nil
}

// writeMeta writes the metadata to file and returns a writer for the content part.
func (c *ResourceCache) writeMeta(key string, meta transformedResourceMetadata) (filecache.ItemInfo, io.WriteCloser, error) {
	filenameMeta, filenameContent := c.getFilenames(key)
	raw, err := json.Marshal(meta)
	if err != nil {
		return filecache.ItemInfo{}, nil, err
	}

	_, fm, err := c.fileCache.WriteCloser(filenameMeta)
	if err != nil {
		return filecache.ItemInfo{}, nil, err
	}
	defer fm.Close()

	if _, err := fm.Write(raw); err != nil {
		return filecache.ItemInfo{}, nil, err
	}

	// The caller is responsible for closing the returned content writer.
	fi, fc, err := c.fileCache.WriteCloser(filenameContent)

	return fi, fc, err
}

// set stores r under key, guarded by the write lock.
func (c *ResourceCache) set(key string, r any) {
	c.Lock()
	defer c.Unlock()
	c.cache[key] = r
}
+
+func (c *ResourceCache) DeletePartitions(partitions ...string) {
+ partitionsSet := map[string]bool{
+ // Always clear out the resources not matching any partition.
+ "other": true,
+ }
+ for _, p := range partitions {
+ partitionsSet[p] = true
+ }
+
+ if partitionsSet[CACHE_CLEAR_ALL] {
+ c.clear()
+ return
+ }
+
+ c.Lock()
+ defer c.Unlock()
+
+ for k := range c.cache {
+ clear := false
+ for p := range partitionsSet {
+ if strings.Contains(k, p) {
+ // There will be some false positive, but that's fine.
+ clear = true
+ break
+ }
+ }
+
+ if clear {
+ delete(c.cache, k)
+ }
+ }
+}
+
+func (c *ResourceCache) DeleteMatches(re *regexp.Regexp) {
+ c.Lock()
+ defer c.Unlock()
+
+ for k := range c.cache {
+ if re.MatchString(k) {
+ delete(c.cache, k)
+ }
+ }
+}
diff --git a/resources/resource_cache_test.go b/resources/resource_cache_test.go
new file mode 100644
index 000000000..bcb241025
--- /dev/null
+++ b/resources/resource_cache_test.go
@@ -0,0 +1,58 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestResourceKeyPartitions verifies partition resolution for various
// filename shapes: plain extensions, alias extensions (sass/scss),
// leading directories, case folding and OS-specific separators.
func TestResourceKeyPartitions(t *testing.T) {
	c := qt.New(t)

	for _, test := range []struct {
		input    string
		expected []string
	}{
		{"a.js", []string{"js"}},
		{"a.scss", []string{"sass", "scss"}},
		{"a.sass", []string{"sass", "scss"}},
		{"d/a.js", []string{"d", "js"}},
		{"js/a.js", []string{"js"}},
		{"D/a.JS", []string{"d", "js"}},
		{"d/a", []string{"d"}},
		{filepath.FromSlash("/d/a.js"), []string{"d", "js"}},
		{filepath.FromSlash("/d/e/a.js"), []string{"d", "js"}},
	} {
		c.Assert(ResourceKeyPartitions(test.input), qt.DeepEquals, test.expected, qt.Commentf(test.input))
	}
}

// TestResourceKeyContainsAny verifies cache-key/partition intersection
// for keys derived from filenames.
func TestResourceKeyContainsAny(t *testing.T) {
	c := qt.New(t)

	for _, test := range []struct {
		key      string
		filename string
		expected bool
	}{
		{"styles/css", "asdf.css", true},
		{"styles/css", "styles/asdf.scss", true},
		{"js/foo.bar", "asdf.css", false},
	} {
		c.Assert(ResourceKeyContainsAny(test.key, ResourceKeyPartitions(test.filename)), qt.Equals, test.expected)
	}
}
diff --git a/resources/resource_factories/bundler/bundler.go b/resources/resource_factories/bundler/bundler.go
new file mode 100644
index 000000000..7de228227
--- /dev/null
+++ b/resources/resource_factories/bundler/bundler.go
@@ -0,0 +1,148 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package bundler contains functions for concatenation etc. of Resource objects.
+package bundler
+
+import (
+ "fmt"
+ "io"
+ "path"
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
// Client contains methods to perform concatenation and other bundling
// related tasks on Resource objects.
type Client struct {
	rs *resources.Spec
}

// New creates a new Client with the given specification.
func New(rs *resources.Spec) *Client {
	return &Client{rs: rs}
}
+
// multiReadSeekCloser presents multiple sources as one concatenated
// read-seek-closer.
type multiReadSeekCloser struct {
	mr      io.Reader
	sources []hugio.ReadSeekCloser
}

// toReaders widens the sources to plain io.Readers for io.MultiReader.
func toReaders(sources []hugio.ReadSeekCloser) []io.Reader {
	readers := make([]io.Reader, len(sources))
	for i, r := range sources {
		readers[i] = r
	}
	return readers
}

// newMultiReadSeekCloser returns a reader over the concatenation of sources.
func newMultiReadSeekCloser(sources ...hugio.ReadSeekCloser) *multiReadSeekCloser {
	mr := io.MultiReader(toReaders(sources)...)
	return &multiReadSeekCloser{mr, sources}
}

// Read reads from the concatenated stream.
func (r *multiReadSeekCloser) Read(p []byte) (n int, err error) {
	return r.mr.Read(p)
}

// Seek seeks every underlying source to (offset, whence), then rebuilds
// the concatenation from the first source.
//
// NOTE(review): because the multi reader always restarts from the first
// source, only Seek(0, 0) (a full rewind) yields meaningful composite
// semantics — confirm no caller uses other offsets. On error, sources
// already seeked are left at the new offset.
func (r *multiReadSeekCloser) Seek(offset int64, whence int) (newOffset int64, err error) {
	for _, s := range r.sources {
		newOffset, err = s.Seek(offset, whence)
		if err != nil {
			return
		}
	}

	r.mr = io.MultiReader(toReaders(r.sources)...)

	return
}

// Close closes all sources.
// NOTE(review): individual Close errors are discarded.
func (r *multiReadSeekCloser) Close() error {
	for _, s := range r.sources {
		s.Close()
	}
	return nil
}
+
+// Concat concatenates the list of Resource objects.
+func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resource, error) {
+ // The CACHE_OTHER will make sure this will be re-created and published on rebuilds.
+ return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) {
+ var resolvedm media.Type
+
+ // The given set of resources must be of the same Media Type.
+ // We may improve on that in the future, but then we need to know more.
+ for i, r := range r {
+ if i > 0 && r.MediaType().Type() != resolvedm.Type() {
+ return nil, fmt.Errorf("resources in Concat must be of the same Media Type, got %q and %q", r.MediaType().Type(), resolvedm.Type())
+ }
+ resolvedm = r.MediaType()
+ }
+
+ concatr := func() (hugio.ReadSeekCloser, error) {
+ var rcsources []hugio.ReadSeekCloser
+ for _, s := range r {
+ rcr, ok := s.(resource.ReadSeekCloserResource)
+ if !ok {
+ return nil, fmt.Errorf("resource %T does not implement resource.ReadSeekerCloserResource", s)
+ }
+ rc, err := rcr.ReadSeekCloser()
+ if err != nil {
+ // Close the already opened.
+ for _, rcs := range rcsources {
+ rcs.Close()
+ }
+ return nil, err
+ }
+
+ rcsources = append(rcsources, rc)
+ }
+
+ // Arbitrary JavaScript files require a barrier between them to be safely concatenated together.
+ // Without this, the last line of one file can affect the first line of the next file and change how both files are interpreted.
+ if resolvedm.MainType == media.JavascriptType.MainType && resolvedm.SubType == media.JavascriptType.SubType {
+ readers := make([]hugio.ReadSeekCloser, 2*len(rcsources)-1)
+ j := 0
+ for i := 0; i < len(rcsources); i++ {
+ if i > 0 {
+ readers[j] = hugio.NewReadSeekerNoOpCloserFromString("\n;\n")
+ j++
+ }
+ readers[j] = rcsources[i]
+ j++
+ }
+ return newMultiReadSeekCloser(readers...), nil
+ }
+
+ return newMultiReadSeekCloser(rcsources...), nil
+ }
+
+ composite, err := c.rs.New(
+ resources.ResourceSourceDescriptor{
+ Fs: c.rs.FileCaches.AssetsCache().Fs,
+ LazyPublish: true,
+ OpenReadSeekCloser: concatr,
+ RelTargetFilename: filepath.Clean(targetPath),
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return composite, nil
+ })
+}
diff --git a/resources/resource_factories/bundler/bundler_test.go b/resources/resource_factories/bundler/bundler_test.go
new file mode 100644
index 000000000..17a74cc88
--- /dev/null
+++ b/resources/resource_factories/bundler/bundler_test.go
@@ -0,0 +1,40 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bundler
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/hugio"
+)
+
+func TestMultiReadSeekCloser(t *testing.T) {
+ c := qt.New(t)
+
+ rc := newMultiReadSeekCloser(
+ hugio.NewReadSeekerNoOpCloserFromString("A"),
+ hugio.NewReadSeekerNoOpCloserFromString("B"),
+ hugio.NewReadSeekerNoOpCloserFromString("C"),
+ )
+
+ for i := 0; i < 3; i++ {
+ s1 := helpers.ReaderToString(rc)
+ c.Assert(s1, qt.Equals, "ABC")
+ _, err := rc.Seek(0, 0)
+ c.Assert(err, qt.IsNil)
+ }
+}
diff --git a/resources/resource_factories/create/create.go b/resources/resource_factories/create/create.go
new file mode 100644
index 000000000..075d25736
--- /dev/null
+++ b/resources/resource_factories/create/create.go
@@ -0,0 +1,151 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package create contains functions to create Resource objects. These will
+// typically be non-file resources.
+package create
+
+import (
+ "net/http"
+ "path"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// Client contains methods to create Resource objects,
+// e.g. Get, Match, FromString and FromRemote.
+type Client struct {
+ rs *resources.Spec
+ httpClient *http.Client
+ cacheGetResource *filecache.Cache
+}
+
+// New creates a new Client with the given specification.
+func New(rs *resources.Spec) *Client {
+ return &Client{
+ rs: rs,
+ httpClient: &http.Client{
+ Timeout: 10 * time.Second,
+ },
+ cacheGetResource: rs.FileCaches.GetResourceCache(),
+ }
+}
+
+// Copy copies r to the new targetPath.
+func (c *Client) Copy(r resource.Resource, targetPath string) (resource.Resource, error) {
+ return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(targetPath), func() (resource.Resource, error) {
+ return resources.Copy(r, targetPath), nil
+ })
+}
+
+// Get creates a new Resource by opening the given filename in the assets filesystem.
+func (c *Client) Get(filename string) (resource.Resource, error) {
+ filename = filepath.Clean(filename)
+ return c.rs.ResourceCache.GetOrCreate(resources.ResourceCacheKey(filename), func() (resource.Resource, error) {
+ return c.rs.New(resources.ResourceSourceDescriptor{
+ Fs: c.rs.BaseFs.Assets.Fs,
+ LazyPublish: true,
+ SourceFilename: filename,
+ })
+ })
+}
+
+// Match gets the resources matching the given pattern from the assets filesystem.
+func (c *Client) Match(pattern string) (resource.Resources, error) {
+ return c.match("__match", pattern, nil, false)
+}
+
+func (c *Client) ByType(tp string) resource.Resources {
+ res, err := c.match(path.Join("_byType", tp), "**", func(r resource.Resource) bool { return r.ResourceType() == tp }, false)
+ if err != nil {
+ panic(err)
+ }
+ return res
+}
+
+// GetMatch gets first resource matching the given pattern from the assets filesystem.
+func (c *Client) GetMatch(pattern string) (resource.Resource, error) {
+ res, err := c.match("__get-match", pattern, nil, true)
+ if err != nil || len(res) == 0 {
+ return nil, err
+ }
+ return res[0], err
+}
+
+func (c *Client) match(name, pattern string, matchFunc func(r resource.Resource) bool, firstOnly bool) (resource.Resources, error) {
+ pattern = glob.NormalizePath(pattern)
+ partitions := glob.FilterGlobParts(strings.Split(pattern, "/"))
+ if len(partitions) == 0 {
+ partitions = []string{resources.CACHE_OTHER}
+ }
+ key := path.Join(name, path.Join(partitions...))
+ key = path.Join(key, pattern)
+
+ return c.rs.ResourceCache.GetOrCreateResources(key, func() (resource.Resources, error) {
+ var res resource.Resources
+
+ handle := func(info hugofs.FileMetaInfo) (bool, error) {
+ meta := info.Meta()
+ r, err := c.rs.New(resources.ResourceSourceDescriptor{
+ LazyPublish: true,
+ FileInfo: info,
+ OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
+ return meta.Open()
+ },
+ RelTargetFilename: meta.Path,
+ })
+ if err != nil {
+ return true, err
+ }
+
+ if matchFunc != nil && !matchFunc(r) {
+ return false, nil
+ }
+
+ res = append(res, r)
+
+ return firstOnly, nil
+ }
+
+ if err := hugofs.Glob(c.rs.BaseFs.Assets.Fs, pattern, handle); err != nil {
+ return nil, err
+ }
+
+ return res, nil
+ })
+}
+
+// FromString creates a new Resource from a string with the given relative target path.
+func (c *Client) FromString(targetPath, content string) (resource.Resource, error) {
+ return c.rs.ResourceCache.GetOrCreate(path.Join(resources.CACHE_OTHER, targetPath), func() (resource.Resource, error) {
+ return c.rs.New(
+ resources.ResourceSourceDescriptor{
+ Fs: c.rs.FileCaches.AssetsCache().Fs,
+ LazyPublish: true,
+ OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
+ return hugio.NewReadSeekerNoOpCloserFromString(content), nil
+ },
+ RelTargetFilename: filepath.Clean(targetPath),
+ })
+ })
+}
diff --git a/resources/resource_factories/create/remote.go b/resources/resource_factories/create/remote.go
new file mode 100644
index 000000000..51199dc93
--- /dev/null
+++ b/resources/resource_factories/create/remote.go
@@ -0,0 +1,279 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package create
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "mime"
+ "net/http"
+ "net/http/httputil"
+ "net/url"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/mitchellh/mapstructure"
+)
+
+type HTTPError struct {
+ error
+ Data map[string]any
+
+ StatusCode int
+ Body string
+}
+
+func toHTTPError(err error, res *http.Response) *HTTPError {
+ if err == nil {
+ panic("err is nil")
+ }
+ if res == nil {
+ return &HTTPError{
+ error: err,
+ Data: map[string]any{},
+ }
+ }
+
+ var body []byte
+ body, _ = ioutil.ReadAll(res.Body)
+
+ return &HTTPError{
+ error: err,
+ Data: map[string]any{
+ "StatusCode": res.StatusCode,
+ "Status": res.Status,
+ "Body": string(body),
+ "TransferEncoding": res.TransferEncoding,
+ "ContentLength": res.ContentLength,
+ "ContentType": res.Header.Get("Content-Type"),
+ },
+ }
+}
+
+// FromRemote fetches the remote resource at the given URI and creates a Resource from it.
+// The options map may specify the HTTP method, headers, body and a custom cache key.
+func (c *Client) FromRemote(uri string, optionsm map[string]any) (resource.Resource, error) {
+ rURL, err := url.Parse(uri)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse URL for resource %s: %w", uri, err)
+ }
+
+ resourceID := calculateResourceID(uri, optionsm)
+
+ _, httpResponse, err := c.cacheGetResource.GetOrCreate(resourceID, func() (io.ReadCloser, error) {
+ options, err := decodeRemoteOptions(optionsm)
+ if err != nil {
+ return nil, fmt.Errorf("failed to decode options for resource %s: %w", uri, err)
+ }
+ if err := c.validateFromRemoteArgs(uri, options); err != nil {
+ return nil, err
+ }
+
+ req, err := http.NewRequest(options.Method, uri, options.BodyReader())
+ if err != nil {
+ return nil, fmt.Errorf("failed to create request for resource %s: %w", uri, err)
+ }
+ addDefaultHeaders(req)
+
+ if options.Headers != nil {
+ addUserProvidedHeaders(options.Headers, req)
+ }
+
+ res, err := c.httpClient.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ httpResponse, err := httputil.DumpResponse(res, true)
+ if err != nil {
+ return nil, toHTTPError(err, res)
+ }
+
+ if res.StatusCode != http.StatusNotFound {
+ if res.StatusCode < 200 || res.StatusCode > 299 {
+ return nil, toHTTPError(fmt.Errorf("failed to fetch remote resource: %s", http.StatusText(res.StatusCode)), res)
+
+ }
+ }
+
+ return hugio.ToReadCloser(bytes.NewReader(httpResponse)), nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ defer httpResponse.Close()
+
+ res, err := http.ReadResponse(bufio.NewReader(httpResponse), nil)
+ if err != nil {
+ return nil, err
+ }
+
+ if res.StatusCode == http.StatusNotFound {
+ // Not found. This matches how lookups for local resources work.
+ return nil, nil
+ }
+
+ body, err := ioutil.ReadAll(res.Body)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read remote resource %q: %w", uri, err)
+ }
+
+ filename := path.Base(rURL.Path)
+ if _, params, _ := mime.ParseMediaType(res.Header.Get("Content-Disposition")); params != nil {
+ if _, ok := params["filename"]; ok {
+ filename = params["filename"]
+ }
+ }
+
+ var extensionHints []string
+
+ contentType := res.Header.Get("Content-Type")
+
+ // mime.ExtensionsByType gives a long list of extensions for text/plain,
+ // just use ".txt".
+ if strings.HasPrefix(contentType, "text/plain") {
+ extensionHints = []string{".txt"}
+ } else {
+ exts, _ := mime.ExtensionsByType(contentType)
+ if exts != nil {
+ extensionHints = exts
+ }
+ }
+
+ // Look for a file extension. If it's .txt, look for a more specific one.
+ if extensionHints == nil || extensionHints[0] == ".txt" {
+ if ext := path.Ext(filename); ext != "" {
+ extensionHints = []string{ext}
+ }
+ }
+
+ // Now resolve the media type primarily using the content.
+ mediaType := media.FromContent(c.rs.MediaTypes, extensionHints, body)
+ if mediaType.IsZero() {
+ return nil, fmt.Errorf("failed to resolve media type for remote resource %q", uri)
+ }
+
+ resourceID = filename[:len(filename)-len(path.Ext(filename))] + "_" + resourceID + mediaType.FirstSuffix.FullSuffix
+
+ return c.rs.New(
+ resources.ResourceSourceDescriptor{
+ MediaType: mediaType,
+ LazyPublish: true,
+ OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
+ return hugio.NewReadSeekerNoOpCloser(bytes.NewReader(body)), nil
+ },
+ RelTargetFilename: filepath.Clean(resourceID),
+ })
+}
+
+func (c *Client) validateFromRemoteArgs(uri string, options fromRemoteOptions) error {
+ if err := c.rs.ExecHelper.Sec().CheckAllowedHTTPURL(uri); err != nil {
+ return err
+ }
+
+ if err := c.rs.ExecHelper.Sec().CheckAllowedHTTPMethod(options.Method); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func calculateResourceID(uri string, optionsm map[string]any) string {
+ if key, found := maps.LookupEqualFold(optionsm, "key"); found {
+ return helpers.HashString(key)
+ }
+ return helpers.HashString(uri, optionsm)
+}
+
+func addDefaultHeaders(req *http.Request, accepts ...string) {
+ for _, accept := range accepts {
+ if !hasHeaderValue(req.Header, "Accept", accept) {
+ req.Header.Add("Accept", accept)
+ }
+ }
+ if !hasHeaderKey(req.Header, "User-Agent") {
+ req.Header.Add("User-Agent", "Hugo Static Site Generator")
+ }
+}
+
+func addUserProvidedHeaders(headers map[string]any, req *http.Request) {
+ if headers == nil {
+ return
+ }
+ for key, val := range headers {
+ vals := types.ToStringSlicePreserveString(val)
+ for _, s := range vals {
+ req.Header.Add(key, s)
+ }
+ }
+}
+
+func hasHeaderValue(m http.Header, key, value string) bool {
+ var s []string
+ var ok bool
+
+ if s, ok = m[key]; !ok {
+ return false
+ }
+
+ for _, v := range s {
+ if v == value {
+ return true
+ }
+ }
+ return false
+}
+
+func hasHeaderKey(m http.Header, key string) bool {
+ _, ok := m[key]
+ return ok
+}
+
+type fromRemoteOptions struct {
+ Method string
+ Headers map[string]any
+ Body []byte
+}
+
+func (o fromRemoteOptions) BodyReader() io.Reader {
+ if o.Body == nil {
+ return nil
+ }
+ return bytes.NewBuffer(o.Body)
+}
+
+func decodeRemoteOptions(optionsm map[string]any) (fromRemoteOptions, error) {
+ options := fromRemoteOptions{
+ Method: "GET",
+ }
+
+ err := mapstructure.WeakDecode(optionsm, &options)
+ if err != nil {
+ return options, err
+ }
+ options.Method = strings.ToUpper(options.Method)
+
+ return options, nil
+}
diff --git a/resources/resource_factories/create/remote_test.go b/resources/resource_factories/create/remote_test.go
new file mode 100644
index 000000000..c2a3b7b32
--- /dev/null
+++ b/resources/resource_factories/create/remote_test.go
@@ -0,0 +1,96 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package create
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestDecodeRemoteOptions(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ name string
+ args map[string]any
+ want fromRemoteOptions
+ wantErr bool
+ }{
+ {
+ "POST",
+ map[string]any{
+ "meThod": "PoST",
+ "headers": map[string]any{
+ "foo": "bar",
+ },
+ },
+ fromRemoteOptions{
+ Method: "POST",
+ Headers: map[string]any{
+ "foo": "bar",
+ },
+ },
+ false,
+ },
+ {
+ "Body",
+ map[string]any{
+ "meThod": "POST",
+ "body": []byte("foo"),
+ },
+ fromRemoteOptions{
+ Method: "POST",
+ Body: []byte("foo"),
+ },
+ false,
+ },
+ {
+ "Body, string",
+ map[string]any{
+ "meThod": "POST",
+ "body": "foo",
+ },
+ fromRemoteOptions{
+ Method: "POST",
+ Body: []byte("foo"),
+ },
+ false,
+ },
+ } {
+ c.Run(test.name, func(c *qt.C) {
+ got, err := decodeRemoteOptions(test.args)
+ isErr := qt.IsNil
+ if test.wantErr {
+ isErr = qt.IsNotNil
+ }
+
+ c.Assert(err, isErr)
+ c.Assert(got, qt.DeepEquals, test.want)
+ })
+
+ }
+
+}
+
+func TestCalculateResourceID(t *testing.T) {
+ c := qt.New(t)
+
+ c.Assert(calculateResourceID("foo", nil), qt.Equals, "5917621528921068675")
+ c.Assert(calculateResourceID("foo", map[string]any{"bar": "baz"}), qt.Equals, "7294498335241413323")
+
+ c.Assert(calculateResourceID("foo", map[string]any{"key": "1234", "bar": "baz"}), qt.Equals, "14904296279238663669")
+ c.Assert(calculateResourceID("asdf", map[string]any{"key": "1234", "bar": "asdf"}), qt.Equals, "14904296279238663669")
+ c.Assert(calculateResourceID("asdf", map[string]any{"key": "12345", "bar": "asdf"}), qt.Equals, "12191037851845371770")
+}
diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go
new file mode 100644
index 000000000..8954a5109
--- /dev/null
+++ b/resources/resource_metadata.go
@@ -0,0 +1,144 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
+)
+
+var (
+ _ metaAssigner = (*genericResource)(nil)
+ _ metaAssigner = (*imageResource)(nil)
+ _ metaAssignerProvider = (*resourceAdapter)(nil)
+)
+
+type metaAssignerProvider interface {
+ getMetaAssigner() metaAssigner
+}
+
+// metaAssigner allows updating metadata in resources that supports it.
+type metaAssigner interface {
+ setTitle(title string)
+ setName(name string)
+ setMediaType(mediaType media.Type)
+ updateParams(params map[string]any)
+}
+
+const counterPlaceHolder = ":counter"
+
+// AssignMetadata assigns the given metadata to those resources that support updates,
+// matching by the glob pattern given in `src` against the lower-cased resource name.
+// This assignment is additive, but the most specific match needs to come first.
+// The `name` and `title` metadata fields support a ":counter" placeholder that is
+// incremented once per resource matching the same `src` pattern.
+func AssignMetadata(metadata []map[string]any, resources ...resource.Resource) error {
+ counters := make(map[string]int)
+
+ for _, r := range resources {
+ var ma metaAssigner
+ mp, ok := r.(metaAssignerProvider)
+ if ok {
+ ma = mp.getMetaAssigner()
+ } else {
+ ma, ok = r.(metaAssigner)
+ if !ok {
+ continue
+ }
+ }
+
+ var (
+ nameSet, titleSet bool
+ nameCounter, titleCounter = 0, 0
+ nameCounterFound, titleCounterFound bool
+ resourceSrcKey = strings.ToLower(r.Name())
+ )
+
+ for _, meta := range metadata {
+ src, found := meta["src"]
+ if !found {
+ return fmt.Errorf("missing 'src' in metadata for resource")
+ }
+
+ srcKey := strings.ToLower(cast.ToString(src))
+
+ glob, err := glob.GetGlob(srcKey)
+ if err != nil {
+ return fmt.Errorf("failed to match resource with metadata: %w", err)
+ }
+
+ match := glob.Match(resourceSrcKey)
+
+ if match {
+ if !nameSet {
+ name, found := meta["name"]
+ if found {
+ name := cast.ToString(name)
+ if !nameCounterFound {
+ nameCounterFound = strings.Contains(name, counterPlaceHolder)
+ }
+ if nameCounterFound && nameCounter == 0 {
+ counterKey := "name_" + srcKey
+ nameCounter = counters[counterKey] + 1
+ counters[counterKey] = nameCounter
+ }
+
+ ma.setName(replaceResourcePlaceholders(name, nameCounter))
+ nameSet = true
+ }
+ }
+
+ if !titleSet {
+ title, found := meta["title"]
+ if found {
+ title := cast.ToString(title)
+ if !titleCounterFound {
+ titleCounterFound = strings.Contains(title, counterPlaceHolder)
+ }
+ if titleCounterFound && titleCounter == 0 {
+ counterKey := "title_" + srcKey
+ titleCounter = counters[counterKey] + 1
+ counters[counterKey] = titleCounter
+ }
+ ma.setTitle((replaceResourcePlaceholders(title, titleCounter)))
+ titleSet = true
+ }
+ }
+
+ params, found := meta["params"]
+ if found {
+ m := maps.ToStringMap(params)
+ // Needed for case insensitive fetching of params values
+ maps.PrepareParams(m)
+ ma.updateParams(m)
+ }
+ }
+ }
+ }
+
+ return nil
+}
+
+func replaceResourcePlaceholders(in string, counter int) string {
+ return strings.Replace(in, counterPlaceHolder, strconv.Itoa(counter), -1)
+}
diff --git a/resources/resource_metadata_test.go b/resources/resource_metadata_test.go
new file mode 100644
index 000000000..fa9659162
--- /dev/null
+++ b/resources/resource_metadata_test.go
@@ -0,0 +1,221 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestAssignMetadata(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ var foo1, foo2, foo3, logo1, logo2, logo3 resource.Resource
+ var resources resource.Resources
+
+ for _, this := range []struct {
+ metaData []map[string]any
+ assertFunc func(err error)
+ }{
+ {[]map[string]any{
+ {
+ "title": "My Resource",
+ "name": "My Name",
+ "src": "*",
+ },
+ }, func(err error) {
+ c.Assert(logo1.Title(), qt.Equals, "My Resource")
+ c.Assert(logo1.Name(), qt.Equals, "My Name")
+ c.Assert(foo2.Name(), qt.Equals, "My Name")
+ }},
+ {[]map[string]any{
+ {
+ "title": "My Logo",
+ "src": "*loGo*",
+ },
+ {
+ "title": "My Resource",
+ "name": "My Name",
+ "src": "*",
+ },
+ }, func(err error) {
+ c.Assert(logo1.Title(), qt.Equals, "My Logo")
+ c.Assert(logo2.Title(), qt.Equals, "My Logo")
+ c.Assert(logo1.Name(), qt.Equals, "My Name")
+ c.Assert(foo2.Name(), qt.Equals, "My Name")
+ c.Assert(foo3.Name(), qt.Equals, "My Name")
+ c.Assert(foo3.Title(), qt.Equals, "My Resource")
+ }},
+ {[]map[string]any{
+ {
+ "title": "My Logo",
+ "src": "*loGo*",
+ "params": map[string]any{
+ "Param1": true,
+ "icon": "logo",
+ },
+ },
+ {
+ "title": "My Resource",
+ "src": "*",
+ "params": map[string]any{
+ "Param2": true,
+ "icon": "resource",
+ },
+ },
+ }, func(err error) {
+ c.Assert(err, qt.IsNil)
+ c.Assert(logo1.Title(), qt.Equals, "My Logo")
+ c.Assert(foo3.Title(), qt.Equals, "My Resource")
+ _, p1 := logo2.Params()["param1"]
+ _, p2 := foo2.Params()["param2"]
+ _, p1_2 := foo2.Params()["param1"]
+ _, p2_2 := logo2.Params()["param2"]
+
+ icon1 := logo2.Params()["icon"]
+ icon2 := foo2.Params()["icon"]
+
+ c.Assert(p1, qt.Equals, true)
+ c.Assert(p2, qt.Equals, true)
+
+ // Check merge
+ c.Assert(p2_2, qt.Equals, true)
+ c.Assert(p1_2, qt.Equals, false)
+
+ c.Assert(icon1, qt.Equals, "logo")
+ c.Assert(icon2, qt.Equals, "resource")
+ }},
+ {[]map[string]any{
+ {
+ "name": "Logo Name #:counter",
+ "src": "*logo*",
+ },
+ {
+ "title": "Resource #:counter",
+ "name": "Name #:counter",
+ "src": "*",
+ },
+ }, func(err error) {
+ c.Assert(err, qt.IsNil)
+ c.Assert(logo2.Title(), qt.Equals, "Resource #2")
+ c.Assert(logo2.Name(), qt.Equals, "Logo Name #1")
+ c.Assert(logo1.Title(), qt.Equals, "Resource #4")
+ c.Assert(logo1.Name(), qt.Equals, "Logo Name #2")
+ c.Assert(foo2.Title(), qt.Equals, "Resource #1")
+ c.Assert(foo1.Title(), qt.Equals, "Resource #3")
+ c.Assert(foo1.Name(), qt.Equals, "Name #2")
+ c.Assert(foo3.Title(), qt.Equals, "Resource #5")
+
+ c.Assert(resources.GetMatch("logo name #1*"), qt.Equals, logo2)
+ }},
+ {[]map[string]any{
+ {
+ "title": "Third Logo #:counter",
+ "src": "logo3.png",
+ },
+ {
+ "title": "Other Logo #:counter",
+ "name": "Name #:counter",
+ "src": "logo*",
+ },
+ }, func(err error) {
+ c.Assert(err, qt.IsNil)
+ c.Assert(logo3.Title(), qt.Equals, "Third Logo #1")
+ c.Assert(logo3.Name(), qt.Equals, "Name #3")
+ c.Assert(logo2.Title(), qt.Equals, "Other Logo #1")
+ c.Assert(logo2.Name(), qt.Equals, "Name #1")
+ c.Assert(logo1.Title(), qt.Equals, "Other Logo #2")
+ c.Assert(logo1.Name(), qt.Equals, "Name #2")
+ }},
+ {[]map[string]any{
+ {
+ "title": "Third Logo",
+ "src": "logo3.png",
+ },
+ {
+ "title": "Other Logo #:counter",
+ "name": "Name #:counter",
+ "src": "logo*",
+ },
+ }, func(err error) {
+ c.Assert(err, qt.IsNil)
+ c.Assert(logo3.Title(), qt.Equals, "Third Logo")
+ c.Assert(logo3.Name(), qt.Equals, "Name #3")
+ c.Assert(logo2.Title(), qt.Equals, "Other Logo #1")
+ c.Assert(logo2.Name(), qt.Equals, "Name #1")
+ c.Assert(logo1.Title(), qt.Equals, "Other Logo #2")
+ c.Assert(logo1.Name(), qt.Equals, "Name #2")
+ }},
+ {[]map[string]any{
+ {
+ "name": "third-logo",
+ "src": "logo3.png",
+ },
+ {
+ "title": "Logo #:counter",
+ "name": "Name #:counter",
+ "src": "logo*",
+ },
+ }, func(err error) {
+ c.Assert(err, qt.IsNil)
+ c.Assert(logo3.Title(), qt.Equals, "Logo #3")
+ c.Assert(logo3.Name(), qt.Equals, "third-logo")
+ c.Assert(logo2.Title(), qt.Equals, "Logo #1")
+ c.Assert(logo2.Name(), qt.Equals, "Name #1")
+ c.Assert(logo1.Title(), qt.Equals, "Logo #2")
+ c.Assert(logo1.Name(), qt.Equals, "Name #2")
+ }},
+ {[]map[string]any{
+ {
+ "title": "Third Logo #:counter",
+ },
+ }, func(err error) {
+ // Missing src
+ c.Assert(err, qt.Not(qt.IsNil))
+ }},
+ {[]map[string]any{
+ {
+ "title": "Title",
+ "src": "[]",
+ },
+ }, func(err error) {
+ // Invalid pattern
+ c.Assert(err, qt.Not(qt.IsNil))
+ }},
+ } {
+
+ foo2 = spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType)
+ logo2 = spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType)
+ foo1 = spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType)
+ logo1 = spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType)
+ foo3 = spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType)
+ logo3 = spec.newGenericResource(nil, nil, nil, "/b/logo3.png", "logo3.png", pngType)
+
+ resources = resource.Resources{
+ foo2,
+ logo2,
+ foo1,
+ logo1,
+ foo3,
+ logo3,
+ }
+
+ this.assertFunc(AssignMetadata(this.metaData, resources...))
+ }
+}
diff --git a/resources/resource_spec.go b/resources/resource_spec.go
new file mode 100644
index 000000000..fd9653012
--- /dev/null
+++ b/resources/resource_spec.go
@@ -0,0 +1,345 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "errors"
+ "fmt"
+ "mime"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/resources/jsconfig"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/hexec"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/identity"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/resources/postpub"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/images"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/tpl"
+ "github.com/spf13/afero"
+)
+
+func NewSpec(
+ s *helpers.PathSpec,
+ fileCaches filecache.Caches,
+ incr identity.Incrementer,
+ logger loggers.Logger,
+ errorHandler herrors.ErrorSender,
+ execHelper *hexec.Exec,
+ outputFormats output.Formats,
+ mimeTypes media.Types) (*Spec, error) {
+ imgConfig, err := images.DecodeConfig(s.Cfg.GetStringMap("imaging"))
+ if err != nil {
+ return nil, err
+ }
+
+ imaging, err := images.NewImageProcessor(imgConfig)
+ if err != nil {
+ return nil, err
+ }
+
+ if incr == nil {
+ incr = &identity.IncrementByOne{}
+ }
+
+ if logger == nil {
+ logger = loggers.NewErrorLogger()
+ }
+
+ permalinks, err := page.NewPermalinkExpander(s)
+ if err != nil {
+ return nil, err
+ }
+
+ rs := &Spec{
+ PathSpec: s,
+ Logger: logger,
+ ErrorSender: errorHandler,
+ imaging: imaging,
+ ExecHelper: execHelper,
+ incr: incr,
+ MediaTypes: mimeTypes,
+ OutputFormats: outputFormats,
+ Permalinks: permalinks,
+ BuildConfig: config.DecodeBuild(s.Cfg),
+ FileCaches: fileCaches,
+ PostBuildAssets: &PostBuildAssets{
+ PostProcessResources: make(map[string]postpub.PostPublishedResource),
+ JSConfigBuilder: jsconfig.NewBuilder(),
+ },
+ imageCache: newImageCache(
+ fileCaches.ImageCache(),
+
+ s,
+ ),
+ }
+
+ rs.ResourceCache = newResourceCache(rs)
+
+ return rs, nil
+}
+
+type Spec struct {
+ *helpers.PathSpec
+
+ MediaTypes media.Types
+ OutputFormats output.Formats
+
+ Logger loggers.Logger
+ ErrorSender herrors.ErrorSender
+
+ TextTemplates tpl.TemplateParseFinder
+
+ Permalinks page.PermalinkExpander
+ BuildConfig config.Build
+
+ // Holds default filter settings etc.
+ imaging *images.ImageProcessor
+
+ ExecHelper *hexec.Exec
+
+ incr identity.Incrementer
+ imageCache *imageCache
+ ResourceCache *ResourceCache
+ FileCaches filecache.Caches
+
+ // Assets used after the build is done.
+ // This is shared between all sites.
+ *PostBuildAssets
+}
+
+type PostBuildAssets struct {
+ postProcessMu sync.RWMutex
+ PostProcessResources map[string]postpub.PostPublishedResource
+ JSConfigBuilder *jsconfig.Builder
+}
+
+func (r *Spec) New(fd ResourceSourceDescriptor) (resource.Resource, error) {
+ return r.newResourceFor(fd)
+}
+
+func (r *Spec) CacheStats() string {
+ r.imageCache.mu.RLock()
+ defer r.imageCache.mu.RUnlock()
+
+ s := fmt.Sprintf("Cache entries: %d", len(r.imageCache.store))
+
+ count := 0
+ for k := range r.imageCache.store {
+ if count > 5 {
+ break
+ }
+ s += "\n" + k
+ count++
+ }
+
+ return s
+}
+
+func (r *Spec) ClearCaches() {
+ r.imageCache.clear()
+ r.ResourceCache.clear()
+}
+
+func (r *Spec) DeleteBySubstring(s string) {
+ r.imageCache.deleteIfContains(s)
+}
+
+func (s *Spec) String() string {
+ return "spec"
+}
+
+// TODO(bep) clean up below
+func (r *Spec) newGenericResource(sourceFs afero.Fs,
+ targetPathBuilder func() page.TargetPaths,
+ osFileInfo os.FileInfo,
+ sourceFilename,
+ baseFilename string,
+ mediaType media.Type) *genericResource {
+ return r.newGenericResourceWithBase(
+ sourceFs,
+ nil,
+ nil,
+ targetPathBuilder,
+ osFileInfo,
+ sourceFilename,
+ baseFilename,
+ mediaType,
+ )
+}
+
+func (r *Spec) newGenericResourceWithBase(
+ sourceFs afero.Fs,
+ openReadSeekerCloser resource.OpenReadSeekCloser,
+ targetPathBaseDirs []string,
+ targetPathBuilder func() page.TargetPaths,
+ osFileInfo os.FileInfo,
+ sourceFilename,
+ baseFilename string,
+ mediaType media.Type) *genericResource {
+ if osFileInfo != nil && osFileInfo.IsDir() {
+ panic(fmt.Sprintf("dirs not supported resource types: %v", osFileInfo))
+ }
+
+ // This value is used both to construct URLs and file paths, but start
+ // with a Unix-styled path.
+ baseFilename = helpers.ToSlashTrimLeading(baseFilename)
+ fpath, fname := path.Split(baseFilename)
+
+ resourceType := mediaType.MainType
+
+ pathDescriptor := &resourcePathDescriptor{
+ baseTargetPathDirs: helpers.UniqueStringsReuse(targetPathBaseDirs),
+ targetPathBuilder: targetPathBuilder,
+ relTargetDirFile: dirFile{dir: fpath, file: fname},
+ }
+
+ var fim hugofs.FileMetaInfo
+ if osFileInfo != nil {
+ fim = osFileInfo.(hugofs.FileMetaInfo)
+ }
+
+ gfi := &resourceFileInfo{
+ fi: fim,
+ openReadSeekerCloser: openReadSeekerCloser,
+ sourceFs: sourceFs,
+ sourceFilename: sourceFilename,
+ h: &resourceHash{},
+ }
+
+ g := &genericResource{
+ resourceFileInfo: gfi,
+ resourcePathDescriptor: pathDescriptor,
+ mediaType: mediaType,
+ resourceType: resourceType,
+ spec: r,
+ params: make(map[string]any),
+ name: baseFilename,
+ title: baseFilename,
+ resourceContent: &resourceContent{},
+ }
+
+ return g
+}
+
+func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (resource.Resource, error) {
+ fi := fd.FileInfo
+ var sourceFilename string
+
+ if fd.OpenReadSeekCloser != nil {
+ } else if fd.SourceFilename != "" {
+ var err error
+ fi, err = sourceFs.Stat(fd.SourceFilename)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
+ }
+ sourceFilename = fd.SourceFilename
+ } else {
+ sourceFilename = fd.SourceFile.Filename()
+ }
+
+ if fd.RelTargetFilename == "" {
+ fd.RelTargetFilename = sourceFilename
+ }
+
+ mimeType := fd.MediaType
+ if mimeType.IsZero() {
+ ext := strings.ToLower(filepath.Ext(fd.RelTargetFilename))
+ var (
+ found bool
+ suffixInfo media.SuffixInfo
+ )
+ mimeType, suffixInfo, found = r.MediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, "."))
+ // TODO(bep) we need to handle these ambiguous types better, but in this context
+ // we most likely want the application/xml type.
+ if suffixInfo.Suffix == "xml" && mimeType.SubType == "rss" {
+ mimeType, found = r.MediaTypes.GetByType("application/xml")
+ }
+
+ if !found {
+ // A fallback. Note that mime.TypeByExtension is slow by Hugo standards,
+ // so we should configure media types to avoid this lookup for most
+ // situations.
+ mimeStr := mime.TypeByExtension(ext)
+ if mimeStr != "" {
+ mimeType, _ = media.FromStringAndExt(mimeStr, ext)
+ }
+ }
+ }
+
+ gr := r.newGenericResourceWithBase(
+ sourceFs,
+ fd.OpenReadSeekCloser,
+ fd.TargetBasePaths,
+ fd.TargetPaths,
+ fi,
+ sourceFilename,
+ fd.RelTargetFilename,
+ mimeType)
+
+ if mimeType.MainType == "image" {
+ imgFormat, ok := images.ImageFormatFromMediaSubType(mimeType.SubType)
+ if ok {
+ ir := &imageResource{
+ Image: images.NewImage(imgFormat, r.imaging, nil, gr),
+ baseResource: gr,
+ }
+ ir.root = ir
+ return newResourceAdapter(gr.spec, fd.LazyPublish, ir), nil
+ }
+
+ }
+
+ return newResourceAdapter(gr.spec, fd.LazyPublish, gr), nil
+}
+
+func (r *Spec) newResourceFor(fd ResourceSourceDescriptor) (resource.Resource, error) {
+ if fd.OpenReadSeekCloser == nil {
+ if fd.SourceFile != nil && fd.SourceFilename != "" {
+ return nil, errors.New("both SourceFile and AbsSourceFilename provided")
+ } else if fd.SourceFile == nil && fd.SourceFilename == "" {
+ return nil, errors.New("either SourceFile or AbsSourceFilename must be provided")
+ }
+ }
+
+ if fd.RelTargetFilename == "" {
+ fd.RelTargetFilename = fd.Filename()
+ }
+
+ if len(fd.TargetBasePaths) == 0 {
+ // If not set, we publish the same resource to all hosts.
+ fd.TargetBasePaths = r.MultihostTargetBasePaths
+ }
+
+ return r.newResource(fd.Fs, fd)
+}
diff --git a/resources/resource_test.go b/resources/resource_test.go
new file mode 100644
index 000000000..031c7b3c6
--- /dev/null
+++ b/resources/resource_test.go
@@ -0,0 +1,270 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "fmt"
+ "math/rand"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/media"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestGenericResource(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ r := spec.newGenericResource(nil, nil, nil, "/a/foo.css", "foo.css", media.CSSType)
+
+ c.Assert(r.Permalink(), qt.Equals, "https://example.com/foo.css")
+ c.Assert(r.RelPermalink(), qt.Equals, "/foo.css")
+ c.Assert(r.ResourceType(), qt.Equals, "text")
+}
+
+func TestGenericResourceWithLinkFactory(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ factory := newTargetPaths("/foo")
+
+ r := spec.newGenericResource(nil, factory, nil, "/a/foo.css", "foo.css", media.CSSType)
+
+ c.Assert(r.Permalink(), qt.Equals, "https://example.com/foo/foo.css")
+ c.Assert(r.RelPermalink(), qt.Equals, "/foo/foo.css")
+ c.Assert(r.Key(), qt.Equals, "/foo/foo.css")
+ c.Assert(r.ResourceType(), qt.Equals, "text")
+}
+
+func TestNewResourceFromFilename(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
+ writeSource(t, spec.Fs, "content/a/b/data.json", "json")
+
+ bfs := afero.NewBasePathFs(spec.Fs.Source, "content")
+
+ r, err := spec.New(ResourceSourceDescriptor{Fs: bfs, SourceFilename: "a/b/logo.png"})
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+ c.Assert(r.ResourceType(), qt.Equals, "image")
+ c.Assert(r.RelPermalink(), qt.Equals, "/a/b/logo.png")
+ c.Assert(r.Permalink(), qt.Equals, "https://example.com/a/b/logo.png")
+
+ r, err = spec.New(ResourceSourceDescriptor{Fs: bfs, SourceFilename: "a/b/data.json"})
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+ c.Assert(r.ResourceType(), qt.Equals, "application")
+}
+
+func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) {
+	c := qt.New(t)
+	spec := newTestResourceSpec(specDescriptor{c: c, baseURL: "https://example.com/docs"})
+
+	writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
+	bfs := afero.NewBasePathFs(spec.Fs.Source, "content")
+
+	// Permalinks should include the /docs sub path from the baseURL.
+	r, err := spec.New(ResourceSourceDescriptor{Fs: bfs, SourceFilename: filepath.FromSlash("a/b/logo.png")})
+
+	c.Assert(err, qt.IsNil)
+	c.Assert(r, qt.Not(qt.IsNil))
+	c.Assert(r.ResourceType(), qt.Equals, "image")
+	c.Assert(r.RelPermalink(), qt.Equals, "/docs/a/b/logo.png")
+	c.Assert(r.Permalink(), qt.Equals, "https://example.com/docs/a/b/logo.png")
+}
+
+var pngType, _ = media.FromStringAndExt("image/png", "png")
+
+func TestResourcesByType(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ resources := resource.Resources{
+ spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/a/logo.png", "logo.css", pngType),
+ spec.newGenericResource(nil, nil, nil, "/a/foo2.css", "foo2.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/a/foo3.css", "foo3.css", media.CSSType),
+ }
+
+ c.Assert(len(resources.ByType("text")), qt.Equals, 3)
+ c.Assert(len(resources.ByType("image")), qt.Equals, 1)
+}
+
+func TestResourcesGetByPrefix(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ resources := resource.Resources{
+ spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType),
+ spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType),
+ spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType),
+ }
+
+ c.Assert(resources.GetMatch("asdf*"), qt.IsNil)
+ c.Assert(resources.GetMatch("logo*").RelPermalink(), qt.Equals, "/logo1.png")
+ c.Assert(resources.GetMatch("loGo*").RelPermalink(), qt.Equals, "/logo1.png")
+ c.Assert(resources.GetMatch("logo2*").RelPermalink(), qt.Equals, "/Logo2.png")
+ c.Assert(resources.GetMatch("foo2*").RelPermalink(), qt.Equals, "/foo2.css")
+ c.Assert(resources.GetMatch("foo1*").RelPermalink(), qt.Equals, "/foo1.css")
+ c.Assert(resources.GetMatch("foo1*").RelPermalink(), qt.Equals, "/foo1.css")
+ c.Assert(resources.GetMatch("asdfasdf*"), qt.IsNil)
+
+ c.Assert(len(resources.Match("logo*")), qt.Equals, 2)
+ c.Assert(len(resources.Match("logo2*")), qt.Equals, 1)
+
+ logo := resources.GetMatch("logo*")
+ c.Assert(logo.Params(), qt.Not(qt.IsNil))
+ c.Assert(logo.Name(), qt.Equals, "logo1.png")
+ c.Assert(logo.Title(), qt.Equals, "logo1.png")
+}
+
+func TestResourcesGetMatch(t *testing.T) {
+ c := qt.New(t)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ resources := resource.Resources{
+ spec.newGenericResource(nil, nil, nil, "/a/foo1.css", "foo1.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/a/logo1.png", "logo1.png", pngType),
+ spec.newGenericResource(nil, nil, nil, "/b/Logo2.png", "Logo2.png", pngType),
+ spec.newGenericResource(nil, nil, nil, "/b/foo2.css", "foo2.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/b/foo3.css", "foo3.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/b/c/foo4.css", "c/foo4.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/b/c/foo5.css", "c/foo5.css", media.CSSType),
+ spec.newGenericResource(nil, nil, nil, "/b/c/d/foo6.css", "c/d/foo6.css", media.CSSType),
+ }
+
+ c.Assert(resources.GetMatch("logo*").RelPermalink(), qt.Equals, "/logo1.png")
+ c.Assert(resources.GetMatch("loGo*").RelPermalink(), qt.Equals, "/logo1.png")
+ c.Assert(resources.GetMatch("logo2*").RelPermalink(), qt.Equals, "/Logo2.png")
+ c.Assert(resources.GetMatch("foo2*").RelPermalink(), qt.Equals, "/foo2.css")
+ c.Assert(resources.GetMatch("foo1*").RelPermalink(), qt.Equals, "/foo1.css")
+ c.Assert(resources.GetMatch("foo1*").RelPermalink(), qt.Equals, "/foo1.css")
+ c.Assert(resources.GetMatch("*/foo*").RelPermalink(), qt.Equals, "/c/foo4.css")
+
+ c.Assert(resources.GetMatch("asdfasdf"), qt.IsNil)
+
+ c.Assert(len(resources.Match("Logo*")), qt.Equals, 2)
+ c.Assert(len(resources.Match("logo2*")), qt.Equals, 1)
+ c.Assert(len(resources.Match("c/*")), qt.Equals, 2)
+
+ c.Assert(len(resources.Match("**.css")), qt.Equals, 6)
+ c.Assert(len(resources.Match("**/*.css")), qt.Equals, 3)
+ c.Assert(len(resources.Match("c/**/*.css")), qt.Equals, 1)
+
+	// Matches all CSS files below c/ (including in c/d/).
+ c.Assert(len(resources.Match("c/**.css")), qt.Equals, 3)
+
+ // Matches all CSS files below c/ (including in c/d/)
+ c.Assert(len(resources.Match("c/**.css")), qt.Equals, 3)
+
+ // Patterns beginning with a slash will not match anything.
+ // We could maybe consider trimming that slash, but let's be explicit about this.
+ // (it is possible for users to do a rename)
+ // This is analogous to standing in a directory and doing "ls *.*".
+ c.Assert(len(resources.Match("/c/**.css")), qt.Equals, 0)
+}
+
+func BenchmarkResourcesMatch(b *testing.B) {
+ resources := benchResources(b)
+ prefixes := []string{"abc*", "jkl*", "nomatch*", "sub/*"}
+
+ b.RunParallel(func(pb *testing.PB) {
+ for pb.Next() {
+ resources.Match(prefixes[rand.Intn(len(prefixes))])
+ }
+ })
+}
+
+// This adds a benchmark for the a100 test case as described by Russ Cox here:
+// https://research.swtch.com/glob (really interesting article)
+// I don't expect Hugo users to "stumble upon" this problem, so this is more to satisfy
+// my own curiosity.
+func BenchmarkResourcesMatchA100(b *testing.B) {
+ c := qt.New(b)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ a100 := strings.Repeat("a", 100)
+ pattern := "a*a*a*a*a*a*a*a*b"
+
+ resources := resource.Resources{spec.newGenericResource(nil, nil, nil, "/a/"+a100, a100, media.CSSType)}
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ resources.Match(pattern)
+ }
+}
+
+func benchResources(b *testing.B) resource.Resources {
+ c := qt.New(b)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ var resources resource.Resources
+
+ for i := 0; i < 30; i++ {
+ name := fmt.Sprintf("abcde%d_%d.css", i%5, i)
+ resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType))
+ }
+
+ for i := 0; i < 30; i++ {
+ name := fmt.Sprintf("efghi%d_%d.css", i%5, i)
+ resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType))
+ }
+
+ for i := 0; i < 30; i++ {
+ name := fmt.Sprintf("jklmn%d_%d.css", i%5, i)
+ resources = append(resources, spec.newGenericResource(nil, nil, nil, "/b/sub/"+name, "sub/"+name, media.CSSType))
+ }
+
+ return resources
+}
+
+func BenchmarkAssignMetadata(b *testing.B) {
+ c := qt.New(b)
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ for i := 0; i < b.N; i++ {
+ b.StopTimer()
+ var resources resource.Resources
+ meta := []map[string]any{
+ {
+ "title": "Foo #:counter",
+ "name": "Foo Name #:counter",
+ "src": "foo1*",
+ },
+ {
+ "title": "Rest #:counter",
+ "name": "Rest Name #:counter",
+ "src": "*",
+ },
+ }
+ for i := 0; i < 20; i++ {
+ name := fmt.Sprintf("foo%d_%d.css", i%5, i)
+ resources = append(resources, spec.newGenericResource(nil, nil, nil, "/a/"+name, name, media.CSSType))
+ }
+ b.StartTimer()
+
+ if err := AssignMetadata(meta, resources...); err != nil {
+ b.Fatal(err)
+ }
+
+ }
+}
diff --git a/resources/resource_transformers/babel/babel.go b/resources/resource_transformers/babel/babel.go
new file mode 100644
index 000000000..9a9110f62
--- /dev/null
+++ b/resources/resource_transformers/babel/babel.go
@@ -0,0 +1,239 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package babel
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strconv"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/resources/internal"
+
+ "github.com/mitchellh/mapstructure"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// Options from https://babeljs.io/docs/en/options
+type Options struct {
+ Config string // Custom path to config file
+
+ Minified bool
+ NoComments bool
+ Compact *bool
+ Verbose bool
+ NoBabelrc bool
+ SourceMap string
+}
+
+// DecodeOptions decodes options to and generates command flags
+func DecodeOptions(m map[string]any) (opts Options, err error) {
+ if m == nil {
+ return
+ }
+ err = mapstructure.WeakDecode(m, &opts)
+ return
+}
+
+func (opts Options) toArgs() []any {
+ var args []any
+
+ // external is not a known constant on the babel command line
+ // .sourceMaps must be a boolean, "inline", "both", or undefined
+ switch opts.SourceMap {
+ case "external":
+ args = append(args, "--source-maps")
+ case "inline":
+ args = append(args, "--source-maps=inline")
+ }
+ if opts.Minified {
+ args = append(args, "--minified")
+ }
+ if opts.NoComments {
+ args = append(args, "--no-comments")
+ }
+ if opts.Compact != nil {
+ args = append(args, "--compact="+strconv.FormatBool(*opts.Compact))
+ }
+ if opts.Verbose {
+ args = append(args, "--verbose")
+ }
+ if opts.NoBabelrc {
+ args = append(args, "--no-babelrc")
+ }
+ return args
+}
+
+// Client is the client used to do Babel transformations.
+type Client struct {
+ rs *resources.Spec
+}
+
+// New creates a new Client with the given specification.
+func New(rs *resources.Spec) *Client {
+ return &Client{rs: rs}
+}
+
+type babelTransformation struct {
+ options Options
+ rs *resources.Spec
+}
+
+func (t *babelTransformation) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey("babel", t.options)
+}
+
+// Transform shells out to babel-cli to do the heavy lifting.
+// For this to work, you need some additional tools. To install them globally:
+// npm install -g @babel/core @babel/cli
+// If you want to use presets or plugins such as @babel/preset-env
+// Then you should install those globally as well. e.g:
+// npm install -g @babel/preset-env
+// Instead of installing globally, you can also install everything as a dev-dependency (--save-dev instead of -g)
+func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
+	const binaryName = "babel"
+
+	ex := t.rs.ExecHelper
+
+	if err := ex.Sec().CheckAllowedExec(binaryName); err != nil {
+		return err
+	}
+
+	var configFile string
+	logger := t.rs.Logger
+
+	var errBuf bytes.Buffer
+	infoW := loggers.LoggerToWriterWithPrefix(logger.Info(), "babel")
+
+	if t.options.Config != "" {
+		configFile = t.options.Config
+	} else {
+		configFile = "babel.config.js"
+	}
+
+	configFile = filepath.Clean(configFile)
+
+	// We need an absolute filename to the config file.
+	if !filepath.IsAbs(configFile) {
+		configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile)
+		if configFile == "" && t.options.Config != "" {
+			// Only fail if the user specified config file is not found.
+			return fmt.Errorf("babel config %q not found", t.options.Config)
+		}
+	}
+
+	ctx.ReplaceOutPathExtension(".js")
+
+	var cmdArgs []any
+
+	if configFile != "" {
+		logger.Infoln("babel: use config file", configFile)
+		cmdArgs = []any{"--config-file", configFile}
+	}
+
+	if optArgs := t.options.toArgs(); len(optArgs) > 0 {
+		cmdArgs = append(cmdArgs, optArgs...)
+	}
+	cmdArgs = append(cmdArgs, "--filename="+ctx.SourcePath)
+
+	// Create compile into a real temp file:
+	// 1. separate stdout/stderr messages from babel (https://github.com/gohugoio/hugo/issues/8136)
+	// 2. allow generation and retrieval of external source map.
+	compileOutput, err := ioutil.TempFile("", "compileOut-*.js")
+	if err != nil {
+		return err
+	}
+
+	cmdArgs = append(cmdArgs, "--out-file="+compileOutput.Name())
+	stderr := io.MultiWriter(infoW, &errBuf)
+	cmdArgs = append(cmdArgs, hexec.WithStderr(stderr))
+	cmdArgs = append(cmdArgs, hexec.WithStdout(stderr))
+	cmdArgs = append(cmdArgs, hexec.WithEnviron(hugo.GetExecEnviron(t.rs.WorkingDir, t.rs.Cfg, t.rs.BaseFs.Assets.Fs)))
+
+	defer os.Remove(compileOutput.Name())
+
+	// Babel reads the source from stdin (fed below) and writes the compiled
+	// output (and an optional external source map) to --out-file above.
+	cmd, err := ex.Npx(binaryName, cmdArgs...)
+
+	if err != nil {
+		if hexec.IsNotFound(err) {
+			// This may be on a CI server etc. Will fall back to pre-built assets.
+			return herrors.ErrFeatureNotAvailable
+		}
+		return err
+	}
+
+	stdin, err := cmd.StdinPipe()
+
+	if err != nil {
+		return err
+	}
+
+	go func() {
+		defer stdin.Close()
+		io.Copy(stdin, ctx.From)
+	}()
+
+	err = cmd.Run()
+	if err != nil {
+		if hexec.IsNotFound(err) {
+			return herrors.ErrFeatureNotAvailable
+		}
+		return fmt.Errorf("%s: %w", errBuf.String(), err)
+	}
+
+	content, err := ioutil.ReadAll(compileOutput)
+	if err != nil {
+		return err
+	}
+
+	mapFile := compileOutput.Name() + ".map"
+	if _, err := os.Stat(mapFile); err == nil {
+		defer os.Remove(mapFile)
+		sourceMap, err := ioutil.ReadFile(mapFile)
+		if err != nil {
+			return err
+		}
+		if err = ctx.PublishSourceMap(string(sourceMap)); err != nil {
+			return err
+		}
+		targetPath := path.Base(ctx.OutPath) + ".map"
+		re := regexp.MustCompile(`//# sourceMappingURL=.*\n?`)
+		content = []byte(re.ReplaceAllString(string(content), "//# sourceMappingURL="+targetPath+"\n"))
+	}
+
+	_, err = ctx.To.Write(content)
+
+	return err
+}
+
+// Process transforms the given Resource with the Babel processor.
+func (c *Client) Process(res resources.ResourceTransformer, options Options) (resource.Resource, error) {
+ return res.Transform(
+ &babelTransformation{rs: c.rs, options: options},
+ )
+}
diff --git a/resources/resource_transformers/babel/integration_test.go b/resources/resource_transformers/babel/integration_test.go
new file mode 100644
index 000000000..164e7fd40
--- /dev/null
+++ b/resources/resource_transformers/babel/integration_test.go
@@ -0,0 +1,94 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package babel_test
+
+import (
+ "testing"
+
+ jww "github.com/spf13/jwalterweatherman"
+
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestTransformBabel(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("Skip long running test when running locally")
+ }
+
+ files := `
+-- assets/js/main.js --
+/* A Car */
+class Car {
+ constructor(brand) {
+ this.carname = brand;
+ }
+}
+-- assets/js/main2.js --
+/* A Car2 */
+class Car2 {
+ constructor(brand) {
+ this.carname = brand;
+ }
+}
+-- babel.config.js --
+console.error("Hugo Environment:", process.env.HUGO_ENVIRONMENT );
+
+module.exports = {
+ presets: ["@babel/preset-env"],
+};
+-- config.toml --
+disablekinds = ['taxonomy', 'term', 'page']
+[security]
+ [security.exec]
+ allow = ['^npx$', '^babel$']
+-- layouts/index.html --
+{{ $options := dict "noComments" true }}
+{{ $transpiled := resources.Get "js/main.js" | babel -}}
+Transpiled: {{ $transpiled.Content | safeJS }}
+
+{{ $transpiled := resources.Get "js/main2.js" | babel (dict "sourceMap" "inline") -}}
+Transpiled2: {{ $transpiled.Content | safeJS }}
+
+{{ $transpiled := resources.Get "js/main2.js" | babel (dict "sourceMap" "external") -}}
+Transpiled3: {{ $transpiled.Permalink }}
+-- package.json --
+{
+ "scripts": {},
+
+ "devDependencies": {
+ "@babel/cli": "7.8.4",
+ "@babel/core": "7.9.0",
+ "@babel/preset-env": "7.9.5"
+ }
+}
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ LogLevel: jww.LevelInfo,
+ }).Build()
+
+ b.AssertLogContains("babel: Hugo Environment: production")
+ b.AssertFileContent("public/index.html", `var Car2 =`)
+ b.AssertFileContent("public/js/main2.js", `var Car2 =`)
+ b.AssertFileContent("public/js/main2.js.map", `{"version":3,`)
+ b.AssertFileContent("public/index.html", `
+//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozL`)
+}
diff --git a/resources/resource_transformers/htesting/testhelpers.go b/resources/resource_transformers/htesting/testhelpers.go
new file mode 100644
index 000000000..3c91fc0dd
--- /dev/null
+++ b/resources/resource_transformers/htesting/testhelpers.go
@@ -0,0 +1,78 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package htesting
+
+import (
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/spf13/afero"
+)
+
+func NewTestResourceSpec() (*resources.Spec, error) {
+ cfg := config.NewWithTestDefaults()
+
+ imagingCfg := map[string]any{
+ "resampleFilter": "linear",
+ "quality": 68,
+ "anchor": "left",
+ }
+
+ cfg.Set("imaging", imagingCfg)
+
+ fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(afero.NewMemMapFs()), cfg)
+
+ s, err := helpers.NewPathSpec(fs, cfg, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ filecaches, err := filecache.NewCaches(s)
+ if err != nil {
+ return nil, err
+ }
+
+ spec, err := resources.NewSpec(s, filecaches, nil, nil, nil, nil, output.DefaultFormats, media.DefaultTypes)
+ return spec, err
+}
+
+func NewResourceTransformer(filename, content string) (resources.ResourceTransformer, error) {
+ spec, err := NewTestResourceSpec()
+ if err != nil {
+ return nil, err
+ }
+ return NewResourceTransformerForSpec(spec, filename, content)
+}
+
+func NewResourceTransformerForSpec(spec *resources.Spec, filename, content string) (resources.ResourceTransformer, error) {
+ filename = filepath.FromSlash(filename)
+
+ fs := spec.Fs.Source
+ if err := afero.WriteFile(fs, filename, []byte(content), 0777); err != nil {
+ return nil, err
+ }
+
+ r, err := spec.New(resources.ResourceSourceDescriptor{Fs: fs, SourceFilename: filename})
+ if err != nil {
+ return nil, err
+ }
+
+ return r.(resources.ResourceTransformer), nil
+}
diff --git a/resources/resource_transformers/integrity/integrity.go b/resources/resource_transformers/integrity/integrity.go
new file mode 100644
index 000000000..e15754685
--- /dev/null
+++ b/resources/resource_transformers/integrity/integrity.go
@@ -0,0 +1,120 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package integrity
+
+import (
+ "crypto/md5"
+ "crypto/sha256"
+ "crypto/sha512"
+ "encoding/base64"
+ "encoding/hex"
+ "fmt"
+ "hash"
+ "html/template"
+ "io"
+
+ "github.com/gohugoio/hugo/resources/internal"
+
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+const defaultHashAlgo = "sha256"
+
+// Client contains methods to fingerprint (cachebusting) and other integrity-related
+// methods.
+type Client struct {
+ rs *resources.Spec
+}
+
+// New creates a new Client with the given specification.
+func New(rs *resources.Spec) *Client {
+ return &Client{rs: rs}
+}
+
+type fingerprintTransformation struct {
+ algo string
+}
+
+func (t *fingerprintTransformation) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey("fingerprint", t.algo)
+}
+
+// Transform hashes the Resource content with the configured algorithm and
+// inserts the hex digest before the extension in the output filename.
+func (t *fingerprintTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
+	h, err := newHash(t.algo)
+	if err != nil {
+		return err
+	}
+
+	var w io.Writer
+	if rc, ok := ctx.From.(io.ReadSeeker); ok {
+		// This transformation does not change the content, so try to
+		// avoid writing to To if we can.
+		defer rc.Seek(0, 0)
+		w = h
+	} else {
+		w = io.MultiWriter(h, ctx.To)
+	}
+	if _, err := io.Copy(w, ctx.From); err != nil {
+		return err
+	}
+	// hash.Hash.Sum never fails, so the digest needs no error check.
+	d := h.Sum(nil)
+
+	ctx.Data["Integrity"] = integrity(t.algo, d)
+	ctx.AddOutPathIdentifier("." + hex.EncodeToString(d[:]))
+	return nil
+}
+
+func newHash(algo string) (hash.Hash, error) {
+ switch algo {
+ case "md5":
+ return md5.New(), nil
+ case "sha256":
+ return sha256.New(), nil
+ case "sha384":
+ return sha512.New384(), nil
+ case "sha512":
+ return sha512.New(), nil
+ default:
+ return nil, fmt.Errorf("unsupported crypto algo: %q, use either md5, sha256, sha384 or sha512", algo)
+ }
+}
+
+// Fingerprint applies fingerprinting of the given resource and hash algorithm.
+// It defaults to sha256 if none given, and the options are md5, sha256, sha384 or sha512.
+// The same algo is used for both the fingerprinting part (aka cache busting) and
+// the base64-encoded Subresource Integrity hash, so you will have to stay away from
+// md5 if you plan to use both.
+// See https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity
+func (c *Client) Fingerprint(res resources.ResourceTransformer, algo string) (resource.Resource, error) {
+ if algo == "" {
+ algo = defaultHashAlgo
+ }
+
+ return res.Transform(&fingerprintTransformation{algo: algo})
+}
+
+func integrity(algo string, sum []byte) template.HTMLAttr {
+ encoded := base64.StdEncoding.EncodeToString(sum)
+ return template.HTMLAttr(algo + "-" + encoded)
+}
+
+func digest(h hash.Hash) ([]byte, error) {
+ sum := h.Sum(nil)
+ return sum, nil
+}
diff --git a/resources/resource_transformers/integrity/integrity_test.go b/resources/resource_transformers/integrity/integrity_test.go
new file mode 100644
index 000000000..cba993d1e
--- /dev/null
+++ b/resources/resource_transformers/integrity/integrity_test.go
@@ -0,0 +1,69 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package integrity
+
+import (
+ "html/template"
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/resources/resource_transformers/htesting"
+)
+
+func TestHashFromAlgo(t *testing.T) {
+ for _, algo := range []struct {
+ name string
+ bits int
+ }{
+ {"md5", 128},
+ {"sha256", 256},
+ {"sha384", 384},
+ {"sha512", 512},
+ {"shaman", -1},
+ } {
+ t.Run(algo.name, func(t *testing.T) {
+ c := qt.New(t)
+ h, err := newHash(algo.name)
+ if algo.bits > 0 {
+ c.Assert(err, qt.IsNil)
+ c.Assert(h.Size(), qt.Equals, algo.bits/8)
+ } else {
+ c.Assert(err, qt.Not(qt.IsNil))
+ c.Assert(err.Error(), qt.Contains, "use either md5, sha256, sha384 or sha512")
+ }
+ })
+ }
+}
+
+func TestTransform(t *testing.T) {
+ c := qt.New(t)
+
+ spec, err := htesting.NewTestResourceSpec()
+ c.Assert(err, qt.IsNil)
+ client := New(spec)
+
+ r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.txt", "Hugo Rocks!")
+ c.Assert(err, qt.IsNil)
+
+ transformed, err := client.Fingerprint(r, "")
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(transformed.RelPermalink(), qt.Equals, "/hugo.a5ad1c6961214a55de53c1ce6e60d27b6b761f54851fa65e33066460dfa6a0db.txt")
+ c.Assert(transformed.Data(), qt.DeepEquals, map[string]any{"Integrity": template.HTMLAttr("sha256-pa0caWEhSlXeU8HObmDSe2t2H1SFH6ZeMwZkYN+moNs=")})
+ content, err := transformed.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+ c.Assert(content, qt.Equals, "Hugo Rocks!")
+}
diff --git a/resources/resource_transformers/js/build.go b/resources/resource_transformers/js/build.go
new file mode 100644
index 000000000..23e28f675
--- /dev/null
+++ b/resources/resource_transformers/js/build.go
@@ -0,0 +1,222 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package js
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "errors"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/text"
+
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/internal"
+
+ "github.com/evanw/esbuild/pkg/api"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// Client context for ESBuild.
+type Client struct {
+ rs *resources.Spec
+ sfs *filesystems.SourceFilesystem
+}
+
+// New creates a new client context.
+func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) *Client {
+ return &Client{
+ rs: rs,
+ sfs: fs,
+ }
+}
+
+type buildTransformation struct {
+ optsm map[string]any
+ c *Client
+}
+
+func (t *buildTransformation) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey("jsbuild", t.optsm)
+}
+
+func (t *buildTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
+ ctx.OutMediaType = media.JavascriptType
+
+ opts, err := decodeOptions(t.optsm)
+ if err != nil {
+ return err
+ }
+
+ if opts.TargetPath != "" {
+ ctx.OutPath = opts.TargetPath
+ } else {
+ ctx.ReplaceOutPathExtension(".js")
+ }
+
+ src, err := ioutil.ReadAll(ctx.From)
+ if err != nil {
+ return err
+ }
+
+ opts.sourceDir = filepath.FromSlash(path.Dir(ctx.SourcePath))
+ opts.resolveDir = t.c.rs.WorkingDir // where node_modules gets resolved
+ opts.contents = string(src)
+ opts.mediaType = ctx.InMediaType
+
+ buildOptions, err := toBuildOptions(opts)
+ if err != nil {
+ return err
+ }
+
+ buildOptions.Plugins, err = createBuildPlugins(t.c, opts)
+ if err != nil {
+ return err
+ }
+
+ if buildOptions.Sourcemap == api.SourceMapExternal && buildOptions.Outdir == "" {
+ buildOptions.Outdir, err = ioutil.TempDir(os.TempDir(), "compileOutput")
+ if err != nil {
+ return err
+ }
+ defer os.Remove(buildOptions.Outdir)
+ }
+
+ if opts.Inject != nil {
+ // Resolve the absolute filenames.
+ for i, ext := range opts.Inject {
+ impPath := filepath.FromSlash(ext)
+ if filepath.IsAbs(impPath) {
+ return fmt.Errorf("inject: absolute paths not supported, must be relative to /assets")
+ }
+
+ m := resolveComponentInAssets(t.c.rs.Assets.Fs, impPath)
+
+ if m == nil {
+ return fmt.Errorf("inject: file %q not found", ext)
+ }
+
+ opts.Inject[i] = m.Filename
+
+ }
+
+ buildOptions.Inject = opts.Inject
+
+ }
+
+ result := api.Build(buildOptions)
+
+ if len(result.Errors) > 0 {
+
+ createErr := func(msg api.Message) error {
+ loc := msg.Location
+ if loc == nil {
+ return errors.New(msg.Text)
+ }
+ path := loc.File
+ if path == stdinImporter {
+ path = ctx.SourcePath
+ }
+
+ errorMessage := msg.Text
+ errorMessage = strings.ReplaceAll(errorMessage, nsImportHugo+":", "")
+
+ var (
+ f afero.File
+ err error
+ )
+
+ if strings.HasPrefix(path, nsImportHugo) {
+ path = strings.TrimPrefix(path, nsImportHugo+":")
+ f, err = hugofs.Os.Open(path)
+ } else {
+ var fi os.FileInfo
+ fi, err = t.c.sfs.Fs.Stat(path)
+ if err == nil {
+ m := fi.(hugofs.FileMetaInfo).Meta()
+ path = m.Filename
+ f, err = m.Open()
+ }
+
+ }
+
+ if err == nil {
+ fe := herrors.
+ NewFileErrorFromName(errors.New(errorMessage), path).
+ UpdatePosition(text.Position{Offset: -1, LineNumber: loc.Line, ColumnNumber: loc.Column}).
+ UpdateContent(f, nil)
+
+ f.Close()
+ return fe
+ }
+
+ return fmt.Errorf("%s", errorMessage)
+ }
+
+ var errors []error
+
+ for _, msg := range result.Errors {
+ errors = append(errors, createErr(msg))
+ }
+
+ // Return 1, log the rest.
+ for i, err := range errors {
+ if i > 0 {
+ t.c.rs.Logger.Errorf("js.Build failed: %s", err)
+ }
+ }
+
+ return errors[0]
+ }
+
+ if buildOptions.Sourcemap == api.SourceMapExternal {
+ content := string(result.OutputFiles[1].Contents)
+ symPath := path.Base(ctx.OutPath) + ".map"
+ re := regexp.MustCompile(`//# sourceMappingURL=.*\n?`)
+ content = re.ReplaceAllString(content, "//# sourceMappingURL="+symPath+"\n")
+
+ if err = ctx.PublishSourceMap(string(result.OutputFiles[0].Contents)); err != nil {
+ return err
+ }
+ _, err := ctx.To.Write([]byte(content))
+ if err != nil {
+ return err
+ }
+ } else {
+ _, err := ctx.To.Write(result.OutputFiles[0].Contents)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// Process runs the ESBuild transformation on the given resource.
+func (c *Client) Process(res resources.ResourceTransformer, opts map[string]any) (resource.Resource, error) {
+ return res.Transform(
+ &buildTransformation{c: c, optsm: opts},
+ )
+}
diff --git a/resources/resource_transformers/js/build_test.go b/resources/resource_transformers/js/build_test.go
new file mode 100644
index 000000000..30a4490ed
--- /dev/null
+++ b/resources/resource_transformers/js/build_test.go
@@ -0,0 +1,14 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package js
diff --git a/resources/resource_transformers/js/integration_test.go b/resources/resource_transformers/js/integration_test.go
new file mode 100644
index 000000000..b9f466873
--- /dev/null
+++ b/resources/resource_transformers/js/integration_test.go
@@ -0,0 +1,261 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package js_test
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestBuildVariants(t *testing.T) {
+ c := qt.New(t)
+
+ mainWithImport := `
+-- config.toml --
+disableKinds=["page", "section", "taxonomy", "term", "sitemap", "robotsTXT"]
+-- assets/js/main.js --
+import { hello1, hello2 } from './util1';
+hello1();
+hello2();
+-- assets/js/util1.js --
+import { hello3 } from './util2';
+export function hello1() {
+ return 'abcd';
+}
+export function hello2() {
+ return hello3();
+}
+-- assets/js/util2.js --
+export function hello3() {
+ return 'efgh';
+}
+-- layouts/index.html --
+{{ $js := resources.Get "js/main.js" | js.Build }}
+JS Content:{{ $js.Content }}:End:
+
+ `
+
+ c.Run("Basic", func(c *qt.C) {
+ b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, NeedsOsFS: true, TxtarString: mainWithImport}).Build()
+
+ b.AssertFileContent("public/index.html", `abcd`)
+ })
+
+ c.Run("Edit Import", func(c *qt.C) {
+ b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build()
+
+ b.AssertFileContent("public/index.html", `abcd`)
+ b.EditFileReplace("assets/js/util1.js", func(s string) string { return strings.ReplaceAll(s, "abcd", "1234") }).Build()
+ b.AssertFileContent("public/index.html", `1234`)
+ })
+
+ c.Run("Edit Import Nested", func(c *qt.C) {
+ b := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, Running: true, NeedsOsFS: true, TxtarString: mainWithImport}).Build()
+
+ b.AssertFileContent("public/index.html", `efgh`)
+ b.EditFileReplace("assets/js/util2.js", func(s string) string { return strings.ReplaceAll(s, "efgh", "1234") }).Build()
+ b.AssertFileContent("public/index.html", `1234`)
+ })
+}
+
+func TestBuildWithModAndNpm(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("skip (relative) long running modules test when running locally")
+ }
+
+ c := qt.New(t)
+
+ files := `
+-- config.toml --
+baseURL = "https://example.org"
+disableKinds=["page", "section", "taxonomy", "term", "sitemap", "robotsTXT"]
+[module]
+[[module.imports]]
+path="github.com/gohugoio/hugoTestProjectJSModImports"
+-- go.mod --
+module github.com/gohugoio/tests/testHugoModules
+
+go 1.16
+
+require github.com/gohugoio/hugoTestProjectJSModImports v0.10.0 // indirect
+-- package.json --
+{
+ "dependencies": {
+ "date-fns": "^2.16.1"
+ }
+}
+
+`
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ TxtarString: files,
+ Verbose: true,
+ }).Build()
+
+ b.AssertFileContent("public/js/main.js", `
+greeting: "greeting configured in mod2"
+Hello1 from mod1: $
+return "Hello2 from mod1";
+var Hugo = "Rocks!";
+Hello3 from mod2. Date from date-fns: ${today}
+Hello from lib in the main project
+Hello5 from mod2.
+var myparam = "Hugo Rocks!";
+shim cwd
+`)
+
+ // React JSX, verify the shimming.
+ b.AssertFileContent("public/js/like.js", filepath.FromSlash(`@v0.10.0/assets/js/shims/react.js
+module.exports = window.ReactDOM;
+`))
+}
+
+func TestBuildWithNpm(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("skip (relative) long running modules test when running locally")
+ }
+
+ c := qt.New(t)
+
+ files := `
+-- assets/js/included.js --
+console.log("included");
+-- assets/js/main.js --
+import "./included";
+ import { toCamelCase } from "to-camel-case";
+
+ console.log("main");
+ console.log("To camel:", toCamelCase("space case"));
+-- assets/js/myjsx.jsx --
+import * as React from 'react'
+import * as ReactDOM from 'react-dom'
+
+ ReactDOM.render(
+ <h1>Hello, world!</h1>,
+ document.getElementById('root')
+ );
+-- assets/js/myts.ts --
+function greeter(person: string) {
+ return "Hello, " + person;
+}
+let user = [0, 1, 2];
+document.body.textContent = greeter(user);
+-- config.toml --
+disablekinds = ['taxonomy', 'term', 'page']
+-- content/p1.md --
+Content.
+-- data/hugo.toml --
+slogan = "Hugo Rocks!"
+-- i18n/en.yaml --
+hello:
+ other: "Hello"
+-- i18n/fr.yaml --
+hello:
+ other: "Bonjour"
+-- layouts/index.html --
+{{ $options := dict "minify" false "externals" (slice "react" "react-dom") }}
+{{ $js := resources.Get "js/main.js" | js.Build $options }}
+JS: {{ template "print" $js }}
+{{ $jsx := resources.Get "js/myjsx.jsx" | js.Build $options }}
+JSX: {{ template "print" $jsx }}
+{{ $ts := resources.Get "js/myts.ts" | js.Build (dict "sourcemap" "inline")}}
+TS: {{ template "print" $ts }}
+{{ $ts2 := resources.Get "js/myts.ts" | js.Build (dict "sourcemap" "external" "TargetPath" "js/myts2.js")}}
+TS2: {{ template "print" $ts2 }}
+{{ define "print" }}RelPermalink: {{.RelPermalink}}|MIME: {{ .MediaType }}|Content: {{ .Content | safeJS }}{{ end }}
+-- package.json --
+{
+ "scripts": {},
+
+ "dependencies": {
+ "to-camel-case": "1.0.0"
+ }
+}
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ TxtarString: files,
+ }).Build()
+
+ b.AssertFileContent("public/js/myts.js", `//# sourceMappingURL=data:application/json;base64,ewogICJ2ZXJz`)
+ b.AssertFileContent("public/js/myts2.js.map", `"version": 3,`)
+ b.AssertFileContent("public/index.html", `
+ console.log(&#34;included&#34;);
+ if (hasSpace.test(string))
+ var React = __toESM(__require(&#34;react&#34;));
+ function greeter(person) {
+`)
+}
+
+func TestBuildError(t *testing.T) {
+ c := qt.New(t)
+
+ filesTemplate := `
+-- config.toml --
+disableKinds=["page", "section", "taxonomy", "term", "sitemap", "robotsTXT"]
+-- assets/js/main.js --
+// A comment.
+import { hello1, hello2 } from './util1';
+hello1();
+hello2();
+-- assets/js/util1.js --
+/* Some
+comments.
+*/
+import { hello3 } from './util2';
+export function hello1() {
+ return 'abcd';
+}
+export function hello2() {
+ return hello3();
+}
+-- assets/js/util2.js --
+export function hello3() {
+ return 'efgh';
+}
+-- layouts/index.html --
+{{ $js := resources.Get "js/main.js" | js.Build }}
+JS Content:{{ $js.Content }}:End:
+
+ `
+
+ c.Run("Import from main not found", func(c *qt.C) {
+ c.Parallel()
+ files := strings.Replace(filesTemplate, "import { hello1, hello2 }", "import { hello1, hello2, FOOBAR }", 1)
+ b, err := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, NeedsOsFS: true, TxtarString: files}).BuildE()
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, `main.js:2:25": No matching export`)
+ })
+
+ c.Run("Import from import not found", func(c *qt.C) {
+ c.Parallel()
+ files := strings.Replace(filesTemplate, "import { hello3 } from './util2';", "import { hello3, FOOBAR } from './util2';", 1)
+ b, err := hugolib.NewIntegrationTestBuilder(hugolib.IntegrationTestConfig{T: c, NeedsOsFS: true, TxtarString: files}).BuildE()
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, `util1.js:4:17": No matching export in`)
+ })
+
+}
diff --git a/resources/resource_transformers/js/options.go b/resources/resource_transformers/js/options.go
new file mode 100644
index 000000000..2987f5915
--- /dev/null
+++ b/resources/resource_transformers/js/options.go
@@ -0,0 +1,424 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package js
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/spf13/afero"
+
+ "github.com/evanw/esbuild/pkg/api"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/mitchellh/mapstructure"
+)
+
+const (
+ nsImportHugo = "ns-hugo"
+ nsParams = "ns-params"
+
+ stdinImporter = "<stdin>"
+)
+
+
+// Options holds the configuration options for the ESBuild transformation.
+ // If not set, the source path will be used as the base target path.
+ // Note that the target path's extension may change if the target MIME type
+ // is different, e.g. when the source is TypeScript.
+ TargetPath string
+
+ // Whether to minify the output.
+ Minify bool
+
+ // Whether to write source map files. One of: inline, external, or "" (none).
+ SourceMap string
+
+ // The language target.
+ // One of: es2015, es2016, es2017, es2018, es2019, es2020 or esnext.
+ // Default is esnext.
+ Target string
+
+ // The output format.
+ // One of: iife, cjs, esm
+ // Default is iife.
+ Format string
+
+ // External dependencies, e.g. "react".
+ Externals []string
+
+ // This option allows you to automatically replace a global variable with an import from another file.
+ // The filenames must be relative to /assets.
+ // See https://esbuild.github.io/api/#inject
+ Inject []string
+
+ // User defined symbols.
+ Defines map[string]any
+
+ // Maps a component import to another.
+ Shims map[string]string
+
+ // User defined params. Will be marshaled to JSON and available as "@params", e.g.
+ // import * as params from '@params';
+ Params any
+
+ // What to use instead of React.createElement.
+ JSXFactory string
+
+ // What to use instead of React.Fragment.
+ JSXFragment string
+
+ // There is/was a bug in WebKit with severe performance issue with the tracking
+ // of TDZ checks in JavaScriptCore.
+ //
+ // Enabling this flag removes the TDZ and `const` assignment checks and
+ // may improve performance of larger JS codebases until the WebKit fix
+ // is in widespread use.
+ //
+ // See https://bugs.webkit.org/show_bug.cgi?id=199866
+ // Deprecated: This no longer has any effect and will be removed.
+ // TODO(bep) remove. See https://github.com/evanw/esbuild/commit/869e8117b499ca1dbfc5b3021938a53ffe934dba
+ AvoidTDZ bool
+
+ mediaType media.Type
+ outDir string
+ contents string
+ sourceDir string
+ resolveDir string
+ tsConfig string
+}
+
+func decodeOptions(m map[string]any) (Options, error) {
+ var opts Options
+
+ if err := mapstructure.WeakDecode(m, &opts); err != nil {
+ return opts, err
+ }
+
+ if opts.TargetPath != "" {
+ opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
+ }
+
+ opts.Target = strings.ToLower(opts.Target)
+ opts.Format = strings.ToLower(opts.Format)
+
+ return opts, nil
+}
+
+var extensionToLoaderMap = map[string]api.Loader{
+ ".js": api.LoaderJS,
+ ".mjs": api.LoaderJS,
+ ".cjs": api.LoaderJS,
+ ".jsx": api.LoaderJSX,
+ ".ts": api.LoaderTS,
+ ".tsx": api.LoaderTSX,
+ ".css": api.LoaderCSS,
+ ".json": api.LoaderJSON,
+ ".txt": api.LoaderText,
+}
+
+func loaderFromFilename(filename string) api.Loader {
+ l, found := extensionToLoaderMap[filepath.Ext(filename)]
+ if found {
+ return l
+ }
+ return api.LoaderJS
+}
+
+func resolveComponentInAssets(fs afero.Fs, impPath string) *hugofs.FileMeta {
+ findFirst := func(base string) *hugofs.FileMeta {
+ // This is the most common sub-set of ESBuild's default extensions.
+ // We assume that imports of JSON, CSS etc. will be using their full
+ // name with extension.
+ for _, ext := range []string{".js", ".ts", ".tsx", ".jsx"} {
+ if strings.HasSuffix(impPath, ext) {
+ // Import of foo.js.js needs the full name.
+ return nil
+ }
+ if fi, err := fs.Stat(base + ext); err == nil {
+ return fi.(hugofs.FileMetaInfo).Meta()
+ }
+ }
+
+ // Not found.
+ return nil
+ }
+
+ var m *hugofs.FileMeta
+
+ // See issue #8949.
+ // We need to check if this is a regular file imported without an extension.
+ // There may be ambiguous situations where both foo.js and foo/index.js exist.
+ // This import order is in line with both how Node and ESBuild's native
+ // import resolver works.
+ // This was fixed in Hugo 0.88.
+
+ // It may be a regular file imported without an extension, e.g.
+ // foo or foo/index.
+ m = findFirst(impPath)
+ if m != nil {
+ return m
+ }
+ if filepath.Base(impPath) == "index" {
+ m = findFirst(impPath + ".esm")
+ if m != nil {
+ return m
+ }
+ }
+
+ // Finally check the path as is.
+ fi, err := fs.Stat(impPath)
+
+ if err == nil {
+ if fi.IsDir() {
+ m = findFirst(filepath.Join(impPath, "index"))
+ if m == nil {
+ m = findFirst(filepath.Join(impPath, "index.esm"))
+ }
+ } else {
+ m = fi.(hugofs.FileMetaInfo).Meta()
+ }
+ }
+
+ return m
+}
+
+func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) {
+ fs := c.rs.Assets
+
+ resolveImport := func(args api.OnResolveArgs) (api.OnResolveResult, error) {
+ impPath := args.Path
+ if opts.Shims != nil {
+ override, found := opts.Shims[impPath]
+ if found {
+ impPath = override
+ }
+ }
+ isStdin := args.Importer == stdinImporter
+ var relDir string
+ if !isStdin {
+ rel, found := fs.MakePathRelative(args.Importer)
+ if !found {
+ // Not in any of the /assets folders.
+ // This is an import from a node_modules, let
+ // ESBuild resolve this.
+ return api.OnResolveResult{}, nil
+ }
+ relDir = filepath.Dir(rel)
+ } else {
+ relDir = opts.sourceDir
+ }
+
+ // Imports not starting with a "." are assumed to live relative to /assets.
+ // Hugo makes no assumptions about the directory structure below /assets.
+ if relDir != "" && strings.HasPrefix(impPath, ".") {
+ impPath = filepath.Join(relDir, impPath)
+ }
+
+ m := resolveComponentInAssets(fs.Fs, impPath)
+
+ if m != nil {
+ // Store the source root so we can create a jsconfig.json
+ // to help intellisense when the build is done.
+ // This should be a small number of elements, and when
+ // in server mode, we may get stale entries on renames etc.,
+ // but that shouldn't matter too much.
+ c.rs.JSConfigBuilder.AddSourceRoot(m.SourceRoot)
+ return api.OnResolveResult{Path: m.Filename, Namespace: nsImportHugo}, nil
+ }
+
+ // Fall back to ESBuild's resolve.
+ return api.OnResolveResult{}, nil
+ }
+
+ importResolver := api.Plugin{
+ Name: "hugo-import-resolver",
+ Setup: func(build api.PluginBuild) {
+ build.OnResolve(api.OnResolveOptions{Filter: `.*`},
+ func(args api.OnResolveArgs) (api.OnResolveResult, error) {
+ return resolveImport(args)
+ })
+ build.OnLoad(api.OnLoadOptions{Filter: `.*`, Namespace: nsImportHugo},
+ func(args api.OnLoadArgs) (api.OnLoadResult, error) {
+ b, err := ioutil.ReadFile(args.Path)
+ if err != nil {
+ return api.OnLoadResult{}, fmt.Errorf("failed to read %q: %w", args.Path, err)
+ }
+ c := string(b)
+ return api.OnLoadResult{
+ // See https://github.com/evanw/esbuild/issues/502
+ // This allows all modules to resolve dependencies
+ // in the main project's node_modules.
+ ResolveDir: opts.resolveDir,
+ Contents: &c,
+ Loader: loaderFromFilename(args.Path),
+ }, nil
+ })
+ },
+ }
+
+ params := opts.Params
+ if params == nil {
+ // This way @params will always resolve to something.
+ params = make(map[string]any)
+ }
+
+ b, err := json.Marshal(params)
+ if err != nil {
+ return nil, fmt.Errorf("failed to marshal params: %w", err)
+ }
+ bs := string(b)
+ paramsPlugin := api.Plugin{
+ Name: "hugo-params-plugin",
+ Setup: func(build api.PluginBuild) {
+ build.OnResolve(api.OnResolveOptions{Filter: `^@params$`},
+ func(args api.OnResolveArgs) (api.OnResolveResult, error) {
+ return api.OnResolveResult{
+ Path: args.Path,
+ Namespace: nsParams,
+ }, nil
+ })
+ build.OnLoad(api.OnLoadOptions{Filter: `.*`, Namespace: nsParams},
+ func(args api.OnLoadArgs) (api.OnLoadResult, error) {
+ return api.OnLoadResult{
+ Contents: &bs,
+ Loader: api.LoaderJSON,
+ }, nil
+ })
+ },
+ }
+
+ return []api.Plugin{importResolver, paramsPlugin}, nil
+}
+
+func toBuildOptions(opts Options) (buildOptions api.BuildOptions, err error) {
+ var target api.Target
+ switch opts.Target {
+ case "", "esnext":
+ target = api.ESNext
+ case "es5":
+ target = api.ES5
+ case "es6", "es2015":
+ target = api.ES2015
+ case "es2016":
+ target = api.ES2016
+ case "es2017":
+ target = api.ES2017
+ case "es2018":
+ target = api.ES2018
+ case "es2019":
+ target = api.ES2019
+ case "es2020":
+ target = api.ES2020
+ default:
+ err = fmt.Errorf("invalid target: %q", opts.Target)
+ return
+ }
+
+ mediaType := opts.mediaType
+ if mediaType.IsZero() {
+ mediaType = media.JavascriptType
+ }
+
+ var loader api.Loader
+ switch mediaType.SubType {
+ // TODO(bep) ESBuild support a set of other loaders, but I currently fail
+ // to see the relevance. That may change as we start using this.
+ case media.JavascriptType.SubType:
+ loader = api.LoaderJS
+ case media.TypeScriptType.SubType:
+ loader = api.LoaderTS
+ case media.TSXType.SubType:
+ loader = api.LoaderTSX
+ case media.JSXType.SubType:
+ loader = api.LoaderJSX
+ default:
+ err = fmt.Errorf("unsupported Media Type: %q", opts.mediaType)
+ return
+ }
+
+ var format api.Format
+ // One of: iife, cjs, esm
+ switch opts.Format {
+ case "", "iife":
+ format = api.FormatIIFE
+ case "esm":
+ format = api.FormatESModule
+ case "cjs":
+ format = api.FormatCommonJS
+ default:
+ err = fmt.Errorf("unsupported script output format: %q", opts.Format)
+ return
+ }
+
+ var defines map[string]string
+ if opts.Defines != nil {
+ defines = maps.ToStringMapString(opts.Defines)
+ }
+
+ // By default we only need to specify outDir and no outFile
+ outDir := opts.outDir
+ outFile := ""
+ var sourceMap api.SourceMap
+ switch opts.SourceMap {
+ case "inline":
+ sourceMap = api.SourceMapInline
+ case "external":
+ sourceMap = api.SourceMapExternal
+ case "":
+ sourceMap = api.SourceMapNone
+ default:
+ err = fmt.Errorf("unsupported sourcemap type: %q", opts.SourceMap)
+ return
+ }
+
+ buildOptions = api.BuildOptions{
+ Outfile: outFile,
+ Bundle: true,
+
+ Target: target,
+ Format: format,
+ Sourcemap: sourceMap,
+
+ MinifyWhitespace: opts.Minify,
+ MinifyIdentifiers: opts.Minify,
+ MinifySyntax: opts.Minify,
+
+ Outdir: outDir,
+ Define: defines,
+
+ External: opts.Externals,
+
+ JSXFactory: opts.JSXFactory,
+ JSXFragment: opts.JSXFragment,
+
+ Tsconfig: opts.tsConfig,
+
+ // Note: We're not passing Sourcefile to ESBuild.
+ // This makes ESBuild pass `stdin` as the Importer to the import
+ // resolver, which is what we need/expect.
+ Stdin: &api.StdinOptions{
+ Contents: opts.contents,
+ ResolveDir: opts.resolveDir,
+ Loader: loader,
+ },
+ }
+ return
+}
diff --git a/resources/resource_transformers/js/options_test.go b/resources/resource_transformers/js/options_test.go
new file mode 100644
index 000000000..135164d18
--- /dev/null
+++ b/resources/resource_transformers/js/options_test.go
@@ -0,0 +1,184 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package js
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/media"
+
+ "github.com/evanw/esbuild/pkg/api"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// This test is added to test/warn against breaking the "stability" of the
+// cache key. It's sometimes needed to break this, but should be avoided if possible.
+func TestOptionKey(t *testing.T) {
+ c := qt.New(t)
+
+ opts := map[string]any{
+ "TargetPath": "foo",
+ "Target": "es2018",
+ }
+
+ key := (&buildTransformation{optsm: opts}).Key()
+
+ c.Assert(key.Value(), qt.Equals, "jsbuild_7891849149754191852")
+}
+
+func TestToBuildOptions(t *testing.T) {
+ c := qt.New(t)
+
+ opts, err := toBuildOptions(Options{mediaType: media.JavascriptType})
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts, qt.DeepEquals, api.BuildOptions{
+ Bundle: true,
+ Target: api.ESNext,
+ Format: api.FormatIIFE,
+ Stdin: &api.StdinOptions{
+ Loader: api.LoaderJS,
+ },
+ })
+
+ opts, err = toBuildOptions(Options{
+ Target: "es2018",
+ Format: "cjs",
+ Minify: true,
+ mediaType: media.JavascriptType,
+ AvoidTDZ: true,
+ })
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts, qt.DeepEquals, api.BuildOptions{
+ Bundle: true,
+ Target: api.ES2018,
+ Format: api.FormatCommonJS,
+ MinifyIdentifiers: true,
+ MinifySyntax: true,
+ MinifyWhitespace: true,
+ Stdin: &api.StdinOptions{
+ Loader: api.LoaderJS,
+ },
+ })
+
+ opts, err = toBuildOptions(Options{
+ Target: "es2018", Format: "cjs", Minify: true, mediaType: media.JavascriptType,
+ SourceMap: "inline",
+ })
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts, qt.DeepEquals, api.BuildOptions{
+ Bundle: true,
+ Target: api.ES2018,
+ Format: api.FormatCommonJS,
+ MinifyIdentifiers: true,
+ MinifySyntax: true,
+ MinifyWhitespace: true,
+ Sourcemap: api.SourceMapInline,
+ Stdin: &api.StdinOptions{
+ Loader: api.LoaderJS,
+ },
+ })
+
+ opts, err = toBuildOptions(Options{
+ Target: "es2018", Format: "cjs", Minify: true, mediaType: media.JavascriptType,
+ SourceMap: "inline",
+ })
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts, qt.DeepEquals, api.BuildOptions{
+ Bundle: true,
+ Target: api.ES2018,
+ Format: api.FormatCommonJS,
+ MinifyIdentifiers: true,
+ MinifySyntax: true,
+ MinifyWhitespace: true,
+ Sourcemap: api.SourceMapInline,
+ Stdin: &api.StdinOptions{
+ Loader: api.LoaderJS,
+ },
+ })
+
+ opts, err = toBuildOptions(Options{
+ Target: "es2018", Format: "cjs", Minify: true, mediaType: media.JavascriptType,
+ SourceMap: "external",
+ })
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts, qt.DeepEquals, api.BuildOptions{
+ Bundle: true,
+ Target: api.ES2018,
+ Format: api.FormatCommonJS,
+ MinifyIdentifiers: true,
+ MinifySyntax: true,
+ MinifyWhitespace: true,
+ Sourcemap: api.SourceMapExternal,
+ Stdin: &api.StdinOptions{
+ Loader: api.LoaderJS,
+ },
+ })
+}
+
+func TestResolveComponentInAssets(t *testing.T) {
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ name string
+ files []string
+ impPath string
+ expect string
+ }{
+ {"Basic, extension", []string{"foo.js", "bar.js"}, "foo.js", "foo.js"},
+ {"Basic, no extension", []string{"foo.js", "bar.js"}, "foo", "foo.js"},
+ {"Basic, no extension, typescript", []string{"foo.ts", "bar.js"}, "foo", "foo.ts"},
+ {"Not found", []string{"foo.js", "bar.js"}, "moo.js", ""},
+ {"Not found, double js extension", []string{"foo.js.js", "bar.js"}, "foo.js", ""},
+ {"Index file, folder only", []string{"foo/index.js", "bar.js"}, "foo", "foo/index.js"},
+ {"Index file, folder and index", []string{"foo/index.js", "bar.js"}, "foo/index", "foo/index.js"},
+ {"Index file, folder and index and suffix", []string{"foo/index.js", "bar.js"}, "foo/index.js", "foo/index.js"},
+ {"Index ESM file, folder only", []string{"foo/index.esm.js", "bar.js"}, "foo", "foo/index.esm.js"},
+ {"Index ESM file, folder and index", []string{"foo/index.esm.js", "bar.js"}, "foo/index", "foo/index.esm.js"},
+ {"Index ESM file, folder and index and suffix", []string{"foo/index.esm.js", "bar.js"}, "foo/index.esm.js", "foo/index.esm.js"},
+ // We added these index.esm.js cases in v0.101.0. The case below is unlikely to happen in the wild, but add a test
+ // to document Hugo's behavior. We pick the file with the name index.js; anything else would be breaking.
+ {"Index and Index ESM file, folder only", []string{"foo/index.esm.js", "foo/index.js", "bar.js"}, "foo", "foo/index.js"},
+
+ // Issue #8949
+ {"Check file before directory", []string{"foo.js", "foo/index.js"}, "foo", "foo.js"},
+ } {
+ c.Run(test.name, func(c *qt.C) {
+ baseDir := "assets"
+ mfs := afero.NewMemMapFs()
+
+ for _, filename := range test.files {
+ c.Assert(afero.WriteFile(mfs, filepath.Join(baseDir, filename), []byte("let foo='bar';"), 0777), qt.IsNil)
+ }
+
+ bfs := hugofs.DecorateBasePathFs(afero.NewBasePathFs(mfs, baseDir).(*afero.BasePathFs))
+
+ got := resolveComponentInAssets(bfs, test.impPath)
+
+ gotPath := ""
+ if got != nil {
+ gotPath = filepath.ToSlash(got.Path)
+ }
+
+ c.Assert(gotPath, qt.Equals, test.expect)
+ })
+
+ }
+}
diff --git a/resources/resource_transformers/minifier/integration_test.go b/resources/resource_transformers/minifier/integration_test.go
new file mode 100644
index 000000000..fb4cc7a65
--- /dev/null
+++ b/resources/resource_transformers/minifier/integration_test.go
@@ -0,0 +1,47 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package minifier_test
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+// Issue 8954
+func TestTransformMinify(t *testing.T) {
+ c := qt.New(t)
+
+ files := `
+-- assets/js/test.js --
+new Date(2002, 04, 11)
+-- config.toml --
+-- layouts/index.html --
+{{ $js := resources.Get "js/test.js" | minify }}
+<script>
+{{ $js.Content }}
+</script>
+`
+
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: files,
+ },
+ ).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err, qt.ErrorMatches, "(?s).*legacy octal numbers.*line 1.*")
+}
diff --git a/resources/resource_transformers/minifier/minify.go b/resources/resource_transformers/minifier/minify.go
new file mode 100644
index 000000000..c00d478af
--- /dev/null
+++ b/resources/resource_transformers/minifier/minify.go
@@ -0,0 +1,59 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package minifier
+
+import (
+ "github.com/gohugoio/hugo/minifiers"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/internal"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+// Client for minification of Resource objects. Supported minifiers are:
+// css, html, js, json, svg and xml.
+type Client struct {
+ rs *resources.Spec
+ m minifiers.Client
+}
+
+// New creates a new Client given a specification. Note that it is the media types
+// configured for the site that are used to match files to the correct minifier.
+func New(rs *resources.Spec) (*Client, error) {
+ m, err := minifiers.New(rs.MediaTypes, rs.OutputFormats, rs.Cfg)
+ if err != nil {
+ return nil, err
+ }
+ return &Client{rs: rs, m: m}, nil
+}
+
+type minifyTransformation struct {
+ rs *resources.Spec
+ m minifiers.Client
+}
+
+func (t *minifyTransformation) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey("minify")
+}
+
+func (t *minifyTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
+ ctx.AddOutPathIdentifier(".min")
+ return t.m.Minify(ctx.InMediaType, ctx.To, ctx.From)
+}
+
+func (c *Client) Minify(res resources.ResourceTransformer) (resource.Resource, error) {
+ return res.Transform(&minifyTransformation{
+ rs: c.rs,
+ m: c.m,
+ })
+}
diff --git a/resources/resource_transformers/minifier/minify_test.go b/resources/resource_transformers/minifier/minify_test.go
new file mode 100644
index 000000000..b0ebe3171
--- /dev/null
+++ b/resources/resource_transformers/minifier/minify_test.go
@@ -0,0 +1,42 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package minifier
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/resources/resource_transformers/htesting"
+)
+
+func TestTransform(t *testing.T) {
+ c := qt.New(t)
+
+ spec, err := htesting.NewTestResourceSpec()
+ c.Assert(err, qt.IsNil)
+ client, _ := New(spec)
+
+ r, err := htesting.NewResourceTransformerForSpec(spec, "hugo.html", "<h1> Hugo Rocks! </h1>")
+ c.Assert(err, qt.IsNil)
+
+ transformed, err := client.Minify(r)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(transformed.RelPermalink(), qt.Equals, "/hugo.min.html")
+ content, err := transformed.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+ c.Assert(content, qt.Equals, "<h1>Hugo Rocks!</h1>")
+}
diff --git a/resources/resource_transformers/postcss/integration_test.go b/resources/resource_transformers/postcss/integration_test.go
new file mode 100644
index 000000000..ab48297e4
--- /dev/null
+++ b/resources/resource_transformers/postcss/integration_test.go
@@ -0,0 +1,244 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package postcss_test
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ jww "github.com/spf13/jwalterweatherman"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+const postCSSIntegrationTestFiles = `
+-- assets/css/components/a.css --
+/* A comment. */
+/* Another comment. */
+class-in-a {
+ color: blue;
+}
+
+-- assets/css/components/all.css --
+@import "a.css";
+@import "b.css";
+-- assets/css/components/b.css --
+@import "a.css";
+
+class-in-b {
+ color: blue;
+}
+
+-- assets/css/styles.css --
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+ @import "components/all.css";
+h1 {
+ @apply text-2xl font-bold;
+}
+
+-- config.toml --
+disablekinds = ['taxonomy', 'term', 'page']
+baseURL = "https://example.com"
+[build]
+useResourceCacheWhen = 'never'
+-- content/p1.md --
+-- data/hugo.toml --
+slogan = "Hugo Rocks!"
+-- i18n/en.yaml --
+hello:
+ other: "Hello"
+-- i18n/fr.yaml --
+hello:
+ other: "Bonjour"
+-- layouts/index.html --
+{{ $options := dict "inlineImports" true }}
+{{ $styles := resources.Get "css/styles.css" | resources.PostCSS $options }}
+Styles RelPermalink: {{ $styles.RelPermalink }}
+{{ $cssContent := $styles.Content }}
+Styles Content: Len: {{ len $styles.Content }}|
+-- package.json --
+{
+ "scripts": {},
+
+ "devDependencies": {
+ "postcss-cli": "7.1.0",
+ "tailwindcss": "1.2.0"
+ }
+}
+-- postcss.config.js --
+console.error("Hugo Environment:", process.env.HUGO_ENVIRONMENT );
+// https://github.com/gohugoio/hugo/issues/7656
+console.error("package.json:", process.env.HUGO_FILE_PACKAGE_JSON );
+console.error("PostCSS Config File:", process.env.HUGO_FILE_POSTCSS_CONFIG_JS );
+
+module.exports = {
+ plugins: [
+ require('tailwindcss')
+ ]
+}
+
+`
+
+func TestTransformPostCSS(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("Skip long running test when running locally")
+ }
+
+ c := qt.New(t)
+ tempDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-integration-test")
+ c.Assert(err, qt.IsNil)
+ c.Cleanup(clean)
+
+ for _, s := range []string{"never", "always"} {
+
+ repl := strings.NewReplacer(
+ "https://example.com",
+ "https://example.com/foo",
+ "useResourceCacheWhen = 'never'",
+ fmt.Sprintf("useResourceCacheWhen = '%s'", s),
+ )
+
+ files := repl.Replace(postCSSIntegrationTestFiles)
+
+ fmt.Println("===>", s, files)
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ LogLevel: jww.LevelInfo,
+ WorkingDir: tempDir,
+ TxtarString: files,
+ }).Build()
+
+ b.AssertFileContent("public/index.html", `
+Styles RelPermalink: /foo/css/styles.css
+Styles Content: Len: 770917|
+`)
+
+ }
+
+}
+
+// Issue 9880
+func TestTransformPostCSSError(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("Skip long running test when running locally")
+ }
+
+ c := qt.New(t)
+
+ s, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ TxtarString: strings.ReplaceAll(postCSSIntegrationTestFiles, "color: blue;", "@apply foo;"), // Syntax error
+ }).BuildE()
+
+ s.AssertIsFileError(err)
+ c.Assert(err.Error(), qt.Contains, "a.css:4:2")
+
+}
+
+// #9895
+func TestTransformPostCSSImportError(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("Skip long running test when running locally")
+ }
+
+ c := qt.New(t)
+
+ s, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ LogLevel: jww.LevelInfo,
+ TxtarString: strings.ReplaceAll(postCSSIntegrationTestFiles, `@import "components/all.css";`, `@import "components/doesnotexist.css";`),
+ }).BuildE()
+
+ s.AssertIsFileError(err)
+ c.Assert(err.Error(), qt.Contains, "styles.css:4:3")
+ c.Assert(err.Error(), qt.Contains, filepath.FromSlash(`failed to resolve CSS @import "css/components/doesnotexist.css"`))
+
+}
+
+func TestTransformPostCSSImporSkipInlineImportsNotFound(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("Skip long running test when running locally")
+ }
+
+ c := qt.New(t)
+
+ files := strings.ReplaceAll(postCSSIntegrationTestFiles, `@import "components/all.css";`, `@import "components/doesnotexist.css";`)
+ files = strings.ReplaceAll(files, `{{ $options := dict "inlineImports" true }}`, `{{ $options := dict "inlineImports" true "skipInlineImportsNotFound" true }}`)
+
+ s := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ LogLevel: jww.LevelInfo,
+ TxtarString: files,
+ }).Build()
+
+ s.AssertFileContent("public/css/styles.css", `@import "components/doesnotexist.css";`)
+
+}
+
+// Issue 9787
+func TestTransformPostCSSResourceCacheWithPathInBaseURL(t *testing.T) {
+ if !htesting.IsCI() {
+ t.Skip("Skip long running test when running locally")
+ }
+
+ c := qt.New(t)
+ tempDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-integration-test")
+ c.Assert(err, qt.IsNil)
+ c.Cleanup(clean)
+
+ for i := 0; i < 2; i++ {
+ files := postCSSIntegrationTestFiles
+
+ if i == 1 {
+ files = strings.ReplaceAll(files, "https://example.com", "https://example.com/foo")
+ files = strings.ReplaceAll(files, "useResourceCacheWhen = 'never'", " useResourceCacheWhen = 'always'")
+ }
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ NeedsOsFS: true,
+ NeedsNpmInstall: true,
+ LogLevel: jww.LevelInfo,
+ TxtarString: files,
+ WorkingDir: tempDir,
+ }).Build()
+
+ b.AssertFileContent("public/index.html", `
+Styles Content: Len: 770917
+`)
+
+ }
+
+}
diff --git a/resources/resource_transformers/postcss/postcss.go b/resources/resource_transformers/postcss/postcss.go
new file mode 100644
index 000000000..eab52b8c5
--- /dev/null
+++ b/resources/resource_transformers/postcss/postcss.go
@@ -0,0 +1,440 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package postcss
+
+import (
+ "bytes"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "path"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/common/hugo"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/resources/internal"
+ "github.com/spf13/afero"
+ "github.com/spf13/cast"
+
+ "errors"
+
+ "github.com/mitchellh/mapstructure"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+const importIdentifier = "@import"
+
+var (
+ cssSyntaxErrorRe = regexp.MustCompile(`> (\d+) \|`)
+ shouldImportRe = regexp.MustCompile(`^@import ["'].*["'];?\s*(/\*.*\*/)?$`)
+)
+
+// New creates a new Client with the given specification.
+func New(rs *resources.Spec) *Client {
+ return &Client{rs: rs}
+}
+
+func decodeOptions(m map[string]any) (opts Options, err error) {
+ if m == nil {
+ return
+ }
+ err = mapstructure.WeakDecode(m, &opts)
+
+ if !opts.NoMap {
+ // There was for a long time a discrepancy between documentation and
+ // implementation for the noMap property, so we need to support both
+ // camel and snake case.
+ opts.NoMap = cast.ToBool(m["no-map"])
+ }
+
+ return
+}
+
+// Client is the client used to do PostCSS transformations.
+type Client struct {
+ rs *resources.Spec
+}
+
+// Process transforms the given Resource with the PostCSS processor.
+func (c *Client) Process(res resources.ResourceTransformer, options map[string]any) (resource.Resource, error) {
+ return res.Transform(&postcssTransformation{rs: c.rs, optionsm: options})
+}
+
+// Some of the options from https://github.com/postcss/postcss-cli
+type Options struct {
+
+ // Set a custom path to look for a config file.
+ Config string
+
+ NoMap bool // Disable the default inline sourcemaps
+
+ // Enable inlining of @import statements.
+ // Does so recursively, but currently once only per file;
+ // that is, it's not possible to import the same file in
+ // different scopes (root, media query...)
+ // Note that this import routine does not care about the CSS spec,
+ // so you can have @import anywhere in the file.
+ InlineImports bool
+
+ // When InlineImports is enabled, we fail the build if an import cannot be resolved.
+ // You can enable this to allow the build to continue and leave the import statement in place.
+ // Note that the inline importer does not process url location or imports with media queries,
+ // so those will be left as-is even without enabling this option.
+ SkipInlineImportsNotFound bool
+
+ // Options for when not using a config file
+ Use string // List of postcss plugins to use
+ Parser string // Custom postcss parser
+ Stringifier string // Custom postcss stringifier
+ Syntax string // Custom postcss syntax
+}
+
+func (opts Options) toArgs() []string {
+ var args []string
+ if opts.NoMap {
+ args = append(args, "--no-map")
+ }
+ if opts.Use != "" {
+ args = append(args, "--use")
+ args = append(args, strings.Fields(opts.Use)...)
+ }
+ if opts.Parser != "" {
+ args = append(args, "--parser", opts.Parser)
+ }
+ if opts.Stringifier != "" {
+ args = append(args, "--stringifier", opts.Stringifier)
+ }
+ if opts.Syntax != "" {
+ args = append(args, "--syntax", opts.Syntax)
+ }
+ return args
+}
+
+type postcssTransformation struct {
+ optionsm map[string]any
+ rs *resources.Spec
+}
+
+func (t *postcssTransformation) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey("postcss", t.optionsm)
+}
+
+// Transform shells out to postcss-cli to do the heavy lifting.
+// For this to work, you need some additional tools. To install them globally:
+// npm install -g postcss-cli
+// npm install -g autoprefixer
+func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
+ const binaryName = "postcss"
+
+ ex := t.rs.ExecHelper
+
+ var configFile string
+ logger := t.rs.Logger
+
+ var options Options
+ if t.optionsm != nil {
+ var err error
+ options, err = decodeOptions(t.optionsm)
+ if err != nil {
+ return err
+ }
+ }
+
+ if options.Config != "" {
+ configFile = options.Config
+ } else {
+ configFile = "postcss.config.js"
+ }
+
+ configFile = filepath.Clean(configFile)
+
+ // We need an absolute filename to the config file.
+ if !filepath.IsAbs(configFile) {
+ configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile)
+ if configFile == "" && options.Config != "" {
+ // Only fail if the user specified config file is not found.
+ return fmt.Errorf("postcss config %q not found:", options.Config)
+ }
+ }
+
+ var cmdArgs []any
+
+ if configFile != "" {
+ logger.Infoln("postcss: use config file", configFile)
+ cmdArgs = []any{"--config", configFile}
+ }
+
+ if optArgs := options.toArgs(); len(optArgs) > 0 {
+ cmdArgs = append(cmdArgs, collections.StringSliceToInterfaceSlice(optArgs)...)
+ }
+
+ var errBuf bytes.Buffer
+ infoW := loggers.LoggerToWriterWithPrefix(logger.Info(), "postcss")
+
+ stderr := io.MultiWriter(infoW, &errBuf)
+ cmdArgs = append(cmdArgs, hexec.WithStderr(stderr))
+ cmdArgs = append(cmdArgs, hexec.WithStdout(ctx.To))
+ cmdArgs = append(cmdArgs, hexec.WithEnviron(hugo.GetExecEnviron(t.rs.WorkingDir, t.rs.Cfg, t.rs.BaseFs.Assets.Fs)))
+
+ cmd, err := ex.Npx(binaryName, cmdArgs...)
+ if err != nil {
+ if hexec.IsNotFound(err) {
+ // This may be on a CI server etc. Will fall back to pre-built assets.
+ return herrors.ErrFeatureNotAvailable
+ }
+ return err
+ }
+
+ stdin, err := cmd.StdinPipe()
+ if err != nil {
+ return err
+ }
+
+ src := ctx.From
+
+ imp := newImportResolver(
+ ctx.From,
+ ctx.InPath,
+ options,
+ t.rs.Assets.Fs, t.rs.Logger,
+ )
+
+ if options.InlineImports {
+ var err error
+ src, err = imp.resolve()
+ if err != nil {
+ return err
+ }
+ }
+
+ go func() {
+ defer stdin.Close()
+ io.Copy(stdin, src)
+ }()
+
+ err = cmd.Run()
+ if err != nil {
+ if hexec.IsNotFound(err) {
+ return herrors.ErrFeatureNotAvailable
+ }
+ return imp.toFileError(errBuf.String())
+ }
+
+ return nil
+}
+
+type fileOffset struct {
+ Filename string
+ Offset int
+}
+
+type importResolver struct {
+ r io.Reader
+ inPath string
+ opts Options
+
+ contentSeen map[string]bool
+ linemap map[int]fileOffset
+ fs afero.Fs
+ logger loggers.Logger
+}
+
+func newImportResolver(r io.Reader, inPath string, opts Options, fs afero.Fs, logger loggers.Logger) *importResolver {
+ return &importResolver{
+ r: r,
+ inPath: inPath,
+ fs: fs, logger: logger,
+ linemap: make(map[int]fileOffset), contentSeen: make(map[string]bool),
+ opts: opts,
+ }
+}
+
+func (imp *importResolver) contentHash(filename string) ([]byte, string) {
+ b, err := afero.ReadFile(imp.fs, filename)
+ if err != nil {
+ return nil, ""
+ }
+ h := sha256.New()
+ h.Write(b)
+ return b, hex.EncodeToString(h.Sum(nil))
+}
+
+func (imp *importResolver) importRecursive(
+ lineNum int,
+ content string,
+ inPath string) (int, string, error) {
+ basePath := path.Dir(inPath)
+
+ var replacements []string
+ lines := strings.Split(content, "\n")
+
+ trackLine := func(i, offset int, line string) {
+ // TODO(bep) this is not very efficient.
+ imp.linemap[i+lineNum] = fileOffset{Filename: inPath, Offset: offset}
+ }
+
+ i := 0
+ for offset, line := range lines {
+ i++
+ lineTrimmed := strings.TrimSpace(line)
+ column := strings.Index(line, lineTrimmed)
+ line = lineTrimmed
+
+ if !imp.shouldImport(line) {
+ trackLine(i, offset, line)
+ } else {
+ path := strings.Trim(strings.TrimPrefix(line, importIdentifier), " \"';")
+ filename := filepath.Join(basePath, path)
+ importContent, hash := imp.contentHash(filename)
+
+ if importContent == nil {
+ if imp.opts.SkipInlineImportsNotFound {
+ trackLine(i, offset, line)
+ continue
+ }
+ pos := text.Position{
+ Filename: inPath,
+ LineNumber: offset + 1,
+ ColumnNumber: column + 1,
+ }
+ return 0, "", herrors.NewFileErrorFromFileInPos(fmt.Errorf("failed to resolve CSS @import \"%s\"", filename), pos, imp.fs, nil)
+ }
+
+ i--
+
+ if imp.contentSeen[hash] {
+ i++
+ // Just replace the line with an empty string.
+ replacements = append(replacements, []string{line, ""}...)
+ trackLine(i, offset, "IMPORT")
+ continue
+ }
+
+ imp.contentSeen[hash] = true
+
+ // Handle recursive imports.
+ l, nested, err := imp.importRecursive(i+lineNum, string(importContent), filepath.ToSlash(filename))
+ if err != nil {
+ return 0, "", err
+ }
+
+ trackLine(i, offset, line)
+
+ i += l
+
+ importContent = []byte(nested)
+
+ replacements = append(replacements, []string{line, string(importContent)}...)
+ }
+ }
+
+ if len(replacements) > 0 {
+ repl := strings.NewReplacer(replacements...)
+ content = repl.Replace(content)
+ }
+
+ return i, content, nil
+}
+
+func (imp *importResolver) resolve() (io.Reader, error) {
+ const importIdentifier = "@import"
+
+ content, err := ioutil.ReadAll(imp.r)
+ if err != nil {
+ return nil, err
+ }
+
+ contents := string(content)
+
+ _, newContent, err := imp.importRecursive(0, contents, imp.inPath)
+ if err != nil {
+ return nil, err
+ }
+
+ return strings.NewReader(newContent), nil
+}
+
+// See https://www.w3schools.com/cssref/pr_import_rule.asp
+// We currently only support simple file imports, no urls, no media queries.
+// So this is OK:
+// @import "navigation.css";
+// This is not:
+// @import url("navigation.css");
+// @import "mobstyle.css" screen and (max-width: 768px);
+func (imp *importResolver) shouldImport(s string) bool {
+ if !strings.HasPrefix(s, importIdentifier) {
+ return false
+ }
+ if strings.Contains(s, "url(") {
+ return false
+ }
+
+ return shouldImportRe.MatchString(s)
+}
+
+func (imp *importResolver) toFileError(output string) error {
+ output = strings.TrimSpace(loggers.RemoveANSIColours(output))
+ inErr := errors.New(output)
+
+ match := cssSyntaxErrorRe.FindStringSubmatch(output)
+ if match == nil {
+ return inErr
+ }
+
+ lineNum, err := strconv.Atoi(match[1])
+ if err != nil {
+ return inErr
+ }
+
+ file, ok := imp.linemap[lineNum]
+ if !ok {
+ return inErr
+ }
+
+ fi, err := imp.fs.Stat(file.Filename)
+ if err != nil {
+ return inErr
+ }
+
+ meta := fi.(hugofs.FileMetaInfo).Meta()
+ realFilename := meta.Filename
+ f, err := meta.Open()
+ if err != nil {
+ return inErr
+ }
+ defer f.Close()
+
+ ferr := herrors.NewFileErrorFromName(inErr, realFilename)
+ pos := ferr.Position()
+ pos.LineNumber = file.Offset + 1
+ return ferr.UpdatePosition(pos).UpdateContent(f, nil)
+
+ //return herrors.NewFileErrorFromFile(inErr, file.Filename, realFilename, hugofs.Os, herrors.SimpleLineMatcher)
+
+}
diff --git a/resources/resource_transformers/postcss/postcss_test.go b/resources/resource_transformers/postcss/postcss_test.go
new file mode 100644
index 000000000..6901d69de
--- /dev/null
+++ b/resources/resource_transformers/postcss/postcss_test.go
@@ -0,0 +1,166 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package postcss
+
+import (
+ "regexp"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// Issue 6166
+func TestDecodeOptions(t *testing.T) {
+ c := qt.New(t)
+ opts1, err := decodeOptions(map[string]any{
+ "no-map": true,
+ })
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts1.NoMap, qt.Equals, true)
+
+ opts2, err := decodeOptions(map[string]any{
+ "noMap": true,
+ })
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(opts2.NoMap, qt.Equals, true)
+}
+
+func TestShouldImport(t *testing.T) {
+ c := qt.New(t)
+ var imp *importResolver
+
+ for _, test := range []struct {
+ input string
+ expect bool
+ }{
+ {input: `@import "navigation.css";`, expect: true},
+ {input: `@import "navigation.css"; /* Using a string */`, expect: true},
+ {input: `@import "navigation.css"`, expect: true},
+ {input: `@import 'navigation.css';`, expect: true},
+ {input: `@import url("navigation.css");`, expect: false},
+ {input: `@import url('https://fonts.googleapis.com/css?family=Open+Sans:400,400i,800,800i&display=swap');`, expect: false},
+ {input: `@import "printstyle.css" print;`, expect: false},
+ } {
+ c.Assert(imp.shouldImport(test.input), qt.Equals, test.expect)
+ }
+}
+
+func TestImportResolver(t *testing.T) {
+ c := qt.New(t)
+ fs := afero.NewMemMapFs()
+
+ writeFile := func(name, content string) {
+ c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil)
+ }
+
+ writeFile("a.css", `@import "b.css";
+@import "c.css";
+A_STYLE1
+A_STYLE2
+`)
+
+ writeFile("b.css", `B_STYLE`)
+ writeFile("c.css", "@import \"d.css\"\nC_STYLE")
+ writeFile("d.css", "@import \"a.css\"\n\nD_STYLE")
+ writeFile("e.css", "E_STYLE")
+
+ mainStyles := strings.NewReader(`@import "a.css";
+@import "b.css";
+LOCAL_STYLE
+@import "c.css";
+@import "e.css";`)
+
+ imp := newImportResolver(
+ mainStyles,
+ "styles.css",
+ Options{},
+ fs, loggers.NewErrorLogger(),
+ )
+
+ r, err := imp.resolve()
+ c.Assert(err, qt.IsNil)
+ rs := helpers.ReaderToString(r)
+ result := regexp.MustCompile(`\n+`).ReplaceAllString(rs, "\n")
+
+ c.Assert(result, hqt.IsSameString, `B_STYLE
+D_STYLE
+C_STYLE
+A_STYLE1
+A_STYLE2
+LOCAL_STYLE
+E_STYLE`)
+
+ dline := imp.linemap[3]
+ c.Assert(dline, qt.DeepEquals, fileOffset{
+ Offset: 1,
+ Filename: "d.css",
+ })
+}
+
+func BenchmarkImportResolver(b *testing.B) {
+ c := qt.New(b)
+ fs := afero.NewMemMapFs()
+
+ writeFile := func(name, content string) {
+ c.Assert(afero.WriteFile(fs, name, []byte(content), 0777), qt.IsNil)
+ }
+
+ writeFile("a.css", `@import "b.css";
+@import "c.css";
+A_STYLE1
+A_STYLE2
+`)
+
+ writeFile("b.css", `B_STYLE`)
+ writeFile("c.css", "@import \"d.css\"\nC_STYLE"+strings.Repeat("\nSTYLE", 12))
+ writeFile("d.css", "@import \"a.css\"\n\nD_STYLE"+strings.Repeat("\nSTYLE", 55))
+ writeFile("e.css", "E_STYLE")
+
+ mainStyles := `@import "a.css";
+@import "b.css";
+LOCAL_STYLE
+@import "c.css";
+@import "e.css";
+@import "missing.css";`
+
+ logger := loggers.NewErrorLogger()
+
+ for i := 0; i < b.N; i++ {
+ b.StopTimer()
+ imp := newImportResolver(
+ strings.NewReader(mainStyles),
+ "styles.css",
+ Options{},
+ fs, logger,
+ )
+
+ b.StartTimer()
+
+ _, err := imp.resolve()
+ if err != nil {
+ b.Fatal(err)
+ }
+
+ }
+}
diff --git a/resources/resource_transformers/templates/execute_as_template.go b/resources/resource_transformers/templates/execute_as_template.go
new file mode 100644
index 000000000..5fe4230f1
--- /dev/null
+++ b/resources/resource_transformers/templates/execute_as_template.go
@@ -0,0 +1,74 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package templates contains functions for template processing of Resource objects.
+package templates
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/internal"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+// Client contains methods to perform template processing of Resource objects.
+type Client struct {
+ rs *resources.Spec
+ t tpl.TemplatesProvider
+}
+
+// New creates a new Client with the given specification.
+func New(rs *resources.Spec, t tpl.TemplatesProvider) *Client {
+ if rs == nil {
+		panic("must provide a resource Spec")
+ }
+ if t == nil {
+ panic("must provide a template provider")
+ }
+ return &Client{rs: rs, t: t}
+}
+
+type executeAsTemplateTransform struct {
+ rs *resources.Spec
+ t tpl.TemplatesProvider
+ targetPath string
+ data any
+}
+
+func (t *executeAsTemplateTransform) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey("execute-as-template", t.targetPath)
+}
+
+func (t *executeAsTemplateTransform) Transform(ctx *resources.ResourceTransformationCtx) error {
+ tplStr := helpers.ReaderToString(ctx.From)
+ templ, err := t.t.TextTmpl().Parse(ctx.InPath, tplStr)
+ if err != nil {
+ return fmt.Errorf("failed to parse Resource %q as Template:: %w", ctx.InPath, err)
+ }
+
+ ctx.OutPath = t.targetPath
+
+ return t.t.Tmpl().Execute(templ, ctx.To, t.data)
+}
+
+func (c *Client) ExecuteAsTemplate(res resources.ResourceTransformer, targetPath string, data any) (resource.Resource, error) {
+ return res.Transform(&executeAsTemplateTransform{
+ rs: c.rs,
+ targetPath: helpers.ToSlashTrimLeading(targetPath),
+ t: c.t,
+ data: data,
+ })
+}
diff --git a/resources/resource_transformers/templates/integration_test.go b/resources/resource_transformers/templates/integration_test.go
new file mode 100644
index 000000000..4eaac8e27
--- /dev/null
+++ b/resources/resource_transformers/templates/integration_test.go
@@ -0,0 +1,77 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package templates_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestExecuteAsTemplateMultipleLanguages(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = "http://example.com/blog"
+defaultContentLanguage = "fr"
+defaultContentLanguageInSubdir = true
+[Languages]
+[Languages.en]
+weight = 10
+title = "In English"
+languageName = "English"
+[Languages.fr]
+weight = 20
+title = "Le Français"
+languageName = "Français"
+-- i18n/en.toml --
+[hello]
+other = "Hello"
+-- i18n/fr.toml --
+[hello]
+other = "Bonjour"
+-- layouts/index.fr.html --
+Lang: {{ site.Language.Lang }}
+{{ $templ := "{{T \"hello\"}}" | resources.FromString "f1.html" }}
+{{ $helloResource := $templ | resources.ExecuteAsTemplate (print "f%s.html" .Lang) . }}
+Hello1: {{T "hello"}}
+Hello2: {{ $helloResource.Content }}
+LangURL: {{ relLangURL "foo" }}
+-- layouts/index.html --
+Lang: {{ site.Language.Lang }}
+{{ $templ := "{{T \"hello\"}}" | resources.FromString "f1.html" }}
+{{ $helloResource := $templ | resources.ExecuteAsTemplate (print "f%s.html" .Lang) . }}
+Hello1: {{T "hello"}}
+Hello2: {{ $helloResource.Content }}
+LangURL: {{ relLangURL "foo" }}
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ }).Build()
+
+ b.AssertFileContent("public/en/index.html", `
+ Hello1: Hello
+ Hello2: Hello
+ `)
+
+ b.AssertFileContent("public/fr/index.html", `
+ Hello1: Bonjour
+ Hello2: Bonjour
+ `)
+}
diff --git a/resources/resource_transformers/tocss/dartsass/client.go b/resources/resource_transformers/tocss/dartsass/client.go
new file mode 100644
index 000000000..7c3a7ecba
--- /dev/null
+++ b/resources/resource_transformers/tocss/dartsass/client.go
@@ -0,0 +1,143 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package dartsass integrates with the Dart Sass Embedded protocol to transpile
+// SCSS/SASS.
+package dartsass
+
+import (
+ "fmt"
+ "io"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/afero"
+
+ "github.com/bep/godartsass"
+ "github.com/mitchellh/mapstructure"
+)
+
+// used as part of the cache key.
+const transformationName = "tocss-dart"
+
+// See https://github.com/sass/dart-sass-embedded/issues/24
+// Note: This prefix must be all lower case.
+const dartSassStdinPrefix = "hugostdin:"
+
+func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) {
+ if !Supports() {
+ return &Client{dartSassNotAvailable: true}, nil
+ }
+
+ if err := rs.ExecHelper.Sec().CheckAllowedExec(dartSassEmbeddedBinaryName); err != nil {
+ return nil, err
+ }
+
+ transpiler, err := godartsass.Start(godartsass.Options{
+ LogEventHandler: func(event godartsass.LogEvent) {
+ message := strings.ReplaceAll(event.Message, dartSassStdinPrefix, "")
+ switch event.Type {
+ case godartsass.LogEventTypeDebug:
+ // Log as Info for now, we may adjust this if it gets too chatty.
+ rs.Logger.Infof("Dart Sass: %s", message)
+ default:
+ // The rest are either deprecations or @warn statements.
+ rs.Logger.Warnf("Dart Sass: %s", message)
+ }
+ },
+ })
+ if err != nil {
+ return nil, err
+ }
+ return &Client{sfs: fs, workFs: rs.BaseFs.Work, rs: rs, transpiler: transpiler}, nil
+}
+
+type Client struct {
+ dartSassNotAvailable bool
+ rs *resources.Spec
+ sfs *filesystems.SourceFilesystem
+ workFs afero.Fs
+ transpiler *godartsass.Transpiler
+}
+
+func (c *Client) ToCSS(res resources.ResourceTransformer, args map[string]any) (resource.Resource, error) {
+ if c.dartSassNotAvailable {
+ return res.Transform(resources.NewFeatureNotAvailableTransformer(transformationName, args))
+ }
+ return res.Transform(&transform{c: c, optsm: args})
+}
+
+func (c *Client) Close() error {
+ if c.transpiler == nil {
+ return nil
+ }
+ return c.transpiler.Close()
+}
+
+func (c *Client) toCSS(args godartsass.Args, src io.Reader) (godartsass.Result, error) {
+ var res godartsass.Result
+
+ in := helpers.ReaderToString(src)
+ args.Source = in
+
+ res, err := c.transpiler.Execute(args)
+ if err != nil {
+ if err.Error() == "unexpected EOF" {
+ return res, fmt.Errorf("got unexpected EOF when executing %q. The user running hugo must have read and execute permissions on this program. With execute permissions only, this error is thrown.", dartSassEmbeddedBinaryName)
+ }
+ return res, herrors.NewFileErrorFromFileInErr(err, hugofs.Os, herrors.OffsetMatcher)
+ }
+
+ return res, err
+}
+
+type Options struct {
+
+ // Hugo, will by default, just replace the extension of the source
+ // to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can
+ // control this by setting this, e.g. "styles/main.css" will create
+ // a Resource with that as a base for RelPermalink etc.
+ TargetPath string
+
+ // Hugo automatically adds the entry directories (where the main.scss lives)
+ // for project and themes to the list of include paths sent to Dart Sass.
+ // Any paths set in this setting will be appended. Note that these will be
+ // treated as relative to the working dir, i.e. no include paths outside the
+ // project/themes.
+ IncludePaths []string
+
+ // Default is nested.
+ // One of nested, expanded, compact, compressed.
+ OutputStyle string
+
+ // When enabled, Hugo will generate a source map.
+ EnableSourceMap bool
+}
+
+func decodeOptions(m map[string]any) (opts Options, err error) {
+ if m == nil {
+ return
+ }
+ err = mapstructure.WeakDecode(m, &opts)
+
+ if opts.TargetPath != "" {
+ opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
+ }
+
+ return
+}
diff --git a/resources/resource_transformers/tocss/dartsass/integration_test.go b/resources/resource_transformers/tocss/dartsass/integration_test.go
new file mode 100644
index 000000000..c127057a5
--- /dev/null
+++ b/resources/resource_transformers/tocss/dartsass/integration_test.go
@@ -0,0 +1,273 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package dartsass_test
+
+import (
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/resources/resource_transformers/tocss/dartsass"
+ jww "github.com/spf13/jwalterweatherman"
+)
+
+func TestTransformIncludePaths(t *testing.T) {
+ if !dartsass.Supports() {
+ t.Skip()
+ }
+
+ files := `
+-- assets/scss/main.scss --
+@import "moo";
+-- node_modules/foo/_moo.scss --
+$moolor: #fff;
+
+moo {
+ color: $moolor;
+}
+-- config.toml --
+-- layouts/index.html --
+{{ $cssOpts := (dict "includePaths" (slice "node_modules/foo") "transpiler" "dartsass" ) }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }}
+T1: {{ $r.Content }}
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ }).Build()
+
+ b.AssertFileContent("public/index.html", `T1: moo{color:#fff}`)
+}
+
+func TestTransformImportRegularCSS(t *testing.T) {
+ if !dartsass.Supports() {
+ t.Skip()
+ }
+
+ files := `
+-- assets/scss/_moo.scss --
+$moolor: #fff;
+
+moo {
+ color: $moolor;
+}
+-- assets/scss/another.css --
+
+-- assets/scss/main.scss --
+@import "moo";
+@import "regular.css";
+@import "moo";
+@import "another.css";
+
+/* foo */
+-- assets/scss/regular.css --
+
+-- config.toml --
+-- layouts/index.html --
+{{ $r := resources.Get "scss/main.scss" | toCSS (dict "transpiler" "dartsass") }}
+T1: {{ $r.Content | safeHTML }}
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ },
+ ).Build()
+
+ // Dart Sass does not follow regular CSS import, but they
+ // get pulled to the top.
+ b.AssertFileContent("public/index.html", `T1: @import "regular.css";
+ @import "another.css";
+ moo {
+ color: #fff;
+ }
+
+ moo {
+ color: #fff;
+ }
+
+ /* foo */`)
+}
+
+func TestTransformThemeOverrides(t *testing.T) {
+ if !dartsass.Supports() {
+ t.Skip()
+ }
+
+ files := `
+-- assets/scss/components/_boo.scss --
+$boolor: green;
+
+boo {
+ color: $boolor;
+}
+-- assets/scss/components/_moo.scss --
+$moolor: #ccc;
+
+moo {
+ color: $moolor;
+}
+-- config.toml --
+theme = 'mytheme'
+-- layouts/index.html --
+{{ $cssOpts := (dict "includePaths" (slice "node_modules/foo" ) "transpiler" "dartsass" ) }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }}
+T1: {{ $r.Content }}
+-- themes/mytheme/assets/scss/components/_boo.scss --
+$boolor: orange;
+
+boo {
+ color: $boolor;
+}
+-- themes/mytheme/assets/scss/components/_imports.scss --
+@import "moo";
+@import "_boo";
+@import "_zoo";
+-- themes/mytheme/assets/scss/components/_moo.scss --
+$moolor: #fff;
+
+moo {
+ color: $moolor;
+}
+-- themes/mytheme/assets/scss/components/_zoo.scss --
+$zoolor: pink;
+
+zoo {
+ color: $zoolor;
+}
+-- themes/mytheme/assets/scss/main.scss --
+@import "components/imports";
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `T1: moo{color:#ccc}boo{color:green}zoo{color:pink}`)
+}
+
+func TestTransformLogging(t *testing.T) {
+ if !dartsass.Supports() {
+ t.Skip()
+ }
+
+ files := `
+-- assets/scss/main.scss --
+@warn "foo";
+@debug "bar";
+
+-- config.toml --
+disableKinds = ["term", "taxonomy", "section", "page"]
+-- layouts/index.html --
+{{ $cssOpts := (dict "transpiler" "dartsass" ) }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts }}
+T1: {{ $r.Content }}
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ LogLevel: jww.LevelInfo,
+ }).Build()
+
+ b.AssertLogMatches(`WARN.*Dart Sass: foo`)
+ b.AssertLogMatches(`INFO.*Dart Sass: .*assets.*main.scss:1:0: bar`)
+
+}
+
+func TestTransformErrors(t *testing.T) {
+ if !dartsass.Supports() {
+ t.Skip()
+ }
+
+ c := qt.New(t)
+
+ const filesTemplate = `
+-- config.toml --
+-- assets/scss/components/_foo.scss --
+/* comment line 1 */
+$foocolor: #ccc;
+
+foo {
+ color: $foocolor;
+}
+-- assets/scss/main.scss --
+/* comment line 1 */
+/* comment line 2 */
+@import "components/foo";
+/* comment line 4 */
+
+ $maincolor: #eee;
+
+body {
+ color: $maincolor;
+}
+
+-- layouts/index.html --
+{{ $cssOpts := dict "transpiler" "dartsass" }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }}
+T1: {{ $r.Content }}
+
+ `
+
+ c.Run("error in main", func(c *qt.C) {
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: strings.Replace(filesTemplate, "$maincolor: #eee;", "$maincolor #eee;", 1),
+ NeedsOsFS: true,
+ }).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, `main.scss:8:13":`)
+ b.Assert(err.Error(), qt.Contains, `: expected ":".`)
+ fe := b.AssertIsFileError(err)
+ b.Assert(fe.ErrorContext(), qt.IsNotNil)
+ b.Assert(fe.ErrorContext().Lines, qt.DeepEquals, []string{" $maincolor #eee;", "", "body {", "\tcolor: $maincolor;", "}"})
+ b.Assert(fe.ErrorContext().ChromaLexer, qt.Equals, "scss")
+
+ })
+
+ c.Run("error in import", func(c *qt.C) {
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: strings.Replace(filesTemplate, "$foocolor: #ccc;", "$foocolor #ccc;", 1),
+ NeedsOsFS: true,
+ }).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, `_foo.scss:2:10":`)
+ b.Assert(err.Error(), qt.Contains, `: expected ":".`)
+ fe := b.AssertIsFileError(err)
+ b.Assert(fe.ErrorContext(), qt.IsNotNil)
+ b.Assert(fe.ErrorContext().Lines, qt.DeepEquals, []string{"/* comment line 1 */", "$foocolor #ccc;", "", "foo {"})
+ b.Assert(fe.ErrorContext().ChromaLexer, qt.Equals, "scss")
+
+ })
+
+}
diff --git a/resources/resource_transformers/tocss/dartsass/transform.go b/resources/resource_transformers/tocss/dartsass/transform.go
new file mode 100644
index 000000000..9d17d3bcc
--- /dev/null
+++ b/resources/resource_transformers/tocss/dartsass/transform.go
@@ -0,0 +1,182 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package dartsass
+
+import (
+ "fmt"
+ "io"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/paths"
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/media"
+
+ "github.com/gohugoio/hugo/resources"
+
+ "github.com/gohugoio/hugo/resources/internal"
+
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/bep/godartsass"
+)
+
+const (
+ dartSassEmbeddedBinaryName = "dart-sass-embedded"
+)
+
+// Supports returns whether dart-sass-embedded is found in $PATH.
+func Supports() bool {
+ if htesting.SupportsAll() {
+ return true
+ }
+ return hexec.InPath(dartSassEmbeddedBinaryName)
+}
+
+type transform struct {
+ optsm map[string]any
+ c *Client
+}
+
+func (t *transform) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey(transformationName, t.optsm)
+}
+
+func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error {
+ ctx.OutMediaType = media.CSSType
+
+ opts, err := decodeOptions(t.optsm)
+ if err != nil {
+ return err
+ }
+
+ if opts.TargetPath != "" {
+ ctx.OutPath = opts.TargetPath
+ } else {
+ ctx.ReplaceOutPathExtension(".css")
+ }
+
+ baseDir := path.Dir(ctx.SourcePath)
+ filename := dartSassStdinPrefix
+
+ if ctx.SourcePath != "" {
+ filename += t.c.sfs.RealFilename(ctx.SourcePath)
+ }
+
+ args := godartsass.Args{
+ URL: filename,
+ IncludePaths: t.c.sfs.RealDirs(baseDir),
+ ImportResolver: importResolver{
+ baseDir: baseDir,
+ c: t.c,
+ },
+ OutputStyle: godartsass.ParseOutputStyle(opts.OutputStyle),
+ EnableSourceMap: opts.EnableSourceMap,
+ }
+
+ // Append any workDir relative include paths
+ for _, ip := range opts.IncludePaths {
+ info, err := t.c.workFs.Stat(filepath.Clean(ip))
+ if err == nil {
+ filename := info.(hugofs.FileMetaInfo).Meta().Filename
+ args.IncludePaths = append(args.IncludePaths, filename)
+ }
+ }
+
+ if ctx.InMediaType.SubType == media.SASSType.SubType {
+ args.SourceSyntax = godartsass.SourceSyntaxSASS
+ }
+
+ res, err := t.c.toCSS(args, ctx.From)
+ if err != nil {
+ return err
+ }
+
+ out := res.CSS
+
+ _, err = io.WriteString(ctx.To, out)
+ if err != nil {
+ return err
+ }
+
+ if opts.EnableSourceMap && res.SourceMap != "" {
+ if err := ctx.PublishSourceMap(res.SourceMap); err != nil {
+ return err
+ }
+ _, err = fmt.Fprintf(ctx.To, "\n\n/*# sourceMappingURL=%s */", path.Base(ctx.OutPath)+".map")
+ }
+
+ return err
+}
+
+type importResolver struct {
+ baseDir string
+ c *Client
+}
+
+func (t importResolver) CanonicalizeURL(url string) (string, error) {
+ filePath, isURL := paths.UrlToFilename(url)
+ var prevDir string
+ var pathDir string
+ if isURL {
+ var found bool
+ prevDir, found = t.c.sfs.MakePathRelative(filepath.Dir(filePath))
+
+ if !found {
+ // Not a member of this filesystem, let Dart Sass handle it.
+ return "", nil
+ }
+ } else {
+ prevDir = t.baseDir
+ pathDir = path.Dir(url)
+ }
+
+ basePath := filepath.Join(prevDir, pathDir)
+ name := filepath.Base(filePath)
+
+ // Pick the first match.
+ var namePatterns []string
+ if strings.Contains(name, ".") {
+ namePatterns = []string{"_%s", "%s"}
+ } else if strings.HasPrefix(name, "_") {
+ namePatterns = []string{"_%s.scss", "_%s.sass"}
+ } else {
+ namePatterns = []string{"_%s.scss", "%s.scss", "_%s.sass", "%s.sass"}
+ }
+
+ name = strings.TrimPrefix(name, "_")
+
+ for _, namePattern := range namePatterns {
+ filenameToCheck := filepath.Join(basePath, fmt.Sprintf(namePattern, name))
+ fi, err := t.c.sfs.Fs.Stat(filenameToCheck)
+ if err == nil {
+ if fim, ok := fi.(hugofs.FileMetaInfo); ok {
+ return "file://" + filepath.ToSlash(fim.Meta().Filename), nil
+ }
+ }
+ }
+
+ // Not found, let Dart Sass handle it
+ return "", nil
+}
+
+func (t importResolver) Load(url string) (string, error) {
+ filename, _ := paths.UrlToFilename(url)
+ b, err := afero.ReadFile(hugofs.Os, filename)
+ return string(b), err
+}
diff --git a/resources/resource_transformers/tocss/scss/client.go b/resources/resource_transformers/tocss/scss/client.go
new file mode 100644
index 000000000..ecaceaa6c
--- /dev/null
+++ b/resources/resource_transformers/tocss/scss/client.go
@@ -0,0 +1,90 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scss
+
+import (
+ "regexp"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugolib/filesystems"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/spf13/afero"
+
+ "github.com/mitchellh/mapstructure"
+)
+
+const transformationName = "tocss"
+
+type Client struct {
+ rs *resources.Spec
+ sfs *filesystems.SourceFilesystem
+ workFs afero.Fs
+}
+
+func New(fs *filesystems.SourceFilesystem, rs *resources.Spec) (*Client, error) {
+ return &Client{sfs: fs, workFs: rs.BaseFs.Work, rs: rs}, nil
+}
+
+type Options struct {
+
+ // Hugo, will by default, just replace the extension of the source
+ // to .css, e.g. "scss/main.scss" becomes "scss/main.css". You can
+ // control this by setting this, e.g. "styles/main.css" will create
+ // a Resource with that as a base for RelPermalink etc.
+ TargetPath string
+
+ // Hugo automatically adds the entry directories (where the main.scss lives)
+ // for project and themes to the list of include paths sent to LibSASS.
+ // Any paths set in this setting will be appended. Note that these will be
+ // treated as relative to the working dir, i.e. no include paths outside the
+ // project/themes.
+ IncludePaths []string
+
+ // Default is nested.
+ // One of nested, expanded, compact, compressed.
+ OutputStyle string
+
+ // Precision of floating point math.
+ Precision int
+
+ // When enabled, Hugo will generate a source map.
+ EnableSourceMap bool
+}
+
+func DecodeOptions(m map[string]any) (opts Options, err error) {
+ if m == nil {
+ return
+ }
+ err = mapstructure.WeakDecode(m, &opts)
+
+ if opts.TargetPath != "" {
+ opts.TargetPath = helpers.ToSlashTrimLeading(opts.TargetPath)
+ }
+
+ return
+}
+
+var (
+ regularCSSImportTo = regexp.MustCompile(`.*(@import "(.*\.css)";).*`)
+ regularCSSImportFrom = regexp.MustCompile(`.*(\/\* HUGO_IMPORT_START (.*) HUGO_IMPORT_END \*\/).*`)
+)
+
+func replaceRegularImportsIn(s string) (string, bool) {
+ replaced := regularCSSImportTo.ReplaceAllString(s, "/* HUGO_IMPORT_START $2 HUGO_IMPORT_END */")
+ return replaced, s != replaced
+}
+
+func replaceRegularImportsOut(s string) string {
+ return regularCSSImportFrom.ReplaceAllString(s, "@import \"$2\";")
+}
diff --git a/resources/resource_transformers/tocss/scss/client_extended.go b/resources/resource_transformers/tocss/scss/client_extended.go
new file mode 100644
index 000000000..bfee39499
--- /dev/null
+++ b/resources/resource_transformers/tocss/scss/client_extended.go
@@ -0,0 +1,60 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build extended
+// +build extended
+
+package scss
+
+import (
+ "github.com/bep/golibsass/libsass"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/internal"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+type options struct {
+ // The options we receive from the end user.
+ from Options
+
+ // The options we send to the SCSS library.
+ to libsass.Options
+}
+
+func (c *Client) ToCSS(res resources.ResourceTransformer, opts Options) (resource.Resource, error) {
+ internalOptions := options{
+ from: opts,
+ }
+
+ // Transfer values from client.
+ internalOptions.to.Precision = opts.Precision
+ internalOptions.to.OutputStyle = libsass.ParseOutputStyle(opts.OutputStyle)
+
+ if internalOptions.to.Precision == 0 {
+ // bootstrap-sass requires 8 digits precision. The libsass default is 5.
+ // https://github.com/twbs/bootstrap-sass/blob/master/README.md#sass-number-precision
+ internalOptions.to.Precision = 8
+ }
+
+ return res.Transform(&toCSSTransformation{c: c, options: internalOptions})
+
+}
+
+type toCSSTransformation struct {
+ c *Client
+ options options
+}
+
+func (t *toCSSTransformation) Key() internal.ResourceTransformationKey {
+ return internal.NewResourceTransformationKey(transformationName, t.options.from)
+}
diff --git a/resources/resource_transformers/tocss/scss/client_notavailable.go b/resources/resource_transformers/tocss/scss/client_notavailable.go
new file mode 100644
index 000000000..efd79109b
--- /dev/null
+++ b/resources/resource_transformers/tocss/scss/client_notavailable.go
@@ -0,0 +1,31 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !extended
+// +build !extended
+
+package scss
+
+import (
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
+func (c *Client) ToCSS(res resources.ResourceTransformer, opts Options) (resource.Resource, error) {
+ return res.Transform(resources.NewFeatureNotAvailableTransformer(transformationName, opts))
+}
+
+// Used in tests.
+func Supports() bool {
+ return false
+}
diff --git a/resources/resource_transformers/tocss/scss/client_test.go b/resources/resource_transformers/tocss/scss/client_test.go
new file mode 100644
index 000000000..9dddd3869
--- /dev/null
+++ b/resources/resource_transformers/tocss/scss/client_test.go
@@ -0,0 +1,49 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scss
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestReplaceRegularCSSImports(t *testing.T) {
+ c := qt.New(t)
+
+ scssWithImport := `
+
+@import "moo";
+@import "regular.css";
+@import "moo";
+@import "another.css";
+@import "foo.scss";
+
+/* foo */`
+
+ scssWithoutImport := `
+@import "moo";
+/* foo */`
+
+ res, replaced := replaceRegularImportsIn(scssWithImport)
+ c.Assert(replaced, qt.Equals, true)
+ c.Assert(res, qt.Equals, "\n\t\n@import \"moo\";\n/* HUGO_IMPORT_START regular.css HUGO_IMPORT_END */\n@import \"moo\";\n/* HUGO_IMPORT_START another.css HUGO_IMPORT_END */\n@import \"foo.scss\";\n\n/* foo */")
+
+ res2, replaced2 := replaceRegularImportsIn(scssWithoutImport)
+ c.Assert(replaced2, qt.Equals, false)
+ c.Assert(res2, qt.Equals, scssWithoutImport)
+
+ reverted := replaceRegularImportsOut(res)
+ c.Assert(reverted, qt.Equals, scssWithImport)
+}
diff --git a/resources/resource_transformers/tocss/scss/integration_test.go b/resources/resource_transformers/tocss/scss/integration_test.go
new file mode 100644
index 000000000..13b664cc7
--- /dev/null
+++ b/resources/resource_transformers/tocss/scss/integration_test.go
@@ -0,0 +1,247 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scss_test
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss"
+)
+
+func TestTransformIncludePaths(t *testing.T) {
+ if !scss.Supports() {
+ t.Skip()
+ }
+ c := qt.New(t)
+
+ files := `
+-- assets/scss/main.scss --
+@import "moo";
+-- node_modules/foo/_moo.scss --
+$moolor: #fff;
+
+moo {
+ color: $moolor;
+}
+-- config.toml --
+-- layouts/index.html --
+{{ $cssOpts := (dict "includePaths" (slice "node_modules/foo") ) }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }}
+T1: {{ $r.Content }}
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: files,
+ NeedsOsFS: true,
+ }).Build()
+
+ b.AssertFileContent("public/index.html", `T1: moo{color:#fff}`)
+}
+
+func TestTransformImportRegularCSS(t *testing.T) {
+ if !scss.Supports() {
+ t.Skip()
+ }
+
+ c := qt.New(t)
+
+ files := `
+-- assets/scss/_moo.scss --
+$moolor: #fff;
+
+moo {
+ color: $moolor;
+}
+-- assets/scss/another.css --
+
+-- assets/scss/main.scss --
+@import "moo";
+@import "regular.css";
+@import "moo";
+@import "another.css";
+
+/* foo */
+-- assets/scss/regular.css --
+
+-- config.toml --
+-- layouts/index.html --
+{{ $r := resources.Get "scss/main.scss" | toCSS }}
+T1: {{ $r.Content | safeHTML }}
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: files,
+ NeedsOsFS: true,
+ }).Build()
+
+ // LibSass does not support regular CSS imports. There
+ // is an open bug about it that probably will never be resolved.
+ // Hugo works around this by preserving them in place:
+ b.AssertFileContent("public/index.html", `
+ T1: moo {
+ color: #fff; }
+
+@import "regular.css";
+moo {
+ color: #fff; }
+
+@import "another.css";
+/* foo */
+
+`)
+}
+
+func TestTransformThemeOverrides(t *testing.T) {
+ if !scss.Supports() {
+ t.Skip()
+ }
+
+ c := qt.New(t)
+
+ files := `
+-- assets/scss/components/_boo.scss --
+$boolor: green;
+
+boo {
+ color: $boolor;
+}
+-- assets/scss/components/_moo.scss --
+$moolor: #ccc;
+
+moo {
+ color: $moolor;
+}
+-- config.toml --
+theme = 'mytheme'
+-- layouts/index.html --
+{{ $cssOpts := (dict "includePaths" (slice "node_modules/foo" ) ) }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }}
+T1: {{ $r.Content }}
+-- themes/mytheme/assets/scss/components/_boo.scss --
+$boolor: orange;
+
+boo {
+ color: $boolor;
+}
+-- themes/mytheme/assets/scss/components/_imports.scss --
+@import "moo";
+@import "_boo";
+@import "_zoo";
+-- themes/mytheme/assets/scss/components/_moo.scss --
+$moolor: #fff;
+
+moo {
+ color: $moolor;
+}
+-- themes/mytheme/assets/scss/components/_zoo.scss --
+$zoolor: pink;
+
+zoo {
+ color: $zoolor;
+}
+-- themes/mytheme/assets/scss/main.scss --
+@import "components/imports";
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: files,
+ NeedsOsFS: true,
+ }).Build()
+
+ b.AssertFileContent("public/index.html", `T1: moo{color:#ccc}boo{color:green}zoo{color:pink}`)
+}
+
+func TestTransformErrors(t *testing.T) {
+ if !scss.Supports() {
+ t.Skip()
+ }
+
+ c := qt.New(t)
+
+ const filesTemplate = `
+-- config.toml --
+theme = 'mytheme'
+-- assets/scss/components/_foo.scss --
+/* comment line 1 */
+$foocolor: #ccc;
+
+foo {
+ color: $foocolor;
+}
+-- themes/mytheme/assets/scss/main.scss --
+/* comment line 1 */
+/* comment line 2 */
+@import "components/foo";
+/* comment line 4 */
+
+$maincolor: #eee;
+
+body {
+ color: $maincolor;
+}
+
+-- layouts/index.html --
+{{ $cssOpts := dict }}
+{{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }}
+T1: {{ $r.Content }}
+
+ `
+
+ c.Run("error in main", func(c *qt.C) {
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: strings.Replace(filesTemplate, "$maincolor: #eee;", "$maincolor #eee;", 1),
+ NeedsOsFS: true,
+ }).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`themes/mytheme/assets/scss/main.scss:6:1": expected ':' after $maincolor in assignment statement`))
+ fe := b.AssertIsFileError(err)
+ b.Assert(fe.ErrorContext(), qt.IsNotNil)
+ b.Assert(fe.ErrorContext().Lines, qt.DeepEquals, []string{"/* comment line 4 */", "", "$maincolor #eee;", "", "body {"})
+ b.Assert(fe.ErrorContext().ChromaLexer, qt.Equals, "scss")
+
+ })
+
+ c.Run("error in import", func(c *qt.C) {
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: c,
+ TxtarString: strings.Replace(filesTemplate, "$foocolor: #ccc;", "$foocolor #ccc;", 1),
+ NeedsOsFS: true,
+ }).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+ b.Assert(err.Error(), qt.Contains, `assets/scss/components/_foo.scss:2:1": expected ':' after $foocolor in assignment statement`)
+ fe := b.AssertIsFileError(err)
+ b.Assert(fe.ErrorContext(), qt.IsNotNil)
+ b.Assert(fe.ErrorContext().Lines, qt.DeepEquals, []string{"/* comment line 1 */", "$foocolor #ccc;", "", "foo {"})
+ b.Assert(fe.ErrorContext().ChromaLexer, qt.Equals, "scss")
+
+ })
+
+}
diff --git a/resources/resource_transformers/tocss/scss/tocss.go b/resources/resource_transformers/tocss/scss/tocss.go
new file mode 100644
index 000000000..57ac16711
--- /dev/null
+++ b/resources/resource_transformers/tocss/scss/tocss.go
@@ -0,0 +1,204 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build extended
+// +build extended
+
+package scss
+
+import (
+ "fmt"
+ "io"
+ "path"
+
+ "path/filepath"
+ "strings"
+
+ "github.com/bep/golibsass/libsass"
+ "github.com/bep/golibsass/libsass/libsasserrors"
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources"
+)
+
+// Used in tests. This feature requires Hugo to be built with the extended tag.
+func Supports() bool {
+ return true
+}
+
+func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx) error {
+ ctx.OutMediaType = media.CSSType
+
+ var outName string
+ if t.options.from.TargetPath != "" {
+ ctx.OutPath = t.options.from.TargetPath
+ } else {
+ ctx.ReplaceOutPathExtension(".css")
+ }
+
+ outName = path.Base(ctx.OutPath)
+
+ options := t.options
+ baseDir := path.Dir(ctx.SourcePath)
+ options.to.IncludePaths = t.c.sfs.RealDirs(baseDir)
+
+ // Append any workDir relative include paths
+ for _, ip := range options.from.IncludePaths {
+ info, err := t.c.workFs.Stat(filepath.Clean(ip))
+ if err == nil {
+ filename := info.(hugofs.FileMetaInfo).Meta().Filename
+ options.to.IncludePaths = append(options.to.IncludePaths, filename)
+ }
+ }
+
+ // To allow for overrides of SCSS files anywhere in the project/theme hierarchy, we need
+ // to help libsass resolve the filename by looking in the composite filesystem first.
+ // We add the entry directories for both project and themes to the include paths list, but
+ // that only works for overrides on the top level.
+ options.to.ImportResolver = func(url string, prev string) (newUrl string, body string, resolved bool) {
+ // We get URL paths from LibSASS, but we need file paths.
+ url = filepath.FromSlash(url)
+ prev = filepath.FromSlash(prev)
+
+ var basePath string
+ urlDir := filepath.Dir(url)
+ var prevDir string
+
+ if prev == "stdin" {
+ prevDir = baseDir
+ } else {
+ prevDir, _ = t.c.sfs.MakePathRelative(filepath.Dir(prev))
+
+ if prevDir == "" {
+ // Not a member of this filesystem. Let LibSASS handle it.
+ return "", "", false
+ }
+ }
+
+ basePath = filepath.Join(prevDir, urlDir)
+ name := filepath.Base(url)
+
+ // Libsass throws an error in cases where you have several possible candidates.
+ // We make this simpler and pick the first match.
+ var namePatterns []string
+ if strings.Contains(name, ".") {
+ namePatterns = []string{"_%s", "%s"}
+ } else if strings.HasPrefix(name, "_") {
+ namePatterns = []string{"_%s.scss", "_%s.sass"}
+ } else {
+ namePatterns = []string{"_%s.scss", "%s.scss", "_%s.sass", "%s.sass"}
+ }
+
+ name = strings.TrimPrefix(name, "_")
+
+ for _, namePattern := range namePatterns {
+ filenameToCheck := filepath.Join(basePath, fmt.Sprintf(namePattern, name))
+ fi, err := t.c.sfs.Fs.Stat(filenameToCheck)
+ if err == nil {
+ if fim, ok := fi.(hugofs.FileMetaInfo); ok {
+ return fim.Meta().Filename, "", true
+ }
+ }
+ }
+
+ // Not found, let LibSASS handle it
+ return "", "", false
+ }
+
+ if ctx.InMediaType.SubType == media.SASSType.SubType {
+ options.to.SassSyntax = true
+ }
+
+ if options.from.EnableSourceMap {
+
+ options.to.SourceMapOptions.Filename = outName + ".map"
+ options.to.SourceMapOptions.Root = t.c.rs.WorkingDir
+
+ // Setting this to the relative input filename will get the source map
+ // more correct for the main entry path (main.scss typically), but
+ // it will mess up the import mappings. As a workaround, we do a replacement
+ // in the source map itself (see below).
+ // options.InputPath = inputPath
+ options.to.SourceMapOptions.OutputPath = outName
+ options.to.SourceMapOptions.Contents = true
+ options.to.SourceMapOptions.OmitURL = false
+ options.to.SourceMapOptions.EnableEmbedded = false
+ }
+
+ res, err := t.c.toCSS(options.to, ctx.To, ctx.From)
+ if err != nil {
+ if sasserr, ok := err.(libsasserrors.Error); ok {
+ if sasserr.File == "stdin" && ctx.SourcePath != "" {
+ sasserr.File = t.c.sfs.RealFilename(ctx.SourcePath)
+ err = sasserr
+ }
+ }
+ return herrors.NewFileErrorFromFileInErr(err, hugofs.Os, nil)
+
+ }
+
+ if options.from.EnableSourceMap && res.SourceMapContent != "" {
+ sourcePath := t.c.sfs.RealFilename(ctx.SourcePath)
+
+ if strings.HasPrefix(sourcePath, t.c.rs.WorkingDir) {
+ sourcePath = strings.TrimPrefix(sourcePath, t.c.rs.WorkingDir+helpers.FilePathSeparator)
+ }
+
+ // This needs to be Unix-style slashes, even on Windows.
+ // See https://github.com/gohugoio/hugo/issues/4968
+ sourcePath = filepath.ToSlash(sourcePath)
+
+ // This is a workaround for what looks like a bug in Libsass. But
+ // getting this resolution correct in tools like Chrome Workspaces
+ // is important enough to go this extra mile.
+ mapContent := strings.Replace(res.SourceMapContent, `stdin",`, fmt.Sprintf("%s\",", sourcePath), 1)
+
+ return ctx.PublishSourceMap(mapContent)
+ }
+ return nil
+}
+
+func (c *Client) toCSS(options libsass.Options, dst io.Writer, src io.Reader) (libsass.Result, error) {
+ var res libsass.Result
+
+ transpiler, err := libsass.New(options)
+ if err != nil {
+ return res, err
+ }
+
+ in := helpers.ReaderToString(src)
+
+ // See https://github.com/gohugoio/hugo/issues/7059
+ // We need to preserve the regular CSS imports. This is far from
+ // a perfect solution, and only works for the main entry file, but
+ // that should cover many use cases, e.g. using SCSS as a preprocessor
+ // for Tailwind.
+ var importsReplaced bool
+ in, importsReplaced = replaceRegularImportsIn(in)
+
+ res, err = transpiler.Execute(in)
+ if err != nil {
+ return res, err
+ }
+
+ out := res.CSS
+ if importsReplaced {
+ out = replaceRegularImportsOut(out)
+ }
+
+ _, err = io.WriteString(dst, out)
+
+ return res, err
+}
diff --git a/resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg b/resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg
new file mode 100644
index 000000000..7d7307bed
--- /dev/null
+++ b/resources/testdata/1234567890qwertyuiopasdfghjklzxcvbnm5to6eeeeee7via8eleph.jpg
Binary files differ
diff --git a/resources/testdata/circle.svg b/resources/testdata/circle.svg
new file mode 100644
index 000000000..2759ae703
--- /dev/null
+++ b/resources/testdata/circle.svg
@@ -0,0 +1,5 @@
+<svg height="100" width="100">
+ <circle cx="50" cy="50" r="40" stroke="black" stroke-width="3" fill="red" />
+ Sorry, your browser does not support inline SVG.
+</svg>
+ \ No newline at end of file
diff --git a/resources/testdata/fuzzy-cirlcle.png b/resources/testdata/fuzzy-cirlcle.png
new file mode 100644
index 000000000..95497d822
--- /dev/null
+++ b/resources/testdata/fuzzy-cirlcle.png
Binary files differ
diff --git a/resources/testdata/giphy.gif b/resources/testdata/giphy.gif
new file mode 100644
index 000000000..f82b32cbe
--- /dev/null
+++ b/resources/testdata/giphy.gif
Binary files differ
diff --git a/resources/testdata/gohugoio-card.gif b/resources/testdata/gohugoio-card.gif
new file mode 100644
index 000000000..6bc20d83a
--- /dev/null
+++ b/resources/testdata/gohugoio-card.gif
Binary files differ
diff --git a/resources/testdata/gohugoio.png b/resources/testdata/gohugoio.png
new file mode 100644
index 000000000..0591db959
--- /dev/null
+++ b/resources/testdata/gohugoio.png
Binary files differ
diff --git a/resources/testdata/gohugoio24.png b/resources/testdata/gohugoio24.png
new file mode 100644
index 000000000..9b004b897
--- /dev/null
+++ b/resources/testdata/gohugoio24.png
Binary files differ
diff --git a/resources/testdata/gohugoio8.png b/resources/testdata/gohugoio8.png
new file mode 100644
index 000000000..0993f90e4
--- /dev/null
+++ b/resources/testdata/gohugoio8.png
Binary files differ
diff --git a/resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_200x0_resize_box.gif b/resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_200x0_resize_box.gif
new file mode 100644
index 000000000..ca826432c
--- /dev/null
+++ b/resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_200x0_resize_box.gif
Binary files differ
diff --git a/resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_512x0_resize_box.gif b/resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_512x0_resize_box.gif
new file mode 100644
index 000000000..590d2a780
--- /dev/null
+++ b/resources/testdata/golden/giphy_hu3eafc418e52414ace6236bf1d31f82e1_52213_512x0_resize_box.gif
Binary files differ
diff --git a/resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_100x0_resize_box.gif b/resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_100x0_resize_box.gif
new file mode 100644
index 000000000..7d810c1f9
--- /dev/null
+++ b/resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_100x0_resize_box.gif
Binary files differ
diff --git a/resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_220x0_resize_box.gif b/resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_220x0_resize_box.gif
new file mode 100644
index 000000000..c4b39b041
--- /dev/null
+++ b/resources/testdata/golden/gohugoio-card_hu4d09f75255d3942fd4680641110a1a73_10820_220x0_resize_box.gif
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_100x100_fill_box_center_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_100x100_fill_box_center_3.png
new file mode 100644
index 000000000..d2f0afd27
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_100x100_fill_box_center_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_1a923841aa34545db29f46a8fc4c5b0d.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_1a923841aa34545db29f46a8fc4c5b0d.png
new file mode 100644
index 000000000..a48a0f25a
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_1a923841aa34545db29f46a8fc4c5b0d.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x0_resize_q50_r90_box_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x0_resize_q50_r90_box_3.png
new file mode 100644
index 000000000..5abf378b4
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x0_resize_q50_r90_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x100_resize_box_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x100_resize_box_3.png
new file mode 100644
index 000000000..cd56200ea
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_200x100_resize_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x100_fill_nearestneighbor_topleft_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x100_fill_nearestneighbor_topleft_3.png
new file mode 100644
index 000000000..dd11ce7ed
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x100_fill_nearestneighbor_topleft_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fill_gaussian_smart1_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fill_gaussian_smart1_3.png
new file mode 100644
index 000000000..4ef633564
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fill_gaussian_smart1_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fit_linear_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fit_linear_3.png
new file mode 100644
index 000000000..5ad74bf79
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_300x200_fit_linear_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_324b4d42c8746a684068d123fad8b744.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_324b4d42c8746a684068d123fad8b744.png
new file mode 100644
index 000000000..eba4b1e66
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_324b4d42c8746a684068d123fad8b744.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_bottomleft_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_bottomleft_3.png
new file mode 100644
index 000000000..76deeabc7
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_bottomleft_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_center_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_center_3.png
new file mode 100644
index 000000000..76deeabc7
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_400x200_fill_box_center_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_43055c40cb4a15bd8491bfc502799f43.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_43055c40cb4a15bd8491bfc502799f43.png
new file mode 100644
index 000000000..0ce82e49c
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_43055c40cb4a15bd8491bfc502799f43.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_4ea8f246299cc5fba9744bdf162bd57d.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_4ea8f246299cc5fba9744bdf162bd57d.png
new file mode 100644
index 000000000..841d369ef
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_4ea8f246299cc5fba9744bdf162bd57d.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_600x0_resize_box_3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_600x0_resize_box_3.png
new file mode 100644
index 000000000..28028b72d
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_600x0_resize_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_60c098f0ca6626668d9e3ad6bfb38b5b.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_60c098f0ca6626668d9e3ad6bfb38b5b.png
new file mode 100644
index 000000000..46fa3fd1b
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_60c098f0ca6626668d9e3ad6bfb38b5b.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_6575f3a3c39a30cba9d76a6045c36de6.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_6575f3a3c39a30cba9d76a6045c36de6.png
new file mode 100644
index 000000000..056648a74
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_6575f3a3c39a30cba9d76a6045c36de6.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_8166ccaf22bdabb94c9bb90bffe64133.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_8166ccaf22bdabb94c9bb90bffe64133.png
new file mode 100644
index 000000000..2fece7804
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_8166ccaf22bdabb94c9bb90bffe64133.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9165e5559db8ba31a401327b5617c098.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9165e5559db8ba31a401327b5617c098.png
new file mode 100644
index 000000000..50fae767a
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9165e5559db8ba31a401327b5617c098.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9a8d95423df65a9c230a4cc88056c13a.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9a8d95423df65a9c230a4cc88056c13a.png
new file mode 100644
index 000000000..32c5b49d8
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_9a8d95423df65a9c230a4cc88056c13a.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a38a1924befb1721a09be7d432f5f70f.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a38a1924befb1721a09be7d432f5f70f.png
new file mode 100644
index 000000000..603b95ae0
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a38a1924befb1721a09be7d432f5f70f.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a487ef4bea3dba1e1a84be5358cfef39.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a487ef4bea3dba1e1a84be5358cfef39.png
new file mode 100644
index 000000000..dde14757c
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a487ef4bea3dba1e1a84be5358cfef39.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a751b6cd969d7feab12540a8bb0ca927.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a751b6cd969d7feab12540a8bb0ca927.png
new file mode 100644
index 000000000..93f8dfda2
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_a751b6cd969d7feab12540a8bb0ca927.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_abcdd770eaed9301cfff4bc2f96459ba.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_abcdd770eaed9301cfff4bc2f96459ba.png
new file mode 100644
index 000000000..0991ca984
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_abcdd770eaed9301cfff4bc2f96459ba.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_aeaaf23afe6fb4702bd3992426d0cad3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_aeaaf23afe6fb4702bd3992426d0cad3.png
new file mode 100644
index 000000000..ce791767f
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_aeaaf23afe6fb4702bd3992426d0cad3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_bdde5e36f15689c1451933f92fd357b3.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_bdde5e36f15689c1451933f92fd357b3.png
new file mode 100644
index 000000000..25ac82485
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_bdde5e36f15689c1451933f92fd357b3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d111079da5d8d143b6cae10d6fedbc24.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d111079da5d8d143b6cae10d6fedbc24.png
new file mode 100644
index 000000000..362be673b
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d111079da5d8d143b6cae10d6fedbc24.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d87fd348ad697a9b16399709441d9d56.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d87fd348ad697a9b16399709441d9d56.png
new file mode 100644
index 000000000..174649232
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_d87fd348ad697a9b16399709441d9d56.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_e8ef2efdde4357a79694ea9c2be82f63.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_e8ef2efdde4357a79694ea9c2be82f63.png
new file mode 100644
index 000000000..697ac914e
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_e8ef2efdde4357a79694ea9c2be82f63.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_ee57777f148caaa6993972d9709fdf2d.png b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_ee57777f148caaa6993972d9709fdf2d.png
new file mode 100644
index 000000000..c1a64b59f
--- /dev/null
+++ b/resources/testdata/golden/gohugoio24_huc57dd738f4724f4b341121e66fd85555_267952_ee57777f148caaa6993972d9709fdf2d.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_00cd4ff18b53ecbd78e42aefe5fbf522.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_00cd4ff18b53ecbd78e42aefe5fbf522.png
new file mode 100644
index 000000000..1fa2bc9de
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_00cd4ff18b53ecbd78e42aefe5fbf522.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_100x100_fill_box_center_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_100x100_fill_box_center_3.png
new file mode 100644
index 000000000..0eef0aaf3
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_100x100_fill_box_center_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x0_resize_q50_r90_box_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x0_resize_q50_r90_box_3.png
new file mode 100644
index 000000000..c35f00722
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x0_resize_q50_r90_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x100_resize_box_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x100_resize_box_3.png
new file mode 100644
index 000000000..6ddb55158
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_200x100_resize_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_276650b97daa7ae98e79b929d7f87c19.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_276650b97daa7ae98e79b929d7f87c19.png
new file mode 100644
index 000000000..0b914391c
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_276650b97daa7ae98e79b929d7f87c19.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_2e05d39f4cb329be10e8c515494cef76.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_2e05d39f4cb329be10e8c515494cef76.png
new file mode 100644
index 000000000..795a608e8
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_2e05d39f4cb329be10e8c515494cef76.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x100_fill_nearestneighbor_topleft_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x100_fill_nearestneighbor_topleft_3.png
new file mode 100644
index 000000000..08eccf7cd
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x100_fill_nearestneighbor_topleft_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fill_gaussian_smart1_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fill_gaussian_smart1_3.png
new file mode 100644
index 000000000..162dc4ec9
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fill_gaussian_smart1_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fit_linear_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fit_linear_3.png
new file mode 100644
index 000000000..0660c20d7
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_300x200_fit_linear_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3980c5868e0b6f20ec95424dfdcb1d67.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3980c5868e0b6f20ec95424dfdcb1d67.png
new file mode 100644
index 000000000..7134de473
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3980c5868e0b6f20ec95424dfdcb1d67.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_398ca764abfff83bb15318068105dcb9.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_398ca764abfff83bb15318068105dcb9.png
new file mode 100644
index 000000000..37dc0f798
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_398ca764abfff83bb15318068105dcb9.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3ed273f49d1dc83891f5736e21fc5f44.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3ed273f49d1dc83891f5736e21fc5f44.png
new file mode 100644
index 000000000..1a229a429
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_3ed273f49d1dc83891f5736e21fc5f44.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_bottomleft_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_bottomleft_3.png
new file mode 100644
index 000000000..acde6a0f7
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_bottomleft_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_center_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_center_3.png
new file mode 100644
index 000000000..acde6a0f7
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_400x200_fill_box_center_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_428e769d14483c2fcdd6f5c5138e2066.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_428e769d14483c2fcdd6f5c5138e2066.png
new file mode 100644
index 000000000..c96e04108
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_428e769d14483c2fcdd6f5c5138e2066.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_600x0_resize_box_3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_600x0_resize_box_3.png
new file mode 100644
index 000000000..40fffa23a
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_600x0_resize_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_73c19c5f80881858a85aa23cd0ca400d.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_73c19c5f80881858a85aa23cd0ca400d.png
new file mode 100644
index 000000000..51f6cfa7e
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_73c19c5f80881858a85aa23cd0ca400d.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_871826faffc414ca3746f65fc9910eed.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_871826faffc414ca3746f65fc9910eed.png
new file mode 100644
index 000000000..53dd0b224
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_871826faffc414ca3746f65fc9910eed.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0505112c99af88626ac9b9a16a27acb.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0505112c99af88626ac9b9a16a27acb.png
new file mode 100644
index 000000000..156b42f43
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0505112c99af88626ac9b9a16a27acb.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0ffc0f22f22e6920f3cad414d6db6ba.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0ffc0f22f22e6920f3cad414d6db6ba.png
new file mode 100644
index 000000000..a5852e14c
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_a0ffc0f22f22e6920f3cad414d6db6ba.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_ae631e5252bb5d7b92bc766ad1a89069.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_ae631e5252bb5d7b92bc766ad1a89069.png
new file mode 100644
index 000000000..c8f782598
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_ae631e5252bb5d7b92bc766ad1a89069.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_b34412412a1cf1658e516a335b0a8dd4.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_b34412412a1cf1658e516a335b0a8dd4.png
new file mode 100644
index 000000000..c29c6e613
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_b34412412a1cf1658e516a335b0a8dd4.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_c5140f11378ddb13843432a5b489594a.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_c5140f11378ddb13843432a5b489594a.png
new file mode 100644
index 000000000..09d991972
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_c5140f11378ddb13843432a5b489594a.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d17d0184674fcf0a4d770c90bed503db.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d17d0184674fcf0a4d770c90bed503db.png
new file mode 100644
index 000000000..325c31acd
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d17d0184674fcf0a4d770c90bed503db.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d1bbfa2629bffb90118cacce3fcfb924.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d1bbfa2629bffb90118cacce3fcfb924.png
new file mode 100644
index 000000000..2def214c8
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_d1bbfa2629bffb90118cacce3fcfb924.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_e4d38674b70d9ef559c5df72c9262790.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_e4d38674b70d9ef559c5df72c9262790.png
new file mode 100644
index 000000000..414acff3b
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_e4d38674b70d9ef559c5df72c9262790.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_eff9583d9b94ac79c60cb099846ce8f3.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_eff9583d9b94ac79c60cb099846ce8f3.png
new file mode 100644
index 000000000..69aa35885
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_eff9583d9b94ac79c60cb099846ce8f3.png
Binary files differ
diff --git a/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_f17bba59421e7a500387232295512fc0.png b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_f17bba59421e7a500387232295512fc0.png
new file mode 100644
index 000000000..64b0b3f7a
--- /dev/null
+++ b/resources/testdata/golden/gohugoio8_hu7f72c00afdf7634587afaa5eff2a25b2_73538_f17bba59421e7a500387232295512fc0.png
Binary files differ
diff --git a/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_bge3e615_box_3.png b/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_bge3e615_box_3.png
new file mode 100644
index 000000000..50c55c9eb
--- /dev/null
+++ b/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_bge3e615_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_q75_bge3e615_box_3.jpg b/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_q75_bge3e615_box_3.jpg
new file mode 100644
index 000000000..17fca6e6a
--- /dev/null
+++ b/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_200x0_resize_q75_bge3e615_box_3.jpg
Binary files differ
diff --git a/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_30x0_resize_box_3.png b/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_30x0_resize_box_3.png
new file mode 100644
index 000000000..eb9f1170c
--- /dev/null
+++ b/resources/testdata/golden/gopher-hero8_huaa0cd7d2cfc14ff32a57f171896f2285_13327_30x0_resize_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_bge3e615_box_3.png b/resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_bge3e615_box_3.png
new file mode 100644
index 000000000..b01efee50
--- /dev/null
+++ b/resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_bge3e615_box_3.png
Binary files differ
diff --git a/resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_q75_bge3e615_box_3.jpg b/resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_q75_bge3e615_box_3.jpg
new file mode 100644
index 000000000..56642d7e1
--- /dev/null
+++ b/resources/testdata/golden/gradient-circle_huf3d35257a40a8d6f525263a856c5ecfd_20069_200x0_resize_q75_bge3e615_box_3.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_0d1b300da7a815ed567b6dadb6f2ce5e.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_0d1b300da7a815ed567b6dadb6f2ce5e.jpg
new file mode 100644
index 000000000..1e2cb535b
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_0d1b300da7a815ed567b6dadb6f2ce5e.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x100_fill_q75_box_center.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x100_fill_q75_box_center.jpg
new file mode 100644
index 000000000..8e6164e32
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_100x100_fill_q75_box_center.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_17fd3c558d78ce249b5f0bcbe1ddbffb.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_17fd3c558d78ce249b5f0bcbe1ddbffb.jpg
new file mode 100644
index 000000000..2aa3dad2b
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_17fd3c558d78ce249b5f0bcbe1ddbffb.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x0_resize_q50_r90_box.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x0_resize_q50_r90_box.jpg
new file mode 100644
index 000000000..05d98c67a
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x0_resize_q50_r90_box.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_resize_q75_box.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_resize_q75_box.jpg
new file mode 100644
index 000000000..f12dd18fc
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_resize_q75_box.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x100_fill_q75_nearestneighbor_topleft.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x100_fill_q75_nearestneighbor_topleft.jpg
new file mode 100644
index 000000000..8ac3b2524
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x100_fill_q75_nearestneighbor_topleft.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fill_q75_gaussian_smart1.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fill_q75_gaussian_smart1.jpg
new file mode 100644
index 000000000..03de912fb
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fill_q75_gaussian_smart1.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fit_q75_linear.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fit_q75_linear.jpg
new file mode 100644
index 000000000..3801c17d9
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_fit_q75_linear.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_30fc2aab35ca0861bf396d09aebc85a4.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_30fc2aab35ca0861bf396d09aebc85a4.jpg
new file mode 100644
index 000000000..60207a829
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_30fc2aab35ca0861bf396d09aebc85a4.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_352eb0101b7c88107520ba719432bbb2.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_352eb0101b7c88107520ba719432bbb2.jpg
new file mode 100644
index 000000000..f7e84e33d
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_352eb0101b7c88107520ba719432bbb2.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3efc2d0f29a8e12c5a690fc6c9288854.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3efc2d0f29a8e12c5a690fc6c9288854.jpg
new file mode 100644
index 000000000..17a5927e2
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3efc2d0f29a8e12c5a690fc6c9288854.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f1b1455c4a7d13c5aeb7510f9a6a581.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f1b1455c4a7d13c5aeb7510f9a6a581.jpg
new file mode 100644
index 000000000..93b914161
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_3f1b1455c4a7d13c5aeb7510f9a6a581.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_bottomleft.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_bottomleft.jpg
new file mode 100644
index 000000000..9a6255687
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_bottomleft.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_center.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_center.jpg
new file mode 100644
index 000000000..b2db97485
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_400x200_fill_q75_box_center.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_510813cc53c37e2d489d2f9fdb13f749.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_510813cc53c37e2d489d2f9fdb13f749.jpg
new file mode 100644
index 000000000..6c3da1385
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_510813cc53c37e2d489d2f9fdb13f749.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_600x0_resize_q75_box.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_600x0_resize_q75_box.jpg
new file mode 100644
index 000000000..a5ad199d8
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_600x0_resize_q75_box.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6673ece428cb7d523234ca0d7c299542.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6673ece428cb7d523234ca0d7c299542.jpg
new file mode 100644
index 000000000..7e2bdeef0
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6673ece428cb7d523234ca0d7c299542.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6c5c12ac79d3455ccb1993d51eec3cdf.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6c5c12ac79d3455ccb1993d51eec3cdf.jpg
new file mode 100644
index 000000000..e77e78d7b
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_6c5c12ac79d3455ccb1993d51eec3cdf.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_7d9bc4700565266807dc476421066137.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_7d9bc4700565266807dc476421066137.jpg
new file mode 100644
index 000000000..ee246814d
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_7d9bc4700565266807dc476421066137.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_9f00027c376fe8556cc9996c47f23f78.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_9f00027c376fe8556cc9996c47f23f78.jpg
new file mode 100644
index 000000000..e7db706c2
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_9f00027c376fe8556cc9996c47f23f78.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_abf356affd7d70d6bec3b3498b572191.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_abf356affd7d70d6bec3b3498b572191.jpg
new file mode 100644
index 000000000..9688c99c3
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_abf356affd7d70d6bec3b3498b572191.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c2d24766b49f3147f5a4137a8db592ac.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c2d24766b49f3147f5a4137a8db592ac.jpg
new file mode 100644
index 000000000..b425b0d92
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c2d24766b49f3147f5a4137a8db592ac.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c36da6818db1ab630c3f87f65170003b.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c36da6818db1ab630c3f87f65170003b.jpg
new file mode 100644
index 000000000..41b42a883
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c36da6818db1ab630c3f87f65170003b.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c50a17db1e6d1bd0fe31a9a3444f1587.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c50a17db1e6d1bd0fe31a9a3444f1587.jpg
new file mode 100644
index 000000000..1857f8758
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_c50a17db1e6d1bd0fe31a9a3444f1587.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_cb45fcba865177290c89dc9f41d6ff7a.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_cb45fcba865177290c89dc9f41d6ff7a.jpg
new file mode 100644
index 000000000..f09ff9e33
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_cb45fcba865177290c89dc9f41d6ff7a.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_d30c10468b33df9010d185a8fe8f0491.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_d30c10468b33df9010d185a8fe8f0491.jpg
new file mode 100644
index 000000000..0b7d4e5d0
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_d30c10468b33df9010d185a8fe8f0491.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_de1fe6c0f40e7165355507d0f1748083.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_de1fe6c0f40e7165355507d0f1748083.jpg
new file mode 100644
index 000000000..7e35750db
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_de1fe6c0f40e7165355507d0f1748083.jpg
Binary files differ
diff --git a/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_f6d8fe32ce3e83abf130e91e33456914.jpg b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_f6d8fe32ce3e83abf130e91e33456914.jpg
new file mode 100644
index 000000000..b67650061
--- /dev/null
+++ b/resources/testdata/golden/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_f6d8fe32ce3e83abf130e91e33456914.jpg
Binary files differ
diff --git a/resources/testdata/golden_webp/fuzzy-cirlcle_hu525d1a6cf670e85f5e8f19890241399b_26792_200x0_resize_q75_h2_box_3.webp b/resources/testdata/golden_webp/fuzzy-cirlcle_hu525d1a6cf670e85f5e8f19890241399b_26792_200x0_resize_q75_h2_box_3.webp
new file mode 100644
index 000000000..0b9e6752a
--- /dev/null
+++ b/resources/testdata/golden_webp/fuzzy-cirlcle_hu525d1a6cf670e85f5e8f19890241399b_26792_200x0_resize_q75_h2_box_3.webp
Binary files differ
diff --git a/resources/testdata/gopher-hero8.png b/resources/testdata/gopher-hero8.png
new file mode 100644
index 000000000..08ae570d2
--- /dev/null
+++ b/resources/testdata/gopher-hero8.png
Binary files differ
diff --git a/resources/testdata/gradient-circle.png b/resources/testdata/gradient-circle.png
new file mode 100644
index 000000000..a4ace53a1
--- /dev/null
+++ b/resources/testdata/gradient-circle.png
Binary files differ
diff --git a/resources/testdata/iss8079.jpg b/resources/testdata/iss8079.jpg
new file mode 100644
index 000000000..a9049e81b
--- /dev/null
+++ b/resources/testdata/iss8079.jpg
Binary files differ
diff --git a/resources/testdata/sub/gohugoio2.png b/resources/testdata/sub/gohugoio2.png
new file mode 100644
index 000000000..0591db959
--- /dev/null
+++ b/resources/testdata/sub/gohugoio2.png
Binary files differ
diff --git a/resources/testdata/sunrise.JPG b/resources/testdata/sunrise.JPG
new file mode 100644
index 000000000..7d7307bed
--- /dev/null
+++ b/resources/testdata/sunrise.JPG
Binary files differ
diff --git a/resources/testdata/sunset.jpg b/resources/testdata/sunset.jpg
new file mode 100644
index 000000000..7d7307bed
--- /dev/null
+++ b/resources/testdata/sunset.jpg
Binary files differ
diff --git a/resources/testdata/sunset.webp b/resources/testdata/sunset.webp
new file mode 100644
index 000000000..4365e7b9f
--- /dev/null
+++ b/resources/testdata/sunset.webp
Binary files differ
diff --git a/resources/testhelpers_test.go b/resources/testhelpers_test.go
new file mode 100644
index 000000000..3a4e7e580
--- /dev/null
+++ b/resources/testhelpers_test.go
@@ -0,0 +1,205 @@
+package resources
+
+import (
+ "image"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/modules"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/resources/images"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/afero"
+)
+
+type specDescriptor struct {
+ baseURL string
+ c *qt.C
+ fs afero.Fs
+}
+
+func createTestCfg() config.Provider {
+ cfg := config.New()
+ cfg.Set("resourceDir", "resources")
+ cfg.Set("contentDir", "content")
+ cfg.Set("dataDir", "data")
+ cfg.Set("i18nDir", "i18n")
+ cfg.Set("layoutDir", "layouts")
+ cfg.Set("assetDir", "assets")
+ cfg.Set("archetypeDir", "archetypes")
+ cfg.Set("publishDir", "public")
+
+ langs.LoadLanguageSettings(cfg, nil)
+ mod, err := modules.CreateProjectModule(cfg)
+ if err != nil {
+ panic(err)
+ }
+ cfg.Set("allModules", modules.Modules{mod})
+
+ return cfg
+}
+
+func newTestResourceSpec(desc specDescriptor) *Spec {
+ baseURL := desc.baseURL
+ if baseURL == "" {
+ baseURL = "https://example.com/"
+ }
+
+ afs := desc.fs
+ if afs == nil {
+ afs = afero.NewMemMapFs()
+ }
+
+ afs = hugofs.NewBaseFileDecorator(afs)
+
+ c := desc.c
+
+ cfg := createTestCfg()
+ cfg.Set("baseURL", baseURL)
+
+ imagingCfg := map[string]any{
+ "resampleFilter": "linear",
+ "quality": 68,
+ "anchor": "left",
+ }
+
+ cfg.Set("imaging", imagingCfg)
+
+ fs := hugofs.NewFrom(afs, cfg)
+ fs.PublishDir = hugofs.NewCreateCountingFs(fs.PublishDir)
+
+ s, err := helpers.NewPathSpec(fs, cfg, nil)
+ c.Assert(err, qt.IsNil)
+
+ filecaches, err := filecache.NewCaches(s)
+ c.Assert(err, qt.IsNil)
+
+ spec, err := NewSpec(s, filecaches, nil, nil, nil, nil, output.DefaultFormats, media.DefaultTypes)
+ c.Assert(err, qt.IsNil)
+ return spec
+}
+
+func newTargetPaths(link string) func() page.TargetPaths {
+ return func() page.TargetPaths {
+ return page.TargetPaths{
+ SubResourceBaseTarget: filepath.FromSlash(link),
+ SubResourceBaseLink: link,
+ }
+ }
+}
+
+func newTestResourceOsFs(c *qt.C) (*Spec, string) {
+ cfg := createTestCfg()
+ cfg.Set("baseURL", "https://example.com")
+
+ workDir, err := ioutil.TempDir("", "hugores")
+ c.Assert(err, qt.IsNil)
+ c.Assert(workDir, qt.Not(qt.Equals), "")
+
+ if runtime.GOOS == "darwin" && !strings.HasPrefix(workDir, "/private") {
+ // To get the entry folder in line with the rest. This is a little bit
+ // mysterious, but so be it.
+ workDir = "/private" + workDir
+ }
+
+ cfg.Set("workingDir", workDir)
+
+ fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(hugofs.Os), cfg)
+
+ s, err := helpers.NewPathSpec(fs, cfg, nil)
+ c.Assert(err, qt.IsNil)
+
+ filecaches, err := filecache.NewCaches(s)
+ c.Assert(err, qt.IsNil)
+
+ spec, err := NewSpec(s, filecaches, nil, nil, nil, nil, output.DefaultFormats, media.DefaultTypes)
+ c.Assert(err, qt.IsNil)
+
+ return spec, workDir
+}
+
+func fetchSunset(c *qt.C) images.ImageResource {
+ return fetchImage(c, "sunset.jpg")
+}
+
+func fetchImage(c *qt.C, name string) images.ImageResource {
+ spec := newTestResourceSpec(specDescriptor{c: c})
+ return fetchImageForSpec(spec, c, name)
+}
+
+func fetchImageForSpec(spec *Spec, c *qt.C, name string) images.ImageResource {
+ r := fetchResourceForSpec(spec, c, name)
+
+ img := r.(images.ImageResource)
+
+ c.Assert(img, qt.Not(qt.IsNil))
+ c.Assert(img.(specProvider).getSpec(), qt.Not(qt.IsNil))
+
+ return img
+}
+
+func fetchResourceForSpec(spec *Spec, c *qt.C, name string, targetPathAddends ...string) resource.ContentResource {
+ src, err := os.Open(filepath.FromSlash("testdata/" + name))
+ c.Assert(err, qt.IsNil)
+ workDir := spec.WorkingDir
+ if len(targetPathAddends) > 0 {
+ addends := strings.Join(targetPathAddends, "_")
+ name = addends + "_" + name
+ }
+ targetFilename := filepath.Join(workDir, name)
+ out, err := helpers.OpenFileForWriting(spec.Fs.Source, targetFilename)
+ c.Assert(err, qt.IsNil)
+ _, err = io.Copy(out, src)
+ out.Close()
+ src.Close()
+ c.Assert(err, qt.IsNil)
+
+ factory := newTargetPaths("/a")
+
+ r, err := spec.New(ResourceSourceDescriptor{Fs: spec.Fs.Source, TargetPaths: factory, LazyPublish: true, RelTargetFilename: name, SourceFilename: targetFilename})
+ c.Assert(err, qt.IsNil)
+ c.Assert(r, qt.Not(qt.IsNil))
+
+ return r.(resource.ContentResource)
+}
+
+func assertImageFile(c *qt.C, fs afero.Fs, filename string, width, height int) {
+ filename = filepath.Clean(filename)
+ f, err := fs.Open(filename)
+ c.Assert(err, qt.IsNil)
+ defer f.Close()
+
+ config, _, err := image.DecodeConfig(f)
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(config.Width, qt.Equals, width)
+ c.Assert(config.Height, qt.Equals, height)
+}
+
+func assertFileCache(c *qt.C, fs afero.Fs, filename string, width, height int) {
+ assertImageFile(c, fs, filepath.Clean(filename), width, height)
+}
+
+func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) {
+ writeToFs(t, fs.Source, filename, content)
+}
+
+func writeToFs(t testing.TB, fs afero.Fs, filename, content string) {
+ if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
+ t.Fatalf("Failed to write file: %s", err)
+ }
+}
diff --git a/resources/transform.go b/resources/transform.go
new file mode 100644
index 000000000..7d81f9b21
--- /dev/null
+++ b/resources/transform.go
@@ -0,0 +1,670 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "bytes"
+ "fmt"
+ "image"
+ "io"
+ "path"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/resources/images"
+ "github.com/gohugoio/hugo/resources/images/exif"
+ "github.com/spf13/afero"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/internal"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/media"
+)
+
+var (
+ _ resource.ContentResource = (*resourceAdapter)(nil)
+ _ resourceCopier = (*resourceAdapter)(nil)
+ _ resource.ReadSeekCloserResource = (*resourceAdapter)(nil)
+ _ resource.Resource = (*resourceAdapter)(nil)
+ _ resource.Source = (*resourceAdapter)(nil)
+ _ resource.Identifier = (*resourceAdapter)(nil)
+ _ resource.ResourceMetaProvider = (*resourceAdapter)(nil)
+)
+
+// These are transformations that need special support in Hugo that may not
+// be available when building the theme/site so we write the transformation
+// result to disk and reuse if needed for these.
+// TODO(bep) it's a little fragile having these constants redefined here.
+var transformationsToCacheOnDisk = map[string]bool{
+ "postcss": true,
+ "tocss": true,
+ "tocss-dart": true,
+}
+
+func newResourceAdapter(spec *Spec, lazyPublish bool, target transformableResource) *resourceAdapter {
+ var po *publishOnce
+ if lazyPublish {
+ po = &publishOnce{}
+ }
+ return &resourceAdapter{
+ resourceTransformations: &resourceTransformations{},
+ resourceAdapterInner: &resourceAdapterInner{
+ spec: spec,
+ publishOnce: po,
+ target: target,
+ },
+ }
+}
+
+// ResourceTransformation is the interface that a resource transformation step
+// needs to implement.
+type ResourceTransformation interface {
+ Key() internal.ResourceTransformationKey
+ Transform(ctx *ResourceTransformationCtx) error
+}
+
+type ResourceTransformationCtx struct {
+ // The content to transform.
+ From io.Reader
+
+ // The target of content transformation.
+ // The current implementation requires that r is written to w
+ // even if no transformation is performed.
+ To io.Writer
+
+ // This is the relative path to the original source. Unix styled slashes.
+ SourcePath string
+
+ // This is the relative target path to the resource. Unix styled slashes.
+ InPath string
+
+ // The relative target path to the transformed resource. Unix styled slashes.
+ OutPath string
+
+ // The input media type
+ InMediaType media.Type
+
+ // The media type of the transformed resource.
+ OutMediaType media.Type
+
+ // Data can be set on the transformed Resource. Note that this needs
+ // to be simple types, as it needs to be serialized to JSON and back.
+ Data map[string]any
+
+ // This is used to publish additional artifacts, e.g. source maps.
+ // We may improve this.
+ OpenResourcePublisher func(relTargetPath string) (io.WriteCloser, error)
+}
+
+// AddOutPathIdentifier transforms InPath to OutPath, adding an identifier,
+// e.g. '.min' before any extension.
+func (ctx *ResourceTransformationCtx) AddOutPathIdentifier(identifier string) {
+ ctx.OutPath = ctx.addPathIdentifier(ctx.InPath, identifier)
+}
+
+// PublishSourceMap writes the content to the target folder of the main resource
+// with the ".map" extension added.
+func (ctx *ResourceTransformationCtx) PublishSourceMap(content string) error {
+ target := ctx.OutPath + ".map"
+ f, err := ctx.OpenResourcePublisher(target)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ _, err = f.Write([]byte(content))
+ return err
+}
+
+// ReplaceOutPathExtension transforms InPath to OutPath, replacing the file
+// extension, e.g. ".scss".
+func (ctx *ResourceTransformationCtx) ReplaceOutPathExtension(newExt string) {
+ dir, file := path.Split(ctx.InPath)
+ base, _ := paths.PathAndExt(file)
+ ctx.OutPath = path.Join(dir, (base + newExt))
+}
+
+func (ctx *ResourceTransformationCtx) addPathIdentifier(inPath, identifier string) string {
+ dir, file := path.Split(inPath)
+ base, ext := paths.PathAndExt(file)
+ return path.Join(dir, (base + identifier + ext))
+}
+
+type publishOnce struct {
+ publisherInit sync.Once
+ publisherErr error
+}
+
+type resourceAdapter struct {
+ commonResource
+ *resourceTransformations
+ *resourceAdapterInner
+}
+
+func (r *resourceAdapter) Content() (any, error) {
+ r.init(false, true)
+ if r.transformationsErr != nil {
+ return nil, r.transformationsErr
+ }
+ return r.target.Content()
+}
+
+func (r *resourceAdapter) Err() resource.ResourceError {
+ return nil
+}
+
+func (r *resourceAdapter) Data() any {
+ r.init(false, false)
+ return r.target.Data()
+}
+
+func (r resourceAdapter) cloneTo(targetPath string) resource.Resource {
+ newtTarget := r.target.cloneTo(targetPath)
+ newInner := &resourceAdapterInner{
+ spec: r.spec,
+ target: newtTarget.(transformableResource),
+ }
+ if r.resourceAdapterInner.publishOnce != nil {
+ newInner.publishOnce = &publishOnce{}
+ }
+ r.resourceAdapterInner = newInner
+ return &r
+}
+
// Image operations. These all delegate to getImageOps, which panics if
// the wrapped target resource is not a raster image.

func (r *resourceAdapter) Crop(spec string) (images.ImageResource, error) {
	return r.getImageOps().Crop(spec)
}

func (r *resourceAdapter) Fill(spec string) (images.ImageResource, error) {
	return r.getImageOps().Fill(spec)
}

func (r *resourceAdapter) Fit(spec string) (images.ImageResource, error) {
	return r.getImageOps().Fit(spec)
}

func (r *resourceAdapter) Filter(filters ...any) (images.ImageResource, error) {
	return r.getImageOps().Filter(filters...)
}

func (r *resourceAdapter) Height() int {
	return r.getImageOps().Height()
}

func (r *resourceAdapter) Exif() *exif.ExifInfo {
	return r.getImageOps().Exif()
}

// Metadata accessors. init(false, false) runs the transformation chain
// (once) but does not publish the result or force content into memory.

func (r *resourceAdapter) Key() string {
	r.init(false, false)
	return r.target.(resource.Identifier).Key()
}

func (r *resourceAdapter) MediaType() media.Type {
	r.init(false, false)
	return r.target.MediaType()
}

func (r *resourceAdapter) Name() string {
	r.init(false, false)
	return r.target.Name()
}

func (r *resourceAdapter) Params() maps.Params {
	r.init(false, false)
	return r.target.Params()
}

// Permalink triggers publishing (init with publish=true) before the URL
// is returned, so the linked file exists on disk.
func (r *resourceAdapter) Permalink() string {
	r.init(true, false)
	return r.target.Permalink()
}

func (r *resourceAdapter) Publish() error {
	r.init(false, false)

	return r.target.Publish()
}

func (r *resourceAdapter) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
	r.init(false, false)
	return r.target.ReadSeekCloser()
}

// RelPermalink, like Permalink, publishes the resource as a side effect.
func (r *resourceAdapter) RelPermalink() string {
	r.init(true, false)
	return r.target.RelPermalink()
}

func (r *resourceAdapter) Resize(spec string) (images.ImageResource, error) {
	return r.getImageOps().Resize(spec)
}

func (r *resourceAdapter) ResourceType() string {
	r.init(false, false)
	return r.target.ResourceType()
}

func (r *resourceAdapter) String() string {
	return r.Name()
}

func (r *resourceAdapter) Title() string {
	r.init(false, false)
	return r.target.Title()
}

// Transform appends t to the transformation chain and returns a new
// adapter. Note the value receiver: r is copied, so the returned adapter
// gets its own transformation slice and inner state while the original
// adapter is left untouched.
func (r resourceAdapter) Transform(t ...ResourceTransformation) (ResourceTransformer, error) {
	r.resourceTransformations = &resourceTransformations{
		transformations: append(r.transformations, t...),
	}

	r.resourceAdapterInner = &resourceAdapterInner{
		spec:        r.spec,
		publishOnce: &publishOnce{},
		target:      r.target,
	}

	return &r, nil
}

func (r *resourceAdapter) Width() int {
	return r.getImageOps().Width()
}

func (r *resourceAdapter) DecodeImage() (image.Image, error) {
	return r.getImageOps().DecodeImage()
}
+
+func (r *resourceAdapter) getImageOps() images.ImageResourceOps {
+ img, ok := r.target.(images.ImageResourceOps)
+ if !ok {
+ if r.MediaType().SubType == "svg" {
+ panic("this method is only available for raster images. To determine if an image is SVG, you can do {{ if eq .MediaType.SubType \"svg\" }}{{ end }}")
+ }
+ fmt.Println(r.MediaType().SubType)
+ panic("this method is only available for image resources")
+ }
+ r.init(false, false)
+ return img
+}
+
// getMetaAssigner exposes the current target for metadata assignment.
func (r *resourceAdapter) getMetaAssigner() metaAssigner {
	return r.target
}

// getSpec returns the resource Spec this adapter was created from.
func (r *resourceAdapter) getSpec() *Spec {
	return r.spec
}
+
// publish writes the target resource to its destination at most once.
// It is a no-op when publishOnce is nil (publishing was handed over to
// the transformation itself; see initTransform). Errors are logged and
// stored in publisherErr rather than returned.
func (r *resourceAdapter) publish() {
	if r.publishOnce == nil {
		return
	}

	r.publisherInit.Do(func() {
		r.publisherErr = r.target.Publish()

		if r.publisherErr != nil {
			r.spec.Logger.Errorf("Failed to publish Resource: %s", r.publisherErr)
		}
	})
}
+
// TransformationKey returns the cache key for this resource's
// transformation chain: the cleaned base key of the target resource
// joined with an MD5 hash over the ordered transformation keys.
func (r *resourceAdapter) TransformationKey() string {
	// Files with a suffix will be stored in cache (both on disk and in memory)
	// partitioned by their suffix.
	var key string
	for _, tr := range r.transformations {
		key = key + "_" + tr.Key().Value()
	}

	base := ResourceCacheKey(r.target.Key())
	return r.spec.ResourceCache.cleanKey(base) + "_" + helpers.MD5String(key)
}
+
// transform runs the configured transformation chain against the target
// resource. Results are memoized in the in-memory resource cache keyed
// by TransformationKey, and selected transformation types (see
// transformationsToCacheOnDisk) are additionally served from/written to
// the file cache. If publish is set the result is also written to the
// destination; if setContent is set the transformed content is kept in
// memory so Content() can serve it.
func (r *resourceAdapter) transform(publish, setContent bool) error {
	cache := r.spec.ResourceCache

	key := r.TransformationKey()

	cached, found := cache.get(key)

	if found {
		r.resourceAdapterInner = cached.(*resourceAdapterInner)
		return nil
	}

	// Acquire a write lock for the named transformation.
	cache.nlocker.Lock(key)
	// Check the cache again.
	cached, found = cache.get(key)
	if found {
		r.resourceAdapterInner = cached.(*resourceAdapterInner)
		cache.nlocker.Unlock(key)
		return nil
	}

	defer cache.nlocker.Unlock(key)
	defer cache.set(key, r.resourceAdapterInner)

	// Two scratch buffers that are ping-ponged between transformation
	// steps: each step reads from the previous step's output buffer.
	b1 := bp.GetBuffer()
	b2 := bp.GetBuffer()
	defer bp.PutBuffer(b1)
	defer bp.PutBuffer(b2)

	tctx := &ResourceTransformationCtx{
		Data: make(map[string]any),
		OpenResourcePublisher: r.target.openPublishFileForWriting,
	}

	tctx.InMediaType = r.target.MediaType()
	tctx.OutMediaType = r.target.MediaType()

	// Keep a copy of the pristine context for the cache metadata.
	startCtx := *tctx
	updates := &transformationUpdate{startCtx: startCtx}

	var contentrc hugio.ReadSeekCloser

	contentrc, err := contentReadSeekerCloser(r.target)
	if err != nil {
		return err
	}

	defer contentrc.Close()

	tctx.From = contentrc
	tctx.To = b1

	tctx.InPath = r.target.TargetPath()
	tctx.SourcePath = tctx.InPath

	counter := 0
	writeToFileCache := false

	// Non-nil only when the final content comes from the file cache.
	var transformedContentr io.Reader

	for i, tr := range r.transformations {
		if i != 0 {
			// The output media type of one step is the input of the next.
			tctx.InMediaType = tctx.OutMediaType
		}

		mayBeCachedOnDisk := transformationsToCacheOnDisk[tr.Key().Name]
		if !writeToFileCache {
			writeToFileCache = mayBeCachedOnDisk
		}

		if i > 0 {
			hasWrites := tctx.To.(*bytes.Buffer).Len() > 0
			if hasWrites {
				counter++
				// Switch the buffers
				if counter%2 == 0 {
					tctx.From = b2
					b1.Reset()
					tctx.To = b1
				} else {
					tctx.From = b1
					b2.Reset()
					tctx.To = b2
				}
			}
		}

		// newErr wraps a step error with the transformation name, path
		// and media type, plus installation hints for known tools.
		newErr := func(err error) error {
			msg := fmt.Sprintf("%s: failed to transform %q (%s)", strings.ToUpper(tr.Key().Name), tctx.InPath, tctx.InMediaType.Type())

			if err == herrors.ErrFeatureNotAvailable {
				var errMsg string
				if tr.Key().Name == "postcss" {
					// This transformation is not available in this
					// Most likely because PostCSS is not installed.
					errMsg = ". Check your PostCSS installation; install with \"npm install postcss-cli\". See https://gohugo.io/hugo-pipes/postcss/"
				} else if tr.Key().Name == "tocss" {
					errMsg = ". Check your Hugo installation; you need the extended version to build SCSS/SASS."
				} else if tr.Key().Name == "tocss-dart" {
					errMsg = ". You need dart-sass-embedded in your system $PATH."

				} else if tr.Key().Name == "babel" {
					errMsg = ". You need to install Babel, see https://gohugo.io/hugo-pipes/babel/"
				}

				return fmt.Errorf(msg+errMsg+": %w", err)
			}

			return fmt.Errorf(msg+": %w", err)
		}

		var tryFileCache bool

		if mayBeCachedOnDisk && r.spec.BuildConfig.UseResourceCache(nil) {
			tryFileCache = true
		} else {
			err = tr.Transform(tctx)
			if err != nil && err != herrors.ErrFeatureNotAvailable {
				return newErr(err)
			}

			// A feature-not-available error may still be served from
			// the file cache if the build config allows it.
			if mayBeCachedOnDisk {
				tryFileCache = r.spec.BuildConfig.UseResourceCache(err)
			}
			if err != nil && !tryFileCache {
				return newErr(err)
			}
		}

		if tryFileCache {
			f := r.target.tryTransformedFileCache(key, updates)
			if f == nil {
				if err != nil {
					return newErr(err)
				}
				return newErr(fmt.Errorf("resource %q not found in file cache", key))
			}
			transformedContentr = f
			updates.sourceFs = cache.fileCache.Fs
			defer f.Close()

			// The reader above is all we need.
			break
		}

		// Promote this step's output path to the next step's input path.
		if tctx.OutPath != "" {
			tctx.InPath = tctx.OutPath
			tctx.OutPath = ""
		}
	}

	if transformedContentr == nil {
		updates.updateFromCtx(tctx)
	}

	var publishwriters []io.WriteCloser

	if publish {
		publicw, err := r.target.openPublishFileForWriting(updates.targetPath)
		if err != nil {
			return err
		}
		publishwriters = append(publishwriters, publicw)
	}

	if transformedContentr == nil {
		if writeToFileCache {
			// Also write it to the cache
			fi, metaw, err := cache.writeMeta(key, updates.toTransformedResourceMetadata())
			if err != nil {
				return err
			}
			updates.sourceFilename = &fi.Name
			updates.sourceFs = cache.fileCache.Fs
			publishwriters = append(publishwriters, metaw)
		}

		// Any transformations reading from From must also write to To.
		// This means that if the target buffer is empty, we can just reuse
		// the original reader.
		if b, ok := tctx.To.(*bytes.Buffer); ok && b.Len() > 0 {
			transformedContentr = tctx.To.(*bytes.Buffer)
		} else {
			transformedContentr = contentrc
		}
	}

	// Also write it to memory
	var contentmemw *bytes.Buffer

	setContent = setContent || !writeToFileCache

	if setContent {
		contentmemw = bp.GetBuffer()
		defer bp.PutBuffer(contentmemw)
		publishwriters = append(publishwriters, hugio.ToWriteCloser(contentmemw))
	}

	// Fan the transformed content out to all destinations in one pass.
	publishw := hugio.NewMultiWriteCloser(publishwriters...)
	_, err = io.Copy(publishw, transformedContentr)
	if err != nil {
		return err
	}
	publishw.Close()

	if setContent {
		s := contentmemw.String()
		updates.content = &s
	}

	// Swap in a new target carrying the transformed state.
	newTarget, err := r.target.cloneWithUpdates(updates)
	if err != nil {
		return err
	}
	r.target = newTarget

	return nil
}
+
// init runs the (lazy, once-only) transformation before any accessor
// exposes data; see initTransform for the publish/setContent semantics.
func (r *resourceAdapter) init(publish, setContent bool) {
	r.initTransform(publish, setContent)
}
+
// initTransform executes the transformation chain exactly once
// (guarded by transformationsInit). Transformation errors are sent to
// the configured ErrorSender if present, otherwise logged; the error is
// also kept in transformationsErr.
func (r *resourceAdapter) initTransform(publish, setContent bool) {
	r.transformationsInit.Do(func() {
		if len(r.transformations) == 0 {
			// Nothing to do.
			return
		}

		if publish {
			// The transformation will write the content directly to
			// the destination.
			r.publishOnce = nil
		}

		r.transformationsErr = r.transform(publish, setContent)
		if r.transformationsErr != nil {
			if r.spec.ErrorSender != nil {
				r.spec.ErrorSender.SendError(r.transformationsErr)
			} else {
				r.spec.Logger.Errorf("Transformation failed: %s", r.transformationsErr)
			}
		}
	})

	// Resources that did not publish via the transformation itself are
	// published here, once.
	if publish && r.publishOnce != nil {
		r.publish()
	}
}
+
// resourceAdapterInner is the swappable inner state of a
// resourceAdapter: the current target resource and its publish state.
// Clones share or replace this as a unit (see Transform and the cache
// in transform).
type resourceAdapterInner struct {
	target transformableResource

	spec *Spec

	// Handles publishing (to /public) if needed.
	*publishOnce
}

// resourceTransformations holds a transformation chain together with
// the sync.Once guarding its lazy, one-time execution.
type resourceTransformations struct {
	transformationsInit sync.Once
	transformationsErr  error
	transformations     []ResourceTransformation
}

// transformableResource is what a resource must implement to be run
// through a transformation chain.
type transformableResource interface {
	baseResourceInternal

	resource.ContentProvider
	resource.Resource
	resource.Identifier
	resourceCopier
}

// transformationUpdate collects the outcome of a transformation chain
// before it is applied to the target via cloneWithUpdates.
type transformationUpdate struct {
	content        *string
	sourceFilename *string
	sourceFs       afero.Fs
	targetPath     string
	mediaType      media.Type
	data           map[string]any

	startCtx ResourceTransformationCtx
}
+
+func (u *transformationUpdate) isContentChanged() bool {
+ return u.content != nil || u.sourceFilename != nil
+}
+
// toTransformedResourceMetadata converts the update into the metadata
// record persisted next to the transformed content in the file cache.
func (u *transformationUpdate) toTransformedResourceMetadata() transformedResourceMetadata {
	return transformedResourceMetadata{
		MediaTypeV: u.mediaType.Type(),
		Target:     u.targetPath,
		MetaData:   u.data,
	}
}
+
+func (u *transformationUpdate) updateFromCtx(ctx *ResourceTransformationCtx) {
+ u.targetPath = ctx.OutPath
+ u.mediaType = ctx.OutMediaType
+ u.data = ctx.Data
+ u.targetPath = ctx.InPath
+}
+
// transformedResourceMetadata is the per-resource metadata stored in
// the file cache alongside the transformed content (see
// toTransformedResourceMetadata and ResourceCache.writeMeta).
// We will persist this information to disk.
type transformedResourceMetadata struct {
	Target     string         `json:"Target"`
	MediaTypeV string         `json:"MediaType"`
	MetaData   map[string]any `json:"Data"`
}
+
+// contentReadSeekerCloser returns a ReadSeekerCloser if possible for a given Resource.
+func contentReadSeekerCloser(r resource.Resource) (hugio.ReadSeekCloser, error) {
+ switch rr := r.(type) {
+ case resource.ReadSeekCloserResource:
+ rc, err := rr.ReadSeekCloser()
+ if err != nil {
+ return nil, err
+ }
+ return rc, nil
+ default:
+ return nil, fmt.Errorf("cannot transform content of Resource of type %T", r)
+
+ }
+}
diff --git a/resources/transform_test.go b/resources/transform_test.go
new file mode 100644
index 000000000..af8ccbc1f
--- /dev/null
+++ b/resources/transform_test.go
@@ -0,0 +1,440 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "encoding/base64"
+ "fmt"
+ "io"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/media"
+ "github.com/gohugoio/hugo/resources/images"
+ "github.com/gohugoio/hugo/resources/internal"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const gopher = `iVBORw0KGgoAAAANSUhEUgAAAEsAAAA8CAAAAAALAhhPAAAFfUlEQVRYw62XeWwUVRzHf2+OPbo9d7tsWyiyaZti6eWGAhISoIGKECEKCAiJJkYTiUgTMYSIosYYBBIUIxoSPIINEBDi2VhwkQrVsj1ESgu9doHWdrul7ba73WNm3vOPtsseM9MdwvvrzTs+8/t95ze/33sI5BqiabU6m9En8oNjduLnAEDLUsQXFF8tQ5oxK3vmnNmDSMtrncks9Hhtt/qeWZapHb1ha3UqYSWVl2ZmpWgaXMXGohQAvmeop3bjTRtv6SgaK/Pb9/bFzUrYslbFAmHPp+3WhAYdr+7GN/YnpN46Opv55VDsJkoEpMrY/vO2BIYQ6LLvm0ThY3MzDzzeSJeeWNyTkgnIE5ePKsvKlcg/0T9QMzXalwXMlj54z4c0rh/mzEfr+FgWEz2w6uk8dkzFAgcARAgNp1ZYef8bH2AgvuStbc2/i6CiWGj98y2tw2l4FAXKkQBIf+exyRnteY83LfEwDQAYCoK+P6bxkZm/0966LxcAAILHB56kgD95PPxltuYcMtFTWw/FKkY/6Opf3GGd9ZF+Qp6mzJxzuRSractOmJrH1u8XTvWFHINNkLQLMR+XHXvfPPHw967raE1xxwtA36IMRfkAAG29/7mLuQcb2WOnsJReZGfpiHsSBX81cvMKywYZHhX5hFPtOqPGWZCXnhWGAu6lX91ElKXSalcLXu3UaOXVay57ZSe5f6Gpx7J2MXAsi7EqSp09b/MirKSyJfnfEEgeDjl8FgDAfvewP03zZ+AJ0m9aFRM8eEHBDRKjfcreDXnZdQuAxXpT2NRJ7xl3UkLBhuVGU16gZiGOgZmrSbRdqkILuL/yYoSXHHkl9KXgqNu3PB8oRg0geC5vFmLjad6mUyTKLmF3OtraWDIfACyXqmephaDABawfpi6tqqBZytfQMqOz6S09iWXhktrRaB8Xz4Yi/8gyABDm5NVe6qq/3VzPrcjELWrebVuyY2T7ar4zQyybUCtsQ5Es1FGaZVrRVQwAgHGW2ZCRZshI5bGQi7HesyE972pOSeMM0dSktlzxRdrlqb3Osa6CCS8IJoQQQgBAbTAa5l5epO34rJszibJI8rxLfGzcp1dRosutGeb2VDNgqYrwTiPNsLxXiPi3dz7LiS1WBRBDBOnqEjyy3aQb+/bLiJzz9dIkscVBBLxMfSEac7kO4Fpkngi0ruNBeSOal+u8jgOuqPz12nryMLCniEjtOOOmpt+KEIqsEdocJjYXwrh9OZqWJQyPCTo67LNS/TdxLAv6R5ZNK9npEjbYdT33gRo4o5oTqR34R+OmaSzDBWsAIPhuRcgyoteNi9gF0KzNYWVItPf2TLoXEg+7isNC7uJkgo1iQWOfRSP9NR11RtbZZ3OMG/VhL6jvx+J1m87+RCfJChAtEBQkSBX2PnSiihc/Twh3j0h7qdYQAoRVsRGmq7HU2QRbaxVGa1D6nIOqaIWRjyRZpHMQKWKpZM5feA+lzC4ZFultV8S6T0mzQGhQohi5I8iw+CsqBSxhFMuwyLgSwbghGb0AiIKkSDmGZVmJSiKihsiyOAUs70UkywooYP0bii9GdH4sfr1UNysd3fUyLLMQN+rsmo3grHl9VNJHbbwxoa47Vw5gupIqrZcjPh9R4Nye3nRDk199V+aetmvVtDRE8/+cbgAAgMIWGb3UA0MGLE9SCbWX670TDy1y98c3D27eppUjsZ6fql3jcd5rUe7+ZIlLNQny3Rd+E5Tct3WVhTM5RBCEdiEK0b6B+/ca2gYU393nFj/n1AygRQxPIUA043M42u85+z2SnssKrPl8Mx76NL3E6eXc3be7OD+H4WHbJkKI8AU8irbITQjZ+0hQcPEgId/Fn/pl9crKH02+5o2b9T/eMx7pKoskYgAAAABJRU5ErkJggg==`
+
// gopherPNG returns a reader over the base64-encoded gopher test image
// above (decoded on the fly; a 75x60 PNG, see the Width/Height asserts
// in TestTransform).
func gopherPNG() io.Reader { return base64.NewDecoder(base64.StdEncoding, strings.NewReader(gopher)) }
+
+func TestTransform(t *testing.T) {
+ c := qt.New(t)
+
+ createTransformer := func(spec *Spec, filename, content string) Transformer {
+ filename = filepath.FromSlash(filename)
+ fs := spec.Fs.Source
+ afero.WriteFile(fs, filename, []byte(content), 0777)
+ r, _ := spec.New(ResourceSourceDescriptor{Fs: fs, SourceFilename: filename})
+ return r.(Transformer)
+ }
+
+ createContentReplacer := func(name, old, new string) ResourceTransformation {
+ return &testTransformation{
+ name: name,
+ transform: func(ctx *ResourceTransformationCtx) error {
+ in := helpers.ReaderToString(ctx.From)
+ in = strings.Replace(in, old, new, 1)
+ ctx.AddOutPathIdentifier("." + name)
+ fmt.Fprint(ctx.To, in)
+ return nil
+ },
+ }
+ }
+
+ // Verify that we publish the same file once only.
+ assertNoDuplicateWrites := func(c *qt.C, spec *Spec) {
+ c.Helper()
+ d := spec.Fs.PublishDir.(hugofs.DuplicatesReporter)
+ c.Assert(d.ReportDuplicates(), qt.Equals, "")
+ }
+
+ assertShouldExist := func(c *qt.C, spec *Spec, filename string, should bool) {
+ c.Helper()
+ exists, _ := helpers.Exists(filepath.FromSlash(filename), spec.Fs.WorkingDirReadOnly)
+ c.Assert(exists, qt.Equals, should)
+ }
+
+ c.Run("All values", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ transformation := &testTransformation{
+ name: "test",
+ transform: func(ctx *ResourceTransformationCtx) error {
+ // Content
+ in := helpers.ReaderToString(ctx.From)
+ in = strings.Replace(in, "blue", "green", 1)
+ fmt.Fprint(ctx.To, in)
+
+ // Media type
+ ctx.OutMediaType = media.CSVType
+
+ // Change target
+ ctx.ReplaceOutPathExtension(".csv")
+
+ // Add some data to context
+ ctx.Data["mydata"] = "Hugo Rocks!"
+
+ return nil
+ },
+ }
+
+ r := createTransformer(spec, "f1.txt", "color is blue")
+
+ tr, err := r.Transform(transformation)
+ c.Assert(err, qt.IsNil)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(content, qt.Equals, "color is green")
+ c.Assert(tr.MediaType(), eq, media.CSVType)
+ c.Assert(tr.RelPermalink(), qt.Equals, "/f1.csv")
+ assertShouldExist(c, spec, "public/f1.csv", true)
+
+ data := tr.Data().(map[string]any)
+ c.Assert(data["mydata"], qt.Equals, "Hugo Rocks!")
+
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("Meta only", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ transformation := &testTransformation{
+ name: "test",
+ transform: func(ctx *ResourceTransformationCtx) error {
+ // Change media type only
+ ctx.OutMediaType = media.CSVType
+ ctx.ReplaceOutPathExtension(".csv")
+
+ return nil
+ },
+ }
+
+ r := createTransformer(spec, "f1.txt", "color is blue")
+
+ tr, err := r.Transform(transformation)
+ c.Assert(err, qt.IsNil)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(content, qt.Equals, "color is blue")
+ c.Assert(tr.MediaType(), eq, media.CSVType)
+
+ // The transformed file should only be published if RelPermalink
+ // or Permalink is called.
+ n := htesting.Rnd.Intn(3)
+ shouldExist := true
+ switch n {
+ case 0:
+ tr.RelPermalink()
+ case 1:
+ tr.Permalink()
+ default:
+ shouldExist = false
+ }
+
+ assertShouldExist(c, spec, "public/f1.csv", shouldExist)
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("Memory-cached transformation", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ // Two transformations with same id, different behaviour.
+ t1 := createContentReplacer("t1", "blue", "green")
+ t2 := createContentReplacer("t1", "color", "car")
+
+ for i, transformation := range []ResourceTransformation{t1, t2} {
+ r := createTransformer(spec, "f1.txt", "color is blue")
+ tr, _ := r.Transform(transformation)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+ c.Assert(content, qt.Equals, "color is green", qt.Commentf("i=%d", i))
+
+ assertShouldExist(c, spec, "public/f1.t1.txt", false)
+ }
+
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("File-cached transformation", func(c *qt.C) {
+ c.Parallel()
+
+ fs := afero.NewMemMapFs()
+
+ for i := 0; i < 2; i++ {
+ spec := newTestResourceSpec(specDescriptor{c: c, fs: fs})
+
+ r := createTransformer(spec, "f1.txt", "color is blue")
+
+ var transformation ResourceTransformation
+
+ if i == 0 {
+ // There is currently a hardcoded list of transformations that we
+ // persist to disk (tocss, postcss).
+ transformation = &testTransformation{
+ name: "tocss",
+ transform: func(ctx *ResourceTransformationCtx) error {
+ in := helpers.ReaderToString(ctx.From)
+ in = strings.Replace(in, "blue", "green", 1)
+ ctx.AddOutPathIdentifier("." + "cached")
+ ctx.OutMediaType = media.CSVType
+ ctx.Data = map[string]any{
+ "Hugo": "Rocks!",
+ }
+ fmt.Fprint(ctx.To, in)
+ return nil
+ },
+ }
+ } else {
+ // Force read from file cache.
+ transformation = &testTransformation{
+ name: "tocss",
+ transform: func(ctx *ResourceTransformationCtx) error {
+ return herrors.ErrFeatureNotAvailable
+ },
+ }
+ }
+
+ msg := qt.Commentf("i=%d", i)
+
+ tr, _ := r.Transform(transformation)
+ c.Assert(tr.RelPermalink(), qt.Equals, "/f1.cached.txt", msg)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+ c.Assert(content, qt.Equals, "color is green", msg)
+ c.Assert(tr.MediaType(), eq, media.CSVType)
+ c.Assert(tr.Data(), qt.DeepEquals, map[string]any{
+ "Hugo": "Rocks!",
+ })
+
+ assertNoDuplicateWrites(c, spec)
+ assertShouldExist(c, spec, "public/f1.cached.txt", true)
+
+ }
+ })
+
+ c.Run("Access RelPermalink first", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ t1 := createContentReplacer("t1", "blue", "green")
+
+ r := createTransformer(spec, "f1.txt", "color is blue")
+
+ tr, _ := r.Transform(t1)
+
+ relPermalink := tr.RelPermalink()
+
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(relPermalink, qt.Equals, "/f1.t1.txt")
+ c.Assert(content, qt.Equals, "color is green")
+ c.Assert(tr.MediaType(), eq, media.TextType)
+
+ assertNoDuplicateWrites(c, spec)
+ assertShouldExist(c, spec, "public/f1.t1.txt", true)
+ })
+
+ c.Run("Content two", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ t1 := createContentReplacer("t1", "blue", "green")
+ t2 := createContentReplacer("t1", "color", "car")
+
+ r := createTransformer(spec, "f1.txt", "color is blue")
+
+ tr, _ := r.Transform(t1, t2)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(content, qt.Equals, "car is green")
+ c.Assert(tr.MediaType(), eq, media.TextType)
+
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("Content two chained", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ t1 := createContentReplacer("t1", "blue", "green")
+ t2 := createContentReplacer("t2", "color", "car")
+
+ r := createTransformer(spec, "f1.txt", "color is blue")
+
+ tr1, _ := r.Transform(t1)
+ tr2, _ := tr1.Transform(t2)
+
+ content1, err := tr1.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+ content2, err := tr2.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(content1, qt.Equals, "color is green")
+ c.Assert(content2, qt.Equals, "car is green")
+
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("Content many", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ const count = 26 // A-Z
+
+ transformations := make([]ResourceTransformation, count)
+ for i := 0; i < count; i++ {
+ transformations[i] = createContentReplacer(fmt.Sprintf("t%d", i), fmt.Sprint(i), string(rune(i+65)))
+ }
+
+ var countstr strings.Builder
+ for i := 0; i < count; i++ {
+ countstr.WriteString(fmt.Sprint(i))
+ }
+
+ r := createTransformer(spec, "f1.txt", countstr.String())
+
+ tr, _ := r.Transform(transformations...)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+
+ c.Assert(content, qt.Equals, "ABCDEFGHIJKLMNOPQRSTUVWXYZ")
+
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("Image", func(c *qt.C) {
+ c.Parallel()
+
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ transformation := &testTransformation{
+ name: "test",
+ transform: func(ctx *ResourceTransformationCtx) error {
+ ctx.AddOutPathIdentifier(".changed")
+ return nil
+ },
+ }
+
+ r := createTransformer(spec, "gopher.png", helpers.ReaderToString(gopherPNG()))
+
+ tr, err := r.Transform(transformation)
+ c.Assert(err, qt.IsNil)
+ c.Assert(tr.MediaType(), eq, media.PNGType)
+
+ img, ok := tr.(images.ImageResource)
+ c.Assert(ok, qt.Equals, true)
+
+ c.Assert(img.Width(), qt.Equals, 75)
+ c.Assert(img.Height(), qt.Equals, 60)
+
+ // RelPermalink called.
+ resizedPublished1, err := img.Resize("40x40")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resizedPublished1.Height(), qt.Equals, 40)
+ c.Assert(resizedPublished1.RelPermalink(), qt.Equals, "/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_40x40_resize_linear_3.png")
+ assertShouldExist(c, spec, "public/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_40x40_resize_linear_3.png", true)
+
+ // Permalink called.
+ resizedPublished2, err := img.Resize("30x30")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resizedPublished2.Height(), qt.Equals, 30)
+ c.Assert(resizedPublished2.Permalink(), qt.Equals, "https://example.com/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_30x30_resize_linear_3.png")
+ assertShouldExist(c, spec, "public/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_30x30_resize_linear_3.png", true)
+
+ // Not published because none of RelPermalink or Permalink was called.
+ resizedNotPublished, err := img.Resize("50x50")
+ c.Assert(err, qt.IsNil)
+ c.Assert(resizedNotPublished.Height(), qt.Equals, 50)
+ // c.Assert(resized.RelPermalink(), qt.Equals, "/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_50x50_resize_linear_2.png")
+ assertShouldExist(c, spec, "public/gopher.changed_hu2e827f5a78333ebc04166dd643235dea_1462_50x50_resize_linear_3.png", false)
+
+ assertNoDuplicateWrites(c, spec)
+ })
+
+ c.Run("Concurrent", func(c *qt.C) {
+ spec := newTestResourceSpec(specDescriptor{c: c})
+
+ transformers := make([]Transformer, 10)
+ transformations := make([]ResourceTransformation, 10)
+
+ for i := 0; i < 10; i++ {
+ transformers[i] = createTransformer(spec, fmt.Sprintf("f%d.txt", i), fmt.Sprintf("color is %d", i))
+ transformations[i] = createContentReplacer("test", strconv.Itoa(i), "blue")
+ }
+
+ var wg sync.WaitGroup
+
+ for i := 0; i < 13; i++ {
+ wg.Add(1)
+ go func(i int) {
+ defer wg.Done()
+ for j := 0; j < 23; j++ {
+ id := (i + j) % 10
+ tr, err := transformers[id].Transform(transformations[id])
+ c.Assert(err, qt.IsNil)
+ content, err := tr.(resource.ContentProvider).Content()
+ c.Assert(err, qt.IsNil)
+ c.Assert(content, qt.Equals, "color is blue")
+ c.Assert(tr.RelPermalink(), qt.Equals, fmt.Sprintf("/f%d.test.txt", id))
+ }
+ }(i)
+ }
+ wg.Wait()
+
+ assertNoDuplicateWrites(c, spec)
+ })
+}
+
// testTransformation is a test double implementing
// ResourceTransformation with a configurable name (cache key) and
// transform func.
type testTransformation struct {
	name      string
	transform func(ctx *ResourceTransformationCtx) error
}

// Key derives the transformation cache key from the configured name.
func (t *testTransformation) Key() internal.ResourceTransformationKey {
	return internal.NewResourceTransformationKey(t.name)
}

// Transform delegates to the configured transform func.
func (t *testTransformation) Transform(ctx *ResourceTransformationCtx) error {
	return t.transform(ctx)
}
diff --git a/scripts/fork_go_templates/.gitignore b/scripts/fork_go_templates/.gitignore
new file mode 100644
index 000000000..81af73f40
--- /dev/null
+++ b/scripts/fork_go_templates/.gitignore
@@ -0,0 +1 @@
+fork_go_templates
diff --git a/scripts/fork_go_templates/main.go b/scripts/fork_go_templates/main.go
new file mode 100644
index 000000000..9296b7bdd
--- /dev/null
+++ b/scripts/fork_go_templates/main.go
@@ -0,0 +1,228 @@
+package main
+
+import (
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hexec"
+
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "github.com/spf13/afero"
+)
+
// main regenerates Hugo's fork of Go's text/template and html/template:
// it wipes the previous fork, copies the packages from the local Go
// source tree, applies the per-package string and gofmt -r rewrites,
// then runs goimports/gofmt over the result.
func main() {
	// The current is built with 41a82aa9c3 text/template/parse: allow space after continue or break
	fmt.Println("Forking ...")
	defer fmt.Println("Done ...")

	cleanFork()

	htmlRoot := filepath.Join(forkRoot, "htmltemplate")

	for _, pkg := range goPackages {
		copyGoPackage(pkg.dstPkg, pkg.srcPkg)
	}

	for _, pkg := range goPackages {
		doWithGoFiles(pkg.dstPkg, pkg.rewriter, pkg.replacer)
	}

	goimports(htmlRoot)
	gofmt(forkRoot)
}
+
const (
	// TODO(bep)
	// NOTE(review): goSource is a hardcoded developer-machine path; the
	// script only works after pointing this at a local Go checkout.
	goSource = "/Users/bep/dev/go/misc/go/src"
	// forkRoot is where the forked packages live, relative to this script.
	forkRoot = "../../tpl/internal/go_templates"
)
+
// goPackage describes one Go stdlib package to fork: its source and
// destination package paths and the rewrites applied to each file.
type goPackage struct {
	srcPkg   string
	dstPkg   string
	replacer func(name, content string) string // per-file content rewrite
	rewriter func(name string)                 // per-file gofmt -r rewrite
}
+
var (
	// textTemplateReplacers rewrites the text/template fork: internal
	// import paths are pointed at the fork packages, and selected
	// types/functions are renamed so Hugo can overload them.
	textTemplateReplacers = strings.NewReplacer(
		`"text/template/`, `"github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/`,
		`"internal/fmtsort"`, `"github.com/gohugoio/hugo/tpl/internal/go_templates/fmtsort"`,
		`"internal/testenv"`, `"github.com/gohugoio/hugo/tpl/internal/go_templates/testenv"`,
		"TestLinkerGC", "_TestLinkerGC",
		// Rename types and function that we want to overload.
		"type state struct", "type stateOld struct",
		"func (s *state) evalFunction", "func (s *state) evalFunctionOld",
		"func (s *state) evalField(", "func (s *state) evalFieldOld(",
		"func (s *state) evalCall(", "func (s *state) evalCallOld(",
		"func isTrue(val reflect.Value) (truth, ok bool) {", "func isTrueOld(val reflect.Value) (truth, ok bool) {",
	)

	// testEnvReplacers points internal/cfg at the fork.
	testEnvReplacers = strings.NewReplacer(
		`"internal/cfg"`, `"github.com/gohugoio/hugo/tpl/internal/go_templates/cfg"`,
	)

	// htmlTemplateReplacers rewrites the html/template fork.
	htmlTemplateReplacers = strings.NewReplacer(
		`. "html/template"`, `. "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"`,
		`"html/template"`, `template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"`,
		"\"text/template\"\n", "template \"github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate\"\n",
		// NOTE(review): `"html/template"` appears as a pattern twice;
		// strings.NewReplacer tries patterns in argument order, so this
		// second pair can never match — presumably dead. Verify before
		// removing.
		`"html/template"`, `htmltemplate "html/template"`,
		// NOTE(review): this rewrites a `"fmt"` import into an
		// html/template import — looks deliberate (goimports runs
		// afterwards and fixes the import set), but confirm against the
		// generated output.
		`"fmt"`, `htmltemplate "html/template"`,
		`t.Skip("this test currently fails with -race; see issue #39807")`, `// t.Skip("this test currently fails with -race; see issue #39807")`,
	)
)
+
// commonReplace applies rewrites shared by all forked packages: test
// files get build tags restricting them to go1.13+ (and, for in-package
// template tests, excluding Windows). Non-test files pass through
// unchanged.
func commonReplace(name, content string) string {
	if strings.HasSuffix(name, "_test.go") {
		content = strings.Replace(content, "package template\n", `// +build go1.13,!windows

package template
`, 1)
		content = strings.Replace(content, "package template_test\n", `// +build go1.13

package template_test
`, 1)

		content = strings.Replace(content, "package parse\n", `// +build go1.13

package parse
`, 1)

	}

	return content
}
+
// goPackages lists the stdlib packages that get forked, with their
// content replacers and gofmt -r rewriters. Order matters: main copies
// all packages first, then applies all rewrites.
var goPackages = []goPackage{
	{
		srcPkg: "text/template", dstPkg: "texttemplate",
		replacer: func(name, content string) string { return textTemplateReplacers.Replace(commonReplace(name, content)) },
	},
	{
		srcPkg: "html/template", dstPkg: "htmltemplate", replacer: func(name, content string) string {
			if strings.HasSuffix(name, "content.go") {
				// Remove template.HTML types. We need to use the Go types.
				content = removeAll(`(?s)// Strings of content.*?\)\n`, content)
			}

			content = commonReplace(name, content)

			return htmlTemplateReplacers.Replace(content)
		},
		rewriter: func(name string) {
			// Qualify the overloaded string types with the fork package.
			for _, s := range []string{"CSS", "HTML", "HTMLAttr", "JS", "JSStr", "URL", "Srcset"} {
				rewrite(name, fmt.Sprintf("%s -> htmltemplate.%s", s, s))
			}
			rewrite(name, `"text/template/parse" -> "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"`)
		},
	},
	{srcPkg: "internal/fmtsort", dstPkg: "fmtsort", rewriter: func(name string) {
		rewrite(name, `"internal/fmtsort" -> "github.com/gohugoio/hugo/tpl/internal/go_templates/fmtsort"`)
	}},
	{
		srcPkg: "internal/testenv", dstPkg: "testenv",
		replacer: func(name, content string) string { return testEnvReplacers.Replace(content) }, rewriter: func(name string) {
			rewrite(name, `"internal/testenv" -> "github.com/gohugoio/hugo/tpl/internal/go_templates/testenv"`)
		},
	},
	{srcPkg: "internal/cfg", dstPkg: "cfg", rewriter: func(name string) {
		rewrite(name, `"internal/cfg" -> "github.com/gohugoio/hugo/tpl/internal/go_templates/cfg"`)
	}},
}
+
+var fs = afero.NewOsFs()
+
+// Removes all non-Hugo files in the go_templates folder.
+func cleanFork() {
+ must(filepath.Walk(filepath.Join(forkRoot), func(path string, info os.FileInfo, err error) error {
+ if !info.IsDir() && len(path) > 10 && !strings.Contains(path, "hugo") {
+ must(fs.Remove(path))
+ }
+ return nil
+ }))
+}
+
// must aborts the program via log.Fatal when err is non-nil, prefixing
// the message with the optional what strings. A nil err is a no-op.
func must(err error, what ...string) {
	if err == nil {
		return
	}
	log.Fatal(what, " ERROR: ", err)
}
+
// copyGoPackage copies the stdlib package src from the local Go source
// tree into the fork at dst, accepting every file (the filter always
// returns true).
func copyGoPackage(dst, src string) {
	from := filepath.Join(goSource, src)
	to := filepath.Join(forkRoot, dst)
	fmt.Println("Copy", from, "to", to)
	must(hugio.CopyDir(fs, from, to, func(s string) bool { return true }))
}
+
+func doWithGoFiles(dir string,
+ rewrite func(name string),
+ transform func(name, in string) string) {
+ if rewrite == nil && transform == nil {
+ return
+ }
+ must(filepath.Walk(filepath.Join(forkRoot, dir), func(path string, info os.FileInfo, err error) error {
+ if info.IsDir() {
+ return nil
+ }
+
+ if !strings.HasSuffix(path, ".go") || strings.Contains(path, "hugo_") {
+ return nil
+ }
+
+ fmt.Println("Handle", path)
+
+ if rewrite != nil {
+ rewrite(path)
+ }
+
+ if transform == nil {
+ return nil
+ }
+
+ data, err := ioutil.ReadFile(path)
+ must(err)
+ f, err := os.Create(path)
+ must(err)
+ defer f.Close()
+ _, err = f.WriteString(transform(path, string(data)))
+ must(err)
+
+ return nil
+ }))
+}
+
// removeAll deletes every match of the regular expression from content
// and returns the result.
func removeAll(expression, content string) string {
	return regexp.MustCompile(expression).ReplaceAllString(content, "")
}
+
+func rewrite(filename, rule string) {
+ cmf, _ := hexec.SafeCommand("gofmt", "-w", "-r", rule, filename)
+ out, err := cmf.CombinedOutput()
+ if err != nil {
+ log.Fatal("gofmt failed:", string(out))
+ }
+}
+
+func goimports(dir string) {
+ cmf, _ := hexec.SafeCommand("goimports", "-w", dir)
+ out, err := cmf.CombinedOutput()
+ if err != nil {
+ log.Fatal("goimports failed:", string(out))
+ }
+}
+
+func gofmt(dir string) {
+ cmf, _ := hexec.SafeCommand("gofmt", "-w", dir)
+ out, err := cmf.CombinedOutput()
+ if err != nil {
+ log.Fatal("gofmt failed:", string(out))
+ }
+}
diff --git a/snap/plugins/x-nodejs.yaml b/snap/plugins/x-nodejs.yaml
new file mode 100644
index 000000000..60b465459
--- /dev/null
+++ b/snap/plugins/x-nodejs.yaml
@@ -0,0 +1,8 @@
+options:
+ source:
+ required: true
+ source-type:
+ source-tag:
+ source-branch:
+ nodejs-target:
+ required: true
diff --git a/snap/plugins/x_nodejs.py b/snap/plugins/x_nodejs.py
new file mode 100644
index 000000000..d6e29f750
--- /dev/null
+++ b/snap/plugins/x_nodejs.py
@@ -0,0 +1,332 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+#
+# Modified by Anthony Fok on 2018-10-01 to add support for ppc64el and s390x
+#
+# Copyright (C) 2015-2017 Canonical Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 3 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""The nodejs plugin is useful for node/npm based parts.
+
+The plugin uses node to install dependencies from `package.json`. It
+also sets up binaries defined in `package.json` into the `PATH`.
+
+This plugin uses the common plugin keywords as well as those for "sources".
+For more information check the 'plugins' topic for the former and the
+'sources' topic for the latter.
+
+Additionally, this plugin uses the following plugin-specific keywords:
+
+ - node-packages:
+ (list)
+ A list of dependencies to fetch using npm.
+ - node-engine:
+ (string)
+ The version of nodejs you want the snap to run on.
+ - npm-run:
+ (list)
+ A list of targets to `npm run`.
+ These targets will be run in order, after `npm install`
+ - npm-flags:
+ (list)
+ A list of flags for npm.
+ - node-package-manager
+ (string; default: npm)
+ The language package manager to use to drive installation
+ of node packages. Can be either `npm` (default) or `yarn`.
+"""
+
+import collections
+import contextlib
+import json
+import logging
+import os
+import shutil
+import subprocess
+import sys
+
+import snapcraft
+from snapcraft import sources
+from snapcraft.file_utils import link_or_copy_tree
+from snapcraft.internal import errors
+
+logger = logging.getLogger(__name__)
+
+_NODEJS_BASE = "node-v{version}-linux-{arch}"
+_NODEJS_VERSION = "12.18.4"
+_NODEJS_TMPL = "https://nodejs.org/dist/v{version}/{base}.tar.gz"
+_NODEJS_ARCHES = {"i386": "x86", "amd64": "x64", "armhf": "armv7l", "arm64": "arm64", "ppc64el": "ppc64le", "s390x": "s390x"}
+_YARN_URL = "https://yarnpkg.com/latest.tar.gz"
+
+
+class NodePlugin(snapcraft.BasePlugin):
+ @classmethod
+ def schema(cls):
+ schema = super().schema()
+
+ schema["properties"]["node-packages"] = {
+ "type": "array",
+ "minitems": 1,
+ "uniqueItems": True,
+ "items": {"type": "string"},
+ "default": [],
+ }
+ schema["properties"]["node-engine"] = {
+ "type": "string",
+ "default": _NODEJS_VERSION,
+ }
+ schema["properties"]["node-package-manager"] = {
+ "type": "string",
+ "default": "npm",
+ "enum": ["npm", "yarn"],
+ }
+ schema["properties"]["npm-run"] = {
+ "type": "array",
+ "minitems": 1,
+ "uniqueItems": False,
+ "items": {"type": "string"},
+ "default": [],
+ }
+ schema["properties"]["npm-flags"] = {
+ "type": "array",
+ "minitems": 1,
+ "uniqueItems": False,
+ "items": {"type": "string"},
+ "default": [],
+ }
+
+ if "required" in schema:
+ del schema["required"]
+
+ return schema
+
+ @classmethod
+ def get_build_properties(cls):
+ # Inform Snapcraft of the properties associated with building. If these
+ # change in the YAML Snapcraft will consider the build step dirty.
+ return ["node-packages", "npm-run", "npm-flags"]
+
+ @classmethod
+ def get_pull_properties(cls):
+ # Inform Snapcraft of the properties associated with pulling. If these
+ # change in the YAML Snapcraft will consider the build step dirty.
+ return ["node-engine", "node-package-manager"]
+
+ @property
+ def _nodejs_tar(self):
+ if self._nodejs_tar_handle is None:
+ self._nodejs_tar_handle = sources.Tar(
+ self._nodejs_release_uri, self._npm_dir
+ )
+ return self._nodejs_tar_handle
+
+ @property
+ def _yarn_tar(self):
+ if self._yarn_tar_handle is None:
+ self._yarn_tar_handle = sources.Tar(_YARN_URL, self._npm_dir)
+ return self._yarn_tar_handle
+
+ def __init__(self, name, options, project):
+ super().__init__(name, options, project)
+ self._source_package_json = os.path.join(
+ os.path.abspath(self.options.source), "package.json"
+ )
+ self._npm_dir = os.path.join(self.partdir, "npm")
+ self._manifest = collections.OrderedDict()
+ self._nodejs_release_uri = get_nodejs_release(
+ self.options.node_engine, self.project.deb_arch
+ )
+ self._nodejs_tar_handle = None
+ self._yarn_tar_handle = None
+
+ def pull(self):
+ super().pull()
+ os.makedirs(self._npm_dir, exist_ok=True)
+ self._nodejs_tar.download()
+ if self.options.node_package_manager == "yarn":
+ self._yarn_tar.download()
+ # do the install in the pull phase to download all dependencies.
+ if self.options.node_package_manager == "npm":
+ self._npm_install(rootdir=self.sourcedir)
+ else:
+ self._yarn_install(rootdir=self.sourcedir)
+
+ def clean_pull(self):
+ super().clean_pull()
+
+ # Remove the npm directory (if any)
+ if os.path.exists(self._npm_dir):
+ shutil.rmtree(self._npm_dir)
+
+ def build(self):
+ super().build()
+ if self.options.node_package_manager == "npm":
+ installed_node_packages = self._npm_install(rootdir=self.builddir)
+ # Copy the content of the symlink to the build directory
+ # LP: #1702661
+ modules_dir = os.path.join(self.installdir, "lib", "node_modules")
+ _copy_symlinked_content(modules_dir)
+ else:
+ installed_node_packages = self._yarn_install(rootdir=self.builddir)
+ lock_file_path = os.path.join(self.sourcedir, "yarn.lock")
+ if os.path.isfile(lock_file_path):
+ with open(lock_file_path) as lock_file:
+ self._manifest["yarn-lock-contents"] = lock_file.read()
+
+ self._manifest["node-packages"] = [
+ "{}={}".format(name, installed_node_packages[name])
+ for name in installed_node_packages
+ ]
+
+ def _npm_install(self, rootdir):
+ self._nodejs_tar.provision(
+ self.installdir, clean_target=False, keep_tarball=True
+ )
+ npm_cmd = ["npm"] + self.options.npm_flags
+ npm_install = npm_cmd + ["--cache-min=Infinity", "install"]
+ for pkg in self.options.node_packages:
+ self.run(npm_install + ["--global"] + [pkg], cwd=rootdir)
+ if os.path.exists(os.path.join(rootdir, "package.json")):
+ self.run(npm_install, cwd=rootdir)
+ self.run(npm_install + ["--global"], cwd=rootdir)
+ for target in self.options.npm_run:
+ self.run(npm_cmd + ["run", target], cwd=rootdir)
+ return self._get_installed_node_packages("npm", self.installdir)
+
+ def _yarn_install(self, rootdir):
+ self._nodejs_tar.provision(
+ self.installdir, clean_target=False, keep_tarball=True
+ )
+ self._yarn_tar.provision(self._npm_dir, clean_target=False, keep_tarball=True)
+ yarn_cmd = [os.path.join(self._npm_dir, "bin", "yarn")]
+ yarn_cmd.extend(self.options.npm_flags)
+ if "http_proxy" in os.environ:
+ yarn_cmd.extend(["--proxy", os.environ["http_proxy"]])
+ if "https_proxy" in os.environ:
+ yarn_cmd.extend(["--https-proxy", os.environ["https_proxy"]])
+ flags = []
+ if rootdir == self.builddir:
+ yarn_add = yarn_cmd + ["global", "add"]
+ flags.extend(
+ [
+ "--offline",
+ "--prod",
+ "--global-folder",
+ self.installdir,
+ "--prefix",
+ self.installdir,
+ ]
+ )
+ else:
+ yarn_add = yarn_cmd + ["add"]
+ for pkg in self.options.node_packages:
+ self.run(yarn_add + [pkg] + flags, cwd=rootdir)
+
+ # local packages need to be added as if they were remote, we
+ # remove the local package.json so `yarn add` doesn't pollute it.
+ if os.path.exists(self._source_package_json):
+ with contextlib.suppress(FileNotFoundError):
+ os.unlink(os.path.join(rootdir, "package.json"))
+ shutil.copy(
+ self._source_package_json, os.path.join(rootdir, "package.json")
+ )
+ self.run(yarn_add + ["file:{}".format(rootdir)] + flags, cwd=rootdir)
+
+ # npm run would require to bring back package.json
+ if self.options.npm_run and os.path.exists(self._source_package_json):
+ # The current package.json is the yarn prefilled one.
+ with contextlib.suppress(FileNotFoundError):
+ os.unlink(os.path.join(rootdir, "package.json"))
+ os.link(self._source_package_json, os.path.join(rootdir, "package.json"))
+ for target in self.options.npm_run:
+ self.run(
+ yarn_cmd + ["run", target],
+ cwd=rootdir,
+ env=self._build_environment(rootdir),
+ )
+ return self._get_installed_node_packages("npm", self.installdir)
+
+ def _get_installed_node_packages(self, package_manager, cwd):
+ try:
+ output = self.run_output(
+ [package_manager, "ls", "--global", "--json"], cwd=cwd
+ )
+ except subprocess.CalledProcessError as error:
+ # XXX When dependencies have missing dependencies, an error like
+ # this is printed to stderr:
+ # npm ERR! peer dep missing: glob@*, required by glob-promise@3.1.0
+ # retcode is not 0, which raises an exception.
+ output = error.output.decode(sys.getfilesystemencoding()).strip()
+ packages = collections.OrderedDict()
+ dependencies = json.loads(output, object_pairs_hook=collections.OrderedDict)[
+ "dependencies"
+ ]
+ while dependencies:
+ key, value = dependencies.popitem(last=False)
+ # XXX Just as above, dependencies without version are the ones
+ # missing.
+ if "version" in value:
+ packages[key] = value["version"]
+ if "dependencies" in value:
+ dependencies.update(value["dependencies"])
+ return packages
+
+ def get_manifest(self):
+ return self._manifest
+
+ def _build_environment(self, rootdir):
+ env = os.environ.copy()
+ if rootdir.endswith("src"):
+ hidden_path = os.path.join(rootdir, "node_modules", ".bin")
+ if env.get("PATH"):
+ new_path = "{}:{}".format(hidden_path, env.get("PATH"))
+ else:
+ new_path = hidden_path
+ env["PATH"] = new_path
+ return env
+
+
+def _get_nodejs_base(node_engine, machine):
+ if machine not in _NODEJS_ARCHES:
+ raise errors.SnapcraftEnvironmentError(
+ "architecture not supported ({})".format(machine)
+ )
+ return _NODEJS_BASE.format(version=node_engine, arch=_NODEJS_ARCHES[machine])
+
+
+def get_nodejs_release(node_engine, arch):
+ return _NODEJS_TMPL.format(
+ version=node_engine, base=_get_nodejs_base(node_engine, arch)
+ )
+
+
+def _copy_symlinked_content(modules_dir):
+ """Copy symlinked content.
+
+ When running newer versions of npm, symlinks to the local tree are
+ created from the part's installdir to the root of the builddir of the
+ part (this only affects some build configurations in some projects)
+ which is valid when running from the context of the part but invalid
+ as soon as the artifacts migrate across the steps,
+ i.e.; stage and prime.
+
+ If modules_dir does not exist we simply return.
+ """
+ if not os.path.exists(modules_dir):
+ return
+ modules = [os.path.join(modules_dir, d) for d in os.listdir(modules_dir)]
+ symlinks = [l for l in modules if os.path.islink(l)]
+ for link_path in symlinks:
+ link_target = os.path.realpath(link_path)
+ os.unlink(link_path)
+ link_or_copy_tree(link_target, link_path)
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
new file mode 100644
index 000000000..a5070dc79
--- /dev/null
+++ b/snap/snapcraft.yaml
@@ -0,0 +1,110 @@
+name: hugo
+version: git
+summary: Fast and Flexible Static Site Generator
+description: |
+ Hugo is a static HTML and CSS website generator written in Go. It is
+ optimized for speed, easy use and configurability. Hugo takes a directory
+ with content and templates and renders them into a full HTML website.
+license: "Apache-2.0"
+base: core20
+confinement: strict
+grade: devel # "devel" or "stable"
+
+package-repositories:
+ - type: apt
+ components: [main]
+ suites: [focal]
+ key-id: 9FD3B784BC1C6FC31A8A0A1C1655A0AB68576280
+ url: https://deb.nodesource.com/node_16.x
+
+plugs:
+ etc-gitconfig:
+ interface: system-files
+ read:
+ - /etc/gitconfig
+ gitconfig:
+ interface: personal-files
+ read:
+ - $HOME/.gitconfig
+ - $HOME/.config/git/config
+
+apps:
+ hugo:
+ environment:
+ HOME: $SNAP_REAL_HOME
+ command: bin/hugo
+ completer: hugo-completion
+ plugs:
+ - home
+ - network-bind
+ - removable-media
+ - etc-gitconfig
+ - gitconfig
+
+parts:
+ git:
+ plugin: nil
+ stage-packages:
+ - git
+ organize:
+ usr/bin/: bin/
+ prime:
+ - bin/git
+
+ hugo:
+ plugin: nil
+ build-snaps: [go/1.18/stable]
+ source: .
+ override-build: |
+ set -ex
+
+ echo "\nStarting override-build:"
+ export GOPATH=$(realpath ../go)
+ export PATH=$GOPATH/bin:$PATH
+
+ echo " * SNAPCRAFT_IMAGE_INFO=${SNAPCRAFT_IMAGE_INFO=}"
+ # Example: SNAPCRAFT_IMAGE_INFO='{"build_url": "https://launchpad.net/~gohugoio/+snap/hugo-extended-dev/+build/344022"}'
+ export HUGO_BUILD_TAGS=""
+ if echo $SNAPCRAFT_IMAGE_INFO | grep -q '/+snap/hugo-extended'; then
+ HUGO_BUILD_TAGS="extended"
+ fi
+ echo " * Building hugo (HUGO_BUILD_TAGS=\"$HUGO_BUILD_TAGS\")..."
+ go build -v -ldflags '-X github.com/gohugoio/hugo/common/hugo.vendorInfo=snap' -tags "$HUGO_BUILD_TAGS"
+ ./hugo version
+ ldd hugo || :
+
+ echo " * Building shell completion..."
+ ./hugo completion bash > hugo-completion
+
+ echo " * Installing to ${SNAPCRAFT_PART_INSTALL}..."
+ install -d $SNAPCRAFT_PART_INSTALL/bin
+ cp -av hugo $SNAPCRAFT_PART_INSTALL/bin/
+ mv -v hugo-completion $SNAPCRAFT_PART_INSTALL/
+
+ echo " * Stripping binary..."
+ ls -l $SNAPCRAFT_PART_INSTALL/bin/hugo
+ strip --remove-section=.comment --remove-section=.note $SNAPCRAFT_PART_INSTALL/bin/hugo
+ ls -l $SNAPCRAFT_PART_INSTALL/bin/hugo
+
+ node:
+ plugin: nil
+ stage-packages:
+ - nodejs
+ organize:
+ usr/bin/: bin/
+ usr/lib/: lib/
+ prime:
+ - bin/node
+ - lib/*/lib*.so*
+
+ pandoc:
+ plugin: nil
+ stage-packages:
+ - libatomic1
+ - pandoc
+ - pandoc-data
+ organize:
+ usr/bin/: bin/
+ usr/lib/: lib/
+ prime:
+ - bin/pandoc
diff --git a/source/content_directory_test.go b/source/content_directory_test.go
new file mode 100644
index 000000000..4d800cb5a
--- /dev/null
+++ b/source/content_directory_test.go
@@ -0,0 +1,66 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+func TestIgnoreDotFilesAndDirectories(t *testing.T) {
+ c := qt.New(t)
+
+ tests := []struct {
+ path string
+ ignore bool
+ ignoreFilesRegexpes any
+ }{
+ {".foobar/", true, nil},
+ {"foobar/.barfoo/", true, nil},
+ {"barfoo.md", false, nil},
+ {"foobar/barfoo.md", false, nil},
+ {"foobar/.barfoo.md", true, nil},
+ {".barfoo.md", true, nil},
+ {".md", true, nil},
+ {"foobar/barfoo.md~", true, nil},
+ {".foobar/barfoo.md~", true, nil},
+ {"foobar~/barfoo.md", false, nil},
+ {"foobar/bar~foo.md", false, nil},
+ {"foobar/foo.md", true, []string{"\\.md$", "\\.boo$"}},
+ {"foobar/foo.html", false, []string{"\\.md$", "\\.boo$"}},
+ {"foobar/foo.md", true, []string{"foo.md$"}},
+ {"foobar/foo.md", true, []string{"*", "\\.md$", "\\.boo$"}},
+ {"foobar/.#content.md", true, []string{"/\\.#"}},
+ {".#foobar.md", true, []string{"^\\.#"}},
+ }
+
+ for i, test := range tests {
+ v := newTestConfig()
+ v.Set("ignoreFiles", test.ignoreFilesRegexpes)
+ fs := hugofs.NewMem(v)
+ ps, err := helpers.NewPathSpec(fs, v, nil)
+ c.Assert(err, qt.IsNil)
+
+ s := NewSourceSpec(ps, nil, fs.Source)
+
+ if ignored := s.IgnoreFile(filepath.FromSlash(test.path)); test.ignore != ignored {
+ t.Errorf("[%d] File not ignored", i)
+ }
+ }
+}
diff --git a/source/fileInfo.go b/source/fileInfo.go
new file mode 100644
index 000000000..f882eb898
--- /dev/null
+++ b/source/fileInfo.go
@@ -0,0 +1,296 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/paths"
+
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ "github.com/gohugoio/hugo/common/hugio"
+
+ "github.com/gohugoio/hugo/hugofs"
+
+ "github.com/gohugoio/hugo/helpers"
+)
+
+// FileInfo implements the File interface.
+var (
+ _ File = (*FileInfo)(nil)
+)
+
+// File represents a source file.
+// This is a temporary construct until we resolve page.Page conflicts.
+// TODO(bep) remove this construct once we have resolved page deprecations
+type File interface {
+ fileOverlap
+ FileWithoutOverlap
+}
+
+// Temporary to solve duplicate/deprecated names in page.Page
+type fileOverlap interface {
+ // Path gets the relative path including file name and extension.
+ // The directory is relative to the content root.
+ Path() string
+
+ // Section is first directory below the content root.
+ // For page bundles in root, the Section will be empty.
+ Section() string
+
+ // Lang is the language code for this page. It will be the
+ // same as the site's language code.
+ Lang() string
+
+ IsZero() bool
+}
+
+type FileWithoutOverlap interface {
+
+ // Filename gets the full path and filename to the file.
+ Filename() string
+
+ // Dir gets the name of the directory that contains this file.
+ // The directory is relative to the content root.
+ Dir() string
+
+ // Extension is an alias to Ext().
+ // Deprecated: Use Ext instead.
+ Extension() string
+
+ // Ext gets the file extension, i.e "myblogpost.md" will return "md".
+ Ext() string
+
+ // LogicalName is filename and extension of the file.
+ LogicalName() string
+
+ // BaseFileName is a filename without extension.
+ BaseFileName() string
+
+ // TranslationBaseName is a filename with no extension,
+ // not even the optional language extension part.
+ TranslationBaseName() string
+
+	// ContentBaseName is either the TranslationBaseName or the name of the containing
+	// folder if the file is a leaf bundle.
+ ContentBaseName() string
+
+ // UniqueID is the MD5 hash of the file's path and is for most practical applications,
+ // Hugo content files being one of them, considered to be unique.
+ UniqueID() string
+
+ FileInfo() hugofs.FileMetaInfo
+}
+
+// FileInfo describes a source file.
+type FileInfo struct {
+
+ // Absolute filename to the file on disk.
+ filename string
+
+ sp *SourceSpec
+
+ fi hugofs.FileMetaInfo
+
+ // Derived from filename
+ ext string // Extension without any "."
+ lang string
+
+ name string
+
+ dir string
+ relDir string
+ relPath string
+ baseName string
+ translationBaseName string
+ contentBaseName string
+ section string
+ classifier files.ContentClass
+
+ uniqueID string
+
+ lazyInit sync.Once
+}
+
+// Filename returns a file's absolute path and filename on disk.
+func (fi *FileInfo) Filename() string { return fi.filename }
+
+// Path gets the relative path including file name and extension. The directory
+// is relative to the content root.
+func (fi *FileInfo) Path() string { return fi.relPath }
+
+// Dir gets the name of the directory that contains this file. The directory is
+// relative to the content root.
+func (fi *FileInfo) Dir() string { return fi.relDir }
+
+// Extension is an alias to Ext().
+func (fi *FileInfo) Extension() string {
+ helpers.Deprecated(".File.Extension", "Use .File.Ext instead. ", false)
+ return fi.Ext()
+}
+
+// Ext returns a file's extension without the leading period (ie. "md").
+func (fi *FileInfo) Ext() string { return fi.ext }
+
+// Lang returns a file's language (ie. "sv").
+func (fi *FileInfo) Lang() string { return fi.lang }
+
+// LogicalName returns a file's name and extension (ie. "page.sv.md").
+func (fi *FileInfo) LogicalName() string { return fi.name }
+
+// BaseFileName returns a file's name without extension (ie. "page.sv").
+func (fi *FileInfo) BaseFileName() string { return fi.baseName }
+
+// TranslationBaseName returns a file's translation base name without the
+// language segment (ie. "page").
+func (fi *FileInfo) TranslationBaseName() string { return fi.translationBaseName }
+
+// ContentBaseName is either the TranslationBaseName or the name of the containing
+// folder if the file is a leaf bundle.
+func (fi *FileInfo) ContentBaseName() string {
+ fi.init()
+ return fi.contentBaseName
+}
+
+// Section returns a file's section.
+func (fi *FileInfo) Section() string {
+ fi.init()
+ return fi.section
+}
+
+// UniqueID returns a file's unique, MD5 hash identifier.
+func (fi *FileInfo) UniqueID() string {
+ fi.init()
+ return fi.uniqueID
+}
+
+// FileInfo returns a file's underlying os.FileInfo.
+func (fi *FileInfo) FileInfo() hugofs.FileMetaInfo { return fi.fi }
+
+func (fi *FileInfo) String() string { return fi.BaseFileName() }
+
+// Open implements ReadableFile.
+func (fi *FileInfo) Open() (hugio.ReadSeekCloser, error) {
+ f, err := fi.fi.Meta().Open()
+
+ return f, err
+}
+
+func (fi *FileInfo) IsZero() bool {
+ return fi == nil
+}
+
+// We create a lot of these FileInfo objects, but some parts of them are used only
+// in certain cases and are slightly expensive to construct.
+func (fi *FileInfo) init() {
+ fi.lazyInit.Do(func() {
+ relDir := strings.Trim(fi.relDir, helpers.FilePathSeparator)
+ parts := strings.Split(relDir, helpers.FilePathSeparator)
+ var section string
+ if (fi.classifier != files.ContentClassLeaf && len(parts) == 1) || len(parts) > 1 {
+ section = parts[0]
+ }
+ fi.section = section
+
+ if fi.classifier.IsBundle() && len(parts) > 0 {
+ fi.contentBaseName = parts[len(parts)-1]
+ } else {
+ fi.contentBaseName = fi.translationBaseName
+ }
+
+ fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.relPath))
+ })
+}
+
+// NewTestFile creates a partially filled File used in unit tests.
+// TODO(bep) improve this package
+func NewTestFile(filename string) *FileInfo {
+ base := filepath.Base(filepath.Dir(filename))
+ return &FileInfo{
+ filename: filename,
+ translationBaseName: base,
+ }
+}
+
+func (sp *SourceSpec) NewFileInfoFrom(path, filename string) (*FileInfo, error) {
+ meta := &hugofs.FileMeta{
+ Filename: filename,
+ Path: path,
+ }
+
+ return sp.NewFileInfo(hugofs.NewFileMetaInfo(nil, meta))
+}
+
+func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) {
+ m := fi.Meta()
+
+ filename := m.Filename
+ relPath := m.Path
+
+ if relPath == "" {
+ return nil, fmt.Errorf("no Path provided by %v (%T)", m, m.Fs)
+ }
+
+ if filename == "" {
+ return nil, fmt.Errorf("no Filename provided by %v (%T)", m, m.Fs)
+ }
+
+ relDir := filepath.Dir(relPath)
+ if relDir == "." {
+ relDir = ""
+ }
+ if !strings.HasSuffix(relDir, helpers.FilePathSeparator) {
+ relDir = relDir + helpers.FilePathSeparator
+ }
+
+ lang := m.Lang
+ translationBaseName := m.TranslationBaseName
+
+ dir, name := filepath.Split(relPath)
+ if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
+ dir = dir + helpers.FilePathSeparator
+ }
+
+ ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(name), "."))
+ baseName := paths.Filename(name)
+
+ if translationBaseName == "" {
+ // This is usually provided by the filesystem. But this FileInfo is also
+ // created in a standalone context when doing "hugo new". This is
+ // an approximate implementation, which is "good enough" in that case.
+ fileLangExt := filepath.Ext(baseName)
+ translationBaseName = strings.TrimSuffix(baseName, fileLangExt)
+ }
+
+ f := &FileInfo{
+ sp: sp,
+ filename: filename,
+ fi: fi,
+ lang: lang,
+ ext: ext,
+ dir: dir,
+ relDir: relDir, // Dir()
+ relPath: relPath, // Path()
+ name: name,
+ baseName: baseName, // BaseFileName()
+ translationBaseName: translationBaseName,
+ classifier: m.Classifier,
+ }
+
+ return f, nil
+}
diff --git a/source/fileInfo_test.go b/source/fileInfo_test.go
new file mode 100644
index 000000000..b8bb33cd3
--- /dev/null
+++ b/source/fileInfo_test.go
@@ -0,0 +1,57 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestFileInfo(t *testing.T) {
+ c := qt.New(t)
+
+ s := newTestSourceSpec()
+
+ for _, this := range []struct {
+ base string
+ filename string
+ assert func(f *FileInfo)
+ }{
+ {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/page.md"), func(f *FileInfo) {
+ c.Assert(f.Filename(), qt.Equals, filepath.FromSlash("/a/b/page.md"))
+ c.Assert(f.Dir(), qt.Equals, filepath.FromSlash("b/"))
+ c.Assert(f.Path(), qt.Equals, filepath.FromSlash("b/page.md"))
+ c.Assert(f.Section(), qt.Equals, "b")
+ c.Assert(f.TranslationBaseName(), qt.Equals, filepath.FromSlash("page"))
+ c.Assert(f.BaseFileName(), qt.Equals, filepath.FromSlash("page"))
+ }},
+ {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/c/d/page.md"), func(f *FileInfo) {
+ c.Assert(f.Section(), qt.Equals, "b")
+ }},
+ {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/page.en.MD"), func(f *FileInfo) {
+ c.Assert(f.Section(), qt.Equals, "b")
+ c.Assert(f.Path(), qt.Equals, filepath.FromSlash("b/page.en.MD"))
+ c.Assert(f.TranslationBaseName(), qt.Equals, filepath.FromSlash("page"))
+ c.Assert(f.BaseFileName(), qt.Equals, filepath.FromSlash("page.en"))
+ }},
+ } {
+ path := strings.TrimPrefix(this.filename, this.base)
+ f, err := s.NewFileInfoFrom(path, this.filename)
+ c.Assert(err, qt.IsNil)
+ this.assert(f)
+ }
+}
diff --git a/source/filesystem.go b/source/filesystem.go
new file mode 100644
index 000000000..79d027c5c
--- /dev/null
+++ b/source/filesystem.go
@@ -0,0 +1,121 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "fmt"
+ "path/filepath"
+ "sync"
+
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+// Filesystem represents a source filesystem.
+type Filesystem struct {
+ files []File
+ filesInit sync.Once
+ filesInitErr error
+
+ Base string
+
+ fi hugofs.FileMetaInfo
+
+ SourceSpec
+}
+
+// NewFilesystem returns a new filesystem for a given source spec.
+func (sp SourceSpec) NewFilesystem(base string) *Filesystem {
+ return &Filesystem{SourceSpec: sp, Base: base}
+}
+
+func (sp SourceSpec) NewFilesystemFromFileMetaInfo(fi hugofs.FileMetaInfo) *Filesystem {
+ return &Filesystem{SourceSpec: sp, fi: fi}
+}
+
+// Files returns a slice of readable files.
+func (f *Filesystem) Files() ([]File, error) {
+ f.filesInit.Do(func() {
+ err := f.captureFiles()
+ if err != nil {
+ f.filesInitErr = fmt.Errorf("capture files: %w", err)
+ }
+ })
+ return f.files, f.filesInitErr
+}
+
+// add populates a file in the Filesystem.files
+func (f *Filesystem) add(name string, fi hugofs.FileMetaInfo) (err error) {
+ var file File
+
+ file, err = f.SourceSpec.NewFileInfo(fi)
+ if err != nil {
+ return err
+ }
+
+ f.files = append(f.files, file)
+
+ return err
+}
+
+func (f *Filesystem) captureFiles() error {
+ walker := func(path string, fi hugofs.FileMetaInfo, err error) error {
+ if err != nil {
+ return err
+ }
+
+ if fi.IsDir() {
+ return nil
+ }
+
+ meta := fi.Meta()
+ filename := meta.Filename
+
+ b, err := f.shouldRead(filename, fi)
+ if err != nil {
+ return err
+ }
+
+ if b {
+ err = f.add(filename, fi)
+ }
+
+ return err
+ }
+
+ w := hugofs.NewWalkway(hugofs.WalkwayConfig{
+ Fs: f.SourceFs,
+ Info: f.fi,
+ Root: f.Base,
+ WalkFn: walker,
+ })
+
+ return w.Walk()
+}
+
+func (f *Filesystem) shouldRead(filename string, fi hugofs.FileMetaInfo) (bool, error) {
+ ignore := f.SourceSpec.IgnoreFile(fi.Meta().Filename)
+
+ if fi.IsDir() {
+ if ignore {
+ return false, filepath.SkipDir
+ }
+ return false, nil
+ }
+
+ if ignore {
+ return false, nil
+ }
+
+ return true, nil
+}
diff --git a/source/filesystem_test.go b/source/filesystem_test.go
new file mode 100644
index 000000000..31e3bdd70
--- /dev/null
+++ b/source/filesystem_test.go
@@ -0,0 +1,102 @@
+// Copyright 2015 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "fmt"
+ "path/filepath"
+ "runtime"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
+func TestEmptySourceFilesystem(t *testing.T) {
+ c := qt.New(t)
+ ss := newTestSourceSpec()
+ src := ss.NewFilesystem("")
+ files, err := src.Files()
+ c.Assert(err, qt.IsNil)
+ if len(files) != 0 {
+ t.Errorf("new filesystem should contain 0 files.")
+ }
+}
+
+func TestUnicodeNorm(t *testing.T) {
+ if runtime.GOOS != "darwin" {
+ // Normalization code is only for Mac OS, since it is not necessary for other OSes.
+ return
+ }
+
+ c := qt.New(t)
+
+ paths := []struct {
+ NFC string
+ NFD string
+ }{
+ {NFC: "å", NFD: "\x61\xcc\x8a"},
+ {NFC: "é", NFD: "\x65\xcc\x81"},
+ }
+
+ ss := newTestSourceSpec()
+ fi := hugofs.NewFileMetaInfo(nil, hugofs.NewFileMeta())
+
+ for i, path := range paths {
+ base := fmt.Sprintf("base%d", i)
+ c.Assert(afero.WriteFile(ss.Fs.Source, filepath.Join(base, path.NFD), []byte("some data"), 0777), qt.IsNil)
+ src := ss.NewFilesystem(base)
+ _ = src.add(path.NFD, fi)
+ files, err := src.Files()
+ c.Assert(err, qt.IsNil)
+ f := files[0]
+ if f.BaseFileName() != path.NFC {
+ t.Fatalf("file %q name in NFD form should be normalized (%s)", f.BaseFileName(), path.NFC)
+ }
+ }
+}
+
+func newTestConfig() config.Provider {
+ v := config.NewWithTestDefaults()
+ _, err := langs.LoadLanguageSettings(v, nil)
+ if err != nil {
+ panic(err)
+ }
+ mod, err := modules.CreateProjectModule(v)
+ if err != nil {
+ panic(err)
+ }
+ v.Set("allModules", modules.Modules{mod})
+
+ return v
+}
+
+func newTestSourceSpec() *SourceSpec {
+ v := newTestConfig()
+ fs := hugofs.NewFrom(hugofs.NewBaseFileDecorator(afero.NewMemMapFs()), v)
+ ps, err := helpers.NewPathSpec(fs, v, nil)
+ if err != nil {
+ panic(err)
+ }
+ return NewSourceSpec(ps, nil, fs.Source)
+}
diff --git a/source/sourceSpec.go b/source/sourceSpec.go
new file mode 100644
index 000000000..0b7306a75
--- /dev/null
+++ b/source/sourceSpec.go
@@ -0,0 +1,158 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+
+ "github.com/gohugoio/hugo/hugofs/glob"
+
+ "github.com/gohugoio/hugo/langs"
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/cast"
+)
+
+// SourceSpec abstracts language-specific file creation.
+// TODO(bep) rename to Spec
+type SourceSpec struct {
+ *helpers.PathSpec
+
+ SourceFs afero.Fs
+
+ shouldInclude func(filename string) bool
+
+ Languages map[string]any
+ DefaultContentLanguage string
+ DisabledLanguages map[string]bool
+}
+
+// NewSourceSpec initializes SourceSpec using languages the given filesystem and PathSpec.
+func NewSourceSpec(ps *helpers.PathSpec, inclusionFilter *glob.FilenameFilter, fs afero.Fs) *SourceSpec {
+ cfg := ps.Cfg
+ defaultLang := cfg.GetString("defaultContentLanguage")
+ languages := cfg.GetStringMap("languages")
+
+ disabledLangsSet := make(map[string]bool)
+
+ for _, disabledLang := range cfg.GetStringSlice("disableLanguages") {
+ disabledLangsSet[disabledLang] = true
+ }
+
+ if len(languages) == 0 {
+ l := langs.NewDefaultLanguage(cfg)
+ languages[l.Lang] = l
+ defaultLang = l.Lang
+ }
+
+ ignoreFiles := cast.ToStringSlice(cfg.Get("ignoreFiles"))
+ var regexps []*regexp.Regexp
+ if len(ignoreFiles) > 0 {
+ for _, ignorePattern := range ignoreFiles {
+ re, err := regexp.Compile(ignorePattern)
+ if err != nil {
+ helpers.DistinctErrorLog.Printf("Invalid regexp %q in ignoreFiles: %s", ignorePattern, err)
+ } else {
+ regexps = append(regexps, re)
+ }
+
+ }
+ }
+ shouldInclude := func(filename string) bool {
+ if !inclusionFilter.Match(filename, false) {
+ return false
+ }
+ for _, r := range regexps {
+ if r.MatchString(filename) {
+ return false
+ }
+ }
+ return true
+ }
+
+ return &SourceSpec{shouldInclude: shouldInclude, PathSpec: ps, SourceFs: fs, Languages: languages, DefaultContentLanguage: defaultLang, DisabledLanguages: disabledLangsSet}
+}
+
+// IgnoreFile returns whether a given file should be ignored.
+func (s *SourceSpec) IgnoreFile(filename string) bool {
+ if filename == "" {
+ if _, ok := s.SourceFs.(*afero.OsFs); ok {
+ return true
+ }
+ return false
+ }
+
+ base := filepath.Base(filename)
+
+ if len(base) > 0 {
+ first := base[0]
+ last := base[len(base)-1]
+ if first == '.' ||
+ first == '#' ||
+ last == '~' {
+ return true
+ }
+ }
+
+ if !s.shouldInclude(filename) {
+ return true
+ }
+
+ if runtime.GOOS == "windows" {
+ // Also check the forward slash variant if different.
+ unixFilename := filepath.ToSlash(filename)
+ if unixFilename != filename {
+ if !s.shouldInclude(unixFilename) {
+ return true
+ }
+ }
+ }
+
+ return false
+}
+
+// IsRegularSourceFile returns whether filename represents a regular file in the
+// source filesystem.
+func (s *SourceSpec) IsRegularSourceFile(filename string) (bool, error) {
+ fi, err := helpers.LstatIfPossible(s.SourceFs, filename)
+ if err != nil {
+ return false, err
+ }
+
+ if fi.IsDir() {
+ return false, nil
+ }
+
+ if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
+ link, err := filepath.EvalSymlinks(filename)
+ if err != nil {
+ return false, err
+ }
+
+ fi, err = helpers.LstatIfPossible(s.SourceFs, link)
+ if err != nil {
+ return false, err
+ }
+
+ if fi.IsDir() {
+ return false, nil
+ }
+ }
+
+ return true, nil
+}
diff --git a/temp/0.86.1-relnotes-ready.md b/temp/0.86.1-relnotes-ready.md
new file mode 100644
index 000000000..686862375
--- /dev/null
+++ b/temp/0.86.1-relnotes-ready.md
@@ -0,0 +1,8 @@
+
+
+This is a bug-fix release with one important fix.
+
+* config: Fix a potential deadlock in config reading [94b616bd](https://github.com/gohugoio/hugo/commit/94b616bdfad177daa99f5e87535943f509198f6f) [@bep](https://github.com/bep) [#8791](https://github.com/gohugoio/hugo/issues/8791)
+
+
+
diff --git a/tpl/cast/cast.go b/tpl/cast/cast.go
new file mode 100644
index 000000000..535697f9e
--- /dev/null
+++ b/tpl/cast/cast.go
@@ -0,0 +1,63 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package cast provides template functions for data type conversions.
+package cast
+
+import (
+ "html/template"
+
+ _cast "github.com/spf13/cast"
+)
+
+// New returns a new instance of the cast-namespaced template functions.
+func New() *Namespace {
+ return &Namespace{}
+}
+
// Namespace provides template functions for the "cast" namespace.
// It carries no state.
type Namespace struct{}
+
+// ToInt converts v to an int.
+func (ns *Namespace) ToInt(v any) (int, error) {
+ v = convertTemplateToString(v)
+ return _cast.ToIntE(v)
+}
+
+// ToString converts v to a string.
+func (ns *Namespace) ToString(v any) (string, error) {
+ return _cast.ToStringE(v)
+}
+
+// ToFloat converts v to a float.
+func (ns *Namespace) ToFloat(v any) (float64, error) {
+ v = convertTemplateToString(v)
+ return _cast.ToFloat64E(v)
+}
+
// convertTemplateToString unwraps the html/template string types (HTML, CSS,
// HTMLAttr, JS, JSStr) to a plain string so the cast library can parse their
// contents. Any other value is returned unchanged.
func convertTemplateToString(v any) any {
	switch s := v.(type) {
	case template.HTML:
		return string(s)
	case template.CSS:
		return string(s)
	case template.HTMLAttr:
		return string(s)
	case template.JS:
		return string(s)
	case template.JSStr:
		return string(s)
	}
	return v
}
diff --git a/tpl/cast/cast_test.go b/tpl/cast/cast_test.go
new file mode 100644
index 000000000..5b4a36c3a
--- /dev/null
+++ b/tpl/cast/cast_test.go
@@ -0,0 +1,119 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cast
+
+import (
+ "html/template"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestToInt(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for i, test := range []struct {
+ v any
+ expect any
+ }{
+ {"1", 1},
+ {template.HTML("2"), 2},
+ {template.CSS("3"), 3},
+ {template.HTMLAttr("4"), 4},
+ {template.JS("5"), 5},
+ {template.JSStr("6"), 6},
+ {"a", false},
+ {t, false},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test.v)
+
+ result, err := ns.ToInt(test.v)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, qt.Equals, test.expect, errMsg)
+ }
+}
+
+func TestToString(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New()
+
+ for i, test := range []struct {
+ v any
+ expect any
+ }{
+ {1, "1"},
+ {template.HTML("2"), "2"},
+ {"a", "a"},
+ {t, false},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test.v)
+
+ result, err := ns.ToString(test.v)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, qt.Equals, test.expect, errMsg)
+ }
+}
+
+func TestToFloat(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New()
+
+ for i, test := range []struct {
+ v any
+ expect any
+ }{
+ {"1", 1.0},
+ {template.HTML("2"), 2.0},
+ {template.CSS("3"), 3.0},
+ {template.HTMLAttr("4"), 4.0},
+ {template.JS("-5.67"), -5.67},
+ {template.JSStr("6"), 6.0},
+ {"1.23", 1.23},
+ {"-1.23", -1.23},
+ {"0", 0.0},
+ {float64(2.12), 2.12},
+ {int64(123), 123.0},
+ {2, 2.0},
+ {t, false},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test.v)
+
+ result, err := ns.ToFloat(test.v)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, qt.Equals, test.expect, errMsg)
+ }
+}
diff --git a/tpl/cast/docshelper.go b/tpl/cast/docshelper.go
new file mode 100644
index 000000000..1c7b9c888
--- /dev/null
+++ b/tpl/cast/docshelper.go
@@ -0,0 +1,56 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cast
+
+import (
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/docshelper"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/resources/page"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+// This file provides documentation support and is randomly put into this package.
+func init() {
+ docsProvider := func() docshelper.DocProvider {
+ cfg := config.New()
+ d := &deps.Deps{
+ Cfg: cfg,
+ Log: loggers.NewErrorLogger(),
+ BuildStartListeners: &deps.Listeners{},
+ Language: langs.NewDefaultLanguage(cfg),
+ Site: page.NewDummyHugoSite(newTestConfig()),
+ }
+
+ var namespaces internal.TemplateFuncsNamespaces
+
+ for _, nsf := range internal.TemplateFuncsNamespaceRegistry {
+ nf := nsf(d)
+ namespaces = append(namespaces, nf)
+
+ }
+
+ return docshelper.DocProvider{"tpl": map[string]any{"funcs": namespaces}}
+ }
+
+ docshelper.AddDocProviderFunc(docsProvider)
+}
+
+func newTestConfig() config.Provider {
+ v := config.New()
+ v.Set("contentDir", "content")
+ return v
+}
diff --git a/tpl/cast/init.go b/tpl/cast/init.go
new file mode 100644
index 000000000..f1badf993
--- /dev/null
+++ b/tpl/cast/init.go
@@ -0,0 +1,57 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cast
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "cast"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New()
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.ToInt,
+ []string{"int"},
+ [][2]string{
+ {`{{ "1234" | int | printf "%T" }}`, `int`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ToString,
+ []string{"string"},
+ [][2]string{
+ {`{{ 1234 | string | printf "%T" }}`, `string`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ToFloat,
+ []string{"float"},
+ [][2]string{
+ {`{{ "1234" | float | printf "%T" }}`, `float64`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/collections/append.go b/tpl/collections/append.go
new file mode 100644
index 000000000..752ad2fa0
--- /dev/null
+++ b/tpl/collections/append.go
@@ -0,0 +1,37 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+
+ "github.com/gohugoio/hugo/common/collections"
+)
+
+// Append appends the arguments up to the last one to the slice in the last argument.
+// This construct allows template constructs like this:
+// {{ $pages = $pages | append $p2 $p1 }}
+// Note that with 2 arguments where both are slices of the same type,
+// the first slice will be appended to the second:
+// {{ $pages = $pages | append .Site.RegularPages }}
+func (ns *Namespace) Append(args ...any) (any, error) {
+ if len(args) < 2 {
+ return nil, errors.New("need at least 2 arguments to append")
+ }
+
+ to := args[len(args)-1]
+ from := args[:len(args)-1]
+
+ return collections.Append(to, from...)
+}
diff --git a/tpl/collections/append_test.go b/tpl/collections/append_test.go
new file mode 100644
index 000000000..232781522
--- /dev/null
+++ b/tpl/collections/append_test.go
@@ -0,0 +1,69 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "reflect"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+)
+
+// Also see tests in common/collection.
+func TestAppend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ for i, test := range []struct {
+ start any
+ addend []any
+ expected any
+ }{
+ {[]string{"a", "b"}, []any{"c"}, []string{"a", "b", "c"}},
+ {[]string{"a", "b"}, []any{"c", "d", "e"}, []string{"a", "b", "c", "d", "e"}},
+ {[]string{"a", "b"}, []any{[]string{"c", "d", "e"}}, []string{"a", "b", "c", "d", "e"}},
+ // Errors
+ {"", []any{[]string{"a", "b"}}, false},
+ {[]string{"a", "b"}, []any{}, false},
+ // No string concatenation.
+ {
+ "ab",
+ []any{"c"},
+ false,
+ },
+ } {
+
+ errMsg := qt.Commentf("[%d]", i)
+
+ args := append(test.addend, test.start)
+
+ result, err := ns.Append(args...)
+
+ if b, ok := test.expected.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+
+ if !reflect.DeepEqual(test.expected, result) {
+ t.Fatalf("%s got\n%T: %v\nexpected\n%T: %v", errMsg, result, result, test.expected, test.expected)
+ }
+ }
+}
diff --git a/tpl/collections/apply.go b/tpl/collections/apply.go
new file mode 100644
index 000000000..9fd5c2d0c
--- /dev/null
+++ b/tpl/collections/apply.go
@@ -0,0 +1,164 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+// Apply takes a map, array, or slice and returns a new slice with the function fname applied over it.
+func (ns *Namespace) Apply(ctx context.Context, seq any, fname string, args ...any) (any, error) {
+ if seq == nil {
+ return make([]any, 0), nil
+ }
+
+ if fname == "apply" {
+ return nil, errors.New("can't apply myself (no turtles allowed)")
+ }
+
+ seqv := reflect.ValueOf(seq)
+ seqv, isNil := indirect(seqv)
+ if isNil {
+ return nil, errors.New("can't iterate over a nil value")
+ }
+
+ fnv, found := ns.lookupFunc(fname)
+ if !found {
+ return nil, errors.New("can't find function " + fname)
+ }
+
+ switch seqv.Kind() {
+ case reflect.Array, reflect.Slice:
+ r := make([]any, seqv.Len())
+ for i := 0; i < seqv.Len(); i++ {
+ vv := seqv.Index(i)
+
+ vvv, err := applyFnToThis(ctx, fnv, vv, args...)
+ if err != nil {
+ return nil, err
+ }
+
+ r[i] = vvv.Interface()
+ }
+
+ return r, nil
+ default:
+ return nil, fmt.Errorf("can't apply over %v", seq)
+ }
+}
+
+func applyFnToThis(ctx context.Context, fn, this reflect.Value, args ...any) (reflect.Value, error) {
+ num := fn.Type().NumIn()
+ if num > 0 && fn.Type().In(0).Implements(hreflect.ContextInterface) {
+ args = append([]any{ctx}, args...)
+ }
+
+ n := make([]reflect.Value, len(args))
+ for i, arg := range args {
+ if arg == "." {
+ n[i] = this
+ } else {
+ n[i] = reflect.ValueOf(arg)
+ }
+ }
+
+ if fn.Type().IsVariadic() {
+ num--
+ }
+
+ // TODO(bep) see #1098 - also see template_tests.go
+ /*if len(args) < num {
+ return reflect.ValueOf(nil), errors.New("Too few arguments")
+ } else if len(args) > num {
+ return reflect.ValueOf(nil), errors.New("Too many arguments")
+ }*/
+
+ for i := 0; i < num; i++ {
+ // AssignableTo reports whether xt is assignable to type targ.
+ if xt, targ := n[i].Type(), fn.Type().In(i); !xt.AssignableTo(targ) {
+ return reflect.ValueOf(nil), errors.New("called apply using " + xt.String() + " as type " + targ.String())
+ }
+ }
+
+ res := fn.Call(n)
+
+ if len(res) == 1 || res[1].IsNil() {
+ return res[0], nil
+ }
+ return reflect.ValueOf(nil), res[1].Interface().(error)
+}
+
+func (ns *Namespace) lookupFunc(fname string) (reflect.Value, bool) {
+ namespace, methodName, ok := strings.Cut(fname, ".")
+ if !ok {
+ templ := ns.deps.Tmpl().(tpl.TemplateFuncGetter)
+ return templ.GetFunc(fname)
+ }
+
+ // Namespace
+ nv, found := ns.lookupFunc(namespace)
+ if !found {
+ return reflect.Value{}, false
+ }
+
+ fn, ok := nv.Interface().(func(...any) (any, error))
+ if !ok {
+ return reflect.Value{}, false
+ }
+ v, err := fn()
+ if err != nil {
+ panic(err)
+ }
+ nv = reflect.ValueOf(v)
+
+ // method
+ m := hreflect.GetMethodByName(nv, methodName)
+
+ if m.Kind() == reflect.Invalid {
+ return reflect.Value{}, false
+ }
+ return m, true
+}
+
// indirect is borrowed from the Go stdlib: 'text/template/exec.go'.
// It dereferences pointers and empty interfaces until a concrete value is
// reached, reporting whether a nil was encountered on the way.
func indirect(v reflect.Value) (rv reflect.Value, isNil bool) {
	for v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface {
		if v.IsNil() {
			return v, true
		}
		if v.Kind() == reflect.Interface && v.NumMethod() > 0 {
			// Stop at non-empty interfaces (matches text/template behavior).
			break
		}
		v = v.Elem()
	}
	return v, false
}
+
// indirectInterface unwraps empty interface values only, leaving pointers
// intact, and reports whether a nil was encountered.
func indirectInterface(v reflect.Value) (rv reflect.Value, isNil bool) {
	for v.Kind() == reflect.Interface {
		if v.IsNil() {
			return v, true
		}
		if v.NumMethod() > 0 {
			// Stop at non-empty interfaces.
			break
		}
		v = v.Elem()
	}
	return v, false
}
diff --git a/tpl/collections/apply_test.go b/tpl/collections/apply_test.go
new file mode 100644
index 000000000..2c7783fd9
--- /dev/null
+++ b/tpl/collections/apply_test.go
@@ -0,0 +1,98 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "reflect"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/output"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+type templateFinder int
+
+func (templateFinder) Lookup(name string) (tpl.Template, bool) {
+ return nil, false
+}
+
+func (templateFinder) HasTemplate(name string) bool {
+ return false
+}
+
+func (templateFinder) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
+ return nil, false, false
+}
+
+func (templateFinder) LookupVariants(name string) []tpl.Template {
+ return nil
+}
+
+func (templateFinder) LookupLayout(d output.LayoutDescriptor, f output.Format) (tpl.Template, bool, error) {
+ return nil, false, nil
+}
+
+func (templateFinder) Execute(t tpl.Template, wr io.Writer, data any) error {
+ return nil
+}
+
+func (templateFinder) ExecuteWithContext(ctx context.Context, t tpl.Template, wr io.Writer, data any) error {
+ return nil
+}
+
+func (templateFinder) GetFunc(name string) (reflect.Value, bool) {
+ if name == "dobedobedo" {
+ return reflect.Value{}, false
+ }
+
+ return reflect.ValueOf(fmt.Sprint), true
+}
+
+func TestApply(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ d := &deps.Deps{Language: langs.NewDefaultLanguage(config.New())}
+ d.SetTmpl(new(templateFinder))
+ ns := New(d)
+
+ strings := []any{"a\n", "b\n"}
+
+ ctx := context.Background()
+
+ result, err := ns.Apply(ctx, strings, "print", "a", "b", "c")
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.DeepEquals, []any{"abc", "abc"})
+
+ _, err = ns.Apply(ctx, strings, "apply", ".")
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ var nilErr *error
+ _, err = ns.Apply(ctx, nilErr, "chomp", ".")
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ _, err = ns.Apply(ctx, strings, "dobedobedo", ".")
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ _, err = ns.Apply(ctx, strings, "foo.Chomp", "c\n")
+ if err == nil {
+ t.Errorf("apply with unknown func should fail")
+ }
+}
diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go
new file mode 100644
index 000000000..299a504f4
--- /dev/null
+++ b/tpl/collections/collections.go
@@ -0,0 +1,781 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package collections provides template functions for manipulating collections
+// such as arrays, maps, and slices.
+package collections
+
+import (
+ "fmt"
+ "html/template"
+ "math/rand"
+ "net/url"
+ "reflect"
+ "strings"
+ "time"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/collections"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/common/types"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/tpl/compare"
+ "github.com/spf13/cast"
+)
+
// init seeds the package-global math/rand source so randomized results vary
// between runs.
func init() {
	// htime.Now cannot be used here
	rand.Seed(time.Now().UTC().UnixNano())
}
+
+// New returns a new instance of the collections-namespaced template functions.
+func New(deps *deps.Deps) *Namespace {
+ if deps.Language == nil {
+ panic("language must be set")
+ }
+
+ loc := langs.GetLocation(deps.Language)
+
+ return &Namespace{
+ loc: loc,
+ sortComp: compare.New(loc, true),
+ deps: deps,
+ }
+}
+
+// Namespace provides template functions for the "collections" namespace.
+type Namespace struct {
+ loc *time.Location
+ sortComp *compare.Namespace
+ deps *deps.Deps
+}
+
+// After returns all the items after the first N in a rangeable list.
+func (ns *Namespace) After(index any, seq any) (any, error) {
+ if index == nil || seq == nil {
+ return nil, errors.New("both limit and seq must be provided")
+ }
+
+ indexv, err := cast.ToIntE(index)
+ if err != nil {
+ return nil, err
+ }
+
+ if indexv < 0 {
+ return nil, errors.New("sequence bounds out of range [" + cast.ToString(indexv) + ":]")
+ }
+
+ seqv := reflect.ValueOf(seq)
+ seqv, isNil := indirect(seqv)
+ if isNil {
+ return nil, errors.New("can't iterate over a nil value")
+ }
+
+ switch seqv.Kind() {
+ case reflect.Array, reflect.Slice, reflect.String:
+ // okay
+ default:
+ return nil, errors.New("can't iterate over " + reflect.ValueOf(seq).Type().String())
+ }
+
+ if indexv >= seqv.Len() {
+ return seqv.Slice(0, 0).Interface(), nil
+ }
+
+ return seqv.Slice(indexv, seqv.Len()).Interface(), nil
+}
+
+// Delimit takes a given sequence and returns a delimited HTML string.
+// If last is passed to the function, it will be used as the final delimiter.
+func (ns *Namespace) Delimit(seq, delimiter any, last ...any) (template.HTML, error) {
+ d, err := cast.ToStringE(delimiter)
+ if err != nil {
+ return "", err
+ }
+
+ var dLast *string
+ if len(last) > 0 {
+ l := last[0]
+ dStr, err := cast.ToStringE(l)
+ if err != nil {
+ dLast = nil
+ } else {
+ dLast = &dStr
+ }
+ }
+
+ seqv := reflect.ValueOf(seq)
+ seqv, isNil := indirect(seqv)
+ if isNil {
+ return "", errors.New("can't iterate over a nil value")
+ }
+
+ var str string
+ switch seqv.Kind() {
+ case reflect.Map:
+ sortSeq, err := ns.Sort(seq)
+ if err != nil {
+ return "", err
+ }
+ seqv = reflect.ValueOf(sortSeq)
+ fallthrough
+ case reflect.Array, reflect.Slice, reflect.String:
+ for i := 0; i < seqv.Len(); i++ {
+ val := seqv.Index(i).Interface()
+ valStr, err := cast.ToStringE(val)
+ if err != nil {
+ continue
+ }
+ switch {
+ case i == seqv.Len()-2 && dLast != nil:
+ str += valStr + *dLast
+ case i == seqv.Len()-1:
+ str += valStr
+ default:
+ str += valStr + d
+ }
+ }
+
+ default:
+ return "", fmt.Errorf("can't iterate over %v", seq)
+ }
+
+ return template.HTML(str), nil
+}
+
+// Dictionary creates a map[string]interface{} from the given parameters by
+// walking the parameters and treating them as key-value pairs. The number
+// of parameters must be even.
+// The keys can be string slices, which will create the needed nested structure.
+func (ns *Namespace) Dictionary(values ...any) (map[string]any, error) {
+ if len(values)%2 != 0 {
+ return nil, errors.New("invalid dictionary call")
+ }
+
+ root := make(map[string]any)
+
+ for i := 0; i < len(values); i += 2 {
+ dict := root
+ var key string
+ switch v := values[i].(type) {
+ case string:
+ key = v
+ case []string:
+ for i := 0; i < len(v)-1; i++ {
+ key = v[i]
+ var m map[string]any
+ v, found := dict[key]
+ if found {
+ m = v.(map[string]any)
+ } else {
+ m = make(map[string]any)
+ dict[key] = m
+ }
+ dict = m
+ }
+ key = v[len(v)-1]
+ default:
+ return nil, errors.New("invalid dictionary key")
+ }
+ dict[key] = values[i+1]
+ }
+
+ return root, nil
+}
+
+// EchoParam returns a given value if it is set; otherwise, it returns an
+// empty string.
+func (ns *Namespace) EchoParam(a, key any) any {
+ av, isNil := indirect(reflect.ValueOf(a))
+ if isNil {
+ return ""
+ }
+
+ var avv reflect.Value
+ switch av.Kind() {
+ case reflect.Array, reflect.Slice:
+ index, ok := key.(int)
+ if ok && av.Len() > index {
+ avv = av.Index(index)
+ }
+ case reflect.Map:
+ kv := reflect.ValueOf(key)
+ if kv.Type().AssignableTo(av.Type().Key()) {
+ avv = av.MapIndex(kv)
+ }
+ }
+
+ avv, isNil = indirect(avv)
+
+ if isNil {
+ return ""
+ }
+
+ if avv.IsValid() {
+ switch avv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return avv.Int()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return avv.Uint()
+ case reflect.Float32, reflect.Float64:
+ return avv.Float()
+ case reflect.String:
+ return avv.String()
+ }
+ }
+
+ return ""
+}
+
+// First returns the first N items in a rangeable list.
+func (ns *Namespace) First(limit any, seq any) (any, error) {
+ if limit == nil || seq == nil {
+ return nil, errors.New("both limit and seq must be provided")
+ }
+
+ limitv, err := cast.ToIntE(limit)
+ if err != nil {
+ return nil, err
+ }
+
+ if limitv < 0 {
+ return nil, errors.New("sequence length must be non-negative")
+ }
+
+ seqv := reflect.ValueOf(seq)
+ seqv, isNil := indirect(seqv)
+ if isNil {
+ return nil, errors.New("can't iterate over a nil value")
+ }
+
+ switch seqv.Kind() {
+ case reflect.Array, reflect.Slice, reflect.String:
+ // okay
+ default:
+ return nil, errors.New("can't iterate over " + reflect.ValueOf(seq).Type().String())
+ }
+
+ if limitv > seqv.Len() {
+ limitv = seqv.Len()
+ }
+
+ return seqv.Slice(0, limitv).Interface(), nil
+}
+
+// In returns whether v is in the set l. l may be an array or slice.
+func (ns *Namespace) In(l any, v any) (bool, error) {
+ if l == nil || v == nil {
+ return false, nil
+ }
+
+ lv := reflect.ValueOf(l)
+ vv := reflect.ValueOf(v)
+
+ vvk := normalize(vv)
+
+ switch lv.Kind() {
+ case reflect.Array, reflect.Slice:
+ for i := 0; i < lv.Len(); i++ {
+ lvv, isNil := indirectInterface(lv.Index(i))
+ if isNil {
+ continue
+ }
+
+ lvvk := normalize(lvv)
+
+ if lvvk == vvk {
+ return true, nil
+ }
+ }
+ }
+ ss, err := cast.ToStringE(l)
+ if err != nil {
+ return false, nil
+ }
+
+ su, err := cast.ToStringE(v)
+ if err != nil {
+ return false, nil
+ }
+ return strings.Contains(ss, su), nil
+}
+
+// Intersect returns the common elements in the given sets, l1 and l2. l1 and
+// l2 must be of the same type and may be either arrays or slices.
+func (ns *Namespace) Intersect(l1, l2 any) (any, error) {
+ if l1 == nil || l2 == nil {
+ return make([]any, 0), nil
+ }
+
+ var ins *intersector
+
+ l1v := reflect.ValueOf(l1)
+ l2v := reflect.ValueOf(l2)
+
+ switch l1v.Kind() {
+ case reflect.Array, reflect.Slice:
+ ins = &intersector{r: reflect.MakeSlice(l1v.Type(), 0, 0), seen: make(map[any]bool)}
+ switch l2v.Kind() {
+ case reflect.Array, reflect.Slice:
+ for i := 0; i < l1v.Len(); i++ {
+ l1vv := l1v.Index(i)
+ if !l1vv.Type().Comparable() {
+ return make([]any, 0), errors.New("intersect does not support slices or arrays of uncomparable types")
+ }
+
+ for j := 0; j < l2v.Len(); j++ {
+ l2vv := l2v.Index(j)
+ if !l2vv.Type().Comparable() {
+ return make([]any, 0), errors.New("intersect does not support slices or arrays of uncomparable types")
+ }
+
+ ins.handleValuePair(l1vv, l2vv)
+ }
+ }
+ return ins.r.Interface(), nil
+ default:
+ return nil, errors.New("can't iterate over " + reflect.ValueOf(l2).Type().String())
+ }
+ default:
+ return nil, errors.New("can't iterate over " + reflect.ValueOf(l1).Type().String())
+ }
+}
+
+// Group groups a set of elements by the given key.
+// This is currently only supported for Pages.
+func (ns *Namespace) Group(key any, items any) (any, error) {
+ if key == nil {
+ return nil, errors.New("nil is not a valid key to group by")
+ }
+
+ if g, ok := items.(collections.Grouper); ok {
+ return g.Group(key, items)
+ }
+
+ in := newSliceElement(items)
+
+ if g, ok := in.(collections.Grouper); ok {
+ return g.Group(key, items)
+ }
+
+ return nil, fmt.Errorf("grouping not supported for type %T %T", items, in)
+}
+
+// IsSet returns whether a given array, channel, slice, or map has a key
+// defined.
+func (ns *Namespace) IsSet(a any, key any) (bool, error) {
+ av := reflect.ValueOf(a)
+ kv := reflect.ValueOf(key)
+
+ switch av.Kind() {
+ case reflect.Array, reflect.Chan, reflect.Slice:
+ k, err := cast.ToIntE(key)
+ if err != nil {
+ return false, fmt.Errorf("isset unable to use key of type %T as index", key)
+ }
+ if av.Len() > k {
+ return true, nil
+ }
+ case reflect.Map:
+ if kv.Type() == av.Type().Key() {
+ return av.MapIndex(kv).IsValid(), nil
+ }
+ default:
+ helpers.DistinctErrorLog.Printf("WARNING: calling IsSet with unsupported type %q (%T) will always return false.\n", av.Kind(), a)
+ }
+
+ return false, nil
+}
+
+// Last returns the last N items in a rangeable list.
+func (ns *Namespace) Last(limit any, seq any) (any, error) {
+ if limit == nil || seq == nil {
+ return nil, errors.New("both limit and seq must be provided")
+ }
+
+ limitv, err := cast.ToIntE(limit)
+ if err != nil {
+ return nil, err
+ }
+
+ if limitv < 0 {
+ return nil, errors.New("sequence length must be non-negative")
+ }
+
+ seqv := reflect.ValueOf(seq)
+ seqv, isNil := indirect(seqv)
+ if isNil {
+ return nil, errors.New("can't iterate over a nil value")
+ }
+
+ switch seqv.Kind() {
+ case reflect.Array, reflect.Slice, reflect.String:
+ // okay
+ default:
+ return nil, errors.New("can't iterate over " + reflect.ValueOf(seq).Type().String())
+ }
+
+ if limitv > seqv.Len() {
+ limitv = seqv.Len()
+ }
+
+ return seqv.Slice(seqv.Len()-limitv, seqv.Len()).Interface(), nil
+}
+
+// Querify encodes the given parameters in URL-encoded form ("bar=baz&foo=quux") sorted by key.
+func (ns *Namespace) Querify(params ...any) (string, error) {
+ qs := url.Values{}
+
+ if len(params) == 1 {
+ switch v := params[0].(type) {
+ case []string:
+ if len(v)%2 != 0 {
+ return "", errors.New("invalid query")
+ }
+
+ for i := 0; i < len(v); i += 2 {
+ qs.Add(v[i], v[i+1])
+ }
+
+ return qs.Encode(), nil
+
+ case []any:
+ params = v
+
+ default:
+ return "", errors.New("query keys must be strings")
+ }
+ }
+
+ if len(params)%2 != 0 {
+ return "", errors.New("invalid query")
+ }
+
+ for i := 0; i < len(params); i += 2 {
+ switch v := params[i].(type) {
+ case string:
+ qs.Add(v, fmt.Sprintf("%v", params[i+1]))
+ default:
+ return "", errors.New("query keys must be strings")
+ }
+ }
+
+ return qs.Encode(), nil
+}
+
+// Reverse creates a copy of slice and reverses it.
+func (ns *Namespace) Reverse(slice any) (any, error) {
+ if slice == nil {
+ return nil, nil
+ }
+ v := reflect.ValueOf(slice)
+
+ switch v.Kind() {
+ case reflect.Slice:
+ default:
+ return nil, errors.New("argument must be a slice")
+ }
+
+ sliceCopy := reflect.MakeSlice(v.Type(), v.Len(), v.Len())
+
+ for i := v.Len() - 1; i >= 0; i-- {
+ element := sliceCopy.Index(i)
+ element.Set(v.Index(v.Len() - 1 - i))
+ }
+
+ return sliceCopy.Interface(), nil
+}
+
+// Seq creates a sequence of integers. It's named and used as GNU's seq.
+//
+// Examples:
+// 3 => 1, 2, 3
+// 1 2 4 => 1, 3
+// -3 => -1, -2, -3
+// 1 4 => 1, 2, 3, 4
+// 1 -2 => 1, 0, -1, -2
+func (ns *Namespace) Seq(args ...any) ([]int, error) {
+ if len(args) < 1 || len(args) > 3 {
+ return nil, errors.New("invalid number of arguments to Seq")
+ }
+
+ intArgs := cast.ToIntSlice(args)
+ if len(intArgs) < 1 || len(intArgs) > 3 {
+ return nil, errors.New("invalid arguments to Seq")
+ }
+
+ inc := 1
+ var last int
+ first := intArgs[0]
+
+ if len(intArgs) == 1 {
+ last = first
+ if last == 0 {
+ return []int{}, nil
+ } else if last > 0 {
+ first = 1
+ } else {
+ first = -1
+ inc = -1
+ }
+ } else if len(intArgs) == 2 {
+ last = intArgs[1]
+ if last < first {
+ inc = -1
+ }
+ } else {
+ inc = intArgs[1]
+ last = intArgs[2]
+ if inc == 0 {
+ return nil, errors.New("'increment' must not be 0")
+ }
+ if first < last && inc < 0 {
+ return nil, errors.New("'increment' must be > 0")
+ }
+ if first > last && inc > 0 {
+ return nil, errors.New("'increment' must be < 0")
+ }
+ }
+
+ // sanity check
+ if last < -100000 {
+ return nil, errors.New("size of result exceeds limit")
+ }
+ size := ((last - first) / inc) + 1
+
+ // sanity check
+ if size <= 0 || size > 2000 {
+ return nil, errors.New("size of result exceeds limit")
+ }
+
+ seq := make([]int, size)
+ val := first
+ for i := 0; ; i++ {
+ seq[i] = val
+ val += inc
+ if (inc < 0 && val < last) || (inc > 0 && val > last) {
+ break
+ }
+ }
+
+ return seq, nil
+}
+
+// Shuffle returns the given rangeable list in a randomised order.
+func (ns *Namespace) Shuffle(seq any) (any, error) {
+ if seq == nil {
+ return nil, errors.New("both count and seq must be provided")
+ }
+
+ seqv := reflect.ValueOf(seq)
+ seqv, isNil := indirect(seqv)
+ if isNil {
+ return nil, errors.New("can't iterate over a nil value")
+ }
+
+ switch seqv.Kind() {
+ case reflect.Array, reflect.Slice, reflect.String:
+ // okay
+ default:
+ return nil, errors.New("can't iterate over " + reflect.ValueOf(seq).Type().String())
+ }
+
+ shuffled := reflect.MakeSlice(reflect.TypeOf(seq), seqv.Len(), seqv.Len())
+
+ randomIndices := rand.Perm(seqv.Len())
+
+ for index, value := range randomIndices {
+ shuffled.Index(value).Set(seqv.Index(index))
+ }
+
+ return shuffled.Interface(), nil
+}
+
// Slice returns a slice of all passed arguments.
// The zero-argument case is returned directly as the (empty) []any so that
// collections.Slice's element-type detection is not invoked on nothing.
func (ns *Namespace) Slice(args ...any) any {
	if len(args) == 0 {
		return args
	}

	return collections.Slice(args...)
}
+
// intersector accumulates the result of an intersection/union. r holds the
// result slice being built; seen de-duplicates elements as they are appended.
type intersector struct {
	r reflect.Value
	seen map[any]bool
}
+
+func (i *intersector) appendIfNotSeen(v reflect.Value) {
+ vi := v.Interface()
+ if !i.seen[vi] {
+ i.r = reflect.Append(i.r, v)
+ i.seen[vi] = true
+ }
+}
+
// handleValuePair compares one candidate pair and appends l1vv to the result
// when the two are considered equal: strings are compared after converting
// l2vv to a string, numbers after converting both to float64, pointers and
// structs by interface equality. An interface-wrapped l1vv is unwrapped and
// retried recursively.
func (i *intersector) handleValuePair(l1vv, l2vv reflect.Value) {
	switch kind := l1vv.Kind(); {
	case kind == reflect.String:
		l2t, err := toString(l2vv)
		if err == nil && l1vv.String() == l2t {
			i.appendIfNotSeen(l1vv)
		}
	case isNumber(kind):
		f1, err1 := numberToFloat(l1vv)
		f2, err2 := numberToFloat(l2vv)
		if err1 == nil && err2 == nil && f1 == f2 {
			i.appendIfNotSeen(l1vv)
		}
	case kind == reflect.Ptr, kind == reflect.Struct:
		if l1vv.Interface() == l2vv.Interface() {
			i.appendIfNotSeen(l1vv)
		}
	case kind == reflect.Interface:
		i.handleValuePair(reflect.ValueOf(l1vv.Interface()), l2vv)
	}
}
+
// Union returns the union of the given sets, l1 and l2. l1 and
// l2 must be of the same type and may be either arrays or slices.
// If l1 and l2 aren't of the same type then l1 will be returned.
// If either l1 or l2 is nil then the non-nil list will be returned.
func (ns *Namespace) Union(l1, l2 any) (any, error) {
	if l1 == nil && l2 == nil {
		return []any{}, nil
	} else if l1 == nil && l2 != nil {
		return l2, nil
	} else if l1 != nil && l2 == nil {
		return l1, nil
	}

	l1v := reflect.ValueOf(l1)
	l2v := reflect.ValueOf(l2)

	var ins *intersector

	switch l1v.Kind() {
	case reflect.Array, reflect.Slice:
		switch l2v.Kind() {
		case reflect.Array, reflect.Slice:
			// The result has l1's element type; seen de-duplicates values.
			ins = &intersector{r: reflect.MakeSlice(l1v.Type(), 0, 0), seen: make(map[any]bool)}

			// Mismatched concrete element types with no interface element on
			// either side cannot be merged; return the empty result.
			if l1v.Type() != l2v.Type() &&
				l1v.Type().Elem().Kind() != reflect.Interface &&
				l2v.Type().Elem().Kind() != reflect.Interface {
				return ins.r.Interface(), nil
			}

			var (
				l1vv reflect.Value
				isNil bool
			)

			// First add everything from l1, skipping nil elements.
			for i := 0; i < l1v.Len(); i++ {
				l1vv, isNil = indirectInterface(l1v.Index(i))

				if !l1vv.Type().Comparable() {
					return []any{}, errors.New("union does not support slices or arrays of uncomparable types")
				}

				if !isNil {
					ins.appendIfNotSeen(l1vv)
				}
			}

			if !l1vv.IsValid() {
				// The first slice may be empty. Pick the first value of the second
				// to use as a prototype.
				if l2v.Len() > 0 {
					l1vv = l2v.Index(0)
				}
			}

			// Then add l2's elements, converting each to the prototype's
			// kind where needed so de-duplication sees comparable values.
			for j := 0; j < l2v.Len(); j++ {
				l2vv := l2v.Index(j)

				switch kind := l1vv.Kind(); {
				case kind == reflect.String:
					l2t, err := toString(l2vv)
					if err == nil {
						ins.appendIfNotSeen(reflect.ValueOf(l2t))
					}
				case isNumber(kind):
					var err error
					l2vv, err = convertNumber(l2vv, kind)
					if err == nil {
						ins.appendIfNotSeen(l2vv)
					}
				case kind == reflect.Interface, kind == reflect.Struct, kind == reflect.Ptr:
					ins.appendIfNotSeen(l2vv)

				}
			}

			return ins.r.Interface(), nil
		default:
			return nil, errors.New("can't iterate over " + reflect.ValueOf(l2).Type().String())
		}
	default:
		return nil, errors.New("can't iterate over " + reflect.ValueOf(l1).Type().String())
	}
}
+
+// Uniq takes in a slice or array and returns a slice with subsequent
+// duplicate elements removed.
+func (ns *Namespace) Uniq(seq any) (any, error) {
+ if seq == nil {
+ return make([]any, 0), nil
+ }
+
+ v := reflect.ValueOf(seq)
+ var slice reflect.Value
+
+ switch v.Kind() {
+ case reflect.Slice:
+ slice = reflect.MakeSlice(v.Type(), 0, 0)
+
+ case reflect.Array:
+ slice = reflect.MakeSlice(reflect.SliceOf(v.Type().Elem()), 0, 0)
+ default:
+ return nil, fmt.Errorf("type %T not supported", seq)
+ }
+
+ seen := make(map[any]bool)
+
+ for i := 0; i < v.Len(); i++ {
+ ev, _ := indirectInterface(v.Index(i))
+
+ key := normalize(ev)
+
+ if _, found := seen[key]; !found {
+ slice = reflect.Append(slice, ev)
+ seen[key] = true
+ }
+ }
+
+ return slice.Interface(), nil
+}
+
// KeyVals creates a key and values wrapper.
// The error return is always nil; it exists to satisfy the template-function
// calling convention.
func (ns *Namespace) KeyVals(key any, vals ...any) (types.KeyValues, error) {
	return types.KeyValues{Key: key, Values: vals}, nil
}
+
// NewScratch creates a new Scratch which can be used to store values in a
// thread safe way.
func (ns *Namespace) NewScratch() *maps.Scratch {
	return maps.NewScratch()
}
diff --git a/tpl/collections/collections_test.go b/tpl/collections/collections_test.go
new file mode 100644
index 000000000..fd78da6d4
--- /dev/null
+++ b/tpl/collections/collections_test.go
@@ -0,0 +1,992 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "math/rand"
+ "reflect"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/spf13/afero"
+)
+
// tstNoStringer is a value with no String method, used to exercise
// conversion-failure paths in the tests below.
type tstNoStringer struct{}
+
// TestAfter exercises the After template func over a table of index/sequence
// pairs. An expect value of false marks a row that must return an error.
func TestAfter(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		index any
		seq any
		expect any
	}{
		{int(2), []string{"a", "b", "c", "d"}, []string{"c", "d"}},
		{int32(3), []string{"a", "b"}, []string{}},
		{int64(2), []int{100, 200, 300}, []int{300}},
		{100, []int{100, 200}, []int{}},
		{"1", []int{100, 200, 300}, []int{200, 300}},
		{0, []int{100, 200, 300, 400, 500}, []int{100, 200, 300, 400, 500}},
		{0, []string{"a", "b", "c", "d", "e"}, []string{"a", "b", "c", "d", "e"}},
		{int64(-1), []int{100, 200, 300}, false},
		{"noint", []int{100, 200, 300}, false},
		{2, []string{}, []string{}},
		{1, nil, false},
		{nil, []int{100}, false},
		{1, t, false},
		{1, (*string)(nil), false},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.After(test.index, test.seq)

		// Rows with expect == false are the error cases.
		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)
		c.Assert(result, qt.DeepEquals, test.expect, errMsg)
	}
}
+
// tstGrouper implements collections.Grouper with a value receiver; its Group
// renders the key and the item count as "key(n)".
type tstGrouper struct {
}

// tstGroupers is a named slice type whose elements implement Grouper.
type tstGroupers []*tstGrouper

func (g tstGrouper) Group(key any, items any) (any, error) {
	ilen := reflect.ValueOf(items).Len()
	return fmt.Sprintf("%v(%d)", key, ilen), nil
}

// tstGrouper2 implements collections.Grouper with a pointer receiver only.
type tstGrouper2 struct {
}

func (g *tstGrouper2) Group(key any, items any) (any, error) {
	ilen := reflect.ValueOf(items).Len()
	return fmt.Sprintf("%v(%d)", key, ilen), nil
}
+
// TestGroup verifies Group dispatches to a Grouper implemented by the items
// themselves or by a prototype element, and errors on unsupported inputs and
// nil keys.
func TestGroup(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		key any
		items any
		expect any
	}{
		{"a", []*tstGrouper{{}, {}}, "a(2)"},
		{"b", tstGroupers{&tstGrouper{}, &tstGrouper{}}, "b(2)"},
		{"a", []tstGrouper{{}, {}}, "a(2)"},
		{"a", []*tstGrouper2{{}, {}}, "a(2)"},
		{"b", []tstGrouper2{{}, {}}, "b(2)"},
		{"a", []*tstGrouper{}, "a(0)"},
		{"a", []string{"a", "b"}, false},
		{"a", "asdf", false},
		{"a", nil, false},
		{nil, []*tstGrouper{{}, {}}, false},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.Group(test.key, test.items)

		// Rows with expect == false are the error cases.
		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)
		c.Assert(result, qt.Equals, test.expect, errMsg)
	}
}
+
// TestDelimit covers slices and maps (whose keys are sorted before joining),
// with and without the optional last-delimiter argument.
func TestDelimit(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New(&deps.Deps{
		Language: langs.NewDefaultLanguage(config.New()),
	})

	for i, test := range []struct {
		seq any
		delimiter any
		last any
		expect template.HTML
	}{
		{[]string{"class1", "class2", "class3"}, " ", nil, "class1 class2 class3"},
		{[]int{1, 2, 3, 4, 5}, ",", nil, "1,2,3,4,5"},
		{[]int{1, 2, 3, 4, 5}, ", ", nil, "1, 2, 3, 4, 5"},
		{[]string{"class1", "class2", "class3"}, " ", " and ", "class1 class2 and class3"},
		{[]int{1, 2, 3, 4, 5}, ",", ",", "1,2,3,4,5"},
		{[]int{1, 2, 3, 4, 5}, ", ", ", and ", "1, 2, 3, 4, and 5"},
		// test maps with and without sorting required
		{map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, "--", nil, "10--20--30--40--50"},
		{map[string]int{"3": 10, "2": 20, "1": 30, "4": 40, "5": 50}, "--", nil, "30--20--10--40--50"},
		{map[string]string{"1": "10", "2": "20", "3": "30", "4": "40", "5": "50"}, "--", nil, "10--20--30--40--50"},
		{map[string]string{"3": "10", "2": "20", "1": "30", "4": "40", "5": "50"}, "--", nil, "30--20--10--40--50"},
		{map[string]string{"one": "10", "two": "20", "three": "30", "four": "40", "five": "50"}, "--", nil, "50--40--10--30--20"},
		{map[int]string{1: "10", 2: "20", 3: "30", 4: "40", 5: "50"}, "--", nil, "10--20--30--40--50"},
		{map[int]string{3: "10", 2: "20", 1: "30", 4: "40", 5: "50"}, "--", nil, "30--20--10--40--50"},
		{map[float64]string{3.3: "10", 2.3: "20", 1.3: "30", 4.3: "40", 5.3: "50"}, "--", nil, "30--20--10--40--50"},
		// test maps with a last delimiter
		{map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, "--", "--and--", "10--20--30--40--and--50"},
		{map[string]int{"3": 10, "2": 20, "1": 30, "4": 40, "5": 50}, "--", "--and--", "30--20--10--40--and--50"},
		{map[string]string{"1": "10", "2": "20", "3": "30", "4": "40", "5": "50"}, "--", "--and--", "10--20--30--40--and--50"},
		{map[string]string{"3": "10", "2": "20", "1": "30", "4": "40", "5": "50"}, "--", "--and--", "30--20--10--40--and--50"},
		{map[string]string{"one": "10", "two": "20", "three": "30", "four": "40", "five": "50"}, "--", "--and--", "50--40--10--30--and--20"},
		{map[int]string{1: "10", 2: "20", 3: "30", 4: "40", 5: "50"}, "--", "--and--", "10--20--30--40--and--50"},
		{map[int]string{3: "10", 2: "20", 1: "30", 4: "40", 5: "50"}, "--", "--and--", "30--20--10--40--and--50"},
		{map[float64]string{3.5: "10", 2.5: "20", 1.5: "30", 4.5: "40", 5.5: "50"}, "--", "--and--", "30--20--10--40--and--50"},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		var result template.HTML
		var err error

		// A nil last means the two-argument form is exercised.
		if test.last == nil {
			result, err = ns.Delimit(test.seq, test.delimiter)
		} else {
			result, err = ns.Delimit(test.seq, test.delimiter, test.last)
		}

		c.Assert(err, qt.IsNil, errMsg)
		c.Assert(result, qt.Equals, test.expect, errMsg)
	}
}
+
// TestDictionary verifies Dictionary builds (possibly nested) maps from
// alternating key/value arguments and rejects non-string keys and odd-length
// argument lists.
func TestDictionary(t *testing.T) {
	c := qt.New(t)

	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		values []any
		expect any
	}{
		{[]any{"a", "b"}, map[string]any{"a": "b"}},
		{[]any{[]string{"a", "b"}, "c"}, map[string]any{"a": map[string]any{"b": "c"}}},
		{
			[]any{[]string{"a", "b"}, "c", []string{"a", "b2"}, "c2", "b", "c"},
			map[string]any{"a": map[string]any{"b": "c", "b2": "c2"}, "b": "c"},
		},
		{[]any{"a", 12, "b", []int{4}}, map[string]any{"a": 12, "b": []int{4}}},
		// errors
		{[]any{5, "b"}, false},
		{[]any{"a", "b", "c"}, false},
	} {
		// Rebind loop variables for the parallel subtests below.
		i := i
		test := test
		c.Run(fmt.Sprint(i), func(c *qt.C) {
			c.Parallel()
			errMsg := qt.Commentf("[%d] %v", i, test.values)

			result, err := ns.Dictionary(test.values...)

			if b, ok := test.expect.(bool); ok && !b {
				c.Assert(err, qt.Not(qt.IsNil), errMsg)
				return
			}

			c.Assert(err, qt.IsNil, errMsg)
			c.Assert(result, qt.DeepEquals, test.expect, qt.Commentf(fmt.Sprint(result)))
		})
	}
}
+
+func TestReverse(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ s := []string{"a", "b", "c"}
+ reversed, err := ns.Reverse(s)
+ c.Assert(err, qt.IsNil)
+ c.Assert(reversed, qt.DeepEquals, []string{"c", "b", "a"}, qt.Commentf(fmt.Sprint(reversed)))
+ c.Assert(s, qt.DeepEquals, []string{"a", "b", "c"})
+
+ reversed, err = ns.Reverse(nil)
+ c.Assert(err, qt.IsNil)
+ c.Assert(reversed, qt.IsNil)
+ _, err = ns.Reverse(43)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
// TestEchoParam verifies EchoParam over slices and maps: numeric values are
// widened (int64/uint64/float64), missing or unprintable entries yield "".
func TestEchoParam(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		a any
		key any
		expect any
	}{
		{[]int{1, 2, 3}, 1, int64(2)},
		{[]uint{1, 2, 3}, 1, uint64(2)},
		{[]float64{1.1, 2.2, 3.3}, 1, float64(2.2)},
		{[]string{"foo", "bar", "baz"}, 1, "bar"},
		{[]TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}}, 1, ""},
		{map[string]int{"foo": 1, "bar": 2, "baz": 3}, "bar", int64(2)},
		{map[string]uint{"foo": 1, "bar": 2, "baz": 3}, "bar", uint64(2)},
		{map[string]float64{"foo": 1.1, "bar": 2.2, "baz": 3.3}, "bar", float64(2.2)},
		{map[string]string{"foo": "FOO", "bar": "BAR", "baz": "BAZ"}, "bar", "BAR"},
		{map[string]TstX{"foo": {A: "a", B: "b"}, "bar": {A: "c", B: "d"}, "baz": {A: "e", B: "f"}}, "bar", ""},
		{map[string]any{"foo": nil}, "foo", ""},
		{(*[]string)(nil), "bar", ""},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		result := ns.EchoParam(test.a, test.key)

		c.Assert(result, qt.Equals, test.expect, errMsg)
	}
}
+
+func TestFirst(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ for i, test := range []struct {
+ limit any
+ seq any
+ expect any
+ }{
+ {int(2), []string{"a", "b", "c"}, []string{"a", "b"}},
+ {int32(3), []string{"a", "b"}, []string{"a", "b"}},
+ {int64(2), []int{100, 200, 300}, []int{100, 200}},
+ {100, []int{100, 200}, []int{100, 200}},
+ {"1", []int{100, 200, 300}, []int{100}},
+ {0, []string{"h", "u", "g", "o"}, []string{}},
+ {int64(-1), []int{100, 200, 300}, false},
+ {"noint", []int{100, 200, 300}, false},
+ {1, nil, false},
+ {nil, []int{100}, false},
+ {1, t, false},
+ {1, (*string)(nil), false},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test)
+
+ result, err := ns.First(test.limit, test.seq)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.DeepEquals, test.expect, errMsg)
+ }
+}
+
// TestIn covers membership in slices of strings/numbers/pointers/structs,
// the substring fallback for plain and template.HTML strings, nil handling,
// and uncomparable element types.
func TestIn(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		l1 any
		l2 any
		expect bool
	}{
		{[]string{"a", "b", "c"}, "b", true},
		{[]any{"a", "b", "c"}, "b", true},
		{[]any{"a", "b", "c"}, "d", false},
		{[]string{"a", "b", "c"}, "d", false},
		{[]string{"a", "12", "c"}, 12, false},
		{[]string{"a", "b", "c"}, nil, false},
		{[]int{1, 2, 4}, 2, true},
		{[]any{1, 2, 4}, 2, true},
		{[]any{1, 2, 4}, nil, false},
		{[]any{nil}, nil, false},
		{[]int{1, 2, 4}, 3, false},
		{[]float64{1.23, 2.45, 4.67}, 1.23, true},
		{[]float64{1.234567, 2.45, 4.67}, 1.234568, false},
		{[]float64{1, 2, 3}, 1, true},
		{[]float32{1, 2, 3}, 1, true},
		{"this substring should be found", "substring", true},
		{"this substring should not be found", "subseastring", false},
		{nil, "foo", false},
		// Pointers
		{pagesPtr{p1, p2, p3, p2}, p2, true},
		{pagesPtr{p1, p2, p3, p2}, p4, false},
		// Structs
		{pagesVals{p3v, p2v, p3v, p2v}, p2v, true},
		{pagesVals{p3v, p2v, p3v, p2v}, p4v, false},
		// template.HTML
		{template.HTML("this substring should be found"), "substring", true},
		{template.HTML("this substring should not be found"), "subseastring", false},
		// Uncomparable, use hashstructure
		{[]string{"a", "b"}, []string{"a", "b"}, false},
		{[][]string{{"a", "b"}}, []string{"a", "b"}, true},
	} {

		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.In(test.l1, test.l2)
		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect, errMsg)
	}
}
+
// testPage is a minimal Stringer used to exercise collection funcs with
// pointer and value element types.
type testPage struct {
	Title string
}

func (p testPage) String() string {
	return "p-" + p.Title
}

// Named slice types for typed (non-interface) collections of pages.
type (
	pagesPtr []*testPage
	pagesVals []testPage
)

// Shared page fixtures: p1..p4 are pointers, p1v..p4v are values.
var (
	p1 = &testPage{"A"}
	p2 = &testPage{"B"}
	p3 = &testPage{"C"}
	p4 = &testPage{"D"}

	p1v = testPage{"A"}
	p2v = testPage{"B"}
	p3v = testPage{"C"}
	p4v = testPage{"D"}
)
+
// TestIntersect covers typed/typed, interface/typed and typed/interface
// slice intersections, struct and pointer elements, mismatched element
// types (empty result) and uncomparable element types (error).
func TestIntersect(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		l1, l2 any
		expect any
	}{
		{[]string{"a", "b", "c", "c"}, []string{"a", "b", "b"}, []string{"a", "b"}},
		{[]string{"a", "b"}, []string{"a", "b", "c"}, []string{"a", "b"}},
		{[]string{"a", "b", "c"}, []string{"d", "e"}, []string{}},
		{[]string{}, []string{}, []string{}},
		{[]string{"a", "b"}, nil, []any{}},
		{nil, []string{"a", "b"}, []any{}},
		{nil, nil, []any{}},
		{[]string{"1", "2"}, []int{1, 2}, []string{}},
		{[]int{1, 2}, []string{"1", "2"}, []int{}},
		{[]int{1, 2, 4}, []int{2, 4}, []int{2, 4}},
		{[]int{2, 4}, []int{1, 2, 4}, []int{2, 4}},
		{[]int{1, 2, 4}, []int{3, 6}, []int{}},
		{[]float64{2.2, 4.4}, []float64{1.1, 2.2, 4.4}, []float64{2.2, 4.4}},

		// []interface{} ∩ []interface{}
		{[]any{"a", "b", "c"}, []any{"a", "b", "b"}, []any{"a", "b"}},
		{[]any{1, 2, 3}, []any{1, 2, 2}, []any{1, 2}},
		{[]any{int8(1), int8(2), int8(3)}, []any{int8(1), int8(2), int8(2)}, []any{int8(1), int8(2)}},
		{[]any{int16(1), int16(2), int16(3)}, []any{int16(1), int16(2), int16(2)}, []any{int16(1), int16(2)}},
		{[]any{int32(1), int32(2), int32(3)}, []any{int32(1), int32(2), int32(2)}, []any{int32(1), int32(2)}},
		{[]any{int64(1), int64(2), int64(3)}, []any{int64(1), int64(2), int64(2)}, []any{int64(1), int64(2)}},
		{[]any{float32(1), float32(2), float32(3)}, []any{float32(1), float32(2), float32(2)}, []any{float32(1), float32(2)}},
		{[]any{float64(1), float64(2), float64(3)}, []any{float64(1), float64(2), float64(2)}, []any{float64(1), float64(2)}},

		// []interface{} ∩ []T
		{[]any{"a", "b", "c"}, []string{"a", "b", "b"}, []any{"a", "b"}},
		{[]any{1, 2, 3}, []int{1, 2, 2}, []any{1, 2}},
		{[]any{int8(1), int8(2), int8(3)}, []int8{1, 2, 2}, []any{int8(1), int8(2)}},
		{[]any{int16(1), int16(2), int16(3)}, []int16{1, 2, 2}, []any{int16(1), int16(2)}},
		{[]any{int32(1), int32(2), int32(3)}, []int32{1, 2, 2}, []any{int32(1), int32(2)}},
		{[]any{int64(1), int64(2), int64(3)}, []int64{1, 2, 2}, []any{int64(1), int64(2)}},
		{[]any{uint(1), uint(2), uint(3)}, []uint{1, 2, 2}, []any{uint(1), uint(2)}},
		{[]any{float32(1), float32(2), float32(3)}, []float32{1, 2, 2}, []any{float32(1), float32(2)}},
		{[]any{float64(1), float64(2), float64(3)}, []float64{1, 2, 2}, []any{float64(1), float64(2)}},

		// []T ∩ []interface{}
		{[]string{"a", "b", "c"}, []any{"a", "b", "b"}, []string{"a", "b"}},
		{[]int{1, 2, 3}, []any{1, 2, 2}, []int{1, 2}},
		{[]int8{1, 2, 3}, []any{int8(1), int8(2), int8(2)}, []int8{1, 2}},
		{[]int16{1, 2, 3}, []any{int16(1), int16(2), int16(2)}, []int16{1, 2}},
		{[]int32{1, 2, 3}, []any{int32(1), int32(2), int32(2)}, []int32{1, 2}},
		{[]int64{1, 2, 3}, []any{int64(1), int64(2), int64(2)}, []int64{1, 2}},
		{[]float32{1, 2, 3}, []any{float32(1), float32(2), float32(2)}, []float32{1, 2}},
		{[]float64{1, 2, 3}, []any{float64(1), float64(2), float64(2)}, []float64{1, 2}},

		// Structs
		{pagesPtr{p1, p4, p2, p3}, pagesPtr{p4, p2, p2}, pagesPtr{p4, p2}},
		{pagesVals{p1v, p4v, p2v, p3v}, pagesVals{p1v, p3v, p3v}, pagesVals{p1v, p3v}},
		{[]any{p1, p4, p2, p3}, []any{p4, p2, p2}, []any{p4, p2}},
		{[]any{p1v, p4v, p2v, p3v}, []any{p1v, p3v, p3v}, []any{p1v, p3v}},
		{pagesPtr{p1, p4, p2, p3}, pagesPtr{}, pagesPtr{}},
		{pagesVals{}, pagesVals{p1v, p3v, p3v}, pagesVals{}},
		{[]any{p1, p4, p2, p3}, []any{}, []any{}},
		{[]any{}, []any{p1v, p3v, p3v}, []any{}},

		// errors
		{"not array or slice", []string{"a"}, false},
		{[]string{"a"}, "not array or slice", false},

		// uncomparable types - #3820
		{[]map[int]int{{1: 1}, {2: 2}}, []map[int]int{{2: 2}, {3: 3}}, false},
		{[][]int{{1, 1}, {1, 2}}, [][]int{{1, 2}, {1, 2}, {1, 3}}, false},
		{[]int{1, 1}, [][]int{{1, 2}, {1, 2}, {1, 3}}, false},
	} {

		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.Intersect(test.l1, test.l2)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)
		// reflect.DeepEqual is used directly because expect carries typed
		// slices whose element types must match exactly.
		if !reflect.DeepEqual(result, test.expect) {
			t.Fatalf("[%d] Got\n%v expected\n%v", i, result, test.expect)
		}
	}
}
+
+func TestIsSet(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := newTestNs()
+
+ for i, test := range []struct {
+ a any
+ key any
+ expect bool
+ isErr bool
+ }{
+ {[]any{1, 2, 3, 5}, 2, true, false},
+ {[]any{1, 2, 3, 5}, "2", true, false},
+ {[]any{1, 2, 3, 5}, 2.0, true, false},
+
+ {[]any{1, 2, 3, 5}, 22, false, false},
+
+ {map[string]any{"a": 1, "b": 2}, "b", true, false},
+ {map[string]any{"a": 1, "b": 2}, "bc", false, false},
+
+ {time.Now(), "Day", false, false},
+ {nil, "nil", false, false},
+ {[]any{1, 2, 3, 5}, TstX{}, false, true},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test)
+
+ result, err := ns.IsSet(test.a, test.key)
+ if test.isErr {
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, qt.Equals, test.expect, errMsg)
+ }
+}
+
// TestLast exercises the Last template func over a table of limit/sequence
// pairs. An expect value of false marks a row that must return an error.
func TestLast(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		limit any
		seq any
		expect any
	}{
		{int(2), []string{"a", "b", "c"}, []string{"b", "c"}},
		{int32(3), []string{"a", "b"}, []string{"a", "b"}},
		{int64(2), []int{100, 200, 300}, []int{200, 300}},
		{100, []int{100, 200}, []int{100, 200}},
		{"1", []int{100, 200, 300}, []int{300}},
		{"0", []int{100, 200, 300}, []int{}},
		{"0", []string{"a", "b", "c"}, []string{}},
		// errors
		{int64(-1), []int{100, 200, 300}, false},
		{"noint", []int{100, 200, 300}, false},
		{1, nil, false},
		{nil, []int{100}, false},
		{1, t, false},
		{1, (*string)(nil), false},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.Last(test.limit, test.seq)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)
		c.Assert(result, qt.DeepEquals, test.expect, errMsg)
	}
}
+
// TestQuerify covers variadic key/value pairs, a single []string or []any
// argument as the pair list, and the error cases (non-string keys, odd
// argument counts).
func TestQuerify(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		params []any
		expect any
	}{
		{[]any{"a", "b"}, "a=b"},
		{[]any{"a", "b", "c", "d", "f", " &"}, `a=b&c=d&f=+%26`},
		{[]any{[]string{"a", "b"}}, "a=b"},
		{[]any{[]string{"a", "b", "c", "d", "f", " &"}}, `a=b&c=d&f=+%26`},
		{[]any{[]any{"x", "y"}}, `x=y`},
		{[]any{[]any{"x", 5}}, `x=5`},
		// errors
		{[]any{5, "b"}, false},
		{[]any{"a", "b", "c"}, false},
		{[]any{[]string{"a", "b", "c"}}, false},
		{[]any{[]string{"a", "b"}, "c"}, false},
		{[]any{[]any{"c", "d", "e"}}, false},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test.params)

		result, err := ns.Querify(test.params...)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)
		c.Assert(result, qt.Equals, test.expect, errMsg)
	}
}
+
+func BenchmarkQuerify(b *testing.B) {
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+ params := []any{"a", "b", "c", "d", "f", " &"}
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ _, err := ns.Querify(params...)
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+func BenchmarkQuerifySlice(b *testing.B) {
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+ params := []string{"a", "b", "c", "d", "f", " &"}
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ _, err := ns.Querify(params)
+ if err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
// TestSeq covers the one-, two- and three-argument forms of Seq, plus error
// cases: zero/mis-signed increments, oversized results and bad argument
// counts or types.
func TestSeq(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		args []any
		expect any
	}{
		{[]any{-2, 5}, []int{-2, -1, 0, 1, 2, 3, 4, 5}},
		{[]any{1, 2, 4}, []int{1, 3}},
		{[]any{1}, []int{1}},
		{[]any{3}, []int{1, 2, 3}},
		{[]any{3.2}, []int{1, 2, 3}},
		{[]any{0}, []int{}},
		{[]any{-1}, []int{-1}},
		{[]any{-3}, []int{-1, -2, -3}},
		{[]any{3, -2}, []int{3, 2, 1, 0, -1, -2}},
		{[]any{6, -2, 2}, []int{6, 4, 2}},
		// errors
		{[]any{1, 0, 2}, false},
		{[]any{1, -1, 2}, false},
		{[]any{2, 1, 1}, false},
		{[]any{2, 1, 1, 1}, false},
		{[]any{2001}, false},
		{[]any{}, false},
		{[]any{0, -1000000}, false},
		{[]any{tstNoStringer{}}, false},
		{nil, false},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.Seq(test.args...)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)
		c.Assert(result, qt.DeepEquals, test.expect, errMsg)
	}
}
+
// TestShuffle only checks that shuffling preserves the element count and that
// invalid inputs error; element order is asserted in TestShuffleRandomising.
func TestShuffle(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})

	for i, test := range []struct {
		seq any
		success bool
	}{
		{[]string{"a", "b", "c", "d"}, true},
		{[]int{100, 200, 300}, true},
		{[]int{100, 200, 300}, true},
		{[]int{100, 200}, true},
		{[]string{"a", "b"}, true},
		{[]int{100, 200, 300}, true},
		{[]int{100, 200, 300}, true},
		{[]int{100}, true},
		// errors
		{nil, false},
		{t, false},
		{(*string)(nil), false},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test)

		result, err := ns.Shuffle(test.seq)

		if !test.success {
			c.Assert(err, qt.Not(qt.IsNil), errMsg)
			continue
		}

		c.Assert(err, qt.IsNil, errMsg)

		resultv := reflect.ValueOf(result)
		seqv := reflect.ValueOf(test.seq)

		c.Assert(seqv.Len(), qt.Equals, resultv.Len(), errMsg)
	}
}
+
+func TestShuffleRandomising(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ // Note that this test can fail with false negative result if the shuffle
+ // of the sequence happens to be the same as the original sequence. However
+ // the probability of the event is 10^-158 which is negligible.
+ seqLen := 100
+ rand.Seed(time.Now().UTC().UnixNano())
+
+ for _, test := range []struct {
+ seq []int
+ }{
+ {rand.Perm(seqLen)},
+ } {
+ result, err := ns.Shuffle(test.seq)
+ resultv := reflect.ValueOf(result)
+
+ c.Assert(err, qt.IsNil)
+
+ allSame := true
+ for i, v := range test.seq {
+ allSame = allSame && (resultv.Index(i).Interface() == v)
+ }
+
+ c.Assert(allSame, qt.Equals, false)
+ }
+}
+
+// Also see tests in commons/collection.
+func TestSlice(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ for i, test := range []struct {
+ args []any
+ expected any
+ }{
+ {[]any{"a", "b"}, []string{"a", "b"}},
+ {[]any{}, []any{}},
+ {[]any{nil}, []any{nil}},
+ {[]any{5, "b"}, []any{5, "b"}},
+ {[]any{tstNoStringer{}}, []tstNoStringer{{}}},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test.args)
+
+ result := ns.Slice(test.args...)
+
+ c.Assert(result, qt.DeepEquals, test.expected, errMsg)
+ }
+}
+
+func TestUnion(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ for i, test := range []struct {
+ l1 any
+ l2 any
+ expect any
+ isErr bool
+ }{
+ {nil, nil, []any{}, false},
+ {nil, []string{"a", "b"}, []string{"a", "b"}, false},
+ {[]string{"a", "b"}, nil, []string{"a", "b"}, false},
+
+ // []A ∪ []B
+ {[]string{"1", "2"}, []int{3}, []string{}, false},
+ {[]int{1, 2}, []string{"1", "2"}, []int{}, false},
+
+ // []T ∪ []T
+ {[]string{"a", "b", "c", "c"}, []string{"a", "b", "b"}, []string{"a", "b", "c"}, false},
+ {[]string{"a", "b"}, []string{"a", "b", "c"}, []string{"a", "b", "c"}, false},
+ {[]string{"a", "b", "c"}, []string{"d", "e"}, []string{"a", "b", "c", "d", "e"}, false},
+ {[]string{}, []string{}, []string{}, false},
+ {[]int{1, 2, 3}, []int{3, 4, 5}, []int{1, 2, 3, 4, 5}, false},
+ {[]int{1, 2, 3}, []int{1, 2, 3}, []int{1, 2, 3}, false},
+ {[]int{1, 2, 4}, []int{2, 4}, []int{1, 2, 4}, false},
+ {[]int{2, 4}, []int{1, 2, 4}, []int{2, 4, 1}, false},
+ {[]int{1, 2, 4}, []int{3, 6}, []int{1, 2, 4, 3, 6}, false},
+ {[]float64{2.2, 4.4}, []float64{1.1, 2.2, 4.4}, []float64{2.2, 4.4, 1.1}, false},
+ {[]any{"a", "b", "c", "c"}, []any{"a", "b", "b"}, []any{"a", "b", "c"}, false},
+
+ // []T ∪ []interface{}
+ {[]string{"1", "2"}, []any{"9"}, []string{"1", "2", "9"}, false},
+ {[]int{2, 4}, []any{1, 2, 4}, []int{2, 4, 1}, false},
+ {[]int8{2, 4}, []any{int8(1), int8(2), int8(4)}, []int8{2, 4, 1}, false},
+ {[]int8{2, 4}, []any{1, 2, 4}, []int8{2, 4, 1}, false},
+ {[]int16{2, 4}, []any{1, 2, 4}, []int16{2, 4, 1}, false},
+ {[]int32{2, 4}, []any{1, 2, 4}, []int32{2, 4, 1}, false},
+ {[]int64{2, 4}, []any{1, 2, 4}, []int64{2, 4, 1}, false},
+
+ {[]float64{2.2, 4.4}, []any{1.1, 2.2, 4.4}, []float64{2.2, 4.4, 1.1}, false},
+ {[]float32{2.2, 4.4}, []any{1.1, 2.2, 4.4}, []float32{2.2, 4.4, 1.1}, false},
+
+ // []interface{} ∪ []T
+ {[]any{"a", "b", "c", "c"}, []string{"a", "b", "d"}, []any{"a", "b", "c", "d"}, false},
+ {[]any{}, []string{}, []any{}, false},
+ {[]any{1, 2}, []int{2, 3}, []any{1, 2, 3}, false},
+ {[]any{1, 2}, []int8{2, 3}, []any{1, 2, 3}, false}, // 28
+ {[]any{uint(1), uint(2)}, []uint{2, 3}, []any{uint(1), uint(2), uint(3)}, false},
+ {[]any{1.1, 2.2}, []float64{2.2, 3.3}, []any{1.1, 2.2, 3.3}, false},
+
+ // Structs
+ {pagesPtr{p1, p4}, pagesPtr{p4, p2, p2}, pagesPtr{p1, p4, p2}, false},
+ {pagesVals{p1v}, pagesVals{p3v, p3v}, pagesVals{p1v, p3v}, false},
+ {[]any{p1, p4}, []any{p4, p2, p2}, []any{p1, p4, p2}, false},
+ {[]any{p1v}, []any{p3v, p3v}, []any{p1v, p3v}, false},
+ // #3686
+ {[]any{p1v}, []any{}, []any{p1v}, false},
+ {[]any{}, []any{p1v}, []any{p1v}, false},
+ {pagesPtr{p1}, pagesPtr{}, pagesPtr{p1}, false},
+ {pagesVals{p1v}, pagesVals{}, pagesVals{p1v}, false},
+ {pagesPtr{}, pagesPtr{p1}, pagesPtr{p1}, false},
+ {pagesVals{}, pagesVals{p1v}, pagesVals{p1v}, false},
+
+ // errors
+ {"not array or slice", []string{"a"}, false, true},
+ {[]string{"a"}, "not array or slice", false, true},
+
+ // uncomparable types - #3820
+ {[]map[string]int{{"K1": 1}}, []map[string]int{{"K2": 2}, {"K2": 2}}, false, true},
+ {[][]int{{1, 1}, {1, 2}}, [][]int{{2, 1}, {2, 2}}, false, true},
+ } {
+
+ errMsg := qt.Commentf("[%d] %v", i, test)
+
+ result, err := ns.Union(test.l1, test.l2)
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+ if !reflect.DeepEqual(result, test.expect) {
+ t.Fatalf("[%d] Got\n%v expected\n%v", i, result, test.expect)
+ }
+ }
+}
+
+func TestUniq(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+ for i, test := range []struct {
+ l any
+ expect any
+ isErr bool
+ }{
+ {[]string{"a", "b", "c"}, []string{"a", "b", "c"}, false},
+ {[]string{"a", "b", "c", "c"}, []string{"a", "b", "c"}, false},
+ {[]string{"a", "b", "b", "c"}, []string{"a", "b", "c"}, false},
+ {[]string{"a", "b", "c", "b"}, []string{"a", "b", "c"}, false},
+ {[]int{1, 2, 3}, []int{1, 2, 3}, false},
+ {[]int{1, 2, 3, 3}, []int{1, 2, 3}, false},
+ {[]int{1, 2, 2, 3}, []int{1, 2, 3}, false},
+ {[]int{1, 2, 3, 2}, []int{1, 2, 3}, false},
+ {[4]int{1, 2, 3, 2}, []int{1, 2, 3}, false},
+ {nil, make([]any, 0), false},
+ // Pointers
+ {pagesPtr{p1, p2, p3, p2}, pagesPtr{p1, p2, p3}, false},
+ {pagesPtr{}, pagesPtr{}, false},
+ // Structs
+ {pagesVals{p3v, p2v, p3v, p2v}, pagesVals{p3v, p2v}, false},
+
+ // not Comparable(), use hashstructure
+ {[]map[string]int{
+ {"K1": 1}, {"K2": 2}, {"K1": 1}, {"K2": 1},
+ }, []map[string]int{
+ {"K1": 1}, {"K2": 2}, {"K2": 1},
+ }, false},
+
+ // should fail
+ {1, 1, true},
+ {"foo", "fo", true},
+ } {
+ errMsg := qt.Commentf("[%d] %v", i, test)
+
+ result, err := ns.Uniq(test.l)
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, qt.DeepEquals, test.expect, errMsg)
+ }
+}
+
+func (x *TstX) TstRp() string {
+ return "r" + x.A
+}
+
+func (x TstX) TstRv() string {
+ return "r" + x.B
+}
+
+func (x TstX) TstRv2() string {
+ return "r" + x.B
+}
+
+func (x TstX) unexportedMethod() string {
+ return x.unexported
+}
+
+func (x TstX) MethodWithArg(s string) string {
+ return s
+}
+
+func (x TstX) MethodReturnNothing() {}
+
+func (x TstX) MethodReturnErrorOnly() error {
+ return errors.New("some error occurred")
+}
+
+func (x TstX) MethodReturnTwoValues() (string, string) {
+ return "foo", "bar"
+}
+
+func (x TstX) MethodReturnValueWithError() (string, error) {
+ return "", errors.New("some error occurred")
+}
+
+func (x TstX) String() string {
+ return fmt.Sprintf("A: %s, B: %s", x.A, x.B)
+}
+
+type TstX struct {
+ A, B string
+ unexported string
+}
+
+type TstParams struct {
+ params maps.Params
+}
+
+func (x TstParams) Params() maps.Params {
+ return x.params
+}
+
+type TstXIHolder struct {
+ XI TstXI
+}
+
+// Partially implemented by the TstX struct.
+type TstXI interface {
+ TstRv2() string
+}
+
+func ToTstXIs(slice any) []TstXI {
+ s := reflect.ValueOf(slice)
+ if s.Kind() != reflect.Slice {
+ return nil
+ }
+ tis := make([]TstXI, s.Len())
+
+ for i := 0; i < s.Len(); i++ {
+ tsti, ok := s.Index(i).Interface().(TstXI)
+ if !ok {
+ return nil
+ }
+ tis[i] = tsti
+ }
+
+ return tis
+}
+
+func newDeps(cfg config.Provider) *deps.Deps {
+ l := langs.NewLanguage("en", cfg)
+ l.Set("i18nDir", "i18n")
+ cs, err := helpers.NewContentSpec(l, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
+ if err != nil {
+ panic(err)
+ }
+ return &deps.Deps{
+ Language: l,
+ Cfg: cfg,
+ Fs: hugofs.NewMem(l),
+ ContentSpec: cs,
+ Log: loggers.NewErrorLogger(),
+ }
+}
+
+func newTestNs() *Namespace {
+ return New(newDeps(config.NewWithTestDefaults()))
+}
diff --git a/tpl/collections/complement.go b/tpl/collections/complement.go
new file mode 100644
index 000000000..723f73b0a
--- /dev/null
+++ b/tpl/collections/complement.go
@@ -0,0 +1,55 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+)
+
+// Complement gives the elements in the last element of seqs that are not in
+// any of the others.
+// All elements of seqs must be slices or arrays of comparable types.
+//
+// The reasoning behind this rather clumsy API is so we can do this in the templates:
+// {{ $c := .Pages | complement $last4 }}
+func (ns *Namespace) Complement(seqs ...any) (any, error) {
+ if len(seqs) < 2 {
+ return nil, errors.New("complement needs at least two arguments")
+ }
+
+ universe := seqs[len(seqs)-1]
+ as := seqs[:len(seqs)-1]
+
+ aset, err := collectIdentities(as...)
+ if err != nil {
+ return nil, err
+ }
+
+ v := reflect.ValueOf(universe)
+ switch v.Kind() {
+ case reflect.Array, reflect.Slice:
+ sl := reflect.MakeSlice(v.Type(), 0, 0)
+ for i := 0; i < v.Len(); i++ {
+ ev, _ := indirectInterface(v.Index(i))
+ if _, found := aset[normalize(ev)]; !found {
+ sl = reflect.Append(sl, ev)
+ }
+ }
+ return sl.Interface(), nil
+ default:
+ return nil, fmt.Errorf("arguments to complement must be slices or arrays")
+ }
+}
diff --git a/tpl/collections/complement_test.go b/tpl/collections/complement_test.go
new file mode 100644
index 000000000..6c13ab5c4
--- /dev/null
+++ b/tpl/collections/complement_test.go
@@ -0,0 +1,99 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "reflect"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+
+ qt "github.com/frankban/quicktest"
+)
+
+type StructWithSlice struct {
+ A string
+ B []string
+}
+
+type StructWithSlicePointers []*StructWithSlice
+
+func TestComplement(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ s1 := []TstX{{A: "a"}, {A: "b"}, {A: "d"}, {A: "e"}}
+ s2 := []TstX{{A: "b"}, {A: "e"}}
+
+ xa, xb, xd, xe := &StructWithSlice{A: "a"}, &StructWithSlice{A: "b"}, &StructWithSlice{A: "d"}, &StructWithSlice{A: "e"}
+
+ sp1 := []*StructWithSlice{xa, xb, xd, xe}
+ sp2 := []*StructWithSlice{xb, xe}
+
+ sp1_2 := StructWithSlicePointers{xa, xb, xd, xe}
+ sp2_2 := StructWithSlicePointers{xb, xe}
+
+ for i, test := range []struct {
+ s any
+ t []any
+ expected any
+ }{
+ {[]string{"a", "b", "c"}, []any{[]string{"c", "d"}}, []string{"a", "b"}},
+ {[]string{"a", "b", "c"}, []any{[]string{"c", "d"}, []string{"a", "b"}}, []string{}},
+ {[]any{"a", "b", nil}, []any{[]string{"a", "d"}}, []any{"b", nil}},
+ {[]int{1, 2, 3, 4, 5}, []any{[]int{1, 3}, []string{"a", "b"}, []int{1, 2}}, []int{4, 5}},
+ {[]int{1, 2, 3, 4, 5}, []any{[]int64{1, 3}}, []int{2, 4, 5}},
+ {s1, []any{s2}, []TstX{{A: "a"}, {A: "d"}}},
+ {sp1, []any{sp2}, []*StructWithSlice{xa, xd}},
+ {sp1_2, []any{sp2_2}, StructWithSlicePointers{xa, xd}},
+
+ // Errors
+ {[]string{"a", "b", "c"}, []any{"error"}, false},
+ {"error", []any{[]string{"c", "d"}, []string{"a", "b"}}, false},
+ {[]string{"a", "b", "c"}, []any{[][]string{{"c", "d"}}}, false},
+ {
+ []any{[][]string{{"c", "d"}}},
+ []any{[]string{"c", "d"}, []string{"a", "b"}},
+ []any{[][]string{{"c", "d"}}},
+ },
+ } {
+
+ errMsg := qt.Commentf("[%d]", i)
+
+ args := append(test.t, test.s)
+
+ result, err := ns.Complement(args...)
+
+ if b, ok := test.expected.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, errMsg)
+
+ if !reflect.DeepEqual(test.expected, result) {
+ t.Fatalf("%s got\n%T: %v\nexpected\n%T: %v", errMsg, result, result, test.expected, test.expected)
+ }
+ }
+
+ _, err := ns.Complement()
+ c.Assert(err, qt.Not(qt.IsNil))
+ _, err = ns.Complement([]string{"a", "b"})
+ c.Assert(err, qt.Not(qt.IsNil))
+}
diff --git a/tpl/collections/index.go b/tpl/collections/index.go
new file mode 100644
index 000000000..43fccbc6b
--- /dev/null
+++ b/tpl/collections/index.go
@@ -0,0 +1,133 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/common/maps"
+)
+
+// Index returns the result of indexing its first argument by the following
+// arguments. Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each
+// indexed item must be a map, slice, or array.
+//
+// Copied from Go stdlib src/text/template/funcs.go.
+//
+// We deviate from the stdlib due to https://github.com/golang/go/issues/14751.
+//
+// TODO(moorereason): merge upstream changes.
+func (ns *Namespace) Index(item any, args ...any) (any, error) {
+ v := reflect.ValueOf(item)
+ if !v.IsValid() {
+ return nil, errors.New("index of untyped nil")
+ }
+
+ lowerm, ok := item.(maps.Params)
+ if ok {
+ return lowerm.Get(cast.ToStringSlice(args)...), nil
+ }
+
+ var indices []any
+
+ if len(args) == 1 {
+ v := reflect.ValueOf(args[0])
+ if v.Kind() == reflect.Slice {
+ for i := 0; i < v.Len(); i++ {
+ indices = append(indices, v.Index(i).Interface())
+ }
+ }
+ }
+
+ if indices == nil {
+ indices = args
+ }
+
+ for _, i := range indices {
+ index := reflect.ValueOf(i)
+ var isNil bool
+ if v, isNil = indirect(v); isNil {
+ return nil, errors.New("index of nil pointer")
+ }
+ switch v.Kind() {
+ case reflect.Array, reflect.Slice, reflect.String:
+ var x int64
+ switch index.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ x = index.Int()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ x = int64(index.Uint())
+ case reflect.Invalid:
+ return nil, errors.New("cannot index slice/array with nil")
+ default:
+ return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type())
+ }
+ if x < 0 || x >= int64(v.Len()) {
+ // We deviate from stdlib here. Don't return an error if the
+ // index is out of range.
+ return nil, nil
+ }
+ v = v.Index(int(x))
+ case reflect.Map:
+ index, err := prepareArg(index, v.Type().Key())
+ if err != nil {
+ return nil, err
+ }
+
+ if x := v.MapIndex(index); x.IsValid() {
+ v = x
+ } else {
+ v = reflect.Zero(v.Type().Elem())
+ }
+ case reflect.Invalid:
+ // the loop holds invariant: v.IsValid()
+ panic("unreachable")
+ default:
+ return nil, fmt.Errorf("can't index item of type %s", v.Type())
+ }
+ }
+ return v.Interface(), nil
+}
+
+// prepareArg checks if value can be used as an argument of type argType, and
+// converts an invalid value to appropriate zero if possible.
+//
+// Copied from Go stdlib src/text/template/funcs.go.
+func prepareArg(value reflect.Value, argType reflect.Type) (reflect.Value, error) {
+ if !value.IsValid() {
+ if !canBeNil(argType) {
+ return reflect.Value{}, fmt.Errorf("value is nil; should be of type %s", argType)
+ }
+ value = reflect.Zero(argType)
+ }
+ if !value.Type().AssignableTo(argType) {
+ return reflect.Value{}, fmt.Errorf("value has type %s; should be %s", value.Type(), argType)
+ }
+ return value, nil
+}
+
+// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero.
+//
+// Copied from Go stdlib src/text/template/exec.go.
+func canBeNil(typ reflect.Type) bool {
+ switch typ.Kind() {
+ case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+ return true
+ }
+ return false
+}
diff --git a/tpl/collections/index_test.go b/tpl/collections/index_test.go
new file mode 100644
index 000000000..662fe6f33
--- /dev/null
+++ b/tpl/collections/index_test.go
@@ -0,0 +1,70 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+)
+
+func TestIndex(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ for i, test := range []struct {
+ item any
+ indices []any
+ expect any
+ isErr bool
+ }{
+ {[]int{0, 1}, []any{0}, 0, false},
+ {[]int{0, 1}, []any{9}, nil, false}, // index out of range
+ {[]uint{0, 1}, nil, []uint{0, 1}, false},
+ {[][]int{{1, 2}, {3, 4}}, []any{0, 0}, 1, false},
+ {map[int]int{1: 10, 2: 20}, []any{1}, 10, false},
+ {map[int]int{1: 10, 2: 20}, []any{0}, 0, false},
+ {map[string]map[string]string{"a": {"b": "c"}}, []any{"a", "b"}, "c", false},
+ {[]map[string]map[string]string{{"a": {"b": "c"}}}, []any{0, "a", "b"}, "c", false},
+ {map[string]map[string]any{"a": {"b": []string{"c", "d"}}}, []any{"a", "b", 1}, "d", false},
+ {map[string]map[string]string{"a": {"b": "c"}}, []any{[]string{"a", "b"}}, "c", false},
+ {maps.Params{"a": "av"}, []any{"A"}, "av", false},
+ {maps.Params{"a": map[string]any{"b": "bv"}}, []any{"A", "B"}, "bv", false},
+ // errors
+ {nil, nil, nil, true},
+ {[]int{0, 1}, []any{"1"}, nil, true},
+ {[]int{0, 1}, []any{nil}, nil, true},
+ {tstNoStringer{}, []any{0}, nil, true},
+ } {
+ c.Run(fmt.Sprint(i), func(c *qt.C) {
+ errMsg := qt.Commentf("[%d] %v", i, test)
+
+ result, err := ns.Index(test.item, test.indices...)
+
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil), errMsg)
+ return
+ }
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, qt.DeepEquals, test.expect, errMsg)
+ })
+ }
+}
diff --git a/tpl/collections/init.go b/tpl/collections/init.go
new file mode 100644
index 000000000..3d22daab0
--- /dev/null
+++ b/tpl/collections/init.go
@@ -0,0 +1,214 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "collections"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.After,
+ []string{"after"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Apply,
+ []string{"apply"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Complement,
+ []string{"complement"},
+ [][2]string{
+ {`{{ slice "a" "b" "c" "d" "e" "f" | complement (slice "b" "c") (slice "d" "e") }}`, `[a f]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.SymDiff,
+ []string{"symdiff"},
+ [][2]string{
+ {`{{ slice 1 2 3 | symdiff (slice 3 4) }}`, `[1 2 4]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Delimit,
+ []string{"delimit"},
+ [][2]string{
+ {`{{ delimit (slice "A" "B" "C") ", " " and " }}`, `A, B and C`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Dictionary,
+ []string{"dict"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.EchoParam,
+ []string{"echoParam"},
+ [][2]string{
+ {`{{ echoParam .Params "langCode" }}`, `en`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.First,
+ []string{"first"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.KeyVals,
+ []string{"keyVals"},
+ [][2]string{
+ {`{{ keyVals "key" "a" "b" }}`, `key: [a b]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.In,
+ []string{"in"},
+ [][2]string{
+ {`{{ if in "this string contains a substring" "substring" }}Substring found!{{ end }}`, `Substring found!`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Index,
+ []string{"index"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Intersect,
+ []string{"intersect"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.IsSet,
+ []string{"isSet", "isset"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Last,
+ []string{"last"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Querify,
+ []string{"querify"},
+ [][2]string{
+ {
+ `{{ (querify "foo" 1 "bar" 2 "baz" "with spaces" "qux" "this&that=those") | safeHTML }}`,
+ `bar=2&baz=with+spaces&foo=1&qux=this%26that%3Dthose`,
+ },
+ {
+ `<a href="https://www.google.com?{{ (querify "q" "test" "page" 3) | safeURL }}">Search</a>`,
+ `<a href="https://www.google.com?page=3&amp;q=test">Search</a>`,
+ },
+ {
+ `{{ slice "foo" 1 "bar" 2 | querify | safeHTML }}`,
+ `bar=2&foo=1`,
+ },
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Shuffle,
+ []string{"shuffle"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Slice,
+ []string{"slice"},
+ [][2]string{
+ {`{{ slice "B" "C" "A" | sort }}`, `[A B C]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Sort,
+ []string{"sort"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Union,
+ []string{"union"},
+ [][2]string{
+ {`{{ union (slice 1 2 3) (slice 3 4 5) }}`, `[1 2 3 4 5]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Where,
+ []string{"where"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Append,
+ []string{"append"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Group,
+ []string{"group"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Seq,
+ []string{"seq"},
+ [][2]string{
+ {`{{ seq 3 }}`, `[1 2 3]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.NewScratch,
+ []string{"newScratch"},
+ [][2]string{
+ {`{{ $scratch := newScratch }}{{ $scratch.Add "b" 2 }}{{ $scratch.Add "b" 2 }}{{ $scratch.Get "b" }}`, `4`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Uniq,
+ []string{"uniq"},
+ [][2]string{
+ {`{{ slice 1 2 3 2 | uniq }}`, `[1 2 3]`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Merge,
+ []string{"merge"},
+ [][2]string{
+ {
+ `{{ dict "title" "Hugo Rocks!" | collections.Merge (dict "title" "Default Title" "description" "Yes, Hugo Rocks!") | sort }}`,
+ `[Yes, Hugo Rocks! Hugo Rocks!]`,
+ },
+ {
+ `{{ merge (dict "title" "Default Title" "description" "Yes, Hugo Rocks!") (dict "title" "Hugo Rocks!") | sort }}`,
+ `[Yes, Hugo Rocks! Hugo Rocks!]`,
+ },
+ {
+ `{{ merge (dict "title" "Default Title" "description" "Yes, Hugo Rocks!") (dict "title" "Hugo Rocks!") (dict "extra" "For reals!") | sort }}`,
+ `[Yes, Hugo Rocks! For reals! Hugo Rocks!]`,
+ },
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/collections/integration_test.go b/tpl/collections/integration_test.go
new file mode 100644
index 000000000..225eab9fa
--- /dev/null
+++ b/tpl/collections/integration_test.go
@@ -0,0 +1,75 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+// Issue 9585
+func TestApplyWithContext(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = 'http://example.com/'
+-- layouts/index.html --
+{{ apply (seq 3) "partial" "foo.html"}}
+-- layouts/partials/foo.html --
+{{ return "foo"}}
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+ [foo foo foo]
+`)
+}
+
+// Issue 9865
+func TestSortStable(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- layouts/index.html --
+{{ $values := slice (dict "a" 1 "b" 2) (dict "a" 3 "b" 1) (dict "a" 2 "b" 0) (dict "a" 1 "b" 0) (dict "a" 3 "b" 1) (dict "a" 2 "b" 2) (dict "a" 2 "b" 1) (dict "a" 0 "b" 3) (dict "a" 3 "b" 3) (dict "a" 0 "b" 0) (dict "a" 0 "b" 0) (dict "a" 2 "b" 0) (dict "a" 1 "b" 2) (dict "a" 1 "b" 1) (dict "a" 3 "b" 0) (dict "a" 2 "b" 0) (dict "a" 3 "b" 0) (dict "a" 3 "b" 0) (dict "a" 3 "b" 0) (dict "a" 3 "b" 1) }}
+Asc: {{ sort (sort $values "b" "asc") "a" "asc" }}
+Desc: {{ sort (sort $values "b" "desc") "a" "desc" }}
+
+ `
+
+ for i := 0; i < 4; i++ {
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+Asc: [map[a:0 b:0] map[a:0 b:0] map[a:0 b:3] map[a:1 b:0] map[a:1 b:1] map[a:1 b:2] map[a:1 b:2] map[a:2 b:0] map[a:2 b:0] map[a:2 b:0] map[a:2 b:1] map[a:2 b:2] map[a:3 b:0] map[a:3 b:0] map[a:3 b:0] map[a:3 b:0] map[a:3 b:1] map[a:3 b:1] map[a:3 b:1] map[a:3 b:3]]
+Desc: [map[a:3 b:3] map[a:3 b:1] map[a:3 b:1] map[a:3 b:1] map[a:3 b:0] map[a:3 b:0] map[a:3 b:0] map[a:3 b:0] map[a:2 b:2] map[a:2 b:1] map[a:2 b:0] map[a:2 b:0] map[a:2 b:0] map[a:1 b:2] map[a:1 b:2] map[a:1 b:1] map[a:1 b:0] map[a:0 b:3] map[a:0 b:0] map[a:0 b:0]]
+`)
+
+ }
+}
diff --git a/tpl/collections/merge.go b/tpl/collections/merge.go
new file mode 100644
index 000000000..4d408302b
--- /dev/null
+++ b/tpl/collections/merge.go
@@ -0,0 +1,127 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/maps"
+
+ "errors"
+)
+
+// Merge creates a copy of the final parameter and merges the preceding
+// parameters into it in reverse order.
+// Currently only maps are supported. Key handling is case insensitive.
+func (ns *Namespace) Merge(params ...any) (any, error) {
+ if len(params) < 2 {
+ return nil, errors.New("merge requires at least two parameters")
+ }
+
+ var err error
+ result := params[len(params)-1]
+
+ for i := len(params) - 2; i >= 0; i-- {
+ result, err = ns.merge(params[i], result)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return result, nil
+}
+
+// merge creates a copy of dst and merges src into it.
+func (ns *Namespace) merge(src, dst any) (any, error) {
+ vdst, vsrc := reflect.ValueOf(dst), reflect.ValueOf(src)
+
+ if vdst.Kind() != reflect.Map {
+ return nil, fmt.Errorf("destination must be a map, got %T", dst)
+ }
+
+ if !hreflect.IsTruthfulValue(vsrc) {
+ return dst, nil
+ }
+
+ if vsrc.Kind() != reflect.Map {
+ return nil, fmt.Errorf("source must be a map, got %T", src)
+ }
+
+ if vsrc.Type().Key() != vdst.Type().Key() {
+ return nil, fmt.Errorf("incompatible map types, got %T to %T", src, dst)
+ }
+
+ return mergeMap(vdst, vsrc).Interface(), nil
+}
+
+func caseInsensitiveLookup(m, k reflect.Value) (reflect.Value, bool) {
+ if m.Type().Key().Kind() != reflect.String || k.Kind() != reflect.String {
+ // Fall back to direct lookup.
+ v := m.MapIndex(k)
+ return v, hreflect.IsTruthfulValue(v)
+ }
+
+ for _, key := range m.MapKeys() {
+ if strings.EqualFold(k.String(), key.String()) {
+ return m.MapIndex(key), true
+ }
+ }
+
+ return reflect.Value{}, false
+}
+
+func mergeMap(dst, src reflect.Value) reflect.Value {
+ out := reflect.MakeMap(dst.Type())
+
+ // If the destination is Params, we must lower case all keys.
+ _, lowerCase := dst.Interface().(maps.Params)
+
+ // Copy the destination map.
+ for _, key := range dst.MapKeys() {
+ v := dst.MapIndex(key)
+ out.SetMapIndex(key, v)
+ }
+
+ // Add all keys in src not already in destination.
+ // Maps of the same type will be merged.
+ for _, key := range src.MapKeys() {
+ sv := src.MapIndex(key)
+ dv, found := caseInsensitiveLookup(dst, key)
+
+ if found {
+ // If both are the same map key type, merge.
+ dve := dv.Elem()
+ if dve.Kind() == reflect.Map {
+ sve := sv.Elem()
+ if sve.Kind() != reflect.Map {
+ continue
+ }
+
+ if dve.Type().Key() == sve.Type().Key() {
+ out.SetMapIndex(key, mergeMap(dve, sve))
+ }
+ }
+ } else {
+ if lowerCase && key.Kind() == reflect.String {
+ key = reflect.ValueOf(strings.ToLower(key.String()))
+ }
+ out.SetMapIndex(key, sv)
+ }
+ }
+
+ return out
+}
diff --git a/tpl/collections/merge_test.go b/tpl/collections/merge_test.go
new file mode 100644
index 000000000..e7a383126
--- /dev/null
+++ b/tpl/collections/merge_test.go
@@ -0,0 +1,243 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "bytes"
+ "reflect"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/parser"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestMerge exercises the Merge template function with plain maps,
+// maps.Params (case-insensitive lookup, lower-cased added keys),
+// nested maps, a nil source, and the error cases (non-map arguments,
+// incompatible key types, all-nil input).
+func TestMerge(t *testing.T) {
+	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+	simpleMap := map[string]any{"a": 1, "b": 2}
+
+	for i, test := range []struct {
+		name   string
+		params []any
+		expect any
+		isErr  bool
+	}{
+		{
+			"basic",
+			[]any{
+				map[string]any{"a": 42, "c": 3},
+				map[string]any{"a": 1, "b": 2},
+			},
+			map[string]any{"a": 1, "b": 2, "c": 3},
+			false,
+		},
+		{
+			"multi",
+			[]any{
+				map[string]any{"a": 42, "c": 3, "e": 11},
+				map[string]any{"a": 1, "b": 2},
+				map[string]any{"a": 9, "c": 4, "d": 7},
+			},
+			map[string]any{"a": 9, "b": 2, "c": 4, "d": 7, "e": 11},
+			false,
+		},
+		{
+			"basic case insensitive",
+			[]any{
+				map[string]any{"A": 42, "c": 3},
+				map[string]any{"a": 1, "b": 2},
+			},
+			map[string]any{"a": 1, "b": 2, "c": 3},
+			false,
+		},
+		{
+			"nested",
+			[]any{
+				map[string]any{"a": 42, "c": 3, "b": map[string]any{"d": 55, "e": 66, "f": 3}},
+				map[string]any{"a": 1, "b": map[string]any{"d": 1, "e": 2}},
+			},
+			map[string]any{"a": 1, "b": map[string]any{"d": 1, "e": 2, "f": 3}, "c": 3},
+			false,
+		},
+		{
+			// https://github.com/gohugoio/hugo/issues/6633
+			"params dst",
+			[]any{
+				map[string]any{"a": 42, "c": 3},
+				maps.Params{"a": 1, "b": 2},
+			},
+			maps.Params{"a": int(1), "b": int(2), "c": int(3)},
+			false,
+		},
+		{
+			"params dst, upper case src",
+			[]any{
+				map[string]any{"a": 42, "C": 3},
+				maps.Params{"a": 1, "b": 2},
+			},
+			maps.Params{"a": int(1), "b": int(2), "c": int(3)},
+			false,
+		},
+		{
+			"params src",
+			[]any{
+				maps.Params{"a": 42, "c": 3},
+				map[string]any{"a": 1, "c": 2},
+			},
+			map[string]any{"a": int(1), "c": int(2)},
+			false,
+		},
+		{
+			"params src, upper case dst",
+			[]any{
+				maps.Params{"a": 42, "c": 3},
+				map[string]any{"a": 1, "C": 2},
+			},
+			map[string]any{"a": int(1), "C": int(2)},
+			false,
+		},
+		{
+			"nested, params dst",
+			[]any{
+				map[string]any{"a": 42, "c": 3, "b": map[string]any{"d": 55, "e": 66, "f": 3}},
+				maps.Params{"a": 1, "b": maps.Params{"d": 1, "e": 2}},
+			},
+			maps.Params{"a": 1, "b": maps.Params{"d": 1, "e": 2, "f": 3}, "c": 3},
+			false,
+		},
+		{
+			// https://github.com/gohugoio/hugo/issues/7899
+			"matching keys with non-map src value",
+			[]any{
+				map[string]any{"k": "v"},
+				map[string]any{"k": map[string]any{"k2": "v2"}},
+			},
+			map[string]any{"k": map[string]any{"k2": "v2"}},
+			false,
+		},
+		{"src nil", []any{nil, simpleMap}, simpleMap, false},
+		// Error cases.
+		{"dst not a map", []any{nil, "not a map"}, nil, true},
+		{"src not a map", []any{"not a map", simpleMap}, nil, true},
+		{"different map types", []any{map[int]any{32: "a"}, simpleMap}, nil, true},
+		{"all nil", []any{nil, nil}, nil, true},
+	} {
+
+		// Capture range variable for the parallel subtest (pre-Go 1.22 idiom).
+		test := test
+
+		t.Run(test.name, func(t *testing.T) {
+			t.Parallel()
+			errMsg := qt.Commentf("[%d] %v", i, test)
+
+			c := qt.New(t)
+
+			result, err := ns.Merge(test.params...)
+
+			if test.isErr {
+				c.Assert(err, qt.Not(qt.IsNil), errMsg)
+				return
+			}
+
+			c.Assert(err, qt.IsNil)
+			c.Assert(result, qt.DeepEquals, test.expect, errMsg)
+		})
+	}
+}
+
+// TestMergeDataFormats round-trips two TOML documents through JSON,
+// YAML and TOML encodings and verifies that Merge produces the same
+// merged result regardless of the data format the maps came from.
+func TestMergeDataFormats(t *testing.T) {
+	c := qt.New(t)
+	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+	toml1 := `
+V1 = "v1_1"
+
+[V2s]
+V21 = "v21_1"
+
+`
+
+	toml2 := `
+V1 = "v1_2"
+V2 = "v2_2"
+
+[V2s]
+V21 = "v21_2"
+V22 = "v22_2"
+
+`
+
+	meta1, err := metadecoders.Default.UnmarshalToMap([]byte(toml1), metadecoders.TOML)
+	c.Assert(err, qt.IsNil)
+	meta2, err := metadecoders.Default.UnmarshalToMap([]byte(toml2), metadecoders.TOML)
+	c.Assert(err, qt.IsNil)
+
+	for _, format := range []metadecoders.Format{metadecoders.JSON, metadecoders.YAML, metadecoders.TOML} {
+
+		// Re-encode both maps in the format under test, then decode again.
+		var dataStr1, dataStr2 bytes.Buffer
+		err = parser.InterfaceToConfig(meta1, format, &dataStr1)
+		c.Assert(err, qt.IsNil)
+		err = parser.InterfaceToConfig(meta2, format, &dataStr2)
+		c.Assert(err, qt.IsNil)
+
+		dst, err := metadecoders.Default.UnmarshalToMap(dataStr1.Bytes(), format)
+		c.Assert(err, qt.IsNil)
+		src, err := metadecoders.Default.UnmarshalToMap(dataStr2.Bytes(), format)
+		c.Assert(err, qt.IsNil)
+
+		merged, err := ns.Merge(src, dst)
+		c.Assert(err, qt.IsNil)
+
+		// dst's values win on conflicts; nested V2s maps are merged.
+		c.Assert(
+			merged,
+			qt.DeepEquals,
+			map[string]any{
+				"V1": "v1_1", "V2": "v2_2",
+				"V2s": map[string]any{"V21": "v21_1", "V22": "v22_2"},
+			})
+	}
+}
+
+// TestCaseInsensitiveMapLookup verifies caseInsensitiveLookup for both
+// string-keyed maps (case-insensitive match) and non-string-keyed maps
+// (direct lookup fallback).
+func TestCaseInsensitiveMapLookup(t *testing.T) {
+	c := qt.New(t)
+
+	m1 := reflect.ValueOf(map[string]any{
+		"a": 1,
+		"B": 2,
+	})
+
+	m2 := reflect.ValueOf(map[int]any{
+		1: 1,
+		2: 2,
+	})
+
+	var found bool
+
+	a, found := caseInsensitiveLookup(m1, reflect.ValueOf("A"))
+	c.Assert(found, qt.Equals, true)
+	c.Assert(a.Interface(), qt.Equals, 1)
+
+	b, found := caseInsensitiveLookup(m1, reflect.ValueOf("b"))
+	c.Assert(found, qt.Equals, true)
+	c.Assert(b.Interface(), qt.Equals, 2)
+
+	two, found := caseInsensitiveLookup(m2, reflect.ValueOf(2))
+	c.Assert(found, qt.Equals, true)
+	c.Assert(two.Interface(), qt.Equals, 2)
+}
diff --git a/tpl/collections/reflect_helpers.go b/tpl/collections/reflect_helpers.go
new file mode 100644
index 000000000..a295441ec
--- /dev/null
+++ b/tpl/collections/reflect_helpers.go
@@ -0,0 +1,215 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "reflect"
+
+ "errors"
+
+ "github.com/mitchellh/hashstructure"
+)
+
+var (
+ zero reflect.Value
+ errorType = reflect.TypeOf((*error)(nil)).Elem()
+)
+
+// numberToFloat converts a numeric reflect.Value (any int, uint or
+// float kind, possibly wrapped in an interface) to float64. It returns
+// an error for non-numeric kinds.
+func numberToFloat(v reflect.Value) (float64, error) {
+	switch kind := v.Kind(); {
+	case isFloat(kind):
+		return v.Float(), nil
+	case isInt(kind):
+		return float64(v.Int()), nil
+	case isUint(kind):
+		return float64(v.Uint()), nil
+	case kind == reflect.Interface:
+		// Unwrap the interface and retry with the concrete value.
+		return numberToFloat(v.Elem())
+	default:
+		return 0, fmt.Errorf("invalid kind %s in numberToFloat", kind)
+	}
+}
+
+// normalize maps a reflect.Value to a value suitable for identity
+// comparison and use as a map key: non-comparable values (e.g. maps,
+// slices) are replaced by their hashstructure hash, and all numeric
+// kinds are widened to float64 so that e.g. int(3) and int64(3)
+// compare equal. All other values are returned unchanged.
+func normalize(v reflect.Value) any {
+	k := v.Kind()
+
+	switch {
+	case !v.Type().Comparable():
+		// hashstructure.Hash only fails for unsupported types; treat
+		// that as a programmer error.
+		h, err := hashstructure.Hash(v.Interface(), nil)
+		if err != nil {
+			panic(err)
+		}
+		return h
+	case isNumber(k):
+		f, err := numberToFloat(v)
+		if err == nil {
+			return f
+		}
+	}
+	return v.Interface()
+}
+
+// collectIdentities collects the identities of all elements of the
+// given slices/arrays into a set. Interface elements are unwrapped
+// (indirectInterface) and values normalized (see normalize), so equal
+// values of different numeric types collapse to a single identity.
+// It returns an error if an argument is not a slice/array or holds a
+// non-comparable element.
+func collectIdentities(seqs ...any) (map[any]bool, error) {
+	seen := make(map[any]bool)
+	for _, seq := range seqs {
+		v := reflect.ValueOf(seq)
+		switch v.Kind() {
+		case reflect.Array, reflect.Slice:
+			for i := 0; i < v.Len(); i++ {
+				ev, _ := indirectInterface(v.Index(i))
+
+				if !ev.Type().Comparable() {
+					return nil, errors.New("elements must be comparable")
+				}
+
+				seen[normalize(ev)] = true
+			}
+		default:
+			return nil, fmt.Errorf("arguments must be slices or arrays")
+		}
+	}
+
+	return seen, nil
+}
+
+// convertValue coerces v to the type to. Assignable values pass
+// through unchanged; otherwise string targets go through toString and
+// numeric targets through convertNumber. We have some different
+// numeric and string types that we try to behave like they were the
+// same; any other combination is an error.
+func convertValue(v reflect.Value, to reflect.Type) (reflect.Value, error) {
+	if v.Type().AssignableTo(to) {
+		return v, nil
+	}
+	switch kind := to.Kind(); {
+	case kind == reflect.String:
+		s, err := toString(v)
+		return reflect.ValueOf(s), err
+	case isNumber(kind):
+		return convertNumber(v, kind)
+	default:
+		return reflect.Value{}, fmt.Errorf("%s is not assignable to %s", v.Type(), to)
+	}
+}
+
+// convertNumber converts the numeric value v to the numeric kind to,
+// going through float64/int64/uint64 intermediates (toFloat/toInt/
+// toUint) and then narrowing to the requested kind.
+// There are potential overflows in this function, but the downconversion of
+// int64 etc. into int8 etc. is coming from the synthetic unit tests for Union etc.
+// TODO(bep) We should consider normalizing the slices to int64 etc.
+func convertNumber(v reflect.Value, to reflect.Kind) (reflect.Value, error) {
+	var n reflect.Value
+	if isFloat(to) {
+		f, err := toFloat(v)
+		if err != nil {
+			return n, err
+		}
+		switch to {
+		case reflect.Float32:
+			n = reflect.ValueOf(float32(f))
+		default:
+			n = reflect.ValueOf(float64(f))
+		}
+	} else if isInt(to) {
+		i, err := toInt(v)
+		if err != nil {
+			return n, err
+		}
+		switch to {
+		case reflect.Int:
+			n = reflect.ValueOf(int(i))
+		case reflect.Int8:
+			n = reflect.ValueOf(int8(i))
+		case reflect.Int16:
+			n = reflect.ValueOf(int16(i))
+		case reflect.Int32:
+			n = reflect.ValueOf(int32(i))
+		case reflect.Int64:
+			n = reflect.ValueOf(int64(i))
+		}
+	} else if isUint(to) {
+		i, err := toUint(v)
+		if err != nil {
+			return n, err
+		}
+		switch to {
+		case reflect.Uint:
+			n = reflect.ValueOf(uint(i))
+		case reflect.Uint8:
+			n = reflect.ValueOf(uint8(i))
+		case reflect.Uint16:
+			n = reflect.ValueOf(uint16(i))
+		case reflect.Uint32:
+			n = reflect.ValueOf(uint32(i))
+		case reflect.Uint64:
+			n = reflect.ValueOf(uint64(i))
+		}
+
+	}
+
+	// n stays invalid if `to` was not a numeric kind we handle.
+	if !n.IsValid() {
+		return n, errors.New("invalid values")
+	}
+
+	return n, nil
+}
+
+// newSliceElement returns a pointer to a newly allocated zero value of
+// the element type of items (which must be a slice or array),
+// unwrapping one level of pointer element type. It returns nil for nil
+// or non-slice input.
+func newSliceElement(items any) any {
+	tp := reflect.TypeOf(items)
+	if tp == nil {
+		return nil
+	}
+	switch tp.Kind() {
+	case reflect.Array, reflect.Slice:
+		tp = tp.Elem()
+		if tp.Kind() == reflect.Ptr {
+			tp = tp.Elem()
+		}
+
+		// reflect.New yields a *T pointing at a zero T.
+		return reflect.New(tp).Interface()
+	}
+	return nil
+}
+
+// isNumber reports whether kind is any integer, unsigned or float kind.
+func isNumber(kind reflect.Kind) bool {
+	return isInt(kind) || isUint(kind) || isFloat(kind)
+}
+
+// isInt reports whether kind is a signed integer kind.
+func isInt(kind reflect.Kind) bool {
+	switch kind {
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		return true
+	default:
+		return false
+	}
+}
+
+// isUint reports whether kind is an unsigned integer kind.
+func isUint(kind reflect.Kind) bool {
+	switch kind {
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+		return true
+	default:
+		return false
+	}
+}
+
+// isFloat reports whether kind is a floating-point kind.
+func isFloat(kind reflect.Kind) bool {
+	switch kind {
+	case reflect.Float32, reflect.Float64:
+		return true
+	default:
+		return false
+	}
+}
diff --git a/tpl/collections/sort.go b/tpl/collections/sort.go
new file mode 100644
index 000000000..ce76a4522
--- /dev/null
+++ b/tpl/collections/sort.go
@@ -0,0 +1,189 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+ "reflect"
+ "sort"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/tpl/compare"
+ "github.com/spf13/cast"
+)
+
+// Sort returns a sorted copy of seq, which must be a slice, array or
+// map. The optional first argument names a sort key: "" or "value"
+// sorts by the elements themselves, anything else is a dot-chained
+// field/method/map path evaluated on each element (with special
+// handling for lower-cased maps.Params). The optional second argument
+// "desc" reverses the order. Maps are sorted by key unless a sort key
+// is given; the result is always a slice of the sequence's element
+// type, sorted stably using the language-aware collator.
+func (ns *Namespace) Sort(seq any, args ...any) (any, error) {
+	if seq == nil {
+		return nil, errors.New("sequence must be provided")
+	}
+
+	seqv, isNil := indirect(reflect.ValueOf(seq))
+	if isNil {
+		return nil, errors.New("can't iterate over a nil value")
+	}
+
+	// Determine the element slice type of the result.
+	var sliceType reflect.Type
+	switch seqv.Kind() {
+	case reflect.Array, reflect.Slice:
+		sliceType = seqv.Type()
+	case reflect.Map:
+		sliceType = reflect.SliceOf(seqv.Type().Elem())
+	default:
+		return nil, errors.New("can't sort " + reflect.ValueOf(seq).Type().String())
+	}
+
+	collator := langs.GetCollator(ns.deps.Language)
+
+	// Create a list of pairs that will be used to do the sort
+	p := pairList{Collator: collator, sortComp: ns.sortComp, SortAsc: true, SliceType: sliceType}
+	p.Pairs = make([]pair, seqv.Len())
+
+	// args[0]: sort key (non-string values yield an empty key);
+	// args[1]: "desc" for descending, anything else ascending.
+	var sortByField string
+	for i, l := range args {
+		dStr, err := cast.ToStringE(l)
+		switch {
+		case i == 0 && err != nil:
+			sortByField = ""
+		case i == 0 && err == nil:
+			sortByField = dStr
+		case i == 1 && err == nil && dStr == "desc":
+			p.SortAsc = false
+		case i == 1:
+			p.SortAsc = true
+		}
+	}
+	path := strings.Split(strings.Trim(sortByField, "."), ".")
+
+	switch seqv.Kind() {
+	case reflect.Array, reflect.Slice:
+		for i := 0; i < seqv.Len(); i++ {
+			p.Pairs[i].Value = seqv.Index(i)
+			if sortByField == "" || sortByField == "value" {
+				p.Pairs[i].Key = p.Pairs[i].Value
+			} else {
+				// Walk the dot-chained path to find the sort key.
+				v := p.Pairs[i].Value
+				var err error
+				for i, elemName := range path {
+					v, err = evaluateSubElem(v, elemName)
+					if err != nil {
+						return nil, err
+					}
+					if !v.IsValid() {
+						continue
+					}
+					// Special handling of lower cased maps.
+					if params, ok := v.Interface().(maps.Params); ok {
+						v = reflect.ValueOf(params.Get(path[i+1:]...))
+						break
+					}
+				}
+				p.Pairs[i].Key = v
+			}
+		}
+
+	case reflect.Map:
+		keys := seqv.MapKeys()
+		for i := 0; i < seqv.Len(); i++ {
+			p.Pairs[i].Value = seqv.MapIndex(keys[i])
+
+			if sortByField == "" {
+				// Default for maps: sort by key.
+				p.Pairs[i].Key = keys[i]
+			} else if sortByField == "value" {
+				p.Pairs[i].Key = p.Pairs[i].Value
+			} else {
+				// Walk the dot-chained path to find the sort key.
+				v := p.Pairs[i].Value
+				var err error
+				for i, elemName := range path {
+					v, err = evaluateSubElem(v, elemName)
+					if err != nil {
+						return nil, err
+					}
+					if !v.IsValid() {
+						continue
+					}
+					// Special handling of lower cased maps.
+					if params, ok := v.Interface().(maps.Params); ok {
+						v = reflect.ValueOf(params.Get(path[i+1:]...))
+						break
+					}
+				}
+				p.Pairs[i].Key = v
+			}
+		}
+	}
+
+	// The collator is shared; guard it for the duration of the sort.
+	collator.Lock()
+	defer collator.Unlock()
+
+	return p.sort(), nil
+}
+
+// Credit for pair sorting method goes to Andrew Gerrand
+// https://groups.google.com/forum/#!topic/golang-nuts/FT7cjmcL7gw
+// pair holds a sort key and the element it belongs to; the pairs are
+// ordered by Key while Value is what ends up in the sorted result.
+type pair struct {
+	Key   reflect.Value
+	Value reflect.Value
+}
+
+// pairList is a slice of pairs that implements sort.Interface,
+// ordering Pairs by Key via the compare namespace and collator.
+// SliceType is the concrete slice type to materialize the sorted
+// Values into; SortAsc selects ascending vs. descending order.
+type pairList struct {
+	Collator  *langs.Collator
+	sortComp  *compare.Namespace
+	Pairs     []pair
+	SortAsc   bool
+	SliceType reflect.Type
+}
+
+func (p pairList) Swap(i, j int) { p.Pairs[i], p.Pairs[j] = p.Pairs[j], p.Pairs[i] }
+func (p pairList) Len() int      { return len(p.Pairs) }
+
+// Less compares the Keys of pairs i and j using the collating
+// less-than. An invalid (missing) key is compared as the other key's
+// zero value; two invalid keys compare as not-less (equal).
+func (p pairList) Less(i, j int) bool {
+	iv := p.Pairs[i].Key
+	jv := p.Pairs[j].Key
+
+	if iv.IsValid() {
+		if jv.IsValid() {
+			// can only call Interface() on valid reflect Values
+			return p.sortComp.LtCollate(p.Collator, iv.Interface(), jv.Interface())
+		}
+
+		// if j is invalid, test i against i's zero value
+		return p.sortComp.LtCollate(p.Collator, iv.Interface(), reflect.Zero(iv.Type()))
+	}
+
+	if jv.IsValid() {
+		// if i is invalid, test j against j's zero value
+		return p.sortComp.LtCollate(p.Collator, reflect.Zero(jv.Type()), jv.Interface())
+	}
+
+	return false
+}
+
+// sort stably sorts the pairList (reversed when SortAsc is false) and
+// returns a new slice of type SliceType holding the sorted Values.
+func (p pairList) sort() any {
+	if p.SortAsc {
+		sort.Stable(p)
+	} else {
+		sort.Stable(sort.Reverse(p))
+	}
+	sorted := reflect.MakeSlice(p.SliceType, len(p.Pairs), len(p.Pairs))
+	for i, v := range p.Pairs {
+		sorted.Index(i).Set(v.Value)
+	}
+
+	return sorted.Interface()
+}
diff --git a/tpl/collections/sort_test.go b/tpl/collections/sort_test.go
new file mode 100644
index 000000000..a4adccf51
--- /dev/null
+++ b/tpl/collections/sort_test.go
@@ -0,0 +1,268 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
+// stringsSlice is a named string slice; the "Issue 6023" case below
+// uses it to check that Sort keeps the input's concrete slice type.
+type stringsSlice []string
+
+// TestSort is a table-driven test of the Sort template function
+// covering slices, maps, sort keys (fields, methods, dot-chained
+// paths, lower-cased Params), ascending/descending order, missing
+// elements, and the error cases (an expect of false means an error is
+// required).
+func TestSort(t *testing.T) {
+	t.Parallel()
+
+	ns := New(&deps.Deps{
+		Language: langs.NewDefaultLanguage(config.New()),
+	})
+
+	type ts struct {
+		MyInt    int
+		MyFloat  float64
+		MyString string
+	}
+	type mid struct {
+		Tst TstX
+	}
+
+	for i, test := range []struct {
+		seq         any
+		sortByField any
+		sortAsc     string
+		expect      any
+	}{
+		{[]string{"class1", "class2", "class3"}, nil, "asc", []string{"class1", "class2", "class3"}},
+		{[]string{"class3", "class1", "class2"}, nil, "asc", []string{"class1", "class2", "class3"}},
+		{[]string{"CLASS3", "class1", "class2"}, nil, "asc", []string{"class1", "class2", "CLASS3"}},
+		// Issue 6023
+		{stringsSlice{"class3", "class1", "class2"}, nil, "asc", stringsSlice{"class1", "class2", "class3"}},
+
+		{[]int{1, 2, 3, 4, 5}, nil, "asc", []int{1, 2, 3, 4, 5}},
+		{[]int{5, 4, 3, 1, 2}, nil, "asc", []int{1, 2, 3, 4, 5}},
+		// test sort key parameter is forcibly set empty
+		{[]string{"class3", "class1", "class2"}, map[int]string{1: "a"}, "asc", []string{"class1", "class2", "class3"}},
+		// test map sorting by keys
+		{map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, nil, "asc", []int{10, 20, 30, 40, 50}},
+		{map[string]int{"3": 10, "2": 20, "1": 30, "4": 40, "5": 50}, nil, "asc", []int{30, 20, 10, 40, 50}},
+		{map[string]string{"1": "10", "2": "20", "3": "30", "4": "40", "5": "50"}, nil, "asc", []string{"10", "20", "30", "40", "50"}},
+		{map[string]string{"3": "10", "2": "20", "1": "30", "4": "40", "5": "50"}, nil, "asc", []string{"30", "20", "10", "40", "50"}},
+		{map[string]string{"one": "10", "two": "20", "three": "30", "four": "40", "five": "50"}, nil, "asc", []string{"50", "40", "10", "30", "20"}},
+		{map[int]string{1: "10", 2: "20", 3: "30", 4: "40", 5: "50"}, nil, "asc", []string{"10", "20", "30", "40", "50"}},
+		{map[int]string{3: "10", 2: "20", 1: "30", 4: "40", 5: "50"}, nil, "asc", []string{"30", "20", "10", "40", "50"}},
+		{map[float64]string{3.3: "10", 2.3: "20", 1.3: "30", 4.3: "40", 5.3: "50"}, nil, "asc", []string{"30", "20", "10", "40", "50"}},
+		// test map sorting by value
+		{map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, "value", "asc", []int{10, 20, 30, 40, 50}},
+		{map[string]int{"3": 10, "2": 20, "1": 30, "4": 40, "5": 50}, "value", "asc", []int{10, 20, 30, 40, 50}},
+		// test map sorting by field value
+		{
+			map[string]ts{"1": {10, 10.5, "ten"}, "2": {20, 20.5, "twenty"}, "3": {30, 30.5, "thirty"}, "4": {40, 40.5, "forty"}, "5": {50, 50.5, "fifty"}},
+			"MyInt",
+			"asc",
+			[]ts{{10, 10.5, "ten"}, {20, 20.5, "twenty"}, {30, 30.5, "thirty"}, {40, 40.5, "forty"}, {50, 50.5, "fifty"}},
+		},
+		{
+			map[string]ts{"1": {10, 10.5, "ten"}, "2": {20, 20.5, "twenty"}, "3": {30, 30.5, "thirty"}, "4": {40, 40.5, "forty"}, "5": {50, 50.5, "fifty"}},
+			"MyFloat",
+			"asc",
+			[]ts{{10, 10.5, "ten"}, {20, 20.5, "twenty"}, {30, 30.5, "thirty"}, {40, 40.5, "forty"}, {50, 50.5, "fifty"}},
+		},
+		{
+			map[string]ts{"1": {10, 10.5, "ten"}, "2": {20, 20.5, "twenty"}, "3": {30, 30.5, "thirty"}, "4": {40, 40.5, "forty"}, "5": {50, 50.5, "fifty"}},
+			"MyString",
+			"asc",
+			[]ts{{50, 50.5, "fifty"}, {40, 40.5, "forty"}, {10, 10.5, "ten"}, {30, 30.5, "thirty"}, {20, 20.5, "twenty"}},
+		},
+		// test sort desc
+		{[]string{"class1", "class2", "class3"}, "value", "desc", []string{"class3", "class2", "class1"}},
+		{[]string{"class3", "class1", "class2"}, "value", "desc", []string{"class3", "class2", "class1"}},
+		// test sort by struct's method
+		{
+			[]TstX{{A: "i", B: "j"}, {A: "e", B: "f"}, {A: "c", B: "d"}, {A: "g", B: "h"}, {A: "a", B: "b"}},
+			"TstRv",
+			"asc",
+			[]TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}},
+		},
+		{
+			[]*TstX{{A: "i", B: "j"}, {A: "e", B: "f"}, {A: "c", B: "d"}, {A: "g", B: "h"}, {A: "a", B: "b"}},
+			"TstRp",
+			"asc",
+			[]*TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}},
+		},
+		// Lower case Params, slice
+		{
+			[]TstParams{{params: maps.Params{"color": "indigo"}}, {params: maps.Params{"color": "blue"}}, {params: maps.Params{"color": "green"}}},
+			".Params.COLOR",
+			"asc",
+			[]TstParams{{params: maps.Params{"color": "blue"}}, {params: maps.Params{"color": "green"}}, {params: maps.Params{"color": "indigo"}}},
+		},
+		// Lower case Params, map
+		{
+			map[string]TstParams{"1": {params: maps.Params{"color": "indigo"}}, "2": {params: maps.Params{"color": "blue"}}, "3": {params: maps.Params{"color": "green"}}},
+			".Params.CoLoR",
+			"asc",
+			[]TstParams{{params: maps.Params{"color": "blue"}}, {params: maps.Params{"color": "green"}}, {params: maps.Params{"color": "indigo"}}},
+		},
+		// test map sorting by struct's method
+		{
+			map[string]TstX{"1": {A: "i", B: "j"}, "2": {A: "e", B: "f"}, "3": {A: "c", B: "d"}, "4": {A: "g", B: "h"}, "5": {A: "a", B: "b"}},
+			"TstRv",
+			"asc",
+			[]TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}},
+		},
+		{
+			map[string]*TstX{"1": {A: "i", B: "j"}, "2": {A: "e", B: "f"}, "3": {A: "c", B: "d"}, "4": {A: "g", B: "h"}, "5": {A: "a", B: "b"}},
+			"TstRp",
+			"asc",
+			[]*TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}},
+		},
+		// test sort by dot chaining key argument
+		{
+			[]map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}},
+			"foo.A",
+			"asc",
+			[]map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}},
+		},
+		{
+			[]map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}},
+			".foo.A",
+			"asc",
+			[]map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}},
+		},
+		{
+			[]map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}},
+			"foo.TstRv",
+			"asc",
+			[]map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}},
+		},
+		{
+			[]map[string]*TstX{{"foo": &TstX{A: "e", B: "f"}}, {"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}},
+			"foo.TstRp",
+			"asc",
+			[]map[string]*TstX{{"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}, {"foo": &TstX{A: "e", B: "f"}}},
+		},
+		{
+			[]map[string]mid{{"foo": mid{Tst: TstX{A: "e", B: "f"}}}, {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}},
+			"foo.Tst.A",
+			"asc",
+			[]map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}},
+		},
+		{
+			[]map[string]mid{{"foo": mid{Tst: TstX{A: "e", B: "f"}}}, {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}},
+			"foo.Tst.TstRv",
+			"asc",
+			[]map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}},
+		},
+		// test map sorting by dot chaining key argument
+		{
+			map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}},
+			"foo.A",
+			"asc",
+			[]map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}},
+		},
+		{
+			map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}},
+			".foo.A",
+			"asc",
+			[]map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}},
+		},
+		{
+			map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}},
+			"foo.TstRv",
+			"asc",
+			[]map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}},
+		},
+		{
+			map[string]map[string]*TstX{"1": {"foo": &TstX{A: "e", B: "f"}}, "2": {"foo": &TstX{A: "a", B: "b"}}, "3": {"foo": &TstX{A: "c", B: "d"}}},
+			"foo.TstRp",
+			"asc",
+			[]map[string]*TstX{{"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}, {"foo": &TstX{A: "e", B: "f"}}},
+		},
+		{
+			map[string]map[string]mid{"1": {"foo": mid{Tst: TstX{A: "e", B: "f"}}}, "2": {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, "3": {"foo": mid{Tst: TstX{A: "c", B: "d"}}}},
+			"foo.Tst.A",
+			"asc",
+			[]map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}},
+		},
+		{
+			map[string]map[string]mid{"1": {"foo": mid{Tst: TstX{A: "e", B: "f"}}}, "2": {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, "3": {"foo": mid{Tst: TstX{A: "c", B: "d"}}}},
+			"foo.Tst.TstRv",
+			"asc",
+			[]map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}},
+		},
+		// interface slice with missing elements
+		{
+			[]any{
+				map[any]any{"Title": "Foo", "Weight": 10},
+				map[any]any{"Title": "Bar"},
+				map[any]any{"Title": "Zap", "Weight": 5},
+			},
+			"Weight",
+			"asc",
+			[]any{
+				map[any]any{"Title": "Bar"},
+				map[any]any{"Title": "Zap", "Weight": 5},
+				map[any]any{"Title": "Foo", "Weight": 10},
+			},
+		},
+		// test boolean values
+		{[]bool{false, true, false}, "value", "asc", []bool{false, false, true}},
+		{[]bool{false, true, false}, "value", "desc", []bool{true, false, false}},
+		// test error cases
+		{(*[]TstX)(nil), nil, "asc", false},
+		{TstX{A: "a", B: "b"}, nil, "asc", false},
+		{
+			[]map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}},
+			"foo.NotAvailable",
+			"asc",
+			false,
+		},
+		{
+			map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}},
+			"foo.NotAvailable",
+			"asc",
+			false,
+		},
+		{nil, nil, "asc", false},
+	} {
+		t.Run(fmt.Sprintf("test%d", i), func(t *testing.T) {
+			var result any
+			var err error
+			if test.sortByField == nil {
+				result, err = ns.Sort(test.seq)
+			} else {
+				result, err = ns.Sort(test.seq, test.sortByField, test.sortAsc)
+			}
+
+			if b, ok := test.expect.(bool); ok && !b {
+				if err == nil {
+					t.Fatal("Sort didn't return an expected error")
+				}
+			} else {
+				if err != nil {
+					t.Fatalf("failed: %s", err)
+				}
+				if !reflect.DeepEqual(result, test.expect) {
+					t.Fatalf("Sort called on sequence: %#v | sortByField: `%v` | got\n%#v but expected\n%#v", test.seq, test.sortByField, result, test.expect)
+				}
+			}
+		})
+	}
+}
diff --git a/tpl/collections/symdiff.go b/tpl/collections/symdiff.go
new file mode 100644
index 000000000..8ecee3c4a
--- /dev/null
+++ b/tpl/collections/symdiff.go
@@ -0,0 +1,66 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// SymDiff returns the symmetric difference of s1 and s2.
+// Arguments must be either a slice or an array of comparable types.
+// Note the argument order: the result lists the elements of s1 that
+// are not in s2 followed by the elements of s2 that are not in s1,
+// converted to s1's element type (numeric values are normalized for
+// the membership test).
+func (ns *Namespace) SymDiff(s2, s1 any) (any, error) {
+	ids1, err := collectIdentities(s1)
+	if err != nil {
+		return nil, err
+	}
+	ids2, err := collectIdentities(s2)
+	if err != nil {
+		return nil, err
+	}
+
+	var slice reflect.Value
+	var sliceElemType reflect.Type
+
+	for i, s := range []any{s1, s2} {
+		v := reflect.ValueOf(s)
+
+		switch v.Kind() {
+		case reflect.Array, reflect.Slice:
+			if i == 0 {
+				// The result slice takes s1's type.
+				sliceType := v.Type()
+				sliceElemType = sliceType.Elem()
+				slice = reflect.MakeSlice(sliceType, 0, 0)
+			}
+
+			for i := 0; i < v.Len(); i++ {
+				ev, _ := indirectInterface(v.Index(i))
+				key := normalize(ev)
+
+				// Append if the key is not in their intersection.
+				if ids1[key] != ids2[key] {
+					v, err := convertValue(ev, sliceElemType)
+					if err != nil {
+						return nil, fmt.Errorf("symdiff: failed to convert value: %w", err)
+					}
+					slice = reflect.Append(slice, v)
+				}
+			}
+		default:
+			return nil, fmt.Errorf("arguments to symdiff must be slices or arrays")
+		}
+	}
+
+	return slice.Interface(), nil
+}
diff --git a/tpl/collections/symdiff_test.go b/tpl/collections/symdiff_test.go
new file mode 100644
index 000000000..e5494d5a0
--- /dev/null
+++ b/tpl/collections/symdiff_test.go
@@ -0,0 +1,80 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "reflect"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestSymDiff is a table-driven test of SymDiff covering strings,
+// nil-containing interface slices, mixed numeric types, structs and
+// pointers, plus the error cases (an expected of false means an error
+// is required). It also checks Complement's arity errors.
+func TestSymDiff(t *testing.T) {
+	t.Parallel()
+
+	c := qt.New(t)
+
+	ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+	s1 := []TstX{{A: "a"}, {A: "b"}}
+	s2 := []TstX{{A: "a"}, {A: "e"}}
+
+	xa, xb, xd, xe := &StructWithSlice{A: "a"}, &StructWithSlice{A: "b"}, &StructWithSlice{A: "d"}, &StructWithSlice{A: "e"}
+
+	sp1 := []*StructWithSlice{xa, xb, xd, xe}
+	sp2 := []*StructWithSlice{xb, xe}
+
+	for i, test := range []struct {
+		s1       any
+		s2       any
+		expected any
+	}{
+		{[]string{"a", "x", "b", "c"}, []string{"a", "b", "y", "c"}, []string{"x", "y"}},
+		{[]string{"a", "b", "c"}, []string{"a", "b", "c"}, []string{}},
+		{[]any{"a", "b", nil}, []any{"a"}, []any{"b", nil}},
+		{[]int{1, 2, 3}, []int{3, 4}, []int{1, 2, 4}},
+		{[]int{1, 2, 3}, []int64{3, 4}, []int{1, 2, 4}},
+		{s1, s2, []TstX{{A: "b"}, {A: "e"}}},
+		{sp1, sp2, []*StructWithSlice{xa, xd}},
+
+		// Errors
+		{"error", "error", false},
+		{[]int{1, 2, 3}, []string{"3", "4"}, false},
+	} {
+
+		errMsg := qt.Commentf("[%d]", i)
+
+		result, err := ns.SymDiff(test.s2, test.s1)
+
+		if b, ok := test.expected.(bool); ok && !b {
+			c.Assert(err, qt.Not(qt.IsNil), errMsg)
+			continue
+		}
+
+		c.Assert(err, qt.IsNil, errMsg)
+
+		if !reflect.DeepEqual(test.expected, result) {
+			t.Fatalf("%s got\n%T: %v\nexpected\n%T: %v", errMsg, result, result, test.expected, test.expected)
+		}
+	}
+
+	// Complement requires at least two arguments.
+	_, err := ns.Complement()
+	c.Assert(err, qt.Not(qt.IsNil))
+	_, err = ns.Complement([]string{"a", "b"})
+	c.Assert(err, qt.Not(qt.IsNil))
+}
diff --git a/tpl/collections/where.go b/tpl/collections/where.go
new file mode 100644
index 000000000..2cf7227a0
--- /dev/null
+++ b/tpl/collections/where.go
@@ -0,0 +1,515 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/maps"
+)
+
+// Where returns a filtered subset of a given data type.
+func (ns *Namespace) Where(seq, key any, args ...any) (any, error) {
+ seqv, isNil := indirect(reflect.ValueOf(seq))
+ if isNil {
+ return nil, errors.New("can't iterate over a nil value of type " + reflect.ValueOf(seq).Type().String())
+ }
+
+ mv, op, err := parseWhereArgs(args...)
+ if err != nil {
+ return nil, err
+ }
+
+ var path []string
+ kv := reflect.ValueOf(key)
+ if kv.Kind() == reflect.String {
+ path = strings.Split(strings.Trim(kv.String(), "."), ".")
+ }
+
+ switch seqv.Kind() {
+ case reflect.Array, reflect.Slice:
+ return ns.checkWhereArray(seqv, kv, mv, path, op)
+ case reflect.Map:
+ return ns.checkWhereMap(seqv, kv, mv, path, op)
+ default:
+ return nil, fmt.Errorf("can't iterate over %v", seq)
+ }
+}
+
+func (ns *Namespace) checkCondition(v, mv reflect.Value, op string) (bool, error) {
+ v, vIsNil := indirect(v)
+ if !v.IsValid() {
+ vIsNil = true
+ }
+
+ mv, mvIsNil := indirect(mv)
+ if !mv.IsValid() {
+ mvIsNil = true
+ }
+ if vIsNil || mvIsNil {
+ switch op {
+ case "", "=", "==", "eq":
+ return vIsNil == mvIsNil, nil
+ case "!=", "<>", "ne":
+ return vIsNil != mvIsNil, nil
+ }
+ return false, nil
+ }
+
+ if v.Kind() == reflect.Bool && mv.Kind() == reflect.Bool {
+ switch op {
+ case "", "=", "==", "eq":
+ return v.Bool() == mv.Bool(), nil
+ case "!=", "<>", "ne":
+ return v.Bool() != mv.Bool(), nil
+ }
+ return false, nil
+ }
+
+ var ivp, imvp *int64
+ var fvp, fmvp *float64
+ var svp, smvp *string
+ var slv, slmv any
+ var ima []int64
+ var fma []float64
+ var sma []string
+
+ if mv.Kind() == v.Kind() {
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ iv := v.Int()
+ ivp = &iv
+ imv := mv.Int()
+ imvp = &imv
+ case reflect.String:
+ sv := v.String()
+ svp = &sv
+ smv := mv.String()
+ smvp = &smv
+ case reflect.Float64:
+ fv := v.Float()
+ fvp = &fv
+ fmv := mv.Float()
+ fmvp = &fmv
+ case reflect.Struct:
+ if hreflect.IsTime(v.Type()) {
+ iv := ns.toTimeUnix(v)
+ ivp = &iv
+ imv := ns.toTimeUnix(mv)
+ imvp = &imv
+ }
+ case reflect.Array, reflect.Slice:
+ slv = v.Interface()
+ slmv = mv.Interface()
+ }
+ } else if isNumber(v.Kind()) && isNumber(mv.Kind()) {
+ fv, err := toFloat(v)
+ if err != nil {
+ return false, err
+ }
+ fvp = &fv
+ fmv, err := toFloat(mv)
+ if err != nil {
+ return false, err
+ }
+ fmvp = &fmv
+ } else {
+ if mv.Kind() != reflect.Array && mv.Kind() != reflect.Slice {
+ return false, nil
+ }
+
+ if mv.Len() == 0 {
+ return false, nil
+ }
+
+ if v.Kind() != reflect.Interface && mv.Type().Elem().Kind() != reflect.Interface && mv.Type().Elem() != v.Type() && v.Kind() != reflect.Array && v.Kind() != reflect.Slice {
+ return false, nil
+ }
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ iv := v.Int()
+ ivp = &iv
+ for i := 0; i < mv.Len(); i++ {
+ if anInt, err := toInt(mv.Index(i)); err == nil {
+ ima = append(ima, anInt)
+ }
+ }
+ case reflect.String:
+ sv := v.String()
+ svp = &sv
+ for i := 0; i < mv.Len(); i++ {
+ if aString, err := toString(mv.Index(i)); err == nil {
+ sma = append(sma, aString)
+ }
+ }
+ case reflect.Float64:
+ fv := v.Float()
+ fvp = &fv
+ for i := 0; i < mv.Len(); i++ {
+ if aFloat, err := toFloat(mv.Index(i)); err == nil {
+ fma = append(fma, aFloat)
+ }
+ }
+ case reflect.Struct:
+ if hreflect.IsTime(v.Type()) {
+ iv := ns.toTimeUnix(v)
+ ivp = &iv
+ for i := 0; i < mv.Len(); i++ {
+ ima = append(ima, ns.toTimeUnix(mv.Index(i)))
+ }
+ }
+ case reflect.Array, reflect.Slice:
+ slv = v.Interface()
+ slmv = mv.Interface()
+ }
+ }
+
+ switch op {
+ case "", "=", "==", "eq":
+ switch {
+ case ivp != nil && imvp != nil:
+ return *ivp == *imvp, nil
+ case svp != nil && smvp != nil:
+ return *svp == *smvp, nil
+ case fvp != nil && fmvp != nil:
+ return *fvp == *fmvp, nil
+ }
+ case "!=", "<>", "ne":
+ switch {
+ case ivp != nil && imvp != nil:
+ return *ivp != *imvp, nil
+ case svp != nil && smvp != nil:
+ return *svp != *smvp, nil
+ case fvp != nil && fmvp != nil:
+ return *fvp != *fmvp, nil
+ }
+ case ">=", "ge":
+ switch {
+ case ivp != nil && imvp != nil:
+ return *ivp >= *imvp, nil
+ case svp != nil && smvp != nil:
+ return *svp >= *smvp, nil
+ case fvp != nil && fmvp != nil:
+ return *fvp >= *fmvp, nil
+ }
+ case ">", "gt":
+ switch {
+ case ivp != nil && imvp != nil:
+ return *ivp > *imvp, nil
+ case svp != nil && smvp != nil:
+ return *svp > *smvp, nil
+ case fvp != nil && fmvp != nil:
+ return *fvp > *fmvp, nil
+ }
+ case "<=", "le":
+ switch {
+ case ivp != nil && imvp != nil:
+ return *ivp <= *imvp, nil
+ case svp != nil && smvp != nil:
+ return *svp <= *smvp, nil
+ case fvp != nil && fmvp != nil:
+ return *fvp <= *fmvp, nil
+ }
+ case "<", "lt":
+ switch {
+ case ivp != nil && imvp != nil:
+ return *ivp < *imvp, nil
+ case svp != nil && smvp != nil:
+ return *svp < *smvp, nil
+ case fvp != nil && fmvp != nil:
+ return *fvp < *fmvp, nil
+ }
+ case "in", "not in":
+ var r bool
+ switch {
+ case ivp != nil && len(ima) > 0:
+ r, _ = ns.In(ima, *ivp)
+ case fvp != nil && len(fma) > 0:
+ r, _ = ns.In(fma, *fvp)
+ case svp != nil:
+ if len(sma) > 0 {
+ r, _ = ns.In(sma, *svp)
+ } else if smvp != nil {
+ r, _ = ns.In(*smvp, *svp)
+ }
+ default:
+ return false, nil
+ }
+ if op == "not in" {
+ return !r, nil
+ }
+ return r, nil
+ case "intersect":
+ r, err := ns.Intersect(slv, slmv)
+ if err != nil {
+ return false, err
+ }
+
+ if reflect.TypeOf(r).Kind() == reflect.Slice {
+ s := reflect.ValueOf(r)
+
+ if s.Len() > 0 {
+ return true, nil
+ }
+ return false, nil
+ }
+ return false, errors.New("invalid intersect values")
+ default:
+ return false, errors.New("no such operator")
+ }
+ return false, nil
+}
+
+func evaluateSubElem(obj reflect.Value, elemName string) (reflect.Value, error) {
+ if !obj.IsValid() {
+ return zero, errors.New("can't evaluate an invalid value")
+ }
+
+ typ := obj.Type()
+ obj, isNil := indirect(obj)
+
+ if obj.Kind() == reflect.Interface {
+ // If obj is an interface, we need to inspect the value it contains
+ // to see the full set of methods and fields.
+ // Indirect returns the value that it points to, which is what's needed
+ // below to be able to reflect on its fields.
+ obj = reflect.Indirect(obj.Elem())
+ }
+
+ // first, check whether obj has a method. In this case, obj is
+ // a struct or its pointer. If obj is a struct,
+ // to check all T and *T method, use obj pointer type Value
+ objPtr := obj
+ if objPtr.Kind() != reflect.Interface && objPtr.CanAddr() {
+ objPtr = objPtr.Addr()
+ }
+
+ index := hreflect.GetMethodIndexByName(objPtr.Type(), elemName)
+ if index != -1 {
+ mt := objPtr.Type().Method(index)
+ switch {
+ case mt.PkgPath != "":
+ return zero, fmt.Errorf("%s is an unexported method of type %s", elemName, typ)
+ case mt.Type.NumIn() > 1:
+ return zero, fmt.Errorf("%s is a method of type %s but requires more than 1 parameter", elemName, typ)
+ case mt.Type.NumOut() == 0:
+ return zero, fmt.Errorf("%s is a method of type %s but returns no output", elemName, typ)
+ case mt.Type.NumOut() > 2:
+ return zero, fmt.Errorf("%s is a method of type %s but returns more than 2 outputs", elemName, typ)
+ case mt.Type.NumOut() == 1 && mt.Type.Out(0).Implements(errorType):
+ return zero, fmt.Errorf("%s is a method of type %s but only returns an error type", elemName, typ)
+ case mt.Type.NumOut() == 2 && !mt.Type.Out(1).Implements(errorType):
+ return zero, fmt.Errorf("%s is a method of type %s returning two values but the second value is not an error type", elemName, typ)
+ }
+ res := objPtr.Method(mt.Index).Call([]reflect.Value{})
+ if len(res) == 2 && !res[1].IsNil() {
+ return zero, fmt.Errorf("error at calling a method %s of type %s: %s", elemName, typ, res[1].Interface().(error))
+ }
+ return res[0], nil
+ }
+
+ // elemName isn't a method so next start to check whether it is
+ // a struct field or a map value. In both cases, it mustn't be
+ // a nil value
+ if isNil {
+ return zero, fmt.Errorf("can't evaluate a nil pointer of type %s by a struct field or map key name %s", typ, elemName)
+ }
+ switch obj.Kind() {
+ case reflect.Struct:
+ ft, ok := obj.Type().FieldByName(elemName)
+ if ok {
+ if ft.PkgPath != "" && !ft.Anonymous {
+ return zero, fmt.Errorf("%s is an unexported field of struct type %s", elemName, typ)
+ }
+ return obj.FieldByIndex(ft.Index), nil
+ }
+ return zero, fmt.Errorf("%s isn't a field of struct type %s", elemName, typ)
+ case reflect.Map:
+ kv := reflect.ValueOf(elemName)
+ if kv.Type().AssignableTo(obj.Type().Key()) {
+ return obj.MapIndex(kv), nil
+ }
+ return zero, fmt.Errorf("%s isn't a key of map type %s", elemName, typ)
+ }
+ return zero, fmt.Errorf("%s is neither a struct field, a method nor a map element of type %s", elemName, typ)
+}
+
+// parseWhereArgs parses the end arguments to the where function. Return a
+// match value and an operator, if one is defined.
+func parseWhereArgs(args ...any) (mv reflect.Value, op string, err error) {
+ switch len(args) {
+ case 1:
+ mv = reflect.ValueOf(args[0])
+ case 2:
+ var ok bool
+ if op, ok = args[0].(string); !ok {
+ err = errors.New("operator argument must be string type")
+ return
+ }
+ op = strings.TrimSpace(strings.ToLower(op))
+ mv = reflect.ValueOf(args[1])
+ default:
+ err = errors.New("can't evaluate the array by no match argument or more than or equal to two arguments")
+ }
+ return
+}
+
+// checkWhereArray handles the where-matching logic when the seqv value is an
+// Array or Slice.
+func (ns *Namespace) checkWhereArray(seqv, kv, mv reflect.Value, path []string, op string) (any, error) {
+ rv := reflect.MakeSlice(seqv.Type(), 0, 0)
+
+ for i := 0; i < seqv.Len(); i++ {
+ var vvv reflect.Value
+ rvv := seqv.Index(i)
+
+ if kv.Kind() == reflect.String {
+ if params, ok := rvv.Interface().(maps.Params); ok {
+ vvv = reflect.ValueOf(params.Get(path...))
+ } else {
+ vvv = rvv
+ for i, elemName := range path {
+ var err error
+ vvv, err = evaluateSubElem(vvv, elemName)
+
+ if err != nil {
+ continue
+ }
+
+ if i < len(path)-1 && vvv.IsValid() {
+ if params, ok := vvv.Interface().(maps.Params); ok {
+ // The current path element is the map itself, .Params.
+ vvv = reflect.ValueOf(params.Get(path[i+1:]...))
+ break
+ }
+ }
+ }
+ }
+ } else {
+ vv, _ := indirect(rvv)
+ if vv.Kind() == reflect.Map && kv.Type().AssignableTo(vv.Type().Key()) {
+ vvv = vv.MapIndex(kv)
+ }
+ }
+
+ if ok, err := ns.checkCondition(vvv, mv, op); ok {
+ rv = reflect.Append(rv, rvv)
+ } else if err != nil {
+ return nil, err
+ }
+ }
+ return rv.Interface(), nil
+}
+
+// checkWhereMap handles the where-matching logic when the seqv value is a Map.
+func (ns *Namespace) checkWhereMap(seqv, kv, mv reflect.Value, path []string, op string) (any, error) {
+ rv := reflect.MakeMap(seqv.Type())
+ keys := seqv.MapKeys()
+ for _, k := range keys {
+ elemv := seqv.MapIndex(k)
+ switch elemv.Kind() {
+ case reflect.Array, reflect.Slice:
+ r, err := ns.checkWhereArray(elemv, kv, mv, path, op)
+ if err != nil {
+ return nil, err
+ }
+
+ switch rr := reflect.ValueOf(r); rr.Kind() {
+ case reflect.Slice:
+ if rr.Len() > 0 {
+ rv.SetMapIndex(k, elemv)
+ }
+ }
+ case reflect.Interface:
+ elemvv, isNil := indirect(elemv)
+ if isNil {
+ continue
+ }
+
+ switch elemvv.Kind() {
+ case reflect.Array, reflect.Slice:
+ r, err := ns.checkWhereArray(elemvv, kv, mv, path, op)
+ if err != nil {
+ return nil, err
+ }
+
+ switch rr := reflect.ValueOf(r); rr.Kind() {
+ case reflect.Slice:
+ if rr.Len() > 0 {
+ rv.SetMapIndex(k, elemv)
+ }
+ }
+ }
+ }
+ }
+ return rv.Interface(), nil
+}
+
+// toFloat returns the float value if possible.
+func toFloat(v reflect.Value) (float64, error) {
+ switch v.Kind() {
+ case reflect.Float32, reflect.Float64:
+ return v.Float(), nil
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Convert(reflect.TypeOf(float64(0))).Float(), nil
+ case reflect.Interface:
+ return toFloat(v.Elem())
+ }
+ return -1, errors.New("unable to convert value to float")
+}
+
+// toInt returns the int value if possible, -1 if not.
+// TODO(bep) consolidate all these reflect funcs.
+func toInt(v reflect.Value) (int64, error) {
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int(), nil
+ case reflect.Interface:
+ return toInt(v.Elem())
+ }
+ return -1, errors.New("unable to convert value to int")
+}
+
+func toUint(v reflect.Value) (uint64, error) {
+ switch v.Kind() {
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return v.Uint(), nil
+ case reflect.Interface:
+ return toUint(v.Elem())
+ }
+ return 0, errors.New("unable to convert value to uint")
+}
+
+// toString returns the string value if possible, "" if not.
+func toString(v reflect.Value) (string, error) {
+ switch v.Kind() {
+ case reflect.String:
+ return v.String(), nil
+ case reflect.Interface:
+ return toString(v.Elem())
+ }
+ return "", errors.New("unable to convert value to string")
+}
+
+func (ns *Namespace) toTimeUnix(v reflect.Value) int64 {
+ t, ok := hreflect.AsTime(v, ns.loc)
+ if !ok {
+ panic("coding error: argument must be time.Time type reflect Value")
+ }
+ return t.Unix()
+}
diff --git a/tpl/collections/where_test.go b/tpl/collections/where_test.go
new file mode 100644
index 000000000..9a65de3d5
--- /dev/null
+++ b/tpl/collections/where_test.go
@@ -0,0 +1,864 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package collections
+
+import (
+ "fmt"
+ "html/template"
+ "reflect"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
+func TestWhere(t *testing.T) {
+ t.Parallel()
+
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ type Mid struct {
+ Tst TstX
+ }
+
+ d1 := time.Now()
+ d2 := d1.Add(1 * time.Hour)
+ d3 := d2.Add(1 * time.Hour)
+ d4 := d3.Add(1 * time.Hour)
+ d5 := d4.Add(1 * time.Hour)
+ d6 := d5.Add(1 * time.Hour)
+
+ type testt struct {
+ seq any
+ key any
+ op string
+ match any
+ expect any
+ }
+
+ createTestVariants := func(test testt) []testt {
+ testVariants := []testt{test}
+ if islice := ToTstXIs(test.seq); islice != nil {
+ variant := test
+ variant.seq = islice
+ expect := ToTstXIs(test.expect)
+ if expect != nil {
+ variant.expect = expect
+ }
+ testVariants = append(testVariants, variant)
+ }
+
+ return testVariants
+ }
+
+ for i, test := range []testt{
+ {
+ seq: []map[int]string{
+ {1: "a", 2: "m"}, {1: "c", 2: "d"}, {1: "e", 3: "m"},
+ },
+ key: 2, match: "m",
+ expect: []map[int]string{
+ {1: "a", 2: "m"},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4,
+ expect: []map[string]int{
+ {"a": 3, "b": 4},
+ },
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4.0,
+ expect: []map[string]float64{{"a": 3, "b": 4}},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4.0, op: "!=",
+ expect: []map[string]float64{{"a": 1, "b": 2}, {"a": 5, "x": 4}},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4.0, op: "<",
+ expect: []map[string]float64{{"a": 1, "b": 2}},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4, op: "<",
+ expect: []map[string]float64{{"a": 1, "b": 2}},
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4.0, op: "<",
+ expect: []map[string]int{{"a": 1, "b": 2}},
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4.2, op: "<",
+ expect: []map[string]int{{"a": 1, "b": 2}, {"a": 3, "b": 4}},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 4.0, op: "<=",
+ expect: []map[string]float64{{"a": 1, "b": 2}, {"a": 3, "b": 4}},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 3}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 2.0, op: ">",
+ expect: []map[string]float64{{"a": 3, "b": 3}},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 3}, {"a": 5, "x": 4},
+ },
+ key: "b", match: 2.0, op: ">=",
+ expect: []map[string]float64{{"a": 1, "b": 2}, {"a": 3, "b": 3}},
+ },
+ // Issue #8353
+ // String type mismatch.
+ {
+ seq: []map[string]any{
+ {"a": "1", "b": "2"}, {"a": "3", "b": template.HTML("4")}, {"a": "5", "x": "4"},
+ },
+ key: "b", match: "4",
+ expect: []map[string]any{
+ {"a": "3", "b": template.HTML("4")},
+ },
+ },
+ {
+ seq: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"},
+ },
+ key: "B", match: "f",
+ expect: []TstX{
+ {A: "e", B: "f"},
+ },
+ },
+ {
+ seq: []*map[int]string{
+ {1: "a", 2: "m"}, {1: "c", 2: "d"}, {1: "e", 3: "m"},
+ },
+ key: 2, match: "m",
+ expect: []*map[int]string{
+ {1: "a", 2: "m"},
+ },
+ },
+ // Case insensitive maps.Params
+ // Slice of structs
+ {
+ seq: []TstParams{{params: maps.Params{"i": 0, "color": "indigo"}}, {params: maps.Params{"i": 1, "color": "blue"}}, {params: maps.Params{"i": 2, "color": "green"}}, {params: maps.Params{"i": 3, "color": "blue"}}},
+ key: ".Params.COLOR", match: "blue",
+ expect: []TstParams{{params: maps.Params{"i": 1, "color": "blue"}}, {params: maps.Params{"i": 3, "color": "blue"}}},
+ },
+ {
+ seq: []TstParams{{params: maps.Params{"nested": map[string]any{"color": "indigo"}}}, {params: maps.Params{"nested": map[string]any{"color": "blue"}}}},
+ key: ".Params.NEsTED.COLOR", match: "blue",
+ expect: []TstParams{{params: maps.Params{"nested": map[string]any{"color": "blue"}}}},
+ },
+ {
+ seq: []TstParams{{params: maps.Params{"i": 0, "color": "indigo"}}, {params: maps.Params{"i": 1, "color": "blue"}}, {params: maps.Params{"i": 2, "color": "green"}}, {params: maps.Params{"i": 3, "color": "blue"}}},
+ key: ".Params", match: "blue",
+ expect: []TstParams{},
+ },
+ // Slice of maps
+ {
+ seq: []maps.Params{
+ {"a": "a1", "b": "b1"}, {"a": "a2", "b": "b2"},
+ },
+ key: "B", match: "b2",
+ expect: []maps.Params{
+ {"a": "a2", "b": "b2"},
+ },
+ },
+ {
+ seq: []maps.Params{
+ {
+ "a": map[string]any{
+ "b": "b1",
+ },
+ },
+ {
+ "a": map[string]any{
+ "b": "b2",
+ },
+ },
+ },
+ key: "A.B", match: "b2",
+ expect: []maps.Params{
+ {
+ "a": map[string]any{
+ "b": "b2",
+ },
+ },
+ },
+ },
+ {
+ seq: []*TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"},
+ },
+ key: "B", match: "f",
+ expect: []*TstX{
+ {A: "e", B: "f"},
+ },
+ },
+ {
+ seq: []*TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "c"},
+ },
+ key: "TstRp", match: "rc",
+ expect: []*TstX{
+ {A: "c", B: "d"},
+ },
+ },
+ {
+ seq: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "c"},
+ },
+ key: "TstRv", match: "rc",
+ expect: []TstX{
+ {A: "e", B: "c"},
+ },
+ },
+ {
+ seq: []map[string]TstX{
+ {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}},
+ },
+ key: "foo.B", match: "d",
+ expect: []map[string]TstX{
+ {"foo": TstX{A: "c", B: "d"}},
+ },
+ },
+ {
+ seq: []map[string]TstX{
+ {"baz": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}},
+ },
+ key: "foo.B", match: "d",
+ expect: []map[string]TstX{
+ {"foo": TstX{A: "c", B: "d"}},
+ },
+ },
+ {
+ seq: []map[string]TstX{
+ {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}},
+ },
+ key: ".foo.B", match: "d",
+ expect: []map[string]TstX{
+ {"foo": TstX{A: "c", B: "d"}},
+ },
+ },
+ {
+ seq: []map[string]TstX{
+ {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}},
+ },
+ key: "foo.TstRv", match: "rd",
+ expect: []map[string]TstX{
+ {"foo": TstX{A: "c", B: "d"}},
+ },
+ },
+ {
+ seq: []map[string]*TstX{
+ {"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}, {"foo": &TstX{A: "e", B: "f"}},
+ },
+ key: "foo.TstRp", match: "rc",
+ expect: []map[string]*TstX{
+ {"foo": &TstX{A: "c", B: "d"}},
+ },
+ },
+ {
+ seq: []TstXIHolder{
+ {&TstX{A: "a", B: "b"}}, {&TstX{A: "c", B: "d"}}, {&TstX{A: "e", B: "f"}},
+ },
+ key: "XI.TstRp", match: "rc",
+ expect: []TstXIHolder{
+ {&TstX{A: "c", B: "d"}},
+ },
+ },
+ {
+ seq: []TstXIHolder{
+ {&TstX{A: "a", B: "b"}}, {&TstX{A: "c", B: "d"}}, {&TstX{A: "e", B: "f"}},
+ },
+ key: "XI.A", match: "e",
+ expect: []TstXIHolder{
+ {&TstX{A: "e", B: "f"}},
+ },
+ },
+ {
+ seq: []map[string]Mid{
+ {"foo": Mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": Mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": Mid{Tst: TstX{A: "e", B: "f"}}},
+ },
+ key: "foo.Tst.B", match: "d",
+ expect: []map[string]Mid{
+ {"foo": Mid{Tst: TstX{A: "c", B: "d"}}},
+ },
+ },
+ {
+ seq: []map[string]Mid{
+ {"foo": Mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": Mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": Mid{Tst: TstX{A: "e", B: "f"}}},
+ },
+ key: "foo.Tst.TstRv", match: "rd",
+ expect: []map[string]Mid{
+ {"foo": Mid{Tst: TstX{A: "c", B: "d"}}},
+ },
+ },
+ {
+ seq: []map[string]*Mid{
+ {"foo": &Mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": &Mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": &Mid{Tst: TstX{A: "e", B: "f"}}},
+ },
+ key: "foo.Tst.TstRp", match: "rc",
+ expect: []map[string]*Mid{
+ {"foo": &Mid{Tst: TstX{A: "c", B: "d"}}},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ key: "b", op: ">", match: 3,
+ expect: []map[string]int{
+ {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ key: "b", op: ">", match: 3.0,
+ expect: []map[string]float64{
+ {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ },
+ {
+ seq: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"},
+ },
+ key: "B", op: "!=", match: "f",
+ expect: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "in", match: []int{3, 4, 5},
+ expect: []map[string]int{
+ {"a": 3, "b": 4},
+ },
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "in", match: []float64{3, 4, 5},
+ expect: []map[string]float64{
+ {"a": 3, "b": 4},
+ },
+ },
+ {
+ seq: []map[string][]string{
+ {"a": []string{"A", "B", "C"}, "b": []string{"D", "E", "F"}}, {"a": []string{"G", "H", "I"}, "b": []string{"J", "K", "L"}}, {"a": []string{"M", "N", "O"}, "b": []string{"P", "Q", "R"}},
+ },
+ key: "b", op: "intersect", match: []string{"D", "P", "Q"},
+ expect: []map[string][]string{
+ {"a": []string{"A", "B", "C"}, "b": []string{"D", "E", "F"}}, {"a": []string{"M", "N", "O"}, "b": []string{"P", "Q", "R"}},
+ },
+ },
+ {
+ seq: []map[string][]int{
+ {"a": []int{1, 2, 3}, "b": []int{4, 5, 6}}, {"a": []int{7, 8, 9}, "b": []int{10, 11, 12}}, {"a": []int{13, 14, 15}, "b": []int{16, 17, 18}},
+ },
+ key: "b", op: "intersect", match: []int{4, 10, 12},
+ expect: []map[string][]int{
+ {"a": []int{1, 2, 3}, "b": []int{4, 5, 6}}, {"a": []int{7, 8, 9}, "b": []int{10, 11, 12}},
+ },
+ },
+ {
+ seq: []map[string][]int8{
+ {"a": []int8{1, 2, 3}, "b": []int8{4, 5, 6}}, {"a": []int8{7, 8, 9}, "b": []int8{10, 11, 12}}, {"a": []int8{13, 14, 15}, "b": []int8{16, 17, 18}},
+ },
+ key: "b", op: "intersect", match: []int8{4, 10, 12},
+ expect: []map[string][]int8{
+ {"a": []int8{1, 2, 3}, "b": []int8{4, 5, 6}}, {"a": []int8{7, 8, 9}, "b": []int8{10, 11, 12}},
+ },
+ },
+ {
+ seq: []map[string][]int16{
+ {"a": []int16{1, 2, 3}, "b": []int16{4, 5, 6}}, {"a": []int16{7, 8, 9}, "b": []int16{10, 11, 12}}, {"a": []int16{13, 14, 15}, "b": []int16{16, 17, 18}},
+ },
+ key: "b", op: "intersect", match: []int16{4, 10, 12},
+ expect: []map[string][]int16{
+ {"a": []int16{1, 2, 3}, "b": []int16{4, 5, 6}}, {"a": []int16{7, 8, 9}, "b": []int16{10, 11, 12}},
+ },
+ },
+ {
+ seq: []map[string][]int32{
+ {"a": []int32{1, 2, 3}, "b": []int32{4, 5, 6}}, {"a": []int32{7, 8, 9}, "b": []int32{10, 11, 12}}, {"a": []int32{13, 14, 15}, "b": []int32{16, 17, 18}},
+ },
+ key: "b", op: "intersect", match: []int32{4, 10, 12},
+ expect: []map[string][]int32{
+ {"a": []int32{1, 2, 3}, "b": []int32{4, 5, 6}}, {"a": []int32{7, 8, 9}, "b": []int32{10, 11, 12}},
+ },
+ },
+ {
+ seq: []map[string][]int64{
+ {"a": []int64{1, 2, 3}, "b": []int64{4, 5, 6}}, {"a": []int64{7, 8, 9}, "b": []int64{10, 11, 12}}, {"a": []int64{13, 14, 15}, "b": []int64{16, 17, 18}},
+ },
+ key: "b", op: "intersect", match: []int64{4, 10, 12},
+ expect: []map[string][]int64{
+ {"a": []int64{1, 2, 3}, "b": []int64{4, 5, 6}}, {"a": []int64{7, 8, 9}, "b": []int64{10, 11, 12}},
+ },
+ },
+ {
+ seq: []map[string][]float32{
+ {"a": []float32{1.0, 2.0, 3.0}, "b": []float32{4.0, 5.0, 6.0}}, {"a": []float32{7.0, 8.0, 9.0}, "b": []float32{10.0, 11.0, 12.0}}, {"a": []float32{13.0, 14.0, 15.0}, "b": []float32{16.0, 17.0, 18.0}},
+ },
+ key: "b", op: "intersect", match: []float32{4, 10, 12},
+ expect: []map[string][]float32{
+ {"a": []float32{1.0, 2.0, 3.0}, "b": []float32{4.0, 5.0, 6.0}}, {"a": []float32{7.0, 8.0, 9.0}, "b": []float32{10.0, 11.0, 12.0}},
+ },
+ },
+ {
+ seq: []map[string][]float64{
+ {"a": []float64{1.0, 2.0, 3.0}, "b": []float64{4.0, 5.0, 6.0}}, {"a": []float64{7.0, 8.0, 9.0}, "b": []float64{10.0, 11.0, 12.0}}, {"a": []float64{13.0, 14.0, 15.0}, "b": []float64{16.0, 17.0, 18.0}},
+ },
+ key: "b", op: "intersect", match: []float64{4, 10, 12},
+ expect: []map[string][]float64{
+ {"a": []float64{1.0, 2.0, 3.0}, "b": []float64{4.0, 5.0, 6.0}}, {"a": []float64{7.0, 8.0, 9.0}, "b": []float64{10.0, 11.0, 12.0}},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "in", match: ns.Slice(3, 4, 5),
+ expect: []map[string]int{
+ {"a": 3, "b": 4},
+ },
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "in", match: ns.Slice(3.0, 4.0, 5.0),
+ expect: []map[string]float64{
+ {"a": 3, "b": 4},
+ },
+ },
+ {
+ seq: []map[string]time.Time{
+ {"a": d1, "b": d2}, {"a": d3, "b": d4}, {"a": d5, "b": d6},
+ },
+ key: "b", op: "in", match: ns.Slice(d3, d4, d5),
+ expect: []map[string]time.Time{
+ {"a": d3, "b": d4},
+ },
+ },
+ {
+ seq: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"},
+ },
+ key: "B", op: "not in", match: []string{"c", "d", "e"},
+ expect: []TstX{
+ {A: "a", B: "b"}, {A: "e", B: "f"},
+ },
+ },
+ {
+ seq: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"},
+ },
+ key: "B", op: "not in", match: ns.Slice("c", t, "d", "e"),
+ expect: []TstX{
+ {A: "a", B: "b"}, {A: "e", B: "f"},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "", match: nil,
+ expect: []map[string]int{
+ {"a": 3},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "!=", match: nil,
+ expect: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 5, "b": 6},
+ },
+ },
+ {
+ seq: []map[string]int{
+ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6},
+ },
+ key: "b", op: ">", match: nil,
+ expect: []map[string]int{},
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "", match: nil,
+ expect: []map[string]float64{
+ {"a": 3},
+ },
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6},
+ },
+ key: "b", op: "!=", match: nil,
+ expect: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 5, "b": 6},
+ },
+ },
+ {
+ seq: []map[string]float64{
+ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6},
+ },
+ key: "b", op: ">", match: nil,
+ expect: []map[string]float64{},
+ },
+ {
+ seq: []map[string]bool{
+ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false},
+ },
+ key: "b", op: "", match: true,
+ expect: []map[string]bool{
+ {"c": true, "b": true},
+ },
+ },
+ {
+ seq: []map[string]bool{
+ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false},
+ },
+ key: "b", op: "!=", match: true,
+ expect: []map[string]bool{
+ {"a": true, "b": false}, {"d": true, "b": false},
+ },
+ },
+ {
+ seq: []map[string]bool{
+ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false},
+ },
+ key: "b", op: ">", match: false,
+ expect: []map[string]bool{},
+ },
+ {
+ seq: []map[string]bool{
+ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false},
+ },
+ key: "b.z", match: false,
+ expect: []map[string]bool{},
+ },
+ {seq: (*[]TstX)(nil), key: "A", match: "a", expect: false},
+ {seq: TstX{A: "a", B: "b"}, key: "A", match: "a", expect: false},
+ {seq: []map[string]*TstX{{"foo": nil}}, key: "foo.B", match: "d", expect: []map[string]*TstX{}},
+ {seq: []map[string]*TstX{{"foo": nil}}, key: "foo.B.Z", match: "d", expect: []map[string]*TstX{}},
+ {
+ seq: []TstX{
+ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"},
+ },
+ key: "B", op: "op", match: "f",
+ expect: false,
+ },
+ {
+ seq: map[string]any{
+ "foo": []any{map[any]any{"a": 1, "b": 2}},
+ "bar": []any{map[any]any{"a": 3, "b": 4}},
+ "zap": []any{map[any]any{"a": 5, "b": 6}},
+ },
+ key: "b", op: "in", match: ns.Slice(3, 4, 5),
+ expect: map[string]any{
+ "bar": []any{map[any]any{"a": 3, "b": 4}},
+ },
+ },
+ {
+ seq: map[string]any{
+ "foo": []any{map[any]any{"a": 1, "b": 2}},
+ "bar": []any{map[any]any{"a": 3, "b": 4}},
+ "zap": []any{map[any]any{"a": 5, "b": 6}},
+ },
+ key: "b", op: ">", match: 3,
+ expect: map[string]any{
+ "bar": []any{map[any]any{"a": 3, "b": 4}},
+ "zap": []any{map[any]any{"a": 5, "b": 6}},
+ },
+ },
+ {
+ seq: map[string]any{
+ "foo": []any{maps.Params{"a": 1, "b": 2}},
+ "bar": []any{maps.Params{"a": 3, "b": 4}},
+ "zap": []any{maps.Params{"a": 5, "b": 6}},
+ },
+ key: "B", op: ">", match: 3,
+ expect: map[string]any{
+ "bar": []any{maps.Params{"a": 3, "b": 4}},
+ "zap": []any{maps.Params{"a": 5, "b": 6}},
+ },
+ },
+ } {
+
+ testVariants := createTestVariants(test)
+ for j, test := range testVariants {
+ name := fmt.Sprintf("%d/%d %T %s %s", i, j, test.seq, test.op, test.key)
+ name = strings.ReplaceAll(name, "[]", "slice-of-")
+ t.Run(name, func(t *testing.T) {
+ var results any
+ var err error
+
+ if len(test.op) > 0 {
+ results, err = ns.Where(test.seq, test.key, test.op, test.match)
+ } else {
+ results, err = ns.Where(test.seq, test.key, test.match)
+ }
+ if b, ok := test.expect.(bool); ok && !b {
+ if err == nil {
+ t.Fatalf("[%d] Where didn't return an expected error", i)
+ }
+ } else {
+ if err != nil {
+ t.Fatalf("[%d] failed: %s", i, err)
+ }
+ if !reflect.DeepEqual(results, test.expect) {
+ t.Fatalf("Where clause matching %v with %v in seq %v (%T),\ngot\n%v (%T) but expected\n%v (%T)", test.key, test.match, test.seq, test.seq, results, results, test.expect, test.expect)
+ }
+ }
+ })
+ }
+ }
+
+ var err error
+ _, err = ns.Where(map[string]int{"a": 1, "b": 2}, "a", []byte("="), 1)
+ if err == nil {
+ t.Errorf("Where called with none string op value didn't return an expected error")
+ }
+
+ _, err = ns.Where(map[string]int{"a": 1, "b": 2}, "a", []byte("="), 1, 2)
+ if err == nil {
+ t.Errorf("Where called with more than two variable arguments didn't return an expected error")
+ }
+
+ _, err = ns.Where(map[string]int{"a": 1, "b": 2}, "a")
+ if err == nil {
+ t.Errorf("Where called with no variable arguments didn't return an expected error")
+ }
+}
+
+func TestCheckCondition(t *testing.T) {
+ t.Parallel()
+
+ ns := New(&deps.Deps{Language: langs.NewDefaultLanguage(config.New())})
+
+ type expect struct {
+ result bool
+ isError bool
+ }
+
+ for i, test := range []struct {
+ value reflect.Value
+ match reflect.Value
+ op string
+ expect
+ }{
+ {reflect.ValueOf(123), reflect.ValueOf(123), "", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf("foo"), "", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ "",
+ expect{true, false},
+ },
+ {reflect.ValueOf(true), reflect.ValueOf(true), "", expect{true, false}},
+ {reflect.ValueOf(nil), reflect.ValueOf(nil), "", expect{true, false}},
+ {reflect.ValueOf(123), reflect.ValueOf(456), "!=", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf("bar"), "!=", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)),
+ "!=",
+ expect{true, false},
+ },
+ {reflect.ValueOf(true), reflect.ValueOf(false), "!=", expect{true, false}},
+ {reflect.ValueOf(123), reflect.ValueOf(nil), "!=", expect{true, false}},
+ {reflect.ValueOf(456), reflect.ValueOf(123), ">=", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf("bar"), ">=", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)),
+ ">=",
+ expect{true, false},
+ },
+ {reflect.ValueOf(456), reflect.ValueOf(123), ">", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf("bar"), ">", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)),
+ ">",
+ expect{true, false},
+ },
+ {reflect.ValueOf(123), reflect.ValueOf(456), "<=", expect{true, false}},
+ {reflect.ValueOf("bar"), reflect.ValueOf("foo"), "<=", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ "<=",
+ expect{true, false},
+ },
+ {reflect.ValueOf(123), reflect.ValueOf(456), "<", expect{true, false}},
+ {reflect.ValueOf("bar"), reflect.ValueOf("foo"), "<", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ "<",
+ expect{true, false},
+ },
+ {reflect.ValueOf(123), reflect.ValueOf([]int{123, 45, 678}), "in", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf([]string{"foo", "bar", "baz"}), "in", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf([]time.Time{
+ time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC),
+ time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC),
+ time.Date(2015, time.June, 26, 19, 18, 56, 12345, time.UTC),
+ }),
+ "in",
+ expect{true, false},
+ },
+ {reflect.ValueOf(123), reflect.ValueOf([]int{45, 678}), "not in", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf([]string{"bar", "baz"}), "not in", expect{true, false}},
+ {
+ reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)),
+ reflect.ValueOf([]time.Time{
+ time.Date(2015, time.February, 26, 19, 18, 56, 12345, time.UTC),
+ time.Date(2015, time.March, 26, 19, 18, 56, 12345, time.UTC),
+ time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC),
+ }),
+ "not in",
+ expect{true, false},
+ },
+ {reflect.ValueOf("foo"), reflect.ValueOf("bar-foo-baz"), "in", expect{true, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf("bar--baz"), "not in", expect{true, false}},
+ {reflect.Value{}, reflect.ValueOf("foo"), "", expect{false, false}},
+ {reflect.ValueOf("foo"), reflect.Value{}, "", expect{false, false}},
+ {reflect.ValueOf((*TstX)(nil)), reflect.ValueOf("foo"), "", expect{false, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf((*TstX)(nil)), "", expect{false, false}},
+ {reflect.ValueOf(true), reflect.ValueOf("foo"), "", expect{false, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf(true), "", expect{false, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf(map[int]string{}), "", expect{false, false}},
+ {reflect.ValueOf("foo"), reflect.ValueOf([]int{1, 2}), "", expect{false, false}},
+ {reflect.ValueOf((*TstX)(nil)), reflect.ValueOf((*TstX)(nil)), ">", expect{false, false}},
+ {reflect.ValueOf(true), reflect.ValueOf(false), ">", expect{false, false}},
+ {reflect.ValueOf(123), reflect.ValueOf([]int{}), "in", expect{false, false}},
+ {reflect.ValueOf(123), reflect.ValueOf(123), "op", expect{false, true}},
+
+ // Issue #3718
+ {reflect.ValueOf([]any{"a"}), reflect.ValueOf([]string{"a", "b"}), "intersect", expect{true, false}},
+ {reflect.ValueOf([]string{"a"}), reflect.ValueOf([]any{"a", "b"}), "intersect", expect{true, false}},
+ {reflect.ValueOf([]any{1, 2}), reflect.ValueOf([]int{1}), "intersect", expect{true, false}},
+ {reflect.ValueOf([]int{1}), reflect.ValueOf([]any{1, 2}), "intersect", expect{true, false}},
+ } {
+ result, err := ns.checkCondition(test.value, test.match, test.op)
+ if test.expect.isError {
+ if err == nil {
+ t.Errorf("[%d] checkCondition didn't return an expected error", i)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] failed: %s", i, err)
+ continue
+ }
+ if result != test.expect.result {
+ t.Errorf("[%d] check condition %v %s %v, got %v but expected %v", i, test.value, test.op, test.match, result, test.expect.result)
+ }
+ }
+ }
+}
+
+func TestEvaluateSubElem(t *testing.T) {
+ t.Parallel()
+ tstx := TstX{A: "foo", B: "bar"}
+ var inner struct {
+ S fmt.Stringer
+ }
+ inner.S = tstx
+ interfaceValue := reflect.ValueOf(&inner).Elem().Field(0)
+
+ for i, test := range []struct {
+ value reflect.Value
+ key string
+ expect any
+ }{
+ {reflect.ValueOf(tstx), "A", "foo"},
+ {reflect.ValueOf(&tstx), "TstRp", "rfoo"},
+ {reflect.ValueOf(tstx), "TstRv", "rbar"},
+ //{reflect.ValueOf(map[int]string{1: "foo", 2: "bar"}), 1, "foo"},
+ {reflect.ValueOf(map[string]string{"key1": "foo", "key2": "bar"}), "key1", "foo"},
+ {interfaceValue, "String", "A: foo, B: bar"},
+ {reflect.Value{}, "foo", false},
+ //{reflect.ValueOf(map[int]string{1: "foo", 2: "bar"}), 1.2, false},
+ {reflect.ValueOf(tstx), "unexported", false},
+ {reflect.ValueOf(tstx), "unexportedMethod", false},
+ {reflect.ValueOf(tstx), "MethodWithArg", false},
+ {reflect.ValueOf(tstx), "MethodReturnNothing", false},
+ {reflect.ValueOf(tstx), "MethodReturnErrorOnly", false},
+ {reflect.ValueOf(tstx), "MethodReturnTwoValues", false},
+ {reflect.ValueOf(tstx), "MethodReturnValueWithError", false},
+ {reflect.ValueOf((*TstX)(nil)), "A", false},
+ {reflect.ValueOf(tstx), "C", false},
+ {reflect.ValueOf(map[int]string{1: "foo", 2: "bar"}), "1", false},
+ {reflect.ValueOf([]string{"foo", "bar"}), "1", false},
+ } {
+ result, err := evaluateSubElem(test.value, test.key)
+ if b, ok := test.expect.(bool); ok && !b {
+ if err == nil {
+ t.Errorf("[%d] evaluateSubElem didn't return an expected error", i)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] failed: %s", i, err)
+ continue
+ }
+ if result.Kind() != reflect.String || result.String() != test.expect {
+ t.Errorf("[%d] evaluateSubElem with %v got %v but expected %v", i, test.key, result, test.expect)
+ }
+ }
+ }
+}
diff --git a/tpl/compare/compare.go b/tpl/compare/compare.go
new file mode 100644
index 000000000..0b2d065ab
--- /dev/null
+++ b/tpl/compare/compare.go
@@ -0,0 +1,352 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package compare provides template functions for comparing values.
+package compare
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "time"
+
+ "github.com/gohugoio/hugo/compare"
+ "github.com/gohugoio/hugo/langs"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/htime"
+ "github.com/gohugoio/hugo/common/types"
+)
+
+// New returns a new instance of the compare-namespaced template functions.
+func New(loc *time.Location, caseInsensitive bool) *Namespace {
+ return &Namespace{loc: loc, caseInsensitive: caseInsensitive}
+}
+
+// Namespace provides template functions for the "compare" namespace.
+type Namespace struct {
+ loc *time.Location
+	// When true, string comparisons are case-insensitive.
+ caseInsensitive bool
+}
+
+// Default checks whether a given value is set and returns a default value if it
+// is not. "Set" in this context means non-zero for numeric types and times;
+// non-zero length for strings, arrays, slices, and maps;
+// any boolean or struct value; or non-nil for any other types.
+func (*Namespace) Default(dflt any, given ...any) (any, error) {
+ // given is variadic because the following construct will not pass a piped
+ // argument when the key is missing: {{ index . "key" | default "foo" }}
+ // The Go template will complain that we got 1 argument when we expected 2.
+
+ if len(given) == 0 {
+ return dflt, nil
+ }
+ if len(given) != 1 {
+ return nil, fmt.Errorf("wrong number of args for default: want 2 got %d", len(given)+1)
+ }
+
+ g := reflect.ValueOf(given[0])
+ if !g.IsValid() {
+ return dflt, nil
+ }
+
+ set := false
+
+ switch g.Kind() {
+ case reflect.Bool:
+ set = true
+ case reflect.String, reflect.Array, reflect.Slice, reflect.Map:
+ set = g.Len() != 0
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ set = g.Int() != 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ set = g.Uint() != 0
+ case reflect.Float32, reflect.Float64:
+ set = g.Float() != 0
+ case reflect.Complex64, reflect.Complex128:
+ set = g.Complex() != 0
+ case reflect.Struct:
+ switch actual := given[0].(type) {
+ case time.Time:
+ set = !actual.IsZero()
+ default:
+ set = true
+ }
+ default:
+ set = !g.IsNil()
+ }
+
+ if set {
+ return given[0], nil
+ }
+
+ return dflt, nil
+}
+
+// Eq returns the boolean truth of arg1 == arg2 || arg1 == arg3 || arg1 == arg4.
+func (n *Namespace) Eq(first any, others ...any) bool {
+ if n.caseInsensitive {
+ panic("caseInsensitive not implemented for Eq")
+ }
+ n.checkComparisonArgCount(1, others...)
+ normalize := func(v any) any {
+ if types.IsNil(v) {
+ return nil
+ }
+
+ if at, ok := v.(htime.AsTimeProvider); ok {
+ return at.AsTime(n.loc)
+ }
+
+ vv := reflect.ValueOf(v)
+ switch vv.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return vv.Int()
+ case reflect.Float32, reflect.Float64:
+ return vv.Float()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return vv.Uint()
+ case reflect.String:
+ return vv.String()
+ default:
+ return v
+ }
+ }
+
+ normFirst := normalize(first)
+ for _, other := range others {
+ if e, ok := first.(compare.Eqer); ok {
+ if e.Eq(other) {
+ return true
+ }
+ continue
+ }
+
+ if e, ok := other.(compare.Eqer); ok {
+ if e.Eq(first) {
+ return true
+ }
+ continue
+ }
+
+ other = normalize(other)
+ if reflect.DeepEqual(normFirst, other) {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Ne returns the boolean truth of arg1 != arg2 && arg1 != arg3 && arg1 != arg4.
+func (n *Namespace) Ne(first any, others ...any) bool {
+ n.checkComparisonArgCount(1, others...)
+ for _, other := range others {
+ if n.Eq(first, other) {
+ return false
+ }
+ }
+ return true
+}
+
+// Ge returns the boolean truth of arg1 >= arg2 && arg1 >= arg3 && arg1 >= arg4.
+func (n *Namespace) Ge(first any, others ...any) bool {
+ n.checkComparisonArgCount(1, others...)
+ for _, other := range others {
+ left, right := n.compareGet(first, other)
+ if !(left >= right) {
+ return false
+ }
+ }
+ return true
+}
+
+// Gt returns the boolean truth of arg1 > arg2 && arg1 > arg3 && arg1 > arg4.
+func (n *Namespace) Gt(first any, others ...any) bool {
+ n.checkComparisonArgCount(1, others...)
+ for _, other := range others {
+ left, right := n.compareGet(first, other)
+ if !(left > right) {
+ return false
+ }
+ }
+ return true
+}
+
+// Le returns the boolean truth of arg1 <= arg2 && arg1 <= arg3 && arg1 <= arg4.
+func (n *Namespace) Le(first any, others ...any) bool {
+ n.checkComparisonArgCount(1, others...)
+ for _, other := range others {
+ left, right := n.compareGet(first, other)
+ if !(left <= right) {
+ return false
+ }
+ }
+ return true
+}
+
+// LtCollate returns the boolean truth of arg1 < arg2 && arg1 < arg3 && arg1 < arg4,
+// using the provided collator for string comparisons.
+// This is for internal use.
+func (n *Namespace) LtCollate(collator *langs.Collator, first any, others ...any) bool {
+ n.checkComparisonArgCount(1, others...)
+ for _, other := range others {
+ left, right := n.compareGetWithCollator(collator, first, other)
+ if !(left < right) {
+ return false
+ }
+ }
+ return true
+}
+
+// Lt returns the boolean truth of arg1 < arg2 && arg1 < arg3 && arg1 < arg4.
+func (n *Namespace) Lt(first any, others ...any) bool {
+ return n.LtCollate(nil, first, others...)
+}
+
+func (n *Namespace) checkComparisonArgCount(min int, others ...any) bool {
+ if len(others) < min {
+ panic("missing arguments for comparison")
+ }
+ return true
+}
+
+// Conditional can be used as a ternary operator.
+// It returns a if condition, else b.
+func (n *Namespace) Conditional(condition bool, a, b any) any {
+ if condition {
+ return a
+ }
+ return b
+}
+
+func (ns *Namespace) compareGet(a any, b any) (float64, float64) {
+ return ns.compareGetWithCollator(nil, a, b)
+}
+
+func (ns *Namespace) compareGetWithCollator(collator *langs.Collator, a any, b any) (float64, float64) {
+ if ac, ok := a.(compare.Comparer); ok {
+ c := ac.Compare(b)
+ if c < 0 {
+ return 1, 0
+ } else if c == 0 {
+ return 0, 0
+ } else {
+ return 0, 1
+ }
+ }
+
+ if bc, ok := b.(compare.Comparer); ok {
+ c := bc.Compare(a)
+ if c < 0 {
+ return 0, 1
+ } else if c == 0 {
+ return 0, 0
+ } else {
+ return 1, 0
+ }
+ }
+
+ var left, right float64
+ var leftStr, rightStr *string
+ av := reflect.ValueOf(a)
+
+ switch av.Kind() {
+ case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
+ left = float64(av.Len())
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ left = float64(av.Int())
+ case reflect.Float32, reflect.Float64:
+ left = av.Float()
+ case reflect.String:
+ var err error
+ left, err = strconv.ParseFloat(av.String(), 64)
+ if err != nil {
+ str := av.String()
+ leftStr = &str
+ }
+ case reflect.Struct:
+ if hreflect.IsTime(av.Type()) {
+ left = float64(ns.toTimeUnix(av))
+ }
+ case reflect.Bool:
+ left = 0
+ if av.Bool() {
+ left = 1
+ }
+ }
+
+ bv := reflect.ValueOf(b)
+
+ switch bv.Kind() {
+ case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice:
+ right = float64(bv.Len())
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ right = float64(bv.Int())
+ case reflect.Float32, reflect.Float64:
+ right = bv.Float()
+ case reflect.String:
+ var err error
+ right, err = strconv.ParseFloat(bv.String(), 64)
+ if err != nil {
+ str := bv.String()
+ rightStr = &str
+ }
+ case reflect.Struct:
+ if hreflect.IsTime(bv.Type()) {
+ right = float64(ns.toTimeUnix(bv))
+ }
+ case reflect.Bool:
+ right = 0
+ if bv.Bool() {
+ right = 1
+ }
+ }
+
+ if (ns.caseInsensitive || collator != nil) && leftStr != nil && rightStr != nil {
+ var c int
+ if collator != nil {
+ c = collator.CompareStrings(*leftStr, *rightStr)
+ } else {
+ c = compare.Strings(*leftStr, *rightStr)
+ }
+ if c < 0 {
+ return 0, 1
+ } else if c > 0 {
+ return 1, 0
+ } else {
+ return 0, 0
+ }
+ }
+
+ switch {
+ case leftStr == nil || rightStr == nil:
+ case *leftStr < *rightStr:
+ return 0, 1
+ case *leftStr > *rightStr:
+ return 1, 0
+ default:
+ return 0, 0
+ }
+
+ return left, right
+}
+
+func (ns *Namespace) toTimeUnix(v reflect.Value) int64 {
+ t, ok := hreflect.AsTime(v, ns.loc)
+ if !ok {
+ panic("coding error: argument must be time.Time type reflect Value")
+ }
+ return t.Unix()
+}
diff --git a/tpl/compare/compare_test.go b/tpl/compare/compare_test.go
new file mode 100644
index 000000000..ce2016b38
--- /dev/null
+++ b/tpl/compare/compare_test.go
@@ -0,0 +1,460 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package compare
+
+import (
+ "path"
+ "reflect"
+ "runtime"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/spf13/cast"
+)
+
+type T struct {
+ NonEmptyInterfaceNil I
+ NonEmptyInterfaceTypedNil I
+}
+
+type I interface {
+ Foo() string
+}
+
+func (t *T) Foo() string {
+ return "foo"
+}
+
+var testT = &T{
+ NonEmptyInterfaceTypedNil: (*T)(nil),
+}
+
+type (
+ tstEqerType1 string
+ tstEqerType2 string
+)
+
+func (t tstEqerType2) Eq(other any) bool {
+ return cast.ToString(t) == cast.ToString(other)
+}
+
+func (t tstEqerType2) String() string {
+ return string(t)
+}
+
+func (t tstEqerType1) Eq(other any) bool {
+ return cast.ToString(t) == cast.ToString(other)
+}
+
+func (t tstEqerType1) String() string {
+ return string(t)
+}
+
+type stringType string
+
+type tstCompareType int
+
+const (
+ tstEq tstCompareType = iota
+ tstNe
+ tstGt
+ tstGe
+ tstLt
+ tstLe
+)
+
+func tstIsEq(tp tstCompareType) bool { return tp == tstEq || tp == tstGe || tp == tstLe }
+func tstIsGt(tp tstCompareType) bool { return tp == tstGt || tp == tstGe }
+func tstIsLt(tp tstCompareType) bool { return tp == tstLt || tp == tstLe }
+
+func TestDefaultFunc(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ then := time.Now()
+ now := time.Now()
+ ns := New(time.UTC, false)
+
+ for i, test := range []struct {
+ dflt any
+ given any
+ expect any
+ }{
+ {true, false, false},
+ {"5", 0, "5"},
+
+ {"test1", "set", "set"},
+ {"test2", "", "test2"},
+ {"test3", nil, "test3"},
+
+ {[2]int{10, 20}, [2]int{1, 2}, [2]int{1, 2}},
+ {[2]int{10, 20}, [0]int{}, [2]int{10, 20}},
+ {[2]int{100, 200}, nil, [2]int{100, 200}},
+
+ {[]string{"one"}, []string{"uno"}, []string{"uno"}},
+ {[]string{"two"}, []string{}, []string{"two"}},
+ {[]string{"three"}, nil, []string{"three"}},
+
+ {map[string]int{"one": 1}, map[string]int{"uno": 1}, map[string]int{"uno": 1}},
+ {map[string]int{"one": 1}, map[string]int{}, map[string]int{"one": 1}},
+ {map[string]int{"two": 2}, nil, map[string]int{"two": 2}},
+
+ {10, 1, 1},
+ {10, 0, 10},
+ {20, nil, 20},
+
+ {float32(10), float32(1), float32(1)},
+ {float32(10), 0, float32(10)},
+ {float32(20), nil, float32(20)},
+
+ {complex(2, -2), complex(1, -1), complex(1, -1)},
+ {complex(2, -2), complex(0, 0), complex(2, -2)},
+ {complex(3, -3), nil, complex(3, -3)},
+
+ {struct{ f string }{f: "one"}, struct{}{}, struct{}{}},
+ {struct{ f string }{f: "two"}, nil, struct{ f string }{f: "two"}},
+
+ {then, now, now},
+ {then, time.Time{}, then},
+ } {
+
+ eq := qt.CmpEquals(hqt.DeepAllowUnexported(test.dflt))
+
+ errMsg := qt.Commentf("[%d] %v", i, test)
+
+ result, err := ns.Default(test.dflt, test.given)
+
+ c.Assert(err, qt.IsNil, errMsg)
+ c.Assert(result, eq, test.expect, errMsg)
+ }
+}
+
+func TestCompare(t *testing.T) {
+ t.Parallel()
+
+ n := New(time.UTC, false)
+
+ twoEq := func(a, b any) bool {
+ return n.Eq(a, b)
+ }
+
+ twoGt := func(a, b any) bool {
+ return n.Gt(a, b)
+ }
+
+ twoLt := func(a, b any) bool {
+ return n.Lt(a, b)
+ }
+
+ twoGe := func(a, b any) bool {
+ return n.Ge(a, b)
+ }
+
+ twoLe := func(a, b any) bool {
+ return n.Le(a, b)
+ }
+
+ twoNe := func(a, b any) bool {
+ return n.Ne(a, b)
+ }
+
+ for _, test := range []struct {
+ tstCompareType
+ funcUnderTest func(a, b any) bool
+ }{
+ {tstGt, twoGt},
+ {tstLt, twoLt},
+ {tstGe, twoGe},
+ {tstLe, twoLe},
+ {tstEq, twoEq},
+ {tstNe, twoNe},
+ } {
+ doTestCompare(t, test.tstCompareType, test.funcUnderTest)
+ }
+}
+
+func doTestCompare(t *testing.T, tp tstCompareType, funcUnderTest func(a, b any) bool) {
+ for i, test := range []struct {
+ left any
+ right any
+ expectIndicator int
+ }{
+ {5, 8, -1},
+ {8, 5, 1},
+ {5, 5, 0},
+ {int(5), int64(5), 0},
+ {int32(5), int(5), 0},
+ {int16(4), int(5), -1},
+ {uint(15), uint64(15), 0},
+ {-2, 1, -1},
+ {2, -5, 1},
+ {0.0, 1.23, -1},
+ {1.1, 1.1, 0},
+ {float32(1.0), float64(1.0), 0},
+ {1.23, 0.0, 1},
+ {"5", "5", 0},
+ {"8", "5", 1},
+ {"5", "0001", 1},
+ {[]int{100, 99}, []int{1, 2, 3, 4}, -1},
+ {cast.ToTime("2015-11-20"), cast.ToTime("2015-11-20"), 0},
+ {cast.ToTime("2015-11-19"), cast.ToTime("2015-11-20"), -1},
+ {cast.ToTime("2015-11-20"), cast.ToTime("2015-11-19"), 1},
+ {"a", "a", 0},
+ {"a", "b", -1},
+ {"b", "a", 1},
+ {tstEqerType1("a"), tstEqerType1("a"), 0},
+ {tstEqerType1("a"), tstEqerType2("a"), 0},
+ {tstEqerType2("a"), tstEqerType1("a"), 0},
+ {tstEqerType2("a"), tstEqerType1("b"), -1},
+ {hugo.MustParseVersion("0.32.1").Version(), hugo.MustParseVersion("0.32").Version(), 1},
+ {hugo.MustParseVersion("0.35").Version(), hugo.MustParseVersion("0.32").Version(), 1},
+ {hugo.MustParseVersion("0.36").Version(), hugo.MustParseVersion("0.36").Version(), 0},
+ {hugo.MustParseVersion("0.32").Version(), hugo.MustParseVersion("0.36").Version(), -1},
+ {hugo.MustParseVersion("0.32").Version(), "0.36", -1},
+ {"0.36", hugo.MustParseVersion("0.32").Version(), 1},
+ {"0.36", hugo.MustParseVersion("0.36").Version(), 0},
+ {"0.37", hugo.MustParseVersion("0.37-DEV").Version(), 1},
+ {"0.37-DEV", hugo.MustParseVersion("0.37").Version(), -1},
+ {"0.36", hugo.MustParseVersion("0.37-DEV").Version(), -1},
+ {"0.37-DEV", hugo.MustParseVersion("0.37-DEV").Version(), 0},
+ // https://github.com/gohugoio/hugo/issues/5905
+ {nil, nil, 0},
+ {testT.NonEmptyInterfaceNil, nil, 0},
+ {testT.NonEmptyInterfaceTypedNil, nil, 0},
+ } {
+
+ result := funcUnderTest(test.left, test.right)
+ success := false
+
+ if test.expectIndicator == 0 {
+ if tstIsEq(tp) {
+ success = result
+ } else {
+ success = !result
+ }
+ }
+
+ if test.expectIndicator < 0 {
+ success = result && (tstIsLt(tp) || tp == tstNe)
+ success = success || (!result && !tstIsLt(tp))
+ }
+
+ if test.expectIndicator > 0 {
+ success = result && (tstIsGt(tp) || tp == tstNe)
+ success = success || (!result && (!tstIsGt(tp) || tp != tstNe))
+ }
+
+ if !success {
+ t.Fatalf("[%d][%s] %v compared to %v: %t", i, path.Base(runtime.FuncForPC(reflect.ValueOf(funcUnderTest).Pointer()).Name()), test.left, test.right, result)
+ }
+ }
+}
+
+func TestEqualExtend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ for _, test := range []struct {
+ first any
+ others []any
+ expect bool
+ }{
+ {1, []any{1, 2}, true},
+ {1, []any{2, 1}, true},
+ {1, []any{2, 3}, false},
+ {tstEqerType1("a"), []any{tstEqerType1("a"), tstEqerType1("b")}, true},
+ {tstEqerType1("a"), []any{tstEqerType1("b"), tstEqerType1("a")}, true},
+ {tstEqerType1("a"), []any{tstEqerType1("b"), tstEqerType1("c")}, false},
+ } {
+
+ result := ns.Eq(test.first, test.others...)
+
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestNotEqualExtend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ for _, test := range []struct {
+ first any
+ others []any
+ expect bool
+ }{
+ {1, []any{2, 3}, true},
+ {1, []any{2, 1}, false},
+ {1, []any{1, 2}, false},
+ } {
+ result := ns.Ne(test.first, test.others...)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestGreaterEqualExtend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ for _, test := range []struct {
+ first any
+ others []any
+ expect bool
+ }{
+ {5, []any{2, 3}, true},
+ {5, []any{5, 5}, true},
+ {3, []any{4, 2}, false},
+ {3, []any{2, 4}, false},
+ } {
+ result := ns.Ge(test.first, test.others...)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestGreaterThanExtend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ for _, test := range []struct {
+ first any
+ others []any
+ expect bool
+ }{
+ {5, []any{2, 3}, true},
+ {5, []any{5, 4}, false},
+ {3, []any{4, 2}, false},
+ } {
+ result := ns.Gt(test.first, test.others...)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestLessEqualExtend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ for _, test := range []struct {
+ first any
+ others []any
+ expect bool
+ }{
+ {1, []any{2, 3}, true},
+ {1, []any{1, 2}, true},
+ {2, []any{1, 2}, false},
+ {3, []any{2, 4}, false},
+ } {
+ result := ns.Le(test.first, test.others...)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestLessThanExtend(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ for _, test := range []struct {
+ first any
+ others []any
+ expect bool
+ }{
+ {1, []any{2, 3}, true},
+ {1, []any{1, 2}, false},
+ {2, []any{1, 2}, false},
+ {3, []any{2, 4}, false},
+ } {
+ result := ns.Lt(test.first, test.others...)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestCase(t *testing.T) {
+ c := qt.New(t)
+ n := New(time.UTC, false)
+
+ c.Assert(n.Eq("az", "az"), qt.Equals, true)
+ c.Assert(n.Eq("az", stringType("az")), qt.Equals, true)
+}
+
+func TestStringType(t *testing.T) {
+ c := qt.New(t)
+ n := New(time.UTC, true)
+
+ c.Assert(n.Lt("az", "Za"), qt.Equals, true)
+ c.Assert(n.Gt("ab", "Ab"), qt.Equals, true)
+}
+
+func TestTimeUnix(t *testing.T) {
+ t.Parallel()
+ n := New(time.UTC, false)
+ var sec int64 = 1234567890
+ tv := reflect.ValueOf(time.Unix(sec, 0))
+ i := 1
+
+ res := n.toTimeUnix(tv)
+ if sec != res {
+ t.Errorf("[%d] timeUnix got %v but expected %v", i, res, sec)
+ }
+
+ i++
+ func(t *testing.T) {
+ defer func() {
+ if err := recover(); err == nil {
+ t.Errorf("[%d] timeUnix didn't return an expected error", i)
+ }
+ }()
+ iv := reflect.ValueOf(sec)
+ n.toTimeUnix(iv)
+ }(t)
+}
+
+func TestConditional(t *testing.T) {
+ c := qt.New(t)
+ n := New(time.UTC, false)
+ a, b := "a", "b"
+
+ c.Assert(n.Conditional(true, a, b), qt.Equals, a)
+ c.Assert(n.Conditional(false, a, b), qt.Equals, b)
+}
+
+// Issue 9462
+func TestComparisonArgCount(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(time.UTC, false)
+
+ panicMsg := "missing arguments for comparison"
+
+ c.Assert(func() { ns.Eq(1) }, qt.PanicMatches, panicMsg)
+ c.Assert(func() { ns.Ge(1) }, qt.PanicMatches, panicMsg)
+ c.Assert(func() { ns.Gt(1) }, qt.PanicMatches, panicMsg)
+ c.Assert(func() { ns.Le(1) }, qt.PanicMatches, panicMsg)
+ c.Assert(func() { ns.Lt(1) }, qt.PanicMatches, panicMsg)
+ c.Assert(func() { ns.Ne(1) }, qt.PanicMatches, panicMsg)
+}
diff --git a/tpl/compare/init.go b/tpl/compare/init.go
new file mode 100644
index 000000000..98c07f41b
--- /dev/null
+++ b/tpl/compare/init.go
@@ -0,0 +1,90 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package compare
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "compare"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ if d.Language == nil {
+ panic("language must be set")
+ }
+
+ ctx := New(langs.GetLocation(d.Language), false)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Default,
+ []string{"default"},
+ [][2]string{
+ {`{{ "Hugo Rocks!" | default "Hugo Rules!" }}`, `Hugo Rocks!`},
+ {`{{ "" | default "Hugo Rules!" }}`, `Hugo Rules!`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Eq,
+ []string{"eq"},
+ [][2]string{
+ {`{{ if eq .Section "blog" }}current-section{{ end }}`, `current-section`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Ge,
+ []string{"ge"},
+ [][2]string{
+ {`{{ if ge hugo.Version "0.80" }}Reasonable new Hugo version!{{ end }}`, `Reasonable new Hugo version!`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Gt,
+ []string{"gt"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Le,
+ []string{"le"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Lt,
+ []string{"lt"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Ne,
+ []string{"ne"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Conditional,
+ []string{"cond"},
+ [][2]string{
+ {`{{ cond (eq (add 2 2) 4) "2+2 is 4" "what?" | safeHTML }}`, `2+2 is 4`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/crypto/crypto.go b/tpl/crypto/crypto.go
new file mode 100644
index 000000000..42b420d59
--- /dev/null
+++ b/tpl/crypto/crypto.go
@@ -0,0 +1,137 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package crypto provides template functions for cryptographic operations.
+package crypto
+
+import (
+ "crypto/hmac"
+ "crypto/md5"
+ "crypto/sha1"
+ "crypto/sha256"
+ "crypto/sha512"
+ "encoding/hex"
+ "fmt"
+ "hash"
+ "hash/fnv"
+
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the crypto-namespaced template functions.
+func New() *Namespace {
+	return &Namespace{}
+}
+
+// Namespace provides template functions for the "crypto" namespace.
+// It holds no state, so one instance may be shared freely.
+type Namespace struct{}
+
+// MD5 hashes the given input and returns its MD5 checksum encoded as a
+// lowercase hex string. The input is first coerced to a string via cast;
+// values that cannot be converted produce an error.
+func (ns *Namespace) MD5(in any) (string, error) {
+	conv, err := cast.ToStringE(in)
+	if err != nil {
+		return "", err
+	}
+
+	hash := md5.Sum([]byte(conv))
+	return hex.EncodeToString(hash[:]), nil
+}
+
+// SHA1 hashes the given input and returns its SHA1 checksum encoded as a
+// lowercase hex string. Non-stringable input produces an error.
+func (ns *Namespace) SHA1(in any) (string, error) {
+	conv, err := cast.ToStringE(in)
+	if err != nil {
+		return "", err
+	}
+
+	hash := sha1.Sum([]byte(conv))
+	return hex.EncodeToString(hash[:]), nil
+}
+
+// SHA256 hashes the given input and returns its SHA256 checksum encoded as
+// a lowercase hex string. Non-stringable input produces an error.
+func (ns *Namespace) SHA256(in any) (string, error) {
+	conv, err := cast.ToStringE(in)
+	if err != nil {
+		return "", err
+	}
+
+	hash := sha256.Sum256([]byte(conv))
+	return hex.EncodeToString(hash[:]), nil
+}
+
+// FNV32a hashes using fnv32a algorithm and returns the 32-bit checksum
+// widened to an int. Non-stringable input produces an error.
+func (ns *Namespace) FNV32a(in any) (int, error) {
+	conv, err := cast.ToStringE(in)
+	if err != nil {
+		return 0, err
+	}
+	algorithm := fnv.New32a()
+	// hash.Hash.Write is documented never to return an error, so the
+	// return values are safely ignored here.
+	algorithm.Write([]byte(conv))
+	return int(algorithm.Sum32()), nil
+}
+
+// HMAC returns a cryptographic hash that uses a key to sign a message.
+// h selects the hash function ("md5", "sha1", "sha256" or "sha512"),
+// k is the secret key and m the message. The optional e selects the
+// output encoding: "hex" (the default) or "binary". An unsupported hash
+// name or encoding yields an error.
+func (ns *Namespace) HMAC(h any, k any, m any, e ...any) (string, error) {
+	ha, err := cast.ToStringE(h)
+	if err != nil {
+		return "", err
+	}
+
+	// Named hashFunc (not "hash") to avoid shadowing the imported hash package.
+	var hashFunc func() hash.Hash
+	switch ha {
+	case "md5":
+		hashFunc = md5.New
+	case "sha1":
+		hashFunc = sha1.New
+	case "sha256":
+		hashFunc = sha256.New
+	case "sha512":
+		hashFunc = sha512.New
+	default:
+		return "", fmt.Errorf("hmac: %s is not a supported hash function", ha)
+	}
+
+	msg, err := cast.ToStringE(m)
+	if err != nil {
+		return "", err
+	}
+
+	key, err := cast.ToStringE(k)
+	if err != nil {
+		return "", err
+	}
+
+	mac := hmac.New(hashFunc, []byte(key))
+	if _, err = mac.Write([]byte(msg)); err != nil {
+		return "", err
+	}
+
+	// A nil optional encoding falls back to hex; note that an explicit
+	// empty string is NOT nil and is rejected below.
+	encoding := "hex"
+	if len(e) > 0 && e[0] != nil {
+		encoding, err = cast.ToStringE(e[0])
+		if err != nil {
+			return "", err
+		}
+	}
+
+	switch encoding {
+	case "binary":
+		// mac.Sum(nil) is already a []byte; the previous [:] was redundant.
+		return string(mac.Sum(nil)), nil
+	case "hex":
+		return hex.EncodeToString(mac.Sum(nil)), nil
+	default:
+		return "", fmt.Errorf("%q is not a supported encoding method", encoding)
+	}
+}
diff --git a/tpl/crypto/crypto_test.go b/tpl/crypto/crypto_test.go
new file mode 100644
index 000000000..b6b2a6915
--- /dev/null
+++ b/tpl/crypto/crypto_test.go
@@ -0,0 +1,138 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package crypto
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestMD5 checks hex output for known inputs; an expect value of false
+// marks a case where an error is expected (here: a non-stringable *testing.T).
+func TestMD5(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	ns := New()
+
+	for i, test := range []struct {
+		in     any
+		expect any
+	}{
+		{"Hello world, gophers!", "b3029f756f98f79e7f1b7f1d1f0dd53b"},
+		{"Lorem ipsum dolor", "06ce65ac476fc656bea3fca5d02cfd81"},
+		{t, false},
+	} {
+		errMsg := qt.Commentf("[%d] %v", i, test.in)
+
+		result, err := ns.MD5(test.in)
+
+		// expect == false flags an expected-error case.
+		if b, ok := test.expect.(bool); ok && !b {
+			c.Assert(err, qt.Not(qt.IsNil), errMsg)
+			continue
+		}
+
+		c.Assert(err, qt.IsNil, errMsg)
+		c.Assert(result, qt.Equals, test.expect, errMsg)
+	}
+}
+
+// TestSHA1 checks hex output for known inputs; expect == false marks an
+// expected-error case (non-stringable input).
+func TestSHA1(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	ns := New()
+
+	for i, test := range []struct {
+		in     any
+		expect any
+	}{
+		{"Hello world, gophers!", "c8b5b0e33d408246e30f53e32b8f7627a7a649d4"},
+		{"Lorem ipsum dolor", "45f75b844be4d17b3394c6701768daf39419c99b"},
+		{t, false},
+	} {
+		errMsg := qt.Commentf("[%d] %v", i, test.in)
+
+		result, err := ns.SHA1(test.in)
+
+		if b, ok := test.expect.(bool); ok && !b {
+			c.Assert(err, qt.Not(qt.IsNil), errMsg)
+			continue
+		}
+
+		c.Assert(err, qt.IsNil, errMsg)
+		c.Assert(result, qt.Equals, test.expect, errMsg)
+	}
+}
+
+// TestSHA256 checks hex output for known inputs; expect == false marks an
+// expected-error case (non-stringable input).
+func TestSHA256(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	ns := New()
+
+	for i, test := range []struct {
+		in     any
+		expect any
+	}{
+		{"Hello world, gophers!", "6ec43b78da9669f50e4e422575c54bf87536954ccd58280219c393f2ce352b46"},
+		{"Lorem ipsum dolor", "9b3e1beb7053e0f900a674dd1c99aca3355e1275e1b03d3cb1bc977f5154e196"},
+		{t, false},
+	} {
+		errMsg := qt.Commentf("[%d] %v", i, test.in)
+
+		result, err := ns.SHA256(test.in)
+
+		if b, ok := test.expect.(bool); ok && !b {
+			c.Assert(err, qt.Not(qt.IsNil), errMsg)
+			continue
+		}
+
+		c.Assert(err, qt.IsNil, errMsg)
+		c.Assert(result, qt.Equals, test.expect, errMsg)
+	}
+}
+
+// TestHMAC covers every supported hash, the hex/binary encodings, the
+// nil-encoding default (hex), and error cases: unsupported encodings
+// ("foo" and the empty string) and a non-stringable key.
+func TestHMAC(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	ns := New()
+
+	for i, test := range []struct {
+		hash     any
+		key      any
+		msg      any
+		encoding any
+		expect   any
+	}{
+		{"md5", "Secret key", "Hello world, gophers!", nil, "36eb69b6bf2de96b6856fdee8bf89754"},
+		{"sha1", "Secret key", "Hello world, gophers!", nil, "84a76647de6cd47ac6ae4258e3753f711172ce68"},
+		{"sha256", "Secret key", "Hello world, gophers!", nil, "b6d11b6c53830b9d87036272ca9fe9d19306b8f9d8aa07b15da27d89e6e34f40"},
+		{"sha512", "Secret key", "Hello world, gophers!", nil, "dc3e586cd936865e2abc4c12665e9cc568b2dad714df3c9037cbea159d036cfc4209da9e3fcd30887ff441056941966899f6fb7eec9646ff9ddb592595a8eb7f"},
+		{"md5", "Secret key", "Hello world, gophers!", "hex", "36eb69b6bf2de96b6856fdee8bf89754"},
+		{"md5", "Secret key", "Hello world, gophers!", "binary", "6\xebi\xb6\xbf-\xe9khV\xfd\xee\x8b\xf8\x97T"},
+		{"md5", "Secret key", "Hello world, gophers!", "foo", false},
+		{"md5", "Secret key", "Hello world, gophers!", "", false},
+		{"", t, "", nil, false},
+	} {
+		errMsg := qt.Commentf("[%d] %v, %v, %v, %v", i, test.hash, test.key, test.msg, test.encoding)
+
+		result, err := ns.HMAC(test.hash, test.key, test.msg, test.encoding)
+
+		// expect == false flags an expected-error case.
+		if b, ok := test.expect.(bool); ok && !b {
+			c.Assert(err, qt.Not(qt.IsNil), errMsg)
+			continue
+		}
+
+		c.Assert(err, qt.IsNil, errMsg)
+		c.Assert(result, qt.Equals, test.expect, errMsg)
+	}
+}
diff --git a/tpl/crypto/init.go b/tpl/crypto/init.go
new file mode 100644
index 000000000..dddc5585a
--- /dev/null
+++ b/tpl/crypto/init.go
@@ -0,0 +1,72 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package crypto
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+// name is the key under which this namespace is registered.
+const name = "crypto"
+
+// init registers the "crypto" namespace and its template function
+// aliases (md5, sha1, sha256, hmac; FNV32a has no alias), together with
+// the example mappings used by the docs generator.
+func init() {
+	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+		ctx := New()
+
+		ns := &internal.TemplateFuncsNamespace{
+			Name:    name,
+			Context: func(args ...any) (any, error) { return ctx, nil },
+		}
+
+		ns.AddMethodMapping(ctx.MD5,
+			[]string{"md5"},
+			[][2]string{
+				{`{{ md5 "Hello world, gophers!" }}`, `b3029f756f98f79e7f1b7f1d1f0dd53b`},
+				{`{{ crypto.MD5 "Hello world, gophers!" }}`, `b3029f756f98f79e7f1b7f1d1f0dd53b`},
+			},
+		)
+
+		ns.AddMethodMapping(ctx.SHA1,
+			[]string{"sha1"},
+			[][2]string{
+				{`{{ sha1 "Hello world, gophers!" }}`, `c8b5b0e33d408246e30f53e32b8f7627a7a649d4`},
+			},
+		)
+
+		ns.AddMethodMapping(ctx.SHA256,
+			[]string{"sha256"},
+			[][2]string{
+				{`{{ sha256 "Hello world, gophers!" }}`, `6ec43b78da9669f50e4e422575c54bf87536954ccd58280219c393f2ce352b46`},
+			},
+		)
+
+		// No short alias: reachable only as crypto.FNV32a.
+		ns.AddMethodMapping(ctx.FNV32a,
+			nil,
+			[][2]string{
+				{`{{ crypto.FNV32a "Hugo Rocks!!" }}`, `1515779328`},
+			},
+		)
+
+		ns.AddMethodMapping(ctx.HMAC,
+			[]string{"hmac"},
+			[][2]string{
+				{`{{ hmac "sha256" "Secret key" "Hello world, gophers!" }}`, `b6d11b6c53830b9d87036272ca9fe9d19306b8f9d8aa07b15da27d89e6e34f40`},
+			},
+		)
+
+		return ns
+	}
+
+	internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/data/data.go b/tpl/data/data.go
new file mode 100644
index 000000000..926f6773d
--- /dev/null
+++ b/tpl/data/data.go
@@ -0,0 +1,209 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package data provides template functions for working with external data
+// sources.
+package data
+
+import (
+ "bytes"
+ "encoding/csv"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/config/security"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gohugoio/hugo/common/constants"
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/spf13/cast"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/deps"
+)
+
+// New returns a new instance of the data-namespaced template functions.
+// The CSV and JSON file caches come from deps; remote fetches use
+// http.DefaultClient (the client field is replaced in tests).
+func New(deps *deps.Deps) *Namespace {
+	return &Namespace{
+		deps:         deps,
+		cacheGetCSV:  deps.FileCaches.GetCSVCache(),
+		cacheGetJSON: deps.FileCaches.GetJSONCache(),
+		client:       http.DefaultClient,
+	}
+}
+
+// Namespace provides template functions for the "data" namespace.
+type Namespace struct {
+	deps *deps.Deps
+
+	// Separate file caches for the two fetchers.
+	cacheGetJSON *filecache.Cache
+	cacheGetCSV  *filecache.Cache
+
+	// client performs the remote requests; swapped out in tests.
+	client *http.Client
+}
+
+// GetCSV expects a data separator and one or n-parts of a URL to a resource which
+// can either be a local or a remote one.
+// The data separator can be a comma, semi-colon, pipe, etc, but only one character.
+// If you provide multiple parts for the URL they will be joined together to the final URL.
+// An optional trailing map argument is treated as custom request headers.
+// GetCSV returns nil or a slice slice to use in a short code.
+// Note the error contract: fetch/parse failures are logged as ignorable
+// errors and (nil, nil) is returned; only security access-denied errors
+// propagate to the caller.
+func (ns *Namespace) GetCSV(sep string, args ...any) (d [][]string, err error) {
+	url, headers := toURLAndHeaders(args)
+	cache := ns.cacheGetCSV
+
+	// unmarshal assigns into the named result d. Returning true on a
+	// parse failure signals the remote fetcher that a retry may help.
+	unmarshal := func(b []byte) (bool, error) {
+		if d, err = parseCSV(b, sep); err != nil {
+			err = fmt.Errorf("failed to parse CSV file %s: %w", url, err)
+
+			return true, err
+		}
+
+		return false, nil
+	}
+
+	var req *http.Request
+	req, err = http.NewRequest("GET", url, nil)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create request for getCSV for resource %s: %w", url, err)
+	}
+
+	// Add custom user headers.
+	addUserProvidedHeaders(headers, req)
+	addDefaultHeaders(req, "text/csv", "text/plain")
+
+	err = ns.getResource(cache, unmarshal, req)
+	if err != nil {
+		if security.IsAccessDenied(err) {
+			return nil, err
+		}
+		ns.deps.Log.(loggers.IgnorableLogger).Errorsf(constants.ErrRemoteGetCSV, "Failed to get CSV resource %q: %s", url, err)
+		return nil, nil
+	}
+
+	return
+}
+
+// GetJSON expects one or n-parts of a URL to a resource which can either be a local or a remote one.
+// If you provide multiple parts they will be joined together to the final URL.
+// An optional trailing map argument is treated as custom request headers.
+// GetJSON returns nil or parsed JSON to use in a short code. As with
+// GetCSV, fetch/parse failures are logged as ignorable errors and nil is
+// returned; only security access-denied errors propagate to the caller.
+func (ns *Namespace) GetJSON(args ...any) (any, error) {
+	var v any
+	url, headers := toURLAndHeaders(args)
+	cache := ns.cacheGetJSON
+
+	req, err := http.NewRequest("GET", url, nil)
+	if err != nil {
+		// Lowercased per Go error-string convention, matching GetCSV.
+		return nil, fmt.Errorf("failed to create request for getJSON resource %s: %w", url, err)
+	}
+
+	// Returning true on a decode failure signals the remote fetcher
+	// that a retry may help (e.g. a truncated response).
+	unmarshal := func(b []byte) (bool, error) {
+		err := json.Unmarshal(b, &v)
+		if err != nil {
+			return true, err
+		}
+		return false, nil
+	}
+
+	addUserProvidedHeaders(headers, req)
+	addDefaultHeaders(req, "application/json")
+
+	err = ns.getResource(cache, unmarshal, req)
+	if err != nil {
+		if security.IsAccessDenied(err) {
+			return nil, err
+		}
+		ns.deps.Log.(loggers.IgnorableLogger).Errorsf(constants.ErrRemoteGetJSON, "Failed to get JSON resource %q: %s", url, err)
+		return nil, nil
+	}
+
+	return v, nil
+}
+
+// addDefaultHeaders adds each given Accept value unless it is already
+// present, and a Hugo User-Agent unless the caller supplied one.
+func addDefaultHeaders(req *http.Request, accepts ...string) {
+	for _, accept := range accepts {
+		if !hasHeaderValue(req.Header, "Accept", accept) {
+			req.Header.Add("Accept", accept)
+		}
+	}
+	if !hasHeaderKey(req.Header, "User-Agent") {
+		req.Header.Add("User-Agent", "Hugo Static Site Generator")
+	}
+}
+
+// addUserProvidedHeaders copies user-supplied header values onto req.
+// Slice values are expanded into repeated entries for the same key;
+// a nil map is a no-op.
+func addUserProvidedHeaders(headers map[string]any, req *http.Request) {
+	if headers == nil {
+		return
+	}
+	for key, val := range headers {
+		vals := types.ToStringSlicePreserveString(val)
+		for _, s := range vals {
+			req.Header.Add(key, s)
+		}
+	}
+}
+
+// hasHeaderValue reports whether header key carries the exact value.
+// Note: this is a direct map lookup, so key must already be in the
+// canonical form used by http.Header.
+func hasHeaderValue(m http.Header, key, value string) bool {
+	var s []string
+	var ok bool
+
+	if s, ok = m[key]; !ok {
+		return false
+	}
+
+	for _, v := range s {
+		if v == value {
+			return true
+		}
+	}
+	return false
+}
+
+// hasHeaderKey reports whether the header map contains key (direct map
+// lookup; key must be in canonical form).
+func hasHeaderKey(m http.Header, key string) bool {
+	_, ok := m[key]
+	return ok
+}
+
+// toURLAndHeaders joins the given parts into a single URL string. If the
+// final argument converts to a string map it is split off and returned as
+// request headers; otherwise headers is nil.
+func toURLAndHeaders(urlParts []any) (string, map[string]any) {
+	if len(urlParts) == 0 {
+		return "", nil
+	}
+
+	// The last argument may be a map.
+	headers, err := maps.ToStringMapE(urlParts[len(urlParts)-1])
+	if err == nil {
+		urlParts = urlParts[:len(urlParts)-1]
+	} else {
+		headers = nil
+	}
+
+	return strings.Join(cast.ToStringSlice(urlParts), ""), headers
+}
+
+// parseCSV parses bytes of CSV data into a [][]string or an error.
+// The separator must be exactly one character (rune); multibyte runes
+// such as "€" are accepted, which the old byte-length check rejected.
+func parseCSV(c []byte, sep string) ([][]string, error) {
+	rSep := []rune(sep)
+	if len(rSep) != 1 {
+		// Lowercased per Go error-string convention (staticcheck ST1005).
+		return nil, errors.New("incorrect length of CSV separator: " + sep)
+	}
+	b := bytes.NewReader(c)
+	r := csv.NewReader(b)
+	r.Comma = rSep[0]
+	// 0 = require every record to match the first record's field count.
+	r.FieldsPerRecord = 0
+	return r.ReadAll()
+}
diff --git a/tpl/data/data_test.go b/tpl/data/data_test.go
new file mode 100644
index 000000000..3d365e5fb
--- /dev/null
+++ b/tpl/data/data_test.go
@@ -0,0 +1,349 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package data
+
+import (
+ "bytes"
+ "html/template"
+ "net/http"
+ "net/http/httptest"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ qt "github.com/frankban/quicktest"
+)
+
+// TestGetCSV covers remote fetches (success, ragged rows, 404) and local
+// files (custom separator, missing file). expect == false marks cases
+// where GetCSV should log one error and return nil.
+func TestGetCSV(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	for i, test := range []struct {
+		sep     string
+		url     string
+		content string
+		expect  any
+	}{
+		// Remotes
+		{
+			",",
+			`http://success/`,
+			"gomeetup,city\nyes,Sydney\nyes,San Francisco\nyes,Stockholm\n",
+			[][]string{{"gomeetup", "city"}, {"yes", "Sydney"}, {"yes", "San Francisco"}, {"yes", "Stockholm"}},
+		},
+		{
+			",",
+			`http://error.extra.field/`,
+			"gomeetup,city\nyes,Sydney\nyes,San Francisco\nyes,Stockholm,EXTRA\n",
+			false,
+		},
+		{
+			",",
+			`http://nofound/404`,
+			``,
+			false,
+		},
+
+		// Locals
+		{
+			";",
+			"pass/semi",
+			"gomeetup;city\nyes;Sydney\nyes;San Francisco\nyes;Stockholm\n",
+			[][]string{{"gomeetup", "city"}, {"yes", "Sydney"}, {"yes", "San Francisco"}, {"yes", "Stockholm"}},
+		},
+		{
+			";",
+			"fail/no-file",
+			"",
+			false,
+		},
+	} {
+
+		c.Run(test.url, func(c *qt.C) {
+			msg := qt.Commentf("Test %d", i)
+
+			ns := newTestNs()
+
+			// Setup HTTP test server
+			var srv *httptest.Server
+			srv, ns.client = getTestServer(func(w http.ResponseWriter, r *http.Request) {
+				if !hasHeaderValue(r.Header, "Accept", "text/csv") && !hasHeaderValue(r.Header, "Accept", "text/plain") {
+					http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
+					return
+				}
+
+				if r.URL.Path == "/404" {
+					http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+					return
+				}
+
+				w.Header().Add("Content-type", "text/csv")
+
+				w.Write([]byte(test.content))
+			})
+			defer func() { srv.Close() }()
+
+			// Setup local test file for schema-less URLs
+			if !strings.Contains(test.url, ":") && !strings.HasPrefix(test.url, "fail/") {
+				f, err := ns.deps.Fs.Source.Create(filepath.Join(ns.deps.Cfg.GetString("workingDir"), test.url))
+				c.Assert(err, qt.IsNil, msg)
+				f.WriteString(test.content)
+				f.Close()
+			}
+
+			// Get on with it
+			got, err := ns.GetCSV(test.sep, test.url)
+
+			// Failure cases are swallowed by GetCSV: expect exactly one
+			// logged error and a nil result instead of a returned error.
+			if _, ok := test.expect.(bool); ok {
+				c.Assert(int(ns.deps.Log.LogCounters().ErrorCounter.Count()), qt.Equals, 1)
+				c.Assert(got, qt.IsNil)
+				return
+			}
+
+			c.Assert(err, qt.IsNil, msg)
+			c.Assert(int(ns.deps.Log.LogCounters().ErrorCounter.Count()), qt.Equals, 0)
+			c.Assert(got, qt.Not(qt.IsNil), msg)
+			c.Assert(got, qt.DeepEquals, test.expect, msg)
+		})
+
+	}
+}
+
+// TestGetJSON covers remote fetches (success, malformed JSON, 404) and
+// local files, including a unicode filename. expect == false marks cases
+// where GetJSON should log one error and return nil.
+func TestGetJSON(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	for i, test := range []struct {
+		url     string
+		content string
+		expect  any
+	}{
+		{
+			`http://success/`,
+			`{"gomeetup":["Sydney","San Francisco","Stockholm"]}`,
+			map[string]any{"gomeetup": []any{"Sydney", "San Francisco", "Stockholm"}},
+		},
+		{
+			`http://malformed/`,
+			`{gomeetup:["Sydney","San Francisco","Stockholm"]}`,
+			false,
+		},
+		{
+			`http://nofound/404`,
+			``,
+			false,
+		},
+		// Locals
+		{
+			"pass/semi",
+			`{"gomeetup":["Sydney","San Francisco","Stockholm"]}`,
+			map[string]any{"gomeetup": []any{"Sydney", "San Francisco", "Stockholm"}},
+		},
+		{
+			"fail/no-file",
+			"",
+			false,
+		},
+		{
+			`pass/üńīçøðê-url.json`,
+			`{"gomeetup":["Sydney","San Francisco","Stockholm"]}`,
+			map[string]any{"gomeetup": []any{"Sydney", "San Francisco", "Stockholm"}},
+		},
+	} {
+
+		c.Run(test.url, func(c *qt.C) {
+
+			msg := qt.Commentf("Test %d", i)
+			ns := newTestNs()
+
+			// Setup HTTP test server
+			var srv *httptest.Server
+			srv, ns.client = getTestServer(func(w http.ResponseWriter, r *http.Request) {
+				if !hasHeaderValue(r.Header, "Accept", "application/json") {
+					http.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)
+					return
+				}
+
+				if r.URL.Path == "/404" {
+					http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+					return
+				}
+
+				w.Header().Add("Content-type", "application/json")
+
+				w.Write([]byte(test.content))
+			})
+			defer func() { srv.Close() }()
+
+			// Setup local test file for schema-less URLs
+			if !strings.Contains(test.url, ":") && !strings.HasPrefix(test.url, "fail/") {
+				f, err := ns.deps.Fs.Source.Create(filepath.Join(ns.deps.Cfg.GetString("workingDir"), test.url))
+				c.Assert(err, qt.IsNil, msg)
+				f.WriteString(test.content)
+				f.Close()
+			}
+
+			// Get on with it. The returned error is deliberately ignored:
+			// GetJSON reports failures via the logger, asserted below.
+			got, _ := ns.GetJSON(test.url)
+
+			if _, ok := test.expect.(bool); ok {
+				c.Assert(int(ns.deps.Log.LogCounters().ErrorCounter.Count()), qt.Equals, 1)
+				return
+			}
+
+			c.Assert(int(ns.deps.Log.LogCounters().ErrorCounter.Count()), qt.Equals, 0, msg)
+			c.Assert(got, qt.Not(qt.IsNil), msg)
+			c.Assert(got, qt.DeepEquals, test.expect)
+
+		})
+	}
+}
+
+// TestHeaders verifies that user-provided headers of various types
+// (strings, ints, template.HTML, slices, maps.Params) reach the wire,
+// that slices expand to repeated entries, and that a user-supplied
+// User-Agent overrides the default. Each case runs through both GetJSON
+// and GetCSV.
+func TestHeaders(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	for _, test := range []struct {
+		name    string
+		headers any
+		assert  func(c *qt.C, headers string)
+	}{
+		{
+			`Misc header variants`,
+			map[string]any{
+				"Accept-Charset": "utf-8",
+				"Max-forwards":   "10",
+				"X-Int":          32,
+				"X-Templ":        template.HTML("a"),
+				"X-Multiple":     []string{"a", "b"},
+				"X-MultipleInt":  []int{3, 4},
+			},
+			func(c *qt.C, headers string) {
+				c.Assert(headers, qt.Contains, "Accept-Charset: utf-8")
+				c.Assert(headers, qt.Contains, "Max-Forwards: 10")
+				c.Assert(headers, qt.Contains, "X-Int: 32")
+				c.Assert(headers, qt.Contains, "X-Templ: a")
+				c.Assert(headers, qt.Contains, "X-Multiple: a")
+				c.Assert(headers, qt.Contains, "X-Multiple: b")
+				c.Assert(headers, qt.Contains, "X-Multipleint: 3")
+				c.Assert(headers, qt.Contains, "X-Multipleint: 4")
+				c.Assert(headers, qt.Contains, "User-Agent: Hugo Static Site Generator")
+			},
+		},
+		{
+			`Params`,
+			maps.Params{
+				"Accept-Charset": "utf-8",
+			},
+			func(c *qt.C, headers string) {
+				c.Assert(headers, qt.Contains, "Accept-Charset: utf-8")
+			},
+		},
+		{
+			`Override User-Agent`,
+			map[string]any{
+				"User-Agent": "007",
+			},
+			func(c *qt.C, headers string) {
+				c.Assert(headers, qt.Contains, "User-Agent: 007")
+			},
+		},
+	} {
+
+		c.Run(test.name, func(c *qt.C) {
+
+			ns := newTestNs()
+
+			// Setup HTTP test server
+			var srv *httptest.Server
+			var headers bytes.Buffer
+			srv, ns.client = getTestServer(func(w http.ResponseWriter, r *http.Request) {
+				// NOTE(review): the request was made against example.org but is
+				// asserted as gohugo.io — presumably getTestServer rewrites the
+				// host; confirm in its definition.
+				c.Assert(r.URL.String(), qt.Equals, "http://gohugo.io/api?foo")
+				w.Write([]byte("{}"))
+				r.Header.Write(&headers)
+
+			})
+			defer func() { srv.Close() }()
+
+			testFunc := func(fn func(args ...any) error) {
+				defer headers.Reset()
+				err := fn("http://example.org/api", "?foo", test.headers)
+
+				c.Assert(err, qt.IsNil)
+				c.Assert(int(ns.deps.Log.LogCounters().ErrorCounter.Count()), qt.Equals, 0)
+				test.assert(c, headers.String())
+			}
+
+			testFunc(func(args ...any) error {
+				_, err := ns.GetJSON(args...)
+				return err
+			})
+			testFunc(func(args ...any) error {
+				_, err := ns.GetCSV(",", args...)
+				return err
+			})
+
+		})
+
+	}
+}
+
+// TestToURLAndHeaders checks URL-part joining and that a trailing map is
+// split off as headers while non-map trailing args are joined in.
+func TestToURLAndHeaders(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+	url, headers := toURLAndHeaders([]any{"https://foo?id=", 32})
+	c.Assert(url, qt.Equals, "https://foo?id=32")
+	c.Assert(headers, qt.IsNil)
+
+	url, headers = toURLAndHeaders([]any{"https://foo?id=", 32, map[string]any{"a": "b"}})
+	c.Assert(url, qt.Equals, "https://foo?id=32")
+	c.Assert(headers, qt.DeepEquals, map[string]any{"a": "b"})
+}
+
+// TestParseCSV covers bad separators (empty, multi-char), mismatched
+// field counts across records, and successful parses; results are
+// flattened by concatenation for comparison.
+func TestParseCSV(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	for i, test := range []struct {
+		csv []byte
+		sep string
+		exp string
+		err bool
+	}{
+		{[]byte("a,b,c\nd,e,f\n"), "", "", true},
+		{[]byte("a,b,c\nd,e,f\n"), "~/", "", true},
+		{[]byte("a,b,c\nd,e,f"), "|", "a,b,cd,e,f", false},
+		{[]byte("q,w,e\nd,e,f"), ",", "qwedef", false},
+		{[]byte("a|b|c\nd|e|f|g"), "|", "abcdefg", true},
+		{[]byte("z|y|c\nd|e|f"), "|", "zycdef", false},
+	} {
+		msg := qt.Commentf("Test %d: %v", i, test)
+
+		csv, err := parseCSV(test.csv, test.sep)
+		if test.err {
+			c.Assert(err, qt.Not(qt.IsNil), msg)
+			continue
+		}
+		c.Assert(err, qt.IsNil, msg)
+
+		act := ""
+		for _, v := range csv {
+			act = act + strings.Join(v, "")
+		}
+
+		c.Assert(act, qt.Equals, test.exp, msg)
+	}
+}
diff --git a/tpl/data/init.go b/tpl/data/init.go
new file mode 100644
index 000000000..22e685fc8
--- /dev/null
+++ b/tpl/data/init.go
@@ -0,0 +1,45 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package data
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+// name is the key under which this namespace is registered.
+const name = "data"
+
+// init registers the "data" namespace with its getCSV/getJSON aliases.
+// No examples are registered for these (they require network/filesystem).
+func init() {
+	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+		ctx := New(d)
+
+		ns := &internal.TemplateFuncsNamespace{
+			Name:    name,
+			Context: func(args ...any) (any, error) { return ctx, nil },
+		}
+
+		ns.AddMethodMapping(ctx.GetCSV,
+			[]string{"getCSV"},
+			[][2]string{},
+		)
+
+		ns.AddMethodMapping(ctx.GetJSON,
+			[]string{"getJSON"},
+			[][2]string{},
+		)
+		return ns
+	}
+
+	internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/data/resources.go b/tpl/data/resources.go
new file mode 100644
index 000000000..0470faf51
--- /dev/null
+++ b/tpl/data/resources.go
@@ -0,0 +1,130 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package data
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "time"
+
+ "github.com/gohugoio/hugo/cache/filecache"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/afero"
+)
+
+var (
+	resSleep   = time.Second * 2 // if JSON decoding failed sleep for n seconds before retrying
+	resRetries = 1               // number of retries to load the JSON from URL
+)
+
+// getRemote loads the content of a remote file. This method is thread safe.
+// The result is stored in (and served from) the given file cache; the cache
+// key covers both the URL and all request headers, so requests differing
+// only in headers are cached separately. A fetch is retried up to
+// resRetries times when unmarshal reports the failure as retryable.
+func (ns *Namespace) getRemote(cache *filecache.Cache, unmarshal func([]byte) (bool, error), req *http.Request) error {
+	url := req.URL.String()
+	if err := ns.deps.ExecHelper.Sec().CheckAllowedHTTPURL(url); err != nil {
+		return err
+	}
+	if err := ns.deps.ExecHelper.Sec().CheckAllowedHTTPMethod("GET"); err != nil {
+		return err
+	}
+
+	var headers bytes.Buffer
+	req.Header.Write(&headers)
+	id := helpers.MD5String(url + headers.String())
+	var handled bool
+	var retry bool
+
+	_, b, err := cache.GetOrCreateBytes(id, func() ([]byte, error) {
+		var err error
+		handled = true
+		for i := 0; i <= resRetries; i++ {
+			ns.deps.Log.Infof("Downloading: %s ...", url)
+			var res *http.Response
+			res, err = ns.client.Do(req)
+			if err != nil {
+				return nil, err
+			}
+
+			var b []byte
+			b, err = ioutil.ReadAll(res.Body)
+			// Close the body unconditionally: the original closed it only on
+			// the success path, leaking the connection when ReadAll failed.
+			res.Body.Close()
+			if err != nil {
+				return nil, err
+			}
+
+			if isHTTPError(res) {
+				return nil, fmt.Errorf("Failed to retrieve remote file: %s, body: %q", http.StatusText(res.StatusCode), b)
+			}
+
+			retry, err = unmarshal(b)
+
+			if err == nil {
+				// Return it so it can be cached.
+				return b, nil
+			}
+
+			if !retry {
+				return nil, err
+			}
+
+			ns.deps.Log.Infof("Cannot read remote resource %s: %s", url, err)
+			ns.deps.Log.Infof("Retry #%d for %s and sleeping for %s", i+1, url, resSleep)
+			time.Sleep(resSleep)
+		}
+
+		return nil, err
+	})
+
+	if !handled {
+		// This is cached content and should be correct.
+		_, err = unmarshal(b)
+	}
+
+	return err
+}
+
+// getLocal loads the content of a local file, resolving url relative to
+// the configured workingDir.
+func getLocal(url string, fs afero.Fs, cfg config.Provider) ([]byte, error) {
+	filename := filepath.Join(cfg.GetString("workingDir"), url)
+	return afero.ReadFile(fs, filename)
+}
+
+// getResource loads the content of a local or remote file and feeds it to
+// unmarshal. Scheme-less URLs are read from the source filesystem relative
+// to workingDir; anything with a scheme goes through the caching remote
+// fetcher.
+func (ns *Namespace) getResource(cache *filecache.Cache, unmarshal func(b []byte) (bool, error), req *http.Request) error {
+	switch req.URL.Scheme {
+	case "":
+		// Undo URL escaping so local paths with spaces/unicode resolve.
+		url, err := url.QueryUnescape(req.URL.String())
+		if err != nil {
+			return err
+		}
+		b, err := getLocal(url, ns.deps.Fs.Source, ns.deps.Cfg)
+		if err != nil {
+			return err
+		}
+		_, err = unmarshal(b)
+		return err
+	default:
+		return ns.getRemote(cache, unmarshal, req)
+	}
+}
+
+// isHTTPError reports whether the response status lies outside the 2xx range.
+func isHTTPError(res *http.Response) bool {
+	return res.StatusCode < 200 || res.StatusCode > 299
+}
diff --git a/tpl/data/resources_test.go b/tpl/data/resources_test.go
new file mode 100644
index 000000000..44f0f9ac3
--- /dev/null
+++ b/tpl/data/resources_test.go
@@ -0,0 +1,230 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package data
+
+import (
+ "bytes"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "sync"
+ "testing"
+ "time"
+
+ "github.com/gohugoio/hugo/config/security"
+ "github.com/gohugoio/hugo/modules"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/cache/filecache"
+ "github.com/gohugoio/hugo/common/hexec"
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/spf13/afero"
+)
+
+func TestScpGetLocal(t *testing.T) {
+ t.Parallel()
+ v := config.NewWithTestDefaults()
+ fs := hugofs.NewMem(v)
+ ps := helpers.FilePathSeparator
+
+ tests := []struct {
+ path string
+ content []byte
+ }{
+ {"testpath" + ps + "test.txt", []byte(`T€st Content 123 fOO,bar:foo%bAR`)},
+ {"FOo" + ps + "BaR.html", []byte(`FOo/BaR.html T€st Content 123`)},
+ {"трям" + ps + "трям", []byte(`T€st трям/трям Content 123`)},
+ {"은행", []byte(`T€st C은행ontent 123`)},
+ {"Банковский кассир", []byte(`Банковский кассир T€st Content 123`)},
+ }
+
+ for _, test := range tests {
+ r := bytes.NewReader(test.content)
+ err := helpers.WriteToDisk(test.path, r, fs.Source)
+ if err != nil {
+ t.Error(err)
+ }
+
+ c, err := getLocal(test.path, fs.Source, v)
+ if err != nil {
+ t.Errorf("Error getting resource content: %s", err)
+ }
+ if !bytes.Equal(c, test.content) {
+ t.Errorf("\nExpected: %s\nActual: %s\n", string(test.content), string(c))
+ }
+ }
+}
+
+func getTestServer(handler func(w http.ResponseWriter, r *http.Request)) (*httptest.Server, *http.Client) {
+ testServer := httptest.NewServer(http.HandlerFunc(handler))
+ client := &http.Client{
+ Transport: &http.Transport{Proxy: func(r *http.Request) (*url.URL, error) {
+ // Remove when https://github.com/golang/go/issues/13686 is fixed
+ r.Host = "gohugo.io"
+ return url.Parse(testServer.URL)
+ }},
+ }
+ return testServer, client
+}
+
+func TestScpGetRemote(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ fs := new(afero.MemMapFs)
+ cache := filecache.NewCache(fs, 100, "")
+
+ tests := []struct {
+ path string
+ content []byte
+ }{
+ {"http://Foo.Bar/foo_Bar-Foo", []byte(`T€st Content 123`)},
+ {"http://Doppel.Gänger/foo_Bar-Foo", []byte(`T€st Cont€nt 123`)},
+ {"http://Doppel.Gänger/Fizz_Bazz-Foo", []byte(`T€st Банковский кассир Cont€nt 123`)},
+ {"http://Doppel.Gänger/Fizz_Bazz-Bar", []byte(`T€st Банковский кассир Cont€nt 456`)},
+ }
+
+ for _, test := range tests {
+ msg := qt.Commentf("%v", test)
+
+ req, err := http.NewRequest("GET", test.path, nil)
+ c.Assert(err, qt.IsNil, msg)
+
+ srv, cl := getTestServer(func(w http.ResponseWriter, r *http.Request) {
+ w.Write(test.content)
+ })
+ defer func() { srv.Close() }()
+
+ ns := newTestNs()
+ ns.client = cl
+
+ var cb []byte
+ f := func(b []byte) (bool, error) {
+ cb = b
+ return false, nil
+ }
+
+ err = ns.getRemote(cache, f, req)
+ c.Assert(err, qt.IsNil, msg)
+ c.Assert(string(cb), qt.Equals, string(test.content))
+
+ c.Assert(string(cb), qt.Equals, string(test.content))
+
+ }
+}
+
+func TestScpGetRemoteParallel(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ content := []byte(`T€st Content 123`)
+ srv, cl := getTestServer(func(w http.ResponseWriter, r *http.Request) {
+ w.Write(content)
+ })
+
+ defer func() { srv.Close() }()
+
+ url := "http://Foo.Bar/foo_Bar-Foo"
+ req, err := http.NewRequest("GET", url, nil)
+ c.Assert(err, qt.IsNil)
+
+ for _, ignoreCache := range []bool{false} {
+ cfg := config.NewWithTestDefaults()
+ cfg.Set("ignoreCache", ignoreCache)
+
+ ns := New(newDeps(cfg))
+ ns.client = cl
+
+ var wg sync.WaitGroup
+
+ for i := 0; i < 1; i++ {
+ wg.Add(1)
+ go func(gor int) {
+ defer wg.Done()
+ for j := 0; j < 10; j++ {
+ var cb []byte
+ f := func(b []byte) (bool, error) {
+ cb = b
+ return false, nil
+ }
+ err := ns.getRemote(ns.cacheGetJSON, f, req)
+
+ c.Assert(err, qt.IsNil)
+ if string(content) != string(cb) {
+ t.Errorf("expected\n%q\ngot\n%q", content, cb)
+ }
+
+ time.Sleep(23 * time.Millisecond)
+ }
+ }(i)
+ }
+
+ wg.Wait()
+ }
+}
+
+func newDeps(cfg config.Provider) *deps.Deps {
+ cfg.Set("resourceDir", "resources")
+ cfg.Set("dataDir", "resources")
+ cfg.Set("i18nDir", "i18n")
+ cfg.Set("assetDir", "assets")
+ cfg.Set("layoutDir", "layouts")
+ cfg.Set("archetypeDir", "archetypes")
+
+ langs.LoadLanguageSettings(cfg, nil)
+ mod, err := modules.CreateProjectModule(cfg)
+ if err != nil {
+ panic(err)
+ }
+ cfg.Set("allModules", modules.Modules{mod})
+
+ ex := hexec.New(security.DefaultConfig)
+
+ logger := loggers.NewIgnorableLogger(loggers.NewErrorLogger(), "none")
+ cs, err := helpers.NewContentSpec(cfg, logger, afero.NewMemMapFs(), ex)
+ if err != nil {
+ panic(err)
+ }
+
+ fs := hugofs.NewMem(cfg)
+
+ p, err := helpers.NewPathSpec(fs, cfg, nil)
+ if err != nil {
+ panic(err)
+ }
+
+ fileCaches, err := filecache.NewCaches(p)
+ if err != nil {
+ panic(err)
+ }
+
+ return &deps.Deps{
+ Cfg: cfg,
+ Fs: fs,
+ FileCaches: fileCaches,
+ ExecHelper: ex,
+ ContentSpec: cs,
+ Log: logger,
+ LogDistinct: helpers.NewDistinctLogger(logger),
+ }
+}
+
+func newTestNs() *Namespace {
+ return New(newDeps(config.NewWithTestDefaults()))
+}
diff --git a/tpl/debug/debug.go b/tpl/debug/debug.go
new file mode 100644
index 000000000..04fb71565
--- /dev/null
+++ b/tpl/debug/debug.go
@@ -0,0 +1,40 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package debug provides template functions to help debugging templates.
+package debug
+
+import (
+ "github.com/sanity-io/litter"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
+// New returns a new instance of the debug-namespaced template functions.
+func New(d *deps.Deps) *Namespace {
+ return &Namespace{}
+}
+
+// Namespace provides template functions for the "debug" namespace.
+type Namespace struct {
+}
+
+// Dump returns a object dump of val as a string.
+// Note that not every value passed to Dump will print so nicely, but
+// we'll improve on that. We recommend using the "go" Chroma lexer to format the output
+// nicely.
+// Also note that the output from Dump may change from Hugo version to the next,
+// so don't depend on a specific output.
+func (ns *Namespace) Dump(val any) string {
+ return litter.Sdump(val)
+}
diff --git a/tpl/debug/init.go b/tpl/debug/init.go
new file mode 100644
index 000000000..bf59ae030
--- /dev/null
+++ b/tpl/debug/init.go
@@ -0,0 +1,45 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "debug"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Dump,
+ nil,
+ [][2]string{
+ {`{{- $m := newScratch -}}
+{{- $m.Set "Hugo" "Rocks!" -}}
+{{- $m.Values | debug.Dump | safeHTML -}}`, "map[string]interface {}{\n \"Hugo\": \"Rocks!\",\n}"},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/diagrams/diagrams.go b/tpl/diagrams/diagrams.go
new file mode 100644
index 000000000..7f0a171b2
--- /dev/null
+++ b/tpl/diagrams/diagrams.go
@@ -0,0 +1,81 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package diagrams
+
+import (
+ "bytes"
+ "html/template"
+ "io"
+ "strings"
+
+ "github.com/bep/goat"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/cast"
+)
+
+type SVGDiagram interface {
+ // Wrapped returns the diagram as an SVG, including the <svg> container.
+ Wrapped() template.HTML
+
+ // Inner returns the inner markup of the SVG.
+ // This allows for the <svg> container to be created manually.
+ Inner() template.HTML
+
+ // Width returns the width of the SVG.
+ Width() int
+
+ // Height returns the height of the SVG.
+ Height() int
+}
+
+type goatDiagram struct {
+ d goat.SVG
+}
+
+func (d goatDiagram) Inner() template.HTML {
+ return template.HTML(d.d.Body)
+}
+
+func (d goatDiagram) Wrapped() template.HTML {
+ return template.HTML(d.d.String())
+}
+
+func (d goatDiagram) Width() int {
+ return d.d.Width
+}
+
+func (d goatDiagram) Height() int {
+ return d.d.Height
+}
+
+type Diagrams struct {
+ d *deps.Deps
+}
+
+func (d *Diagrams) Goat(v any) SVGDiagram {
+ var r io.Reader
+
+ switch vv := v.(type) {
+ case io.Reader:
+ r = vv
+ case []byte:
+ r = bytes.NewReader(vv)
+ default:
+ r = strings.NewReader(cast.ToString(v))
+ }
+
+ return goatDiagram{
+ d: goat.BuildSVG(r),
+ }
+}
diff --git a/tpl/diagrams/init.go b/tpl/diagrams/init.go
new file mode 100644
index 000000000..fed85e6a3
--- /dev/null
+++ b/tpl/diagrams/init.go
@@ -0,0 +1,38 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package diagrams
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "diagrams"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := &Diagrams{
+ d: d,
+ }
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/encoding/encoding.go b/tpl/encoding/encoding.go
new file mode 100644
index 000000000..272503e0c
--- /dev/null
+++ b/tpl/encoding/encoding.go
@@ -0,0 +1,90 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package encoding provides template functions for encoding content.
+package encoding
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "html/template"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the encoding-namespaced template functions.
+func New() *Namespace {
+ return &Namespace{}
+}
+
+// Namespace provides template functions for the "encoding" namespace.
+type Namespace struct{}
+
+// Base64Decode returns the base64 decoding of the given content.
+func (ns *Namespace) Base64Decode(content any) (string, error) {
+ conv, err := cast.ToStringE(content)
+ if err != nil {
+ return "", err
+ }
+
+ dec, err := base64.StdEncoding.DecodeString(conv)
+ return string(dec), err
+}
+
+// Base64Encode returns the base64 encoding of the given content.
+func (ns *Namespace) Base64Encode(content any) (string, error) {
+ conv, err := cast.ToStringE(content)
+ if err != nil {
+ return "", err
+ }
+
+ return base64.StdEncoding.EncodeToString([]byte(conv)), nil
+}
+
+// Jsonify encodes a given object to JSON. To pretty print the JSON, pass a map
+// or dictionary of options as the first argument. Supported options are
+// "prefix" and "indent". Each JSON element in the output will begin on a new
+// line beginning with prefix followed by one or more copies of indent according
+// to the indentation nesting.
+func (ns *Namespace) Jsonify(args ...any) (template.HTML, error) {
+ var (
+ b []byte
+ err error
+ )
+
+ switch len(args) {
+ case 0:
+ return "", nil
+ case 1:
+ b, err = json.Marshal(args[0])
+ case 2:
+ var opts map[string]string
+
+ opts, err = maps.ToStringMapStringE(args[0])
+ if err != nil {
+ break
+ }
+
+ b, err = json.MarshalIndent(args[1], opts["prefix"], opts["indent"])
+ default:
+ err = errors.New("too many arguments to jsonify")
+ }
+
+ if err != nil {
+ return "", err
+ }
+
+ return template.HTML(b), nil
+}
diff --git a/tpl/encoding/encoding_test.go b/tpl/encoding/encoding_test.go
new file mode 100644
index 000000000..e7c82e3be
--- /dev/null
+++ b/tpl/encoding/encoding_test.go
@@ -0,0 +1,118 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package encoding
+
+import (
+ "html/template"
+ "math"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+type tstNoStringer struct{}
+
+func TestBase64Decode(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ v any
+ expect any
+ }{
+ {"YWJjMTIzIT8kKiYoKSctPUB+", "abc123!?$*&()'-=@~"},
+ // errors
+ {t, false},
+ } {
+
+ result, err := ns.Base64Decode(test.v)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestBase64Encode(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ v any
+ expect any
+ }{
+ {"YWJjMTIzIT8kKiYoKSctPUB+", "WVdKak1USXpJVDhrS2lZb0tTY3RQVUIr"},
+ // errors
+ {t, false},
+ } {
+
+ result, err := ns.Base64Encode(test.v)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestJsonify(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New()
+
+ for _, test := range []struct {
+ opts any
+ v any
+ expect any
+ }{
+ {nil, []string{"a", "b"}, template.HTML(`["a","b"]`)},
+ {map[string]string{"indent": "<i>"}, []string{"a", "b"}, template.HTML("[\n<i>\"a\",\n<i>\"b\"\n]")},
+ {map[string]string{"prefix": "<p>"}, []string{"a", "b"}, template.HTML("[\n<p>\"a\",\n<p>\"b\"\n<p>]")},
+ {map[string]string{"prefix": "<p>", "indent": "<i>"}, []string{"a", "b"}, template.HTML("[\n<p><i>\"a\",\n<p><i>\"b\"\n<p>]")},
+ {nil, tstNoStringer{}, template.HTML("{}")},
+ {nil, nil, template.HTML("null")},
+ // errors
+ {nil, math.NaN(), false},
+ {tstNoStringer{}, []string{"a", "b"}, false},
+ } {
+ args := []any{}
+
+ if test.opts != nil {
+ args = append(args, test.opts)
+ }
+
+ args = append(args, test.v)
+
+ result, err := ns.Jsonify(args...)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
diff --git a/tpl/encoding/init.go b/tpl/encoding/init.go
new file mode 100644
index 000000000..1d42b4e37
--- /dev/null
+++ b/tpl/encoding/init.go
@@ -0,0 +1,59 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package encoding
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "encoding"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New()
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Base64Decode,
+ []string{"base64Decode"},
+ [][2]string{
+ {`{{ "SGVsbG8gd29ybGQ=" | base64Decode }}`, `Hello world`},
+ {`{{ 42 | base64Encode | base64Decode }}`, `42`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Base64Encode,
+ []string{"base64Encode"},
+ [][2]string{
+ {`{{ "Hello world" | base64Encode }}`, `SGVsbG8gd29ybGQ=`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Jsonify,
+ []string{"jsonify"},
+ [][2]string{
+ {`{{ (slice "A" "B" "C") | jsonify }}`, `["A","B","C"]`},
+ {`{{ (slice "A" "B" "C") | jsonify (dict "indent" " ") }}`, "[\n \"A\",\n \"B\",\n \"C\"\n]"},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/fmt/fmt.go b/tpl/fmt/fmt.go
new file mode 100644
index 000000000..7790b4955
--- /dev/null
+++ b/tpl/fmt/fmt.go
@@ -0,0 +1,85 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package fmt provides template functions for formatting strings.
+package fmt
+
+import (
+ _fmt "fmt"
+
+ "github.com/gohugoio/hugo/common/loggers"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+)
+
+// New returns a new instance of the fmt-namespaced template functions.
+func New(d *deps.Deps) *Namespace {
+ ignorableLogger, ok := d.Log.(loggers.IgnorableLogger)
+ if !ok {
+ ignorableLogger = loggers.NewIgnorableLogger(d.Log)
+ }
+
+ distinctLogger := helpers.NewDistinctLogger(d.Log)
+ ns := &Namespace{
+ distinctLogger: ignorableLogger.Apply(distinctLogger),
+ }
+
+ d.BuildStartListeners.Add(func() {
+ ns.distinctLogger.Reset()
+ })
+
+ return ns
+}
+
+// Namespace provides template functions for the "fmt" namespace.
+type Namespace struct {
+ distinctLogger loggers.IgnorableLogger
+}
+
+// Print returns a string representation args.
+func (ns *Namespace) Print(args ...any) string {
+ return _fmt.Sprint(args...)
+}
+
+// Printf returns a formatted string representation of args.
+func (ns *Namespace) Printf(format string, args ...any) string {
+ return _fmt.Sprintf(format, args...)
+}
+
+// Println returns string representation of args ending with a newline.
+func (ns *Namespace) Println(args ...any) string {
+ return _fmt.Sprintln(args...)
+}
+
+// Errorf formats args according to a format specifier and logs an ERROR.
+// It returns an empty string.
+func (ns *Namespace) Errorf(format string, args ...any) string {
+ ns.distinctLogger.Errorf(format, args...)
+ return ""
+}
+
+// Erroridf formats args according to a format specifier and logs an ERROR and
+// an information text that the error with the given ID can be suppressed in config.
+// It returns an empty string.
+func (ns *Namespace) Erroridf(id, format string, args ...any) string {
+ ns.distinctLogger.Errorsf(id, format, args...)
+ return ""
+}
+
+// Warnf formats args according to a format specifier and logs a WARNING.
+// It returns an empty string.
+func (ns *Namespace) Warnf(format string, args ...any) string {
+ ns.distinctLogger.Warnf(format, args...)
+ return ""
+}
diff --git a/tpl/fmt/init.go b/tpl/fmt/init.go
new file mode 100644
index 000000000..b0683f061
--- /dev/null
+++ b/tpl/fmt/init.go
@@ -0,0 +1,77 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package fmt
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "fmt"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Print,
+ []string{"print"},
+ [][2]string{
+ {`{{ print "works!" }}`, `works!`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Println,
+ []string{"println"},
+ [][2]string{
+ {`{{ println "works!" }}`, "works!\n"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Printf,
+ []string{"printf"},
+ [][2]string{
+ {`{{ printf "%s!" "works" }}`, `works!`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Errorf,
+ []string{"errorf"},
+ [][2]string{
+ {`{{ errorf "%s." "failed" }}`, ``},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Erroridf,
+ []string{"erroridf"},
+ [][2]string{
+ {`{{ erroridf "my-err-id" "%s." "failed" }}`, ``},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Warnf,
+ []string{"warnf"},
+ [][2]string{
+ {`{{ warnf "%s." "warning" }}`, ``},
+ },
+ )
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/hugo/init.go b/tpl/hugo/init.go
new file mode 100644
index 000000000..e2b4ae7af
--- /dev/null
+++ b/tpl/hugo/init.go
@@ -0,0 +1,39 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package hugo provides template functions for accessing the Site Hugo object.
+package hugo
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "hugo"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ h := d.Site.Hugo()
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return h, nil },
+ }
+
+ // We just add the Hugo struct as the namespace here. No method mappings.
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/images/images.go b/tpl/images/images.go
new file mode 100644
index 000000000..1abee1b0c
--- /dev/null
+++ b/tpl/images/images.go
@@ -0,0 +1,103 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package images provides template functions for manipulating images.
+package images
+
+import (
+ "image"
+ "sync"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/resources/images"
+
+ // Importing image codecs for image.DecodeConfig
+ _ "image/gif"
+ _ "image/jpeg"
+ _ "image/png"
+
+ // Import webp codec
+ _ "golang.org/x/image/webp"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the images-namespaced template functions.
+func New(deps *deps.Deps) *Namespace {
+ return &Namespace{
+ Filters: &images.Filters{},
+ cache: map[string]image.Config{},
+ deps: deps,
+ }
+}
+
+// Namespace provides template functions for the "images" namespace.
+type Namespace struct {
+ *images.Filters
+ cacheMu sync.RWMutex
+ cache map[string]image.Config
+
+ deps *deps.Deps
+}
+
+// Config returns the image.Config for the specified path relative to the
+// working directory.
+func (ns *Namespace) Config(path any) (image.Config, error) {
+ filename, err := cast.ToStringE(path)
+ if err != nil {
+ return image.Config{}, err
+ }
+
+ if filename == "" {
+ return image.Config{}, errors.New("config needs a filename")
+ }
+
+ // Check cache for image config.
+ ns.cacheMu.RLock()
+ config, ok := ns.cache[filename]
+ ns.cacheMu.RUnlock()
+
+ if ok {
+ return config, nil
+ }
+
+ f, err := ns.deps.Fs.WorkingDirReadOnly.Open(filename)
+ if err != nil {
+ return image.Config{}, err
+ }
+ defer f.Close()
+
+ config, _, err = image.DecodeConfig(f)
+ if err != nil {
+ return config, err
+ }
+
+ ns.cacheMu.Lock()
+ ns.cache[filename] = config
+ ns.cacheMu.Unlock()
+
+ return config, nil
+}
+
+func (ns *Namespace) Filter(args ...any) (images.ImageResource, error) {
+ if len(args) < 2 {
+ return nil, errors.New("must provide an image and one or more filters")
+ }
+
+ img := args[len(args)-1].(images.ImageResource)
+ filtersv := args[:len(args)-1]
+
+ return img.Filter(filtersv...)
+}
diff --git a/tpl/images/images_test.go b/tpl/images/images_test.go
new file mode 100644
index 000000000..aa6896521
--- /dev/null
+++ b/tpl/images/images_test.go
@@ -0,0 +1,119 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "bytes"
+ "image"
+ "image/color"
+ "image/png"
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/spf13/afero"
+ "github.com/spf13/cast"
+)
+
+type tstNoStringer struct{}
+
+var configTests = []struct {
+ path any
+ input []byte
+ expect any
+}{
+ {
+ path: "a.png",
+ input: blankImage(10, 10),
+ expect: image.Config{
+ Width: 10,
+ Height: 10,
+ ColorModel: color.NRGBAModel,
+ },
+ },
+ {
+ path: "a.png",
+ input: blankImage(10, 10),
+ expect: image.Config{
+ Width: 10,
+ Height: 10,
+ ColorModel: color.NRGBAModel,
+ },
+ },
+ {
+ path: "b.png",
+ input: blankImage(20, 15),
+ expect: image.Config{
+ Width: 20,
+ Height: 15,
+ ColorModel: color.NRGBAModel,
+ },
+ },
+ {
+ path: "a.png",
+ input: blankImage(20, 15),
+ expect: image.Config{
+ Width: 10,
+ Height: 10,
+ ColorModel: color.NRGBAModel,
+ },
+ },
+ // errors
+ {path: tstNoStringer{}, expect: false},
+ {path: "non-existent.png", expect: false},
+ {path: "", expect: false},
+}
+
+func TestNSConfig(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ v := config.NewWithTestDefaults()
+ v.Set("workingDir", "/a/b")
+
+ ns := New(&deps.Deps{Fs: hugofs.NewMem(v)})
+
+ for _, test := range configTests {
+
+ // check for expected errors early to avoid writing files
+ if b, ok := test.expect.(bool); ok && !b {
+ _, err := ns.Config(test.path)
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ // cast path to string for afero.WriteFile
+ sp, err := cast.ToStringE(test.path)
+ c.Assert(err, qt.IsNil)
+ afero.WriteFile(ns.deps.Fs.Source, filepath.Join(v.GetString("workingDir"), sp), test.input, 0755)
+
+ result, err := ns.Config(test.path)
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ c.Assert(len(ns.cache), qt.Not(qt.Equals), 0)
+ }
+}
+
+func blankImage(width, height int) []byte {
+ var buf bytes.Buffer
+ img := image.NewRGBA(image.Rect(0, 0, width, height))
+ if err := png.Encode(&buf, img); err != nil {
+ panic(err)
+ }
+ return buf.Bytes()
+}
diff --git a/tpl/images/init.go b/tpl/images/init.go
new file mode 100644
index 000000000..d9b9af4e7
--- /dev/null
+++ b/tpl/images/init.go
@@ -0,0 +1,41 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package images
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "images"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Config,
+ []string{"imageConfig"},
+ [][2]string{},
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/inflect/inflect.go b/tpl/inflect/inflect.go
new file mode 100644
index 000000000..1f887cecd
--- /dev/null
+++ b/tpl/inflect/inflect.go
@@ -0,0 +1,79 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package inflect provides template functions for the inflection of words.
+package inflect
+
+import (
+ "strconv"
+ "strings"
+
+ _inflect "github.com/gobuffalo/flect"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the inflect-namespaced template functions.
+func New() *Namespace {
+ return &Namespace{}
+}
+
+// Namespace provides template functions for the "inflect" namespace.
+type Namespace struct{}
+
+// Humanize returns the humanized form of a single parameter.
+//
+// If the parameter is either an integer or a string containing an integer
+// value, the behavior is to add the appropriate ordinal.
+//
+// Example: "my-first-post" -> "My first post"
+// Example: "103" -> "103rd"
+// Example: 52 -> "52nd"
+func (ns *Namespace) Humanize(in any) (string, error) {
+ word, err := cast.ToStringE(in)
+ if err != nil {
+ return "", err
+ }
+
+ if word == "" {
+ return "", nil
+ }
+
+ _, ok := in.(int) // original param was literal int value
+ _, err = strconv.Atoi(word) // original param was string containing an int value
+ if ok || err == nil {
+ return _inflect.Ordinalize(word), nil
+ }
+
+ str := _inflect.Humanize(word)
+ return _inflect.Humanize(strings.ToLower(str)), nil
+}
+
+// Pluralize returns the plural form of a single word.
+func (ns *Namespace) Pluralize(in any) (string, error) {
+ word, err := cast.ToStringE(in)
+ if err != nil {
+ return "", err
+ }
+
+ return _inflect.Pluralize(word), nil
+}
+
+// Singularize returns the singular form of a single word.
+func (ns *Namespace) Singularize(in any) (string, error) {
+ word, err := cast.ToStringE(in)
+ if err != nil {
+ return "", err
+ }
+
+ return _inflect.Singularize(word), nil
+}
diff --git a/tpl/inflect/inflect_test.go b/tpl/inflect/inflect_test.go
new file mode 100644
index 000000000..083e7da4e
--- /dev/null
+++ b/tpl/inflect/inflect_test.go
@@ -0,0 +1,49 @@
+package inflect
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestInflect(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ fn func(i any) (string, error)
+ in any
+ expect any
+ }{
+ {ns.Humanize, "MyCamel", "My camel"},
+ {ns.Humanize, "óbito", "Óbito"},
+ {ns.Humanize, "", ""},
+ {ns.Humanize, "103", "103rd"},
+ {ns.Humanize, "41", "41st"},
+ {ns.Humanize, 103, "103rd"},
+ {ns.Humanize, int64(92), "92nd"},
+ {ns.Humanize, "5.5", "5.5"},
+ {ns.Humanize, t, false},
+ {ns.Humanize, "this is a TEST", "This is a test"},
+ {ns.Humanize, "my-first-Post", "My first post"},
+ {ns.Pluralize, "cat", "cats"},
+ {ns.Pluralize, "", ""},
+ {ns.Pluralize, t, false},
+ {ns.Singularize, "cats", "cat"},
+ {ns.Singularize, "", ""},
+ {ns.Singularize, t, false},
+ } {
+
+ result, err := test.fn(test.in)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
diff --git a/tpl/inflect/init.go b/tpl/inflect/init.go
new file mode 100644
index 000000000..a2d28f6bf
--- /dev/null
+++ b/tpl/inflect/init.go
@@ -0,0 +1,60 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package inflect
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "inflect"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New()
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Humanize,
+ []string{"humanize"},
+ [][2]string{
+ {`{{ humanize "my-first-post" }}`, `My first post`},
+ {`{{ humanize "myCamelPost" }}`, `My camel post`},
+ {`{{ humanize "52" }}`, `52nd`},
+ {`{{ humanize 103 }}`, `103rd`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Pluralize,
+ []string{"pluralize"},
+ [][2]string{
+ {`{{ "cat" | pluralize }}`, `cats`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Singularize,
+ []string{"singularize"},
+ [][2]string{
+ {`{{ "cats" | singularize }}`, `cat`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/internal/go_templates/cfg/cfg.go b/tpl/internal/go_templates/cfg/cfg.go
new file mode 100644
index 000000000..78664d7a9
--- /dev/null
+++ b/tpl/internal/go_templates/cfg/cfg.go
@@ -0,0 +1,68 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cfg holds configuration shared by the Go command and internal/testenv.
+// Definitions that don't need to be exposed outside of cmd/go should be in
+// cmd/go/internal/cfg instead of this package.
+package cfg
+
+// KnownEnv is a list of environment variables that affect the operation
+// of the Go command.
+const KnownEnv = `
+ AR
+ CC
+ CGO_CFLAGS
+ CGO_CFLAGS_ALLOW
+ CGO_CFLAGS_DISALLOW
+ CGO_CPPFLAGS
+ CGO_CPPFLAGS_ALLOW
+ CGO_CPPFLAGS_DISALLOW
+ CGO_CXXFLAGS
+ CGO_CXXFLAGS_ALLOW
+ CGO_CXXFLAGS_DISALLOW
+ CGO_ENABLED
+ CGO_FFLAGS
+ CGO_FFLAGS_ALLOW
+ CGO_FFLAGS_DISALLOW
+ CGO_LDFLAGS
+ CGO_LDFLAGS_ALLOW
+ CGO_LDFLAGS_DISALLOW
+ CXX
+ FC
+ GCCGO
+ GO111MODULE
+ GO386
+ GOAMD64
+ GOARCH
+ GOARM
+ GOBIN
+ GOCACHE
+ GOENV
+ GOEXE
+ GOEXPERIMENT
+ GOFLAGS
+ GOGCCFLAGS
+ GOHOSTARCH
+ GOHOSTOS
+ GOINSECURE
+ GOMIPS
+ GOMIPS64
+ GOMODCACHE
+ GONOPROXY
+ GONOSUMDB
+ GOOS
+ GOPATH
+ GOPPC64
+ GOPRIVATE
+ GOPROXY
+ GOROOT
+ GOSUMDB
+ GOTMPDIR
+ GOTOOLDIR
+ GOVCS
+ GOWASM
+ GOWORK
+ GO_EXTLINK_ENABLED
+ PKG_CONFIG
+`
diff --git a/tpl/internal/go_templates/fmtsort/export_test.go b/tpl/internal/go_templates/fmtsort/export_test.go
new file mode 100644
index 000000000..25cbb5d4f
--- /dev/null
+++ b/tpl/internal/go_templates/fmtsort/export_test.go
@@ -0,0 +1,11 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fmtsort
+
+import "reflect"
+
+func Compare(a, b reflect.Value) int {
+ return compare(a, b)
+}
diff --git a/tpl/internal/go_templates/fmtsort/sort.go b/tpl/internal/go_templates/fmtsort/sort.go
new file mode 100644
index 000000000..34c1f477f
--- /dev/null
+++ b/tpl/internal/go_templates/fmtsort/sort.go
@@ -0,0 +1,220 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package fmtsort provides a general stable ordering mechanism
+// for maps, on behalf of the fmt and text/template packages.
+// It is not guaranteed to be efficient and works only for types
+// that are valid map keys.
+package fmtsort
+
+import (
+ "reflect"
+ "sort"
+)
+
+// Note: Throughout this package we avoid calling reflect.Value.Interface as
+// it is not always legal to do so and it's easier to avoid the issue than to face it.
+
+// SortedMap represents a map's keys and values. The keys and values are
+// aligned in index order: Value[i] is the value in the map corresponding to Key[i].
+type SortedMap struct {
+ Key []reflect.Value
+ Value []reflect.Value
+}
+
+func (o *SortedMap) Len() int { return len(o.Key) }
+func (o *SortedMap) Less(i, j int) bool { return compare(o.Key[i], o.Key[j]) < 0 }
+func (o *SortedMap) Swap(i, j int) {
+ o.Key[i], o.Key[j] = o.Key[j], o.Key[i]
+ o.Value[i], o.Value[j] = o.Value[j], o.Value[i]
+}
+
+// Sort accepts a map and returns a SortedMap that has the same keys and
+// values but in a stable sorted order according to the keys, modulo issues
+// raised by unorderable key values such as NaNs.
+//
+// The ordering rules are more general than with Go's < operator:
+//
+// - when applicable, nil compares low
+// - ints, floats, and strings order by <
+// - NaN compares less than non-NaN floats
+// - bool compares false before true
+// - complex compares real, then imag
+// - pointers compare by machine address
+// - channel values compare by machine address
+// - structs compare each field in turn
+// - arrays compare each element in turn.
+// Otherwise identical arrays compare by length.
+// - interface values compare first by reflect.Type describing the concrete type
+// and then by concrete value as described in the previous rules.
+//
+func Sort(mapValue reflect.Value) *SortedMap {
+ if mapValue.Type().Kind() != reflect.Map {
+ return nil
+ }
+ // Note: this code is arranged to not panic even in the presence
+ // of a concurrent map update. The runtime is responsible for
+ // yelling loudly if that happens. See issue 33275.
+ n := mapValue.Len()
+ key := make([]reflect.Value, 0, n)
+ value := make([]reflect.Value, 0, n)
+ iter := mapValue.MapRange()
+ for iter.Next() {
+ key = append(key, iter.Key())
+ value = append(value, iter.Value())
+ }
+ sorted := &SortedMap{
+ Key: key,
+ Value: value,
+ }
+ sort.Stable(sorted)
+ return sorted
+}
+
+// compare compares two values of the same type. It returns -1, 0, 1
+// according to whether a > b (1), a == b (0), or a < b (-1).
+// If the types differ, it returns -1.
+// See the comment on Sort for the comparison rules.
+func compare(aVal, bVal reflect.Value) int {
+ aType, bType := aVal.Type(), bVal.Type()
+ if aType != bType {
+ return -1 // No good answer possible, but don't return 0: they're not equal.
+ }
+ switch aVal.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ a, b := aVal.Int(), bVal.Int()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ a, b := aVal.Uint(), bVal.Uint()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.String:
+ a, b := aVal.String(), bVal.String()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Float32, reflect.Float64:
+ return floatCompare(aVal.Float(), bVal.Float())
+ case reflect.Complex64, reflect.Complex128:
+ a, b := aVal.Complex(), bVal.Complex()
+ if c := floatCompare(real(a), real(b)); c != 0 {
+ return c
+ }
+ return floatCompare(imag(a), imag(b))
+ case reflect.Bool:
+ a, b := aVal.Bool(), bVal.Bool()
+ switch {
+ case a == b:
+ return 0
+ case a:
+ return 1
+ default:
+ return -1
+ }
+ case reflect.Pointer, reflect.UnsafePointer:
+ a, b := aVal.Pointer(), bVal.Pointer()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Chan:
+ if c, ok := nilCompare(aVal, bVal); ok {
+ return c
+ }
+ ap, bp := aVal.Pointer(), bVal.Pointer()
+ switch {
+ case ap < bp:
+ return -1
+ case ap > bp:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Struct:
+ for i := 0; i < aVal.NumField(); i++ {
+ if c := compare(aVal.Field(i), bVal.Field(i)); c != 0 {
+ return c
+ }
+ }
+ return 0
+ case reflect.Array:
+ for i := 0; i < aVal.Len(); i++ {
+ if c := compare(aVal.Index(i), bVal.Index(i)); c != 0 {
+ return c
+ }
+ }
+ return 0
+ case reflect.Interface:
+ if c, ok := nilCompare(aVal, bVal); ok {
+ return c
+ }
+ c := compare(reflect.ValueOf(aVal.Elem().Type()), reflect.ValueOf(bVal.Elem().Type()))
+ if c != 0 {
+ return c
+ }
+ return compare(aVal.Elem(), bVal.Elem())
+ default:
+ // Certain types cannot appear as keys (maps, funcs, slices), but be explicit.
+ panic("bad type in compare: " + aType.String())
+ }
+}
+
+// nilCompare checks whether either value is nil. If not, the boolean is false.
+// If either value is nil, the boolean is true and the integer is the comparison
+// value. The comparison is defined to be 0 if both are nil, otherwise the one
+// nil value compares low. Both arguments must represent a chan, func,
+// interface, map, pointer, or slice.
+func nilCompare(aVal, bVal reflect.Value) (int, bool) {
+ if aVal.IsNil() {
+ if bVal.IsNil() {
+ return 0, true
+ }
+ return -1, true
+ }
+ if bVal.IsNil() {
+ return 1, true
+ }
+ return 0, false
+}
+
+// floatCompare compares two floating-point values. NaNs compare low.
+func floatCompare(a, b float64) int {
+ switch {
+ case isNaN(a):
+ return -1 // No good answer if b is a NaN so don't bother checking.
+ case isNaN(b):
+ return 1
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ }
+ return 0
+}
+
+func isNaN(a float64) bool {
+ return a != a
+}
diff --git a/tpl/internal/go_templates/fmtsort/sort_test.go b/tpl/internal/go_templates/fmtsort/sort_test.go
new file mode 100644
index 000000000..a05e8a3c3
--- /dev/null
+++ b/tpl/internal/go_templates/fmtsort/sort_test.go
@@ -0,0 +1,279 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package fmtsort_test
+
+import (
+ "fmt"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/fmtsort"
+ "math"
+ "reflect"
+ "sort"
+ "strings"
+ "testing"
+ "unsafe"
+)
+
+var compareTests = [][]reflect.Value{
+ ct(reflect.TypeOf(int(0)), -1, 0, 1),
+ ct(reflect.TypeOf(int8(0)), -1, 0, 1),
+ ct(reflect.TypeOf(int16(0)), -1, 0, 1),
+ ct(reflect.TypeOf(int32(0)), -1, 0, 1),
+ ct(reflect.TypeOf(int64(0)), -1, 0, 1),
+ ct(reflect.TypeOf(uint(0)), 0, 1, 5),
+ ct(reflect.TypeOf(uint8(0)), 0, 1, 5),
+ ct(reflect.TypeOf(uint16(0)), 0, 1, 5),
+ ct(reflect.TypeOf(uint32(0)), 0, 1, 5),
+ ct(reflect.TypeOf(uint64(0)), 0, 1, 5),
+ ct(reflect.TypeOf(uintptr(0)), 0, 1, 5),
+ ct(reflect.TypeOf(string("")), "", "a", "ab"),
+ ct(reflect.TypeOf(float32(0)), math.NaN(), math.Inf(-1), -1e10, 0, 1e10, math.Inf(1)),
+ ct(reflect.TypeOf(float64(0)), math.NaN(), math.Inf(-1), -1e10, 0, 1e10, math.Inf(1)),
+ ct(reflect.TypeOf(complex64(0+1i)), -1-1i, -1+0i, -1+1i, 0-1i, 0+0i, 0+1i, 1-1i, 1+0i, 1+1i),
+ ct(reflect.TypeOf(complex128(0+1i)), -1-1i, -1+0i, -1+1i, 0-1i, 0+0i, 0+1i, 1-1i, 1+0i, 1+1i),
+ ct(reflect.TypeOf(false), false, true),
+ ct(reflect.TypeOf(&ints[0]), &ints[0], &ints[1], &ints[2]),
+ ct(reflect.TypeOf(unsafe.Pointer(&ints[0])), unsafe.Pointer(&ints[0]), unsafe.Pointer(&ints[1]), unsafe.Pointer(&ints[2])),
+ ct(reflect.TypeOf(chans[0]), chans[0], chans[1], chans[2]),
+ ct(reflect.TypeOf(toy{}), toy{0, 1}, toy{0, 2}, toy{1, -1}, toy{1, 1}),
+ ct(reflect.TypeOf([2]int{}), [2]int{1, 1}, [2]int{1, 2}, [2]int{2, 0}),
+ ct(reflect.TypeOf(any(any(0))), iFace, 1, 2, 3),
+}
+
+var iFace any
+
+func ct(typ reflect.Type, args ...any) []reflect.Value {
+ value := make([]reflect.Value, len(args))
+ for i, v := range args {
+ x := reflect.ValueOf(v)
+ if !x.IsValid() { // Make it a typed nil.
+ x = reflect.Zero(typ)
+ } else {
+ x = x.Convert(typ)
+ }
+ value[i] = x
+ }
+ return value
+}
+
+func TestCompare(t *testing.T) {
+ for _, test := range compareTests {
+ for i, v0 := range test {
+ for j, v1 := range test {
+ c := fmtsort.Compare(v0, v1)
+ var expect int
+ switch {
+ case i == j:
+ expect = 0
+ // NaNs are tricky.
+ if typ := v0.Type(); (typ.Kind() == reflect.Float32 || typ.Kind() == reflect.Float64) && math.IsNaN(v0.Float()) {
+ expect = -1
+ }
+ case i < j:
+ expect = -1
+ case i > j:
+ expect = 1
+ }
+ if c != expect {
+ t.Errorf("%s: compare(%v,%v)=%d; expect %d", v0.Type(), v0, v1, c, expect)
+ }
+ }
+ }
+ }
+}
+
+type sortTest struct {
+ data any // Always a map.
+ print string // Printed result using our custom printer.
+}
+
+var sortTests = []sortTest{
+ {
+ map[int]string{7: "bar", -3: "foo"},
+ "-3:foo 7:bar",
+ },
+ {
+ map[uint8]string{7: "bar", 3: "foo"},
+ "3:foo 7:bar",
+ },
+ {
+ map[string]string{"7": "bar", "3": "foo"},
+ "3:foo 7:bar",
+ },
+ {
+ map[float64]string{7: "bar", -3: "foo", math.NaN(): "nan", math.Inf(0): "inf"},
+ "NaN:nan -3:foo 7:bar +Inf:inf",
+ },
+ {
+ map[complex128]string{7 + 2i: "bar2", 7 + 1i: "bar", -3: "foo", complex(math.NaN(), 0i): "nan", complex(math.Inf(0), 0i): "inf"},
+ "(NaN+0i):nan (-3+0i):foo (7+1i):bar (7+2i):bar2 (+Inf+0i):inf",
+ },
+ {
+ map[bool]string{true: "true", false: "false"},
+ "false:false true:true",
+ },
+ {
+ chanMap(),
+ "CHAN0:0 CHAN1:1 CHAN2:2",
+ },
+ {
+ pointerMap(),
+ "PTR0:0 PTR1:1 PTR2:2",
+ },
+ {
+ unsafePointerMap(),
+ "UNSAFEPTR0:0 UNSAFEPTR1:1 UNSAFEPTR2:2",
+ },
+ {
+ map[toy]string{{7, 2}: "72", {7, 1}: "71", {3, 4}: "34"},
+ "{3 4}:34 {7 1}:71 {7 2}:72",
+ },
+ {
+ map[[2]int]string{{7, 2}: "72", {7, 1}: "71", {3, 4}: "34"},
+ "[3 4]:34 [7 1]:71 [7 2]:72",
+ },
+}
+
+func sprint(data any) string {
+ om := fmtsort.Sort(reflect.ValueOf(data))
+ if om == nil {
+ return "nil"
+ }
+ b := new(strings.Builder)
+ for i, key := range om.Key {
+ if i > 0 {
+ b.WriteRune(' ')
+ }
+ b.WriteString(sprintKey(key))
+ b.WriteRune(':')
+ b.WriteString(fmt.Sprint(om.Value[i]))
+ }
+ return b.String()
+}
+
+// sprintKey formats a reflect.Value but gives reproducible values for some
+// problematic types such as pointers. Note that it only does special handling
+// for the troublesome types used in the test cases; it is not a general
+// printer.
+func sprintKey(key reflect.Value) string {
+ switch str := key.Type().String(); str {
+ case "*int":
+ ptr := key.Interface().(*int)
+ for i := range ints {
+ if ptr == &ints[i] {
+ return fmt.Sprintf("PTR%d", i)
+ }
+ }
+ return "PTR???"
+ case "unsafe.Pointer":
+ ptr := key.Interface().(unsafe.Pointer)
+ for i := range ints {
+ if ptr == unsafe.Pointer(&ints[i]) {
+ return fmt.Sprintf("UNSAFEPTR%d", i)
+ }
+ }
+ return "UNSAFEPTR???"
+ case "chan int":
+ c := key.Interface().(chan int)
+ for i := range chans {
+ if c == chans[i] {
+ return fmt.Sprintf("CHAN%d", i)
+ }
+ }
+ return "CHAN???"
+ default:
+ return fmt.Sprint(key)
+ }
+}
+
+var (
+ ints [3]int
+ chans = makeChans()
+)
+
+func makeChans() []chan int {
+ cs := []chan int{make(chan int), make(chan int), make(chan int)}
+ // Order channels by address. See issue #49431.
+ // TODO: pin these pointers once pinning is available (#46787).
+ sort.Slice(cs, func(i, j int) bool {
+ return uintptr(reflect.ValueOf(cs[i]).UnsafePointer()) < uintptr(reflect.ValueOf(cs[j]).UnsafePointer())
+ })
+ return cs
+}
+
+func pointerMap() map[*int]string {
+ m := make(map[*int]string)
+ for i := 2; i >= 0; i-- {
+ m[&ints[i]] = fmt.Sprint(i)
+ }
+ return m
+}
+
+func unsafePointerMap() map[unsafe.Pointer]string {
+ m := make(map[unsafe.Pointer]string)
+ for i := 2; i >= 0; i-- {
+ m[unsafe.Pointer(&ints[i])] = fmt.Sprint(i)
+ }
+ return m
+}
+
+func chanMap() map[chan int]string {
+ m := make(map[chan int]string)
+ for i := 2; i >= 0; i-- {
+ m[chans[i]] = fmt.Sprint(i)
+ }
+ return m
+}
+
+type toy struct {
+ A int // Exported.
+ b int // Unexported.
+}
+
+func TestOrder(t *testing.T) {
+ for _, test := range sortTests {
+ got := sprint(test.data)
+ if got != test.print {
+ t.Errorf("%s: got %q, want %q", reflect.TypeOf(test.data), got, test.print)
+ }
+ }
+}
+
+func TestInterface(t *testing.T) {
+ // A map containing multiple concrete types should be sorted by type,
+ // then value. However, the relative ordering of types is unspecified,
+ // so test this by checking the presence of sorted subgroups.
+ m := map[any]string{
+ [2]int{1, 0}: "",
+ [2]int{0, 1}: "",
+ true: "",
+ false: "",
+ 3.1: "",
+ 2.1: "",
+ 1.1: "",
+ math.NaN(): "",
+ 3: "",
+ 2: "",
+ 1: "",
+ "c": "",
+ "b": "",
+ "a": "",
+ struct{ x, y int }{1, 0}: "",
+ struct{ x, y int }{0, 1}: "",
+ }
+ got := sprint(m)
+ typeGroups := []string{
+ "NaN: 1.1: 2.1: 3.1:", // float64
+ "false: true:", // bool
+ "1: 2: 3:", // int
+ "a: b: c:", // string
+ "[0 1]: [1 0]:", // [2]int
+ "{0 1}: {1 0}:", // struct{ x int; y int }
+ }
+ for _, g := range typeGroups {
+ if !strings.Contains(got, g) {
+ t.Errorf("sorted map should contain %q", g)
+ }
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/attr.go b/tpl/internal/go_templates/htmltemplate/attr.go
new file mode 100644
index 000000000..6c52211fe
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/attr.go
@@ -0,0 +1,175 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "strings"
+)
+
+// attrTypeMap[n] describes the value of the given attribute.
+// If an attribute affects (or can mask) the encoding or interpretation of
+// other content, or affects the contents, idempotency, or credentials of a
+// network message, then the value in this map is contentTypeUnsafe.
+// This map is derived from HTML5, specifically
+// https://www.w3.org/TR/html5/Overview.html#attributes-1
+// as well as "%URI"-typed attributes from
+// https://www.w3.org/TR/html4/index/attributes.html
+var attrTypeMap = map[string]contentType{
+ "accept": contentTypePlain,
+ "accept-charset": contentTypeUnsafe,
+ "action": contentTypeURL,
+ "alt": contentTypePlain,
+ "archive": contentTypeURL,
+ "async": contentTypeUnsafe,
+ "autocomplete": contentTypePlain,
+ "autofocus": contentTypePlain,
+ "autoplay": contentTypePlain,
+ "background": contentTypeURL,
+ "border": contentTypePlain,
+ "checked": contentTypePlain,
+ "cite": contentTypeURL,
+ "challenge": contentTypeUnsafe,
+ "charset": contentTypeUnsafe,
+ "class": contentTypePlain,
+ "classid": contentTypeURL,
+ "codebase": contentTypeURL,
+ "cols": contentTypePlain,
+ "colspan": contentTypePlain,
+ "content": contentTypeUnsafe,
+ "contenteditable": contentTypePlain,
+ "contextmenu": contentTypePlain,
+ "controls": contentTypePlain,
+ "coords": contentTypePlain,
+ "crossorigin": contentTypeUnsafe,
+ "data": contentTypeURL,
+ "datetime": contentTypePlain,
+ "default": contentTypePlain,
+ "defer": contentTypeUnsafe,
+ "dir": contentTypePlain,
+ "dirname": contentTypePlain,
+ "disabled": contentTypePlain,
+ "draggable": contentTypePlain,
+ "dropzone": contentTypePlain,
+ "enctype": contentTypeUnsafe,
+ "for": contentTypePlain,
+ "form": contentTypeUnsafe,
+ "formaction": contentTypeURL,
+ "formenctype": contentTypeUnsafe,
+ "formmethod": contentTypeUnsafe,
+ "formnovalidate": contentTypeUnsafe,
+ "formtarget": contentTypePlain,
+ "headers": contentTypePlain,
+ "height": contentTypePlain,
+ "hidden": contentTypePlain,
+ "high": contentTypePlain,
+ "href": contentTypeURL,
+ "hreflang": contentTypePlain,
+ "http-equiv": contentTypeUnsafe,
+ "icon": contentTypeURL,
+ "id": contentTypePlain,
+ "ismap": contentTypePlain,
+ "keytype": contentTypeUnsafe,
+ "kind": contentTypePlain,
+ "label": contentTypePlain,
+ "lang": contentTypePlain,
+ "language": contentTypeUnsafe,
+ "list": contentTypePlain,
+ "longdesc": contentTypeURL,
+ "loop": contentTypePlain,
+ "low": contentTypePlain,
+ "manifest": contentTypeURL,
+ "max": contentTypePlain,
+ "maxlength": contentTypePlain,
+ "media": contentTypePlain,
+ "mediagroup": contentTypePlain,
+ "method": contentTypeUnsafe,
+ "min": contentTypePlain,
+ "multiple": contentTypePlain,
+ "name": contentTypePlain,
+ "novalidate": contentTypeUnsafe,
+ // Skip handler names from
+ // https://www.w3.org/TR/html5/webappapis.html#event-handlers-on-elements,-document-objects,-and-window-objects
+ // since we have special handling in attrType.
+ "open": contentTypePlain,
+ "optimum": contentTypePlain,
+ "pattern": contentTypeUnsafe,
+ "placeholder": contentTypePlain,
+ "poster": contentTypeURL,
+ "profile": contentTypeURL,
+ "preload": contentTypePlain,
+ "pubdate": contentTypePlain,
+ "radiogroup": contentTypePlain,
+ "readonly": contentTypePlain,
+ "rel": contentTypeUnsafe,
+ "required": contentTypePlain,
+ "reversed": contentTypePlain,
+ "rows": contentTypePlain,
+ "rowspan": contentTypePlain,
+ "sandbox": contentTypeUnsafe,
+ "spellcheck": contentTypePlain,
+ "scope": contentTypePlain,
+ "scoped": contentTypePlain,
+ "seamless": contentTypePlain,
+ "selected": contentTypePlain,
+ "shape": contentTypePlain,
+ "size": contentTypePlain,
+ "sizes": contentTypePlain,
+ "span": contentTypePlain,
+ "src": contentTypeURL,
+ "srcdoc": contentTypeHTML,
+ "srclang": contentTypePlain,
+ "srcset": contentTypeSrcset,
+ "start": contentTypePlain,
+ "step": contentTypePlain,
+ "style": contentTypeCSS,
+ "tabindex": contentTypePlain,
+ "target": contentTypePlain,
+ "title": contentTypePlain,
+ "type": contentTypeUnsafe,
+ "usemap": contentTypeURL,
+ "value": contentTypeUnsafe,
+ "width": contentTypePlain,
+ "wrap": contentTypePlain,
+ "xmlns": contentTypeURL,
+}
+
+// attrType returns a conservative (upper-bound on authority) guess at the
+// type of the lowercase named attribute.
+func attrType(name string) contentType {
+ if strings.HasPrefix(name, "data-") {
+ // Strip data- so that custom attribute heuristics below are
+ // widely applied.
+ // Treat data-action as URL below.
+ name = name[5:]
+ } else if prefix, short, ok := strings.Cut(name, ":"); ok {
+ if prefix == "xmlns" {
+ return contentTypeURL
+ }
+ // Treat svg:href and xlink:href as href below.
+ name = short
+ }
+ if t, ok := attrTypeMap[name]; ok {
+ return t
+ }
+ // Treat partial event handler names as script.
+ if strings.HasPrefix(name, "on") {
+ return contentTypeJS
+ }
+
+ // Heuristics to prevent "javascript:..." injection in custom
+ // data attributes and custom attributes like g:tweetUrl.
+ // https://www.w3.org/TR/html5/dom.html#embedding-custom-non-visible-data-with-the-data-*-attributes
+ // "Custom data attributes are intended to store custom data
+ // private to the page or application, for which there are no
+ // more appropriate attributes or elements."
+ // Developers seem to store URL content in data URLs that start
+ // or end with "URI" or "URL".
+ if strings.Contains(name, "src") ||
+ strings.Contains(name, "uri") ||
+ strings.Contains(name, "url") {
+ return contentTypeURL
+ }
+ return contentTypePlain
+}
diff --git a/tpl/internal/go_templates/htmltemplate/attr_string.go b/tpl/internal/go_templates/htmltemplate/attr_string.go
new file mode 100644
index 000000000..babe70c08
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/attr_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type attr"; DO NOT EDIT.
+
+package template
+
+import "strconv"
+
+const _attr_name = "attrNoneattrScriptattrScriptTypeattrStyleattrURLattrSrcset"
+
+var _attr_index = [...]uint8{0, 8, 18, 32, 41, 48, 58}
+
+func (i attr) String() string {
+ if i >= attr(len(_attr_index)-1) {
+ return "attr(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _attr_name[_attr_index[i]:_attr_index[i+1]]
+}
diff --git a/tpl/internal/go_templates/htmltemplate/clone_test.go b/tpl/internal/go_templates/htmltemplate/clone_test.go
new file mode 100644
index 000000000..553f656b5
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/clone_test.go
@@ -0,0 +1,283 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+ "sync"
+ "testing"
+
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+func TestAddParseTreeHTML(t *testing.T) {
+ root := Must(New("root").Parse(`{{define "a"}} {{.}} {{template "b"}} {{.}} "></a>{{end}}`))
+ tree, err := parse.Parse("t", `{{define "b"}}<a href="{{end}}`, "", "", nil, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ added := Must(root.AddParseTree("b", tree["b"]))
+ b := new(bytes.Buffer)
+ err = added.ExecuteTemplate(b, "a", "1>0")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if got, want := b.String(), ` 1&gt;0 <a href=" 1%3e0 "></a>`; got != want {
+ t.Errorf("got %q want %q", got, want)
+ }
+}
+
+func TestClone(t *testing.T) {
+ // The {{.}} will be executed with data "<i>*/" in different contexts.
+ // In the t0 template, it will be in a text context.
+ // In the t1 template, it will be in a URL context.
+ // In the t2 template, it will be in a JavaScript context.
+ // In the t3 template, it will be in a CSS context.
+ const tmpl = `{{define "a"}}{{template "lhs"}}{{.}}{{template "rhs"}}{{end}}`
+ b := new(bytes.Buffer)
+
+ // Create an incomplete template t0.
+ t0 := Must(New("t0").Parse(tmpl))
+
+ // Clone t0 as t1.
+ t1 := Must(t0.Clone())
+ Must(t1.Parse(`{{define "lhs"}} <a href=" {{end}}`))
+ Must(t1.Parse(`{{define "rhs"}} "></a> {{end}}`))
+
+ // Execute t1.
+ b.Reset()
+ if err := t1.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
+ t.Fatal(err)
+ }
+ if got, want := b.String(), ` <a href=" %3ci%3e*/ "></a> `; got != want {
+ t.Errorf("t1: got %q want %q", got, want)
+ }
+
+ // Clone t0 as t2.
+ t2 := Must(t0.Clone())
+ Must(t2.Parse(`{{define "lhs"}} <p onclick="javascript: {{end}}`))
+ Must(t2.Parse(`{{define "rhs"}} "></p> {{end}}`))
+
+ // Execute t2.
+ b.Reset()
+ if err := t2.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
+ t.Fatal(err)
+ }
+ if got, want := b.String(), ` <p onclick="javascript: &#34;\u003ci\u003e*/&#34; "></p> `; got != want {
+ t.Errorf("t2: got %q want %q", got, want)
+ }
+
+ // Clone t0 as t3, but do not execute t3 yet.
+ t3 := Must(t0.Clone())
+ Must(t3.Parse(`{{define "lhs"}} <style> {{end}}`))
+ Must(t3.Parse(`{{define "rhs"}} </style> {{end}}`))
+
+ // Complete t0.
+ Must(t0.Parse(`{{define "lhs"}} ( {{end}}`))
+ Must(t0.Parse(`{{define "rhs"}} ) {{end}}`))
+
+ // Clone t0 as t4. Redefining the "lhs" template should not fail.
+ t4 := Must(t0.Clone())
+ if _, err := t4.Parse(`{{define "lhs"}} OK {{end}}`); err != nil {
+ t.Errorf(`redefine "lhs": got err %v want nil`, err)
+ }
+ // Cloning t1 should fail as it has been executed.
+ if _, err := t1.Clone(); err == nil {
+ t.Error("cloning t1: got nil err want non-nil")
+ }
+ // Redefining the "lhs" template in t1 should fail as it has been executed.
+ if _, err := t1.Parse(`{{define "lhs"}} OK {{end}}`); err == nil {
+ t.Error(`redefine "lhs": got nil err want non-nil`)
+ }
+
+ // Execute t0.
+ b.Reset()
+ if err := t0.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
+ t.Fatal(err)
+ }
+ if got, want := b.String(), ` ( &lt;i&gt;*/ ) `; got != want {
+ t.Errorf("t0: got %q want %q", got, want)
+ }
+
+ // Clone t0. This should fail, as t0 has already executed.
+ if _, err := t0.Clone(); err == nil {
+ t.Error(`t0.Clone(): got nil err want non-nil`)
+ }
+
+ // Similarly, cloning sub-templates should fail.
+ if _, err := t0.Lookup("a").Clone(); err == nil {
+ t.Error(`t0.Lookup("a").Clone(): got nil err want non-nil`)
+ }
+ if _, err := t0.Lookup("lhs").Clone(); err == nil {
+ t.Error(`t0.Lookup("lhs").Clone(): got nil err want non-nil`)
+ }
+
+ // Execute t3.
+ b.Reset()
+ if err := t3.ExecuteTemplate(b, "a", "<i>*/"); err != nil {
+ t.Fatal(err)
+ }
+ if got, want := b.String(), ` <style> ZgotmplZ </style> `; got != want {
+ t.Errorf("t3: got %q want %q", got, want)
+ }
+}
+
+func TestTemplates(t *testing.T) {
+ names := []string{"t0", "a", "lhs", "rhs"}
+ // Some template definitions borrowed from TestClone.
+ const tmpl = `
+ {{define "a"}}{{template "lhs"}}{{.}}{{template "rhs"}}{{end}}
+ {{define "lhs"}} <a href=" {{end}}
+ {{define "rhs"}} "></a> {{end}}`
+ t0 := Must(New("t0").Parse(tmpl))
+ templates := t0.Templates()
+ if len(templates) != len(names) {
+ t.Errorf("expected %d templates; got %d", len(names), len(templates))
+ }
+ for _, name := range names {
+ found := false
+ for _, tmpl := range templates {
+ if name == tmpl.text.Name() {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Error("could not find template", name)
+ }
+ }
+}
+
+// This used to crash; https://golang.org/issue/3281
+func TestCloneCrash(t *testing.T) {
+ t1 := New("all")
+ Must(t1.New("t1").Parse(`{{define "foo"}}foo{{end}}`))
+ t1.Clone()
+}
+
+// Ensure that this guarantee from the docs is upheld:
+// "Further calls to Parse in the copy will add templates
+// to the copy but not to the original."
+func TestCloneThenParse(t *testing.T) {
+ t0 := Must(New("t0").Parse(`{{define "a"}}{{template "embedded"}}{{end}}`))
+ t1 := Must(t0.Clone())
+ Must(t1.Parse(`{{define "embedded"}}t1{{end}}`))
+ if len(t0.Templates())+1 != len(t1.Templates()) {
+ t.Error("adding a template to a clone added it to the original")
+ }
+ // double check that the embedded template isn't available in the original
+ err := t0.ExecuteTemplate(io.Discard, "a", nil)
+ if err == nil {
+ t.Error("expected 'no such template' error")
+ }
+}
+
+// https://golang.org/issue/5980
+func TestFuncMapWorksAfterClone(t *testing.T) {
+ funcs := FuncMap{"customFunc": func() (string, error) {
+ return "", errors.New("issue5980")
+ }}
+
+ // get the expected error output (no clone)
+ uncloned := Must(New("").Funcs(funcs).Parse("{{customFunc}}"))
+ wantErr := uncloned.Execute(io.Discard, nil)
+
+ // toClone must be the same as uncloned. It has to be recreated from scratch,
+ // since cloning cannot occur after execution.
+ toClone := Must(New("").Funcs(funcs).Parse("{{customFunc}}"))
+ cloned := Must(toClone.Clone())
+ gotErr := cloned.Execute(io.Discard, nil)
+
+ if wantErr.Error() != gotErr.Error() {
+ t.Errorf("clone error message mismatch want %q got %q", wantErr, gotErr)
+ }
+}
+
+// https://golang.org/issue/16101
+func TestTemplateCloneExecuteRace(t *testing.T) {
+ const (
+ input = `<title>{{block "a" .}}a{{end}}</title><body>{{block "b" .}}b{{end}}<body>`
+ overlay = `{{define "b"}}A{{end}}`
+ )
+ outer := Must(New("outer").Parse(input))
+ tmpl := Must(Must(outer.Clone()).Parse(overlay))
+
+ var wg sync.WaitGroup
+ for i := 0; i < 10; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for i := 0; i < 100; i++ {
+ if err := tmpl.Execute(io.Discard, "data"); err != nil {
+ panic(err)
+ }
+ }
+ }()
+ }
+ wg.Wait()
+}
+
+func TestTemplateCloneLookup(t *testing.T) {
+ // Template.escape makes an assumption that the template associated
+ // with t.Name() is t. Check that this holds.
+ tmpl := Must(New("x").Parse("a"))
+ tmpl = Must(tmpl.Clone())
+ if tmpl.Lookup(tmpl.Name()) != tmpl {
+ t.Error("after Clone, tmpl.Lookup(tmpl.Name()) != tmpl")
+ }
+}
+
+func TestCloneGrowth(t *testing.T) {
+ tmpl := Must(New("root").Parse(`<title>{{block "B". }}Arg{{end}}</title>`))
+ tmpl = Must(tmpl.Clone())
+ Must(tmpl.Parse(`{{define "B"}}Text{{end}}`))
+ for i := 0; i < 10; i++ {
+ tmpl.Execute(io.Discard, nil)
+ }
+ if len(tmpl.DefinedTemplates()) > 200 {
+ t.Fatalf("too many templates: %v", len(tmpl.DefinedTemplates()))
+ }
+}
+
+// https://golang.org/issue/17735
+func TestCloneRedefinedName(t *testing.T) {
+ const base = `
+{{ define "a" -}}<title>{{ template "b" . -}}</title>{{ end -}}
+{{ define "b" }}{{ end -}}
+`
+ const page = `{{ template "a" . }}`
+
+ t1 := Must(New("a").Parse(base))
+
+ for i := 0; i < 2; i++ {
+ t2 := Must(t1.Clone())
+ t2 = Must(t2.New(fmt.Sprintf("%d", i)).Parse(page))
+ err := t2.Execute(io.Discard, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+}
+
+// Issue 24791.
+func TestClonePipe(t *testing.T) {
+ a := Must(New("a").Parse(`{{define "a"}}{{range $v := .A}}{{$v}}{{end}}{{end}}`))
+ data := struct{ A []string }{A: []string{"hi"}}
+ b := Must(a.Clone())
+ var buf strings.Builder
+ if err := b.Execute(&buf, &data); err != nil {
+ t.Fatal(err)
+ }
+ if got, want := buf.String(), "hi"; got != want {
+ t.Errorf("got %q want %q", got, want)
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/content.go b/tpl/internal/go_templates/htmltemplate/content.go
new file mode 100644
index 000000000..65cc3086c
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/content.go
@@ -0,0 +1,102 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "fmt"
+ htmltemplate "html/template"
+ "reflect"
+)
+
+type contentType uint8
+
+const (
+ contentTypePlain contentType = iota
+ contentTypeCSS
+ contentTypeHTML
+ contentTypeHTMLAttr
+ contentTypeJS
+ contentTypeJSStr
+ contentTypeURL
+ contentTypeSrcset
+ // contentTypeUnsafe is used in attr.go for values that affect how
+ // embedded content and network messages are formed, vetted,
+ // or interpreted; or which credentials network messages carry.
+ contentTypeUnsafe
+)
+
+// indirect returns the value, after dereferencing as many times
+// as necessary to reach the base type (or nil).
+func indirect(a any) any {
+ if a == nil {
+ return nil
+ }
+ if t := reflect.TypeOf(a); t.Kind() != reflect.Pointer {
+ // Avoid creating a reflect.Value if it's not a pointer.
+ return a
+ }
+ v := reflect.ValueOf(a)
+ for v.Kind() == reflect.Pointer && !v.IsNil() {
+ v = v.Elem()
+ }
+ return v.Interface()
+}
+
+var (
+ errorType = reflect.TypeOf((*error)(nil)).Elem()
+ fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
+)
+
+// indirectToStringerOrError returns the value, after dereferencing as many times
+// as necessary to reach the base type (or nil) or an implementation of fmt.Stringer
+// or error,
+func indirectToStringerOrError(a any) any {
+ if a == nil {
+ return nil
+ }
+ v := reflect.ValueOf(a)
+ for !v.Type().Implements(fmtStringerType) && !v.Type().Implements(errorType) && v.Kind() == reflect.Pointer && !v.IsNil() {
+ v = v.Elem()
+ }
+ return v.Interface()
+}
+
+// stringify converts its arguments to a string and the type of the content.
+// All pointers are dereferenced, as in the text/template package.
+func stringify(args ...any) (string, contentType) {
+ if len(args) == 1 {
+ switch s := indirect(args[0]).(type) {
+ case string:
+ return s, contentTypePlain
+ case htmltemplate.CSS:
+ return string(s), contentTypeCSS
+ case htmltemplate.HTML:
+ return string(s), contentTypeHTML
+ case htmltemplate.HTMLAttr:
+ return string(s), contentTypeHTMLAttr
+ case htmltemplate.JS:
+ return string(s), contentTypeJS
+ case htmltemplate.JSStr:
+ return string(s), contentTypeJSStr
+ case htmltemplate.URL:
+ return string(s), contentTypeURL
+ case htmltemplate.Srcset:
+ return string(s), contentTypeSrcset
+ }
+ }
+ i := 0
+ for _, arg := range args {
+ // We skip untyped nil arguments for backward compatibility.
+ // Without this they would be output as <nil>, escaped.
+ // See issue 25875.
+ if arg == nil {
+ continue
+ }
+
+ args[i] = indirectToStringerOrError(arg)
+ i++
+ }
+ return fmt.Sprint(args[:i]...), contentTypePlain
+}
diff --git a/tpl/internal/go_templates/htmltemplate/content_test.go b/tpl/internal/go_templates/htmltemplate/content_test.go
new file mode 100644
index 000000000..29221a4ad
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/content_test.go
@@ -0,0 +1,462 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "fmt"
+ htmltemplate "html/template"
+ "strings"
+ "testing"
+)
+
+func TestTypedContent(t *testing.T) {
+ data := []any{
+ `<b> "foo%" O'Reilly &bar;`,
+ htmltemplate.CSS(`a[href =~ "//example.com"]#foo`),
+ htmltemplate.HTML(`Hello, <b>World</b> &amp;tc!`),
+ htmltemplate.HTMLAttr(` dir="ltr"`),
+ htmltemplate.JS(`c && alert("Hello, World!");`),
+ htmltemplate.JSStr(`Hello, World & O'Reilly\u0021`),
+ htmltemplate.URL(`greeting=H%69,&addressee=(World)`),
+ htmltemplate.Srcset(`greeting=H%69,&addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`),
+ htmltemplate.URL(`,foo/,`),
+ }
+
+ // For each content sensitive escaper, see how it does on
+ // each of the typed strings above.
+ tests := []struct {
+ // A template containing a single {{.}}.
+ input string
+ want []string
+ }{
+ {
+ `<style>{{.}} { color: blue }</style>`,
+ []string{
+ `ZgotmplZ`,
+ // Allowed but not escaped.
+ `a[href =~ "//example.com"]#foo`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ },
+ },
+ {
+ `<div style="{{.}}">`,
+ []string{
+ `ZgotmplZ`,
+ // Allowed and HTML escaped.
+ `a[href =~ &#34;//example.com&#34;]#foo`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ },
+ },
+ {
+ `{{.}}`,
+ []string{
+ `&lt;b&gt; &#34;foo%&#34; O&#39;Reilly &amp;bar;`,
+ `a[href =~ &#34;//example.com&#34;]#foo`,
+ // Not escaped.
+ `Hello, <b>World</b> &amp;tc!`,
+ ` dir=&#34;ltr&#34;`,
+ `c &amp;&amp; alert(&#34;Hello, World!&#34;);`,
+ `Hello, World &amp; O&#39;Reilly\u0021`,
+ `greeting=H%69,&amp;addressee=(World)`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<a{{.}}>`,
+ []string{
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ // Allowed and HTML escaped.
+ ` dir="ltr"`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ `ZgotmplZ`,
+ },
+ },
+ {
+ `<a title={{.}}>`,
+ []string{
+ `&lt;b&gt;&#32;&#34;foo%&#34;&#32;O&#39;Reilly&#32;&amp;bar;`,
+ `a[href&#32;&#61;~&#32;&#34;//example.com&#34;]#foo`,
+ // Tags stripped, spaces escaped, entity not re-escaped.
+ `Hello,&#32;World&#32;&amp;tc!`,
+ `&#32;dir&#61;&#34;ltr&#34;`,
+ `c&#32;&amp;&amp;&#32;alert(&#34;Hello,&#32;World!&#34;);`,
+ `Hello,&#32;World&#32;&amp;&#32;O&#39;Reilly\u0021`,
+ `greeting&#61;H%69,&amp;addressee&#61;(World)`,
+ `greeting&#61;H%69,&amp;addressee&#61;(World)&#32;2x,&#32;https://golang.org/favicon.ico&#32;500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<a title='{{.}}'>`,
+ []string{
+ `&lt;b&gt; &#34;foo%&#34; O&#39;Reilly &amp;bar;`,
+ `a[href =~ &#34;//example.com&#34;]#foo`,
+ // Tags stripped, entity not re-escaped.
+ `Hello, World &amp;tc!`,
+ ` dir=&#34;ltr&#34;`,
+ `c &amp;&amp; alert(&#34;Hello, World!&#34;);`,
+ `Hello, World &amp; O&#39;Reilly\u0021`,
+ `greeting=H%69,&amp;addressee=(World)`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<textarea>{{.}}</textarea>`,
+ []string{
+ `&lt;b&gt; &#34;foo%&#34; O&#39;Reilly &amp;bar;`,
+ `a[href =~ &#34;//example.com&#34;]#foo`,
+ // Angle brackets escaped to prevent injection of close tags, entity not re-escaped.
+ `Hello, &lt;b&gt;World&lt;/b&gt; &amp;tc!`,
+ ` dir=&#34;ltr&#34;`,
+ `c &amp;&amp; alert(&#34;Hello, World!&#34;);`,
+ `Hello, World &amp; O&#39;Reilly\u0021`,
+ `greeting=H%69,&amp;addressee=(World)`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<script>alert({{.}})</script>`,
+ []string{
+ `"\u003cb\u003e \"foo%\" O'Reilly \u0026bar;"`,
+ `"a[href =~ \"//example.com\"]#foo"`,
+ `"Hello, \u003cb\u003eWorld\u003c/b\u003e \u0026amp;tc!"`,
+ `" dir=\"ltr\""`,
+ // Not escaped.
+ `c && alert("Hello, World!");`,
+ // Escape sequence not over-escaped.
+ `"Hello, World & O'Reilly\u0021"`,
+ `"greeting=H%69,\u0026addressee=(World)"`,
+ `"greeting=H%69,\u0026addressee=(World) 2x, https://golang.org/favicon.ico 500.5w"`,
+ `",foo/,"`,
+ },
+ },
+ {
+ `<button onclick="alert({{.}})">`,
+ []string{
+ `&#34;\u003cb\u003e \&#34;foo%\&#34; O&#39;Reilly \u0026bar;&#34;`,
+ `&#34;a[href =~ \&#34;//example.com\&#34;]#foo&#34;`,
+ `&#34;Hello, \u003cb\u003eWorld\u003c/b\u003e \u0026amp;tc!&#34;`,
+ `&#34; dir=\&#34;ltr\&#34;&#34;`,
+ // Not JS escaped but HTML escaped.
+ `c &amp;&amp; alert(&#34;Hello, World!&#34;);`,
+ // Escape sequence not over-escaped.
+ `&#34;Hello, World &amp; O&#39;Reilly\u0021&#34;`,
+ `&#34;greeting=H%69,\u0026addressee=(World)&#34;`,
+ `&#34;greeting=H%69,\u0026addressee=(World) 2x, https://golang.org/favicon.ico 500.5w&#34;`,
+ `&#34;,foo/,&#34;`,
+ },
+ },
+ {
+ `<script>alert("{{.}}")</script>`,
+ []string{
+ `\u003cb\u003e \u0022foo%\u0022 O\u0027Reilly \u0026bar;`,
+ `a[href =~ \u0022\/\/example.com\u0022]#foo`,
+ `Hello, \u003cb\u003eWorld\u003c\/b\u003e \u0026amp;tc!`,
+ ` dir=\u0022ltr\u0022`,
+ `c \u0026\u0026 alert(\u0022Hello, World!\u0022);`,
+ // Escape sequence not over-escaped.
+ `Hello, World \u0026 O\u0027Reilly\u0021`,
+ `greeting=H%69,\u0026addressee=(World)`,
+ `greeting=H%69,\u0026addressee=(World) 2x, https:\/\/golang.org\/favicon.ico 500.5w`,
+ `,foo\/,`,
+ },
+ },
+ {
+ `<script type="text/javascript">alert("{{.}}")</script>`,
+ []string{
+ `\u003cb\u003e \u0022foo%\u0022 O\u0027Reilly \u0026bar;`,
+ `a[href =~ \u0022\/\/example.com\u0022]#foo`,
+ `Hello, \u003cb\u003eWorld\u003c\/b\u003e \u0026amp;tc!`,
+ ` dir=\u0022ltr\u0022`,
+ `c \u0026\u0026 alert(\u0022Hello, World!\u0022);`,
+ // Escape sequence not over-escaped.
+ `Hello, World \u0026 O\u0027Reilly\u0021`,
+ `greeting=H%69,\u0026addressee=(World)`,
+ `greeting=H%69,\u0026addressee=(World) 2x, https:\/\/golang.org\/favicon.ico 500.5w`,
+ `,foo\/,`,
+ },
+ },
+ {
+ `<script type="text/javascript">alert({{.}})</script>`,
+ []string{
+ `"\u003cb\u003e \"foo%\" O'Reilly \u0026bar;"`,
+ `"a[href =~ \"//example.com\"]#foo"`,
+ `"Hello, \u003cb\u003eWorld\u003c/b\u003e \u0026amp;tc!"`,
+ `" dir=\"ltr\""`,
+ // Not escaped.
+ `c && alert("Hello, World!");`,
+ // Escape sequence not over-escaped.
+ `"Hello, World & O'Reilly\u0021"`,
+ `"greeting=H%69,\u0026addressee=(World)"`,
+ `"greeting=H%69,\u0026addressee=(World) 2x, https://golang.org/favicon.ico 500.5w"`,
+ `",foo/,"`,
+ },
+ },
+ {
+ // Not treated as JS. The output is same as for <div>{{.}}</div>
+ `<script type="text/template">{{.}}</script>`,
+ []string{
+ `&lt;b&gt; &#34;foo%&#34; O&#39;Reilly &amp;bar;`,
+ `a[href =~ &#34;//example.com&#34;]#foo`,
+ // Not escaped.
+ `Hello, <b>World</b> &amp;tc!`,
+ ` dir=&#34;ltr&#34;`,
+ `c &amp;&amp; alert(&#34;Hello, World!&#34;);`,
+ `Hello, World &amp; O&#39;Reilly\u0021`,
+ `greeting=H%69,&amp;addressee=(World)`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<button onclick='alert("{{.}}")'>`,
+ []string{
+ `\u003cb\u003e \u0022foo%\u0022 O\u0027Reilly \u0026bar;`,
+ `a[href =~ \u0022\/\/example.com\u0022]#foo`,
+ `Hello, \u003cb\u003eWorld\u003c\/b\u003e \u0026amp;tc!`,
+ ` dir=\u0022ltr\u0022`,
+ `c \u0026\u0026 alert(\u0022Hello, World!\u0022);`,
+ // Escape sequence not over-escaped.
+ `Hello, World \u0026 O\u0027Reilly\u0021`,
+ `greeting=H%69,\u0026addressee=(World)`,
+ `greeting=H%69,\u0026addressee=(World) 2x, https:\/\/golang.org\/favicon.ico 500.5w`,
+ `,foo\/,`,
+ },
+ },
+ {
+ `<a href="?q={{.}}">`,
+ []string{
+ `%3cb%3e%20%22foo%25%22%20O%27Reilly%20%26bar%3b`,
+ `a%5bhref%20%3d~%20%22%2f%2fexample.com%22%5d%23foo`,
+ `Hello%2c%20%3cb%3eWorld%3c%2fb%3e%20%26amp%3btc%21`,
+ `%20dir%3d%22ltr%22`,
+ `c%20%26%26%20alert%28%22Hello%2c%20World%21%22%29%3b`,
+ `Hello%2c%20World%20%26%20O%27Reilly%5cu0021`,
+ // Quotes and parens are escaped but %69 is not over-escaped. HTML escaping is done.
+ `greeting=H%69,&amp;addressee=%28World%29`,
+ `greeting%3dH%2569%2c%26addressee%3d%28World%29%202x%2c%20https%3a%2f%2fgolang.org%2ffavicon.ico%20500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<style>body { background: url('?img={{.}}') }</style>`,
+ []string{
+ `%3cb%3e%20%22foo%25%22%20O%27Reilly%20%26bar%3b`,
+ `a%5bhref%20%3d~%20%22%2f%2fexample.com%22%5d%23foo`,
+ `Hello%2c%20%3cb%3eWorld%3c%2fb%3e%20%26amp%3btc%21`,
+ `%20dir%3d%22ltr%22`,
+ `c%20%26%26%20alert%28%22Hello%2c%20World%21%22%29%3b`,
+ `Hello%2c%20World%20%26%20O%27Reilly%5cu0021`,
+ // Quotes and parens are escaped but %69 is not over-escaped. HTML escaping is not done.
+ `greeting=H%69,&addressee=%28World%29`,
+ `greeting%3dH%2569%2c%26addressee%3d%28World%29%202x%2c%20https%3a%2f%2fgolang.org%2ffavicon.ico%20500.5w`,
+ `,foo/,`,
+ },
+ },
+ {
+ `<img srcset="{{.}}">`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ // Commas are not esacped
+ `Hello,#ZgotmplZ`,
+ // Leading spaces are not percent escapes.
+ ` dir=%22ltr%22`,
+ // Spaces after commas are not percent escaped.
+ `#ZgotmplZ, World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting=H%69%2c&amp;addressee=%28World%29`,
+ // Metadata is not escaped.
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `%2cfoo/%2c`,
+ },
+ },
+ {
+ `<img srcset={{.}}>`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ `Hello,#ZgotmplZ`,
+ // Spaces are HTML escaped not %-escaped
+ `&#32;dir&#61;%22ltr%22`,
+ `#ZgotmplZ,&#32;World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting&#61;H%69%2c&amp;addressee&#61;%28World%29`,
+ `greeting&#61;H%69,&amp;addressee&#61;(World)&#32;2x,&#32;https://golang.org/favicon.ico&#32;500.5w`,
+ // Commas are escaped.
+ `%2cfoo/%2c`,
+ },
+ },
+ {
+ `<img srcset="{{.}} 2x, https://golang.org/ 500.5w">`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ `Hello,#ZgotmplZ`,
+ ` dir=%22ltr%22`,
+ `#ZgotmplZ, World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting=H%69%2c&amp;addressee=%28World%29`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `%2cfoo/%2c`,
+ },
+ },
+ {
+ `<img srcset="http://godoc.org/ {{.}}, https://golang.org/ 500.5w">`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ `Hello,#ZgotmplZ`,
+ ` dir=%22ltr%22`,
+ `#ZgotmplZ, World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting=H%69%2c&amp;addressee=%28World%29`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `%2cfoo/%2c`,
+ },
+ },
+ {
+ `<img srcset="http://godoc.org/?q={{.}} 2x, https://golang.org/ 500.5w">`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ `Hello,#ZgotmplZ`,
+ ` dir=%22ltr%22`,
+ `#ZgotmplZ, World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting=H%69%2c&amp;addressee=%28World%29`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `%2cfoo/%2c`,
+ },
+ },
+ {
+ `<img srcset="http://godoc.org/ 2x, {{.}} 500.5w">`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ `Hello,#ZgotmplZ`,
+ ` dir=%22ltr%22`,
+ `#ZgotmplZ, World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting=H%69%2c&amp;addressee=%28World%29`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `%2cfoo/%2c`,
+ },
+ },
+ {
+ `<img srcset="http://godoc.org/ 2x, https://golang.org/ {{.}}">`,
+ []string{
+ `#ZgotmplZ`,
+ `#ZgotmplZ`,
+ `Hello,#ZgotmplZ`,
+ ` dir=%22ltr%22`,
+ `#ZgotmplZ, World!%22%29;`,
+ `Hello,#ZgotmplZ`,
+ `greeting=H%69%2c&amp;addressee=%28World%29`,
+ `greeting=H%69,&amp;addressee=(World) 2x, https://golang.org/favicon.ico 500.5w`,
+ `%2cfoo/%2c`,
+ },
+ },
+ }
+
+ for _, test := range tests {
+ tmpl := Must(New("x").Parse(test.input))
+ pre := strings.Index(test.input, "{{.}}")
+ post := len(test.input) - (pre + 5)
+ var b bytes.Buffer
+ for i, x := range data {
+ b.Reset()
+ if err := tmpl.Execute(&b, x); err != nil {
+ t.Errorf("%q with %v: %s", test.input, x, err)
+ continue
+ }
+ if want, got := test.want[i], b.String()[pre:b.Len()-post]; want != got {
+ t.Errorf("%q with %v:\nwant\n\t%q,\ngot\n\t%q\n", test.input, x, want, got)
+ continue
+ }
+ }
+ }
+}
+
+// Test that we print using the String method. Was issue 3073.
+type myStringer struct {
+ v int
+}
+
+func (s *myStringer) String() string {
+ return fmt.Sprintf("string=%d", s.v)
+}
+
+type errorer struct {
+ v int
+}
+
+func (s *errorer) Error() string {
+ return fmt.Sprintf("error=%d", s.v)
+}
+
+func TestStringer(t *testing.T) {
+ s := &myStringer{3}
+ b := new(bytes.Buffer)
+ tmpl := Must(New("x").Parse("{{.}}"))
+ if err := tmpl.Execute(b, s); err != nil {
+ t.Fatal(err)
+ }
+ var expect = "string=3"
+ if b.String() != expect {
+ t.Errorf("expected %q got %q", expect, b.String())
+ }
+ e := &errorer{7}
+ b.Reset()
+ if err := tmpl.Execute(b, e); err != nil {
+ t.Fatal(err)
+ }
+ expect = "error=7"
+ if b.String() != expect {
+ t.Errorf("expected %q got %q", expect, b.String())
+ }
+}
+
+// https://golang.org/issue/5982
+func TestEscapingNilNonemptyInterfaces(t *testing.T) {
+ tmpl := Must(New("x").Parse("{{.E}}"))
+
+ got := new(bytes.Buffer)
+ testData := struct{ E error }{} // any non-empty interface here will do; error is just ready at hand
+ tmpl.Execute(got, testData)
+
+ // A non-empty interface should print like an empty interface.
+ want := new(bytes.Buffer)
+ data := struct{ E any }{}
+ tmpl.Execute(want, data)
+
+ if !bytes.Equal(want.Bytes(), got.Bytes()) {
+ t.Errorf("expected %q got %q", string(want.Bytes()), string(got.Bytes()))
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/context.go b/tpl/internal/go_templates/htmltemplate/context.go
new file mode 100644
index 000000000..c28e08dce
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/context.go
@@ -0,0 +1,265 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+// context describes the state an HTML parser must be in when it reaches the
+// portion of HTML produced by evaluating a particular template node.
+//
+// The zero value of type context is the start context for a template that
+// produces an HTML fragment as defined at
+// https://www.w3.org/TR/html5/syntax.html#the-end
+// where the context element is null.
+type context struct {
+ state state
+ delim delim
+ urlPart urlPart
+ jsCtx jsCtx
+ attr attr
+ element element
+ n parse.Node // for range break/continue
+ err *Error
+}
+
+func (c context) String() string {
+ var err error
+ if c.err != nil {
+ err = c.err
+ }
+ return fmt.Sprintf("{%v %v %v %v %v %v %v}", c.state, c.delim, c.urlPart, c.jsCtx, c.attr, c.element, err)
+}
+
+// eq reports whether two contexts are equal.
+func (c context) eq(d context) bool {
+ return c.state == d.state &&
+ c.delim == d.delim &&
+ c.urlPart == d.urlPart &&
+ c.jsCtx == d.jsCtx &&
+ c.attr == d.attr &&
+ c.element == d.element &&
+ c.err == d.err
+}
+
+// mangle produces an identifier that includes a suffix that distinguishes it
+// from template names mangled with different contexts.
+func (c context) mangle(templateName string) string {
+ // The mangled name for the default context is the input templateName.
+ if c.state == stateText {
+ return templateName
+ }
+ s := templateName + "$htmltemplate_" + c.state.String()
+ if c.delim != delimNone {
+ s += "_" + c.delim.String()
+ }
+ if c.urlPart != urlPartNone {
+ s += "_" + c.urlPart.String()
+ }
+ if c.jsCtx != jsCtxRegexp {
+ s += "_" + c.jsCtx.String()
+ }
+ if c.attr != attrNone {
+ s += "_" + c.attr.String()
+ }
+ if c.element != elementNone {
+ s += "_" + c.element.String()
+ }
+ return s
+}
+
+// state describes a high-level HTML parser state.
+//
+// It bounds the top of the element stack, and by extension the HTML insertion
+// mode, but also contains state that does not correspond to anything in the
+// HTML5 parsing algorithm because a single token production in the HTML
+// grammar may contain embedded actions in a template. For instance, the quoted
+// HTML attribute produced by
+// <div title="Hello {{.World}}">
+// is a single token in HTML's grammar but in a template spans several nodes.
+type state uint8
+
+//go:generate stringer -type state
+
+const (
+ // stateText is parsed character data. An HTML parser is in
+ // this state when its parse position is outside an HTML tag,
+ // directive, comment, and special element body.
+ stateText state = iota
+ // stateTag occurs before an HTML attribute or the end of a tag.
+ stateTag
+ // stateAttrName occurs inside an attribute name.
+ // It occurs between the ^'s in ` ^name^ = value`.
+ stateAttrName
+ // stateAfterName occurs after an attr name has ended but before any
+ // equals sign. It occurs between the ^'s in ` name^ ^= value`.
+ stateAfterName
+ // stateBeforeValue occurs after the equals sign but before the value.
+ // It occurs between the ^'s in ` name =^ ^value`.
+ stateBeforeValue
+ // stateHTMLCmt occurs inside an <!-- HTML comment -->.
+ stateHTMLCmt
+ // stateRCDATA occurs inside an RCDATA element (<textarea> or <title>)
+ // as described at https://www.w3.org/TR/html5/syntax.html#elements-0
+ stateRCDATA
+ // stateAttr occurs inside an HTML attribute whose content is text.
+ stateAttr
+ // stateURL occurs inside an HTML attribute whose content is a URL.
+ stateURL
+ // stateSrcset occurs inside an HTML srcset attribute.
+ stateSrcset
+ // stateJS occurs inside an event handler or script element.
+ stateJS
+ // stateJSDqStr occurs inside a JavaScript double quoted string.
+ stateJSDqStr
+ // stateJSSqStr occurs inside a JavaScript single quoted string.
+ stateJSSqStr
+ // stateJSRegexp occurs inside a JavaScript regexp literal.
+ stateJSRegexp
+ // stateJSBlockCmt occurs inside a JavaScript /* block comment */.
+ stateJSBlockCmt
+ // stateJSLineCmt occurs inside a JavaScript // line comment.
+ stateJSLineCmt
+ // stateCSS occurs inside a <style> element or style attribute.
+ stateCSS
+ // stateCSSDqStr occurs inside a CSS double quoted string.
+ stateCSSDqStr
+ // stateCSSSqStr occurs inside a CSS single quoted string.
+ stateCSSSqStr
+ // stateCSSDqURL occurs inside a CSS double quoted url("...").
+ stateCSSDqURL
+ // stateCSSSqURL occurs inside a CSS single quoted url('...').
+ stateCSSSqURL
+ // stateCSSURL occurs inside a CSS unquoted url(...).
+ stateCSSURL
+ // stateCSSBlockCmt occurs inside a CSS /* block comment */.
+ stateCSSBlockCmt
+ // stateCSSLineCmt occurs inside a CSS // line comment.
+ stateCSSLineCmt
+ // stateError is an infectious error state outside any valid
+ // HTML/CSS/JS construct.
+ stateError
+ // stateDead marks unreachable code after a {{break}} or {{continue}}.
+ stateDead
+)
+
+// isComment is true for any state that contains content meant for template
+// authors & maintainers, not for end-users or machines.
+func isComment(s state) bool {
+ switch s {
+ case stateHTMLCmt, stateJSBlockCmt, stateJSLineCmt, stateCSSBlockCmt, stateCSSLineCmt:
+ return true
+ }
+ return false
+}
+
+// isInTag return whether s occurs solely inside an HTML tag.
+func isInTag(s state) bool {
+ switch s {
+ case stateTag, stateAttrName, stateAfterName, stateBeforeValue, stateAttr:
+ return true
+ }
+ return false
+}
+
+// delim is the delimiter that will end the current HTML attribute.
+type delim uint8
+
+//go:generate stringer -type delim
+
+const (
+ // delimNone occurs outside any attribute.
+ delimNone delim = iota
+ // delimDoubleQuote occurs when a double quote (") closes the attribute.
+ delimDoubleQuote
+ // delimSingleQuote occurs when a single quote (') closes the attribute.
+ delimSingleQuote
+ // delimSpaceOrTagEnd occurs when a space or right angle bracket (>)
+ // closes the attribute.
+ delimSpaceOrTagEnd
+)
+
+// urlPart identifies a part in an RFC 3986 hierarchical URL to allow different
+// encoding strategies.
+type urlPart uint8
+
+//go:generate stringer -type urlPart
+
+const (
+ // urlPartNone occurs when not in a URL, or possibly at the start:
+ // ^ in "^http://auth/path?k=v#frag".
+ urlPartNone urlPart = iota
+ // urlPartPreQuery occurs in the scheme, authority, or path; between the
+ // ^s in "h^ttp://auth/path^?k=v#frag".
+ urlPartPreQuery
+ // urlPartQueryOrFrag occurs in the query portion between the ^s in
+ // "http://auth/path?^k=v#frag^".
+ urlPartQueryOrFrag
+ // urlPartUnknown occurs due to joining of contexts both before and
+ // after the query separator.
+ urlPartUnknown
+)
+
+// jsCtx determines whether a '/' starts a regular expression literal or a
+// division operator.
+type jsCtx uint8
+
+//go:generate stringer -type jsCtx
+
+const (
+ // jsCtxRegexp occurs where a '/' would start a regexp literal.
+ jsCtxRegexp jsCtx = iota
+ // jsCtxDivOp occurs where a '/' would start a division operator.
+ jsCtxDivOp
+ // jsCtxUnknown occurs where a '/' is ambiguous due to context joining.
+ jsCtxUnknown
+)
+
+// element identifies the HTML element when inside a start tag or special body.
+// Certain HTML element (for example <script> and <style>) have bodies that are
+// treated differently from stateText so the element type is necessary to
+// transition into the correct context at the end of a tag and to identify the
+// end delimiter for the body.
+type element uint8
+
+//go:generate stringer -type element
+
+const (
+ // elementNone occurs outside a special tag or special element body.
+ elementNone element = iota
+ // elementScript corresponds to the raw text <script> element
+ // with JS MIME type or no type attribute.
+ elementScript
+ // elementStyle corresponds to the raw text <style> element.
+ elementStyle
+ // elementTextarea corresponds to the RCDATA <textarea> element.
+ elementTextarea
+ // elementTitle corresponds to the RCDATA <title> element.
+ elementTitle
+)
+
+//go:generate stringer -type attr
+
+// attr identifies the current HTML attribute when inside the attribute,
+// that is, starting from stateAttrName until stateTag/stateText (exclusive).
+type attr uint8
+
+const (
+ // attrNone corresponds to a normal attribute or no attribute.
+ attrNone attr = iota
+ // attrScript corresponds to an event handler attribute.
+ attrScript
+ // attrScriptType corresponds to the type attribute in script HTML element
+ attrScriptType
+ // attrStyle corresponds to the style attribute whose value is CSS.
+ attrStyle
+ // attrURL corresponds to an attribute whose value is a URL.
+ attrURL
+ // attrSrcset corresponds to a srcset attribute.
+ attrSrcset
+)
diff --git a/tpl/internal/go_templates/htmltemplate/css.go b/tpl/internal/go_templates/htmltemplate/css.go
new file mode 100644
index 000000000..890a0c6b2
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/css.go
@@ -0,0 +1,260 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// endsWithCSSKeyword reports whether b ends with an ident that
+// case-insensitively matches the lower-case kw.
+func endsWithCSSKeyword(b []byte, kw string) bool {
+ i := len(b) - len(kw)
+ if i < 0 {
+ // Too short.
+ return false
+ }
+ if i != 0 {
+ r, _ := utf8.DecodeLastRune(b[:i])
+ if isCSSNmchar(r) {
+ // Too long.
+ return false
+ }
+ }
+ // Many CSS keywords, such as "!important" can have characters encoded,
+ // but the URI production does not allow that according to
+ // https://www.w3.org/TR/css3-syntax/#TOK-URI
+ // This does not attempt to recognize encoded keywords. For example,
+ // given "\75\72\6c" and "url" this return false.
+ return string(bytes.ToLower(b[i:])) == kw
+}
+
+// isCSSNmchar reports whether rune is allowed anywhere in a CSS identifier.
+func isCSSNmchar(r rune) bool {
+ // Based on the CSS3 nmchar production but ignores multi-rune escape
+ // sequences.
+ // https://www.w3.org/TR/css3-syntax/#SUBTOK-nmchar
+ return 'a' <= r && r <= 'z' ||
+ 'A' <= r && r <= 'Z' ||
+ '0' <= r && r <= '9' ||
+ r == '-' ||
+ r == '_' ||
+ // Non-ASCII cases below.
+ 0x80 <= r && r <= 0xd7ff ||
+ 0xe000 <= r && r <= 0xfffd ||
+ 0x10000 <= r && r <= 0x10ffff
+}
+
+// decodeCSS decodes CSS3 escapes given a sequence of stringchars.
+// If there is no change, it returns the input, otherwise it returns a slice
+// backed by a new array.
+// https://www.w3.org/TR/css3-syntax/#SUBTOK-stringchar defines stringchar.
+func decodeCSS(s []byte) []byte {
+ i := bytes.IndexByte(s, '\\')
+ if i == -1 {
+ return s
+ }
+ // The UTF-8 sequence for a codepoint is never longer than 1 + the
+ // number hex digits need to represent that codepoint, so len(s) is an
+ // upper bound on the output length.
+ b := make([]byte, 0, len(s))
+ for len(s) != 0 {
+ i := bytes.IndexByte(s, '\\')
+ if i == -1 {
+ i = len(s)
+ }
+ b, s = append(b, s[:i]...), s[i:]
+ if len(s) < 2 {
+ break
+ }
+ // https://www.w3.org/TR/css3-syntax/#SUBTOK-escape
+ // escape ::= unicode | '\' [#x20-#x7E#x80-#xD7FF#xE000-#xFFFD#x10000-#x10FFFF]
+ if isHex(s[1]) {
+ // https://www.w3.org/TR/css3-syntax/#SUBTOK-unicode
+ // unicode ::= '\' [0-9a-fA-F]{1,6} wc?
+ j := 2
+ for j < len(s) && j < 7 && isHex(s[j]) {
+ j++
+ }
+ r := hexDecode(s[1:j])
+ if r > unicode.MaxRune {
+ r, j = r/16, j-1
+ }
+ n := utf8.EncodeRune(b[len(b):cap(b)], r)
+ // The optional space at the end allows a hex
+ // sequence to be followed by a literal hex.
+ // string(decodeCSS([]byte(`\A B`))) == "\nB"
+ b, s = b[:len(b)+n], skipCSSSpace(s[j:])
+ } else {
+ // `\\` decodes to `\` and `\"` to `"`.
+ _, n := utf8.DecodeRune(s[1:])
+ b, s = append(b, s[1:1+n]...), s[1+n:]
+ }
+ }
+ return b
+}
+
+// isHex reports whether the given character is a hex digit.
+func isHex(c byte) bool {
+ return '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F'
+}
+
+// hexDecode decodes a short hex digit sequence: "10" -> 16.
+func hexDecode(s []byte) rune {
+ n := '\x00'
+ for _, c := range s {
+ n <<= 4
+ switch {
+ case '0' <= c && c <= '9':
+ n |= rune(c - '0')
+ case 'a' <= c && c <= 'f':
+ n |= rune(c-'a') + 10
+ case 'A' <= c && c <= 'F':
+ n |= rune(c-'A') + 10
+ default:
+ panic(fmt.Sprintf("Bad hex digit in %q", s))
+ }
+ }
+ return n
+}
+
+// skipCSSSpace returns a suffix of c, skipping over a single space.
+func skipCSSSpace(c []byte) []byte {
+ if len(c) == 0 {
+ return c
+ }
+ // wc ::= #x9 | #xA | #xC | #xD | #x20
+ switch c[0] {
+ case '\t', '\n', '\f', ' ':
+ return c[1:]
+ case '\r':
+ // This differs from CSS3's wc production because it contains a
+ // probable spec error whereby wc contains all the single byte
+ // sequences in nl (newline) but not CRLF.
+ if len(c) >= 2 && c[1] == '\n' {
+ return c[2:]
+ }
+ return c[1:]
+ }
+ return c
+}
+
+// isCSSSpace reports whether b is a CSS space char as defined in wc.
+func isCSSSpace(b byte) bool {
+ switch b {
+ case '\t', '\n', '\f', '\r', ' ':
+ return true
+ }
+ return false
+}
+
+// cssEscaper escapes HTML and CSS special characters using \<hex>+ escapes.
+func cssEscaper(args ...any) string {
+ s, _ := stringify(args...)
+ var b strings.Builder
+ r, w, written := rune(0), 0, 0
+ for i := 0; i < len(s); i += w {
+ // See comment in htmlEscaper.
+ r, w = utf8.DecodeRuneInString(s[i:])
+ var repl string
+ switch {
+ case int(r) < len(cssReplacementTable) && cssReplacementTable[r] != "":
+ repl = cssReplacementTable[r]
+ default:
+ continue
+ }
+ if written == 0 {
+ b.Grow(len(s))
+ }
+ b.WriteString(s[written:i])
+ b.WriteString(repl)
+ written = i + w
+ if repl != `\\` && (written == len(s) || isHex(s[written]) || isCSSSpace(s[written])) {
+ b.WriteByte(' ')
+ }
+ }
+ if written == 0 {
+ return s
+ }
+ b.WriteString(s[written:])
+ return b.String()
+}
+
+var cssReplacementTable = []string{
+ 0: `\0`,
+ '\t': `\9`,
+ '\n': `\a`,
+ '\f': `\c`,
+ '\r': `\d`,
+ // Encode HTML specials as hex so the output can be embedded
+ // in HTML attributes without further encoding.
+ '"': `\22`,
+ '&': `\26`,
+ '\'': `\27`,
+ '(': `\28`,
+ ')': `\29`,
+ '+': `\2b`,
+ '/': `\2f`,
+ ':': `\3a`,
+ ';': `\3b`,
+ '<': `\3c`,
+ '>': `\3e`,
+ '\\': `\\`,
+ '{': `\7b`,
+ '}': `\7d`,
+}
+
+var expressionBytes = []byte("expression")
+var mozBindingBytes = []byte("mozbinding")
+
+// cssValueFilter allows innocuous CSS values in the output including CSS
+// quantities (10px or 25%), ID or class literals (#foo, .bar), keyword values
+// (inherit, blue), and colors (#888).
+// It filters out unsafe values, such as those that affect token boundaries,
+// and anything that might execute scripts.
+func cssValueFilter(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeCSS {
+ return s
+ }
+ b, id := decodeCSS([]byte(s)), make([]byte, 0, 64)
+
+ // CSS3 error handling is specified as honoring string boundaries per
+ // https://www.w3.org/TR/css3-syntax/#error-handling :
+ // Malformed declarations. User agents must handle unexpected
+ // tokens encountered while parsing a declaration by reading until
+ // the end of the declaration, while observing the rules for
+ // matching pairs of (), [], {}, "", and '', and correctly handling
+ // escapes. For example, a malformed declaration may be missing a
+ // property, colon (:) or value.
+ // So we need to make sure that values do not have mismatched bracket
+ // or quote characters to prevent the browser from restarting parsing
+ // inside a string that might embed JavaScript source.
+ for i, c := range b {
+ switch c {
+ case 0, '"', '\'', '(', ')', '/', ';', '@', '[', '\\', ']', '`', '{', '}':
+ return filterFailsafe
+ case '-':
+ // Disallow <!-- or -->.
+ // -- should not appear in valid identifiers.
+ if i != 0 && b[i-1] == '-' {
+ return filterFailsafe
+ }
+ default:
+ if c < utf8.RuneSelf && isCSSNmchar(rune(c)) {
+ id = append(id, c)
+ }
+ }
+ }
+ id = bytes.ToLower(id)
+ if bytes.Contains(id, expressionBytes) || bytes.Contains(id, mozBindingBytes) {
+ return filterFailsafe
+ }
+ return string(b)
+}
diff --git a/tpl/internal/go_templates/htmltemplate/css_test.go b/tpl/internal/go_templates/htmltemplate/css_test.go
new file mode 100644
index 000000000..7d8ad8b59
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/css_test.go
@@ -0,0 +1,284 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "strconv"
+ "strings"
+ "testing"
+)
+
+func TestEndsWithCSSKeyword(t *testing.T) {
+ tests := []struct {
+ css, kw string
+ want bool
+ }{
+ {"", "url", false},
+ {"url", "url", true},
+ {"URL", "url", true},
+ {"Url", "url", true},
+ {"url", "important", false},
+ {"important", "important", true},
+ {"image-url", "url", false},
+ {"imageurl", "url", false},
+ {"image url", "url", true},
+ }
+ for _, test := range tests {
+ got := endsWithCSSKeyword([]byte(test.css), test.kw)
+ if got != test.want {
+ t.Errorf("want %t but got %t for css=%v, kw=%v", test.want, got, test.css, test.kw)
+ }
+ }
+}
+
+func TestIsCSSNmchar(t *testing.T) {
+ tests := []struct {
+ rune rune
+ want bool
+ }{
+ {0, false},
+ {'0', true},
+ {'9', true},
+ {'A', true},
+ {'Z', true},
+ {'a', true},
+ {'z', true},
+ {'_', true},
+ {'-', true},
+ {':', false},
+ {';', false},
+ {' ', false},
+ {0x7f, false},
+ {0x80, true},
+ {0x1234, true},
+ {0xd800, false},
+ {0xdc00, false},
+ {0xfffe, false},
+ {0x10000, true},
+ {0x110000, false},
+ }
+ for _, test := range tests {
+ got := isCSSNmchar(test.rune)
+ if got != test.want {
+ t.Errorf("%q: want %t but got %t", string(test.rune), test.want, got)
+ }
+ }
+}
+
+func TestDecodeCSS(t *testing.T) {
+ tests := []struct {
+ css, want string
+ }{
+ {``, ``},
+ {`foo`, `foo`},
+ {`foo\`, `foo`},
+ {`foo\\`, `foo\`},
+ {`\`, ``},
+ {`\A`, "\n"},
+ {`\a`, "\n"},
+ {`\0a`, "\n"},
+ {`\00000a`, "\n"},
+ {`\000000a`, "\u0000a"},
+ {`\1234 5`, "\u1234" + "5"},
+ {`\1234\20 5`, "\u1234" + " 5"},
+ {`\1234\A 5`, "\u1234" + "\n5"},
+ {"\\1234\t5", "\u1234" + "5"},
+ {"\\1234\n5", "\u1234" + "5"},
+ {"\\1234\r\n5", "\u1234" + "5"},
+ {`\12345`, "\U00012345"},
+ {`\\`, `\`},
+ {`\\ `, `\ `},
+ {`\"`, `"`},
+ {`\'`, `'`},
+ {`\.`, `.`},
+ {`\. .`, `. .`},
+ {
+ `The \3c i\3equick\3c/i\3e,\d\A\3cspan style=\27 color:brown\27\3e brown\3c/span\3e fox jumps\2028over the \3c canine class=\22lazy\22 \3e dog\3c/canine\3e`,
+ "The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>",
+ },
+ }
+ for _, test := range tests {
+ got1 := string(decodeCSS([]byte(test.css)))
+ if got1 != test.want {
+ t.Errorf("%q: want\n\t%q\nbut got\n\t%q", test.css, test.want, got1)
+ }
+ recoded := cssEscaper(got1)
+ if got2 := string(decodeCSS([]byte(recoded))); got2 != test.want {
+ t.Errorf("%q: escape & decode not dual for %q", test.css, recoded)
+ }
+ }
+}
+
+func TestHexDecode(t *testing.T) {
+ for i := 0; i < 0x200000; i += 101 /* coprime with 16 */ {
+ s := strconv.FormatInt(int64(i), 16)
+ if got := int(hexDecode([]byte(s))); got != i {
+ t.Errorf("%s: want %d but got %d", s, i, got)
+ }
+ s = strings.ToUpper(s)
+ if got := int(hexDecode([]byte(s))); got != i {
+ t.Errorf("%s: want %d but got %d", s, i, got)
+ }
+ }
+}
+
+func TestSkipCSSSpace(t *testing.T) {
+ tests := []struct {
+ css, want string
+ }{
+ {"", ""},
+ {"foo", "foo"},
+ {"\n", ""},
+ {"\r\n", ""},
+ {"\r", ""},
+ {"\t", ""},
+ {" ", ""},
+ {"\f", ""},
+ {" foo", "foo"},
+ {" foo", " foo"},
+ {`\20`, `\20`},
+ }
+ for _, test := range tests {
+ got := string(skipCSSSpace([]byte(test.css)))
+ if got != test.want {
+ t.Errorf("%q: want %q but got %q", test.css, test.want, got)
+ }
+ }
+}
+
+func TestCSSEscaper(t *testing.T) {
+ input := ("\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ ` !"#$%&'()*+,-./` +
+ `0123456789:;<=>?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\]^_` +
+ "`abcdefghijklmno" +
+ "pqrstuvwxyz{|}~\x7f" +
+ "\u00A0\u0100\u2028\u2029\ufeff\U0001D11E")
+
+ want := ("\\0\x01\x02\x03\x04\x05\x06\x07" +
+ "\x08\\9 \\a\x0b\\c \\d\x0E\x0F" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17" +
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ ` !\22#$%\26\27\28\29*\2b,-.\2f ` +
+ `0123456789\3a\3b\3c=\3e?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\\]^_` +
+ "`abcdefghijklmno" +
+ `pqrstuvwxyz\7b|\7d~` + "\u007f" +
+ "\u00A0\u0100\u2028\u2029\ufeff\U0001D11E")
+
+ got := cssEscaper(input)
+ if got != want {
+ t.Errorf("encode: want\n\t%q\nbut got\n\t%q", want, got)
+ }
+
+ got = string(decodeCSS([]byte(got)))
+ if input != got {
+ t.Errorf("decode: want\n\t%q\nbut got\n\t%q", input, got)
+ }
+}
+
+func TestCSSValueFilter(t *testing.T) {
+ tests := []struct {
+ css, want string
+ }{
+ {"", ""},
+ {"foo", "foo"},
+ {"0", "0"},
+ {"0px", "0px"},
+ {"-5px", "-5px"},
+ {"1.25in", "1.25in"},
+ {"+.33em", "+.33em"},
+ {"100%", "100%"},
+ {"12.5%", "12.5%"},
+ {".foo", ".foo"},
+ {"#bar", "#bar"},
+ {"corner-radius", "corner-radius"},
+ {"-moz-corner-radius", "-moz-corner-radius"},
+ {"#000", "#000"},
+ {"#48f", "#48f"},
+ {"#123456", "#123456"},
+ {"U+00-FF, U+980-9FF", "U+00-FF, U+980-9FF"},
+ {"color: red", "color: red"},
+ {"<!--", "ZgotmplZ"},
+ {"-->", "ZgotmplZ"},
+ {"<![CDATA[", "ZgotmplZ"},
+ {"]]>", "ZgotmplZ"},
+ {"</style", "ZgotmplZ"},
+ {`"`, "ZgotmplZ"},
+ {`'`, "ZgotmplZ"},
+ {"`", "ZgotmplZ"},
+ {"\x00", "ZgotmplZ"},
+ {"/* foo */", "ZgotmplZ"},
+ {"//", "ZgotmplZ"},
+ {"[href=~", "ZgotmplZ"},
+ {"expression(alert(1337))", "ZgotmplZ"},
+ {"-expression(alert(1337))", "ZgotmplZ"},
+ {"expression", "ZgotmplZ"},
+ {"Expression", "ZgotmplZ"},
+ {"EXPRESSION", "ZgotmplZ"},
+ {"-moz-binding", "ZgotmplZ"},
+ {"-expr\x00ession(alert(1337))", "ZgotmplZ"},
+ {`-expr\0ession(alert(1337))`, "ZgotmplZ"},
+ {`-express\69on(alert(1337))`, "ZgotmplZ"},
+ {`-express\69 on(alert(1337))`, "ZgotmplZ"},
+ {`-exp\72 ession(alert(1337))`, "ZgotmplZ"},
+ {`-exp\52 ession(alert(1337))`, "ZgotmplZ"},
+ {`-exp\000052 ession(alert(1337))`, "ZgotmplZ"},
+ {`-expre\0000073sion`, "-expre\x073sion"},
+ {`@import url evil.css`, "ZgotmplZ"},
+ }
+ for _, test := range tests {
+ got := cssValueFilter(test.css)
+ if got != test.want {
+ t.Errorf("%q: want %q but got %q", test.css, test.want, got)
+ }
+ }
+}
+
+func BenchmarkCSSEscaper(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ cssEscaper("The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>")
+ }
+}
+
+func BenchmarkCSSEscaperNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ cssEscaper("The quick, brown fox jumps over the lazy dog.")
+ }
+}
+
+func BenchmarkDecodeCSS(b *testing.B) {
+ s := []byte(`The \3c i\3equick\3c/i\3e,\d\A\3cspan style=\27 color:brown\27\3e brown\3c/span\3e fox jumps\2028over the \3c canine class=\22lazy\22 \3edog\3c/canine\3e`)
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ decodeCSS(s)
+ }
+}
+
+func BenchmarkDecodeCSSNoSpecials(b *testing.B) {
+ s := []byte("The quick, brown fox jumps over the lazy dog.")
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ decodeCSS(s)
+ }
+}
+
+func BenchmarkCSSValueFilter(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ cssValueFilter(` e\78preS\0Sio/**/n(alert(1337))`)
+ }
+}
+
+func BenchmarkCSSValueFilterOk(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ cssValueFilter(`Times New Roman`)
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/delim_string.go b/tpl/internal/go_templates/htmltemplate/delim_string.go
new file mode 100644
index 000000000..6d80e09a4
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/delim_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type delim"; DO NOT EDIT.
+
+package template
+
+import "strconv"
+
+const _delim_name = "delimNonedelimDoubleQuotedelimSingleQuotedelimSpaceOrTagEnd"
+
+var _delim_index = [...]uint8{0, 9, 25, 41, 59}
+
+func (i delim) String() string {
+ if i >= delim(len(_delim_index)-1) {
+ return "delim(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _delim_name[_delim_index[i]:_delim_index[i+1]]
+}
diff --git a/tpl/internal/go_templates/htmltemplate/doc.go b/tpl/internal/go_templates/htmltemplate/doc.go
new file mode 100644
index 000000000..b6a1504f8
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/doc.go
@@ -0,0 +1,241 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package template (html/template) implements data-driven templates for
+generating HTML output safe against code injection. It provides the
+same interface as package text/template and should be used instead of
+text/template whenever the output is HTML.
+
+The documentation here focuses on the security features of the package.
+For information about how to program the templates themselves, see the
+documentation for text/template.
+
+Introduction
+
+This package wraps package text/template so you can share its template API
+to parse and execute HTML templates safely.
+
+ tmpl, err := template.New("name").Parse(...)
+ // Error checking elided
+ err = tmpl.Execute(out, data)
+
+If successful, tmpl will now be injection-safe. Otherwise, err is an error
+defined in the docs for ErrorCode.
+
+HTML templates treat data values as plain text which should be encoded so they
+can be safely embedded in an HTML document. The escaping is contextual, so
+actions can appear within JavaScript, CSS, and URI contexts.
+
+The security model used by this package assumes that template authors are
+trusted, while Execute's data parameter is not. More details are
+provided below.
+
+Example
+
+ import template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+ ...
+ t, err := template.New("foo").Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
+ err = t.ExecuteTemplate(out, "T", "<script>alert('you have been pwned')</script>")
+
+produces
+
+ Hello, <script>alert('you have been pwned')</script>!
+
+but the contextual autoescaping in html/template
+
+ import template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+ ...
+ t, err := template.New("foo").Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
+ err = t.ExecuteTemplate(out, "T", "<script>alert('you have been pwned')</script>")
+
+produces safe, escaped HTML output
+
+ Hello, &lt;script&gt;alert(&#39;you have been pwned&#39;)&lt;/script&gt;!
+
+
+Contexts
+
+This package understands HTML, CSS, JavaScript, and URIs. It adds sanitizing
+functions to each simple action pipeline, so given the excerpt
+
+ <a href="/search?q={{.}}">{{.}}</a>
+
+At parse time each {{.}} is overwritten to add escaping functions as necessary.
+In this case it becomes
+
+ <a href="/search?q={{. | urlescaper | attrescaper}}">{{. | htmlescaper}}</a>
+
+where urlescaper, attrescaper, and htmlescaper are aliases for internal escaping
+functions.
+
+For these internal escaping functions, if an action pipeline evaluates to
+a nil interface value, it is treated as though it were an empty string.
+
+Namespaced and data- attributes
+
+Attributes with a namespace are treated as if they had no namespace.
+Given the excerpt
+
+ <a my:href="{{.}}"></a>
+
+At parse time the attribute will be treated as if it were just "href".
+So at parse time the template becomes:
+
+ <a my:href="{{. | urlescaper | attrescaper}}"></a>
+
+Similarly to attributes with namespaces, attributes with a "data-" prefix are
+treated as if they had no "data-" prefix. So given
+
+ <a data-href="{{.}}"></a>
+
+At parse time this becomes
+
+ <a data-href="{{. | urlescaper | attrescaper}}"></a>
+
+If an attribute has both a namespace and a "data-" prefix, only the namespace
+will be removed when determining the context. For example
+
+ <a my:data-href="{{.}}"></a>
+
+This is handled as if "my:data-href" was just "data-href" and not "href" as
+it would be if the "data-" prefix were to be ignored too. Thus at parse
+time this becomes just
+
+ <a my:data-href="{{. | attrescaper}}"></a>
+
+As a special case, attributes with the namespace "xmlns" are always treated
+as containing URLs. Given the excerpts
+
+ <a xmlns:title="{{.}}"></a>
+ <a xmlns:href="{{.}}"></a>
+ <a xmlns:onclick="{{.}}"></a>
+
+At parse time they become:
+
+ <a xmlns:title="{{. | urlescaper | attrescaper}}"></a>
+ <a xmlns:href="{{. | urlescaper | attrescaper}}"></a>
+ <a xmlns:onclick="{{. | urlescaper | attrescaper}}"></a>
+
+Errors
+
+See the documentation of ErrorCode for details.
+
+
+A fuller picture
+
+The rest of this package comment may be skipped on first reading; it includes
+details necessary to understand escaping contexts and error messages. Most users
+will not need to understand these details.
+
+
+Contexts
+
+Assuming {{.}} is `O'Reilly: How are <i>you</i>?`, the table below shows
+how {{.}} appears when used in the context to the left.
+
+ Context {{.}} After
+ {{.}} O'Reilly: How are &lt;i&gt;you&lt;/i&gt;?
+ <a title='{{.}}'> O&#39;Reilly: How are you?
+ <a href="/{{.}}"> O&#39;Reilly: How are %3ci%3eyou%3c/i%3e?
+ <a href="?q={{.}}"> O&#39;Reilly%3a%20How%20are%3ci%3e...%3f
+ <a onx='f("{{.}}")'> O\x27Reilly: How are \x3ci\x3eyou...?
+ <a onx='f({{.}})'> "O\x27Reilly: How are \x3ci\x3eyou...?"
+ <a onx='pattern = /{{.}}/;'> O\x27Reilly: How are \x3ci\x3eyou...\x3f
+
+If used in an unsafe context, then the value might be filtered out:
+
+ Context {{.}} After
+ <a href="{{.}}"> #ZgotmplZ
+
+since "O'Reilly:" is not an allowed protocol like "http:".
+
+
+If {{.}} is the innocuous word, `left`, then it can appear more widely,
+
+ Context {{.}} After
+ {{.}} left
+ <a title='{{.}}'> left
+ <a href='{{.}}'> left
+ <a href='/{{.}}'> left
+ <a href='?dir={{.}}'> left
+ <a style="border-{{.}}: 4px"> left
+ <a style="align: {{.}}"> left
+ <a style="background: '{{.}}'> left
+ <a style="background: url('{{.}}')> left
+ <style>p.{{.}} {color:red}</style> left
+
+Non-string values can be used in JavaScript contexts.
+If {{.}} is
+
+ struct{A,B string}{ "foo", "bar" }
+
+in the escaped template
+
+ <script>var pair = {{.}};</script>
+
+then the template output is
+
+ <script>var pair = {"A": "foo", "B": "bar"};</script>
+
+See package json to understand how non-string content is marshaled for
+embedding in JavaScript contexts.
+
+
+Typed Strings
+
+By default, this package assumes that all pipelines produce a plain text string.
+It adds escaping pipeline stages necessary to correctly and safely embed that
+plain text string in the appropriate context.
+
+When a data value is not plain text, you can make sure it is not over-escaped
+by marking it with its type.
+
+Types HTML, JS, URL, and others from content.go can carry safe content that is
+exempted from escaping.
+
+The template
+
+ Hello, {{.}}!
+
+can be invoked with
+
+ tmpl.Execute(out, template.HTML(`<b>World</b>`))
+
+to produce
+
+ Hello, <b>World</b>!
+
+instead of the
+
+ Hello, &lt;b&gt;World&lt;b&gt;!
+
+that would have been produced if {{.}} was a regular string.
+
+
+Security Model
+
+https://rawgit.com/mikesamuel/sanitized-jquery-templates/trunk/safetemplate.html#problem_definition defines "safe" as used by this package.
+
+This package assumes that template authors are trusted, that Execute's data
+parameter is not, and seeks to preserve the properties below in the face
+of untrusted data:
+
+Structure Preservation Property:
+"... when a template author writes an HTML tag in a safe templating language,
+the browser will interpret the corresponding portion of the output as a tag
+regardless of the values of untrusted data, and similarly for other structures
+such as attribute boundaries and JS and CSS string boundaries."
+
+Code Effect Property:
+"... only code specified by the template author should run as a result of
+injecting the template output into a page and all code specified by the
+template author should run as a result of the same."
+
+Least Surprise Property:
+"A developer (or code reviewer) familiar with HTML, CSS, and JavaScript, who
+knows that contextual autoescaping happens should be able to look at a {{.}}
+and correctly infer what sanitization happens."
+*/
+package template
diff --git a/tpl/internal/go_templates/htmltemplate/element_string.go b/tpl/internal/go_templates/htmltemplate/element_string.go
new file mode 100644
index 000000000..4573e0873
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/element_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type element"; DO NOT EDIT.
+
+package template
+
+import "strconv"
+
+const _element_name = "elementNoneelementScriptelementStyleelementTextareaelementTitle"
+
+var _element_index = [...]uint8{0, 11, 24, 36, 51, 63}
+
+func (i element) String() string {
+ if i >= element(len(_element_index)-1) {
+ return "element(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _element_name[_element_index[i]:_element_index[i+1]]
+}
diff --git a/tpl/internal/go_templates/htmltemplate/error.go b/tpl/internal/go_templates/htmltemplate/error.go
new file mode 100644
index 000000000..21c86a9ef
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/error.go
@@ -0,0 +1,234 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+// Error describes a problem encountered during template Escaping.
+type Error struct {
+ // ErrorCode describes the kind of error.
+ ErrorCode ErrorCode
+ // Node is the node that caused the problem, if known.
+ // If not nil, it overrides Name and Line.
+ Node parse.Node
+ // Name is the name of the template in which the error was encountered.
+ Name string
+ // Line is the line number of the error in the template source or 0.
+ Line int
+ // Description is a human-readable description of the problem.
+ Description string
+}
+
+// ErrorCode is a code for a kind of error.
+type ErrorCode int
+
+// We define codes for each error that manifests while escaping templates, but
+// escaped templates may also fail at runtime.
+//
+// Output: "ZgotmplZ"
+// Example:
+// <img src="{{.X}}">
+// where {{.X}} evaluates to `javascript:...`
+// Discussion:
+// "ZgotmplZ" is a special value that indicates that unsafe content reached a
+// CSS or URL context at runtime. The output of the example will be
+// <img src="#ZgotmplZ">
+// If the data comes from a trusted source, use content types to exempt it
+// from filtering: URL(`javascript:...`).
+const (
+ // OK indicates the lack of an error.
+ OK ErrorCode = iota
+
+ // ErrAmbigContext: "... appears in an ambiguous context within a URL"
+ // Example:
+ // <a href="
+ // {{if .C}}
+ // /path/
+ // {{else}}
+ // /search?q=
+ // {{end}}
+ // {{.X}}
+ // ">
+ // Discussion:
+ // {{.X}} is in an ambiguous URL context since, depending on {{.C}},
+ // it may be either a URL suffix or a query parameter.
+ // Moving {{.X}} into the condition removes the ambiguity:
+ // <a href="{{if .C}}/path/{{.X}}{{else}}/search?q={{.X}}">
+ ErrAmbigContext
+
+ // ErrBadHTML: "expected space, attr name, or end of tag, but got ...",
+ // "... in unquoted attr", "... in attribute name"
+ // Example:
+ // <a href = /search?q=foo>
+ // <href=foo>
+ // <form na<e=...>
+ // <option selected<
+ // Discussion:
+ // This is often due to a typo in an HTML element, but some runes
+ // are banned in tag names, attribute names, and unquoted attribute
+ // values because they can tickle parser ambiguities.
+ // Quoting all attributes is the best policy.
+ ErrBadHTML
+
+ // ErrBranchEnd: "{{if}} branches end in different contexts"
+ // Example:
+ // {{if .C}}<a href="{{end}}{{.X}}
+ // Discussion:
+ // Package html/template statically examines each path through an
+ // {{if}}, {{range}}, or {{with}} to escape any following pipelines.
+ // The example is ambiguous since {{.X}} might be an HTML text node,
+ // or a URL prefix in an HTML attribute. The context of {{.X}} is
+ // used to figure out how to escape it, but that context depends on
+ // the run-time value of {{.C}} which is not statically known.
+ //
+ // The problem is usually something like missing quotes or angle
+ // brackets, or can be avoided by refactoring to put the two contexts
+ // into different branches of an if, range or with. If the problem
+ // is in a {{range}} over a collection that should never be empty,
+ // adding a dummy {{else}} can help.
+ ErrBranchEnd
+
+ // ErrEndContext: "... ends in a non-text context: ..."
+ // Examples:
+ // <div
+ // <div title="no close quote>
+ // <script>f()
+ // Discussion:
+ // Executed templates should produce a DocumentFragment of HTML.
+ // Templates that end without closing tags will trigger this error.
+ // Templates that should not be used in an HTML context or that
+ // produce incomplete Fragments should not be executed directly.
+ //
+ // {{define "main"}} <script>{{template "helper"}}</script> {{end}}
+ // {{define "helper"}} document.write(' <div title=" ') {{end}}
+ //
+ // "helper" does not produce a valid document fragment, so should
+ // not be Executed directly.
+ ErrEndContext
+
+ // ErrNoSuchTemplate: "no such template ..."
+ // Examples:
+ // {{define "main"}}<div {{template "attrs"}}>{{end}}
+ // {{define "attrs"}}href="{{.URL}}"{{end}}
+ // Discussion:
+ // Package html/template looks through template calls to compute the
+ // context.
+ // Here the {{.URL}} in "attrs" must be treated as a URL when called
+ // from "main", but you will get this error if "attrs" is not defined
+ // when "main" is parsed.
+ ErrNoSuchTemplate
+
+ // ErrOutputContext: "cannot compute output context for template ..."
+ // Examples:
+ // {{define "t"}}{{if .T}}{{template "t" .T}}{{end}}{{.H}}",{{end}}
+ // Discussion:
+ // A recursive template does not end in the same context in which it
+ // starts, and a reliable output context cannot be computed.
+ // Look for typos in the named template.
+ // If the template should not be called in the named start context,
+ // look for calls to that template in unexpected contexts.
+ // Maybe refactor recursive templates to not be recursive.
+ ErrOutputContext
+
+ // ErrPartialCharset: "unfinished JS regexp charset in ..."
+ // Example:
+ // <script>var pattern = /foo[{{.Chars}}]/</script>
+ // Discussion:
+ // Package html/template does not support interpolation into regular
+ // expression literal character sets.
+ ErrPartialCharset
+
+ // ErrPartialEscape: "unfinished escape sequence in ..."
+ // Example:
+ // <script>alert("\{{.X}}")</script>
+ // Discussion:
+ // Package html/template does not support actions following a
+ // backslash.
+ // This is usually an error and there are better solutions; for
+ // example
+ // <script>alert("{{.X}}")</script>
+ // should work, and if {{.X}} is a partial escape sequence such as
+ // "xA0", mark the whole sequence as safe content: JSStr(`\xA0`)
+ ErrPartialEscape
+
+ // ErrRangeLoopReentry: "on range loop re-entry: ..."
+ // Example:
+ // <script>var x = [{{range .}}'{{.}},{{end}}]</script>
+ // Discussion:
+ // If an iteration through a range would cause it to end in a
+ // different context than an earlier pass, there is no single context.
+ // In the example, there is missing a quote, so it is not clear
+ // whether {{.}} is meant to be inside a JS string or in a JS value
+ // context. The second iteration would produce something like
+ //
+ // <script>var x = ['firstValue,'secondValue]</script>
+ ErrRangeLoopReentry
+
+ // ErrSlashAmbig: '/' could start a division or regexp.
+ // Example:
+ // <script>
+ // {{if .C}}var x = 1{{end}}
+ // /-{{.N}}/i.test(x) ? doThis : doThat();
+ // </script>
+ // Discussion:
+ // The example above could produce `var x = 1/-2/i.test(s)...`
+ // in which the first '/' is a mathematical division operator or it
+ // could produce `/-2/i.test(s)` in which the first '/' starts a
+ // regexp literal.
+ // Look for missing semicolons inside branches, and maybe add
+ // parentheses to make it clear which interpretation you intend.
+ ErrSlashAmbig
+
+ // ErrPredefinedEscaper: "predefined escaper ... disallowed in template"
+ // Example:
+ // <div class={{. | html}}>Hello<div>
+ // Discussion:
+ // Package html/template already contextually escapes all pipelines to
+ // produce HTML output safe against code injection. Manually escaping
+ // pipeline output using the predefined escapers "html" or "urlquery" is
+ // unnecessary, and may affect the correctness or safety of the escaped
+ // pipeline output in Go 1.8 and earlier.
+ //
+ // In most cases, such as the given example, this error can be resolved by
+ // simply removing the predefined escaper from the pipeline and letting the
+ // contextual autoescaper handle the escaping of the pipeline. In other
+ // instances, where the predefined escaper occurs in the middle of a
+ // pipeline where subsequent commands expect escaped input, e.g.
+ // {{.X | html | makeALink}}
+ // where makeALink does
+ // return `<a href="`+input+`">link</a>`
+ // consider refactoring the surrounding template to make use of the
+ // contextual autoescaper, i.e.
+ // <a href="{{.X}}">link</a>
+ //
+ // To ease migration to Go 1.9 and beyond, "html" and "urlquery" will
+ // continue to be allowed as the last command in a pipeline. However, if the
+ // pipeline occurs in an unquoted attribute value context, "html" is
+ // disallowed. Avoid using "html" and "urlquery" entirely in new templates.
+ ErrPredefinedEscaper
+)
+
+func (e *Error) Error() string {
+ switch {
+ case e.Node != nil:
+ loc, _ := (*parse.Tree)(nil).ErrorContext(e.Node)
+ return fmt.Sprintf("html/template:%s: %s", loc, e.Description)
+ case e.Line != 0:
+ return fmt.Sprintf("html/template:%s:%d: %s", e.Name, e.Line, e.Description)
+ case e.Name != "":
+ return fmt.Sprintf("html/template:%s: %s", e.Name, e.Description)
+ }
+ return "html/template: " + e.Description
+}
+
+// errorf creates an error given a format string f and args.
+// The template Name still needs to be supplied.
+func errorf(k ErrorCode, node parse.Node, line int, f string, args ...any) *Error {
+ return &Error{k, node, "", line, fmt.Sprintf(f, args...)}
+}
diff --git a/tpl/internal/go_templates/htmltemplate/escape.go b/tpl/internal/go_templates/htmltemplate/escape.go
new file mode 100644
index 000000000..488894416
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/escape.go
@@ -0,0 +1,962 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "fmt"
+ "html"
+ "io"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+// escapeTemplate rewrites the named template, which must be
+// associated with t, to guarantee that the output of any of the named
+// templates is properly escaped. If no error is returned, then the named templates have
+// been modified. Otherwise the named templates have been rendered
+// unusable.
+func escapeTemplate(tmpl *Template, node parse.Node, name string) error {
+	c, _ := tmpl.esc.escapeTree(context{}, node, name, 0)
+	var err error
+	if c.err != nil {
+		// Attach the template name to the error produced during escaping.
+		err, c.err.Name = c.err, name
+	} else if c.state != stateText {
+		// A template must end back in plain-text context to be safe.
+		err = &Error{ErrEndContext, nil, name, 0, fmt.Sprintf("ends in a non-text context: %v", c)}
+	}
+	if err != nil {
+		// Prevent execution of unsafe templates.
+		if t := tmpl.set[name]; t != nil {
+			t.escapeErr = err
+			t.text.Tree = nil
+			t.Tree = nil
+		}
+		return err
+	}
+	// Escaping succeeded: apply the accumulated edits and mark the
+	// template as safely escaped.
+	tmpl.esc.commit()
+	if t := tmpl.set[name]; t != nil {
+		t.escapeErr = escapeOK
+		t.Tree = t.text.Tree
+	}
+	return nil
+}
+
+// evalArgs formats the list of arguments into a string. It is equivalent to
+// fmt.Sprint(args...), except that it dereferences all pointers.
+func evalArgs(args ...any) string {
+	// Fast path: a lone string argument needs no formatting at all.
+	if len(args) == 1 {
+		if s, ok := args[0].(string); ok {
+			return s
+		}
+	}
+	for i := range args {
+		args[i] = indirectToStringerOrError(args[i])
+	}
+	return fmt.Sprint(args...)
+}
+
+// funcMap maps command names to functions that render their inputs safe.
+// The underscore-mangled names keep these internal escapers from colliding
+// with user-defined template functions.
+var funcMap = template.FuncMap{
+	"_html_template_attrescaper":     attrEscaper,
+	"_html_template_commentescaper":  commentEscaper,
+	"_html_template_cssescaper":      cssEscaper,
+	"_html_template_cssvaluefilter":  cssValueFilter,
+	"_html_template_htmlnamefilter":  htmlNameFilter,
+	"_html_template_htmlescaper":     htmlEscaper,
+	"_html_template_jsregexpescaper": jsRegexpEscaper,
+	"_html_template_jsstrescaper":    jsStrEscaper,
+	"_html_template_jsvalescaper":    jsValEscaper,
+	"_html_template_nospaceescaper":  htmlNospaceEscaper,
+	"_html_template_rcdataescaper":   rcdataEscaper,
+	"_html_template_srcsetescaper":   srcsetFilterAndEscaper,
+	"_html_template_urlescaper":      urlEscaper,
+	"_html_template_urlfilter":       urlFilter,
+	"_html_template_urlnormalizer":   urlNormalizer,
+	"_eval_args_":                    evalArgs,
+}
+
+// escaper collects type inferences about templates and changes needed to make
+// templates injection safe.
+type escaper struct {
+	// ns is the nameSpace that this escaper is associated with.
+	ns *nameSpace
+	// output[templateName] is the output context for a templateName that
+	// has been mangled to include its input context.
+	output map[string]context
+	// derived[c.mangle(name)] maps to a template derived from the template
+	// named name templateName for the start context c.
+	derived map[string]*template.Template
+	// called[templateName] is a set of called mangled template names.
+	called map[string]bool
+	// xxxNodeEdits are the accumulated edits to apply during commit.
+	// Such edits are not applied immediately in case a template set
+	// executes a given template in different escaping contexts.
+	actionNodeEdits   map[*parse.ActionNode][]string
+	templateNodeEdits map[*parse.TemplateNode]string
+	textNodeEdits     map[*parse.TextNode][]byte
+	// rangeContext holds context about the current range loop.
+	rangeContext *rangeContext
+}
+
+// rangeContext holds information about the current range loop.
+// Contexts recorded at break/continue actions are merged back into the
+// loop body's context by joinRange.
+type rangeContext struct {
+	outer     *rangeContext // outer loop
+	breaks    []context     // context at each break action
+	continues []context      // context at each continue action
+}
+
+// makeEscaper creates a blank escaper for the given set.
+// Keyed fields keep the literal correct even if the escaper struct's
+// field order ever changes (unkeyed literals are position-fragile).
+func makeEscaper(n *nameSpace) escaper {
+	return escaper{
+		ns:                n,
+		output:            map[string]context{},
+		derived:           map[string]*template.Template{},
+		called:            map[string]bool{},
+		actionNodeEdits:   map[*parse.ActionNode][]string{},
+		templateNodeEdits: map[*parse.TemplateNode]string{},
+		textNodeEdits:     map[*parse.TextNode][]byte{},
+		rangeContext:      nil,
+	}
+}
+
+// filterFailsafe is an innocuous word that is emitted in place of unsafe values
+// by sanitizer functions. It is not a keyword in any programming language,
+// contains no special characters, is not empty, and when it appears in output
+// it is distinct enough that a developer can find the source of the problem
+// via a search engine. ("Zgotmpl" derives from "Go template".)
+const filterFailsafe = "ZgotmplZ"
+
+// escape escapes a template node, dispatching on the node's concrete type.
+// Break and continue actions record the current context on the enclosing
+// rangeContext and return a dead context, since nothing executes after them.
+func (e *escaper) escape(c context, n parse.Node) context {
+	switch n := n.(type) {
+	case *parse.ActionNode:
+		return e.escapeAction(c, n)
+	case *parse.BreakNode:
+		c.n = n
+		e.rangeContext.breaks = append(e.rangeContext.breaks, c)
+		return context{state: stateDead}
+	case *parse.CommentNode:
+		return c
+	case *parse.ContinueNode:
+		c.n = n
+		// Record on continues, not breaks: joinRange attributes each
+		// slice's errors to the corresponding statement kind.
+		e.rangeContext.continues = append(e.rangeContext.continues, c)
+		return context{state: stateDead}
+	case *parse.IfNode:
+		return e.escapeBranch(c, &n.BranchNode, "if")
+	case *parse.ListNode:
+		return e.escapeList(c, n)
+	case *parse.RangeNode:
+		return e.escapeBranch(c, &n.BranchNode, "range")
+	case *parse.TemplateNode:
+		return e.escapeTemplate(c, n)
+	case *parse.TextNode:
+		return e.escapeText(c, n)
+	case *parse.WithNode:
+		return e.escapeBranch(c, &n.BranchNode, "with")
+	}
+	panic("escaping " + n.String() + " is unimplemented")
+}
+
+// escapeAction escapes an action template node by deciding, from the current
+// context c, which sanitizer functions must be appended to the pipeline.
+func (e *escaper) escapeAction(c context, n *parse.ActionNode) context {
+	if len(n.Pipe.Decl) != 0 {
+		// A local variable assignment, not an interpolation.
+		return c
+	}
+	c = nudge(c)
+	// Check for disallowed use of predefined escapers in the pipeline.
+	for pos, idNode := range n.Pipe.Cmds {
+		node, ok := idNode.Args[0].(*parse.IdentifierNode)
+		if !ok {
+			// A predefined escaper "esc" will never be found as an identifier in a
+			// Chain or Field node, since:
+			// - "esc.x ..." is invalid, since predefined escapers return strings, and
+			//   strings do not have methods, keys or fields.
+			// - "... .esc" is invalid, since predefined escapers are global functions,
+			//   not methods or fields of any types.
+			// Therefore, it is safe to ignore these two node types.
+			continue
+		}
+		ident := node.Ident
+		if _, ok := predefinedEscapers[ident]; ok {
+			// "html"/"urlquery" are only tolerated as the final command,
+			// and "html" never in an unquoted attribute value.
+			if pos < len(n.Pipe.Cmds)-1 ||
+				c.state == stateAttr && c.delim == delimSpaceOrTagEnd && ident == "html" {
+				return context{
+					state: stateError,
+					err:   errorf(ErrPredefinedEscaper, n, n.Line, "predefined escaper %q disallowed in template", ident),
+				}
+			}
+		}
+	}
+	// s collects the names of the contextual escapers to insert.
+	s := make([]string, 0, 3)
+	switch c.state {
+	case stateError:
+		return c
+	case stateURL, stateCSSDqStr, stateCSSSqStr, stateCSSDqURL, stateCSSSqURL, stateCSSURL:
+		switch c.urlPart {
+		case urlPartNone:
+			s = append(s, "_html_template_urlfilter")
+			fallthrough
+		case urlPartPreQuery:
+			switch c.state {
+			case stateCSSDqStr, stateCSSSqStr:
+				s = append(s, "_html_template_cssescaper")
+			default:
+				s = append(s, "_html_template_urlnormalizer")
+			}
+		case urlPartQueryOrFrag:
+			s = append(s, "_html_template_urlescaper")
+		case urlPartUnknown:
+			return context{
+				state: stateError,
+				err:   errorf(ErrAmbigContext, n, n.Line, "%s appears in an ambiguous context within a URL", n),
+			}
+		default:
+			panic(c.urlPart.String())
+		}
+	case stateJS:
+		s = append(s, "_html_template_jsvalescaper")
+		// A slash after a value starts a div operator.
+		c.jsCtx = jsCtxDivOp
+	case stateJSDqStr, stateJSSqStr:
+		s = append(s, "_html_template_jsstrescaper")
+	case stateJSRegexp:
+		s = append(s, "_html_template_jsregexpescaper")
+	case stateCSS:
+		s = append(s, "_html_template_cssvaluefilter")
+	case stateText:
+		s = append(s, "_html_template_htmlescaper")
+	case stateRCDATA:
+		s = append(s, "_html_template_rcdataescaper")
+	case stateAttr:
+		// Handled below in delim check.
+	case stateAttrName, stateTag:
+		c.state = stateAttrName
+		s = append(s, "_html_template_htmlnamefilter")
+	case stateSrcset:
+		s = append(s, "_html_template_srcsetescaper")
+	default:
+		if isComment(c.state) {
+			s = append(s, "_html_template_commentescaper")
+		} else {
+			panic("unexpected state " + c.state.String())
+		}
+	}
+	// Attribute-value contexts need an extra HTML-level escaping pass.
+	switch c.delim {
+	case delimNone:
+		// No extra-escaping needed for raw text content.
+	case delimSpaceOrTagEnd:
+		s = append(s, "_html_template_nospaceescaper")
+	default:
+		s = append(s, "_html_template_attrescaper")
+	}
+	e.editActionNode(n, s)
+	return c
+}
+
+// ensurePipelineContains ensures that the pipeline ends with the commands with
+// the identifiers in s in order. If the pipeline ends with a predefined escaper
+// (i.e. "html" or "urlquery"), merge it with the identifiers in s.
+func ensurePipelineContains(p *parse.PipeNode, s []string) {
+	if len(s) == 0 {
+		// Do not rewrite pipeline if we have no escapers to insert.
+		return
+	}
+	// Precondition: p.Cmds contains at most one predefined escaper and the
+	// escaper will be present at p.Cmds[len(p.Cmds)-1]. This precondition is
+	// always true because of the checks in escapeAction.
+	pipelineLen := len(p.Cmds)
+	if pipelineLen > 0 {
+		lastCmd := p.Cmds[pipelineLen-1]
+		if idNode, ok := lastCmd.Args[0].(*parse.IdentifierNode); ok {
+			if esc := idNode.Ident; predefinedEscapers[esc] {
+				// Pipeline ends with a predefined escaper.
+				if len(p.Cmds) == 1 && len(lastCmd.Args) > 1 {
+					// Special case: pipeline is of the form {{ esc arg1 arg2 ... argN }},
+					// where esc is the predefined escaper, and arg1...argN are its arguments.
+					// Convert this into the equivalent form
+					// {{ _eval_args_ arg1 arg2 ... argN | esc }}, so that esc can be easily
+					// merged with the escapers in s.
+					lastCmd.Args[0] = parse.NewIdentifier("_eval_args_").SetTree(nil).SetPos(lastCmd.Args[0].Position())
+					p.Cmds = appendCmd(p.Cmds, newIdentCmd(esc, p.Position()))
+					pipelineLen++
+				}
+				// If any of the commands in s that we are about to insert is equivalent
+				// to the predefined escaper, use the predefined escaper instead.
+				dup := false
+				for i, escaper := range s {
+					if escFnsEq(esc, escaper) {
+						s[i] = idNode.Ident
+						dup = true
+					}
+				}
+				if dup {
+					// The predefined escaper will already be inserted along with the
+					// escapers in s, so do not copy it to the rewritten pipeline.
+					pipelineLen--
+				}
+			}
+		}
+	}
+	// Rewrite the pipeline, creating the escapers in s at the end of the pipeline.
+	newCmds := make([]*parse.CommandNode, pipelineLen, pipelineLen+len(s))
+	// insertedIdents tracks (normalized) escaper names already present,
+	// so repeated escaping passes stay idempotent.
+	insertedIdents := make(map[string]bool)
+	for i := 0; i < pipelineLen; i++ {
+		cmd := p.Cmds[i]
+		newCmds[i] = cmd
+		if idNode, ok := cmd.Args[0].(*parse.IdentifierNode); ok {
+			insertedIdents[normalizeEscFn(idNode.Ident)] = true
+		}
+	}
+	for _, name := range s {
+		if !insertedIdents[normalizeEscFn(name)] {
+			// When two templates share an underlying parse tree via the use of
+			// AddParseTree and one template is executed after the other, this check
+			// ensures that escapers that were already inserted into the pipeline on
+			// the first escaping pass do not get inserted again.
+			newCmds = appendCmd(newCmds, newIdentCmd(name, p.Position()))
+		}
+	}
+	p.Cmds = newCmds
+}
+
+// predefinedEscapers contains template predefined escapers that are equivalent
+// to some contextual escapers. Keep in sync with equivEscapers.
+var predefinedEscapers = map[string]bool{
+	"html":     true,
+	"urlquery": true,
+}
+
+// equivEscapers matches contextual escapers to equivalent predefined
+// template escapers.
+var equivEscapers = map[string]string{
+	// The following pairs of HTML escapers provide equivalent security
+	// guarantees, since they all escape '\000', '\'', '"', '&', '<', and '>'.
+	"_html_template_attrescaper":   "html",
+	"_html_template_htmlescaper":   "html",
+	"_html_template_rcdataescaper": "html",
+	// These two URL escapers produce URLs safe for embedding in a URL query by
+	// percent-encoding all the reserved characters specified in RFC 3986 Section
+	// 2.2
+	"_html_template_urlescaper": "urlquery",
+	// These two functions are not actually equivalent; urlquery is stricter as it
+	// escapes reserved characters (e.g. '#'), while _html_template_urlnormalizer
+	// does not. It is therefore only safe to replace _html_template_urlnormalizer
+	// with urlquery (this happens in ensurePipelineContains), but not the other
+	// way around. We keep this entry around to preserve the behavior of templates
+	// written before Go 1.9, which might depend on this substitution taking place.
+	"_html_template_urlnormalizer": "urlquery",
+}
+
+// escFnsEq reports whether the two escaping functions are equivalent,
+// i.e. whether they normalize to the same canonical escaper name.
+func escFnsEq(a, b string) bool {
+	return normalizeEscFn(a) == normalizeEscFn(b)
+}
+
+// normalizeEscFn maps an escaper function name to its canonical form:
+// normalizeEscFn(a) == normalizeEscFn(b) for any pair of names of
+// escaper functions a and b that are equivalent.
+func normalizeEscFn(e string) string {
+	norm := equivEscapers[e]
+	if norm == "" {
+		// Not in the equivalence table; the name is its own canonical form.
+		return e
+	}
+	return norm
+}
+
+// redundantFuncs[a][b] implies that funcMap[b](funcMap[a](x)) == funcMap[a](x)
+// for all x, so appendCmd can drop b when it directly follows a.
+var redundantFuncs = map[string]map[string]bool{
+	"_html_template_commentescaper": {
+		"_html_template_attrescaper":    true,
+		"_html_template_nospaceescaper": true,
+		"_html_template_htmlescaper":    true,
+	},
+	"_html_template_cssescaper": {
+		"_html_template_attrescaper": true,
+	},
+	"_html_template_jsregexpescaper": {
+		"_html_template_attrescaper": true,
+	},
+	"_html_template_jsstrescaper": {
+		"_html_template_attrescaper": true,
+	},
+	"_html_template_urlescaper": {
+		"_html_template_urlnormalizer": true,
+	},
+}
+
+// appendCmd appends cmd to the command pipeline unless it would be
+// redundant with the final command already present (per redundantFuncs).
+func appendCmd(cmds []*parse.CommandNode, cmd *parse.CommandNode) []*parse.CommandNode {
+	n := len(cmds)
+	if n > 0 {
+		last, okLast := cmds[n-1].Args[0].(*parse.IdentifierNode)
+		next, okNext := cmd.Args[0].(*parse.IdentifierNode)
+		if okLast && okNext && redundantFuncs[last.Ident][next.Ident] {
+			return cmds
+		}
+	}
+	return append(cmds, cmd)
+}
+
+// newIdentCmd produces a command containing a single identifier node,
+// used to splice sanitizer functions into a pipeline.
+func newIdentCmd(identifier string, pos parse.Pos) *parse.CommandNode {
+	return &parse.CommandNode{
+		NodeType: parse.NodeCommand,
+		Args:     []parse.Node{parse.NewIdentifier(identifier).SetTree(nil).SetPos(pos)}, // TODO: SetTree.
+	}
+}
+
+// nudge returns the context that would result from following empty string
+// transitions from the input context.
+// For example, parsing:
+//     `<a href=`
+// will end in context{stateBeforeValue, attrURL}, but parsing one extra rune:
+//     `<a href=x`
+// will end in context{stateURL, delimSpaceOrTagEnd, ...}.
+// There are two transitions that happen when the 'x' is seen:
+// (1) Transition from a before-value state to a start-of-value state without
+//     consuming any character.
+// (2) Consume 'x' and transition past the first value character.
+// In this case, nudging produces the context after (1) happens.
+func nudge(c context) context {
+	switch c.state {
+	case stateTag:
+		// In `<foo {{.}}`, the action should emit an attribute.
+		c.state = stateAttrName
+	case stateBeforeValue:
+		// In `<foo bar={{.}}`, the action is an undelimited value.
+		c.state, c.delim, c.attr = attrStartStates[c.attr], delimSpaceOrTagEnd, attrNone
+	case stateAfterName:
+		// In `<foo bar {{.}}`, the action is an attribute name.
+		c.state, c.attr = stateAttrName, attrNone
+	}
+	return c
+}
+
+// join joins the two contexts of a branch template node. The result is an
+// error context if either of the input contexts are error contexts, or if the
+// input contexts differ.
+func join(a, b context, node parse.Node, nodeName string) context {
+	// Errors dominate; a dead branch (break/continue) contributes nothing.
+	if a.state == stateError {
+		return a
+	}
+	if b.state == stateError {
+		return b
+	}
+	if a.state == stateDead {
+		return b
+	}
+	if b.state == stateDead {
+		return a
+	}
+	if a.eq(b) {
+		return a
+	}
+
+	c := a
+	c.urlPart = b.urlPart
+	if c.eq(b) {
+		// The contexts differ only by urlPart.
+		c.urlPart = urlPartUnknown
+		return c
+	}
+
+	c = a
+	c.jsCtx = b.jsCtx
+	if c.eq(b) {
+		// The contexts differ only by jsCtx.
+		c.jsCtx = jsCtxUnknown
+		return c
+	}
+
+	// Allow a nudged context to join with an unnudged one.
+	// This means that
+	//     <p title={{if .C}}{{.}}{{end}}
+	// ends in an unquoted value state even though the else branch
+	// ends in stateBeforeValue.
+	if c, d := nudge(a), nudge(b); !(c.eq(a) && d.eq(b)) {
+		if e := join(c, d, node, nodeName); e.state != stateError {
+			return e
+		}
+	}
+
+	return context{
+		state: stateError,
+		err:   errorf(ErrBranchEnd, node, 0, "{{%s}} branches end in different contexts: %v, %v", nodeName, a, b),
+	}
+}
+
+// escapeBranch escapes a branch template node: "if", "range" and "with".
+// For "range", a fresh rangeContext is pushed so break/continue contexts
+// within the body are collected and joined back in.
+func (e *escaper) escapeBranch(c context, n *parse.BranchNode, nodeName string) context {
+	if nodeName == "range" {
+		e.rangeContext = &rangeContext{outer: e.rangeContext}
+	}
+	c0 := e.escapeList(c, n.List)
+	if nodeName == "range" {
+		if c0.state != stateError {
+			c0 = joinRange(c0, e.rangeContext)
+		}
+		e.rangeContext = e.rangeContext.outer
+		if c0.state == stateError {
+			return c0
+		}
+
+		// The "true" branch of a "range" node can execute multiple times.
+		// We check that executing n.List once results in the same context
+		// as executing n.List twice.
+		e.rangeContext = &rangeContext{outer: e.rangeContext}
+		c1, _ := e.escapeListConditionally(c0, n.List, nil)
+		c0 = join(c0, c1, n, nodeName)
+		if c0.state == stateError {
+			e.rangeContext = e.rangeContext.outer
+			// Make clear that this is a problem on loop re-entry
+			// since developers tend to overlook that branch when
+			// debugging templates.
+			c0.err.Line = n.Line
+			c0.err.Description = "on range loop re-entry: " + c0.err.Description
+			return c0
+		}
+		c0 = joinRange(c0, e.rangeContext)
+		e.rangeContext = e.rangeContext.outer
+		if c0.state == stateError {
+			return c0
+		}
+	}
+	c1 := e.escapeList(c, n.ElseList)
+	return join(c0, c1, n, nodeName)
+}
+
+// joinRange merges the contexts recorded at break and continue statements
+// into the overall loop-body context c0, attributing any resulting error
+// to the specific break/continue line.
+func joinRange(c0 context, rc *rangeContext) context {
+	// Merge contexts at break and continue statements into overall body context.
+	// In theory we could treat breaks differently from continues, but for now it is
+	// enough to treat them both as going back to the start of the loop (which may then stop).
+	for _, c := range rc.breaks {
+		c0 = join(c0, c, c.n, "range")
+		if c0.state == stateError {
+			c0.err.Line = c.n.(*parse.BreakNode).Line
+			c0.err.Description = "at range loop break: " + c0.err.Description
+			return c0
+		}
+	}
+	for _, c := range rc.continues {
+		c0 = join(c0, c, c.n, "range")
+		if c0.state == stateError {
+			c0.err.Line = c.n.(*parse.ContinueNode).Line
+			c0.err.Description = "at range loop continue: " + c0.err.Description
+			return c0
+		}
+	}
+	return c0
+}
+
+// escapeList escapes a list template node by threading the context
+// through each child node in order.
+func (e *escaper) escapeList(c context, n *parse.ListNode) context {
+	if n == nil {
+		return c
+	}
+	for _, child := range n.Nodes {
+		if c = e.escape(c, child); c.state == stateDead {
+			// A break/continue was seen; nothing after it executes.
+			break
+		}
+	}
+	return c
+}
+
+// escapeListConditionally escapes a list node but only preserves edits and
+// inferences in e if the inferences and output context satisfy filter.
+// It returns the best guess at an output context, and the result of the filter
+// which is the same as whether e was updated.
+func (e *escaper) escapeListConditionally(c context, n *parse.ListNode, filter func(*escaper, context) bool) (context, bool) {
+	// Escape into a scratch escaper so effects can be discarded.
+	e1 := makeEscaper(e.ns)
+	e1.rangeContext = e.rangeContext
+	// Make type inferences available to f.
+	for k, v := range e.output {
+		e1.output[k] = v
+	}
+	c = e1.escapeList(c, n)
+	ok := filter != nil && filter(&e1, c)
+	if ok {
+		// Copy inferences and edits from e1 back into e.
+		for k, v := range e1.output {
+			e.output[k] = v
+		}
+		for k, v := range e1.derived {
+			e.derived[k] = v
+		}
+		for k, v := range e1.called {
+			e.called[k] = v
+		}
+		for k, v := range e1.actionNodeEdits {
+			e.editActionNode(k, v)
+		}
+		for k, v := range e1.templateNodeEdits {
+			e.editTemplateNode(k, v)
+		}
+		for k, v := range e1.textNodeEdits {
+			e.editTextNode(k, v)
+		}
+	}
+	return c, ok
+}
+
+// escapeTemplate escapes a {{template}} call node.
+func (e *escaper) escapeTemplate(c context, n *parse.TemplateNode) context {
+	out, name := e.escapeTree(c, n, n.Name, n.Line)
+	if name != n.Name {
+		// The callee was escaped under a context-mangled name;
+		// record an edit so the call site is rewritten at commit.
+		e.editTemplateNode(n, name)
+	}
+	return out
+}
+
+// escapeTree escapes the named template starting in the given context as
+// necessary and returns its output context.
+func (e *escaper) escapeTree(c context, node parse.Node, name string, line int) (context, string) {
+	// Mangle the template name with the input context to produce a reliable
+	// identifier.
+	dname := c.mangle(name)
+	e.called[dname] = true
+	if out, ok := e.output[dname]; ok {
+		// Already escaped.
+		return out, dname
+	}
+	t := e.template(name)
+	if t == nil {
+		// Two cases: The template exists but is empty, or has never been mentioned at
+		// all. Distinguish the cases in the error messages.
+		if e.ns.set[name] != nil {
+			return context{
+				state: stateError,
+				err:   errorf(ErrNoSuchTemplate, node, line, "%q is an incomplete or empty template", name),
+			}, dname
+		}
+		return context{
+			state: stateError,
+			err:   errorf(ErrNoSuchTemplate, node, line, "no such template %q", name),
+		}, dname
+	}
+	if dname != name {
+		// Use any template derived during an earlier call to escapeTemplate
+		// with different top level templates, or clone if necessary.
+		dt := e.template(dname)
+		if dt == nil {
+			dt = template.New(dname)
+			dt.Tree = &parse.Tree{Name: dname, Root: t.Root.CopyList()}
+			e.derived[dname] = dt
+		}
+		t = dt
+	}
+	return e.computeOutCtx(c, t), dname
+}
+
+// computeOutCtx takes a template and its start context and computes the output
+// context while storing any inferences in e.
+func (e *escaper) computeOutCtx(c context, t *template.Template) context {
+	// Propagate context over the body.
+	c1, ok := e.escapeTemplateBody(c, t)
+	if !ok {
+		// Look for a fixed point by assuming c1 as the output context.
+		if c2, ok2 := e.escapeTemplateBody(c1, t); ok2 {
+			c1, ok = c2, true
+		}
+		// Use c1 as the error context if neither assumption worked.
+	}
+	if !ok && c1.state != stateError {
+		// Recursive templates with no stable output context cannot be
+		// escaped safely.
+		return context{
+			state: stateError,
+			err:   errorf(ErrOutputContext, t.Tree.Root, 0, "cannot compute output context for template %s", t.Name()),
+		}
+	}
+	return c1
+}
+
+// escapeTemplateBody escapes the given template assuming the given output
+// context, and returns the best guess at the output context and whether the
+// assumption was correct.
+func (e *escaper) escapeTemplateBody(c context, t *template.Template) (context, bool) {
+	filter := func(e1 *escaper, c1 context) bool {
+		if c1.state == stateError {
+			// Do not update the input escaper, e.
+			return false
+		}
+		if !e1.called[t.Name()] {
+			// If t is not recursively called, then c1 is an
+			// accurate output context.
+			return true
+		}
+		// c1 is accurate if it matches our assumed output context.
+		return c.eq(c1)
+	}
+	// We need to assume an output context so that recursive template calls
+	// take the fast path out of escapeTree instead of infinitely recurring.
+	// Naively assuming that the input context is the same as the output
+	// works >90% of the time.
+	e.output[t.Name()] = c
+	return e.escapeListConditionally(c, t.Tree.Root, filter)
+}
+
+// delimEnds maps each delim to a string of characters that terminate it,
+// used by contextAfterText to find the end of an attribute value.
+var delimEnds = [...]string{
+	delimDoubleQuote: `"`,
+	delimSingleQuote: "'",
+	// Determined empirically by running the below in various browsers.
+	// var div = document.createElement("DIV");
+	// for (var i = 0; i < 0x10000; ++i) {
+	//   div.innerHTML = "<span title=x" + String.fromCharCode(i) + "-bar>";
+	//   if (div.getElementsByTagName("SPAN")[0].title.indexOf("bar") < 0)
+	//     document.write("<p>U+" + i.toString(16));
+	// }
+	delimSpaceOrTagEnd: " \t\n\f\r>",
+}
+
+// doctypeBytes is the (case-insensitively matched) prefix of a DOCTYPE
+// declaration; escapeText leaves such '<' occurrences unescaped.
+var doctypeBytes = []byte("<!DOCTYPE")
+
+// escapeText escapes a text template node, rewriting stray '<' in text/RCDATA
+// and stripping JS/CSS/HTML comment contents, and returns the context after
+// the text.
+func (e *escaper) escapeText(c context, n *parse.TextNode) context {
+	// s is the raw text; written tracks how much has been copied to b;
+	// i walks token boundaries found by contextAfterText.
+	s, written, i, b := n.Text, 0, 0, new(bytes.Buffer)
+	for i != len(s) {
+		c1, nread := contextAfterText(c, s[i:])
+		i1 := i + nread
+		if c.state == stateText || c.state == stateRCDATA {
+			end := i1
+			if c1.state != c.state {
+				// Back up to the '<' that triggered the state change so it
+				// is not itself escaped.
+				for j := end - 1; j >= i; j-- {
+					if s[j] == '<' {
+						end = j
+						break
+					}
+				}
+			}
+			for j := i; j < end; j++ {
+				if s[j] == '<' && !bytes.HasPrefix(bytes.ToUpper(s[j:]), doctypeBytes) {
+					b.Write(s[written:j])
+					b.WriteString("&lt;")
+					written = j + 1
+				}
+			}
+		} else if isComment(c.state) && c.delim == delimNone {
+			switch c.state {
+			case stateJSBlockCmt:
+				// https://es5.github.com/#x7.4:
+				// "Comments behave like white space and are
+				// discarded except that, if a MultiLineComment
+				// contains a line terminator character, then
+				// the entire comment is considered to be a
+				// LineTerminator for purposes of parsing by
+				// the syntactic grammar."
+				if bytes.ContainsAny(s[written:i1], "\n\r\u2028\u2029") {
+					b.WriteByte('\n')
+				} else {
+					b.WriteByte(' ')
+				}
+			case stateCSSBlockCmt:
+				b.WriteByte(' ')
+			}
+			written = i1
+		}
+		if c.state != c1.state && isComment(c1.state) && c1.delim == delimNone {
+			// Preserve the portion between written and the comment start.
+			cs := i1 - 2
+			if c1.state == stateHTMLCmt {
+				// "<!--" instead of "/*" or "//"
+				cs -= 2
+			}
+			b.Write(s[written:cs])
+			written = i1
+		}
+		if i == i1 && c.state == c1.state {
+			panic(fmt.Sprintf("infinite loop from %v to %v on %q..%q", c, c1, s[:i], s[i:]))
+		}
+		c, i = c1, i1
+	}
+
+	if written != 0 && c.state != stateError {
+		if !isComment(c.state) || c.delim != delimNone {
+			b.Write(n.Text[written:])
+		}
+		e.editTextNode(n, b.Bytes())
+	}
+	return c
+}
+
+// contextAfterText starts in context c, consumes some tokens from the front of
+// s, then returns the context after those tokens and the unprocessed suffix.
+func contextAfterText(c context, s []byte) (context, int) {
+	if c.delim == delimNone {
+		c1, i := tSpecialTagEnd(c, s)
+		if i == 0 {
+			// A special end tag (`</script>`) has been seen and
+			// all content preceding it has been consumed.
+			return c1, 0
+		}
+		// Consider all content up to any end tag.
+		return transitionFunc[c.state](c, s[:i])
+	}
+
+	// We are at the beginning of an attribute value.
+
+	i := bytes.IndexAny(s, delimEnds[c.delim])
+	if i == -1 {
+		i = len(s)
+	}
+	if c.delim == delimSpaceOrTagEnd {
+		// https://www.w3.org/TR/html5/syntax.html#attribute-value-(unquoted)-state
+		// lists the runes below as error characters.
+		// Error out because HTML parsers may differ on whether
+		// "<a id= onclick=f("     ends inside id's or onclick's value,
+		// "<a class=`foo "        ends inside a value,
+		// "<a style=font:'Arial'" needs open-quote fixup.
+		// IE treats '`' as a quotation character.
+		if j := bytes.IndexAny(s[:i], "\"'<=`"); j >= 0 {
+			return context{
+				state: stateError,
+				err:   errorf(ErrBadHTML, nil, 0, "%q in unquoted attr: %q", s[j:j+1], s[:i]),
+			}, len(s)
+		}
+	}
+	if i == len(s) {
+		// Remain inside the attribute.
+		// Decode the value so non-HTML rules can easily handle
+		//     <button onclick="alert(&quot;Hi!&quot;)">
+		// without having to entity decode token boundaries.
+		for u := []byte(html.UnescapeString(string(s))); len(u) != 0; {
+			c1, i1 := transitionFunc[c.state](c, u)
+			c, u = c1, u[i1:]
+		}
+		return c, len(s)
+	}
+
+	element := c.element
+
+	// If this is a non-JS "type" attribute inside "script" tag, do not treat the contents as JS.
+	if c.state == stateAttr && c.element == elementScript && c.attr == attrScriptType && !isJSType(string(s[:i])) {
+		element = elementNone
+	}
+
+	if c.delim != delimSpaceOrTagEnd {
+		// Consume any quote.
+		i++
+	}
+	// On exiting an attribute, we discard all state information
+	// except the state and element.
+	return context{state: stateTag, element: element}, i
+}
+
+// editActionNode records a change to an action pipeline for later commit.
+// A node appearing twice means two templates share a parse tree, which
+// would make the edits ambiguous, so it panics.
+func (e *escaper) editActionNode(n *parse.ActionNode, cmds []string) {
+	if _, ok := e.actionNodeEdits[n]; ok {
+		panic(fmt.Sprintf("node %s shared between templates", n))
+	}
+	e.actionNodeEdits[n] = cmds
+}
+
+// editTemplateNode records a change to a {{template}} callee for later commit.
+// Panics if the node was already edited (shared parse tree).
+func (e *escaper) editTemplateNode(n *parse.TemplateNode, callee string) {
+	if _, ok := e.templateNodeEdits[n]; ok {
+		panic(fmt.Sprintf("node %s shared between templates", n))
+	}
+	e.templateNodeEdits[n] = callee
+}
+
+// editTextNode records a change to a text node for later commit.
+// Panics if the node was already edited (shared parse tree).
+func (e *escaper) editTextNode(n *parse.TextNode, text []byte) {
+	if _, ok := e.textNodeEdits[n]; ok {
+		panic(fmt.Sprintf("node %s shared between templates", n))
+	}
+	e.textNodeEdits[n] = text
+}
+
+// commit applies changes to actions and template calls needed to contextually
+// autoescape content and adds any derived templates to the set.
+func (e *escaper) commit() {
+	// Install the sanitizer functions on every escaped template.
+	for name := range e.output {
+		e.template(name).Funcs(funcMap)
+	}
+	// Any template from the name space associated with this escaper can be used
+	// to add derived templates to the underlying text/template name space.
+	tmpl := e.arbitraryTemplate()
+	for _, t := range e.derived {
+		if _, err := tmpl.text.AddParseTree(t.Name(), t.Tree); err != nil {
+			panic("error adding derived template")
+		}
+	}
+	// Apply the accumulated parse-tree edits.
+	for n, s := range e.actionNodeEdits {
+		ensurePipelineContains(n.Pipe, s)
+	}
+	for n, name := range e.templateNodeEdits {
+		n.Name = name
+	}
+	for n, s := range e.textNodeEdits {
+		n.Text = s
+	}
+	// Reset state that is specific to this commit so that the same changes are
+	// not re-applied to the template on subsequent calls to commit.
+	e.called = make(map[string]bool)
+	e.actionNodeEdits = make(map[*parse.ActionNode][]string)
+	e.templateNodeEdits = make(map[*parse.TemplateNode]string)
+	e.textNodeEdits = make(map[*parse.TextNode][]byte)
+}
+
+// template returns the named template given a mangled template name.
+func (e *escaper) template(name string) *template.Template {
+	// Any template from the name space associated with this escaper can be
+	// used to look up templates in the underlying text/template name space;
+	// fall back to the templates derived during this escape.
+	if t := e.arbitraryTemplate().text.Lookup(name); t != nil {
+		return t
+	}
+	return e.derived[name]
+}
+
+// arbitraryTemplate returns an arbitrary template from the name space
+// associated with e and panics if no templates are found.
+// Map iteration order is irrelevant here: any member gives access to the
+// shared name space.
+func (e *escaper) arbitraryTemplate() *Template {
+	for _, t := range e.ns.set {
+		return t
+	}
+	panic("no templates in name space")
+}
+
+// Forwarding functions so that clients need only import this package
+// to reach the general escaping functions of text/template.
+
+// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b.
+// It forwards to text/template so clients need only import this package.
+func HTMLEscape(w io.Writer, b []byte) {
+	template.HTMLEscape(w, b)
+}
+
+// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s.
+// It forwards to text/template.
+func HTMLEscapeString(s string) string {
+	return template.HTMLEscapeString(s)
+}
+
+// HTMLEscaper returns the escaped HTML equivalent of the textual
+// representation of its arguments. It forwards to text/template.
+func HTMLEscaper(args ...any) string {
+	return template.HTMLEscaper(args...)
+}
+
+// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b.
+// It forwards to text/template.
+func JSEscape(w io.Writer, b []byte) {
+	template.JSEscape(w, b)
+}
+
+// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s.
+// It forwards to text/template.
+func JSEscapeString(s string) string {
+	return template.JSEscapeString(s)
+}
+
+// JSEscaper returns the escaped JavaScript equivalent of the textual
+// representation of its arguments. It forwards to text/template.
+func JSEscaper(args ...any) string {
+	return template.JSEscaper(args...)
+}
+
+// URLQueryEscaper returns the escaped value of the textual representation of
+// its arguments in a form suitable for embedding in a URL query.
+// It forwards to text/template.
+func URLQueryEscaper(args ...any) string {
+	return template.URLQueryEscaper(args...)
+}
diff --git a/tpl/internal/go_templates/htmltemplate/escape_test.go b/tpl/internal/go_templates/htmltemplate/escape_test.go
new file mode 100644
index 000000000..adf160b5d
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/escape_test.go
@@ -0,0 +1,1998 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ htmltemplate "html/template"
+ "os"
+ "strings"
+ "testing"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+type badMarshaler struct{}
+
+func (x *badMarshaler) MarshalJSON() ([]byte, error) {
+ // Keys in valid JSON must be double quoted as must all strings.
+ return []byte("{ foo: 'not quite valid JSON' }"), nil
+}
+
+type goodMarshaler struct{}
+
+func (x *goodMarshaler) MarshalJSON() ([]byte, error) {
+ return []byte(`{ "<foo>": "O'Reilly" }`), nil
+}
+
+func TestEscape(t *testing.T) {
+ data := struct {
+ F, T bool
+ C, G, H string
+ A, E []string
+ B, M json.Marshaler
+ N int
+ U any // untyped nil
+ Z *int // typed nil
+ W htmltemplate.HTML
+ }{
+ F: false,
+ T: true,
+ C: "<Cincinnati>",
+ G: "<Goodbye>",
+ H: "<Hello>",
+ A: []string{"<a>", "<b>"},
+ E: []string{},
+ N: 42,
+ B: &badMarshaler{},
+ M: &goodMarshaler{},
+ U: nil,
+ Z: nil,
+ W: htmltemplate.HTML(`&iexcl;<b class="foo">Hello</b>, <textarea>O'World</textarea>!`),
+ }
+ pdata := &data
+
+ tests := []struct {
+ name string
+ input string
+ output string
+ }{
+ {
+ "if",
+ "{{if .T}}Hello{{end}}, {{.C}}!",
+ "Hello, &lt;Cincinnati&gt;!",
+ },
+ {
+ "else",
+ "{{if .F}}{{.H}}{{else}}{{.G}}{{end}}!",
+ "&lt;Goodbye&gt;!",
+ },
+ {
+ "overescaping1",
+ "Hello, {{.C | html}}!",
+ "Hello, &lt;Cincinnati&gt;!",
+ },
+ {
+ "overescaping2",
+ "Hello, {{html .C}}!",
+ "Hello, &lt;Cincinnati&gt;!",
+ },
+ {
+ "overescaping3",
+ "{{with .C}}{{$msg := .}}Hello, {{$msg}}!{{end}}",
+ "Hello, &lt;Cincinnati&gt;!",
+ },
+ {
+ "assignment",
+ "{{if $x := .H}}{{$x}}{{end}}",
+ "&lt;Hello&gt;",
+ },
+ {
+ "withBody",
+ "{{with .H}}{{.}}{{end}}",
+ "&lt;Hello&gt;",
+ },
+ {
+ "withElse",
+ "{{with .E}}{{.}}{{else}}{{.H}}{{end}}",
+ "&lt;Hello&gt;",
+ },
+ {
+ "rangeBody",
+ "{{range .A}}{{.}}{{end}}",
+ "&lt;a&gt;&lt;b&gt;",
+ },
+ {
+ "rangeElse",
+ "{{range .E}}{{.}}{{else}}{{.H}}{{end}}",
+ "&lt;Hello&gt;",
+ },
+ {
+ "nonStringValue",
+ "{{.T}}",
+ "true",
+ },
+ {
+ "untypedNilValue",
+ "{{.U}}",
+ "",
+ },
+ {
+ "typedNilValue",
+ "{{.Z}}",
+ "&lt;nil&gt;",
+ },
+ {
+ "constant",
+ `<a href="/search?q={{"'a<b'"}}">`,
+ `<a href="/search?q=%27a%3cb%27">`,
+ },
+ {
+ "multipleAttrs",
+ "<a b=1 c={{.H}}>",
+ "<a b=1 c=&lt;Hello&gt;>",
+ },
+ {
+ "urlStartRel",
+ `<a href='{{"/foo/bar?a=b&c=d"}}'>`,
+ `<a href='/foo/bar?a=b&amp;c=d'>`,
+ },
+ {
+ "urlStartAbsOk",
+ `<a href='{{"http://example.com/foo/bar?a=b&c=d"}}'>`,
+ `<a href='http://example.com/foo/bar?a=b&amp;c=d'>`,
+ },
+ {
+ "protocolRelativeURLStart",
+ `<a href='{{"//example.com:8000/foo/bar?a=b&c=d"}}'>`,
+ `<a href='//example.com:8000/foo/bar?a=b&amp;c=d'>`,
+ },
+ {
+ "pathRelativeURLStart",
+ `<a href="{{"/javascript:80/foo/bar"}}">`,
+ `<a href="/javascript:80/foo/bar">`,
+ },
+ {
+ "dangerousURLStart",
+ `<a href='{{"javascript:alert(%22pwned%22)"}}'>`,
+ `<a href='#ZgotmplZ'>`,
+ },
+ {
+ "dangerousURLStart2",
+ `<a href=' {{"javascript:alert(%22pwned%22)"}}'>`,
+ `<a href=' #ZgotmplZ'>`,
+ },
+ {
+ "nonHierURL",
+ `<a href={{"mailto:Muhammed \"The Greatest\" Ali <m.ali@example.com>"}}>`,
+ `<a href=mailto:Muhammed%20%22The%20Greatest%22%20Ali%20%3cm.ali@example.com%3e>`,
+ },
+ {
+ "urlPath",
+ `<a href='http://{{"javascript:80"}}/foo'>`,
+ `<a href='http://javascript:80/foo'>`,
+ },
+ {
+ "urlQuery",
+ `<a href='/search?q={{.H}}'>`,
+ `<a href='/search?q=%3cHello%3e'>`,
+ },
+ {
+ "urlFragment",
+ `<a href='/faq#{{.H}}'>`,
+ `<a href='/faq#%3cHello%3e'>`,
+ },
+ {
+ "urlBranch",
+ `<a href="{{if .F}}/foo?a=b{{else}}/bar{{end}}">`,
+ `<a href="/bar">`,
+ },
+ {
+ "urlBranchConflictMoot",
+ `<a href="{{if .T}}/foo?a={{else}}/bar#{{end}}{{.C}}">`,
+ `<a href="/foo?a=%3cCincinnati%3e">`,
+ },
+ {
+ "jsStrValue",
+ "<button onclick='alert({{.H}})'>",
+ `<button onclick='alert(&#34;\u003cHello\u003e&#34;)'>`,
+ },
+ {
+ "jsNumericValue",
+ "<button onclick='alert({{.N}})'>",
+ `<button onclick='alert( 42 )'>`,
+ },
+ {
+ "jsBoolValue",
+ "<button onclick='alert({{.T}})'>",
+ `<button onclick='alert( true )'>`,
+ },
+ {
+ "jsNilValueTyped",
+ "<button onclick='alert(typeof{{.Z}})'>",
+ `<button onclick='alert(typeof null )'>`,
+ },
+ {
+ "jsNilValueUntyped",
+ "<button onclick='alert(typeof{{.U}})'>",
+ `<button onclick='alert(typeof null )'>`,
+ },
+ {
+ "jsObjValue",
+ "<button onclick='alert({{.A}})'>",
+ `<button onclick='alert([&#34;\u003ca\u003e&#34;,&#34;\u003cb\u003e&#34;])'>`,
+ },
+ {
+ "jsObjValueScript",
+ "<script>alert({{.A}})</script>",
+ `<script>alert(["\u003ca\u003e","\u003cb\u003e"])</script>`,
+ },
+ {
+ "jsObjValueNotOverEscaped",
+ "<button onclick='alert({{.A | html}})'>",
+ `<button onclick='alert([&#34;\u003ca\u003e&#34;,&#34;\u003cb\u003e&#34;])'>`,
+ },
+ {
+ "jsStr",
+ "<button onclick='alert(&quot;{{.H}}&quot;)'>",
+ `<button onclick='alert(&quot;\u003cHello\u003e&quot;)'>`,
+ },
+ {
+ "badMarshaler",
+ `<button onclick='alert(1/{{.B}}in numbers)'>`,
+ `<button onclick='alert(1/ /* json: error calling MarshalJSON for type *template.badMarshaler: invalid character &#39;f&#39; looking for beginning of object key string */null in numbers)'>`,
+ },
+ {
+ "jsMarshaler",
+ `<button onclick='alert({{.M}})'>`,
+ `<button onclick='alert({&#34;\u003cfoo\u003e&#34;:&#34;O&#39;Reilly&#34;})'>`,
+ },
+ {
+ "jsStrNotUnderEscaped",
+ "<button onclick='alert({{.C | urlquery}})'>",
+ // URL escaped, then quoted for JS.
+ `<button onclick='alert(&#34;%3CCincinnati%3E&#34;)'>`,
+ },
+ {
+ "jsRe",
+ `<button onclick='alert(/{{"foo+bar"}}/.test(""))'>`,
+ `<button onclick='alert(/foo\u002bbar/.test(""))'>`,
+ },
+ {
+ "jsReBlank",
+ `<script>alert(/{{""}}/.test(""));</script>`,
+ `<script>alert(/(?:)/.test(""));</script>`,
+ },
+ {
+ "jsReAmbigOk",
+ `<script>{{if true}}var x = 1{{end}}</script>`,
+ // The {if} ends in an ambiguous jsCtx but there is
+ // no slash following so we shouldn't care.
+ `<script>var x = 1</script>`,
+ },
+ {
+ "styleBidiKeywordPassed",
+ `<p style="dir: {{"ltr"}}">`,
+ `<p style="dir: ltr">`,
+ },
+ {
+ "styleBidiPropNamePassed",
+ `<p style="border-{{"left"}}: 0; border-{{"right"}}: 1in">`,
+ `<p style="border-left: 0; border-right: 1in">`,
+ },
+ {
+ "styleExpressionBlocked",
+ `<p style="width: {{"expression(alert(1337))"}}">`,
+ `<p style="width: ZgotmplZ">`,
+ },
+ {
+ "styleTagSelectorPassed",
+ `<style>{{"p"}} { color: pink }</style>`,
+ `<style>p { color: pink }</style>`,
+ },
+ {
+ "styleIDPassed",
+ `<style>p{{"#my-ID"}} { font: Arial }</style>`,
+ `<style>p#my-ID { font: Arial }</style>`,
+ },
+ {
+ "styleClassPassed",
+ `<style>p{{".my_class"}} { font: Arial }</style>`,
+ `<style>p.my_class { font: Arial }</style>`,
+ },
+ {
+ "styleQuantityPassed",
+ `<a style="left: {{"2em"}}; top: {{0}}">`,
+ `<a style="left: 2em; top: 0">`,
+ },
+ {
+ "stylePctPassed",
+ `<table style=width:{{"100%"}}>`,
+ `<table style=width:100%>`,
+ },
+ {
+ "styleColorPassed",
+ `<p style="color: {{"#8ff"}}; background: {{"#000"}}">`,
+ `<p style="color: #8ff; background: #000">`,
+ },
+ {
+ "styleObfuscatedExpressionBlocked",
+ `<p style="width: {{" e\\78preS\x00Sio/**/n(alert(1337))"}}">`,
+ `<p style="width: ZgotmplZ">`,
+ },
+ {
+ "styleMozBindingBlocked",
+ `<p style="{{"-moz-binding(alert(1337))"}}: ...">`,
+ `<p style="ZgotmplZ: ...">`,
+ },
+ {
+ "styleObfuscatedMozBindingBlocked",
+ `<p style="{{" -mo\\7a-B\x00I/**/nding(alert(1337))"}}: ...">`,
+ `<p style="ZgotmplZ: ...">`,
+ },
+ {
+ "styleFontNameString",
+ `<p style='font-family: "{{"Times New Roman"}}"'>`,
+ `<p style='font-family: "Times New Roman"'>`,
+ },
+ {
+ "styleFontNameString",
+ `<p style='font-family: "{{"Times New Roman"}}", "{{"sans-serif"}}"'>`,
+ `<p style='font-family: "Times New Roman", "sans-serif"'>`,
+ },
+ {
+ "styleFontNameUnquoted",
+ `<p style='font-family: {{"Times New Roman"}}'>`,
+ `<p style='font-family: Times New Roman'>`,
+ },
+ {
+ "styleURLQueryEncoded",
+ `<p style="background: url(/img?name={{"O'Reilly Animal(1)<2>.png"}})">`,
+ `<p style="background: url(/img?name=O%27Reilly%20Animal%281%29%3c2%3e.png)">`,
+ },
+ {
+ "styleQuotedURLQueryEncoded",
+ `<p style="background: url('/img?name={{"O'Reilly Animal(1)<2>.png"}}')">`,
+ `<p style="background: url('/img?name=O%27Reilly%20Animal%281%29%3c2%3e.png')">`,
+ },
+ {
+ "styleStrQueryEncoded",
+ `<p style="background: '/img?name={{"O'Reilly Animal(1)<2>.png"}}'">`,
+ `<p style="background: '/img?name=O%27Reilly%20Animal%281%29%3c2%3e.png'">`,
+ },
+ {
+ "styleURLBadProtocolBlocked",
+ `<a style="background: url('{{"javascript:alert(1337)"}}')">`,
+ `<a style="background: url('#ZgotmplZ')">`,
+ },
+ {
+ "styleStrBadProtocolBlocked",
+ `<a style="background: '{{"vbscript:alert(1337)"}}'">`,
+ `<a style="background: '#ZgotmplZ'">`,
+ },
+ {
+ "styleStrEncodedProtocolEncoded",
+ `<a style="background: '{{"javascript\\3a alert(1337)"}}'">`,
+ // The CSS string 'javascript\\3a alert(1337)' does not contain a colon.
+ `<a style="background: 'javascript\\3a alert\28 1337\29 '">`,
+ },
+ {
+ "styleURLGoodProtocolPassed",
+ `<a style="background: url('{{"http://oreilly.com/O'Reilly Animals(1)<2>;{}.html"}}')">`,
+ `<a style="background: url('http://oreilly.com/O%27Reilly%20Animals%281%29%3c2%3e;%7b%7d.html')">`,
+ },
+ {
+ "styleStrGoodProtocolPassed",
+ `<a style="background: '{{"http://oreilly.com/O'Reilly Animals(1)<2>;{}.html"}}'">`,
+ `<a style="background: 'http\3a\2f\2foreilly.com\2fO\27Reilly Animals\28 1\29\3c 2\3e\3b\7b\7d.html'">`,
+ },
+ {
+ "styleURLEncodedForHTMLInAttr",
+ `<a style="background: url('{{"/search?img=foo&size=icon"}}')">`,
+ `<a style="background: url('/search?img=foo&amp;size=icon')">`,
+ },
+ {
+ "styleURLNotEncodedForHTMLInCdata",
+ `<style>body { background: url('{{"/search?img=foo&size=icon"}}') }</style>`,
+ `<style>body { background: url('/search?img=foo&size=icon') }</style>`,
+ },
+ {
+ "styleURLMixedCase",
+ `<p style="background: URL(#{{.H}})">`,
+ `<p style="background: URL(#%3cHello%3e)">`,
+ },
+ {
+ "stylePropertyPairPassed",
+ `<a style='{{"color: red"}}'>`,
+ `<a style='color: red'>`,
+ },
+ {
+ "styleStrSpecialsEncoded",
+ `<a style="font-family: '{{"/**/'\";:// \\"}}', &quot;{{"/**/'\";:// \\"}}&quot;">`,
+ `<a style="font-family: '\2f**\2f\27\22\3b\3a\2f\2f \\', &quot;\2f**\2f\27\22\3b\3a\2f\2f \\&quot;">`,
+ },
+ {
+ "styleURLSpecialsEncoded",
+ `<a style="border-image: url({{"/**/'\";:// \\"}}), url(&quot;{{"/**/'\";:// \\"}}&quot;), url('{{"/**/'\";:// \\"}}'), 'http://www.example.com/?q={{"/**/'\";:// \\"}}''">`,
+ `<a style="border-image: url(/**/%27%22;://%20%5c), url(&quot;/**/%27%22;://%20%5c&quot;), url('/**/%27%22;://%20%5c'), 'http://www.example.com/?q=%2f%2a%2a%2f%27%22%3b%3a%2f%2f%20%5c''">`,
+ },
+ {
+ "HTML comment",
+ "<b>Hello, <!-- name of world -->{{.C}}</b>",
+ "<b>Hello, &lt;Cincinnati&gt;</b>",
+ },
+ {
+ "HTML comment not first < in text node.",
+ "<<!-- -->!--",
+ "&lt;!--",
+ },
+ {
+ "HTML normalization 1",
+ "a < b",
+ "a &lt; b",
+ },
+ {
+ "HTML normalization 2",
+ "a << b",
+ "a &lt;&lt; b",
+ },
+ {
+ "HTML normalization 3",
+ "a<<!-- --><!-- -->b",
+ "a&lt;b",
+ },
+ {
+ "HTML doctype not normalized",
+ "<!DOCTYPE html>Hello, World!",
+ "<!DOCTYPE html>Hello, World!",
+ },
+ {
+ "HTML doctype not case-insensitive",
+ "<!doCtYPE htMl>Hello, World!",
+ "<!doCtYPE htMl>Hello, World!",
+ },
+ {
+ "No doctype injection",
+ `<!{{"DOCTYPE"}}`,
+ "&lt;!DOCTYPE",
+ },
+ {
+ "Split HTML comment",
+ "<b>Hello, <!-- name of {{if .T}}city -->{{.C}}{{else}}world -->{{.W}}{{end}}</b>",
+ "<b>Hello, &lt;Cincinnati&gt;</b>",
+ },
+ {
+ "JS line comment",
+ "<script>for (;;) { if (c()) break// foo not a label\n" +
+ "foo({{.T}});}</script>",
+ "<script>for (;;) { if (c()) break\n" +
+ "foo( true );}</script>",
+ },
+ {
+ "JS multiline block comment",
+ "<script>for (;;) { if (c()) break/* foo not a label\n" +
+ " */foo({{.T}});}</script>",
+ // Newline separates break from call. If newline
+ // removed, then break will consume label leaving
+ // code invalid.
+ "<script>for (;;) { if (c()) break\n" +
+ "foo( true );}</script>",
+ },
+ {
+ "JS single-line block comment",
+ "<script>for (;;) {\n" +
+ "if (c()) break/* foo a label */foo;" +
+ "x({{.T}});}</script>",
+ // Newline separates break from call. If newline
+ // removed, then break will consume label leaving
+ // code invalid.
+ "<script>for (;;) {\n" +
+ "if (c()) break foo;" +
+ "x( true );}</script>",
+ },
+ {
+ "JS block comment flush with mathematical division",
+ "<script>var a/*b*//c\nd</script>",
+ "<script>var a /c\nd</script>",
+ },
+ {
+ "JS mixed comments",
+ "<script>var a/*b*///c\nd</script>",
+ "<script>var a \nd</script>",
+ },
+ {
+ "CSS comments",
+ "<style>p// paragraph\n" +
+ `{border: 1px/* color */{{"#00f"}}}</style>`,
+ "<style>p\n" +
+ "{border: 1px #00f}</style>",
+ },
+ {
+ "JS attr block comment",
+ `<a onclick="f(&quot;&quot;); /* alert({{.H}}) */">`,
+ // Attribute comment tests should pass if the comments
+ // are successfully elided.
+ `<a onclick="f(&quot;&quot;); /* alert() */">`,
+ },
+ {
+ "JS attr line comment",
+ `<a onclick="// alert({{.G}})">`,
+ `<a onclick="// alert()">`,
+ },
+ {
+ "CSS attr block comment",
+ `<a style="/* color: {{.H}} */">`,
+ `<a style="/* color: */">`,
+ },
+ {
+ "CSS attr line comment",
+ `<a style="// color: {{.G}}">`,
+ `<a style="// color: ">`,
+ },
+ {
+ "HTML substitution commented out",
+ "<p><!-- {{.H}} --></p>",
+ "<p></p>",
+ },
+ {
+ "Comment ends flush with start",
+ "<!--{{.}}--><script>/*{{.}}*///{{.}}\n</script><style>/*{{.}}*///{{.}}\n</style><a onclick='/*{{.}}*///{{.}}' style='/*{{.}}*///{{.}}'>",
+ "<script> \n</script><style> \n</style><a onclick='/**///' style='/**///'>",
+ },
+ {
+ "typed HTML in text",
+ `{{.W}}`,
+ `&iexcl;<b class="foo">Hello</b>, <textarea>O'World</textarea>!`,
+ },
+ {
+ "typed HTML in attribute",
+ `<div title="{{.W}}">`,
+ `<div title="&iexcl;Hello, O&#39;World!">`,
+ },
+ {
+ "typed HTML in script",
+ `<button onclick="alert({{.W}})">`,
+ `<button onclick="alert(&#34;\u0026iexcl;\u003cb class=\&#34;foo\&#34;\u003eHello\u003c/b\u003e, \u003ctextarea\u003eO&#39;World\u003c/textarea\u003e!&#34;)">`,
+ },
+ {
+ "typed HTML in RCDATA",
+ `<textarea>{{.W}}</textarea>`,
+ `<textarea>&iexcl;&lt;b class=&#34;foo&#34;&gt;Hello&lt;/b&gt;, &lt;textarea&gt;O&#39;World&lt;/textarea&gt;!</textarea>`,
+ },
+ {
+ "range in textarea",
+ "<textarea>{{range .A}}{{.}}{{end}}</textarea>",
+ "<textarea>&lt;a&gt;&lt;b&gt;</textarea>",
+ },
+ {
+ "No tag injection",
+ `{{"10$"}}<{{"script src,evil.org/pwnd.js"}}...`,
+ `10$&lt;script src,evil.org/pwnd.js...`,
+ },
+ {
+ "No comment injection",
+ `<{{"!--"}}`,
+ `&lt;!--`,
+ },
+ {
+ "No RCDATA end tag injection",
+ `<textarea><{{"/textarea "}}...</textarea>`,
+ `<textarea>&lt;/textarea ...</textarea>`,
+ },
+ {
+ "optional attrs",
+ `<img class="{{"iconClass"}}"` +
+ `{{if .T}} id="{{"<iconId>"}}"{{end}}` +
+ // Double quotes inside if/else.
+ ` src=` +
+ `{{if .T}}"?{{"<iconPath>"}}"` +
+ `{{else}}"images/cleardot.gif"{{end}}` +
+ // Missing space before title, but it is not a
+ // part of the src attribute.
+ `{{if .T}}title="{{"<title>"}}"{{end}}` +
+ // Quotes outside if/else.
+ ` alt="` +
+ `{{if .T}}{{"<alt>"}}` +
+ `{{else}}{{if .F}}{{"<title>"}}{{end}}` +
+ `{{end}}"` +
+ `>`,
+ `<img class="iconClass" id="&lt;iconId&gt;" src="?%3ciconPath%3e"title="&lt;title&gt;" alt="&lt;alt&gt;">`,
+ },
+ {
+ "conditional valueless attr name",
+ `<input{{if .T}} checked{{end}} name=n>`,
+ `<input checked name=n>`,
+ },
+ {
+ "conditional dynamic valueless attr name 1",
+ `<input{{if .T}} {{"checked"}}{{end}} name=n>`,
+ `<input checked name=n>`,
+ },
+ {
+ "conditional dynamic valueless attr name 2",
+ `<input {{if .T}}{{"checked"}} {{end}}name=n>`,
+ `<input checked name=n>`,
+ },
+ {
+ "dynamic attribute name",
+ `<img on{{"load"}}="alert({{"loaded"}})">`,
+ // Treated as JS since quotes are inserted.
+ `<img onload="alert(&#34;loaded&#34;)">`,
+ },
+ {
+ "bad dynamic attribute name 1",
+ // Allow checked, selected, disabled, but not JS or
+ // CSS attributes.
+ `<input {{"onchange"}}="{{"doEvil()"}}">`,
+ `<input ZgotmplZ="doEvil()">`,
+ },
+ {
+ "bad dynamic attribute name 2",
+ `<div {{"sTyle"}}="{{"color: expression(alert(1337))"}}">`,
+ `<div ZgotmplZ="color: expression(alert(1337))">`,
+ },
+ {
+ "bad dynamic attribute name 3",
+ // Allow title or alt, but not a URL.
+ `<img {{"src"}}="{{"javascript:doEvil()"}}">`,
+ `<img ZgotmplZ="javascript:doEvil()">`,
+ },
+ {
+ "bad dynamic attribute name 4",
+ // Structure preservation requires values to associate
+ // with a consistent attribute.
+ `<input checked {{""}}="Whose value am I?">`,
+ `<input checked ZgotmplZ="Whose value am I?">`,
+ },
+ {
+ "dynamic element name",
+ `<h{{3}}><table><t{{"head"}}>...</h{{3}}>`,
+ `<h3><table><thead>...</h3>`,
+ },
+ {
+ "bad dynamic element name",
+ // Dynamic element names are typically used to switch
+ // between (thead, tfoot, tbody), (ul, ol), (th, td),
+ // and other replaceable sets.
+ // We do not currently easily support (ul, ol).
+ // If we do change to support that, this test should
+ // catch failures to filter out special tag names which
+ // would violate the structure preservation property --
+ // if any special tag name could be substituted, then
+ // the content could be raw text/RCDATA for some inputs
+ // and regular HTML content for others.
+ `<{{"script"}}>{{"doEvil()"}}</{{"script"}}>`,
+ `&lt;script>doEvil()&lt;/script>`,
+ },
+ {
+ "srcset bad URL in second position",
+ `<img srcset="{{"/not-an-image#,javascript:alert(1)"}}">`,
+ // The second URL is also filtered.
+ `<img srcset="/not-an-image#,#ZgotmplZ">`,
+ },
+ {
+ "srcset buffer growth",
+ `<img srcset={{",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"}}>`,
+ `<img srcset=,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,>`,
+ },
+ }
+
+ for _, test := range tests {
+ tmpl := New(test.name)
+ tmpl = Must(tmpl.Parse(test.input))
+ // Check for bug 6459: Tree field was not set in Parse.
+ if tmpl.Tree != tmpl.text.Tree {
+ t.Errorf("%s: tree not set properly", test.name)
+ continue
+ }
+ b := new(bytes.Buffer)
+ if err := tmpl.Execute(b, data); err != nil {
+ t.Errorf("%s: template execution failed: %s", test.name, err)
+ continue
+ }
+ if w, g := test.output, b.String(); w != g {
+ t.Errorf("%s: escaped output: want\n\t%q\ngot\n\t%q", test.name, w, g)
+ continue
+ }
+ b.Reset()
+ if err := tmpl.Execute(b, pdata); err != nil {
+ t.Errorf("%s: template execution failed for pointer: %s", test.name, err)
+ continue
+ }
+ if w, g := test.output, b.String(); w != g {
+ t.Errorf("%s: escaped output for pointer: want\n\t%q\ngot\n\t%q", test.name, w, g)
+ continue
+ }
+ if tmpl.Tree != tmpl.text.Tree {
+ t.Errorf("%s: tree mismatch", test.name)
+ continue
+ }
+ }
+}
+
+func TestEscapeMap(t *testing.T) {
+ data := map[string]string{
+ "html": `<h1>Hi!</h1>`,
+ "urlquery": `http://www.foo.com/index.html?title=main`,
+ }
+ for _, test := range [...]struct {
+ desc, input, output string
+ }{
+ // covering issue 20323
+ {
+ "field with predefined escaper name 1",
+ `{{.html | print}}`,
+ `&lt;h1&gt;Hi!&lt;/h1&gt;`,
+ },
+ // covering issue 20323
+ {
+ "field with predefined escaper name 2",
+ `{{.urlquery | print}}`,
+ `http://www.foo.com/index.html?title=main`,
+ },
+ } {
+ tmpl := Must(New("").Parse(test.input))
+ b := new(bytes.Buffer)
+ if err := tmpl.Execute(b, data); err != nil {
+ t.Errorf("%s: template execution failed: %s", test.desc, err)
+ continue
+ }
+ if w, g := test.output, b.String(); w != g {
+ t.Errorf("%s: escaped output: want\n\t%q\ngot\n\t%q", test.desc, w, g)
+ continue
+ }
+ }
+}
+
+func TestEscapeSet(t *testing.T) {
+ type dataItem struct {
+ Children []*dataItem
+ X string
+ }
+
+ data := dataItem{
+ Children: []*dataItem{
+ {X: "foo"},
+ {X: "<bar>"},
+ {
+ Children: []*dataItem{
+ {X: "baz"},
+ },
+ },
+ },
+ }
+
+ tests := []struct {
+ inputs map[string]string
+ want string
+ }{
+ // The trivial set.
+ {
+ map[string]string{
+ "main": ``,
+ },
+ ``,
+ },
+ // A template called in the start context.
+ {
+ map[string]string{
+ "main": `Hello, {{template "helper"}}!`,
+ // Not a valid top level HTML template.
+ // "<b" is not a full tag.
+ "helper": `{{"<World>"}}`,
+ },
+ `Hello, &lt;World&gt;!`,
+ },
+ // A template called in a context other than the start.
+ {
+ map[string]string{
+ "main": `<a onclick='a = {{template "helper"}};'>`,
+ // Not a valid top level HTML template.
+ // "<b" is not a full tag.
+ "helper": `{{"<a>"}}<b`,
+ },
+ `<a onclick='a = &#34;\u003ca\u003e&#34;<b;'>`,
+ },
+ // A recursive template that ends in its start context.
+ {
+ map[string]string{
+ "main": `{{range .Children}}{{template "main" .}}{{else}}{{.X}} {{end}}`,
+ },
+ `foo &lt;bar&gt; baz `,
+ },
+ // A recursive helper template that ends in its start context.
+ {
+ map[string]string{
+ "main": `{{template "helper" .}}`,
+ "helper": `{{if .Children}}<ul>{{range .Children}}<li>{{template "main" .}}</li>{{end}}</ul>{{else}}{{.X}}{{end}}`,
+ },
+ `<ul><li>foo</li><li>&lt;bar&gt;</li><li><ul><li>baz</li></ul></li></ul>`,
+ },
+ // Co-recursive templates that end in its start context.
+ {
+ map[string]string{
+ "main": `<blockquote>{{range .Children}}{{template "helper" .}}{{end}}</blockquote>`,
+ "helper": `{{if .Children}}{{template "main" .}}{{else}}{{.X}}<br>{{end}}`,
+ },
+ `<blockquote>foo<br>&lt;bar&gt;<br><blockquote>baz<br></blockquote></blockquote>`,
+ },
+ // A template that is called in two different contexts.
+ {
+ map[string]string{
+ "main": `<button onclick="title='{{template "helper"}}'; ...">{{template "helper"}}</button>`,
+ "helper": `{{11}} of {{"<100>"}}`,
+ },
+ `<button onclick="title='11 of \u003c100\u003e'; ...">11 of &lt;100&gt;</button>`,
+ },
+ // A non-recursive template that ends in a different context.
+ // helper starts in jsCtxRegexp and ends in jsCtxDivOp.
+ {
+ map[string]string{
+ "main": `<script>var x={{template "helper"}}/{{"42"}};</script>`,
+ "helper": "{{126}}",
+ },
+ `<script>var x= 126 /"42";</script>`,
+ },
+ // A recursive template that ends in a similar context.
+ {
+ map[string]string{
+ "main": `<script>var x=[{{template "countdown" 4}}];</script>`,
+ "countdown": `{{.}}{{if .}},{{template "countdown" . | pred}}{{end}}`,
+ },
+ `<script>var x=[ 4 , 3 , 2 , 1 , 0 ];</script>`,
+ },
+ // A recursive template that ends in a different context.
+ /*
+ {
+ map[string]string{
+ "main": `<a href="/foo{{template "helper" .}}">`,
+ "helper": `{{if .Children}}{{range .Children}}{{template "helper" .}}{{end}}{{else}}?x={{.X}}{{end}}`,
+ },
+ `<a href="/foo?x=foo?x=%3cbar%3e?x=baz">`,
+ },
+ */
+ }
+
+ // pred is a template function that returns the predecessor of a
+ // natural number for testing recursive templates.
+ fns := FuncMap{"pred": func(a ...any) (any, error) {
+ if len(a) == 1 {
+ if i, _ := a[0].(int); i > 0 {
+ return i - 1, nil
+ }
+ }
+ return nil, fmt.Errorf("undefined pred(%v)", a)
+ }}
+
+ for _, test := range tests {
+ source := ""
+ for name, body := range test.inputs {
+ source += fmt.Sprintf("{{define %q}}%s{{end}} ", name, body)
+ }
+ tmpl, err := New("root").Funcs(fns).Parse(source)
+ if err != nil {
+ t.Errorf("error parsing %q: %v", source, err)
+ continue
+ }
+ var b bytes.Buffer
+
+ if err := tmpl.ExecuteTemplate(&b, "main", data); err != nil {
+ t.Errorf("%q executing %v", err.Error(), tmpl.Lookup("main"))
+ continue
+ }
+ if got := b.String(); test.want != got {
+ t.Errorf("want\n\t%q\ngot\n\t%q", test.want, got)
+ }
+ }
+
+}
+
+func TestErrors(t *testing.T) {
+ tests := []struct {
+ input string
+ err string
+ }{
+ // Non-error cases.
+ {
+ "{{if .Cond}}<a>{{else}}<b>{{end}}",
+ "",
+ },
+ {
+ "{{if .Cond}}<a>{{end}}",
+ "",
+ },
+ {
+ "{{if .Cond}}{{else}}<b>{{end}}",
+ "",
+ },
+ {
+ "{{with .Cond}}<div>{{end}}",
+ "",
+ },
+ {
+ "{{range .Items}}<a>{{end}}",
+ "",
+ },
+ {
+ "<a href='/foo?{{range .Items}}&{{.K}}={{.V}}{{end}}'>",
+ "",
+ },
+ {
+ "{{range .Items}}<a{{if .X}}{{end}}>{{end}}",
+ "",
+ },
+ {
+ "{{range .Items}}<a{{if .X}}{{end}}>{{continue}}{{end}}",
+ "",
+ },
+ {
+ "{{range .Items}}<a{{if .X}}{{end}}>{{break}}{{end}}",
+ "",
+ },
+ {
+ "{{range .Items}}<a{{if .X}}{{end}}>{{if .X}}{{break}}{{end}}{{end}}",
+ "",
+ },
+ // Error cases.
+ {
+ "{{if .Cond}}<a{{end}}",
+ "z:1:5: {{if}} branches",
+ },
+ {
+ "{{if .Cond}}\n{{else}}\n<a{{end}}",
+ "z:1:5: {{if}} branches",
+ },
+ {
+ // Missing quote in the else branch.
+ `{{if .Cond}}<a href="foo">{{else}}<a href="bar>{{end}}`,
+ "z:1:5: {{if}} branches",
+ },
+ {
+ // Different kind of attribute: href implies a URL.
+ "<a {{if .Cond}}href='{{else}}title='{{end}}{{.X}}'>",
+ "z:1:8: {{if}} branches",
+ },
+ {
+ "\n{{with .X}}<a{{end}}",
+ "z:2:7: {{with}} branches",
+ },
+ {
+ "\n{{with .X}}<a>{{else}}<a{{end}}",
+ "z:2:7: {{with}} branches",
+ },
+ {
+ "{{range .Items}}<a{{end}}",
+ `z:1: on range loop re-entry: "<" in attribute name: "<a"`,
+ },
+ {
+ "\n{{range .Items}} x='<a{{end}}",
+ "z:2:8: on range loop re-entry: {{range}} branches",
+ },
+ {
+ "{{range .Items}}<a{{if .X}}{{break}}{{end}}>{{end}}",
+ "z:1:29: at range loop break: {{range}} branches end in different contexts",
+ },
+ {
+ "{{range .Items}}<a{{if .X}}{{continue}}{{end}}>{{end}}",
+ "z:1:29: at range loop continue: {{range}} branches end in different contexts",
+ },
+ {
+ "<a b=1 c={{.H}}",
+ "z: ends in a non-text context: {stateAttr delimSpaceOrTagEnd",
+ },
+ {
+ "<script>foo();",
+ "z: ends in a non-text context: {stateJS",
+ },
+ {
+ `<a href="{{if .F}}/foo?a={{else}}/bar/{{end}}{{.H}}">`,
+ "z:1:47: {{.H}} appears in an ambiguous context within a URL",
+ },
+ {
+ `<a onclick="alert('Hello \`,
+ `unfinished escape sequence in JS string: "Hello \\"`,
+ },
+ {
+ `<a onclick='alert("Hello\, World\`,
+ `unfinished escape sequence in JS string: "Hello\\, World\\"`,
+ },
+ {
+ `<a onclick='alert(/x+\`,
+ `unfinished escape sequence in JS string: "x+\\"`,
+ },
+ {
+ `<a onclick="/foo[\]/`,
+ `unfinished JS regexp charset: "foo[\\]/"`,
+ },
+ {
+ // It is ambiguous whether 1.5 should be 1\.5 or 1.5.
+ // Either `var x = 1/- 1.5 /i.test(x)`
+ // where `i.test(x)` is a method call of reference i,
+ // or `/-1\.5/i.test(x)` which is a method call on a
+ // case insensitive regular expression.
+ `<script>{{if false}}var x = 1{{end}}/-{{"1.5"}}/i.test(x)</script>`,
+ `'/' could start a division or regexp: "/-"`,
+ },
+ {
+ `{{template "foo"}}`,
+ "z:1:11: no such template \"foo\"",
+ },
+ {
+ `<div{{template "y"}}>` +
+ // Illegal starting in stateTag but not in stateText.
+ `{{define "y"}} foo<b{{end}}`,
+ `"<" in attribute name: " foo<b"`,
+ },
+ {
+ `<script>reverseList = [{{template "t"}}]</script>` +
+ // Missing " after recursive call.
+ `{{define "t"}}{{if .Tail}}{{template "t" .Tail}}{{end}}{{.Head}}",{{end}}`,
+ `: cannot compute output context for template t$htmltemplate_stateJS_elementScript`,
+ },
+ {
+ `<input type=button value=onclick=>`,
+ `html/template:z: "=" in unquoted attr: "onclick="`,
+ },
+ {
+ `<input type=button value= onclick=>`,
+ `html/template:z: "=" in unquoted attr: "onclick="`,
+ },
+ {
+ `<input type=button value= 1+1=2>`,
+ `html/template:z: "=" in unquoted attr: "1+1=2"`,
+ },
+ {
+ "<a class=`foo>",
+ "html/template:z: \"`\" in unquoted attr: \"`foo\"",
+ },
+ {
+ `<a style=font:'Arial'>`,
+ `html/template:z: "'" in unquoted attr: "font:'Arial'"`,
+ },
+ {
+ `<a=foo>`,
+ `: expected space, attr name, or end of tag, but got "=foo>"`,
+ },
+ {
+ `Hello, {{. | urlquery | print}}!`,
+ // urlquery is disallowed if it is not the last command in the pipeline.
+ `predefined escaper "urlquery" disallowed in template`,
+ },
+ {
+ `Hello, {{. | html | print}}!`,
+ // html is disallowed if it is not the last command in the pipeline.
+ `predefined escaper "html" disallowed in template`,
+ },
+ {
+ `Hello, {{html . | print}}!`,
+ // A direct call to html is disallowed if it is not the last command in the pipeline.
+ `predefined escaper "html" disallowed in template`,
+ },
+ {
+ `<div class={{. | html}}>Hello<div>`,
+ // html is disallowed in a pipeline that is in an unquoted attribute context,
+ // even if it is the last command in the pipeline.
+ `predefined escaper "html" disallowed in template`,
+ },
+ {
+ `Hello, {{. | urlquery | html}}!`,
+ // html is allowed since it is the last command in the pipeline, but urlquery is not.
+ `predefined escaper "urlquery" disallowed in template`,
+ },
+ }
+ for _, test := range tests {
+ buf := new(bytes.Buffer)
+ tmpl, err := New("z").Parse(test.input)
+ if err != nil {
+ t.Errorf("input=%q: unexpected parse error %s\n", test.input, err)
+ continue
+ }
+ err = tmpl.Execute(buf, nil)
+ var got string
+ if err != nil {
+ got = err.Error()
+ }
+ if test.err == "" {
+ if got != "" {
+ t.Errorf("input=%q: unexpected error %q", test.input, got)
+ }
+ continue
+ }
+ if !strings.Contains(got, test.err) {
+ t.Errorf("input=%q: error\n\t%q\ndoes not contain expected string\n\t%q", test.input, got, test.err)
+ continue
+ }
+ // Check that we get the same error if we call Execute again.
+ if err := tmpl.Execute(buf, nil); err == nil || err.Error() != got {
+ t.Errorf("input=%q: unexpected error on second call %q", test.input, err)
+
+ }
+ }
+}
+
+func TestEscapeText(t *testing.T) {
+ tests := []struct {
+ input string
+ output context
+ }{
+ {
+ ``,
+ context{},
+ },
+ {
+ `Hello, World!`,
+ context{},
+ },
+ {
+ // An orphaned "<" is OK.
+ `I <3 Ponies!`,
+ context{},
+ },
+ {
+ `<a`,
+ context{state: stateTag},
+ },
+ {
+ `<a `,
+ context{state: stateTag},
+ },
+ {
+ `<a>`,
+ context{state: stateText},
+ },
+ {
+ `<a href`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a on`,
+ context{state: stateAttrName, attr: attrScript},
+ },
+ {
+ `<a href `,
+ context{state: stateAfterName, attr: attrURL},
+ },
+ {
+ `<a style = `,
+ context{state: stateBeforeValue, attr: attrStyle},
+ },
+ {
+ `<a href=`,
+ context{state: stateBeforeValue, attr: attrURL},
+ },
+ {
+ `<a href=x`,
+ context{state: stateURL, delim: delimSpaceOrTagEnd, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href=x `,
+ context{state: stateTag},
+ },
+ {
+ `<a href=>`,
+ context{state: stateText},
+ },
+ {
+ `<a href=x>`,
+ context{state: stateText},
+ },
+ {
+ `<a href ='`,
+ context{state: stateURL, delim: delimSingleQuote, attr: attrURL},
+ },
+ {
+ `<a href=''`,
+ context{state: stateTag},
+ },
+ {
+ `<a href= "`,
+ context{state: stateURL, delim: delimDoubleQuote, attr: attrURL},
+ },
+ {
+ `<a href=""`,
+ context{state: stateTag},
+ },
+ {
+ `<a title="`,
+ context{state: stateAttr, delim: delimDoubleQuote},
+ },
+ {
+ `<a HREF='http:`,
+ context{state: stateURL, delim: delimSingleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a Href='/`,
+ context{state: stateURL, delim: delimSingleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href='"`,
+ context{state: stateURL, delim: delimSingleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href="'`,
+ context{state: stateURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href='&apos;`,
+ context{state: stateURL, delim: delimSingleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href="&quot;`,
+ context{state: stateURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href="&#34;`,
+ context{state: stateURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<a href=&quot;`,
+ context{state: stateURL, delim: delimSpaceOrTagEnd, urlPart: urlPartPreQuery, attr: attrURL},
+ },
+ {
+ `<img alt="1">`,
+ context{state: stateText},
+ },
+ {
+ `<img alt="1>"`,
+ context{state: stateTag},
+ },
+ {
+ `<img alt="1>">`,
+ context{state: stateText},
+ },
+ {
+ `<input checked type="checkbox"`,
+ context{state: stateTag},
+ },
+ {
+ `<a onclick="`,
+ context{state: stateJS, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="//foo`,
+ context{state: stateJSLineCmt, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ "<a onclick='//\n",
+ context{state: stateJS, delim: delimSingleQuote, attr: attrScript},
+ },
+ {
+ "<a onclick='//\r\n",
+ context{state: stateJS, delim: delimSingleQuote, attr: attrScript},
+ },
+ {
+ "<a onclick='//\u2028",
+ context{state: stateJS, delim: delimSingleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/*`,
+ context{state: stateJSBlockCmt, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/*/`,
+ context{state: stateJSBlockCmt, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/**/`,
+ context{state: stateJS, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onkeypress="&quot;`,
+ context{state: stateJSDqStr, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick='&quot;foo&quot;`,
+ context{state: stateJS, delim: delimSingleQuote, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<a onclick=&#39;foo&#39;`,
+ context{state: stateJS, delim: delimSpaceOrTagEnd, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<a onclick=&#39;foo`,
+ context{state: stateJSSqStr, delim: delimSpaceOrTagEnd, attr: attrScript},
+ },
+ {
+ `<a onclick="&quot;foo'`,
+ context{state: stateJSDqStr, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="'foo&quot;`,
+ context{state: stateJSSqStr, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<A ONCLICK="'`,
+ context{state: stateJSSqStr, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/`,
+ context{state: stateJSRegexp, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="'foo'`,
+ context{state: stateJS, delim: delimDoubleQuote, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<a onclick="'foo\'`,
+ context{state: stateJSSqStr, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="'foo\'`,
+ context{state: stateJSSqStr, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/foo/`,
+ context{state: stateJS, delim: delimDoubleQuote, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<script>/foo/ /=`,
+ context{state: stateJS, element: elementScript},
+ },
+ {
+ `<a onclick="1 /foo`,
+ context{state: stateJS, delim: delimDoubleQuote, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<a onclick="1 /*c*/ /foo`,
+ context{state: stateJS, delim: delimDoubleQuote, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<a onclick="/foo[/]`,
+ context{state: stateJSRegexp, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/foo\/`,
+ context{state: stateJSRegexp, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<a onclick="/foo/`,
+ context{state: stateJS, delim: delimDoubleQuote, jsCtx: jsCtxDivOp, attr: attrScript},
+ },
+ {
+ `<input checked style="`,
+ context{state: stateCSS, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="//`,
+ context{state: stateCSSLineCmt, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="//</script>`,
+ context{state: stateCSSLineCmt, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ "<a style='//\n",
+ context{state: stateCSS, delim: delimSingleQuote, attr: attrStyle},
+ },
+ {
+ "<a style='//\r",
+ context{state: stateCSS, delim: delimSingleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="/*`,
+ context{state: stateCSSBlockCmt, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="/*/`,
+ context{state: stateCSSBlockCmt, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="/**/`,
+ context{state: stateCSS, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="background: '`,
+ context{state: stateCSSSqStr, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="background: &quot;`,
+ context{state: stateCSSDqStr, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="background: '/foo?img=`,
+ context{state: stateCSSSqStr, delim: delimDoubleQuote, urlPart: urlPartQueryOrFrag, attr: attrStyle},
+ },
+ {
+ `<a style="background: '/`,
+ context{state: stateCSSSqStr, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrStyle},
+ },
+ {
+ `<a style="background: url(&#x22;/`,
+ context{state: stateCSSDqURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrStyle},
+ },
+ {
+ `<a style="background: url('/`,
+ context{state: stateCSSSqURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrStyle},
+ },
+ {
+ `<a style="background: url('/)`,
+ context{state: stateCSSSqURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrStyle},
+ },
+ {
+ `<a style="background: url('/ `,
+ context{state: stateCSSSqURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrStyle},
+ },
+ {
+ `<a style="background: url(/`,
+ context{state: stateCSSURL, delim: delimDoubleQuote, urlPart: urlPartPreQuery, attr: attrStyle},
+ },
+ {
+ `<a style="background: url( `,
+ context{state: stateCSSURL, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="background: url( /image?name=`,
+ context{state: stateCSSURL, delim: delimDoubleQuote, urlPart: urlPartQueryOrFrag, attr: attrStyle},
+ },
+ {
+ `<a style="background: url(x)`,
+ context{state: stateCSS, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="background: url('x'`,
+ context{state: stateCSS, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<a style="background: url( x `,
+ context{state: stateCSS, delim: delimDoubleQuote, attr: attrStyle},
+ },
+ {
+ `<!-- foo`,
+ context{state: stateHTMLCmt},
+ },
+ {
+ `<!-->`,
+ context{state: stateHTMLCmt},
+ },
+ {
+ `<!--->`,
+ context{state: stateHTMLCmt},
+ },
+ {
+ `<!-- foo -->`,
+ context{state: stateText},
+ },
+ {
+ `<script`,
+ context{state: stateTag, element: elementScript},
+ },
+ {
+ `<script `,
+ context{state: stateTag, element: elementScript},
+ },
+ {
+ `<script src="foo.js" `,
+ context{state: stateTag, element: elementScript},
+ },
+ {
+ `<script src='foo.js' `,
+ context{state: stateTag, element: elementScript},
+ },
+ {
+ `<script type=text/javascript `,
+ context{state: stateTag, element: elementScript},
+ },
+ {
+ `<script>`,
+ context{state: stateJS, jsCtx: jsCtxRegexp, element: elementScript},
+ },
+ {
+ `<script>foo`,
+ context{state: stateJS, jsCtx: jsCtxDivOp, element: elementScript},
+ },
+ {
+ `<script>foo</script>`,
+ context{state: stateText},
+ },
+ {
+ `<script>foo</script><!--`,
+ context{state: stateHTMLCmt},
+ },
+ {
+ `<script>document.write("<p>foo</p>");`,
+ context{state: stateJS, element: elementScript},
+ },
+ {
+ `<script>document.write("<p>foo<\/script>");`,
+ context{state: stateJS, element: elementScript},
+ },
+ {
+ `<script>document.write("<script>alert(1)</script>");`,
+ context{state: stateText},
+ },
+ {
+ `<script type="text/template">`,
+ context{state: stateText},
+ },
+ // covering issue 19968
+ {
+ `<script type="TEXT/JAVASCRIPT">`,
+ context{state: stateJS, element: elementScript},
+ },
+ // covering issue 19965
+ {
+ `<script TYPE="text/template">`,
+ context{state: stateText},
+ },
+ {
+ `<script type="notjs">`,
+ context{state: stateText},
+ },
+ {
+ `<Script>`,
+ context{state: stateJS, element: elementScript},
+ },
+ {
+ `<SCRIPT>foo`,
+ context{state: stateJS, jsCtx: jsCtxDivOp, element: elementScript},
+ },
+ {
+ `<textarea>value`,
+ context{state: stateRCDATA, element: elementTextarea},
+ },
+ {
+ `<textarea>value</TEXTAREA>`,
+ context{state: stateText},
+ },
+ {
+ `<textarea name=html><b`,
+ context{state: stateRCDATA, element: elementTextarea},
+ },
+ {
+ `<title>value`,
+ context{state: stateRCDATA, element: elementTitle},
+ },
+ {
+ `<style>value`,
+ context{state: stateCSS, element: elementStyle},
+ },
+ {
+ `<a xlink:href`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a xmlns`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a xmlns:foo`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a xmlnsxyz`,
+ context{state: stateAttrName},
+ },
+ {
+ `<a data-url`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a data-iconUri`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a data-urlItem`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a g:`,
+ context{state: stateAttrName},
+ },
+ {
+ `<a g:url`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a g:iconUri`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a g:urlItem`,
+ context{state: stateAttrName, attr: attrURL},
+ },
+ {
+ `<a g:value`,
+ context{state: stateAttrName},
+ },
+ {
+ `<a svg:style='`,
+ context{state: stateCSS, delim: delimSingleQuote, attr: attrStyle},
+ },
+ {
+ `<svg:font-face`,
+ context{state: stateTag},
+ },
+ {
+ `<svg:a svg:onclick="`,
+ context{state: stateJS, delim: delimDoubleQuote, attr: attrScript},
+ },
+ {
+ `<svg:a svg:onclick="x()">`,
+ context{},
+ },
+ }
+
+ for _, test := range tests {
+ b, e := []byte(test.input), makeEscaper(nil)
+ c := e.escapeText(context{}, &parse.TextNode{NodeType: parse.NodeText, Text: b})
+ if !test.output.eq(c) {
+ t.Errorf("input %q: want context\n\t%v\ngot\n\t%v", test.input, test.output, c)
+ continue
+ }
+ if test.input != string(b) {
+ t.Errorf("input %q: text node was modified: want %q got %q", test.input, test.input, b)
+ continue
+ }
+ }
+}
+
+func TestEnsurePipelineContains(t *testing.T) {
+ tests := []struct {
+ input, output string
+ ids []string
+ }{
+ {
+ "{{.X}}",
+ ".X",
+ []string{},
+ },
+ {
+ "{{.X | html}}",
+ ".X | html",
+ []string{},
+ },
+ {
+ "{{.X}}",
+ ".X | html",
+ []string{"html"},
+ },
+ {
+ "{{html .X}}",
+ "_eval_args_ .X | html | urlquery",
+ []string{"html", "urlquery"},
+ },
+ {
+ "{{html .X .Y .Z}}",
+ "_eval_args_ .X .Y .Z | html | urlquery",
+ []string{"html", "urlquery"},
+ },
+ {
+ "{{.X | print}}",
+ ".X | print | urlquery",
+ []string{"urlquery"},
+ },
+ {
+ "{{.X | print | urlquery}}",
+ ".X | print | urlquery",
+ []string{"urlquery"},
+ },
+ {
+ "{{.X | urlquery}}",
+ ".X | html | urlquery",
+ []string{"html", "urlquery"},
+ },
+ {
+ "{{.X | print 2 | .f 3}}",
+ ".X | print 2 | .f 3 | urlquery | html",
+ []string{"urlquery", "html"},
+ },
+ {
+ // covering issue 10801
+ "{{.X | println.x }}",
+ ".X | println.x | urlquery | html",
+ []string{"urlquery", "html"},
+ },
+ {
+ // covering issue 10801
+ "{{.X | (print 12 | println).x }}",
+ ".X | (print 12 | println).x | urlquery | html",
+ []string{"urlquery", "html"},
+ },
+ // The following test cases ensure that the merging of internal escapers
+ // with the predefined "html" and "urlquery" escapers is correct.
+ {
+ "{{.X | urlquery}}",
+ ".X | _html_template_urlfilter | urlquery",
+ []string{"_html_template_urlfilter", "_html_template_urlnormalizer"},
+ },
+ {
+ "{{.X | urlquery}}",
+ ".X | urlquery | _html_template_urlfilter | _html_template_cssescaper",
+ []string{"_html_template_urlfilter", "_html_template_cssescaper"},
+ },
+ {
+ "{{.X | urlquery}}",
+ ".X | urlquery",
+ []string{"_html_template_urlnormalizer"},
+ },
+ {
+ "{{.X | urlquery}}",
+ ".X | urlquery",
+ []string{"_html_template_urlescaper"},
+ },
+ {
+ "{{.X | html}}",
+ ".X | html",
+ []string{"_html_template_htmlescaper"},
+ },
+ {
+ "{{.X | html}}",
+ ".X | html",
+ []string{"_html_template_rcdataescaper"},
+ },
+ }
+ for i, test := range tests {
+ tmpl := template.Must(template.New("test").Parse(test.input))
+ action, ok := (tmpl.Tree.Root.Nodes[0].(*parse.ActionNode))
+ if !ok {
+ t.Errorf("First node is not an action: %s", test.input)
+ continue
+ }
+ pipe := action.Pipe
+ originalIDs := make([]string, len(test.ids))
+ copy(originalIDs, test.ids)
+ ensurePipelineContains(pipe, test.ids)
+ got := pipe.String()
+ if got != test.output {
+ t.Errorf("#%d: %s, %v: want\n\t%s\ngot\n\t%s", i, test.input, originalIDs, test.output, got)
+ }
+ }
+}
+
+func TestEscapeMalformedPipelines(t *testing.T) {
+ tests := []string{
+ "{{ 0 | $ }}",
+ "{{ 0 | $ | urlquery }}",
+ "{{ 0 | (nil) }}",
+ "{{ 0 | (nil) | html }}",
+ }
+ for _, test := range tests {
+ var b bytes.Buffer
+ tmpl, err := New("test").Parse(test)
+ if err != nil {
+ t.Errorf("failed to parse set: %q", err)
+ }
+ err = tmpl.Execute(&b, nil)
+ if err == nil {
+ t.Errorf("Expected error for %q", test)
+ }
+ }
+}
+
+func TestEscapeErrorsNotIgnorable(t *testing.T) {
+ var b bytes.Buffer
+ tmpl, _ := New("dangerous").Parse("<a")
+ err := tmpl.Execute(&b, nil)
+ if err == nil {
+ t.Errorf("Expected error")
+ } else if b.Len() != 0 {
+ t.Errorf("Emitted output despite escaping failure")
+ }
+}
+
+func TestEscapeSetErrorsNotIgnorable(t *testing.T) {
+ var b bytes.Buffer
+ tmpl, err := New("root").Parse(`{{define "t"}}<a{{end}}`)
+ if err != nil {
+ t.Errorf("failed to parse set: %q", err)
+ }
+ err = tmpl.ExecuteTemplate(&b, "t", nil)
+ if err == nil {
+ t.Errorf("Expected error")
+ } else if b.Len() != 0 {
+ t.Errorf("Emitted output despite escaping failure")
+ }
+}
+
+func TestRedundantFuncs(t *testing.T) {
+ inputs := []any{
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ ` !"#$%&'()*+,-./` +
+ `0123456789:;<=>?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\]^_` +
+ "`abcdefghijklmno" +
+ "pqrstuvwxyz{|}~\x7f" +
+ "\u00A0\u0100\u2028\u2029\ufeff\ufdec\ufffd\uffff\U0001D11E" +
+ "&amp;%22\\",
+ htmltemplate.CSS(`a[href =~ "//example.com"]#foo`),
+ htmltemplate.HTML(`Hello, <b>World</b> &amp;tc!`),
+ htmltemplate.HTMLAttr(` dir="ltr"`),
+ htmltemplate.JS(`c && alert("Hello, World!");`),
+ htmltemplate.JSStr(`Hello, World & O'Reilly\x21`),
+ htmltemplate.URL(`greeting=H%69&addressee=(World)`),
+ }
+
+ for n0, m := range redundantFuncs {
+ f0 := funcMap[n0].(func(...any) string)
+ for n1 := range m {
+ f1 := funcMap[n1].(func(...any) string)
+ for _, input := range inputs {
+ want := f0(input)
+ if got := f1(want); want != got {
+ t.Errorf("%s %s with %T %q: want\n\t%q,\ngot\n\t%q", n0, n1, input, input, want, got)
+ }
+ }
+ }
+ }
+}
+
+func TestIndirectPrint(t *testing.T) {
+ a := 3
+ ap := &a
+ b := "hello"
+ bp := &b
+ bpp := &bp
+ tmpl := Must(New("t").Parse(`{{.}}`))
+ var buf bytes.Buffer
+ err := tmpl.Execute(&buf, ap)
+ if err != nil {
+ t.Errorf("Unexpected error: %s", err)
+ } else if buf.String() != "3" {
+ t.Errorf(`Expected "3"; got %q`, buf.String())
+ }
+ buf.Reset()
+ err = tmpl.Execute(&buf, bpp)
+ if err != nil {
+ t.Errorf("Unexpected error: %s", err)
+ } else if buf.String() != "hello" {
+ t.Errorf(`Expected "hello"; got %q`, buf.String())
+ }
+}
+
+// This is a test for issue 3272.
+func TestEmptyTemplateHTML(t *testing.T) {
+ page := Must(New("page").ParseFiles(os.DevNull))
+ if err := page.ExecuteTemplate(os.Stdout, "page", "nothing"); err == nil {
+ t.Fatal("expected error")
+ }
+}
+
+type Issue7379 int
+
+func (Issue7379) SomeMethod(x int) string {
+ return fmt.Sprintf("<%d>", x)
+}
+
+// This is a test for issue 7379: type assertion error caused panic, and then
+// the code to handle the panic breaks escaping. It's hard to see the second
+// problem once the first is fixed, but its fix is trivial so we let that go. See
+// the discussion for issue 7379.
+func TestPipeToMethodIsEscaped(t *testing.T) {
+ tmpl := Must(New("x").Parse("<html>{{0 | .SomeMethod}}</html>\n"))
+ tryExec := func() string {
+ defer func() {
+ panicValue := recover()
+ if panicValue != nil {
+ t.Errorf("panicked: %v\n", panicValue)
+ }
+ }()
+ var b bytes.Buffer
+ tmpl.Execute(&b, Issue7379(0))
+ return b.String()
+ }
+ for i := 0; i < 3; i++ {
+ str := tryExec()
+ const expect = "<html>&lt;0&gt;</html>\n"
+ if str != expect {
+ t.Errorf("expected %q got %q", expect, str)
+ }
+ }
+}
+
+// Unlike text/template, html/template crashed if given an incomplete
+// template, that is, a template that had been named but not given any content.
+// This is issue #10204.
+func TestErrorOnUndefined(t *testing.T) {
+ tmpl := New("undefined")
+
+ err := tmpl.Execute(nil, nil)
+ if err == nil {
+ t.Error("expected error")
+ } else if !strings.Contains(err.Error(), "incomplete") {
+ t.Errorf("expected error about incomplete template; got %s", err)
+ }
+}
+
+// This covers issue #20842.
+func TestIdempotentExecute(t *testing.T) {
+ tmpl := Must(New("").
+ Parse(`{{define "main"}}<body>{{template "hello"}}</body>{{end}}`))
+ Must(tmpl.
+ Parse(`{{define "hello"}}Hello, {{"Ladies & Gentlemen!"}}{{end}}`))
+ got := new(bytes.Buffer)
+ var err error
+ // Ensure that "hello" produces the same output when executed twice.
+ want := "Hello, Ladies &amp; Gentlemen!"
+ for i := 0; i < 2; i++ {
+ err = tmpl.ExecuteTemplate(got, "hello", nil)
+ if err != nil {
+ t.Errorf("unexpected error: %s", err)
+ }
+ if got.String() != want {
+ t.Errorf("after executing template \"hello\", got:\n\t%q\nwant:\n\t%q\n", got.String(), want)
+ }
+ got.Reset()
+ }
+ // Ensure that the implicit re-execution of "hello" during the execution of
+ // "main" does not cause the output of "hello" to change.
+ err = tmpl.ExecuteTemplate(got, "main", nil)
+ if err != nil {
+ t.Errorf("unexpected error: %s", err)
+ }
+ // If the HTML escaper is added again to the action {{"Ladies & Gentlemen!"}},
+ // we would expected to see the ampersand overescaped to "&amp;amp;".
+ want = "<body>Hello, Ladies &amp; Gentlemen!</body>"
+ if got.String() != want {
+ t.Errorf("after executing template \"main\", got:\n\t%q\nwant:\n\t%q\n", got.String(), want)
+ }
+}
+
+func BenchmarkEscapedExecute(b *testing.B) {
+ tmpl := Must(New("t").Parse(`<a onclick="alert('{{.}}')">{{.}}</a>`))
+ var buf bytes.Buffer
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ tmpl.Execute(&buf, "foo & 'bar' & baz")
+ buf.Reset()
+ }
+}
+
+// Covers issue 22780.
+func TestOrphanedTemplate(t *testing.T) {
+ t1 := Must(New("foo").Parse(`<a href="{{.}}">link1</a>`))
+ t2 := Must(t1.New("foo").Parse(`bar`))
+
+ var b bytes.Buffer
+ const wantError = `template: "foo" is an incomplete or empty template`
+ if err := t1.Execute(&b, "javascript:alert(1)"); err == nil {
+ t.Fatal("expected error executing t1")
+ } else if gotError := err.Error(); gotError != wantError {
+ t.Fatalf("got t1 execution error:\n\t%s\nwant:\n\t%s", gotError, wantError)
+ }
+ b.Reset()
+ if err := t2.Execute(&b, nil); err != nil {
+ t.Fatalf("error executing t2: %s", err)
+ }
+ const want = "bar"
+ if got := b.String(); got != want {
+ t.Fatalf("t2 rendered %q, want %q", got, want)
+ }
+}
+
+// Covers issue 21844.
+func TestAliasedParseTreeDoesNotOverescape(t *testing.T) {
+ const (
+ tmplText = `{{.}}`
+ data = `<baz>`
+ want = `&lt;baz&gt;`
+ )
+ // Templates "foo" and "bar" both alias the same underlying parse tree.
+ tpl := Must(New("foo").Parse(tmplText))
+ if _, err := tpl.AddParseTree("bar", tpl.Tree); err != nil {
+ t.Fatalf("AddParseTree error: %v", err)
+ }
+ var b1, b2 bytes.Buffer
+ if err := tpl.ExecuteTemplate(&b1, "foo", data); err != nil {
+ t.Fatalf(`ExecuteTemplate failed for "foo": %v`, err)
+ }
+ if err := tpl.ExecuteTemplate(&b2, "bar", data); err != nil {
+ t.Fatalf(`ExecuteTemplate failed for "foo": %v`, err)
+ }
+ got1, got2 := b1.String(), b2.String()
+ if got1 != want {
+ t.Fatalf(`Template "foo" rendered %q, want %q`, got1, want)
+ }
+ if got1 != got2 {
+ t.Fatalf(`Template "foo" and "bar" rendered %q and %q respectively, expected equal values`, got1, got2)
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/example_test.go b/tpl/internal/go_templates/htmltemplate/example_test.go
new file mode 100644
index 000000000..6485c7cfb
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/example_test.go
@@ -0,0 +1,185 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "strings"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+)
+
+func Example() {
+ const tpl = `
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="UTF-8">
+ <title>{{.Title}}</title>
+ </head>
+ <body>
+ {{range .Items}}<div>{{ . }}</div>{{else}}<div><strong>no rows</strong></div>{{end}}
+ </body>
+</html>`
+
+ check := func(err error) {
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+ t, err := template.New("webpage").Parse(tpl)
+ check(err)
+
+ data := struct {
+ Title string
+ Items []string
+ }{
+ Title: "My page",
+ Items: []string{
+ "My photos",
+ "My blog",
+ },
+ }
+
+ err = t.Execute(os.Stdout, data)
+ check(err)
+
+ noItems := struct {
+ Title string
+ Items []string
+ }{
+ Title: "My another page",
+ Items: []string{},
+ }
+
+ err = t.Execute(os.Stdout, noItems)
+ check(err)
+
+ // Output:
+ // <!DOCTYPE html>
+ // <html>
+ // <head>
+ // <meta charset="UTF-8">
+ // <title>My page</title>
+ // </head>
+ // <body>
+ // <div>My photos</div><div>My blog</div>
+ // </body>
+ // </html>
+ // <!DOCTYPE html>
+ // <html>
+ // <head>
+ // <meta charset="UTF-8">
+ // <title>My another page</title>
+ // </head>
+ // <body>
+ // <div><strong>no rows</strong></div>
+ // </body>
+ // </html>
+
+}
+
+func Example_autoescaping() {
+ check := func(err error) {
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+ t, err := template.New("foo").Parse(`{{define "T"}}Hello, {{.}}!{{end}}`)
+ check(err)
+ err = t.ExecuteTemplate(os.Stdout, "T", "<script>alert('you have been pwned')</script>")
+ check(err)
+ // Output:
+ // Hello, &lt;script&gt;alert(&#39;you have been pwned&#39;)&lt;/script&gt;!
+}
+
+func Example_escape() {
+ const s = `"Fran & Freddie's Diner" <tasty@example.com>`
+ v := []any{`"Fran & Freddie's Diner"`, ' ', `<tasty@example.com>`}
+
+ fmt.Println(template.HTMLEscapeString(s))
+ template.HTMLEscape(os.Stdout, []byte(s))
+ fmt.Fprintln(os.Stdout, "")
+ fmt.Println(template.HTMLEscaper(v...))
+
+ fmt.Println(template.JSEscapeString(s))
+ template.JSEscape(os.Stdout, []byte(s))
+ fmt.Fprintln(os.Stdout, "")
+ fmt.Println(template.JSEscaper(v...))
+
+ fmt.Println(template.URLQueryEscaper(v...))
+
+ // Output:
+ // &#34;Fran &amp; Freddie&#39;s Diner&#34; &lt;tasty@example.com&gt;
+ // &#34;Fran &amp; Freddie&#39;s Diner&#34; &lt;tasty@example.com&gt;
+ // &#34;Fran &amp; Freddie&#39;s Diner&#34;32&lt;tasty@example.com&gt;
+ // \"Fran \u0026 Freddie\'s Diner\" \u003Ctasty@example.com\u003E
+ // \"Fran \u0026 Freddie\'s Diner\" \u003Ctasty@example.com\u003E
+ // \"Fran \u0026 Freddie\'s Diner\"32\u003Ctasty@example.com\u003E
+ // %22Fran+%26+Freddie%27s+Diner%2232%3Ctasty%40example.com%3E
+
+}
+
+func ExampleTemplate_Delims() {
+ const text = "<<.Greeting>> {{.Name}}"
+
+ data := struct {
+ Greeting string
+ Name string
+ }{
+ Greeting: "Hello",
+ Name: "Joe",
+ }
+
+ t := template.Must(template.New("tpl").Delims("<<", ">>").Parse(text))
+
+ err := t.Execute(os.Stdout, data)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Output:
+ // Hello {{.Name}}
+}
+
+// The following example is duplicated in text/template; keep them in sync.
+
+func ExampleTemplate_block() {
+ const (
+ master = `Names:{{block "list" .}}{{"\n"}}{{range .}}{{println "-" .}}{{end}}{{end}}`
+ overlay = `{{define "list"}} {{join . ", "}}{{end}} `
+ )
+ var (
+ funcs = template.FuncMap{"join": strings.Join}
+ guardians = []string{"Gamora", "Groot", "Nebula", "Rocket", "Star-Lord"}
+ )
+ masterTmpl, err := template.New("master").Funcs(funcs).Parse(master)
+ if err != nil {
+ log.Fatal(err)
+ }
+ overlayTmpl, err := template.Must(masterTmpl.Clone()).Parse(overlay)
+ if err != nil {
+ log.Fatal(err)
+ }
+ if err := masterTmpl.Execute(os.Stdout, guardians); err != nil {
+ log.Fatal(err)
+ }
+ if err := overlayTmpl.Execute(os.Stdout, guardians); err != nil {
+ log.Fatal(err)
+ }
+ // Output:
+ // Names:
+ // - Gamora
+ // - Groot
+ // - Nebula
+ // - Rocket
+ // - Star-Lord
+ // Names: Gamora, Groot, Nebula, Rocket, Star-Lord
+}
diff --git a/tpl/internal/go_templates/htmltemplate/examplefiles_test.go b/tpl/internal/go_templates/htmltemplate/examplefiles_test.go
new file mode 100644
index 000000000..43cc3bf01
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/examplefiles_test.go
@@ -0,0 +1,229 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+)
+
+// templateFile defines the contents of a template to be stored in a file, for testing.
+type templateFile struct {
+ name string
+ contents string
+}
+
+func createTestDir(files []templateFile) string {
+ dir, err := os.MkdirTemp("", "template")
+ if err != nil {
+ log.Fatal(err)
+ }
+ for _, file := range files {
+ f, err := os.Create(filepath.Join(dir, file.name))
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer f.Close()
+ _, err = io.WriteString(f, file.contents)
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+ return dir
+}
+
+// The following example is duplicated in text/template; keep them in sync.
+
+// Here we demonstrate loading a set of templates from a directory.
+func ExampleTemplate_glob() {
+ // Here we create a temporary directory and populate it with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir := createTestDir([]templateFile{
+ // T0.tmpl is a plain template file that just invokes T1.
+ {"T0.tmpl", `T0 invokes T1: ({{template "T1"}})`},
+ // T1.tmpl defines a template, T1 that invokes T2.
+ {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
+ // T2.tmpl defines a template T2.
+ {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
+ })
+ // Clean up after the test; another quirk of running as an example.
+ defer os.RemoveAll(dir)
+
+ // pattern is the glob pattern used to find all the template files.
+ pattern := filepath.Join(dir, "*.tmpl")
+
+ // Here starts the example proper.
+ // T0.tmpl is the first name matched, so it becomes the starting template,
+ // the value returned by ParseGlob.
+ tmpl := template.Must(template.ParseGlob(pattern))
+
+ err := tmpl.Execute(os.Stdout, nil)
+ if err != nil {
+ log.Fatalf("template execution: %s", err)
+ }
+ // Output:
+ // T0 invokes T1: (T1 invokes T2: (This is T2))
+}
+
+// Here we demonstrate loading a set of templates from files in different directories
+func ExampleTemplate_parsefiles() {
+ // Here we create different temporary directories and populate them with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir1 := createTestDir([]templateFile{
+ // T1.tmpl is a plain template file that just invokes T2.
+ {"T1.tmpl", `T1 invokes T2: ({{template "T2"}})`},
+ })
+
+ dir2 := createTestDir([]templateFile{
+ // T2.tmpl defines a template T2.
+ {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
+ })
+
+ // Clean up after the test; another quirk of running as an example.
+ defer func(dirs ...string) {
+ for _, dir := range dirs {
+ os.RemoveAll(dir)
+ }
+ }(dir1, dir2)
+
+ // Here starts the example proper.
+ // Let's just parse only dir1/T0 and dir2/T2
+ paths := []string{
+ filepath.Join(dir1, "T1.tmpl"),
+ filepath.Join(dir2, "T2.tmpl"),
+ }
+ tmpl := template.Must(template.ParseFiles(paths...))
+
+ err := tmpl.Execute(os.Stdout, nil)
+ if err != nil {
+ log.Fatalf("template execution: %s", err)
+ }
+ // Output:
+ // T1 invokes T2: (This is T2)
+}
+
+// The following example is duplicated in text/template; keep them in sync.
+
+// This example demonstrates one way to share some templates
+// and use them in different contexts. In this variant we add multiple driver
+// templates by hand to an existing bundle of templates.
+func ExampleTemplate_helpers() {
+ // Here we create a temporary directory and populate it with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir := createTestDir([]templateFile{
+ // T1.tmpl defines a template, T1 that invokes T2.
+ {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
+ // T2.tmpl defines a template T2.
+ {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
+ })
+ // Clean up after the test; another quirk of running as an example.
+ defer os.RemoveAll(dir)
+
+ // pattern is the glob pattern used to find all the template files.
+ pattern := filepath.Join(dir, "*.tmpl")
+
+ // Here starts the example proper.
+ // Load the helpers.
+ templates := template.Must(template.ParseGlob(pattern))
+ // Add one driver template to the bunch; we do this with an explicit template definition.
+ _, err := templates.Parse("{{define `driver1`}}Driver 1 calls T1: ({{template `T1`}})\n{{end}}")
+ if err != nil {
+ log.Fatal("parsing driver1: ", err)
+ }
+ // Add another driver template.
+ _, err = templates.Parse("{{define `driver2`}}Driver 2 calls T2: ({{template `T2`}})\n{{end}}")
+ if err != nil {
+ log.Fatal("parsing driver2: ", err)
+ }
+ // We load all the templates before execution. This package does not require
+ // that behavior but html/template's escaping does, so it's a good habit.
+ err = templates.ExecuteTemplate(os.Stdout, "driver1", nil)
+ if err != nil {
+ log.Fatalf("driver1 execution: %s", err)
+ }
+ err = templates.ExecuteTemplate(os.Stdout, "driver2", nil)
+ if err != nil {
+ log.Fatalf("driver2 execution: %s", err)
+ }
+ // Output:
+ // Driver 1 calls T1: (T1 invokes T2: (This is T2))
+ // Driver 2 calls T2: (This is T2)
+}
+
+// The following example is duplicated in text/template; keep them in sync.
+
+// This example demonstrates how to use one group of driver
+// templates with distinct sets of helper templates.
+func ExampleTemplate_share() {
+ // Here we create a temporary directory and populate it with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir := createTestDir([]templateFile{
+ // T0.tmpl is a plain template file that just invokes T1.
+ {"T0.tmpl", "T0 ({{.}} version) invokes T1: ({{template `T1`}})\n"},
+ // T1.tmpl defines a template, T1 that invokes T2. Note T2 is not defined
+ {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
+ })
+ // Clean up after the test; another quirk of running as an example.
+ defer os.RemoveAll(dir)
+
+ // pattern is the glob pattern used to find all the template files.
+ pattern := filepath.Join(dir, "*.tmpl")
+
+ // Here starts the example proper.
+ // Load the drivers.
+ drivers := template.Must(template.ParseGlob(pattern))
+
+ // We must define an implementation of the T2 template. First we clone
+ // the drivers, then add a definition of T2 to the template name space.
+
+ // 1. Clone the helper set to create a new name space from which to run them.
+ first, err := drivers.Clone()
+ if err != nil {
+ log.Fatal("cloning helpers: ", err)
+ }
+ // 2. Define T2, version A, and parse it.
+ _, err = first.Parse("{{define `T2`}}T2, version A{{end}}")
+ if err != nil {
+ log.Fatal("parsing T2: ", err)
+ }
+
+ // Now repeat the whole thing, using a different version of T2.
+ // 1. Clone the drivers.
+ second, err := drivers.Clone()
+ if err != nil {
+ log.Fatal("cloning drivers: ", err)
+ }
+ // 2. Define T2, version B, and parse it.
+ _, err = second.Parse("{{define `T2`}}T2, version B{{end}}")
+ if err != nil {
+ log.Fatal("parsing T2: ", err)
+ }
+
+ // Execute the templates in the reverse order to verify the
+ // first is unaffected by the second.
+ err = second.ExecuteTemplate(os.Stdout, "T0.tmpl", "second")
+ if err != nil {
+ log.Fatalf("second execution: %s", err)
+ }
+ err = first.ExecuteTemplate(os.Stdout, "T0.tmpl", "first")
+ if err != nil {
+ log.Fatalf("first: execution: %s", err)
+ }
+
+ // Output:
+ // T0 (second version) invokes T1: (T1 invokes T2: (T2, version B))
+ // T0 (first version) invokes T1: (T1 invokes T2: (T2, version A))
+}
diff --git a/tpl/internal/go_templates/htmltemplate/exec_test.go b/tpl/internal/go_templates/htmltemplate/exec_test.go
new file mode 100644
index 000000000..08195af0e
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/exec_test.go
@@ -0,0 +1,1838 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Tests for template execution, copied from text/template.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ htmltemplate "html/template"
+ "io"
+ "reflect"
+ "strings"
+ "sync"
+ "testing"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+)
+
+var debug = flag.Bool("debug", false, "show the errors produced by the tests")
+
+// T has lots of interesting pieces to use to test execution.
+type T struct {
+ // Basics
+ True bool
+ I int
+ U16 uint16
+ X, S string
+ FloatZero float64
+ ComplexZero complex128
+ // Nested structs.
+ U *U
+ // Struct with String method.
+ V0 V
+ V1, V2 *V
+ // Struct with Error method.
+ W0 W
+ W1, W2 *W
+ // Slices
+ SI []int
+ SICap []int
+ SIEmpty []int
+ SB []bool
+ // Arrays
+ AI [3]int
+ // Maps
+ MSI map[string]int
+ MSIone map[string]int // one element, for deterministic output
+ MSIEmpty map[string]int
+ MXI map[any]int
+ MII map[int]int
+ MI32S map[int32]string
+ MI64S map[int64]string
+ MUI32S map[uint32]string
+ MUI64S map[uint64]string
+ MI8S map[int8]string
+ MUI8S map[uint8]string
+ SMSI []map[string]int
+ // Empty interfaces; used to see if we can dig inside one.
+ Empty0 any // nil
+ Empty1 any
+ Empty2 any
+ Empty3 any
+ Empty4 any
+ // Non-empty interfaces.
+ NonEmptyInterface I
+ NonEmptyInterfacePtS *I
+ NonEmptyInterfaceNil I
+ NonEmptyInterfaceTypedNil I
+ // Stringer.
+ Str fmt.Stringer
+ Err error
+ // Pointers
+ PI *int
+ PS *string
+ PSI *[]int
+ NIL *int
+ // Function (not method)
+ BinaryFunc func(string, string) string
+ VariadicFunc func(...string) string
+ VariadicFuncInt func(int, ...string) string
+ NilOKFunc func(*int) bool
+ ErrFunc func() (string, error)
+ PanicFunc func() string
+ // Template to test evaluation of templates.
+ Tmpl *Template
+ // Unexported field; cannot be accessed by template.
+ unexported int
+}
+
+type S []string
+
+func (S) Method0() string {
+ return "M0"
+}
+
+type U struct {
+ V string
+}
+
+type V struct {
+ j int
+}
+
+func (v *V) String() string {
+ if v == nil {
+ return "nilV"
+ }
+ return fmt.Sprintf("<%d>", v.j)
+}
+
+type W struct {
+ k int
+}
+
+func (w *W) Error() string {
+ if w == nil {
+ return "nilW"
+ }
+ return fmt.Sprintf("[%d]", w.k)
+}
+
+var siVal = I(S{"a", "b"})
+
+var tVal = &T{
+ True: true,
+ I: 17,
+ U16: 16,
+ X: "x",
+ S: "xyz",
+ U: &U{"v"},
+ V0: V{6666},
+ V1: &V{7777}, // leave V2 as nil
+ W0: W{888},
+ W1: &W{999}, // leave W2 as nil
+ SI: []int{3, 4, 5},
+ SICap: make([]int, 5, 10),
+ AI: [3]int{3, 4, 5},
+ SB: []bool{true, false},
+ MSI: map[string]int{"one": 1, "two": 2, "three": 3},
+ MSIone: map[string]int{"one": 1},
+ MXI: map[any]int{"one": 1},
+ MII: map[int]int{1: 1},
+ MI32S: map[int32]string{1: "one", 2: "two"},
+ MI64S: map[int64]string{2: "i642", 3: "i643"},
+ MUI32S: map[uint32]string{2: "u322", 3: "u323"},
+ MUI64S: map[uint64]string{2: "ui642", 3: "ui643"},
+ MI8S: map[int8]string{2: "i82", 3: "i83"},
+ MUI8S: map[uint8]string{2: "u82", 3: "u83"},
+ SMSI: []map[string]int{
+ {"one": 1, "two": 2},
+ {"eleven": 11, "twelve": 12},
+ },
+ Empty1: 3,
+ Empty2: "empty2",
+ Empty3: []int{7, 8},
+ Empty4: &U{"UinEmpty"},
+ NonEmptyInterface: &T{X: "x"},
+ NonEmptyInterfacePtS: &siVal,
+ NonEmptyInterfaceTypedNil: (*T)(nil),
+ Str: bytes.NewBuffer([]byte("foozle")),
+ Err: errors.New("erroozle"),
+ PI: newInt(23),
+ PS: newString("a string"),
+ PSI: newIntSlice(21, 22, 23),
+ BinaryFunc: func(a, b string) string { return fmt.Sprintf("[%s=%s]", a, b) },
+ VariadicFunc: func(s ...string) string { return fmt.Sprint("<", strings.Join(s, "+"), ">") },
+ VariadicFuncInt: func(a int, s ...string) string { return fmt.Sprint(a, "=<", strings.Join(s, "+"), ">") },
+ NilOKFunc: func(s *int) bool { return s == nil },
+ ErrFunc: func() (string, error) { return "bla", nil },
+ PanicFunc: func() string { panic("test panic") },
+ Tmpl: Must(New("x").Parse("test template")), // "x" is the value of .X
+}
+
+var tSliceOfNil = []*T{nil}
+
+// A non-empty interface.
+type I interface {
+ Method0() string
+}
+
+var iVal I = tVal
+
+// Helpers for creation.
+func newInt(n int) *int {
+ return &n
+}
+
+func newString(s string) *string {
+ return &s
+}
+
+func newIntSlice(n ...int) *[]int {
+ p := new([]int)
+ *p = make([]int, len(n))
+ copy(*p, n)
+ return p
+}
+
+// Simple methods with and without arguments.
+func (t *T) Method0() string {
+ return "M0"
+}
+
+func (t *T) Method1(a int) int {
+ return a
+}
+
+func (t *T) Method2(a uint16, b string) string {
+ return fmt.Sprintf("Method2: %d %s", a, b)
+}
+
+func (t *T) Method3(v any) string {
+ return fmt.Sprintf("Method3: %v", v)
+}
+
+func (t *T) Copy() *T {
+ n := new(T)
+ *n = *t
+ return n
+}
+
+func (t *T) MAdd(a int, b []int) []int {
+ v := make([]int, len(b))
+ for i, x := range b {
+ v[i] = x + a
+ }
+ return v
+}
+
+var myError = errors.New("my error")
+
+// MyError returns a value and an error according to its argument.
+func (t *T) MyError(error bool) (bool, error) {
+ if error {
+ return true, myError
+ }
+ return false, nil
+}
+
+// A few methods to test chaining.
+func (t *T) GetU() *U {
+ return t.U
+}
+
+func (u *U) TrueFalse(b bool) string {
+ if b {
+ return "true"
+ }
+ return ""
+}
+
+func typeOf(arg any) string {
+ return fmt.Sprintf("%T", arg)
+}
+
+type execTest struct {
+ name string
+ input string
+ output string
+ data any
+ ok bool
+}
+
+// bigInt and bigUint are hex string representing numbers either side
+// of the max int boundary.
+// We do it this way so the test doesn't depend on ints being 32 bits.
+var (
+ bigInt = fmt.Sprintf("0x%x", int(1<<uint(reflect.TypeOf(0).Bits()-1)-1))
+ bigUint = fmt.Sprintf("0x%x", uint(1<<uint(reflect.TypeOf(0).Bits()-1)))
+)
+
+var execTests = []execTest{
+ // Trivial cases.
+ {"empty", "", "", nil, true},
+ {"text", "some text", "some text", nil, true},
+ {"nil action", "{{nil}}", "", nil, false},
+
+ // Ideal constants.
+ {"ideal int", "{{typeOf 3}}", "int", 0, true},
+ {"ideal float", "{{typeOf 1.0}}", "float64", 0, true},
+ {"ideal exp float", "{{typeOf 1e1}}", "float64", 0, true},
+ {"ideal complex", "{{typeOf 1i}}", "complex128", 0, true},
+ {"ideal int", "{{typeOf " + bigInt + "}}", "int", 0, true},
+ {"ideal too big", "{{typeOf " + bigUint + "}}", "", 0, false},
+ {"ideal nil without type", "{{nil}}", "", 0, false},
+
+ // Fields of structs.
+ {".X", "-{{.X}}-", "-x-", tVal, true},
+ {".U.V", "-{{.U.V}}-", "-v-", tVal, true},
+ {".unexported", "{{.unexported}}", "", tVal, false},
+
+ // Fields on maps.
+ {"map .one", "{{.MSI.one}}", "1", tVal, true},
+ {"map .two", "{{.MSI.two}}", "2", tVal, true},
+ {"map .NO", "{{.MSI.NO}}", "", tVal, true}, // NOTE: <no value> in text/template
+ {"map .one interface", "{{.MXI.one}}", "1", tVal, true},
+ {"map .WRONG args", "{{.MSI.one 1}}", "", tVal, false},
+ {"map .WRONG type", "{{.MII.one}}", "", tVal, false},
+
+ // Dots of all kinds to test basic evaluation.
+ {"dot int", "<{{.}}>", "&lt;13>", 13, true},
+ {"dot uint", "<{{.}}>", "&lt;14>", uint(14), true},
+ {"dot float", "<{{.}}>", "&lt;15.1>", 15.1, true},
+ {"dot bool", "<{{.}}>", "&lt;true>", true, true},
+ {"dot complex", "<{{.}}>", "&lt;(16.2-17i)>", 16.2 - 17i, true},
+ {"dot string", "<{{.}}>", "&lt;hello>", "hello", true},
+ {"dot slice", "<{{.}}>", "&lt;[-1 -2 -3]>", []int{-1, -2, -3}, true},
+ {"dot map", "<{{.}}>", "&lt;map[two:22]>", map[string]int{"two": 22}, true},
+ {"dot struct", "<{{.}}>", "&lt;{7 seven}>", struct {
+ a int
+ b string
+ }{7, "seven"}, true},
+
+ // Variables.
+ {"$ int", "{{$}}", "123", 123, true},
+ {"$.I", "{{$.I}}", "17", tVal, true},
+ {"$.U.V", "{{$.U.V}}", "v", tVal, true},
+ {"declare in action", "{{$x := $.U.V}}{{$x}}", "v", tVal, true},
+ {"simple assignment", "{{$x := 2}}{{$x = 3}}{{$x}}", "3", tVal, true},
+ {"nested assignment",
+ "{{$x := 2}}{{if true}}{{$x = 3}}{{end}}{{$x}}",
+ "3", tVal, true},
+ {"nested assignment changes the last declaration",
+ "{{$x := 1}}{{if true}}{{$x := 2}}{{if true}}{{$x = 3}}{{end}}{{end}}{{$x}}",
+ "1", tVal, true},
+
+ // Type with String method.
+ {"V{6666}.String()", "-{{.V0}}-", "-{6666}-", tVal, true}, // NOTE: -<6666>- in text/template
+ {"&V{7777}.String()", "-{{.V1}}-", "-&lt;7777&gt;-", tVal, true},
+ {"(*V)(nil).String()", "-{{.V2}}-", "-nilV-", tVal, true},
+
+ // Type with Error method.
+ {"W{888}.Error()", "-{{.W0}}-", "-{888}-", tVal, true}, // NOTE: -[888] in text/template
+ {"&W{999}.Error()", "-{{.W1}}-", "-[999]-", tVal, true},
+ {"(*W)(nil).Error()", "-{{.W2}}-", "-nilW-", tVal, true},
+
+ // Pointers.
+ {"*int", "{{.PI}}", "23", tVal, true},
+ {"*string", "{{.PS}}", "a string", tVal, true},
+ {"*[]int", "{{.PSI}}", "[21 22 23]", tVal, true},
+ {"*[]int[1]", "{{index .PSI 1}}", "22", tVal, true},
+ {"NIL", "{{.NIL}}", "&lt;nil&gt;", tVal, true},
+
+ // Empty interfaces holding values.
+ {"empty nil", "{{.Empty0}}", "", tVal, true}, // NOTE: <no value> in text/template
+ {"empty with int", "{{.Empty1}}", "3", tVal, true},
+ {"empty with string", "{{.Empty2}}", "empty2", tVal, true},
+ {"empty with slice", "{{.Empty3}}", "[7 8]", tVal, true},
+ {"empty with struct", "{{.Empty4}}", "{UinEmpty}", tVal, true},
+ {"empty with struct, field", "{{.Empty4.V}}", "UinEmpty", tVal, true},
+
+ // Edge cases with <no value> with an interface value
+ {"field on interface", "{{.foo}}", "", nil, true}, // NOTE: <no value> in text/template
+ {"field on parenthesized interface", "{{(.).foo}}", "", nil, true}, // NOTE: <no value> in text/template
+
+ // Issue 31810: Parenthesized first element of pipeline with arguments.
+ // See also TestIssue31810.
+ {"unparenthesized non-function", "{{1 2}}", "", nil, false},
+ {"parenthesized non-function", "{{(1) 2}}", "", nil, false},
+ {"parenthesized non-function with no args", "{{(1)}}", "1", nil, true}, // This is fine.
+
+ // Method calls.
+ {".Method0", "-{{.Method0}}-", "-M0-", tVal, true},
+ {".Method1(1234)", "-{{.Method1 1234}}-", "-1234-", tVal, true},
+ {".Method1(.I)", "-{{.Method1 .I}}-", "-17-", tVal, true},
+ {".Method2(3, .X)", "-{{.Method2 3 .X}}-", "-Method2: 3 x-", tVal, true},
+ {".Method2(.U16, `str`)", "-{{.Method2 .U16 `str`}}-", "-Method2: 16 str-", tVal, true},
+ {".Method2(.U16, $x)", "{{if $x := .X}}-{{.Method2 .U16 $x}}{{end}}-", "-Method2: 16 x-", tVal, true},
+ {".Method3(nil constant)", "-{{.Method3 nil}}-", "-Method3: &lt;nil&gt;-", tVal, true},
+ {".Method3(nil value)", "-{{.Method3 .MXI.unset}}-", "-Method3: &lt;nil&gt;-", tVal, true},
+ {"method on var", "{{if $x := .}}-{{$x.Method2 .U16 $x.X}}{{end}}-", "-Method2: 16 x-", tVal, true},
+ {"method on chained var",
+ "{{range .MSIone}}{{if $.U.TrueFalse $.True}}{{$.U.TrueFalse $.True}}{{else}}WRONG{{end}}{{end}}",
+ "true", tVal, true},
+ {"chained method",
+ "{{range .MSIone}}{{if $.GetU.TrueFalse $.True}}{{$.U.TrueFalse $.True}}{{else}}WRONG{{end}}{{end}}",
+ "true", tVal, true},
+ {"chained method on variable",
+ "{{with $x := .}}{{with .SI}}{{$.GetU.TrueFalse $.True}}{{end}}{{end}}",
+ "true", tVal, true},
+ {".NilOKFunc not nil", "{{call .NilOKFunc .PI}}", "false", tVal, true},
+ {".NilOKFunc nil", "{{call .NilOKFunc nil}}", "true", tVal, true},
+ {"method on nil value from slice", "-{{range .}}{{.Method1 1234}}{{end}}-", "-1234-", tSliceOfNil, true},
+ {"method on typed nil interface value", "{{.NonEmptyInterfaceTypedNil.Method0}}", "M0", tVal, true},
+
+ // Function call builtin.
+ {".BinaryFunc", "{{call .BinaryFunc `1` `2`}}", "[1=2]", tVal, true},
+ {".VariadicFunc0", "{{call .VariadicFunc}}", "&lt;&gt;", tVal, true},
+ {".VariadicFunc2", "{{call .VariadicFunc `he` `llo`}}", "&lt;he&#43;llo&gt;", tVal, true},
+ {".VariadicFuncInt", "{{call .VariadicFuncInt 33 `he` `llo`}}", "33=&lt;he&#43;llo&gt;", tVal, true},
+ {"if .BinaryFunc call", "{{ if .BinaryFunc}}{{call .BinaryFunc `1` `2`}}{{end}}", "[1=2]", tVal, true},
+ {"if not .BinaryFunc call", "{{ if not .BinaryFunc}}{{call .BinaryFunc `1` `2`}}{{else}}No{{end}}", "No", tVal, true},
+ {"Interface Call", `{{stringer .S}}`, "foozle", map[string]any{"S": bytes.NewBufferString("foozle")}, true},
+ {".ErrFunc", "{{call .ErrFunc}}", "bla", tVal, true},
+ {"call nil", "{{call nil}}", "", tVal, false},
+
+ // Erroneous function calls (check args).
+ {".BinaryFuncTooFew", "{{call .BinaryFunc `1`}}", "", tVal, false},
+ {".BinaryFuncTooMany", "{{call .BinaryFunc `1` `2` `3`}}", "", tVal, false},
+ {".BinaryFuncBad0", "{{call .BinaryFunc 1 3}}", "", tVal, false},
+ {".BinaryFuncBad1", "{{call .BinaryFunc `1` 3}}", "", tVal, false},
+ {".VariadicFuncBad0", "{{call .VariadicFunc 3}}", "", tVal, false},
+ {".VariadicFuncIntBad0", "{{call .VariadicFuncInt}}", "", tVal, false},
+ {".VariadicFuncIntBad`", "{{call .VariadicFuncInt `x`}}", "", tVal, false},
+ {".VariadicFuncNilBad", "{{call .VariadicFunc nil}}", "", tVal, false},
+
+ // Pipelines.
+ {"pipeline", "-{{.Method0 | .Method2 .U16}}-", "-Method2: 16 M0-", tVal, true},
+ {"pipeline func", "-{{call .VariadicFunc `llo` | call .VariadicFunc `he` }}-", "-&lt;he&#43;&lt;llo&gt;&gt;-", tVal, true},
+
+ // Nil values aren't missing arguments.
+ {"nil pipeline", "{{ .Empty0 | call .NilOKFunc }}", "true", tVal, true},
+ {"nil call arg", "{{ call .NilOKFunc .Empty0 }}", "true", tVal, true},
+ {"bad nil pipeline", "{{ .Empty0 | .VariadicFunc }}", "", tVal, false},
+
+ // Parenthesized expressions
+ {"parens in pipeline", "{{printf `%d %d %d` (1) (2 | add 3) (add 4 (add 5 6))}}", "1 5 15", tVal, true},
+
+ // Parenthesized expressions with field accesses
+ {"parens: $ in paren", "{{($).X}}", "x", tVal, true},
+ {"parens: $.GetU in paren", "{{($.GetU).V}}", "v", tVal, true},
+ {"parens: $ in paren in pipe", "{{($ | echo).X}}", "x", tVal, true},
+ {"parens: spaces and args", `{{(makemap "up" "down" "left" "right").left}}`, "right", tVal, true},
+
+ // If.
+ {"if true", "{{if true}}TRUE{{end}}", "TRUE", tVal, true},
+ {"if false", "{{if false}}TRUE{{else}}FALSE{{end}}", "FALSE", tVal, true},
+ {"if nil", "{{if nil}}TRUE{{end}}", "", tVal, false},
+ {"if on typed nil interface value", "{{if .NonEmptyInterfaceTypedNil}}TRUE{{ end }}", "", tVal, true},
+ {"if 1", "{{if 1}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true},
+ {"if 0", "{{if 0}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if 1.5", "{{if 1.5}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true},
+ {"if 0.0", "{{if .FloatZero}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if 1.5i", "{{if 1.5i}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true},
+ {"if 0.0i", "{{if .ComplexZero}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if emptystring", "{{if ``}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"if string", "{{if `notempty`}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true},
+ {"if emptyslice", "{{if .SIEmpty}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"if slice", "{{if .SI}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true},
+ {"if emptymap", "{{if .MSIEmpty}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"if map", "{{if .MSI}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true},
+ {"if map unset", "{{if .MXI.none}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if map not unset", "{{if not .MXI.none}}ZERO{{else}}NON-ZERO{{end}}", "ZERO", tVal, true},
+ {"if $x with $y int", "{{if $x := true}}{{with $y := .I}}{{$x}},{{$y}}{{end}}{{end}}", "true,17", tVal, true},
+ {"if $x with $x int", "{{if $x := true}}{{with $x := .I}}{{$x}},{{end}}{{$x}}{{end}}", "17,true", tVal, true},
+ {"if else if", "{{if false}}FALSE{{else if true}}TRUE{{end}}", "TRUE", tVal, true},
+ {"if else chain", "{{if eq 1 3}}1{{else if eq 2 3}}2{{else if eq 3 3}}3{{end}}", "3", tVal, true},
+
+ // Print etc.
+ {"print", `{{print "hello, print"}}`, "hello, print", tVal, true},
+ {"print 123", `{{print 1 2 3}}`, "1 2 3", tVal, true},
+ {"print nil", `{{print nil}}`, "&lt;nil&gt;", tVal, true},
+ {"println", `{{println 1 2 3}}`, "1 2 3\n", tVal, true},
+ {"printf int", `{{printf "%04x" 127}}`, "007f", tVal, true},
+ {"printf float", `{{printf "%g" 3.5}}`, "3.5", tVal, true},
+ {"printf complex", `{{printf "%g" 1+7i}}`, "(1&#43;7i)", tVal, true},
+ {"printf string", `{{printf "%s" "hello"}}`, "hello", tVal, true},
+ {"printf function", `{{printf "%#q" zeroArgs}}`, "`zeroArgs`", tVal, true},
+ {"printf field", `{{printf "%s" .U.V}}`, "v", tVal, true},
+ {"printf method", `{{printf "%s" .Method0}}`, "M0", tVal, true},
+ {"printf dot", `{{with .I}}{{printf "%d" .}}{{end}}`, "17", tVal, true},
+ {"printf var", `{{with $x := .I}}{{printf "%d" $x}}{{end}}`, "17", tVal, true},
+ {"printf lots", `{{printf "%d %s %g %s" 127 "hello" 7-3i .Method0}}`, "127 hello (7-3i) M0", tVal, true},
+
+ // HTML.
+ {"html", `{{html "<script>alert(\"XSS\");</script>"}}`,
+ "&lt;script&gt;alert(&#34;XSS&#34;);&lt;/script&gt;", nil, true},
+ {"html pipeline", `{{printf "<script>alert(\"XSS\");</script>" | html}}`,
+ "&lt;script&gt;alert(&#34;XSS&#34;);&lt;/script&gt;", nil, true},
+ {"html", `{{html .PS}}`, "a string", tVal, true},
+ {"html typed nil", `{{html .NIL}}`, "&lt;nil&gt;", tVal, true},
+ {"html untyped nil", `{{html .Empty0}}`, "&lt;nil&gt;", tVal, true}, // NOTE: "&lt;no value&gt;" in text/template
+
+ // JavaScript.
+ {"js", `{{js .}}`, `It\&#39;d be nice.`, `It'd be nice.`, true},
+
+ // URL query.
+ {"urlquery", `{{"http://www.example.org/"|urlquery}}`, "http%3A%2F%2Fwww.example.org%2F", nil, true},
+
+ // Booleans
+ {"not", "{{not true}} {{not false}}", "false true", nil, true},
+ {"and", "{{and false 0}} {{and 1 0}} {{and 0 true}} {{and 1 1}}", "false 0 0 1", nil, true},
+ {"or", "{{or 0 0}} {{or 1 0}} {{or 0 true}} {{or 1 1}}", "0 1 true 1", nil, true},
+ {"boolean if", "{{if and true 1 `hi`}}TRUE{{else}}FALSE{{end}}", "TRUE", tVal, true},
+ {"boolean if not", "{{if and true 1 `hi` | not}}TRUE{{else}}FALSE{{end}}", "FALSE", nil, true},
+
+ // Indexing.
+ {"slice[0]", "{{index .SI 0}}", "3", tVal, true},
+ {"slice[1]", "{{index .SI 1}}", "4", tVal, true},
+ {"slice[HUGE]", "{{index .SI 10}}", "", tVal, false},
+ {"slice[WRONG]", "{{index .SI `hello`}}", "", tVal, false},
+ {"slice[nil]", "{{index .SI nil}}", "", tVal, false},
+ {"map[one]", "{{index .MSI `one`}}", "1", tVal, true},
+ {"map[two]", "{{index .MSI `two`}}", "2", tVal, true},
+ {"map[NO]", "{{index .MSI `XXX`}}", "0", tVal, true},
+ {"map[nil]", "{{index .MSI nil}}", "", tVal, false},
+ {"map[``]", "{{index .MSI ``}}", "0", tVal, true},
+ {"map[WRONG]", "{{index .MSI 10}}", "", tVal, false},
+ {"double index", "{{index .SMSI 1 `eleven`}}", "11", tVal, true},
+ {"nil[1]", "{{index nil 1}}", "", tVal, false},
+ {"map MI64S", "{{index .MI64S 2}}", "i642", tVal, true},
+ {"map MI32S", "{{index .MI32S 2}}", "two", tVal, true},
+ {"map MUI64S", "{{index .MUI64S 3}}", "ui643", tVal, true},
+ {"map MI8S", "{{index .MI8S 3}}", "i83", tVal, true},
+ {"map MUI8S", "{{index .MUI8S 2}}", "u82", tVal, true},
+ {"index of an interface field", "{{index .Empty3 0}}", "7", tVal, true},
+
+ // Slicing.
+ {"slice[:]", "{{slice .SI}}", "[3 4 5]", tVal, true},
+ {"slice[1:]", "{{slice .SI 1}}", "[4 5]", tVal, true},
+ {"slice[1:2]", "{{slice .SI 1 2}}", "[4]", tVal, true},
+ {"slice[-1:]", "{{slice .SI -1}}", "", tVal, false},
+ {"slice[1:-2]", "{{slice .SI 1 -2}}", "", tVal, false},
+ {"slice[1:2:-1]", "{{slice .SI 1 2 -1}}", "", tVal, false},
+ {"slice[2:1]", "{{slice .SI 2 1}}", "", tVal, false},
+ {"slice[2:2:1]", "{{slice .SI 2 2 1}}", "", tVal, false},
+ {"out of range", "{{slice .SI 4 5}}", "", tVal, false},
+ {"out of range", "{{slice .SI 2 2 5}}", "", tVal, false},
+ {"len(s) < indexes < cap(s)", "{{slice .SICap 6 10}}", "[0 0 0 0]", tVal, true},
+ {"len(s) < indexes < cap(s)", "{{slice .SICap 6 10 10}}", "[0 0 0 0]", tVal, true},
+ {"indexes > cap(s)", "{{slice .SICap 10 11}}", "", tVal, false},
+ {"indexes > cap(s)", "{{slice .SICap 6 10 11}}", "", tVal, false},
+ {"array[:]", "{{slice .AI}}", "[3 4 5]", tVal, true},
+ {"array[1:]", "{{slice .AI 1}}", "[4 5]", tVal, true},
+ {"array[1:2]", "{{slice .AI 1 2}}", "[4]", tVal, true},
+ {"string[:]", "{{slice .S}}", "xyz", tVal, true},
+ {"string[0:1]", "{{slice .S 0 1}}", "x", tVal, true},
+ {"string[1:]", "{{slice .S 1}}", "yz", tVal, true},
+ {"string[1:2]", "{{slice .S 1 2}}", "y", tVal, true},
+ {"out of range", "{{slice .S 1 5}}", "", tVal, false},
+ {"3-index slice of string", "{{slice .S 1 2 2}}", "", tVal, false},
+ {"slice of an interface field", "{{slice .Empty3 0 1}}", "[7]", tVal, true},
+
+ // Len.
+ {"slice", "{{len .SI}}", "3", tVal, true},
+ {"map", "{{len .MSI }}", "3", tVal, true},
+ {"len of int", "{{len 3}}", "", tVal, false},
+ {"len of nothing", "{{len .Empty0}}", "", tVal, false},
+ {"len of an interface field", "{{len .Empty3}}", "2", tVal, true},
+
+ // With.
+ {"with true", "{{with true}}{{.}}{{end}}", "true", tVal, true},
+ {"with false", "{{with false}}{{.}}{{else}}FALSE{{end}}", "FALSE", tVal, true},
+ {"with 1", "{{with 1}}{{.}}{{else}}ZERO{{end}}", "1", tVal, true},
+ {"with 0", "{{with 0}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"with 1.5", "{{with 1.5}}{{.}}{{else}}ZERO{{end}}", "1.5", tVal, true},
+ {"with 0.0", "{{with .FloatZero}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"with 1.5i", "{{with 1.5i}}{{.}}{{else}}ZERO{{end}}", "(0&#43;1.5i)", tVal, true},
+ {"with 0.0i", "{{with .ComplexZero}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"with emptystring", "{{with ``}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"with string", "{{with `notempty`}}{{.}}{{else}}EMPTY{{end}}", "notempty", tVal, true},
+ {"with emptyslice", "{{with .SIEmpty}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"with slice", "{{with .SI}}{{.}}{{else}}EMPTY{{end}}", "[3 4 5]", tVal, true},
+ {"with emptymap", "{{with .MSIEmpty}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"with map", "{{with .MSIone}}{{.}}{{else}}EMPTY{{end}}", "map[one:1]", tVal, true},
+ {"with empty interface, struct field", "{{with .Empty4}}{{.V}}{{end}}", "UinEmpty", tVal, true},
+ {"with $x int", "{{with $x := .I}}{{$x}}{{end}}", "17", tVal, true},
+ {"with $x struct.U.V", "{{with $x := $}}{{$x.U.V}}{{end}}", "v", tVal, true},
+ {"with variable and action", "{{with $x := $}}{{$y := $.U.V}}{{$y}}{{end}}", "v", tVal, true},
+ {"with on typed nil interface value", "{{with .NonEmptyInterfaceTypedNil}}TRUE{{ end }}", "", tVal, true},
+
+ // Range.
+ {"range []int", "{{range .SI}}-{{.}}-{{end}}", "-3--4--5-", tVal, true},
+ {"range empty no else", "{{range .SIEmpty}}-{{.}}-{{end}}", "", tVal, true},
+ {"range []int else", "{{range .SI}}-{{.}}-{{else}}EMPTY{{end}}", "-3--4--5-", tVal, true},
+ {"range empty else", "{{range .SIEmpty}}-{{.}}-{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"range []int break else", "{{range .SI}}-{{.}}-{{break}}NOTREACHED{{else}}EMPTY{{end}}", "-3-", tVal, true},
+ {"range []int continue else", "{{range .SI}}-{{.}}-{{continue}}NOTREACHED{{else}}EMPTY{{end}}", "-3--4--5-", tVal, true},
+ {"range []bool", "{{range .SB}}-{{.}}-{{end}}", "-true--false-", tVal, true},
+ {"range []int method", "{{range .SI | .MAdd .I}}-{{.}}-{{end}}", "-20--21--22-", tVal, true},
+ {"range map", "{{range .MSI}}-{{.}}-{{end}}", "-1--3--2-", tVal, true},
+ {"range empty map no else", "{{range .MSIEmpty}}-{{.}}-{{end}}", "", tVal, true},
+ {"range map else", "{{range .MSI}}-{{.}}-{{else}}EMPTY{{end}}", "-1--3--2-", tVal, true},
+ {"range empty map else", "{{range .MSIEmpty}}-{{.}}-{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"range empty interface", "{{range .Empty3}}-{{.}}-{{else}}EMPTY{{end}}", "-7--8-", tVal, true},
+ {"range empty nil", "{{range .Empty0}}-{{.}}-{{end}}", "", tVal, true},
+ {"range $x SI", "{{range $x := .SI}}<{{$x}}>{{end}}", "&lt;3>&lt;4>&lt;5>", tVal, true},
+ {"range $x $y SI", "{{range $x, $y := .SI}}<{{$x}}={{$y}}>{{end}}", "&lt;0=3>&lt;1=4>&lt;2=5>", tVal, true},
+ {"range $x MSIone", "{{range $x := .MSIone}}<{{$x}}>{{end}}", "&lt;1>", tVal, true},
+ {"range $x $y MSIone", "{{range $x, $y := .MSIone}}<{{$x}}={{$y}}>{{end}}", "&lt;one=1>", tVal, true},
+ {"range $x PSI", "{{range $x := .PSI}}<{{$x}}>{{end}}", "&lt;21>&lt;22>&lt;23>", tVal, true},
+ {"declare in range", "{{range $x := .PSI}}<{{$foo:=$x}}{{$x}}>{{end}}", "&lt;21>&lt;22>&lt;23>", tVal, true},
+ {"range count", `{{range $i, $x := count 5}}[{{$i}}]{{$x}}{{end}}`, "[0]a[1]b[2]c[3]d[4]e", tVal, true},
+ {"range nil count", `{{range $i, $x := count 0}}{{else}}empty{{end}}`, "empty", tVal, true},
+
+ // Cute examples.
+ {"or as if true", `{{or .SI "slice is empty"}}`, "[3 4 5]", tVal, true},
+ {"or as if false", `{{or .SIEmpty "slice is empty"}}`, "slice is empty", tVal, true},
+
+ // Error handling.
+ {"error method, error", "{{.MyError true}}", "", tVal, false},
+ {"error method, no error", "{{.MyError false}}", "false", tVal, true},
+
+ // Numbers
+ {"decimal", "{{print 1234}}", "1234", tVal, true},
+ {"decimal _", "{{print 12_34}}", "1234", tVal, true},
+ {"binary", "{{print 0b101}}", "5", tVal, true},
+ {"binary _", "{{print 0b_1_0_1}}", "5", tVal, true},
+ {"BINARY", "{{print 0B101}}", "5", tVal, true},
+ {"octal0", "{{print 0377}}", "255", tVal, true},
+ {"octal", "{{print 0o377}}", "255", tVal, true},
+ {"octal _", "{{print 0o_3_7_7}}", "255", tVal, true},
+ {"OCTAL", "{{print 0O377}}", "255", tVal, true},
+ {"hex", "{{print 0x123}}", "291", tVal, true},
+ {"hex _", "{{print 0x1_23}}", "291", tVal, true},
+ {"HEX", "{{print 0X123ABC}}", "1194684", tVal, true},
+ {"float", "{{print 123.4}}", "123.4", tVal, true},
+ {"float _", "{{print 0_0_1_2_3.4}}", "123.4", tVal, true},
+ {"hex float", "{{print +0x1.ep+2}}", "7.5", tVal, true},
+ {"hex float _", "{{print +0x_1.e_0p+0_2}}", "7.5", tVal, true},
+ {"HEX float", "{{print +0X1.EP+2}}", "7.5", tVal, true},
+ {"print multi", "{{print 1_2_3_4 7.5_00_00_00}}", "1234 7.5", tVal, true},
+ {"print multi2", "{{print 1234 0x0_1.e_0p+02}}", "1234 7.5", tVal, true},
+
+ // Fixed bugs.
+ // Must separate dot and receiver; otherwise args are evaluated with dot set to variable.
+ {"bug0", "{{range .MSIone}}{{if $.Method1 .}}X{{end}}{{end}}", "X", tVal, true},
+ // Do not loop endlessly in indirect for non-empty interfaces.
+ // The bug appears with *interface only; looped forever.
+ {"bug1", "{{.Method0}}", "M0", &iVal, true},
+ // Was taking address of interface field, so method set was empty.
+ {"bug2", "{{$.NonEmptyInterface.Method0}}", "M0", tVal, true},
+ // Struct values were not legal in with - mere oversight.
+ {"bug3", "{{with $}}{{.Method0}}{{end}}", "M0", tVal, true},
+ // Nil interface values in if.
+ {"bug4", "{{if .Empty0}}non-nil{{else}}nil{{end}}", "nil", tVal, true},
+ // Stringer.
+ {"bug5", "{{.Str}}", "foozle", tVal, true},
+ {"bug5a", "{{.Err}}", "erroozle", tVal, true},
+ // Args need to be indirected and dereferenced sometimes.
+ {"bug6a", "{{vfunc .V0 .V1}}", "vfunc", tVal, true},
+ {"bug6b", "{{vfunc .V0 .V0}}", "vfunc", tVal, true},
+ {"bug6c", "{{vfunc .V1 .V0}}", "vfunc", tVal, true},
+ {"bug6d", "{{vfunc .V1 .V1}}", "vfunc", tVal, true},
+ // Legal parse but illegal execution: non-function should have no arguments.
+ {"bug7a", "{{3 2}}", "", tVal, false},
+ {"bug7b", "{{$x := 1}}{{$x 2}}", "", tVal, false},
+ {"bug7c", "{{$x := 1}}{{3 | $x}}", "", tVal, false},
+ // Pipelined arg was not being type-checked.
+ {"bug8a", "{{3|oneArg}}", "", tVal, false},
+ {"bug8b", "{{4|dddArg 3}}", "", tVal, false},
+ // A bug was introduced that broke map lookups for lower-case names.
+ {"bug9", "{{.cause}}", "neglect", map[string]string{"cause": "neglect"}, true},
+ // Field chain starting with function did not work.
+ {"bug10", "{{mapOfThree.three}}-{{(mapOfThree).three}}", "3-3", 0, true},
+ // Dereferencing nil pointer while evaluating function arguments should not panic. Issue 7333.
+ {"bug11", "{{valueString .PS}}", "", T{}, false},
+ // 0xef gave constant type float64. Issue 8622.
+ {"bug12xe", "{{printf `%T` 0xef}}", "int", T{}, true},
+ {"bug12xE", "{{printf `%T` 0xEE}}", "int", T{}, true},
+ {"bug12Xe", "{{printf `%T` 0Xef}}", "int", T{}, true},
+ {"bug12XE", "{{printf `%T` 0XEE}}", "int", T{}, true},
+ // Chained nodes did not work as arguments. Issue 8473.
+ {"bug13", "{{print (.Copy).I}}", "17", tVal, true},
+ // Didn't protect against nil or literal values in field chains.
+ {"bug14a", "{{(nil).True}}", "", tVal, false},
+ {"bug14b", "{{$x := nil}}{{$x.anything}}", "", tVal, false},
+ {"bug14c", `{{$x := (1.0)}}{{$y := ("hello")}}{{$x.anything}}{{$y.true}}`, "", tVal, false},
+ // Didn't call validateType on function results. Issue 10800.
+ {"bug15", "{{valueString returnInt}}", "", tVal, false},
+ // Variadic function corner cases. Issue 10946.
+ {"bug16a", "{{true|printf}}", "", tVal, false},
+ {"bug16b", "{{1|printf}}", "", tVal, false},
+ {"bug16c", "{{1.1|printf}}", "", tVal, false},
+ {"bug16d", "{{'x'|printf}}", "", tVal, false},
+ {"bug16e", "{{0i|printf}}", "", tVal, false},
+ {"bug16f", "{{true|twoArgs \"xxx\"}}", "", tVal, false},
+ {"bug16g", "{{\"aaa\" |twoArgs \"bbb\"}}", "twoArgs=bbbaaa", tVal, true},
+ {"bug16h", "{{1|oneArg}}", "", tVal, false},
+ {"bug16i", "{{\"aaa\"|oneArg}}", "oneArg=aaa", tVal, true},
+ {"bug16j", "{{1+2i|printf \"%v\"}}", "(1&#43;2i)", tVal, true},
+ {"bug16k", "{{\"aaa\"|printf }}", "aaa", tVal, true},
+ {"bug17a", "{{.NonEmptyInterface.X}}", "x", tVal, true},
+ {"bug17b", "-{{.NonEmptyInterface.Method1 1234}}-", "-1234-", tVal, true},
+ {"bug17c", "{{len .NonEmptyInterfacePtS}}", "2", tVal, true},
+ {"bug17d", "{{index .NonEmptyInterfacePtS 0}}", "a", tVal, true},
+ {"bug17e", "{{range .NonEmptyInterfacePtS}}-{{.}}-{{end}}", "-a--b-", tVal, true},
+
+ // More variadic function corner cases. Some runes would get evaluated
+ // as constant floats instead of ints. Issue 34483.
+ {"bug18a", "{{eq . '.'}}", "true", '.', true},
+ {"bug18b", "{{eq . 'e'}}", "true", 'e', true},
+ {"bug18c", "{{eq . 'P'}}", "true", 'P', true},
+}
+
+func zeroArgs() string {
+ return "zeroArgs"
+}
+
+func oneArg(a string) string {
+ return "oneArg=" + a
+}
+
+func twoArgs(a, b string) string {
+ return "twoArgs=" + a + b
+}
+
+func dddArg(a int, b ...string) string {
+ return fmt.Sprintln(a, b)
+}
+
+// count returns a channel that will deliver n sequential 1-letter strings starting at "a"
+func count(n int) chan string {
+ if n == 0 {
+ return nil
+ }
+ c := make(chan string)
+ go func() {
+ for i := 0; i < n; i++ {
+ c <- "abcdefghijklmnop"[i : i+1]
+ }
+ close(c)
+ }()
+ return c
+}
+
+// vfunc takes a *V and a V
+func vfunc(V, *V) string {
+ return "vfunc"
+}
+
+// valueString takes a string, not a pointer.
+func valueString(v string) string {
+ return "value is ignored"
+}
+
+// returnInt returns an int
+func returnInt() int {
+ return 7
+}
+
+func add(args ...int) int {
+ sum := 0
+ for _, x := range args {
+ sum += x
+ }
+ return sum
+}
+
+func echo(arg any) any {
+ return arg
+}
+
+func makemap(arg ...string) map[string]string {
+ if len(arg)%2 != 0 {
+ panic("bad makemap")
+ }
+ m := make(map[string]string)
+ for i := 0; i < len(arg); i += 2 {
+ m[arg[i]] = arg[i+1]
+ }
+ return m
+}
+
+func stringer(s fmt.Stringer) string {
+ return s.String()
+}
+
+func mapOfThree() any {
+ return map[string]int{"three": 3}
+}
+
+func testExecute(execTests []execTest, template *Template, t *testing.T) {
+ b := new(bytes.Buffer)
+ funcs := FuncMap{
+ "add": add,
+ "count": count,
+ "dddArg": dddArg,
+ "echo": echo,
+ "makemap": makemap,
+ "mapOfThree": mapOfThree,
+ "oneArg": oneArg,
+ "returnInt": returnInt,
+ "stringer": stringer,
+ "twoArgs": twoArgs,
+ "typeOf": typeOf,
+ "valueString": valueString,
+ "vfunc": vfunc,
+ "zeroArgs": zeroArgs,
+ }
+ for _, test := range execTests {
+ var tmpl *Template
+ var err error
+ if template == nil {
+ tmpl, err = New(test.name).Funcs(funcs).Parse(test.input)
+ } else {
+ tmpl, err = template.Clone()
+ if err != nil {
+ t.Errorf("%s: clone error: %s", test.name, err)
+ continue
+ }
+ tmpl, err = tmpl.New(test.name).Funcs(funcs).Parse(test.input)
+ }
+ if err != nil {
+ t.Errorf("%s: parse error: %s", test.name, err)
+ continue
+ }
+ b.Reset()
+ err = tmpl.Execute(b, test.data)
+ switch {
+ case !test.ok && err == nil:
+ t.Errorf("%s: expected error; got none", test.name)
+ continue
+ case test.ok && err != nil:
+ t.Errorf("%s: unexpected execute error: %s", test.name, err)
+ continue
+ case !test.ok && err != nil:
+ // expected error, got one
+ if *debug {
+ fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
+ }
+ }
+ result := b.String()
+ if result != test.output {
+ t.Errorf("%s: expected\n\t%q\ngot\n\t%q", test.name, test.output, result)
+ }
+ }
+}
+
+func TestExecute(t *testing.T) {
+ testExecute(execTests, nil, t)
+}
+
+var delimPairs = []string{
+ "", "", // default
+ "{{", "}}", // same as default
+ "|", "|", // same
+ "(日)", "(本)", // peculiar
+}
+
+func TestDelims(t *testing.T) {
+ const hello = "Hello, world"
+ var value = struct{ Str string }{hello}
+ for i := 0; i < len(delimPairs); i += 2 {
+ text := ".Str"
+ left := delimPairs[i+0]
+ trueLeft := left
+ right := delimPairs[i+1]
+ trueRight := right
+ if left == "" { // default case
+ trueLeft = "{{"
+ }
+ if right == "" { // default case
+ trueRight = "}}"
+ }
+ text = trueLeft + text + trueRight
+ // Now add a comment
+ text += trueLeft + "/*comment*/" + trueRight
+ // Now add an action containing a string.
+ text += trueLeft + `"` + trueLeft + `"` + trueRight
+ // At this point text looks like `{{.Str}}{{/*comment*/}}{{"{{"}}`.
+ tmpl, err := New("delims").Delims(left, right).Parse(text)
+ if err != nil {
+ t.Fatalf("delim %q text %q parse err %s", left, text, err)
+ }
+ var b = new(bytes.Buffer)
+ err = tmpl.Execute(b, value)
+ if err != nil {
+ t.Fatalf("delim %q exec err %s", left, err)
+ }
+ if b.String() != hello+trueLeft {
+ t.Errorf("expected %q got %q", hello+trueLeft, b.String())
+ }
+ }
+}
+
+// Check that an error from a method flows back to the top.
+func TestExecuteError(t *testing.T) {
+ b := new(bytes.Buffer)
+ tmpl := New("error")
+ _, err := tmpl.Parse("{{.MyError true}}")
+ if err != nil {
+ t.Fatalf("parse error: %s", err)
+ }
+ err = tmpl.Execute(b, tVal)
+ if err == nil {
+ t.Errorf("expected error; got none")
+ } else if !strings.Contains(err.Error(), myError.Error()) {
+ if *debug {
+ fmt.Printf("test execute error: %s\n", err)
+ }
+ t.Errorf("expected myError; got %s", err)
+ }
+}
+
+const execErrorText = `line 1
+line 2
+line 3
+{{template "one" .}}
+{{define "one"}}{{template "two" .}}{{end}}
+{{define "two"}}{{template "three" .}}{{end}}
+{{define "three"}}{{index "hi" $}}{{end}}`
+
+// Check that an error from a nested template contains all the relevant information.
+func TestExecError(t *testing.T) {
+ tmpl, err := New("top").Parse(execErrorText)
+ if err != nil {
+ t.Fatal("parse error:", err)
+ }
+ var b bytes.Buffer
+ err = tmpl.Execute(&b, 5) // 5 is out of range indexing "hi"
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ const want = `template: top:7:20: executing "three" at <index "hi" $>: error calling index: index out of range: 5`
+ got := err.Error()
+ if got != want {
+ t.Errorf("expected\n%q\ngot\n%q", want, got)
+ }
+}
+
+func TestJSEscaping(t *testing.T) {
+ testCases := []struct {
+ in, exp string
+ }{
+ {`a`, `a`},
+ {`'foo`, `\'foo`},
+ {`Go "jump" \`, `Go \"jump\" \\`},
+ {`Yukihiro says "今日は世界"`, `Yukihiro says \"今日は世界\"`},
+ {"unprintable \uFDFF", `unprintable \uFDFF`},
+ {`<html>`, `\u003Chtml\u003E`},
+ {`no = in attributes`, `no \u003D in attributes`},
+ {`&#x27; does not become HTML entity`, `\u0026#x27; does not become HTML entity`},
+ }
+ for _, tc := range testCases {
+ s := JSEscapeString(tc.in)
+ if s != tc.exp {
+ t.Errorf("JS escaping [%s] got [%s] want [%s]", tc.in, s, tc.exp)
+ }
+ }
+}
+
+// A nice example: walk a binary tree.
+
+type Tree struct {
+ Val int
+ Left, Right *Tree
+}
+
+// Use different delimiters to test Set.Delims.
+// Also test the trimming of leading and trailing spaces.
+const treeTemplate = `
+ (- define "tree" -)
+ [
+ (- .Val -)
+ (- with .Left -)
+ (template "tree" . -)
+ (- end -)
+ (- with .Right -)
+ (- template "tree" . -)
+ (- end -)
+ ]
+ (- end -)
+`
+
+func TestTree(t *testing.T) {
+ var tree = &Tree{
+ 1,
+ &Tree{
+ 2, &Tree{
+ 3,
+ &Tree{
+ 4, nil, nil,
+ },
+ nil,
+ },
+ &Tree{
+ 5,
+ &Tree{
+ 6, nil, nil,
+ },
+ nil,
+ },
+ },
+ &Tree{
+ 7,
+ &Tree{
+ 8,
+ &Tree{
+ 9, nil, nil,
+ },
+ nil,
+ },
+ &Tree{
+ 10,
+ &Tree{
+ 11, nil, nil,
+ },
+ nil,
+ },
+ },
+ }
+ tmpl, err := New("root").Delims("(", ")").Parse(treeTemplate)
+ if err != nil {
+ t.Fatal("parse error:", err)
+ }
+ var b bytes.Buffer
+ const expect = "[1[2[3[4]][5[6]]][7[8[9]][10[11]]]]"
+ // First by looking up the template.
+ err = tmpl.Lookup("tree").Execute(&b, tree)
+ if err != nil {
+ t.Fatal("exec error:", err)
+ }
+ result := b.String()
+ if result != expect {
+ t.Errorf("expected %q got %q", expect, result)
+ }
+ // Then direct to execution.
+ b.Reset()
+ err = tmpl.ExecuteTemplate(&b, "tree", tree)
+ if err != nil {
+ t.Fatal("exec error:", err)
+ }
+ result = b.String()
+ if result != expect {
+ t.Errorf("expected %q got %q", expect, result)
+ }
+}
+
+func TestExecuteOnNewTemplate(t *testing.T) {
+ // This is issue 3872.
+ New("Name").Templates()
+ // This is issue 11379.
+ // new(Template).Templates() // TODO: crashes
+ // new(Template).Parse("") // TODO: crashes
+ // new(Template).New("abc").Parse("") // TODO: crashes
+ // new(Template).Execute(nil, nil) // TODO: crashes; returns an error (but does not crash)
+ // new(Template).ExecuteTemplate(nil, "XXX", nil) // TODO: crashes; returns an error (but does not crash)
+}
+
+const testTemplates = `{{define "one"}}one{{end}}{{define "two"}}two{{end}}`
+
+func TestMessageForExecuteEmpty(t *testing.T) {
+ // Test a truly empty template.
+ tmpl := New("empty")
+ var b bytes.Buffer
+ err := tmpl.Execute(&b, 0)
+ if err == nil {
+ t.Fatal("expected initial error")
+ }
+ got := err.Error()
+ want := `template: "empty" is an incomplete or empty template` // NOTE: text/template has extra "empty: " in message
+ if got != want {
+ t.Errorf("expected error %s got %s", want, got)
+ }
+
+ // Add a non-empty template to check that the error is helpful.
+ tmpl = New("empty")
+ tests, err := New("").Parse(testTemplates)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tmpl.AddParseTree("secondary", tests.Tree)
+ err = tmpl.Execute(&b, 0)
+ if err == nil {
+ t.Fatal("expected second error")
+ }
+ got = err.Error()
+ if got != want {
+ t.Errorf("expected error %s got %s", want, got)
+ }
+ // Make sure we can execute the secondary.
+ err = tmpl.ExecuteTemplate(&b, "secondary", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestFinalForPrintf(t *testing.T) {
+ tmpl, err := New("").Parse(`{{"x" | printf}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var b bytes.Buffer
+ err = tmpl.Execute(&b, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+type cmpTest struct {
+ expr string
+ truth string
+ ok bool
+}
+
+var cmpTests = []cmpTest{
+ {"eq true true", "true", true},
+ {"eq true false", "false", true},
+ {"eq 1+2i 1+2i", "true", true},
+ {"eq 1+2i 1+3i", "false", true},
+ {"eq 1.5 1.5", "true", true},
+ {"eq 1.5 2.5", "false", true},
+ {"eq 1 1", "true", true},
+ {"eq 1 2", "false", true},
+ {"eq `xy` `xy`", "true", true},
+ {"eq `xy` `xyz`", "false", true},
+ {"eq .Uthree .Uthree", "true", true},
+ {"eq .Uthree .Ufour", "false", true},
+ {"eq 3 4 5 6 3", "true", true},
+ {"eq 3 4 5 6 7", "false", true},
+ {"ne true true", "false", true},
+ {"ne true false", "true", true},
+ {"ne 1+2i 1+2i", "false", true},
+ {"ne 1+2i 1+3i", "true", true},
+ {"ne 1.5 1.5", "false", true},
+ {"ne 1.5 2.5", "true", true},
+ {"ne 1 1", "false", true},
+ {"ne 1 2", "true", true},
+ {"ne `xy` `xy`", "false", true},
+ {"ne `xy` `xyz`", "true", true},
+ {"ne .Uthree .Uthree", "false", true},
+ {"ne .Uthree .Ufour", "true", true},
+ {"lt 1.5 1.5", "false", true},
+ {"lt 1.5 2.5", "true", true},
+ {"lt 1 1", "false", true},
+ {"lt 1 2", "true", true},
+ {"lt `xy` `xy`", "false", true},
+ {"lt `xy` `xyz`", "true", true},
+ {"lt .Uthree .Uthree", "false", true},
+ {"lt .Uthree .Ufour", "true", true},
+ {"le 1.5 1.5", "true", true},
+ {"le 1.5 2.5", "true", true},
+ {"le 2.5 1.5", "false", true},
+ {"le 1 1", "true", true},
+ {"le 1 2", "true", true},
+ {"le 2 1", "false", true},
+ {"le `xy` `xy`", "true", true},
+ {"le `xy` `xyz`", "true", true},
+ {"le `xyz` `xy`", "false", true},
+ {"le .Uthree .Uthree", "true", true},
+ {"le .Uthree .Ufour", "true", true},
+ {"le .Ufour .Uthree", "false", true},
+ {"gt 1.5 1.5", "false", true},
+ {"gt 1.5 2.5", "false", true},
+ {"gt 1 1", "false", true},
+ {"gt 2 1", "true", true},
+ {"gt 1 2", "false", true},
+ {"gt `xy` `xy`", "false", true},
+ {"gt `xy` `xyz`", "false", true},
+ {"gt .Uthree .Uthree", "false", true},
+ {"gt .Uthree .Ufour", "false", true},
+ {"gt .Ufour .Uthree", "true", true},
+ {"ge 1.5 1.5", "true", true},
+ {"ge 1.5 2.5", "false", true},
+ {"ge 2.5 1.5", "true", true},
+ {"ge 1 1", "true", true},
+ {"ge 1 2", "false", true},
+ {"ge 2 1", "true", true},
+ {"ge `xy` `xy`", "true", true},
+ {"ge `xy` `xyz`", "false", true},
+ {"ge `xyz` `xy`", "true", true},
+ {"ge .Uthree .Uthree", "true", true},
+ {"ge .Uthree .Ufour", "false", true},
+ {"ge .Ufour .Uthree", "true", true},
+ // Mixing signed and unsigned integers.
+ {"eq .Uthree .Three", "true", true},
+ {"eq .Three .Uthree", "true", true},
+ {"le .Uthree .Three", "true", true},
+ {"le .Three .Uthree", "true", true},
+ {"ge .Uthree .Three", "true", true},
+ {"ge .Three .Uthree", "true", true},
+ {"lt .Uthree .Three", "false", true},
+ {"lt .Three .Uthree", "false", true},
+ {"gt .Uthree .Three", "false", true},
+ {"gt .Three .Uthree", "false", true},
+ {"eq .Ufour .Three", "false", true},
+ {"lt .Ufour .Three", "false", true},
+ {"gt .Ufour .Three", "true", true},
+ {"eq .NegOne .Uthree", "false", true},
+ {"eq .Uthree .NegOne", "false", true},
+ {"ne .NegOne .Uthree", "true", true},
+ {"ne .Uthree .NegOne", "true", true},
+ {"lt .NegOne .Uthree", "true", true},
+ {"lt .Uthree .NegOne", "false", true},
+ {"le .NegOne .Uthree", "true", true},
+ {"le .Uthree .NegOne", "false", true},
+ {"gt .NegOne .Uthree", "false", true},
+ {"gt .Uthree .NegOne", "true", true},
+ {"ge .NegOne .Uthree", "false", true},
+ {"ge .Uthree .NegOne", "true", true},
+ {"eq (index `x` 0) 'x'", "true", true}, // The example that triggered this rule.
+ {"eq (index `x` 0) 'y'", "false", true},
+ {"eq .V1 .V2", "true", true},
+ {"eq .Ptr .Ptr", "true", true},
+ {"eq .Ptr .NilPtr", "false", true},
+ {"eq .NilPtr .NilPtr", "true", true},
+ {"eq .Iface1 .Iface1", "true", true},
+ {"eq .Iface1 .Iface2", "false", true},
+ {"eq .Iface2 .Iface2", "true", true},
+ // Errors
+ {"eq `xy` 1", "", false}, // Different types.
+ {"eq 2 2.0", "", false}, // Different types.
+ {"lt true true", "", false}, // Unordered types.
+ {"lt 1+0i 1+0i", "", false}, // Unordered types.
+ {"eq .Ptr 1", "", false}, // Incompatible types.
+ {"eq .Ptr .NegOne", "", false}, // Incompatible types.
+ {"eq .Map .Map", "", false}, // Uncomparable types.
+ {"eq .Map .V1", "", false}, // Uncomparable types.
+}
+
+func TestComparison(t *testing.T) {
+ b := new(bytes.Buffer)
+ var cmpStruct = struct {
+ Uthree, Ufour uint
+ NegOne, Three int
+ Ptr, NilPtr *int
+ Map map[int]int
+ V1, V2 V
+ Iface1, Iface2 fmt.Stringer
+ }{
+ Uthree: 3,
+ Ufour: 4,
+ NegOne: -1,
+ Three: 3,
+ Ptr: new(int),
+ Iface1: b,
+ }
+ for _, test := range cmpTests {
+ text := fmt.Sprintf("{{if %s}}true{{else}}false{{end}}", test.expr)
+ tmpl, err := New("empty").Parse(text)
+ if err != nil {
+ t.Fatalf("%q: %s", test.expr, err)
+ }
+ b.Reset()
+ err = tmpl.Execute(b, &cmpStruct)
+ if test.ok && err != nil {
+ t.Errorf("%s errored incorrectly: %s", test.expr, err)
+ continue
+ }
+ if !test.ok && err == nil {
+ t.Errorf("%s did not error", test.expr)
+ continue
+ }
+ if b.String() != test.truth {
+ t.Errorf("%s: want %s; got %s", test.expr, test.truth, b.String())
+ }
+ }
+}
+
+func TestMissingMapKey(t *testing.T) {
+ data := map[string]int{
+ "x": 99,
+ }
+ tmpl, err := New("t1").Parse("{{.x}} {{.y}}")
+ if err != nil {
+ t.Fatal(err)
+ }
+ var b bytes.Buffer
+ // By default, just get "<no value>" // NOTE: not in html/template, get empty string
+ err = tmpl.Execute(&b, data)
+ if err != nil {
+ t.Fatal(err)
+ }
+ want := "99 "
+ got := b.String()
+ if got != want {
+ t.Errorf("got %q; expected %q", got, want)
+ }
+ // Same if we set the option explicitly to the default.
+ tmpl.Option("missingkey=default")
+ b.Reset()
+ err = tmpl.Execute(&b, data)
+ if err != nil {
+ t.Fatal("default:", err)
+ }
+ got = b.String()
+ if got != want {
+ t.Errorf("got %q; expected %q", got, want)
+ }
+ // Next we ask for a zero value
+ tmpl.Option("missingkey=zero")
+ b.Reset()
+ err = tmpl.Execute(&b, data)
+ if err != nil {
+ t.Fatal("zero:", err)
+ }
+ want = "99 0"
+ got = b.String()
+ if got != want {
+ t.Errorf("got %q; expected %q", got, want)
+ }
+ // Now we ask for an error.
+ tmpl.Option("missingkey=error")
+ err = tmpl.Execute(&b, data)
+ if err == nil {
+ t.Errorf("expected error; got none")
+ }
+ // same Option, but now a nil interface: ask for an error
+ err = tmpl.Execute(&b, nil)
+ t.Log(err)
+ if err == nil {
+ t.Errorf("expected error for nil-interface; got none")
+ }
+}
+
+// Test that the error message for multiline unterminated string
+// refers to the line number of the opening quote.
+func TestUnterminatedStringError(t *testing.T) {
+ _, err := New("X").Parse("hello\n\n{{`unterminated\n\n\n\n}}\n some more\n\n")
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ str := err.Error()
+ if !strings.Contains(str, "X:3: unterminated raw quoted string") {
+ t.Fatalf("unexpected error: %s", str)
+ }
+}
+
+const alwaysErrorText = "always be failing"
+
+var alwaysError = errors.New(alwaysErrorText)
+
+type ErrorWriter int
+
+func (e ErrorWriter) Write(p []byte) (int, error) {
+ return 0, alwaysError
+}
+
+func TestExecuteGivesExecError(t *testing.T) {
+ // First, a non-execution error shouldn't be an ExecError.
+ tmpl, err := New("X").Parse("hello")
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = tmpl.Execute(ErrorWriter(0), 0)
+ if err == nil {
+ t.Fatal("expected error; got none")
+ }
+ if err.Error() != alwaysErrorText {
+ t.Errorf("expected %q error; got %q", alwaysErrorText, err)
+ }
+ // This one should be an ExecError.
+ tmpl, err = New("X").Parse("hello, {{.X.Y}}")
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = tmpl.Execute(io.Discard, 0)
+ if err == nil {
+ t.Fatal("expected error; got none")
+ }
+ eerr, ok := err.(template.ExecError)
+ if !ok {
+ t.Fatalf("did not expect ExecError %s", eerr)
+ }
+ expect := "field X in type int"
+ if !strings.Contains(err.Error(), expect) {
+ t.Errorf("expected %q; got %q", expect, err)
+ }
+}
+
+func funcNameTestFunc() int {
+ return 0
+}
+
+func TestGoodFuncNames(t *testing.T) {
+ names := []string{
+ "_",
+ "a",
+ "a1",
+ "a1",
+ "Ӵ",
+ }
+ for _, name := range names {
+ tmpl := New("X").Funcs(
+ FuncMap{
+ name: funcNameTestFunc,
+ },
+ )
+ if tmpl == nil {
+ t.Fatalf("nil result for %q", name)
+ }
+ }
+}
+
+func TestBadFuncNames(t *testing.T) {
+ names := []string{
+ "",
+ "2",
+ "a-b",
+ }
+ for _, name := range names {
+ testBadFuncName(name, t)
+ }
+}
+
+func testBadFuncName(name string, t *testing.T) {
+ t.Helper()
+ defer func() {
+ recover()
+ }()
+ New("X").Funcs(
+ FuncMap{
+ name: funcNameTestFunc,
+ },
+ )
+ // If we get here, the name did not cause a panic, which is how Funcs
+ // reports an error.
+ t.Errorf("%q succeeded incorrectly as function name", name)
+}
+
+func TestBlock(t *testing.T) {
+ const (
+ input = `a({{block "inner" .}}bar({{.}})baz{{end}})b`
+ want = `a(bar(hello)baz)b`
+ overlay = `{{define "inner"}}foo({{.}})bar{{end}}`
+ want2 = `a(foo(goodbye)bar)b`
+ )
+ tmpl, err := New("outer").Parse(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tmpl2, err := Must(tmpl.Clone()).Parse(overlay)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var buf bytes.Buffer
+ if err := tmpl.Execute(&buf, "hello"); err != nil {
+ t.Fatal(err)
+ }
+ if got := buf.String(); got != want {
+ t.Errorf("got %q, want %q", got, want)
+ }
+
+ buf.Reset()
+ if err := tmpl2.Execute(&buf, "goodbye"); err != nil {
+ t.Fatal(err)
+ }
+ if got := buf.String(); got != want2 {
+ t.Errorf("got %q, want %q", got, want2)
+ }
+}
+
+func TestEvalFieldErrors(t *testing.T) {
+ tests := []struct {
+ name, src string
+ value any
+ want string
+ }{
+ {
+ // Check that calling an invalid field on nil pointer
+ // prints a field error instead of a distracting nil
+ // pointer error. https://golang.org/issue/15125
+ "MissingFieldOnNil",
+ "{{.MissingField}}",
+ (*T)(nil),
+ "can't evaluate field MissingField in type *template.T",
+ },
+ {
+ "MissingFieldOnNonNil",
+ "{{.MissingField}}",
+ &T{},
+ "can't evaluate field MissingField in type *template.T",
+ },
+ {
+ "ExistingFieldOnNil",
+ "{{.X}}",
+ (*T)(nil),
+ "nil pointer evaluating *template.T.X",
+ },
+ {
+ "MissingKeyOnNilMap",
+ "{{.MissingKey}}",
+ (*map[string]string)(nil),
+ "nil pointer evaluating *map[string]string.MissingKey",
+ },
+ {
+ "MissingKeyOnNilMapPtr",
+ "{{.MissingKey}}",
+ (*map[string]string)(nil),
+ "nil pointer evaluating *map[string]string.MissingKey",
+ },
+ {
+ "MissingKeyOnMapPtrToNil",
+ "{{.MissingKey}}",
+ &map[string]string{},
+ "<nil>",
+ },
+ }
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ tmpl := Must(New("tmpl").Parse(tc.src))
+ err := tmpl.Execute(io.Discard, tc.value)
+ got := "<nil>"
+ if err != nil {
+ got = err.Error()
+ }
+ if !strings.HasSuffix(got, tc.want) {
+ t.Fatalf("got error %q, want %q", got, tc.want)
+ }
+ })
+ }
+}
+
+func TestMaxExecDepth(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping in -short mode")
+ }
+ tmpl := Must(New("tmpl").Parse(`{{template "tmpl" .}}`))
+ err := tmpl.Execute(io.Discard, nil)
+ got := "<nil>"
+ if err != nil {
+ got = err.Error()
+ }
+ const want = "exceeded maximum template depth"
+ if !strings.Contains(got, want) {
+ t.Errorf("got error %q; want %q", got, want)
+ }
+}
+
+func TestAddrOfIndex(t *testing.T) {
+ // golang.org/issue/14916.
+ // Before index worked on reflect.Values, the .String could not be
+ // found on the (incorrectly unaddressable) V value,
+ // in contrast to range, which worked fine.
+ // Also testing that passing a reflect.Value to tmpl.Execute works.
+ texts := []string{
+ `{{range .}}{{.String}}{{end}}`,
+ `{{with index . 0}}{{.String}}{{end}}`,
+ }
+ for _, text := range texts {
+ tmpl := Must(New("tmpl").Parse(text))
+ var buf bytes.Buffer
+ err := tmpl.Execute(&buf, reflect.ValueOf([]V{{1}}))
+ if err != nil {
+ t.Fatalf("%s: Execute: %v", text, err)
+ }
+ if buf.String() != "&lt;1&gt;" {
+ t.Fatalf("%s: template output = %q, want %q", text, &buf, "&lt;1&gt;")
+ }
+ }
+}
+
+func TestInterfaceValues(t *testing.T) {
+ // golang.org/issue/17714.
+ // Before index worked on reflect.Values, interface values
+ // were always implicitly promoted to the underlying value,
+ // except that nil interfaces were promoted to the zero reflect.Value.
+ // Eliminating a round trip to interface{} and back to reflect.Value
+ // eliminated this promotion, breaking these cases.
+ tests := []struct {
+ text string
+ out string
+ }{
+ {`{{index .Nil 1}}`, "ERROR: index of untyped nil"},
+ {`{{index .Slice 2}}`, "2"},
+ {`{{index .Slice .Two}}`, "2"},
+ {`{{call .Nil 1}}`, "ERROR: call of nil"},
+ {`{{call .PlusOne 1}}`, "2"},
+ {`{{call .PlusOne .One}}`, "2"},
+ {`{{and (index .Slice 0) true}}`, "0"},
+ {`{{and .Zero true}}`, "0"},
+ {`{{and (index .Slice 1) false}}`, "false"},
+ {`{{and .One false}}`, "false"},
+ {`{{or (index .Slice 0) false}}`, "false"},
+ {`{{or .Zero false}}`, "false"},
+ {`{{or (index .Slice 1) true}}`, "1"},
+ {`{{or .One true}}`, "1"},
+ {`{{not (index .Slice 0)}}`, "true"},
+ {`{{not .Zero}}`, "true"},
+ {`{{not (index .Slice 1)}}`, "false"},
+ {`{{not .One}}`, "false"},
+ {`{{eq (index .Slice 0) .Zero}}`, "true"},
+ {`{{eq (index .Slice 1) .One}}`, "true"},
+ {`{{ne (index .Slice 0) .Zero}}`, "false"},
+ {`{{ne (index .Slice 1) .One}}`, "false"},
+ {`{{ge (index .Slice 0) .One}}`, "false"},
+ {`{{ge (index .Slice 1) .Zero}}`, "true"},
+ {`{{gt (index .Slice 0) .One}}`, "false"},
+ {`{{gt (index .Slice 1) .Zero}}`, "true"},
+ {`{{le (index .Slice 0) .One}}`, "true"},
+ {`{{le (index .Slice 1) .Zero}}`, "false"},
+ {`{{lt (index .Slice 0) .One}}`, "true"},
+ {`{{lt (index .Slice 1) .Zero}}`, "false"},
+ }
+
+ for _, tt := range tests {
+ tmpl := Must(New("tmpl").Parse(tt.text))
+ var buf bytes.Buffer
+ err := tmpl.Execute(&buf, map[string]any{
+ "PlusOne": func(n int) int {
+ return n + 1
+ },
+ "Slice": []int{0, 1, 2, 3},
+ "One": 1,
+ "Two": 2,
+ "Nil": nil,
+ "Zero": 0,
+ })
+ if strings.HasPrefix(tt.out, "ERROR:") {
+ e := strings.TrimSpace(strings.TrimPrefix(tt.out, "ERROR:"))
+ if err == nil || !strings.Contains(err.Error(), e) {
+ t.Errorf("%s: Execute: %v, want error %q", tt.text, err, e)
+ }
+ continue
+ }
+ if err != nil {
+ t.Errorf("%s: Execute: %v", tt.text, err)
+ continue
+ }
+ if buf.String() != tt.out {
+ t.Errorf("%s: template output = %q, want %q", tt.text, &buf, tt.out)
+ }
+ }
+}
+
+// Check that panics during calls are recovered and returned as errors.
+func TestExecutePanicDuringCall(t *testing.T) {
+ funcs := map[string]any{
+ "doPanic": func() string {
+ panic("custom panic string")
+ },
+ }
+ tests := []struct {
+ name string
+ input string
+ data any
+ wantErr string
+ }{
+ {
+ "direct func call panics",
+ "{{doPanic}}", (*T)(nil),
+ `template: t:1:2: executing "t" at <doPanic>: error calling doPanic: custom panic string`,
+ },
+ {
+ "indirect func call panics",
+ "{{call doPanic}}", (*T)(nil),
+ `template: t:1:7: executing "t" at <doPanic>: error calling doPanic: custom panic string`,
+ },
+ {
+ "direct method call panics",
+ "{{.GetU}}", (*T)(nil),
+ `template: t:1:2: executing "t" at <.GetU>: error calling GetU: runtime error: invalid memory address or nil pointer dereference`,
+ },
+ {
+ "indirect method call panics",
+ "{{call .GetU}}", (*T)(nil),
+ `template: t:1:7: executing "t" at <.GetU>: error calling GetU: runtime error: invalid memory address or nil pointer dereference`,
+ },
+ {
+ "func field call panics",
+ "{{call .PanicFunc}}", tVal,
+ `template: t:1:2: executing "t" at <call .PanicFunc>: error calling call: test panic`,
+ },
+ {
+ "method call on nil interface",
+ "{{.NonEmptyInterfaceNil.Method0}}", tVal,
+ `template: t:1:23: executing "t" at <.NonEmptyInterfaceNil.Method0>: nil pointer evaluating template.I.Method0`,
+ },
+ }
+ for _, tc := range tests {
+ b := new(bytes.Buffer)
+ tmpl, err := New("t").Funcs(funcs).Parse(tc.input)
+ if err != nil {
+ t.Fatalf("parse error: %s", err)
+ }
+ err = tmpl.Execute(b, tc.data)
+ if err == nil {
+ t.Errorf("%s: expected error; got none", tc.name)
+ } else if !strings.Contains(err.Error(), tc.wantErr) {
+ if *debug {
+ fmt.Printf("%s: test execute error: %s\n", tc.name, err)
+ }
+ t.Errorf("%s: expected error:\n%s\ngot:\n%s", tc.name, tc.wantErr, err)
+ }
+ }
+}
+
+// Issue 31810. Check that a parenthesized first argument behaves properly.
+func TestIssue31810(t *testing.T) {
+ t.Skip("broken in html/template")
+
+ // A simple value with no arguments is fine.
+ var b bytes.Buffer
+ const text = "{{ (.) }}"
+ tmpl, err := New("").Parse(text)
+ if err != nil {
+ t.Error(err)
+ }
+ err = tmpl.Execute(&b, "result")
+ if err != nil {
+ t.Error(err)
+ }
+ if b.String() != "result" {
+ t.Errorf("%s got %q, expected %q", text, b.String(), "result")
+ }
+
+ // Even a plain function fails - need to use call.
+ f := func() string { return "result" }
+ b.Reset()
+ err = tmpl.Execute(&b, f)
+ if err == nil {
+ t.Error("expected error with no call, got none")
+ }
+
+ // Works if the function is explicitly called.
+ const textCall = "{{ (call .) }}"
+ tmpl, err = New("").Parse(textCall)
+ b.Reset()
+ err = tmpl.Execute(&b, f)
+ if err != nil {
+ t.Error(err)
+ }
+ if b.String() != "result" {
+ t.Errorf("%s got %q, expected %q", textCall, b.String(), "result")
+ }
+}
+
+// Issue 39807. There was a race applying escapeTemplate.
+
+const raceText = `
+{{- define "jstempl" -}}
+var v = "v";
+{{- end -}}
+<script type="application/javascript">
+{{ template "jstempl" $ }}
+</script>
+`
+
+func TestEscapeRace(t *testing.T) {
+ tmpl := New("")
+ _, err := tmpl.New("templ.html").Parse(raceText)
+ if err != nil {
+ t.Fatal(err)
+ }
+ const count = 20
+ for i := 0; i < count; i++ {
+ _, err := tmpl.New(fmt.Sprintf("x%d.html", i)).Parse(`{{ template "templ.html" .}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+
+ var wg sync.WaitGroup
+ for i := 0; i < 10; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for j := 0; j < count; j++ {
+ sub := tmpl.Lookup(fmt.Sprintf("x%d.html", j))
+ if err := sub.Execute(io.Discard, nil); err != nil {
+ t.Error(err)
+ }
+ }
+ }()
+ }
+ wg.Wait()
+}
+
+func TestRecursiveExecute(t *testing.T) {
+ tmpl := New("")
+
+ recur := func() (htmltemplate.HTML, error) {
+ var sb strings.Builder
+ if err := tmpl.ExecuteTemplate(&sb, "subroutine", nil); err != nil {
+ t.Fatal(err)
+ }
+ return htmltemplate.HTML(sb.String()), nil
+ }
+
+ m := FuncMap{
+ "recur": recur,
+ }
+
+ top, err := tmpl.New("x.html").Funcs(m).Parse(`{{recur}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = tmpl.New("subroutine").Parse(`<a href="/x?p={{"'a<b'"}}">`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if err := top.Execute(io.Discard, nil); err != nil {
+ t.Fatal(err)
+ }
+}
+
+// recursiveInvoker is for TestRecursiveExecuteViaMethod.
+type recursiveInvoker struct {
+ t *testing.T
+ tmpl *Template
+}
+
+func (r *recursiveInvoker) Recur() (string, error) {
+ var sb strings.Builder
+ if err := r.tmpl.ExecuteTemplate(&sb, "subroutine", nil); err != nil {
+ r.t.Fatal(err)
+ }
+ return sb.String(), nil
+}
+
+func TestRecursiveExecuteViaMethod(t *testing.T) {
+ tmpl := New("")
+ top, err := tmpl.New("x.html").Parse(`{{.Recur}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = tmpl.New("subroutine").Parse(`<a href="/x?p={{"'a<b'"}}">`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ r := &recursiveInvoker{
+ t: t,
+ tmpl: tmpl,
+ }
+ if err := top.Execute(io.Discard, r); err != nil {
+ t.Fatal(err)
+ }
+}
+
+// Issue 43295.
+func TestTemplateFuncsAfterClone(t *testing.T) {
+ s := `{{ f . }}`
+ want := "test"
+ orig := New("orig").Funcs(map[string]any{
+ "f": func(in string) string {
+ return in
+ },
+ }).New("child")
+
+ overviewTmpl := Must(Must(orig.Clone()).Parse(s))
+ var out strings.Builder
+ if err := overviewTmpl.Execute(&out, want); err != nil {
+ t.Fatal(err)
+ }
+ if got := out.String(); got != want {
+ t.Fatalf("got %q; want %q", got, want)
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/html.go b/tpl/internal/go_templates/htmltemplate/html.go
new file mode 100644
index 000000000..19bd0ccb2
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/html.go
@@ -0,0 +1,265 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "unicode/utf8"
+)
+
+// htmlNospaceEscaper escapes for inclusion in unquoted attribute values.
+func htmlNospaceEscaper(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeHTML {
+ return htmlReplacer(stripTags(s), htmlNospaceNormReplacementTable, false)
+ }
+ return htmlReplacer(s, htmlNospaceReplacementTable, false)
+}
+
+// attrEscaper escapes for inclusion in quoted attribute values.
+func attrEscaper(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeHTML {
+ return htmlReplacer(stripTags(s), htmlNormReplacementTable, true)
+ }
+ return htmlReplacer(s, htmlReplacementTable, true)
+}
+
+// rcdataEscaper escapes for inclusion in an RCDATA element body.
+func rcdataEscaper(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeHTML {
+ return htmlReplacer(s, htmlNormReplacementTable, true)
+ }
+ return htmlReplacer(s, htmlReplacementTable, true)
+}
+
+// htmlEscaper escapes for inclusion in HTML text.
+func htmlEscaper(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeHTML {
+ return s
+ }
+ return htmlReplacer(s, htmlReplacementTable, true)
+}
+
+// htmlReplacementTable contains the runes that need to be escaped
+// inside a quoted attribute value or in a text node.
+var htmlReplacementTable = []string{
+ // https://www.w3.org/TR/html5/syntax.html#attribute-value-(unquoted)-state
+ // U+0000 NULL Parse error. Append a U+FFFD REPLACEMENT
+ // CHARACTER character to the current attribute's value.
+ // "
+ // and similarly
+ // https://www.w3.org/TR/html5/syntax.html#before-attribute-value-state
+ 0: "\uFFFD",
+ '"': "&#34;",
+ '&': "&amp;",
+ '\'': "&#39;",
+ '+': "&#43;",
+ '<': "&lt;",
+ '>': "&gt;",
+}
+
+// htmlNormReplacementTable is like htmlReplacementTable but without '&' to
+// avoid over-encoding existing entities.
+var htmlNormReplacementTable = []string{
+ 0: "\uFFFD",
+ '"': "&#34;",
+ '\'': "&#39;",
+ '+': "&#43;",
+ '<': "&lt;",
+ '>': "&gt;",
+}
+
+// htmlNospaceReplacementTable contains the runes that need to be escaped
+// inside an unquoted attribute value.
+// The set of runes escaped is the union of the HTML specials and
+// those determined by running the JS below in browsers:
+// <div id=d></div>
+// <script>(function () {
+// var a = [], d = document.getElementById("d"), i, c, s;
+// for (i = 0; i < 0x10000; ++i) {
+// c = String.fromCharCode(i);
+// d.innerHTML = "<span title=" + c + "lt" + c + "></span>"
+// s = d.getElementsByTagName("SPAN")[0];
+// if (!s || s.title !== c + "lt" + c) { a.push(i.toString(16)); }
+// }
+// document.write(a.join(", "));
+// })()</script>
+var htmlNospaceReplacementTable = []string{
+ 0: "&#xfffd;",
+ '\t': "&#9;",
+ '\n': "&#10;",
+ '\v': "&#11;",
+ '\f': "&#12;",
+ '\r': "&#13;",
+ ' ': "&#32;",
+ '"': "&#34;",
+ '&': "&amp;",
+ '\'': "&#39;",
+ '+': "&#43;",
+ '<': "&lt;",
+ '=': "&#61;",
+ '>': "&gt;",
+ // A parse error in the attribute value (unquoted) and
+ // before attribute value states.
+ // Treated as a quoting character by IE.
+ '`': "&#96;",
+}
+
+// htmlNospaceNormReplacementTable is like htmlNospaceReplacementTable but
+// without '&' to avoid over-encoding existing entities.
+var htmlNospaceNormReplacementTable = []string{
+ 0: "&#xfffd;",
+ '\t': "&#9;",
+ '\n': "&#10;",
+ '\v': "&#11;",
+ '\f': "&#12;",
+ '\r': "&#13;",
+ ' ': "&#32;",
+ '"': "&#34;",
+ '\'': "&#39;",
+ '+': "&#43;",
+ '<': "&lt;",
+ '=': "&#61;",
+ '>': "&gt;",
+ // A parse error in the attribute value (unquoted) and
+ // before attribute value states.
+ // Treated as a quoting character by IE.
+ '`': "&#96;",
+}
+
+// htmlReplacer returns s with runes replaced according to replacementTable
+// and when badRunes is true, certain bad runes are allowed through unescaped.
+func htmlReplacer(s string, replacementTable []string, badRunes bool) string {
+ written, b := 0, new(strings.Builder)
+ r, w := rune(0), 0
+ for i := 0; i < len(s); i += w {
+ // Cannot use 'for range s' because we need to preserve the width
+ // of the runes in the input. If we see a decoding error, the input
+ // width will not be utf8.Runelen(r) and we will overrun the buffer.
+ r, w = utf8.DecodeRuneInString(s[i:])
+ if int(r) < len(replacementTable) {
+ if repl := replacementTable[r]; len(repl) != 0 {
+ if written == 0 {
+ b.Grow(len(s))
+ }
+ b.WriteString(s[written:i])
+ b.WriteString(repl)
+ written = i + w
+ }
+ } else if badRunes {
+ // No-op.
+ // IE does not allow these ranges in unquoted attrs.
+ } else if 0xfdd0 <= r && r <= 0xfdef || 0xfff0 <= r && r <= 0xffff {
+ if written == 0 {
+ b.Grow(len(s))
+ }
+ fmt.Fprintf(b, "%s&#x%x;", s[written:i], r)
+ written = i + w
+ }
+ }
+ if written == 0 {
+ return s
+ }
+ b.WriteString(s[written:])
+ return b.String()
+}
+
+// stripTags takes a snippet of HTML and returns only the text content.
+// For example, `<b>&iexcl;Hi!</b> <script>...</script>` -> `&iexcl;Hi! `.
+func stripTags(html string) string {
+ var b bytes.Buffer
+ s, c, i, allText := []byte(html), context{}, 0, true
+ // Using the transition funcs helps us avoid mangling
+ // `<div title="1>2">` or `I <3 Ponies!`.
+ for i != len(s) {
+ if c.delim == delimNone {
+ st := c.state
+ // Use RCDATA instead of parsing into JS or CSS styles.
+ if c.element != elementNone && !isInTag(st) {
+ st = stateRCDATA
+ }
+ d, nread := transitionFunc[st](c, s[i:])
+ i1 := i + nread
+ if c.state == stateText || c.state == stateRCDATA {
+ // Emit text up to the start of the tag or comment.
+ j := i1
+ if d.state != c.state {
+ for j1 := j - 1; j1 >= i; j1-- {
+ if s[j1] == '<' {
+ j = j1
+ break
+ }
+ }
+ }
+ b.Write(s[i:j])
+ } else {
+ allText = false
+ }
+ c, i = d, i1
+ continue
+ }
+ i1 := i + bytes.IndexAny(s[i:], delimEnds[c.delim])
+ if i1 < i {
+ break
+ }
+ if c.delim != delimSpaceOrTagEnd {
+ // Consume any quote.
+ i1++
+ }
+ c, i = context{state: stateTag, element: c.element}, i1
+ }
+ if allText {
+ return html
+ } else if c.state == stateText || c.state == stateRCDATA {
+ b.Write(s[i:])
+ }
+ return b.String()
+}
+
+// htmlNameFilter accepts valid parts of an HTML attribute or tag name or
+// a known-safe HTML attribute.
+func htmlNameFilter(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeHTMLAttr {
+ return s
+ }
+ if len(s) == 0 {
+ // Avoid violation of structure preservation.
+ // <input checked {{.K}}={{.V}}>.
+ // Without this, if .K is empty then .V is the value of
+ // checked, but otherwise .V is the value of the attribute
+ // named .K.
+ return filterFailsafe
+ }
+ s = strings.ToLower(s)
+ if t := attrType(s); t != contentTypePlain {
+ // TODO: Split attr and element name part filters so we can recognize known attributes.
+ return filterFailsafe
+ }
+ for _, r := range s {
+ switch {
+ case '0' <= r && r <= '9':
+ case 'a' <= r && r <= 'z':
+ default:
+ return filterFailsafe
+ }
+ }
+ return s
+}
+
+// commentEscaper returns the empty string regardless of input.
+// Comment content does not correspond to any parsed structure or
+// human-readable content, so the simplest and most secure policy is to drop
+// content interpolated into comments.
+// This approach is equally valid whether or not static comment content is
+// removed from the template.
+func commentEscaper(args ...any) string {
+ return ""
+}
diff --git a/tpl/internal/go_templates/htmltemplate/html_test.go b/tpl/internal/go_templates/htmltemplate/html_test.go
new file mode 100644
index 000000000..2809ee1e2
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/html_test.go
@@ -0,0 +1,100 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "html"
+ "strings"
+ "testing"
+)
+
+func TestHTMLNospaceEscaper(t *testing.T) {
+ input := ("\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ ` !"#$%&'()*+,-./` +
+ `0123456789:;<=>?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\]^_` +
+ "`abcdefghijklmno" +
+ "pqrstuvwxyz{|}~\x7f" +
+ "\u00A0\u0100\u2028\u2029\ufeff\ufdec\U0001D11E" +
+ "erroneous\x960") // keep at the end
+
+ want := ("&#xfffd;\x01\x02\x03\x04\x05\x06\x07" +
+ "\x08&#9;&#10;&#11;&#12;&#13;\x0E\x0F" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17" +
+ "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ `&#32;!&#34;#$%&amp;&#39;()*&#43;,-./` +
+ `0123456789:;&lt;&#61;&gt;?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\]^_` +
+ `&#96;abcdefghijklmno` +
+ `pqrstuvwxyz{|}~` + "\u007f" +
+ "\u00A0\u0100\u2028\u2029\ufeff&#xfdec;\U0001D11E" +
+ "erroneous&#xfffd;0") // keep at the end
+
+ got := htmlNospaceEscaper(input)
+ if got != want {
+ t.Errorf("encode: want\n\t%q\nbut got\n\t%q", want, got)
+ }
+
+ r := strings.NewReplacer("\x00", "\ufffd", "\x96", "\ufffd")
+ got, want = html.UnescapeString(got), r.Replace(input)
+ if want != got {
+ t.Errorf("decode: want\n\t%q\nbut got\n\t%q", want, got)
+ }
+}
+
+func TestStripTags(t *testing.T) {
+ tests := []struct {
+ input, want string
+ }{
+ {"", ""},
+ {"Hello, World!", "Hello, World!"},
+ {"foo&amp;bar", "foo&amp;bar"},
+ {`Hello <a href="www.example.com/">World</a>!`, "Hello World!"},
+ {"Foo <textarea>Bar</textarea> Baz", "Foo Bar Baz"},
+ {"Foo <!-- Bar --> Baz", "Foo Baz"},
+ {"<", "<"},
+ {"foo < bar", "foo < bar"},
+ {`Foo<script type="text/javascript">alert(1337)</script>Bar`, "FooBar"},
+ {`Foo<div title="1>2">Bar`, "FooBar"},
+ {`I <3 Ponies!`, `I <3 Ponies!`},
+ {`<script>foo()</script>`, ``},
+ }
+
+ for _, test := range tests {
+ if got := stripTags(test.input); got != test.want {
+ t.Errorf("%q: want %q, got %q", test.input, test.want, got)
+ }
+ }
+}
+
+func BenchmarkHTMLNospaceEscaper(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ htmlNospaceEscaper("The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>")
+ }
+}
+
+func BenchmarkHTMLNospaceEscaperNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ htmlNospaceEscaper("The_quick,_brown_fox_jumps_over_the_lazy_dog.")
+ }
+}
+
+func BenchmarkStripTags(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ stripTags("The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>")
+ }
+}
+
+func BenchmarkStripTagsNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ stripTags("The quick, brown fox jumps over the lazy dog.")
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/hugo_template.go b/tpl/internal/go_templates/htmltemplate/hugo_template.go
new file mode 100644
index 000000000..99edf8f68
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/hugo_template.go
@@ -0,0 +1,41 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package template
+
+import (
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+)
+
+/*
+
+This files contains the Hugo related addons. All the other files in this
+package is auto generated.
+
+*/
+
+// Export it so we can populate Hugo's func map with it, which makes it faster.
+var GoFuncs = funcMap
+
+// Prepare returns a template ready for execution.
+func (t *Template) Prepare() (*template.Template, error) {
+ if err := t.escape(); err != nil {
+ return nil, err
+ }
+ return t.text, nil
+}
+
+// See https://github.com/golang/go/issues/5884
+func StripTags(html string) string {
+ return stripTags(html)
+}
diff --git a/tpl/internal/go_templates/htmltemplate/js.go b/tpl/internal/go_templates/htmltemplate/js.go
new file mode 100644
index 000000000..6187dc036
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/js.go
@@ -0,0 +1,430 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ htmltemplate "html/template"
+ "reflect"
+ "strings"
+ "unicode/utf8"
+)
+
+// nextJSCtx returns the context that determines whether a slash after the
+// given run of tokens starts a regular expression instead of a division
+// operator: / or /=.
+//
+// This assumes that the token run does not include any string tokens, comment
+// tokens, regular expression literal tokens, or division operators.
+//
+// This fails on some valid but nonsensical JavaScript programs like
+// "x = ++/foo/i" which is quite different than "x++/foo/i", but is not known to
+// fail on any known useful programs. It is based on the draft
+// JavaScript 2.0 lexical grammar and requires one token of lookbehind:
+// https://www.mozilla.org/js/language/js20-2000-07/rationale/syntax.html
+func nextJSCtx(s []byte, preceding jsCtx) jsCtx {
+ s = bytes.TrimRight(s, "\t\n\f\r \u2028\u2029")
+ if len(s) == 0 {
+ return preceding
+ }
+
+ // All cases below are in the single-byte UTF-8 group.
+ switch c, n := s[len(s)-1], len(s); c {
+ case '+', '-':
+ // ++ and -- are not regexp preceders, but + and - are whether
+ // they are used as infix or prefix operators.
+ start := n - 1
+ // Count the number of adjacent dashes or pluses.
+ for start > 0 && s[start-1] == c {
+ start--
+ }
+ if (n-start)&1 == 1 {
+ // Reached for trailing minus signs since "---" is the
+ // same as "-- -".
+ return jsCtxRegexp
+ }
+ return jsCtxDivOp
+ case '.':
+ // Handle "42."
+ if n != 1 && '0' <= s[n-2] && s[n-2] <= '9' {
+ return jsCtxDivOp
+ }
+ return jsCtxRegexp
+ // Suffixes for all punctuators from section 7.7 of the language spec
+ // that only end binary operators not handled above.
+ case ',', '<', '>', '=', '*', '%', '&', '|', '^', '?':
+ return jsCtxRegexp
+ // Suffixes for all punctuators from section 7.7 of the language spec
+ // that are prefix operators not handled above.
+ case '!', '~':
+ return jsCtxRegexp
+ // Matches all the punctuators from section 7.7 of the language spec
+ // that are open brackets not handled above.
+ case '(', '[':
+ return jsCtxRegexp
+ // Matches all the punctuators from section 7.7 of the language spec
+ // that precede expression starts.
+ case ':', ';', '{':
+ return jsCtxRegexp
+ // CAVEAT: the close punctuators ('}', ']', ')') precede div ops and
+ // are handled in the default except for '}' which can precede a
+ // division op as in
+ // ({ valueOf: function () { return 42 } } / 2
+ // which is valid, but, in practice, developers don't divide object
+ // literals, so our heuristic works well for code like
+ // function () { ... } /foo/.test(x) && sideEffect();
+ // The ')' punctuator can precede a regular expression as in
+ // if (b) /foo/.test(x) && ...
+ // but this is much less likely than
+ // (a + b) / c
+ case '}':
+ return jsCtxRegexp
+ default:
+ // Look for an IdentifierName and see if it is a keyword that
+ // can precede a regular expression.
+ j := n
+ for j > 0 && isJSIdentPart(rune(s[j-1])) {
+ j--
+ }
+ if regexpPrecederKeywords[string(s[j:])] {
+ return jsCtxRegexp
+ }
+ }
+ // Otherwise is a punctuator not listed above, or
+ // a string which precedes a div op, or an identifier
+ // which precedes a div op.
+ return jsCtxDivOp
+}
+
+// regexpPrecederKeywords is a set of reserved JS keywords that can precede a
+// regular expression in JS source.
+var regexpPrecederKeywords = map[string]bool{
+ "break": true,
+ "case": true,
+ "continue": true,
+ "delete": true,
+ "do": true,
+ "else": true,
+ "finally": true,
+ "in": true,
+ "instanceof": true,
+ "return": true,
+ "throw": true,
+ "try": true,
+ "typeof": true,
+ "void": true,
+}
+
+var jsonMarshalType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
+
+// indirectToJSONMarshaler returns the value, after dereferencing as many times
+// as necessary to reach the base type (or nil) or an implementation of json.Marshal.
+func indirectToJSONMarshaler(a any) any {
+ // text/template now supports passing untyped nil as a func call
+ // argument, so we must support it. Otherwise we'd panic below, as one
+ // cannot call the Type or Interface methods on an invalid
+ // reflect.Value. See golang.org/issue/18716.
+ if a == nil {
+ return nil
+ }
+
+ v := reflect.ValueOf(a)
+ for !v.Type().Implements(jsonMarshalType) && v.Kind() == reflect.Pointer && !v.IsNil() {
+ v = v.Elem()
+ }
+ return v.Interface()
+}
+
+// jsValEscaper escapes its inputs to a JS Expression (section 11.14) that has
+// neither side-effects nor free variables outside (NaN, Infinity).
+func jsValEscaper(args ...any) string {
+ var a any
+ if len(args) == 1 {
+ a = indirectToJSONMarshaler(args[0])
+ switch t := a.(type) {
+ case htmltemplate.JS:
+ return string(t)
+ case htmltemplate.JSStr:
+ // TODO: normalize quotes.
+ return `"` + string(t) + `"`
+ case json.Marshaler:
+ // Do not treat as a Stringer.
+ case fmt.Stringer:
+ a = t.String()
+ }
+ } else {
+ for i, arg := range args {
+ args[i] = indirectToJSONMarshaler(arg)
+ }
+ a = fmt.Sprint(args...)
+ }
+ // TODO: detect cycles before calling Marshal which loops infinitely on
+ // cyclic data. This may be an unacceptable DoS risk.
+ b, err := json.Marshal(a)
+ if err != nil {
+ // Put a space before comment so that if it is flush against
+ // a division operator it is not turned into a line comment:
+ // x/{{y}}
+ // turning into
+ // x//* error marshaling y:
+ // second line of error message */null
+ return fmt.Sprintf(" /* %s */null ", strings.ReplaceAll(err.Error(), "*/", "* /"))
+ }
+
+ // TODO: maybe post-process output to prevent it from containing
+ // "<!--", "-->", "<![CDATA[", "]]>", or "</script"
+ // in case custom marshalers produce output containing those.
+ // Note: Do not use \x escaping to save bytes because it is not JSON compatible and this escaper
+ // supports ld+json content-type.
+ if len(b) == 0 {
+ // In, `x=y/{{.}}*z` a json.Marshaler that produces "" should
+ // not cause the output `x=y/*z`.
+ return " null "
+ }
+ first, _ := utf8.DecodeRune(b)
+ last, _ := utf8.DecodeLastRune(b)
+ var buf strings.Builder
+ // Prevent IdentifierNames and NumericLiterals from running into
+ // keywords: in, instanceof, typeof, void
+ pad := isJSIdentPart(first) || isJSIdentPart(last)
+ if pad {
+ buf.WriteByte(' ')
+ }
+ written := 0
+ // Make sure that json.Marshal escapes codepoints U+2028 & U+2029
+ // so it falls within the subset of JSON which is valid JS.
+ for i := 0; i < len(b); {
+ rune, n := utf8.DecodeRune(b[i:])
+ repl := ""
+ if rune == 0x2028 {
+ repl = `\u2028`
+ } else if rune == 0x2029 {
+ repl = `\u2029`
+ }
+ if repl != "" {
+ buf.Write(b[written:i])
+ buf.WriteString(repl)
+ written = i + n
+ }
+ i += n
+ }
+ if buf.Len() != 0 {
+ buf.Write(b[written:])
+ if pad {
+ buf.WriteByte(' ')
+ }
+ return buf.String()
+ }
+ return string(b)
+}
+
+// jsStrEscaper produces a string that can be included between quotes in
+// JavaScript source, in JavaScript embedded in an HTML5 <script> element,
+// or in an HTML5 event handler attribute such as onclick.
+func jsStrEscaper(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeJSStr {
+ return replace(s, jsStrNormReplacementTable)
+ }
+ return replace(s, jsStrReplacementTable)
+}
+
+// jsRegexpEscaper behaves like jsStrEscaper but escapes regular expression
+// specials so the result is treated literally when included in a regular
+// expression literal. /foo{{.X}}bar/ matches the string "foo" followed by
+// the literal text of {{.X}} followed by the string "bar".
+func jsRegexpEscaper(args ...any) string {
+ s, _ := stringify(args...)
+ s = replace(s, jsRegexpReplacementTable)
+ if s == "" {
+ // /{{.X}}/ should not produce a line comment when .X == "".
+ return "(?:)"
+ }
+ return s
+}
+
+// replace replaces each rune r of s with replacementTable[r], provided that
+// r < len(replacementTable). If replacementTable[r] is the empty string then
+// no replacement is made.
+// It also replaces runes U+2028 and U+2029 with the raw strings `\u2028` and
+// `\u2029`.
+func replace(s string, replacementTable []string) string {
+ var b strings.Builder
+ r, w, written := rune(0), 0, 0
+ for i := 0; i < len(s); i += w {
+ // See comment in htmlEscaper.
+ r, w = utf8.DecodeRuneInString(s[i:])
+ var repl string
+ switch {
+ case int(r) < len(lowUnicodeReplacementTable):
+ repl = lowUnicodeReplacementTable[r]
+ case int(r) < len(replacementTable) && replacementTable[r] != "":
+ repl = replacementTable[r]
+ case r == '\u2028':
+ repl = `\u2028`
+ case r == '\u2029':
+ repl = `\u2029`
+ default:
+ continue
+ }
+ if written == 0 {
+ b.Grow(len(s))
+ }
+ b.WriteString(s[written:i])
+ b.WriteString(repl)
+ written = i + w
+ }
+ if written == 0 {
+ return s
+ }
+ b.WriteString(s[written:])
+ return b.String()
+}
+
+var lowUnicodeReplacementTable = []string{
+ 0: `\u0000`, 1: `\u0001`, 2: `\u0002`, 3: `\u0003`, 4: `\u0004`, 5: `\u0005`, 6: `\u0006`,
+ '\a': `\u0007`,
+ '\b': `\u0008`,
+ '\t': `\t`,
+ '\n': `\n`,
+ '\v': `\u000b`, // "\v" == "v" on IE 6.
+ '\f': `\f`,
+ '\r': `\r`,
+ 0xe: `\u000e`, 0xf: `\u000f`, 0x10: `\u0010`, 0x11: `\u0011`, 0x12: `\u0012`, 0x13: `\u0013`,
+ 0x14: `\u0014`, 0x15: `\u0015`, 0x16: `\u0016`, 0x17: `\u0017`, 0x18: `\u0018`, 0x19: `\u0019`,
+ 0x1a: `\u001a`, 0x1b: `\u001b`, 0x1c: `\u001c`, 0x1d: `\u001d`, 0x1e: `\u001e`, 0x1f: `\u001f`,
+}
+
+var jsStrReplacementTable = []string{
+ 0: `\u0000`,
+ '\t': `\t`,
+ '\n': `\n`,
+ '\v': `\u000b`, // "\v" == "v" on IE 6.
+ '\f': `\f`,
+ '\r': `\r`,
+ // Encode HTML specials as hex so the output can be embedded
+ // in HTML attributes without further encoding.
+ '"': `\u0022`,
+ '&': `\u0026`,
+ '\'': `\u0027`,
+ '+': `\u002b`,
+ '/': `\/`,
+ '<': `\u003c`,
+ '>': `\u003e`,
+ '\\': `\\`,
+}
+
+// jsStrNormReplacementTable is like jsStrReplacementTable but does not
+// overencode existing escapes since this table has no entry for `\`.
+var jsStrNormReplacementTable = []string{
+ 0: `\u0000`,
+ '\t': `\t`,
+ '\n': `\n`,
+ '\v': `\u000b`, // "\v" == "v" on IE 6.
+ '\f': `\f`,
+ '\r': `\r`,
+ // Encode HTML specials as hex so the output can be embedded
+ // in HTML attributes without further encoding.
+ '"': `\u0022`,
+ '&': `\u0026`,
+ '\'': `\u0027`,
+ '+': `\u002b`,
+ '/': `\/`,
+ '<': `\u003c`,
+ '>': `\u003e`,
+}
+var jsRegexpReplacementTable = []string{
+ 0: `\u0000`,
+ '\t': `\t`,
+ '\n': `\n`,
+ '\v': `\u000b`, // "\v" == "v" on IE 6.
+ '\f': `\f`,
+ '\r': `\r`,
+ // Encode HTML specials as hex so the output can be embedded
+ // in HTML attributes without further encoding.
+ '"': `\u0022`,
+ '$': `\$`,
+ '&': `\u0026`,
+ '\'': `\u0027`,
+ '(': `\(`,
+ ')': `\)`,
+ '*': `\*`,
+ '+': `\u002b`,
+ '-': `\-`,
+ '.': `\.`,
+ '/': `\/`,
+ '<': `\u003c`,
+ '>': `\u003e`,
+ '?': `\?`,
+ '[': `\[`,
+ '\\': `\\`,
+ ']': `\]`,
+ '^': `\^`,
+ '{': `\{`,
+ '|': `\|`,
+ '}': `\}`,
+}
+
+// isJSIdentPart reports whether the given rune is a JS identifier part.
+// It does not handle all the non-Latin letters, joiners, and combining marks,
+// but it does handle every codepoint that can occur in a numeric literal or
+// a keyword.
+func isJSIdentPart(r rune) bool {
+ switch {
+ case r == '$':
+ return true
+ case '0' <= r && r <= '9':
+ return true
+ case 'A' <= r && r <= 'Z':
+ return true
+ case r == '_':
+ return true
+ case 'a' <= r && r <= 'z':
+ return true
+ }
+ return false
+}
+
+// isJSType reports whether the given MIME type should be considered JavaScript.
+//
+// It is used to determine whether a script tag with a type attribute is a javascript container.
+func isJSType(mimeType string) bool {
+ // per
+ // https://www.w3.org/TR/html5/scripting-1.html#attr-script-type
+ // https://tools.ietf.org/html/rfc7231#section-3.1.1
+ // https://tools.ietf.org/html/rfc4329#section-3
+ // https://www.ietf.org/rfc/rfc4627.txt
+ // discard parameters
+ mimeType, _, _ = strings.Cut(mimeType, ";")
+ mimeType = strings.ToLower(mimeType)
+ mimeType = strings.TrimSpace(mimeType)
+ switch mimeType {
+ case
+ "application/ecmascript",
+ "application/javascript",
+ "application/json",
+ "application/ld+json",
+ "application/x-ecmascript",
+ "application/x-javascript",
+ "module",
+ "text/ecmascript",
+ "text/javascript",
+ "text/javascript1.0",
+ "text/javascript1.1",
+ "text/javascript1.2",
+ "text/javascript1.3",
+ "text/javascript1.4",
+ "text/javascript1.5",
+ "text/jscript",
+ "text/livescript",
+ "text/x-ecmascript",
+ "text/x-javascript":
+ return true
+ default:
+ return false
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/js_test.go b/tpl/internal/go_templates/htmltemplate/js_test.go
new file mode 100644
index 000000000..92073b37a
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/js_test.go
@@ -0,0 +1,426 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "math"
+ "strings"
+ "testing"
+)
+
+func TestNextJsCtx(t *testing.T) {
+ tests := []struct {
+ jsCtx jsCtx
+ s string
+ }{
+ // Statement terminators precede regexps.
+ {jsCtxRegexp, ";"},
+ // This is not airtight.
+ // ({ valueOf: function () { return 1 } } / 2)
+ // is valid JavaScript but in practice, devs do not do this.
+ // A block followed by a statement starting with a RegExp is
+ // much more common:
+ // while (x) {...} /foo/.test(x) || panic()
+ {jsCtxRegexp, "}"},
+ // But member, call, grouping, and array expression terminators
+ // precede div ops.
+ {jsCtxDivOp, ")"},
+ {jsCtxDivOp, "]"},
+ // At the start of a primary expression, array, or expression
+ // statement, expect a regexp.
+ {jsCtxRegexp, "("},
+ {jsCtxRegexp, "["},
+ {jsCtxRegexp, "{"},
+ // Assignment operators precede regexps as do all exclusively
+ // prefix and binary operators.
+ {jsCtxRegexp, "="},
+ {jsCtxRegexp, "+="},
+ {jsCtxRegexp, "*="},
+ {jsCtxRegexp, "*"},
+ {jsCtxRegexp, "!"},
+ // Whether the + or - is infix or prefix, it cannot precede a
+ // div op.
+ {jsCtxRegexp, "+"},
+ {jsCtxRegexp, "-"},
+ // An incr/decr op precedes a div operator.
+ // This is not airtight. In (g = ++/h/i) a regexp follows a
+ // pre-increment operator, but in practice devs do not try to
+ // increment or decrement regular expressions.
+ // (g++/h/i) where ++ is a postfix operator on g is much more
+ // common.
+ {jsCtxDivOp, "--"},
+ {jsCtxDivOp, "++"},
+ {jsCtxDivOp, "x--"},
+ // When we have many dashes or pluses, then they are grouped
+ // left to right.
+ {jsCtxRegexp, "x---"}, // A postfix -- then a -.
+ // return followed by a slash returns the regexp literal or the
+ // slash starts a regexp literal in an expression statement that
+ // is dead code.
+ {jsCtxRegexp, "return"},
+ {jsCtxRegexp, "return "},
+ {jsCtxRegexp, "return\t"},
+ {jsCtxRegexp, "return\n"},
+ {jsCtxRegexp, "return\u2028"},
+ // Identifiers can be divided and cannot validly be preceded by
+ // a regular expressions. Semicolon insertion cannot happen
+ // between an identifier and a regular expression on a new line
+ // because the one token lookahead for semicolon insertion has
+ // to conclude that it could be a div binary op and treat it as
+ // such.
+ {jsCtxDivOp, "x"},
+ {jsCtxDivOp, "x "},
+ {jsCtxDivOp, "x\t"},
+ {jsCtxDivOp, "x\n"},
+ {jsCtxDivOp, "x\u2028"},
+ {jsCtxDivOp, "preturn"},
+ // Numbers precede div ops.
+ {jsCtxDivOp, "0"},
+ // Dots that are part of a number are div preceders.
+ {jsCtxDivOp, "0."},
+ }
+
+ for _, test := range tests {
+ if nextJSCtx([]byte(test.s), jsCtxRegexp) != test.jsCtx {
+ t.Errorf("want %s got %q", test.jsCtx, test.s)
+ }
+ if nextJSCtx([]byte(test.s), jsCtxDivOp) != test.jsCtx {
+ t.Errorf("want %s got %q", test.jsCtx, test.s)
+ }
+ }
+
+ if nextJSCtx([]byte(" "), jsCtxRegexp) != jsCtxRegexp {
+ t.Error("Blank tokens")
+ }
+
+ if nextJSCtx([]byte(" "), jsCtxDivOp) != jsCtxDivOp {
+ t.Error("Blank tokens")
+ }
+}
+
+func TestJSValEscaper(t *testing.T) {
+ tests := []struct {
+ x any
+ js string
+ }{
+ {int(42), " 42 "},
+ {uint(42), " 42 "},
+ {int16(42), " 42 "},
+ {uint16(42), " 42 "},
+ {int32(-42), " -42 "},
+ {uint32(42), " 42 "},
+ {int16(-42), " -42 "},
+ {uint16(42), " 42 "},
+ {int64(-42), " -42 "},
+ {uint64(42), " 42 "},
+ {uint64(1) << 53, " 9007199254740992 "},
+ // ulp(1 << 53) > 1 so this loses precision in JS
+ // but it is still a representable integer literal.
+ {uint64(1)<<53 + 1, " 9007199254740993 "},
+ {float32(1.0), " 1 "},
+ {float32(-1.0), " -1 "},
+ {float32(0.5), " 0.5 "},
+ {float32(-0.5), " -0.5 "},
+ {float32(1.0) / float32(256), " 0.00390625 "},
+ {float32(0), " 0 "},
+ {math.Copysign(0, -1), " -0 "},
+ {float64(1.0), " 1 "},
+ {float64(-1.0), " -1 "},
+ {float64(0.5), " 0.5 "},
+ {float64(-0.5), " -0.5 "},
+ {float64(0), " 0 "},
+ {math.Copysign(0, -1), " -0 "},
+ {"", `""`},
+ {"foo", `"foo"`},
+ // Newlines.
+ {"\r\n\u2028\u2029", `"\r\n\u2028\u2029"`},
+ // "\v" == "v" on IE 6 so use "\u000b" instead.
+ {"\t\x0b", `"\t\u000b"`},
+ {struct{ X, Y int }{1, 2}, `{"X":1,"Y":2}`},
+ {[]any{}, "[]"},
+ {[]any{42, "foo", nil}, `[42,"foo",null]`},
+ {[]string{"<!--", "</script>", "-->"}, `["\u003c!--","\u003c/script\u003e","--\u003e"]`},
+ {"<!--", `"\u003c!--"`},
+ {"-->", `"--\u003e"`},
+ {"<![CDATA[", `"\u003c![CDATA["`},
+ {"]]>", `"]]\u003e"`},
+ {"</script", `"\u003c/script"`},
+ {"\U0001D11E", "\"\U0001D11E\""}, // or "\uD834\uDD1E"
+ {nil, " null "},
+ }
+
+ for _, test := range tests {
+ if js := jsValEscaper(test.x); js != test.js {
+ t.Errorf("%+v: want\n\t%q\ngot\n\t%q", test.x, test.js, js)
+ }
+ // Make sure that escaping corner cases are not broken
+ // by nesting.
+ a := []any{test.x}
+ want := "[" + strings.TrimSpace(test.js) + "]"
+ if js := jsValEscaper(a); js != want {
+ t.Errorf("%+v: want\n\t%q\ngot\n\t%q", a, want, js)
+ }
+ }
+}
+
+func TestJSStrEscaper(t *testing.T) {
+ tests := []struct {
+ x any
+ esc string
+ }{
+ {"", ``},
+ {"foo", `foo`},
+ {"\u0000", `\u0000`},
+ {"\t", `\t`},
+ {"\n", `\n`},
+ {"\r", `\r`},
+ {"\u2028", `\u2028`},
+ {"\u2029", `\u2029`},
+ {"\\", `\\`},
+ {"\\n", `\\n`},
+ {"foo\r\nbar", `foo\r\nbar`},
+ // Preserve attribute boundaries.
+ {`"`, `\u0022`},
+ {`'`, `\u0027`},
+ // Allow embedding in HTML without further escaping.
+ {`&amp;`, `\u0026amp;`},
+ // Prevent breaking out of text node and element boundaries.
+ {"</script>", `\u003c\/script\u003e`},
+ {"<![CDATA[", `\u003c![CDATA[`},
+ {"]]>", `]]\u003e`},
+ // https://dev.w3.org/html5/markup/aria/syntax.html#escaping-text-span
+ // "The text in style, script, title, and textarea elements
+ // must not have an escaping text span start that is not
+ // followed by an escaping text span end."
+ // Furthermore, spoofing an escaping text span end could lead
+ // to different interpretation of a </script> sequence otherwise
+ // masked by the escaping text span, and spoofing a start could
+ // allow regular text content to be interpreted as script
+ // allowing script execution via a combination of a JS string
+ // injection followed by an HTML text injection.
+ {"<!--", `\u003c!--`},
+ {"-->", `--\u003e`},
+ // From https://code.google.com/p/doctype/wiki/ArticleUtf7
+ {"+ADw-script+AD4-alert(1)+ADw-/script+AD4-",
+ `\u002bADw-script\u002bAD4-alert(1)\u002bADw-\/script\u002bAD4-`,
+ },
+ // Invalid UTF-8 sequence
+ {"foo\xA0bar", "foo\xA0bar"},
+ // Invalid unicode scalar value.
+ {"foo\xed\xa0\x80bar", "foo\xed\xa0\x80bar"},
+ }
+
+ for _, test := range tests {
+ esc := jsStrEscaper(test.x)
+ if esc != test.esc {
+ t.Errorf("%q: want %q got %q", test.x, test.esc, esc)
+ }
+ }
+}
+
+func TestJSRegexpEscaper(t *testing.T) {
+ tests := []struct {
+ x any
+ esc string
+ }{
+ {"", `(?:)`},
+ {"foo", `foo`},
+ {"\u0000", `\u0000`},
+ {"\t", `\t`},
+ {"\n", `\n`},
+ {"\r", `\r`},
+ {"\u2028", `\u2028`},
+ {"\u2029", `\u2029`},
+ {"\\", `\\`},
+ {"\\n", `\\n`},
+ {"foo\r\nbar", `foo\r\nbar`},
+ // Preserve attribute boundaries.
+ {`"`, `\u0022`},
+ {`'`, `\u0027`},
+ // Allow embedding in HTML without further escaping.
+ {`&amp;`, `\u0026amp;`},
+ // Prevent breaking out of text node and element boundaries.
+ {"</script>", `\u003c\/script\u003e`},
+ {"<![CDATA[", `\u003c!\[CDATA\[`},
+ {"]]>", `\]\]\u003e`},
+ // Escaping text spans.
+ {"<!--", `\u003c!\-\-`},
+ {"-->", `\-\-\u003e`},
+ {"*", `\*`},
+ {"+", `\u002b`},
+ {"?", `\?`},
+ {"[](){}", `\[\]\(\)\{\}`},
+ {"$foo|x.y", `\$foo\|x\.y`},
+ {"x^y", `x\^y`},
+ }
+
+ for _, test := range tests {
+ esc := jsRegexpEscaper(test.x)
+ if esc != test.esc {
+ t.Errorf("%q: want %q got %q", test.x, test.esc, esc)
+ }
+ }
+}
+
+func TestEscapersOnLower7AndSelectHighCodepoints(t *testing.T) {
+ input := ("\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ ` !"#$%&'()*+,-./` +
+ `0123456789:;<=>?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\]^_` +
+ "`abcdefghijklmno" +
+ "pqrstuvwxyz{|}~\x7f" +
+ "\u00A0\u0100\u2028\u2029\ufeff\U0001D11E")
+
+ tests := []struct {
+ name string
+ escaper func(...any) string
+ escaped string
+ }{
+ {
+ "jsStrEscaper",
+ jsStrEscaper,
+ `\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007` +
+ `\u0008\t\n\u000b\f\r\u000e\u000f` +
+ `\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017` +
+ `\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f` +
+ ` !\u0022#$%\u0026\u0027()*\u002b,-.\/` +
+ `0123456789:;\u003c=\u003e?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\\]^_` +
+ "`abcdefghijklmno" +
+ "pqrstuvwxyz{|}~\u007f" +
+ "\u00A0\u0100\\u2028\\u2029\ufeff\U0001D11E",
+ },
+ {
+ "jsRegexpEscaper",
+ jsRegexpEscaper,
+ `\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007` +
+ `\u0008\t\n\u000b\f\r\u000e\u000f` +
+ `\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017` +
+ `\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f` +
+ ` !\u0022#\$%\u0026\u0027\(\)\*\u002b,\-\.\/` +
+ `0123456789:;\u003c=\u003e\?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ\[\\\]\^_` +
+ "`abcdefghijklmno" +
+ `pqrstuvwxyz\{\|\}~` + "\u007f" +
+ "\u00A0\u0100\\u2028\\u2029\ufeff\U0001D11E",
+ },
+ }
+
+ for _, test := range tests {
+ if s := test.escaper(input); s != test.escaped {
+ t.Errorf("%s once: want\n\t%q\ngot\n\t%q", test.name, test.escaped, s)
+ continue
+ }
+
+ // Escape it rune by rune to make sure that any
+ // fast-path checking does not break escaping.
+ var buf bytes.Buffer
+ for _, c := range input {
+ buf.WriteString(test.escaper(string(c)))
+ }
+
+ if s := buf.String(); s != test.escaped {
+ t.Errorf("%s rune-wise: want\n\t%q\ngot\n\t%q", test.name, test.escaped, s)
+ continue
+ }
+ }
+}
+
+func TestIsJsMimeType(t *testing.T) {
+ tests := []struct {
+ in string
+ out bool
+ }{
+ {"application/javascript;version=1.8", true},
+ {"application/javascript;version=1.8;foo=bar", true},
+ {"application/javascript/version=1.8", false},
+ {"text/javascript", true},
+ {"application/json", true},
+ {"application/ld+json", true},
+ {"module", true},
+ }
+
+ for _, test := range tests {
+ if isJSType(test.in) != test.out {
+ t.Errorf("isJSType(%q) = %v, want %v", test.in, !test.out, test.out)
+ }
+ }
+}
+
+func BenchmarkJSValEscaperWithNum(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsValEscaper(3.141592654)
+ }
+}
+
+func BenchmarkJSValEscaperWithStr(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsValEscaper("The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>")
+ }
+}
+
+func BenchmarkJSValEscaperWithStrNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsValEscaper("The quick, brown fox jumps over the lazy dog")
+ }
+}
+
+func BenchmarkJSValEscaperWithObj(b *testing.B) {
+ o := struct {
+ S string
+ N int
+ }{
+ "The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>\u2028",
+ 42,
+ }
+ for i := 0; i < b.N; i++ {
+ jsValEscaper(o)
+ }
+}
+
+func BenchmarkJSValEscaperWithObjNoSpecials(b *testing.B) {
+ o := struct {
+ S string
+ N int
+ }{
+ "The quick, brown fox jumps over the lazy dog",
+ 42,
+ }
+ for i := 0; i < b.N; i++ {
+ jsValEscaper(o)
+ }
+}
+
+func BenchmarkJSStrEscaperNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsStrEscaper("The quick, brown fox jumps over the lazy dog.")
+ }
+}
+
+func BenchmarkJSStrEscaper(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsStrEscaper("The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>")
+ }
+}
+
+func BenchmarkJSRegexpEscaperNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsRegexpEscaper("The quick, brown fox jumps over the lazy dog")
+ }
+}
+
+func BenchmarkJSRegexpEscaper(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ jsRegexpEscaper("The <i>quick</i>,\r\n<span style='color:brown'>brown</span> fox jumps\u2028over the <canine class=\"lazy\">dog</canine>")
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/jsctx_string.go b/tpl/internal/go_templates/htmltemplate/jsctx_string.go
new file mode 100644
index 000000000..dd1d87ee4
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/jsctx_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type jsCtx"; DO NOT EDIT.
+
+package template
+
+import "strconv"
+
+const _jsCtx_name = "jsCtxRegexpjsCtxDivOpjsCtxUnknown"
+
+var _jsCtx_index = [...]uint8{0, 11, 21, 33}
+
+func (i jsCtx) String() string {
+ if i >= jsCtx(len(_jsCtx_index)-1) {
+ return "jsCtx(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _jsCtx_name[_jsCtx_index[i]:_jsCtx_index[i+1]]
+}
diff --git a/tpl/internal/go_templates/htmltemplate/multi_test.go b/tpl/internal/go_templates/htmltemplate/multi_test.go
new file mode 100644
index 000000000..14cd7c766
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/multi_test.go
@@ -0,0 +1,293 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Tests for multiple-template execution, copied from text/template.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "archive/zip"
+ "bytes"
+ "os"
+ "testing"
+
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+var multiExecTests = []execTest{
+ {"empty", "", "", nil, true},
+ {"text", "some text", "some text", nil, true},
+ {"invoke x", `{{template "x" .SI}}`, "TEXT", tVal, true},
+ {"invoke x no args", `{{template "x"}}`, "TEXT", tVal, true},
+ {"invoke dot int", `{{template "dot" .I}}`, "17", tVal, true},
+ {"invoke dot []int", `{{template "dot" .SI}}`, "[3 4 5]", tVal, true},
+ {"invoke dotV", `{{template "dotV" .U}}`, "v", tVal, true},
+ {"invoke nested int", `{{template "nested" .I}}`, "17", tVal, true},
+ {"variable declared by template", `{{template "nested" $x:=.SI}},{{index $x 1}}`, "[3 4 5],4", tVal, true},
+
+ // User-defined function: test argument evaluator.
+ {"testFunc literal", `{{oneArg "joe"}}`, "oneArg=joe", tVal, true},
+ {"testFunc .", `{{oneArg .}}`, "oneArg=joe", "joe", true},
+}
+
+// These strings are also in testdata/*.
+const multiText1 = `
+ {{define "x"}}TEXT{{end}}
+ {{define "dotV"}}{{.V}}{{end}}
+`
+
+const multiText2 = `
+ {{define "dot"}}{{.}}{{end}}
+ {{define "nested"}}{{template "dot" .}}{{end}}
+`
+
+func TestMultiExecute(t *testing.T) {
+ // Declare a couple of templates first.
+ template, err := New("root").Parse(multiText1)
+ if err != nil {
+ t.Fatalf("parse error for 1: %s", err)
+ }
+ _, err = template.Parse(multiText2)
+ if err != nil {
+ t.Fatalf("parse error for 2: %s", err)
+ }
+ testExecute(multiExecTests, template, t)
+}
+
+func TestParseFiles(t *testing.T) {
+ _, err := ParseFiles("DOES NOT EXIST")
+ if err == nil {
+ t.Error("expected error for non-existent file; got none")
+ }
+ template := New("root")
+ _, err = template.ParseFiles("testdata/file1.tmpl", "testdata/file2.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+}
+
+func TestParseGlob(t *testing.T) {
+ _, err := ParseGlob("DOES NOT EXIST")
+ if err == nil {
+ t.Error("expected error for non-existent file; got none")
+ }
+ _, err = New("error").ParseGlob("[x")
+ if err == nil {
+ t.Error("expected error for bad pattern; got none")
+ }
+ template := New("root")
+ _, err = template.ParseGlob("testdata/file*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+}
+
+func TestParseFS(t *testing.T) {
+ fs := os.DirFS("testdata")
+
+ {
+ _, err := ParseFS(fs, "DOES NOT EXIST")
+ if err == nil {
+ t.Error("expected error for non-existent file; got none")
+ }
+ }
+
+ {
+ template := New("root")
+ _, err := template.ParseFS(fs, "file1.tmpl", "file2.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+ }
+
+ {
+ template := New("root")
+ _, err := template.ParseFS(fs, "file*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+ }
+}
+
+// In these tests, actual content (not just template definitions) comes from the parsed files.
+
+var templateFileExecTests = []execTest{
+ {"test", `{{template "tmpl1.tmpl"}}{{template "tmpl2.tmpl"}}`, "template1\n\ny\ntemplate2\n\nx\n", 0, true},
+}
+
+func TestParseFilesWithData(t *testing.T) {
+ template, err := New("root").ParseFiles("testdata/tmpl1.tmpl", "testdata/tmpl2.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(templateFileExecTests, template, t)
+}
+
+func TestParseGlobWithData(t *testing.T) {
+ template, err := New("root").ParseGlob("testdata/tmpl*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(templateFileExecTests, template, t)
+}
+
+func TestParseZipFS(t *testing.T) {
+ z, err := zip.OpenReader("testdata/fs.zip")
+ if err != nil {
+ t.Fatalf("error parsing zip: %v", err)
+ }
+ template, err := New("root").ParseFS(z, "tmpl*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(templateFileExecTests, template, t)
+}
+
+const (
+ cloneText1 = `{{define "a"}}{{template "b"}}{{template "c"}}{{end}}`
+ cloneText2 = `{{define "b"}}b{{end}}`
+ cloneText3 = `{{define "c"}}root{{end}}`
+ cloneText4 = `{{define "c"}}clone{{end}}`
+)
+
+// Issue 7032
+func TestAddParseTreeToUnparsedTemplate(t *testing.T) {
+ master := "{{define \"master\"}}{{end}}"
+ tmpl := New("master")
+ tree, err := parse.Parse("master", master, "", "", nil)
+ if err != nil {
+ t.Fatalf("unexpected parse err: %v", err)
+ }
+ masterTree := tree["master"]
+ tmpl.AddParseTree("master", masterTree) // used to panic
+}
+
+func TestRedefinition(t *testing.T) {
+ var tmpl *Template
+ var err error
+ if tmpl, err = New("tmpl1").Parse(`{{define "test"}}foo{{end}}`); err != nil {
+ t.Fatalf("parse 1: %v", err)
+ }
+ if _, err = tmpl.Parse(`{{define "test"}}bar{{end}}`); err != nil {
+ t.Fatalf("got error %v, expected nil", err)
+ }
+ if _, err = tmpl.New("tmpl2").Parse(`{{define "test"}}bar{{end}}`); err != nil {
+ t.Fatalf("got error %v, expected nil", err)
+ }
+}
+
+// Issue 10879
+func TestEmptyTemplateCloneCrash(t *testing.T) {
+ t1 := New("base")
+ t1.Clone() // used to panic
+}
+
+// Issue 10910, 10926
+func TestTemplateLookUp(t *testing.T) {
+ t.Skip("broken on html/template") // TODO
+ t1 := New("foo")
+ if t1.Lookup("foo") != nil {
+ t.Error("Lookup returned non-nil value for undefined template foo")
+ }
+ t1.New("bar")
+ if t1.Lookup("bar") != nil {
+ t.Error("Lookup returned non-nil value for undefined template bar")
+ }
+ t1.Parse(`{{define "foo"}}test{{end}}`)
+ if t1.Lookup("foo") == nil {
+ t.Error("Lookup returned nil value for defined template")
+ }
+}
+
+func TestParse(t *testing.T) {
+ // In multiple calls to Parse with the same receiver template, only one call
+ // can contain text other than space, comments, and template definitions
+ t1 := New("test")
+ if _, err := t1.Parse(`{{define "test"}}{{end}}`); err != nil {
+ t.Fatalf("parsing test: %s", err)
+ }
+ if _, err := t1.Parse(`{{define "test"}}{{/* this is a comment */}}{{end}}`); err != nil {
+ t.Fatalf("parsing test: %s", err)
+ }
+ if _, err := t1.Parse(`{{define "test"}}foo{{end}}`); err != nil {
+ t.Fatalf("parsing test: %s", err)
+ }
+}
+
+func TestEmptyTemplate(t *testing.T) {
+ cases := []struct {
+ defn []string
+ in string
+ want string
+ }{
+ {[]string{"x", "y"}, "", "y"},
+ {[]string{""}, "once", ""},
+ {[]string{"", ""}, "twice", ""},
+ {[]string{"{{.}}", "{{.}}"}, "twice", "twice"},
+ {[]string{"{{/* a comment */}}", "{{/* a comment */}}"}, "comment", ""},
+ {[]string{"{{.}}", ""}, "twice", "twice"}, // TODO: should want "" not "twice"
+ }
+
+ for i, c := range cases {
+ root := New("root")
+
+ var (
+ m *Template
+ err error
+ )
+ for _, d := range c.defn {
+ m, err = root.New(c.in).Parse(d)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+ buf := &bytes.Buffer{}
+ if err := m.Execute(buf, c.in); err != nil {
+ t.Error(i, err)
+ continue
+ }
+ if buf.String() != c.want {
+ t.Errorf("expected string %q: got %q", c.want, buf.String())
+ }
+ }
+}
+
+// Issue 19249 was a regression in 1.8 caused by the handling of empty
+// templates added in that release, which got different answers depending
+// on the order templates appeared in the internal map.
+func TestIssue19294(t *testing.T) {
+ // The empty block in "xhtml" should be replaced during execution
+ // by the contents of "stylesheet", but if the internal map associating
+ // names with templates is built in the wrong order, the empty block
+ // looks non-empty and this doesn't happen.
+ var inlined = map[string]string{
+ "stylesheet": `{{define "stylesheet"}}stylesheet{{end}}`,
+ "xhtml": `{{block "stylesheet" .}}{{end}}`,
+ }
+ all := []string{"stylesheet", "xhtml"}
+ for i := 0; i < 100; i++ {
+ res, err := New("title.xhtml").Parse(`{{template "xhtml" .}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, name := range all {
+ _, err := res.New(name).Parse(inlined[name])
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+ var buf bytes.Buffer
+ res.Execute(&buf, 0)
+ if buf.String() != "stylesheet" {
+ t.Fatalf("iteration %d: got %q; expected %q", i, buf.String(), "stylesheet")
+ }
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/state_string.go b/tpl/internal/go_templates/htmltemplate/state_string.go
new file mode 100644
index 000000000..05104be89
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/state_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type state"; DO NOT EDIT.
+
+package template
+
+import "strconv"
+
+const _state_name = "stateTextstateTagstateAttrNamestateAfterNamestateBeforeValuestateHTMLCmtstateRCDATAstateAttrstateURLstateSrcsetstateJSstateJSDqStrstateJSSqStrstateJSRegexpstateJSBlockCmtstateJSLineCmtstateCSSstateCSSDqStrstateCSSSqStrstateCSSDqURLstateCSSSqURLstateCSSURLstateCSSBlockCmtstateCSSLineCmtstateError"
+
+var _state_index = [...]uint16{0, 9, 17, 30, 44, 60, 72, 83, 92, 100, 111, 118, 130, 142, 155, 170, 184, 192, 205, 218, 231, 244, 255, 271, 286, 296}
+
+func (i state) String() string {
+ if i >= state(len(_state_index)-1) {
+ return "state(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _state_name[_state_index[i]:_state_index[i+1]]
+}
diff --git a/tpl/internal/go_templates/htmltemplate/template.go b/tpl/internal/go_templates/htmltemplate/template.go
new file mode 100644
index 000000000..b4ccaa648
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/template.go
@@ -0,0 +1,537 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path"
+ "path/filepath"
+ "sync"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+// Template is a specialized Template from "text/template" that produces a safe
+// HTML document fragment.
+type Template struct {
+ // Sticky error if escaping fails, or escapeOK if succeeded.
+ escapeErr error
+ // We could embed the text/template field, but it's safer not to because
+ // we need to keep our version of the name space and the underlying
+ // template's in sync.
+ text *template.Template
+ // The underlying template's parse tree, updated to be HTML-safe.
+ Tree *parse.Tree
+ *nameSpace // common to all associated templates
+}
+
+// escapeOK is a sentinel value used to indicate valid escaping.
+var escapeOK = fmt.Errorf("template escaped correctly")
+
+// nameSpace is the data structure shared by all templates in an association.
+type nameSpace struct {
+ mu sync.Mutex
+ set map[string]*Template
+ escaped bool
+ esc escaper
+}
+
+// Templates returns a slice of the templates associated with t, including t
+// itself.
+func (t *Template) Templates() []*Template {
+ ns := t.nameSpace
+ ns.mu.Lock()
+ defer ns.mu.Unlock()
+ // Return a slice so we don't expose the map.
+ m := make([]*Template, 0, len(ns.set))
+ for _, v := range ns.set {
+ m = append(m, v)
+ }
+ return m
+}
+
+// Option sets options for the template. Options are described by
+// strings, either a simple string or "key=value". There can be at
+// most one equals sign in an option string. If the option string
+// is unrecognized or otherwise invalid, Option panics.
+//
+// Known options:
+//
+// missingkey: Control the behavior during execution if a map is
+// indexed with a key that is not present in the map.
+// "missingkey=default" or "missingkey=invalid"
+// The default behavior: Do nothing and continue execution.
+// If printed, the result of the index operation is the string
+// "<no value>".
+// "missingkey=zero"
+// The operation returns the zero value for the map type's element.
+// "missingkey=error"
+// Execution stops immediately with an error.
+//
+func (t *Template) Option(opt ...string) *Template {
+ t.text.Option(opt...)
+ return t
+}
+
+// checkCanParse checks whether it is OK to parse templates.
+// If not, it returns an error.
+func (t *Template) checkCanParse() error {
+ if t == nil {
+ return nil
+ }
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ if t.nameSpace.escaped {
+ return fmt.Errorf("html/template: cannot Parse after Execute")
+ }
+ return nil
+}
+
+// escape escapes all associated templates.
+func (t *Template) escape() error {
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ t.nameSpace.escaped = true
+ if t.escapeErr == nil {
+ if t.Tree == nil {
+ return fmt.Errorf("template: %q is an incomplete or empty template", t.Name())
+ }
+ if err := escapeTemplate(t, t.text.Root, t.Name()); err != nil {
+ return err
+ }
+ } else if t.escapeErr != escapeOK {
+ return t.escapeErr
+ }
+ return nil
+}
+
+// Execute applies a parsed template to the specified data object,
+// writing the output to wr.
+// If an error occurs executing the template or writing its output,
+// execution stops, but partial results may already have been written to
+// the output writer.
+// A template may be executed safely in parallel, although if parallel
+// executions share a Writer the output may be interleaved.
+func (t *Template) Execute(wr io.Writer, data any) error {
+ if err := t.escape(); err != nil {
+ return err
+ }
+ return t.text.Execute(wr, data)
+}
+
+// ExecuteTemplate applies the template associated with t that has the given
+// name to the specified data object and writes the output to wr.
+// If an error occurs executing the template or writing its output,
+// execution stops, but partial results may already have been written to
+// the output writer.
+// A template may be executed safely in parallel, although if parallel
+// executions share a Writer the output may be interleaved.
+func (t *Template) ExecuteTemplate(wr io.Writer, name string, data any) error {
+ tmpl, err := t.lookupAndEscapeTemplate(name)
+ if err != nil {
+ return err
+ }
+ return tmpl.text.Execute(wr, data)
+}
+
+// lookupAndEscapeTemplate guarantees that the template with the given name
+// is escaped, or returns an error if it cannot be. It returns the named
+// template.
+func (t *Template) lookupAndEscapeTemplate(name string) (tmpl *Template, err error) {
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ t.nameSpace.escaped = true
+ tmpl = t.set[name]
+ if tmpl == nil {
+ return nil, fmt.Errorf("html/template: %q is undefined", name)
+ }
+ if tmpl.escapeErr != nil && tmpl.escapeErr != escapeOK {
+ return nil, tmpl.escapeErr
+ }
+ if tmpl.text.Tree == nil || tmpl.text.Root == nil {
+ return nil, fmt.Errorf("html/template: %q is an incomplete template", name)
+ }
+ if t.text.Lookup(name) == nil {
+ panic("html/template internal error: template escaping out of sync")
+ }
+ if tmpl.escapeErr == nil {
+ err = escapeTemplate(tmpl, tmpl.text.Root, name)
+ }
+ return tmpl, err
+}
+
+// DefinedTemplates returns a string listing the defined templates,
+// prefixed by the string "; defined templates are: ". If there are none,
+// it returns the empty string. Used to generate an error message.
+func (t *Template) DefinedTemplates() string {
+ return t.text.DefinedTemplates()
+}
+
+// Parse parses text as a template body for t.
+// Named template definitions ({{define ...}} or {{block ...}} statements) in text
+// define additional templates associated with t and are removed from the
+// definition of t itself.
+//
+// Templates can be redefined in successive calls to Parse,
+// before the first use of Execute on t or any associated template.
+// A template definition with a body containing only white space and comments
+// is considered empty and will not replace an existing template's body.
+// This allows using Parse to add new named template definitions without
+// overwriting the main template body.
+func (t *Template) Parse(text string) (*Template, error) {
+ if err := t.checkCanParse(); err != nil {
+ return nil, err
+ }
+
+ ret, err := t.text.Parse(text)
+ if err != nil {
+ return nil, err
+ }
+
+ // In general, all the named templates might have changed underfoot.
+ // Regardless, some new ones may have been defined.
+ // The template.Template set has been updated; update ours.
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ for _, v := range ret.Templates() {
+ name := v.Name()
+ tmpl := t.set[name]
+ if tmpl == nil {
+ tmpl = t.new(name)
+ }
+ tmpl.text = v
+ tmpl.Tree = v.Tree
+ }
+ return t, nil
+}
+
+// AddParseTree creates a new template with the name and parse tree
+// and associates it with t.
+//
+// It returns an error if t or any associated template has already been executed.
+func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) {
+ if err := t.checkCanParse(); err != nil {
+ return nil, err
+ }
+
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ text, err := t.text.AddParseTree(name, tree)
+ if err != nil {
+ return nil, err
+ }
+ ret := &Template{
+ nil,
+ text,
+ text.Tree,
+ t.nameSpace,
+ }
+ t.set[name] = ret
+ return ret, nil
+}
+
+// Clone returns a duplicate of the template, including all associated
+// templates. The actual representation is not copied, but the name space of
+// associated templates is, so further calls to Parse in the copy will add
+// templates to the copy but not to the original. Clone can be used to prepare
+// common templates and use them with variant definitions for other templates
+// by adding the variants after the clone is made.
+//
+// It returns an error if t has already been executed.
+func (t *Template) Clone() (*Template, error) {
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ if t.escapeErr != nil {
+ return nil, fmt.Errorf("html/template: cannot Clone %q after it has executed", t.Name())
+ }
+ textClone, err := t.text.Clone()
+ if err != nil {
+ return nil, err
+ }
+ ns := &nameSpace{set: make(map[string]*Template)}
+ ns.esc = makeEscaper(ns)
+ ret := &Template{
+ nil,
+ textClone,
+ textClone.Tree,
+ ns,
+ }
+ ret.set[ret.Name()] = ret
+ for _, x := range textClone.Templates() {
+ name := x.Name()
+ src := t.set[name]
+ if src == nil || src.escapeErr != nil {
+ return nil, fmt.Errorf("html/template: cannot Clone %q after it has executed", t.Name())
+ }
+ x.Tree = x.Tree.Copy()
+ ret.set[name] = &Template{
+ nil,
+ x,
+ x.Tree,
+ ret.nameSpace,
+ }
+ }
+ // Return the template associated with the name of this template.
+ return ret.set[ret.Name()], nil
+}
+
+// New allocates a new HTML template with the given name.
+func New(name string) *Template {
+ ns := &nameSpace{set: make(map[string]*Template)}
+ ns.esc = makeEscaper(ns)
+ tmpl := &Template{
+ nil,
+ template.New(name),
+ nil,
+ ns,
+ }
+ tmpl.set[name] = tmpl
+ return tmpl
+}
+
+// New allocates a new HTML template associated with the given one
+// and with the same delimiters. The association, which is transitive,
+// allows one template to invoke another with a {{template}} action.
+//
+// If a template with the given name already exists, the new HTML template
+// will replace it. The existing template will be reset and disassociated with
+// t.
+func (t *Template) New(name string) *Template {
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ return t.new(name)
+}
+
+// new is the implementation of New, without the lock.
+func (t *Template) new(name string) *Template {
+ tmpl := &Template{
+ nil,
+ t.text.New(name),
+ nil,
+ t.nameSpace,
+ }
+ if existing, ok := tmpl.set[name]; ok {
+ emptyTmpl := New(existing.Name())
+ *existing = *emptyTmpl
+ }
+ tmpl.set[name] = tmpl
+ return tmpl
+}
+
+// Name returns the name of the template.
+func (t *Template) Name() string {
+ return t.text.Name()
+}
+
+// FuncMap is the type of the map defining the mapping from names to
+// functions. Each function must have either a single return value, or two
+// return values of which the second has type error. In that case, if the
+// second (error) argument evaluates to non-nil during execution, execution
+// terminates and Execute returns that error. FuncMap has the same base type
+// as FuncMap in "text/template", copied here so clients need not import
+// "text/template".
+type FuncMap map[string]any
+
+// Funcs adds the elements of the argument map to the template's function map.
+// It must be called before the template is parsed.
+// It panics if a value in the map is not a function with appropriate return
+// type. However, it is legal to overwrite elements of the map. The return
+// value is the template, so calls can be chained.
+func (t *Template) Funcs(funcMap FuncMap) *Template {
+ t.text.Funcs(template.FuncMap(funcMap))
+ return t
+}
+
+// Delims sets the action delimiters to the specified strings, to be used in
+// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template
+// definitions will inherit the settings. An empty delimiter stands for the
+// corresponding default: {{ or }}.
+// The return value is the template, so calls can be chained.
+func (t *Template) Delims(left, right string) *Template {
+ t.text.Delims(left, right)
+ return t
+}
+
+// Lookup returns the template with the given name that is associated with t,
+// or nil if there is no such template.
+func (t *Template) Lookup(name string) *Template {
+ t.nameSpace.mu.Lock()
+ defer t.nameSpace.mu.Unlock()
+ return t.set[name]
+}
+
+// Must is a helper that wraps a call to a function returning (*Template, error)
+// and panics if the error is non-nil. It is intended for use in variable initializations
+// such as
+// var t = template.Must(template.New("name").Parse("html"))
+func Must(t *Template, err error) *Template {
+ if err != nil {
+ panic(err)
+ }
+ return t
+}
+
+// ParseFiles creates a new Template and parses the template definitions from
+// the named files. The returned template's name will have the (base) name and
+// (parsed) contents of the first file. There must be at least one file.
+// If an error occurs, parsing stops and the returned *Template is nil.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+// For instance, ParseFiles("a/foo", "b/foo") stores "b/foo" as the template
+// named "foo", while "a/foo" is unavailable.
+func ParseFiles(filenames ...string) (*Template, error) {
+ return parseFiles(nil, readFileOS, filenames...)
+}
+
+// ParseFiles parses the named files and associates the resulting templates with
+// t. If an error occurs, parsing stops and the returned template is nil;
+// otherwise it is t. There must be at least one file.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+//
+// ParseFiles returns an error if t or any associated template has already been executed.
+func (t *Template) ParseFiles(filenames ...string) (*Template, error) {
+ return parseFiles(t, readFileOS, filenames...)
+}
+
+// parseFiles is the helper for the method and function. If the argument
+// template is nil, it is created from the first file.
+func parseFiles(t *Template, readFile func(string) (string, []byte, error), filenames ...string) (*Template, error) {
+ if err := t.checkCanParse(); err != nil {
+ return nil, err
+ }
+
+ if len(filenames) == 0 {
+ // Not really a problem, but be consistent.
+ return nil, fmt.Errorf("html/template: no files named in call to ParseFiles")
+ }
+ for _, filename := range filenames {
+ name, b, err := readFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ s := string(b)
+ // First template becomes return value if not already defined,
+ // and we use that one for subsequent New calls to associate
+ // all the templates together. Also, if this file has the same name
+ // as t, this file becomes the contents of t, so
+ // t, err := New(name).Funcs(xxx).ParseFiles(name)
+ // works. Otherwise we create a new template associated with t.
+ var tmpl *Template
+ if t == nil {
+ t = New(name)
+ }
+ if name == t.Name() {
+ tmpl = t
+ } else {
+ tmpl = t.New(name)
+ }
+ _, err = tmpl.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+ }
+ return t, nil
+}
+
+// ParseGlob creates a new Template and parses the template definitions from
+// the files identified by the pattern. The files are matched according to the
+// semantics of filepath.Match, and the pattern must match at least one file.
+// The returned template will have the (base) name and (parsed) contents of the
+// first file matched by the pattern. ParseGlob is equivalent to calling
+// ParseFiles with the list of files matched by the pattern.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+func ParseGlob(pattern string) (*Template, error) {
+ return parseGlob(nil, pattern)
+}
+
+// ParseGlob parses the template definitions in the files identified by the
+// pattern and associates the resulting templates with t. The files are matched
+// according to the semantics of filepath.Match, and the pattern must match at
+// least one file. ParseGlob is equivalent to calling t.ParseFiles with the
+// list of files matched by the pattern.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+//
+// ParseGlob returns an error if t or any associated template has already been executed.
+func (t *Template) ParseGlob(pattern string) (*Template, error) {
+ return parseGlob(t, pattern)
+}
+
+// parseGlob is the implementation of the function and method ParseGlob.
+func parseGlob(t *Template, pattern string) (*Template, error) {
+ if err := t.checkCanParse(); err != nil {
+ return nil, err
+ }
+ filenames, err := filepath.Glob(pattern)
+ if err != nil {
+ return nil, err
+ }
+ if len(filenames) == 0 {
+ return nil, fmt.Errorf("html/template: pattern matches no files: %#q", pattern)
+ }
+ return parseFiles(t, readFileOS, filenames...)
+}
+
+// IsTrue reports whether the value is 'true', in the sense of not the zero of its type,
+// and whether the value has a meaningful truth value. This is the definition of
+// truth used by if and other such actions.
+func IsTrue(val any) (truth, ok bool) {
+ return template.IsTrue(val)
+}
+
+// ParseFS is like ParseFiles or ParseGlob but reads from the file system fs
+// instead of the host operating system's file system.
+// It accepts a list of glob patterns.
+// (Note that most file names serve as glob patterns matching only themselves.)
+func ParseFS(fs fs.FS, patterns ...string) (*Template, error) {
+ return parseFS(nil, fs, patterns)
+}
+
+// ParseFS is like ParseFiles or ParseGlob but reads from the file system fs
+// instead of the host operating system's file system.
+// It accepts a list of glob patterns.
+// (Note that most file names serve as glob patterns matching only themselves.)
+func (t *Template) ParseFS(fs fs.FS, patterns ...string) (*Template, error) {
+ return parseFS(t, fs, patterns)
+}
+
+func parseFS(t *Template, fsys fs.FS, patterns []string) (*Template, error) {
+ var filenames []string
+ for _, pattern := range patterns {
+ list, err := fs.Glob(fsys, pattern)
+ if err != nil {
+ return nil, err
+ }
+ if len(list) == 0 {
+ return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
+ }
+ filenames = append(filenames, list...)
+ }
+ return parseFiles(t, readFileFS(fsys), filenames...)
+}
+
+func readFileOS(file string) (name string, b []byte, err error) {
+ name = filepath.Base(file)
+ b, err = os.ReadFile(file)
+ return
+}
+
+func readFileFS(fsys fs.FS) func(string) (string, []byte, error) {
+ return func(file string) (name string, b []byte, err error) {
+ name = path.Base(file)
+ b, err = fs.ReadFile(fsys, file)
+ return
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/template_test.go b/tpl/internal/go_templates/htmltemplate/template_test.go
new file mode 100644
index 000000000..8a8f2f38c
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/template_test.go
@@ -0,0 +1,222 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "strings"
+ "testing"
+
+ . "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse" // https://golang.org/issue/12996
+)
+
+func TestTemplateClone(t *testing.T) {
+
+ orig := New("name")
+ clone, err := orig.Clone()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(clone.Templates()) != len(orig.Templates()) {
+ t.Fatalf("Invalid length of t.Clone().Templates()")
+ }
+
+ const want = "stuff"
+ parsed := Must(clone.Parse(want))
+ var buf bytes.Buffer
+ err = parsed.Execute(&buf, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if got := buf.String(); got != want {
+ t.Fatalf("got %q; want %q", got, want)
+ }
+}
+
+func TestRedefineNonEmptyAfterExecution(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `foo`)
+ c.mustExecute(c.root, nil, "foo")
+ c.mustNotParse(c.root, `bar`)
+}
+
+func TestRedefineEmptyAfterExecution(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, ``)
+ c.mustExecute(c.root, nil, "")
+ c.mustNotParse(c.root, `foo`)
+ c.mustExecute(c.root, nil, "")
+}
+
+func TestRedefineAfterNonExecution(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `{{if .}}<{{template "X"}}>{{end}}{{define "X"}}foo{{end}}`)
+ c.mustExecute(c.root, 0, "")
+ c.mustNotParse(c.root, `{{define "X"}}bar{{end}}`)
+ c.mustExecute(c.root, 1, "&lt;foo>")
+}
+
+func TestRedefineAfterNamedExecution(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `<{{template "X" .}}>{{define "X"}}foo{{end}}`)
+ c.mustExecute(c.root, nil, "&lt;foo>")
+ c.mustNotParse(c.root, `{{define "X"}}bar{{end}}`)
+ c.mustExecute(c.root, nil, "&lt;foo>")
+}
+
+func TestRedefineNestedByNameAfterExecution(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `{{define "X"}}foo{{end}}`)
+ c.mustExecute(c.lookup("X"), nil, "foo")
+ c.mustNotParse(c.root, `{{define "X"}}bar{{end}}`)
+ c.mustExecute(c.lookup("X"), nil, "foo")
+}
+
+func TestRedefineNestedByTemplateAfterExecution(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `{{define "X"}}foo{{end}}`)
+ c.mustExecute(c.lookup("X"), nil, "foo")
+ c.mustNotParse(c.lookup("X"), `bar`)
+ c.mustExecute(c.lookup("X"), nil, "foo")
+}
+
+func TestRedefineSafety(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `<html><a href="{{template "X"}}">{{define "X"}}{{end}}`)
+ c.mustExecute(c.root, nil, `<html><a href="">`)
+ // Note: Every version of Go prior to Go 1.8 accepted the redefinition of "X"
+ // on the next line, but luckily kept it from being used in the outer template.
+ // Now we reject it, which makes clearer that we're not going to use it.
+ c.mustNotParse(c.root, `{{define "X"}}" bar="baz{{end}}`)
+ c.mustExecute(c.root, nil, `<html><a href="">`)
+}
+
+func TestRedefineTopUse(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `{{template "X"}}{{.}}{{define "X"}}{{end}}`)
+ c.mustExecute(c.root, 42, `42`)
+ c.mustNotParse(c.root, `{{define "X"}}<script>{{end}}`)
+ c.mustExecute(c.root, 42, `42`)
+}
+
+func TestRedefineOtherParsers(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, ``)
+ c.mustExecute(c.root, nil, ``)
+ if _, err := c.root.ParseFiles("no.template"); err == nil || !strings.Contains(err.Error(), "Execute") {
+ t.Errorf("ParseFiles: %v\nwanted error about already having Executed", err)
+ }
+ if _, err := c.root.ParseGlob("*.no.template"); err == nil || !strings.Contains(err.Error(), "Execute") {
+ t.Errorf("ParseGlob: %v\nwanted error about already having Executed", err)
+ }
+ if _, err := c.root.AddParseTree("t1", c.root.Tree); err == nil || !strings.Contains(err.Error(), "Execute") {
+ t.Errorf("AddParseTree: %v\nwanted error about already having Executed", err)
+ }
+}
+
+func TestNumbers(t *testing.T) {
+ c := newTestCase(t)
+ c.mustParse(c.root, `{{print 1_2.3_4}} {{print 0x0_1.e_0p+02}}`)
+ c.mustExecute(c.root, nil, "12.34 7.5")
+}
+
+func TestStringsInScriptsWithJsonContentTypeAreCorrectlyEscaped(t *testing.T) {
+ // See #33671 and #37634 for more context on this.
+ tests := []struct{ name, in string }{
+ {"empty", ""},
+ {"invalid", string(rune(-1))},
+ {"null", "\u0000"},
+ {"unit separator", "\u001F"},
+ {"tab", "\t"},
+ {"gt and lt", "<>"},
+ {"quotes", `'"`},
+ {"ASCII letters", "ASCII letters"},
+ {"Unicode", "ʕ⊙ϖ⊙ʔ"},
+ {"Pizza", "🍕"},
+ }
+ const (
+ prefix = `<script type="application/ld+json">`
+ suffix = `</script>`
+ templ = prefix + `"{{.}}"` + suffix
+ )
+ tpl := Must(New("JS string is JSON string").Parse(templ))
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ var buf bytes.Buffer
+ if err := tpl.Execute(&buf, tt.in); err != nil {
+ t.Fatalf("Cannot render template: %v", err)
+ }
+ trimmed := bytes.TrimSuffix(bytes.TrimPrefix(buf.Bytes(), []byte(prefix)), []byte(suffix))
+ var got string
+ if err := json.Unmarshal(trimmed, &got); err != nil {
+ t.Fatalf("Cannot parse JS string %q as JSON: %v", trimmed[1:len(trimmed)-1], err)
+ }
+ if got != tt.in {
+ t.Errorf("Serialization changed the string value: got %q want %q", got, tt.in)
+ }
+ })
+ }
+}
+
+func TestSkipEscapeComments(t *testing.T) {
+ c := newTestCase(t)
+ tr := parse.New("root")
+ tr.Mode = parse.ParseComments
+ newT, err := tr.Parse("{{/* A comment */}}{{ 1 }}{{/* Another comment */}}", "", "", make(map[string]*parse.Tree))
+ if err != nil {
+ t.Fatalf("Cannot parse template text: %v", err)
+ }
+ c.root, err = c.root.AddParseTree("root", newT)
+ if err != nil {
+ t.Fatalf("Cannot add parse tree to template: %v", err)
+ }
+ c.mustExecute(c.root, nil, "1")
+}
+
+type testCase struct {
+ t *testing.T
+ root *Template
+}
+
+func newTestCase(t *testing.T) *testCase {
+ return &testCase{
+ t: t,
+ root: New("root"),
+ }
+}
+
+func (c *testCase) lookup(name string) *Template {
+ return c.root.Lookup(name)
+}
+
+func (c *testCase) mustParse(t *Template, text string) {
+ _, err := t.Parse(text)
+ if err != nil {
+ c.t.Fatalf("parse: %v", err)
+ }
+}
+
+func (c *testCase) mustNotParse(t *Template, text string) {
+ _, err := t.Parse(text)
+ if err == nil {
+ c.t.Fatalf("parse: unexpected success")
+ }
+}
+
+func (c *testCase) mustExecute(t *Template, val any, want string) {
+ var buf bytes.Buffer
+ err := t.Execute(&buf, val)
+ if err != nil {
+ c.t.Fatalf("execute: %v", err)
+ }
+ if buf.String() != want {
+ c.t.Fatalf("template output:\n%s\nwant:\n%s", buf.String(), want)
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/testdata/file1.tmpl b/tpl/internal/go_templates/htmltemplate/testdata/file1.tmpl
new file mode 100644
index 000000000..febf9d9f8
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/testdata/file1.tmpl
@@ -0,0 +1,2 @@
+{{define "x"}}TEXT{{end}}
+{{define "dotV"}}{{.V}}{{end}}
diff --git a/tpl/internal/go_templates/htmltemplate/testdata/file2.tmpl b/tpl/internal/go_templates/htmltemplate/testdata/file2.tmpl
new file mode 100644
index 000000000..39bf6fb9e
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/testdata/file2.tmpl
@@ -0,0 +1,2 @@
+{{define "dot"}}{{.}}{{end}}
+{{define "nested"}}{{template "dot" .}}{{end}}
diff --git a/tpl/internal/go_templates/htmltemplate/testdata/fs.zip b/tpl/internal/go_templates/htmltemplate/testdata/fs.zip
new file mode 100644
index 000000000..8581313ae
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/testdata/fs.zip
Binary files differ
diff --git a/tpl/internal/go_templates/htmltemplate/testdata/tmpl1.tmpl b/tpl/internal/go_templates/htmltemplate/testdata/tmpl1.tmpl
new file mode 100644
index 000000000..b72b3a340
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/testdata/tmpl1.tmpl
@@ -0,0 +1,3 @@
+template1
+{{define "x"}}x{{end}}
+{{template "y"}}
diff --git a/tpl/internal/go_templates/htmltemplate/testdata/tmpl2.tmpl b/tpl/internal/go_templates/htmltemplate/testdata/tmpl2.tmpl
new file mode 100644
index 000000000..16beba6e7
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/testdata/tmpl2.tmpl
@@ -0,0 +1,3 @@
+template2
+{{define "y"}}y{{end}}
+{{template "x"}}
diff --git a/tpl/internal/go_templates/htmltemplate/transition.go b/tpl/internal/go_templates/htmltemplate/transition.go
new file mode 100644
index 000000000..06df67933
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/transition.go
@@ -0,0 +1,592 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "strings"
+)
+
+// transitionFunc is the array of context transition functions for text nodes.
+// A transition function takes a context and template text input, and returns
+// the updated context and the number of bytes consumed from the front of the
+// input.
+var transitionFunc = [...]func(context, []byte) (context, int){
+ stateText: tText,
+ stateTag: tTag,
+ stateAttrName: tAttrName,
+ stateAfterName: tAfterName,
+ stateBeforeValue: tBeforeValue,
+ stateHTMLCmt: tHTMLCmt,
+ stateRCDATA: tSpecialTagEnd,
+ stateAttr: tAttr,
+ stateURL: tURL,
+ stateSrcset: tURL,
+ stateJS: tJS,
+ stateJSDqStr: tJSDelimited,
+ stateJSSqStr: tJSDelimited,
+ stateJSRegexp: tJSDelimited,
+ stateJSBlockCmt: tBlockCmt,
+ stateJSLineCmt: tLineCmt,
+ stateCSS: tCSS,
+ stateCSSDqStr: tCSSStr,
+ stateCSSSqStr: tCSSStr,
+ stateCSSDqURL: tCSSStr,
+ stateCSSSqURL: tCSSStr,
+ stateCSSURL: tCSSStr,
+ stateCSSBlockCmt: tBlockCmt,
+ stateCSSLineCmt: tLineCmt,
+ stateError: tError,
+}
+
+var commentStart = []byte("<!--")
+var commentEnd = []byte("-->")
+
+// tText is the context transition function for the text state.
+func tText(c context, s []byte) (context, int) {
+ k := 0
+ for {
+ i := k + bytes.IndexByte(s[k:], '<')
+ if i < k || i+1 == len(s) {
+ return c, len(s)
+ } else if i+4 <= len(s) && bytes.Equal(commentStart, s[i:i+4]) {
+ return context{state: stateHTMLCmt}, i + 4
+ }
+ i++
+ end := false
+ if s[i] == '/' {
+ if i+1 == len(s) {
+ return c, len(s)
+ }
+ end, i = true, i+1
+ }
+ j, e := eatTagName(s, i)
+ if j != i {
+ if end {
+ e = elementNone
+ }
+ // We've found an HTML tag.
+ return context{state: stateTag, element: e}, j
+ }
+ k = j
+ }
+}
+
+var elementContentType = [...]state{
+ elementNone: stateText,
+ elementScript: stateJS,
+ elementStyle: stateCSS,
+ elementTextarea: stateRCDATA,
+ elementTitle: stateRCDATA,
+}
+
+// tTag is the context transition function for the tag state.
+func tTag(c context, s []byte) (context, int) {
+ // Find the attribute name.
+ i := eatWhiteSpace(s, 0)
+ if i == len(s) {
+ return c, len(s)
+ }
+ if s[i] == '>' {
+ return context{
+ state: elementContentType[c.element],
+ element: c.element,
+ }, i + 1
+ }
+ j, err := eatAttrName(s, i)
+ if err != nil {
+ return context{state: stateError, err: err}, len(s)
+ }
+ state, attr := stateTag, attrNone
+ if i == j {
+ return context{
+ state: stateError,
+ err: errorf(ErrBadHTML, nil, 0, "expected space, attr name, or end of tag, but got %q", s[i:]),
+ }, len(s)
+ }
+
+ attrName := strings.ToLower(string(s[i:j]))
+ if c.element == elementScript && attrName == "type" {
+ attr = attrScriptType
+ } else {
+ switch attrType(attrName) {
+ case contentTypeURL:
+ attr = attrURL
+ case contentTypeCSS:
+ attr = attrStyle
+ case contentTypeJS:
+ attr = attrScript
+ case contentTypeSrcset:
+ attr = attrSrcset
+ }
+ }
+
+ if j == len(s) {
+ state = stateAttrName
+ } else {
+ state = stateAfterName
+ }
+ return context{state: state, element: c.element, attr: attr}, j
+}
+
+// tAttrName is the context transition function for stateAttrName.
+func tAttrName(c context, s []byte) (context, int) {
+ i, err := eatAttrName(s, 0)
+ if err != nil {
+ return context{state: stateError, err: err}, len(s)
+ } else if i != len(s) {
+ c.state = stateAfterName
+ }
+ return c, i
+}
+
+// tAfterName is the context transition function for stateAfterName.
+func tAfterName(c context, s []byte) (context, int) {
+ // Look for the start of the value.
+ i := eatWhiteSpace(s, 0)
+ if i == len(s) {
+ return c, len(s)
+ } else if s[i] != '=' {
+ // Occurs due to tag ending '>', and valueless attribute.
+ c.state = stateTag
+ return c, i
+ }
+ c.state = stateBeforeValue
+ // Consume the "=".
+ return c, i + 1
+}
+
+var attrStartStates = [...]state{
+ attrNone: stateAttr,
+ attrScript: stateJS,
+ attrScriptType: stateAttr,
+ attrStyle: stateCSS,
+ attrURL: stateURL,
+ attrSrcset: stateSrcset,
+}
+
+// tBeforeValue is the context transition function for stateBeforeValue.
+func tBeforeValue(c context, s []byte) (context, int) {
+ i := eatWhiteSpace(s, 0)
+ if i == len(s) {
+ return c, len(s)
+ }
+ // Find the attribute delimiter.
+ delim := delimSpaceOrTagEnd
+ switch s[i] {
+ case '\'':
+ delim, i = delimSingleQuote, i+1
+ case '"':
+ delim, i = delimDoubleQuote, i+1
+ }
+ c.state, c.delim = attrStartStates[c.attr], delim
+ return c, i
+}
+
+// tHTMLCmt is the context transition function for stateHTMLCmt.
+func tHTMLCmt(c context, s []byte) (context, int) {
+ if i := bytes.Index(s, commentEnd); i != -1 {
+ return context{}, i + 3
+ }
+ return c, len(s)
+}
+
+// specialTagEndMarkers maps element types to the character sequence that
+// case-insensitively signals the end of the special tag body.
+var specialTagEndMarkers = [...][]byte{
+ elementScript: []byte("script"),
+ elementStyle: []byte("style"),
+ elementTextarea: []byte("textarea"),
+ elementTitle: []byte("title"),
+}
+
+var (
+ specialTagEndPrefix = []byte("</")
+ tagEndSeparators = []byte("> \t\n\f/")
+)
+
+// tSpecialTagEnd is the context transition function for raw text and RCDATA
+// element states.
+func tSpecialTagEnd(c context, s []byte) (context, int) {
+ if c.element != elementNone {
+ if i := indexTagEnd(s, specialTagEndMarkers[c.element]); i != -1 {
+ return context{}, i
+ }
+ }
+ return c, len(s)
+}
+
+// indexTagEnd finds the index of a special tag end in a case insensitive way, or returns -1
+func indexTagEnd(s []byte, tag []byte) int {
+ res := 0
+ plen := len(specialTagEndPrefix)
+ for len(s) > 0 {
+ // Try to find the tag end prefix first
+ i := bytes.Index(s, specialTagEndPrefix)
+ if i == -1 {
+ return i
+ }
+ s = s[i+plen:]
+ // Try to match the actual tag if there is still space for it
+ if len(tag) <= len(s) && bytes.EqualFold(tag, s[:len(tag)]) {
+ s = s[len(tag):]
+ // Check the tag is followed by a proper separator
+ if len(s) > 0 && bytes.IndexByte(tagEndSeparators, s[0]) != -1 {
+ return res + i
+ }
+ res += len(tag)
+ }
+ res += i + plen
+ }
+ return -1
+}
+
+// tAttr is the context transition function for the attribute state.
+func tAttr(c context, s []byte) (context, int) {
+ return c, len(s)
+}
+
+// tURL is the context transition function for the URL state.
+func tURL(c context, s []byte) (context, int) {
+ if bytes.ContainsAny(s, "#?") {
+ c.urlPart = urlPartQueryOrFrag
+ } else if len(s) != eatWhiteSpace(s, 0) && c.urlPart == urlPartNone {
+ // HTML5 uses "Valid URL potentially surrounded by spaces" for
+ // attrs: https://www.w3.org/TR/html5/index.html#attributes-1
+ c.urlPart = urlPartPreQuery
+ }
+ return c, len(s)
+}
+
+// tJS is the context transition function for the JS state.
+func tJS(c context, s []byte) (context, int) {
+ i := bytes.IndexAny(s, `"'/`)
+ if i == -1 {
+ // Entire input is non string, comment, regexp tokens.
+ c.jsCtx = nextJSCtx(s, c.jsCtx)
+ return c, len(s)
+ }
+ c.jsCtx = nextJSCtx(s[:i], c.jsCtx)
+ switch s[i] {
+ case '"':
+ c.state, c.jsCtx = stateJSDqStr, jsCtxRegexp
+ case '\'':
+ c.state, c.jsCtx = stateJSSqStr, jsCtxRegexp
+ case '/':
+ switch {
+ case i+1 < len(s) && s[i+1] == '/':
+ c.state, i = stateJSLineCmt, i+1
+ case i+1 < len(s) && s[i+1] == '*':
+ c.state, i = stateJSBlockCmt, i+1
+ case c.jsCtx == jsCtxRegexp:
+ c.state = stateJSRegexp
+ case c.jsCtx == jsCtxDivOp:
+ c.jsCtx = jsCtxRegexp
+ default:
+ return context{
+ state: stateError,
+ err: errorf(ErrSlashAmbig, nil, 0, "'/' could start a division or regexp: %.32q", s[i:]),
+ }, len(s)
+ }
+ default:
+ panic("unreachable")
+ }
+ return c, i + 1
+}
+
+// tJSDelimited is the context transition function for the JS string and regexp
+// states.
+func tJSDelimited(c context, s []byte) (context, int) {
+ specials := `\"`
+ switch c.state {
+ case stateJSSqStr:
+ specials = `\'`
+ case stateJSRegexp:
+ specials = `\/[]`
+ }
+
+ k, inCharset := 0, false
+ for {
+ i := k + bytes.IndexAny(s[k:], specials)
+ if i < k {
+ break
+ }
+ switch s[i] {
+ case '\\':
+ i++
+ if i == len(s) {
+ return context{
+ state: stateError,
+ err: errorf(ErrPartialEscape, nil, 0, "unfinished escape sequence in JS string: %q", s),
+ }, len(s)
+ }
+ case '[':
+ inCharset = true
+ case ']':
+ inCharset = false
+ default:
+ // end delimiter
+ if !inCharset {
+ c.state, c.jsCtx = stateJS, jsCtxDivOp
+ return c, i + 1
+ }
+ }
+ k = i + 1
+ }
+
+ if inCharset {
+ // This can be fixed by making context richer if interpolation
+ // into charsets is desired.
+ return context{
+ state: stateError,
+ err: errorf(ErrPartialCharset, nil, 0, "unfinished JS regexp charset: %q", s),
+ }, len(s)
+ }
+
+ return c, len(s)
+}
+
+var blockCommentEnd = []byte("*/")
+
+// tBlockCmt is the context transition function for /*comment*/ states.
+func tBlockCmt(c context, s []byte) (context, int) {
+ i := bytes.Index(s, blockCommentEnd)
+ if i == -1 {
+ return c, len(s)
+ }
+ switch c.state {
+ case stateJSBlockCmt:
+ c.state = stateJS
+ case stateCSSBlockCmt:
+ c.state = stateCSS
+ default:
+ panic(c.state.String())
+ }
+ return c, i + 2
+}
+
+// tLineCmt is the context transition function for //comment states.
+func tLineCmt(c context, s []byte) (context, int) {
+ var lineTerminators string
+ var endState state
+ switch c.state {
+ case stateJSLineCmt:
+ lineTerminators, endState = "\n\r\u2028\u2029", stateJS
+ case stateCSSLineCmt:
+ lineTerminators, endState = "\n\f\r", stateCSS
+ // Line comments are not part of any published CSS standard but
+ // are supported by the 4 major browsers.
+ // This defines line comments as
+ // LINECOMMENT ::= "//" [^\n\f\d]*
+ // since https://www.w3.org/TR/css3-syntax/#SUBTOK-nl defines
+ // newlines:
+ // nl ::= #xA | #xD #xA | #xD | #xC
+ default:
+ panic(c.state.String())
+ }
+
+ i := bytes.IndexAny(s, lineTerminators)
+ if i == -1 {
+ return c, len(s)
+ }
+ c.state = endState
+ // Per section 7.4 of EcmaScript 5 : https://es5.github.com/#x7.4
+ // "However, the LineTerminator at the end of the line is not
+ // considered to be part of the single-line comment; it is
+ // recognized separately by the lexical grammar and becomes part
+ // of the stream of input elements for the syntactic grammar."
+ return c, i
+}
+
+// tCSS is the context transition function for the CSS state.
+func tCSS(c context, s []byte) (context, int) {
+ // CSS quoted strings are almost never used except for:
+ // (1) URLs as in background: "/foo.png"
+ // (2) Multiword font-names as in font-family: "Times New Roman"
+ // (3) List separators in content values as in inline-lists:
+ // <style>
+ // ul.inlineList { list-style: none; padding:0 }
+ // ul.inlineList > li { display: inline }
+ // ul.inlineList > li:before { content: ", " }
+ // ul.inlineList > li:first-child:before { content: "" }
+ // </style>
+ // <ul class=inlineList><li>One<li>Two<li>Three</ul>
+ // (4) Attribute value selectors as in a[href="http://example.com/"]
+ //
+ // We conservatively treat all strings as URLs, but make some
+ // allowances to avoid confusion.
+ //
+ // In (1), our conservative assumption is justified.
+ // In (2), valid font names do not contain ':', '?', or '#', so our
+ // conservative assumption is fine since we will never transition past
+ // urlPartPreQuery.
+ // In (3), our protocol heuristic should not be tripped, and there
+ // should not be non-space content after a '?' or '#', so as long as
+ // we only %-encode RFC 3986 reserved characters we are ok.
+ // In (4), we should URL escape for URL attributes, and for others we
+ // have the attribute name available if our conservative assumption
+ // proves problematic for real code.
+
+ k := 0
+ for {
+ i := k + bytes.IndexAny(s[k:], `("'/`)
+ if i < k {
+ return c, len(s)
+ }
+ switch s[i] {
+ case '(':
+ // Look for url to the left.
+ p := bytes.TrimRight(s[:i], "\t\n\f\r ")
+ if endsWithCSSKeyword(p, "url") {
+ j := len(s) - len(bytes.TrimLeft(s[i+1:], "\t\n\f\r "))
+ switch {
+ case j != len(s) && s[j] == '"':
+ c.state, j = stateCSSDqURL, j+1
+ case j != len(s) && s[j] == '\'':
+ c.state, j = stateCSSSqURL, j+1
+ default:
+ c.state = stateCSSURL
+ }
+ return c, j
+ }
+ case '/':
+ if i+1 < len(s) {
+ switch s[i+1] {
+ case '/':
+ c.state = stateCSSLineCmt
+ return c, i + 2
+ case '*':
+ c.state = stateCSSBlockCmt
+ return c, i + 2
+ }
+ }
+ case '"':
+ c.state = stateCSSDqStr
+ return c, i + 1
+ case '\'':
+ c.state = stateCSSSqStr
+ return c, i + 1
+ }
+ k = i + 1
+ }
+}
+
+// tCSSStr is the context transition function for the CSS string and URL states.
+func tCSSStr(c context, s []byte) (context, int) {
+ var endAndEsc string
+ switch c.state {
+ case stateCSSDqStr, stateCSSDqURL:
+ endAndEsc = `\"`
+ case stateCSSSqStr, stateCSSSqURL:
+ endAndEsc = `\'`
+ case stateCSSURL:
+ // Unquoted URLs end with a newline or close parenthesis.
+ // The below includes the wc (whitespace character) and nl.
+ endAndEsc = "\\\t\n\f\r )"
+ default:
+ panic(c.state.String())
+ }
+
+ k := 0
+ for {
+ i := k + bytes.IndexAny(s[k:], endAndEsc)
+ if i < k {
+ c, nread := tURL(c, decodeCSS(s[k:]))
+ return c, k + nread
+ }
+ if s[i] == '\\' {
+ i++
+ if i == len(s) {
+ return context{
+ state: stateError,
+ err: errorf(ErrPartialEscape, nil, 0, "unfinished escape sequence in CSS string: %q", s),
+ }, len(s)
+ }
+ } else {
+ c.state = stateCSS
+ return c, i + 1
+ }
+ c, _ = tURL(c, decodeCSS(s[:i+1]))
+ k = i + 1
+ }
+}
+
+// tError is the context transition function for the error state.
+func tError(c context, s []byte) (context, int) {
+ return c, len(s)
+}
+
+// eatAttrName returns the largest j such that s[i:j] is an attribute name.
+// It returns an error if s[i:] does not look like it begins with an
+// attribute name, such as encountering a quote mark without a preceding
+// equals sign.
+func eatAttrName(s []byte, i int) (int, *Error) {
+ for j := i; j < len(s); j++ {
+ switch s[j] {
+ case ' ', '\t', '\n', '\f', '\r', '=', '>':
+ return j, nil
+ case '\'', '"', '<':
+ // These result in a parse warning in HTML5 and are
+ // indicative of serious problems if seen in an attr
+ // name in a template.
+ return -1, errorf(ErrBadHTML, nil, 0, "%q in attribute name: %.32q", s[j:j+1], s)
+ default:
+ // No-op.
+ }
+ }
+ return len(s), nil
+}
+
+var elementNameMap = map[string]element{
+ "script": elementScript,
+ "style": elementStyle,
+ "textarea": elementTextarea,
+ "title": elementTitle,
+}
+
+// asciiAlpha reports whether c is an ASCII letter.
+func asciiAlpha(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
+}
+
+// asciiAlphaNum reports whether c is an ASCII letter or digit.
+func asciiAlphaNum(c byte) bool {
+ return asciiAlpha(c) || '0' <= c && c <= '9'
+}
+
+// eatTagName returns the largest j such that s[i:j] is a tag name and the tag type.
+func eatTagName(s []byte, i int) (int, element) {
+ if i == len(s) || !asciiAlpha(s[i]) {
+ return i, elementNone
+ }
+ j := i + 1
+ for j < len(s) {
+ x := s[j]
+ if asciiAlphaNum(x) {
+ j++
+ continue
+ }
+ // Allow "x-y" or "x:y" but not "x-", "-y", or "x--y".
+ if (x == ':' || x == '-') && j+1 < len(s) && asciiAlphaNum(s[j+1]) {
+ j += 2
+ continue
+ }
+ break
+ }
+ return j, elementNameMap[strings.ToLower(string(s[i:j]))]
+}
+
+// eatWhiteSpace returns the largest j such that s[i:j] is white space.
+func eatWhiteSpace(s []byte, i int) int {
+ for j := i; j < len(s); j++ {
+ switch s[j] {
+ case ' ', '\t', '\n', '\f', '\r':
+ // No-op.
+ default:
+ return j
+ }
+ }
+ return len(s)
+}
diff --git a/tpl/internal/go_templates/htmltemplate/transition_test.go b/tpl/internal/go_templates/htmltemplate/transition_test.go
new file mode 100644
index 000000000..0bd38800f
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/transition_test.go
@@ -0,0 +1,63 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+)
+
+func TestFindEndTag(t *testing.T) {
+ tests := []struct {
+ s, tag string
+ want int
+ }{
+ {"", "tag", -1},
+ {"hello </textarea> hello", "textarea", 6},
+ {"hello </TEXTarea> hello", "textarea", 6},
+ {"hello </textAREA>", "textarea", 6},
+ {"hello </textarea", "textareax", -1},
+ {"hello </textarea>", "tag", -1},
+ {"hello tag </textarea", "tag", -1},
+ {"hello </tag> </other> </textarea> <other>", "textarea", 22},
+ {"</textarea> <other>", "textarea", 0},
+ {"<div> </div> </TEXTAREA>", "textarea", 13},
+ {"<div> </div> </TEXTAREA\t>", "textarea", 13},
+ {"<div> </div> </TEXTAREA >", "textarea", 13},
+ {"<div> </div> </TEXTAREAfoo", "textarea", -1},
+ {"</TEXTAREAfoo </textarea>", "textarea", 14},
+ {"<</script >", "script", 1},
+ {"</script>", "textarea", -1},
+ }
+ for _, test := range tests {
+ if got := indexTagEnd([]byte(test.s), []byte(test.tag)); test.want != got {
+ t.Errorf("%q/%q: want\n\t%d\nbut got\n\t%d", test.s, test.tag, test.want, got)
+ }
+ }
+}
+
+func BenchmarkTemplateSpecialTags(b *testing.B) {
+
+ r := struct {
+ Name, Gift string
+ }{"Aunt Mildred", "bone china tea set"}
+
+ h1 := "<textarea> Hello Hello Hello </textarea> "
+ h2 := "<textarea> <p> Dear {{.Name}},\n{{with .Gift}}Thank you for the lovely {{.}}. {{end}}\nBest wishes. </p>\n</textarea>"
+ html := strings.Repeat(h1, 100) + h2 + strings.Repeat(h1, 100) + h2
+
+ var buf bytes.Buffer
+ for i := 0; i < b.N; i++ {
+ tmpl := Must(New("foo").Parse(html))
+ if err := tmpl.Execute(&buf, r); err != nil {
+ b.Fatal(err)
+ }
+ buf.Reset()
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/url.go b/tpl/internal/go_templates/htmltemplate/url.go
new file mode 100644
index 000000000..93905586a
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/url.go
@@ -0,0 +1,217 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+)
+
+// urlFilter returns its input unless it contains an unsafe scheme in which
+// case it defangs the entire URL.
+//
+// Schemes that cause unintended side effects that are irreversible without user
+// interaction are considered unsafe. For example, clicking on a "javascript:"
+// link can immediately trigger JavaScript code execution.
+//
+// This filter conservatively assumes that all schemes other than the following
+// are unsafe:
+// * http: Navigates to a new website, and may open a new window or tab.
+// These side effects can be reversed by navigating back to the
+// previous website, or closing the window or tab. No irreversible
+// changes will take place without further user interaction with
+// the new website.
+// * https: Same as http.
+// * mailto: Opens an email program and starts a new draft. This side effect
+// is not irreversible until the user explicitly clicks send; it
+// can be undone by closing the email program.
+//
+// To allow URLs containing other schemes to bypass this filter, developers must
+// explicitly indicate that such a URL is expected and safe by encapsulating it
+// in a template.URL value.
+func urlFilter(args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeURL {
+ return s
+ }
+ if !isSafeURL(s) {
+ return "#" + filterFailsafe
+ }
+ return s
+}
+
+// isSafeURL is true if s is a relative URL or if URL has a protocol in
+// (http, https, mailto).
+func isSafeURL(s string) bool {
+ if protocol, _, ok := strings.Cut(s, ":"); ok && !strings.Contains(protocol, "/") {
+ if !strings.EqualFold(protocol, "http") && !strings.EqualFold(protocol, "https") && !strings.EqualFold(protocol, "mailto") {
+ return false
+ }
+ }
+ return true
+}
+
+// urlEscaper produces an output that can be embedded in a URL query.
+// The output can be embedded in an HTML attribute without further escaping.
+func urlEscaper(args ...any) string {
+ return urlProcessor(false, args...)
+}
+
+// urlNormalizer normalizes URL content so it can be embedded in a quote-delimited
+// string or parenthesis delimited url(...).
+// The normalizer does not encode all HTML specials. Specifically, it does not
+// encode '&' so correct embedding in an HTML attribute requires escaping of
+// '&' to '&amp;'.
+func urlNormalizer(args ...any) string {
+ return urlProcessor(true, args...)
+}
+
+// urlProcessor normalizes (when norm is true) or escapes its input to produce
+// a valid hierarchical or opaque URL part.
+func urlProcessor(norm bool, args ...any) string {
+ s, t := stringify(args...)
+ if t == contentTypeURL {
+ norm = true
+ }
+ var b bytes.Buffer
+ if processURLOnto(s, norm, &b) {
+ return b.String()
+ }
+ return s
+}
+
+// processURLOnto appends a normalized URL corresponding to its input to b
+// and reports whether the appended content differs from s.
+func processURLOnto(s string, norm bool, b *bytes.Buffer) bool {
+ b.Grow(len(s) + 16)
+ written := 0
+ // The byte loop below assumes that all URLs use UTF-8 as the
+ // content-encoding. This is similar to the URI to IRI encoding scheme
+ // defined in section 3.1 of RFC 3987, and behaves the same as the
+ // EcmaScript builtin encodeURIComponent.
+ // It should not cause any misencoding of URLs in pages with
+ // Content-type: text/html;charset=UTF-8.
+ for i, n := 0, len(s); i < n; i++ {
+ c := s[i]
+ switch c {
+ // Single quote and parens are sub-delims in RFC 3986, but we
+ // escape them so the output can be embedded in single
+ // quoted attributes and unquoted CSS url(...) constructs.
+ // Single quotes are reserved in URLs, but are only used in
+ // the obsolete "mark" rule in an appendix in RFC 3986
+ // so can be safely encoded.
+ case '!', '#', '$', '&', '*', '+', ',', '/', ':', ';', '=', '?', '@', '[', ']':
+ if norm {
+ continue
+ }
+ // Unreserved according to RFC 3986 sec 2.3
+ // "For consistency, percent-encoded octets in the ranges of
+ // ALPHA (%41-%5A and %61-%7A), DIGIT (%30-%39), hyphen (%2D),
+ // period (%2E), underscore (%5F), or tilde (%7E) should not be
+ // created by URI producers
+ case '-', '.', '_', '~':
+ continue
+ case '%':
+ // When normalizing do not re-encode valid escapes.
+ if norm && i+2 < len(s) && isHex(s[i+1]) && isHex(s[i+2]) {
+ continue
+ }
+ default:
+ // Unreserved according to RFC 3986 sec 2.3
+ if 'a' <= c && c <= 'z' {
+ continue
+ }
+ if 'A' <= c && c <= 'Z' {
+ continue
+ }
+ if '0' <= c && c <= '9' {
+ continue
+ }
+ }
+ b.WriteString(s[written:i])
+ fmt.Fprintf(b, "%%%02x", c)
+ written = i + 1
+ }
+ b.WriteString(s[written:])
+ return written != 0
+}
+
+// Filters and normalizes srcset values which are comma separated
+// URLs followed by metadata.
+func srcsetFilterAndEscaper(args ...any) string {
+ s, t := stringify(args...)
+ switch t {
+ case contentTypeSrcset:
+ return s
+ case contentTypeURL:
+ // Normalizing gets rid of all HTML whitespace
+ // which separate the image URL from its metadata.
+ var b bytes.Buffer
+ if processURLOnto(s, true, &b) {
+ s = b.String()
+ }
+ // Additionally, commas separate one source from another.
+ return strings.ReplaceAll(s, ",", "%2c")
+ }
+
+ var b bytes.Buffer
+ written := 0
+ for i := 0; i < len(s); i++ {
+ if s[i] == ',' {
+ filterSrcsetElement(s, written, i, &b)
+ b.WriteString(",")
+ written = i + 1
+ }
+ }
+ filterSrcsetElement(s, written, len(s), &b)
+ return b.String()
+}
+
+// Derived from https://play.golang.org/p/Dhmj7FORT5
+const htmlSpaceAndASCIIAlnumBytes = "\x00\x36\x00\x00\x01\x00\xff\x03\xfe\xff\xff\x07\xfe\xff\xff\x07"
+
+// isHTMLSpace is true iff c is a whitespace character per
+// https://infra.spec.whatwg.org/#ascii-whitespace
+func isHTMLSpace(c byte) bool {
+ return (c <= 0x20) && 0 != (htmlSpaceAndASCIIAlnumBytes[c>>3]&(1<<uint(c&0x7)))
+}
+
+func isHTMLSpaceOrASCIIAlnum(c byte) bool {
+ return (c < 0x80) && 0 != (htmlSpaceAndASCIIAlnumBytes[c>>3]&(1<<uint(c&0x7)))
+}
+
+func filterSrcsetElement(s string, left int, right int, b *bytes.Buffer) {
+ start := left
+ for start < right && isHTMLSpace(s[start]) {
+ start++
+ }
+ end := right
+ for i := start; i < right; i++ {
+ if isHTMLSpace(s[i]) {
+ end = i
+ break
+ }
+ }
+ if url := s[start:end]; isSafeURL(url) {
+ // If image metadata is only spaces or alnums then
+ // we don't need to URL normalize it.
+ metadataOk := true
+ for i := end; i < right; i++ {
+ if !isHTMLSpaceOrASCIIAlnum(s[i]) {
+ metadataOk = false
+ break
+ }
+ }
+ if metadataOk {
+ b.WriteString(s[left:start])
+ processURLOnto(url, true, b)
+ b.WriteString(s[end:right])
+ return
+ }
+ }
+ b.WriteString("#")
+ b.WriteString(filterFailsafe)
+}
diff --git a/tpl/internal/go_templates/htmltemplate/url_test.go b/tpl/internal/go_templates/htmltemplate/url_test.go
new file mode 100644
index 000000000..72c8a4fe9
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/url_test.go
@@ -0,0 +1,172 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "testing"
+)
+
+func TestURLNormalizer(t *testing.T) {
+ tests := []struct {
+ url, want string
+ }{
+ {"", ""},
+ {
+ "http://example.com:80/foo/bar?q=foo%20&bar=x+y#frag",
+ "http://example.com:80/foo/bar?q=foo%20&bar=x+y#frag",
+ },
+ {" ", "%20"},
+ {"%7c", "%7c"},
+ {"%7C", "%7C"},
+ {"%2", "%252"},
+ {"%", "%25"},
+ {"%z", "%25z"},
+ {"/foo|bar/%5c\u1234", "/foo%7cbar/%5c%e1%88%b4"},
+ }
+ for _, test := range tests {
+ if got := urlNormalizer(test.url); test.want != got {
+ t.Errorf("%q: want\n\t%q\nbut got\n\t%q", test.url, test.want, got)
+ }
+ if test.want != urlNormalizer(test.want) {
+ t.Errorf("not idempotent: %q", test.want)
+ }
+ }
+}
+
+func TestURLFilters(t *testing.T) {
+ input := ("\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f" +
+ "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" +
+ ` !"#$%&'()*+,-./` +
+ `0123456789:;<=>?` +
+ `@ABCDEFGHIJKLMNO` +
+ `PQRSTUVWXYZ[\]^_` +
+ "`abcdefghijklmno" +
+ "pqrstuvwxyz{|}~\x7f" +
+ "\u00A0\u0100\u2028\u2029\ufeff\U0001D11E")
+
+ tests := []struct {
+ name string
+ escaper func(...any) string
+ escaped string
+ }{
+ {
+ "urlEscaper",
+ urlEscaper,
+ "%00%01%02%03%04%05%06%07%08%09%0a%0b%0c%0d%0e%0f" +
+ "%10%11%12%13%14%15%16%17%18%19%1a%1b%1c%1d%1e%1f" +
+ "%20%21%22%23%24%25%26%27%28%29%2a%2b%2c-.%2f" +
+ "0123456789%3a%3b%3c%3d%3e%3f" +
+ "%40ABCDEFGHIJKLMNO" +
+ "PQRSTUVWXYZ%5b%5c%5d%5e_" +
+ "%60abcdefghijklmno" +
+ "pqrstuvwxyz%7b%7c%7d~%7f" +
+ "%c2%a0%c4%80%e2%80%a8%e2%80%a9%ef%bb%bf%f0%9d%84%9e",
+ },
+ {
+ "urlNormalizer",
+ urlNormalizer,
+ "%00%01%02%03%04%05%06%07%08%09%0a%0b%0c%0d%0e%0f" +
+ "%10%11%12%13%14%15%16%17%18%19%1a%1b%1c%1d%1e%1f" +
+ "%20!%22#$%25&%27%28%29*+,-./" +
+ "0123456789:;%3c=%3e?" +
+ "@ABCDEFGHIJKLMNO" +
+ "PQRSTUVWXYZ[%5c]%5e_" +
+ "%60abcdefghijklmno" +
+ "pqrstuvwxyz%7b%7c%7d~%7f" +
+ "%c2%a0%c4%80%e2%80%a8%e2%80%a9%ef%bb%bf%f0%9d%84%9e",
+ },
+ }
+
+ for _, test := range tests {
+ if s := test.escaper(input); s != test.escaped {
+ t.Errorf("%s: want\n\t%q\ngot\n\t%q", test.name, test.escaped, s)
+ continue
+ }
+ }
+}
+
+func TestSrcsetFilter(t *testing.T) {
+ tests := []struct {
+ name string
+ input string
+ want string
+ }{
+ {
+ "one ok",
+ "http://example.com/img.png",
+ "http://example.com/img.png",
+ },
+ {
+ "one ok with metadata",
+ " /img.png 200w",
+ " /img.png 200w",
+ },
+ {
+ "one bad",
+ "javascript:alert(1) 200w",
+ "#ZgotmplZ",
+ },
+ {
+ "two ok",
+ "foo.png, bar.png",
+ "foo.png, bar.png",
+ },
+ {
+ "left bad",
+ "javascript:alert(1), /foo.png",
+ "#ZgotmplZ, /foo.png",
+ },
+ {
+ "right bad",
+ "/bogus#, javascript:alert(1)",
+ "/bogus#,#ZgotmplZ",
+ },
+ }
+
+ for _, test := range tests {
+ if got := srcsetFilterAndEscaper(test.input); got != test.want {
+ t.Errorf("%s: srcsetFilterAndEscaper(%q) want %q != %q", test.name, test.input, test.want, got)
+ }
+ }
+}
+
+func BenchmarkURLEscaper(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ urlEscaper("http://example.com:80/foo?q=bar%20&baz=x+y#frag")
+ }
+}
+
+func BenchmarkURLEscaperNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ urlEscaper("TheQuickBrownFoxJumpsOverTheLazyDog.")
+ }
+}
+
+func BenchmarkURLNormalizer(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ urlNormalizer("The quick brown fox jumps over the lazy dog.\n")
+ }
+}
+
+func BenchmarkURLNormalizerNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ urlNormalizer("http://example.com:80/foo?q=bar%20&baz=x+y#frag")
+ }
+}
+
+func BenchmarkSrcsetFilter(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ srcsetFilterAndEscaper(" /foo/bar.png 200w, /baz/boo(1).png")
+ }
+}
+
+func BenchmarkSrcsetFilterNoSpecials(b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ srcsetFilterAndEscaper("http://example.com:80/foo?q=bar%20&baz=x+y#frag")
+ }
+}
diff --git a/tpl/internal/go_templates/htmltemplate/urlpart_string.go b/tpl/internal/go_templates/htmltemplate/urlpart_string.go
new file mode 100644
index 000000000..813eea9e4
--- /dev/null
+++ b/tpl/internal/go_templates/htmltemplate/urlpart_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type urlPart"; DO NOT EDIT.
+
+package template
+
+import "strconv"
+
+const _urlPart_name = "urlPartNoneurlPartPreQueryurlPartQueryOrFragurlPartUnknown"
+
+var _urlPart_index = [...]uint8{0, 11, 26, 44, 58}
+
+func (i urlPart) String() string {
+ if i >= urlPart(len(_urlPart_index)-1) {
+ return "urlPart(" + strconv.FormatInt(int64(i), 10) + ")"
+ }
+ return _urlPart_name[_urlPart_index[i]:_urlPart_index[i+1]]
+}
diff --git a/tpl/internal/go_templates/testenv/testenv.go b/tpl/internal/go_templates/testenv/testenv.go
new file mode 100644
index 000000000..510b5406e
--- /dev/null
+++ b/tpl/internal/go_templates/testenv/testenv.go
@@ -0,0 +1,366 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package testenv provides information about what functionality
+// is available in different testing environments run by the Go team.
+//
+// It is an internal package because these details are specific
+// to the Go team's test setup (on build.golang.org) and not
+// fundamental to tests in general.
+package testenv
+
+import (
+ "bytes"
+ "errors"
+ "flag"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/cfg"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "testing"
+ "time"
+)
+
+// Builder reports the name of the builder running this test
+// (for example, "linux-amd64" or "windows-386-gce").
+// If the test is not running on the build infrastructure,
+// Builder returns the empty string.
+func Builder() string {
+ return os.Getenv("GO_BUILDER_NAME")
+}
+
+// HasGoBuild reports whether the current system can build programs with ``go build''
+// and then run them with os.StartProcess or exec.Command.
+func HasGoBuild() bool {
+ if os.Getenv("GO_GCFLAGS") != "" {
+ // It's too much work to require every caller of the go command
+ // to pass along "-gcflags="+os.Getenv("GO_GCFLAGS").
+ // For now, if $GO_GCFLAGS is set, report that we simply can't
+ // run go build.
+ return false
+ }
+ switch runtime.GOOS {
+ case "android", "js", "ios":
+ return false
+ }
+ return true
+}
+
+// MustHaveGoBuild checks that the current system can build programs with ``go build''
+// and then run them with os.StartProcess or exec.Command.
+// If not, MustHaveGoBuild calls t.Skip with an explanation.
+func MustHaveGoBuild(t testing.TB) {
+ if os.Getenv("GO_GCFLAGS") != "" {
+ t.Skipf("skipping test: 'go build' not compatible with setting $GO_GCFLAGS")
+ }
+ if !HasGoBuild() {
+ t.Skipf("skipping test: 'go build' not available on %s/%s", runtime.GOOS, runtime.GOARCH)
+ }
+}
+
+// HasGoRun reports whether the current system can run programs with ``go run.''
+func HasGoRun() bool {
+ // For now, having go run and having go build are the same.
+ return HasGoBuild()
+}
+
+// MustHaveGoRun checks that the current system can run programs with ``go run.''
+// If not, MustHaveGoRun calls t.Skip with an explanation.
+func MustHaveGoRun(t testing.TB) {
+ if !HasGoRun() {
+ t.Skipf("skipping test: 'go run' not available on %s/%s", runtime.GOOS, runtime.GOARCH)
+ }
+}
+
+// GoToolPath reports the path to the Go tool.
+// It is a convenience wrapper around GoTool.
+// If the tool is unavailable GoToolPath calls t.Skip.
+// If the tool should be available and isn't, GoToolPath calls t.Fatal.
+func GoToolPath(t testing.TB) string {
+ MustHaveGoBuild(t)
+ path, err := GoTool()
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Add all environment variables that affect the Go command to test metadata.
+ // Cached test results will be invalidated when these variables change.
+ // See golang.org/issue/32285.
+ for _, envVar := range strings.Fields(cfg.KnownEnv) {
+ os.Getenv(envVar)
+ }
+ return path
+}
+
+// GoTool reports the path to the Go tool.
+func GoTool() (string, error) {
+ if !HasGoBuild() {
+ return "", errors.New("platform cannot run go tool")
+ }
+ var exeSuffix string
+ if runtime.GOOS == "windows" {
+ exeSuffix = ".exe"
+ }
+ path := filepath.Join(runtime.GOROOT(), "bin", "go"+exeSuffix)
+ if _, err := os.Stat(path); err == nil {
+ return path, nil
+ }
+ goBin, err := exec.LookPath("go" + exeSuffix)
+ if err != nil {
+ return "", errors.New("cannot find go tool: " + err.Error())
+ }
+ return goBin, nil
+}
+
+// HasExec reports whether the current system can start new processes
+// using os.StartProcess or (more commonly) exec.Command.
+func HasExec() bool {
+ switch runtime.GOOS {
+ case "js", "ios":
+ return false
+ }
+ return true
+}
+
+// HasSrc reports whether the entire source tree is available under GOROOT.
+func HasSrc() bool {
+ switch runtime.GOOS {
+ case "ios":
+ return false
+ }
+ return true
+}
+
+// MustHaveExec checks that the current system can start new processes
+// using os.StartProcess or (more commonly) exec.Command.
+// If not, MustHaveExec calls t.Skip with an explanation.
+func MustHaveExec(t testing.TB) {
+ if !HasExec() {
+ t.Skipf("skipping test: cannot exec subprocess on %s/%s", runtime.GOOS, runtime.GOARCH)
+ }
+}
+
+var execPaths sync.Map // path -> error
+
+// MustHaveExecPath checks that the current system can start the named executable
+// using os.StartProcess or (more commonly) exec.Command.
+// If not, MustHaveExecPath calls t.Skip with an explanation.
+func MustHaveExecPath(t testing.TB, path string) {
+ MustHaveExec(t)
+
+ err, found := execPaths.Load(path)
+ if !found {
+ _, err = exec.LookPath(path)
+ err, _ = execPaths.LoadOrStore(path, err)
+ }
+ if err != nil {
+ t.Skipf("skipping test: %s: %s", path, err)
+ }
+}
+
+// HasExternalNetwork reports whether the current system can use
+// external (non-localhost) networks.
+func HasExternalNetwork() bool {
+ return !testing.Short() && runtime.GOOS != "js"
+}
+
+// MustHaveExternalNetwork checks that the current system can use
+// external (non-localhost) networks.
+// If not, MustHaveExternalNetwork calls t.Skip with an explanation.
+func MustHaveExternalNetwork(t testing.TB) {
+ if runtime.GOOS == "js" {
+ t.Skipf("skipping test: no external network on %s", runtime.GOOS)
+ }
+ if testing.Short() {
+ t.Skipf("skipping test: no external network in -short mode")
+ }
+}
+
+var haveCGO bool
+
+// HasCGO reports whether the current system can use cgo.
+func HasCGO() bool {
+ return haveCGO
+}
+
+// MustHaveCGO calls t.Skip if cgo is not available.
+func MustHaveCGO(t testing.TB) {
+ if !haveCGO {
+ t.Skipf("skipping test: no cgo")
+ }
+}
+
+// CanInternalLink reports whether the current system can link programs with
+// internal linking.
+// (This is the opposite of cmd/internal/sys.MustLinkExternal. Keep them in sync.)
+func CanInternalLink() bool {
+ switch runtime.GOOS {
+ case "android":
+ if runtime.GOARCH != "arm64" {
+ return false
+ }
+ case "ios":
+ if runtime.GOARCH == "arm64" {
+ return false
+ }
+ }
+ return true
+}
+
+// MustInternalLink checks that the current system can link programs with internal
+// linking.
+// If not, MustInternalLink calls t.Skip with an explanation.
+func MustInternalLink(t testing.TB) {
+ if !CanInternalLink() {
+ t.Skipf("skipping test: internal linking on %s/%s is not supported", runtime.GOOS, runtime.GOARCH)
+ }
+}
+
+// HasSymlink reports whether the current system can use os.Symlink.
+func HasSymlink() bool {
+ ok, _ := hasSymlink()
+ return ok
+}
+
+// MustHaveSymlink reports whether the current system can use os.Symlink.
+// If not, MustHaveSymlink calls t.Skip with an explanation.
+func MustHaveSymlink(t testing.TB) {
+ ok, reason := hasSymlink()
+ if !ok {
+ t.Skipf("skipping test: cannot make symlinks on %s/%s%s", runtime.GOOS, runtime.GOARCH, reason)
+ }
+}
+
+// HasLink reports whether the current system can use os.Link.
+func HasLink() bool {
+ // From Android release M (Marshmallow), hard linking files is blocked
+ // and an attempt to call link() on a file will return EACCES.
+ // - https://code.google.com/p/android-developer-preview/issues/detail?id=3150
+ return runtime.GOOS != "plan9" && runtime.GOOS != "android"
+}
+
+// MustHaveLink reports whether the current system can use os.Link.
+// If not, MustHaveLink calls t.Skip with an explanation.
+func MustHaveLink(t testing.TB) {
+ if !HasLink() {
+ t.Skipf("skipping test: hardlinks are not supported on %s/%s", runtime.GOOS, runtime.GOARCH)
+ }
+}
+
+var flaky = flag.Bool("flaky", false, "run known-flaky tests too")
+
+func SkipFlaky(t testing.TB, issue int) {
+ t.Helper()
+ if !*flaky {
+ t.Skipf("skipping known flaky test without the -flaky flag; see golang.org/issue/%d", issue)
+ }
+}
+
+func SkipFlakyNet(t testing.TB) {
+ t.Helper()
+ if v, _ := strconv.ParseBool(os.Getenv("GO_BUILDER_FLAKY_NET")); v {
+ t.Skip("skipping test on builder known to have frequent network failures")
+ }
+}
+
+// CleanCmdEnv will fill cmd.Env with the environment, excluding certain
+// variables that could modify the behavior of the Go tools such as
+// GODEBUG and GOTRACEBACK.
+func CleanCmdEnv(cmd *exec.Cmd) *exec.Cmd {
+ if cmd.Env != nil {
+ panic("environment already set")
+ }
+ for _, env := range os.Environ() {
+ // Exclude GODEBUG from the environment to prevent its output
+ // from breaking tests that are trying to parse other command output.
+ if strings.HasPrefix(env, "GODEBUG=") {
+ continue
+ }
+ // Exclude GOTRACEBACK for the same reason.
+ if strings.HasPrefix(env, "GOTRACEBACK=") {
+ continue
+ }
+ cmd.Env = append(cmd.Env, env)
+ }
+ return cmd
+}
+
+// CPUIsSlow reports whether the CPU running the test is suspected to be slow.
+func CPUIsSlow() bool {
+ switch runtime.GOARCH {
+ case "arm", "mips", "mipsle", "mips64", "mips64le":
+ return true
+ }
+ return false
+}
+
+// SkipIfShortAndSlow skips t if -short is set and the CPU running the test is
+// suspected to be slow.
+//
+// (This is useful for CPU-intensive tests that otherwise complete quickly.)
+func SkipIfShortAndSlow(t testing.TB) {
+ if testing.Short() && CPUIsSlow() {
+ t.Helper()
+ t.Skipf("skipping test in -short mode on %s", runtime.GOARCH)
+ }
+}
+
+// RunWithTimeout runs cmd and returns its combined output. If the
+// subprocess exits with a non-zero status, it will log that status
+// and return a non-nil error, but this is not considered fatal.
+func RunWithTimeout(t testing.TB, cmd *exec.Cmd) ([]byte, error) {
+ args := cmd.Args
+ if args == nil {
+ args = []string{cmd.Path}
+ }
+
+ var b bytes.Buffer
+ cmd.Stdout = &b
+ cmd.Stderr = &b
+ if err := cmd.Start(); err != nil {
+ t.Fatalf("starting %s: %v", args, err)
+ }
+
+ // If the process doesn't complete within 1 minute,
+ // assume it is hanging and kill it to get a stack trace.
+ p := cmd.Process
+ done := make(chan bool)
+ go func() {
+ scale := 1
+ // This GOARCH/GOOS test is copied from cmd/dist/test.go.
+ // TODO(iant): Have cmd/dist update the environment variable.
+ if runtime.GOARCH == "arm" || runtime.GOOS == "windows" {
+ scale = 2
+ }
+ if s := os.Getenv("GO_TEST_TIMEOUT_SCALE"); s != "" {
+ if sc, err := strconv.Atoi(s); err == nil {
+ scale = sc
+ }
+ }
+
+ select {
+ case <-done:
+ case <-time.After(time.Duration(scale) * time.Minute):
+ p.Signal(Sigquit)
+ // If SIGQUIT doesn't do it after a little
+ // while, kill the process.
+ select {
+ case <-done:
+ case <-time.After(time.Duration(scale) * 30 * time.Second):
+ p.Signal(os.Kill)
+ }
+ }
+ }()
+
+ err := cmd.Wait()
+ if err != nil {
+ t.Logf("%s exit status: %v", args, err)
+ }
+ close(done)
+
+ return b.Bytes(), err
+}
diff --git a/tpl/internal/go_templates/testenv/testenv_cgo.go b/tpl/internal/go_templates/testenv/testenv_cgo.go
new file mode 100644
index 000000000..7426a29c1
--- /dev/null
+++ b/tpl/internal/go_templates/testenv/testenv_cgo.go
@@ -0,0 +1,11 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build cgo
+
+package testenv
+
+func init() {
+ haveCGO = true
+}
diff --git a/tpl/internal/go_templates/testenv/testenv_notunix.go b/tpl/internal/go_templates/testenv/testenv_notunix.go
new file mode 100644
index 000000000..180206bc9
--- /dev/null
+++ b/tpl/internal/go_templates/testenv/testenv_notunix.go
@@ -0,0 +1,13 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build windows || plan9 || (js && wasm)
+
+package testenv
+
+import "os"
+
+// Sigquit is the signal to send to kill a hanging subprocess.
+// On Unix we send SIGQUIT, but on non-Unix we only have os.Kill.
+var Sigquit = os.Kill
diff --git a/tpl/internal/go_templates/testenv/testenv_notwin.go b/tpl/internal/go_templates/testenv/testenv_notwin.go
new file mode 100644
index 000000000..81171fd19
--- /dev/null
+++ b/tpl/internal/go_templates/testenv/testenv_notwin.go
@@ -0,0 +1,20 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !windows
+
+package testenv
+
+import (
+ "runtime"
+)
+
+func hasSymlink() (ok bool, reason string) {
+ switch runtime.GOOS {
+ case "android", "plan9":
+ return false, ""
+ }
+
+ return true, ""
+}
diff --git a/tpl/internal/go_templates/testenv/testenv_unix.go b/tpl/internal/go_templates/testenv/testenv_unix.go
new file mode 100644
index 000000000..3dc5daf45
--- /dev/null
+++ b/tpl/internal/go_templates/testenv/testenv_unix.go
@@ -0,0 +1,13 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris
+
+package testenv
+
+import "syscall"
+
+// Sigquit is the signal to send to kill a hanging subprocess.
+// Send SIGQUIT to get a stack trace.
+var Sigquit = syscall.SIGQUIT
diff --git a/tpl/internal/go_templates/testenv/testenv_windows.go b/tpl/internal/go_templates/testenv/testenv_windows.go
new file mode 100644
index 000000000..4802b1395
--- /dev/null
+++ b/tpl/internal/go_templates/testenv/testenv_windows.go
@@ -0,0 +1,47 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package testenv
+
+import (
+ "os"
+ "path/filepath"
+ "sync"
+ "syscall"
+)
+
+var symlinkOnce sync.Once
+var winSymlinkErr error
+
+func initWinHasSymlink() {
+ tmpdir, err := os.MkdirTemp("", "symtest")
+ if err != nil {
+ panic("failed to create temp directory: " + err.Error())
+ }
+ defer os.RemoveAll(tmpdir)
+
+ err = os.Symlink("target", filepath.Join(tmpdir, "symlink"))
+ if err != nil {
+ err = err.(*os.LinkError).Err
+ switch err {
+ case syscall.EWINDOWS, syscall.ERROR_PRIVILEGE_NOT_HELD:
+ winSymlinkErr = err
+ }
+ }
+}
+
+func hasSymlink() (ok bool, reason string) {
+ symlinkOnce.Do(initWinHasSymlink)
+
+ switch winSymlinkErr {
+ case nil:
+ return true, ""
+ case syscall.EWINDOWS:
+ return false, ": symlinks are not supported on your version of Windows"
+ case syscall.ERROR_PRIVILEGE_NOT_HELD:
+ return false, ": you don't have enough privileges to create symlinks"
+ }
+
+ return false, ""
+}
diff --git a/tpl/internal/go_templates/texttemplate/doc.go b/tpl/internal/go_templates/texttemplate/doc.go
new file mode 100644
index 000000000..10093881f
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/doc.go
@@ -0,0 +1,465 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package template implements data-driven templates for generating textual output.
+
+To generate HTML output, see package html/template, which has the same interface
+as this package but automatically secures HTML output against certain attacks.
+
+Templates are executed by applying them to a data structure. Annotations in the
+template refer to elements of the data structure (typically a field of a struct
+or a key in a map) to control execution and derive values to be displayed.
+Execution of the template walks the structure and sets the cursor, represented
+by a period '.' and called "dot", to the value at the current location in the
+structure as execution proceeds.
+
+The input text for a template is UTF-8-encoded text in any format.
+"Actions"--data evaluations or control structures--are delimited by
+"{{" and "}}"; all text outside actions is copied to the output unchanged.
+Except for raw strings, actions may not span newlines, although comments can.
+
+Once parsed, a template may be executed safely in parallel, although if parallel
+executions share a Writer the output may be interleaved.
+
+Here is a trivial example that prints "17 items are made of wool".
+
+ type Inventory struct {
+ Material string
+ Count uint
+ }
+ sweaters := Inventory{"wool", 17}
+ tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}")
+ if err != nil { panic(err) }
+ err = tmpl.Execute(os.Stdout, sweaters)
+ if err != nil { panic(err) }
+
+More intricate examples appear below.
+
+Text and spaces
+
+By default, all text between actions is copied verbatim when the template is
+executed. For example, the string " items are made of " in the example above
+appears on standard output when the program is run.
+
+However, to aid in formatting template source code, if an action's left
+delimiter (by default "{{") is followed immediately by a minus sign and white
+space, all trailing white space is trimmed from the immediately preceding text.
+Similarly, if the right delimiter ("}}") is preceded by white space and a minus
+sign, all leading white space is trimmed from the immediately following text.
+In these trim markers, the white space must be present:
+"{{- 3}}" is like "{{3}}" but trims the immediately preceding text, while
+"{{-3}}" parses as an action containing the number -3.
+
+For instance, when executing the template whose source is
+
+ "{{23 -}} < {{- 45}}"
+
+the generated output would be
+
+ "23<45"
+
+For this trimming, the definition of white space characters is the same as in Go:
+space, horizontal tab, carriage return, and newline.
+
+Actions
+
+Here is the list of actions. "Arguments" and "pipelines" are evaluations of
+data, defined in detail in the corresponding sections that follow.
+
+*/
+// {{/* a comment */}}
+// {{- /* a comment with white space trimmed from preceding and following text */ -}}
+// A comment; discarded. May contain newlines.
+// Comments do not nest and must start and end at the
+// delimiters, as shown here.
+/*
+
+ {{pipeline}}
+ The default textual representation (the same as would be
+ printed by fmt.Print) of the value of the pipeline is copied
+ to the output.
+
+ {{if pipeline}} T1 {{end}}
+ If the value of the pipeline is empty, no output is generated;
+ otherwise, T1 is executed. The empty values are false, 0, any
+ nil pointer or interface value, and any array, slice, map, or
+ string of length zero.
+ Dot is unaffected.
+
+ {{if pipeline}} T1 {{else}} T0 {{end}}
+ If the value of the pipeline is empty, T0 is executed;
+ otherwise, T1 is executed. Dot is unaffected.
+
+ {{if pipeline}} T1 {{else if pipeline}} T0 {{end}}
+ To simplify the appearance of if-else chains, the else action
+ of an if may include another if directly; the effect is exactly
+ the same as writing
+ {{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}}
+
+ {{range pipeline}} T1 {{end}}
+ The value of the pipeline must be an array, slice, map, or channel.
+ If the value of the pipeline has length zero, nothing is output;
+ otherwise, dot is set to the successive elements of the array,
+ slice, or map and T1 is executed. If the value is a map and the
+ keys are of basic type with a defined order, the elements will be
+ visited in sorted key order.
+
+ {{range pipeline}} T1 {{else}} T0 {{end}}
+ The value of the pipeline must be an array, slice, map, or channel.
+ If the value of the pipeline has length zero, dot is unaffected and
+ T0 is executed; otherwise, dot is set to the successive elements
+ of the array, slice, or map and T1 is executed.
+
+ {{break}}
+ The innermost {{range pipeline}} loop is ended early, stopping the
+ current iteration and bypassing all remaining iterations.
+
+ {{continue}}
+ The current iteration of the innermost {{range pipeline}} loop is
+ stopped, and the loop starts the next iteration.
+
+ {{template "name"}}
+ The template with the specified name is executed with nil data.
+
+ {{template "name" pipeline}}
+ The template with the specified name is executed with dot set
+ to the value of the pipeline.
+
+ {{block "name" pipeline}} T1 {{end}}
+ A block is shorthand for defining a template
+ {{define "name"}} T1 {{end}}
+ and then executing it in place
+ {{template "name" pipeline}}
+ The typical use is to define a set of root templates that are
+ then customized by redefining the block templates within.
+
+ {{with pipeline}} T1 {{end}}
+ If the value of the pipeline is empty, no output is generated;
+ otherwise, dot is set to the value of the pipeline and T1 is
+ executed.
+
+ {{with pipeline}} T1 {{else}} T0 {{end}}
+ If the value of the pipeline is empty, dot is unaffected and T0
+ is executed; otherwise, dot is set to the value of the pipeline
+ and T1 is executed.
+
+Arguments
+
+An argument is a simple value, denoted by one of the following.
+
+ - A boolean, string, character, integer, floating-point, imaginary
+ or complex constant in Go syntax. These behave like Go's untyped
+ constants. Note that, as in Go, whether a large integer constant
+ overflows when assigned or passed to a function can depend on whether
+ the host machine's ints are 32 or 64 bits.
+ - The keyword nil, representing an untyped Go nil.
+ - The character '.' (period):
+ .
+ The result is the value of dot.
+ - A variable name, which is a (possibly empty) alphanumeric string
+ preceded by a dollar sign, such as
+ $piOver2
+ or
+ $
+ The result is the value of the variable.
+ Variables are described below.
+ - The name of a field of the data, which must be a struct, preceded
+ by a period, such as
+ .Field
+ The result is the value of the field. Field invocations may be
+ chained:
+ .Field1.Field2
+ Fields can also be evaluated on variables, including chaining:
+ $x.Field1.Field2
+ - The name of a key of the data, which must be a map, preceded
+ by a period, such as
+ .Key
+ The result is the map element value indexed by the key.
+ Key invocations may be chained and combined with fields to any
+ depth:
+ .Field1.Key1.Field2.Key2
+ Although the key must be an alphanumeric identifier, unlike with
+ field names they do not need to start with an upper case letter.
+ Keys can also be evaluated on variables, including chaining:
+ $x.key1.key2
+ - The name of a niladic method of the data, preceded by a period,
+ such as
+ .Method
+ The result is the value of invoking the method with dot as the
+ receiver, dot.Method(). Such a method must have one return value (of
+ any type) or two return values, the second of which is an error.
+ If it has two and the returned error is non-nil, execution terminates
+ and an error is returned to the caller as the value of Execute.
+ Method invocations may be chained and combined with fields and keys
+ to any depth:
+ .Field1.Key1.Method1.Field2.Key2.Method2
+ Methods can also be evaluated on variables, including chaining:
+ $x.Method1.Field
+ - The name of a niladic function, such as
+ fun
+ The result is the value of invoking the function, fun(). The return
+ types and values behave as in methods. Functions and function
+ names are described below.
+ - A parenthesized instance of one of the above, for grouping. The result
+ may be accessed by a field or map key invocation.
+ print (.F1 arg1) (.F2 arg2)
+ (.StructValuedMethod "arg").Field
+
+Arguments may evaluate to any type; if they are pointers the implementation
+automatically indirects to the base type when required.
+If an evaluation yields a function value, such as a function-valued
+field of a struct, the function is not invoked automatically, but it
+can be used as a truth value for an if action and the like. To invoke
+it, use the call function, defined below.
+
+Pipelines
+
+A pipeline is a possibly chained sequence of "commands". A command is a simple
+value (argument) or a function or method call, possibly with multiple arguments:
+
+ Argument
+ The result is the value of evaluating the argument.
+ .Method [Argument...]
+ The method can be alone or the last element of a chain but,
+ unlike methods in the middle of a chain, it can take arguments.
+ The result is the value of calling the method with the
+ arguments:
+ dot.Method(Argument1, etc.)
+ functionName [Argument...]
+ The result is the value of calling the function associated
+ with the name:
+ function(Argument1, etc.)
+ Functions and function names are described below.
+
+A pipeline may be "chained" by separating a sequence of commands with pipeline
+characters '|'. In a chained pipeline, the result of each command is
+passed as the last argument of the following command. The output of the final
+command in the pipeline is the value of the pipeline.
+
+The output of a command will be either one value or two values, the second of
+which has type error. If that second value is present and evaluates to
+non-nil, execution terminates and the error is returned to the caller of
+Execute.
+
+Variables
+
+A pipeline inside an action may initialize a variable to capture the result.
+The initialization has syntax
+
+ $variable := pipeline
+
+where $variable is the name of the variable. An action that declares a
+variable produces no output.
+
+Variables previously declared can also be assigned, using the syntax
+
+ $variable = pipeline
+
+If a "range" action initializes a variable, the variable is set to the
+successive elements of the iteration. Also, a "range" may declare two
+variables, separated by a comma:
+
+ range $index, $element := pipeline
+
+in which case $index and $element are set to the successive values of the
+array/slice index or map key and element, respectively. Note that if there is
+only one variable, it is assigned the element; this is opposite to the
+convention in Go range clauses.
+
+A variable's scope extends to the "end" action of the control structure ("if",
+"with", or "range") in which it is declared, or to the end of the template if
+there is no such control structure. A template invocation does not inherit
+variables from the point of its invocation.
+
+When execution begins, $ is set to the data argument passed to Execute, that is,
+to the starting value of dot.
+
+Examples
+
+Here are some example one-line templates demonstrating pipelines and variables.
+All produce the quoted word "output":
+
+ {{"\"output\""}}
+ A string constant.
+ {{`"output"`}}
+ A raw string constant.
+ {{printf "%q" "output"}}
+ A function call.
+ {{"output" | printf "%q"}}
+ A function call whose final argument comes from the previous
+ command.
+ {{printf "%q" (print "out" "put")}}
+ A parenthesized argument.
+ {{"put" | printf "%s%s" "out" | printf "%q"}}
+ A more elaborate call.
+ {{"output" | printf "%s" | printf "%q"}}
+ A longer chain.
+ {{with "output"}}{{printf "%q" .}}{{end}}
+ A with action using dot.
+ {{with $x := "output" | printf "%q"}}{{$x}}{{end}}
+ A with action that creates and uses a variable.
+ {{with $x := "output"}}{{printf "%q" $x}}{{end}}
+ A with action that uses the variable in another action.
+ {{with $x := "output"}}{{$x | printf "%q"}}{{end}}
+ The same, but pipelined.
+
+Functions
+
+During execution functions are found in two function maps: first in the
+template, then in the global function map. By default, no functions are defined
+in the template but the Funcs method can be used to add them.
+
+Predefined global functions are named as follows.
+
+ and
+ Returns the boolean AND of its arguments by returning the
+ first empty argument or the last argument. That is,
+ "and x y" behaves as "if x then y else x."
+ Evaluation proceeds through the arguments left to right
+ and returns when the result is determined.
+ call
+ Returns the result of calling the first argument, which
+ must be a function, with the remaining arguments as parameters.
+ Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where
+ Y is a func-valued field, map entry, or the like.
+ The first argument must be the result of an evaluation
+ that yields a value of function type (as distinct from
+ a predefined function such as print). The function must
+ return either one or two result values, the second of which
+ is of type error. If the arguments don't match the function
+ or the returned error value is non-nil, execution stops.
+ html
+ Returns the escaped HTML equivalent of the textual
+ representation of its arguments. This function is unavailable
+ in html/template, with a few exceptions.
+ index
+ Returns the result of indexing its first argument by the
+ following arguments. Thus "index x 1 2 3" is, in Go syntax,
+ x[1][2][3]. Each indexed item must be a map, slice, or array.
+ slice
+ slice returns the result of slicing its first argument by the
+ remaining arguments. Thus "slice x 1 2" is, in Go syntax, x[1:2],
+ while "slice x" is x[:], "slice x 1" is x[1:], and "slice x 1 2 3"
+ is x[1:2:3]. The first argument must be a string, slice, or array.
+ js
+ Returns the escaped JavaScript equivalent of the textual
+ representation of its arguments.
+ len
+ Returns the integer length of its argument.
+ not
+ Returns the boolean negation of its single argument.
+ or
+ Returns the boolean OR of its arguments by returning the
+ first non-empty argument or the last argument, that is,
+ "or x y" behaves as "if x then x else y".
+ Evaluation proceeds through the arguments left to right
+ and returns when the result is determined.
+ print
+ An alias for fmt.Sprint
+ printf
+ An alias for fmt.Sprintf
+ println
+ An alias for fmt.Sprintln
+ urlquery
+ Returns the escaped value of the textual representation of
+ its arguments in a form suitable for embedding in a URL query.
+ This function is unavailable in html/template, with a few
+ exceptions.
+
+The boolean functions take any zero value to be false and a non-zero
+value to be true.
+
+There is also a set of binary comparison operators defined as
+functions:
+
+ eq
+ Returns the boolean truth of arg1 == arg2
+ ne
+ Returns the boolean truth of arg1 != arg2
+ lt
+ Returns the boolean truth of arg1 < arg2
+ le
+ Returns the boolean truth of arg1 <= arg2
+ gt
+ Returns the boolean truth of arg1 > arg2
+ ge
+ Returns the boolean truth of arg1 >= arg2
+
+For simpler multi-way equality tests, eq (only) accepts two or more
+arguments and compares the second and subsequent to the first,
+returning in effect
+
+ arg1==arg2 || arg1==arg3 || arg1==arg4 ...
+
+(Unlike with || in Go, however, eq is a function call and all the
+arguments will be evaluated.)
+
+The comparison functions work on any values whose type Go defines as
+comparable. For basic types such as integers, the rules are relaxed:
+size and exact type are ignored, so any integer value, signed or unsigned,
+may be compared with any other integer value. (The arithmetic value is compared,
+not the bit pattern, so all negative integers are less than all unsigned integers.)
+However, as usual, one may not compare an int with a float32 and so on.
+
+Associated templates
+
+Each template is named by a string specified when it is created. Also, each
+template is associated with zero or more other templates that it may invoke by
+name; such associations are transitive and form a name space of templates.
+
+A template may use a template invocation to instantiate another associated
+template; see the explanation of the "template" action above. The name must be
+that of a template associated with the template that contains the invocation.
+
+Nested template definitions
+
+When parsing a template, another template may be defined and associated with the
+template being parsed. Template definitions must appear at the top level of the
+template, much like global variables in a Go program.
+
+The syntax of such definitions is to surround each template declaration with a
+"define" and "end" action.
+
+The define action names the template being created by providing a string
+constant. Here is a simple example:
+
+ `{{define "T1"}}ONE{{end}}
+ {{define "T2"}}TWO{{end}}
+ {{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}}
+ {{template "T3"}}`
+
+This defines two templates, T1 and T2, and a third T3 that invokes the other two
+when it is executed. Finally it invokes T3. If executed this template will
+produce the text
+
+ ONE TWO
+
+By construction, a template may reside in only one association. If it's
+necessary to have a template addressable from multiple associations, the
+template definition must be parsed multiple times to create distinct *Template
+values, or must be copied with the Clone or AddParseTree method.
+
+Parse may be called multiple times to assemble the various associated templates;
+see the ParseFiles and ParseGlob functions and methods for simple ways to parse
+related templates stored in files.
+
+A template may be executed directly or through ExecuteTemplate, which executes
+an associated template identified by name. To invoke our example above, we
+might write,
+
+ err := tmpl.Execute(os.Stdout, "no data needed")
+ if err != nil {
+ log.Fatalf("execution failed: %s", err)
+ }
+
+or to invoke a particular template explicitly by name,
+
+ err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed")
+ if err != nil {
+ log.Fatalf("execution failed: %s", err)
+ }
+
+*/
+package template
diff --git a/tpl/internal/go_templates/texttemplate/example_test.go b/tpl/internal/go_templates/texttemplate/example_test.go
new file mode 100644
index 000000000..295a810b8
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/example_test.go
@@ -0,0 +1,113 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "log"
+ "os"
+ "strings"
+ "text/template"
+)
+
+func ExampleTemplate() {
+ // Define a template.
+ const letter = `
+Dear {{.Name}},
+{{if .Attended}}
+It was a pleasure to see you at the wedding.
+{{- else}}
+It is a shame you couldn't make it to the wedding.
+{{- end}}
+{{with .Gift -}}
+Thank you for the lovely {{.}}.
+{{end}}
+Best wishes,
+Josie
+`
+
+ // Prepare some data to insert into the template.
+ type Recipient struct {
+ Name, Gift string
+ Attended bool
+ }
+ var recipients = []Recipient{
+ {"Aunt Mildred", "bone china tea set", true},
+ {"Uncle John", "moleskin pants", false},
+ {"Cousin Rodney", "", false},
+ }
+
+ // Create a new template and parse the letter into it.
+ t := template.Must(template.New("letter").Parse(letter))
+
+ // Execute the template for each recipient.
+ for _, r := range recipients {
+ err := t.Execute(os.Stdout, r)
+ if err != nil {
+ log.Println("executing template:", err)
+ }
+ }
+
+ // Output:
+ // Dear Aunt Mildred,
+ //
+ // It was a pleasure to see you at the wedding.
+ // Thank you for the lovely bone china tea set.
+ //
+ // Best wishes,
+ // Josie
+ //
+ // Dear Uncle John,
+ //
+ // It is a shame you couldn't make it to the wedding.
+ // Thank you for the lovely moleskin pants.
+ //
+ // Best wishes,
+ // Josie
+ //
+ // Dear Cousin Rodney,
+ //
+ // It is a shame you couldn't make it to the wedding.
+ //
+ // Best wishes,
+ // Josie
+}
+
+// The following example is duplicated in html/template; keep them in sync.
+
+func ExampleTemplate_block() {
+ const (
+ master = `Names:{{block "list" .}}{{"\n"}}{{range .}}{{println "-" .}}{{end}}{{end}}`
+ overlay = `{{define "list"}} {{join . ", "}}{{end}} `
+ )
+ var (
+ funcs = template.FuncMap{"join": strings.Join}
+ guardians = []string{"Gamora", "Groot", "Nebula", "Rocket", "Star-Lord"}
+ )
+ masterTmpl, err := template.New("master").Funcs(funcs).Parse(master)
+ if err != nil {
+ log.Fatal(err)
+ }
+ overlayTmpl, err := template.Must(masterTmpl.Clone()).Parse(overlay)
+ if err != nil {
+ log.Fatal(err)
+ }
+ if err := masterTmpl.Execute(os.Stdout, guardians); err != nil {
+ log.Fatal(err)
+ }
+ if err := overlayTmpl.Execute(os.Stdout, guardians); err != nil {
+ log.Fatal(err)
+ }
+ // Output:
+ // Names:
+ // - Gamora
+ // - Groot
+ // - Nebula
+ // - Rocket
+ // - Star-Lord
+ // Names: Gamora, Groot, Nebula, Rocket, Star-Lord
+}
diff --git a/tpl/internal/go_templates/texttemplate/examplefiles_test.go b/tpl/internal/go_templates/texttemplate/examplefiles_test.go
new file mode 100644
index 000000000..bc91e87f9
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/examplefiles_test.go
@@ -0,0 +1,184 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "io"
+ "log"
+ "os"
+ "path/filepath"
+ "text/template"
+)
+
+// templateFile defines the contents of a template to be stored in a file, for testing.
+type templateFile struct {
+ name string
+ contents string
+}
+
+func createTestDir(files []templateFile) string {
+ dir, err := os.MkdirTemp("", "template")
+ if err != nil {
+ log.Fatal(err)
+ }
+ for _, file := range files {
+ f, err := os.Create(filepath.Join(dir, file.name))
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer f.Close()
+ _, err = io.WriteString(f, file.contents)
+ if err != nil {
+ log.Fatal(err)
+ }
+ }
+ return dir
+}
+
+// Here we demonstrate loading a set of templates from a directory.
+func ExampleTemplate_glob() {
+ // Here we create a temporary directory and populate it with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir := createTestDir([]templateFile{
+ // T0.tmpl is a plain template file that just invokes T1.
+ {"T0.tmpl", `T0 invokes T1: ({{template "T1"}})`},
+ // T1.tmpl defines a template, T1 that invokes T2.
+ {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
+ // T2.tmpl defines a template T2.
+ {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
+ })
+ // Clean up after the test; another quirk of running as an example.
+ defer os.RemoveAll(dir)
+
+ // pattern is the glob pattern used to find all the template files.
+ pattern := filepath.Join(dir, "*.tmpl")
+
+ // Here starts the example proper.
+ // T0.tmpl is the first name matched, so it becomes the starting template,
+ // the value returned by ParseGlob.
+ tmpl := template.Must(template.ParseGlob(pattern))
+
+ err := tmpl.Execute(os.Stdout, nil)
+ if err != nil {
+ log.Fatalf("template execution: %s", err)
+ }
+ // Output:
+ // T0 invokes T1: (T1 invokes T2: (This is T2))
+}
+
+// This example demonstrates one way to share some templates
+// and use them in different contexts. In this variant we add multiple driver
+// templates by hand to an existing bundle of templates.
+func ExampleTemplate_helpers() {
+ // Here we create a temporary directory and populate it with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir := createTestDir([]templateFile{
+ // T1.tmpl defines a template, T1 that invokes T2.
+ {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
+ // T2.tmpl defines a template T2.
+ {"T2.tmpl", `{{define "T2"}}This is T2{{end}}`},
+ })
+ // Clean up after the test; another quirk of running as an example.
+ defer os.RemoveAll(dir)
+
+ // pattern is the glob pattern used to find all the template files.
+ pattern := filepath.Join(dir, "*.tmpl")
+
+ // Here starts the example proper.
+ // Load the helpers.
+ templates := template.Must(template.ParseGlob(pattern))
+ // Add one driver template to the bunch; we do this with an explicit template definition.
+ _, err := templates.Parse("{{define `driver1`}}Driver 1 calls T1: ({{template `T1`}})\n{{end}}")
+ if err != nil {
+ log.Fatal("parsing driver1: ", err)
+ }
+ // Add another driver template.
+ _, err = templates.Parse("{{define `driver2`}}Driver 2 calls T2: ({{template `T2`}})\n{{end}}")
+ if err != nil {
+ log.Fatal("parsing driver2: ", err)
+ }
+ // We load all the templates before execution. This package does not require
+ // that behavior but html/template's escaping does, so it's a good habit.
+ err = templates.ExecuteTemplate(os.Stdout, "driver1", nil)
+ if err != nil {
+ log.Fatalf("driver1 execution: %s", err)
+ }
+ err = templates.ExecuteTemplate(os.Stdout, "driver2", nil)
+ if err != nil {
+ log.Fatalf("driver2 execution: %s", err)
+ }
+ // Output:
+ // Driver 1 calls T1: (T1 invokes T2: (This is T2))
+ // Driver 2 calls T2: (This is T2)
+}
+
+// This example demonstrates how to use one group of driver
+// templates with distinct sets of helper templates.
+func ExampleTemplate_share() {
+ // Here we create a temporary directory and populate it with our sample
+ // template definition files; usually the template files would already
+ // exist in some location known to the program.
+ dir := createTestDir([]templateFile{
+ // T0.tmpl is a plain template file that just invokes T1.
+ {"T0.tmpl", "T0 ({{.}} version) invokes T1: ({{template `T1`}})\n"},
+ // T1.tmpl defines a template, T1 that invokes T2. Note T2 is not defined
+ {"T1.tmpl", `{{define "T1"}}T1 invokes T2: ({{template "T2"}}){{end}}`},
+ })
+ // Clean up after the test; another quirk of running as an example.
+ defer os.RemoveAll(dir)
+
+ // pattern is the glob pattern used to find all the template files.
+ pattern := filepath.Join(dir, "*.tmpl")
+
+ // Here starts the example proper.
+ // Load the drivers.
+ drivers := template.Must(template.ParseGlob(pattern))
+
+ // We must define an implementation of the T2 template. First we clone
+ // the drivers, then add a definition of T2 to the template name space.
+
+ // 1. Clone the helper set to create a new name space from which to run them.
+ first, err := drivers.Clone()
+ if err != nil {
+ log.Fatal("cloning helpers: ", err)
+ }
+ // 2. Define T2, version A, and parse it.
+ _, err = first.Parse("{{define `T2`}}T2, version A{{end}}")
+ if err != nil {
+ log.Fatal("parsing T2: ", err)
+ }
+
+ // Now repeat the whole thing, using a different version of T2.
+ // 1. Clone the drivers.
+ second, err := drivers.Clone()
+ if err != nil {
+ log.Fatal("cloning drivers: ", err)
+ }
+ // 2. Define T2, version B, and parse it.
+ _, err = second.Parse("{{define `T2`}}T2, version B{{end}}")
+ if err != nil {
+ log.Fatal("parsing T2: ", err)
+ }
+
+ // Execute the templates in the reverse order to verify the
+ // first is unaffected by the second.
+ err = second.ExecuteTemplate(os.Stdout, "T0.tmpl", "second")
+ if err != nil {
+ log.Fatalf("second execution: %s", err)
+ }
+ err = first.ExecuteTemplate(os.Stdout, "T0.tmpl", "first")
+ if err != nil {
+ log.Fatalf("first: execution: %s", err)
+ }
+
+ // Output:
+ // T0 (second version) invokes T1: (T1 invokes T2: (T2, version B))
+ // T0 (first version) invokes T1: (T1 invokes T2: (T2, version A))
+}
diff --git a/tpl/internal/go_templates/texttemplate/examplefunc_test.go b/tpl/internal/go_templates/texttemplate/examplefunc_test.go
new file mode 100644
index 000000000..4a13b1f9a
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/examplefunc_test.go
@@ -0,0 +1,57 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "log"
+ "os"
+ "strings"
+ "text/template"
+)
+
+// This example demonstrates a custom function to process template text.
+// It installs the strings.Title function and uses it to
+// Make Title Text Look Good In Our Template's Output.
+func ExampleTemplate_func() {
+ // First we create a FuncMap with which to register the function.
+ funcMap := template.FuncMap{
+ // The name "title" is what the function will be called in the template text.
+ "title": strings.Title,
+ }
+
+ // A simple template definition to test our function.
+ // We print the input text several ways:
+ // - the original
+ // - title-cased
+ // - title-cased and then printed with %q
+ // - printed with %q and then title-cased.
+ const templateText = `
+Input: {{printf "%q" .}}
+Output 0: {{title .}}
+Output 1: {{title . | printf "%q"}}
+Output 2: {{printf "%q" . | title}}
+`
+
+ // Create a template, add the function map, and parse the text.
+ tmpl, err := template.New("titleTest").Funcs(funcMap).Parse(templateText)
+ if err != nil {
+ log.Fatalf("parsing: %s", err)
+ }
+
+ // Run the template to verify the output.
+ err = tmpl.Execute(os.Stdout, "the go programming language")
+ if err != nil {
+ log.Fatalf("execution: %s", err)
+ }
+
+ // Output:
+ // Input: "the go programming language"
+ // Output 0: The Go Programming Language
+ // Output 1: "The Go Programming Language"
+ // Output 2: "The Go Programming Language"
+}
diff --git a/tpl/internal/go_templates/texttemplate/exec.go b/tpl/internal/go_templates/texttemplate/exec.go
new file mode 100644
index 000000000..4460771cb
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/exec.go
@@ -0,0 +1,1045 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "errors"
+ "fmt"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/fmtsort"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+ "io"
+ "reflect"
+ "runtime"
+ "strings"
+)
+
+// maxExecDepth specifies the maximum stack depth of templates within
+// templates. This limit is only practically reached by accidentally
+// recursive template invocations. This limit allows us to return
+// an error instead of triggering a stack overflow.
+var maxExecDepth = initMaxExecDepth()
+
+func initMaxExecDepth() int {
+ if runtime.GOARCH == "wasm" {
+ return 1000
+ }
+ return 100000
+}
+
+// state represents the state of an execution. It's not part of the
+// template so that multiple executions of the same template
+// can execute in parallel.
+type stateOld struct {
+ tmpl *Template
+ wr io.Writer
+ node parse.Node // current node, for errors
+ vars []variable // push-down stack of variable values.
+ depth int // the height of the stack of executing templates.
+}
+
+// variable holds the dynamic value of a variable such as $, $x etc.
+type variable struct {
+ name string
+ value reflect.Value
+}
+
+// push pushes a new variable on the stack.
+func (s *state) push(name string, value reflect.Value) {
+ s.vars = append(s.vars, variable{name, value})
+}
+
+// mark returns the length of the variable stack.
+func (s *state) mark() int {
+ return len(s.vars)
+}
+
+// pop pops the variable stack up to the mark.
+func (s *state) pop(mark int) {
+ s.vars = s.vars[0:mark]
+}
+
+// setVar overwrites the last declared variable with the given name.
+// Used by variable assignments.
+func (s *state) setVar(name string, value reflect.Value) {
+ for i := s.mark() - 1; i >= 0; i-- {
+ if s.vars[i].name == name {
+ s.vars[i].value = value
+ return
+ }
+ }
+ s.errorf("undefined variable: %s", name)
+}
+
+// setTopVar overwrites the top-nth variable on the stack. Used by range iterations.
+func (s *state) setTopVar(n int, value reflect.Value) {
+ s.vars[len(s.vars)-n].value = value
+}
+
+// varValue returns the value of the named variable.
+func (s *state) varValue(name string) reflect.Value {
+ for i := s.mark() - 1; i >= 0; i-- {
+ if s.vars[i].name == name {
+ return s.vars[i].value
+ }
+ }
+ s.errorf("undefined variable: %s", name)
+ return zero
+}
+
+var zero reflect.Value
+
+type missingValType struct{}
+
+var missingVal = reflect.ValueOf(missingValType{})
+
+// at marks the state to be on node n, for error reporting.
+func (s *state) at(node parse.Node) {
+ s.node = node
+}
+
+// doublePercent returns the string with %'s replaced by %%, if necessary,
+// so it can be used safely inside a Printf format string.
+func doublePercent(str string) string {
+ return strings.ReplaceAll(str, "%", "%%")
+}
+
+// TODO: It would be nice if ExecError was more broken down, but
+// the way ErrorContext embeds the template name makes the
+// processing too clumsy.
+
+// ExecError is the custom error type returned when Execute has an
+// error evaluating its template. (If a write error occurs, the actual
+// error is returned; it will not be of type ExecError.)
+type ExecError struct {
+ Name string // Name of template.
+ Err error // Pre-formatted error.
+}
+
+func (e ExecError) Error() string {
+ return e.Err.Error()
+}
+
+func (e ExecError) Unwrap() error {
+ return e.Err
+}
+
+// errorf records an ExecError and terminates processing.
+func (s *state) errorf(format string, args ...any) {
+ name := doublePercent(s.tmpl.Name())
+ if s.node == nil {
+ format = fmt.Sprintf("template: %s: %s", name, format)
+ } else {
+ location, context := s.tmpl.ErrorContext(s.node)
+ format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format)
+ }
+ panic(ExecError{
+ Name: s.tmpl.Name(),
+ Err: fmt.Errorf(format, args...),
+ })
+}
+
+// writeError is the wrapper type used internally when Execute has an
+// error writing to its output. We strip the wrapper in errRecover.
+// Note that this is not an implementation of error, so it cannot escape
+// from the package as an error value.
+type writeError struct {
+ Err error // Original error.
+}
+
+func (s *state) writeError(err error) {
+ panic(writeError{
+ Err: err,
+ })
+}
+
+// errRecover is the handler that turns panics into returns from the top
+// level of Parse.
+func errRecover(errp *error) {
+ e := recover()
+ if e != nil {
+ switch err := e.(type) {
+ case runtime.Error:
+ panic(e)
+ case writeError:
+ *errp = err.Err // Strip the wrapper.
+ case ExecError:
+ *errp = err // Keep the wrapper.
+ default:
+ panic(e)
+ }
+ }
+}
+
+// ExecuteTemplate applies the template associated with t that has the given name
+// to the specified data object and writes the output to wr.
+// If an error occurs executing the template or writing its output,
+// execution stops, but partial results may already have been written to
+// the output writer.
+// A template may be executed safely in parallel, although if parallel
+// executions share a Writer the output may be interleaved.
+func (t *Template) ExecuteTemplate(wr io.Writer, name string, data any) error {
+ tmpl := t.Lookup(name)
+ if tmpl == nil {
+ return fmt.Errorf("template: no template %q associated with template %q", name, t.name)
+ }
+ return tmpl.Execute(wr, data)
+}
+
+// Execute applies a parsed template to the specified data object,
+// and writes the output to wr.
+// If an error occurs executing the template or writing its output,
+// execution stops, but partial results may already have been written to
+// the output writer.
+// A template may be executed safely in parallel, although if parallel
+// executions share a Writer the output may be interleaved.
+//
+// If data is a reflect.Value, the template applies to the concrete
+// value that the reflect.Value holds, as in fmt.Print.
+func (t *Template) Execute(wr io.Writer, data any) error {
+ return t.execute(wr, data)
+}
+
+func (t *Template) execute(wr io.Writer, data any) (err error) {
+ defer errRecover(&err)
+ value, ok := data.(reflect.Value)
+ if !ok {
+ value = reflect.ValueOf(data)
+ }
+ state := &state{
+ tmpl: t,
+ wr: wr,
+ vars: []variable{{"$", value}},
+ }
+ if t.Tree == nil || t.Root == nil {
+ state.errorf("%q is an incomplete or empty template", t.Name())
+ }
+ state.walk(value, t.Root)
+ return
+}
+
+// DefinedTemplates returns a string listing the defined templates,
+// prefixed by the string "; defined templates are: ". If there are none,
+// it returns the empty string. For generating an error message here
+// and in html/template.
+func (t *Template) DefinedTemplates() string {
+ if t.common == nil {
+ return ""
+ }
+ var b strings.Builder
+ t.muTmpl.RLock()
+ defer t.muTmpl.RUnlock()
+ for name, tmpl := range t.tmpl {
+ if tmpl.Tree == nil || tmpl.Root == nil {
+ continue
+ }
+ if b.Len() == 0 {
+ b.WriteString("; defined templates are: ")
+ } else {
+ b.WriteString(", ")
+ }
+ fmt.Fprintf(&b, "%q", name)
+ }
+ return b.String()
+}
+
+// Sentinel errors for use with panic to signal early exits from range loops.
+var (
+ walkBreak = errors.New("break")
+ walkContinue = errors.New("continue")
+)
+
+// Walk functions step through the major pieces of the template structure,
+// generating output as they go.
+func (s *state) walk(dot reflect.Value, node parse.Node) {
+ s.at(node)
+ switch node := node.(type) {
+ case *parse.ActionNode:
+ // Do not pop variables so they persist until next end.
+ // Also, if the action declares variables, don't print the result.
+ val := s.evalPipeline(dot, node.Pipe)
+ if len(node.Pipe.Decl) == 0 {
+ s.printValue(node, val)
+ }
+ case *parse.BreakNode:
+ panic(walkBreak)
+ case *parse.CommentNode:
+ case *parse.ContinueNode:
+ panic(walkContinue)
+ case *parse.IfNode:
+ s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList)
+ case *parse.ListNode:
+ for _, node := range node.Nodes {
+ s.walk(dot, node)
+ }
+ case *parse.RangeNode:
+ s.walkRange(dot, node)
+ case *parse.TemplateNode:
+ s.walkTemplate(dot, node)
+ case *parse.TextNode:
+ if _, err := s.wr.Write(node.Text); err != nil {
+ s.writeError(err)
+ }
+ case *parse.WithNode:
+ s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList)
+ default:
+ s.errorf("unknown node: %s", node)
+ }
+}
+
+// walkIfOrWith walks an 'if' or 'with' node. The two control structures
+// are identical in behavior except that 'with' sets dot.
+func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) {
+ defer s.pop(s.mark())
+ val := s.evalPipeline(dot, pipe)
+ truth, ok := isTrue(indirectInterface(val))
+ if !ok {
+ s.errorf("if/with can't use %v", val)
+ }
+ if truth {
+ if typ == parse.NodeWith {
+ s.walk(val, list)
+ } else {
+ s.walk(dot, list)
+ }
+ } else if elseList != nil {
+ s.walk(dot, elseList)
+ }
+}
+
+// IsTrue reports whether the value is 'true', in the sense of not the zero of its type,
+// and whether the value has a meaningful truth value. This is the definition of
+// truth used by if and other such actions.
+func IsTrue(val any) (truth, ok bool) {
+ return isTrue(reflect.ValueOf(val))
+}
+
+func isTrueOld(val reflect.Value) (truth, ok bool) {
+ if !val.IsValid() {
+ // Something like var x interface{}, never set. It's a form of nil.
+ return false, true
+ }
+ switch val.Kind() {
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ truth = val.Len() > 0
+ case reflect.Bool:
+ truth = val.Bool()
+ case reflect.Complex64, reflect.Complex128:
+ truth = val.Complex() != 0
+ case reflect.Chan, reflect.Func, reflect.Pointer, reflect.Interface:
+ truth = !val.IsNil()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ truth = val.Int() != 0
+ case reflect.Float32, reflect.Float64:
+ truth = val.Float() != 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ truth = val.Uint() != 0
+ case reflect.Struct:
+ truth = true // Struct values are always true.
+ default:
+ return
+ }
+ return truth, true
+}
+
+func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) {
+ s.at(r)
+ defer func() {
+ if r := recover(); r != nil && r != walkBreak {
+ panic(r)
+ }
+ }()
+ defer s.pop(s.mark())
+ val, _ := indirect(s.evalPipeline(dot, r.Pipe))
+ // mark top of stack before any variables in the body are pushed.
+ mark := s.mark()
+ oneIteration := func(index, elem reflect.Value) {
+ // Set top var (lexically the second if there are two) to the element.
+ if len(r.Pipe.Decl) > 0 {
+ s.setTopVar(1, elem)
+ }
+ // Set next var (lexically the first if there are two) to the index.
+ if len(r.Pipe.Decl) > 1 {
+ s.setTopVar(2, index)
+ }
+ defer s.pop(mark)
+ defer func() {
+ // Consume panic(walkContinue)
+ if r := recover(); r != nil && r != walkContinue {
+ panic(r)
+ }
+ }()
+ s.walk(elem, r.List)
+ }
+ switch val.Kind() {
+ case reflect.Array, reflect.Slice:
+ if val.Len() == 0 {
+ break
+ }
+ for i := 0; i < val.Len(); i++ {
+ oneIteration(reflect.ValueOf(i), val.Index(i))
+ }
+ return
+ case reflect.Map:
+ if val.Len() == 0 {
+ break
+ }
+ om := fmtsort.Sort(val)
+ for i, key := range om.Key {
+ oneIteration(key, om.Value[i])
+ }
+ return
+ case reflect.Chan:
+ if val.IsNil() {
+ break
+ }
+ if val.Type().ChanDir() == reflect.SendDir {
+ s.errorf("range over send-only channel %v", val)
+ break
+ }
+ i := 0
+ for ; ; i++ {
+ elem, ok := val.Recv()
+ if !ok {
+ break
+ }
+ oneIteration(reflect.ValueOf(i), elem)
+ }
+ if i == 0 {
+ break
+ }
+ return
+ case reflect.Invalid:
+ break // An invalid value is likely a nil map, etc. and acts like an empty map.
+ default:
+ s.errorf("range can't iterate over %v", val)
+ }
+ if r.ElseList != nil {
+ s.walk(dot, r.ElseList)
+ }
+}
+
+func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) {
+ s.at(t)
+ tmpl := s.tmpl.Lookup(t.Name)
+ if tmpl == nil {
+ s.errorf("template %q not defined", t.Name)
+ }
+ if s.depth == maxExecDepth {
+ s.errorf("exceeded maximum template depth (%v)", maxExecDepth)
+ }
+ // Variables declared by the pipeline persist.
+ dot = s.evalPipeline(dot, t.Pipe)
+ newState := *s
+ newState.depth++
+ newState.tmpl = tmpl
+ // No dynamic scoping: template invocations inherit no variables.
+ newState.vars = []variable{{"$", dot}}
+ newState.walk(dot, tmpl.Root)
+}
+
+// Eval functions evaluate pipelines, commands, and their elements and extract
+// values from the data structure by examining fields, calling methods, and so on.
+// The printing of those values happens only through walk functions.
+
+// evalPipeline returns the value acquired by evaluating a pipeline. If the
+// pipeline has a variable declaration, the variable will be pushed on the
+// stack. Callers should therefore pop the stack after they are finished
+// executing commands depending on the pipeline value.
+func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) {
+ if pipe == nil {
+ return
+ }
+ s.at(pipe)
+ value = missingVal
+ for _, cmd := range pipe.Cmds {
+ value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg.
+ // If the object has type interface{}, dig down one level to the thing inside.
+ if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 {
+ value = reflect.ValueOf(value.Interface()) // lovely!
+ }
+ }
+ for _, variable := range pipe.Decl {
+ if pipe.IsAssign {
+ s.setVar(variable.Ident[0], value)
+ } else {
+ s.push(variable.Ident[0], value)
+ }
+ }
+ return value
+}
+
+func (s *state) notAFunction(args []parse.Node, final reflect.Value) {
+ if len(args) > 1 || final != missingVal {
+ s.errorf("can't give argument to non-function %s", args[0])
+ }
+}
+
+func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value {
+ firstWord := cmd.Args[0]
+ switch n := firstWord.(type) {
+ case *parse.FieldNode:
+ return s.evalFieldNode(dot, n, cmd.Args, final)
+ case *parse.ChainNode:
+ return s.evalChainNode(dot, n, cmd.Args, final)
+ case *parse.IdentifierNode:
+ // Must be a function.
+ return s.evalFunction(dot, n, cmd, cmd.Args, final)
+ case *parse.PipeNode:
+ // Parenthesized pipeline. The arguments are all inside the pipeline; final must be absent.
+ s.notAFunction(cmd.Args, final)
+ return s.evalPipeline(dot, n)
+ case *parse.VariableNode:
+ return s.evalVariableNode(dot, n, cmd.Args, final)
+ }
+ s.at(firstWord)
+ s.notAFunction(cmd.Args, final)
+ switch word := firstWord.(type) {
+ case *parse.BoolNode:
+ return reflect.ValueOf(word.True)
+ case *parse.DotNode:
+ return dot
+ case *parse.NilNode:
+ s.errorf("nil is not a command")
+ case *parse.NumberNode:
+ return s.idealConstant(word)
+ case *parse.StringNode:
+ return reflect.ValueOf(word.Text)
+ }
+ s.errorf("can't evaluate command %q", firstWord)
+ panic("not reached")
+}
+
+// idealConstant is called to return the value of a number in a context where
+// we don't know the type. In that case, the syntax of the number tells us
+// its type, and we use Go rules to resolve. Note there is no such thing as
+// a uint ideal constant in this situation - the value must be of int type.
+func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value {
+ // These are ideal constants but we don't know the type
+ // and we have no context. (If it was a method argument,
+ // we'd know what we need.) The syntax guides us to some extent.
+ s.at(constant)
+ switch {
+ case constant.IsComplex:
+ return reflect.ValueOf(constant.Complex128) // incontrovertible.
+
+ case constant.IsFloat &&
+ !isHexInt(constant.Text) && !isRuneInt(constant.Text) &&
+ strings.ContainsAny(constant.Text, ".eEpP"):
+ return reflect.ValueOf(constant.Float64)
+
+ case constant.IsInt:
+ n := int(constant.Int64)
+ if int64(n) != constant.Int64 {
+ s.errorf("%s overflows int", constant.Text)
+ }
+ return reflect.ValueOf(n)
+
+ case constant.IsUint:
+ s.errorf("%s overflows int", constant.Text)
+ }
+ return zero
+}
+
+func isRuneInt(s string) bool {
+ return len(s) > 0 && s[0] == '\''
+}
+
+func isHexInt(s string) bool {
+ return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X') && !strings.ContainsAny(s, "pP")
+}
+
+func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value {
+ s.at(field)
+ return s.evalFieldChain(dot, dot, field, field.Ident, args, final)
+}
+
+func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value {
+ s.at(chain)
+ if len(chain.Field) == 0 {
+ s.errorf("internal error: no fields in evalChainNode")
+ }
+ if chain.Node.Type() == parse.NodeNil {
+ s.errorf("indirection through explicit nil in %s", chain)
+ }
+ // (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields.
+ pipe := s.evalArg(dot, nil, chain.Node)
+ return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final)
+}
+
+func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value {
+ // $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields.
+ s.at(variable)
+ value := s.varValue(variable.Ident[0])
+ if len(variable.Ident) == 1 {
+ s.notAFunction(args, final)
+ return value
+ }
+ return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final)
+}
+
+// evalFieldChain evaluates .X.Y.Z possibly followed by arguments.
+// dot is the environment in which to evaluate arguments, while
+// receiver is the value being walked along the chain.
+func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value {
+ n := len(ident)
+ for i := 0; i < n-1; i++ {
+ receiver = s.evalField(dot, ident[i], node, nil, missingVal, receiver)
+ }
+ // Now if it's a method, it gets the arguments.
+ return s.evalField(dot, ident[n-1], node, args, final, receiver)
+}
+
+func (s *state) evalFunctionOld(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value {
+ s.at(node)
+ name := node.Ident
+ function, isBuiltin, ok := findFunction(name, s.tmpl)
+ if !ok {
+ s.errorf("%q is not a defined function", name)
+ }
+ return s.evalCall(dot, function, isBuiltin, cmd, name, args, final)
+}
+
// evalFieldOld evaluates an expression like (.Field) or (.Field arg1 arg2).
// The 'final' argument represents the return value from the preceding
// value of the pipeline, if any.
// (Vendored from upstream Go text/template; kept behaviorally identical.)
func (s *state) evalFieldOld(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value {
	if !receiver.IsValid() {
		if s.tmpl.option.missingKey == mapError { // Treat invalid value as missing map key.
			s.errorf("nil data; no entry for key %q", fieldName)
		}
		return zero
	}
	typ := receiver.Type()
	receiver, isNil := indirect(receiver)
	if receiver.Kind() == reflect.Interface && isNil {
		// Calling a method on a nil interface can't work. The
		// MethodByName method call below would panic.
		s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
		return zero
	}

	// Unless it's an interface, need to get to a value of type *T to guarantee
	// we see all methods of T and *T.
	ptr := receiver
	if ptr.Kind() != reflect.Interface && ptr.Kind() != reflect.Pointer && ptr.CanAddr() {
		ptr = ptr.Addr()
	}
	// Methods take precedence over struct fields and map keys of the same name.
	if method := ptr.MethodByName(fieldName); method.IsValid() {
		return s.evalCall(dot, method, false, node, fieldName, args, final)
	}
	hasArgs := len(args) > 1 || final != missingVal
	// It's not a method; must be a field of a struct or an element of a map.
	switch receiver.Kind() {
	case reflect.Struct:
		tField, ok := receiver.Type().FieldByName(fieldName)
		if ok {
			field, err := receiver.FieldByIndexErr(tField.Index)
			if !tField.IsExported() {
				s.errorf("%s is an unexported field of struct type %s", fieldName, typ)
			}
			if err != nil {
				s.errorf("%v", err)
			}
			// If it's a function, we must call it.
			if hasArgs {
				s.errorf("%s has arguments but cannot be invoked as function", fieldName)
			}
			return field
		}
	case reflect.Map:
		// If it's a map, attempt to use the field name as a key.
		nameVal := reflect.ValueOf(fieldName)
		if nameVal.Type().AssignableTo(receiver.Type().Key()) {
			if hasArgs {
				s.errorf("%s is not a method but has arguments", fieldName)
			}
			result := receiver.MapIndex(nameVal)
			if !result.IsValid() {
				// Key absent: behavior is controlled by the missingkey option.
				switch s.tmpl.option.missingKey {
				case mapInvalid:
					// Just use the invalid value.
				case mapZeroValue:
					result = reflect.Zero(receiver.Type().Elem())
				case mapError:
					s.errorf("map has no entry for key %q", fieldName)
				}
			}
			return result
		}
	case reflect.Pointer:
		etyp := receiver.Type().Elem()
		if etyp.Kind() == reflect.Struct {
			if _, ok := etyp.FieldByName(fieldName); !ok {
				// If there's no such field, say "can't evaluate"
				// instead of "nil pointer evaluating".
				break
			}
		}
		if isNil {
			s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
		}
	}
	s.errorf("can't evaluate field %s in type %s", fieldName, typ)
	panic("not reached")
}
+
// Sentinel reflect.Types used when deciding how to print, wrap, and
// unwrap values during template execution.
var (
	errorType        = reflect.TypeOf((*error)(nil)).Elem()
	fmtStringerType  = reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
	reflectValueType = reflect.TypeOf((*reflect.Value)(nil)).Elem()
)
+
// evalCallOld executes a function or method call. If it's a method, fun already has the receiver bound, so
// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0]
// as the function itself.
// (Vendored from upstream Go text/template; kept behaviorally identical.)
func (s *state) evalCallOld(dot, fun reflect.Value, isBuiltin bool, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value {
	if args != nil {
		args = args[1:] // Zeroth arg is function name/node; not passed to function.
	}
	typ := fun.Type()
	numIn := len(args)
	if final != missingVal {
		numIn++
	}
	numFixed := len(args)
	if typ.IsVariadic() {
		numFixed = typ.NumIn() - 1 // last arg is the variadic one.
		if numIn < numFixed {
			s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args))
		}
	} else if numIn != typ.NumIn() {
		s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), numIn)
	}
	if !goodFunc(typ) {
		// TODO: This could still be a confusing error; maybe goodFunc should provide info.
		s.errorf("can't call method/function %q with %d results", name, typ.NumOut())
	}

	// unwrap unboxes a value that is carried around as a reflect.Value
	// inside another reflect.Value.
	unwrap := func(v reflect.Value) reflect.Value {
		if v.Type() == reflectValueType {
			v = v.Interface().(reflect.Value)
		}
		return v
	}

	// Special case for builtin and/or, which short-circuit.
	if isBuiltin && (name == "and" || name == "or") {
		argType := typ.In(0)
		var v reflect.Value
		for _, arg := range args {
			v = s.evalArg(dot, argType, arg).Interface().(reflect.Value)
			if truth(v) == (name == "or") {
				// This value was already unwrapped
				// by the .Interface().(reflect.Value).
				return v
			}
		}
		if final != missingVal {
			// The last argument to and/or is coming from
			// the pipeline. We didn't short circuit on an earlier
			// argument, so we are going to return this one.
			// We don't have to evaluate final, but we do
			// have to check its type. Then, since we are
			// going to return it, we have to unwrap it.
			v = unwrap(s.validateType(final, argType))
		}
		return v
	}

	// Build the arg list.
	argv := make([]reflect.Value, numIn)
	// Args must be evaluated. Fixed args first.
	i := 0
	for ; i < numFixed && i < len(args); i++ {
		argv[i] = s.evalArg(dot, typ.In(i), args[i])
	}
	// Now the ... args.
	if typ.IsVariadic() {
		argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice.
		for ; i < len(args); i++ {
			argv[i] = s.evalArg(dot, argType, args[i])
		}
	}
	// Add final value if necessary.
	if final != missingVal {
		t := typ.In(typ.NumIn() - 1)
		if typ.IsVariadic() {
			if numIn-1 < numFixed {
				// The added final argument corresponds to a fixed parameter of the function.
				// Validate against the type of the actual parameter.
				t = typ.In(numIn - 1)
			} else {
				// The added final argument corresponds to the variadic part.
				// Validate against the type of the elements of the variadic slice.
				t = t.Elem()
			}
		}
		argv[i] = s.validateType(final, t)
	}
	v, err := safeCall(fun, argv)
	// If we have an error that is not nil, stop execution and return that
	// error to the caller.
	if err != nil {
		s.at(node)
		s.errorf("error calling %s: %w", name, err)
	}
	return unwrap(v)
}
+
+// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero.
+func canBeNil(typ reflect.Type) bool {
+ switch typ.Kind() {
+ case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Pointer, reflect.Slice:
+ return true
+ case reflect.Struct:
+ return typ == reflectValueType
+ }
+ return false
+}
+
// validateType guarantees that the value is valid and assignable to the type.
// It reports errors through s.errorf (which panics internally), so on return
// the result is always usable as an argument of type typ.
func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value {
	if !value.IsValid() {
		if typ == nil {
			// An untyped nil interface{}. Accept as a proper nil value.
			return reflect.ValueOf(nil)
		}
		if canBeNil(typ) {
			// Like above, but use the zero value of the non-nil type.
			return reflect.Zero(typ)
		}
		s.errorf("invalid value; expected %s", typ)
	}
	if typ == reflectValueType && value.Type() != typ {
		// The parameter wants a reflect.Value; box the value up.
		return reflect.ValueOf(value)
	}
	if typ != nil && !value.Type().AssignableTo(typ) {
		if value.Kind() == reflect.Interface && !value.IsNil() {
			// Try the concrete value stored inside the interface.
			value = value.Elem()
			if value.Type().AssignableTo(typ) {
				return value
			}
			// fallthrough
		}
		// Does one dereference or indirection work? We could do more, as we
		// do with method receivers, but that gets messy and method receivers
		// are much more constrained, so it makes more sense there than here.
		// Besides, one is almost always all you need.
		switch {
		case value.Kind() == reflect.Pointer && value.Type().Elem().AssignableTo(typ):
			value = value.Elem()
			if !value.IsValid() {
				s.errorf("dereference of nil pointer of type %s", typ)
			}
		case reflect.PointerTo(value.Type()).AssignableTo(typ) && value.CanAddr():
			value = value.Addr()
		default:
			s.errorf("wrong type for value; expected %s; got %s", typ, value.Type())
		}
	}
	return value
}
+
// evalArg evaluates the argument node n and coerces the result to typ, the
// declared type of the parameter it will be passed to. Structured nodes
// (fields, variables, pipelines, functions, chains) are evaluated
// recursively and validated; constant nodes are dispatched on the target
// kind to the typed eval* helpers below.
func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value {
	s.at(n)
	switch arg := n.(type) {
	case *parse.DotNode:
		return s.validateType(dot, typ)
	case *parse.NilNode:
		if canBeNil(typ) {
			return reflect.Zero(typ)
		}
		s.errorf("cannot assign nil to %s", typ)
	case *parse.FieldNode:
		return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, missingVal), typ)
	case *parse.VariableNode:
		return s.validateType(s.evalVariableNode(dot, arg, nil, missingVal), typ)
	case *parse.PipeNode:
		return s.validateType(s.evalPipeline(dot, arg), typ)
	case *parse.IdentifierNode:
		return s.validateType(s.evalFunction(dot, arg, arg, nil, missingVal), typ)
	case *parse.ChainNode:
		return s.validateType(s.evalChainNode(dot, arg, nil, missingVal), typ)
	}
	switch typ.Kind() {
	case reflect.Bool:
		return s.evalBool(typ, n)
	case reflect.Complex64, reflect.Complex128:
		return s.evalComplex(typ, n)
	case reflect.Float32, reflect.Float64:
		return s.evalFloat(typ, n)
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return s.evalInteger(typ, n)
	case reflect.Interface:
		if typ.NumMethod() == 0 {
			return s.evalEmptyInterface(dot, n)
		}
	case reflect.Struct:
		if typ == reflectValueType {
			return reflect.ValueOf(s.evalEmptyInterface(dot, n))
		}
	case reflect.String:
		return s.evalString(typ, n)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return s.evalUnsignedInteger(typ, n)
	}
	s.errorf("can't handle %s for arg of type %s", n, typ)
	panic("not reached")
}
+
// evalBool evaluates n, which must be a boolean constant node, into a
// fresh value of the (possibly named) bool type typ.
func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value {
	s.at(n)
	if n, ok := n.(*parse.BoolNode); ok {
		value := reflect.New(typ).Elem()
		value.SetBool(n.True)
		return value
	}
	s.errorf("expected bool; found %s", n)
	panic("not reached")
}

// evalString evaluates n, which must be a string constant node, into a
// fresh value of type typ.
func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value {
	s.at(n)
	if n, ok := n.(*parse.StringNode); ok {
		value := reflect.New(typ).Elem()
		value.SetString(n.Text)
		return value
	}
	s.errorf("expected string; found %s", n)
	panic("not reached")
}

// evalInteger evaluates n, which must be an integer constant node, into a
// fresh value of the signed-integer type typ.
func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value {
	s.at(n)
	if n, ok := n.(*parse.NumberNode); ok && n.IsInt {
		value := reflect.New(typ).Elem()
		value.SetInt(n.Int64)
		return value
	}
	s.errorf("expected integer; found %s", n)
	panic("not reached")
}

// evalUnsignedInteger is like evalInteger for unsigned-integer types.
func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value {
	s.at(n)
	if n, ok := n.(*parse.NumberNode); ok && n.IsUint {
		value := reflect.New(typ).Elem()
		value.SetUint(n.Uint64)
		return value
	}
	s.errorf("expected unsigned integer; found %s", n)
	panic("not reached")
}

// evalFloat is like evalInteger for floating-point types.
func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value {
	s.at(n)
	if n, ok := n.(*parse.NumberNode); ok && n.IsFloat {
		value := reflect.New(typ).Elem()
		value.SetFloat(n.Float64)
		return value
	}
	s.errorf("expected float; found %s", n)
	panic("not reached")
}

// evalComplex is like evalFloat for complex types.
// NOTE(review): unlike its siblings this helper does not call s.at(n);
// this matches the upstream Go implementation.
func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value {
	if n, ok := n.(*parse.NumberNode); ok && n.IsComplex {
		value := reflect.New(typ).Elem()
		value.SetComplex(n.Complex128)
		return value
	}
	s.errorf("expected complex; found %s", n)
	panic("not reached")
}
+
// evalEmptyInterface evaluates n for assignment to an empty-interface
// (any) parameter, producing the natural Go value for each node kind.
func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value {
	s.at(n)
	switch n := n.(type) {
	case *parse.BoolNode:
		return reflect.ValueOf(n.True)
	case *parse.DotNode:
		return dot
	case *parse.FieldNode:
		return s.evalFieldNode(dot, n, nil, missingVal)
	case *parse.IdentifierNode:
		return s.evalFunction(dot, n, n, nil, missingVal)
	case *parse.NilNode:
		// NilNode is handled in evalArg, the only place that calls here.
		s.errorf("evalEmptyInterface: nil (can't happen)")
	case *parse.NumberNode:
		return s.idealConstant(n)
	case *parse.StringNode:
		return reflect.ValueOf(n.Text)
	case *parse.VariableNode:
		return s.evalVariableNode(dot, n, nil, missingVal)
	case *parse.PipeNode:
		return s.evalPipeline(dot, n)
	}
	s.errorf("can't handle assignment of %s to empty interface argument", n)
	panic("not reached")
}
+
+// indirect returns the item at the end of indirection, and a bool to indicate
+// if it's nil. If the returned bool is true, the returned value's kind will be
+// either a pointer or interface.
+func indirect(v reflect.Value) (rv reflect.Value, isNil bool) {
+ for ; v.Kind() == reflect.Pointer || v.Kind() == reflect.Interface; v = v.Elem() {
+ if v.IsNil() {
+ return v, true
+ }
+ }
+ return v, false
+}
+
+// indirectInterface returns the concrete value in an interface value,
+// or else the zero reflect.Value.
+// That is, if v represents the interface value x, the result is the same as reflect.ValueOf(x):
+// the fact that x was an interface value is forgotten.
+func indirectInterface(v reflect.Value) reflect.Value {
+ if v.Kind() != reflect.Interface {
+ return v
+ }
+ if v.IsNil() {
+ return reflect.Value{}
+ }
+ return v.Elem()
+}
+
// printValue writes the textual representation of the value to the output of
// the template.
func (s *state) printValue(n parse.Node, v reflect.Value) {
	s.at(n)
	iface, ok := printableValue(v)
	if !ok {
		s.errorf("can't print %s of type %s", n, v.Type())
	}
	_, err := fmt.Fprint(s.wr, iface)
	if err != nil {
		s.writeError(err)
	}
}

// printableValue returns the, possibly indirected, interface value inside v that
// is best for a call to formatted printer. The boolean result is false for
// values (channels and funcs without String/Error methods) that have no
// useful printed form.
func printableValue(v reflect.Value) (any, bool) {
	if v.Kind() == reflect.Pointer {
		v, _ = indirect(v) // fmt.Fprint handles nil.
	}
	if !v.IsValid() {
		return "<no value>", true
	}

	if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) {
		// Prefer the pointer form when only *T implements error/Stringer.
		if v.CanAddr() && (reflect.PointerTo(v.Type()).Implements(errorType) || reflect.PointerTo(v.Type()).Implements(fmtStringerType)) {
			v = v.Addr()
		} else {
			switch v.Kind() {
			case reflect.Chan, reflect.Func:
				return nil, false
			}
		}
	}
	return v.Interface(), true
}
diff --git a/tpl/internal/go_templates/texttemplate/exec_test.go b/tpl/internal/go_templates/texttemplate/exec_test.go
new file mode 100644
index 000000000..64cb87ec6
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/exec_test.go
@@ -0,0 +1,1815 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+import (
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "io"
+ "reflect"
+ "strings"
+ "sync"
+ "testing"
+)
+
// debug, when set with -debug, makes the tests print the errors they
// produce so expected failures can be inspected.
var debug = flag.Bool("debug", false, "show the errors produced by the tests")
+
// T has lots of interesting pieces to use to test execution.
// Each field group exercises a different evaluation path in the template
// engine: basic kinds, nesting, Stringer/error methods, collections,
// interfaces, pointers, funcs, nested templates, and visibility rules.
type T struct {
	// Basics
	True        bool
	I           int
	U16         uint16
	X, S        string
	FloatZero   float64
	ComplexZero complex128
	// Nested structs.
	U *U
	// Struct with String method.
	V0     V
	V1, V2 *V
	// Struct with Error method.
	W0     W
	W1, W2 *W
	// Slices
	SI      []int
	SICap   []int
	SIEmpty []int
	SB      []bool
	// Arrays
	AI [3]int
	// Maps
	MSI      map[string]int
	MSIone   map[string]int // one element, for deterministic output
	MSIEmpty map[string]int
	MXI      map[any]int
	MII      map[int]int
	MI32S    map[int32]string
	MI64S    map[int64]string
	MUI32S   map[uint32]string
	MUI64S   map[uint64]string
	MI8S     map[int8]string
	MUI8S    map[uint8]string
	SMSI     []map[string]int
	// Empty interfaces; used to see if we can dig inside one.
	Empty0 any // nil
	Empty1 any
	Empty2 any
	Empty3 any
	Empty4 any
	// Non-empty interfaces.
	NonEmptyInterface         I
	NonEmptyInterfacePtS      *I
	NonEmptyInterfaceNil      I
	NonEmptyInterfaceTypedNil I
	// Stringer.
	Str fmt.Stringer
	Err error
	// Pointers
	PI  *int
	PS  *string
	PSI *[]int
	NIL *int
	// Function (not method)
	BinaryFunc      func(string, string) string
	VariadicFunc    func(...string) string
	VariadicFuncInt func(int, ...string) string
	NilOKFunc       func(*int) bool
	ErrFunc         func() (string, error)
	PanicFunc       func() string
	// Template to test evaluation of templates.
	Tmpl *Template
	// Unexported field; cannot be accessed by template.
	unexported int
}
+
// S is a named slice type with a method, used to exercise method lookup
// on non-struct types.
type S []string

// Method0 satisfies interface I.
func (S) Method0() string {
	return "M0"
}
+
// U is a simple nested struct reachable through T.U.
type U struct {
	V string
}

// V is a struct whose pointer receiver implements fmt.Stringer.
type V struct {
	j int
}

// String implements fmt.Stringer; it is nil-safe so templates can print
// a nil *V.
func (v *V) String() string {
	if v == nil {
		return "nilV"
	}
	return fmt.Sprintf("<%d>", v.j)
}

// W is a struct whose pointer receiver implements error.
type W struct {
	k int
}

// Error implements error; it is nil-safe so templates can print a nil *W.
func (w *W) Error() string {
	if w == nil {
		return "nilW"
	}
	return fmt.Sprintf("[%d]", w.k)
}
+
// siVal is a concrete S boxed in interface I; its address feeds the
// NonEmptyInterfacePtS field below.
var siVal = I(S{"a", "b"})

// tVal is the canonical data value handed to most execution tests.
var tVal = &T{
	True:      true,
	I:         17,
	U16:       16,
	X:         "x",
	S:         "xyz",
	U:         &U{"v"},
	V0:        V{6666},
	V1:        &V{7777}, // leave V2 as nil
	W0:        W{888},
	W1:        &W{999}, // leave W2 as nil
	SI:        []int{3, 4, 5},
	SICap:     make([]int, 5, 10),
	AI:        [3]int{3, 4, 5},
	SB:        []bool{true, false},
	MSI:       map[string]int{"one": 1, "two": 2, "three": 3},
	MSIone:    map[string]int{"one": 1},
	MXI:       map[any]int{"one": 1},
	MII:       map[int]int{1: 1},
	MI32S:     map[int32]string{1: "one", 2: "two"},
	MI64S:     map[int64]string{2: "i642", 3: "i643"},
	MUI32S:    map[uint32]string{2: "u322", 3: "u323"},
	MUI64S:    map[uint64]string{2: "ui642", 3: "ui643"},
	MI8S:      map[int8]string{2: "i82", 3: "i83"},
	MUI8S:     map[uint8]string{2: "u82", 3: "u83"},
	SMSI: []map[string]int{
		{"one": 1, "two": 2},
		{"eleven": 11, "twelve": 12},
	},
	Empty1:                    3,
	Empty2:                    "empty2",
	Empty3:                    []int{7, 8},
	Empty4:                    &U{"UinEmpty"},
	NonEmptyInterface:         &T{X: "x"},
	NonEmptyInterfacePtS:      &siVal,
	NonEmptyInterfaceTypedNil: (*T)(nil),
	Str:                       bytes.NewBuffer([]byte("foozle")),
	Err:                       errors.New("erroozle"),
	PI:                        newInt(23),
	PS:                        newString("a string"),
	PSI:                       newIntSlice(21, 22, 23),
	BinaryFunc:                func(a, b string) string { return fmt.Sprintf("[%s=%s]", a, b) },
	VariadicFunc:              func(s ...string) string { return fmt.Sprint("<", strings.Join(s, "+"), ">") },
	VariadicFuncInt:           func(a int, s ...string) string { return fmt.Sprint(a, "=<", strings.Join(s, "+"), ">") },
	NilOKFunc:                 func(s *int) bool { return s == nil },
	ErrFunc:                   func() (string, error) { return "bla", nil },
	PanicFunc:                 func() string { panic("test panic") },
	Tmpl:                      Must(New("x").Parse("test template")), // "x" is the value of .X
}

// tSliceOfNil exercises calling methods on a nil element of a slice.
var tSliceOfNil = []*T{nil}

// A non-empty interface.
type I interface {
	Method0() string
}

// iVal holds tVal behind the non-empty interface I.
var iVal I = tVal
+
// Helpers for creation.

// newInt returns a pointer to a fresh copy of n.
func newInt(n int) *int {
	v := n
	return &v
}

// newString returns a pointer to a fresh copy of s.
func newString(s string) *string {
	v := s
	return &v
}

// newIntSlice returns a pointer to a new slice holding a copy of n.
func newIntSlice(n ...int) *[]int {
	s := make([]int, len(n))
	copy(s, n)
	return &s
}
+
// Simple methods with and without arguments.

// Method0 takes no arguments and satisfies interface I.
func (t *T) Method0() string {
	return "M0"
}

// Method1 echoes its integer argument.
func (t *T) Method1(a int) int {
	return a
}

// Method2 takes two arguments of different kinds.
func (t *T) Method2(a uint16, b string) string {
	return fmt.Sprintf("Method2: %d %s", a, b)
}

// Method3 accepts any value, including nil.
func (t *T) Method3(v any) string {
	return fmt.Sprintf("Method3: %v", v)
}

// Copy returns a shallow copy of t.
func (t *T) Copy() *T {
	n := new(T)
	*n = *t
	return n
}

// MAdd returns a new slice with a added to every element of b.
func (t *T) MAdd(a int, b []int) []int {
	v := make([]int, len(b))
	for i, x := range b {
		v[i] = x + a
	}
	return v
}

var myError = errors.New("my error")

// MyError returns a value and an error according to its argument.
func (t *T) MyError(error bool) (bool, error) {
	if error {
		return true, myError
	}
	return false, nil
}

// A few methods to test chaining.
func (t *T) GetU() *U {
	return t.U
}

// TrueFalse returns "true" for true and the empty string for false,
// for testing truth decisions on method results.
func (u *U) TrueFalse(b bool) string {
	if b {
		return "true"
	}
	return ""
}
+
// typeOf formats the dynamic type of arg, for registering as a template
// function in the tests.
func typeOf(arg any) string {
	return fmt.Sprintf("%T", arg)
}
+
// execTest describes one table-driven execution case: a template input,
// the expected output against data, and whether execution should succeed.
type execTest struct {
	name   string // test name, for error reports
	input  string // template text to parse and execute
	output string // expected rendered output
	data   any    // value bound to dot
	ok     bool   // true if execution is expected to succeed
}
+
// bigInt and bigUint are hex string representing numbers either side
// of the max int boundary.
// We do it this way so the test doesn't depend on ints being 32 bits.
var (
	bigInt  = fmt.Sprintf("0x%x", int(1<<uint(reflect.TypeOf(0).Bits()-1)-1))
	bigUint = fmt.Sprintf("0x%x", uint(1<<uint(reflect.TypeOf(0).Bits()-1)))
)
+
+var execTests = []execTest{
+ // Trivial cases.
+ {"empty", "", "", nil, true},
+ {"text", "some text", "some text", nil, true},
+ {"nil action", "{{nil}}", "", nil, false},
+
+ // Ideal constants.
+ {"ideal int", "{{typeOf 3}}", "int", 0, true},
+ {"ideal float", "{{typeOf 1.0}}", "float64", 0, true},
+ {"ideal exp float", "{{typeOf 1e1}}", "float64", 0, true},
+ {"ideal complex", "{{typeOf 1i}}", "complex128", 0, true},
+ {"ideal int", "{{typeOf " + bigInt + "}}", "int", 0, true},
+ {"ideal too big", "{{typeOf " + bigUint + "}}", "", 0, false},
+ {"ideal nil without type", "{{nil}}", "", 0, false},
+
+ // Fields of structs.
+ {".X", "-{{.X}}-", "-x-", tVal, true},
+ {".U.V", "-{{.U.V}}-", "-v-", tVal, true},
+ {".unexported", "{{.unexported}}", "", tVal, false},
+
+ // Fields on maps.
+ {"map .one", "{{.MSI.one}}", "1", tVal, true},
+ {"map .two", "{{.MSI.two}}", "2", tVal, true},
+ {"map .NO", "{{.MSI.NO}}", "<no value>", tVal, true},
+ {"map .one interface", "{{.MXI.one}}", "1", tVal, true},
+ {"map .WRONG args", "{{.MSI.one 1}}", "", tVal, false},
+ {"map .WRONG type", "{{.MII.one}}", "", tVal, false},
+
+ // Dots of all kinds to test basic evaluation.
+ {"dot int", "<{{.}}>", "<13>", 13, true},
+ {"dot uint", "<{{.}}>", "<14>", uint(14), true},
+ {"dot float", "<{{.}}>", "<15.1>", 15.1, true},
+ {"dot bool", "<{{.}}>", "<true>", true, true},
+ {"dot complex", "<{{.}}>", "<(16.2-17i)>", 16.2 - 17i, true},
+ {"dot string", "<{{.}}>", "<hello>", "hello", true},
+ {"dot slice", "<{{.}}>", "<[-1 -2 -3]>", []int{-1, -2, -3}, true},
+ {"dot map", "<{{.}}>", "<map[two:22]>", map[string]int{"two": 22}, true},
+ {"dot struct", "<{{.}}>", "<{7 seven}>", struct {
+ a int
+ b string
+ }{7, "seven"}, true},
+
+ // Variables.
+ {"$ int", "{{$}}", "123", 123, true},
+ {"$.I", "{{$.I}}", "17", tVal, true},
+ {"$.U.V", "{{$.U.V}}", "v", tVal, true},
+ {"declare in action", "{{$x := $.U.V}}{{$x}}", "v", tVal, true},
+ {"simple assignment", "{{$x := 2}}{{$x = 3}}{{$x}}", "3", tVal, true},
+ {"nested assignment",
+ "{{$x := 2}}{{if true}}{{$x = 3}}{{end}}{{$x}}",
+ "3", tVal, true},
+ {"nested assignment changes the last declaration",
+ "{{$x := 1}}{{if true}}{{$x := 2}}{{if true}}{{$x = 3}}{{end}}{{end}}{{$x}}",
+ "1", tVal, true},
+
+ // Type with String method.
+ {"V{6666}.String()", "-{{.V0}}-", "-<6666>-", tVal, true},
+ {"&V{7777}.String()", "-{{.V1}}-", "-<7777>-", tVal, true},
+ {"(*V)(nil).String()", "-{{.V2}}-", "-nilV-", tVal, true},
+
+ // Type with Error method.
+ {"W{888}.Error()", "-{{.W0}}-", "-[888]-", tVal, true},
+ {"&W{999}.Error()", "-{{.W1}}-", "-[999]-", tVal, true},
+ {"(*W)(nil).Error()", "-{{.W2}}-", "-nilW-", tVal, true},
+
+ // Pointers.
+ {"*int", "{{.PI}}", "23", tVal, true},
+ {"*string", "{{.PS}}", "a string", tVal, true},
+ {"*[]int", "{{.PSI}}", "[21 22 23]", tVal, true},
+ {"*[]int[1]", "{{index .PSI 1}}", "22", tVal, true},
+ {"NIL", "{{.NIL}}", "<nil>", tVal, true},
+
+ // Empty interfaces holding values.
+ {"empty nil", "{{.Empty0}}", "<no value>", tVal, true},
+ {"empty with int", "{{.Empty1}}", "3", tVal, true},
+ {"empty with string", "{{.Empty2}}", "empty2", tVal, true},
+ {"empty with slice", "{{.Empty3}}", "[7 8]", tVal, true},
+ {"empty with struct", "{{.Empty4}}", "{UinEmpty}", tVal, true},
+ {"empty with struct, field", "{{.Empty4.V}}", "UinEmpty", tVal, true},
+
+ // Edge cases with <no value> with an interface value
+ {"field on interface", "{{.foo}}", "<no value>", nil, true},
+ {"field on parenthesized interface", "{{(.).foo}}", "<no value>", nil, true},
+
+ // Issue 31810: Parenthesized first element of pipeline with arguments.
+ // See also TestIssue31810.
+ {"unparenthesized non-function", "{{1 2}}", "", nil, false},
+ {"parenthesized non-function", "{{(1) 2}}", "", nil, false},
+ {"parenthesized non-function with no args", "{{(1)}}", "1", nil, true}, // This is fine.
+
+ // Method calls.
+ {".Method0", "-{{.Method0}}-", "-M0-", tVal, true},
+ {".Method1(1234)", "-{{.Method1 1234}}-", "-1234-", tVal, true},
+ {".Method1(.I)", "-{{.Method1 .I}}-", "-17-", tVal, true},
+ {".Method2(3, .X)", "-{{.Method2 3 .X}}-", "-Method2: 3 x-", tVal, true},
+ {".Method2(.U16, `str`)", "-{{.Method2 .U16 `str`}}-", "-Method2: 16 str-", tVal, true},
+ {".Method2(.U16, $x)", "{{if $x := .X}}-{{.Method2 .U16 $x}}{{end}}-", "-Method2: 16 x-", tVal, true},
+ {".Method3(nil constant)", "-{{.Method3 nil}}-", "-Method3: <nil>-", tVal, true},
+ {".Method3(nil value)", "-{{.Method3 .MXI.unset}}-", "-Method3: <nil>-", tVal, true},
+ {"method on var", "{{if $x := .}}-{{$x.Method2 .U16 $x.X}}{{end}}-", "-Method2: 16 x-", tVal, true},
+ {"method on chained var",
+ "{{range .MSIone}}{{if $.U.TrueFalse $.True}}{{$.U.TrueFalse $.True}}{{else}}WRONG{{end}}{{end}}",
+ "true", tVal, true},
+ {"chained method",
+ "{{range .MSIone}}{{if $.GetU.TrueFalse $.True}}{{$.U.TrueFalse $.True}}{{else}}WRONG{{end}}{{end}}",
+ "true", tVal, true},
+ {"chained method on variable",
+ "{{with $x := .}}{{with .SI}}{{$.GetU.TrueFalse $.True}}{{end}}{{end}}",
+ "true", tVal, true},
+ {".NilOKFunc not nil", "{{call .NilOKFunc .PI}}", "false", tVal, true},
+ {".NilOKFunc nil", "{{call .NilOKFunc nil}}", "true", tVal, true},
+ {"method on nil value from slice", "-{{range .}}{{.Method1 1234}}{{end}}-", "-1234-", tSliceOfNil, true},
+ {"method on typed nil interface value", "{{.NonEmptyInterfaceTypedNil.Method0}}", "M0", tVal, true},
+
+ // Function call builtin.
+ {".BinaryFunc", "{{call .BinaryFunc `1` `2`}}", "[1=2]", tVal, true},
+ {".VariadicFunc0", "{{call .VariadicFunc}}", "<>", tVal, true},
+ {".VariadicFunc2", "{{call .VariadicFunc `he` `llo`}}", "<he+llo>", tVal, true},
+ {".VariadicFuncInt", "{{call .VariadicFuncInt 33 `he` `llo`}}", "33=<he+llo>", tVal, true},
+ {"if .BinaryFunc call", "{{ if .BinaryFunc}}{{call .BinaryFunc `1` `2`}}{{end}}", "[1=2]", tVal, true},
+ {"if not .BinaryFunc call", "{{ if not .BinaryFunc}}{{call .BinaryFunc `1` `2`}}{{else}}No{{end}}", "No", tVal, true},
+ {"Interface Call", `{{stringer .S}}`, "foozle", map[string]any{"S": bytes.NewBufferString("foozle")}, true},
+ {".ErrFunc", "{{call .ErrFunc}}", "bla", tVal, true},
+ {"call nil", "{{call nil}}", "", tVal, false},
+
+ // Erroneous function calls (check args).
+ {".BinaryFuncTooFew", "{{call .BinaryFunc `1`}}", "", tVal, false},
+ {".BinaryFuncTooMany", "{{call .BinaryFunc `1` `2` `3`}}", "", tVal, false},
+ {".BinaryFuncBad0", "{{call .BinaryFunc 1 3}}", "", tVal, false},
+ {".BinaryFuncBad1", "{{call .BinaryFunc `1` 3}}", "", tVal, false},
+ {".VariadicFuncBad0", "{{call .VariadicFunc 3}}", "", tVal, false},
+ {".VariadicFuncIntBad0", "{{call .VariadicFuncInt}}", "", tVal, false},
+ {".VariadicFuncIntBad`", "{{call .VariadicFuncInt `x`}}", "", tVal, false},
+ {".VariadicFuncNilBad", "{{call .VariadicFunc nil}}", "", tVal, false},
+
+ // Pipelines.
+ {"pipeline", "-{{.Method0 | .Method2 .U16}}-", "-Method2: 16 M0-", tVal, true},
+ {"pipeline func", "-{{call .VariadicFunc `llo` | call .VariadicFunc `he` }}-", "-<he+<llo>>-", tVal, true},
+
+ // Nil values aren't missing arguments.
+ {"nil pipeline", "{{ .Empty0 | call .NilOKFunc }}", "true", tVal, true},
+ {"nil call arg", "{{ call .NilOKFunc .Empty0 }}", "true", tVal, true},
+ {"bad nil pipeline", "{{ .Empty0 | .VariadicFunc }}", "", tVal, false},
+
+ // Parenthesized expressions
+ {"parens in pipeline", "{{printf `%d %d %d` (1) (2 | add 3) (add 4 (add 5 6))}}", "1 5 15", tVal, true},
+
+ // Parenthesized expressions with field accesses
+ {"parens: $ in paren", "{{($).X}}", "x", tVal, true},
+ {"parens: $.GetU in paren", "{{($.GetU).V}}", "v", tVal, true},
+ {"parens: $ in paren in pipe", "{{($ | echo).X}}", "x", tVal, true},
+ {"parens: spaces and args", `{{(makemap "up" "down" "left" "right").left}}`, "right", tVal, true},
+
+ // If.
+ {"if true", "{{if true}}TRUE{{end}}", "TRUE", tVal, true},
+ {"if false", "{{if false}}TRUE{{else}}FALSE{{end}}", "FALSE", tVal, true},
+ {"if nil", "{{if nil}}TRUE{{end}}", "", tVal, false},
+ {"if on typed nil interface value", "{{if .NonEmptyInterfaceTypedNil}}TRUE{{ end }}", "", tVal, true},
+ {"if 1", "{{if 1}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true},
+ {"if 0", "{{if 0}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if 1.5", "{{if 1.5}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true},
+ {"if 0.0", "{{if .FloatZero}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if 1.5i", "{{if 1.5i}}NON-ZERO{{else}}ZERO{{end}}", "NON-ZERO", tVal, true},
+ {"if 0.0i", "{{if .ComplexZero}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if emptystring", "{{if ``}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"if string", "{{if `notempty`}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true},
+ {"if emptyslice", "{{if .SIEmpty}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"if slice", "{{if .SI}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true},
+ {"if emptymap", "{{if .MSIEmpty}}NON-EMPTY{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"if map", "{{if .MSI}}NON-EMPTY{{else}}EMPTY{{end}}", "NON-EMPTY", tVal, true},
+ {"if map unset", "{{if .MXI.none}}NON-ZERO{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"if map not unset", "{{if not .MXI.none}}ZERO{{else}}NON-ZERO{{end}}", "ZERO", tVal, true},
+ {"if $x with $y int", "{{if $x := true}}{{with $y := .I}}{{$x}},{{$y}}{{end}}{{end}}", "true,17", tVal, true},
+ {"if $x with $x int", "{{if $x := true}}{{with $x := .I}}{{$x}},{{end}}{{$x}}{{end}}", "17,true", tVal, true},
+ {"if else if", "{{if false}}FALSE{{else if true}}TRUE{{end}}", "TRUE", tVal, true},
+ {"if else chain", "{{if eq 1 3}}1{{else if eq 2 3}}2{{else if eq 3 3}}3{{end}}", "3", tVal, true},
+
+ // Print etc.
+ {"print", `{{print "hello, print"}}`, "hello, print", tVal, true},
+ {"print 123", `{{print 1 2 3}}`, "1 2 3", tVal, true},
+ {"print nil", `{{print nil}}`, "<nil>", tVal, true},
+ {"println", `{{println 1 2 3}}`, "1 2 3\n", tVal, true},
+ {"printf int", `{{printf "%04x" 127}}`, "007f", tVal, true},
+ {"printf float", `{{printf "%g" 3.5}}`, "3.5", tVal, true},
+ {"printf complex", `{{printf "%g" 1+7i}}`, "(1+7i)", tVal, true},
+ {"printf string", `{{printf "%s" "hello"}}`, "hello", tVal, true},
+ {"printf function", `{{printf "%#q" zeroArgs}}`, "`zeroArgs`", tVal, true},
+ {"printf field", `{{printf "%s" .U.V}}`, "v", tVal, true},
+ {"printf method", `{{printf "%s" .Method0}}`, "M0", tVal, true},
+ {"printf dot", `{{with .I}}{{printf "%d" .}}{{end}}`, "17", tVal, true},
+ {"printf var", `{{with $x := .I}}{{printf "%d" $x}}{{end}}`, "17", tVal, true},
+ {"printf lots", `{{printf "%d %s %g %s" 127 "hello" 7-3i .Method0}}`, "127 hello (7-3i) M0", tVal, true},
+
+ // HTML.
+ {"html", `{{html "<script>alert(\"XSS\");</script>"}}`,
+ "&lt;script&gt;alert(&#34;XSS&#34;);&lt;/script&gt;", nil, true},
+ {"html pipeline", `{{printf "<script>alert(\"XSS\");</script>" | html}}`,
+ "&lt;script&gt;alert(&#34;XSS&#34;);&lt;/script&gt;", nil, true},
+ {"html", `{{html .PS}}`, "a string", tVal, true},
+ {"html typed nil", `{{html .NIL}}`, "&lt;nil&gt;", tVal, true},
+ {"html untyped nil", `{{html .Empty0}}`, "&lt;no value&gt;", tVal, true},
+
+ // JavaScript.
+ {"js", `{{js .}}`, `It\'d be nice.`, `It'd be nice.`, true},
+
+ // URL query.
+ {"urlquery", `{{"http://www.example.org/"|urlquery}}`, "http%3A%2F%2Fwww.example.org%2F", nil, true},
+
+ // Booleans
+ {"not", "{{not true}} {{not false}}", "false true", nil, true},
+ {"and", "{{and false 0}} {{and 1 0}} {{and 0 true}} {{and 1 1}}", "false 0 0 1", nil, true},
+ {"or", "{{or 0 0}} {{or 1 0}} {{or 0 true}} {{or 1 1}}", "0 1 true 1", nil, true},
+ {"or short-circuit", "{{or 0 1 (die)}}", "1", nil, true},
+ {"and short-circuit", "{{and 1 0 (die)}}", "0", nil, true},
+ {"or short-circuit2", "{{or 0 0 (die)}}", "", nil, false},
+ {"and short-circuit2", "{{and 1 1 (die)}}", "", nil, false},
+ {"and pipe-true", "{{1 | and 1}}", "1", nil, true},
+ {"and pipe-false", "{{0 | and 1}}", "0", nil, true},
+ {"or pipe-true", "{{1 | or 0}}", "1", nil, true},
+ {"or pipe-false", "{{0 | or 0}}", "0", nil, true},
+ {"and undef", "{{and 1 .Unknown}}", "<no value>", nil, true},
+ {"or undef", "{{or 0 .Unknown}}", "<no value>", nil, true},
+ {"boolean if", "{{if and true 1 `hi`}}TRUE{{else}}FALSE{{end}}", "TRUE", tVal, true},
+ {"boolean if not", "{{if and true 1 `hi` | not}}TRUE{{else}}FALSE{{end}}", "FALSE", nil, true},
+ {"boolean if pipe", "{{if true | not | and 1}}TRUE{{else}}FALSE{{end}}", "FALSE", nil, true},
+
+ // Indexing.
+ {"slice[0]", "{{index .SI 0}}", "3", tVal, true},
+ {"slice[1]", "{{index .SI 1}}", "4", tVal, true},
+ {"slice[HUGE]", "{{index .SI 10}}", "", tVal, false},
+ {"slice[WRONG]", "{{index .SI `hello`}}", "", tVal, false},
+ {"slice[nil]", "{{index .SI nil}}", "", tVal, false},
+ {"map[one]", "{{index .MSI `one`}}", "1", tVal, true},
+ {"map[two]", "{{index .MSI `two`}}", "2", tVal, true},
+ {"map[NO]", "{{index .MSI `XXX`}}", "0", tVal, true},
+ {"map[nil]", "{{index .MSI nil}}", "", tVal, false},
+ {"map[``]", "{{index .MSI ``}}", "0", tVal, true},
+ {"map[WRONG]", "{{index .MSI 10}}", "", tVal, false},
+ {"double index", "{{index .SMSI 1 `eleven`}}", "11", tVal, true},
+ {"nil[1]", "{{index nil 1}}", "", tVal, false},
+ {"map MI64S", "{{index .MI64S 2}}", "i642", tVal, true},
+ {"map MI32S", "{{index .MI32S 2}}", "two", tVal, true},
+ {"map MUI64S", "{{index .MUI64S 3}}", "ui643", tVal, true},
+ {"map MI8S", "{{index .MI8S 3}}", "i83", tVal, true},
+ {"map MUI8S", "{{index .MUI8S 2}}", "u82", tVal, true},
+ {"index of an interface field", "{{index .Empty3 0}}", "7", tVal, true},
+
+ // Slicing.
+ {"slice[:]", "{{slice .SI}}", "[3 4 5]", tVal, true},
+ {"slice[1:]", "{{slice .SI 1}}", "[4 5]", tVal, true},
+ {"slice[1:2]", "{{slice .SI 1 2}}", "[4]", tVal, true},
+ {"slice[-1:]", "{{slice .SI -1}}", "", tVal, false},
+ {"slice[1:-2]", "{{slice .SI 1 -2}}", "", tVal, false},
+ {"slice[1:2:-1]", "{{slice .SI 1 2 -1}}", "", tVal, false},
+ {"slice[2:1]", "{{slice .SI 2 1}}", "", tVal, false},
+ {"slice[2:2:1]", "{{slice .SI 2 2 1}}", "", tVal, false},
+ {"out of range", "{{slice .SI 4 5}}", "", tVal, false},
+ {"out of range", "{{slice .SI 2 2 5}}", "", tVal, false},
+ {"len(s) < indexes < cap(s)", "{{slice .SICap 6 10}}", "[0 0 0 0]", tVal, true},
+ {"len(s) < indexes < cap(s)", "{{slice .SICap 6 10 10}}", "[0 0 0 0]", tVal, true},
+ {"indexes > cap(s)", "{{slice .SICap 10 11}}", "", tVal, false},
+ {"indexes > cap(s)", "{{slice .SICap 6 10 11}}", "", tVal, false},
+ {"array[:]", "{{slice .AI}}", "[3 4 5]", tVal, true},
+ {"array[1:]", "{{slice .AI 1}}", "[4 5]", tVal, true},
+ {"array[1:2]", "{{slice .AI 1 2}}", "[4]", tVal, true},
+ {"string[:]", "{{slice .S}}", "xyz", tVal, true},
+ {"string[0:1]", "{{slice .S 0 1}}", "x", tVal, true},
+ {"string[1:]", "{{slice .S 1}}", "yz", tVal, true},
+ {"string[1:2]", "{{slice .S 1 2}}", "y", tVal, true},
+ {"out of range", "{{slice .S 1 5}}", "", tVal, false},
+ {"3-index slice of string", "{{slice .S 1 2 2}}", "", tVal, false},
+ {"slice of an interface field", "{{slice .Empty3 0 1}}", "[7]", tVal, true},
+
+ // Len.
+ {"slice", "{{len .SI}}", "3", tVal, true},
+ {"map", "{{len .MSI }}", "3", tVal, true},
+ {"len of int", "{{len 3}}", "", tVal, false},
+ {"len of nothing", "{{len .Empty0}}", "", tVal, false},
+ {"len of an interface field", "{{len .Empty3}}", "2", tVal, true},
+
+ // With.
+ {"with true", "{{with true}}{{.}}{{end}}", "true", tVal, true},
+ {"with false", "{{with false}}{{.}}{{else}}FALSE{{end}}", "FALSE", tVal, true},
+ {"with 1", "{{with 1}}{{.}}{{else}}ZERO{{end}}", "1", tVal, true},
+ {"with 0", "{{with 0}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"with 1.5", "{{with 1.5}}{{.}}{{else}}ZERO{{end}}", "1.5", tVal, true},
+ {"with 0.0", "{{with .FloatZero}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"with 1.5i", "{{with 1.5i}}{{.}}{{else}}ZERO{{end}}", "(0+1.5i)", tVal, true},
+ {"with 0.0i", "{{with .ComplexZero}}{{.}}{{else}}ZERO{{end}}", "ZERO", tVal, true},
+ {"with emptystring", "{{with ``}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"with string", "{{with `notempty`}}{{.}}{{else}}EMPTY{{end}}", "notempty", tVal, true},
+ {"with emptyslice", "{{with .SIEmpty}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"with slice", "{{with .SI}}{{.}}{{else}}EMPTY{{end}}", "[3 4 5]", tVal, true},
+ {"with emptymap", "{{with .MSIEmpty}}{{.}}{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"with map", "{{with .MSIone}}{{.}}{{else}}EMPTY{{end}}", "map[one:1]", tVal, true},
+ {"with empty interface, struct field", "{{with .Empty4}}{{.V}}{{end}}", "UinEmpty", tVal, true},
+ {"with $x int", "{{with $x := .I}}{{$x}}{{end}}", "17", tVal, true},
+ {"with $x struct.U.V", "{{with $x := $}}{{$x.U.V}}{{end}}", "v", tVal, true},
+ {"with variable and action", "{{with $x := $}}{{$y := $.U.V}}{{$y}}{{end}}", "v", tVal, true},
+ {"with on typed nil interface value", "{{with .NonEmptyInterfaceTypedNil}}TRUE{{ end }}", "", tVal, true},
+
+ // Range.
+ {"range []int", "{{range .SI}}-{{.}}-{{end}}", "-3--4--5-", tVal, true},
+ {"range empty no else", "{{range .SIEmpty}}-{{.}}-{{end}}", "", tVal, true},
+ {"range []int else", "{{range .SI}}-{{.}}-{{else}}EMPTY{{end}}", "-3--4--5-", tVal, true},
+ {"range empty else", "{{range .SIEmpty}}-{{.}}-{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"range []int break else", "{{range .SI}}-{{.}}-{{break}}NOTREACHED{{else}}EMPTY{{end}}", "-3-", tVal, true},
+ {"range []int continue else", "{{range .SI}}-{{.}}-{{continue}}NOTREACHED{{else}}EMPTY{{end}}", "-3--4--5-", tVal, true},
+ {"range []bool", "{{range .SB}}-{{.}}-{{end}}", "-true--false-", tVal, true},
+ {"range []int method", "{{range .SI | .MAdd .I}}-{{.}}-{{end}}", "-20--21--22-", tVal, true},
+ {"range map", "{{range .MSI}}-{{.}}-{{end}}", "-1--3--2-", tVal, true},
+ {"range empty map no else", "{{range .MSIEmpty}}-{{.}}-{{end}}", "", tVal, true},
+ {"range map else", "{{range .MSI}}-{{.}}-{{else}}EMPTY{{end}}", "-1--3--2-", tVal, true},
+ {"range empty map else", "{{range .MSIEmpty}}-{{.}}-{{else}}EMPTY{{end}}", "EMPTY", tVal, true},
+ {"range empty interface", "{{range .Empty3}}-{{.}}-{{else}}EMPTY{{end}}", "-7--8-", tVal, true},
+ {"range empty nil", "{{range .Empty0}}-{{.}}-{{end}}", "", tVal, true},
+ {"range $x SI", "{{range $x := .SI}}<{{$x}}>{{end}}", "<3><4><5>", tVal, true},
+ {"range $x $y SI", "{{range $x, $y := .SI}}<{{$x}}={{$y}}>{{end}}", "<0=3><1=4><2=5>", tVal, true},
+ {"range $x MSIone", "{{range $x := .MSIone}}<{{$x}}>{{end}}", "<1>", tVal, true},
+ {"range $x $y MSIone", "{{range $x, $y := .MSIone}}<{{$x}}={{$y}}>{{end}}", "<one=1>", tVal, true},
+ {"range $x PSI", "{{range $x := .PSI}}<{{$x}}>{{end}}", "<21><22><23>", tVal, true},
+ {"declare in range", "{{range $x := .PSI}}<{{$foo:=$x}}{{$x}}>{{end}}", "<21><22><23>", tVal, true},
+ {"range count", `{{range $i, $x := count 5}}[{{$i}}]{{$x}}{{end}}`, "[0]a[1]b[2]c[3]d[4]e", tVal, true},
+ {"range nil count", `{{range $i, $x := count 0}}{{else}}empty{{end}}`, "empty", tVal, true},
+
+ // Cute examples.
+ {"or as if true", `{{or .SI "slice is empty"}}`, "[3 4 5]", tVal, true},
+ {"or as if false", `{{or .SIEmpty "slice is empty"}}`, "slice is empty", tVal, true},
+
+ // Error handling.
+ {"error method, error", "{{.MyError true}}", "", tVal, false},
+ {"error method, no error", "{{.MyError false}}", "false", tVal, true},
+
+ // Numbers
+ {"decimal", "{{print 1234}}", "1234", tVal, true},
+ {"decimal _", "{{print 12_34}}", "1234", tVal, true},
+ {"binary", "{{print 0b101}}", "5", tVal, true},
+ {"binary _", "{{print 0b_1_0_1}}", "5", tVal, true},
+ {"BINARY", "{{print 0B101}}", "5", tVal, true},
+ {"octal0", "{{print 0377}}", "255", tVal, true},
+ {"octal", "{{print 0o377}}", "255", tVal, true},
+ {"octal _", "{{print 0o_3_7_7}}", "255", tVal, true},
+ {"OCTAL", "{{print 0O377}}", "255", tVal, true},
+ {"hex", "{{print 0x123}}", "291", tVal, true},
+ {"hex _", "{{print 0x1_23}}", "291", tVal, true},
+ {"HEX", "{{print 0X123ABC}}", "1194684", tVal, true},
+ {"float", "{{print 123.4}}", "123.4", tVal, true},
+ {"float _", "{{print 0_0_1_2_3.4}}", "123.4", tVal, true},
+ {"hex float", "{{print +0x1.ep+2}}", "7.5", tVal, true},
+ {"hex float _", "{{print +0x_1.e_0p+0_2}}", "7.5", tVal, true},
+ {"HEX float", "{{print +0X1.EP+2}}", "7.5", tVal, true},
+ {"print multi", "{{print 1_2_3_4 7.5_00_00_00}}", "1234 7.5", tVal, true},
+ {"print multi2", "{{print 1234 0x0_1.e_0p+02}}", "1234 7.5", tVal, true},
+
+ // Fixed bugs.
+ // Must separate dot and receiver; otherwise args are evaluated with dot set to variable.
+ {"bug0", "{{range .MSIone}}{{if $.Method1 .}}X{{end}}{{end}}", "X", tVal, true},
+ // Do not loop endlessly in indirect for non-empty interfaces.
+ // The bug appears with *interface only; looped forever.
+ {"bug1", "{{.Method0}}", "M0", &iVal, true},
+ // Was taking address of interface field, so method set was empty.
+ {"bug2", "{{$.NonEmptyInterface.Method0}}", "M0", tVal, true},
+ // Struct values were not legal in with - mere oversight.
+ {"bug3", "{{with $}}{{.Method0}}{{end}}", "M0", tVal, true},
+ // Nil interface values in if.
+ {"bug4", "{{if .Empty0}}non-nil{{else}}nil{{end}}", "nil", tVal, true},
+ // Stringer.
+ {"bug5", "{{.Str}}", "foozle", tVal, true},
+ {"bug5a", "{{.Err}}", "erroozle", tVal, true},
+ // Args need to be indirected and dereferenced sometimes.
+ {"bug6a", "{{vfunc .V0 .V1}}", "vfunc", tVal, true},
+ {"bug6b", "{{vfunc .V0 .V0}}", "vfunc", tVal, true},
+ {"bug6c", "{{vfunc .V1 .V0}}", "vfunc", tVal, true},
+ {"bug6d", "{{vfunc .V1 .V1}}", "vfunc", tVal, true},
+ // Legal parse but illegal execution: non-function should have no arguments.
+ {"bug7a", "{{3 2}}", "", tVal, false},
+ {"bug7b", "{{$x := 1}}{{$x 2}}", "", tVal, false},
+ {"bug7c", "{{$x := 1}}{{3 | $x}}", "", tVal, false},
+ // Pipelined arg was not being type-checked.
+ {"bug8a", "{{3|oneArg}}", "", tVal, false},
+ {"bug8b", "{{4|dddArg 3}}", "", tVal, false},
+ // A bug was introduced that broke map lookups for lower-case names.
+ {"bug9", "{{.cause}}", "neglect", map[string]string{"cause": "neglect"}, true},
+ // Field chain starting with function did not work.
+ {"bug10", "{{mapOfThree.three}}-{{(mapOfThree).three}}", "3-3", 0, true},
+ // Dereferencing nil pointer while evaluating function arguments should not panic. Issue 7333.
+ {"bug11", "{{valueString .PS}}", "", T{}, false},
+ // 0xef gave constant type float64. Issue 8622.
+ {"bug12xe", "{{printf `%T` 0xef}}", "int", T{}, true},
+ {"bug12xE", "{{printf `%T` 0xEE}}", "int", T{}, true},
+ {"bug12Xe", "{{printf `%T` 0Xef}}", "int", T{}, true},
+ {"bug12XE", "{{printf `%T` 0XEE}}", "int", T{}, true},
+ // Chained nodes did not work as arguments. Issue 8473.
+ {"bug13", "{{print (.Copy).I}}", "17", tVal, true},
+ // Didn't protect against nil or literal values in field chains.
+ {"bug14a", "{{(nil).True}}", "", tVal, false},
+ {"bug14b", "{{$x := nil}}{{$x.anything}}", "", tVal, false},
+ {"bug14c", `{{$x := (1.0)}}{{$y := ("hello")}}{{$x.anything}}{{$y.true}}`, "", tVal, false},
+ // Didn't call validateType on function results. Issue 10800.
+ {"bug15", "{{valueString returnInt}}", "", tVal, false},
+ // Variadic function corner cases. Issue 10946.
+ {"bug16a", "{{true|printf}}", "", tVal, false},
+ {"bug16b", "{{1|printf}}", "", tVal, false},
+ {"bug16c", "{{1.1|printf}}", "", tVal, false},
+ {"bug16d", "{{'x'|printf}}", "", tVal, false},
+ {"bug16e", "{{0i|printf}}", "", tVal, false},
+ {"bug16f", "{{true|twoArgs \"xxx\"}}", "", tVal, false},
+ {"bug16g", "{{\"aaa\" |twoArgs \"bbb\"}}", "twoArgs=bbbaaa", tVal, true},
+ {"bug16h", "{{1|oneArg}}", "", tVal, false},
+ {"bug16i", "{{\"aaa\"|oneArg}}", "oneArg=aaa", tVal, true},
+ {"bug16j", "{{1+2i|printf \"%v\"}}", "(1+2i)", tVal, true},
+ {"bug16k", "{{\"aaa\"|printf }}", "aaa", tVal, true},
+ {"bug17a", "{{.NonEmptyInterface.X}}", "x", tVal, true},
+ {"bug17b", "-{{.NonEmptyInterface.Method1 1234}}-", "-1234-", tVal, true},
+ {"bug17c", "{{len .NonEmptyInterfacePtS}}", "2", tVal, true},
+ {"bug17d", "{{index .NonEmptyInterfacePtS 0}}", "a", tVal, true},
+ {"bug17e", "{{range .NonEmptyInterfacePtS}}-{{.}}-{{end}}", "-a--b-", tVal, true},
+
+ // More variadic function corner cases. Some runes would get evaluated
+ // as constant floats instead of ints. Issue 34483.
+ {"bug18a", "{{eq . '.'}}", "true", '.', true},
+ {"bug18b", "{{eq . 'e'}}", "true", 'e', true},
+ {"bug18c", "{{eq . 'P'}}", "true", 'P', true},
+}
+
+func zeroArgs() string {
+ return "zeroArgs"
+}
+
+func oneArg(a string) string {
+ return "oneArg=" + a
+}
+
+func twoArgs(a, b string) string {
+ return "twoArgs=" + a + b
+}
+
+func dddArg(a int, b ...string) string {
+ return fmt.Sprintln(a, b)
+}
+
+// count returns a channel that will deliver n sequential 1-letter strings starting at "a"
+func count(n int) chan string {
+ if n == 0 {
+ return nil
+ }
+ c := make(chan string)
+ go func() {
+ for i := 0; i < n; i++ {
+ c <- "abcdefghijklmnop"[i : i+1]
+ }
+ close(c)
+ }()
+ return c
+}
+
+// vfunc takes a *V and a V
+func vfunc(V, *V) string {
+ return "vfunc"
+}
+
+// valueString takes a string, not a pointer.
+func valueString(v string) string {
+ return "value is ignored"
+}
+
+// returnInt returns an int
+func returnInt() int {
+ return 7
+}
+
+func add(args ...int) int {
+ sum := 0
+ for _, x := range args {
+ sum += x
+ }
+ return sum
+}
+
+func echo(arg any) any {
+ return arg
+}
+
+func makemap(arg ...string) map[string]string {
+ if len(arg)%2 != 0 {
+ panic("bad makemap")
+ }
+ m := make(map[string]string)
+ for i := 0; i < len(arg); i += 2 {
+ m[arg[i]] = arg[i+1]
+ }
+ return m
+}
+
+func stringer(s fmt.Stringer) string {
+ return s.String()
+}
+
+func mapOfThree() any {
+ return map[string]int{"three": 3}
+}
+
+func testExecute(execTests []execTest, template *Template, t *testing.T) {
+ b := new(bytes.Buffer)
+ funcs := FuncMap{
+ "add": add,
+ "count": count,
+ "dddArg": dddArg,
+ "die": func() bool { panic("die") },
+ "echo": echo,
+ "makemap": makemap,
+ "mapOfThree": mapOfThree,
+ "oneArg": oneArg,
+ "returnInt": returnInt,
+ "stringer": stringer,
+ "twoArgs": twoArgs,
+ "typeOf": typeOf,
+ "valueString": valueString,
+ "vfunc": vfunc,
+ "zeroArgs": zeroArgs,
+ }
+ for _, test := range execTests {
+ var tmpl *Template
+ var err error
+ if template == nil {
+ tmpl, err = New(test.name).Funcs(funcs).Parse(test.input)
+ } else {
+ tmpl, err = template.New(test.name).Funcs(funcs).Parse(test.input)
+ }
+ if err != nil {
+ t.Errorf("%s: parse error: %s", test.name, err)
+ continue
+ }
+ b.Reset()
+ err = tmpl.Execute(b, test.data)
+ switch {
+ case !test.ok && err == nil:
+ t.Errorf("%s: expected error; got none", test.name)
+ continue
+ case test.ok && err != nil:
+ t.Errorf("%s: unexpected execute error: %s", test.name, err)
+ continue
+ case !test.ok && err != nil:
+ // expected error, got one
+ if *debug {
+ fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
+ }
+ }
+ result := b.String()
+ if result != test.output {
+ t.Errorf("%s: expected\n\t%q\ngot\n\t%q", test.name, test.output, result)
+ }
+ }
+}
+
+func TestExecute(t *testing.T) {
+ testExecute(execTests, nil, t)
+}
+
+var delimPairs = []string{
+ "", "", // default
+ "{{", "}}", // same as default
+ "<<", ">>", // distinct
+ "|", "|", // same
+ "(日)", "(本)", // peculiar
+}
+
+func TestDelims(t *testing.T) {
+ const hello = "Hello, world"
+ var value = struct{ Str string }{hello}
+ for i := 0; i < len(delimPairs); i += 2 {
+ text := ".Str"
+ left := delimPairs[i+0]
+ trueLeft := left
+ right := delimPairs[i+1]
+ trueRight := right
+ if left == "" { // default case
+ trueLeft = "{{"
+ }
+ if right == "" { // default case
+ trueRight = "}}"
+ }
+ text = trueLeft + text + trueRight
+ // Now add a comment
+ text += trueLeft + "/*comment*/" + trueRight
+ // Now add an action containing a string.
+ text += trueLeft + `"` + trueLeft + `"` + trueRight
+ // At this point text looks like `{{.Str}}{{/*comment*/}}{{"{{"}}`.
+ tmpl, err := New("delims").Delims(left, right).Parse(text)
+ if err != nil {
+ t.Fatalf("delim %q text %q parse err %s", left, text, err)
+ }
+ var b = new(bytes.Buffer)
+ err = tmpl.Execute(b, value)
+ if err != nil {
+ t.Fatalf("delim %q exec err %s", left, err)
+ }
+ if b.String() != hello+trueLeft {
+ t.Errorf("expected %q got %q", hello+trueLeft, b.String())
+ }
+ }
+}
+
+// Check that an error from a method flows back to the top.
+func TestExecuteError(t *testing.T) {
+ b := new(bytes.Buffer)
+ tmpl := New("error")
+ _, err := tmpl.Parse("{{.MyError true}}")
+ if err != nil {
+ t.Fatalf("parse error: %s", err)
+ }
+ err = tmpl.Execute(b, tVal)
+ if err == nil {
+ t.Errorf("expected error; got none")
+ } else if !strings.Contains(err.Error(), myError.Error()) {
+ if *debug {
+ fmt.Printf("test execute error: %s\n", err)
+ }
+ t.Errorf("expected myError; got %s", err)
+ }
+}
+
+const execErrorText = `line 1
+line 2
+line 3
+{{template "one" .}}
+{{define "one"}}{{template "two" .}}{{end}}
+{{define "two"}}{{template "three" .}}{{end}}
+{{define "three"}}{{index "hi" $}}{{end}}`
+
+// Check that an error from a nested template contains all the relevant information.
+func TestExecError(t *testing.T) {
+ tmpl, err := New("top").Parse(execErrorText)
+ if err != nil {
+ t.Fatal("parse error:", err)
+ }
+ var b bytes.Buffer
+ err = tmpl.Execute(&b, 5) // 5 is out of range indexing "hi"
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ const want = `template: top:7:20: executing "three" at <index "hi" $>: error calling index: index out of range: 5`
+ got := err.Error()
+ if got != want {
+ t.Errorf("expected\n%q\ngot\n%q", want, got)
+ }
+}
+
+type CustomError struct{}
+
+func (*CustomError) Error() string { return "heyo !" }
+
+// Check that a custom error can be returned.
+func TestExecError_CustomError(t *testing.T) {
+ failingFunc := func() (string, error) {
+ return "", &CustomError{}
+ }
+ tmpl := Must(New("top").Funcs(FuncMap{
+ "err": failingFunc,
+ }).Parse("{{ err }}"))
+
+ var b bytes.Buffer
+ err := tmpl.Execute(&b, nil)
+
+ var e *CustomError
+ if !errors.As(err, &e) {
+ t.Fatalf("expected custom error; got %s", err)
+ }
+}
+
+func TestJSEscaping(t *testing.T) {
+ testCases := []struct {
+ in, exp string
+ }{
+ {`a`, `a`},
+ {`'foo`, `\'foo`},
+ {`Go "jump" \`, `Go \"jump\" \\`},
+ {`Yukihiro says "今日は世界"`, `Yukihiro says \"今日は世界\"`},
+ {"unprintable \uFDFF", `unprintable \uFDFF`},
+ {`<html>`, `\u003Chtml\u003E`},
+ {`no = in attributes`, `no \u003D in attributes`},
+ {`&#x27; does not become HTML entity`, `\u0026#x27; does not become HTML entity`},
+ }
+ for _, tc := range testCases {
+ s := JSEscapeString(tc.in)
+ if s != tc.exp {
+ t.Errorf("JS escaping [%s] got [%s] want [%s]", tc.in, s, tc.exp)
+ }
+ }
+}
+
+// A nice example: walk a binary tree.
+
+type Tree struct {
+ Val int
+ Left, Right *Tree
+}
+
+// Use different delimiters to test Set.Delims.
+// Also test the trimming of leading and trailing spaces.
+const treeTemplate = `
+ (- define "tree" -)
+ [
+ (- .Val -)
+ (- with .Left -)
+ (template "tree" . -)
+ (- end -)
+ (- with .Right -)
+ (- template "tree" . -)
+ (- end -)
+ ]
+ (- end -)
+`
+
+func TestTree(t *testing.T) {
+ var tree = &Tree{
+ 1,
+ &Tree{
+ 2, &Tree{
+ 3,
+ &Tree{
+ 4, nil, nil,
+ },
+ nil,
+ },
+ &Tree{
+ 5,
+ &Tree{
+ 6, nil, nil,
+ },
+ nil,
+ },
+ },
+ &Tree{
+ 7,
+ &Tree{
+ 8,
+ &Tree{
+ 9, nil, nil,
+ },
+ nil,
+ },
+ &Tree{
+ 10,
+ &Tree{
+ 11, nil, nil,
+ },
+ nil,
+ },
+ },
+ }
+ tmpl, err := New("root").Delims("(", ")").Parse(treeTemplate)
+ if err != nil {
+ t.Fatal("parse error:", err)
+ }
+ var b bytes.Buffer
+ const expect = "[1[2[3[4]][5[6]]][7[8[9]][10[11]]]]"
+ // First by looking up the template.
+ err = tmpl.Lookup("tree").Execute(&b, tree)
+ if err != nil {
+ t.Fatal("exec error:", err)
+ }
+ result := b.String()
+ if result != expect {
+ t.Errorf("expected %q got %q", expect, result)
+ }
+ // Then direct to execution.
+ b.Reset()
+ err = tmpl.ExecuteTemplate(&b, "tree", tree)
+ if err != nil {
+ t.Fatal("exec error:", err)
+ }
+ result = b.String()
+ if result != expect {
+ t.Errorf("expected %q got %q", expect, result)
+ }
+}
+
+func TestExecuteOnNewTemplate(t *testing.T) {
+ // This is issue 3872.
+ New("Name").Templates()
+ // This is issue 11379.
+ new(Template).Templates()
+ new(Template).Parse("")
+ new(Template).New("abc").Parse("")
+ new(Template).Execute(nil, nil) // returns an error (but does not crash)
+ new(Template).ExecuteTemplate(nil, "XXX", nil) // returns an error (but does not crash)
+}
+
+const testTemplates = `{{define "one"}}one{{end}}{{define "two"}}two{{end}}`
+
+func TestMessageForExecuteEmpty(t *testing.T) {
+ // Test a truly empty template.
+ tmpl := New("empty")
+ var b bytes.Buffer
+ err := tmpl.Execute(&b, 0)
+ if err == nil {
+ t.Fatal("expected initial error")
+ }
+ got := err.Error()
+ want := `template: empty: "empty" is an incomplete or empty template`
+ if got != want {
+ t.Errorf("expected error %s got %s", want, got)
+ }
+ // Add a non-empty template to check that the error is helpful.
+ tests, err := New("").Parse(testTemplates)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tmpl.AddParseTree("secondary", tests.Tree)
+ err = tmpl.Execute(&b, 0)
+ if err == nil {
+ t.Fatal("expected second error")
+ }
+ got = err.Error()
+ want = `template: empty: "empty" is an incomplete or empty template`
+ if got != want {
+ t.Errorf("expected error %s got %s", want, got)
+ }
+ // Make sure we can execute the secondary.
+ err = tmpl.ExecuteTemplate(&b, "secondary", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestFinalForPrintf(t *testing.T) {
+ tmpl, err := New("").Parse(`{{"x" | printf}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var b bytes.Buffer
+ err = tmpl.Execute(&b, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+type cmpTest struct {
+ expr string
+ truth string
+ ok bool
+}
+
+var cmpTests = []cmpTest{
+ {"eq true true", "true", true},
+ {"eq true false", "false", true},
+ {"eq 1+2i 1+2i", "true", true},
+ {"eq 1+2i 1+3i", "false", true},
+ {"eq 1.5 1.5", "true", true},
+ {"eq 1.5 2.5", "false", true},
+ {"eq 1 1", "true", true},
+ {"eq 1 2", "false", true},
+ {"eq `xy` `xy`", "true", true},
+ {"eq `xy` `xyz`", "false", true},
+ {"eq .Uthree .Uthree", "true", true},
+ {"eq .Uthree .Ufour", "false", true},
+ {"eq 3 4 5 6 3", "true", true},
+ {"eq 3 4 5 6 7", "false", true},
+ {"ne true true", "false", true},
+ {"ne true false", "true", true},
+ {"ne 1+2i 1+2i", "false", true},
+ {"ne 1+2i 1+3i", "true", true},
+ {"ne 1.5 1.5", "false", true},
+ {"ne 1.5 2.5", "true", true},
+ {"ne 1 1", "false", true},
+ {"ne 1 2", "true", true},
+ {"ne `xy` `xy`", "false", true},
+ {"ne `xy` `xyz`", "true", true},
+ {"ne .Uthree .Uthree", "false", true},
+ {"ne .Uthree .Ufour", "true", true},
+ {"lt 1.5 1.5", "false", true},
+ {"lt 1.5 2.5", "true", true},
+ {"lt 1 1", "false", true},
+ {"lt 1 2", "true", true},
+ {"lt `xy` `xy`", "false", true},
+ {"lt `xy` `xyz`", "true", true},
+ {"lt .Uthree .Uthree", "false", true},
+ {"lt .Uthree .Ufour", "true", true},
+ {"le 1.5 1.5", "true", true},
+ {"le 1.5 2.5", "true", true},
+ {"le 2.5 1.5", "false", true},
+ {"le 1 1", "true", true},
+ {"le 1 2", "true", true},
+ {"le 2 1", "false", true},
+ {"le `xy` `xy`", "true", true},
+ {"le `xy` `xyz`", "true", true},
+ {"le `xyz` `xy`", "false", true},
+ {"le .Uthree .Uthree", "true", true},
+ {"le .Uthree .Ufour", "true", true},
+ {"le .Ufour .Uthree", "false", true},
+ {"gt 1.5 1.5", "false", true},
+ {"gt 1.5 2.5", "false", true},
+ {"gt 1 1", "false", true},
+ {"gt 2 1", "true", true},
+ {"gt 1 2", "false", true},
+ {"gt `xy` `xy`", "false", true},
+ {"gt `xy` `xyz`", "false", true},
+ {"gt .Uthree .Uthree", "false", true},
+ {"gt .Uthree .Ufour", "false", true},
+ {"gt .Ufour .Uthree", "true", true},
+ {"ge 1.5 1.5", "true", true},
+ {"ge 1.5 2.5", "false", true},
+ {"ge 2.5 1.5", "true", true},
+ {"ge 1 1", "true", true},
+ {"ge 1 2", "false", true},
+ {"ge 2 1", "true", true},
+ {"ge `xy` `xy`", "true", true},
+ {"ge `xy` `xyz`", "false", true},
+ {"ge `xyz` `xy`", "true", true},
+ {"ge .Uthree .Uthree", "true", true},
+ {"ge .Uthree .Ufour", "false", true},
+ {"ge .Ufour .Uthree", "true", true},
+ // Mixing signed and unsigned integers.
+ {"eq .Uthree .Three", "true", true},
+ {"eq .Three .Uthree", "true", true},
+ {"le .Uthree .Three", "true", true},
+ {"le .Three .Uthree", "true", true},
+ {"ge .Uthree .Three", "true", true},
+ {"ge .Three .Uthree", "true", true},
+ {"lt .Uthree .Three", "false", true},
+ {"lt .Three .Uthree", "false", true},
+ {"gt .Uthree .Three", "false", true},
+ {"gt .Three .Uthree", "false", true},
+ {"eq .Ufour .Three", "false", true},
+ {"lt .Ufour .Three", "false", true},
+ {"gt .Ufour .Three", "true", true},
+ {"eq .NegOne .Uthree", "false", true},
+ {"eq .Uthree .NegOne", "false", true},
+ {"ne .NegOne .Uthree", "true", true},
+ {"ne .Uthree .NegOne", "true", true},
+ {"lt .NegOne .Uthree", "true", true},
+ {"lt .Uthree .NegOne", "false", true},
+ {"le .NegOne .Uthree", "true", true},
+ {"le .Uthree .NegOne", "false", true},
+ {"gt .NegOne .Uthree", "false", true},
+ {"gt .Uthree .NegOne", "true", true},
+ {"ge .NegOne .Uthree", "false", true},
+ {"ge .Uthree .NegOne", "true", true},
+ {"eq (index `x` 0) 'x'", "true", true}, // The example that triggered this rule.
+ {"eq (index `x` 0) 'y'", "false", true},
+ {"eq .V1 .V2", "true", true},
+ {"eq .Ptr .Ptr", "true", true},
+ {"eq .Ptr .NilPtr", "false", true},
+ {"eq .NilPtr .NilPtr", "true", true},
+ {"eq .Iface1 .Iface1", "true", true},
+ {"eq .Iface1 .NilIface", "false", true},
+ {"eq .NilIface .NilIface", "true", true},
+ {"eq .NilIface .Iface1", "false", true},
+ {"eq .NilIface 0", "false", true},
+ {"eq 0 .NilIface", "false", true},
+ // Errors
+ {"eq `xy` 1", "", false}, // Different types.
+ {"eq 2 2.0", "", false}, // Different types.
+ {"lt true true", "", false}, // Unordered types.
+ {"lt 1+0i 1+0i", "", false}, // Unordered types.
+ {"eq .Ptr 1", "", false}, // Incompatible types.
+ {"eq .Ptr .NegOne", "", false}, // Incompatible types.
+ {"eq .Map .Map", "", false}, // Uncomparable types.
+ {"eq .Map .V1", "", false}, // Uncomparable types.
+}
+
+func TestComparison(t *testing.T) {
+ b := new(bytes.Buffer)
+ var cmpStruct = struct {
+ Uthree, Ufour uint
+ NegOne, Three int
+ Ptr, NilPtr *int
+ Map map[int]int
+ V1, V2 V
+ Iface1, NilIface fmt.Stringer
+ }{
+ Uthree: 3,
+ Ufour: 4,
+ NegOne: -1,
+ Three: 3,
+ Ptr: new(int),
+ Iface1: b,
+ }
+ for _, test := range cmpTests {
+ text := fmt.Sprintf("{{if %s}}true{{else}}false{{end}}", test.expr)
+ tmpl, err := New("empty").Parse(text)
+ if err != nil {
+ t.Fatalf("%q: %s", test.expr, err)
+ }
+ b.Reset()
+ err = tmpl.Execute(b, &cmpStruct)
+ if test.ok && err != nil {
+ t.Errorf("%s errored incorrectly: %s", test.expr, err)
+ continue
+ }
+ if !test.ok && err == nil {
+ t.Errorf("%s did not error", test.expr)
+ continue
+ }
+ if b.String() != test.truth {
+ t.Errorf("%s: want %s; got %s", test.expr, test.truth, b.String())
+ }
+ }
+}
+
+func TestMissingMapKey(t *testing.T) {
+ data := map[string]int{
+ "x": 99,
+ }
+ tmpl, err := New("t1").Parse("{{.x}} {{.y}}")
+ if err != nil {
+ t.Fatal(err)
+ }
+ var b bytes.Buffer
+ // By default, just get "<no value>"
+ err = tmpl.Execute(&b, data)
+ if err != nil {
+ t.Fatal(err)
+ }
+ want := "99 <no value>"
+ got := b.String()
+ if got != want {
+ t.Errorf("got %q; expected %q", got, want)
+ }
+ // Same if we set the option explicitly to the default.
+ tmpl.Option("missingkey=default")
+ b.Reset()
+ err = tmpl.Execute(&b, data)
+ if err != nil {
+ t.Fatal("default:", err)
+ }
+ want = "99 <no value>"
+ got = b.String()
+ if got != want {
+ t.Errorf("got %q; expected %q", got, want)
+ }
+ // Next we ask for a zero value
+ tmpl.Option("missingkey=zero")
+ b.Reset()
+ err = tmpl.Execute(&b, data)
+ if err != nil {
+ t.Fatal("zero:", err)
+ }
+ want = "99 0"
+ got = b.String()
+ if got != want {
+ t.Errorf("got %q; expected %q", got, want)
+ }
+ // Now we ask for an error.
+ tmpl.Option("missingkey=error")
+ err = tmpl.Execute(&b, data)
+ if err == nil {
+ t.Errorf("expected error; got none")
+ }
+ // same Option, but now a nil interface: ask for an error
+ err = tmpl.Execute(&b, nil)
+ t.Log(err)
+ if err == nil {
+ t.Errorf("expected error for nil-interface; got none")
+ }
+}
+
+// Test that the error message for multiline unterminated string
+// refers to the line number of the opening quote.
+func TestUnterminatedStringError(t *testing.T) {
+ _, err := New("X").Parse("hello\n\n{{`unterminated\n\n\n\n}}\n some more\n\n")
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ str := err.Error()
+ if !strings.Contains(str, "X:3: unterminated raw quoted string") {
+ t.Fatalf("unexpected error: %s", str)
+ }
+}
+
+const alwaysErrorText = "always be failing"
+
+var alwaysError = errors.New(alwaysErrorText)
+
+type ErrorWriter int
+
+func (e ErrorWriter) Write(p []byte) (int, error) {
+ return 0, alwaysError
+}
+
+func TestExecuteGivesExecError(t *testing.T) {
+ // First, a non-execution error shouldn't be an ExecError.
+ tmpl, err := New("X").Parse("hello")
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = tmpl.Execute(ErrorWriter(0), 0)
+ if err == nil {
+ t.Fatal("expected error; got none")
+ }
+ if err.Error() != alwaysErrorText {
+ t.Errorf("expected %q error; got %q", alwaysErrorText, err)
+ }
+ // This one should be an ExecError.
+ tmpl, err = New("X").Parse("hello, {{.X.Y}}")
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = tmpl.Execute(io.Discard, 0)
+ if err == nil {
+ t.Fatal("expected error; got none")
+ }
+ eerr, ok := err.(ExecError)
+ if !ok {
+ t.Fatalf("did not expect ExecError %s", eerr)
+ }
+ expect := "field X in type int"
+ if !strings.Contains(err.Error(), expect) {
+ t.Errorf("expected %q; got %q", expect, err)
+ }
+}
+
+func funcNameTestFunc() int {
+ return 0
+}
+
+func TestGoodFuncNames(t *testing.T) {
+ names := []string{
+ "_",
+ "a",
+ "a1",
+ "a1",
+ "Ӵ",
+ }
+ for _, name := range names {
+ tmpl := New("X").Funcs(
+ FuncMap{
+ name: funcNameTestFunc,
+ },
+ )
+ if tmpl == nil {
+ t.Fatalf("nil result for %q", name)
+ }
+ }
+}
+
+func TestBadFuncNames(t *testing.T) {
+ names := []string{
+ "",
+ "2",
+ "a-b",
+ }
+ for _, name := range names {
+ testBadFuncName(name, t)
+ }
+}
+
+func testBadFuncName(name string, t *testing.T) {
+ t.Helper()
+ defer func() {
+ recover()
+ }()
+ New("X").Funcs(
+ FuncMap{
+ name: funcNameTestFunc,
+ },
+ )
+ // If we get here, the name did not cause a panic, which is how Funcs
+ // reports an error.
+ t.Errorf("%q succeeded incorrectly as function name", name)
+}
+
+func TestBlock(t *testing.T) {
+ const (
+ input = `a({{block "inner" .}}bar({{.}})baz{{end}})b`
+ want = `a(bar(hello)baz)b`
+ overlay = `{{define "inner"}}foo({{.}})bar{{end}}`
+ want2 = `a(foo(goodbye)bar)b`
+ )
+ tmpl, err := New("outer").Parse(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+ tmpl2, err := Must(tmpl.Clone()).Parse(overlay)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var buf bytes.Buffer
+ if err := tmpl.Execute(&buf, "hello"); err != nil {
+ t.Fatal(err)
+ }
+ if got := buf.String(); got != want {
+ t.Errorf("got %q, want %q", got, want)
+ }
+
+ buf.Reset()
+ if err := tmpl2.Execute(&buf, "goodbye"); err != nil {
+ t.Fatal(err)
+ }
+ if got := buf.String(); got != want2 {
+ t.Errorf("got %q, want %q", got, want2)
+ }
+}
+
+func TestEvalFieldErrors(t *testing.T) {
+ tests := []struct {
+ name, src string
+ value any
+ want string
+ }{
+ {
+ // Check that calling an invalid field on nil pointer
+ // prints a field error instead of a distracting nil
+ // pointer error. https://golang.org/issue/15125
+ "MissingFieldOnNil",
+ "{{.MissingField}}",
+ (*T)(nil),
+ "can't evaluate field MissingField in type *template.T",
+ },
+ {
+ "MissingFieldOnNonNil",
+ "{{.MissingField}}",
+ &T{},
+ "can't evaluate field MissingField in type *template.T",
+ },
+ {
+ "ExistingFieldOnNil",
+ "{{.X}}",
+ (*T)(nil),
+ "nil pointer evaluating *template.T.X",
+ },
+ {
+ "MissingKeyOnNilMap",
+ "{{.MissingKey}}",
+ (*map[string]string)(nil),
+ "nil pointer evaluating *map[string]string.MissingKey",
+ },
+ {
+ "MissingKeyOnNilMapPtr",
+ "{{.MissingKey}}",
+ (*map[string]string)(nil),
+ "nil pointer evaluating *map[string]string.MissingKey",
+ },
+ {
+ "MissingKeyOnMapPtrToNil",
+ "{{.MissingKey}}",
+ &map[string]string{},
+ "<nil>",
+ },
+ }
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ tmpl := Must(New("tmpl").Parse(tc.src))
+ err := tmpl.Execute(io.Discard, tc.value)
+ got := "<nil>"
+ if err != nil {
+ got = err.Error()
+ }
+ if !strings.HasSuffix(got, tc.want) {
+ t.Fatalf("got error %q, want %q", got, tc.want)
+ }
+ })
+ }
+}
+
+func TestMaxExecDepth(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping in -short mode")
+ }
+ tmpl := Must(New("tmpl").Parse(`{{template "tmpl" .}}`))
+ err := tmpl.Execute(io.Discard, nil)
+ got := "<nil>"
+ if err != nil {
+ got = err.Error()
+ }
+ const want = "exceeded maximum template depth"
+ if !strings.Contains(got, want) {
+ t.Errorf("got error %q; want %q", got, want)
+ }
+}
+
+func TestAddrOfIndex(t *testing.T) {
+ // golang.org/issue/14916.
+ // Before index worked on reflect.Values, the .String could not be
+ // found on the (incorrectly unaddressable) V value,
+ // in contrast to range, which worked fine.
+ // Also testing that passing a reflect.Value to tmpl.Execute works.
+ texts := []string{
+ `{{range .}}{{.String}}{{end}}`,
+ `{{with index . 0}}{{.String}}{{end}}`,
+ }
+ for _, text := range texts {
+ tmpl := Must(New("tmpl").Parse(text))
+ var buf bytes.Buffer
+ err := tmpl.Execute(&buf, reflect.ValueOf([]V{{1}}))
+ if err != nil {
+ t.Fatalf("%s: Execute: %v", text, err)
+ }
+ if buf.String() != "<1>" {
+ t.Fatalf("%s: template output = %q, want %q", text, &buf, "<1>")
+ }
+ }
+}
+
+func TestInterfaceValues(t *testing.T) {
+ // golang.org/issue/17714.
+ // Before index worked on reflect.Values, interface values
+ // were always implicitly promoted to the underlying value,
+ // except that nil interfaces were promoted to the zero reflect.Value.
+ // Eliminating a round trip to interface{} and back to reflect.Value
+ // eliminated this promotion, breaking these cases.
+ tests := []struct {
+ text string
+ out string
+ }{
+ {`{{index .Nil 1}}`, "ERROR: index of untyped nil"},
+ {`{{index .Slice 2}}`, "2"},
+ {`{{index .Slice .Two}}`, "2"},
+ {`{{call .Nil 1}}`, "ERROR: call of nil"},
+ {`{{call .PlusOne 1}}`, "2"},
+ {`{{call .PlusOne .One}}`, "2"},
+ {`{{and (index .Slice 0) true}}`, "0"},
+ {`{{and .Zero true}}`, "0"},
+ {`{{and (index .Slice 1) false}}`, "false"},
+ {`{{and .One false}}`, "false"},
+ {`{{or (index .Slice 0) false}}`, "false"},
+ {`{{or .Zero false}}`, "false"},
+ {`{{or (index .Slice 1) true}}`, "1"},
+ {`{{or .One true}}`, "1"},
+ {`{{not (index .Slice 0)}}`, "true"},
+ {`{{not .Zero}}`, "true"},
+ {`{{not (index .Slice 1)}}`, "false"},
+ {`{{not .One}}`, "false"},
+ {`{{eq (index .Slice 0) .Zero}}`, "true"},
+ {`{{eq (index .Slice 1) .One}}`, "true"},
+ {`{{ne (index .Slice 0) .Zero}}`, "false"},
+ {`{{ne (index .Slice 1) .One}}`, "false"},
+ {`{{ge (index .Slice 0) .One}}`, "false"},
+ {`{{ge (index .Slice 1) .Zero}}`, "true"},
+ {`{{gt (index .Slice 0) .One}}`, "false"},
+ {`{{gt (index .Slice 1) .Zero}}`, "true"},
+ {`{{le (index .Slice 0) .One}}`, "true"},
+ {`{{le (index .Slice 1) .Zero}}`, "false"},
+ {`{{lt (index .Slice 0) .One}}`, "true"},
+ {`{{lt (index .Slice 1) .Zero}}`, "false"},
+ }
+
+ for _, tt := range tests {
+ tmpl := Must(New("tmpl").Parse(tt.text))
+ var buf bytes.Buffer
+ err := tmpl.Execute(&buf, map[string]any{
+ "PlusOne": func(n int) int {
+ return n + 1
+ },
+ "Slice": []int{0, 1, 2, 3},
+ "One": 1,
+ "Two": 2,
+ "Nil": nil,
+ "Zero": 0,
+ })
+ if strings.HasPrefix(tt.out, "ERROR:") {
+ e := strings.TrimSpace(strings.TrimPrefix(tt.out, "ERROR:"))
+ if err == nil || !strings.Contains(err.Error(), e) {
+ t.Errorf("%s: Execute: %v, want error %q", tt.text, err, e)
+ }
+ continue
+ }
+ if err != nil {
+ t.Errorf("%s: Execute: %v", tt.text, err)
+ continue
+ }
+ if buf.String() != tt.out {
+ t.Errorf("%s: template output = %q, want %q", tt.text, &buf, tt.out)
+ }
+ }
+}
+
+// Check that panics during calls are recovered and returned as errors.
+func TestExecutePanicDuringCall(t *testing.T) {
+ funcs := map[string]any{
+ "doPanic": func() string {
+ panic("custom panic string")
+ },
+ }
+ tests := []struct {
+ name string
+ input string
+ data any
+ wantErr string
+ }{
+ {
+ "direct func call panics",
+ "{{doPanic}}", (*T)(nil),
+ `template: t:1:2: executing "t" at <doPanic>: error calling doPanic: custom panic string`,
+ },
+ {
+ "indirect func call panics",
+ "{{call doPanic}}", (*T)(nil),
+ `template: t:1:7: executing "t" at <doPanic>: error calling doPanic: custom panic string`,
+ },
+ {
+ "direct method call panics",
+ "{{.GetU}}", (*T)(nil),
+ `template: t:1:2: executing "t" at <.GetU>: error calling GetU: runtime error: invalid memory address or nil pointer dereference`,
+ },
+ {
+ "indirect method call panics",
+ "{{call .GetU}}", (*T)(nil),
+ `template: t:1:7: executing "t" at <.GetU>: error calling GetU: runtime error: invalid memory address or nil pointer dereference`,
+ },
+ {
+ "func field call panics",
+ "{{call .PanicFunc}}", tVal,
+ `template: t:1:2: executing "t" at <call .PanicFunc>: error calling call: test panic`,
+ },
+ {
+ "method call on nil interface",
+ "{{.NonEmptyInterfaceNil.Method0}}", tVal,
+ `template: t:1:23: executing "t" at <.NonEmptyInterfaceNil.Method0>: nil pointer evaluating template.I.Method0`,
+ },
+ }
+ for _, tc := range tests {
+ b := new(bytes.Buffer)
+ tmpl, err := New("t").Funcs(funcs).Parse(tc.input)
+ if err != nil {
+ t.Fatalf("parse error: %s", err)
+ }
+ err = tmpl.Execute(b, tc.data)
+ if err == nil {
+ t.Errorf("%s: expected error; got none", tc.name)
+ } else if !strings.Contains(err.Error(), tc.wantErr) {
+ if *debug {
+ fmt.Printf("%s: test execute error: %s\n", tc.name, err)
+ }
+ t.Errorf("%s: expected error:\n%s\ngot:\n%s", tc.name, tc.wantErr, err)
+ }
+ }
+}
+
+// Issue 31810. Check that a parenthesized first argument behaves properly.
+func TestIssue31810(t *testing.T) {
+ // A simple value with no arguments is fine.
+ var b bytes.Buffer
+ const text = "{{ (.) }}"
+ tmpl, err := New("").Parse(text)
+ if err != nil {
+ t.Error(err)
+ }
+ err = tmpl.Execute(&b, "result")
+ if err != nil {
+ t.Error(err)
+ }
+ if b.String() != "result" {
+ t.Errorf("%s got %q, expected %q", text, b.String(), "result")
+ }
+
+ // Even a plain function fails - need to use call.
+ f := func() string { return "result" }
+ b.Reset()
+ err = tmpl.Execute(&b, f)
+ if err == nil {
+ t.Error("expected error with no call, got none")
+ }
+
+ // Works if the function is explicitly called.
+ const textCall = "{{ (call .) }}"
+ tmpl, err = New("").Parse(textCall)
+ b.Reset()
+ err = tmpl.Execute(&b, f)
+ if err != nil {
+ t.Error(err)
+ }
+ if b.String() != "result" {
+ t.Errorf("%s got %q, expected %q", textCall, b.String(), "result")
+ }
+}
+
+// Issue 43065, range over send only channel
+func TestIssue43065(t *testing.T) {
+ var b bytes.Buffer
+ tmp := Must(New("").Parse(`{{range .}}{{end}}`))
+ ch := make(chan<- int)
+ err := tmp.Execute(&b, ch)
+ if err == nil {
+ t.Error("expected err got nil")
+ } else if !strings.Contains(err.Error(), "range over send-only channel") {
+ t.Errorf("%s", err)
+ }
+}
+
+// Issue 39807: data race in html/template & text/template
+func TestIssue39807(t *testing.T) {
+ var wg sync.WaitGroup
+
+ tplFoo, err := New("foo").Parse(`{{ template "bar" . }}`)
+ if err != nil {
+ t.Error(err)
+ }
+
+ tplBar, err := New("bar").Parse("bar")
+ if err != nil {
+ t.Error(err)
+ }
+
+ gofuncs := 10
+ numTemplates := 10
+
+ for i := 1; i <= gofuncs; i++ {
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for j := 0; j < numTemplates; j++ {
+ _, err := tplFoo.AddParseTree(tplBar.Name(), tplBar.Tree)
+ if err != nil {
+ t.Error(err)
+ }
+ err = tplFoo.Execute(io.Discard, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ }
+ }()
+ }
+
+ wg.Wait()
+}
+
+// Issue 48215: embedded nil pointer causes panic.
+// Fixed by adding FieldByIndexErr to the reflect package.
+func TestIssue48215(t *testing.T) {
+ type A struct {
+ S string
+ }
+ type B struct {
+ *A
+ }
+ tmpl, err := New("").Parse(`{{ .S }}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ err = tmpl.Execute(io.Discard, B{})
+ // We expect an error, not a panic.
+ if err == nil {
+ t.Fatal("did not get error for nil embedded struct")
+ }
+ if !strings.Contains(err.Error(), "reflect: indirection through nil pointer to embedded struct field A") {
+ t.Fatal(err)
+ }
+}
diff --git a/tpl/internal/go_templates/texttemplate/funcs.go b/tpl/internal/go_templates/texttemplate/funcs.go
new file mode 100644
index 000000000..dca5ed28d
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/funcs.go
@@ -0,0 +1,753 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "net/url"
+ "reflect"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+// FuncMap is the type of the map defining the mapping from names to functions.
+// Each function must have either a single return value, or two return values of
+// which the second has type error. In that case, if the second (error)
+// return value evaluates to non-nil during execution, execution terminates and
+// Execute returns that error.
+//
+// Errors returned by Execute wrap the underlying error; call errors.As to
+// uncover them.
+//
+// When template execution invokes a function with an argument list, that list
+// must be assignable to the function's parameter types. Functions meant to
+// apply to arguments of arbitrary type can use parameters of type interface{} or
+// of type reflect.Value. Similarly, functions meant to return a result of arbitrary
+// type can return interface{} or reflect.Value.
+type FuncMap map[string]any
+
+// builtins returns the FuncMap.
+// It is not a global variable so the linker can dead code eliminate
+// more when this isn't called. See golang.org/issue/36021.
+// TODO: revert this back to a global map once golang.org/issue/2559 is fixed.
+func builtins() FuncMap {
+ return FuncMap{
+ "and": and,
+ "call": call,
+ "html": HTMLEscaper,
+ "index": index,
+ "slice": slice,
+ "js": JSEscaper,
+ "len": length,
+ "not": not,
+ "or": or,
+ "print": fmt.Sprint,
+ "printf": fmt.Sprintf,
+ "println": fmt.Sprintln,
+ "urlquery": URLQueryEscaper,
+
+ // Comparisons
+ "eq": eq, // ==
+ "ge": ge, // >=
+ "gt": gt, // >
+ "le": le, // <=
+ "lt": lt, // <
+ "ne": ne, // !=
+ }
+}
+
+var builtinFuncsOnce struct {
+ sync.Once
+ v map[string]reflect.Value
+}
+
+// builtinFuncsOnce lazily computes & caches the builtinFuncs map.
+// TODO: revert this back to a global map once golang.org/issue/2559 is fixed.
+func builtinFuncs() map[string]reflect.Value {
+ builtinFuncsOnce.Do(func() {
+ builtinFuncsOnce.v = createValueFuncs(builtins())
+ })
+ return builtinFuncsOnce.v
+}
+
+// createValueFuncs turns a FuncMap into a map[string]reflect.Value
+func createValueFuncs(funcMap FuncMap) map[string]reflect.Value {
+ m := make(map[string]reflect.Value)
+ addValueFuncs(m, funcMap)
+ return m
+}
+
+// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values.
+func addValueFuncs(out map[string]reflect.Value, in FuncMap) {
+ for name, fn := range in {
+ if !goodName(name) {
+ panic(fmt.Errorf("function name %q is not a valid identifier", name))
+ }
+ v := reflect.ValueOf(fn)
+ if v.Kind() != reflect.Func {
+ panic("value for " + name + " not a function")
+ }
+ if !goodFunc(v.Type()) {
+ panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut()))
+ }
+ out[name] = v
+ }
+}
+
+// addFuncs adds to values the functions in funcs. It does no checking of the input -
+// call addValueFuncs first.
+func addFuncs(out, in FuncMap) {
+ for name, fn := range in {
+ out[name] = fn
+ }
+}
+
+// goodFunc reports whether the function or method has the right result signature.
+func goodFunc(typ reflect.Type) bool {
+ // We allow functions with 1 result or 2 results where the second is an error.
+ switch {
+ case typ.NumOut() == 1:
+ return true
+ case typ.NumOut() == 2 && typ.Out(1) == errorType:
+ return true
+ }
+ return false
+}
+
+// goodName reports whether the function name is a valid identifier.
+func goodName(name string) bool {
+ if name == "" {
+ return false
+ }
+ for i, r := range name {
+ switch {
+ case r == '_':
+ case i == 0 && !unicode.IsLetter(r):
+ return false
+ case !unicode.IsLetter(r) && !unicode.IsDigit(r):
+ return false
+ }
+ }
+ return true
+}
+
+// findFunction looks for a function in the template, and global map.
+func findFunction(name string, tmpl *Template) (v reflect.Value, isBuiltin, ok bool) {
+ if tmpl != nil && tmpl.common != nil {
+ tmpl.muFuncs.RLock()
+ defer tmpl.muFuncs.RUnlock()
+ if fn := tmpl.execFuncs[name]; fn.IsValid() {
+ return fn, false, true
+ }
+ }
+ if fn := builtinFuncs()[name]; fn.IsValid() {
+ return fn, true, true
+ }
+ return reflect.Value{}, false, false
+}
+
+// prepareArg checks if value can be used as an argument of type argType, and
+// converts an invalid value to appropriate zero if possible.
+func prepareArg(value reflect.Value, argType reflect.Type) (reflect.Value, error) {
+ if !value.IsValid() {
+ if !canBeNil(argType) {
+ return reflect.Value{}, fmt.Errorf("value is nil; should be of type %s", argType)
+ }
+ value = reflect.Zero(argType)
+ }
+ if value.Type().AssignableTo(argType) {
+ return value, nil
+ }
+ if intLike(value.Kind()) && intLike(argType.Kind()) && value.Type().ConvertibleTo(argType) {
+ value = value.Convert(argType)
+ return value, nil
+ }
+ return reflect.Value{}, fmt.Errorf("value has type %s; should be %s", value.Type(), argType)
+}
+
+func intLike(typ reflect.Kind) bool {
+ switch typ {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return true
+ }
+ return false
+}
+
+// indexArg checks if a reflect.Value can be used as an index, and converts it to int if possible.
+func indexArg(index reflect.Value, cap int) (int, error) {
+ var x int64
+ switch index.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ x = index.Int()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ x = int64(index.Uint())
+ case reflect.Invalid:
+ return 0, fmt.Errorf("cannot index slice/array with nil")
+ default:
+ return 0, fmt.Errorf("cannot index slice/array with type %s", index.Type())
+ }
+ if x < 0 || int(x) < 0 || int(x) > cap {
+ return 0, fmt.Errorf("index out of range: %d", x)
+ }
+ return int(x), nil
+}
+
+// Indexing.
+
+// index returns the result of indexing its first argument by the following
+// arguments. Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each
+// indexed item must be a map, slice, or array.
+func index(item reflect.Value, indexes ...reflect.Value) (reflect.Value, error) {
+ item = indirectInterface(item)
+ if !item.IsValid() {
+ return reflect.Value{}, fmt.Errorf("index of untyped nil")
+ }
+ for _, index := range indexes {
+ index = indirectInterface(index)
+ var isNil bool
+ if item, isNil = indirect(item); isNil {
+ return reflect.Value{}, fmt.Errorf("index of nil pointer")
+ }
+ switch item.Kind() {
+ case reflect.Array, reflect.Slice, reflect.String:
+ x, err := indexArg(index, item.Len())
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ item = item.Index(x)
+ case reflect.Map:
+ index, err := prepareArg(index, item.Type().Key())
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ if x := item.MapIndex(index); x.IsValid() {
+ item = x
+ } else {
+ item = reflect.Zero(item.Type().Elem())
+ }
+ case reflect.Invalid:
+ // the loop holds invariant: item.IsValid()
+ panic("unreachable")
+ default:
+ return reflect.Value{}, fmt.Errorf("can't index item of type %s", item.Type())
+ }
+ }
+ return item, nil
+}
+
+// Slicing.
+
+// slice returns the result of slicing its first argument by the remaining
+// arguments. Thus "slice x 1 2" is, in Go syntax, x[1:2], while "slice x"
+// is x[:], "slice x 1" is x[1:], and "slice x 1 2 3" is x[1:2:3]. The first
+// argument must be a string, slice, or array.
+func slice(item reflect.Value, indexes ...reflect.Value) (reflect.Value, error) {
+ item = indirectInterface(item)
+ if !item.IsValid() {
+ return reflect.Value{}, fmt.Errorf("slice of untyped nil")
+ }
+ if len(indexes) > 3 {
+ return reflect.Value{}, fmt.Errorf("too many slice indexes: %d", len(indexes))
+ }
+ var cap int
+ switch item.Kind() {
+ case reflect.String:
+ if len(indexes) == 3 {
+ return reflect.Value{}, fmt.Errorf("cannot 3-index slice a string")
+ }
+ cap = item.Len()
+ case reflect.Array, reflect.Slice:
+ cap = item.Cap()
+ default:
+ return reflect.Value{}, fmt.Errorf("can't slice item of type %s", item.Type())
+ }
+ // set default values for cases item[:], item[i:].
+ idx := [3]int{0, item.Len()}
+ for i, index := range indexes {
+ x, err := indexArg(index, cap)
+ if err != nil {
+ return reflect.Value{}, err
+ }
+ idx[i] = x
+ }
+ // given item[i:j], make sure i <= j.
+ if idx[0] > idx[1] {
+ return reflect.Value{}, fmt.Errorf("invalid slice index: %d > %d", idx[0], idx[1])
+ }
+ if len(indexes) < 3 {
+ return item.Slice(idx[0], idx[1]), nil
+ }
+ // given item[i:j:k], make sure i <= j <= k.
+ if idx[1] > idx[2] {
+ return reflect.Value{}, fmt.Errorf("invalid slice index: %d > %d", idx[1], idx[2])
+ }
+ return item.Slice3(idx[0], idx[1], idx[2]), nil
+}
+
+// Length
+
+// length returns the length of the item, with an error if it has no defined length.
+func length(item reflect.Value) (int, error) {
+ item, isNil := indirect(item)
+ if isNil {
+ return 0, fmt.Errorf("len of nil pointer")
+ }
+ switch item.Kind() {
+ case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
+ return item.Len(), nil
+ }
+ return 0, fmt.Errorf("len of type %s", item.Type())
+}
+
+// Function invocation
+
+// call returns the result of evaluating the first argument as a function.
+// The function must return 1 result, or 2 results, the second of which is an error.
+func call(fn reflect.Value, args ...reflect.Value) (reflect.Value, error) {
+ fn = indirectInterface(fn)
+ if !fn.IsValid() {
+ return reflect.Value{}, fmt.Errorf("call of nil")
+ }
+ typ := fn.Type()
+ if typ.Kind() != reflect.Func {
+ return reflect.Value{}, fmt.Errorf("non-function of type %s", typ)
+ }
+ if !goodFunc(typ) {
+ return reflect.Value{}, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut())
+ }
+ numIn := typ.NumIn()
+ var dddType reflect.Type
+ if typ.IsVariadic() {
+ if len(args) < numIn-1 {
+ return reflect.Value{}, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1)
+ }
+ dddType = typ.In(numIn - 1).Elem()
+ } else {
+ if len(args) != numIn {
+ return reflect.Value{}, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn)
+ }
+ }
+ argv := make([]reflect.Value, len(args))
+ for i, arg := range args {
+ arg = indirectInterface(arg)
+ // Compute the expected type. Clumsy because of variadics.
+ argType := dddType
+ if !typ.IsVariadic() || i < numIn-1 {
+ argType = typ.In(i)
+ }
+
+ var err error
+ if argv[i], err = prepareArg(arg, argType); err != nil {
+ return reflect.Value{}, fmt.Errorf("arg %d: %w", i, err)
+ }
+ }
+ return safeCall(fn, argv)
+}
+
+// safeCall runs fun.Call(args), and returns the resulting value and error, if
+// any. If the call panics, the panic value is returned as an error.
+func safeCall(fun reflect.Value, args []reflect.Value) (val reflect.Value, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ if e, ok := r.(error); ok {
+ err = e
+ } else {
+ err = fmt.Errorf("%v", r)
+ }
+ }
+ }()
+ ret := fun.Call(args)
+ if len(ret) == 2 && !ret[1].IsNil() {
+ return ret[0], ret[1].Interface().(error)
+ }
+ return ret[0], nil
+}
+
+// Boolean logic.
+
+func truth(arg reflect.Value) bool {
+ t, _ := isTrue(indirectInterface(arg))
+ return t
+}
+
+// and computes the Boolean AND of its arguments, returning
+// the first false argument it encounters, or the last argument.
+func and(arg0 reflect.Value, args ...reflect.Value) reflect.Value {
+ panic("unreachable") // implemented as a special case in evalCall
+}
+
+// or computes the Boolean OR of its arguments, returning
+// the first true argument it encounters, or the last argument.
+func or(arg0 reflect.Value, args ...reflect.Value) reflect.Value {
+ panic("unreachable") // implemented as a special case in evalCall
+}
+
+// not returns the Boolean negation of its argument.
+func not(arg reflect.Value) bool {
+ return !truth(arg)
+}
+
+// Comparison.
+
+// TODO: Perhaps allow comparison between signed and unsigned integers.
+
+var (
+ errBadComparisonType = errors.New("invalid type for comparison")
+ errBadComparison = errors.New("incompatible types for comparison")
+ errNoComparison = errors.New("missing argument for comparison")
+)
+
+type kind int
+
+const (
+ invalidKind kind = iota
+ boolKind
+ complexKind
+ intKind
+ floatKind
+ stringKind
+ uintKind
+)
+
+func basicKind(v reflect.Value) (kind, error) {
+ switch v.Kind() {
+ case reflect.Bool:
+ return boolKind, nil
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return intKind, nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return uintKind, nil
+ case reflect.Float32, reflect.Float64:
+ return floatKind, nil
+ case reflect.Complex64, reflect.Complex128:
+ return complexKind, nil
+ case reflect.String:
+ return stringKind, nil
+ }
+ return invalidKind, errBadComparisonType
+}
+
+// eq evaluates the comparison a == b || a == c || ...
+func eq(arg1 reflect.Value, arg2 ...reflect.Value) (bool, error) {
+ arg1 = indirectInterface(arg1)
+ if arg1 != zero {
+ if t1 := arg1.Type(); !t1.Comparable() {
+ return false, fmt.Errorf("uncomparable type %s: %v", t1, arg1)
+ }
+ }
+ if len(arg2) == 0 {
+ return false, errNoComparison
+ }
+ k1, _ := basicKind(arg1)
+ for _, arg := range arg2 {
+ arg = indirectInterface(arg)
+ k2, _ := basicKind(arg)
+ truth := false
+ if k1 != k2 {
+ // Special case: Can compare integer values regardless of type's sign.
+ switch {
+ case k1 == intKind && k2 == uintKind:
+ truth = arg1.Int() >= 0 && uint64(arg1.Int()) == arg.Uint()
+ case k1 == uintKind && k2 == intKind:
+ truth = arg.Int() >= 0 && arg1.Uint() == uint64(arg.Int())
+ default:
+ if arg1 != zero && arg != zero {
+ return false, errBadComparison
+ }
+ }
+ } else {
+ switch k1 {
+ case boolKind:
+ truth = arg1.Bool() == arg.Bool()
+ case complexKind:
+ truth = arg1.Complex() == arg.Complex()
+ case floatKind:
+ truth = arg1.Float() == arg.Float()
+ case intKind:
+ truth = arg1.Int() == arg.Int()
+ case stringKind:
+ truth = arg1.String() == arg.String()
+ case uintKind:
+ truth = arg1.Uint() == arg.Uint()
+ default:
+ if arg == zero || arg1 == zero {
+ truth = arg1 == arg
+ } else {
+ if t2 := arg.Type(); !t2.Comparable() {
+ return false, fmt.Errorf("uncomparable type %s: %v", t2, arg)
+ }
+ truth = arg1.Interface() == arg.Interface()
+ }
+ }
+ }
+ if truth {
+ return true, nil
+ }
+ }
+ return false, nil
+}
+
+// ne evaluates the comparison a != b.
+func ne(arg1, arg2 reflect.Value) (bool, error) {
+ // != is the inverse of ==.
+ equal, err := eq(arg1, arg2)
+ return !equal, err
+}
+
+// lt evaluates the comparison a < b.
+func lt(arg1, arg2 reflect.Value) (bool, error) {
+ arg1 = indirectInterface(arg1)
+ k1, err := basicKind(arg1)
+ if err != nil {
+ return false, err
+ }
+ arg2 = indirectInterface(arg2)
+ k2, err := basicKind(arg2)
+ if err != nil {
+ return false, err
+ }
+ truth := false
+ if k1 != k2 {
+ // Special case: Can compare integer values regardless of type's sign.
+ switch {
+ case k1 == intKind && k2 == uintKind:
+ truth = arg1.Int() < 0 || uint64(arg1.Int()) < arg2.Uint()
+ case k1 == uintKind && k2 == intKind:
+ truth = arg2.Int() >= 0 && arg1.Uint() < uint64(arg2.Int())
+ default:
+ return false, errBadComparison
+ }
+ } else {
+ switch k1 {
+ case boolKind, complexKind:
+ return false, errBadComparisonType
+ case floatKind:
+ truth = arg1.Float() < arg2.Float()
+ case intKind:
+ truth = arg1.Int() < arg2.Int()
+ case stringKind:
+ truth = arg1.String() < arg2.String()
+ case uintKind:
+ truth = arg1.Uint() < arg2.Uint()
+ default:
+ panic("invalid kind")
+ }
+ }
+ return truth, nil
+}
+
+// le evaluates the comparison <= b.
+func le(arg1, arg2 reflect.Value) (bool, error) {
+ // <= is < or ==.
+ lessThan, err := lt(arg1, arg2)
+ if lessThan || err != nil {
+ return lessThan, err
+ }
+ return eq(arg1, arg2)
+}
+
+// gt evaluates the comparison a > b.
+func gt(arg1, arg2 reflect.Value) (bool, error) {
+ // > is the inverse of <=.
+ lessOrEqual, err := le(arg1, arg2)
+ if err != nil {
+ return false, err
+ }
+ return !lessOrEqual, nil
+}
+
+// ge evaluates the comparison a >= b.
+func ge(arg1, arg2 reflect.Value) (bool, error) {
+ // >= is the inverse of <.
+ lessThan, err := lt(arg1, arg2)
+ if err != nil {
+ return false, err
+ }
+ return !lessThan, nil
+}
+
+// HTML escaping.
+
+var (
+ htmlQuot = []byte("&#34;") // shorter than "&quot;"
+ htmlApos = []byte("&#39;") // shorter than "&apos;" and apos was not in HTML until HTML5
+ htmlAmp = []byte("&amp;")
+ htmlLt = []byte("&lt;")
+ htmlGt = []byte("&gt;")
+ htmlNull = []byte("\uFFFD")
+)
+
+// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b.
+func HTMLEscape(w io.Writer, b []byte) {
+ last := 0
+ for i, c := range b {
+ var html []byte
+ switch c {
+ case '\000':
+ html = htmlNull
+ case '"':
+ html = htmlQuot
+ case '\'':
+ html = htmlApos
+ case '&':
+ html = htmlAmp
+ case '<':
+ html = htmlLt
+ case '>':
+ html = htmlGt
+ default:
+ continue
+ }
+ w.Write(b[last:i])
+ w.Write(html)
+ last = i + 1
+ }
+ w.Write(b[last:])
+}
+
+// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s.
+func HTMLEscapeString(s string) string {
+ // Avoid allocation if we can.
+ if !strings.ContainsAny(s, "'\"&<>\000") {
+ return s
+ }
+ var b bytes.Buffer
+ HTMLEscape(&b, []byte(s))
+ return b.String()
+}
+
+// HTMLEscaper returns the escaped HTML equivalent of the textual
+// representation of its arguments.
+func HTMLEscaper(args ...any) string {
+ return HTMLEscapeString(evalArgs(args))
+}
+
+// JavaScript escaping.
+
+var (
+ jsLowUni = []byte(`\u00`)
+ hex = []byte("0123456789ABCDEF")
+
+ jsBackslash = []byte(`\\`)
+ jsApos = []byte(`\'`)
+ jsQuot = []byte(`\"`)
+ jsLt = []byte(`\u003C`)
+ jsGt = []byte(`\u003E`)
+ jsAmp = []byte(`\u0026`)
+ jsEq = []byte(`\u003D`)
+)
+
+// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b.
+func JSEscape(w io.Writer, b []byte) {
+ last := 0
+ for i := 0; i < len(b); i++ {
+ c := b[i]
+
+ if !jsIsSpecial(rune(c)) {
+ // fast path: nothing to do
+ continue
+ }
+ w.Write(b[last:i])
+
+ if c < utf8.RuneSelf {
+ // Quotes, slashes and angle brackets get quoted.
+ // Control characters get written as \u00XX.
+ switch c {
+ case '\\':
+ w.Write(jsBackslash)
+ case '\'':
+ w.Write(jsApos)
+ case '"':
+ w.Write(jsQuot)
+ case '<':
+ w.Write(jsLt)
+ case '>':
+ w.Write(jsGt)
+ case '&':
+ w.Write(jsAmp)
+ case '=':
+ w.Write(jsEq)
+ default:
+ w.Write(jsLowUni)
+ t, b := c>>4, c&0x0f
+ w.Write(hex[t : t+1])
+ w.Write(hex[b : b+1])
+ }
+ } else {
+ // Unicode rune.
+ r, size := utf8.DecodeRune(b[i:])
+ if unicode.IsPrint(r) {
+ w.Write(b[i : i+size])
+ } else {
+ fmt.Fprintf(w, "\\u%04X", r)
+ }
+ i += size - 1
+ }
+ last = i + 1
+ }
+ w.Write(b[last:])
+}
+
+// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s.
+func JSEscapeString(s string) string {
+ // Avoid allocation if we can.
+ if strings.IndexFunc(s, jsIsSpecial) < 0 {
+ return s
+ }
+ var b bytes.Buffer
+ JSEscape(&b, []byte(s))
+ return b.String()
+}
+
+func jsIsSpecial(r rune) bool {
+ switch r {
+ case '\\', '\'', '"', '<', '>', '&', '=':
+ return true
+ }
+ return r < ' ' || utf8.RuneSelf <= r
+}
+
+// JSEscaper returns the escaped JavaScript equivalent of the textual
+// representation of its arguments.
+func JSEscaper(args ...any) string {
+ return JSEscapeString(evalArgs(args))
+}
+
+// URLQueryEscaper returns the escaped value of the textual representation of
+// its arguments in a form suitable for embedding in a URL query.
+func URLQueryEscaper(args ...any) string {
+ return url.QueryEscape(evalArgs(args))
+}
+
+// evalArgs formats the list of arguments into a string. It is therefore equivalent to
+// fmt.Sprint(args...)
+// except that each argument is indirected (if a pointer), as required,
+// using the same rules as the default string evaluation during template
+// execution.
+func evalArgs(args []any) string {
+ ok := false
+ var s string
+ // Fast path for simple common case.
+ if len(args) == 1 {
+ s, ok = args[0].(string)
+ }
+ if !ok {
+ for i, arg := range args {
+ a, ok := printableValue(reflect.ValueOf(arg))
+ if ok {
+ args[i] = a
+ } // else let fmt do its thing
+ }
+ s = fmt.Sprint(args...)
+ }
+ return s
+}
diff --git a/tpl/internal/go_templates/texttemplate/helper.go b/tpl/internal/go_templates/texttemplate/helper.go
new file mode 100644
index 000000000..57905e613
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/helper.go
@@ -0,0 +1,177 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Helper functions to make constructing templates easier.
+
+package template
+
+import (
+ "fmt"
+ "io/fs"
+ "os"
+ "path"
+ "path/filepath"
+)
+
+// Functions and methods to parse templates.
+
+// Must is a helper that wraps a call to a function returning (*Template, error)
+// and panics if the error is non-nil. It is intended for use in variable
+// initializations such as
+// var t = template.Must(template.New("name").Parse("text"))
+func Must(t *Template, err error) *Template {
+ if err != nil {
+ panic(err)
+ }
+ return t
+}
+
+// ParseFiles creates a new Template and parses the template definitions from
+// the named files. The returned template's name will have the base name and
+// parsed contents of the first file. There must be at least one file.
+// If an error occurs, parsing stops and the returned *Template is nil.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+// For instance, ParseFiles("a/foo", "b/foo") stores "b/foo" as the template
+// named "foo", while "a/foo" is unavailable.
+func ParseFiles(filenames ...string) (*Template, error) {
+ return parseFiles(nil, readFileOS, filenames...)
+}
+
+// ParseFiles parses the named files and associates the resulting templates with
+// t. If an error occurs, parsing stops and the returned template is nil;
+// otherwise it is t. There must be at least one file.
+// Since the templates created by ParseFiles are named by the base
+// names of the argument files, t should usually have the name of one
+// of the (base) names of the files. If it does not, depending on t's
+// contents before calling ParseFiles, t.Execute may fail. In that
+// case use t.ExecuteTemplate to execute a valid template.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+func (t *Template) ParseFiles(filenames ...string) (*Template, error) {
+ t.init()
+ return parseFiles(t, readFileOS, filenames...)
+}
+
+// parseFiles is the helper for the method and function. If the argument
+// template is nil, it is created from the first file.
+func parseFiles(t *Template, readFile func(string) (string, []byte, error), filenames ...string) (*Template, error) {
+ if len(filenames) == 0 {
+ // Not really a problem, but be consistent.
+ return nil, fmt.Errorf("template: no files named in call to ParseFiles")
+ }
+ for _, filename := range filenames {
+ name, b, err := readFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ s := string(b)
+ // First template becomes return value if not already defined,
+ // and we use that one for subsequent New calls to associate
+ // all the templates together. Also, if this file has the same name
+ // as t, this file becomes the contents of t, so
+ // t, err := New(name).Funcs(xxx).ParseFiles(name)
+ // works. Otherwise we create a new template associated with t.
+ var tmpl *Template
+ if t == nil {
+ t = New(name)
+ }
+ if name == t.Name() {
+ tmpl = t
+ } else {
+ tmpl = t.New(name)
+ }
+ _, err = tmpl.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+ }
+ return t, nil
+}
+
+// ParseGlob creates a new Template and parses the template definitions from
+// the files identified by the pattern. The files are matched according to the
+// semantics of filepath.Match, and the pattern must match at least one file.
+// The returned template will have the (base) name and (parsed) contents of the
+// first file matched by the pattern. ParseGlob is equivalent to calling
+// ParseFiles with the list of files matched by the pattern.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+func ParseGlob(pattern string) (*Template, error) {
+ return parseGlob(nil, pattern)
+}
+
+// ParseGlob parses the template definitions in the files identified by the
+// pattern and associates the resulting templates with t. The files are matched
+// according to the semantics of filepath.Match, and the pattern must match at
+// least one file. ParseGlob is equivalent to calling t.ParseFiles with the
+// list of files matched by the pattern.
+//
+// When parsing multiple files with the same name in different directories,
+// the last one mentioned will be the one that results.
+func (t *Template) ParseGlob(pattern string) (*Template, error) {
+ t.init()
+ return parseGlob(t, pattern)
+}
+
+// parseGlob is the implementation of the function and method ParseGlob.
+func parseGlob(t *Template, pattern string) (*Template, error) {
+ filenames, err := filepath.Glob(pattern)
+ if err != nil {
+ return nil, err
+ }
+ if len(filenames) == 0 {
+ return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
+ }
+ return parseFiles(t, readFileOS, filenames...)
+}
+
+// ParseFS is like ParseFiles or ParseGlob but reads from the file system fsys
+// instead of the host operating system's file system.
+// It accepts a list of glob patterns.
+// (Note that most file names serve as glob patterns matching only themselves.)
+func ParseFS(fsys fs.FS, patterns ...string) (*Template, error) {
+ return parseFS(nil, fsys, patterns)
+}
+
+// ParseFS is like ParseFiles or ParseGlob but reads from the file system fsys
+// instead of the host operating system's file system.
+// It accepts a list of glob patterns.
+// (Note that most file names serve as glob patterns matching only themselves.)
+func (t *Template) ParseFS(fsys fs.FS, patterns ...string) (*Template, error) {
+ t.init()
+ return parseFS(t, fsys, patterns)
+}
+
+func parseFS(t *Template, fsys fs.FS, patterns []string) (*Template, error) {
+ var filenames []string
+ for _, pattern := range patterns {
+ list, err := fs.Glob(fsys, pattern)
+ if err != nil {
+ return nil, err
+ }
+ if len(list) == 0 {
+ return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern)
+ }
+ filenames = append(filenames, list...)
+ }
+ return parseFiles(t, readFileFS(fsys), filenames...)
+}
+
+func readFileOS(file string) (name string, b []byte, err error) {
+ name = filepath.Base(file)
+ b, err = os.ReadFile(file)
+ return
+}
+
+func readFileFS(fsys fs.FS) func(string) (string, []byte, error) {
+ return func(file string) (name string, b []byte, err error) {
+ name = path.Base(file)
+ b, err = fs.ReadFile(fsys, file)
+ return
+ }
+}
diff --git a/tpl/internal/go_templates/texttemplate/hugo_template.go b/tpl/internal/go_templates/texttemplate/hugo_template.go
new file mode 100644
index 000000000..dab5a05a3
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/hugo_template.go
@@ -0,0 +1,398 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package template
+
+import (
+ "context"
+ "io"
+ "reflect"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+)
+
+/*
+
+This file contains the Hugo-related addons. All the other files in this
+package are auto generated.
+
+*/
+
+// Export it so we can populate Hugo's func map with it, which makes it faster.
+var GoFuncs = builtinFuncs()
+
+// Preparer prepares the template before execution.
+type Preparer interface {
+ Prepare() (*Template, error)
+}
+
+// ExecHelper allows some custom eval hooks.
+type ExecHelper interface {
+ Init(ctx context.Context, tmpl Preparer)
+ GetFunc(ctx context.Context, tmpl Preparer, name string) (reflect.Value, reflect.Value, bool)
+ GetMethod(ctx context.Context, tmpl Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value)
+ GetMapValue(ctx context.Context, tmpl Preparer, receiver, key reflect.Value) (reflect.Value, bool)
+}
+
+// Executer executes a given template.
+type Executer interface {
+ ExecuteWithContext(ctx context.Context, p Preparer, wr io.Writer, data any) error
+}
+
+type executer struct {
+ helper ExecHelper
+}
+
+func NewExecuter(helper ExecHelper) Executer {
+ return &executer{helper: helper}
+}
+
+type (
+ dataContextKeyType string
+ hasLockContextKeyType string
+)
+
+const (
+ // The data object passed to Execute or ExecuteWithContext gets stored with this key if not already set.
+ DataContextKey = dataContextKeyType("data")
+ // Used in partialCached to signal to nested templates that a lock is already taken.
+ HasLockContextKey = hasLockContextKeyType("hasLock")
+)
+
+// Note: The context is currently not fully implemented in Hugo. This is a work in progress.
+func (t *executer) ExecuteWithContext(ctx context.Context, p Preparer, wr io.Writer, data any) error {
+ tmpl, err := p.Prepare()
+ if err != nil {
+ return err
+ }
+
+ if v := ctx.Value(DataContextKey); v == nil {
+ ctx = context.WithValue(ctx, DataContextKey, data)
+ }
+
+ value, ok := data.(reflect.Value)
+ if !ok {
+ value = reflect.ValueOf(data)
+ }
+
+ state := &state{
+ ctx: ctx,
+ helper: t.helper,
+ prep: p,
+ tmpl: tmpl,
+ wr: wr,
+ vars: []variable{{"$", value}},
+ }
+
+ t.helper.Init(ctx, p)
+
+ return tmpl.executeWithState(state, value)
+}
+
+func (t *executer) Execute(p Preparer, wr io.Writer, data any) error {
+ tmpl, err := p.Prepare()
+ if err != nil {
+ return err
+ }
+
+ value, ok := data.(reflect.Value)
+ if !ok {
+ value = reflect.ValueOf(data)
+ }
+
+ state := &state{
+ helper: t.helper,
+ prep: p,
+ tmpl: tmpl,
+ wr: wr,
+ vars: []variable{{"$", value}},
+ }
+
+ return tmpl.executeWithState(state, value)
+}
+
+// Prepare returns a template ready for execution.
+func (t *Template) Prepare() (*Template, error) {
+ return t, nil
+}
+
+func (t *Template) executeWithState(state *state, value reflect.Value) (err error) {
+ defer errRecover(&err)
+ if t.Tree == nil || t.Root == nil {
+ state.errorf("%q is an incomplete or empty template", t.Name())
+ }
+ state.walk(value, t.Root)
+ return
+}
+
+// Below are modified structs etc. The changes are marked with "Added for Hugo."
+
+// state represents the state of an execution. It's not part of the
+// template so that multiple executions of the same template
+// can execute in parallel.
+type state struct {
+ tmpl *Template
+ ctx context.Context // Added for Hugo. The original data context.
+ prep Preparer // Added for Hugo.
+ helper ExecHelper // Added for Hugo.
+ wr io.Writer
+ node parse.Node // current node, for errors
+ vars []variable // push-down stack of variable values.
+ depth int // the height of the stack of executing templates.
+}
+
+func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value {
+ s.at(node)
+ name := node.Ident
+
+ var function reflect.Value
+ // Added for Hugo.
+ var first reflect.Value
+ var ok bool
+ var isBuiltin bool
+ if s.helper != nil {
+ isBuiltin = name == "and" || name == "or"
+ function, first, ok = s.helper.GetFunc(s.ctx, s.prep, name)
+ }
+
+ if !ok {
+ function, isBuiltin, ok = findFunction(name, s.tmpl)
+ }
+
+ if !ok {
+ s.errorf("%q is not a defined function", name)
+ }
+ if first != zero {
+ return s.evalCall(dot, function, isBuiltin, cmd, name, args, final, first)
+ }
+ return s.evalCall(dot, function, isBuiltin, cmd, name, args, final)
+}
+
+// evalField evaluates an expression like (.Field) or (.Field arg1 arg2).
+// The 'final' argument represents the return value from the preceding
+// value of the pipeline, if any.
+func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value {
+ if !receiver.IsValid() {
+ if s.tmpl.option.missingKey == mapError { // Treat invalid value as missing map key.
+ s.errorf("nil data; no entry for key %q", fieldName)
+ }
+ return zero
+ }
+ typ := receiver.Type()
+ receiver, isNil := indirect(receiver)
+ if receiver.Kind() == reflect.Interface && isNil {
+ // Calling a method on a nil interface can't work. The
+ // MethodByName method call below would panic.
+ s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
+ return zero
+ }
+
+ // Unless it's an interface, need to get to a value of type *T to guarantee
+ // we see all methods of T and *T.
+ ptr := receiver
+ if ptr.Kind() != reflect.Interface && ptr.Kind() != reflect.Pointer && ptr.CanAddr() {
+ ptr = ptr.Addr()
+ }
+
+ // Added for Hugo.
+ var first reflect.Value
+ var method reflect.Value
+ if s.helper != nil {
+ method, first = s.helper.GetMethod(s.ctx, s.prep, ptr, fieldName)
+ } else {
+ method = ptr.MethodByName(fieldName)
+ }
+
+ if method.IsValid() {
+ if first != zero {
+ return s.evalCall(dot, method, false, node, fieldName, args, final, first)
+ }
+
+ return s.evalCall(dot, method, false, node, fieldName, args, final)
+ }
+
+ if method := ptr.MethodByName(fieldName); method.IsValid() {
+ return s.evalCall(dot, method, false, node, fieldName, args, final)
+ }
+ hasArgs := len(args) > 1 || final != missingVal
+ // It's not a method; must be a field of a struct or an element of a map.
+ switch receiver.Kind() {
+ case reflect.Struct:
+ tField, ok := receiver.Type().FieldByName(fieldName)
+ if ok {
+ field, err := receiver.FieldByIndexErr(tField.Index)
+ if !tField.IsExported() {
+ s.errorf("%s is an unexported field of struct type %s", fieldName, typ)
+ }
+ if err != nil {
+ s.errorf("%v", err)
+ }
+ // If it's a function, we must call it.
+ if hasArgs {
+ s.errorf("%s has arguments but cannot be invoked as function", fieldName)
+ }
+ return field
+ }
+ case reflect.Map:
+ // If it's a map, attempt to use the field name as a key.
+ nameVal := reflect.ValueOf(fieldName)
+ if nameVal.Type().AssignableTo(receiver.Type().Key()) {
+ if hasArgs {
+ s.errorf("%s is not a method but has arguments", fieldName)
+ }
+ var result reflect.Value
+ if s.helper != nil {
+ // Added for Hugo.
+ result, _ = s.helper.GetMapValue(s.ctx, s.prep, receiver, nameVal)
+ } else {
+ result = receiver.MapIndex(nameVal)
+ }
+ if !result.IsValid() {
+ switch s.tmpl.option.missingKey {
+ case mapInvalid:
+ // Just use the invalid value.
+ case mapZeroValue:
+ result = reflect.Zero(receiver.Type().Elem())
+ case mapError:
+ s.errorf("map has no entry for key %q", fieldName)
+ }
+ }
+ return result
+ }
+ case reflect.Pointer:
+ etyp := receiver.Type().Elem()
+ if etyp.Kind() == reflect.Struct {
+ if _, ok := etyp.FieldByName(fieldName); !ok {
+ // If there's no such field, say "can't evaluate"
+ // instead of "nil pointer evaluating".
+ break
+ }
+ }
+ if isNil {
+ s.errorf("nil pointer evaluating %s.%s", typ, fieldName)
+ }
+ }
+ s.errorf("can't evaluate field %s in type %s", fieldName, typ)
+ panic("not reached")
+}
+
+// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so
+// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0]
+// as the function itself.
+func (s *state) evalCall(dot, fun reflect.Value, isBuiltin bool, node parse.Node, name string, args []parse.Node, final reflect.Value, first ...reflect.Value) reflect.Value {
+ if args != nil {
+ args = args[1:] // Zeroth arg is function name/node; not passed to function.
+ }
+
+ typ := fun.Type()
+ numFirst := len(first)
+ numIn := len(args) + numFirst // Added for Hugo
+ if final != missingVal {
+ numIn++
+ }
+ numFixed := len(args) + len(first) // Adjusted for Hugo
+ if typ.IsVariadic() {
+ numFixed = typ.NumIn() - 1 // last arg is the variadic one.
+ if numIn < numFixed {
+ s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args))
+ }
+ } else if numIn != typ.NumIn() {
+ s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), numIn)
+ }
+ if !goodFunc(typ) {
+ // TODO: This could still be a confusing error; maybe goodFunc should provide info.
+ s.errorf("can't call method/function %q with %d results", name, typ.NumOut())
+ }
+
+ unwrap := func(v reflect.Value) reflect.Value {
+ if v.Type() == reflectValueType {
+ v = v.Interface().(reflect.Value)
+ }
+ return v
+ }
+
+ // Special case for builtin and/or, which short-circuit.
+ if isBuiltin && (name == "and" || name == "or") {
+ argType := typ.In(0)
+ var v reflect.Value
+ for _, arg := range args {
+ v = s.evalArg(dot, argType, arg).Interface().(reflect.Value)
+ if truth(v) == (name == "or") {
+ // This value was already unwrapped
+ // by the .Interface().(reflect.Value).
+ return v
+ }
+ }
+ if final != missingVal {
+ // The last argument to and/or is coming from
+ // the pipeline. We didn't short circuit on an earlier
+ // argument, so we are going to return this one.
+ // We don't have to evaluate final, but we do
+ // have to check its type. Then, since we are
+ // going to return it, we have to unwrap it.
+ v = unwrap(s.validateType(final, argType))
+ }
+ return v
+ }
+
+ // Build the arg list.
+ argv := make([]reflect.Value, numIn)
+ // Args must be evaluated. Fixed args first.
+ i := len(first) // Adjusted for Hugo.
+ for ; i < numFixed && i < len(args)+numFirst; i++ { // Adjusted for Hugo.
+ argv[i] = s.evalArg(dot, typ.In(i), args[i-numFirst]) // Adjusted for Hugo.
+ }
+ // Now the ... args.
+ if typ.IsVariadic() {
+ argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice.
+ for ; i < len(args)+numFirst; i++ { // Adjusted for Hugo.
+ argv[i] = s.evalArg(dot, argType, args[i-numFirst]) // Adjusted for Hugo.
+ }
+ }
+ // Add final value if necessary.
+ if final != missingVal {
+ t := typ.In(typ.NumIn() - 1)
+ if typ.IsVariadic() {
+ if numIn-1 < numFixed {
+ // The added final argument corresponds to a fixed parameter of the function.
+ // Validate against the type of the actual parameter.
+ t = typ.In(numIn - 1)
+ } else {
+ // The added final argument corresponds to the variadic part.
+ // Validate against the type of the elements of the variadic slice.
+ t = t.Elem()
+ }
+ }
+ argv[i] = s.validateType(final, t)
+ }
+
+ // Added for Hugo
+ for i := 0; i < len(first); i++ {
+ argv[i] = s.validateType(first[i], typ.In(i))
+ }
+
+ v, err := safeCall(fun, argv)
+ // If we have an error that is not nil, stop execution and return that
+ // error to the caller.
+ if err != nil {
+ s.at(node)
+ s.errorf("error calling %s: %w", name, err)
+ }
+ return unwrap(v)
+}
+
+func isTrue(val reflect.Value) (truth, ok bool) {
+ return hreflect.IsTruthfulValue(val), true
+}
diff --git a/tpl/internal/go_templates/texttemplate/hugo_template_test.go b/tpl/internal/go_templates/texttemplate/hugo_template_test.go
new file mode 100644
index 000000000..cc88151e3
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/hugo_template_test.go
@@ -0,0 +1,92 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package template
+
+import (
+ "bytes"
+ "context"
+ "reflect"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/common/hreflect"
+)
+
+type TestStruct struct {
+ S string
+ M map[string]string
+}
+
+func (t TestStruct) Hello1(arg string) string {
+ return arg
+}
+
+func (t TestStruct) Hello2(arg1, arg2 string) string {
+ return arg1 + " " + arg2
+}
+
+type execHelper struct{}
+
+func (e *execHelper) Init(ctx context.Context, tmpl Preparer) {
+}
+
+func (e *execHelper) GetFunc(ctx context.Context, tmpl Preparer, name string) (reflect.Value, reflect.Value, bool) {
+ if name == "print" {
+ return zero, zero, false
+ }
+ return reflect.ValueOf(func(s string) string {
+ return "hello " + s
+ }), zero, true
+}
+
+func (e *execHelper) GetMapValue(ctx context.Context, tmpl Preparer, m, key reflect.Value) (reflect.Value, bool) {
+ key = reflect.ValueOf(strings.ToLower(key.String()))
+ return m.MapIndex(key), true
+}
+
+func (e *execHelper) GetMethod(ctx context.Context, tmpl Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value) {
+ if name != "Hello1" {
+ return zero, zero
+ }
+ m := hreflect.GetMethodByName(receiver, "Hello2")
+ return m, reflect.ValueOf("v2")
+}
+
+func TestTemplateExecutor(t *testing.T) {
+ c := qt.New(t)
+
+ templ, err := New("").Parse(`
+{{ print "foo" }}
+{{ printf "hugo" }}
+Map: {{ .M.A }}
+Method: {{ .Hello1 "v1" }}
+
+`)
+
+ c.Assert(err, qt.IsNil)
+
+ ex := NewExecuter(&execHelper{})
+
+ var b bytes.Buffer
+ data := TestStruct{S: "sv", M: map[string]string{"a": "av"}}
+
+ c.Assert(ex.ExecuteWithContext(context.Background(), templ, &b, data), qt.IsNil)
+ got := b.String()
+
+ c.Assert(got, qt.Contains, "foo")
+ c.Assert(got, qt.Contains, "hello hugo")
+ c.Assert(got, qt.Contains, "Map: av")
+ c.Assert(got, qt.Contains, "Method: v2 v1")
+}
diff --git a/tpl/internal/go_templates/texttemplate/link_test.go b/tpl/internal/go_templates/texttemplate/link_test.go
new file mode 100644
index 000000000..23f6a31fa
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/link_test.go
@@ -0,0 +1,62 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package template_test
+
+import (
+ "bytes"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/testenv"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "testing"
+)
+
+// Issue 36021: verify that text/template doesn't prevent the linker from removing
+// unused methods.
+func _TestLinkerGC(t *testing.T) {
+ if testing.Short() {
+ t.Skip("skipping in short mode")
+ }
+ testenv.MustHaveGoBuild(t)
+ const prog = `package main
+
+import (
+ _ "text/template"
+)
+
+type T struct{}
+
+func (t *T) Unused() { println("THIS SHOULD BE ELIMINATED") }
+func (t *T) Used() {}
+
+var sink *T
+
+func main() {
+ var t T
+ sink = &t
+ t.Used()
+}
+`
+ td := t.TempDir()
+
+ if err := os.WriteFile(filepath.Join(td, "x.go"), []byte(prog), 0644); err != nil {
+ t.Fatal(err)
+ }
+ cmd := exec.Command(testenv.GoToolPath(t), "build", "-o", "x.exe", "x.go")
+ cmd.Dir = td
+ if out, err := cmd.CombinedOutput(); err != nil {
+ t.Fatalf("go build: %v, %s", err, out)
+ }
+ slurp, err := os.ReadFile(filepath.Join(td, "x.exe"))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if bytes.Contains(slurp, []byte("THIS SHOULD BE ELIMINATED")) {
+ t.Error("binary contains code that should be deadcode eliminated")
+ }
+}
diff --git a/tpl/internal/go_templates/texttemplate/multi_test.go b/tpl/internal/go_templates/texttemplate/multi_test.go
new file mode 100644
index 000000000..e3c9ec3ae
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/multi_test.go
@@ -0,0 +1,467 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13 && !windows
+// +build go1.13,!windows
+
+package template
+
+// Tests for multiple-template parsing and execution.
+
+import (
+ "bytes"
+ "fmt"
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+ "os"
+ "testing"
+)
+
+const (
+ noError = true
+ hasError = false
+)
+
+type multiParseTest struct {
+ name string
+ input string
+ ok bool
+ names []string
+ results []string
+}
+
+var multiParseTests = []multiParseTest{
+ {"empty", "", noError,
+ nil,
+ nil},
+ {"one", `{{define "foo"}} FOO {{end}}`, noError,
+ []string{"foo"},
+ []string{" FOO "}},
+ {"two", `{{define "foo"}} FOO {{end}}{{define "bar"}} BAR {{end}}`, noError,
+ []string{"foo", "bar"},
+ []string{" FOO ", " BAR "}},
+ // errors
+ {"missing end", `{{define "foo"}} FOO `, hasError,
+ nil,
+ nil},
+ {"malformed name", `{{define "foo}} FOO `, hasError,
+ nil,
+ nil},
+}
+
+func TestMultiParse(t *testing.T) {
+ for _, test := range multiParseTests {
+ template, err := New("root").Parse(test.input)
+ switch {
+ case err == nil && !test.ok:
+ t.Errorf("%q: expected error; got none", test.name)
+ continue
+ case err != nil && test.ok:
+ t.Errorf("%q: unexpected error: %v", test.name, err)
+ continue
+ case err != nil && !test.ok:
+ // expected error, got one
+ if *debug {
+ fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
+ }
+ continue
+ }
+ if template == nil {
+ continue
+ }
+ if len(template.tmpl) != len(test.names)+1 { // +1 for root
+ t.Errorf("%s: wrong number of templates; wanted %d got %d", test.name, len(test.names), len(template.tmpl))
+ continue
+ }
+ for i, name := range test.names {
+ tmpl, ok := template.tmpl[name]
+ if !ok {
+ t.Errorf("%s: can't find template %q", test.name, name)
+ continue
+ }
+ result := tmpl.Root.String()
+ if result != test.results[i] {
+ t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.results[i])
+ }
+ }
+ }
+}
+
+var multiExecTests = []execTest{
+ {"empty", "", "", nil, true},
+ {"text", "some text", "some text", nil, true},
+ {"invoke x", `{{template "x" .SI}}`, "TEXT", tVal, true},
+ {"invoke x no args", `{{template "x"}}`, "TEXT", tVal, true},
+ {"invoke dot int", `{{template "dot" .I}}`, "17", tVal, true},
+ {"invoke dot []int", `{{template "dot" .SI}}`, "[3 4 5]", tVal, true},
+ {"invoke dotV", `{{template "dotV" .U}}`, "v", tVal, true},
+ {"invoke nested int", `{{template "nested" .I}}`, "17", tVal, true},
+ {"variable declared by template", `{{template "nested" $x:=.SI}},{{index $x 1}}`, "[3 4 5],4", tVal, true},
+
+ // User-defined function: test argument evaluator.
+ {"testFunc literal", `{{oneArg "joe"}}`, "oneArg=joe", tVal, true},
+ {"testFunc .", `{{oneArg .}}`, "oneArg=joe", "joe", true},
+}
+
+// These strings are also in testdata/*.
+const multiText1 = `
+ {{define "x"}}TEXT{{end}}
+ {{define "dotV"}}{{.V}}{{end}}
+`
+
+const multiText2 = `
+ {{define "dot"}}{{.}}{{end}}
+ {{define "nested"}}{{template "dot" .}}{{end}}
+`
+
+func TestMultiExecute(t *testing.T) {
+ // Declare a couple of templates first.
+ template, err := New("root").Parse(multiText1)
+ if err != nil {
+ t.Fatalf("parse error for 1: %s", err)
+ }
+ _, err = template.Parse(multiText2)
+ if err != nil {
+ t.Fatalf("parse error for 2: %s", err)
+ }
+ testExecute(multiExecTests, template, t)
+}
+
+func TestParseFiles(t *testing.T) {
+ _, err := ParseFiles("DOES NOT EXIST")
+ if err == nil {
+ t.Error("expected error for non-existent file; got none")
+ }
+ template := New("root")
+ _, err = template.ParseFiles("testdata/file1.tmpl", "testdata/file2.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+}
+
+func TestParseGlob(t *testing.T) {
+ _, err := ParseGlob("DOES NOT EXIST")
+ if err == nil {
+ t.Error("expected error for non-existent file; got none")
+ }
+ _, err = New("error").ParseGlob("[x")
+ if err == nil {
+ t.Error("expected error for bad pattern; got none")
+ }
+ template := New("root")
+ _, err = template.ParseGlob("testdata/file*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+}
+
+func TestParseFS(t *testing.T) {
+ fs := os.DirFS("testdata")
+
+ {
+ _, err := ParseFS(fs, "DOES NOT EXIST")
+ if err == nil {
+ t.Error("expected error for non-existent file; got none")
+ }
+ }
+
+ {
+ template := New("root")
+ _, err := template.ParseFS(fs, "file1.tmpl", "file2.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+ }
+
+ {
+ template := New("root")
+ _, err := template.ParseFS(fs, "file*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(multiExecTests, template, t)
+ }
+}
+
+// In these tests, actual content (not just template definitions) comes from the parsed files.
+
+var templateFileExecTests = []execTest{
+ {"test", `{{template "tmpl1.tmpl"}}{{template "tmpl2.tmpl"}}`, "template1\n\ny\ntemplate2\n\nx\n", 0, true},
+}
+
+func TestParseFilesWithData(t *testing.T) {
+ template, err := New("root").ParseFiles("testdata/tmpl1.tmpl", "testdata/tmpl2.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(templateFileExecTests, template, t)
+}
+
+func TestParseGlobWithData(t *testing.T) {
+ template, err := New("root").ParseGlob("testdata/tmpl*.tmpl")
+ if err != nil {
+ t.Fatalf("error parsing files: %v", err)
+ }
+ testExecute(templateFileExecTests, template, t)
+}
+
+const (
+ cloneText1 = `{{define "a"}}{{template "b"}}{{template "c"}}{{end}}`
+ cloneText2 = `{{define "b"}}b{{end}}`
+ cloneText3 = `{{define "c"}}root{{end}}`
+ cloneText4 = `{{define "c"}}clone{{end}}`
+)
+
+func TestClone(t *testing.T) {
+ // Create some templates and clone the root.
+ root, err := New("root").Parse(cloneText1)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = root.Parse(cloneText2)
+ if err != nil {
+ t.Fatal(err)
+ }
+ clone := Must(root.Clone())
+ // Add variants to both.
+ _, err = root.Parse(cloneText3)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = clone.Parse(cloneText4)
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Verify that the clone is self-consistent.
+ for k, v := range clone.tmpl {
+ if k == clone.name && v.tmpl[k] != clone {
+ t.Error("clone does not contain root")
+ }
+ if v != v.tmpl[v.name] {
+ t.Errorf("clone does not contain self for %q", k)
+ }
+ }
+ // Execute root.
+ var b bytes.Buffer
+ err = root.ExecuteTemplate(&b, "a", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if b.String() != "broot" {
+ t.Errorf("expected %q got %q", "broot", b.String())
+ }
+ // Execute copy.
+ b.Reset()
+ err = clone.ExecuteTemplate(&b, "a", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if b.String() != "bclone" {
+ t.Errorf("expected %q got %q", "bclone", b.String())
+ }
+}
+
+func TestAddParseTree(t *testing.T) {
+ // Create some templates.
+ root, err := New("root").Parse(cloneText1)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = root.Parse(cloneText2)
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Add a new parse tree.
+ tree, err := parse.Parse("cloneText3", cloneText3, "", "", nil, builtins())
+ if err != nil {
+ t.Fatal(err)
+ }
+ added, err := root.AddParseTree("c", tree["c"])
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Execute.
+ var b bytes.Buffer
+ err = added.ExecuteTemplate(&b, "a", 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if b.String() != "broot" {
+ t.Errorf("expected %q got %q", "broot", b.String())
+ }
+}
+
+// Issue 7032
+func TestAddParseTreeToUnparsedTemplate(t *testing.T) {
+ master := "{{define \"master\"}}{{end}}"
+ tmpl := New("master")
+ tree, err := parse.Parse("master", master, "", "", nil)
+ if err != nil {
+ t.Fatalf("unexpected parse err: %v", err)
+ }
+ masterTree := tree["master"]
+ tmpl.AddParseTree("master", masterTree) // used to panic
+}
+
+func TestRedefinition(t *testing.T) {
+ var tmpl *Template
+ var err error
+ if tmpl, err = New("tmpl1").Parse(`{{define "test"}}foo{{end}}`); err != nil {
+ t.Fatalf("parse 1: %v", err)
+ }
+ if _, err = tmpl.Parse(`{{define "test"}}bar{{end}}`); err != nil {
+ t.Fatalf("got error %v, expected nil", err)
+ }
+ if _, err = tmpl.New("tmpl2").Parse(`{{define "test"}}bar{{end}}`); err != nil {
+ t.Fatalf("got error %v, expected nil", err)
+ }
+}
+
+// Issue 10879
+func TestEmptyTemplateCloneCrash(t *testing.T) {
+ t1 := New("base")
+ t1.Clone() // used to panic
+}
+
+// Issue 10910, 10926
+func TestTemplateLookUp(t *testing.T) {
+ t1 := New("foo")
+ if t1.Lookup("foo") != nil {
+ t.Error("Lookup returned non-nil value for undefined template foo")
+ }
+ t1.New("bar")
+ if t1.Lookup("bar") != nil {
+ t.Error("Lookup returned non-nil value for undefined template bar")
+ }
+ t1.Parse(`{{define "foo"}}test{{end}}`)
+ if t1.Lookup("foo") == nil {
+ t.Error("Lookup returned nil value for defined template")
+ }
+}
+
+func TestNew(t *testing.T) {
+ // template with same name already exists
+ t1, _ := New("test").Parse(`{{define "test"}}foo{{end}}`)
+ t2 := t1.New("test")
+
+ if t1.common != t2.common {
+ t.Errorf("t1 & t2 didn't share common struct; got %v != %v", t1.common, t2.common)
+ }
+ if t1.Tree == nil {
+ t.Error("defined template got nil Tree")
+ }
+ if t2.Tree != nil {
+ t.Error("undefined template got non-nil Tree")
+ }
+
+ containsT1 := false
+ for _, tmpl := range t1.Templates() {
+ if tmpl == t2 {
+ t.Error("Templates included undefined template")
+ }
+ if tmpl == t1 {
+ containsT1 = true
+ }
+ }
+ if !containsT1 {
+ t.Error("Templates didn't include defined template")
+ }
+}
+
+func TestParse(t *testing.T) {
+ // In multiple calls to Parse with the same receiver template, only one call
+ // can contain text other than space, comments, and template definitions
+ t1 := New("test")
+ if _, err := t1.Parse(`{{define "test"}}{{end}}`); err != nil {
+ t.Fatalf("parsing test: %s", err)
+ }
+ if _, err := t1.Parse(`{{define "test"}}{{/* this is a comment */}}{{end}}`); err != nil {
+ t.Fatalf("parsing test: %s", err)
+ }
+ if _, err := t1.Parse(`{{define "test"}}foo{{end}}`); err != nil {
+ t.Fatalf("parsing test: %s", err)
+ }
+}
+
+func TestEmptyTemplate(t *testing.T) {
+ cases := []struct {
+ defn []string
+ in string
+ want string
+ }{
+ {[]string{"x", "y"}, "", "y"},
+ {[]string{""}, "once", ""},
+ {[]string{"", ""}, "twice", ""},
+ {[]string{"{{.}}", "{{.}}"}, "twice", "twice"},
+ {[]string{"{{/* a comment */}}", "{{/* a comment */}}"}, "comment", ""},
+ {[]string{"{{.}}", ""}, "twice", ""},
+ }
+
+ for i, c := range cases {
+ root := New("root")
+
+ var (
+ m *Template
+ err error
+ )
+ for _, d := range c.defn {
+ m, err = root.New(c.in).Parse(d)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+ buf := &bytes.Buffer{}
+ if err := m.Execute(buf, c.in); err != nil {
+ t.Error(i, err)
+ continue
+ }
+ if buf.String() != c.want {
+ t.Errorf("expected string %q: got %q", c.want, buf.String())
+ }
+ }
+}
+
+// Issue 19249 was a regression in 1.8 caused by the handling of empty
+// templates added in that release, which got different answers depending
+// on the order templates appeared in the internal map.
+func TestIssue19294(t *testing.T) {
+ // The empty block in "xhtml" should be replaced during execution
+ // by the contents of "stylesheet", but if the internal map associating
+ // names with templates is built in the wrong order, the empty block
+ // looks non-empty and this doesn't happen.
+ var inlined = map[string]string{
+ "stylesheet": `{{define "stylesheet"}}stylesheet{{end}}`,
+ "xhtml": `{{block "stylesheet" .}}{{end}}`,
+ }
+ all := []string{"stylesheet", "xhtml"}
+ for i := 0; i < 100; i++ {
+ res, err := New("title.xhtml").Parse(`{{template "xhtml" .}}`)
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, name := range all {
+ _, err := res.New(name).Parse(inlined[name])
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+ var buf bytes.Buffer
+ res.Execute(&buf, 0)
+ if buf.String() != "stylesheet" {
+ t.Fatalf("iteration %d: got %q; expected %q", i, buf.String(), "stylesheet")
+ }
+ }
+}
+
+// Issue 48436
+func TestAddToZeroTemplate(t *testing.T) {
+ tree, err := parse.Parse("c", cloneText3, "", "", nil, builtins())
+ if err != nil {
+ t.Fatal(err)
+ }
+ var tmpl Template
+ tmpl.AddParseTree("x", tree["c"])
+}
diff --git a/tpl/internal/go_templates/texttemplate/option.go b/tpl/internal/go_templates/texttemplate/option.go
new file mode 100644
index 000000000..1035afad7
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/option.go
@@ -0,0 +1,72 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains the code to handle template options.
+
+package template
+
+import "strings"
+
+// missingKeyAction defines how to respond to indexing a map with a key that is not present.
+type missingKeyAction int
+
+const (
+ mapInvalid missingKeyAction = iota // Return an invalid reflect.Value.
+ mapZeroValue // Return the zero value for the map element.
+ mapError // Error out
+)
+
+type option struct {
+ missingKey missingKeyAction
+}
+
+// Option sets options for the template. Options are described by
+// strings, either a simple string or "key=value". There can be at
+// most one equals sign in an option string. If the option string
+// is unrecognized or otherwise invalid, Option panics.
+//
+// Known options:
+//
+// missingkey: Control the behavior during execution if a map is
+// indexed with a key that is not present in the map.
+// "missingkey=default" or "missingkey=invalid"
+// The default behavior: Do nothing and continue execution.
+// If printed, the result of the index operation is the string
+// "<no value>".
+// "missingkey=zero"
+// The operation returns the zero value for the map type's element.
+// "missingkey=error"
+// Execution stops immediately with an error.
+//
+func (t *Template) Option(opt ...string) *Template {
+ t.init()
+ for _, s := range opt {
+ t.setOption(s)
+ }
+ return t
+}
+
+func (t *Template) setOption(opt string) {
+ if opt == "" {
+ panic("empty option string")
+ }
+ // key=value
+ if key, value, ok := strings.Cut(opt, "="); ok {
+ switch key {
+ case "missingkey":
+ switch value {
+ case "invalid", "default":
+ t.option.missingKey = mapInvalid
+ return
+ case "zero":
+ t.option.missingKey = mapZeroValue
+ return
+ case "error":
+ t.option.missingKey = mapError
+ return
+ }
+ }
+ }
+ panic("unrecognized option: " + opt)
+}
diff --git a/tpl/internal/go_templates/texttemplate/parse/lex.go b/tpl/internal/go_templates/texttemplate/parse/lex.go
new file mode 100644
index 000000000..40d041112
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/parse/lex.go
@@ -0,0 +1,682 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package parse
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// item represents a token or text string returned from the scanner.
+type item struct {
+ typ itemType // The type of this item.
+ pos Pos // The starting position, in bytes, of this item in the input string.
+ val string // The value of this item.
+ line int // The line number at the start of this item.
+}
+
+func (i item) String() string {
+ switch {
+ case i.typ == itemEOF:
+ return "EOF"
+ case i.typ == itemError:
+ return i.val
+ case i.typ > itemKeyword:
+ return fmt.Sprintf("<%s>", i.val)
+ case len(i.val) > 10:
+ return fmt.Sprintf("%.10q...", i.val)
+ }
+ return fmt.Sprintf("%q", i.val)
+}
+
+// itemType identifies the type of lex items.
+type itemType int
+
+const (
+ itemError itemType = iota // error occurred; value is text of error
+ itemBool // boolean constant
+ itemChar // printable ASCII character; grab bag for comma etc.
+ itemCharConstant // character constant
+ itemComment // comment text
+ itemComplex // complex constant (1+2i); imaginary is just a number
+ itemAssign // equals ('=') introducing an assignment
+ itemDeclare // colon-equals (':=') introducing a declaration
+ itemEOF
+ itemField // alphanumeric identifier starting with '.'
+ itemIdentifier // alphanumeric identifier not starting with '.'
+ itemLeftDelim // left action delimiter
+ itemLeftParen // '(' inside action
+ itemNumber // simple number, including imaginary
+ itemPipe // pipe symbol
+ itemRawString // raw quoted string (includes quotes)
+ itemRightDelim // right action delimiter
+ itemRightParen // ')' inside action
+ itemSpace // run of spaces separating arguments
+ itemString // quoted string (includes quotes)
+ itemText // plain text
+ itemVariable // variable starting with '$', such as '$' or '$1' or '$hello'
+ // Keywords appear after all the rest.
+ itemKeyword // used only to delimit the keywords
+ itemBlock // block keyword
+ itemBreak // break keyword
+ itemContinue // continue keyword
+ itemDot // the cursor, spelled '.'
+ itemDefine // define keyword
+ itemElse // else keyword
+ itemEnd // end keyword
+ itemIf // if keyword
+ itemNil // the untyped nil constant, easiest to treat as a keyword
+ itemRange // range keyword
+ itemTemplate // template keyword
+ itemWith // with keyword
+)
+
+var key = map[string]itemType{
+ ".": itemDot,
+ "block": itemBlock,
+ "break": itemBreak,
+ "continue": itemContinue,
+ "define": itemDefine,
+ "else": itemElse,
+ "end": itemEnd,
+ "if": itemIf,
+ "range": itemRange,
+ "nil": itemNil,
+ "template": itemTemplate,
+ "with": itemWith,
+}
+
+const eof = -1
+
+// Trimming spaces.
+// If the action begins "{{- " rather than "{{", then all space/tab/newlines
+// preceding the action are trimmed; conversely if it ends " -}}" the
+// leading spaces are trimmed. This is done entirely in the lexer; the
+// parser never sees it happen. We require an ASCII space (' ', \t, \r, \n)
+// to be present to avoid ambiguity with things like "{{-3}}". It reads
+// better with the space present anyway. For simplicity, only ASCII
+// does the job.
+const (
+ spaceChars = " \t\r\n" // These are the space characters defined by Go itself.
+ trimMarker = '-' // Attached to left/right delimiter, trims trailing spaces from preceding/following text.
+ trimMarkerLen = Pos(1 + 1) // marker plus space before or after
+)
+
+// stateFn represents the state of the scanner as a function that returns the next state.
+type stateFn func(*lexer) stateFn
+
+// lexer holds the state of the scanner.
+type lexer struct {
+ name string // the name of the input; used only for error reports
+ input string // the string being scanned
+ leftDelim string // start of action
+ rightDelim string // end of action
+ emitComment bool // emit itemComment tokens.
+ pos Pos // current position in the input
+ start Pos // start position of this item
+ width Pos // width of last rune read from input
+ items chan item // channel of scanned items
+ parenDepth int // nesting depth of ( ) exprs
+ line int // 1+number of newlines seen
+ startLine int // start line of this item
+ breakOK bool // break keyword allowed
+ continueOK bool // continue keyword allowed
+}
+
+// next returns the next rune in the input.
+func (l *lexer) next() rune {
+ if int(l.pos) >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+ r, w := utf8.DecodeRuneInString(l.input[l.pos:])
+ l.width = Pos(w)
+ l.pos += l.width
+ if r == '\n' {
+ l.line++
+ }
+ return r
+}
+
+// peek returns but does not consume the next rune in the input.
+func (l *lexer) peek() rune {
+ r := l.next()
+ l.backup()
+ return r
+}
+
+// backup steps back one rune. Can only be called once per call of next.
+func (l *lexer) backup() {
+ l.pos -= l.width
+ // Correct newline count.
+ if l.width == 1 && l.input[l.pos] == '\n' {
+ l.line--
+ }
+}
+
+// emit passes an item back to the client.
+func (l *lexer) emit(t itemType) {
+ l.items <- item{t, l.start, l.input[l.start:l.pos], l.startLine}
+ l.start = l.pos
+ l.startLine = l.line
+}
+
+// ignore skips over the pending input before this point.
+func (l *lexer) ignore() {
+ l.line += strings.Count(l.input[l.start:l.pos], "\n")
+ l.start = l.pos
+ l.startLine = l.line
+}
+
+// accept consumes the next rune if it's from the valid set.
+func (l *lexer) accept(valid string) bool {
+ if strings.ContainsRune(valid, l.next()) {
+ return true
+ }
+ l.backup()
+ return false
+}
+
+// acceptRun consumes a run of runes from the valid set.
+func (l *lexer) acceptRun(valid string) {
+ for strings.ContainsRune(valid, l.next()) {
+ }
+ l.backup()
+}
+
+// errorf returns an error token and terminates the scan by passing
+// back a nil pointer that will be the next state, terminating l.nextItem.
+func (l *lexer) errorf(format string, args ...any) stateFn {
+ l.items <- item{itemError, l.start, fmt.Sprintf(format, args...), l.startLine}
+ return nil
+}
+
+// nextItem returns the next item from the input.
+// Called by the parser, not in the lexing goroutine.
+func (l *lexer) nextItem() item {
+ return <-l.items
+}
+
+// drain drains the output so the lexing goroutine will exit.
+// Called by the parser, not in the lexing goroutine.
+func (l *lexer) drain() {
+ for range l.items {
+ }
+}
+
+// lex creates a new scanner for the input string.
+func lex(name, input, left, right string, emitComment bool) *lexer {
+ if left == "" {
+ left = leftDelim
+ }
+ if right == "" {
+ right = rightDelim
+ }
+ l := &lexer{
+ name: name,
+ input: input,
+ leftDelim: left,
+ rightDelim: right,
+ emitComment: emitComment,
+ items: make(chan item),
+ line: 1,
+ startLine: 1,
+ }
+ go l.run()
+ return l
+}
+
+// run runs the state machine for the lexer.
+func (l *lexer) run() {
+ for state := lexText; state != nil; {
+ state = state(l)
+ }
+ close(l.items)
+}
+
+// state functions
+
+const (
+ leftDelim = "{{"
+ rightDelim = "}}"
+ leftComment = "/*"
+ rightComment = "*/"
+)
+
+// lexText scans until an opening action delimiter, "{{".
+func lexText(l *lexer) stateFn {
+ l.width = 0
+ if x := strings.Index(l.input[l.pos:], l.leftDelim); x >= 0 {
+ ldn := Pos(len(l.leftDelim))
+ l.pos += Pos(x)
+ trimLength := Pos(0)
+ if hasLeftTrimMarker(l.input[l.pos+ldn:]) {
+ trimLength = rightTrimLength(l.input[l.start:l.pos])
+ }
+ l.pos -= trimLength
+ if l.pos > l.start {
+ l.line += strings.Count(l.input[l.start:l.pos], "\n")
+ l.emit(itemText)
+ }
+ l.pos += trimLength
+ l.ignore()
+ return lexLeftDelim
+ }
+ l.pos = Pos(len(l.input))
+ // Correctly reached EOF.
+ if l.pos > l.start {
+ l.line += strings.Count(l.input[l.start:l.pos], "\n")
+ l.emit(itemText)
+ }
+ l.emit(itemEOF)
+ return nil
+}
+
+// rightTrimLength returns the length of the spaces at the end of the string.
+func rightTrimLength(s string) Pos {
+ return Pos(len(s) - len(strings.TrimRight(s, spaceChars)))
+}
+
+// atRightDelim reports whether the lexer is at a right delimiter, possibly preceded by a trim marker.
+func (l *lexer) atRightDelim() (delim, trimSpaces bool) {
+ if hasRightTrimMarker(l.input[l.pos:]) && strings.HasPrefix(l.input[l.pos+trimMarkerLen:], l.rightDelim) { // With trim marker.
+ return true, true
+ }
+ if strings.HasPrefix(l.input[l.pos:], l.rightDelim) { // Without trim marker.
+ return true, false
+ }
+ return false, false
+}
+
+// leftTrimLength returns the length of the spaces at the beginning of the string.
+func leftTrimLength(s string) Pos {
+ return Pos(len(s) - len(strings.TrimLeft(s, spaceChars)))
+}
+
+// lexLeftDelim scans the left delimiter, which is known to be present, possibly with a trim marker.
+func lexLeftDelim(l *lexer) stateFn {
+ l.pos += Pos(len(l.leftDelim))
+ trimSpace := hasLeftTrimMarker(l.input[l.pos:])
+ afterMarker := Pos(0)
+ if trimSpace {
+ afterMarker = trimMarkerLen
+ }
+ if strings.HasPrefix(l.input[l.pos+afterMarker:], leftComment) {
+ l.pos += afterMarker
+ l.ignore()
+ return lexComment
+ }
+ l.emit(itemLeftDelim)
+ l.pos += afterMarker
+ l.ignore()
+ l.parenDepth = 0
+ return lexInsideAction
+}
+
+// lexComment scans a comment. The left comment marker is known to be present.
+func lexComment(l *lexer) stateFn {
+ l.pos += Pos(len(leftComment))
+ i := strings.Index(l.input[l.pos:], rightComment)
+ if i < 0 {
+ return l.errorf("unclosed comment")
+ }
+ l.pos += Pos(i + len(rightComment))
+ delim, trimSpace := l.atRightDelim()
+ if !delim {
+ return l.errorf("comment ends before closing delimiter")
+ }
+ if l.emitComment {
+ l.emit(itemComment)
+ }
+ if trimSpace {
+ l.pos += trimMarkerLen
+ }
+ l.pos += Pos(len(l.rightDelim))
+ if trimSpace {
+ l.pos += leftTrimLength(l.input[l.pos:])
+ }
+ l.ignore()
+ return lexText
+}
+
+// lexRightDelim scans the right delimiter, which is known to be present, possibly with a trim marker.
+func lexRightDelim(l *lexer) stateFn {
+ trimSpace := hasRightTrimMarker(l.input[l.pos:])
+ if trimSpace {
+ l.pos += trimMarkerLen
+ l.ignore()
+ }
+ l.pos += Pos(len(l.rightDelim))
+ l.emit(itemRightDelim)
+ if trimSpace {
+ l.pos += leftTrimLength(l.input[l.pos:])
+ l.ignore()
+ }
+ return lexText
+}
+
+// lexInsideAction scans the elements inside action delimiters.
+func lexInsideAction(l *lexer) stateFn {
+ // Either number, quoted string, or identifier.
+ // Spaces separate arguments; runs of spaces turn into itemSpace.
+ // Pipe symbols separate and are emitted.
+ delim, _ := l.atRightDelim()
+ if delim {
+ if l.parenDepth == 0 {
+ return lexRightDelim
+ }
+ return l.errorf("unclosed left paren")
+ }
+ switch r := l.next(); {
+ case r == eof:
+ return l.errorf("unclosed action")
+ case isSpace(r):
+ l.backup() // Put space back in case we have " -}}".
+ return lexSpace
+ case r == '=':
+ l.emit(itemAssign)
+ case r == ':':
+ if l.next() != '=' {
+ return l.errorf("expected :=")
+ }
+ l.emit(itemDeclare)
+ case r == '|':
+ l.emit(itemPipe)
+ case r == '"':
+ return lexQuote
+ case r == '`':
+ return lexRawQuote
+ case r == '$':
+ return lexVariable
+ case r == '\'':
+ return lexChar
+ case r == '.':
+ // special look-ahead for ".field" so we don't break l.backup().
+ if l.pos < Pos(len(l.input)) {
+ r := l.input[l.pos]
+ if r < '0' || '9' < r {
+ return lexField
+ }
+ }
+ fallthrough // '.' can start a number.
+ case r == '+' || r == '-' || ('0' <= r && r <= '9'):
+ l.backup()
+ return lexNumber
+ case isAlphaNumeric(r):
+ l.backup()
+ return lexIdentifier
+ case r == '(':
+ l.emit(itemLeftParen)
+ l.parenDepth++
+ case r == ')':
+ l.emit(itemRightParen)
+ l.parenDepth--
+ if l.parenDepth < 0 {
+ return l.errorf("unexpected right paren %#U", r)
+ }
+ case r <= unicode.MaxASCII && unicode.IsPrint(r):
+ l.emit(itemChar)
+ default:
+ return l.errorf("unrecognized character in action: %#U", r)
+ }
+ return lexInsideAction
+}
+
+// lexSpace scans a run of space characters.
+// We have not consumed the first space, which is known to be present.
+// Take care if there is a trim-marked right delimiter, which starts with a space.
+func lexSpace(l *lexer) stateFn {
+ var r rune
+ var numSpaces int
+ for {
+ r = l.peek()
+ if !isSpace(r) {
+ break
+ }
+ l.next()
+ numSpaces++
+ }
+ // Be careful about a trim-marked closing delimiter, which has a minus
+ // after a space. We know there is a space, so check for the '-' that might follow.
+ if hasRightTrimMarker(l.input[l.pos-1:]) && strings.HasPrefix(l.input[l.pos-1+trimMarkerLen:], l.rightDelim) {
+ l.backup() // Before the space.
+ if numSpaces == 1 {
+ return lexRightDelim // On the delim, so go right to that.
+ }
+ }
+ l.emit(itemSpace)
+ return lexInsideAction
+}
+
+// lexIdentifier scans an alphanumeric.
+func lexIdentifier(l *lexer) stateFn {
+Loop:
+ for {
+ switch r := l.next(); {
+ case isAlphaNumeric(r):
+ // absorb.
+ default:
+ l.backup()
+ word := l.input[l.start:l.pos]
+ if !l.atTerminator() {
+ return l.errorf("bad character %#U", r)
+ }
+ switch {
+ case key[word] > itemKeyword:
+ item := key[word]
+ if item == itemBreak && !l.breakOK || item == itemContinue && !l.continueOK {
+ l.emit(itemIdentifier)
+ } else {
+ l.emit(item)
+ }
+ case word[0] == '.':
+ l.emit(itemField)
+ case word == "true", word == "false":
+ l.emit(itemBool)
+ default:
+ l.emit(itemIdentifier)
+ }
+ break Loop
+ }
+ }
+ return lexInsideAction
+}
+
+// lexField scans a field: .Alphanumeric.
+// The . has been scanned.
+func lexField(l *lexer) stateFn {
+ return lexFieldOrVariable(l, itemField)
+}
+
+// lexVariable scans a Variable: $Alphanumeric.
+// The $ has been scanned.
+func lexVariable(l *lexer) stateFn {
+ if l.atTerminator() { // Nothing interesting follows -> "$".
+ l.emit(itemVariable)
+ return lexInsideAction
+ }
+ return lexFieldOrVariable(l, itemVariable)
+}
+
+// lexFieldOrVariable scans a field or variable: [.$]Alphanumeric.
+// The . or $ has been scanned.
+func lexFieldOrVariable(l *lexer, typ itemType) stateFn {
+ if l.atTerminator() { // Nothing interesting follows -> "." or "$".
+ if typ == itemVariable {
+ l.emit(itemVariable)
+ } else {
+ l.emit(itemDot)
+ }
+ return lexInsideAction
+ }
+ var r rune
+ for {
+ r = l.next()
+ if !isAlphaNumeric(r) {
+ l.backup()
+ break
+ }
+ }
+ if !l.atTerminator() {
+ return l.errorf("bad character %#U", r)
+ }
+ l.emit(typ)
+ return lexInsideAction
+}
+
+// atTerminator reports whether the input is at valid termination character to
+// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases
+// like "$x+2" not being acceptable without a space, in case we decide one
+// day to implement arithmetic.
+func (l *lexer) atTerminator() bool {
+ r := l.peek()
+ if isSpace(r) {
+ return true
+ }
+ switch r {
+ case eof, '.', ',', '|', ':', ')', '(':
+ return true
+ }
+ // Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will
+ // succeed but should fail) but only in extremely rare cases caused by willfully
+ // bad choice of delimiter.
+ if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r {
+ return true
+ }
+ return false
+}
+
+// lexChar scans a character constant. The initial quote is already
+// scanned. Syntax checking is done by the parser.
+func lexChar(l *lexer) stateFn {
+Loop:
+ for {
+ switch l.next() {
+ case '\\':
+ if r := l.next(); r != eof && r != '\n' {
+ break
+ }
+ fallthrough
+ case eof, '\n':
+ return l.errorf("unterminated character constant")
+ case '\'':
+ break Loop
+ }
+ }
+ l.emit(itemCharConstant)
+ return lexInsideAction
+}
+
+// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This
+// isn't a perfect number scanner - for instance it accepts "." and "0x0.2"
+// and "089" - but when it's wrong the input is invalid and the parser (via
+// strconv) will notice.
+func lexNumber(l *lexer) stateFn {
+ if !l.scanNumber() {
+ return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
+ }
+ if sign := l.peek(); sign == '+' || sign == '-' {
+ // Complex: 1+2i. No spaces, must end in 'i'.
+ if !l.scanNumber() || l.input[l.pos-1] != 'i' {
+ return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
+ }
+ l.emit(itemComplex)
+ } else {
+ l.emit(itemNumber)
+ }
+ return lexInsideAction
+}
+
+func (l *lexer) scanNumber() bool {
+ // Optional leading sign.
+ l.accept("+-")
+ // Is it hex?
+ digits := "0123456789_"
+ if l.accept("0") {
+ // Note: Leading 0 does not mean octal in floats.
+ if l.accept("xX") {
+ digits = "0123456789abcdefABCDEF_"
+ } else if l.accept("oO") {
+ digits = "01234567_"
+ } else if l.accept("bB") {
+ digits = "01_"
+ }
+ }
+ l.acceptRun(digits)
+ if l.accept(".") {
+ l.acceptRun(digits)
+ }
+ if len(digits) == 10+1 && l.accept("eE") {
+ l.accept("+-")
+ l.acceptRun("0123456789_")
+ }
+ if len(digits) == 16+6+1 && l.accept("pP") {
+ l.accept("+-")
+ l.acceptRun("0123456789_")
+ }
+ // Is it imaginary?
+ l.accept("i")
+ // Next thing mustn't be alphanumeric.
+ if isAlphaNumeric(l.peek()) {
+ l.next()
+ return false
+ }
+ return true
+}
+
+// lexQuote scans a quoted string.
+func lexQuote(l *lexer) stateFn {
+Loop:
+ for {
+ switch l.next() {
+ case '\\':
+ if r := l.next(); r != eof && r != '\n' {
+ break
+ }
+ fallthrough
+ case eof, '\n':
+ return l.errorf("unterminated quoted string")
+ case '"':
+ break Loop
+ }
+ }
+ l.emit(itemString)
+ return lexInsideAction
+}
+
+// lexRawQuote scans a raw quoted string.
+func lexRawQuote(l *lexer) stateFn {
+Loop:
+ for {
+ switch l.next() {
+ case eof:
+ return l.errorf("unterminated raw quoted string")
+ case '`':
+ break Loop
+ }
+ }
+ l.emit(itemRawString)
+ return lexInsideAction
+}
+
+// isSpace reports whether r is a space character.
+func isSpace(r rune) bool {
+ return r == ' ' || r == '\t' || r == '\r' || r == '\n'
+}
+
+// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore.
+func isAlphaNumeric(r rune) bool {
+ return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
+}
+
+func hasLeftTrimMarker(s string) bool {
+ return len(s) >= 2 && s[0] == trimMarker && isSpace(rune(s[1]))
+}
+
+func hasRightTrimMarker(s string) bool {
+ return len(s) >= 2 && isSpace(rune(s[0])) && s[1] == trimMarker
+}
diff --git a/tpl/internal/go_templates/texttemplate/parse/lex_test.go b/tpl/internal/go_templates/texttemplate/parse/lex_test.go
new file mode 100644
index 000000000..9189035fe
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/parse/lex_test.go
@@ -0,0 +1,562 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package parse
+
+import (
+ "fmt"
+ "testing"
+)
+
+// Make the types prettyprint.
+var itemName = map[itemType]string{
+ itemError: "error",
+ itemBool: "bool",
+ itemChar: "char",
+ itemCharConstant: "charconst",
+ itemComment: "comment",
+ itemComplex: "complex",
+ itemDeclare: ":=",
+ itemEOF: "EOF",
+ itemField: "field",
+ itemIdentifier: "identifier",
+ itemLeftDelim: "left delim",
+ itemLeftParen: "(",
+ itemNumber: "number",
+ itemPipe: "pipe",
+ itemRawString: "raw string",
+ itemRightDelim: "right delim",
+ itemRightParen: ")",
+ itemSpace: "space",
+ itemString: "string",
+ itemVariable: "variable",
+
+ // keywords
+ itemDot: ".",
+ itemBlock: "block",
+ itemBreak: "break",
+ itemContinue: "continue",
+ itemDefine: "define",
+ itemElse: "else",
+ itemIf: "if",
+ itemEnd: "end",
+ itemNil: "nil",
+ itemRange: "range",
+ itemTemplate: "template",
+ itemWith: "with",
+}
+
+func (i itemType) String() string {
+ s := itemName[i]
+ if s == "" {
+ return fmt.Sprintf("item%d", int(i))
+ }
+ return s
+}
+
+type lexTest struct {
+ name string
+ input string
+ items []item
+}
+
+func mkItem(typ itemType, text string) item {
+ return item{
+ typ: typ,
+ val: text,
+ }
+}
+
+var (
+ tDot = mkItem(itemDot, ".")
+ tBlock = mkItem(itemBlock, "block")
+ tEOF = mkItem(itemEOF, "")
+ tFor = mkItem(itemIdentifier, "for")
+ tLeft = mkItem(itemLeftDelim, "{{")
+ tLpar = mkItem(itemLeftParen, "(")
+ tPipe = mkItem(itemPipe, "|")
+ tQuote = mkItem(itemString, `"abc \n\t\" "`)
+ tRange = mkItem(itemRange, "range")
+ tRight = mkItem(itemRightDelim, "}}")
+ tRpar = mkItem(itemRightParen, ")")
+ tSpace = mkItem(itemSpace, " ")
+ raw = "`" + `abc\n\t\" ` + "`"
+ rawNL = "`now is{{\n}}the time`" // Contains newline inside raw quote.
+ tRawQuote = mkItem(itemRawString, raw)
+ tRawQuoteNL = mkItem(itemRawString, rawNL)
+)
+
+var lexTests = []lexTest{
+ {"empty", "", []item{tEOF}},
+ {"spaces", " \t\n", []item{mkItem(itemText, " \t\n"), tEOF}},
+ {"text", `now is the time`, []item{mkItem(itemText, "now is the time"), tEOF}},
+ {"text with comment", "hello-{{/* this is a comment */}}-world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemComment, "/* this is a comment */"),
+ mkItem(itemText, "-world"),
+ tEOF,
+ }},
+ {"punctuation", "{{,@% }}", []item{
+ tLeft,
+ mkItem(itemChar, ","),
+ mkItem(itemChar, "@"),
+ mkItem(itemChar, "%"),
+ tSpace,
+ tRight,
+ tEOF,
+ }},
+ {"parens", "{{((3))}}", []item{
+ tLeft,
+ tLpar,
+ tLpar,
+ mkItem(itemNumber, "3"),
+ tRpar,
+ tRpar,
+ tRight,
+ tEOF,
+ }},
+ {"empty action", `{{}}`, []item{tLeft, tRight, tEOF}},
+ {"for", `{{for}}`, []item{tLeft, tFor, tRight, tEOF}},
+ {"block", `{{block "foo" .}}`, []item{
+ tLeft, tBlock, tSpace, mkItem(itemString, `"foo"`), tSpace, tDot, tRight, tEOF,
+ }},
+ {"quote", `{{"abc \n\t\" "}}`, []item{tLeft, tQuote, tRight, tEOF}},
+ {"raw quote", "{{" + raw + "}}", []item{tLeft, tRawQuote, tRight, tEOF}},
+ {"raw quote with newline", "{{" + rawNL + "}}", []item{tLeft, tRawQuoteNL, tRight, tEOF}},
+ {"numbers", "{{1 02 0x14 0X14 -7.2i 1e3 1E3 +1.2e-4 4.2i 1+2i 1_2 0x1.e_fp4 0X1.E_FP4}}", []item{
+ tLeft,
+ mkItem(itemNumber, "1"),
+ tSpace,
+ mkItem(itemNumber, "02"),
+ tSpace,
+ mkItem(itemNumber, "0x14"),
+ tSpace,
+ mkItem(itemNumber, "0X14"),
+ tSpace,
+ mkItem(itemNumber, "-7.2i"),
+ tSpace,
+ mkItem(itemNumber, "1e3"),
+ tSpace,
+ mkItem(itemNumber, "1E3"),
+ tSpace,
+ mkItem(itemNumber, "+1.2e-4"),
+ tSpace,
+ mkItem(itemNumber, "4.2i"),
+ tSpace,
+ mkItem(itemComplex, "1+2i"),
+ tSpace,
+ mkItem(itemNumber, "1_2"),
+ tSpace,
+ mkItem(itemNumber, "0x1.e_fp4"),
+ tSpace,
+ mkItem(itemNumber, "0X1.E_FP4"),
+ tRight,
+ tEOF,
+ }},
+ {"characters", `{{'a' '\n' '\'' '\\' '\u00FF' '\xFF' '本'}}`, []item{
+ tLeft,
+ mkItem(itemCharConstant, `'a'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\n'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\''`),
+ tSpace,
+ mkItem(itemCharConstant, `'\\'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\u00FF'`),
+ tSpace,
+ mkItem(itemCharConstant, `'\xFF'`),
+ tSpace,
+ mkItem(itemCharConstant, `'本'`),
+ tRight,
+ tEOF,
+ }},
+ {"bools", "{{true false}}", []item{
+ tLeft,
+ mkItem(itemBool, "true"),
+ tSpace,
+ mkItem(itemBool, "false"),
+ tRight,
+ tEOF,
+ }},
+ {"dot", "{{.}}", []item{
+ tLeft,
+ tDot,
+ tRight,
+ tEOF,
+ }},
+ {"nil", "{{nil}}", []item{
+ tLeft,
+ mkItem(itemNil, "nil"),
+ tRight,
+ tEOF,
+ }},
+ {"dots", "{{.x . .2 .x.y.z}}", []item{
+ tLeft,
+ mkItem(itemField, ".x"),
+ tSpace,
+ tDot,
+ tSpace,
+ mkItem(itemNumber, ".2"),
+ tSpace,
+ mkItem(itemField, ".x"),
+ mkItem(itemField, ".y"),
+ mkItem(itemField, ".z"),
+ tRight,
+ tEOF,
+ }},
+ {"keywords", "{{range if else end with}}", []item{
+ tLeft,
+ mkItem(itemRange, "range"),
+ tSpace,
+ mkItem(itemIf, "if"),
+ tSpace,
+ mkItem(itemElse, "else"),
+ tSpace,
+ mkItem(itemEnd, "end"),
+ tSpace,
+ mkItem(itemWith, "with"),
+ tRight,
+ tEOF,
+ }},
+ {"variables", "{{$c := printf $ $hello $23 $ $var.Field .Method}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$c"),
+ tSpace,
+ mkItem(itemDeclare, ":="),
+ tSpace,
+ mkItem(itemIdentifier, "printf"),
+ tSpace,
+ mkItem(itemVariable, "$"),
+ tSpace,
+ mkItem(itemVariable, "$hello"),
+ tSpace,
+ mkItem(itemVariable, "$23"),
+ tSpace,
+ mkItem(itemVariable, "$"),
+ tSpace,
+ mkItem(itemVariable, "$var"),
+ mkItem(itemField, ".Field"),
+ tSpace,
+ mkItem(itemField, ".Method"),
+ tRight,
+ tEOF,
+ }},
+ {"variable invocation", "{{$x 23}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$x"),
+ tSpace,
+ mkItem(itemNumber, "23"),
+ tRight,
+ tEOF,
+ }},
+ {"pipeline", `intro {{echo hi 1.2 |noargs|args 1 "hi"}} outro`, []item{
+ mkItem(itemText, "intro "),
+ tLeft,
+ mkItem(itemIdentifier, "echo"),
+ tSpace,
+ mkItem(itemIdentifier, "hi"),
+ tSpace,
+ mkItem(itemNumber, "1.2"),
+ tSpace,
+ tPipe,
+ mkItem(itemIdentifier, "noargs"),
+ tPipe,
+ mkItem(itemIdentifier, "args"),
+ tSpace,
+ mkItem(itemNumber, "1"),
+ tSpace,
+ mkItem(itemString, `"hi"`),
+ tRight,
+ mkItem(itemText, " outro"),
+ tEOF,
+ }},
+ {"declaration", "{{$v := 3}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$v"),
+ tSpace,
+ mkItem(itemDeclare, ":="),
+ tSpace,
+ mkItem(itemNumber, "3"),
+ tRight,
+ tEOF,
+ }},
+ {"2 declarations", "{{$v , $w := 3}}", []item{
+ tLeft,
+ mkItem(itemVariable, "$v"),
+ tSpace,
+ mkItem(itemChar, ","),
+ tSpace,
+ mkItem(itemVariable, "$w"),
+ tSpace,
+ mkItem(itemDeclare, ":="),
+ tSpace,
+ mkItem(itemNumber, "3"),
+ tRight,
+ tEOF,
+ }},
+ {"field of parenthesized expression", "{{(.X).Y}}", []item{
+ tLeft,
+ tLpar,
+ mkItem(itemField, ".X"),
+ tRpar,
+ mkItem(itemField, ".Y"),
+ tRight,
+ tEOF,
+ }},
+ {"trimming spaces before and after", "hello- {{- 3 -}} -world", []item{
+ mkItem(itemText, "hello-"),
+ tLeft,
+ mkItem(itemNumber, "3"),
+ tRight,
+ mkItem(itemText, "-world"),
+ tEOF,
+ }},
+ {"trimming spaces before and after comment", "hello- {{- /* hello */ -}} -world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemComment, "/* hello */"),
+ mkItem(itemText, "-world"),
+ tEOF,
+ }},
+ // errors
+ {"badchar", "#{{\x01}}", []item{
+ mkItem(itemText, "#"),
+ tLeft,
+ mkItem(itemError, "unrecognized character in action: U+0001"),
+ }},
+ {"unclosed action", "{{", []item{
+ tLeft,
+ mkItem(itemError, "unclosed action"),
+ }},
+ {"EOF in action", "{{range", []item{
+ tLeft,
+ tRange,
+ mkItem(itemError, "unclosed action"),
+ }},
+ {"unclosed quote", "{{\"\n\"}}", []item{
+ tLeft,
+ mkItem(itemError, "unterminated quoted string"),
+ }},
+ {"unclosed raw quote", "{{`xx}}", []item{
+ tLeft,
+ mkItem(itemError, "unterminated raw quoted string"),
+ }},
+ {"unclosed char constant", "{{'\n}}", []item{
+ tLeft,
+ mkItem(itemError, "unterminated character constant"),
+ }},
+ {"bad number", "{{3k}}", []item{
+ tLeft,
+ mkItem(itemError, `bad number syntax: "3k"`),
+ }},
+ {"unclosed paren", "{{(3}}", []item{
+ tLeft,
+ tLpar,
+ mkItem(itemNumber, "3"),
+ mkItem(itemError, `unclosed left paren`),
+ }},
+ {"extra right paren", "{{3)}}", []item{
+ tLeft,
+ mkItem(itemNumber, "3"),
+ tRpar,
+ mkItem(itemError, `unexpected right paren U+0029 ')'`),
+ }},
+
+ // Fixed bugs
+ // Many elements in an action blew the lookahead until
+ // we made lexInsideAction not loop.
+ {"long pipeline deadlock", "{{|||||}}", []item{
+ tLeft,
+ tPipe,
+ tPipe,
+ tPipe,
+ tPipe,
+ tPipe,
+ tRight,
+ tEOF,
+ }},
+ {"text with bad comment", "hello-{{/*/}}-world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemError, `unclosed comment`),
+ }},
+ {"text with comment close separated from delim", "hello-{{/* */ }}-world", []item{
+ mkItem(itemText, "hello-"),
+ mkItem(itemError, `comment ends before closing delimiter`),
+ }},
+ // This one is an error that we can't catch because it breaks templates with
+ // minimized JavaScript. Should have fixed it before Go 1.1.
+ {"unmatched right delimiter", "hello-{.}}-world", []item{
+ mkItem(itemText, "hello-{.}}-world"),
+ tEOF,
+ }},
+}
+
+// collect gathers the emitted items into a slice.
+func collect(t *lexTest, left, right string) (items []item) {
+ l := lex(t.name, t.input, left, right, true)
+ for {
+ item := l.nextItem()
+ items = append(items, item)
+ if item.typ == itemEOF || item.typ == itemError {
+ break
+ }
+ }
+ return
+}
+
+func equal(i1, i2 []item, checkPos bool) bool {
+ if len(i1) != len(i2) {
+ return false
+ }
+ for k := range i1 {
+ if i1[k].typ != i2[k].typ {
+ return false
+ }
+ if i1[k].val != i2[k].val {
+ return false
+ }
+ if checkPos && i1[k].pos != i2[k].pos {
+ return false
+ }
+ if checkPos && i1[k].line != i2[k].line {
+ return false
+ }
+ }
+ return true
+}
+
+func TestLex(t *testing.T) {
+ for _, test := range lexTests {
+ items := collect(&test, "", "")
+ if !equal(items, test.items, false) {
+ t.Errorf("%s: got\n\t%+v\nexpected\n\t%v", test.name, items, test.items)
+ }
+ }
+}
+
+// Some easy cases from above, but with delimiters $$ and @@
+var lexDelimTests = []lexTest{
+ {"punctuation", "$$,@%{{}}@@", []item{
+ tLeftDelim,
+ mkItem(itemChar, ","),
+ mkItem(itemChar, "@"),
+ mkItem(itemChar, "%"),
+ mkItem(itemChar, "{"),
+ mkItem(itemChar, "{"),
+ mkItem(itemChar, "}"),
+ mkItem(itemChar, "}"),
+ tRightDelim,
+ tEOF,
+ }},
+ {"empty action", `$$@@`, []item{tLeftDelim, tRightDelim, tEOF}},
+ {"for", `$$for@@`, []item{tLeftDelim, tFor, tRightDelim, tEOF}},
+ {"quote", `$$"abc \n\t\" "@@`, []item{tLeftDelim, tQuote, tRightDelim, tEOF}},
+ {"raw quote", "$$" + raw + "@@", []item{tLeftDelim, tRawQuote, tRightDelim, tEOF}},
+}
+
+var (
+ tLeftDelim = mkItem(itemLeftDelim, "$$")
+ tRightDelim = mkItem(itemRightDelim, "@@")
+)
+
+func TestDelims(t *testing.T) {
+ for _, test := range lexDelimTests {
+ items := collect(&test, "$$", "@@")
+ if !equal(items, test.items, false) {
+ t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items)
+ }
+ }
+}
+
+var lexPosTests = []lexTest{
+ {"empty", "", []item{{itemEOF, 0, "", 1}}},
+ {"punctuation", "{{,@%#}}", []item{
+ {itemLeftDelim, 0, "{{", 1},
+ {itemChar, 2, ",", 1},
+ {itemChar, 3, "@", 1},
+ {itemChar, 4, "%", 1},
+ {itemChar, 5, "#", 1},
+ {itemRightDelim, 6, "}}", 1},
+ {itemEOF, 8, "", 1},
+ }},
+ {"sample", "0123{{hello}}xyz", []item{
+ {itemText, 0, "0123", 1},
+ {itemLeftDelim, 4, "{{", 1},
+ {itemIdentifier, 6, "hello", 1},
+ {itemRightDelim, 11, "}}", 1},
+ {itemText, 13, "xyz", 1},
+ {itemEOF, 16, "", 1},
+ }},
+ {"trimafter", "{{x -}}\n{{y}}", []item{
+ {itemLeftDelim, 0, "{{", 1},
+ {itemIdentifier, 2, "x", 1},
+ {itemRightDelim, 5, "}}", 1},
+ {itemLeftDelim, 8, "{{", 2},
+ {itemIdentifier, 10, "y", 2},
+ {itemRightDelim, 11, "}}", 2},
+ {itemEOF, 13, "", 2},
+ }},
+ {"trimbefore", "{{x}}\n{{- y}}", []item{
+ {itemLeftDelim, 0, "{{", 1},
+ {itemIdentifier, 2, "x", 1},
+ {itemRightDelim, 3, "}}", 1},
+ {itemLeftDelim, 6, "{{", 2},
+ {itemIdentifier, 10, "y", 2},
+ {itemRightDelim, 11, "}}", 2},
+ {itemEOF, 13, "", 2},
+ }},
+}
+
+// The other tests don't check position, to make the test cases easier to construct.
+// This one does.
+func TestPos(t *testing.T) {
+ for _, test := range lexPosTests {
+ items := collect(&test, "", "")
+ if !equal(items, test.items, true) {
+ t.Errorf("%s: got\n\t%v\nexpected\n\t%v", test.name, items, test.items)
+ if len(items) == len(test.items) {
+ // Detailed print; avoid item.String() to expose the position value.
+ for i := range items {
+ if !equal(items[i:i+1], test.items[i:i+1], true) {
+ i1 := items[i]
+ i2 := test.items[i]
+ t.Errorf("\t#%d: got {%v %d %q %d} expected {%v %d %q %d}",
+ i, i1.typ, i1.pos, i1.val, i1.line, i2.typ, i2.pos, i2.val, i2.line)
+ }
+ }
+ }
+ }
+ }
+}
+
+// Test that an error shuts down the lexing goroutine.
+func TestShutdown(t *testing.T) {
+ // We need to duplicate template.Parse here to hold on to the lexer.
+ const text = "erroneous{{define}}{{else}}1234"
+ lexer := lex("foo", text, "{{", "}}", false)
+ _, err := New("root").parseLexer(lexer)
+ if err == nil {
+ t.Fatalf("expected error")
+ }
+ // The error should have drained the input. Therefore, the lexer should be shut down.
+ token, ok := <-lexer.items
+ if ok {
+ t.Fatalf("input was not drained; got %v", token)
+ }
+}
+
+// parseLexer is a local version of parse that lets us pass in the lexer instead of building it.
+// We expect an error, so the tree set and funcs list are explicitly nil.
+func (t *Tree) parseLexer(lex *lexer) (tree *Tree, err error) {
+ defer t.recover(&err)
+ t.ParseName = t.Name
+ t.startParse(nil, lex, map[string]*Tree{})
+ t.parse()
+ t.add()
+ t.stopParse()
+ return t, nil
+}
diff --git a/tpl/internal/go_templates/texttemplate/parse/node.go b/tpl/internal/go_templates/texttemplate/parse/node.go
new file mode 100644
index 000000000..47268225c
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/parse/node.go
@@ -0,0 +1,1008 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Parse nodes.
+
+package parse
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+var textFormat = "%s" // Changed to "%q" in tests for better error messages.
+
+// A Node is an element in the parse tree. The interface is trivial.
+// The interface contains an unexported method so that only
+// types local to this package can satisfy it.
+type Node interface {
+ Type() NodeType
+ String() string
+ // Copy does a deep copy of the Node and all its components.
+ // To avoid type assertions, some XxxNodes also have specialized
+ // CopyXxx methods that return *XxxNode.
+ Copy() Node
+ Position() Pos // byte position of start of node in full original input string
+ // tree returns the containing *Tree.
+ // It is unexported so all implementations of Node are in this package.
+ tree() *Tree
+ // writeTo writes the String output to the builder.
+ writeTo(*strings.Builder)
+}
+
+// NodeType identifies the type of a parse tree node.
+type NodeType int
+
+// Pos represents a byte position in the original input text from which
+// this template was parsed.
+type Pos int
+
+func (p Pos) Position() Pos {
+ return p
+}
+
+// Type returns itself and provides an easy default implementation
+// for embedding in a Node. Embedded in all non-trivial Nodes.
+func (t NodeType) Type() NodeType {
+ return t
+}
+
+const (
+ NodeText NodeType = iota // Plain text.
+ NodeAction // A non-control action such as a field evaluation.
+ NodeBool // A boolean constant.
+ NodeChain // A sequence of field accesses.
+ NodeCommand // An element of a pipeline.
+ NodeDot // The cursor, dot.
+ nodeElse // An else action. Not added to tree.
+ nodeEnd // An end action. Not added to tree.
+ NodeField // A field or method name.
+ NodeIdentifier // An identifier; always a function name.
+ NodeIf // An if action.
+ NodeList // A list of Nodes.
+ NodeNil // An untyped nil constant.
+ NodeNumber // A numerical constant.
+ NodePipe // A pipeline of commands.
+ NodeRange // A range action.
+ NodeString // A string constant.
+ NodeTemplate // A template invocation action.
+ NodeVariable // A $ variable.
+ NodeWith // A with action.
+ NodeComment // A comment.
+ NodeBreak // A break action.
+ NodeContinue // A continue action.
+)
+
+// Nodes.
+
+// ListNode holds a sequence of nodes.
+type ListNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Nodes []Node // The element nodes in lexical order.
+}
+
+func (t *Tree) newList(pos Pos) *ListNode {
+ return &ListNode{tr: t, NodeType: NodeList, Pos: pos}
+}
+
+func (l *ListNode) append(n Node) {
+ l.Nodes = append(l.Nodes, n)
+}
+
+func (l *ListNode) tree() *Tree {
+ return l.tr
+}
+
+func (l *ListNode) String() string {
+ var sb strings.Builder
+ l.writeTo(&sb)
+ return sb.String()
+}
+
+func (l *ListNode) writeTo(sb *strings.Builder) {
+ for _, n := range l.Nodes {
+ n.writeTo(sb)
+ }
+}
+
+func (l *ListNode) CopyList() *ListNode {
+ if l == nil {
+ return l
+ }
+ n := l.tr.newList(l.Pos)
+ for _, elem := range l.Nodes {
+ n.append(elem.Copy())
+ }
+ return n
+}
+
+func (l *ListNode) Copy() Node {
+ return l.CopyList()
+}
+
+// TextNode holds plain text.
+type TextNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Text []byte // The text; may span newlines.
+}
+
+func (t *Tree) newText(pos Pos, text string) *TextNode {
+ return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)}
+}
+
+func (t *TextNode) String() string {
+ return fmt.Sprintf(textFormat, t.Text)
+}
+
+func (t *TextNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(t.String())
+}
+
+func (t *TextNode) tree() *Tree {
+ return t.tr
+}
+
+func (t *TextNode) Copy() Node {
+ return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)}
+}
+
+// CommentNode holds a comment.
+type CommentNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Text string // Comment text.
+}
+
+func (t *Tree) newComment(pos Pos, text string) *CommentNode {
+ return &CommentNode{tr: t, NodeType: NodeComment, Pos: pos, Text: text}
+}
+
+func (c *CommentNode) String() string {
+ var sb strings.Builder
+ c.writeTo(&sb)
+ return sb.String()
+}
+
+func (c *CommentNode) writeTo(sb *strings.Builder) {
+ sb.WriteString("{{")
+ sb.WriteString(c.Text)
+ sb.WriteString("}}")
+}
+
+func (c *CommentNode) tree() *Tree {
+ return c.tr
+}
+
+func (c *CommentNode) Copy() Node {
+ return &CommentNode{tr: c.tr, NodeType: NodeComment, Pos: c.Pos, Text: c.Text}
+}
+
+// PipeNode holds a pipeline with an optional variable declaration.
+type PipeNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ IsAssign bool // The variables are being assigned, not declared.
+ Decl []*VariableNode // Variables in lexical order.
+ Cmds []*CommandNode // The commands in lexical order.
+}
+
+func (t *Tree) newPipeline(pos Pos, line int, vars []*VariableNode) *PipeNode {
+ return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: vars}
+}
+
+func (p *PipeNode) append(command *CommandNode) {
+ p.Cmds = append(p.Cmds, command)
+}
+
+func (p *PipeNode) String() string {
+ var sb strings.Builder
+ p.writeTo(&sb)
+ return sb.String()
+}
+
+func (p *PipeNode) writeTo(sb *strings.Builder) {
+ if len(p.Decl) > 0 {
+ for i, v := range p.Decl {
+ if i > 0 {
+ sb.WriteString(", ")
+ }
+ v.writeTo(sb)
+ }
+ sb.WriteString(" := ")
+ }
+ for i, c := range p.Cmds {
+ if i > 0 {
+ sb.WriteString(" | ")
+ }
+ c.writeTo(sb)
+ }
+}
+
+func (p *PipeNode) tree() *Tree {
+ return p.tr
+}
+
+func (p *PipeNode) CopyPipe() *PipeNode {
+ if p == nil {
+ return p
+ }
+ vars := make([]*VariableNode, len(p.Decl))
+ for i, d := range p.Decl {
+ vars[i] = d.Copy().(*VariableNode)
+ }
+ n := p.tr.newPipeline(p.Pos, p.Line, vars)
+ n.IsAssign = p.IsAssign
+ for _, c := range p.Cmds {
+ n.append(c.Copy().(*CommandNode))
+ }
+ return n
+}
+
+func (p *PipeNode) Copy() Node {
+ return p.CopyPipe()
+}
+
+// ActionNode holds an action (something bounded by delimiters).
+// Control actions have their own nodes; ActionNode represents simple
+// ones such as field evaluations and parenthesized pipelines.
+type ActionNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ Pipe *PipeNode // The pipeline in the action.
+}
+
+func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode {
+ return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe}
+}
+
+func (a *ActionNode) String() string {
+ var sb strings.Builder
+ a.writeTo(&sb)
+ return sb.String()
+}
+
+func (a *ActionNode) writeTo(sb *strings.Builder) {
+ sb.WriteString("{{")
+ a.Pipe.writeTo(sb)
+ sb.WriteString("}}")
+}
+
+func (a *ActionNode) tree() *Tree {
+ return a.tr
+}
+
+func (a *ActionNode) Copy() Node {
+ return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe())
+
+}
+
+// CommandNode holds a command (a pipeline inside an evaluating action).
+type CommandNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Args []Node // Arguments in lexical order: Identifier, field, or constant.
+}
+
+func (t *Tree) newCommand(pos Pos) *CommandNode {
+ return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos}
+}
+
+func (c *CommandNode) append(arg Node) {
+ c.Args = append(c.Args, arg)
+}
+
+func (c *CommandNode) String() string {
+ var sb strings.Builder
+ c.writeTo(&sb)
+ return sb.String()
+}
+
+func (c *CommandNode) writeTo(sb *strings.Builder) {
+ for i, arg := range c.Args {
+ if i > 0 {
+ sb.WriteByte(' ')
+ }
+ if arg, ok := arg.(*PipeNode); ok {
+ sb.WriteByte('(')
+ arg.writeTo(sb)
+ sb.WriteByte(')')
+ continue
+ }
+ arg.writeTo(sb)
+ }
+}
+
+func (c *CommandNode) tree() *Tree {
+ return c.tr
+}
+
+func (c *CommandNode) Copy() Node {
+ if c == nil {
+ return c
+ }
+ n := c.tr.newCommand(c.Pos)
+ for _, c := range c.Args {
+ n.append(c.Copy())
+ }
+ return n
+}
+
+// IdentifierNode holds an identifier.
+type IdentifierNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Ident string // The identifier's name.
+}
+
+// NewIdentifier returns a new IdentifierNode with the given identifier name.
+func NewIdentifier(ident string) *IdentifierNode {
+ return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident}
+}
+
+// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature.
+// Chained for convenience.
+// TODO: fix one day?
+func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode {
+ i.Pos = pos
+ return i
+}
+
+// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature.
+// Chained for convenience.
+// TODO: fix one day?
+func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode {
+ i.tr = t
+ return i
+}
+
+func (i *IdentifierNode) String() string {
+ return i.Ident
+}
+
+func (i *IdentifierNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(i.String())
+}
+
+func (i *IdentifierNode) tree() *Tree {
+ return i.tr
+}
+
+func (i *IdentifierNode) Copy() Node {
+ return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos)
+}
+
+// VariableNode holds a list of variable names, possibly with chained field
+// accesses. The dollar sign is part of the (first) name.
+type VariableNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Ident []string // Variable name and fields in lexical order.
+}
+
+func (t *Tree) newVariable(pos Pos, ident string) *VariableNode {
+ return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")}
+}
+
+func (v *VariableNode) String() string {
+ var sb strings.Builder
+ v.writeTo(&sb)
+ return sb.String()
+}
+
+func (v *VariableNode) writeTo(sb *strings.Builder) {
+ for i, id := range v.Ident {
+ if i > 0 {
+ sb.WriteByte('.')
+ }
+ sb.WriteString(id)
+ }
+}
+
+func (v *VariableNode) tree() *Tree {
+ return v.tr
+}
+
+func (v *VariableNode) Copy() Node {
+ return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)}
+}
+
+// DotNode holds the special identifier '.'.
+type DotNode struct {
+ NodeType
+ Pos
+ tr *Tree
+}
+
+func (t *Tree) newDot(pos Pos) *DotNode {
+ return &DotNode{tr: t, NodeType: NodeDot, Pos: pos}
+}
+
+func (d *DotNode) Type() NodeType {
+ // Override method on embedded NodeType for API compatibility.
+ // TODO: Not really a problem; could change API without effect but
+ // api tool complains.
+ return NodeDot
+}
+
+func (d *DotNode) String() string {
+ return "."
+}
+
+func (d *DotNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(d.String())
+}
+
+func (d *DotNode) tree() *Tree {
+ return d.tr
+}
+
+func (d *DotNode) Copy() Node {
+ return d.tr.newDot(d.Pos)
+}
+
+// NilNode holds the special identifier 'nil' representing an untyped nil constant.
+type NilNode struct {
+ NodeType
+ Pos
+ tr *Tree
+}
+
+func (t *Tree) newNil(pos Pos) *NilNode {
+ return &NilNode{tr: t, NodeType: NodeNil, Pos: pos}
+}
+
+func (n *NilNode) Type() NodeType {
+ // Override method on embedded NodeType for API compatibility.
+ // TODO: Not really a problem; could change API without effect but
+ // api tool complains.
+ return NodeNil
+}
+
+func (n *NilNode) String() string {
+ return "nil"
+}
+
+func (n *NilNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(n.String())
+}
+
+func (n *NilNode) tree() *Tree {
+ return n.tr
+}
+
+func (n *NilNode) Copy() Node {
+ return n.tr.newNil(n.Pos)
+}
+
+// FieldNode holds a field (identifier starting with '.').
+// The names may be chained ('.x.y').
+// The period is dropped from each ident.
+type FieldNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Ident []string // The identifiers in lexical order.
+}
+
+func (t *Tree) newField(pos Pos, ident string) *FieldNode {
+ return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period
+}
+
+func (f *FieldNode) String() string {
+ var sb strings.Builder
+ f.writeTo(&sb)
+ return sb.String()
+}
+
+func (f *FieldNode) writeTo(sb *strings.Builder) {
+ for _, id := range f.Ident {
+ sb.WriteByte('.')
+ sb.WriteString(id)
+ }
+}
+
+func (f *FieldNode) tree() *Tree {
+ return f.tr
+}
+
+func (f *FieldNode) Copy() Node {
+ return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)}
+}
+
+// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.').
+// The names may be chained ('.x.y').
+// The periods are dropped from each ident.
+type ChainNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Node Node
+ Field []string // The identifiers in lexical order.
+}
+
+func (t *Tree) newChain(pos Pos, node Node) *ChainNode {
+ return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node}
+}
+
+// Add adds the named field (which should start with a period) to the end of the chain.
+func (c *ChainNode) Add(field string) {
+ if len(field) == 0 || field[0] != '.' {
+ panic("no dot in field")
+ }
+ field = field[1:] // Remove leading dot.
+ if field == "" {
+ panic("empty field")
+ }
+ c.Field = append(c.Field, field)
+}
+
+func (c *ChainNode) String() string {
+ var sb strings.Builder
+ c.writeTo(&sb)
+ return sb.String()
+}
+
+func (c *ChainNode) writeTo(sb *strings.Builder) {
+ if _, ok := c.Node.(*PipeNode); ok {
+ sb.WriteByte('(')
+ c.Node.writeTo(sb)
+ sb.WriteByte(')')
+ } else {
+ c.Node.writeTo(sb)
+ }
+ for _, field := range c.Field {
+ sb.WriteByte('.')
+ sb.WriteString(field)
+ }
+}
+
+func (c *ChainNode) tree() *Tree {
+ return c.tr
+}
+
+func (c *ChainNode) Copy() Node {
+ return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)}
+}
+
+// BoolNode holds a boolean constant.
+type BoolNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ True bool // The value of the boolean constant.
+}
+
+func (t *Tree) newBool(pos Pos, true bool) *BoolNode {
+ return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true}
+}
+
+func (b *BoolNode) String() string {
+ if b.True {
+ return "true"
+ }
+ return "false"
+}
+
+func (b *BoolNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(b.String())
+}
+
+func (b *BoolNode) tree() *Tree {
+ return b.tr
+}
+
+func (b *BoolNode) Copy() Node {
+ return b.tr.newBool(b.Pos, b.True)
+}
+
+// NumberNode holds a number: signed or unsigned integer, float, or complex.
+// The value is parsed and stored under all the types that can represent the value.
+// This simulates in a small amount of code the behavior of Go's ideal constants.
+type NumberNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ IsInt bool // Number has an integral value.
+ IsUint bool // Number has an unsigned integral value.
+ IsFloat bool // Number has a floating-point value.
+ IsComplex bool // Number is complex.
+ Int64 int64 // The signed integer value.
+ Uint64 uint64 // The unsigned integer value.
+ Float64 float64 // The floating-point value.
+ Complex128 complex128 // The complex value.
+ Text string // The original textual representation from the input.
+}
+
+func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) {
+ n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text}
+ switch typ {
+ case itemCharConstant:
+ rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0])
+ if err != nil {
+ return nil, err
+ }
+ if tail != "'" {
+ return nil, fmt.Errorf("malformed character constant: %s", text)
+ }
+ n.Int64 = int64(rune)
+ n.IsInt = true
+ n.Uint64 = uint64(rune)
+ n.IsUint = true
+ n.Float64 = float64(rune) // odd but those are the rules.
+ n.IsFloat = true
+ return n, nil
+ case itemComplex:
+ // fmt.Sscan can parse the pair, so let it do the work.
+ if _, err := fmt.Sscan(text, &n.Complex128); err != nil {
+ return nil, err
+ }
+ n.IsComplex = true
+ n.simplifyComplex()
+ return n, nil
+ }
+ // Imaginary constants can only be complex unless they are zero.
+ if len(text) > 0 && text[len(text)-1] == 'i' {
+ f, err := strconv.ParseFloat(text[:len(text)-1], 64)
+ if err == nil {
+ n.IsComplex = true
+ n.Complex128 = complex(0, f)
+ n.simplifyComplex()
+ return n, nil
+ }
+ }
+ // Do integer test first so we get 0x123 etc.
+ u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below.
+ if err == nil {
+ n.IsUint = true
+ n.Uint64 = u
+ }
+ i, err := strconv.ParseInt(text, 0, 64)
+ if err == nil {
+ n.IsInt = true
+ n.Int64 = i
+ if i == 0 {
+ n.IsUint = true // in case of -0.
+ n.Uint64 = u
+ }
+ }
+ // If an integer extraction succeeded, promote the float.
+ if n.IsInt {
+ n.IsFloat = true
+ n.Float64 = float64(n.Int64)
+ } else if n.IsUint {
+ n.IsFloat = true
+ n.Float64 = float64(n.Uint64)
+ } else {
+ f, err := strconv.ParseFloat(text, 64)
+ if err == nil {
+ // If we parsed it as a float but it looks like an integer,
+ // it's a huge number too large to fit in an int. Reject it.
+ if !strings.ContainsAny(text, ".eEpP") {
+ return nil, fmt.Errorf("integer overflow: %q", text)
+ }
+ n.IsFloat = true
+ n.Float64 = f
+ // If a floating-point extraction succeeded, extract the int if needed.
+ if !n.IsInt && float64(int64(f)) == f {
+ n.IsInt = true
+ n.Int64 = int64(f)
+ }
+ if !n.IsUint && float64(uint64(f)) == f {
+ n.IsUint = true
+ n.Uint64 = uint64(f)
+ }
+ }
+ }
+ if !n.IsInt && !n.IsUint && !n.IsFloat {
+ return nil, fmt.Errorf("illegal number syntax: %q", text)
+ }
+ return n, nil
+}
+
+// simplifyComplex pulls out any other types that are represented by the complex number.
+// These all require that the imaginary part be zero.
+func (n *NumberNode) simplifyComplex() {
+ n.IsFloat = imag(n.Complex128) == 0
+ if n.IsFloat {
+ n.Float64 = real(n.Complex128)
+ n.IsInt = float64(int64(n.Float64)) == n.Float64
+ if n.IsInt {
+ n.Int64 = int64(n.Float64)
+ }
+ n.IsUint = float64(uint64(n.Float64)) == n.Float64
+ if n.IsUint {
+ n.Uint64 = uint64(n.Float64)
+ }
+ }
+}
+
+func (n *NumberNode) String() string {
+ return n.Text
+}
+
+func (n *NumberNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(n.String())
+}
+
+func (n *NumberNode) tree() *Tree {
+ return n.tr
+}
+
+func (n *NumberNode) Copy() Node {
+ nn := new(NumberNode)
+ *nn = *n // Easy, fast, correct.
+ return nn
+}
+
+// StringNode holds a string constant. The value has been "unquoted".
+type StringNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Quoted string // The original text of the string, with quotes.
+ Text string // The string, after quote processing.
+}
+
+func (t *Tree) newString(pos Pos, orig, text string) *StringNode {
+ return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text}
+}
+
+func (s *StringNode) String() string {
+ return s.Quoted
+}
+
+func (s *StringNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(s.String())
+}
+
+func (s *StringNode) tree() *Tree {
+ return s.tr
+}
+
+func (s *StringNode) Copy() Node {
+ return s.tr.newString(s.Pos, s.Quoted, s.Text)
+}
+
+// endNode represents an {{end}} action.
+// It does not appear in the final parse tree.
+type endNode struct {
+ NodeType
+ Pos
+ tr *Tree
+}
+
+func (t *Tree) newEnd(pos Pos) *endNode {
+ return &endNode{tr: t, NodeType: nodeEnd, Pos: pos}
+}
+
+func (e *endNode) String() string {
+ return "{{end}}"
+}
+
+func (e *endNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(e.String())
+}
+
+func (e *endNode) tree() *Tree {
+ return e.tr
+}
+
+func (e *endNode) Copy() Node {
+ return e.tr.newEnd(e.Pos)
+}
+
+// elseNode represents an {{else}} action. Does not appear in the final tree.
+type elseNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+}
+
+func (t *Tree) newElse(pos Pos, line int) *elseNode {
+ return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line}
+}
+
+func (e *elseNode) Type() NodeType {
+ return nodeElse
+}
+
+func (e *elseNode) String() string {
+ return "{{else}}"
+}
+
+func (e *elseNode) writeTo(sb *strings.Builder) {
+ sb.WriteString(e.String())
+}
+
+func (e *elseNode) tree() *Tree {
+ return e.tr
+}
+
+func (e *elseNode) Copy() Node {
+ return e.tr.newElse(e.Pos, e.Line)
+}
+
+// BranchNode is the common representation of if, range, and with.
+type BranchNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ Pipe *PipeNode // The pipeline to be evaluated.
+ List *ListNode // What to execute if the value is non-empty.
+ ElseList *ListNode // What to execute if the value is empty (nil if absent).
+}
+
+func (b *BranchNode) String() string {
+ var sb strings.Builder
+ b.writeTo(&sb)
+ return sb.String()
+}
+
+func (b *BranchNode) writeTo(sb *strings.Builder) {
+ name := ""
+ switch b.NodeType {
+ case NodeIf:
+ name = "if"
+ case NodeRange:
+ name = "range"
+ case NodeWith:
+ name = "with"
+ default:
+ panic("unknown branch type")
+ }
+ sb.WriteString("{{")
+ sb.WriteString(name)
+ sb.WriteByte(' ')
+ b.Pipe.writeTo(sb)
+ sb.WriteString("}}")
+ b.List.writeTo(sb)
+ if b.ElseList != nil {
+ sb.WriteString("{{else}}")
+ b.ElseList.writeTo(sb)
+ }
+ sb.WriteString("{{end}}")
+}
+
+func (b *BranchNode) tree() *Tree {
+ return b.tr
+}
+
+func (b *BranchNode) Copy() Node {
+ switch b.NodeType {
+ case NodeIf:
+ return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
+ case NodeRange:
+ return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
+ case NodeWith:
+ return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList)
+ default:
+ panic("unknown branch type")
+ }
+}
+
+// IfNode represents an {{if}} action and its commands.
+type IfNode struct {
+ BranchNode
+}
+
+func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode {
+ return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
+}
+
+func (i *IfNode) Copy() Node {
+ return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList())
+}
+
+// BreakNode represents a {{break}} action.
+type BreakNode struct {
+ tr *Tree
+ NodeType
+ Pos
+ Line int
+}
+
+func (t *Tree) newBreak(pos Pos, line int) *BreakNode {
+ return &BreakNode{tr: t, NodeType: NodeBreak, Pos: pos, Line: line}
+}
+
+func (b *BreakNode) Copy() Node { return b.tr.newBreak(b.Pos, b.Line) }
+func (b *BreakNode) String() string { return "{{break}}" }
+func (b *BreakNode) tree() *Tree { return b.tr }
+func (b *BreakNode) writeTo(sb *strings.Builder) { sb.WriteString("{{break}}") }
+
+// ContinueNode represents a {{continue}} action.
+type ContinueNode struct {
+ tr *Tree
+ NodeType
+ Pos
+ Line int
+}
+
+func (t *Tree) newContinue(pos Pos, line int) *ContinueNode {
+ return &ContinueNode{tr: t, NodeType: NodeContinue, Pos: pos, Line: line}
+}
+
+func (c *ContinueNode) Copy() Node { return c.tr.newContinue(c.Pos, c.Line) }
+func (c *ContinueNode) String() string { return "{{continue}}" }
+func (c *ContinueNode) tree() *Tree { return c.tr }
+func (c *ContinueNode) writeTo(sb *strings.Builder) { sb.WriteString("{{continue}}") }
+
+// RangeNode represents a {{range}} action and its commands.
+type RangeNode struct {
+ BranchNode
+}
+
+func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode {
+ return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
+}
+
+func (r *RangeNode) Copy() Node {
+ return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList())
+}
+
+// WithNode represents a {{with}} action and its commands.
+type WithNode struct {
+ BranchNode
+}
+
+func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode {
+ return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}}
+}
+
+func (w *WithNode) Copy() Node {
+ return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList())
+}
+
+// TemplateNode represents a {{template}} action.
+type TemplateNode struct {
+ NodeType
+ Pos
+ tr *Tree
+ Line int // The line number in the input. Deprecated: Kept for compatibility.
+ Name string // The name of the template (unquoted).
+ Pipe *PipeNode // The command to evaluate as dot for the template.
+}
+
+func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode {
+ return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe}
+}
+
+func (t *TemplateNode) String() string {
+ var sb strings.Builder
+ t.writeTo(&sb)
+ return sb.String()
+}
+
+func (t *TemplateNode) writeTo(sb *strings.Builder) {
+ sb.WriteString("{{template ")
+ sb.WriteString(strconv.Quote(t.Name))
+ if t.Pipe != nil {
+ sb.WriteByte(' ')
+ t.Pipe.writeTo(sb)
+ }
+ sb.WriteString("}}")
+}
+
+func (t *TemplateNode) tree() *Tree {
+ return t.tr
+}
+
+func (t *TemplateNode) Copy() Node {
+ return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe())
+}
diff --git a/tpl/internal/go_templates/texttemplate/parse/parse.go b/tpl/internal/go_templates/texttemplate/parse/parse.go
new file mode 100644
index 000000000..ce548b088
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/parse/parse.go
@@ -0,0 +1,795 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package parse builds parse trees for templates as defined by text/template
+// and html/template. Clients should use those packages to construct templates
+// rather than this one, which provides shared internal data structures not
+// intended for general use.
+package parse
+
+import (
+ "bytes"
+ "fmt"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
+// Tree is the representation of a single parsed template.
+type Tree struct {
+ Name string // name of the template represented by the tree.
+ ParseName string // name of the top-level template during parsing, for error messages.
+ Root *ListNode // top-level root of the tree.
+ Mode Mode // parsing mode.
+ text string // text parsed to create the template (or its parent)
+ // Parsing only; cleared after parse.
+ funcs []map[string]any
+ lex *lexer
+ token [3]item // three-token lookahead for parser.
+ peekCount int
+ vars []string // variables defined at the moment.
+ treeSet map[string]*Tree
+ actionLine int // line of left delim starting action
+ rangeDepth int
+}
+
+// A mode value is a set of flags (or 0). Modes control parser behavior.
+type Mode uint
+
+const (
+ ParseComments Mode = 1 << iota // parse comments and add them to AST
+ SkipFuncCheck // do not check that functions are defined
+)
+
+// Copy returns a copy of the Tree. Any parsing state is discarded.
+func (t *Tree) Copy() *Tree {
+ if t == nil {
+ return nil
+ }
+ return &Tree{
+ Name: t.Name,
+ ParseName: t.ParseName,
+ Root: t.Root.CopyList(),
+ text: t.text,
+ }
+}
+
+// Parse returns a map from template name to parse.Tree, created by parsing the
+// templates described in the argument string. The top-level template will be
+// given the specified name. If an error is encountered, parsing stops and an
+// empty map is returned with the error.
+func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]any) (map[string]*Tree, error) {
+ treeSet := make(map[string]*Tree)
+ t := New(name)
+ t.text = text
+ _, err := t.Parse(text, leftDelim, rightDelim, treeSet, funcs...)
+ return treeSet, err
+}
+
+// next returns the next token.
+func (t *Tree) next() item {
+ if t.peekCount > 0 {
+ t.peekCount--
+ } else {
+ t.token[0] = t.lex.nextItem()
+ }
+ return t.token[t.peekCount]
+}
+
+// backup backs the input stream up one token.
+func (t *Tree) backup() {
+ t.peekCount++
+}
+
+// backup2 backs the input stream up two tokens.
+// The zeroth token is already there.
+func (t *Tree) backup2(t1 item) {
+ t.token[1] = t1
+ t.peekCount = 2
+}
+
+// backup3 backs the input stream up three tokens.
+// The zeroth token is already there.
+func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back.
+ t.token[1] = t1
+ t.token[2] = t2
+ t.peekCount = 3
+}
+
+// peek returns but does not consume the next token.
+func (t *Tree) peek() item {
+ if t.peekCount > 0 {
+ return t.token[t.peekCount-1]
+ }
+ t.peekCount = 1
+ t.token[0] = t.lex.nextItem()
+ return t.token[0]
+}
+
+// nextNonSpace returns the next non-space token.
+func (t *Tree) nextNonSpace() (token item) {
+ for {
+ token = t.next()
+ if token.typ != itemSpace {
+ break
+ }
+ }
+ return token
+}
+
+// peekNonSpace returns but does not consume the next non-space token.
+func (t *Tree) peekNonSpace() item {
+ token := t.nextNonSpace()
+ t.backup()
+ return token
+}
+
+// Parsing.
+
+// New allocates a new parse tree with the given name.
+func New(name string, funcs ...map[string]any) *Tree {
+ return &Tree{
+ Name: name,
+ funcs: funcs,
+ }
+}
+
+// ErrorContext returns a textual representation of the location of the node in the input text.
+// The receiver is only used when the node does not have a pointer to the tree inside,
+// which can occur in old code.
+func (t *Tree) ErrorContext(n Node) (location, context string) {
+ pos := int(n.Position())
+ tree := n.tree()
+ if tree == nil {
+ tree = t
+ }
+ text := tree.text[:pos]
+ byteNum := strings.LastIndex(text, "\n")
+ if byteNum == -1 {
+ byteNum = pos // On first line.
+ } else {
+ byteNum++ // After the newline.
+ byteNum = pos - byteNum
+ }
+ lineNum := 1 + strings.Count(text, "\n")
+ context = n.String()
+ return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context
+}
+
+// errorf formats the error and terminates processing.
+func (t *Tree) errorf(format string, args ...any) {
+ t.Root = nil
+ format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.token[0].line, format)
+ panic(fmt.Errorf(format, args...))
+}
+
+// error terminates processing.
+func (t *Tree) error(err error) {
+ t.errorf("%s", err)
+}
+
+// expect consumes the next token and guarantees it has the required type.
+func (t *Tree) expect(expected itemType, context string) item {
+ token := t.nextNonSpace()
+ if token.typ != expected {
+ t.unexpected(token, context)
+ }
+ return token
+}
+
+// expectOneOf consumes the next token and guarantees it has one of the required types.
+func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item {
+ token := t.nextNonSpace()
+ if token.typ != expected1 && token.typ != expected2 {
+ t.unexpected(token, context)
+ }
+ return token
+}
+
+// unexpected complains about the token and terminates processing.
+func (t *Tree) unexpected(token item, context string) {
+ if token.typ == itemError {
+ extra := ""
+ if t.actionLine != 0 && t.actionLine != token.line {
+ extra = fmt.Sprintf(" in action started at %s:%d", t.ParseName, t.actionLine)
+ if strings.HasSuffix(token.val, " action") {
+ extra = extra[len(" in action"):] // avoid "action in action"
+ }
+ }
+ t.errorf("%s%s", token, extra)
+ }
+ t.errorf("unexpected %s in %s", token, context)
+}
+
+// recover is the handler that turns panics into returns from the top level of Parse.
+func (t *Tree) recover(errp *error) {
+ e := recover()
+ if e != nil {
+ if _, ok := e.(runtime.Error); ok {
+ panic(e)
+ }
+ if t != nil {
+ t.lex.drain()
+ t.stopParse()
+ }
+ *errp = e.(error)
+ }
+}
+
+// startParse initializes the parser, using the lexer.
+func (t *Tree) startParse(funcs []map[string]any, lex *lexer, treeSet map[string]*Tree) {
+ t.Root = nil
+ t.lex = lex
+ t.vars = []string{"$"}
+ t.funcs = funcs
+ t.treeSet = treeSet
+ lex.breakOK = !t.hasFunction("break")
+ lex.continueOK = !t.hasFunction("continue")
+}
+
+// stopParse terminates parsing.
+func (t *Tree) stopParse() {
+ t.lex = nil
+ t.vars = nil
+ t.funcs = nil
+ t.treeSet = nil
+}
+
+// Parse parses the template definition string to construct a representation of
+// the template for execution. If either action delimiter string is empty, the
+// default ("{{" or "}}") is used. Embedded template definitions are added to
+// the treeSet map.
+func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]any) (tree *Tree, err error) {
+ // recover converts errorf panics from anywhere below into the returned err.
+ defer t.recover(&err)
+ t.ParseName = t.Name
+ emitComment := t.Mode&ParseComments != 0
+ t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim, emitComment), treeSet)
+ t.text = text
+ t.parse()
+ t.add()
+ t.stopParse()
+ return t, nil
+}
+
+// add adds tree to t.treeSet. A new definition may silently replace an
+// existing one only when the existing tree is empty (space/comments only);
+// two non-empty definitions of the same name are an error.
+func (t *Tree) add() {
+ tree := t.treeSet[t.Name]
+ if tree == nil || IsEmptyTree(tree.Root) {
+ t.treeSet[t.Name] = t
+ return
+ }
+ if !IsEmptyTree(t.Root) {
+ t.errorf("template: multiple definition of template %q", t.Name)
+ }
+}
+
+// IsEmptyTree reports whether this tree (node) is empty of everything but space or comments.
+// Note the deliberately empty cases (ActionNode, IfNode, RangeNode,
+// TemplateNode, WithNode): they do nothing in the switch and so fall
+// to the final `return false` — those nodes always make a tree non-empty.
+func IsEmptyTree(n Node) bool {
+ switch n := n.(type) {
+ case nil:
+ return true
+ case *ActionNode:
+ case *CommentNode:
+ return true
+ case *IfNode:
+ case *ListNode:
+ // A list is empty iff every child is empty.
+ for _, node := range n.Nodes {
+ if !IsEmptyTree(node) {
+ return false
+ }
+ }
+ return true
+ case *RangeNode:
+ case *TemplateNode:
+ case *TextNode:
+ return len(bytes.TrimSpace(n.Text)) == 0
+ case *WithNode:
+ default:
+ panic("unknown node: " + n.String())
+ }
+ return false
+}
+
+// parse is the top-level parser for a template, essentially the same
+// as itemList except it also parses {{define}} actions.
+// It runs to EOF.
+func (t *Tree) parse() {
+ t.Root = t.newList(t.peek().pos)
+ for t.peek().typ != itemEOF {
+ if t.peek().typ == itemLeftDelim {
+ delim := t.next()
+ if t.nextNonSpace().typ == itemDefine {
+ // A definition is parsed into its own sub-Tree, sharing
+ // the lexer; it installs itself into t.treeSet.
+ newT := New("definition") // name will be updated once we know it.
+ newT.text = t.text
+ newT.Mode = t.Mode
+ newT.ParseName = t.ParseName
+ newT.startParse(t.funcs, t.lex, t.treeSet)
+ newT.parseDefinition()
+ continue
+ }
+ // Not a define: push the delimiter back and parse normally.
+ t.backup2(delim)
+ }
+ switch n := t.textOrAction(); n.Type() {
+ case nodeEnd, nodeElse:
+ t.errorf("unexpected %s", n)
+ default:
+ t.Root.append(n)
+ }
+ }
+}
+
+// parseDefinition parses a {{define}} ... {{end}} template definition and
+// installs the definition in t.treeSet. The "define" keyword has already
+// been scanned.
+func (t *Tree) parseDefinition() {
+ const context = "define clause"
+ // The template name must be a quoted (or raw) string literal.
+ name := t.expectOneOf(itemString, itemRawString, context)
+ var err error
+ t.Name, err = strconv.Unquote(name.val)
+ if err != nil {
+ t.error(err)
+ }
+ t.expect(itemRightDelim, context)
+ var end Node
+ t.Root, end = t.itemList()
+ if end.Type() != nodeEnd {
+ t.errorf("unexpected %s in %s", end, context)
+ }
+ t.add()
+ t.stopParse()
+}
+
+// itemList:
+// textOrAction*
+// Terminates at {{end}} or {{else}}, returned separately.
+// Reaching EOF without a terminator is an error (missing {{end}}).
+func (t *Tree) itemList() (list *ListNode, next Node) {
+ list = t.newList(t.peekNonSpace().pos)
+ for t.peekNonSpace().typ != itemEOF {
+ n := t.textOrAction()
+ switch n.Type() {
+ case nodeEnd, nodeElse:
+ return list, n
+ }
+ list.append(n)
+ }
+ t.errorf("unexpected EOF")
+ return
+}
+
+// textOrAction:
+// text | comment | action
+func (t *Tree) textOrAction() Node {
+ switch token := t.nextNonSpace(); token.typ {
+ case itemText:
+ return t.newText(token.pos, token.val)
+ case itemLeftDelim:
+ // Remember where the action started so multi-line errors can
+ // point back to it; cleared when the action finishes parsing.
+ t.actionLine = token.line
+ defer t.clearActionLine()
+ return t.action()
+ case itemComment:
+ return t.newComment(token.pos, token.val)
+ default:
+ t.unexpected(token, "input")
+ }
+ return nil
+}
+
+// clearActionLine resets the recorded start line of the current action.
+func (t *Tree) clearActionLine() {
+ t.actionLine = 0
+}
+
+// Action:
+// control
+// command ("|" command)*
+// Left delim is past. Now get actions.
+// First word could be a keyword such as range.
+func (t *Tree) action() (n Node) {
+ // Keyword dispatch; each xxxControl consumes through the closing delimiter.
+ switch token := t.nextNonSpace(); token.typ {
+ case itemBlock:
+ return t.blockControl()
+ case itemBreak:
+ return t.breakControl(token.pos, token.line)
+ case itemContinue:
+ return t.continueControl(token.pos, token.line)
+ case itemElse:
+ return t.elseControl()
+ case itemEnd:
+ return t.endControl()
+ case itemIf:
+ return t.ifControl()
+ case itemRange:
+ return t.rangeControl()
+ case itemTemplate:
+ return t.templateControl()
+ case itemWith:
+ return t.withControl()
+ }
+ // Not a keyword: the whole action is a pipeline.
+ t.backup()
+ token := t.peek()
+ // Do not pop variables; they persist until "end".
+ return t.newAction(token.pos, token.line, t.pipeline("command", itemRightDelim))
+}
+
+// Break:
+// {{break}}
+// Break keyword is past.
+func (t *Tree) breakControl(pos Pos, line int) Node {
+ // Check syntax first so a malformed action reports the token error,
+ // then enforce that break appears lexically inside a range.
+ if token := t.nextNonSpace(); token.typ != itemRightDelim {
+ t.unexpected(token, "{{break}}")
+ }
+ if t.rangeDepth == 0 {
+ t.errorf("{{break}} outside {{range}}")
+ }
+ return t.newBreak(pos, line)
+}
+
+// Continue:
+// {{continue}}
+// Continue keyword is past.
+func (t *Tree) continueControl(pos Pos, line int) Node {
+ // Same validation order as breakControl: syntax, then range nesting.
+ if token := t.nextNonSpace(); token.typ != itemRightDelim {
+ t.unexpected(token, "{{continue}}")
+ }
+ if t.rangeDepth == 0 {
+ t.errorf("{{continue}} outside {{range}}")
+ }
+ return t.newContinue(pos, line)
+}
+
+// Pipeline:
+// declarations? command ('|' command)*
+// end is the token type that terminates the pipeline (right delimiter
+// or, for parenthesized pipelines, right paren).
+func (t *Tree) pipeline(context string, end itemType) (pipe *PipeNode) {
+ token := t.peekNonSpace()
+ pipe = t.newPipeline(token.pos, token.line, nil)
+ // Are there declarations or assignments?
+decls:
+ if v := t.peekNonSpace(); v.typ == itemVariable {
+ t.next()
+ // Since space is a token, we need 3-token look-ahead here in the worst case:
+ // in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an
+ // argument variable rather than a declaration. So remember the token
+ // adjacent to the variable so we can push it back if necessary.
+ tokenAfterVariable := t.peek()
+ next := t.peekNonSpace()
+ switch {
+ case next.typ == itemAssign, next.typ == itemDeclare:
+ pipe.IsAssign = next.typ == itemAssign
+ t.nextNonSpace()
+ pipe.Decl = append(pipe.Decl, t.newVariable(v.pos, v.val))
+ t.vars = append(t.vars, v.val)
+ case next.typ == itemChar && next.val == ",":
+ // "$x," — only range pipelines may declare a second variable.
+ t.nextNonSpace()
+ pipe.Decl = append(pipe.Decl, t.newVariable(v.pos, v.val))
+ t.vars = append(t.vars, v.val)
+ if context == "range" && len(pipe.Decl) < 2 {
+ switch t.peekNonSpace().typ {
+ case itemVariable, itemRightDelim, itemRightParen:
+ // second initialized variable in a range pipeline
+ goto decls
+ default:
+ t.errorf("range can only initialize variables")
+ }
+ }
+ t.errorf("too many declarations in %s", context)
+ case tokenAfterVariable.typ == itemSpace:
+ t.backup3(v, tokenAfterVariable)
+ default:
+ t.backup2(v)
+ }
+ }
+ // Collect commands until the end token; errorf/unexpected panic on
+ // malformed input, so the loop always exits through a return or panic.
+ for {
+ switch token := t.nextNonSpace(); token.typ {
+ case end:
+ // At this point, the pipeline is complete
+ t.checkPipeline(pipe, context)
+ return
+ case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier,
+ itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen:
+ t.backup()
+ pipe.append(t.command())
+ default:
+ t.unexpected(token, context)
+ }
+ }
+}
+
+// checkPipeline validates a completed pipeline: it must be non-empty, and
+// every stage after the first must start with something executable
+// (a function or method), not a bare literal or dot.
+func (t *Tree) checkPipeline(pipe *PipeNode, context string) {
+ // Reject empty pipelines
+ if len(pipe.Cmds) == 0 {
+ t.errorf("missing value for %s", context)
+ }
+ // Only the first command of a pipeline can start with a non executable operand
+ for i, c := range pipe.Cmds[1:] {
+ switch c.Args[0].Type() {
+ case NodeBool, NodeDot, NodeNil, NodeNumber, NodeString:
+ // With A|B|C, pipeline stage 2 is B
+ t.errorf("non executable command in pipeline stage %d", i+2)
+ }
+ }
+}
+
+// parseControl parses the common body shape of if/range/with:
+// pipeline, item list, optional {{else}} list, {{end}}. Variables
+// declared inside are scoped to the control: the deferred popVars
+// restores the variable stack to its length on entry.
+func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) {
+ defer t.popVars(len(t.vars))
+ pipe = t.pipeline(context, itemRightDelim)
+ if context == "range" {
+ t.rangeDepth++
+ }
+ var next Node
+ list, next = t.itemList()
+ if context == "range" {
+ t.rangeDepth--
+ }
+ switch next.Type() {
+ case nodeEnd: //done
+ case nodeElse:
+ if allowElseIf {
+ // Special case for "else if". If the "else" is followed immediately by an "if",
+ // the elseControl will have left the "if" token pending. Treat
+ // {{if a}}_{{else if b}}_{{end}}
+ // as
+ // {{if a}}_{{else}}{{if b}}_{{end}}{{end}}.
+ // To do this, parse the if as usual and stop at its {{end}}; the subsequent {{end}}
+ // is assumed. This technique works even for long if-else-if chains.
+ // TODO: Should we allow else-if in with and range?
+ if t.peek().typ == itemIf {
+ t.next() // Consume the "if" token.
+ elseList = t.newList(next.Position())
+ elseList.append(t.ifControl())
+ // Do not consume the next item - only one {{end}} required.
+ break
+ }
+ }
+ elseList, next = t.itemList()
+ if next.Type() != nodeEnd {
+ t.errorf("expected end; found %s", next)
+ }
+ }
+ return pipe.Position(), pipe.Line, pipe, list, elseList
+}
+
+// If:
+// {{if pipeline}} itemList {{end}}
+// {{if pipeline}} itemList {{else}} itemList {{end}}
+// If keyword is past.
+func (t *Tree) ifControl() Node {
+ // Only "if" permits the else-if rewrite (allowElseIf=true).
+ return t.newIf(t.parseControl(true, "if"))
+}
+
+// Range:
+// {{range pipeline}} itemList {{end}}
+// {{range pipeline}} itemList {{else}} itemList {{end}}
+// Range keyword is past.
+func (t *Tree) rangeControl() Node {
+ r := t.newRange(t.parseControl(false, "range"))
+ return r
+}
+
+// With:
+// {{with pipeline}} itemList {{end}}
+// {{with pipeline}} itemList {{else}} itemList {{end}}
+// With keyword is past.
+func (t *Tree) withControl() Node {
+ return t.newWith(t.parseControl(false, "with"))
+}
+
+// End:
+// {{end}}
+// End keyword is past.
+func (t *Tree) endControl() Node {
+ return t.newEnd(t.expect(itemRightDelim, "end").pos)
+}
+
+// Else:
+// {{else}}
+// Else keyword is past.
+func (t *Tree) elseControl() Node {
+ // Special case for "else if".
+ peek := t.peekNonSpace()
+ if peek.typ == itemIf {
+ // We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ".
+ // The "if" token is deliberately left pending for parseControl to consume.
+ return t.newElse(peek.pos, peek.line)
+ }
+ token := t.expect(itemRightDelim, "else")
+ return t.newElse(token.pos, token.line)
+}
+
+// Block:
+// {{block stringValue pipeline}}
+// Block keyword is past.
+// The name must be something that can evaluate to a string.
+// The pipeline is mandatory.
+func (t *Tree) blockControl() Node {
+ const context = "block clause"
+
+ token := t.nextNonSpace()
+ name := t.parseTemplateName(token, context)
+ pipe := t.pipeline(context, itemRightDelim)
+
+ // The block body is parsed as a standalone template definition
+ // (sharing this Tree's lexer) and installed in the tree set ...
+ block := New(name) // name will be updated once we know it.
+ block.text = t.text
+ block.Mode = t.Mode
+ block.ParseName = t.ParseName
+ block.startParse(t.funcs, t.lex, t.treeSet)
+ var end Node
+ block.Root, end = block.itemList()
+ if end.Type() != nodeEnd {
+ t.errorf("unexpected %s in %s", end, context)
+ }
+ block.add()
+ block.stopParse()
+
+ // ... while the action itself becomes an invocation of that template.
+ return t.newTemplate(token.pos, token.line, name, pipe)
+}
+
+// Template:
+// {{template stringValue pipeline}}
+// Template keyword is past. The name must be something that can evaluate
+// to a string. The pipeline argument is optional.
+func (t *Tree) templateControl() Node {
+ const context = "template clause"
+ token := t.nextNonSpace()
+ name := t.parseTemplateName(token, context)
+ var pipe *PipeNode
+ if t.nextNonSpace().typ != itemRightDelim {
+ t.backup()
+ // Do not pop variables; they persist until "end".
+ pipe = t.pipeline(context, itemRightDelim)
+ }
+ return t.newTemplate(token.pos, token.line, name, pipe)
+}
+
+// parseTemplateName unquotes a string or raw-string token into a template
+// name; any other token type is a parse error in the given context.
+func (t *Tree) parseTemplateName(token item, context string) (name string) {
+ switch token.typ {
+ case itemString, itemRawString:
+ s, err := strconv.Unquote(token.val)
+ if err != nil {
+ t.error(err)
+ }
+ name = s
+ default:
+ t.unexpected(token, context)
+ }
+ return
+}
+
+// command:
+// operand (space operand)*
+// space-separated arguments up to a pipeline character or right delimiter.
+// we consume the pipe character but leave the right delim to terminate the action.
+func (t *Tree) command() *CommandNode {
+ cmd := t.newCommand(t.peekNonSpace().pos)
+ for {
+ t.peekNonSpace() // skip leading spaces.
+ operand := t.operand()
+ if operand != nil {
+ cmd.append(operand)
+ }
+ switch token := t.next(); token.typ {
+ case itemSpace:
+ continue
+ case itemRightDelim, itemRightParen:
+ // Leave the delimiter for the pipeline loop to consume.
+ t.backup()
+ case itemPipe:
+ // nothing here; break loop below
+ default:
+ t.unexpected(token, "operand")
+ }
+ break
+ }
+ if len(cmd.Args) == 0 {
+ t.errorf("empty command")
+ }
+ return cmd
+}
+
+// operand:
+// term .Field*
+// An operand is a space-separated component of a command,
+// a term possibly followed by field accesses.
+// A nil return means the next item is not an operand.
+func (t *Tree) operand() Node {
+ node := t.term()
+ if node == nil {
+ return nil
+ }
+ if t.peek().typ == itemField {
+ chain := t.newChain(t.peek().pos, node)
+ for t.peek().typ == itemField {
+ chain.Add(t.next().val)
+ }
+ // Compatibility with original API: If the term is of type NodeField
+ // or NodeVariable, just put more fields on the original.
+ // Otherwise, keep the Chain node.
+ // Obvious parsing errors involving literal values are detected here.
+ // More complex error cases will have to be handled at execution time.
+ switch node.Type() {
+ case NodeField:
+ node = t.newField(chain.Position(), chain.String())
+ case NodeVariable:
+ node = t.newVariable(chain.Position(), chain.String())
+ case NodeBool, NodeString, NodeNumber, NodeNil, NodeDot:
+ // e.g. {{true.E}} or {{"x".y}} — a literal cannot have fields.
+ t.errorf("unexpected . after term %q", node.String())
+ default:
+ node = chain
+ }
+ }
+ return node
+}
+
+// term:
+// literal (number, string, nil, boolean)
+// function (identifier)
+// .
+// .Field
+// $
+// '(' pipeline ')'
+// A term is a simple "expression".
+// A nil return means the next item is not a term.
+func (t *Tree) term() Node {
+ switch token := t.nextNonSpace(); token.typ {
+ case itemIdentifier:
+ // Unless SkipFuncCheck is set, identifiers must name a known function.
+ checkFunc := t.Mode&SkipFuncCheck == 0
+ if checkFunc && !t.hasFunction(token.val) {
+ t.errorf("function %q not defined", token.val)
+ }
+ return NewIdentifier(token.val).SetTree(t).SetPos(token.pos)
+ case itemDot:
+ return t.newDot(token.pos)
+ case itemNil:
+ return t.newNil(token.pos)
+ case itemVariable:
+ return t.useVar(token.pos, token.val)
+ case itemField:
+ return t.newField(token.pos, token.val)
+ case itemBool:
+ return t.newBool(token.pos, token.val == "true")
+ case itemCharConstant, itemComplex, itemNumber:
+ number, err := t.newNumber(token.pos, token.val, token.typ)
+ if err != nil {
+ t.error(err)
+ }
+ return number
+ case itemLeftParen:
+ // A parenthesized pipeline is itself a term.
+ return t.pipeline("parenthesized pipeline", itemRightParen)
+ case itemString, itemRawString:
+ s, err := strconv.Unquote(token.val)
+ if err != nil {
+ t.error(err)
+ }
+ return t.newString(token.pos, token.val, s)
+ }
+ // Not a term: push the token back for the caller.
+ t.backup()
+ return nil
+}
+
+// hasFunction reports if a function name exists in the Tree's maps.
+// Nil maps in the slice are tolerated and skipped.
+func (t *Tree) hasFunction(name string) bool {
+ for _, funcMap := range t.funcs {
+ if funcMap == nil {
+ continue
+ }
+ if funcMap[name] != nil {
+ return true
+ }
+ }
+ return false
+}
+
+// popVars trims the variable list to the specified length, discarding
+// variables declared inside a control structure when it ends.
+func (t *Tree) popVars(n int) {
+ t.vars = t.vars[:n]
+}
+
+// useVar returns a node for a variable reference. It errors if the
+// variable is not defined.
+func (t *Tree) useVar(pos Pos, name string) Node {
+ v := t.newVariable(pos, name)
+ for _, varName := range t.vars {
+ // Only the root of the reference ($x of $x.Field) must be declared.
+ if varName == v.Ident[0] {
+ return v
+ }
+ }
+ t.errorf("undefined variable %q", v.Ident[0])
+ return nil
+}
diff --git a/tpl/internal/go_templates/texttemplate/parse/parse_test.go b/tpl/internal/go_templates/texttemplate/parse/parse_test.go
new file mode 100644
index 000000000..52bd6aca2
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/parse/parse_test.go
@@ -0,0 +1,683 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.13
+// +build go1.13
+
+package parse
+
+import (
+ "flag"
+ "fmt"
+ "strings"
+ "testing"
+)
+
+var debug = flag.Bool("debug", false, "show the errors produced by the main tests")
+
+// numberTest describes one newNumber case: the literal text, which numeric
+// kinds it should parse as, and (via embedded fields) the expected values.
+type numberTest struct {
+ text string
+ isInt bool
+ isUint bool
+ isFloat bool
+ isComplex bool
+ int64
+ uint64
+ float64
+ complex128
+}
+
+// numberTests covers integer/uint/float/complex classification, underscore
+// separators, alternate bases, character constants, and rejection cases.
+var numberTests = []numberTest{
+ // basics
+ {"0", true, true, true, false, 0, 0, 0, 0},
+ {"-0", true, true, true, false, 0, 0, 0, 0}, // check that -0 is a uint.
+ {"73", true, true, true, false, 73, 73, 73, 0},
+ {"7_3", true, true, true, false, 73, 73, 73, 0},
+ {"0b10_010_01", true, true, true, false, 73, 73, 73, 0},
+ {"0B10_010_01", true, true, true, false, 73, 73, 73, 0},
+ {"073", true, true, true, false, 073, 073, 073, 0},
+ {"0o73", true, true, true, false, 073, 073, 073, 0},
+ {"0O73", true, true, true, false, 073, 073, 073, 0},
+ {"0x73", true, true, true, false, 0x73, 0x73, 0x73, 0},
+ {"0X73", true, true, true, false, 0x73, 0x73, 0x73, 0},
+ {"0x7_3", true, true, true, false, 0x73, 0x73, 0x73, 0},
+ {"-73", true, false, true, false, -73, 0, -73, 0},
+ {"+73", true, false, true, false, 73, 0, 73, 0},
+ {"100", true, true, true, false, 100, 100, 100, 0},
+ {"1e9", true, true, true, false, 1e9, 1e9, 1e9, 0},
+ {"-1e9", true, false, true, false, -1e9, 0, -1e9, 0},
+ {"-1.2", false, false, true, false, 0, 0, -1.2, 0},
+ {"1e19", false, true, true, false, 0, 1e19, 1e19, 0},
+ {"1e1_9", false, true, true, false, 0, 1e19, 1e19, 0},
+ {"1E19", false, true, true, false, 0, 1e19, 1e19, 0},
+ {"-1e19", false, false, true, false, 0, 0, -1e19, 0},
+ {"0x_1p4", true, true, true, false, 16, 16, 16, 0},
+ {"0X_1P4", true, true, true, false, 16, 16, 16, 0},
+ {"0x_1p-4", false, false, true, false, 0, 0, 1 / 16., 0},
+ {"4i", false, false, false, true, 0, 0, 0, 4i},
+ {"-1.2+4.2i", false, false, false, true, 0, 0, 0, -1.2 + 4.2i},
+ {"073i", false, false, false, true, 0, 0, 0, 73i}, // not octal!
+ // complex with 0 imaginary are float (and maybe integer)
+ {"0i", true, true, true, true, 0, 0, 0, 0},
+ {"-1.2+0i", false, false, true, true, 0, 0, -1.2, -1.2},
+ {"-12+0i", true, false, true, true, -12, 0, -12, -12},
+ {"13+0i", true, true, true, true, 13, 13, 13, 13},
+ // funny bases
+ {"0123", true, true, true, false, 0123, 0123, 0123, 0},
+ {"-0x0", true, true, true, false, 0, 0, 0, 0},
+ {"0xdeadbeef", true, true, true, false, 0xdeadbeef, 0xdeadbeef, 0xdeadbeef, 0},
+ // character constants
+ {`'a'`, true, true, true, false, 'a', 'a', 'a', 0},
+ {`'\n'`, true, true, true, false, '\n', '\n', '\n', 0},
+ {`'\\'`, true, true, true, false, '\\', '\\', '\\', 0},
+ {`'\''`, true, true, true, false, '\'', '\'', '\'', 0},
+ {`'\xFF'`, true, true, true, false, 0xFF, 0xFF, 0xFF, 0},
+ {`'パ'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
+ {`'\u30d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
+ {`'\U000030d1'`, true, true, true, false, 0x30d1, 0x30d1, 0x30d1, 0},
+ // some broken syntax
+ {text: "+-2"},
+ {text: "0x123."},
+ {text: "1e."},
+ {text: "0xi."},
+ {text: "1+2."},
+ {text: "'x"},
+ {text: "'xx'"},
+ {text: "'433937734937734969526500969526500'"}, // Integer too large - issue 10634.
+ // Issue 8622 - 0xe parsed as floating point. Very embarrassing.
+ {"0xef", true, true, true, false, 0xef, 0xef, 0xef, 0},
+}
+
+// TestNumberParse exercises Tree.newNumber over numberTests, checking both
+// the accepted kinds (int/uint/float/complex) and the parsed values.
+func TestNumberParse(t *testing.T) {
+ for _, test := range numberTests {
+ // If fmt.Sscan thinks it's complex, it's complex. We can't trust the output
+ // because imaginary comes out as a number.
+ var c complex128
+ typ := itemNumber
+ // NOTE(review): tree stays nil; newNumber evidently tolerates a nil
+ // receiver for this use — confirm against parse.go if changing.
+ var tree *Tree
+ if test.text[0] == '\'' {
+ typ = itemCharConstant
+ } else {
+ _, err := fmt.Sscan(test.text, &c)
+ if err == nil {
+ typ = itemComplex
+ }
+ }
+ n, err := tree.newNumber(0, test.text, typ)
+ ok := test.isInt || test.isUint || test.isFloat || test.isComplex
+ if ok && err != nil {
+ t.Errorf("unexpected error for %q: %s", test.text, err)
+ continue
+ }
+ if !ok && err == nil {
+ t.Errorf("expected error for %q", test.text)
+ continue
+ }
+ if !ok {
+ if *debug {
+ fmt.Printf("%s\n\t%s\n", test.text, err)
+ }
+ continue
+ }
+ if n.IsComplex != test.isComplex {
+ t.Errorf("complex incorrect for %q; should be %t", test.text, test.isComplex)
+ }
+ if test.isInt {
+ if !n.IsInt {
+ t.Errorf("expected integer for %q", test.text)
+ }
+ if n.Int64 != test.int64 {
+ t.Errorf("int64 for %q should be %d Is %d", test.text, test.int64, n.Int64)
+ }
+ } else if n.IsInt {
+ t.Errorf("did not expect integer for %q", test.text)
+ }
+ if test.isUint {
+ if !n.IsUint {
+ t.Errorf("expected unsigned integer for %q", test.text)
+ }
+ if n.Uint64 != test.uint64 {
+ t.Errorf("uint64 for %q should be %d Is %d", test.text, test.uint64, n.Uint64)
+ }
+ } else if n.IsUint {
+ t.Errorf("did not expect unsigned integer for %q", test.text)
+ }
+ if test.isFloat {
+ if !n.IsFloat {
+ t.Errorf("expected float for %q", test.text)
+ }
+ if n.Float64 != test.float64 {
+ t.Errorf("float64 for %q should be %g Is %g", test.text, test.float64, n.Float64)
+ }
+ } else if n.IsFloat {
+ t.Errorf("did not expect float for %q", test.text)
+ }
+ if test.isComplex {
+ if !n.IsComplex {
+ t.Errorf("expected complex for %q", test.text)
+ }
+ if n.Complex128 != test.complex128 {
+ t.Errorf("complex128 for %q should be %g Is %g", test.text, test.complex128, n.Complex128)
+ }
+ } else if n.IsComplex {
+ t.Errorf("did not expect complex for %q", test.text)
+ }
+ }
+}
+
+// parseTest is one parse case: input template text, whether it should
+// parse, and the canonical String() rendering on success.
+type parseTest struct {
+ name string
+ input string
+ ok bool
+ result string // what the user would see in an error message.
+}
+
+// Readable aliases for the parseTest.ok field.
+const (
+ noError = true
+ hasError = false
+)
+
+// parseTests is the main table for TestParse/TestParseCopy: expected
+// results use the quoted text format installed by testParse.
+var parseTests = []parseTest{
+ {"empty", "", noError,
+ ``},
+ {"comment", "{{/*\n\n\n*/}}", noError,
+ ``},
+ {"spaces", " \t\n", noError,
+ `" \t\n"`},
+ {"text", "some text", noError,
+ `"some text"`},
+ {"emptyAction", "{{}}", hasError,
+ `{{}}`},
+ {"field", "{{.X}}", noError,
+ `{{.X}}`},
+ {"simple command", "{{printf}}", noError,
+ `{{printf}}`},
+ {"$ invocation", "{{$}}", noError,
+ "{{$}}"},
+ {"variable invocation", "{{with $x := 3}}{{$x 23}}{{end}}", noError,
+ "{{with $x := 3}}{{$x 23}}{{end}}"},
+ {"variable with fields", "{{$.I}}", noError,
+ "{{$.I}}"},
+ {"multi-word command", "{{printf `%d` 23}}", noError,
+ "{{printf `%d` 23}}"},
+ {"pipeline", "{{.X|.Y}}", noError,
+ `{{.X | .Y}}`},
+ {"pipeline with decl", "{{$x := .X|.Y}}", noError,
+ `{{$x := .X | .Y}}`},
+ {"nested pipeline", "{{.X (.Y .Z) (.A | .B .C) (.E)}}", noError,
+ `{{.X (.Y .Z) (.A | .B .C) (.E)}}`},
+ {"field applied to parentheses", "{{(.Y .Z).Field}}", noError,
+ `{{(.Y .Z).Field}}`},
+ {"simple if", "{{if .X}}hello{{end}}", noError,
+ `{{if .X}}"hello"{{end}}`},
+ {"if with else", "{{if .X}}true{{else}}false{{end}}", noError,
+ `{{if .X}}"true"{{else}}"false"{{end}}`},
+ {"if with else if", "{{if .X}}true{{else if .Y}}false{{end}}", noError,
+ `{{if .X}}"true"{{else}}{{if .Y}}"false"{{end}}{{end}}`},
+ {"if else chain", "+{{if .X}}X{{else if .Y}}Y{{else if .Z}}Z{{end}}+", noError,
+ `"+"{{if .X}}"X"{{else}}{{if .Y}}"Y"{{else}}{{if .Z}}"Z"{{end}}{{end}}{{end}}"+"`},
+ {"simple range", "{{range .X}}hello{{end}}", noError,
+ `{{range .X}}"hello"{{end}}`},
+ {"chained field range", "{{range .X.Y.Z}}hello{{end}}", noError,
+ `{{range .X.Y.Z}}"hello"{{end}}`},
+ {"nested range", "{{range .X}}hello{{range .Y}}goodbye{{end}}{{end}}", noError,
+ `{{range .X}}"hello"{{range .Y}}"goodbye"{{end}}{{end}}`},
+ {"range with else", "{{range .X}}true{{else}}false{{end}}", noError,
+ `{{range .X}}"true"{{else}}"false"{{end}}`},
+ {"range over pipeline", "{{range .X|.M}}true{{else}}false{{end}}", noError,
+ `{{range .X | .M}}"true"{{else}}"false"{{end}}`},
+ {"range []int", "{{range .SI}}{{.}}{{end}}", noError,
+ `{{range .SI}}{{.}}{{end}}`},
+ {"range 1 var", "{{range $x := .SI}}{{.}}{{end}}", noError,
+ `{{range $x := .SI}}{{.}}{{end}}`},
+ {"range 2 vars", "{{range $x, $y := .SI}}{{.}}{{end}}", noError,
+ `{{range $x, $y := .SI}}{{.}}{{end}}`},
+ {"range with break", "{{range .SI}}{{.}}{{break}}{{end}}", noError,
+ `{{range .SI}}{{.}}{{break}}{{end}}`},
+ {"range with continue", "{{range .SI}}{{.}}{{continue}}{{end}}", noError,
+ `{{range .SI}}{{.}}{{continue}}{{end}}`},
+ {"constants", "{{range .SI 1 -3.2i true false 'a' nil}}{{end}}", noError,
+ `{{range .SI 1 -3.2i true false 'a' nil}}{{end}}`},
+ {"template", "{{template `x`}}", noError,
+ `{{template "x"}}`},
+ {"template with arg", "{{template `x` .Y}}", noError,
+ `{{template "x" .Y}}`},
+ {"with", "{{with .X}}hello{{end}}", noError,
+ `{{with .X}}"hello"{{end}}`},
+ {"with with else", "{{with .X}}hello{{else}}goodbye{{end}}", noError,
+ `{{with .X}}"hello"{{else}}"goodbye"{{end}}`},
+ // Trimming spaces.
+ {"trim left", "x \r\n\t{{- 3}}", noError, `"x"{{3}}`},
+ {"trim right", "{{3 -}}\n\n\ty", noError, `{{3}}"y"`},
+ {"trim left and right", "x \r\n\t{{- 3 -}}\n\n\ty", noError, `"x"{{3}}"y"`},
+ {"trim with extra spaces", "x\n{{- 3 -}}\ny", noError, `"x"{{3}}"y"`},
+ {"comment trim left", "x \r\n\t{{- /* hi */}}", noError, `"x"`},
+ {"comment trim right", "{{/* hi */ -}}\n\n\ty", noError, `"y"`},
+ {"comment trim left and right", "x \r\n\t{{- /* */ -}}\n\n\ty", noError, `"x""y"`},
+ {"block definition", `{{block "foo" .}}hello{{end}}`, noError,
+ `{{template "foo" .}}`},
+
+ {"newline in assignment", "{{ $x \n := \n 1 \n }}", noError, "{{$x := 1}}"},
+ {"newline in empty action", "{{\n}}", hasError, "{{\n}}"},
+ {"newline in pipeline", "{{\n\"x\"\n|\nprintf\n}}", noError, `{{"x" | printf}}`},
+ {"newline in comment", "{{/*\nhello\n*/}}", noError, ""},
+ {"newline in comment", "{{-\n/*\nhello\n*/\n-}}", noError, ""},
+ {"spaces around continue", "{{range .SI}}{{.}}{{ continue }}{{end}}", noError,
+ `{{range .SI}}{{.}}{{continue}}{{end}}`},
+ {"spaces around break", "{{range .SI}}{{.}}{{ break }}{{end}}", noError,
+ `{{range .SI}}{{.}}{{break}}{{end}}`},
+
+ // Errors.
+ {"unclosed action", "hello{{range", hasError, ""},
+ {"unmatched end", "{{end}}", hasError, ""},
+ {"unmatched else", "{{else}}", hasError, ""},
+ {"unmatched else after if", "{{if .X}}hello{{end}}{{else}}", hasError, ""},
+ {"multiple else", "{{if .X}}1{{else}}2{{else}}3{{end}}", hasError, ""},
+ {"missing end", "hello{{range .x}}", hasError, ""},
+ {"missing end after else", "hello{{range .x}}{{else}}", hasError, ""},
+ {"undefined function", "hello{{undefined}}", hasError, ""},
+ {"undefined variable", "{{$x}}", hasError, ""},
+ {"variable undefined after end", "{{with $x := 4}}{{end}}{{$x}}", hasError, ""},
+ {"variable undefined in template", "{{template $v}}", hasError, ""},
+ {"declare with field", "{{with $x.Y := 4}}{{end}}", hasError, ""},
+ {"template with field ref", "{{template .X}}", hasError, ""},
+ {"template with var", "{{template $v}}", hasError, ""},
+ {"invalid punctuation", "{{printf 3, 4}}", hasError, ""},
+ {"multidecl outside range", "{{with $v, $u := 3}}{{end}}", hasError, ""},
+ {"too many decls in range", "{{range $u, $v, $w := 3}}{{end}}", hasError, ""},
+ {"dot applied to parentheses", "{{printf (printf .).}}", hasError, ""},
+ {"adjacent args", "{{printf 3`x`}}", hasError, ""},
+ {"adjacent args with .", "{{printf `x`.}}", hasError, ""},
+ {"extra end after if", "{{if .X}}a{{else if .Y}}b{{end}}{{end}}", hasError, ""},
+ {"break outside range", "{{range .}}{{end}} {{break}}", hasError, ""},
+ {"continue outside range", "{{range .}}{{end}} {{continue}}", hasError, ""},
+ {"break in range else", "{{range .}}{{else}}{{break}}{{end}}", hasError, ""},
+ {"continue in range else", "{{range .}}{{else}}{{continue}}{{end}}", hasError, ""},
+ // Other kinds of assignments and operators aren't available yet.
+ {"bug0a", "{{$x := 0}}{{$x}}", noError, "{{$x := 0}}{{$x}}"},
+ {"bug0b", "{{$x += 1}}{{$x}}", hasError, ""},
+ {"bug0c", "{{$x ! 2}}{{$x}}", hasError, ""},
+ {"bug0d", "{{$x % 3}}{{$x}}", hasError, ""},
+ // Check the parse fails for := rather than comma.
+ {"bug0e", "{{range $x := $y := 3}}{{end}}", hasError, ""},
+ // Another bug: variable read must ignore following punctuation.
+ {"bug1a", "{{$x:=.}}{{$x!2}}", hasError, ""}, // ! is just illegal here.
+ {"bug1b", "{{$x:=.}}{{$x+2}}", hasError, ""}, // $x+2 should not parse as ($x) (+2).
+ {"bug1c", "{{$x:=.}}{{$x +2}}", noError, "{{$x := .}}{{$x +2}}"}, // It's OK with a space.
+ // dot following a literal value
+ {"dot after integer", "{{1.E}}", hasError, ""},
+ {"dot after float", "{{0.1.E}}", hasError, ""},
+ {"dot after boolean", "{{true.E}}", hasError, ""},
+ {"dot after char", "{{'a'.any}}", hasError, ""},
+ {"dot after string", `{{"hello".guys}}`, hasError, ""},
+ {"dot after dot", "{{..E}}", hasError, ""},
+ {"dot after nil", "{{nil.E}}", hasError, ""},
+ // Wrong pipeline
+ {"wrong pipeline dot", "{{12|.}}", hasError, ""},
+ {"wrong pipeline number", "{{.|12|printf}}", hasError, ""},
+ {"wrong pipeline string", "{{.|printf|\"error\"}}", hasError, ""},
+ {"wrong pipeline char", "{{12|printf|'e'}}", hasError, ""},
+ {"wrong pipeline boolean", "{{.|true}}", hasError, ""},
+ {"wrong pipeline nil", "{{'c'|nil}}", hasError, ""},
+ {"empty pipeline", `{{printf "%d" ( ) }}`, hasError, ""},
+ // Missing pipeline in block
+ {"block definition", `{{block "foo"}}hello{{end}}`, hasError, ""},
+}
+
+// builtins supplies the minimal function map the parse tests need so that
+// identifier lookups (e.g. "printf") succeed during parsing.
+var builtins = map[string]any{
+ "printf": fmt.Sprintf,
+ "contains": strings.Contains,
+}
+
+// testParse runs every parseTests case; when doCopy is set the parsed tree
+// is round-tripped through Copy before rendering, so TestParseCopy also
+// verifies that Copy preserves String() output.
+func testParse(doCopy bool, t *testing.T) {
+ // Render text nodes quoted so whitespace is visible in expectations.
+ textFormat = "%q"
+ defer func() { textFormat = "%s" }()
+ for _, test := range parseTests {
+ tmpl, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree), builtins)
+ switch {
+ case err == nil && !test.ok:
+ t.Errorf("%q: expected error; got none", test.name)
+ continue
+ case err != nil && test.ok:
+ t.Errorf("%q: unexpected error: %v", test.name, err)
+ continue
+ case err != nil && !test.ok:
+ // expected error, got one
+ if *debug {
+ fmt.Printf("%s: %s\n\t%s\n", test.name, test.input, err)
+ }
+ continue
+ }
+ var result string
+ if doCopy {
+ result = tmpl.Root.Copy().String()
+ } else {
+ result = tmpl.Root.String()
+ }
+ if result != test.result {
+ t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.result)
+ }
+ }
+}
+
+// TestParse runs the parse table without copying the tree.
+func TestParse(t *testing.T) {
+ testParse(false, t)
+}
+
+// Same as TestParse, but we copy the node first
+func TestParseCopy(t *testing.T) {
+ testParse(true, t)
+}
+
+// TestParseWithComments verifies that with Mode=ParseComments the parser
+// retains comment nodes (and their trim markers) in the tree's String()
+// rendering instead of discarding them.
+func TestParseWithComments(t *testing.T) {
+ // Render text nodes quoted so whitespace is visible in expectations.
+ textFormat = "%q"
+ defer func() { textFormat = "%s" }()
+ tests := [...]parseTest{
+ {"comment", "{{/*\n\n\n*/}}", noError, "{{/*\n\n\n*/}}"},
+ {"comment trim left", "x \r\n\t{{- /* hi */}}", noError, `"x"{{/* hi */}}`},
+ {"comment trim right", "{{/* hi */ -}}\n\n\ty", noError, `{{/* hi */}}"y"`},
+ {"comment trim left and right", "x \r\n\t{{- /* */ -}}\n\n\ty", noError, `"x"{{/* */}}"y"`},
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ tr := New(test.name)
+ tr.Mode = ParseComments
+ tmpl, err := tr.Parse(test.input, "", "", make(map[string]*Tree))
+ if err != nil {
+ // All cases here are expected to parse; report the actual error
+ // and stop before dereferencing a nil tmpl.
+ t.Errorf("%q: unexpected error: %v", test.name, err)
+ return
+ }
+ if result := tmpl.Root.String(); result != test.result {
+ t.Errorf("%s=(%q): got\n\t%v\nexpected\n\t%v", test.name, test.input, result, test.result)
+ }
+ })
+ }
+}
+
+// TestSkipFuncCheck verifies that Mode=SkipFuncCheck lets an unknown
+// identifier ("fn") parse without a "function not defined" error.
+func TestSkipFuncCheck(t *testing.T) {
+ oldTextFormat := textFormat
+ textFormat = "%q"
+ defer func() { textFormat = oldTextFormat }()
+ tr := New("skip func check")
+ tr.Mode = SkipFuncCheck
+ tmpl, err := tr.Parse("{{fn 1 2}}", "", "", make(map[string]*Tree))
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expected := "{{fn 1 2}}"
+ if result := tmpl.Root.String(); result != expected {
+ t.Errorf("got\n\t%v\nexpected\n\t%v", result, expected)
+ }
+}
+
+// isEmptyTest is one IsEmptyTree case: input text and whether the
+// resulting root tree should be considered empty.
+type isEmptyTest struct {
+ name string
+ input string
+ empty bool
+}
+
+// isEmptyTests: space, comments, and {{define}} blocks alone leave the
+// root tree empty; any text or action makes it non-empty.
+var isEmptyTests = []isEmptyTest{
+ {"empty", ``, true},
+ {"nonempty", `hello`, false},
+ {"spaces only", " \t\n \t\n", true},
+ {"comment only", "{{/* comment */}}", true},
+ {"definition", `{{define "x"}}something{{end}}`, true},
+ {"definitions and space", "{{define `x`}}something{{end}}\n\n{{define `y`}}something{{end}}\n\n", true},
+ {"definitions and text", "{{define `x`}}something{{end}}\nx\n{{define `y`}}something{{end}}\ny\n", false},
+ {"definition and action", "{{define `x`}}something{{end}}{{if 3}}foo{{end}}", false},
+}
+
+// TestIsEmpty checks IsEmptyTree on a nil node and on each isEmptyTests root.
+func TestIsEmpty(t *testing.T) {
+ if !IsEmptyTree(nil) {
+ t.Errorf("nil tree is not empty")
+ }
+ for _, test := range isEmptyTests {
+ tree, err := New("root").Parse(test.input, "", "", make(map[string]*Tree), nil)
+ if err != nil {
+ t.Errorf("%q: unexpected error: %v", test.name, err)
+ continue
+ }
+ if empty := IsEmptyTree(tree.Root); empty != test.empty {
+ t.Errorf("%q: expected %t got %t", test.name, test.empty, empty)
+ }
+ }
+}
+
+func TestErrorContextWithTreeCopy(t *testing.T) {
+ tree, err := New("root").Parse("{{if true}}{{end}}", "", "", make(map[string]*Tree), nil)
+ if err != nil {
+ t.Fatalf("unexpected tree parse failure: %v", err)
+ }
+ treeCopy := tree.Copy()
+ wantLocation, wantContext := tree.ErrorContext(tree.Root.Nodes[0])
+ gotLocation, gotContext := treeCopy.ErrorContext(treeCopy.Root.Nodes[0])
+ if wantLocation != gotLocation {
+ t.Errorf("wrong error location want %q got %q", wantLocation, gotLocation)
+ }
+ if wantContext != gotContext {
+ t.Errorf("wrong error location want %q got %q", wantContext, gotContext)
+ }
+}
+
// All failures, and the result is a string that must appear in the error message.
// Each entry is a parseTest whose parse is expected to fail; TestErrors checks
// that the returned error contains the result substring.
var errorTests = []parseTest{
	// Check line numbers are accurate.
	{"unclosed1",
		"line1\n{{",
		hasError, `unclosed1:2: unclosed action`},
	{"unclosed2",
		"line1\n{{define `x`}}line2\n{{",
		hasError, `unclosed2:3: unclosed action`},
	{"unclosed3",
		"line1\n{{\"x\"\n\"y\"\n",
		hasError, `unclosed3:4: unclosed action started at unclosed3:2`},
	{"unclosed4",
		"{{\n\n\n\n\n",
		hasError, `unclosed4:6: unclosed action started at unclosed4:1`},
	{"var1",
		"line1\n{{\nx\n}}",
		hasError, `var1:3: function "x" not defined`},
	// Specific errors.
	{"function",
		"{{foo}}",
		hasError, `function "foo" not defined`},
	{"comment1",
		"{{/*}}",
		hasError, `comment1:1: unclosed comment`},
	{"comment2",
		"{{/*\nhello\n}}",
		hasError, `comment2:1: unclosed comment`},
	{"lparen",
		"{{.X (1 2 3}}",
		hasError, `unclosed left paren`},
	{"rparen",
		"{{.X 1 2 3 ) }}",
		hasError, `unexpected ")" in command`},
	{"rparen2",
		"{{(.X 1 2 3",
		hasError, `unclosed action`},
	{"space",
		"{{`x`3}}",
		hasError, `in operand`},
	{"idchar",
		"{{a#}}",
		hasError, `'#'`},
	{"charconst",
		"{{'a}}",
		hasError, `unterminated character constant`},
	{"stringconst",
		`{{"a}}`,
		hasError, `unterminated quoted string`},
	{"rawstringconst",
		"{{`a}}",
		hasError, `unterminated raw quoted string`},
	{"number",
		"{{0xi}}",
		hasError, `number syntax`},
	{"multidefine",
		"{{define `a`}}a{{end}}{{define `a`}}b{{end}}",
		hasError, `multiple definition of template`},
	{"eof",
		"{{range .X}}",
		hasError, `unexpected EOF`},
	{"variable",
		// Declare $x so it's defined, to avoid that error, and then check we don't parse a declaration.
		"{{$x := 23}}{{with $x.y := 3}}{{$x 23}}{{end}}",
		hasError, `unexpected ":="`},
	{"multidecl",
		"{{$a,$b,$c := 23}}",
		hasError, `too many declarations`},
	{"undefvar",
		"{{$a}}",
		hasError, `undefined variable`},
	{"wrongdot",
		"{{true.any}}",
		hasError, `unexpected . after term`},
	{"wrongpipeline",
		"{{12|false}}",
		hasError, `non executable command in pipeline`},
	{"emptypipeline",
		`{{ ( ) }}`,
		hasError, `missing value for parenthesized pipeline`},
	{"multilinerawstring",
		"{{ $v := `\n` }} {{",
		hasError, `multilinerawstring:2: unclosed action`},
	{"rangeundefvar",
		"{{range $k}}{{end}}",
		hasError, `undefined variable`},
	{"rangeundefvars",
		"{{range $k, $v}}{{end}}",
		hasError, `undefined variable`},
	{"rangemissingvalue1",
		"{{range $k,}}{{end}}",
		hasError, `missing value for range`},
	{"rangemissingvalue2",
		"{{range $k, $v := }}{{end}}",
		hasError, `missing value for range`},
	{"rangenotvariable1",
		"{{range $k, .}}{{end}}",
		hasError, `range can only initialize variables`},
	{"rangenotvariable2",
		"{{range $k, 123 := .}}{{end}}",
		hasError, `range can only initialize variables`},
}

// TestErrors runs errorTests, requiring each parse to fail and the error
// text to contain the expected substring.
func TestErrors(t *testing.T) {
	for _, test := range errorTests {
		t.Run(test.name, func(t *testing.T) {
			_, err := New(test.name).Parse(test.input, "", "", make(map[string]*Tree))
			if err == nil {
				t.Fatalf("expected error %q, got nil", test.result)
			}
			if !strings.Contains(err.Error(), test.result) {
				t.Fatalf("error %q does not contain %q", err, test.result)
			}
		})
	}
}

// TestBlock checks that {{block}} both defines a new template in the tree
// set and is rewritten in place to an equivalent {{template}} invocation.
func TestBlock(t *testing.T) {
	const (
		input = `a{{block "inner" .}}bar{{.}}baz{{end}}b`
		outer = `a{{template "inner" .}}b`
		inner = `bar{{.}}baz`
	)
	treeSet := make(map[string]*Tree)
	tmpl, err := New("outer").Parse(input, "", "", treeSet, nil)
	if err != nil {
		t.Fatal(err)
	}
	if g, w := tmpl.Root.String(), outer; g != w {
		t.Errorf("outer template = %q, want %q", g, w)
	}
	inTmpl := treeSet["inner"]
	if inTmpl == nil {
		t.Fatal("block did not define template")
	}
	if g, w := inTmpl.Root.String(), inner; g != w {
		t.Errorf("inner template = %q, want %q", g, w)
	}
}

// TestLineNum verifies that actions and their pipelines record the correct
// 1-based source line over a many-line input.
func TestLineNum(t *testing.T) {
	const count = 100
	text := strings.Repeat("{{printf 1234}}\n", count)
	tree, err := New("bench").Parse(text, "", "", make(map[string]*Tree), builtins)
	if err != nil {
		t.Fatal(err)
	}
	// Check the line numbers. Each line is an action containing a template, followed by text.
	// That's two nodes per line.
	nodes := tree.Root.Nodes
	for i := 0; i < len(nodes); i += 2 {
		line := 1 + i/2
		// Action first.
		action := nodes[i].(*ActionNode)
		if action.Line != line {
			t.Fatalf("line %d: action is line %d", line, action.Line)
		}
		pipe := action.Pipe
		if pipe.Line != line {
			t.Fatalf("line %d: pipe is line %d", line, pipe.Line)
		}
	}
}

// BenchmarkParseLarge measures parsing of a large (10k-action) input.
func BenchmarkParseLarge(b *testing.B) {
	text := strings.Repeat("{{1234}}\n", 10000)
	for i := 0; i < b.N; i++ {
		_, err := New("bench").Parse(text, "", "", make(map[string]*Tree), builtins)
		if err != nil {
			b.Fatal(err)
		}
	}
}

// sinkv and sinkl receive benchmark results so the compiler cannot
// dead-code-eliminate the measured String calls.
var sinkv, sinkl string

// BenchmarkVariableString measures stringifying a VariableNode.
func BenchmarkVariableString(b *testing.B) {
	v := &VariableNode{
		Ident: []string{"$", "A", "BB", "CCC", "THIS_IS_THE_VARIABLE_BEING_PROCESSED"},
	}
	b.ResetTimer()
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		sinkv = v.String()
	}
	if sinkv == "" {
		b.Fatal("Benchmark was not run")
	}
}

// BenchmarkListString measures stringifying a realistic parsed tree.
func BenchmarkListString(b *testing.B) {
	text := `
{{(printf .Field1.Field2.Field3).Value}}
{{$x := (printf .Field1.Field2.Field3).Value}}
{{$y := (printf $x.Field1.Field2.Field3).Value}}
{{$z := $y.Field1.Field2.Field3}}
{{if contains $y $z}}
	{{printf "%q" $y}}
{{else}}
	{{printf "%q" $x}}
{{end}}
{{with $z.Field1 | contains "boring"}}
	{{printf "%q" . | printf "%s"}}
{{else}}
	{{printf "%d %d %d" 11 11 11}}
	{{printf "%d %d %s" 22 22 $x.Field1.Field2.Field3 | printf "%s"}}
	{{printf "%v" (contains $z.Field1.Field2 $y)}}
{{end}}
`
	tree, err := New("bench").Parse(text, "", "", make(map[string]*Tree), builtins)
	if err != nil {
		b.Fatal(err)
	}
	b.ResetTimer()
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		sinkl = tree.Root.String()
	}
	if sinkl == "" {
		b.Fatal("Benchmark was not run")
	}
}
diff --git a/tpl/internal/go_templates/texttemplate/template.go b/tpl/internal/go_templates/texttemplate/template.go
new file mode 100644
index 000000000..1ba72c194
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/template.go
@@ -0,0 +1,238 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package template
+
+import (
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+ "reflect"
+ "sync"
+)
+
// common holds the information shared by related templates.
// NOTE(vendor): unlike upstream text/template, this vendored copy guards the
// shared maps with mutexes (muTmpl, muFuncs) — presumably for Hugo's
// concurrent use; confirm against the upstream diff before changing.
type common struct {
	tmpl   map[string]*Template // Map from name to defined templates.
	muTmpl sync.RWMutex         // protects tmpl
	option option
	// We use two maps, one for parsing and one for execution.
	// This separation makes the API cleaner since it doesn't
	// expose reflection to the client.
	muFuncs    sync.RWMutex // protects parseFuncs and execFuncs
	parseFuncs FuncMap
	execFuncs  map[string]reflect.Value
}

// Template is the representation of a parsed template. The *parse.Tree
// field is exported only for use by html/template and should be treated
// as unexported by all other clients.
type Template struct {
	name string
	*parse.Tree
	*common
	leftDelim  string
	rightDelim string
}

// New allocates a new, undefined template with the given name.
func New(name string) *Template {
	t := &Template{
		name: name,
	}
	t.init()
	return t
}

// Name returns the name of the template.
func (t *Template) Name() string {
	return t.name
}

// New allocates a new, undefined template associated with the given one and with the same
// delimiters. The association, which is transitive, allows one template to
// invoke another with a {{template}} action.
//
// Because associated templates share underlying data, template construction
// cannot be done safely in parallel. Once the templates are constructed, they
// can be executed in parallel.
func (t *Template) New(name string) *Template {
	t.init()
	nt := &Template{
		name:       name,
		common:     t.common,
		leftDelim:  t.leftDelim,
		rightDelim: t.rightDelim,
	}
	return nt
}

// init guarantees that t has a valid common structure.
// It is a no-op if t is already associated with one.
func (t *Template) init() {
	if t.common == nil {
		c := new(common)
		c.tmpl = make(map[string]*Template)
		c.parseFuncs = make(FuncMap)
		c.execFuncs = make(map[string]reflect.Value)
		t.common = c
	}
}

// Clone returns a duplicate of the template, including all associated
// templates. The actual representation is not copied, but the name space of
// associated templates is, so further calls to Parse in the copy will add
// templates to the copy but not to the original. Clone can be used to prepare
// common templates and use them with variant definitions for other templates
// by adding the variants after the clone is made.
func (t *Template) Clone() (*Template, error) {
	nt := t.copy(nil)
	nt.init()
	if t.common == nil {
		return nt, nil
	}
	// Hold read locks on the source's maps while copying them.
	t.muTmpl.RLock()
	defer t.muTmpl.RUnlock()
	for k, v := range t.tmpl {
		if k == t.name {
			nt.tmpl[t.name] = nt
			continue
		}
		// The associated templates share nt's common structure.
		tmpl := v.copy(nt.common)
		nt.tmpl[k] = tmpl
	}
	t.muFuncs.RLock()
	defer t.muFuncs.RUnlock()
	for k, v := range t.parseFuncs {
		nt.parseFuncs[k] = v
	}
	for k, v := range t.execFuncs {
		nt.execFuncs[k] = v
	}
	return nt, nil
}

// copy returns a shallow copy of t, with common set to the argument.
func (t *Template) copy(c *common) *Template {
	return &Template{
		name:       t.name,
		Tree:       t.Tree,
		common:     c,
		leftDelim:  t.leftDelim,
		rightDelim: t.rightDelim,
	}
}

// AddParseTree associates the argument parse tree with the template t, giving
// it the specified name. If the template has not been defined, this tree becomes
// its definition. If it has been defined and already has that name, the existing
// definition is replaced; otherwise a new template is created, defined, and returned.
func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) {
	t.init()
	t.muTmpl.Lock()
	defer t.muTmpl.Unlock()
	nt := t
	if name != t.name {
		nt = t.New(name)
	}
	// Even if nt == t, we need to install it in the common.tmpl map.
	if t.associate(nt, tree) || nt.Tree == nil {
		nt.Tree = tree
	}
	return nt, nil
}

// Templates returns a slice of defined templates associated with t.
func (t *Template) Templates() []*Template {
	if t.common == nil {
		return nil
	}
	// Return a slice so we don't expose the map.
	t.muTmpl.RLock()
	defer t.muTmpl.RUnlock()
	m := make([]*Template, 0, len(t.tmpl))
	for _, v := range t.tmpl {
		m = append(m, v)
	}
	return m
}

// Delims sets the action delimiters to the specified strings, to be used in
// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template
// definitions will inherit the settings. An empty delimiter stands for the
// corresponding default: {{ or }}.
// The return value is the template, so calls can be chained.
func (t *Template) Delims(left, right string) *Template {
	t.init()
	t.leftDelim = left
	t.rightDelim = right
	return t
}

// Funcs adds the elements of the argument map to the template's function map.
// It must be called before the template is parsed.
// It panics if a value in the map is not a function with appropriate return
// type or if the name cannot be used syntactically as a function in a template.
// It is legal to overwrite elements of the map. The return value is the template,
// so calls can be chained.
func (t *Template) Funcs(funcMap FuncMap) *Template {
	t.init()
	t.muFuncs.Lock()
	defer t.muFuncs.Unlock()
	addValueFuncs(t.execFuncs, funcMap)
	addFuncs(t.parseFuncs, funcMap)
	return t
}

// Lookup returns the template with the given name that is associated with t.
// It returns nil if there is no such template or the template has no definition.
func (t *Template) Lookup(name string) *Template {
	if t.common == nil {
		return nil
	}
	t.muTmpl.RLock()
	defer t.muTmpl.RUnlock()
	return t.tmpl[name]
}

// Parse parses text as a template body for t.
// Named template definitions ({{define ...}} or {{block ...}} statements) in text
// define additional templates associated with t and are removed from the
// definition of t itself.
//
// Templates can be redefined in successive calls to Parse.
// A template definition with a body containing only white space and comments
// is considered empty and will not replace an existing template's body.
// This allows using Parse to add new named template definitions without
// overwriting the main template body.
func (t *Template) Parse(text string) (*Template, error) {
	t.init()
	t.muFuncs.RLock()
	trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins())
	t.muFuncs.RUnlock()
	if err != nil {
		return nil, err
	}
	// Add the newly parsed trees, including the one for t, into our common structure.
	for name, tree := range trees {
		if _, err := t.AddParseTree(name, tree); err != nil {
			return nil, err
		}
	}
	return t, nil
}

// associate installs the new template into the group of templates associated
// with t. The two are already known to share the common structure.
// The boolean return value reports whether to store this tree as t.Tree.
// Callers must hold muTmpl (AddParseTree locks it before calling here).
func (t *Template) associate(new *Template, tree *parse.Tree) bool {
	if new.common != t.common {
		panic("internal error: associate not common")
	}
	if old := t.tmpl[new.name]; old != nil && parse.IsEmptyTree(tree.Root) && old.Tree != nil {
		// If a template by that name exists,
		// don't replace it with an empty template.
		return false
	}
	t.tmpl[new.name] = new
	return true
}
diff --git a/tpl/internal/go_templates/texttemplate/testdata/file1.tmpl b/tpl/internal/go_templates/texttemplate/testdata/file1.tmpl
new file mode 100644
index 000000000..febf9d9f8
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/testdata/file1.tmpl
@@ -0,0 +1,2 @@
+{{define "x"}}TEXT{{end}}
+{{define "dotV"}}{{.V}}{{end}}
diff --git a/tpl/internal/go_templates/texttemplate/testdata/file2.tmpl b/tpl/internal/go_templates/texttemplate/testdata/file2.tmpl
new file mode 100644
index 000000000..39bf6fb9e
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/testdata/file2.tmpl
@@ -0,0 +1,2 @@
+{{define "dot"}}{{.}}{{end}}
+{{define "nested"}}{{template "dot" .}}{{end}}
diff --git a/tpl/internal/go_templates/texttemplate/testdata/tmpl1.tmpl b/tpl/internal/go_templates/texttemplate/testdata/tmpl1.tmpl
new file mode 100644
index 000000000..b72b3a340
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/testdata/tmpl1.tmpl
@@ -0,0 +1,3 @@
+template1
+{{define "x"}}x{{end}}
+{{template "y"}}
diff --git a/tpl/internal/go_templates/texttemplate/testdata/tmpl2.tmpl b/tpl/internal/go_templates/texttemplate/testdata/tmpl2.tmpl
new file mode 100644
index 000000000..16beba6e7
--- /dev/null
+++ b/tpl/internal/go_templates/texttemplate/testdata/tmpl2.tmpl
@@ -0,0 +1,3 @@
+template2
+{{define "y"}}y{{end}}
+{{template "x"}}
diff --git a/tpl/internal/resourcehelpers/helpers.go b/tpl/internal/resourcehelpers/helpers.go
new file mode 100644
index 000000000..2d50c59a4
--- /dev/null
+++ b/tpl/internal/resourcehelpers/helpers.go
@@ -0,0 +1,69 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Portions Copyright The Go Authors.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resourcehelpers
+
+import (
+ "errors"
+ "fmt"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/resources"
+)
+
+// We allow string or a map as the first argument in some cases.
+func ResolveIfFirstArgIsString(args []any) (resources.ResourceTransformer, string, bool) {
+ if len(args) != 2 {
+ return nil, "", false
+ }
+
+ v1, ok1 := args[0].(string)
+ if !ok1 {
+ return nil, "", false
+ }
+ v2, ok2 := args[1].(resources.ResourceTransformer)
+
+ return v2, v1, ok2
+}
+
+// This roundabout way of doing it is needed to get both pipeline behaviour and options as arguments.
+func ResolveArgs(args []any) (resources.ResourceTransformer, map[string]any, error) {
+ if len(args) == 0 {
+ return nil, nil, errors.New("no Resource provided in transformation")
+ }
+
+ if len(args) == 1 {
+ r, ok := args[0].(resources.ResourceTransformer)
+ if !ok {
+ return nil, nil, fmt.Errorf("type %T not supported in Resource transformations", args[0])
+ }
+ return r, nil, nil
+ }
+
+ r, ok := args[1].(resources.ResourceTransformer)
+ if !ok {
+ if _, ok := args[1].(map[string]any); !ok {
+ return nil, nil, fmt.Errorf("no Resource provided in transformation")
+ }
+ return nil, nil, fmt.Errorf("type %T not supported in Resource transformations", args[0])
+ }
+
+ m, err := maps.ToStringMapE(args[0])
+ if err != nil {
+ return nil, nil, fmt.Errorf("invalid options type: %w", err)
+ }
+
+ return r, m, nil
+}
diff --git a/tpl/internal/templatefuncRegistry_test.go b/tpl/internal/templatefuncRegistry_test.go
new file mode 100644
index 000000000..8609bf34a
--- /dev/null
+++ b/tpl/internal/templatefuncRegistry_test.go
@@ -0,0 +1,39 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import (
+ "runtime"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// Test is a fixture type whose bound method gives methodToName a known
// input in the tests below.
type Test struct{}

// MyTestMethod returns a fixed marker string.
func (t *Test) MyTestMethod() string { return "abcde" }
+
+func TestMethodToName(t *testing.T) {
+ c := qt.New(t)
+ test := &Test{}
+
+ if runtime.Compiler == "gccgo" {
+ c.Assert(methodToName(test.MyTestMethod), qt.Contains, "thunk")
+ } else {
+ c.Assert(methodToName(test.MyTestMethod), qt.Equals, "MyTestMethod")
+ }
+}
diff --git a/tpl/internal/templatefuncsRegistry.go b/tpl/internal/templatefuncsRegistry.go
new file mode 100644
index 000000000..d06b55b4d
--- /dev/null
+++ b/tpl/internal/templatefuncsRegistry.go
@@ -0,0 +1,293 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Portions Copyright The Go Authors.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "go/doc"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "reflect"
+ "runtime"
+ "strings"
+ "sync"
+
+ "github.com/gohugoio/hugo/deps"
+)
+
// TemplateFuncsNamespaceRegistry describes a registry of functions that provide
// namespaces. It is appended to from the init funcs of the tpl sub-packages;
// it is not guarded by a lock, so registration is expected to happen only
// during package initialization.
var TemplateFuncsNamespaceRegistry []func(d *deps.Deps) *TemplateFuncsNamespace

// AddTemplateFuncsNamespace adds a given function to a registry.
func AddTemplateFuncsNamespace(ns func(d *deps.Deps) *TemplateFuncsNamespace) {
	TemplateFuncsNamespaceRegistry = append(TemplateFuncsNamespaceRegistry, ns)
}

// TemplateFuncsNamespace represents a template function namespace.
type TemplateFuncsNamespace struct {
	// The namespace name, "strings", "lang", etc.
	Name string

	// This is the method receiver.
	Context func(v ...any) (any, error)

	// Additional info, aliases and examples, per method name.
	MethodMappings map[string]TemplateFuncMethodMapping
}

// TemplateFuncsNamespaces is a slice of TemplateFuncsNamespace.
type TemplateFuncsNamespaces []*TemplateFuncsNamespace
+
+// AddMethodMapping adds a method to a template function namespace.
+func (t *TemplateFuncsNamespace) AddMethodMapping(m any, aliases []string, examples [][2]string) {
+ if t.MethodMappings == nil {
+ t.MethodMappings = make(map[string]TemplateFuncMethodMapping)
+ }
+
+ name := methodToName(m)
+
+ // sanity check
+ for _, e := range examples {
+ if e[0] == "" {
+ panic(t.Name + ": Empty example for " + name)
+ }
+ }
+ for _, a := range aliases {
+ if a == "" {
+ panic(t.Name + ": Empty alias for " + name)
+ }
+ }
+
+ t.MethodMappings[name] = TemplateFuncMethodMapping{
+ Method: m,
+ Aliases: aliases,
+ Examples: examples,
+ }
+}
+
// TemplateFuncMethodMapping represents a mapping of functions to methods for a
// given namespace.
type TemplateFuncMethodMapping struct {
	// Method is the template func implementation, stored as a method value.
	Method any

	// Any template funcs aliases. This is mainly motivated by keeping
	// backwards compatibility, but some new template funcs may also make
	// sense to give short and snappy aliases.
	// Note that these aliases are global and will be merged, so the last
	// key will win.
	Aliases []string

	// A slice of input/expected examples.
	// We keep it at the namespace level for now, but may find a way to keep track
	// of the single template func, for documentation purposes.
	// Some of these, hopefully just a few, may depend on some test data to run.
	Examples [][2]string
}
+
+func methodToName(m any) string {
+ name := runtime.FuncForPC(reflect.ValueOf(m).Pointer()).Name()
+ name = filepath.Ext(name)
+ name = strings.TrimPrefix(name, ".")
+ name = strings.TrimSuffix(name, "-fm")
+ return name
+}
+
// goDocFunc carries the documentation gathered for one template function:
// its go doc description and argument names plus any registered aliases and
// examples.
type goDocFunc struct {
	Name        string
	Description string
	Args        []string
	Aliases     []string
	Examples    [][2]string
}

// toJSON renders the function as a `"name": { ... }` JSON object member.
// The result is a fragment, not a complete document; the caller supplies
// the enclosing braces.
func (t goDocFunc) toJSON() ([]byte, error) {
	args, err := json.Marshal(t.Args)
	if err != nil {
		return nil, err
	}
	aliases, err := json.Marshal(t.Aliases)
	if err != nil {
		return nil, err
	}
	examples, err := json.Marshal(t.Examples)
	if err != nil {
		return nil, err
	}

	var buf bytes.Buffer
	fmt.Fprintf(&buf, `%q:
    { "Description": %q, "Args": %s, "Aliases": %s, "Examples": %s }
`, t.Name, t.Description, args, aliases, examples)

	return buf.Bytes(), nil
}
+
+// MarshalJSON returns the JSON encoding of namespaces.
+func (namespaces TemplateFuncsNamespaces) MarshalJSON() ([]byte, error) {
+ var buf bytes.Buffer
+
+ buf.WriteString("{")
+
+ for i, ns := range namespaces {
+ if i != 0 {
+ buf.WriteString(",")
+ }
+ b, err := ns.toJSON()
+ if err != nil {
+ return nil, err
+ }
+ buf.Write(b)
+ }
+
+ buf.WriteString("}")
+
+ return buf.Bytes(), nil
+}
+
// ignoreFuncs lists method names that are excluded from the generated
// documentation JSON.
var ignoreFuncs = map[string]bool{
	"Reset": true,
}

// toJSON renders one namespace as a `"name": { ... }` JSON fragment,
// enumerating the exported methods of the namespace's Context value via
// reflection and merging in the go doc info extracted from the package
// sources. Method order follows reflect's method enumeration.
func (t *TemplateFuncsNamespace) toJSON() ([]byte, error) {
	var buf bytes.Buffer

	godoc := getGetTplPackagesGoDoc()[t.Name]

	var funcs []goDocFunc

	buf.WriteString(fmt.Sprintf(`%q: {`, t.Name))

	ctx, err := t.Context()
	if err != nil {
		return nil, err
	}
	ctxType := reflect.TypeOf(ctx)
	for i := 0; i < ctxType.NumMethod(); i++ {
		method := ctxType.Method(i)
		if ignoreFuncs[method.Name] {
			continue
		}
		f := goDocFunc{
			Name: method.Name,
		}

		methodGoDoc := godoc[method.Name]

		// Description/Args are only filled in for methods that have an
		// explicit mapping registered via AddMethodMapping.
		if mapping, ok := t.MethodMappings[method.Name]; ok {
			f.Aliases = mapping.Aliases
			f.Examples = mapping.Examples
			f.Description = methodGoDoc.Description
			f.Args = methodGoDoc.Args
		}

		funcs = append(funcs, f)
	}

	for i, f := range funcs {
		if i != 0 {
			buf.WriteString(",")
		}
		funcStr, err := f.toJSON()
		if err != nil {
			return nil, err
		}
		buf.Write(funcStr)
	}

	buf.WriteString("}")

	return buf.Bytes(), nil
}

// methodGoDocInfo holds the go doc description and argument names parsed
// for one Namespace method.
type methodGoDocInfo struct {
	Description string
	Args        []string
}

var (
	// tplPackagesGoDoc caches the parsed go doc info per namespace package;
	// populated exactly once by getGetTplPackagesGoDoc.
	tplPackagesGoDoc     map[string]map[string]methodGoDocInfo
	tplPackagesGoDocInit sync.Once
)
+
// getGetTplPackagesGoDoc lazily parses the Go sources of every package under
// tpl/ and collects, per package, the doc comment and parameter names of each
// method on that package's Namespace type. The result is computed once and
// cached. Errors are fatal via log.Fatal, which is acceptable here since this
// runs in documentation-generation/test contexts, not in normal site builds —
// TODO confirm no server code path reaches this.
func getGetTplPackagesGoDoc() map[string]map[string]methodGoDocInfo {
	tplPackagesGoDocInit.Do(func() {
		tplPackagesGoDoc = make(map[string]map[string]methodGoDocInfo)
		pwd, err := os.Getwd()
		if err != nil {
			log.Fatal(err)
		}

		fset := token.NewFileSet()

		// pwd will be inside one of the namespace packages during tests
		var basePath string
		if strings.Contains(pwd, "tpl") {
			basePath = filepath.Join(pwd, "..")
		} else {
			basePath = filepath.Join(pwd, "tpl")
		}

		files, err := ioutil.ReadDir(basePath)
		if err != nil {
			log.Fatal(err)
		}

		for _, fi := range files {
			if !fi.IsDir() {
				continue
			}

			namespaceDoc := make(map[string]methodGoDocInfo)
			packagePath := filepath.Join(basePath, fi.Name())

			d, err := parser.ParseDir(fset, packagePath, nil, parser.ParseComments)
			if err != nil {
				log.Fatal(err)
			}

			for _, f := range d {
				p := doc.New(f, "./", 0)

				// Only methods declared on a type named Namespace are
				// considered template functions.
				for _, t := range p.Types {
					if t.Name == "Namespace" {
						for _, tt := range t.Methods {
							var args []string
							for _, p := range tt.Decl.Type.Params.List {
								for _, pp := range p.Names {
									args = append(args, pp.Name)
								}
							}

							description := strings.TrimSpace(tt.Doc)
							di := methodGoDocInfo{Description: description, Args: args}
							namespaceDoc[tt.Name] = di
						}
					}
				}
			}

			tplPackagesGoDoc[fi.Name()] = namespaceDoc
		}
	})

	return tplPackagesGoDoc
}
diff --git a/tpl/js/init.go b/tpl/js/init.go
new file mode 100644
index 000000000..d57e0fdcb
--- /dev/null
+++ b/tpl/js/init.go
@@ -0,0 +1,36 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package js
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "js"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/js/js.go b/tpl/js/js.go
new file mode 100644
index 000000000..bb8d20966
--- /dev/null
+++ b/tpl/js/js.go
@@ -0,0 +1,65 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package js provides functions for building JavaScript resources
+package js
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+ "github.com/gohugoio/hugo/resources/resource_transformers/js"
+ "github.com/gohugoio/hugo/tpl/internal/resourcehelpers"
+)
+
+// New returns a new instance of the js-namespaced template functions.
+func New(deps *deps.Deps) *Namespace {
+ if deps.ResourceSpec == nil {
+ return &Namespace{}
+ }
+ return &Namespace{
+ client: js.New(deps.BaseFs.Assets, deps.ResourceSpec),
+ }
+}
+
// Namespace provides template functions for the "js" namespace.
type Namespace struct {
	// NOTE(review): deps is never assigned by New in this file — presumably
	// reserved for later use; confirm before relying on it being non-nil.
	deps   *deps.Deps
	client *js.Client
}
+
+// Build processes the given Resource with ESBuild.
+func (ns *Namespace) Build(args ...any) (resource.Resource, error) {
+ var (
+ r resources.ResourceTransformer
+ m map[string]any
+ targetPath string
+ err error
+ ok bool
+ )
+
+ r, targetPath, ok = resourcehelpers.ResolveIfFirstArgIsString(args)
+
+ if !ok {
+ r, m, err = resourcehelpers.ResolveArgs(args)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if targetPath != "" {
+ m = map[string]any{"targetPath": targetPath}
+ }
+
+ return ns.client.Process(r, m)
+}
diff --git a/tpl/lang/init.go b/tpl/lang/init.go
new file mode 100644
index 000000000..3f7b57ffc
--- /dev/null
+++ b/tpl/lang/init.go
@@ -0,0 +1,81 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package lang
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
// name is the name of this template function namespace.
const name = "lang"

// init registers the "lang" namespace, its aliases ("i18n"/"T" for
// Translate) and the documentation examples for the formatting funcs.
// The example pairs are input/expected-output data consumed by the docs
// tooling; they must stay byte-exact.
func init() {
	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
		ctx := New(d, langs.GetTranslator(d.Language))

		ns := &internal.TemplateFuncsNamespace{
			Name:    name,
			Context: func(args ...any) (any, error) { return ctx, nil },
		}

		ns.AddMethodMapping(ctx.Translate,
			[]string{"i18n", "T"},
			[][2]string{},
		)

		ns.AddMethodMapping(ctx.FormatNumber,
			nil,
			[][2]string{
				{`{{ 512.5032 | lang.FormatNumber 2 }}`, `512.50`},
			},
		)

		ns.AddMethodMapping(ctx.FormatPercent,
			nil,
			[][2]string{
				{`{{ 512.5032 | lang.FormatPercent 2 }}`, `512.50%`},
			},
		)

		ns.AddMethodMapping(ctx.FormatCurrency,
			nil,
			[][2]string{
				{`{{ 512.5032 | lang.FormatCurrency 2 "USD" }}`, `$512.50`},
			},
		)

		ns.AddMethodMapping(ctx.FormatAccounting,
			nil,
			[][2]string{
				{`{{ 512.5032 | lang.FormatAccounting 2 "NOK" }}`, `NOK512.50`},
			},
		)

		ns.AddMethodMapping(ctx.FormatNumberCustom,
			nil,
			[][2]string{
				{`{{ lang.FormatNumberCustom 2 12345.6789 }}`, `12,345.68`},
				{`{{ lang.FormatNumberCustom 2 12345.6789 "- , ." }}`, `12.345,68`},
				{`{{ lang.FormatNumberCustom 6 -12345.6789 "- ." }}`, `-12345.678900`},
				{`{{ lang.FormatNumberCustom 0 -12345.6789 "- . ," }}`, `-12,346`},
				{`{{ -98765.4321 | lang.FormatNumberCustom 2 }}`, `-98,765.43`},
			},
		)

		return ns
	}

	internal.AddTemplateFuncsNamespace(f)
}
diff --git a/tpl/lang/lang.go b/tpl/lang/lang.go
new file mode 100644
index 000000000..17d37faa4
--- /dev/null
+++ b/tpl/lang/lang.go
@@ -0,0 +1,266 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package lang provides template functions for content internationalization.
+package lang
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+ "strings"
+
+ "errors"
+
+ "github.com/gohugoio/locales"
+ translators "github.com/gohugoio/localescompressed"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the lang-namespaced template functions.
+func New(deps *deps.Deps, translator locales.Translator) *Namespace {
+ return &Namespace{
+ translator: translator,
+ deps: deps,
+ }
+}
+
+// Namespace provides template functions for the "lang" namespace.
+type Namespace struct {
+ translator locales.Translator
+ deps *deps.Deps
+}
+
+// Translate returns a translated string for id.
+func (ns *Namespace) Translate(id any, args ...any) (string, error) {
+ var templateData any
+
+ if len(args) > 0 {
+ if len(args) > 1 {
+ return "", fmt.Errorf("wrong number of arguments, expecting at most 2, got %d", len(args)+1)
+ }
+ templateData = args[0]
+ }
+
+ sid, err := cast.ToStringE(id)
+ if err != nil {
+		return "", err
+ }
+
+ return ns.deps.Translate(sid, templateData), nil
+}
+
+// FormatNumber formats number with the given precision for the current language.
+func (ns *Namespace) FormatNumber(precision, number any) (string, error) {
+ p, n, err := ns.castPrecisionNumber(precision, number)
+ if err != nil {
+ return "", err
+ }
+ return ns.translator.FmtNumber(n, p), nil
+}
+
+// FormatPercent formats number with the given precision for the current language.
+// Note that the number is assumed to be a percentage.
+func (ns *Namespace) FormatPercent(precision, number any) (string, error) {
+ p, n, err := ns.castPrecisionNumber(precision, number)
+ if err != nil {
+ return "", err
+ }
+ return ns.translator.FmtPercent(n, p), nil
+}
+
+// FormatCurrency returns the currency representation of number for the given currency and precision
+// for the current language.
+//
+// The return value is formatted with at least two decimal places.
+func (ns *Namespace) FormatCurrency(precision, currency, number any) (string, error) {
+ p, n, err := ns.castPrecisionNumber(precision, number)
+ if err != nil {
+ return "", err
+ }
+ c := translators.GetCurrency(cast.ToString(currency))
+ if c < 0 {
+ return "", fmt.Errorf("unknown currency code: %q", currency)
+ }
+ return ns.translator.FmtCurrency(n, p, c), nil
+}
+
+// FormatAccounting returns the currency representation of number for the given currency and precision
+// for the current language in accounting notation.
+//
+// The return value is formatted with at least two decimal places.
+func (ns *Namespace) FormatAccounting(precision, currency, number any) (string, error) {
+ p, n, err := ns.castPrecisionNumber(precision, number)
+ if err != nil {
+ return "", err
+ }
+ c := translators.GetCurrency(cast.ToString(currency))
+ if c < 0 {
+ return "", fmt.Errorf("unknown currency code: %q", currency)
+ }
+ return ns.translator.FmtAccounting(n, p, c), nil
+}
+
+func (ns *Namespace) castPrecisionNumber(precision, number any) (uint64, float64, error) {
+ p, err := cast.ToUint64E(precision)
+ if err != nil {
+ return 0, 0, err
+ }
+
+ // Sanity check.
+ if p > 20 {
+ return 0, 0, fmt.Errorf("invalid precision: %d", precision)
+ }
+
+ n, err := cast.ToFloat64E(number)
+ if err != nil {
+ return 0, 0, err
+ }
+ return p, n, nil
+}
+
+// FormatNumberCustom formats a number with the given precision using the
+// negative, decimal, and grouping options. The `options`
+// parameter is a string consisting of `<negative> <decimal> <grouping>`. The
+// default `options` value is `- . ,`.
+//
+// Note that numbers are rounded up at 5 or greater.
+// So, with precision set to 0, 1.5 becomes `2`, and 1.4 becomes `1`.
+//
+// For a simpler function that adapts to the current language, see FormatNumber.
+func (ns *Namespace) FormatNumberCustom(precision, number any, options ...any) (string, error) {
+ prec, err := cast.ToIntE(precision)
+ if err != nil {
+ return "", err
+ }
+
+ n, err := cast.ToFloat64E(number)
+ if err != nil {
+ return "", err
+ }
+
+ var neg, dec, grp string
+
+ if len(options) == 0 {
+ // defaults
+ neg, dec, grp = "-", ".", ","
+ } else {
+ delim := " "
+
+ if len(options) == 2 {
+ // custom delimiter
+ s, err := cast.ToStringE(options[1])
+ if err != nil {
+				return "", err
+ }
+
+ delim = s
+ }
+
+ s, err := cast.ToStringE(options[0])
+ if err != nil {
+			return "", err
+ }
+
+ rs := strings.Split(s, delim)
+ switch len(rs) {
+ case 0:
+ case 1:
+ neg = rs[0]
+ case 2:
+ neg, dec = rs[0], rs[1]
+ case 3:
+ neg, dec, grp = rs[0], rs[1], rs[2]
+ default:
+ return "", errors.New("too many fields in options parameter to NumFmt")
+ }
+ }
+
+ exp := math.Pow(10.0, float64(prec))
+ r := math.Round(n*exp) / exp
+
+ // Logic from MIT Licensed github.com/gohugoio/locales/
+ // Original Copyright (c) 2016 Go Playground
+
+ s := strconv.FormatFloat(math.Abs(r), 'f', prec, 64)
+ L := len(s) + 2 + len(s[:len(s)-1-prec])/3
+
+ var count int
+ inWhole := prec == 0
+ b := make([]byte, 0, L)
+
+ for i := len(s) - 1; i >= 0; i-- {
+ if s[i] == '.' {
+ for j := len(dec) - 1; j >= 0; j-- {
+ b = append(b, dec[j])
+ }
+ inWhole = true
+ continue
+ }
+
+ if inWhole {
+ if count == 3 {
+ for j := len(grp) - 1; j >= 0; j-- {
+ b = append(b, grp[j])
+ }
+ count = 1
+ } else {
+ count++
+ }
+ }
+
+ b = append(b, s[i])
+ }
+
+ if n < 0 {
+ for j := len(neg) - 1; j >= 0; j-- {
+ b = append(b, neg[j])
+ }
+ }
+
+ // reverse
+ for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
+ b[i], b[j] = b[j], b[i]
+ }
+
+ return string(b), nil
+}
+
+// NumFmt is deprecated, use FormatNumberCustom.
+// We renamed this in Hugo 0.87.
+// Deprecated: Use FormatNumberCustom
+func (ns *Namespace) NumFmt(precision, number any, options ...any) (string, error) {
+ return ns.FormatNumberCustom(precision, number, options...)
+}
+
+type pagesLanguageMerger interface {
+ MergeByLanguageInterface(other any) (any, error)
+}
+
+// Merge creates a union of pages from two languages.
+func (ns *Namespace) Merge(p2, p1 any) (any, error) {
+ if !hreflect.IsTruthful(p1) {
+ return p2, nil
+ }
+ if !hreflect.IsTruthful(p2) {
+ return p1, nil
+ }
+ merger, ok := p1.(pagesLanguageMerger)
+ if !ok {
+ return nil, fmt.Errorf("language merge not supported for %T", p1)
+ }
+ return merger.MergeByLanguageInterface(p2)
+}
diff --git a/tpl/lang/lang_test.go b/tpl/lang/lang_test.go
new file mode 100644
index 000000000..8d5430f6f
--- /dev/null
+++ b/tpl/lang/lang_test.go
@@ -0,0 +1,140 @@
+package lang
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+ translators "github.com/gohugoio/localescompressed"
+)
+
+func TestNumFmt(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New(&deps.Deps{}, nil)
+
+ cases := []struct {
+ prec int
+ n float64
+ runes string
+ delim string
+
+ want string
+ }{
+ {2, -12345.6789, "", "", "-12,345.68"},
+ {2, -12345.6789, "- . ,", "", "-12,345.68"},
+ {2, -12345.1234, "- . ,", "", "-12,345.12"},
+
+ {2, 12345.6789, "- . ,", "", "12,345.68"},
+ {0, 12345.6789, "- . ,", "", "12,346"},
+ {11, -12345.6789, "- . ,", "", "-12,345.67890000000"},
+
+ {2, 927.675, "- .", "", "927.68"},
+ {2, 1927.675, "- .", "", "1927.68"},
+ {2, 2927.675, "- .", "", "2927.68"},
+
+ {3, -12345.6789, "- ,", "", "-12345,679"},
+ {6, -12345.6789, "- , .", "", "-12.345,678900"},
+
+ {3, -12345.6789, "-|,| ", "|", "-12 345,679"},
+ {6, -12345.6789, "-|,| ", "|", "-12 345,678900"},
+
+ // Arabic, ar_AE
+ {6, -12345.6789, "‏- ٫ ٬", "", "‏-12٬345٫678900"},
+ {6, -12345.6789, "‏-|٫| ", "|", "‏-12 345٫678900"},
+ }
+
+ for _, cas := range cases {
+ var s string
+ var err error
+
+ if len(cas.runes) == 0 {
+ s, err = ns.FormatNumberCustom(cas.prec, cas.n)
+ } else {
+ if cas.delim == "" {
+ s, err = ns.FormatNumberCustom(cas.prec, cas.n, cas.runes)
+ } else {
+ s, err = ns.FormatNumberCustom(cas.prec, cas.n, cas.runes, cas.delim)
+ }
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(s, qt.Equals, cas.want)
+ }
+}
+
+func TestFormatNumbers(t *testing.T) {
+
+ c := qt.New(t)
+
+ nsNn := New(&deps.Deps{}, translators.GetTranslator("nn"))
+ nsEn := New(&deps.Deps{}, translators.GetTranslator("en"))
+ pi := 3.14159265359
+
+ c.Run("FormatNumber", func(c *qt.C) {
+ c.Parallel()
+ got, err := nsNn.FormatNumber(3, pi)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "3,142")
+
+ got, err = nsEn.FormatNumber(3, pi)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "3.142")
+ })
+
+ c.Run("FormatPercent", func(c *qt.C) {
+ c.Parallel()
+ got, err := nsEn.FormatPercent(3, 67.33333)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "67.333%")
+ })
+
+ c.Run("FormatCurrency", func(c *qt.C) {
+ c.Parallel()
+ got, err := nsEn.FormatCurrency(2, "USD", 20000)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "$20,000.00")
+ })
+
+ c.Run("FormatAccounting", func(c *qt.C) {
+ c.Parallel()
+ got, err := nsEn.FormatAccounting(2, "USD", 20000)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "$20,000.00")
+ })
+
+}
+
+// Issue 9446
+func TestLanguageKeyFormat(t *testing.T) {
+
+ c := qt.New(t)
+
+ nsUnderscoreUpper := New(&deps.Deps{}, translators.GetTranslator("es_ES"))
+ nsUnderscoreLower := New(&deps.Deps{}, translators.GetTranslator("es_es"))
+ nsHyphenUpper := New(&deps.Deps{}, translators.GetTranslator("es-ES"))
+ nsHyphenLower := New(&deps.Deps{}, translators.GetTranslator("es-es"))
+ pi := 3.14159265359
+
+ c.Run("FormatNumber", func(c *qt.C) {
+ c.Parallel()
+ got, err := nsUnderscoreUpper.FormatNumber(3, pi)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "3,142")
+
+ got, err = nsUnderscoreLower.FormatNumber(3, pi)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "3,142")
+
+ got, err = nsHyphenUpper.FormatNumber(3, pi)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "3,142")
+
+ got, err = nsHyphenLower.FormatNumber(3, pi)
+ c.Assert(err, qt.IsNil)
+ c.Assert(got, qt.Equals, "3,142")
+
+ })
+
+}
diff --git a/tpl/math/init.go b/tpl/math/init.go
new file mode 100644
index 000000000..b4b002a78
--- /dev/null
+++ b/tpl/math/init.go
@@ -0,0 +1,134 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package math
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "math"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New()
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Add,
+ []string{"add"},
+ [][2]string{
+ {"{{add 1 2}}", "3"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Ceil,
+ nil,
+ [][2]string{
+ {"{{math.Ceil 2.1}}", "3"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Div,
+ []string{"div"},
+ [][2]string{
+ {"{{div 6 3}}", "2"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Floor,
+ nil,
+ [][2]string{
+ {"{{math.Floor 1.9}}", "1"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Log,
+ nil,
+ [][2]string{
+ {"{{math.Log 1}}", "0"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Max,
+ nil,
+ [][2]string{
+ {"{{math.Max 1 2 }}", "2"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Min,
+ nil,
+ [][2]string{
+ {"{{math.Min 1 2 }}", "1"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Mod,
+ []string{"mod"},
+ [][2]string{
+ {"{{mod 15 3}}", "0"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ModBool,
+ []string{"modBool"},
+ [][2]string{
+ {"{{modBool 15 3}}", "true"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Mul,
+ []string{"mul"},
+ [][2]string{
+ {"{{mul 2 3}}", "6"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Pow,
+ []string{"pow"},
+ [][2]string{
+ {"{{math.Pow 2 3}}", "8"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Round,
+ nil,
+ [][2]string{
+ {"{{math.Round 1.5}}", "2"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Sqrt,
+ nil,
+ [][2]string{
+ {"{{math.Sqrt 81}}", "9"},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Sub,
+ []string{"sub"},
+ [][2]string{
+ {"{{sub 3 2}}", "1"},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/math/math.go b/tpl/math/math.go
new file mode 100644
index 000000000..257e803e4
--- /dev/null
+++ b/tpl/math/math.go
@@ -0,0 +1,176 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package math provides template functions for mathematical operations.
+package math
+
+import (
+ "errors"
+ "math"
+ "sync/atomic"
+
+ _math "github.com/gohugoio/hugo/common/math"
+
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the math-namespaced template functions.
+func New() *Namespace {
+ return &Namespace{}
+}
+
+// Namespace provides template functions for the "math" namespace.
+type Namespace struct{}
+
+// Add adds the two addends n1 and n2.
+func (ns *Namespace) Add(n1, n2 any) (any, error) {
+ return _math.DoArithmetic(n1, n2, '+')
+}
+
+// Ceil returns the least integer value greater than or equal to n.
+func (ns *Namespace) Ceil(n any) (float64, error) {
+ xf, err := cast.ToFloat64E(n)
+ if err != nil {
+ return 0, errors.New("Ceil operator can't be used with non-float value")
+ }
+
+ return math.Ceil(xf), nil
+}
+
+// Div divides n1 by n2.
+func (ns *Namespace) Div(n1, n2 any) (any, error) {
+ return _math.DoArithmetic(n1, n2, '/')
+}
+
+// Floor returns the greatest integer value less than or equal to n.
+func (ns *Namespace) Floor(n any) (float64, error) {
+ xf, err := cast.ToFloat64E(n)
+ if err != nil {
+ return 0, errors.New("Floor operator can't be used with non-float value")
+ }
+
+ return math.Floor(xf), nil
+}
+
+// Log returns the natural logarithm of the number n.
+func (ns *Namespace) Log(n any) (float64, error) {
+ af, err := cast.ToFloat64E(n)
+ if err != nil {
+ return 0, errors.New("Log operator can't be used with non integer or float value")
+ }
+
+ return math.Log(af), nil
+}
+
+// Max returns the greater of the two numbers n1 or n2.
+func (ns *Namespace) Max(n1, n2 any) (float64, error) {
+ af, erra := cast.ToFloat64E(n1)
+ bf, errb := cast.ToFloat64E(n2)
+
+ if erra != nil || errb != nil {
+ return 0, errors.New("Max operator can't be used with non-float value")
+ }
+
+ return math.Max(af, bf), nil
+}
+
+// Min returns the smaller of two numbers n1 or n2.
+func (ns *Namespace) Min(n1, n2 any) (float64, error) {
+ af, erra := cast.ToFloat64E(n1)
+ bf, errb := cast.ToFloat64E(n2)
+
+ if erra != nil || errb != nil {
+ return 0, errors.New("Min operator can't be used with non-float value")
+ }
+
+ return math.Min(af, bf), nil
+}
+
+// Mod returns n1 % n2.
+func (ns *Namespace) Mod(n1, n2 any) (int64, error) {
+ ai, erra := cast.ToInt64E(n1)
+ bi, errb := cast.ToInt64E(n2)
+
+ if erra != nil || errb != nil {
+ return 0, errors.New("modulo operator can't be used with non integer value")
+ }
+
+ if bi == 0 {
+ return 0, errors.New("the number can't be divided by zero at modulo operation")
+ }
+
+ return ai % bi, nil
+}
+
+// ModBool returns the boolean of n1 % n2. If n1 % n2 == 0, return true.
+func (ns *Namespace) ModBool(n1, n2 any) (bool, error) {
+ res, err := ns.Mod(n1, n2)
+ if err != nil {
+ return false, err
+ }
+
+ return res == int64(0), nil
+}
+
+// Mul multiplies the two numbers n1 and n2.
+func (ns *Namespace) Mul(n1, n2 any) (any, error) {
+ return _math.DoArithmetic(n1, n2, '*')
+}
+
+// Pow returns n1 raised to the power of n2.
+func (ns *Namespace) Pow(n1, n2 any) (float64, error) {
+ af, erra := cast.ToFloat64E(n1)
+ bf, errb := cast.ToFloat64E(n2)
+
+ if erra != nil || errb != nil {
+ return 0, errors.New("Pow operator can't be used with non-float value")
+ }
+
+ return math.Pow(af, bf), nil
+}
+
+// Round returns the integer nearest to n, rounding half away from zero.
+func (ns *Namespace) Round(n any) (float64, error) {
+ xf, err := cast.ToFloat64E(n)
+ if err != nil {
+ return 0, errors.New("Round operator can't be used with non-float value")
+ }
+
+ return _round(xf), nil
+}
+
+// Sqrt returns the square root of the number n.
+func (ns *Namespace) Sqrt(n any) (float64, error) {
+ af, err := cast.ToFloat64E(n)
+ if err != nil {
+ return 0, errors.New("Sqrt operator can't be used with non integer or float value")
+ }
+
+ return math.Sqrt(af), nil
+}
+
+// Sub subtracts n2 from n1.
+func (ns *Namespace) Sub(n1, n2 any) (any, error) {
+ return _math.DoArithmetic(n1, n2, '-')
+}
+
+var counter uint64
+
+// Counter increments and returns a global counter.
+// This was originally added to be used in tests where now.UnixNano did not
+// have the needed precision (especially on Windows).
+// Note that given the parallel nature of Hugo, you cannot use this to get sequences of numbers,
+// and the counter will reset on new builds.
+func (ns *Namespace) Counter() uint64 {
+ return atomic.AddUint64(&counter, uint64(1))
+}
diff --git a/tpl/math/math_test.go b/tpl/math/math_test.go
new file mode 100644
index 000000000..cdec43d5f
--- /dev/null
+++ b/tpl/math/math_test.go
@@ -0,0 +1,437 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package math
+
+import (
+ "math"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestBasicNSArithmetic(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ fn func(a, b any) (any, error)
+ a any
+ b any
+ expect any
+ }{
+ {ns.Add, 4, 2, int64(6)},
+ {ns.Add, 1.0, "foo", false},
+ {ns.Sub, 4, 2, int64(2)},
+ {ns.Sub, 1.0, "foo", false},
+ {ns.Mul, 4, 2, int64(8)},
+ {ns.Mul, 1.0, "foo", false},
+ {ns.Div, 4, 2, int64(2)},
+ {ns.Div, 1.0, "foo", false},
+ } {
+
+ result, err := test.fn(test.a, test.b)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestCeil(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+ ns := New()
+
+ for _, test := range []struct {
+ x any
+ expect any
+ }{
+ {0.1, 1.0},
+ {0.5, 1.0},
+ {1.1, 2.0},
+ {1.5, 2.0},
+ {-0.1, 0.0},
+ {-0.5, 0.0},
+ {-1.1, -1.0},
+ {-1.5, -1.0},
+ {"abc", false},
+ } {
+
+ result, err := ns.Ceil(test.x)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestFloor(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ x any
+ expect any
+ }{
+ {0.1, 0.0},
+ {0.5, 0.0},
+ {1.1, 1.0},
+ {1.5, 1.0},
+ {-0.1, -1.0},
+ {-0.5, -1.0},
+ {-1.1, -2.0},
+ {-1.5, -2.0},
+ {"abc", false},
+ } {
+
+ result, err := ns.Floor(test.x)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestLog(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ expect any
+ }{
+ {1, float64(0)},
+ {3, float64(1.0986)},
+ {0, float64(math.Inf(-1))},
+ {1.0, float64(0)},
+ {3.1, float64(1.1314)},
+ {"abc", false},
+ } {
+
+ result, err := ns.Log(test.a)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ // we compare only 4 digits behind point if its a real float
+ // otherwise we usually get different float values on the last positions
+ if result != math.Inf(-1) {
+ result = float64(int(result*10000)) / 10000
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+
+ // Separate test for Log(-1) -- returns NaN
+ result, err := ns.Log(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Satisfies, math.IsNaN)
+}
+
+func TestSqrt(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ expect any
+ }{
+ {81, float64(9)},
+ {0.25, float64(0.5)},
+ {0, float64(0)},
+ {"abc", false},
+ } {
+
+ result, err := ns.Sqrt(test.a)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ // we compare only 4 digits behind point if its a real float
+ // otherwise we usually get different float values on the last positions
+ if result != math.Inf(-1) {
+ result = float64(int(result*10000)) / 10000
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+
+ // Separate test for Sqrt(-1) -- returns NaN
+ result, err := ns.Sqrt(-1)
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Satisfies, math.IsNaN)
+}
+
+func TestMod(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ b any
+ expect any
+ }{
+ {3, 2, int64(1)},
+ {3, 1, int64(0)},
+ {3, 0, false},
+ {0, 3, int64(0)},
+ {3.1, 2, int64(1)},
+ {3, 2.1, int64(1)},
+ {3.1, 2.1, int64(1)},
+ {int8(3), int8(2), int64(1)},
+ {int16(3), int16(2), int64(1)},
+ {int32(3), int32(2), int64(1)},
+ {int64(3), int64(2), int64(1)},
+ {"3", "2", int64(1)},
+ {"3.1", "2", false},
+ {"aaa", "0", false},
+ {"3", "aaa", false},
+ } {
+
+ result, err := ns.Mod(test.a, test.b)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestModBool(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ b any
+ expect any
+ }{
+ {3, 3, true},
+ {3, 2, false},
+ {3, 1, true},
+ {3, 0, nil},
+ {0, 3, true},
+ {3.1, 2, false},
+ {3, 2.1, false},
+ {3.1, 2.1, false},
+ {int8(3), int8(3), true},
+ {int8(3), int8(2), false},
+ {int16(3), int16(3), true},
+ {int16(3), int16(2), false},
+ {int32(3), int32(3), true},
+ {int32(3), int32(2), false},
+ {int64(3), int64(3), true},
+ {int64(3), int64(2), false},
+ {"3", "3", true},
+ {"3", "2", false},
+ {"3.1", "2", nil},
+ {"aaa", "0", nil},
+ {"3", "aaa", nil},
+ } {
+
+ result, err := ns.ModBool(test.a, test.b)
+
+ if test.expect == nil {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestRound(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ x any
+ expect any
+ }{
+ {0.1, 0.0},
+ {0.5, 1.0},
+ {1.1, 1.0},
+ {1.5, 2.0},
+ {-0.1, -0.0},
+ {-0.5, -1.0},
+ {-1.1, -1.0},
+ {-1.5, -2.0},
+ {"abc", false},
+ } {
+
+ result, err := ns.Round(test.x)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestPow(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ b any
+ expect any
+ }{
+ {0, 0, float64(1)},
+ {2, 0, float64(1)},
+ {2, 3, float64(8)},
+ {-2, 3, float64(-8)},
+ {2, -3, float64(0.125)},
+ {-2, -3, float64(-0.125)},
+ {0.2, 3, float64(0.008)},
+ {2, 0.3, float64(1.2311)},
+ {0.2, 0.3, float64(0.617)},
+ {"aaa", "3", false},
+ {"2", "aaa", false},
+ } {
+
+ result, err := ns.Pow(test.a, test.b)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ // we compare only 4 digits behind point if its a real float
+ // otherwise we usually get different float values on the last positions
+ result = float64(int(result*10000)) / 10000
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestMax(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ b any
+ expect any
+ }{
+ {-1, -1, float64(-1)},
+ {-1, 0, float64(0)},
+ {-1, 1, float64(1)},
+ {0, -1, float64(0)},
+ {0, 0, float64(0)},
+ {0, 1, float64(1)},
+ {1, -1, float64(1)},
+ {1, 0, float64(1)},
+ {1, 1, float64(1)},
+ {1.2, 1.23, float64(1.23)},
+ {-1.2, -1.23, float64(-1.2)},
+ {0, "a", false},
+ {"a", 0, false},
+ {"a", "b", false},
+ } {
+
+ result, err := ns.Max(test.a, test.b)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestMin(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ ns := New()
+
+ for _, test := range []struct {
+ a any
+ b any
+ expect any
+ }{
+ {-1, -1, float64(-1)},
+ {-1, 0, float64(-1)},
+ {-1, 1, float64(-1)},
+ {0, -1, float64(-1)},
+ {0, 0, float64(0)},
+ {0, 1, float64(0)},
+ {1, -1, float64(-1)},
+ {1, 0, float64(0)},
+ {1, 1, float64(1)},
+ {1.2, 1.23, float64(1.2)},
+ {-1.2, -1.23, float64(-1.23)},
+ {0, "a", false},
+ {"a", 0, false},
+ {"a", "b", false},
+ } {
+
+ result, err := ns.Min(test.a, test.b)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
diff --git a/tpl/math/round.go b/tpl/math/round.go
new file mode 100644
index 000000000..9b33120af
--- /dev/null
+++ b/tpl/math/round.go
@@ -0,0 +1,61 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// According to https://github.com/golang/go/issues/20100, the Go stdlib will
+// include math.Round beginning with Go 1.10.
+//
+// The following implementation was taken from https://golang.org/cl/43652.
+
+package math
+
+import "math"
+
+const (
+ mask = 0x7FF
+ shift = 64 - 11 - 1
+ bias = 1023
+)
+
+// Round returns the nearest integer, rounding half away from zero.
+//
+// Special cases are:
+// Round(±0) = ±0
+// Round(±Inf) = ±Inf
+// Round(NaN) = NaN
+func _round(x float64) float64 {
+ // Round is a faster implementation of:
+ //
+ // func Round(x float64) float64 {
+ // t := Trunc(x)
+ // if Abs(x-t) >= 0.5 {
+ // return t + Copysign(1, x)
+ // }
+ // return t
+ // }
+ const (
+ signMask = 1 << 63
+ fracMask = 1<<shift - 1
+ half = 1 << (shift - 1)
+ one = bias << shift
+ )
+
+ bits := math.Float64bits(x)
+ e := uint(bits>>shift) & mask
+ if e < bias {
+ // Round abs(x) < 1 including denormals.
+ bits &= signMask // +-0
+ if e == bias-1 {
+ bits |= one // +-1
+ }
+ } else if e < bias+shift {
+ // Round any abs(x) >= 1 containing a fractional component [0,1).
+ //
+ // Numbers with larger exponents are returned unchanged since they
+ // must be either an integer, infinity, or NaN.
+ e -= bias
+ bits += half >> e
+ bits &^= fracMask >> e
+ }
+ return math.Float64frombits(bits)
+}
diff --git a/tpl/openapi/openapi3/init.go b/tpl/openapi/openapi3/init.go
new file mode 100644
index 000000000..8597e3294
--- /dev/null
+++ b/tpl/openapi/openapi3/init.go
@@ -0,0 +1,41 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openapi3
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "openapi3"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Unmarshal,
+ nil,
+ [][2]string{},
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/openapi/openapi3/integration_test.go b/tpl/openapi/openapi3/integration_test.go
new file mode 100644
index 000000000..d3be0eda9
--- /dev/null
+++ b/tpl/openapi/openapi3/integration_test.go
@@ -0,0 +1,74 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openapi3_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestUnmarshal(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- assets/api/myapi.yaml --
+openapi: 3.0.0
+info:
+ title: Sample API
+ description: Optional multiline or single-line description in [CommonMark](http://commonmark.org/help/) or HTML.
+ version: 0.1.9
+servers:
+ - url: http://api.example.com/v1
+ description: Optional server description, e.g. Main (production) server
+ - url: http://staging-api.example.com
+ description: Optional server description, e.g. Internal staging server for testing
+paths:
+ /users:
+ get:
+ summary: Returns a list of users.
+ description: Optional extended description in CommonMark or HTML.
+ responses:
+ '200': # status code
+ description: A JSON array of user names
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+-- config.toml --
+baseURL = 'http://example.com/'
+-- layouts/index.html --
+{{ $api := resources.Get "api/myapi.yaml" | openapi3.Unmarshal }}
+API: {{ $api.Info.Title | safeHTML }}
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ Running: true,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `API: Sample API`)
+
+ b.
+ EditFileReplace("assets/api/myapi.yaml", func(s string) string { return strings.ReplaceAll(s, "Sample API", "Hugo API") }).
+ Build()
+
+ b.AssertFileContent("public/index.html", `API: Hugo API`)
+}
diff --git a/tpl/openapi/openapi3/openapi3.go b/tpl/openapi/openapi3/openapi3.go
new file mode 100644
index 000000000..1eea04b25
--- /dev/null
+++ b/tpl/openapi/openapi3/openapi3.go
@@ -0,0 +1,95 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package openapi3
+
+import (
+ "fmt"
+ "io/ioutil"
+
+ gyaml "github.com/ghodss/yaml"
+
+ "errors"
+
+ kopenapi3 "github.com/getkin/kin-openapi/openapi3"
+ "github.com/gohugoio/hugo/cache/namedmemcache"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+ "github.com/gohugoio/hugo/resources/resource"
+)
+
// New returns a new instance of the openapi3-namespaced template functions.
func New(deps *deps.Deps) *Namespace {
	// TODO(bep) consolidate when merging that "other branch" -- but be aware of the keys.
	cache := namedmemcache.New()
	// Drop all cached documents at the start of each build so edited specs
	// are re-parsed.
	deps.BuildStartListeners.Add(
		func() {
			cache.Clear()
		})

	return &Namespace{
		cache: cache,
		deps:  deps,
	}
}
+
// Namespace provides template functions for the "openapi3" namespace.
type Namespace struct {
	cache *namedmemcache.Cache // parsed documents, keyed by resource Key
	deps  *deps.Deps
}
+
// Unmarshal unmarshals the given resource (YAML or JSON) into an OpenAPI 3
// document. Results are cached by the resource's Key until the next build.
func (ns *Namespace) Unmarshal(r resource.UnmarshableResource) (*kopenapi3.T, error) {
	key := r.Key()
	if key == "" {
		return nil, errors.New("no Key set in Resource")
	}

	v, err := ns.cache.GetOrCreate(key, func() (any, error) {
		// Pick the decoder from the resource's MIME type.
		f := metadecoders.FormatFromMediaType(r.MediaType())
		if f == "" {
			return nil, fmt.Errorf("MIME %q not supported", r.MediaType())
		}

		reader, err := r.ReadSeekCloser()
		if err != nil {
			return nil, err
		}
		defer reader.Close()

		b, err := ioutil.ReadAll(reader)
		if err != nil {
			return nil, err
		}

		s := &kopenapi3.T{}
		switch f {
		case metadecoders.YAML:
			// Use ghodss/yaml for YAML so decoding honors the struct's
			// JSON tags (kin-openapi's types are JSON-tagged).
			err = gyaml.Unmarshal(b, s)
		default:
			err = metadecoders.Default.UnmarshalTo(b, f, s)
		}
		if err != nil {
			return nil, err
		}

		// Resolve any internal $ref references in the document.
		err = kopenapi3.NewLoader().ResolveRefsIn(s, nil)

		return s, err
	})
	if err != nil {
		return nil, err
	}

	return v.(*kopenapi3.T), nil
}
diff --git a/tpl/os/init.go b/tpl/os/init.go
new file mode 100644
index 000000000..cd9e370cd
--- /dev/null
+++ b/tpl/os/init.go
@@ -0,0 +1,62 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package os
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
// name is the template-funcs namespace registered by this package.
const name = "os"

// init registers the "os" namespace and its legacy top-level aliases
// (getenv, readDir, readFile, fileExists) with the template engine.
func init() {
	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
		ctx := New(d)

		ns := &internal.TemplateFuncsNamespace{
			Name:    name,
			Context: func(args ...any) (any, error) { return ctx, nil },
		}

		ns.AddMethodMapping(ctx.Getenv,
			[]string{"getenv"},
			[][2]string{},
		)

		ns.AddMethodMapping(ctx.ReadDir,
			[]string{"readDir"},
			[][2]string{
				{`{{ range (readDir "files") }}{{ .Name }}{{ end }}`, "README.txt"},
			},
		)

		ns.AddMethodMapping(ctx.ReadFile,
			[]string{"readFile"},
			[][2]string{
				{`{{ readFile "files/README.txt" }}`, `Hugo Rocks!`},
			},
		)

		ns.AddMethodMapping(ctx.FileExists,
			[]string{"fileExists"},
			[][2]string{
				{`{{ fileExists "foo.txt" }}`, `false`},
			},
		)

		return ns
	}

	internal.AddTemplateFuncsNamespace(f)
}
diff --git a/tpl/os/integration_test.go b/tpl/os/integration_test.go
new file mode 100644
index 000000000..fe1bb3d6e
--- /dev/null
+++ b/tpl/os/integration_test.go
@@ -0,0 +1,51 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package os_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
// Issue 9599: readDir must be anchored to the project working directory,
// so files contributed by themes (mytheme.txt) are not listed.
func TestReadDirWorkDir(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
theme = "mytheme"
-- myproject.txt --
Hello project!
-- themes/mytheme/mytheme.txt --
Hello theme!
-- layouts/index.html --
{{ $entries := (readDir ".") }}
START:|{{ range $entry := $entries }}{{ if not $entry.IsDir }}{{ $entry.Name }}|{{ end }}{{ end }}:END:


  `

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			NeedsOsFS:   true,
		},
	).Build()

	// Only project-level files are visible; the theme file is excluded.
	b.AssertFileContent("public/index.html", `
START:|config.toml|myproject.txt|:END:
`)
}
diff --git a/tpl/os/os.go b/tpl/os/os.go
new file mode 100644
index 000000000..e7fd05939
--- /dev/null
+++ b/tpl/os/os.go
@@ -0,0 +1,158 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package os provides template functions for interacting with the operating
+// system.
+package os
+
+import (
+ "errors"
+ "fmt"
+ _os "os"
+ "path/filepath"
+
+ "github.com/bep/overlayfs"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/afero"
+ "github.com/spf13/cast"
+)
+
// New returns a new instance of the os-namespaced template functions.
func New(d *deps.Deps) *Namespace {
	var readFileFs, workFs afero.Fs

	// The docshelper script does not have or need all the dependencies set up.
	if d.PathSpec != nil {
		// readFile/fileExists/stat see the project plus the content fs.
		readFileFs = overlayfs.New(overlayfs.Options{
			Fss: []afero.Fs{
				d.PathSpec.BaseFs.Work,
				d.PathSpec.BaseFs.Content.Fs,
			},
		})
		// readDir is restricted to the project working directory. See #9599.
		workFs = d.PathSpec.BaseFs.WorkDir
	}

	return &Namespace{
		readFileFs: readFileFs,
		workFs:     workFs,
		deps:       d,
	}
}
+
// Namespace provides template functions for the "os" namespace.
type Namespace struct {
	readFileFs afero.Fs // project + content overlay, used by ReadFile/FileExists/Stat
	workFs     afero.Fs // project working directory only, used by ReadDir
	deps       *deps.Deps
}
+
+// Getenv retrieves the value of the environment variable named by the key.
+// It returns the value, which will be empty if the variable is not present.
+func (ns *Namespace) Getenv(key any) (string, error) {
+ skey, err := cast.ToStringE(key)
+ if err != nil {
+ return "", nil
+ }
+
+ if err = ns.deps.ExecHelper.Sec().CheckAllowedGetEnv(skey); err != nil {
+ return "", err
+ }
+
+ return _os.Getenv(skey), nil
+}
+
+// readFile reads the file named by filename in the given filesystem
+// and returns the contents as a string.
+func readFile(fs afero.Fs, filename string) (string, error) {
+ filename = filepath.Clean(filename)
+ if filename == "" || filename == "." || filename == string(_os.PathSeparator) {
+ return "", errors.New("invalid filename")
+ }
+
+ b, err := afero.ReadFile(fs, filename)
+ if err != nil {
+ return "", err
+ }
+
+ return string(b), nil
+}
+
// ReadFile reads the file named by filename relative to the configured WorkingDir.
// It returns the contents as a string.
// There is an upper size limit set at 1 megabyte.
func (ns *Namespace) ReadFile(i any) (string, error) {
	s, err := cast.ToStringE(i)
	if err != nil {
		return "", err
	}

	// Make absolute paths relative to the working dir so lookups stay
	// inside the project filesystem.
	if ns.deps.PathSpec != nil {
		s = ns.deps.PathSpec.RelPathify(s)
	}

	return readFile(ns.readFileFs, s)
}
+
+// ReadDir lists the directory contents relative to the configured WorkingDir.
+func (ns *Namespace) ReadDir(i any) ([]_os.FileInfo, error) {
+ path, err := cast.ToStringE(i)
+ if err != nil {
+ return nil, err
+ }
+
+ list, err := afero.ReadDir(ns.workFs, path)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read directory %q: %s", path, err)
+ }
+
+ return list, nil
+}
+
+// FileExists checks whether a file exists under the given path.
+func (ns *Namespace) FileExists(i any) (bool, error) {
+ path, err := cast.ToStringE(i)
+ if err != nil {
+ return false, err
+ }
+
+ if path == "" {
+ return false, errors.New("fileExists needs a path to a file")
+ }
+
+ status, err := afero.Exists(ns.readFileFs, path)
+ if err != nil {
+ return false, err
+ }
+
+ return status, nil
+}
+
+// Stat returns the os.FileInfo structure describing file.
+func (ns *Namespace) Stat(i any) (_os.FileInfo, error) {
+ path, err := cast.ToStringE(i)
+ if err != nil {
+ return nil, err
+ }
+
+ if path == "" {
+ return nil, errors.New("fileStat needs a path to a file")
+ }
+
+ r, err := ns.readFileFs.Stat(path)
+ if err != nil {
+ return nil, err
+ }
+
+ return r, nil
+}
diff --git a/tpl/os/os_test.go b/tpl/os/os_test.go
new file mode 100644
index 000000000..98befa061
--- /dev/null
+++ b/tpl/os/os_test.go
@@ -0,0 +1,128 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package os_test
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/tpl/os"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestReadFile checks ReadFile against absolute, relative, out-of-project,
// empty and missing paths. An expect of false means "must error".
func TestReadFile(t *testing.T) {
	t.Parallel()

	b := newFileTestBuilder(t).Build()

	// helpers.PrintFs(b.H.PathSpec.BaseFs.Work, "", _os.Stdout)

	ns := os.New(b.H.Deps)

	for _, test := range []struct {
		filename string
		expect   any
	}{
		{filepath.FromSlash("/f/f1.txt"), "f1-content"},
		{filepath.FromSlash("f/f1.txt"), "f1-content"},
		{filepath.FromSlash("../f2.txt"), false}, // outside the project
		{"", false},
		{"b", false},
	} {

		result, err := ns.ReadFile(test.filename)

		if bb, ok := test.expect.(bool); ok && !bb {
			b.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		b.Assert(err, qt.IsNil)
		b.Assert(result, qt.Equals, test.expect)
	}
}
+
// TestFileExists checks FileExists for present, absent and invalid paths.
// An expect of nil means "must error" (empty path).
func TestFileExists(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	b := newFileTestBuilder(t).Build()
	ns := os.New(b.H.Deps)

	for _, test := range []struct {
		filename string
		expect   any
	}{
		{filepath.FromSlash("/f/f1.txt"), true},
		{filepath.FromSlash("f/f1.txt"), true},
		{filepath.FromSlash("../f2.txt"), false},
		{"b", false},
		{"", nil},
	} {
		result, err := ns.FileExists(test.filename)

		if test.expect == nil {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}
+
// TestStat checks Stat: the expected value is the file size in bytes
// ("f1-content" plus newline = 10); nil means "must error".
func TestStat(t *testing.T) {
	t.Parallel()
	b := newFileTestBuilder(t).Build()
	ns := os.New(b.H.Deps)

	for _, test := range []struct {
		filename string
		expect   any
	}{
		{filepath.FromSlash("/f/f1.txt"), int64(10)},
		{filepath.FromSlash("f/f1.txt"), int64(10)},
		{"b", nil},
		{"", nil},
	} {
		result, err := ns.Stat(test.filename)

		if test.expect == nil {
			b.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		b.Assert(err, qt.IsNil)
		b.Assert(result.Size(), qt.Equals, test.expect)
	}
}
+
// newFileTestBuilder returns an (unbuilt) integration test site with one
// file inside the working dir (f/f1.txt) and one outside it (home/f2.txt)
// to exercise path scoping in the os template functions.
func newFileTestBuilder(t *testing.T) *hugolib.IntegrationTestBuilder {
	files := `
-- f/f1.txt --
f1-content
-- home/f2.txt --
f2-content
  `

	return hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			WorkingDir:  "/mywork",
		},
	)
}
diff --git a/tpl/partials/init.go b/tpl/partials/init.go
new file mode 100644
index 000000000..2662b8894
--- /dev/null
+++ b/tpl/partials/init.go
@@ -0,0 +1,55 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package partials
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
// namespaceName is the template-funcs namespace registered by this package.
const namespaceName = "partials"

// init registers the "partials" namespace and the partial/partialCached
// aliases, plus the special "return" function used inside partials.
func init() {
	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
		ctx := New(d)

		ns := &internal.TemplateFuncsNamespace{
			Name:    namespaceName,
			Context: func(args ...any) (any, error) { return ctx, nil },
		}

		ns.AddMethodMapping(ctx.Include,
			[]string{"partial"},
			[][2]string{
				{`{{ partial "header.html" . }}`, `<title>Hugo Rocks!</title>`},
			},
		)

		// TODO(bep) we need the return to be a valid identifier, but
		// should consider another way of adding it.
		// The real implementation is contextWrapper.Set; this stub only
		// reserves the name.
		ns.AddMethodMapping(func() string { return "" },
			[]string{"return"},
			[][2]string{},
		)

		ns.AddMethodMapping(ctx.IncludeCached,
			[]string{"partialCached"},
			[][2]string{},
		)

		return ns
	}

	internal.AddTemplateFuncsNamespace(f)
}
diff --git a/tpl/partials/integration_test.go b/tpl/partials/integration_test.go
new file mode 100644
index 000000000..bda5ddbd5
--- /dev/null
+++ b/tpl/partials/integration_test.go
@@ -0,0 +1,274 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package partials_test
+
+import (
+ "bytes"
+ "fmt"
+ "regexp"
+ "sort"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
// TestInclude renders a minimal site and checks that partials.Include
// inlines the partial's output.
func TestInclude(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
baseURL = 'http://example.com/'
-- layouts/index.html --
partial: {{ partials.Include "foo.html" . }}
-- layouts/partials/foo.html --
foo
  `

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
		},
	).Build()

	b.AssertFileContent("public/index.html", `
partial: foo
`)
}
+
// TestIncludeCached checks that repeated partials.IncludeCached calls for
// the same partial render consistently (second call served from cache).
func TestIncludeCached(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
baseURL = 'http://example.com/'
-- layouts/index.html --
partialCached: {{ partials.IncludeCached "foo.html" . }}
partialCached: {{ partials.IncludeCached "foo.html" . }}
-- layouts/partials/foo.html --
foo
  `

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
		},
	).Build()

	b.AssertFileContent("public/index.html", `
partialCached: foo
partialCached: foo
`)
}
+
// Issue 9519: a cached partial that itself calls partialCached must not
// deadlock or recurse forever.
func TestIncludeCachedRecursion(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
baseURL = 'http://example.com/'
-- layouts/index.html --
{{ partials.IncludeCached "p1.html" . }}
-- layouts/partials/p1.html --
{{ partials.IncludeCached "p2.html" . }}
-- layouts/partials/p2.html --
P2

  `

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
		},
	).Build()

	b.AssertFileContent("public/index.html", `
P2
`)
}
+
// Issue #588 — NOTE(review): likely means #9588; partials.go references
// #9588 for rendering outside the cache lock. A shortcode that calls
// partialCached while its page is rendered inside a cached partial must
// not deadlock.
func TestIncludeCachedRecursionShortcode(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
baseURL = 'http://example.com/'
-- content/_index.md --
---
title: "Index"
---
{{< short >}}
-- layouts/index.html --
{{ partials.IncludeCached "p1.html" . }}
-- layouts/partials/p1.html --
{{ .Content }}
{{ partials.IncludeCached "p2.html" . }}
-- layouts/partials/p2.html --
-- layouts/shortcodes/short.html --
SHORT
{{ partials.IncludeCached "p2.html" . }}
P2

  `

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
		},
	).Build()

	b.AssertFileContent("public/index.html", `
SHORT
P2
`)
}
+
// TestIncludeCacheHints checks the template metrics "cache potential"
// output: cached/uncached partials with static, dynamic and half-dynamic
// content should produce the expected hit percentages.
func TestIncludeCacheHints(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
baseURL = 'http://example.com/'
templateMetrics=true
templateMetricsHints=true
disableKinds = ["page", "section", "taxonomy", "term", "sitemap"]
[outputs]
home = ["HTML"]
-- layouts/index.html --
{{ partials.IncludeCached "static1.html" . }}
{{ partials.IncludeCached "static1.html" . }}
{{ partials.Include "static2.html" . }}

D1I: {{ partials.Include "dynamic1.html" . }}
D1C: {{ partials.IncludeCached "dynamic1.html" . }}
D1C: {{ partials.IncludeCached "dynamic1.html" . }}
D1C: {{ partials.IncludeCached "dynamic1.html" . }}
H1I: {{ partials.Include "halfdynamic1.html" . }}
H1C: {{ partials.IncludeCached "halfdynamic1.html" . }}
H1C: {{ partials.IncludeCached "halfdynamic1.html" . }}

-- layouts/partials/static1.html --
P1
-- layouts/partials/static2.html --
P2
-- layouts/partials/dynamic1.html --
{{ math.Counter }}
-- layouts/partials/halfdynamic1.html --
D1
{{ math.Counter }}


  `

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
		},
	).Build()

	// fmt.Println(b.FileContent("public/index.html"))

	var buf bytes.Buffer
	b.H.Metrics.WriteMetrics(&buf)

	got := buf.String()

	// Get rid of all the durations, they are never the same.
	durationRe := regexp.MustCompile(`\b[\.\d]*(ms|µs|s)\b`)

	// normalize strips durations and the 3-line header, trims and sorts
	// the remaining rows so the comparison is order-independent.
	normalize := func(s string) string {
		s = durationRe.ReplaceAllString(s, "")
		linesIn := strings.Split(s, "\n")[3:]
		var lines []string
		for _, l := range linesIn {
			l = strings.TrimSpace(l)
			if l == "" {
				continue
			}
			lines = append(lines, l)
		}

		sort.Strings(lines)

		return strings.Join(lines, "\n")
	}

	got = normalize(got)

	// Columns: cumulative %, average %, hit count, total count, template.
	expect := `
  0 0 0 1 index.html
  100 0 0 1 partials/static2.html
  100 50 1 2 partials/static1.html
  25 50 2 4 partials/dynamic1.html
  66 33 1 3 partials/halfdynamic1.html
  `

	b.Assert(got, hqt.IsSameString, expect)
}
+
+// gobench --package ./tpl/partials
+func BenchmarkIncludeCached(b *testing.B) {
+ files := `
+-- config.toml --
+baseURL = 'http://example.com/'
+-- layouts/index.html --
+-- layouts/_default/single.html --
+{{ partialCached "heavy.html" "foo" }}
+{{ partialCached "easy1.html" "bar" }}
+{{ partialCached "easy1.html" "baz" }}
+{{ partialCached "easy2.html" "baz" }}
+-- layouts/partials/easy1.html --
+ABCD
+-- layouts/partials/easy2.html --
+ABCDE
+-- layouts/partials/heavy.html --
+{{ $result := slice }}
+{{ range site.RegularPages }}
+{{ $result = $result | append (dict "title" .Title "link" .RelPermalink "readingTime" .ReadingTime) }}
+{{ end }}
+{{ range $result }}
+* {{ .title }} {{ .link }} {{ .readingTime }}
+{{ end }}
+
+
+`
+
+ for i := 1; i < 100; i++ {
+ files += fmt.Sprintf("\n-- content/p%d.md --\n---\ntitle: page\n---\n"+strings.Repeat("FOO ", i), i)
+ }
+
+ cfg := hugolib.IntegrationTestConfig{
+ T: b,
+ TxtarString: files,
+ }
+ builders := make([]*hugolib.IntegrationTestBuilder, b.N)
+
+ for i, _ := range builders {
+ builders[i] = hugolib.NewIntegrationTestBuilder(cfg)
+ }
+
+ b.ResetTimer()
+
+ for i := 0; i < b.N; i++ {
+ builders[i].Build()
+ }
+}
diff --git a/tpl/partials/partials.go b/tpl/partials/partials.go
new file mode 100644
index 000000000..eb4ebfe32
--- /dev/null
+++ b/tpl/partials/partials.go
@@ -0,0 +1,277 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package partials provides template functions for working with reusable
+// templates.
+package partials
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "html/template"
+ "io"
+ "io/ioutil"
+ "reflect"
+ "strings"
+ "sync"
+ "time"
+
+ texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/tpl"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/gohugoio/hugo/deps"
+)
+
// TestTemplateProvider is a global deps.ResourceProvider.
// NOTE: It's currently unused.
var TestTemplateProvider deps.ResourceProvider
+
// partialCacheKey identifies one cached partial invocation: the partial's
// name plus the (hashable) variant value supplied by the caller.
type partialCacheKey struct {
	name    string
	variant any
}

// templateName returns the fully qualified template name, guaranteeing
// exactly one "partials/" prefix.
func (k partialCacheKey) templateName() string {
	if strings.HasPrefix(k.name, "partials/") {
		return k.name
	}
	return "partials/" + k.name
}
+
+// partialCache represents a cache of partials protected by a mutex.
+type partialCache struct {
+ sync.RWMutex
+ p map[partialCacheKey]any
+}
+
+func (p *partialCache) clear() {
+ p.Lock()
+ defer p.Unlock()
+ p.p = make(map[partialCacheKey]any)
+}
+
// New returns a new instance of the partials-namespaced template functions.
func New(deps *deps.Deps) *Namespace {
	cache := &partialCache{p: make(map[partialCacheKey]any)}
	// Invalidate all cached partials at the start of each build.
	deps.BuildStartListeners.Add(
		func() {
			cache.clear()
		})

	return &Namespace{
		deps:           deps,
		cachedPartials: cache,
	}
}
+
// Namespace provides template functions for the "partials" namespace.
type Namespace struct {
	deps           *deps.Deps
	cachedPartials *partialCache // results of partialCached, keyed by name+variant
}
+
// contextWrapper makes room for a return value in a partial invocation.
// Arg holds the caller-supplied context (the partial's "."); Result holds
// the value set by a "return" statement inside the partial.
type contextWrapper struct {
	Arg    any
	Result any
}

// Set records the partial's return value and yields an empty string, so
// the "return" call itself produces no template output.
func (c *contextWrapper) Set(in any) string {
	c.Result = in
	return ""
}
+
// Include executes the named partial.
// If the partial contains a return statement, that value will be returned.
// Else, the rendered output will be returned:
// A string if the partial is a text/template, or template.HTML when html/template.
// Note that ctx is provided by Hugo, not the end user.
func (ns *Namespace) Include(ctx context.Context, name string, contextList ...any) (any, error) {
	name, result, err := ns.include(ctx, name, contextList...)
	if err != nil {
		return result, err
	}

	// Record an uncached execution for the template-metrics cache hints.
	if ns.deps.Metrics != nil {
		ns.deps.Metrics.TrackValue(name, result, false)
	}

	return result, nil
}
+
// include is a helper function that looks up and executes the named partial.
// Returns the final template name and the rendered output (or, for partials
// with a return statement, the returned value).
func (ns *Namespace) include(ctx context.Context, name string, dataList ...any) (string, any, error) {
	var data any
	if len(dataList) > 0 {
		data = dataList[0]
	}

	// Qualify the lookup name with the "partials/" prefix if missing.
	var n string
	if strings.HasPrefix(name, "partials/") {
		n = name
	} else {
		n = "partials/" + name
	}

	templ, found := ns.deps.Tmpl().Lookup(n)
	if !found {
		// For legacy reasons, also try with an implicit .html extension.
		templ, found = ns.deps.Tmpl().Lookup(n + ".html")
	}

	if !found {
		return "", "", fmt.Errorf("partial %q not found", name)
	}

	var info tpl.ParseInfo
	if ip, ok := templ.(tpl.Info); ok {
		info = ip.ParseInfo()
	}

	var w io.Writer

	if info.HasReturn {
		// Wrap the context sent to the template to capture the return value.
		// Note that the template is rewritten to make sure that the dot (".")
		// and the $ variable points to Arg.
		data = &contextWrapper{
			Arg: data,
		}

		// We don't care about any template output.
		w = ioutil.Discard
	} else {
		b := bp.GetBuffer()
		defer bp.PutBuffer(b)
		w = b
	}

	if err := ns.deps.Tmpl().ExecuteWithContext(ctx, templ, w, data); err != nil {
		return "", nil, err
	}

	var result any

	if ctx, ok := data.(*contextWrapper); ok {
		// Return-style partial: hand back the captured value.
		result = ctx.Result
	} else if _, ok := templ.(*texttemplate.Template); ok {
		// text/template output stays a plain string.
		result = w.(fmt.Stringer).String()
	} else {
		// html/template output is marked safe for HTML contexts.
		result = template.HTML(w.(fmt.Stringer).String())
	}

	return templ.Name(), result, nil
}
+
// IncludeCached executes and caches partial templates. The cache is created with name+variants as the key.
// Note that ctx is provided by Hugo, not the end user.
func (ns *Namespace) IncludeCached(ctx context.Context, name string, context any, variants ...any) (any, error) {
	key, err := createKey(name, variants...)
	if err != nil {
		return nil, err
	}

	result, err := ns.getOrCreate(ctx, key, context)
	if err == errUnHashable {
		// The variant could not be used as a map key; retry once with its
		// string hash, which is always hashable.
		key.variant = helpers.HashString(key.variant)
		result, err = ns.getOrCreate(ctx, key, context)
	}

	return result, err
}
+
// createKey builds the cache key from the partial name and the optional
// variant values. Multiple variants, or a single slice/array/map variant,
// are collapsed to a hash string so the key stays map-hashable.
func createKey(name string, variants ...any) (partialCacheKey, error) {
	var variant any

	if len(variants) > 1 {
		variant = helpers.HashString(variants...)
	} else if len(variants) == 1 {
		variant = variants[0]
		t := reflect.TypeOf(variant)
		switch t.Kind() {
		// This isn't an exhaustive list of unhashable types.
		// There may be structs with slices,
		// but that should be very rare. We do recover from that situation
		// below.
		case reflect.Slice, reflect.Array, reflect.Map:
			variant = helpers.HashString(variant)
		}
	}

	return partialCacheKey{name: name, variant: variant}, nil
}
+
// errUnHashable signals that the cache key's variant cannot be used as a
// map key; IncludeCached retries with a hashed variant.
var errUnHashable = errors.New("unhashable")

// getOrCreate returns the cached result for key, rendering and caching the
// partial on a miss. A panic caused by an unhashable map key is recovered
// and surfaced as errUnHashable.
func (ns *Namespace) getOrCreate(ctx context.Context, key partialCacheKey, context any) (result any, err error) {
	start := time.Now()
	defer func() {
		if r := recover(); r != nil {
			// NOTE(review): assumes the recovered value is an error; a
			// non-error panic value would panic again on this assertion —
			// TODO confirm.
			err = r.(error)
			if strings.Contains(err.Error(), "unhashable type") {
				// The panic happened during the map read below, while the
				// read lock was still held; release it before returning.
				ns.cachedPartials.RUnlock()
				err = errUnHashable
			}
		}
	}()

	ns.cachedPartials.RLock()
	p, ok := ns.cachedPartials.p[key]
	ns.cachedPartials.RUnlock()

	if ok {
		if ns.deps.Metrics != nil {
			ns.deps.Metrics.TrackValue(key.templateName(), p, true)
			// The templates that gets executed is measured in Execute.
			// We need to track the time spent in the cache to
			// get the totals correct.
			ns.deps.Metrics.MeasureSince(key.templateName(), start)

		}
		return p, nil
	}

	// This needs to be done outside the lock.
	// See #9588
	_, p, err = ns.include(ctx, key.name, context)
	if err != nil {
		return nil, err
	}

	ns.cachedPartials.Lock()
	defer ns.cachedPartials.Unlock()
	// Double-check: another goroutine may have rendered and cached the
	// same partial while we were rendering outside the lock.
	if p2, ok := ns.cachedPartials.p[key]; ok {
		if ns.deps.Metrics != nil {
			ns.deps.Metrics.TrackValue(key.templateName(), p, true)
			ns.deps.Metrics.MeasureSince(key.templateName(), start)
		}
		return p2, nil

	}
	if ns.deps.Metrics != nil {
		ns.deps.Metrics.TrackValue(key.templateName(), p, false)
	}

	ns.cachedPartials.p[key] = p

	return p, nil
}
diff --git a/tpl/partials/partials_test.go b/tpl/partials/partials_test.go
new file mode 100644
index 000000000..490354499
--- /dev/null
+++ b/tpl/partials/partials_test.go
@@ -0,0 +1,40 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package partials
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestCreateKey checks that createKey is deterministic across repeated
// calls: plain hashable variants pass through unchanged, while multiple
// variants and slice/map variants collapse to stable hash strings.
func TestCreateKey(t *testing.T) {
	c := qt.New(t)
	// Inserting into this map also proves each key is hashable.
	m := make(map[any]bool)

	create := func(name string, variants ...any) partialCacheKey {
		k, err := createKey(name, variants...)
		c.Assert(err, qt.IsNil)
		m[k] = true
		return k
	}

	for i := 0; i < 123; i++ {
		c.Assert(create("a", "b"), qt.Equals, partialCacheKey{name: "a", variant: "b"})
		c.Assert(create("a", "b", "c"), qt.Equals, partialCacheKey{name: "a", variant: "9629524865311698396"})
		c.Assert(create("a", 1), qt.Equals, partialCacheKey{name: "a", variant: 1})
		c.Assert(create("a", map[string]string{"a": "av"}), qt.Equals, partialCacheKey{name: "a", variant: "4809626101226749924"})
		c.Assert(create("a", []string{"a", "b"}), qt.Equals, partialCacheKey{name: "a", variant: "2712570657419664240"})
	}
}
diff --git a/tpl/path/init.go b/tpl/path/init.go
new file mode 100644
index 000000000..c67d94866
--- /dev/null
+++ b/tpl/path/init.go
@@ -0,0 +1,60 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package path
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
// name is the template-funcs namespace registered by this package.
const name = "path"

// init registers the "path" namespace with the template engine, including
// doc examples for Split, Join, Ext, Base and Dir.
func init() {
	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
		ctx := New(d)

		ns := &internal.TemplateFuncsNamespace{
			Name:    name,
			Context: func(args ...any) (any, error) { return ctx, nil },
		}

		ns.AddMethodMapping(ctx.Split,
			nil,
			[][2]string{
				{`{{ "/my/path/filename.txt" | path.Split }}`, `/my/path/|filename.txt`},
				// Windows-style input must split identically.
				{fmt.Sprintf(`{{ %q | path.Split }}`, filepath.FromSlash("/my/path/filename.txt")), `/my/path/|filename.txt`},
			},
		)

		testDir := filepath.Join("my", "path")
		testFile := filepath.Join(testDir, "filename.txt")

		ns.AddMethodMapping(ctx.Join,
			nil,
			[][2]string{
				{fmt.Sprintf(`{{ slice %q "filename.txt" | path.Join }}`, testDir), `my/path/filename.txt`},
				{`{{ path.Join "my" "path" "filename.txt" }}`, `my/path/filename.txt`},
				{fmt.Sprintf(`{{ %q | path.Ext }}`, testFile), `.txt`},
				{fmt.Sprintf(`{{ %q | path.Base }}`, testFile), `filename.txt`},
				{fmt.Sprintf(`{{ %q | path.Dir }}`, testFile), `my/path`},
			},
		)

		return ns
	}
	internal.AddTemplateFuncsNamespace(f)
}
diff --git a/tpl/path/path.go b/tpl/path/path.go
new file mode 100644
index 000000000..378b97e03
--- /dev/null
+++ b/tpl/path/path.go
@@ -0,0 +1,174 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package path provides template functions for manipulating paths.
+package path
+
+import (
+ "fmt"
+ _path "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the path-namespaced template functions.
+func New(deps *deps.Deps) *Namespace {
+ return &Namespace{
+ deps: deps,
+ }
+}
+
+// Namespace provides template functions for the "path" namespace.
+type Namespace struct {
+ deps *deps.Deps
+}
+
+// DirFile holds the result from path.Split.
+type DirFile struct {
+ Dir string
+ File string
+}
+
+// Used in test.
+func (df DirFile) String() string {
+ return fmt.Sprintf("%s|%s", df.Dir, df.File)
+}
+
+// Ext returns the file name extension used by path.
+// The extension is the suffix beginning at the final dot
+// in the final slash-separated element of path;
+// it is empty if there is no dot.
+// The input path is passed into filepath.ToSlash converting any Windows slashes
+// to forward slashes.
+func (ns *Namespace) Ext(path any) (string, error) {
+ spath, err := cast.ToStringE(path)
+ if err != nil {
+ return "", err
+ }
+ spath = filepath.ToSlash(spath)
+ return _path.Ext(spath), nil
+}
+
+// Dir returns all but the last element of path, typically the path's directory.
+// After dropping the final element using Split, the path is Cleaned and trailing
+// slashes are removed.
+// If the path is empty, Dir returns ".".
+// If the path consists entirely of slashes followed by non-slash bytes, Dir
+// returns a single slash. In any other case, the returned path does not end in a
+// slash.
+// The input path is passed into filepath.ToSlash converting any Windows slashes
+// to forward slashes.
+func (ns *Namespace) Dir(path any) (string, error) {
+ spath, err := cast.ToStringE(path)
+ if err != nil {
+ return "", err
+ }
+ spath = filepath.ToSlash(spath)
+ return _path.Dir(spath), nil
+}
+
+// Base returns the last element of path.
+// Trailing slashes are removed before extracting the last element.
+// If the path is empty, Base returns ".".
+// If the path consists entirely of slashes, Base returns "/".
+// The input path is passed into filepath.ToSlash converting any Windows slashes
+// to forward slashes.
+func (ns *Namespace) Base(path any) (string, error) {
+ spath, err := cast.ToStringE(path)
+ if err != nil {
+ return "", err
+ }
+ spath = filepath.ToSlash(spath)
+ return _path.Base(spath), nil
+}
+
+// BaseName returns the last element of path, removing the extension if present.
+// Trailing slashes are removed before extracting the last element.
+// If the path is empty, BaseName returns ".".
+// If the path consists entirely of slashes, BaseName returns "/".
+// The input path is passed into filepath.ToSlash converting any Windows slashes
+// to forward slashes.
+func (ns *Namespace) BaseName(path any) (string, error) {
+ spath, err := cast.ToStringE(path)
+ if err != nil {
+ return "", err
+ }
+ spath = filepath.ToSlash(spath)
+ return strings.TrimSuffix(_path.Base(spath), _path.Ext(spath)), nil
+}
+
+// Split splits path immediately following the final slash,
+// separating it into a directory and file name component.
+// If there is no slash in path, Split returns an empty dir and
+// file set to path.
+// The input path is passed into filepath.ToSlash converting any Windows slashes
+// to forward slashes.
+// The returned values have the property that path = dir+file.
+func (ns *Namespace) Split(path any) (DirFile, error) {
+ spath, err := cast.ToStringE(path)
+ if err != nil {
+ return DirFile{}, err
+ }
+ spath = filepath.ToSlash(spath)
+ dir, file := _path.Split(spath)
+
+ return DirFile{Dir: dir, File: file}, nil
+}
+
+// Join joins any number of path elements into a single path, adding a
+// separating slash if necessary. All the input
+// path elements are passed into filepath.ToSlash converting any Windows slashes
+// to forward slashes.
+// The result is Cleaned; in particular,
+// all empty strings are ignored.
+func (ns *Namespace) Join(elements ...any) (string, error) {
+ var pathElements []string
+ for _, elem := range elements {
+ switch v := elem.(type) {
+ case []string:
+ for _, e := range v {
+ pathElements = append(pathElements, filepath.ToSlash(e))
+ }
+ case []any:
+ for _, e := range v {
+ elemStr, err := cast.ToStringE(e)
+ if err != nil {
+ return "", err
+ }
+ pathElements = append(pathElements, filepath.ToSlash(elemStr))
+ }
+ default:
+ elemStr, err := cast.ToStringE(elem)
+ if err != nil {
+ return "", err
+ }
+ pathElements = append(pathElements, filepath.ToSlash(elemStr))
+ }
+ }
+ return _path.Join(pathElements...), nil
+}
+
+// Clean replaces the separators used with standard slashes and then
+// extraneous slashes are removed.
+func (ns *Namespace) Clean(path any) (string, error) {
+ spath, err := cast.ToStringE(path)
+
+ if err != nil {
+ return "", err
+ }
+ spath = filepath.ToSlash(spath)
+ return _path.Clean(spath), nil
+}
diff --git a/tpl/path/path_test.go b/tpl/path/path_test.go
new file mode 100644
index 000000000..599d8367a
--- /dev/null
+++ b/tpl/path/path_test.go
@@ -0,0 +1,236 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package path
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+)
+
+var ns = New(&deps.Deps{Cfg: config.New()})
+
+type tstNoStringer struct{}
+
+func TestBase(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ path any
+ expect any
+ }{
+ {filepath.FromSlash(`foo/bar.txt`), `bar.txt`},
+ {filepath.FromSlash(`foo/bar/txt `), `txt `},
+ {filepath.FromSlash(`foo/bar.t`), `bar.t`},
+ {`foo.bar.txt`, `foo.bar.txt`},
+ {`.x`, `.x`},
+ {``, `.`},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Base(test.path)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestBaseName(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ path any
+ expect any
+ }{
+ {filepath.FromSlash(`foo/bar.txt`), `bar`},
+ {filepath.FromSlash(`foo/bar/txt `), `txt `},
+ {filepath.FromSlash(`foo/bar.t`), `bar`},
+ {`foo.bar.txt`, `foo.bar`},
+ {`.x`, ``},
+ {``, `.`},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.BaseName(test.path)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestDir(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ path any
+ expect any
+ }{
+ {filepath.FromSlash(`foo/bar.txt`), `foo`},
+ {filepath.FromSlash(`foo/bar/txt `), `foo/bar`},
+ {filepath.FromSlash(`foo/bar.t`), `foo`},
+ {`foo.bar.txt`, `.`},
+ {`.x`, `.`},
+ {``, `.`},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Dir(test.path)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestExt(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ path any
+ expect any
+ }{
+ {filepath.FromSlash(`foo/bar.json`), `.json`},
+ {`foo.bar.txt `, `.txt `},
+ {``, ``},
+ {`.x`, `.x`},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Ext(test.path)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestJoin(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ elements any
+ expect any
+ }{
+ {
+ []string{"", "baz", filepath.FromSlash(`foo/bar.txt`)},
+ `baz/foo/bar.txt`,
+ },
+ {
+ []any{"", "baz", DirFile{"big", "john"}, filepath.FromSlash(`foo/bar.txt`)},
+ `baz/big|john/foo/bar.txt`,
+ },
+ {nil, ""},
+ // errors
+ {tstNoStringer{}, false},
+ {[]any{"", tstNoStringer{}}, false},
+ } {
+
+ result, err := ns.Join(test.elements)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestSplit(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ path any
+ expect any
+ }{
+ {filepath.FromSlash(`foo/bar.txt`), DirFile{`foo/`, `bar.txt`}},
+ {filepath.FromSlash(`foo/bar/txt `), DirFile{`foo/bar/`, `txt `}},
+ {`foo.bar.txt`, DirFile{``, `foo.bar.txt`}},
+ {``, DirFile{``, ``}},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Split(test.path)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestClean(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ path any
+ expect any
+ }{
+ {filepath.FromSlash(`foo/bar.txt`), `foo/bar.txt`},
+ {filepath.FromSlash(`foo/bar/txt`), `foo/bar/txt`},
+ {filepath.FromSlash(`foo/bar`), `foo/bar`},
+ {filepath.FromSlash(`foo/bar.t`), `foo/bar.t`},
+ {``, `.`},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Clean(test.path)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
diff --git a/tpl/reflect/init.go b/tpl/reflect/init.go
new file mode 100644
index 000000000..3af6dfa11
--- /dev/null
+++ b/tpl/reflect/init.go
@@ -0,0 +1,51 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package reflect provides template functions for run-time object reflection.
+package reflect
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "reflect"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New()
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.IsMap,
+ nil,
+ [][2]string{
+ {`{{ if reflect.IsMap (dict "a" 1) }}Map{{ end }}`, `Map`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.IsSlice,
+ nil,
+ [][2]string{
+ {`{{ if reflect.IsSlice (slice 1 2 3) }}Slice{{ end }}`, `Slice`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/reflect/reflect.go b/tpl/reflect/reflect.go
new file mode 100644
index 000000000..07834be1c
--- /dev/null
+++ b/tpl/reflect/reflect.go
@@ -0,0 +1,36 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package reflect
+
+import (
+ "reflect"
+)
+
+// New returns a new instance of the reflect-namespaced template functions.
+func New() *Namespace {
+ return &Namespace{}
+}
+
+// Namespace provides template functions for the "reflect" namespace.
+type Namespace struct{}
+
+// IsMap reports whether v is a map.
+func (ns *Namespace) IsMap(v any) bool {
+ return reflect.ValueOf(v).Kind() == reflect.Map
+}
+
+// IsSlice reports whether v is a slice.
+func (ns *Namespace) IsSlice(v any) bool {
+ return reflect.ValueOf(v).Kind() == reflect.Slice
+}
diff --git a/tpl/reflect/reflect_test.go b/tpl/reflect/reflect_test.go
new file mode 100644
index 000000000..f85af87dd
--- /dev/null
+++ b/tpl/reflect/reflect_test.go
@@ -0,0 +1,54 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package reflect
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+var ns = New()
+
+type tstNoStringer struct{}
+
+func TestIsMap(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ v any
+ expect any
+ }{
+ {map[int]int{1: 1}, true},
+ {"foo", false},
+ {nil, false},
+ } {
+ result := ns.IsMap(test.v)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestIsSlice(t *testing.T) {
+ c := qt.New(t)
+ for _, test := range []struct {
+ v any
+ expect any
+ }{
+ {[]int{1, 2}, true},
+ {"foo", false},
+ {nil, false},
+ } {
+ result := ns.IsSlice(test.v)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
diff --git a/tpl/resources/init.go b/tpl/resources/init.go
new file mode 100644
index 000000000..73a7b8f42
--- /dev/null
+++ b/tpl/resources/init.go
@@ -0,0 +1,77 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "resources"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx, err := New(d)
+ if err != nil {
+ // TODO(bep) no panic.
+ panic(err)
+ }
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Get,
+ nil,
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.GetRemote,
+ nil,
+ [][2]string{},
+ )
+
+ // Add aliases for the most common transformations.
+
+ ns.AddMethodMapping(ctx.Fingerprint,
+ []string{"fingerprint"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Minify,
+ []string{"minify"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.ToCSS,
+ []string{"toCSS"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.PostCSS,
+ []string{"postCSS"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Babel,
+ []string{"babel"},
+ [][2]string{},
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/resources/integration_test.go b/tpl/resources/integration_test.go
new file mode 100644
index 000000000..06f98eeee
--- /dev/null
+++ b/tpl/resources/integration_test.go
@@ -0,0 +1,100 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package resources_test
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestCopy(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = "http://example.com/blog"
+-- assets/images/pixel.png --
+iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==
+-- layouts/index.html --
+{{/* Image resources */}}
+{{ $img := resources.Get "images/pixel.png" }}
+{{ $imgCopy1 := $img | resources.Copy "images/copy.png" }}
+{{ $imgCopy1 = $imgCopy1.Resize "3x4"}}
+{{ $imgCopy2 := $imgCopy1 | resources.Copy "images/copy2.png" }}
+{{ $imgCopy3 := $imgCopy1 | resources.Copy "images/copy3.png" }}
+Image Orig: {{ $img.RelPermalink}}|{{ $img.MediaType }}|{{ $img.Width }}|{{ $img.Height }}|
+Image Copy1: {{ $imgCopy1.RelPermalink}}|{{ $imgCopy1.MediaType }}|{{ $imgCopy1.Width }}|{{ $imgCopy1.Height }}|
+Image Copy2: {{ $imgCopy2.RelPermalink}}|{{ $imgCopy2.MediaType }}|{{ $imgCopy2.Width }}|{{ $imgCopy2.Height }}|
+Image Copy3: {{ $imgCopy3.MediaType }}|{{ $imgCopy3.Width }}|{{ $imgCopy3.Height }}|
+
+{{/* Generic resources */}}
+{{ $targetPath := "js/vars.js" }}
+{{ $orig := "let foo;" | resources.FromString "js/foo.js" }}
+{{ $copy1 := $orig | resources.Copy "js/copies/bar.js" }}
+{{ $copy2 := $orig | resources.Copy "js/copies/baz.js" | fingerprint "md5" }}
+{{ $copy3 := $copy2 | resources.Copy "js/copies/moo.js" | minify }}
+
+Orig: {{ $orig.RelPermalink}}|{{ $orig.MediaType }}|{{ $orig.Content | safeJS }}|
+Copy1: {{ $copy1.RelPermalink}}|{{ $copy1.MediaType }}|{{ $copy1.Content | safeJS }}|
+Copy2: {{ $copy2.RelPermalink}}|{{ $copy2.MediaType }}|{{ $copy2.Content | safeJS }}|
+Copy3: {{ $copy3.RelPermalink}}|{{ $copy3.MediaType }}|{{ $copy3.Content | safeJS }}|
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ }).Build()
+
+ b.AssertFileContent("public/index.html", `
+Image Orig: /blog/images/pixel.png|image/png|1|1|
+Image Copy1: /blog/images/copy_hu8aa3346827e49d756ff4e630147c42b5_70_3x4_resize_box_3.png|image/png|3|4|
+Image Copy2: /blog/images/copy2.png|image/png|3|4
+Image Copy3: image/png|3|4|
+Orig: /blog/js/foo.js|application/javascript|let foo;|
+Copy1: /blog/js/copies/bar.js|application/javascript|let foo;|
+Copy2: /blog/js/copies/baz.a677329fc6c4ad947e0c7116d91f37a2.js|application/javascript|let foo;|
+Copy3: /blog/js/copies/moo.a677329fc6c4ad947e0c7116d91f37a2.min.js|application/javascript|let foo|
+
+ `)
+
+ b.AssertDestinationExists("images/copy2.png", true)
+ // No permalink used.
+ b.AssertDestinationExists("images/copy3.png", false)
+
+}
+
+func TestCopyPageShouldFail(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+-- layouts/index.html --
+{{/* This is currently not supported. */}}
+{{ $copy := .Copy "copy.md" }}
+
+ `
+
+ b, err := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ }).BuildE()
+
+ b.Assert(err, qt.IsNotNil)
+
+}
diff --git a/tpl/resources/resources.go b/tpl/resources/resources.go
new file mode 100644
index 000000000..428e36c92
--- /dev/null
+++ b/tpl/resources/resources.go
@@ -0,0 +1,432 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package resources provides template functions for working with resources.
+package resources
+
+import (
+ "fmt"
+ "sync"
+
+ "github.com/gohugoio/hugo/common/herrors"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/maps"
+
+ "github.com/gohugoio/hugo/tpl/internal/resourcehelpers"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/resources/postpub"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/resources"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/resources/resource_factories/bundler"
+ "github.com/gohugoio/hugo/resources/resource_factories/create"
+ "github.com/gohugoio/hugo/resources/resource_transformers/babel"
+ "github.com/gohugoio/hugo/resources/resource_transformers/integrity"
+ "github.com/gohugoio/hugo/resources/resource_transformers/minifier"
+ "github.com/gohugoio/hugo/resources/resource_transformers/postcss"
+ "github.com/gohugoio/hugo/resources/resource_transformers/templates"
+ "github.com/gohugoio/hugo/resources/resource_transformers/tocss/dartsass"
+ "github.com/gohugoio/hugo/resources/resource_transformers/tocss/scss"
+
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the resources-namespaced template functions.
+func New(deps *deps.Deps) (*Namespace, error) {
+ if deps.ResourceSpec == nil {
+ return &Namespace{}, nil
+ }
+
+ scssClient, err := scss.New(deps.BaseFs.Assets, deps.ResourceSpec)
+ if err != nil {
+ return nil, err
+ }
+
+ minifyClient, err := minifier.New(deps.ResourceSpec)
+ if err != nil {
+ return nil, err
+ }
+
+ return &Namespace{
+ deps: deps,
+ scssClientLibSass: scssClient,
+ createClient: create.New(deps.ResourceSpec),
+ bundlerClient: bundler.New(deps.ResourceSpec),
+ integrityClient: integrity.New(deps.ResourceSpec),
+ minifyClient: minifyClient,
+ postcssClient: postcss.New(deps.ResourceSpec),
+ templatesClient: templates.New(deps.ResourceSpec, deps),
+ babelClient: babel.New(deps.ResourceSpec),
+ }, nil
+}
+
+var _ resource.ResourceFinder = (*Namespace)(nil)
+
+// Namespace provides template functions for the "resources" namespace.
+type Namespace struct {
+ deps *deps.Deps
+
+ createClient *create.Client
+ bundlerClient *bundler.Client
+ scssClientLibSass *scss.Client
+ integrityClient *integrity.Client
+ minifyClient *minifier.Client
+ postcssClient *postcss.Client
+ babelClient *babel.Client
+ templatesClient *templates.Client
+
+	// The Dart Client requires an os/exec process, so only
+ // create it if we really need it.
+ // This is mostly to avoid creating one per site build test.
+ scssClientDartSassInit sync.Once
+ scssClientDartSass *dartsass.Client
+}
+
+func (ns *Namespace) getscssClientDartSass() (*dartsass.Client, error) {
+ var err error
+ ns.scssClientDartSassInit.Do(func() {
+ ns.scssClientDartSass, err = dartsass.New(ns.deps.BaseFs.Assets, ns.deps.ResourceSpec)
+ if err != nil {
+ return
+ }
+ ns.deps.BuildClosers.Add(ns.scssClientDartSass)
+
+ })
+
+ return ns.scssClientDartSass, err
+}
+
+// Copy copies r to the new targetPath in s.
+func (ns *Namespace) Copy(s any, r resource.Resource) (resource.Resource, error) {
+ targetPath, err := cast.ToStringE(s)
+ if err != nil {
+ panic(err)
+ }
+ return ns.createClient.Copy(r, targetPath)
+}
+
+// Get locates the filename given in Hugo's assets filesystem
+// and creates a Resource object that can be used for further transformations.
+func (ns *Namespace) Get(filename any) resource.Resource {
+ filenamestr, err := cast.ToStringE(filename)
+ if err != nil {
+ panic(err)
+ }
+ r, err := ns.createClient.Get(filenamestr)
+ if err != nil {
+ panic(err)
+ }
+
+ return r
+}
+
+// GetRemote gets the URL (via HTTP(s)) in the first argument in args and creates a Resource object that can be used for
+// further transformations.
+//
+// A second argument may be provided with an option map.
+//
+// Note: This method does not return any error as a second argument,
+// for any error situations the error can be checked in .Err.
+func (ns *Namespace) GetRemote(args ...any) resource.Resource {
+ get := func(args ...any) (resource.Resource, error) {
+ if len(args) < 1 {
+ return nil, errors.New("must provide an URL")
+ }
+
+ urlstr, err := cast.ToStringE(args[0])
+ if err != nil {
+ return nil, err
+ }
+
+ var options map[string]any
+
+ if len(args) > 1 {
+ options, err = maps.ToStringMapE(args[1])
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return ns.createClient.FromRemote(urlstr, options)
+
+ }
+
+ r, err := get(args...)
+ if err != nil {
+ switch v := err.(type) {
+ case *create.HTTPError:
+ return resources.NewErrorResource(resource.NewResourceError(v, v.Data))
+ default:
+ return resources.NewErrorResource(resource.NewResourceError(fmt.Errorf("error calling resources.GetRemote: %w", err), make(map[string]any)))
+ }
+
+ }
+ return r
+
+}
+
+// GetMatch finds the first Resource matching the given pattern, or nil if none found.
+//
+// It looks for files in the assets file system.
+//
+// See Match for a more complete explanation about the rules used.
+func (ns *Namespace) GetMatch(pattern any) resource.Resource {
+ patternStr, err := cast.ToStringE(pattern)
+ if err != nil {
+ panic(err)
+ }
+
+ r, err := ns.createClient.GetMatch(patternStr)
+ if err != nil {
+ panic(err)
+ }
+
+ return r
+}
+
+// ByType returns resources of a given resource type (e.g. "image").
+func (ns *Namespace) ByType(typ any) resource.Resources {
+ return ns.createClient.ByType(cast.ToString(typ))
+}
+
+// Match gets all resources matching the given base path prefix, e.g
+// "*.png" will match all png files. The "*" does not match path delimiters (/),
+// so if you organize your resources in sub-folders, you need to be explicit about it, e.g.:
+// "images/*.png". To match any PNG image anywhere in the bundle you can do "**.png", and
+// to match all PNG images below the images folder, use "images/**.jpg".
+//
+// The matching is case insensitive.
+//
+// Match matches by using the files name with path relative to the file system root
+// with Unix style slashes (/) and no leading slash, e.g. "images/logo.png".
+//
+// See https://github.com/gobwas/glob for the full rules set.
+//
+// It looks for files in the assets file system.
+//
+// See Match for a more complete explanation about the rules used.
+func (ns *Namespace) Match(pattern any) resource.Resources {
+ defer herrors.Recover()
+ patternStr, err := cast.ToStringE(pattern)
+ if err != nil {
+ panic(err)
+ }
+
+ r, err := ns.createClient.Match(patternStr)
+ if err != nil {
+ panic(err)
+ }
+
+ return r
+}
+
+// Concat concatenates a slice of Resource objects. These resources must
+// (currently) be of the same Media Type.
+func (ns *Namespace) Concat(targetPathIn any, r any) (resource.Resource, error) {
+ targetPath, err := cast.ToStringE(targetPathIn)
+ if err != nil {
+ return nil, err
+ }
+
+ var rr resource.Resources
+
+ switch v := r.(type) {
+ case resource.Resources:
+ rr = v
+ case resource.ResourcesConverter:
+ rr = v.ToResources()
+ default:
+ return nil, fmt.Errorf("slice %T not supported in concat", r)
+ }
+
+ if len(rr) == 0 {
+ return nil, errors.New("must provide one or more Resource objects to concat")
+ }
+
+ return ns.bundlerClient.Concat(targetPath, rr)
+}
+
+// FromString creates a Resource from a string published to the relative target path.
+func (ns *Namespace) FromString(targetPathIn, contentIn any) (resource.Resource, error) {
+ targetPath, err := cast.ToStringE(targetPathIn)
+ if err != nil {
+ return nil, err
+ }
+ content, err := cast.ToStringE(contentIn)
+ if err != nil {
+ return nil, err
+ }
+
+ return ns.createClient.FromString(targetPath, content)
+}
+
+// ExecuteAsTemplate creates a Resource from a Go template, parsed and executed with
+// the given data, and published to the relative target path.
+func (ns *Namespace) ExecuteAsTemplate(args ...any) (resource.Resource, error) {
+ if len(args) != 3 {
+ return nil, fmt.Errorf("must provide targetPath, the template data context and a Resource object")
+ }
+ targetPath, err := cast.ToStringE(args[0])
+ if err != nil {
+ return nil, err
+ }
+ data := args[1]
+
+ r, ok := args[2].(resources.ResourceTransformer)
+ if !ok {
+ return nil, fmt.Errorf("type %T not supported in Resource transformations", args[2])
+ }
+
+ return ns.templatesClient.ExecuteAsTemplate(r, targetPath, data)
+}
+
+// Fingerprint transforms the given Resource with an MD5 hash of the content in
+// the RelPermalink and Permalink.
+func (ns *Namespace) Fingerprint(args ...any) (resource.Resource, error) {
+ if len(args) < 1 || len(args) > 2 {
+ return nil, errors.New("must provide a Resource and (optional) crypto algo")
+ }
+
+ var algo string
+ resIdx := 0
+
+ if len(args) == 2 {
+ resIdx = 1
+ var err error
+ algo, err = cast.ToStringE(args[0])
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ r, ok := args[resIdx].(resources.ResourceTransformer)
+ if !ok {
+ return nil, fmt.Errorf("%T can not be transformed", args[resIdx])
+ }
+
+ return ns.integrityClient.Fingerprint(r, algo)
+}
+
+// Minify minifies the given Resource using the MediaType to pick the correct
+// minifier.
+func (ns *Namespace) Minify(r resources.ResourceTransformer) (resource.Resource, error) {
+ return ns.minifyClient.Minify(r)
+}
+
+// ToCSS converts the given Resource to CSS. You can optional provide an Options
+// object or a target path (string) as first argument.
+func (ns *Namespace) ToCSS(args ...any) (resource.Resource, error) {
+ const (
+ // Transpiler implementation can be controlled from the client by
+ // setting the 'transpiler' option.
+ // Default is currently 'libsass', but that may change.
+ transpilerDart = "dartsass"
+ transpilerLibSass = "libsass"
+ )
+
+ var (
+ r resources.ResourceTransformer
+ m map[string]any
+ targetPath string
+ err error
+ ok bool
+ transpiler = transpilerLibSass
+ )
+
+ r, targetPath, ok = resourcehelpers.ResolveIfFirstArgIsString(args)
+
+ if !ok {
+ r, m, err = resourcehelpers.ResolveArgs(args)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if m != nil {
+ maps.PrepareParams(m)
+ if t, found := m["transpiler"]; found {
+ switch t {
+ case transpilerDart, transpilerLibSass:
+ transpiler = cast.ToString(t)
+ default:
+ return nil, fmt.Errorf("unsupported transpiler %q; valid values are %q or %q", t, transpilerLibSass, transpilerDart)
+ }
+ }
+ }
+
+ if transpiler == transpilerLibSass {
+ var options scss.Options
+ if targetPath != "" {
+ options.TargetPath = helpers.ToSlashTrimLeading(targetPath)
+ } else if m != nil {
+ options, err = scss.DecodeOptions(m)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return ns.scssClientLibSass.ToCSS(r, options)
+ }
+
+ if m == nil {
+ m = make(map[string]any)
+ }
+ if targetPath != "" {
+ m["targetPath"] = targetPath
+ }
+
+ client, err := ns.getscssClientDartSass()
+ if err != nil {
+ return nil, err
+ }
+
+ return client.ToCSS(r, m)
+
+}
+
+// PostCSS processes the given Resource with PostCSS
+func (ns *Namespace) PostCSS(args ...any) (resource.Resource, error) {
+ r, m, err := resourcehelpers.ResolveArgs(args)
+ if err != nil {
+ return nil, err
+ }
+
+ return ns.postcssClient.Process(r, m)
+}
+
+func (ns *Namespace) PostProcess(r resource.Resource) (postpub.PostPublishedResource, error) {
+ return ns.deps.ResourceSpec.PostProcess(r)
+}
+
+// Babel processes the given Resource with Babel.
+func (ns *Namespace) Babel(args ...any) (resource.Resource, error) {
+ r, m, err := resourcehelpers.ResolveArgs(args)
+ if err != nil {
+ return nil, err
+ }
+ var options babel.Options
+ if m != nil {
+ options, err = babel.DecodeOptions(m)
+
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return ns.babelClient.Process(r, options)
+}
diff --git a/tpl/safe/init.go b/tpl/safe/init.go
new file mode 100644
index 000000000..794c9d6f0
--- /dev/null
+++ b/tpl/safe/init.go
@@ -0,0 +1,80 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package safe
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "safe"
+
// init registers the "safe" namespace with the template function
// registry. Each AddMethodMapping call exposes a namespace method, the
// global alias(es) listed in the second argument, and example/expected
// output pairs used as documentation.
func init() {
	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
		ctx := New()

		ns := &internal.TemplateFuncsNamespace{
			Name:    name,
			Context: func(args ...any) (any, error) { return ctx, nil },
		}

		ns.AddMethodMapping(ctx.CSS,
			[]string{"safeCSS"},
			[][2]string{
				{`{{ "Bat&Man" | safeCSS | safeCSS }}`, `Bat&amp;Man`},
			},
		)

		ns.AddMethodMapping(ctx.HTML,
			[]string{"safeHTML"},
			[][2]string{
				{`{{ "Bat&Man" | safeHTML | safeHTML }}`, `Bat&Man`},
				{`{{ "Bat&Man" | safeHTML }}`, `Bat&Man`},
			},
		)

		ns.AddMethodMapping(ctx.HTMLAttr,
			[]string{"safeHTMLAttr"},
			[][2]string{},
		)

		ns.AddMethodMapping(ctx.JS,
			[]string{"safeJS"},
			[][2]string{
				{`{{ "(1*2)" | safeJS | safeJS }}`, `(1*2)`},
			},
		)

		ns.AddMethodMapping(ctx.JSStr,
			[]string{"safeJSStr"},
			[][2]string{},
		)

		ns.AddMethodMapping(ctx.URL,
			[]string{"safeURL"},
			[][2]string{
				{`{{ "http://gohugo.io" | safeURL | safeURL }}`, `http://gohugo.io`},
			},
		)

		ns.AddMethodMapping(ctx.SanitizeURL,
			[]string{"sanitizeURL", "sanitizeurl"},
			[][2]string{},
		)

		return ns
	}

	internal.AddTemplateFuncsNamespace(f)
}
diff --git a/tpl/safe/safe.go b/tpl/safe/safe.go
new file mode 100644
index 000000000..d1a2e8d4e
--- /dev/null
+++ b/tpl/safe/safe.go
@@ -0,0 +1,73 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package safe provides template functions for escaping untrusted content or
+// encapsulating trusted content.
+package safe
+
+import (
+ "html/template"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/cast"
+)
+
// Namespace provides template functions for the "safe" namespace.
// It is stateless; all methods simply re-type their input.
type Namespace struct{}

// New creates a Namespace instance for the safe template functions.
func New() *Namespace {
	var ns Namespace
	return &ns
}
+
+// CSS returns the string s as html/template CSS content.
+func (ns *Namespace) CSS(s any) (template.CSS, error) {
+ ss, err := cast.ToStringE(s)
+ return template.CSS(ss), err
+}
+
+// HTML returns the string s as html/template HTML content.
+func (ns *Namespace) HTML(s any) (template.HTML, error) {
+ ss, err := cast.ToStringE(s)
+ return template.HTML(ss), err
+}
+
+// HTMLAttr returns the string s as html/template HTMLAttr content.
+func (ns *Namespace) HTMLAttr(s any) (template.HTMLAttr, error) {
+ ss, err := cast.ToStringE(s)
+ return template.HTMLAttr(ss), err
+}
+
+// JS returns the given string as a html/template JS content.
+func (ns *Namespace) JS(s any) (template.JS, error) {
+ ss, err := cast.ToStringE(s)
+ return template.JS(ss), err
+}
+
+// JSStr returns the given string as a html/template JSStr content.
+func (ns *Namespace) JSStr(s any) (template.JSStr, error) {
+ ss, err := cast.ToStringE(s)
+ return template.JSStr(ss), err
+}
+
+// URL returns the string s as html/template URL content.
+func (ns *Namespace) URL(s any) (template.URL, error) {
+ ss, err := cast.ToStringE(s)
+ return template.URL(ss), err
+}
+
+// SanitizeURL returns the string s as html/template URL content.
+func (ns *Namespace) SanitizeURL(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ return helpers.SanitizeURL(ss), err
+}
diff --git a/tpl/safe/safe_test.go b/tpl/safe/safe_test.go
new file mode 100644
index 000000000..81fa40fd8
--- /dev/null
+++ b/tpl/safe/safe_test.go
@@ -0,0 +1,211 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package safe
+
+import (
+ "html/template"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// tstNoStringer is an input that cast.ToStringE cannot convert to a
// string; used below to exercise each method's error path.
type tstNoStringer struct{}

// TestCSS verifies that CSS wraps input as template.CSS and errors on
// non-stringable input (expect == false marks an error case).
func TestCSS(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{`a[href =~ "//example.com"]#foo`, template.CSS(`a[href =~ "//example.com"]#foo`)},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.CSS(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}

// TestHTML verifies that HTML wraps input as template.HTML unchanged.
func TestHTML(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{`Hello, <b>World</b> &amp;tc!`, template.HTML(`Hello, <b>World</b> &amp;tc!`)},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.HTML(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}

// TestHTMLAttr verifies that HTMLAttr wraps input as template.HTMLAttr.
func TestHTMLAttr(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{` dir="ltr"`, template.HTMLAttr(` dir="ltr"`)},
		// errors
		{tstNoStringer{}, false},
	} {
		result, err := ns.HTMLAttr(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}

// TestJS verifies that JS wraps input as template.JS unchanged.
func TestJS(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{`c && alert("Hello, World!");`, template.JS(`c && alert("Hello, World!");`)},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.JS(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}

// TestJSStr verifies that JSStr wraps input as template.JSStr unchanged.
func TestJSStr(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{`Hello, World & O'Reilly\x21`, template.JSStr(`Hello, World & O'Reilly\x21`)},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.JSStr(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}

// TestURL verifies that URL wraps input as template.URL unchanged.
func TestURL(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{`greeting=H%69&addressee=(World)`, template.URL(`greeting=H%69&addressee=(World)`)},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.URL(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}

// TestSanitizeURL verifies path-element cleanup done by SanitizeURL.
func TestSanitizeURL(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	ns := New()

	for _, test := range []struct {
		a      any
		expect any
	}{
		{"http://foo/../../bar", "http://foo/bar"},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.SanitizeURL(test.a)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Assert(result, qt.Equals, test.expect)
	}
}
diff --git a/tpl/site/init.go b/tpl/site/init.go
new file mode 100644
index 000000000..34ea7309f
--- /dev/null
+++ b/tpl/site/init.go
@@ -0,0 +1,43 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package site provides template functions for accessing the Site object.
+package site
+
+import (
+ "github.com/gohugoio/hugo/deps"
+
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "site"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ s := d.Site
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return s, nil },
+ }
+
+ if s == nil {
+ panic("no Site")
+ }
+
+ // We just add the Site as the namespace here. No method mappings.
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/strings/init.go b/tpl/strings/init.go
new file mode 100644
index 000000000..a11246e1c
--- /dev/null
+++ b/tpl/strings/init.go
@@ -0,0 +1,229 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strings
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "strings"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Chomp,
+ []string{"chomp"},
+ [][2]string{
+ {`{{chomp "<p>Blockhead</p>\n" | safeHTML }}`, `<p>Blockhead</p>`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.CountRunes,
+ []string{"countrunes"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.RuneCount,
+ nil,
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.CountWords,
+ []string{"countwords"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Count,
+ nil,
+ [][2]string{
+ {`{{"aabab" | strings.Count "a" }}`, `3`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Contains,
+ nil,
+ [][2]string{
+ {`{{ strings.Contains "abc" "b" }}`, `true`},
+ {`{{ strings.Contains "abc" "d" }}`, `false`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ContainsAny,
+ nil,
+ [][2]string{
+ {`{{ strings.ContainsAny "abc" "bcd" }}`, `true`},
+ {`{{ strings.ContainsAny "abc" "def" }}`, `false`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.FindRE,
+ []string{"findRE"},
+ [][2]string{
+ {
+ `{{ findRE "[G|g]o" "Hugo is a static side generator written in Go." "1" }}`,
+ `[go]`,
+ },
+ },
+ )
+
+ ns.AddMethodMapping(ctx.HasPrefix,
+ []string{"hasPrefix"},
+ [][2]string{
+ {`{{ hasPrefix "Hugo" "Hu" }}`, `true`},
+ {`{{ hasPrefix "Hugo" "Fu" }}`, `false`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ToLower,
+ []string{"lower"},
+ [][2]string{
+ {`{{lower "BatMan"}}`, `batman`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Replace,
+ []string{"replace"},
+ [][2]string{
+ {
+ `{{ replace "Batman and Robin" "Robin" "Catwoman" }}`,
+ `Batman and Catwoman`,
+ },
+ {
+ `{{ replace "aabbaabb" "a" "z" 2 }}`,
+ `zzbbaabb`,
+ },
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ReplaceRE,
+ []string{"replaceRE"},
+ [][2]string{
+ {
+ `{{ replaceRE "a+b" "X" "aabbaabbab" }}`,
+ `XbXbX`,
+ },
+ {
+ `{{ replaceRE "a+b" "X" "aabbaabbab" 1 }}`,
+ `Xbaabbab`,
+ },
+ },
+ )
+
+ ns.AddMethodMapping(ctx.SliceString,
+ []string{"slicestr"},
+ [][2]string{
+ {`{{slicestr "BatMan" 0 3}}`, `Bat`},
+ {`{{slicestr "BatMan" 3}}`, `Man`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Split,
+ []string{"split"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Substr,
+ []string{"substr"},
+ [][2]string{
+ {`{{substr "BatMan" 0 -3}}`, `Bat`},
+ {`{{substr "BatMan" 3 3}}`, `Man`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Trim,
+ []string{"trim"},
+ [][2]string{
+ {`{{ trim "++Batman--" "+-" }}`, `Batman`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.TrimLeft,
+ nil,
+ [][2]string{
+ {`{{ "aabbaa" | strings.TrimLeft "a" }}`, `bbaa`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.TrimPrefix,
+ nil,
+ [][2]string{
+ {`{{ "aabbaa" | strings.TrimPrefix "a" }}`, `abbaa`},
+ {`{{ "aabbaa" | strings.TrimPrefix "aa" }}`, `bbaa`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.TrimRight,
+ nil,
+ [][2]string{
+ {`{{ "aabbaa" | strings.TrimRight "a" }}`, `aabb`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.TrimSuffix,
+ nil,
+ [][2]string{
+ {`{{ "aabbaa" | strings.TrimSuffix "a" }}`, `aabba`},
+ {`{{ "aabbaa" | strings.TrimSuffix "aa" }}`, `aabb`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Title,
+ []string{"title"},
+ [][2]string{
+ {`{{title "Bat man"}}`, `Bat Man`},
+ {`{{title "somewhere over the rainbow"}}`, `Somewhere Over the Rainbow`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.FirstUpper,
+ nil,
+ [][2]string{
+ {`{{ "hugo rocks!" | strings.FirstUpper }}`, `Hugo rocks!`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Truncate,
+ []string{"truncate"},
+ [][2]string{
+ {`{{ "this is a very long text" | truncate 10 " ..." }}`, `this is a ...`},
+ {`{{ "With [Markdown](/markdown) inside." | markdownify | truncate 14 }}`, `With <a href="/markdown">Markdown …</a>`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Repeat,
+ nil,
+ [][2]string{
+ {`{{ "yo" | strings.Repeat 4 }}`, `yoyoyoyo`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ToUpper,
+ []string{"upper"},
+ [][2]string{
+ {`{{upper "BatMan"}}`, `BATMAN`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/strings/regexp.go b/tpl/strings/regexp.go
new file mode 100644
index 000000000..5b6a812d4
--- /dev/null
+++ b/tpl/strings/regexp.go
@@ -0,0 +1,125 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strings
+
+import (
+ "regexp"
+ "sync"
+
+ "github.com/spf13/cast"
+)
+
+// FindRE returns a list of strings that match the regular expression. By default all matches
+// will be included. The number of matches can be limited with an optional third parameter.
+func (ns *Namespace) FindRE(expr string, content any, limit ...any) ([]string, error) {
+ re, err := reCache.Get(expr)
+ if err != nil {
+ return nil, err
+ }
+
+ conv, err := cast.ToStringE(content)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(limit) == 0 {
+ return re.FindAllString(conv, -1), nil
+ }
+
+ lim, err := cast.ToIntE(limit[0])
+ if err != nil {
+ return nil, err
+ }
+
+ return re.FindAllString(conv, lim), nil
+}
+
+// ReplaceRE returns a copy of s, replacing all matches of the regular
+// expression pattern with the replacement text repl. The number of replacements
+// can be limited with an optional fourth parameter.
+func (ns *Namespace) ReplaceRE(pattern, repl, s any, n ...any) (_ string, err error) {
+ sp, err := cast.ToStringE(pattern)
+ if err != nil {
+ return
+ }
+
+ sr, err := cast.ToStringE(repl)
+ if err != nil {
+ return
+ }
+
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return
+ }
+
+ nn := -1
+ if len(n) > 0 {
+ nn, err = cast.ToIntE(n[0])
+ if err != nil {
+ return
+ }
+ }
+
+ re, err := reCache.Get(sp)
+ if err != nil {
+ return "", err
+ }
+
+ return re.ReplaceAllStringFunc(ss, func(str string) string {
+ if nn == 0 {
+ return str
+ }
+
+ nn -= 1
+ return re.ReplaceAllString(str, sr)
+ }), nil
+}
+
// regexpCache memoizes compiled regular expressions behind a
// read/write mutex so template functions can share them concurrently.
type regexpCache struct {
	mu sync.RWMutex
	re map[string]*regexp.Regexp
}

// Get returns the compiled regexp for pattern, compiling and caching
// it on first use. Invalid patterns return the compile error.
func (rc *regexpCache) Get(pattern string) (*regexp.Regexp, error) {
	if re, ok := rc.get(pattern); ok {
		return re, nil
	}

	re, err := regexp.Compile(pattern)
	if err != nil {
		return nil, err
	}
	rc.set(pattern, re)
	return re, nil
}

// get performs a read-locked cache lookup.
func (rc *regexpCache) get(key string) (*regexp.Regexp, bool) {
	rc.mu.RLock()
	defer rc.mu.RUnlock()
	re, ok := rc.re[key]
	return re, ok
}

// set stores a compiled regexp under key.
func (rc *regexpCache) set(key string, re *regexp.Regexp) {
	rc.mu.Lock()
	defer rc.mu.Unlock()
	rc.re[key] = re
}

// reCache is the package-level cache shared by FindRE and ReplaceRE.
var reCache = regexpCache{re: make(map[string]*regexp.Regexp)}
diff --git a/tpl/strings/regexp_test.go b/tpl/strings/regexp_test.go
new file mode 100644
index 000000000..9ac098c17
--- /dev/null
+++ b/tpl/strings/regexp_test.go
@@ -0,0 +1,93 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strings
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestFindRE covers match limits (including nil, which casts to 0 and
// yields no matches), an invalid pattern, and a non-stringable input
// (expect == false marks an error case).
func TestFindRE(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	for _, test := range []struct {
		expr    string
		content any
		limit   any
		expect  any
	}{
		{"[G|g]o", "Hugo is a static site generator written in Go.", 2, []string{"go", "Go"}},
		{"[G|g]o", "Hugo is a static site generator written in Go.", -1, []string{"go", "Go"}},
		{"[G|g]o", "Hugo is a static site generator written in Go.", 1, []string{"go"}},
		{"[G|g]o", "Hugo is a static site generator written in Go.", "1", []string{"go"}},
		{"[G|g]o", "Hugo is a static site generator written in Go.", nil, []string(nil)},
		// errors
		{"[G|go", "Hugo is a static site generator written in Go.", nil, false},
		{"[G|g]o", t, nil, false},
	} {
		result, err := ns.FindRE(test.expr, test.content, test.limit)

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Check(result, qt.DeepEquals, test.expect)
	}
}

// TestReplaceRE covers capture-group substitution, replacement limits,
// and the error paths (invalid pattern, non-stringable arguments).
func TestReplaceRE(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	for _, test := range []struct {
		pattern any
		repl    any
		s       any
		n       []any
		expect  any
	}{
		{"^https?://([^/]+).*", "$1", "http://gohugo.io/docs", nil, "gohugo.io"},
		{"^https?://([^/]+).*", "$2", "http://gohugo.io/docs", nil, ""},
		{"(ab)", "AB", "aabbaab", nil, "aABbaAB"},
		{"(ab)", "AB", "aabbaab", []any{1}, "aABbaab"},
		// errors
		{"(ab", "AB", "aabb", nil, false}, // invalid re
		{tstNoStringer{}, "$2", "http://gohugo.io/docs", nil, false},
		{"^https?://([^/]+).*", tstNoStringer{}, "http://gohugo.io/docs", nil, false},
		{"^https?://([^/]+).*", "$2", tstNoStringer{}, nil, false},
	} {

		var (
			result string
			err    error
		)
		if len(test.n) > 0 {
			result, err = ns.ReplaceRE(test.pattern, test.repl, test.s, test.n...)
		} else {
			result, err = ns.ReplaceRE(test.pattern, test.repl, test.s)
		}

		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		c.Check(result, qt.Equals, test.expect)
	}
}
diff --git a/tpl/strings/strings.go b/tpl/strings/strings.go
new file mode 100644
index 000000000..a49451483
--- /dev/null
+++ b/tpl/strings/strings.go
@@ -0,0 +1,505 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package strings provides template functions for manipulating strings.
+package strings
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "regexp"
+ "strings"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/common/text"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the strings-namespaced template functions.
+func New(d *deps.Deps) *Namespace {
+ titleCaseStyle := d.Cfg.GetString("titleCaseStyle")
+ titleFunc := helpers.GetTitleFunc(titleCaseStyle)
+ return &Namespace{deps: d, titleFunc: titleFunc}
+}
+
+// Namespace provides template functions for the "strings" namespace.
+// Most functions mimic the Go stdlib, but the order of the parameters may be
+// different to ease their use in the Go template system.
+type Namespace struct {
+ titleFunc func(s string) string
+ deps *deps.Deps
+}
+
+// CountRunes returns the number of runes in s, excluding whitespace.
+func (ns *Namespace) CountRunes(s any) (int, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return 0, fmt.Errorf("Failed to convert content to string: %w", err)
+ }
+
+ counter := 0
+ for _, r := range tpl.StripHTML(ss) {
+ if !helpers.IsWhitespace(r) {
+ counter++
+ }
+ }
+
+ return counter, nil
+}
+
+// RuneCount returns the number of runes in s.
+func (ns *Namespace) RuneCount(s any) (int, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return 0, fmt.Errorf("Failed to convert content to string: %w", err)
+ }
+ return utf8.RuneCountInString(ss), nil
+}
+
+// CountWords returns the approximate word count in s.
+func (ns *Namespace) CountWords(s any) (int, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return 0, fmt.Errorf("Failed to convert content to string: %w", err)
+ }
+
+ isCJKLanguage, err := regexp.MatchString(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`, ss)
+ if err != nil {
+ return 0, fmt.Errorf("Failed to match regex pattern against string: %w", err)
+ }
+
+ if !isCJKLanguage {
+ return len(strings.Fields(tpl.StripHTML(ss))), nil
+ }
+
+ counter := 0
+ for _, word := range strings.Fields(tpl.StripHTML(ss)) {
+ runeCount := utf8.RuneCountInString(word)
+ if len(word) == runeCount {
+ counter++
+ } else {
+ counter += runeCount
+ }
+ }
+
+ return counter, nil
+}
+
+// Count counts the number of non-overlapping instances of substr in s.
+// If substr is an empty string, Count returns 1 + the number of Unicode code points in s.
+func (ns *Namespace) Count(substr, s any) (int, error) {
+ substrs, err := cast.ToStringE(substr)
+ if err != nil {
+ return 0, fmt.Errorf("Failed to convert substr to string: %w", err)
+ }
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return 0, fmt.Errorf("Failed to convert s to string: %w", err)
+ }
+ return strings.Count(ss, substrs), nil
+}
+
+// Chomp returns a copy of s with all trailing newline characters removed.
+func (ns *Namespace) Chomp(s any) (any, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ res := text.Chomp(ss)
+ switch s.(type) {
+ case template.HTML:
+ return template.HTML(res), nil
+ default:
+ return res, nil
+ }
+}
+
+// Contains reports whether substr is in s.
+func (ns *Namespace) Contains(s, substr any) (bool, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return false, err
+ }
+
+ su, err := cast.ToStringE(substr)
+ if err != nil {
+ return false, err
+ }
+
+ return strings.Contains(ss, su), nil
+}
+
+// ContainsAny reports whether any Unicode code points in chars are within s.
+func (ns *Namespace) ContainsAny(s, chars any) (bool, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return false, err
+ }
+
+ sc, err := cast.ToStringE(chars)
+ if err != nil {
+ return false, err
+ }
+
+ return strings.ContainsAny(ss, sc), nil
+}
+
+// HasPrefix tests whether the input s begins with prefix.
+func (ns *Namespace) HasPrefix(s, prefix any) (bool, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return false, err
+ }
+
+ sx, err := cast.ToStringE(prefix)
+ if err != nil {
+ return false, err
+ }
+
+ return strings.HasPrefix(ss, sx), nil
+}
+
+// HasSuffix tests whether the input s begins with suffix.
+func (ns *Namespace) HasSuffix(s, suffix any) (bool, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return false, err
+ }
+
+ sx, err := cast.ToStringE(suffix)
+ if err != nil {
+ return false, err
+ }
+
+ return strings.HasSuffix(ss, sx), nil
+}
+
+// Replace returns a copy of the string s with all occurrences of old replaced
+// with new. The number of replacements can be limited with an optional fourth
+// parameter.
+func (ns *Namespace) Replace(s, old, new any, limit ...any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ so, err := cast.ToStringE(old)
+ if err != nil {
+ return "", err
+ }
+
+ sn, err := cast.ToStringE(new)
+ if err != nil {
+ return "", err
+ }
+
+ if len(limit) == 0 {
+ return strings.ReplaceAll(ss, so, sn), nil
+ }
+
+ lim, err := cast.ToIntE(limit[0])
+ if err != nil {
+ return "", err
+ }
+
+ return strings.Replace(ss, so, sn, lim), nil
+}
+
+// SliceString slices a string by specifying a half-open range with
+// two indices, start and end. 1 and 4 creates a slice including elements 1 through 3.
+// The end index can be omitted, it defaults to the string's length.
+func (ns *Namespace) SliceString(a any, startEnd ...any) (string, error) {
+ aStr, err := cast.ToStringE(a)
+ if err != nil {
+ return "", err
+ }
+
+ var argStart, argEnd int
+
+ argNum := len(startEnd)
+
+ if argNum > 0 {
+ if argStart, err = cast.ToIntE(startEnd[0]); err != nil {
+ return "", errors.New("start argument must be integer")
+ }
+ }
+ if argNum > 1 {
+ if argEnd, err = cast.ToIntE(startEnd[1]); err != nil {
+ return "", errors.New("end argument must be integer")
+ }
+ }
+
+ if argNum > 2 {
+ return "", errors.New("too many arguments")
+ }
+
+ asRunes := []rune(aStr)
+
+ if argNum > 0 && (argStart < 0 || argStart >= len(asRunes)) {
+ return "", errors.New("slice bounds out of range")
+ }
+
+ if argNum == 2 {
+ if argEnd < 0 || argEnd > len(asRunes) {
+ return "", errors.New("slice bounds out of range")
+ }
+ return string(asRunes[argStart:argEnd]), nil
+ } else if argNum == 1 {
+ return string(asRunes[argStart:]), nil
+ } else {
+ return string(asRunes[:]), nil
+ }
+}
+
+// Split slices an input string into all substrings separated by delimiter.
+func (ns *Namespace) Split(a any, delimiter string) ([]string, error) {
+ aStr, err := cast.ToStringE(a)
+ if err != nil {
+ return []string{}, err
+ }
+
+ return strings.Split(aStr, delimiter), nil
+}
+
// Substr extracts parts of a string, beginning at the character at the specified
// position, and returns the specified number of characters.
//
// It normally takes two parameters: start and length.
// It can also take one parameter: start, i.e. length is omitted, in which case
// the substring starting from start until the end of the string will be returned.
//
// To extract characters from the end of the string, use a negative start number.
//
// In addition, borrowing from the extended behavior described at http://php.net/substr,
// if length is given and is negative, then that many characters will be omitted from
// the end of string.
func (ns *Namespace) Substr(a any, nums ...any) (string, error) {
	s, err := cast.ToStringE(a)
	if err != nil {
		return "", err
	}

	// Work in runes so multi-byte characters count as one position.
	asRunes := []rune(s)
	rlen := len(asRunes)

	var start, length int

	switch len(nums) {
	case 0:
		return "", errors.New("too few arguments")
	case 1:
		if start, err = cast.ToIntE(nums[0]); err != nil {
			return "", errors.New("start argument must be an integer")
		}
		// No explicit length: take everything from start to the end.
		length = rlen
	case 2:
		if start, err = cast.ToIntE(nums[0]); err != nil {
			return "", errors.New("start argument must be an integer")
		}
		if length, err = cast.ToIntE(nums[1]); err != nil {
			return "", errors.New("length argument must be an integer")
		}
	default:
		return "", errors.New("too many arguments")
	}

	if rlen == 0 {
		return "", nil
	}

	// A negative start counts backwards from the end of the string.
	if start < 0 {
		start += rlen
	}

	// start was originally negative beyond rlen
	if start < 0 {
		start = 0
	}

	if start > rlen-1 {
		return "", nil
	}

	end := rlen

	switch {
	case length == 0:
		return "", nil
	case length < 0:
		// Negative length omits that many characters from the end.
		end += length
	case length > 0:
		end = start + length
	}

	// Empty result when the computed window is inverted or collapsed.
	if start >= end {
		return "", nil
	}

	if end < 0 {
		return "", nil
	}

	// Clamp the end to the string length rather than erroring.
	if end > rlen {
		end = rlen
	}

	return string(asRunes[start:end]), nil
}
+
+// Title returns a copy of the input s with all Unicode letters that begin words
+// mapped to their title case.
+func (ns *Namespace) Title(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return ns.titleFunc(ss), nil
+}
+
+// FirstUpper converts s making the first character upper case.
+func (ns *Namespace) FirstUpper(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return helpers.FirstUpper(ss), nil
+}
+
+// ToLower returns a copy of the input s with all Unicode letters mapped to their
+// lower case.
+func (ns *Namespace) ToLower(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return strings.ToLower(ss), nil
+}
+
+// ToUpper returns a copy of the input s with all Unicode letters mapped to their
+// upper case.
+func (ns *Namespace) ToUpper(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return strings.ToUpper(ss), nil
+}
+
// Trim returns a slice of the string s with all leading and trailing
// characters contained in cutset removed.
func (ns *Namespace) Trim(s, cutset any) (string, error) {
	ss, err := cast.ToStringE(s)
	if err != nil {
		return "", err
	}

	sc, err := cast.ToStringE(cutset)
	if err != nil {
		return "", err
	}

	return strings.Trim(ss, sc), nil
}
+
+// TrimLeft returns a slice of the string s with all leading characters
+// contained in cutset removed.
+func (ns *Namespace) TrimLeft(cutset, s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ sc, err := cast.ToStringE(cutset)
+ if err != nil {
+ return "", err
+ }
+
+ return strings.TrimLeft(ss, sc), nil
+}
+
+// TrimPrefix returns s without the provided leading prefix string. If s doesn't
+// start with prefix, s is returned unchanged.
+func (ns *Namespace) TrimPrefix(prefix, s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ sx, err := cast.ToStringE(prefix)
+ if err != nil {
+ return "", err
+ }
+
+ return strings.TrimPrefix(ss, sx), nil
+}
+
+// TrimRight returns a slice of the string s with all trailing characters
+// contained in cutset removed.
+func (ns *Namespace) TrimRight(cutset, s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ sc, err := cast.ToStringE(cutset)
+ if err != nil {
+ return "", err
+ }
+
+ return strings.TrimRight(ss, sc), nil
+}
+
+// TrimSuffix returns s without the provided trailing suffix string. If s
+// doesn't end with suffix, s is returned unchanged.
+func (ns *Namespace) TrimSuffix(suffix, s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ sx, err := cast.ToStringE(suffix)
+ if err != nil {
+ return "", err
+ }
+
+ return strings.TrimSuffix(ss, sx), nil
+}
+
+// Repeat returns a new string consisting of n copies of the string s.
+func (ns *Namespace) Repeat(n, s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ sn, err := cast.ToIntE(n)
+ if err != nil {
+ return "", err
+ }
+
+ if sn < 0 {
+ return "", errors.New("strings: negative Repeat count")
+ }
+
+ return strings.Repeat(ss, sn), nil
+}
diff --git a/tpl/strings/strings_test.go b/tpl/strings/strings_test.go
new file mode 100644
index 000000000..7e3960934
--- /dev/null
+++ b/tpl/strings/strings_test.go
@@ -0,0 +1,787 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strings
+
+import (
+ "html/template"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/spf13/cast"
+)
+
+var ns = New(&deps.Deps{Cfg: config.New()})
+
+type tstNoStringer struct{}
+
+func TestChomp(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"\n a\n", "\n a"},
+ {"\n a\n\n", "\n a"},
+ {"\n a\r\n", "\n a"},
+ {"\n a\n\r\n", "\n a"},
+ {"\n a\r\r", "\n a"},
+ {"\n a\r", "\n a"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Chomp(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+
+ // repeat the check with template.HTML input
+ result, err = ns.Chomp(template.HTML(cast.ToString(test.s)))
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, template.HTML(cast.ToString(test.expect)))
+ }
+}
+
+func TestContains(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ substr any
+ expect bool
+ isErr bool
+ }{
+ {"", "", true, false},
+ {"123", "23", true, false},
+ {"123", "234", false, false},
+ {"123", "", true, false},
+ {"", "a", false, false},
+ {123, "23", true, false},
+ {123, "234", false, false},
+ {123, "", true, false},
+ {template.HTML("123"), []byte("23"), true, false},
+ {template.HTML("123"), []byte("234"), false, false},
+ {template.HTML("123"), []byte(""), true, false},
+ // errors
+ {"", tstNoStringer{}, false, true},
+ {tstNoStringer{}, "", false, true},
+ } {
+
+ result, err := ns.Contains(test.s, test.substr)
+
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestContainsAny(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ substr any
+ expect bool
+ isErr bool
+ }{
+ {"", "", false, false},
+ {"", "1", false, false},
+ {"", "123", false, false},
+ {"1", "", false, false},
+ {"1", "1", true, false},
+ {"111", "1", true, false},
+ {"123", "789", false, false},
+ {"123", "729", true, false},
+ {"a☺b☻c☹d", "uvw☻xyz", true, false},
+ {1, "", false, false},
+ {1, "1", true, false},
+ {111, "1", true, false},
+ {123, "789", false, false},
+ {123, "729", true, false},
+ {[]byte("123"), template.HTML("789"), false, false},
+ {[]byte("123"), template.HTML("729"), true, false},
+ {[]byte("a☺b☻c☹d"), template.HTML("uvw☻xyz"), true, false},
+ // errors
+ {"", tstNoStringer{}, false, true},
+ {tstNoStringer{}, "", false, true},
+ } {
+
+ result, err := ns.ContainsAny(test.s, test.substr)
+
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestCountRunes(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"foo bar", 6},
+ {"旁边", 2},
+ {`<div class="test">旁边</div>`, 2},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.CountRunes(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestRuneCount(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"foo bar", 7},
+ {"旁边", 2},
+ {`<div class="test">旁边</div>`, 26},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.RuneCount(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestCountWords(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"Do Be Do Be Do", 5},
+ {"旁边", 2},
+ {`<div class="test">旁边</div>`, 2},
+ {"Here's to you...", 3},
+ {"Here’s to you...", 3},
+ {"Here’s to you…", 3},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.CountWords(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestHasPrefix(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ prefix any
+ expect any
+ isErr bool
+ }{
+ {"abcd", "ab", true, false},
+ {"abcd", "cd", false, false},
+ {template.HTML("abcd"), "ab", true, false},
+ {template.HTML("abcd"), "cd", false, false},
+ {template.HTML("1234"), 12, true, false},
+ {template.HTML("1234"), 34, false, false},
+ {[]byte("abcd"), "ab", true, false},
+ // errors
+ {"", tstNoStringer{}, false, true},
+ {tstNoStringer{}, "", false, true},
+ } {
+
+ result, err := ns.HasPrefix(test.s, test.prefix)
+
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestHasSuffix(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ suffix any
+ expect any
+ isErr bool
+ }{
+ {"abcd", "cd", true, false},
+ {"abcd", "ab", false, false},
+ {template.HTML("abcd"), "cd", true, false},
+ {template.HTML("abcd"), "ab", false, false},
+ {template.HTML("1234"), 34, true, false},
+ {template.HTML("1234"), 12, false, false},
+ {[]byte("abcd"), "cd", true, false},
+ // errors
+ {"", tstNoStringer{}, false, true},
+ {tstNoStringer{}, "", false, true},
+ } {
+
+ result, err := ns.HasSuffix(test.s, test.suffix)
+
+ if test.isErr {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestReplace(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ old any
+ new any
+ limit any
+ expect any
+ }{
+ {"aab", "a", "b", nil, "bbb"},
+ {"11a11", 1, 2, nil, "22a22"},
+ {12345, 1, 2, nil, "22345"},
+ {"aab", "a", "b", 1, "bab"},
+ {"11a11", 1, 2, 2, "22a11"},
+ // errors
+ {tstNoStringer{}, "a", "b", nil, false},
+ {"a", tstNoStringer{}, "b", nil, false},
+ {"a", "b", tstNoStringer{}, nil, false},
+ } {
+
+ var (
+ result string
+ err error
+ )
+
+ if test.limit != nil {
+ result, err = ns.Replace(test.s, test.old, test.new, test.limit)
+ } else {
+ result, err = ns.Replace(test.s, test.old, test.new)
+ }
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestSliceString(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ var err error
+ for _, test := range []struct {
+ v1 any
+ v2 any
+ v3 any
+ expect any
+ }{
+ {"abc", 1, 2, "b"},
+ {"abc", 1, 3, "bc"},
+ {"abcdef", 1, int8(3), "bc"},
+ {"abcdef", 1, int16(3), "bc"},
+ {"abcdef", 1, int32(3), "bc"},
+ {"abcdef", 1, int64(3), "bc"},
+ {"abc", 0, 1, "a"},
+ {"abcdef", nil, nil, "abcdef"},
+ {"abcdef", 0, 6, "abcdef"},
+ {"abcdef", 0, 2, "ab"},
+ {"abcdef", 2, nil, "cdef"},
+ {"abcdef", int8(2), nil, "cdef"},
+ {"abcdef", int16(2), nil, "cdef"},
+ {"abcdef", int32(2), nil, "cdef"},
+ {"abcdef", int64(2), nil, "cdef"},
+ {123, 1, 3, "23"},
+ {"abcdef", 6, nil, false},
+ {"abcdef", 4, 7, false},
+ {"abcdef", -1, nil, false},
+ {"abcdef", -1, 7, false},
+ {"abcdef", 1, -1, false},
+ {tstNoStringer{}, 0, 1, false},
+ {"ĀĀĀ", 0, 1, "Ā"}, // issue #1333
+ {"a", t, nil, false},
+ {"a", 1, t, false},
+ } {
+
+ var result string
+ if test.v2 == nil {
+ result, err = ns.SliceString(test.v1)
+ } else if test.v3 == nil {
+ result, err = ns.SliceString(test.v1, test.v2)
+ } else {
+ result, err = ns.SliceString(test.v1, test.v2, test.v3)
+ }
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+
+ // Too many arguments
+ _, err = ns.SliceString("a", 1, 2, 3)
+ if err == nil {
+ t.Errorf("Should have errored")
+ }
+}
+
+func TestSplit(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ v1 any
+ v2 string
+ expect any
+ }{
+ {"a, b", ", ", []string{"a", "b"}},
+ {"a & b & c", " & ", []string{"a", "b", "c"}},
+ {"http://example.com", "http://", []string{"", "example.com"}},
+ {123, "2", []string{"1", "3"}},
+ {tstNoStringer{}, ",", false},
+ } {
+
+ result, err := ns.Split(test.v1, test.v2)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.DeepEquals, test.expect)
+ }
+}
+
+func TestSubstr(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ var err error
+ for _, test := range []struct {
+ v1 any
+ v2 any
+ v3 any
+ expect any
+ }{
+ {"abc", 1, 2, "bc"},
+ {"abc", 0, 1, "a"},
+ {"abcdef", 0, 0, ""},
+ {"abcdef", 1, 0, ""},
+ {"abcdef", -1, 0, ""},
+ {"abcdef", -1, 2, "f"},
+ {"abcdef", -3, 3, "def"},
+ {"abcdef", -1, nil, "f"},
+ {"abcdef", -2, nil, "ef"},
+ {"abcdef", -3, 1, "d"},
+ {"abcdef", 0, -1, "abcde"},
+ {"abcdef", 2, -1, "cde"},
+ {"abcdef", 4, -4, ""},
+ {"abcdef", 7, 1, ""},
+ {"abcdef", 6, nil, ""},
+ {"abcdef", 1, 100, "bcdef"},
+ {"abcdef", -100, 3, "abc"},
+ {"abcdef", -3, -1, "de"},
+ {"abcdef", 2, nil, "cdef"},
+ {"abcdef", int8(2), nil, "cdef"},
+ {"abcdef", int16(2), nil, "cdef"},
+ {"abcdef", int32(2), nil, "cdef"},
+ {"abcdef", int64(2), nil, "cdef"},
+ {"abcdef", 2, int8(3), "cde"},
+ {"abcdef", 2, int16(3), "cde"},
+ {"abcdef", 2, int32(3), "cde"},
+ {"abcdef", 2, int64(3), "cde"},
+ {123, 1, 3, "23"},
+ {1.2e3, 0, 4, "1200"},
+ {tstNoStringer{}, 0, 1, false},
+ {"abcdef", 2.0, nil, "cdef"},
+ {"abcdef", 2.0, 2, "cd"},
+ {"abcdef", 2, 2.0, "cd"},
+ {"ĀĀĀ", 1, 2, "ĀĀ"}, // # issue 1333
+ {"abcdef", "doo", nil, false},
+ {"abcdef", "doo", "doo", false},
+ {"abcdef", 1, "doo", false},
+ {"", 0, nil, ""},
+ } {
+
+ var result string
+
+ if test.v3 == nil {
+ result, err = ns.Substr(test.v1, test.v2)
+ } else {
+ result, err = ns.Substr(test.v1, test.v2, test.v3)
+ }
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Check(err, qt.Not(qt.IsNil), qt.Commentf("%v", test))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil, qt.Commentf("%v", test))
+ c.Check(result, qt.Equals, test.expect, qt.Commentf("%v", test))
+ }
+
+ _, err = ns.Substr("abcdef")
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ _, err = ns.Substr("abcdef", 1, 2, 3)
+ c.Assert(err, qt.Not(qt.IsNil))
+}
+
+func TestTitle(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"test", "Test"},
+ {template.HTML("hypertext"), "Hypertext"},
+ {[]byte("bytes"), "Bytes"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Title(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestToLower(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"TEST", "test"},
+ {template.HTML("LoWeR"), "lower"},
+ {[]byte("BYTES"), "bytes"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.ToLower(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestToUpper(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"test", "TEST"},
+ {template.HTML("UpPeR"), "UPPER"},
+ {[]byte("bytes"), "BYTES"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.ToUpper(test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestTrim(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ cutset any
+ expect any
+ }{
+ {"abba", "a", "bb"},
+ {"abba", "ab", ""},
+ {"<tag>", "<>", "tag"},
+ {`"quote"`, `"`, "quote"},
+ {1221, "1", "22"},
+ {1221, "12", ""},
+ {template.HTML("<tag>"), "<>", "tag"},
+ {[]byte("<tag>"), "<>", "tag"},
+ // errors
+ {"", tstNoStringer{}, false},
+ {tstNoStringer{}, "", false},
+ } {
+
+ result, err := ns.Trim(test.s, test.cutset)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestTrimLeft(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ cutset any
+ expect any
+ }{
+ {"abba", "a", "bba"},
+ {"abba", "ab", ""},
+ {"<tag>", "<>", "tag>"},
+ {`"quote"`, `"`, `quote"`},
+ {1221, "1", "221"},
+ {1221, "12", ""},
+ {"007", "0", "7"},
+ {template.HTML("<tag>"), "<>", "tag>"},
+ {[]byte("<tag>"), "<>", "tag>"},
+ // errors
+ {"", tstNoStringer{}, false},
+ {tstNoStringer{}, "", false},
+ } {
+
+ result, err := ns.TrimLeft(test.cutset, test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestTrimPrefix(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ prefix any
+ expect any
+ }{
+ {"aabbaa", "a", "abbaa"},
+ {"aabb", "b", "aabb"},
+ {1234, "12", "34"},
+ {1234, "34", "1234"},
+ // errors
+ {"", tstNoStringer{}, false},
+ {tstNoStringer{}, "", false},
+ } {
+
+ result, err := ns.TrimPrefix(test.prefix, test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestTrimRight(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ cutset any
+ expect any
+ }{
+ {"abba", "a", "abb"},
+ {"abba", "ab", ""},
+ {"<tag>", "<>", "<tag"},
+ {`"quote"`, `"`, `"quote`},
+ {1221, "1", "122"},
+ {1221, "12", ""},
+ {"007", "0", "007"},
+ {template.HTML("<tag>"), "<>", "<tag"},
+ {[]byte("<tag>"), "<>", "<tag"},
+ // errors
+ {"", tstNoStringer{}, false},
+ {tstNoStringer{}, "", false},
+ } {
+
+ result, err := ns.TrimRight(test.cutset, test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestTrimSuffix(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ suffix any
+ expect any
+ }{
+ {"aabbaa", "a", "aabba"},
+ {"aabb", "b", "aab"},
+ {1234, "12", "1234"},
+ {1234, "34", "12"},
+ // errors
+ {"", tstNoStringer{}, false},
+ {tstNoStringer{}, "", false},
+ } {
+
+ result, err := ns.TrimSuffix(test.suffix, test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+func TestRepeat(t *testing.T) {
+ t.Parallel()
+ c := qt.New(t)
+
+ for _, test := range []struct {
+ s any
+ n any
+ expect any
+ }{
+ {"yo", "2", "yoyo"},
+ {"~", "16", "~~~~~~~~~~~~~~~~"},
+ {"<tag>", "0", ""},
+ {"yay", "1", "yay"},
+ {1221, "1", "1221"},
+ {1221, 2, "12211221"},
+ {template.HTML("<tag>"), "2", "<tag><tag>"},
+ {[]byte("<tag>"), 2, "<tag><tag>"},
+ // errors
+ {"", tstNoStringer{}, false},
+ {tstNoStringer{}, "", false},
+ {"ab", -1, false},
+ } {
+
+ result, err := ns.Repeat(test.n, test.s)
+
+ if b, ok := test.expect.(bool); ok && !b {
+ c.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(result, qt.Equals, test.expect)
+ }
+}
diff --git a/tpl/strings/truncate.go b/tpl/strings/truncate.go
new file mode 100644
index 000000000..dd6267280
--- /dev/null
+++ b/tpl/strings/truncate.go
@@ -0,0 +1,157 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strings
+
+import (
+ "errors"
+ "html"
+ "html/template"
+ "regexp"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/spf13/cast"
+)
+
+var (
+ tagRE = regexp.MustCompile(`^<(/)?([^ ]+?)(?:(\s*/)| .*?)?>`)
+ htmlSinglets = map[string]bool{
+ "br": true, "col": true, "link": true,
+ "base": true, "img": true, "param": true,
+ "area": true, "hr": true, "input": true,
+ }
+)
+
+type htmlTag struct {
+ name string
+ pos int
+ openTag bool
+}
+
+// Truncate truncates a given string to the specified length.
+func (ns *Namespace) Truncate(a any, options ...any) (template.HTML, error) {
+ length, err := cast.ToIntE(a)
+ if err != nil {
+ return "", err
+ }
+ var textParam any
+ var ellipsis string
+
+ switch len(options) {
+ case 0:
+ return "", errors.New("truncate requires a length and a string")
+ case 1:
+ textParam = options[0]
+ ellipsis = " …"
+ case 2:
+ textParam = options[1]
+ ellipsis, err = cast.ToStringE(options[0])
+ if err != nil {
+ return "", errors.New("ellipsis must be a string")
+ }
+ if _, ok := options[0].(template.HTML); !ok {
+ ellipsis = html.EscapeString(ellipsis)
+ }
+ default:
+ return "", errors.New("too many arguments passed to truncate")
+ }
+ if err != nil {
+ return "", errors.New("text to truncate must be a string")
+ }
+ text, err := cast.ToStringE(textParam)
+ if err != nil {
+ return "", errors.New("text must be a string")
+ }
+
+ _, isHTML := textParam.(template.HTML)
+
+ if utf8.RuneCountInString(text) <= length {
+ if isHTML {
+ return template.HTML(text), nil
+ }
+ return template.HTML(html.EscapeString(text)), nil
+ }
+
+ tags := []htmlTag{}
+ var lastWordIndex, lastNonSpace, currentLen, endTextPos, nextTag int
+
+ for i, r := range text {
+ if i < nextTag {
+ continue
+ }
+
+ if isHTML {
+ // Make sure we keep track of HTML tags
+ slice := text[i:]
+ m := tagRE.FindStringSubmatchIndex(slice)
+ if len(m) > 0 && m[0] == 0 {
+ nextTag = i + m[1]
+ tagname := slice[m[4]:m[5]]
+ lastWordIndex = lastNonSpace
+ _, singlet := htmlSinglets[tagname]
+ if !singlet && m[6] == -1 {
+ tags = append(tags, htmlTag{name: tagname, pos: i, openTag: m[2] == -1})
+ }
+
+ continue
+ }
+ }
+
+ currentLen++
+ if unicode.IsSpace(r) {
+ lastWordIndex = lastNonSpace
+ } else if unicode.In(r, unicode.Han, unicode.Hangul, unicode.Hiragana, unicode.Katakana) {
+ lastWordIndex = i
+ } else {
+ lastNonSpace = i + utf8.RuneLen(r)
+ }
+
+ if currentLen > length {
+ if lastWordIndex == 0 {
+ endTextPos = i
+ } else {
+ endTextPos = lastWordIndex
+ }
+ out := text[0:endTextPos]
+ if isHTML {
+ out += ellipsis
+ // Close out any open HTML tags
+ var currentTag *htmlTag
+ for i := len(tags) - 1; i >= 0; i-- {
+ tag := tags[i]
+ if tag.pos >= endTextPos || currentTag != nil {
+ if currentTag != nil && currentTag.name == tag.name {
+ currentTag = nil
+ }
+ continue
+ }
+
+ if tag.openTag {
+ out += ("</" + tag.name + ">")
+ } else {
+ currentTag = &tag
+ }
+ }
+
+ return template.HTML(out), nil
+ }
+ return template.HTML(html.EscapeString(out) + ellipsis), nil
+ }
+ }
+
+ if isHTML {
+ return template.HTML(text), nil
+ }
+ return template.HTML(html.EscapeString(text)), nil
+}
diff --git a/tpl/strings/truncate_test.go b/tpl/strings/truncate_test.go
new file mode 100644
index 000000000..f7d5d132d
--- /dev/null
+++ b/tpl/strings/truncate_test.go
@@ -0,0 +1,83 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strings
+
+import (
+ "html/template"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+func TestTruncate(t *testing.T) {
+ t.Parallel()
+
+ var err error
+ cases := []struct {
+ v1 any
+ v2 any
+ v3 any
+ want any
+ isErr bool
+ }{
+ {10, "I am a test sentence", nil, template.HTML("I am a …"), false},
+ {10, "", "I am a test sentence", template.HTML("I am a"), false},
+ {10, "", "a b c d e f g h i j k", template.HTML("a b c d e"), false},
+ {12, "", "<b>Should be escaped</b>", template.HTML("&lt;b&gt;Should be"), false},
+ {10, template.HTML(" <a href='#'>Read more</a>"), "I am a test sentence", template.HTML("I am a <a href='#'>Read more</a>"), false},
+ {20, template.HTML("I have a <a href='/markdown'>Markdown link</a> inside."), nil, template.HTML("I have a <a href='/markdown'>Markdown …</a>"), false},
+ {10, "IamanextremelylongwordthatjustgoesonandonandonjusttoannoyyoualmostasifIwaswritteninGermanActuallyIbettheresagermanwordforthis", nil, template.HTML("Iamanextre …"), false},
+ {10, template.HTML("<p>IamanextremelylongwordthatjustgoesonandonandonjusttoannoyyoualmostasifIwaswritteninGermanActuallyIbettheresagermanwordforthis</p>"), nil, template.HTML("<p>Iamanextre …</p>"), false},
+ {13, template.HTML("With <a href=\"/markdown\">Markdown</a> inside."), nil, template.HTML("With <a href=\"/markdown\">Markdown …</a>"), false},
+ {14, "Hello中国 Good 好的", nil, template.HTML("Hello中国 Good 好 …"), false},
+ {15, "", template.HTML("A <br> tag that's not closed"), template.HTML("A <br> tag that's"), false},
+ {14, template.HTML("<p>Hello中国 Good 好的</p>"), nil, template.HTML("<p>Hello中国 Good 好 …</p>"), false},
+ {2, template.HTML("<p>P1</p><p>P2</p>"), nil, template.HTML("<p>P1 …</p>"), false},
+ {3, template.HTML(strings.Repeat("<p>P</p>", 20)), nil, template.HTML("<p>P</p><p>P</p><p>P …</p>"), false},
+ {18, template.HTML("<p>test <b>hello</b> test something</p>"), nil, template.HTML("<p>test <b>hello</b> test …</p>"), false},
+ {4, template.HTML("<p>a<b><i>b</b>c d e</p>"), nil, template.HTML("<p>a<b><i>b</b>c …</p>"), false},
+ {10, nil, nil, template.HTML(""), true},
+ {nil, nil, nil, template.HTML(""), true},
+ }
+ for i, c := range cases {
+ var result template.HTML
+ if c.v2 == nil {
+ result, err = ns.Truncate(c.v1)
+ } else if c.v3 == nil {
+ result, err = ns.Truncate(c.v1, c.v2)
+ } else {
+ result, err = ns.Truncate(c.v1, c.v2, c.v3)
+ }
+
+ if c.isErr {
+ if err == nil {
+ t.Errorf("[%d] Slice didn't return an expected error", i)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] failed: %s", i, err)
+ continue
+ }
+ if !reflect.DeepEqual(result, c.want) {
+ t.Errorf("[%d] got '%s' but expected '%s'", i, result, c.want)
+ }
+ }
+ }
+
+ // Too many arguments
+ _, err = ns.Truncate(10, " ...", "I am a test sentence", "wrong")
+ if err == nil {
+ t.Errorf("Should have errored")
+ }
+}
diff --git a/tpl/template.go b/tpl/template.go
new file mode 100644
index 000000000..738750de7
--- /dev/null
+++ b/tpl/template.go
@@ -0,0 +1,211 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tpl
+
+import (
+ "context"
+ "io"
+ "reflect"
+ "regexp"
+ "strings"
+ "unicode"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+
+ "github.com/gohugoio/hugo/output"
+
+ htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+ texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+)
+
+// TemplateManager manages the collection of templates.
+type TemplateManager interface {
+ TemplateHandler
+ TemplateFuncGetter
+ AddTemplate(name, tpl string) error
+ MarkReady() error
+}
+
+// TemplateVariants describes the possible variants of a template.
+// All of these may be empty.
+type TemplateVariants struct {
+ Language string
+ OutputFormat output.Format
+}
+
+// TemplateFinder finds templates.
+type TemplateFinder interface {
+ TemplateLookup
+ TemplateLookupVariant
+}
+
+// UnusedTemplatesProvider lists unused templates if the build is configured to track those.
+type UnusedTemplatesProvider interface {
+ UnusedTemplates() []FileInfo
+}
+
+// TemplateHandler finds and executes templates.
+type TemplateHandler interface {
+ TemplateFinder
+ Execute(t Template, wr io.Writer, data any) error
+ ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error
+ LookupLayout(d output.LayoutDescriptor, f output.Format) (Template, bool, error)
+ HasTemplate(name string) bool
+}
+
+type TemplateLookup interface {
+ Lookup(name string) (Template, bool)
+}
+
+type TemplateLookupVariant interface {
+ // TODO(bep) this currently only works for shortcodes.
+ // We may unify and expand this variant pattern to the
+ // other templates, but we need this now for the shortcodes to
+ // quickly determine if a shortcode has a template for a given
+ // output format.
+ // It returns the template, if it was found or not and if there are
+ // alternative representations (output format, language).
+ // We are currently only interested in output formats, so we should improve
+ // this for speed.
+ LookupVariant(name string, variants TemplateVariants) (Template, bool, bool)
+ LookupVariants(name string) []Template
+}
+
+// Template is the common interface between text/template and html/template.
+type Template interface {
+ Name() string
+ Prepare() (*texttemplate.Template, error)
+}
+
+// TemplateParser is used to parse ad-hoc templates, e.g. in the Resource chain.
+type TemplateParser interface {
+ Parse(name, tpl string) (Template, error)
+}
+
+// TemplateParseFinder provides both parsing and finding.
+type TemplateParseFinder interface {
+ TemplateParser
+ TemplateFinder
+}
+
+// TemplateDebugger prints some debug info to stdout.
+type TemplateDebugger interface {
+ Debug()
+}
+
+// templateInfo wraps a Template with some additional information.
+type templateInfo struct {
+ Template
+ Info
+}
+
+// templateInfoManager wraps a Template with some additional information.
+type templateInfoManager struct {
+ Template
+ InfoManager
+}
+
+// TemplatesProvider as implemented by deps.Deps.
+type TemplatesProvider interface {
+ Tmpl() TemplateHandler
+ TextTmpl() TemplateParseFinder
+}
+
+// WithInfo wraps the info in a template.
+func WithInfo(templ Template, info Info) Template {
+ if manager, ok := info.(InfoManager); ok {
+ return &templateInfoManager{
+ Template: templ,
+ InfoManager: manager,
+ }
+ }
+
+ return &templateInfo{
+ Template: templ,
+ Info: info,
+ }
+}
+
+var baseOfRe = regexp.MustCompile("template: (.*?):")
+
+func extractBaseOf(err string) string {
+ m := baseOfRe.FindStringSubmatch(err)
+ if len(m) == 2 {
+ return m[1]
+ }
+ return ""
+}
+
+// TemplateFuncGetter allows to find a template func by name.
+type TemplateFuncGetter interface {
+ GetFunc(name string) (reflect.Value, bool)
+}
+
+// GetDataFromContext returns the template data context (usually .Page) from ctx if set.
+// Note: This is not fully implemented yet.
+func GetDataFromContext(ctx context.Context) any {
+ return ctx.Value(texttemplate.DataContextKey)
+}
+
+func GetHasLockFromContext(ctx context.Context) bool {
+ if v := ctx.Value(texttemplate.HasLockContextKey); v != nil {
+ return v.(bool)
+ }
+ return false
+}
+
+func SetHasLockInContext(ctx context.Context, hasLock bool) context.Context {
+ return context.WithValue(ctx, texttemplate.HasLockContextKey, hasLock)
+}
+
+const hugoNewLinePlaceholder = "___hugonl_"
+
+var (
+ stripHTMLReplacerPre = strings.NewReplacer("\n", " ", "</p>", hugoNewLinePlaceholder, "<br>", hugoNewLinePlaceholder, "<br />", hugoNewLinePlaceholder)
+ whitespaceRe = regexp.MustCompile(`\s+`)
+)
+
+// StripHTML strips out all HTML tags in s.
+func StripHTML(s string) string {
+ // Shortcut strings with no tags in them
+ if !strings.ContainsAny(s, "<>") {
+ return s
+ }
+
+ pre := stripHTMLReplacerPre.Replace(s)
+ preReplaced := pre != s
+
+ s = htmltemplate.StripTags(pre)
+
+ if preReplaced {
+ s = strings.ReplaceAll(s, hugoNewLinePlaceholder, "\n")
+ }
+
+ var wasSpace bool
+ b := bp.GetBuffer()
+ defer bp.PutBuffer(b)
+ for _, r := range s {
+ isSpace := unicode.IsSpace(r)
+ if !(isSpace && wasSpace) {
+ b.WriteRune(r)
+ }
+ wasSpace = isSpace
+ }
+
+ if b.Len() > 0 {
+ s = b.String()
+ }
+
+ return s
+}
diff --git a/tpl/template_info.go b/tpl/template_info.go
new file mode 100644
index 000000000..c21c0ae7d
--- /dev/null
+++ b/tpl/template_info.go
@@ -0,0 +1,83 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tpl
+
+import (
+ "github.com/gohugoio/hugo/identity"
+)
+
+// Increments on breaking changes.
+const TemplateVersion = 2
+
+type Info interface {
+ ParseInfo() ParseInfo
+
+ // Identifies this template and its dependencies.
+ identity.Provider
+}
+
+type FileInfo interface {
+ Name() string
+ Filename() string
+}
+
+type InfoManager interface {
+ ParseInfo() ParseInfo
+
+ // Identifies and manages this template and its dependencies.
+ identity.Manager
+}
+
+type defaultInfo struct {
+ identity.Manager
+ parseInfo ParseInfo
+}
+
+func NewInfo(id identity.Manager, parseInfo ParseInfo) Info {
+ return &defaultInfo{
+ Manager: id,
+ parseInfo: parseInfo,
+ }
+}
+
+func (info *defaultInfo) ParseInfo() ParseInfo {
+ return info.parseInfo
+}
+
+type ParseInfo struct {
+ // Set for shortcode templates with any {{ .Inner }}
+ IsInner bool
+
+ // Set for partials with a return statement.
+ HasReturn bool
+
+ // Config extracted from template.
+ Config ParseConfig
+}
+
+func (info ParseInfo) IsZero() bool {
+ return info.Config.Version == 0
+}
+
+type ParseConfig struct {
+ Version int
+}
+
+var DefaultParseConfig = ParseConfig{
+ Version: TemplateVersion,
+}
+
+var DefaultParseInfo = ParseInfo{
+ Config: DefaultParseConfig,
+}
diff --git a/tpl/template_test.go b/tpl/template_test.go
new file mode 100644
index 000000000..d989b7158
--- /dev/null
+++ b/tpl/template_test.go
@@ -0,0 +1,71 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tpl
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestExtractBaseof(t *testing.T) {
+ c := qt.New(t)
+
+ replaced := extractBaseOf(`failed: template: _default/baseof.html:37:11: executing "_default/baseof.html" at <.Parents>: can't evaluate field Parents in type *hugolib.PageOutput`)
+
+ c.Assert(replaced, qt.Equals, "_default/baseof.html")
+ c.Assert(extractBaseOf("not baseof for you"), qt.Equals, "")
+ c.Assert(extractBaseOf("template: blog/baseof.html:23:11:"), qt.Equals, "blog/baseof.html")
+}
+
+func TestStripHTML(t *testing.T) {
+ type test struct {
+ input, expected string
+ }
+ data := []test{
+ {"<h1>strip h1 tag <h1>", "strip h1 tag "},
+ {"<p> strip p tag </p>", " strip p tag "},
+ {"</br> strip br<br>", " strip br\n"},
+ {"</br> strip br2<br />", " strip br2\n"},
+ {"This <strong>is</strong> a\nnewline", "This is a newline"},
+ {"No Tags", "No Tags"},
+ {`<p>Summary Next Line.
+<figure >
+
+ <img src="/not/real" />
+
+
+</figure>
+.
+More text here.</p>
+
+<p>Some more text</p>`, "Summary Next Line. . More text here.\nSome more text\n"},
+
+ // Issue 9199
+ {"<div data-action='click->my-controller#doThing'>qwe</div>", "qwe"},
+ {"Hello, World!", "Hello, World!"},
+ {"foo&amp;bar", "foo&amp;bar"},
+ {`Hello <a href="www.example.com/">World</a>!`, "Hello World!"},
+ {"Foo <textarea>Bar</textarea> Baz", "Foo Bar Baz"},
+ {"Foo <!-- Bar --> Baz", "Foo Baz"},
+ }
+ for i, d := range data {
+ output := StripHTML(d.input)
+ if d.expected != output {
+ t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
+ }
+ }
+}
+
+const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"
diff --git a/tpl/templates/init.go b/tpl/templates/init.go
new file mode 100644
index 000000000..e068fca81
--- /dev/null
+++ b/tpl/templates/init.go
@@ -0,0 +1,44 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package templates
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "templates"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ ns.AddMethodMapping(ctx.Exists,
+ nil,
+ [][2]string{
+ {`{{ if (templates.Exists "partials/header.html") }}Yes!{{ end }}`, `Yes!`},
+ {`{{ if not (templates.Exists "partials/doesnotexist.html") }}No!{{ end }}`, `No!`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/templates/integration_test.go b/tpl/templates/integration_test.go
new file mode 100644
index 000000000..fea2d7f6e
--- /dev/null
+++ b/tpl/templates/integration_test.go
@@ -0,0 +1,85 @@
+// Copyright 2022 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package templates_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+)
+
+func TestExists(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = 'http://example.com/'
+-- layouts/index.html --
+index.html: {{ templates.Exists "index.html" }}
+post/single.html: {{ templates.Exists "post/single.html" }}
+partials/foo.html: {{ templates.Exists "partials/foo.html" }}
+partials/doesnotexist.html: {{ templates.Exists "partials/doesnotexist.html" }}
+-- layouts/post/single.html --
+-- layouts/partials/foo.html --
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+index.html: true
+post/single.html: true
+partials/foo.html: true
+partials/doesnotexist.html: false
+`)
+}
+
+func TestExistsWithBaseOf(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = 'http://example.com/'
+-- layouts/baseof.html --
+{{ block "main" . }}{{ end }}
+-- layouts/index.html --
+{{ define "main" }}
+index.html: {{ templates.Exists "index.html" }}
+post/single.html: {{ templates.Exists "post/single.html" }}
+post/doesnotexist.html: {{ templates.Exists "post/doesnotexist.html" }}
+{{ end }}
+-- layouts/post/single.html --
+{{ define "main" }}MAIN{{ end }}
+
+
+`
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ },
+ ).Build()
+
+ b.AssertFileContent("public/index.html", `
+index.html: true
+post/single.html: true
+post/doesnotexist.html: false
+
+`)
+}
diff --git a/tpl/templates/templates.go b/tpl/templates/templates.go
new file mode 100644
index 000000000..8e40f3443
--- /dev/null
+++ b/tpl/templates/templates.go
@@ -0,0 +1,38 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package templates provides template functions for working with templates.
+package templates
+
+import (
+ "github.com/gohugoio/hugo/deps"
+)
+
+// New returns a new instance of the templates-namespaced template functions.
+func New(deps *deps.Deps) *Namespace {
+ return &Namespace{
+ deps: deps,
+ }
+}
+
+// Namespace provides template functions for the "templates" namespace.
+type Namespace struct {
+ deps *deps.Deps
+}
+
+// Exists returns whether the template with the given name exists.
+// Note that this is the Unix-styled relative path including filename suffix,
+// e.g. partials/header.html
+func (ns *Namespace) Exists(name string) bool {
+ return ns.deps.Tmpl().HasTemplate(name)
+}
diff --git a/tpl/time/init.go b/tpl/time/init.go
new file mode 100644
index 000000000..4bb2ddf67
--- /dev/null
+++ b/tpl/time/init.go
@@ -0,0 +1,95 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package time
+
+import (
+ "errors"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/langs"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "time"
+
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ if d.Language == nil {
+ panic("Language must be set")
+ }
+ ctx := New(langs.GetTimeFormatter(d.Language), langs.GetLocation(d.Language))
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) {
+ // Handle overlapping "time" namespace and func.
+ //
+ // If no args are passed to `time`, assume namespace usage and
+ // return namespace context.
+ //
+ // If args are passed, call AsTime().
+
+ switch len(args) {
+ case 0:
+ return ctx, nil
+ case 1:
+ return ctx.AsTime(args[0])
+ case 2:
+ return ctx.AsTime(args[0], args[1])
+
+ // 3 or more arguments. Currently not supported.
+ default:
+ return nil, errors.New("Invalid arguments supplied to `time`. Refer to time documentation: https://gohugo.io/functions/time/")
+ }
+ },
+ }
+
+ ns.AddMethodMapping(ctx.Format,
+ []string{"dateFormat"},
+ [][2]string{
+ {`dateFormat: {{ dateFormat "Monday, Jan 2, 2006" "2015-01-21" }}`, `dateFormat: Wednesday, Jan 21, 2015`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Now,
+ []string{"now"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.AsTime,
+ nil,
+ [][2]string{
+ {`{{ (time "2015-01-21").Year }}`, `2015`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.Duration,
+ []string{"duration"},
+ [][2]string{
+ {`{{ mul 60 60 | duration "second" }}`, `1h0m0s`},
+ },
+ )
+
+ ns.AddMethodMapping(ctx.ParseDuration,
+ nil,
+ [][2]string{
+ {`{{ "1h12m10s" | time.ParseDuration }}`, `1h12m10s`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/time/time.go b/tpl/time/time.go
new file mode 100644
index 000000000..cd78b83aa
--- /dev/null
+++ b/tpl/time/time.go
@@ -0,0 +1,123 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package time provides template functions for measuring and displaying time.
+package time
+
+import (
+ "fmt"
+ "time"
+ _time "time"
+
+ "github.com/gohugoio/hugo/common/htime"
+
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the time-namespaced template functions.
+func New(timeFormatter htime.TimeFormatter, location *time.Location) *Namespace {
+ return &Namespace{
+ timeFormatter: timeFormatter,
+ location: location,
+ }
+}
+
+// Namespace provides template functions for the "time" namespace.
+type Namespace struct {
+ timeFormatter htime.TimeFormatter
+ location *time.Location
+}
+
+// AsTime converts the textual representation of the datetime string into
+// a time.Time interface.
+func (ns *Namespace) AsTime(v any, args ...any) (any, error) {
+ loc := ns.location
+ if len(args) > 0 {
+ locStr, err := cast.ToStringE(args[0])
+ if err != nil {
+ return nil, err
+ }
+ loc, err = _time.LoadLocation(locStr)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return htime.ToTimeInDefaultLocationE(v, loc)
+
+}
+
+// Format converts the textual representation of the datetime string in v into
+// time.Time if needed and formats it with the given layout.
+func (ns *Namespace) Format(layout string, v any) (string, error) {
+ t, err := htime.ToTimeInDefaultLocationE(v, ns.location)
+ if err != nil {
+ return "", err
+ }
+
+ return ns.timeFormatter.Format(t, layout), nil
+}
+
+// Now returns the current local time or `clock` time
+func (ns *Namespace) Now() _time.Time {
+ return htime.Now()
+}
+
+// ParseDuration parses the duration string s.
+// A duration string is a possibly signed sequence of
+// decimal numbers, each with optional fraction and a unit suffix,
+// such as "300ms", "-1.5h" or "2h45m".
+// Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".
+// See https://golang.org/pkg/time/#ParseDuration
+func (ns *Namespace) ParseDuration(s any) (_time.Duration, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return 0, err
+ }
+
+ return _time.ParseDuration(ss)
+}
+
+var durationUnits = map[string]_time.Duration{
+ "nanosecond": _time.Nanosecond,
+ "ns": _time.Nanosecond,
+ "microsecond": _time.Microsecond,
+ "us": _time.Microsecond,
+ "µs": _time.Microsecond,
+ "millisecond": _time.Millisecond,
+ "ms": _time.Millisecond,
+ "second": _time.Second,
+ "s": _time.Second,
+ "minute": _time.Minute,
+ "m": _time.Minute,
+ "hour": _time.Hour,
+ "h": _time.Hour,
+}
+
+// Duration converts the given number to a time.Duration.
+// Unit is one of nanosecond/ns, microsecond/us/µs, millisecond/ms, second/s, minute/m or hour/h.
+func (ns *Namespace) Duration(unit any, number any) (_time.Duration, error) {
+ unitStr, err := cast.ToStringE(unit)
+ if err != nil {
+ return 0, err
+ }
+ unitDuration, found := durationUnits[unitStr]
+ if !found {
+ return 0, fmt.Errorf("%q is not a valid duration unit", unit)
+ }
+ n, err := cast.ToInt64E(number)
+ if err != nil {
+ return 0, err
+ }
+ return _time.Duration(n) * unitDuration, nil
+}
diff --git a/tpl/time/time_test.go b/tpl/time/time_test.go
new file mode 100644
index 000000000..9001f6b6b
--- /dev/null
+++ b/tpl/time/time_test.go
@@ -0,0 +1,185 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package time
+
+import (
+ "strings"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+
+ "github.com/gohugoio/hugo/common/htime"
+ translators "github.com/gohugoio/localescompressed"
+)
+
+func TestTimeLocation(t *testing.T) {
+ t.Parallel()
+
+ loc, _ := time.LoadLocation("America/Antigua")
+ ns := New(htime.NewTimeFormatter(translators.GetTranslator("en")), loc)
+
+ for i, test := range []struct {
+ name string
+ value string
+ location any
+ expect any
+ }{
+ {"Empty location", "2020-10-20", "", "2020-10-20 00:00:00 +0000 UTC"},
+ {"New location", "2020-10-20", nil, "2020-10-20 00:00:00 -0400 AST"},
+ {"New York EDT", "2020-10-20", "America/New_York", "2020-10-20 00:00:00 -0400 EDT"},
+ {"New York EST", "2020-01-20", "America/New_York", "2020-01-20 00:00:00 -0500 EST"},
+ {"Empty location, time", "2020-10-20 20:33:59", "", "2020-10-20 20:33:59 +0000 UTC"},
+ {"New York, time", "2020-10-20 20:33:59", "America/New_York", "2020-10-20 20:33:59 -0400 EDT"},
+ // The following have an explicit offset specified. In this case, it overrides timezone
+ {"Offset minus 0700, empty location", "2020-09-23T20:33:44-0700", "", "2020-09-23 20:33:44 -0700 -0700"},
+ {"Offset plus 0200, empty location", "2020-09-23T20:33:44+0200", "", "2020-09-23 20:33:44 +0200 +0200"},
+
+ {"Offset, New York", "2020-09-23T20:33:44-0700", "America/New_York", "2020-09-23 20:33:44 -0700 -0700"},
+ {"Offset, Oslo", "2020-09-23T20:33:44+0200", "Europe/Oslo", "2020-09-23 20:33:44 +0200 +0200"},
+
+ // Failures.
+ {"Invalid time zone", "2020-01-20", "invalid-timezone", false},
+ {"Invalid time value", "invalid-value", "", false},
+ } {
+ t.Run(test.name, func(t *testing.T) {
+ var args []any
+ if test.location != nil {
+ args = append(args, test.location)
+ }
+ result, err := ns.AsTime(test.value, args...)
+ if b, ok := test.expect.(bool); ok && !b {
+ if err == nil {
+ t.Errorf("[%d] AsTime didn't return an expected error, got %v", i, result)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] AsTime failed: %s", i, err)
+ return
+ }
+
+ // See https://github.com/gohugoio/hugo/issues/8843#issuecomment-891551447
+ // Drop the location string (last element) when comparing,
+ // as that may change depending on the local locale.
+ timeStr := result.(time.Time).String()
+ timeStr = timeStr[:strings.LastIndex(timeStr, " ")]
+ if !strings.HasPrefix(test.expect.(string), timeStr) {
+ t.Errorf("[%d] AsTime got %v but expected %v", i, timeStr, test.expect)
+ }
+ }
+ })
+ }
+}
+
+func TestFormat(t *testing.T) {
+ c := qt.New(t)
+
+ c.Run("UTC", func(c *qt.C) {
+ c.Parallel()
+ ns := New(htime.NewTimeFormatter(translators.GetTranslator("en")), time.UTC)
+
+ for i, test := range []struct {
+ layout string
+ value any
+ expect any
+ }{
+ {"Monday, Jan 2, 2006", "2015-01-21", "Wednesday, Jan 21, 2015"},
+ {"Monday, Jan 2, 2006", time.Date(2015, time.January, 21, 0, 0, 0, 0, time.UTC), "Wednesday, Jan 21, 2015"},
+ {"This isn't a date layout string", "2015-01-21", "This isn't a date layout string"},
+ // The following test case gives either "Tuesday, Jan 20, 2015" or "Monday, Jan 19, 2015" depending on the local time zone
+ {"Monday, Jan 2, 2006", 1421733600, time.Unix(1421733600, 0).Format("Monday, Jan 2, 2006")},
+ {"Monday, Jan 2, 2006", 1421733600.123, false},
+ {time.RFC3339, time.Date(2016, time.March, 3, 4, 5, 0, 0, time.UTC), "2016-03-03T04:05:00Z"},
+ {time.RFC1123, time.Date(2016, time.March, 3, 4, 5, 0, 0, time.UTC), "Thu, 03 Mar 2016 04:05:00 UTC"},
+ {time.RFC3339, "Thu, 03 Mar 2016 04:05:00 UTC", "2016-03-03T04:05:00Z"},
+ {time.RFC1123, "2016-03-03T04:05:00Z", "Thu, 03 Mar 2016 04:05:00 UTC"},
+ // Custom layouts, as introduced in Hugo 0.87.
+ {":date_medium", "2015-01-21", "Jan 21, 2015"},
+ } {
+ result, err := ns.Format(test.layout, test.value)
+ if b, ok := test.expect.(bool); ok && !b {
+ if err == nil {
+ c.Errorf("[%d] DateFormat didn't return an expected error, got %v", i, result)
+ }
+ } else {
+ if err != nil {
+ c.Errorf("[%d] DateFormat failed: %s", i, err)
+ continue
+ }
+ if result != test.expect {
+ c.Errorf("[%d] DateFormat got %v but expected %v", i, result, test.expect)
+ }
+ }
+ }
+ })
+
+ //Issue #9084
+ c.Run("TZ America/Los_Angeles", func(c *qt.C) {
+ c.Parallel()
+
+ loc, err := time.LoadLocation("America/Los_Angeles")
+ c.Assert(err, qt.IsNil)
+ ns := New(htime.NewTimeFormatter(translators.GetTranslator("en")), loc)
+
+ d, err := ns.Format(":time_full", "2020-03-09T11:00:00")
+
+ c.Assert(err, qt.IsNil)
+ c.Assert(d, qt.Equals, "11:00:00 am Pacific Daylight Time")
+
+ })
+
+}
+
+func TestDuration(t *testing.T) {
+ t.Parallel()
+
+ ns := New(htime.NewTimeFormatter(translators.GetTranslator("en")), time.UTC)
+
+ for i, test := range []struct {
+ unit any
+ num any
+ expect any
+ }{
+ {"nanosecond", 10, 10 * time.Nanosecond},
+ {"ns", 10, 10 * time.Nanosecond},
+ {"microsecond", 20, 20 * time.Microsecond},
+ {"us", 20, 20 * time.Microsecond},
+ {"µs", 20, 20 * time.Microsecond},
+ {"millisecond", 20, 20 * time.Millisecond},
+ {"ms", 20, 20 * time.Millisecond},
+ {"second", 30, 30 * time.Second},
+ {"s", 30, 30 * time.Second},
+ {"minute", 20, 20 * time.Minute},
+ {"m", 20, 20 * time.Minute},
+ {"hour", 20, 20 * time.Hour},
+ {"h", 20, 20 * time.Hour},
+ {"hours", 20, false},
+ {"hour", "30", 30 * time.Hour},
+ } {
+ result, err := ns.Duration(test.unit, test.num)
+ if b, ok := test.expect.(bool); ok && !b {
+ if err == nil {
+ t.Errorf("[%d] Duration didn't return an expected error, got %v", i, result)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("[%d] Duration failed: %s", i, err)
+ continue
+ }
+ if result != test.expect {
+ t.Errorf("[%d] Duration got %v but expected %v", i, result, test.expect)
+ }
+ }
+ }
+}
diff --git a/tpl/tplimpl/embedded/.gitattributes b/tpl/tplimpl/embedded/.gitattributes
new file mode 100644
index 000000000..721b3af6b
--- /dev/null
+++ b/tpl/tplimpl/embedded/.gitattributes
@@ -0,0 +1 @@
+*autogen.go linguist-generated=true
diff --git a/tpl/tplimpl/embedded/templates/_default/_markup/render-codeblock-goat.html b/tpl/tplimpl/embedded/templates/_default/_markup/render-codeblock-goat.html
new file mode 100644
index 000000000..35ec0b309
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/_default/_markup/render-codeblock-goat.html
@@ -0,0 +1,18 @@
+{{ $width := .Attributes.width }}
+{{ $height := .Attributes.height }}
+{{ $class := .Attributes.class | default "" }}
+<div class="goat svg-container {{ $class }}">
+ {{ with diagrams.Goat .Inner }}
+ <svg
+ xmlns="http://www.w3.org/2000/svg"
+ font-family="Menlo,Lucida Console,monospace"
+ {{ if or $width $height }}
+ {{ with $width }}width="{{ . }}"{{ end }}
+ {{ with $height }}height="{{ . }}"{{ end }}
+ {{ else }}
+ viewBox="0 0 {{ .Width }} {{ .Height }}"
+ {{ end }}>
+ {{ .Inner }}
+ </svg>
+ {{ end }}
+</div>
diff --git a/tpl/tplimpl/embedded/templates/_default/robots.txt b/tpl/tplimpl/embedded/templates/_default/robots.txt
new file mode 100644
index 000000000..4f9540ba3
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/_default/robots.txt
@@ -0,0 +1 @@
+User-agent: * \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/_default/rss.xml b/tpl/tplimpl/embedded/templates/_default/rss.xml
new file mode 100644
index 000000000..7acdbef6b
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/_default/rss.xml
@@ -0,0 +1,39 @@
+{{- $pctx := . -}}
+{{- if .IsHome -}}{{ $pctx = .Site }}{{- end -}}
+{{- $pages := slice -}}
+{{- if or $.IsHome $.IsSection -}}
+{{- $pages = $pctx.RegularPages -}}
+{{- else -}}
+{{- $pages = $pctx.Pages -}}
+{{- end -}}
+{{- $limit := .Site.Config.Services.RSS.Limit -}}
+{{- if ge $limit 1 -}}
+{{- $pages = $pages | first $limit -}}
+{{- end -}}
+{{- printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?>" | safeHTML }}
+<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
+ <channel>
+ <title>{{ if eq .Title .Site.Title }}{{ .Site.Title }}{{ else }}{{ with .Title }}{{.}} on {{ end }}{{ .Site.Title }}{{ end }}</title>
+ <link>{{ .Permalink }}</link>
+ <description>Recent content {{ if ne .Title .Site.Title }}{{ with .Title }}in {{.}} {{ end }}{{ end }}on {{ .Site.Title }}</description>
+ <generator>Hugo -- gohugo.io</generator>{{ with .Site.LanguageCode }}
+ <language>{{.}}</language>{{end}}{{ with .Site.Author.email }}
+ <managingEditor>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</managingEditor>{{end}}{{ with .Site.Author.email }}
+ <webMaster>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</webMaster>{{end}}{{ with .Site.Copyright }}
+ <copyright>{{.}}</copyright>{{end}}{{ if not .Date.IsZero }}
+ <lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</lastBuildDate>{{ end }}
+ {{- with .OutputFormats.Get "RSS" -}}
+ {{ printf "<atom:link href=%q rel=\"self\" type=%q />" .Permalink .MediaType | safeHTML }}
+ {{- end -}}
+ {{ range $pages }}
+ <item>
+ <title>{{ .Title }}</title>
+ <link>{{ .Permalink }}</link>
+ <pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</pubDate>
+ {{ with .Site.Author.email }}<author>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</author>{{end}}
+ <guid>{{ .Permalink }}</guid>
+ <description>{{ .Summary | html }}</description>
+ </item>
+ {{ end }}
+ </channel>
+</rss>
diff --git a/tpl/tplimpl/embedded/templates/_default/sitemap.xml b/tpl/tplimpl/embedded/templates/_default/sitemap.xml
new file mode 100644
index 000000000..cdb3748e8
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/_default/sitemap.xml
@@ -0,0 +1,24 @@
+{{ printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?>" | safeHTML }}
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
+ xmlns:xhtml="http://www.w3.org/1999/xhtml">
+ {{ range .Data.Pages }}
+ {{- if .Permalink -}}
+ <url>
+ <loc>{{ .Permalink }}</loc>{{ if not .Lastmod.IsZero }}
+ <lastmod>{{ safeHTML ( .Lastmod.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }}
+ <changefreq>{{ . }}</changefreq>{{ end }}{{ if ge .Sitemap.Priority 0.0 }}
+ <priority>{{ .Sitemap.Priority }}</priority>{{ end }}{{ if .IsTranslated }}{{ range .Translations }}
+ <xhtml:link
+ rel="alternate"
+ hreflang="{{ .Language.Lang }}"
+ href="{{ .Permalink }}"
+ />{{ end }}
+ <xhtml:link
+ rel="alternate"
+ hreflang="{{ .Language.Lang }}"
+ href="{{ .Permalink }}"
+ />{{ end }}
+ </url>
+ {{- end -}}
+ {{ end }}
+</urlset>
diff --git a/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml b/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml
new file mode 100644
index 000000000..01cf65bc5
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/_default/sitemapindex.xml
@@ -0,0 +1,11 @@
+{{ printf "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?>" | safeHTML }}
+<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+ {{ range . }}
+ <sitemap>
+ <loc>{{ .SitemapAbsURL }}</loc>
+ {{ if not .LastChange.IsZero }}
+ <lastmod>{{ .LastChange.Format "2006-01-02T15:04:05-07:00" | safeHTML }}</lastmod>
+ {{ end }}
+ </sitemap>
+ {{ end }}
+</sitemapindex>
diff --git a/tpl/tplimpl/embedded/templates/_server/error.html b/tpl/tplimpl/embedded/templates/_server/error.html
new file mode 100644
index 000000000..77d581391
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/_server/error.html
@@ -0,0 +1,87 @@
+<!DOCTYPE html>
+<html class="no-js" lang="">
+ <head>
+ <meta charset="utf-8" />
+ <title>Hugo Server: Error</title>
+ <style type="text/css">
+ body {
+ font-family: "Muli", system-ui, -apple-system, "Segoe UI", Roboto,
+ "Helvetica Neue", "Noto Sans", "Liberation Sans", Arial, sans-serif,
+ "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol",
+ "Noto Color Emoji";
+ font-size: 14px;
+ background-color: #272a36;
+ }
+ main {
+ max-width: 100ch;
+ padding: 2ch;
+ margin: auto;
+ }
+
+ .version {
+ font-size: 0.75rem;
+ color: #7c7c7c;
+ }
+
+ hr {
+ margin-bottom: 1rem;
+ border: none;
+ height: 1px;
+ background-color: #3d3d3d;
+ }
+ pre,
+ code {
+ white-space: pre-wrap;
+ white-space: -moz-pre-wrap;
+ white-space: -pre-wrap;
+ white-space: -o-pre-wrap;
+ word-wrap: break-word;
+ font-family: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono",
+ "Courier New", monospace;
+ }
+ .error pre {
+ line-height: 1.5;
+ }
+ .filename {
+ color: #eef78a;
+ font-size: 0.9rem;
+ line-height: 1.5;
+ }
+ .highlight {
+ overflow-x: auto;
+ }
+ a {
+ color: #0594cb;
+ text-decoration: none;
+ }
+ a:hover {
+ color: #ccc;
+ }
+ </style>
+ </head>
+ <body>
+ <main>
+ {{ $codeStyle := "dracula" }}
+ <div class="error">
+ {{ highlight .Error "apl" (printf "linenos=false,noclasses=true,style=%s" $codeStyle ) }}
+ </div>
+ <hr />
+ {{ range $i, $e := .Files }}
+ {{ if not .ErrorContext }}
+ {{ continue }}
+ {{ end }}
+ {{ $params := printf "noclasses=true,style=%s,linenos=table,hl_lines=%d,linenostart=%d" $codeStyle (add .ErrorContext.LinesPos 1) (sub .Position.LineNumber .ErrorContext.LinesPos) }}
+ {{ $lexer := .ErrorContext.ChromaLexer | default "go-html-template" }}
+ {{ with .Position }}
+ <code class="filename"
+ >{{ printf "%s:%d:%d" .Filename .LineNumber .ColumnNumber }}:</code
+ >
+ {{ end }}
+ {{ highlight (delimit .ErrorContext.Lines "\n") $lexer $params }}
+ <hr />
+ {{ end }}
+ <p class="version">{{ .Version }}</p>
+ <a href="">Reload Page</a>
+ </main>
+ </body>
+</html>
diff --git a/tpl/tplimpl/embedded/templates/alias.html b/tpl/tplimpl/embedded/templates/alias.html
new file mode 100644
index 000000000..8448760fa
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/alias.html
@@ -0,0 +1,10 @@
+<!DOCTYPE html>
+<html{{ with site.LanguageCode | default site.Language.Lang }} lang="{{ . }}"{{ end }}>
+ <head>
+ <title>{{ .Permalink }}</title>
+ <link rel="canonical" href="{{ .Permalink }}">
+ <meta name="robots" content="noindex">
+ <meta charset="utf-8">
+ <meta http-equiv="refresh" content="0; url={{ .Permalink }}">
+ </head>
+</html>
diff --git a/tpl/tplimpl/embedded/templates/disqus.html b/tpl/tplimpl/embedded/templates/disqus.html
new file mode 100644
index 000000000..ca5118608
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/disqus.html
@@ -0,0 +1,23 @@
+{{- $pc := .Site.Config.Privacy.Disqus -}}
+{{- if not $pc.Disable -}}
+{{ if .Site.DisqusShortname }}<div id="disqus_thread"></div>
+<script type="application/javascript">
+ window.disqus_config = function () {
+ {{with .Params.disqus_identifier }}this.page.identifier = '{{ . }}';{{end}}
+ {{with .Params.disqus_title }}this.page.title = '{{ . }}';{{end}}
+ {{with .Params.disqus_url }}this.page.url = '{{ . | html }}';{{end}}
+ };
+ (function() {
+ if (["localhost", "127.0.0.1"].indexOf(window.location.hostname) != -1) {
+ document.getElementById('disqus_thread').innerHTML = 'Disqus comments not available by default when the website is previewed locally.';
+ return;
+ }
+ var d = document, s = d.createElement('script'); s.async = true;
+ s.src = '//' + {{ .Site.DisqusShortname }} + '.disqus.com/embed.js';
+ s.setAttribute('data-timestamp', +new Date());
+ (d.head || d.body).appendChild(s);
+ })();
+</script>
+<noscript>Please enable JavaScript to view the <a href="https://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
+<a href="https://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a>{{end}}
+{{- end -}} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/google_analytics.html b/tpl/tplimpl/embedded/templates/google_analytics.html
new file mode 100644
index 000000000..f518b150c
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/google_analytics.html
@@ -0,0 +1,51 @@
+{{- $pc := .Site.Config.Privacy.GoogleAnalytics -}}
+{{- if not $pc.Disable }}{{ with .Site.GoogleAnalytics -}}
+{{ if hasPrefix . "G-"}}
+<script async src="https://www.googletagmanager.com/gtag/js?id={{ . }}"></script>
+<script>
+{{ template "__ga_js_set_doNotTrack" $ }}
+if (!doNotTrack) {
+ window.dataLayer = window.dataLayer || [];
+ function gtag(){dataLayer.push(arguments);}
+ gtag('js', new Date());
+ gtag('config', '{{ . }}', { 'anonymize_ip': {{- $pc.AnonymizeIP -}} });
+}
+</script>
+{{ else if hasPrefix . "UA-" }}
+<script type="application/javascript">
+{{ template "__ga_js_set_doNotTrack" $ }}
+if (!doNotTrack) {
+ (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+ m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+ })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
+ {{- if $pc.UseSessionStorage }}
+ if (window.sessionStorage) {
+ var GA_SESSION_STORAGE_KEY = 'ga:clientId';
+ ga('create', '{{ . }}', {
+ 'storage': 'none',
+ 'clientId': sessionStorage.getItem(GA_SESSION_STORAGE_KEY)
+ });
+ ga(function(tracker) {
+ sessionStorage.setItem(GA_SESSION_STORAGE_KEY, tracker.get('clientId'));
+ });
+ }
+ {{ else }}
+ ga('create', '{{ . }}', 'auto');
+ {{ end -}}
+ {{ if $pc.AnonymizeIP }}ga('set', 'anonymizeIp', true);{{ end }}
+ ga('send', 'pageview');
+}
+</script>
+{{- end -}}
+{{- end }}{{ end -}}
+
+{{- define "__ga_js_set_doNotTrack" -}}{{/* This is also used in the async version. */}}
+{{- $pc := .Site.Config.Privacy.GoogleAnalytics -}}
+{{- if not $pc.RespectDoNotTrack -}}
+var doNotTrack = false;
+{{- else -}}
+var dnt = (navigator.doNotTrack || window.doNotTrack || navigator.msDoNotTrack);
+var doNotTrack = (dnt == "1" || dnt == "yes");
+{{- end -}}
+{{- end -}} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/google_analytics_async.html b/tpl/tplimpl/embedded/templates/google_analytics_async.html
new file mode 100644
index 000000000..499cb6fe3
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/google_analytics_async.html
@@ -0,0 +1,28 @@
+{{- $pc := .Site.Config.Privacy.GoogleAnalytics -}}
+{{- if not $pc.Disable -}}
+{{ with .Site.GoogleAnalytics }}
+<script type="application/javascript">
+{{ template "__ga_js_set_doNotTrack" $ }}
+if (!doNotTrack) {
+ window.ga=window.ga||function(){(ga.q=ga.q||[]).push(arguments)};ga.l=+new Date;
+ {{- if $pc.UseSessionStorage }}
+ if (window.sessionStorage) {
+ var GA_SESSION_STORAGE_KEY = 'ga:clientId';
+ ga('create', '{{ . }}', {
+ 'storage': 'none',
+ 'clientId': sessionStorage.getItem(GA_SESSION_STORAGE_KEY)
+ });
+ ga(function(tracker) {
+ sessionStorage.setItem(GA_SESSION_STORAGE_KEY, tracker.get('clientId'));
+ });
+ }
+ {{ else }}
+ ga('create', '{{ . }}', 'auto');
+ {{ end -}}
+ {{ if $pc.AnonymizeIP }}ga('set', 'anonymizeIp', true);{{ end }}
+ ga('send', 'pageview');
+}
+</script>
+<script async src='https://www.google-analytics.com/analytics.js'></script>
+{{ end }}
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/google_news.html b/tpl/tplimpl/embedded/templates/google_news.html
new file mode 100644
index 000000000..d1a995c83
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/google_news.html
@@ -0,0 +1,6 @@
+{{- warnf "The google_news internal template will be removed in a future release. Please remove calls to this template. See https://github.com/gohugoio/hugo/issues/9172 for additional information." -}}
+{{- if .IsPage -}}
+ {{- with .Params.news_keywords -}}
+ <meta name="news_keywords" content="{{ range $i, $kw := first 10 . }}{{ if $i }},{{ end }}{{ $kw }}{{ end }}">
+ {{- end -}}
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/opengraph.html b/tpl/tplimpl/embedded/templates/opengraph.html
new file mode 100644
index 000000000..e3961524b
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/opengraph.html
@@ -0,0 +1,44 @@
+<meta property="og:title" content="{{ .Title }}" />
+<meta property="og:description" content="{{ with .Description }}{{ . }}{{ else }}{{if .IsPage}}{{ .Summary }}{{ else }}{{ with .Site.Params.description }}{{ . }}{{ end }}{{ end }}{{ end }}" />
+<meta property="og:type" content="{{ if .IsPage }}article{{ else }}website{{ end }}" />
+<meta property="og:url" content="{{ .Permalink }}" />
+
+{{- with $.Params.images -}}
+{{- range first 6 . }}<meta property="og:image" content="{{ . | absURL }}" />{{ end -}}
+{{- else -}}
+{{- $images := $.Resources.ByType "image" -}}
+{{- $featured := $images.GetMatch "*feature*" -}}
+{{- if not $featured }}{{ $featured = $images.GetMatch "{*cover*,*thumbnail*}" }}{{ end -}}
+{{- with $featured -}}
+<meta property="og:image" content="{{ $featured.Permalink }}"/>
+{{- else -}}
+{{- with $.Site.Params.images }}<meta property="og:image" content="{{ index . 0 | absURL }}"/>{{ end -}}
+{{- end -}}
+{{- end -}}
+
+{{- if .IsPage }}
+{{- $iso8601 := "2006-01-02T15:04:05-07:00" -}}
+<meta property="article:section" content="{{ .Section }}" />
+{{ with .PublishDate }}<meta property="article:published_time" {{ .Format $iso8601 | printf "content=%q" | safeHTMLAttr }} />{{ end }}
+{{ with .Lastmod }}<meta property="article:modified_time" {{ .Format $iso8601 | printf "content=%q" | safeHTMLAttr }} />{{ end }}
+{{- end -}}
+
+{{- with .Params.audio }}<meta property="og:audio" content="{{ . }}" />{{ end }}
+{{- with .Params.locale }}<meta property="og:locale" content="{{ . }}" />{{ end }}
+{{- with .Site.Params.title }}<meta property="og:site_name" content="{{ . }}" />{{ end }}
+{{- with .Params.videos }}{{- range . }}
+<meta property="og:video" content="{{ . | absURL }}" />
+{{ end }}{{ end }}
+
+{{- /* If it is part of a series, link to related articles */}}
+{{- $permalink := .Permalink }}
+{{- $siteSeries := .Site.Taxonomies.series }}
+{{ with .Params.series }}{{- range $name := . }}
+ {{- $series := index $siteSeries ($name | urlize) }}
+ {{- range $page := first 6 $series.Pages }}
+ {{- if ne $page.Permalink $permalink }}<meta property="og:see_also" content="{{ $page.Permalink }}" />{{ end }}
+ {{- end }}
+{{ end }}{{ end }}
+
+{{- /* Facebook Page Admin ID for Domain Insights */}}
+{{- with .Site.Social.facebook_admin }}<meta property="fb:admins" content="{{ . }}" />{{ end }}
diff --git a/tpl/tplimpl/embedded/templates/pagination.html b/tpl/tplimpl/embedded/templates/pagination.html
new file mode 100644
index 000000000..717797ab2
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/pagination.html
@@ -0,0 +1,154 @@
+{{- $validFormats := slice "default" "terse" }}
+
+{{- $msg1 := "When passing a map to the internal pagination template, one of the elements must be named 'page', and it must be set to the context of the current page." }}
+{{- $msg2 := "The 'format' specified in the map passed to the internal pagination template is invalid. Valid choices are: %s." }}
+
+{{- $page := . }}
+{{- $format := "default" }}
+
+{{- if reflect.IsMap . }}
+ {{- with .page }}
+ {{- $page = . }}
+ {{- else }}
+ {{- errorf $msg1 }}
+ {{- end }}
+ {{- with .format }}
+ {{- $format = lower . }}
+ {{- end }}
+{{- end }}
+
+{{- if in $validFormats $format }}
+ {{- if gt $page.Paginator.TotalPages 1 }}
+ <ul class="pagination pagination-{{ $format }}">
+ {{- partial (printf "partials/inline/pagination/%s" $format) $page }}
+ </ul>
+ {{- end }}
+{{- else }}
+ {{- errorf $msg2 (delimit $validFormats ", ") }}
+{{- end -}}
+
+{{/* Format: default */}}
+{{/* --------------------------------------------------------------------- */}}
+{{- define "partials/inline/pagination/default" }}
+ {{- with .Paginator }}
+ {{- $currentPageNumber := .PageNumber }}
+
+ {{- with .First }}
+ {{- if ne $currentPageNumber .PageNumber }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="First" class="page-link" role="button"><span aria-hidden="true">&laquo;&laquo;</span></a>
+ </li>
+ {{- else }}
+ <li class="page-item disabled">
+ <a aria-disabled="true" aria-label="First" class="page-link" role="button" tabindex="-1"><span aria-hidden="true">&laquo;&laquo;</span></a>
+ </li>
+ {{- end }}
+ {{- end }}
+
+ {{- with .Prev }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="Previous" class="page-link" role="button"><span aria-hidden="true">&laquo;</span></a>
+ </li>
+ {{- else }}
+ <li class="page-item disabled">
+ <a aria-disabled="true" aria-label="Previous" class="page-link" role="button" tabindex="-1"><span aria-hidden="true">&laquo;</span></a>
+ </li>
+ {{- end }}
+
+ {{- $slots := 5 }}
+ {{- $start := math.Max 1 (sub .PageNumber (math.Floor (div $slots 2))) }}
+ {{- $end := math.Min .TotalPages (sub (add $start $slots) 1) }}
+ {{- if lt (add (sub $end $start) 1) $slots }}
+ {{- $start = math.Max 1 (add (sub $end $slots) 1) }}
+ {{- end }}
+
+ {{- range $k := seq $start $end }}
+ {{- if eq $.Paginator.PageNumber $k }}
+ <li class="page-item active">
+ <a aria-current="page" aria-label="Page {{ $k }}" class="page-link" role="button">{{ $k }}</a>
+ </li>
+ {{- else }}
+ <li class="page-item">
+ <a href="{{ (index $.Paginator.Pagers (sub $k 1)).URL }}" aria-label="Page {{ $k }}" class="page-link" role="button">{{ $k }}</a>
+ </li>
+ {{- end }}
+ {{- end }}
+
+ {{- with .Next }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="Next" class="page-link" role="button"><span aria-hidden="true">&raquo;</span></a>
+ </li>
+ {{- else }}
+ <li class="page-item disabled">
+ <a aria-disabled="true" aria-label="Next" class="page-link" role="button" tabindex="-1"><span aria-hidden="true">&raquo;</span></a>
+ </li>
+ {{- end }}
+
+ {{- with .Last }}
+ {{- if ne $currentPageNumber .PageNumber }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="Last" class="page-link" role="button"><span aria-hidden="true">&raquo;&raquo;</span></a>
+ </li>
+ {{- else }}
+ <li class="page-item disabled">
+ <a aria-disabled="true" aria-label="Last" class="page-link" role="button" tabindex="-1"><span aria-hidden="true">&raquo;&raquo;</span></a>
+ </li>
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end -}}
+
+{{/* Format: terse */}}
+{{/* --------------------------------------------------------------------- */}}
+{{- define "partials/inline/pagination/terse" }}
+ {{- with .Paginator }}
+ {{- $currentPageNumber := .PageNumber }}
+
+ {{- with .First }}
+ {{- if ne $currentPageNumber .PageNumber }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="First" class="page-link" role="button"><span aria-hidden="true">&laquo;&laquo;</span></a>
+ </li>
+ {{- end }}
+ {{- end }}
+
+ {{- with .Prev }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="Previous" class="page-link" role="button"><span aria-hidden="true">&laquo;</span></a>
+ </li>
+ {{- end }}
+
+ {{- $slots := 3 }}
+ {{- $start := math.Max 1 (sub .PageNumber (math.Floor (div $slots 2))) }}
+ {{- $end := math.Min .TotalPages (sub (add $start $slots) 1) }}
+ {{- if lt (add (sub $end $start) 1) $slots }}
+ {{- $start = math.Max 1 (add (sub $end $slots) 1) }}
+ {{- end }}
+
+ {{- range $k := seq $start $end }}
+ {{- if eq $.Paginator.PageNumber $k }}
+ <li class="page-item active">
+ <a aria-current="page" aria-label="Page {{ $k }}" class="page-link" role="button">{{ $k }}</a>
+ </li>
+ {{- else }}
+ <li class="page-item">
+ <a href="{{ (index $.Paginator.Pagers (sub $k 1)).URL }}" aria-label="Page {{ $k }}" class="page-link" role="button">{{ $k }}</a>
+ </li>
+ {{- end }}
+ {{- end }}
+
+ {{- with .Next }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="Next" class="page-link" role="button"><span aria-hidden="true">&raquo;</span></a>
+ </li>
+ {{- end }}
+
+ {{- with .Last }}
+ {{- if ne $currentPageNumber .PageNumber }}
+ <li class="page-item">
+ <a href="{{ .URL }}" aria-label="Last" class="page-link" role="button"><span aria-hidden="true">&raquo;&raquo;</span></a>
+ </li>
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/schema.html b/tpl/tplimpl/embedded/templates/schema.html
new file mode 100644
index 000000000..20b477b59
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/schema.html
@@ -0,0 +1,25 @@
+<meta itemprop="name" content="{{ .Title }}">
+<meta itemprop="description" content="{{ with .Description }}{{ . }}{{ else }}{{if .IsPage}}{{ .Summary }}{{ else }}{{ with .Site.Params.description }}{{ . }}{{ end }}{{ end }}{{ end }}">
+
+{{- if .IsPage -}}
+{{- $iso8601 := "2006-01-02T15:04:05-07:00" -}}
+{{ with .PublishDate }}<meta itemprop="datePublished" {{ .Format $iso8601 | printf "content=%q" | safeHTMLAttr }} />{{ end}}
+{{ with .Lastmod }}<meta itemprop="dateModified" {{ .Format $iso8601 | printf "content=%q" | safeHTMLAttr }} />{{ end}}
+<meta itemprop="wordCount" content="{{ .WordCount }}">
+
+{{- with $.Params.images -}}
+{{- range first 6 . -}}<meta itemprop="image" content="{{ . | absURL }}">{{ end -}}
+{{- else -}}
+{{- $images := $.Resources.ByType "image" -}}
+{{- $featured := $images.GetMatch "*feature*" -}}
+{{- if not $featured }}{{ $featured = $images.GetMatch "{*cover*,*thumbnail*}" }}{{ end -}}
+{{- with $featured -}}
+<meta itemprop="image" content="{{ $featured.Permalink }}">
+{{- else -}}
+{{- with $.Site.Params.images -}}<meta itemprop="image" content="{{ index . 0 | absURL }}"/>{{ end -}}
+{{- end -}}
+{{- end -}}
+
+<!-- Output all taxonomies as schema.org keywords -->
+<meta itemprop="keywords" content="{{ if .IsPage}}{{ range $index, $tag := .Params.tags }}{{ $tag }},{{ end }}{{ else }}{{ range $plural, $terms := .Site.Taxonomies }}{{ range $term, $val := $terms }}{{ printf "%s," $term }}{{ end }}{{ end }}{{ end }}" />
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/1__h_simple_assets.html b/tpl/tplimpl/embedded/templates/shortcodes/1__h_simple_assets.html
new file mode 100644
index 000000000..da1bb82eb
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/1__h_simple_assets.html
@@ -0,0 +1,34 @@
+{{ define "__h_simple_css" }}{{/* These template definitions are global. */}}
+{{- if not (.Page.Scratch.Get "__h_simple_css") -}}
+{{/* Only include once */}}
+{{- .Page.Scratch.Set "__h_simple_css" true -}}
+<style>
+.__h_video {
+ position: relative;
+ padding-bottom: 56.23%;
+ height: 0;
+ overflow: hidden;
+ width: 100%;
+ background: #000;
+}
+.__h_video img {
+ width: 100%;
+ height: auto;
+ color: #000;
+}
+.__h_video .play {
+ height: 72px;
+ width: 72px;
+ left: 50%;
+ top: 50%;
+ margin-left: -36px;
+ margin-top: -36px;
+ position: absolute;
+ cursor: pointer;
+}
+</style>
+{{- end -}}
+{{- end -}}
+{{- define "__h_simple_icon_play" -}}
+<svg version="1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 61 61"><circle cx="30.5" cy="30.5" r="30.5" opacity=".8" fill="#000"></circle><path d="M25.3 19.2c-2.1-1.2-3.8-.2-3.8 2.2v18.1c0 2.4 1.7 3.4 3.8 2.2l16.6-9.1c2.1-1.2 2.1-3.2 0-4.4l-16.6-9z" fill="#fff"></path></svg>
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/figure.html b/tpl/tplimpl/embedded/templates/shortcodes/figure.html
new file mode 100644
index 000000000..ecabb286e
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/figure.html
@@ -0,0 +1,28 @@
+<figure{{ with .Get "class" }} class="{{ . }}"{{ end }}>
+ {{- if .Get "link" -}}
+ <a href="{{ .Get "link" }}"{{ with .Get "target" }} target="{{ . }}"{{ end }}{{ with .Get "rel" }} rel="{{ . }}"{{ end }}>
+ {{- end -}}
+ <img src="{{ .Get "src" }}"
+ {{- if or (.Get "alt") (.Get "caption") }}
+ alt="{{ with .Get "alt" }}{{ . }}{{ else }}{{ .Get "caption" | markdownify| plainify }}{{ end }}"
+ {{- end -}}
+ {{- with .Get "width" }} width="{{ . }}"{{ end -}}
+ {{- with .Get "height" }} height="{{ . }}"{{ end -}}
+ /><!-- Closing img tag -->
+ {{- if .Get "link" }}</a>{{ end -}}
+ {{- if or (or (.Get "title") (.Get "caption")) (.Get "attr") -}}
+ <figcaption>
+ {{ with (.Get "title") -}}
+ <h4>{{ . }}</h4>
+ {{- end -}}
+ {{- if or (.Get "caption") (.Get "attr") -}}<p>
+ {{- .Get "caption" | markdownify -}}
+ {{- with .Get "attrlink" }}
+ <a href="{{ . }}">
+ {{- end -}}
+ {{- .Get "attr" | markdownify -}}
+ {{- if .Get "attrlink" }}</a>{{ end }}</p>
+ {{- end }}
+ </figcaption>
+ {{- end }}
+</figure>
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/gist.html b/tpl/tplimpl/embedded/templates/shortcodes/gist.html
new file mode 100644
index 000000000..781b26567
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/gist.html
@@ -0,0 +1 @@
+<script type="application/javascript" src="https://gist.github.com/{{ index .Params 0 }}/{{ index .Params 1 }}.js{{if len .Params | eq 3 }}?file={{ index .Params 2 }}{{end}}"></script>
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/highlight.html b/tpl/tplimpl/embedded/templates/shortcodes/highlight.html
new file mode 100644
index 000000000..54e92bb09
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/highlight.html
@@ -0,0 +1 @@
+{{ if len .Params | eq 2 }}{{ highlight (trim .InnerDeindent "\n\r") (.Get 0) (.Get 1) }}{{ else }}{{ highlight (trim .InnerDeindent "\n\r") (.Get 0) "" }}{{ end }} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/instagram.html b/tpl/tplimpl/embedded/templates/shortcodes/instagram.html
new file mode 100644
index 000000000..a6cd002df
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/instagram.html
@@ -0,0 +1,18 @@
+{{- $pc := site.Config.Privacy.Instagram -}}
+{{- if not $pc.Disable -}}
+ {{ $accessToken := site.Config.Services.Instagram.AccessToken }}
+ {{- if not $accessToken -}}
+ {{- erroridf "error-missing-instagram-accesstoken" "instagram shortcode: Missing config value for services.instagram.accessToken. This can be set in config.toml, but it is recommended to configure this via the HUGO_SERVICES_INSTAGRAM_ACCESSTOKEN OS environment variable. If you are using a Client Access Token, remember that you must combine it with your App ID using a pipe symbol (<APPID>|<CLIENTTOKEN>) otherwise the request will fail." -}}
+ {{- else -}}
+ {{- if $pc.Simple -}}
+ {{ template "_internal/shortcodes/instagram_simple.html" . }}
+ {{- else -}}
+ {{ $id := .Get 0 }}
+ {{ $hideCaption := cond (eq (.Get 1) "hidecaption") "1" "0" }}
+ {{ $headers := dict "Authorization" (printf "Bearer %s" $accessToken) }}
+ {{ with getJSON "https://graph.facebook.com/v8.0/instagram_oembed/?url=https://instagram.com/p/" $id "/&hidecaption=" $hideCaption $headers }}
+ {{ .html | safeHTML }}
+ {{ end }}
+ {{- end -}}
+ {{- end -}}
+{{- end -}} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/instagram_simple.html b/tpl/tplimpl/embedded/templates/shortcodes/instagram_simple.html
new file mode 100644
index 000000000..f219181ac
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/instagram_simple.html
@@ -0,0 +1,67 @@
+{{- $pc := .Page.Site.Config.Privacy.Instagram -}}
+{{- $sc := .Page.Site.Config.Services.Instagram -}}
+{{- if not $pc.Disable -}}
+ {{ $accessToken := site.Config.Services.Instagram.AccessToken }}
+ {{- if not $accessToken -}}
+ {{- erroridf "error-missing-instagram-accesstoken" "instagram shortcode: Missing config value for services.instagram.accessToken. This can be set in config.toml, but it is recommended to configure this via the HUGO_SERVICES_INSTAGRAM_ACCESSTOKEN OS environment variable. If you are using a Client Access Token, remember that you must combine it with your App ID using a pipe symbol (<APPID>|<CLIENTTOKEN>) otherwise the request will fail." -}}
+ {{- else -}}
+ {{- $id := .Get 0 -}}
+ {{- $headers := dict "Authorization" (printf "Bearer %s" $accessToken) -}}
+ {{- $item := getJSON "https://graph.facebook.com/v8.0/instagram_oembed/?url=https://instagram.com/p/" $id "/&amp;maxwidth=640&amp;omitscript=true" $headers -}}
+ {{- $class1 := "__h_instagram" -}}
+ {{- $class2 := "s_instagram_simple" -}}
+ {{- $hideCaption := (eq (.Get 1) "hidecaption") -}}
+ {{ with $item }}
+ {{- $mediaURL := printf "https://instagram.com/p/%s/" $id | safeURL -}}
+ {{- if not $sc.DisableInlineCSS -}}
+ {{ template "__h_simple_instagram_css" $ }}
+ {{- end -}}
+ <div class="{{ $class1 }} {{ $class2 }} card" style="max-width: {{ $item.thumbnail_width }}px">
+ <div class="card-header">
+ <a href="{{ $item.author_url | safeURL }}" class="card-link">
+ {{ $item.author_name }}
+ </a>
+ </div>
+ <a href="{{ $mediaURL }}" rel="noopener" target="_blank">
+ <img class="card-img-top img-fluid" src="{{ $item.thumbnail_url }}" width="{{ $item.thumbnail_width }}" height="{{ $item.thumbnail_height }}" alt="Instagram Image">
+ </a>
+ <div class="card-body">
+ {{ if not $hideCaption }}
+ <p class="card-text">
+ <a href="{{ $item.author_url | safeURL }}" class="card-link">
+ {{ $item.author_name }}
+ </a>
+ {{ $item.title}}
+ </p>
+ {{ end }}
+ <a href="{{ $item.author_url | safeURL }}" class="card-link">
+ View More on Instagram
+ </a>
+ </div>
+ </div>
+ {{ end }}
+ {{- end -}}
+{{- end -}}
+
+{{ define "__h_simple_instagram_css" }}
+ {{ if not (.Page.Scratch.Get "__h_simple_instagram_css") }}
+ {{/* Only include once */}}
+ {{ .Page.Scratch.Set "__h_simple_instagram_css" true }}
+ <style type="text/css">
+ .__h_instagram.card {
+ font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,Oxygen-Sans,Ubuntu,Cantarell,"Helvetica Neue",sans-serif;
+ font-size: 14px;
+ border: 1px solid rgb(219, 219, 219);
+ padding: 0;
+ margin-top: 30px;
+ }
+ .__h_instagram.card .card-header, .__h_instagram.card .card-body {
+ padding: 10px 10px 10px;
+ }
+ .__h_instagram.card img {
+ width: 100%;
+ height: auto;
+ }
+ </style>
+ {{ end }}
+{{ end }} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/param.html b/tpl/tplimpl/embedded/templates/shortcodes/param.html
new file mode 100644
index 000000000..74aa3ee7b
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/param.html
@@ -0,0 +1,4 @@
+{{- $name := (.Get 0) -}}
+{{- with $name -}}
+{{- with ($.Page.Param .) }}{{ . }}{{ else }}{{ errorf "Param %q not found: %s" $name $.Position }}{{ end -}}
+{{- else }}{{ errorf "Missing param key: %s" $.Position }}{{ end -}} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/ref.html b/tpl/tplimpl/embedded/templates/shortcodes/ref.html
new file mode 100644
index 000000000..cd9c3defc
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/ref.html
@@ -0,0 +1 @@
+{{ ref . .Params }} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/relref.html b/tpl/tplimpl/embedded/templates/shortcodes/relref.html
new file mode 100644
index 000000000..82005bd82
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/relref.html
@@ -0,0 +1 @@
+{{ relref . .Params }} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/twitter.html b/tpl/tplimpl/embedded/templates/shortcodes/twitter.html
new file mode 100644
index 000000000..2f1d2323e
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/twitter.html
@@ -0,0 +1,35 @@
+{{- $pc := .Page.Site.Config.Privacy.Twitter -}}
+{{- if not $pc.Disable -}}
+ {{- if $pc.Simple -}}
+ {{- template "_internal/shortcodes/twitter_simple.html" . -}}
+ {{- else -}}
+ {{- $msg1 := "The %q shortcode requires two named parameters: user and id. See %s" -}}
+ {{- $msg2 := "The %q shortcode will soon require two named parameters: user and id. See %s" -}}
+ {{- if .IsNamedParams -}}
+ {{- $id := .Get "id" -}}
+ {{- $user := .Get "user" -}}
+ {{- if and $id $user -}}
+ {{- template "render-tweet" (dict "id" $id "user" $user "dnt" $pc.EnableDNT) -}}
+ {{- else -}}
+ {{- errorf $msg1 .Name .Position -}}
+ {{- end -}}
+ {{- else -}}
+ {{- $id := .Get 1 -}}
+ {{- $user := .Get 0 -}}
+ {{- if eq 1 (len .Params) -}}
+ {{- $id = .Get 0 -}}
+ {{- $user = "x" -}} {{/* This triggers a redirect. It works, but may not work forever. */}}
+ {{- warnf $msg2 .Name .Position -}}
+ {{- end -}}
+ {{- template "render-tweet" (dict "id" $id "user" $user "dnt" $pc.EnableDNT) -}}
+ {{- end -}}
+ {{- end -}}
+{{- end -}}
+
+{{- define "render-tweet" -}}
+ {{- $url := printf "https://twitter.com/%v/status/%v" .user .id -}}
+ {{- $query := querify "url" $url "dnt" .dnt -}}
+ {{- $request := printf "https://publish.twitter.com/oembed?%s" $query -}}
+ {{- $json := getJSON $request -}}
+ {{- $json.html | safeHTML -}}
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/twitter_simple.html b/tpl/tplimpl/embedded/templates/shortcodes/twitter_simple.html
new file mode 100644
index 000000000..0127fbe22
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/twitter_simple.html
@@ -0,0 +1,58 @@
+{{- $pc := .Page.Site.Config.Privacy.Twitter -}}
+{{- $sc := .Page.Site.Config.Services.Twitter -}}
+{{- if not $pc.Disable -}}
+ {{- $msg1 := "The %q shortcode requires two named parameters: user and id. See %s" -}}
+ {{- $msg2 := "The %q shortcode will soon require two named parameters: user and id. See %s" -}}
+ {{- if .IsNamedParams -}}
+ {{- $id := .Get "id" -}}
+ {{- $user := .Get "user" -}}
+ {{- if and $id $user -}}
+ {{- template "render-simple-tweet" (dict "id" $id "user" $user "dnt" $pc.EnableDNT "disableInlineCSS" $sc.DisableInlineCSS "ctx" .) -}}
+ {{- else -}}
+ {{- errorf $msg1 .Name .Position -}}
+ {{- end -}}
+ {{- else -}}
+ {{- $id := .Get 1 -}}
+ {{- $user := .Get 0 -}}
+ {{- if eq 1 (len .Params) -}}
+ {{- $id = .Get 0 -}}
+ {{- $user = "x" -}} {{/* This triggers a redirect. It works, but may not work forever. */}}
+ {{- warnf $msg2 .Name .Position -}}
+ {{- end -}}
+ {{- template "render-simple-tweet" (dict "id" $id "user" $user "dnt" $pc.EnableDNT "disableInlineCSS" $sc.DisableInlineCSS "ctx" .) -}}
+ {{- end -}}
+{{- end -}}
+
+{{- define "render-simple-tweet" -}}
+ {{- $url := printf "https://twitter.com/%v/status/%v" .user .id -}}
+ {{- $query := querify "url" $url "dnt" .dnt "omit_script" true -}}
+ {{- $request := printf "https://publish.twitter.com/oembed?%s" $query -}}
+ {{- $json := getJSON $request -}}
+ {{- if not .disableInlineCSS -}}
+ {{- template "__h_simple_twitter_css" .ctx -}}
+ {{- end }}
+ {{ $json.html | safeHTML -}}
+{{- end -}}
+
+{{- define "__h_simple_twitter_css" -}}
+ {{- if not (.Page.Scratch.Get "__h_simple_twitter_css") -}}
+ {{/* Only include once */}}
+ {{- .Page.Scratch.Set "__h_simple_twitter_css" true }}
+ <style type="text/css">
+ .twitter-tweet {
+ font: 14px/1.45 -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,Oxygen-Sans,Ubuntu,Cantarell,"Helvetica Neue",sans-serif;
+ border-left: 4px solid #2b7bb9;
+ padding-left: 1.5em;
+ color: #555;
+ }
+ .twitter-tweet a {
+ color: #2b7bb9;
+ text-decoration: none;
+ }
+ blockquote.twitter-tweet a:hover,
+ blockquote.twitter-tweet a:focus {
+ text-decoration: underline;
+ }
+ </style>
+ {{- end -}}
+{{- end -}}
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/vimeo.html b/tpl/tplimpl/embedded/templates/shortcodes/vimeo.html
new file mode 100644
index 000000000..8ddad9b43
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/vimeo.html
@@ -0,0 +1,14 @@
+{{- $pc := .Page.Site.Config.Privacy.Vimeo -}}
+{{- if not $pc.Disable -}}
+{{- if $pc.Simple -}}
+{{ template "_internal/shortcodes/vimeo_simple.html" . }}
+{{- else -}}
+{{ if .IsNamedParams }}<div {{ if .Get "class" }}class="{{ .Get "class" }}"{{ else }}style="position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden;"{{ end }}>
+ <iframe src="https://player.vimeo.com/video/{{ .Get "id" }}{{- if $pc.EnableDNT -}}?dnt=1{{- end -}}" {{ if not (.Get "class") }}style="position: absolute; top: 0; left: 0; width: 100%; height: 100%; border:0;" {{ end }}{{ if .Get "title"}}title="{{ .Get "title" }}"{{ else }}title="vimeo video"{{ end }} webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
+</div>{{ else }}
+<div {{ if gt (len .Params) 1 }}class="{{ .Get 1 }}"{{ else }}style="position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden;"{{ end }}>
+ <iframe src="https://player.vimeo.com/video/{{ .Get 0 }}{{- if $pc.EnableDNT -}}?dnt=1{{- end -}}" {{ if len .Params | eq 1 }}style="position: absolute; top: 0; left: 0; width: 100%; height: 100%; border:0;" {{ end }}{{ if len .Params | eq 3 }}title="{{ .Get 2 }}"{{ else }}title="vimeo video"{{ end }} webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
+</div>
+{{ end }}
+{{- end -}}
+{{- end -}} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/vimeo_simple.html b/tpl/tplimpl/embedded/templates/shortcodes/vimeo_simple.html
new file mode 100644
index 000000000..00080ab59
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/vimeo_simple.html
@@ -0,0 +1,22 @@
+{{- $pc := .Page.Site.Config.Privacy.Vimeo -}}
+{{- if not $pc.Disable -}}
+{{ $id := .Get "id" | default (.Get 0) }}
+{{ $dnt := cond (eq $pc.EnableDNT true) "?dnt=1" "" }}
+{{- $item := getJSON (print "https://vimeo.com/api/oembed.json?url=https://vimeo.com/" $id $dnt) -}}
+{{ $class := .Get "class" | default (.Get 1) }}
+{{ $hasClass := $class }}
+{{ $class := $class | default "__h_video" }}
+{{ if not $hasClass }}
+{{/* If a class is set, assume the user wants to provide their own styles. */}}
+{{ template "__h_simple_css" $ }}
+{{ end }}
+{{ $secondClass := "s_video_simple" }}
+<div class="{{ $secondClass }} {{ $class }}">
+{{- with $item }}
+<a href="{{ .provider_url }}{{ .video_id }}" rel="noopener" target="_blank">
+{{ $thumb := .thumbnail_url }}
+{{ $original := $thumb | replaceRE "(_.*\\.)" "." }}
+<img src="{{ $thumb }}" srcset="{{ $thumb }} 1x, {{ $original }} 2x" alt="{{ .title }}">
+<div class="play">{{ template "__h_simple_icon_play" $ }}</div></a></div>
+{{- end -}}
+{{- end -}} \ No newline at end of file
diff --git a/tpl/tplimpl/embedded/templates/shortcodes/youtube.html b/tpl/tplimpl/embedded/templates/shortcodes/youtube.html
new file mode 100644
index 000000000..93fed2326
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/shortcodes/youtube.html
@@ -0,0 +1,10 @@
+{{- $pc := .Page.Site.Config.Privacy.YouTube -}}
+{{- if not $pc.Disable -}}
+{{- $ytHost := cond $pc.PrivacyEnhanced "www.youtube-nocookie.com" "www.youtube.com" -}}
+{{- $id := .Get "id" | default (.Get 0) -}}
+{{- $class := .Get "class" | default (.Get 1) -}}
+{{- $title := .Get "title" | default "YouTube Video" }}
+<div {{ with $class }}class="{{ . }}"{{ else }}style="position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden;"{{ end }}>
+ <iframe src="https://{{ $ytHost }}/embed/{{ $id }}{{ with .Get "autoplay" }}{{ if eq . "true" }}?autoplay=1{{ end }}{{ end }}" {{ if not $class }}style="position: absolute; top: 0; left: 0; width: 100%; height: 100%; border:0;" {{ end }}allowfullscreen title="{{ $title }}"></iframe>
+</div>
+{{ end -}}
diff --git a/tpl/tplimpl/embedded/templates/twitter_cards.html b/tpl/tplimpl/embedded/templates/twitter_cards.html
new file mode 100644
index 000000000..83a3b79c4
--- /dev/null
+++ b/tpl/tplimpl/embedded/templates/twitter_cards.html
@@ -0,0 +1,24 @@
+{{- with $.Params.images -}}
+<meta name="twitter:card" content="summary_large_image"/>
+<meta name="twitter:image" content="{{ index . 0 | absURL }}"/>
+{{ else -}}
+{{- $images := $.Resources.ByType "image" -}}
+{{- $featured := $images.GetMatch "*feature*" -}}
+{{- if not $featured }}{{ $featured = $images.GetMatch "{*cover*,*thumbnail*}" }}{{ end -}}
+{{- with $featured -}}
+<meta name="twitter:card" content="summary_large_image"/>
+<meta name="twitter:image" content="{{ $featured.Permalink }}"/>
+{{- else -}}
+{{- with $.Site.Params.images -}}
+<meta name="twitter:card" content="summary_large_image"/>
+<meta name="twitter:image" content="{{ index . 0 | absURL }}"/>
+{{ else -}}
+<meta name="twitter:card" content="summary"/>
+{{- end -}}
+{{- end -}}
+{{- end }}
+<meta name="twitter:title" content="{{ .Title }}"/>
+<meta name="twitter:description" content="{{ with .Description }}{{ . }}{{ else }}{{if .IsPage}}{{ .Summary }}{{ else }}{{ with .Site.Params.description }}{{ . }}{{ end }}{{ end }}{{ end -}}"/>
+{{ with .Site.Social.twitter -}}
+<meta name="twitter:site" content="@{{ . }}"/>
+{{ end -}}
diff --git a/tpl/tplimpl/integration_test.go b/tpl/tplimpl/integration_test.go
new file mode 100644
index 000000000..49722c5c1
--- /dev/null
+++ b/tpl/tplimpl/integration_test.go
@@ -0,0 +1,117 @@
+package tplimpl_test
+
+import (
+ "path/filepath"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/tpl"
+)
+
+func TestPrintUnusedTemplates(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = 'http://example.com/'
+printUnusedTemplates=true
+-- content/p1.md --
+---
+title: "P1"
+---
+{{< usedshortcode >}}
+-- layouts/baseof.html --
+{{ block "main" . }}{{ end }}
+-- layouts/baseof.json --
+{{ block "main" . }}{{ end }}
+-- layouts/index.html --
+{{ define "main" }}FOO{{ end }}
+-- layouts/_default/single.json --
+-- layouts/_default/single.html --
+{{ define "main" }}MAIN{{ end }}
+-- layouts/post/single.html --
+{{ define "main" }}MAIN{{ end }}
+-- layouts/partials/usedpartial.html --
+-- layouts/partials/unusedpartial.html --
+-- layouts/shortcodes/usedshortcode.html --
+{{ partial "usedpartial.html" }}
+-- layouts/shortcodes/unusedshortcode.html --
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ },
+ )
+ b.Build()
+
+ unused := b.H.Tmpl().(tpl.UnusedTemplatesProvider).UnusedTemplates()
+
+ var names []string
+ for _, tmpl := range unused {
+ names = append(names, tmpl.Name())
+ }
+
+ b.Assert(names, qt.DeepEquals, []string{"_default/single.json", "baseof.json", "partials/unusedpartial.html", "post/single.html", "shortcodes/unusedshortcode.html"})
+ b.Assert(unused[0].Filename(), qt.Equals, filepath.Join(b.Cfg.WorkingDir, "layouts/_default/single.json"))
+}
+
+// Verify that the new keywords in Go 1.18 is available.
+func TestGo18Constructs(t *testing.T) {
+ t.Parallel()
+
+ files := `
+-- config.toml --
+baseURL = 'http://example.com/'
+disableKinds = ["section", "home", "rss", "taxonomy", "term", "rss"]
+-- content/p1.md --
+---
+title: "P1"
+---
+-- layouts/partials/counter.html --
+{{ if .Scratch.Get "counter" }}{{ .Scratch.Add "counter" 1 }}{{ else }}{{ .Scratch.Set "counter" 1 }}{{ end }}{{ return true }}
+-- layouts/_default/single.html --
+continue:{{ range seq 5 }}{{ if eq . 2 }}{{continue}}{{ end }}{{ . }}{{ end }}:END:
+break:{{ range seq 5 }}{{ if eq . 2 }}{{break}}{{ end }}{{ . }}{{ end }}:END:
+continue2:{{ range seq 5 }}{{ if eq . 2 }}{{ continue }}{{ end }}{{ . }}{{ end }}:END:
+break2:{{ range seq 5 }}{{ if eq . 2 }}{{ break }}{{ end }}{{ . }}{{ end }}:END:
+
+counter1: {{ partial "counter.html" . }}/{{ .Scratch.Get "counter" }}
+and1: {{ if (and false (partial "counter.html" .)) }}true{{ else }}false{{ end }}
+or1: {{ if (or true (partial "counter.html" .)) }}true{{ else }}false{{ end }}
+and2: {{ if (and true (partial "counter.html" .)) }}true{{ else }}false{{ end }}
+or2: {{ if (or false (partial "counter.html" .)) }}true{{ else }}false{{ end }}
+
+
+counter2: {{ .Scratch.Get "counter" }}
+
+
+ `
+
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{
+ T: t,
+ TxtarString: files,
+ NeedsOsFS: true,
+ },
+ )
+ b.Build()
+
+ b.AssertFileContent("public/p1/index.html", `
+continue:1345:END:
+break:1:END:
+continue2:1345:END:
+break2:1:END:
+counter1: true/1
+and1: false
+or1: true
+and2: true
+or2: true
+counter2: 3
+`)
+
+}
diff --git a/tpl/tplimpl/shortcodes.go b/tpl/tplimpl/shortcodes.go
new file mode 100644
index 000000000..938fc74e2
--- /dev/null
+++ b/tpl/tplimpl/shortcodes.go
@@ -0,0 +1,154 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "strings"
+
+ "github.com/gohugoio/hugo/tpl"
+)
+
// Currently lang, outFormat, suffix
const numTemplateVariants = 3

// shortcodeVariant holds one parsed shortcode template together with the
// variant key it was registered under.
type shortcodeVariant struct {

	// The possible variants: lang, outFormat, suffix
	// gtag
	// gtag.html
	// gtag.no.html
	// gtag.no.amp.html
	// A slice of length numTemplateVariants.
	variants []string

	// The parsed template for this variant.
	ts *templateState
}

// shortcodeTemplates holds all template variants (language, output format,
// suffix) registered for a single shortcode name.
type shortcodeTemplates struct {
	variants []shortcodeVariant
}
+
+func (s *shortcodeTemplates) indexOf(variants []string) int {
+L:
+ for i, v1 := range s.variants {
+ for i, v2 := range v1.variants {
+ if v2 != variants[i] {
+ continue L
+ }
+ }
+ return i
+ }
+ return -1
+}
+
// fromVariants returns the best-matching variant for the given language and
// output format by delegating to fromVariantsSlice with the
// [lang, outputFormat, suffix] key.
func (s *shortcodeTemplates) fromVariants(variants tpl.TemplateVariants) (shortcodeVariant, bool) {
	return s.fromVariantsSlice([]string{
		variants.Language,
		strings.ToLower(variants.OutputFormat.Name),
		variants.OutputFormat.MediaType.FirstSuffix.Suffix,
	})
}
+
+func (s *shortcodeTemplates) fromVariantsSlice(variants []string) (shortcodeVariant, bool) {
+ var (
+ bestMatch shortcodeVariant
+ bestMatchWeight int
+ )
+
+ for _, variant := range s.variants {
+ w := s.compareVariants(variants, variant.variants)
+ if bestMatchWeight == 0 || w > bestMatchWeight {
+ bestMatch = variant
+ bestMatchWeight = w
+ }
+ }
+
+ return bestMatch, true
+}
+
+// calculate a weight for two string slices of same length.
+// higher value means "better match".
+func (s *shortcodeTemplates) compareVariants(a, b []string) int {
+ weight := 0
+ k := len(a)
+ for i, av := range a {
+ bv := b[i]
+ if av == bv {
+ // Add more weight to the left side (language...).
+ weight = weight + k - i
+ } else {
+ weight--
+ }
+ }
+ return weight
+}
+
// templateVariants returns the [lang, outputFormat, suffix] variants encoded
// in the template name.
func templateVariants(name string) []string {
	_, variants := templateNameAndVariants(name)
	return variants
}

// templateNameAndVariants splits a shortcode template name such as
// "figure.no.amp.html" into its base name ("figure") and its
// [lang, outputFormat, suffix] variants ("no", "amp", "html").
// Missing positions are filled so that e.g. "figure.html" becomes
// ["", "html", "html"] and "figure.no.html" becomes ["no", "no", "html"].
func templateNameAndVariants(name string) (string, []string) {
	variants := make([]string, numTemplateVariants)

	parts := strings.Split(name, ".")

	if len(parts) <= 1 {
		// No variants.
		return name, variants
	}

	name = parts[0]
	parts = parts[1:]
	lp := len(parts)
	start := len(variants) - lp

	// Right-align the supplied parts into the variants slice.
	for i, j := start, 0; i < len(variants); i, j = i+1, j+1 {
		variants[i] = parts[j]
	}

	if lp > 1 && lp < len(variants) {
		// Fill the gap on the left by copying leftwards, so
		// ["", "no", "html"] becomes ["no", "no", "html"].
		for i := lp - 1; i > 0; i-- {
			variants[i-1] = variants[i]
		}
	}

	if lp == 1 {
		// Suffix only. Duplicate it into the output format field to
		// make HTML win over AMP.
		variants[len(variants)-2] = variants[len(variants)-1]
	}

	return name, variants
}
+
+func resolveTemplateType(name string) templateType {
+ if isShortcode(name) {
+ return templateShortcode
+ }
+
+ if strings.Contains(name, "partials/") {
+ return templatePartial
+ }
+
+ return templateUndefined
+}
+
// isShortcode reports whether the template name contains the shortcodes path
// prefix ("shortcodes/"). Names use forward slashes only.
func isShortcode(name string) bool {
	return strings.Contains(name, shortcodesPathPrefix)
}

// isInternal reports whether the template is an embedded internal template
// ("_internal/" name prefix).
func isInternal(name string) bool {
	return strings.HasPrefix(name, internalPathPrefix)
}
diff --git a/tpl/tplimpl/shortcodes_test.go b/tpl/tplimpl/shortcodes_test.go
new file mode 100644
index 000000000..f97c7f278
--- /dev/null
+++ b/tpl/tplimpl/shortcodes_test.go
@@ -0,0 +1,91 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestShortcodesTemplate exercises the shortcode template name helpers:
// classification, variant extraction, match weighting and exact lookup.
func TestShortcodesTemplate(t *testing.T) {
	t.Run("isShortcode", func(t *testing.T) {
		c := qt.New(t)
		c.Assert(isShortcode("shortcodes/figures.html"), qt.Equals, true)
		c.Assert(isShortcode("_internal/shortcodes/figures.html"), qt.Equals, true)
		// Backslash separators are not recognized.
		c.Assert(isShortcode("shortcodes\\figures.html"), qt.Equals, false)
		c.Assert(isShortcode("myshortcodes"), qt.Equals, false)
	})

	t.Run("variantsFromName", func(t *testing.T) {
		c := qt.New(t)
		c.Assert(templateVariants("figure.html"), qt.DeepEquals, []string{"", "html", "html"})
		c.Assert(templateVariants("figure.no.html"), qt.DeepEquals, []string{"no", "no", "html"})
		c.Assert(templateVariants("figure.no.amp.html"), qt.DeepEquals, []string{"no", "amp", "html"})
		c.Assert(templateVariants("figure.amp.html"), qt.DeepEquals, []string{"amp", "amp", "html"})

		name, variants := templateNameAndVariants("figure.html")
		c.Assert(name, qt.Equals, "figure")
		c.Assert(variants, qt.DeepEquals, []string{"", "html", "html"})
	})

	t.Run("compareVariants", func(t *testing.T) {
		c := qt.New(t)
		// compareVariants does not read the receiver, so a nil receiver is fine.
		var s *shortcodeTemplates

		tests := []struct {
			name     string
			name1    string
			name2    string
			expected int
		}{
			{"Same suffix", "figure.html", "figure.html", 6},
			{"Same suffix and output format", "figure.html.html", "figure.html.html", 6},
			{"Same suffix, output format and language", "figure.no.html.html", "figure.no.html.html", 6},
			{"No suffix", "figure", "figure", 6},
			{"Different output format", "figure.amp.html", "figure.html.html", -1},
			{"One with output format, one without", "figure.amp.html", "figure.html", -1},
		}

		for _, test := range tests {
			w := s.compareVariants(templateVariants(test.name1), templateVariants(test.name2))
			c.Assert(w, qt.Equals, test.expected)
		}
	})

	t.Run("indexOf", func(t *testing.T) {
		c := qt.New(t)

		s := &shortcodeTemplates{
			variants: []shortcodeVariant{
				{variants: []string{"a", "b", "c"}},
				{variants: []string{"a", "b", "d"}},
			},
		}

		c.Assert(s.indexOf([]string{"a", "b", "c"}), qt.Equals, 0)
		c.Assert(s.indexOf([]string{"a", "b", "d"}), qt.Equals, 1)
		c.Assert(s.indexOf([]string{"a", "b", "x"}), qt.Equals, -1)
	})

	t.Run("Name", func(t *testing.T) {
		c := qt.New(t)

		c.Assert(templateBaseName(templateShortcode, "shortcodes/foo.html"), qt.Equals, "foo.html")
		c.Assert(templateBaseName(templateShortcode, "_internal/shortcodes/foo.html"), qt.Equals, "foo.html")
		c.Assert(templateBaseName(templateShortcode, "shortcodes/test/foo.html"), qt.Equals, "test/foo.html")

		// NOTE(review): trivially true; consider removing.
		c.Assert(true, qt.Equals, true)
	})
}
diff --git a/tpl/tplimpl/template.go b/tpl/tplimpl/template.go
new file mode 100644
index 000000000..c79605cbc
--- /dev/null
+++ b/tpl/tplimpl/template.go
@@ -0,0 +1,1161 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "bytes"
+ "context"
+ "embed"
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "reflect"
+ "regexp"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/gohugoio/hugo/helpers"
+
+ "github.com/gohugoio/hugo/output"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/afero"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/hugofs/files"
+
+ htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+ texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+
+ "github.com/gohugoio/hugo/identity"
+ "github.com/gohugoio/hugo/tpl"
+)
+
const (
	// Name prefix for templates parsed in the text (non-HTML) namespace.
	textTmplNamePrefix = "_text/"

	// Path prefixes used to classify templates by name.
	shortcodesPathPrefix = "shortcodes/"
	internalPathPrefix   = "_internal/"

	// Base name of base template files.
	baseFileBase = "baseof"
)

// The identifiers may be truncated in the log, e.g.
// "executing "main" at <$scaled.SRelPermalin...>: can't evaluate field SRelPermalink in type *resource.Image"
// We need this to identify position in templates with base templates applied.
var identifiersRe = regexp.MustCompile(`at \<(.*?)(\.{3})?\>:`)

// embeddedTemplatesAliases maps an embedded template to the additional alias
// names it is registered under.
var embeddedTemplatesAliases = map[string][]string{
	"shortcodes/twitter.html": {"shortcodes/tweet.html"},
}
+
// Compile-time interface checks.
var (
	_ tpl.TemplateManager         = (*templateExec)(nil)
	_ tpl.TemplateHandler         = (*templateExec)(nil)
	_ tpl.TemplateFuncGetter      = (*templateExec)(nil)
	_ tpl.TemplateFinder          = (*templateExec)(nil)
	_ tpl.UnusedTemplatesProvider = (*templateExec)(nil)

	_ tpl.Template = (*templateState)(nil)
	_ tpl.Info     = (*templateState)(nil)
)
+
// baseTemplateDefineRe matches template source whose first action is a
// define block (allowing the "{{-" trim marker).
var baseTemplateDefineRe = regexp.MustCompile(`^{{-?\s*define`)

// needsBaseTemplate returns true if the first non-comment template block is a
// define block.
// If a base template does not exist, we will handle that when it's used.
func needsBaseTemplate(templ string) bool {
	idx := -1
	inComment := false
scan:
	for i := 0; i < len(templ); {
		rest := templ[i:]
		switch {
		case !inComment && strings.HasPrefix(rest, "{{/*"):
			inComment = true
			i += 4
		case inComment && strings.HasPrefix(rest, "*/}}"):
			inComment = false
			i += 4
		default:
			r, size := utf8.DecodeRuneInString(rest)
			if !inComment {
				if strings.HasPrefix(rest, "{{") {
					// First real template action found.
					idx = i
					break scan
				}
				if !unicode.IsSpace(r) {
					// Plain content before any action: not a base overlay.
					break scan
				}
			}
			i += size
		}
	}

	if idx == -1 {
		return false
	}

	return baseTemplateDefineRe.MatchString(templ[idx:])
}
+
// newIdentity creates an identity manager for the named layout, used for
// dependency/change tracking.
func newIdentity(name string) identity.Manager {
	return identity.NewManager(identity.NewPathIdentity(files.ComponentFolderLayouts, name))
}

// newStandaloneTextTemplate creates a text template parser/finder guarded by
// its own lock, independent of the main template namespace.
func newStandaloneTextTemplate(funcs map[string]any) tpl.TemplateParseFinder {
	return &textTemplateWrapperWithLock{
		RWMutex:  &sync.RWMutex{},
		Template: texttemplate.New("").Funcs(funcs),
	}
}
+
// newTemplateExec wires up the template system for the given deps: it builds
// the func map, loads the embedded templates and then the project's
// templates, and registers the result on d.
func newTemplateExec(d *deps.Deps) (*templateExec, error) {
	exec, funcs := newTemplateExecuter(d)
	funcMap := make(map[string]any)
	for k, v := range funcs {
		funcMap[k] = v.Interface()
	}

	// Usage tracking is only enabled when the report is requested.
	var templateUsageTracker map[string]templateInfo
	if d.Cfg.GetBool("printUnusedTemplates") {
		templateUsageTracker = make(map[string]templateInfo)
	}

	h := &templateHandler{
		nameBaseTemplateName: make(map[string]string),
		transformNotFound:    make(map[string]*templateState),
		identityNotFound:     make(map[string][]identity.Manager),

		shortcodes:   make(map[string]*shortcodeTemplates),
		templateInfo: make(map[string]tpl.Info),
		baseof:       make(map[string]templateInfo),
		needsBaseof:  make(map[string]templateInfo),

		main: newTemplateNamespace(funcMap),

		Deps:                d,
		layoutHandler:       output.NewLayoutHandler(),
		layoutsFs:           d.BaseFs.Layouts.Fs,
		layoutTemplateCache: make(map[layoutCacheKey]tpl.Template),

		templateUsageTracker: templateUsageTracker,
	}

	// Embedded templates first, then the project's templates.
	if err := h.loadEmbedded(); err != nil {
		return nil, err
	}

	if err := h.loadTemplates(); err != nil {
		return nil, err
	}

	e := &templateExec{
		d:               d,
		executor:        exec,
		funcs:           funcs,
		templateHandler: h,
	}

	d.SetTmpl(e)
	d.SetTextTmpl(newStandaloneTextTemplate(funcMap))

	// Give the client (e.g. tests) a chance to add templates.
	if d.WithTemplate != nil {
		if err := d.WithTemplate(e); err != nil {
			return nil, err
		}
	}

	return e, nil
}
+
// newTemplateNamespace creates the namespace holding the HTML and text
// template prototypes and the shared template state map.
func newTemplateNamespace(funcs map[string]any) *templateNamespace {
	return &templateNamespace{
		prototypeHTML: htmltemplate.New("").Funcs(funcs),
		prototypeText: texttemplate.New("").Funcs(funcs),
		templateStateMap: &templateStateMap{
			templates: make(map[string]*templateState),
		},
	}
}

// newTemplateState wraps a parsed template with its metadata, resolved type
// and identity manager.
func newTemplateState(templ tpl.Template, info templateInfo) *templateState {
	return &templateState{
		info:      info,
		typ:       info.resolveType(),
		Template:  templ,
		Manager:   newIdentity(info.name),
		parseInfo: tpl.DefaultParseInfo,
	}
}

// layoutCacheKey identifies one layout lookup: the layout descriptor plus
// the output format name.
type layoutCacheKey struct {
	d output.LayoutDescriptor
	f string
}
+
// templateExec is the top-level template system: the template collection
// (templateHandler) plus an executer and the resolved template funcs.
type templateExec struct {
	d        *deps.Deps
	executor texttemplate.Executer
	funcs    map[string]reflect.Value

	*templateHandler
}

// Clone returns a copy of t wired to the new deps. Note the value receiver:
// t is already a shallow copy, so only the deps-dependent fields are
// replaced; the template collection is shared with the original.
func (t templateExec) Clone(d *deps.Deps) *templateExec {
	exec, funcs := newTemplateExecuter(d)
	t.executor = exec
	t.funcs = funcs
	t.d = d
	return &t
}
+
// Execute renders templ to wr with data, using a background context.
func (t *templateExec) Execute(templ tpl.Template, wr io.Writer, data any) error {
	return t.ExecuteWithContext(context.Background(), templ, wr, data)
}

// ExecuteWithContext renders templ to wr with data. It takes a read lock when
// the template provides one, records metrics and usage when enabled, and
// decorates execution errors with file context.
func (t *templateExec) ExecuteWithContext(ctx context.Context, templ tpl.Template, wr io.Writer, data any) error {
	if rlocker, ok := templ.(types.RLocker); ok {
		rlocker.RLock()
		defer rlocker.RUnlock()
	}
	if t.Metrics != nil {
		defer t.Metrics.MeasureSince(templ.Name(), time.Now())
	}

	if t.templateUsageTracker != nil {
		if ts, ok := templ.(*templateState); ok {
			t.templateUsageTrackerMu.Lock()
			if _, found := t.templateUsageTracker[ts.Name()]; !found {
				t.templateUsageTracker[ts.Name()] = ts.info
			}

			// Also mark the base template, if any, as used.
			if !ts.baseInfo.IsZero() {
				if _, found := t.templateUsageTracker[ts.baseInfo.name]; !found {
					t.templateUsageTracker[ts.baseInfo.name] = ts.baseInfo
				}
			}
			t.templateUsageTrackerMu.Unlock()
		}
	}

	execErr := t.executor.ExecuteWithContext(ctx, templ, wr, data)
	if execErr != nil {
		execErr = t.addFileContext(templ, execErr)
	}
	return execErr
}
+
// UnusedTemplates returns the project templates that were never executed,
// sorted by name. It returns nil unless usage tracking is enabled
// ("printUnusedTemplates").
func (t *templateExec) UnusedTemplates() []tpl.FileInfo {
	if t.templateUsageTracker == nil {
		return nil
	}
	var unused []tpl.FileInfo

	for _, ti := range t.needsBaseof {
		if _, found := t.templateUsageTracker[ti.name]; !found {
			unused = append(unused, ti)
		}
	}

	for _, ti := range t.baseof {
		if _, found := t.templateUsageTracker[ti.name]; !found {
			unused = append(unused, ti)
		}
	}

	// Skip embedded templates and templates without a backing file.
	for _, ts := range t.main.templates {
		ti := ts.info
		if strings.HasPrefix(ti.name, "_internal/") || ti.realFilename == "" {
			continue
		}

		if _, found := t.templateUsageTracker[ti.name]; !found {
			unused = append(unused, ti)
		}
	}

	// Stable output order.
	sort.Slice(unused, func(i, j int) bool {
		return unused[i].Name() < unused[j].Name()
	})

	return unused
}
+
// GetFunc returns the template func with the given name, if registered.
func (t *templateExec) GetFunc(name string) (reflect.Value, bool) {
	v, found := t.funcs[name]
	return v, found
}

// MarkReady is called once all templates are loaded; it creates the
// prototype clones used for base template resolution. Runs at most once.
func (t *templateExec) MarkReady() error {
	var err error
	t.readyInit.Do(func() {
		// We only need the clones if base templates are in use.
		if len(t.needsBaseof) > 0 {
			err = t.main.createPrototypes()
		}
	})

	return err
}
+
// templateHandler owns the collection of parsed templates plus the
// book-keeping needed to resolve base templates, shortcode variants and
// cross-template references.
type templateHandler struct {
	main        *templateNamespace
	needsBaseof map[string]templateInfo
	baseof      map[string]templateInfo

	// Guards the one-time prototype creation in MarkReady.
	readyInit sync.Once

	// This is the filesystem to load the templates from. All the templates are
	// stored in the root of this filesystem.
	layoutsFs afero.Fs

	layoutHandler *output.LayoutHandler

	// Caches successful layout lookups; see LookupLayout.
	layoutTemplateCache   map[layoutCacheKey]tpl.Template
	layoutTemplateCacheMu sync.RWMutex

	*deps.Deps

	// Used to get proper filenames in errors
	nameBaseTemplateName map[string]string

	// Holds name and source of template definitions not found during the first
	// AST transformation pass.
	transformNotFound map[string]*templateState

	// Holds identities of templates not found during first pass.
	identityNotFound map[string][]identity.Manager

	// shortcodes maps shortcode name to template variants
	// (language, output format etc.) of that shortcode.
	shortcodes map[string]*shortcodeTemplates

	// templateInfo maps template name to some additional information about that template.
	// Note that for shortcodes that same information is embedded in the
	// shortcodeTemplates type.
	templateInfo map[string]tpl.Info

	// May be nil; only set when printUnusedTemplates is enabled.
	templateUsageTracker   map[string]templateInfo
	templateUsageTrackerMu sync.Mutex
}
+
+// AddTemplate parses and adds a template to the collection.
+// Templates with name prefixed with "_text" will be handled as plain
+// text templates.
+func (t *templateHandler) AddTemplate(name, tpl string) error {
+ templ, err := t.addTemplateTo(t.newTemplateInfo(name, tpl), t.main)
+ if err == nil {
+ t.applyTemplateTransformers(t.main, templ)
+ }
+ return err
+}
+
+func (t *templateHandler) Lookup(name string) (tpl.Template, bool) {
+ templ, found := t.main.Lookup(name)
+ if found {
+ return templ, true
+ }
+
+ return nil, false
+}
+
// LookupLayout resolves a template for the given layout descriptor and
// output format, caching successful lookups.
func (t *templateHandler) LookupLayout(d output.LayoutDescriptor, f output.Format) (tpl.Template, bool, error) {
	key := layoutCacheKey{d, f.Name}
	t.layoutTemplateCacheMu.RLock()
	if cacheVal, found := t.layoutTemplateCache[key]; found {
		t.layoutTemplateCacheMu.RUnlock()
		return cacheVal, true, nil
	}
	t.layoutTemplateCacheMu.RUnlock()

	t.layoutTemplateCacheMu.Lock()
	defer t.layoutTemplateCacheMu.Unlock()

	// NOTE(review): the cache is not re-checked after upgrading to the write
	// lock, so concurrent callers may resolve the same layout twice (same
	// result either way). Negative results are never cached.
	templ, found, err := t.findLayout(d, f)
	if err == nil && found {
		t.layoutTemplateCache[key] = templ
		return templ, true, nil
	}

	return nil, false, err
}
+
// LookupVariant returns the best-matching variant of the named shortcode.
// This currently only applies to shortcodes and what we get here is the
// shortcode name. The last return value reports whether more than one
// variant exists.
func (t *templateHandler) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
	name = templateBaseName(templateShortcode, name)
	s, found := t.shortcodes[name]
	if !found {
		return nil, false, false
	}

	sv, found := s.fromVariants(variants)
	if !found {
		return nil, false, false
	}

	more := len(s.variants) > 1

	return sv.ts, true, more
}
+
+// LookupVariants returns all variants of name, nil if none found.
+func (t *templateHandler) LookupVariants(name string) []tpl.Template {
+ name = templateBaseName(templateShortcode, name)
+ s, found := t.shortcodes[name]
+ if !found {
+ return nil
+ }
+
+ variants := make([]tpl.Template, len(s.variants))
+ for i := 0; i < len(variants); i++ {
+ variants[i] = s.variants[i].ts
+ }
+
+ return variants
+}
+
// HasTemplate reports whether a template with the given name exists,
// including base templates and templates still awaiting base resolution.
func (t *templateHandler) HasTemplate(name string) bool {
	if _, found := t.baseof[name]; found {
		return true
	}

	if _, found := t.needsBaseof[name]; found {
		return true
	}

	_, found := t.Lookup(name)
	return found
}
+
// findLayout resolves the first matching template among the layout
// candidates for d and f, applying a base template when the candidate
// requires one.
func (t *templateHandler) findLayout(d output.LayoutDescriptor, f output.Format) (tpl.Template, bool, error) {
	layouts, _ := t.layoutHandler.For(d, f)
	for _, name := range layouts {
		// Already parsed (no base template needed).
		templ, found := t.main.Lookup(name)
		if found {
			return templ, true, nil
		}

		overlay, found := t.needsBaseof[name]

		if !found {
			continue
		}

		// Find the most specific base template candidate for this overlay.
		d.Baseof = true
		baseLayouts, _ := t.layoutHandler.For(d, f)
		var base templateInfo
		found = false
		for _, l := range baseLayouts {
			base, found = t.baseof[l]
			if found {
				break
			}
		}

		templ, err := t.applyBaseTemplate(overlay, base)
		if err != nil {
			return nil, false, err
		}

		ts := newTemplateState(templ, overlay)

		if found {
			ts.baseInfo = base

			// Add the base identity to detect changes
			ts.Add(identity.NewPathIdentity(files.ComponentFolderLayouts, base.name))
		}

		// NOTE(review): the transformer error is discarded here — confirm
		// whether it should be propagated.
		t.applyTemplateTransformers(t.main, ts)

		if err := t.extractPartials(ts.Template); err != nil {
			return nil, false, err
		}

		return ts, true, nil

	}

	return nil, false, nil
}
+
+func (t *templateHandler) findTemplate(name string) *templateState {
+ if templ, found := t.Lookup(name); found {
+ return templ.(*templateState)
+ }
+ return nil
+}
+
// newTemplateInfo creates an in-memory (fileless) templateInfo for the given
// name and source, resolving the "_text/" prefix.
func (t *templateHandler) newTemplateInfo(name, tpl string) templateInfo {
	var isText bool
	name, isText = t.nameIsText(name)
	return templateInfo{
		name:     name,
		isText:   isText,
		template: tpl,
	}
}
+
// addFileContext decorates a template execution error with the filename,
// line and source context of the failing template (or of its base
// template), so the user can locate the error.
func (t *templateHandler) addFileContext(templ tpl.Template, inerr error) error {
	if strings.HasPrefix(templ.Name(), "_internal") {
		return inerr
	}

	ts, ok := templ.(*templateState)
	if !ok {
		return inerr
	}

	// Identifiers extracted from the error message, used to find the line.
	identifiers := t.extractIdentifiers(inerr.Error())

	//lint:ignore ST1008 the error is the main result
	checkFilename := func(info templateInfo, inErr error) (error, bool) {
		if info.filename == "" {
			return inErr, false
		}

		lineMatcher := func(m herrors.LineMatcher) int {
			if m.Position.LineNumber != m.LineNumber {
				return -1
			}

			for _, id := range identifiers {
				if strings.Contains(m.Line, id) {
					// We found the line, but return a 0 to signal to
					// use the column from the error message.
					return 0
				}
			}
			return -1
		}

		f, err := t.layoutsFs.Open(info.filename)
		if err != nil {
			return inErr, false
		}
		defer f.Close()

		fe := herrors.NewFileErrorFromName(inErr, info.realFilename)
		fe.UpdateContent(f, lineMatcher)

		if !fe.ErrorContext().Position.IsValid() {
			return inErr, false
		}
		return fe, true
	}

	inerr = fmt.Errorf("execute of template failed: %w", inerr)

	// Try the template itself first, then its base template.
	if err, ok := checkFilename(ts.info, inerr); ok {
		return err
	}

	err, _ := checkFilename(ts.baseInfo, inerr)

	return err
}
+
+func (t *templateHandler) extractIdentifiers(line string) []string {
+ m := identifiersRe.FindAllStringSubmatch(line, -1)
+ identifiers := make([]string, len(m))
+ for i := 0; i < len(m); i++ {
+ identifiers[i] = m[i][1]
+ }
+ return identifiers
+}
+
// addShortcodeVariant registers ts as a variant of its shortcode, keyed by
// [lang, outputFormat, suffix]. A user-provided template replaces an
// embedded (_internal) one registered under the same key.
func (t *templateHandler) addShortcodeVariant(ts *templateState) {
	name := ts.Name()
	base := templateBaseName(templateShortcode, name)

	shortcodename, variants := templateNameAndVariants(base)

	templs, found := t.shortcodes[shortcodename]
	if !found {
		templs = &shortcodeTemplates{}
		t.shortcodes[shortcodename] = templs
	}

	sv := shortcodeVariant{variants: variants, ts: ts}

	i := templs.indexOf(variants)

	if i != -1 {
		// Only replace if it's an override of an internal template.
		if !isInternal(name) {
			templs.variants[i] = sv
		}
	} else {
		templs.variants = append(templs.variants, sv)
	}
}
+
// addTemplateFile reads the template file at path and registers it under
// name: base ("baseof") templates and templates that need a base are stored
// for later resolution, everything else is parsed immediately.
func (t *templateHandler) addTemplateFile(name, path string) error {
	getTemplate := func(filename string) (templateInfo, error) {
		fs := t.Layouts.Fs
		b, err := afero.ReadFile(fs, filename)
		if err != nil {
			return templateInfo{filename: filename, fs: fs}, err
		}

		// Strip a leading UTF-8 BOM so it doesn't end up in the output.
		s := removeLeadingBOM(string(b))

		// Resolve the real filename (used in error messages) when the file
		// comes from a composite filesystem.
		realFilename := filename
		if fi, err := fs.Stat(filename); err == nil {
			if fim, ok := fi.(hugofs.FileMetaInfo); ok {
				realFilename = fim.Meta().Filename
			}
		}

		var isText bool
		name, isText = t.nameIsText(name)

		return templateInfo{
			name:         name,
			isText:       isText,
			template:     s,
			filename:     filename,
			realFilename: realFilename,
			fs:           fs,
		}, nil
	}

	tinfo, err := getTemplate(path)
	if err != nil {
		return err
	}

	if isBaseTemplatePath(name) {
		// Store it for later.
		t.baseof[name] = tinfo
		return nil
	}

	needsBaseof := !t.noBaseNeeded(name) && needsBaseTemplate(tinfo.template)
	if needsBaseof {
		t.needsBaseof[name] = tinfo
		return nil
	}

	templ, err := t.addTemplateTo(tinfo, t.main)
	if err != nil {
		return tinfo.errWithFileContext("parse failed", err)
	}
	// NOTE(review): transformer error discarded here — confirm intentional.
	t.applyTemplateTransformers(t.main, templ)

	return nil
}
+
// addTemplateTo parses the template described by info into the given
// namespace.
func (t *templateHandler) addTemplateTo(info templateInfo, to *templateNamespace) (*templateState, error) {
	return to.parse(info)
}
+
// applyBaseTemplate parses the overlay template on top of its base template
// (if any) and returns the combined template. Text and HTML templates are
// handled in separate branches because they use different template packages.
func (t *templateHandler) applyBaseTemplate(overlay, base templateInfo) (tpl.Template, error) {
	if overlay.isText {
		var (
			templ = t.main.prototypeTextClone.New(overlay.name)
			err   error
		)

		if !base.IsZero() {
			templ, err = templ.Parse(base.template)
			if err != nil {
				return nil, base.errWithFileContext("parse failed", err)
			}
		}

		templ, err = texttemplate.Must(templ.Clone()).Parse(overlay.template)
		if err != nil {
			return nil, overlay.errWithFileContext("parse failed", err)
		}

		// The extra lookup is a workaround, see
		// * https://github.com/golang/go/issues/16101
		// * https://github.com/gohugoio/hugo/issues/2549
		// templ = templ.Lookup(templ.Name())
		// NOTE(review): the lookup workaround is applied in the HTML branch
		// below but left commented out here — confirm this is intentional.

		return templ, nil
	}

	var (
		templ = t.main.prototypeHTMLClone.New(overlay.name)
		err   error
	)

	if !base.IsZero() {
		templ, err = templ.Parse(base.template)
		if err != nil {
			return nil, base.errWithFileContext("parse failed", err)
		}
	}

	templ, err = htmltemplate.Must(templ.Clone()).Parse(overlay.template)
	if err != nil {
		return nil, overlay.errWithFileContext("parse failed", err)
	}

	// The extra lookup is a workaround, see
	// * https://github.com/golang/go/issues/16101
	// * https://github.com/gohugoio/hugo/issues/2549
	templ = templ.Lookup(templ.Name())

	return templ, err
}
+
// applyTemplateTransformers runs the AST transformers on ts and records the
// template names and identities that could not be resolved yet; those are
// retried in postTransform.
func (t *templateHandler) applyTemplateTransformers(ns *templateNamespace, ts *templateState) (*templateContext, error) {
	c, err := applyTemplateTransformers(ts, ns.newTemplateLookup(ts))
	if err != nil {
		return nil, err
	}

	for k := range c.templateNotFound {
		t.transformNotFound[k] = ts
		t.identityNotFound[k] = append(t.identityNotFound[k], c.t)
	}

	for k := range c.identityNotFound {
		t.identityNotFound[k] = append(t.identityNotFound[k], c.t)
	}

	return c, err
}
+
// embededTemplatesFs holds Hugo's built-in templates, embedded at build time.
// (Name misspelling kept as-is to avoid churn.)
//go:embed embedded/templates/*
//go:embed embedded/templates/_default/*
//go:embed embedded/templates/_server/*
var embededTemplatesFs embed.FS
+
// loadEmbedded walks the embedded templates filesystem and adds every
// template not already present, plus any configured alias names.
func (t *templateHandler) loadEmbedded() error {
	return fs.WalkDir(embededTemplatesFs, ".", func(path string, d fs.DirEntry, err error) error {
		if d == nil || d.IsDir() {
			return nil
		}

		templb, err := embededTemplatesFs.ReadFile(path)
		if err != nil {
			return err
		}

		// Get the newlines on Windows in line with how we had it back when we used Go Generate
		// to write the templates to Go files.
		templ := string(bytes.ReplaceAll(templb, []byte("\r\n"), []byte("\n")))
		name := strings.TrimPrefix(filepath.ToSlash(path), "embedded/templates/")
		templateName := name

		// For the render hooks and the server templates it does not make sense to preserve the
		// double _internal book-keeping; just add the template if it's not
		// already provided by the user.
		if !strings.Contains(path, "_default/_markup") && !strings.HasPrefix(name, "_server/") {
			templateName = internalPathPrefix + name
		}

		if _, found := t.Lookup(templateName); !found {
			if err := t.AddTemplate(templateName, templ); err != nil {
				return err
			}
		}

		if aliases, found := embeddedTemplatesAliases[name]; found {
			// TODO(bep) avoid reparsing these aliases
			for _, alias := range aliases {
				alias = internalPathPrefix + alias
				if err := t.AddTemplate(alias, templ); err != nil {
					return err
				}
			}
		}

		return nil
	})
}
+
// loadTemplates walks the layouts filesystem and adds every template file,
// skipping dot files and editor backups. A missing layouts directory is not
// an error.
func (t *templateHandler) loadTemplates() error {
	walker := func(path string, fi hugofs.FileMetaInfo, err error) error {
		if err != nil || fi.IsDir() {
			return err
		}

		if isDotFile(path) || isBackupFile(path) {
			return nil
		}

		name := strings.TrimPrefix(filepath.ToSlash(path), "/")
		filename := filepath.Base(path)
		outputFormat, found := t.OutputFormatsConfig.FromFilename(filename)

		// Plain text output formats are parsed in the text namespace.
		if found && outputFormat.IsPlainText {
			name = textTmplNamePrefix + name
		}

		if err := t.addTemplateFile(name, path); err != nil {
			return err
		}

		return nil
	}

	if err := helpers.SymbolicWalk(t.Layouts.Fs, "", walker); err != nil {
		if !os.IsNotExist(err) {
			return err
		}
		return nil
	}

	return nil
}
+
+func (t *templateHandler) nameIsText(name string) (string, bool) {
+ isText := strings.HasPrefix(name, textTmplNamePrefix)
+ if isText {
+ name = strings.TrimPrefix(name, textTmplNamePrefix)
+ }
+ return name, isText
+}
+
+func (t *templateHandler) noBaseNeeded(name string) bool {
+ if strings.HasPrefix(name, "shortcodes/") || strings.HasPrefix(name, "partials/") {
+ return true
+ }
+ return strings.Contains(name, "_markup/")
+}
+
// extractPartials registers partials defined inline in templ (via define
// "partials/...") so they can be looked up like file-based partials.
func (t *templateHandler) extractPartials(templ tpl.Template) error {
	templs := templates(templ)
	for _, templ := range templs {
		if templ.Name() == "" || !strings.HasPrefix(templ.Name(), "partials/") {
			continue
		}

		ts := newTemplateState(templ, templateInfo{name: templ.Name()})
		ts.typ = templatePartial

		t.main.mu.RLock()
		_, found := t.main.templates[templ.Name()]
		t.main.mu.RUnlock()

		// NOTE(review): the map can change between the RUnlock above and the
		// Lock below; a concurrent add of the same name could be clobbered.
		if !found {
			t.main.mu.Lock()
			// This is a template defined inline.
			_, err := applyTemplateTransformers(ts, t.main.newTemplateLookup(ts))
			if err != nil {
				t.main.mu.Unlock()
				return err
			}
			t.main.templates[templ.Name()] = ts
			t.main.mu.Unlock()

		}
	}

	return nil
}
+
+func (t *templateHandler) postTransform() error {
+ defineCheckedHTML := false
+ defineCheckedText := false
+
+ for _, v := range t.main.templates {
+ if v.typ == templateShortcode {
+ t.addShortcodeVariant(v)
+ }
+
+ if defineCheckedHTML && defineCheckedText {
+ continue
+ }
+
+ isText := isText(v.Template)
+ if isText {
+ if defineCheckedText {
+ continue
+ }
+ defineCheckedText = true
+ } else {
+ if defineCheckedHTML {
+ continue
+ }
+ defineCheckedHTML = true
+ }
+
+ if err := t.extractPartials(v.Template); err != nil {
+ return err
+ }
+ }
+
+ for name, source := range t.transformNotFound {
+ lookup := t.main.newTemplateLookup(source)
+ templ := lookup(name)
+ if templ != nil {
+ _, err := applyTemplateTransformers(templ, lookup)
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ for k, v := range t.identityNotFound {
+ ts := t.findTemplate(k)
+ if ts != nil {
+ for _, im := range v {
+ im.Add(ts)
+ }
+ }
+ }
+
+ for _, v := range t.shortcodes {
+ sort.Slice(v.variants, func(i, j int) bool {
+ v1, v2 := v.variants[i], v.variants[j]
+ name1, name2 := v1.ts.Name(), v2.ts.Name()
+ isHTMl1, isHTML2 := strings.HasSuffix(name1, "html"), strings.HasSuffix(name2, "html")
+
+ // There will be a weighted selection later, but make
+ // sure these are sorted to get a stable selection for
+ // output formats missing specific templates.
+ // Prefer HTML.
+ if isHTMl1 || isHTML2 && !(isHTMl1 && isHTML2) {
+ return isHTMl1
+ }
+
+ return name1 < name2
+ })
+ }
+
+ return nil
+}
+
// templateNamespace holds the HTML and text template prototypes (plus, after
// MarkReady, clones of them used for base template resolution) and the map
// of parsed template states.
type templateNamespace struct {
	prototypeText      *texttemplate.Template
	prototypeHTML      *htmltemplate.Template
	prototypeTextClone *texttemplate.Template
	prototypeHTMLClone *htmltemplate.Template

	*templateStateMap
}

// Clone returns a copy with fresh prototype clones and an empty template
// state map. Note the value receiver: t is already a shallow copy.
func (t templateNamespace) Clone() *templateNamespace {
	t.mu.Lock()
	defer t.mu.Unlock()

	t.templateStateMap = &templateStateMap{
		templates: make(map[string]*templateState),
	}

	t.prototypeText = texttemplate.Must(t.prototypeText.Clone())
	t.prototypeHTML = htmltemplate.Must(t.prototypeHTML.Clone())

	return &t
}
+
+func (t *templateNamespace) Lookup(name string) (tpl.Template, bool) {
+ t.mu.RLock()
+ defer t.mu.RUnlock()
+
+ templ, found := t.templates[name]
+ if !found {
+ return nil, false
+ }
+
+ return templ, found
+}
+
// createPrototypes clones the prototypes; the clones are used when applying
// base templates. Called (once) from MarkReady.
func (t *templateNamespace) createPrototypes() error {
	t.prototypeTextClone = texttemplate.Must(t.prototypeText.Clone())
	t.prototypeHTMLClone = htmltemplate.Must(t.prototypeHTML.Clone())

	return nil
}

// newTemplateLookup returns a lookup func that resolves names first in the
// namespace's state map (only for templates of the same kind as in), then
// among the templates associated with in itself (inline defines).
func (t *templateNamespace) newTemplateLookup(in *templateState) func(name string) *templateState {
	return func(name string) *templateState {
		if templ, found := t.templates[name]; found {
			// Text and HTML templates never resolve across kinds.
			if templ.isText() != in.isText() {
				return nil
			}
			return templ
		}
		if templ, found := findTemplateIn(name, in); found {
			return newTemplateState(templ, templateInfo{name: templ.Name()})
		}
		return nil
	}
}
+
+func (t *templateNamespace) parse(info templateInfo) (*templateState, error) {
+ t.mu.Lock()
+ defer t.mu.Unlock()
+
+ if info.isText {
+ prototype := t.prototypeText
+
+ templ, err := prototype.New(info.name).Parse(info.template)
+ if err != nil {
+ return nil, err
+ }
+
+ ts := newTemplateState(templ, info)
+
+ t.templates[info.name] = ts
+
+ return ts, nil
+ }
+
+ prototype := t.prototypeHTML
+
+ templ, err := prototype.New(info.name).Parse(info.template)
+ if err != nil {
+ return nil, err
+ }
+
+ ts := newTemplateState(templ, info)
+
+ t.templates[info.name] = ts
+
+ return ts, nil
+}
+
// templateState wraps a parsed template with Hugo-specific metadata: its
// resolved type, parse info, identity (for change tracking), and the source
// info of the template and, when applicable, its base template.
type templateState struct {
	tpl.Template

	typ       templateType
	parseInfo tpl.ParseInfo
	identity.Manager

	info     templateInfo
	baseInfo templateInfo // Set when a base template is used.
}

// ParseInfo returns metadata collected when the template was parsed.
func (t *templateState) ParseInfo() tpl.ParseInfo {
	return t.parseInfo
}

// isText reports whether this is a text (non-HTML) template.
func (t *templateState) isText() bool {
	return isText(t.Template)
}

// isText reports whether templ is a text template (as opposed to HTML).
func isText(templ tpl.Template) bool {
	_, isText := templ.(*texttemplate.Template)
	return isText
}
+
// templateStateMap is a mutex-guarded map of template name to state.
type templateStateMap struct {
	mu        sync.RWMutex
	templates map[string]*templateState
}

// templateWrapperWithLock guards a template with a shared RWMutex.
type templateWrapperWithLock struct {
	*sync.RWMutex
	tpl.Template
}

// textTemplateWrapperWithLock guards a text template with a shared RWMutex;
// used by the standalone text template (see newStandaloneTextTemplate).
type textTemplateWrapperWithLock struct {
	*sync.RWMutex
	*texttemplate.Template
}
+
// Lookup returns the named template, wrapped with the same lock.
func (t *textTemplateWrapperWithLock) Lookup(name string) (tpl.Template, bool) {
	t.RLock()
	templ := t.Template.Lookup(name)
	t.RUnlock()
	if templ == nil {
		return nil, false
	}
	return &textTemplateWrapperWithLock{
		RWMutex:  t.RWMutex,
		Template: templ,
	}, true
}

// LookupVariant is not supported for standalone text templates.
func (t *textTemplateWrapperWithLock) LookupVariant(name string, variants tpl.TemplateVariants) (tpl.Template, bool, bool) {
	panic("not supported")
}

// LookupVariants is not supported for standalone text templates.
func (t *textTemplateWrapperWithLock) LookupVariants(name string) []tpl.Template {
	panic("not supported")
}

// Parse parses tpl under the given name while holding the write lock.
func (t *textTemplateWrapperWithLock) Parse(name, tpl string) (tpl.Template, error) {
	t.Lock()
	defer t.Unlock()
	return t.Template.New(name).Parse(tpl)
}
+
// isBackupFile reports whether path looks like an editor backup file
// (trailing '~').
// Fix: the previous index-based check panicked on an empty path;
// strings.HasSuffix handles that safely.
func isBackupFile(path string) bool {
	return strings.HasSuffix(path, "~")
}
+
// isBaseTemplatePath reports whether the file's base name contains the
// "baseof" marker.
func isBaseTemplatePath(path string) bool {
	return strings.Contains(filepath.Base(path), baseFileBase)
}
+
+func isDotFile(path string) bool {
+ return filepath.Base(path)[0] == '.'
+}
+
// removeLeadingBOM removes a leading UTF-8 byte order mark, if present.
// Fix: the previous rune-loop returned the input unchanged when the string
// consisted of only the BOM; strings.TrimPrefix handles that case and is
// simpler.
func removeLeadingBOM(s string) string {
	const bom = "\ufeff"
	return strings.TrimPrefix(s, bom)
}
+
+// resolves _internal/shortcodes/param.html => param.html etc.
+func templateBaseName(typ templateType, name string) string {
+ name = strings.TrimPrefix(name, internalPathPrefix)
+ switch typ {
+ case templateShortcode:
+ return strings.TrimPrefix(name, shortcodesPathPrefix)
+ default:
+ panic("not implemented")
+ }
+}
+
+func unwrap(templ tpl.Template) tpl.Template {
+ if ts, ok := templ.(*templateState); ok {
+ return ts.Template
+ }
+ return templ
+}
+
+func templates(in tpl.Template) []tpl.Template {
+ var templs []tpl.Template
+ in = unwrap(in)
+ if textt, ok := in.(*texttemplate.Template); ok {
+ for _, t := range textt.Templates() {
+ templs = append(templs, t)
+ }
+ }
+
+ if htmlt, ok := in.(*htmltemplate.Template); ok {
+ for _, t := range htmlt.Templates() {
+ templs = append(templs, t)
+ }
+ }
+
+ return templs
+}
diff --git a/tpl/tplimpl/templateFuncster.go b/tpl/tplimpl/templateFuncster.go
new file mode 100644
index 000000000..96404f51b
--- /dev/null
+++ b/tpl/tplimpl/templateFuncster.go
@@ -0,0 +1,14 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
diff --git a/tpl/tplimpl/templateProvider.go b/tpl/tplimpl/templateProvider.go
new file mode 100644
index 000000000..933ee7dc3
--- /dev/null
+++ b/tpl/tplimpl/templateProvider.go
@@ -0,0 +1,41 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "github.com/gohugoio/hugo/deps"
+)
+
// TemplateProvider manages templates. It is stateless; all state lives in
// the deps.Deps passed to Update/Clone.
type TemplateProvider struct{}

// DefaultTemplateProvider is a globally available TemplateProvider.
var DefaultTemplateProvider *TemplateProvider
+
+// Update updates the Hugo Template System in the provided Deps
+// with all the additional features, templates & functions.
+func (*TemplateProvider) Update(d *deps.Deps) error {
+ tmpl, err := newTemplateExec(d)
+ if err != nil {
+ return err
+ }
+ return tmpl.postTransform()
+}
+
// Clone clones the template environment in d for use by another site/language.
// NOTE(review): the type assertion panics if d.Tmpl() is not a *templateExec;
// presumably Update has always run first — confirm against callers.
func (*TemplateProvider) Clone(d *deps.Deps) error {
	t := d.Tmpl().(*templateExec)
	d.SetTmpl(t.Clone(d))
	return nil
}
diff --git a/tpl/tplimpl/template_ast_transformers.go b/tpl/tplimpl/template_ast_transformers.go
new file mode 100644
index 000000000..bc02d93ac
--- /dev/null
+++ b/tpl/tplimpl/template_ast_transformers.go
@@ -0,0 +1,348 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+
+ htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+ texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+
+ "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/tpl"
+ "github.com/mitchellh/mapstructure"
+)
+
// templateType classifies a template for the special handling applied
// during the AST transformations below (shortcodes collect config/.Inner
// info, partials support a return statement).
type templateType int

const (
	templateUndefined templateType = iota
	templateShortcode
	templatePartial
)
+
// templateContext tracks state while walking and transforming a single
// template's parse tree.
type templateContext struct {
	visited          map[string]bool // template names already walked; guards against recursion
	templateNotFound map[string]bool // referenced templates not resolvable yet; retried later
	identityNotFound map[string]bool // partial identities not resolvable yet; retried later
	lookupFn         func(name string) *templateState

	// The last error encountered.
	err error

	// Set when we're done checking for config header.
	configChecked bool

	// The template being transformed.
	t *templateState

	// Store away the return node in partials.
	returnNode *parse.CommandNode
}
+
// getIfNotVisited returns the template state for name and marks it visited.
// It returns nil when name was already visited or cannot be resolved (yet).
func (c templateContext) getIfNotVisited(name string) *templateState {
	if c.visited[name] {
		return nil
	}
	c.visited[name] = true
	templ := c.lookupFn(name)
	if templ == nil {
		// This may be an inline template defined outside of this file
		// and not yet parsed. Unusual, but it happens.
		// Store the name to try again later.
		c.templateNotFound[name] = true
	}

	return templ
}
+
+func newTemplateContext(
+ t *templateState,
+ lookupFn func(name string) *templateState) *templateContext {
+ return &templateContext{
+ t: t,
+ lookupFn: lookupFn,
+ visited: make(map[string]bool),
+ templateNotFound: make(map[string]bool),
+ identityNotFound: make(map[string]bool),
+ }
+}
+
// applyTemplateTransformers walks t's parse tree, collecting parse info and
// rewriting partials that contain a return statement. It returns the context
// holding what was collected (visited templates, unresolved names, errors).
func applyTemplateTransformers(
	t *templateState,
	lookupFn func(name string) *templateState) (*templateContext, error) {
	if t == nil {
		return nil, errors.New("expected template, but none provided")
	}

	c := newTemplateContext(t, lookupFn)
	tree := getParseTree(t.Template)

	_, err := c.applyTransformations(tree.Root)

	if err == nil && c.returnNode != nil {
		// This is a partial with a return statement.
		c.t.parseInfo.HasReturn = true
		tree.Root = c.wrapInPartialReturnWrapper(tree.Root)
	}

	return c, err
}
+
+func getParseTree(templ tpl.Template) *parse.Tree {
+ templ = unwrap(templ)
+ if text, ok := templ.(*texttemplate.Template); ok {
+ return text.Tree
+ }
+ return templ.(*htmltemplate.Template).Tree
+}
+
+const (
+ // We parse this template and modify the nodes in order to assign
+ // the return value of a partial to a contextWrapper via Set. We use
+ // "range" over a one-element slice so we can shift dot to the
+ // partial's argument, Arg, while allowing Arg to be falsy.
+ partialReturnWrapperTempl = `{{ $_hugo_dot := $ }}{{ $ := .Arg }}{{ range (slice .Arg) }}{{ $_hugo_dot.Set ("PLACEHOLDER") }}{{ end }}`
+)
+
// partialReturnWrapper holds the parsed node list of partialReturnWrapperTempl.
// It is parsed once at startup and copied per partial in
// wrapInPartialReturnWrapper.
var partialReturnWrapper *parse.ListNode

func init() {
	// The wrapper template is a compile-time constant; failing to parse it
	// is a programmer error, hence the panic.
	templ, err := texttemplate.New("").Parse(partialReturnWrapperTempl)
	if err != nil {
		panic(err)
	}
	partialReturnWrapper = templ.Tree.Root
}
+
// wrapInPartialReturnWrapper copies and modifies the parsed nodes of a
// predefined partial return wrapper to insert those of a user-defined partial.
// The wrapper's PLACEHOLDER pipe is replaced with the partial's return
// expression, and the partial's own nodes are spliced in before it.
func (c *templateContext) wrapInPartialReturnWrapper(n *parse.ListNode) *parse.ListNode {
	wrapper := partialReturnWrapper.CopyList()
	// Node layout mirrors partialReturnWrapperTempl: index 2 is the range node.
	rangeNode := wrapper.Nodes[2].(*parse.RangeNode)
	retn := rangeNode.List.Nodes[0]
	setCmd := retn.(*parse.ActionNode).Pipe.Cmds[0]
	setPipe := setCmd.Args[1].(*parse.PipeNode)
	// Replace PLACEHOLDER with the real return value.
	// Note that this is a PipeNode, so it will be wrapped in parens.
	setPipe.Cmds = []*parse.CommandNode{c.returnNode}
	// NOTE(review): this append may write into n.Nodes' backing array;
	// presumably n is not used independently afterwards — confirm.
	rangeNode.List.Nodes = append(n.Nodes, retn)

	return wrapper
}
+
// applyTransformations do 2 things:
// 1) Parses partial return statement.
// 2) Tracks template (partial) dependencies and some other info.
// The bool result reports whether the node should be kept in its parent
// (false only for a partial's "return" command).
func (c *templateContext) applyTransformations(n parse.Node) (bool, error) {
	switch x := n.(type) {
	case *parse.ListNode:
		if x != nil {
			c.applyTransformationsToNodes(x.Nodes...)
		}
	case *parse.ActionNode:
		c.applyTransformationsToNodes(x.Pipe)
	case *parse.IfNode:
		c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
	case *parse.WithNode:
		c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
	case *parse.RangeNode:
		c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
	case *parse.TemplateNode:
		// Recurse into included templates; getIfNotVisited guards cycles.
		subTempl := c.getIfNotVisited(x.Name)
		if subTempl != nil {
			c.applyTransformationsToNodes(getParseTree(subTempl.Template).Root)
		}
	case *parse.PipeNode:
		c.collectConfig(x)
		// NOTE(review): removing an element of x.Cmds while ranging over it
		// shifts the remaining elements under the iteration; this looks safe
		// only because at most one command (the partial "return") is ever
		// dropped per pipe — confirm.
		for i, cmd := range x.Cmds {
			keep, _ := c.applyTransformations(cmd)
			if !keep {
				x.Cmds = append(x.Cmds[:i], x.Cmds[i+1:]...)
			}
		}

	case *parse.CommandNode:
		c.collectPartialInfo(x)
		c.collectInner(x)
		keep := c.collectReturnNode(x)

		for _, elem := range x.Args {
			switch an := elem.(type) {
			case *parse.PipeNode:
				c.applyTransformations(an)
			}
		}
		return keep, c.err
	}

	return true, c.err
}
+
+func (c *templateContext) applyTransformationsToNodes(nodes ...parse.Node) {
+ for _, node := range nodes {
+ c.applyTransformations(node)
+ }
+}
+
+func (c *templateContext) hasIdent(idents []string, ident string) bool {
+ for _, id := range idents {
+ if id == ident {
+ return true
+ }
+ }
+ return false
+}
+
// collectConfig collects and parses any leading template config variable declaration.
// This will be the first PipeNode in the template, and will be a variable declaration
// on the form:
// {{ $_hugo_config:= `{ "version": 1 }` }}
// Only shortcode templates are checked, and only the first pipe is considered.
func (c *templateContext) collectConfig(n *parse.PipeNode) {
	if c.t.typ != templateShortcode {
		return
	}
	if c.configChecked {
		return
	}
	// Whatever the first pipe is, we only look once.
	c.configChecked = true

	if len(n.Decl) != 1 || len(n.Cmds) != 1 {
		// This cannot be a config declaration
		return
	}

	v := n.Decl[0]

	if len(v.Ident) == 0 || v.Ident[0] != "$_hugo_config" {
		return
	}

	cmd := n.Cmds[0]

	if len(cmd.Args) == 0 {
		return
	}

	if s, ok := cmd.Args[0].(*parse.StringNode); ok {
		errMsg := "failed to decode $_hugo_config in template: %w"
		m, err := maps.ToStringMapE(s.Text)
		if err != nil {
			c.err = fmt.Errorf(errMsg, err)
			return
		}
		// Decode the map into the typed parse config on the template state.
		if err := mapstructure.WeakDecode(m, &c.t.parseInfo.Config); err != nil {
			c.err = fmt.Errorf(errMsg, err)
		}
	}
}
+
// collectInner determines if the given CommandNode represents a
// shortcode call to its .Inner (or .InnerDeindent) and records that on the
// template's parse info. Only shortcode templates are considered.
func (c *templateContext) collectInner(n *parse.CommandNode) {
	if c.t.typ != templateShortcode {
		return
	}
	// Already known to use .Inner, or nothing to inspect.
	if c.t.parseInfo.IsInner || len(n.Args) == 0 {
		return
	}

	for _, arg := range n.Args {
		var idents []string
		switch nt := arg.(type) {
		case *parse.FieldNode:
			idents = nt.Ident
		case *parse.VariableNode:
			idents = nt.Ident
		}

		if c.hasIdent(idents, "Inner") || c.hasIdent(idents, "InnerDeindent") {
			c.t.parseInfo.IsInner = true
			break
		}
	}
}
+
// partialRe matches the template funcs that include a partial, both the
// aliases ("partial", "partialCached") and the namespaced forms.
var partialRe = regexp.MustCompile(`^partial(Cached)?$|^partials\.Include(Cached)?$`)
+
// collectPartialInfo records a dependency from the current template to any
// partial included by the given command node, so changes to the partial can
// be propagated. Unresolvable partials are stored for a later retry.
func (c *templateContext) collectPartialInfo(x *parse.CommandNode) {
	if len(x.Args) < 2 {
		return
	}

	first := x.Args[0]
	var id string
	switch v := first.(type) {
	case *parse.IdentifierNode:
		id = v.Ident
	case *parse.ChainNode:
		id = v.String()
	}

	if partialRe.MatchString(id) {
		partialName := strings.Trim(x.Args[1].String(), "\"")
		// Bare names default to HTML partials.
		if !strings.Contains(partialName, ".") {
			partialName += ".html"
		}
		partialName = "partials/" + partialName
		info := c.lookupFn(partialName)

		if info != nil {
			c.t.Add(info)
		} else {
			// Delay for later
			c.identityNotFound[partialName] = true
		}
	}
}
+
// collectReturnNode detects a partial's "{{ return ... }}" command. It stores
// the command (with the "return" identifier stripped) on the context and
// reports false so the caller removes the node from its pipe. Only the first
// return in a partial is considered.
func (c *templateContext) collectReturnNode(n *parse.CommandNode) bool {
	if c.t.typ != templatePartial || c.returnNode != nil {
		return true
	}

	if len(n.Args) < 2 {
		return true
	}

	ident, ok := n.Args[0].(*parse.IdentifierNode)
	if !ok || ident.Ident != "return" {
		return true
	}

	c.returnNode = n
	// Remove the "return" identifiers
	c.returnNode.Args = c.returnNode.Args[1:]

	return false
}
+
+func findTemplateIn(name string, in tpl.Template) (tpl.Template, bool) {
+ in = unwrap(in)
+ if text, ok := in.(*texttemplate.Template); ok {
+ if templ := text.Lookup(name); templ != nil {
+ return templ, true
+ }
+ return nil, false
+ }
+ if templ := in.(*htmltemplate.Template).Lookup(name); templ != nil {
+ return templ, true
+ }
+ return nil, false
+}
diff --git a/tpl/tplimpl/template_ast_transformers_test.go b/tpl/tplimpl/template_ast_transformers_test.go
new file mode 100644
index 000000000..90ca325ab
--- /dev/null
+++ b/tpl/tplimpl/template_ast_transformers_test.go
@@ -0,0 +1,160 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package tplimpl
+
+import (
+ "testing"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/tpl"
+)
+
// Issue #2927
// TestTransformRecursiveTemplate checks that the AST transformation
// terminates on self- and mutually-recursive template definitions.
func TestTransformRecursiveTemplate(t *testing.T) {
	c := qt.New(t)

	recursive := `
{{ define "menu-nodes" }}
{{ template "menu-node" }}
{{ end }}
{{ define "menu-node" }}
{{ template "menu-node" }}
{{ end }}
{{ template "menu-nodes" }}
`

	templ, err := template.New("foo").Parse(recursive)
	c.Assert(err, qt.IsNil)
	ts := newTestTemplate(templ)

	ctx := newTemplateContext(
		ts,
		newTestTemplateLookup(ts),
	)
	// Success here is simply not hanging or panicking on the recursion.
	ctx.applyTransformations(templ.Tree.Root)
}
+
// newTestTemplate wraps templ in a minimal templateState for use in tests.
func newTestTemplate(templ tpl.Template) *templateState {
	return newTemplateState(
		templ,
		templateInfo{
			name: templ.Name(),
		},
	)
}
+
// newTestTemplateLookup returns a lookup func resolving names against in's
// template set, memoizing the created templateState wrappers so repeated
// lookups return the same state.
func newTestTemplateLookup(in *templateState) func(name string) *templateState {
	m := make(map[string]*templateState)
	return func(name string) *templateState {
		if in.Name() == name {
			return in
		}

		if ts, found := m[name]; found {
			return ts
		}

		if templ, found := findTemplateIn(name, in); found {
			ts := newTestTemplate(templ)
			m[name] = ts
			return ts
		}

		return nil
	}
}
+
// TestCollectInfo checks that shortcode parse metadata (.Inner usage and
// the $_hugo_config header) is collected by the AST transformations.
func TestCollectInfo(t *testing.T) {
	configStr := `{ "version": 42 }`

	tests := []struct {
		name      string
		tplString string
		expected  tpl.ParseInfo
	}{
		{"Basic Inner", `{{ .Inner }}`, tpl.ParseInfo{IsInner: true, Config: tpl.DefaultParseConfig}},
		{"Basic config map", "{{ $_hugo_config := `" + configStr + "` }}", tpl.ParseInfo{Config: tpl.ParseConfig{Version: 42}}},
	}

	echo := func(in any) any {
		return in
	}

	funcs := template.FuncMap{
		"highlight": echo,
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := qt.New(t)

			templ, err := template.New("foo").Funcs(funcs).Parse(test.tplString)
			c.Assert(err, qt.IsNil)
			ts := newTestTemplate(templ)
			// Info collection only applies to shortcode templates.
			ts.typ = templateShortcode
			ctx := newTemplateContext(
				ts,
				newTestTemplateLookup(ts),
			)
			ctx.applyTransformations(templ.Tree.Root)
			c.Assert(ctx.t.parseInfo, qt.DeepEquals, test.expected)
		})
	}
}
+
// TestPartialReturn smoke-tests the transformation of partials containing a
// return statement; full behavior is covered by functional tests elsewhere.
func TestPartialReturn(t *testing.T) {
	tests := []struct {
		name      string
		tplString string
		expected  bool
	}{
		{"Basic", `
{{ $a := "Hugo Rocks!" }}
{{ return $a }}
`, true},
		{"Expression", `
{{ return add 32 }}
`, true},
	}

	echo := func(in any) any {
		return in
	}

	funcs := template.FuncMap{
		"return": echo,
		"add":    echo,
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := qt.New(t)

			templ, err := template.New("foo").Funcs(funcs).Parse(test.tplString)
			c.Assert(err, qt.IsNil)
			ts := newTestTemplate(templ)
			ctx := newTemplateContext(
				ts,
				newTestTemplateLookup(ts),
			)

			_, err = ctx.applyTransformations(templ.Tree.Root)

			// Just check that it doesn't fail in this test. We have functional tests
			// in hugoblib.
			c.Assert(err, qt.IsNil)
		})
	}
}
diff --git a/tpl/tplimpl/template_errors.go b/tpl/tplimpl/template_errors.go
new file mode 100644
index 000000000..ac8a72df5
--- /dev/null
+++ b/tpl/tplimpl/template_errors.go
@@ -0,0 +1,64 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "fmt"
+
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/spf13/afero"
+)
+
// templateInfo carries a template's name, raw content and source location,
// the latter used to build file-context errors.
type templateInfo struct {
	name     string
	template string // the raw template source
	isText   bool   // HTML or plain text template.

	// Used to create some error context in error situations
	fs afero.Fs

	// The filename relative to the fs above.
	filename string

	// The real filename (if possible). Used for logging.
	realFilename string
}
+
// Name returns the template's name.
func (t templateInfo) Name() string {
	return t.name
}
+
// Filename returns the template's real source filename, used for logging
// and error messages.
func (t templateInfo) Filename() string {
	return t.realFilename
}
+
// IsZero reports whether this is an empty templateInfo (no name set).
func (t templateInfo) IsZero() bool {
	return t.name == ""
}
+
// resolveType derives the template type (shortcode, partial, ...) from the
// template name via resolveTemplateType.
func (t templateInfo) resolveType() templateType {
	return resolveTemplateType(t.name)
}
+
+func (info templateInfo) errWithFileContext(what string, err error) error {
+ err = fmt.Errorf(what+": %w", err)
+ fe := herrors.NewFileErrorFromName(err, info.realFilename)
+ f, err := info.fs.Open(info.filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ return fe.UpdateContent(f, nil)
+
+}
diff --git a/tpl/tplimpl/template_funcs.go b/tpl/tplimpl/template_funcs.go
new file mode 100644
index 000000000..e664bd6c5
--- /dev/null
+++ b/tpl/tplimpl/template_funcs.go
@@ -0,0 +1,204 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Portions Copyright The Go Authors.
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl
+
+import (
+ "context"
+ "reflect"
+ "strings"
+
+ "github.com/gohugoio/hugo/common/hreflect"
+ "github.com/gohugoio/hugo/common/maps"
+ "github.com/gohugoio/hugo/tpl"
+
+ template "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
+ texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
+
+ "github.com/gohugoio/hugo/deps"
+
+ "github.com/gohugoio/hugo/tpl/internal"
+
+ // Init the namespaces
+ _ "github.com/gohugoio/hugo/tpl/cast"
+ _ "github.com/gohugoio/hugo/tpl/collections"
+ _ "github.com/gohugoio/hugo/tpl/compare"
+ _ "github.com/gohugoio/hugo/tpl/crypto"
+ _ "github.com/gohugoio/hugo/tpl/data"
+ _ "github.com/gohugoio/hugo/tpl/debug"
+ _ "github.com/gohugoio/hugo/tpl/diagrams"
+ _ "github.com/gohugoio/hugo/tpl/encoding"
+ _ "github.com/gohugoio/hugo/tpl/fmt"
+ _ "github.com/gohugoio/hugo/tpl/hugo"
+ _ "github.com/gohugoio/hugo/tpl/images"
+ _ "github.com/gohugoio/hugo/tpl/inflect"
+ _ "github.com/gohugoio/hugo/tpl/js"
+ _ "github.com/gohugoio/hugo/tpl/lang"
+ _ "github.com/gohugoio/hugo/tpl/math"
+ _ "github.com/gohugoio/hugo/tpl/openapi/openapi3"
+ _ "github.com/gohugoio/hugo/tpl/os"
+ _ "github.com/gohugoio/hugo/tpl/partials"
+ _ "github.com/gohugoio/hugo/tpl/path"
+ _ "github.com/gohugoio/hugo/tpl/reflect"
+ _ "github.com/gohugoio/hugo/tpl/resources"
+ _ "github.com/gohugoio/hugo/tpl/safe"
+ _ "github.com/gohugoio/hugo/tpl/site"
+ _ "github.com/gohugoio/hugo/tpl/strings"
+ _ "github.com/gohugoio/hugo/tpl/templates"
+ _ "github.com/gohugoio/hugo/tpl/time"
+ _ "github.com/gohugoio/hugo/tpl/transform"
+ _ "github.com/gohugoio/hugo/tpl/urls"
+)
+
var (
	// Compile-time check that templateExecHelper satisfies ExecHelper.
	_ texttemplate.ExecHelper = (*templateExecHelper)(nil)

	// zero is the canonical invalid reflect.Value, returned when a
	// function, method or map key is not found.
	zero reflect.Value

	// contextInterface is the reflect.Type of context.Context, used to
	// detect funcs/methods whose first parameter is a context.
	contextInterface = reflect.TypeOf((*context.Context)(nil)).Elem()
)
+
// templateExecHelper customizes function, method and map-value resolution
// during template execution.
type templateExecHelper struct {
	running bool                     // whether we're in server mode.
	funcs   map[string]reflect.Value // template funcs by name
}
+
// GetFunc resolves a template function by name. When the function's first
// parameter is a context.Context, the execution context is returned as the
// implicit first argument.
func (t *templateExecHelper) GetFunc(ctx context.Context, tmpl texttemplate.Preparer, name string) (fn reflect.Value, firstArg reflect.Value, found bool) {
	if fn, found := t.funcs[name]; found {
		if fn.Type().NumIn() > 0 {
			first := fn.Type().In(0)
			if first.Implements(contextInterface) {
				// TODO(bep) check if we can avoid this conversion every time -- and if that matters.
				// The first argument may be context.Context. This is never provided by the end user, but it's used to pass down
				// contextual information, e.g. the top level data context (e.g. Page).
				return fn, reflect.ValueOf(ctx), true
			}
		}

		return fn, zero, true
	}
	return zero, zero, false
}
+
// Init is a no-op; this helper needs no per-execution initialization.
func (t *templateExecHelper) Init(ctx context.Context, tmpl texttemplate.Preparer) {
}
+
// GetMapValue looks up key in a map receiver. maps.Params receivers are
// matched case-insensitively (keys lowercased); all other maps use the
// standard exact lookup.
func (t *templateExecHelper) GetMapValue(ctx context.Context, tmpl texttemplate.Preparer, receiver, key reflect.Value) (reflect.Value, bool) {
	if params, ok := receiver.Interface().(maps.Params); ok {
		// Case insensitive.
		keystr := strings.ToLower(key.String())
		v, found := params[keystr]
		if !found {
			return zero, false
		}
		return reflect.ValueOf(v), true
	}

	v := receiver.MapIndex(key)

	return v, v.IsValid()
}
+
// GetMethod resolves a method on receiver by name. In server mode, GetPage
// and Render calls are redirected to their *WithTemplateInfo variants so the
// current template's info is passed along (used for dependency tracking).
// Methods whose first parameter is a context.Context get ctx bound as the
// implicit first argument.
func (t *templateExecHelper) GetMethod(ctx context.Context, tmpl texttemplate.Preparer, receiver reflect.Value, name string) (method reflect.Value, firstArg reflect.Value) {
	if t.running {
		switch name {
		case "GetPage", "Render":
			if info, ok := tmpl.(tpl.Info); ok {
				if m := receiver.MethodByName(name + "WithTemplateInfo"); m.IsValid() {
					return m, reflect.ValueOf(info)
				}
			}
		}
	}

	fn := hreflect.GetMethodByName(receiver, name)
	if !fn.IsValid() {
		return zero, zero
	}

	if fn.Type().NumIn() > 0 {
		first := fn.Type().In(0)
		if first.Implements(contextInterface) {
			// The first argument may be context.Context. This is never provided by the end user, but it's used to pass down
			// contextual information, e.g. the top level data context (e.g. Page).
			return fn, reflect.ValueOf(ctx)
		}
	}

	return fn, zero
}
+
+func newTemplateExecuter(d *deps.Deps) (texttemplate.Executer, map[string]reflect.Value) {
+ funcs := createFuncMap(d)
+ funcsv := make(map[string]reflect.Value)
+
+ for k, v := range funcs {
+ vv := reflect.ValueOf(v)
+ funcsv[k] = vv
+ }
+
+ // Duplicate Go's internal funcs here for faster lookups.
+ for k, v := range template.GoFuncs {
+ if _, exists := funcsv[k]; !exists {
+ vv, ok := v.(reflect.Value)
+ if !ok {
+ vv = reflect.ValueOf(v)
+ }
+ funcsv[k] = vv
+ }
+ }
+
+ for k, v := range texttemplate.GoFuncs {
+ if _, exists := funcsv[k]; !exists {
+ funcsv[k] = v
+ }
+ }
+
+ exeHelper := &templateExecHelper{
+ running: d.Running,
+ funcs: funcsv,
+ }
+
+ return texttemplate.NewExecuter(
+ exeHelper,
+ ), funcsv
+}
+
+func createFuncMap(d *deps.Deps) map[string]any {
+ funcMap := template.FuncMap{}
+
+ // Merge the namespace funcs
+ for _, nsf := range internal.TemplateFuncsNamespaceRegistry {
+ ns := nsf(d)
+ if _, exists := funcMap[ns.Name]; exists {
+ panic(ns.Name + " is a duplicate template func")
+ }
+ funcMap[ns.Name] = ns.Context
+ for _, mm := range ns.MethodMappings {
+ for _, alias := range mm.Aliases {
+ if _, exists := funcMap[alias]; exists {
+ panic(alias + " is a duplicate template func")
+ }
+ funcMap[alias] = mm.Method
+ }
+ }
+ }
+
+ if d.OverloadedTemplateFuncs != nil {
+ for k, v := range d.OverloadedTemplateFuncs {
+ funcMap[k] = v
+ }
+ }
+
+ return funcMap
+}
diff --git a/tpl/tplimpl/template_funcs_test.go b/tpl/tplimpl/template_funcs_test.go
new file mode 100644
index 000000000..cb1aa6feb
--- /dev/null
+++ b/tpl/tplimpl/template_funcs_test.go
@@ -0,0 +1,84 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package tplimpl_test
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
// TestTemplateFuncsExamples renders every example registered in the template
// func namespaces and asserts the documented output.
func TestTemplateFuncsExamples(t *testing.T) {
	t.Parallel()

	files := `
-- config.toml --
disableKinds=["home", "section", "taxonomy", "term", "sitemap", "robotsTXT"]
ignoreErrors = ["my-err-id"]
[outputs]
home=["HTML"]
-- layouts/partials/header.html --
<title>Hugo Rocks!</title>
-- files/README.txt --
Hugo Rocks!
-- content/blog/hugo-rocks.md --
---
title: "**BatMan**"
---
`

	// First build only to get at the Deps (and thus the registered namespaces).
	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			NeedsOsFS:   true,
		},
	).Build()

	d := b.H.Sites[0].Deps

	var (
		templates []string
		expected  []string
	)

	for _, nsf := range internal.TemplateFuncsNamespaceRegistry {
		ns := nsf(d)
		for _, mm := range ns.MethodMappings {
			for _, example := range mm.Examples {
				if strings.Contains(example[0], "errorf") {
					// This will fail the build, so skip for now.
					continue
				}
				templates = append(templates, example[0])
				expected = append(expected, example[1])
			}
		}
	}

	// Second build renders all collected examples in one single template.
	files += fmt.Sprintf("-- layouts/_default/single.html --\n%s\n", strings.Join(templates, "\n"))
	b = hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			NeedsOsFS:   true,
		},
	).Build()

	b.AssertFileContent("public/blog/hugo-rocks/index.html", expected...)
}
diff --git a/tpl/tplimpl/template_test.go b/tpl/tplimpl/template_test.go
new file mode 100644
index 000000000..5e372d986
--- /dev/null
+++ b/tpl/tplimpl/template_test.go
@@ -0,0 +1,40 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package tplimpl
+
+import (
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
// TestNeedsBaseTemplate checks detection of templates that start with a
// define block (possibly after whitespace and/or comments) and therefore
// need a base template.
func TestNeedsBaseTemplate(t *testing.T) {
	c := qt.New(t)

	c.Assert(needsBaseTemplate(`{{ define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(`{{define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(`{{- define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(`{{-define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(`
	
	{{-define "main" }}
	
	`), qt.Equals, true)
	c.Assert(needsBaseTemplate(`  {{ define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(`
{{ define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(` A  {{ define "main" }}`), qt.Equals, false)
	c.Assert(needsBaseTemplate(` {{ printf "foo" }}`), qt.Equals, false)
	c.Assert(needsBaseTemplate(`{{/* comment */}}  {{ define "main" }}`), qt.Equals, true)
	c.Assert(needsBaseTemplate(` {{/* comment */}} A  {{ define "main" }}`), qt.Equals, false)
}
diff --git a/tpl/transform/init.go b/tpl/transform/init.go
new file mode 100644
index 000000000..5c6d7da5c
--- /dev/null
+++ b/tpl/transform/init.go
@@ -0,0 +1,117 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "transform"
+
// init registers the "transform" template func namespace, mapping each
// namespace method to its template aliases and documentation examples.
func init() {
	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
		ctx := New(d)

		ns := &internal.TemplateFuncsNamespace{
			Name:    name,
			Context: func(args ...any) (any, error) { return ctx, nil },
		}

		ns.AddMethodMapping(ctx.Emojify,
			[]string{"emojify"},
			[][2]string{
				{`{{ "I :heart: Hugo" | emojify }}`, `I ❤️ Hugo`},
			},
		)

		ns.AddMethodMapping(ctx.Highlight,
			[]string{"highlight"},
			[][2]string{},
		)

		ns.AddMethodMapping(ctx.HTMLEscape,
			[]string{"htmlEscape"},
			[][2]string{
				{
					`{{ htmlEscape "Cathal Garvey & The Sunshine Band <cathal@foo.bar>" | safeHTML}}`,
					`Cathal Garvey &amp; The Sunshine Band &lt;cathal@foo.bar&gt;`,
				},
				{
					`{{ htmlEscape "Cathal Garvey & The Sunshine Band <cathal@foo.bar>"}}`,
					`Cathal Garvey &amp;amp; The Sunshine Band &amp;lt;cathal@foo.bar&amp;gt;`,
				},
				{
					`{{ htmlEscape "Cathal Garvey & The Sunshine Band <cathal@foo.bar>" | htmlUnescape | safeHTML }}`,
					`Cathal Garvey & The Sunshine Band <cathal@foo.bar>`,
				},
			},
		)

		ns.AddMethodMapping(ctx.HTMLUnescape,
			[]string{"htmlUnescape"},
			[][2]string{
				{
					`{{ htmlUnescape "Cathal Garvey &amp; The Sunshine Band &lt;cathal@foo.bar&gt;" | safeHTML}}`,
					`Cathal Garvey & The Sunshine Band <cathal@foo.bar>`,
				},
				{
					`{{"Cathal Garvey &amp;amp; The Sunshine Band &amp;lt;cathal@foo.bar&amp;gt;" | htmlUnescape | htmlUnescape | safeHTML}}`,
					`Cathal Garvey & The Sunshine Band <cathal@foo.bar>`,
				},
				{
					`{{"Cathal Garvey &amp;amp; The Sunshine Band &amp;lt;cathal@foo.bar&amp;gt;" | htmlUnescape | htmlUnescape }}`,
					`Cathal Garvey &amp; The Sunshine Band &lt;cathal@foo.bar&gt;`,
				},
				{
					`{{ htmlUnescape "Cathal Garvey &amp; The Sunshine Band &lt;cathal@foo.bar&gt;" | htmlEscape | safeHTML }}`,
					`Cathal Garvey &amp; The Sunshine Band &lt;cathal@foo.bar&gt;`,
				},
			},
		)

		ns.AddMethodMapping(ctx.Markdownify,
			[]string{"markdownify"},
			[][2]string{
				{`{{ .Title | markdownify}}`, `<strong>BatMan</strong>`},
			},
		)

		ns.AddMethodMapping(ctx.Plainify,
			[]string{"plainify"},
			[][2]string{
				{`{{ plainify "Hello <strong>world</strong>, gophers!" }}`, `Hello world, gophers!`},
			},
		)

		// Remarshal has no alias; it is only reachable as transform.Remarshal.
		ns.AddMethodMapping(ctx.Remarshal,
			nil,
			[][2]string{
				{`{{ "title = \"Hello World\"" | transform.Remarshal "json" | safeHTML }}`, "{\n   \"title\": \"Hello World\"\n}\n"},
			},
		)

		ns.AddMethodMapping(ctx.Unmarshal,
			[]string{"unmarshal"},
			[][2]string{
				{`{{ "hello = \"Hello World\"" | transform.Unmarshal }}`, "map[hello:Hello World]"},
				{`{{ "hello = \"Hello World\"" | resources.FromString "data/greetings.toml" | transform.Unmarshal }}`, "map[hello:Hello World]"},
			},
		)

		return ns
	}

	internal.AddTemplateFuncsNamespace(f)
}
diff --git a/tpl/transform/remarshal.go b/tpl/transform/remarshal.go
new file mode 100644
index 000000000..0ad54bb96
--- /dev/null
+++ b/tpl/transform/remarshal.go
@@ -0,0 +1,88 @@
+package transform
+
+import (
+ "bytes"
+ "strings"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/parser"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+ "github.com/spf13/cast"
+)
+
+// Remarshal is used in the Hugo documentation to convert configuration
+// examples from YAML to JSON, TOML (and possibly the other way around).
+// This is primarily a helper for the Hugo docs site.
+// It is not a general purpose YAML to TOML converter etc., and may
+// change without notice if it serves a purpose in the docs.
+// Format is one of json, yaml or toml.
+func (ns *Namespace) Remarshal(format string, data any) (string, error) {
+ var meta map[string]any
+
+ // Normalize the target format so "TOML", " toml " etc. all resolve.
+ format = strings.TrimSpace(strings.ToLower(format))
+
+ mark, err := toFormatMark(format)
+ if err != nil {
+ return "", err
+ }
+
+ // data is either a map (used as-is) or something castable to a string,
+ // in which case the source format is sniffed from the content itself.
+ if m, ok := data.(map[string]any); ok {
+ meta = m
+ } else {
+ from, err := cast.ToStringE(data)
+ if err != nil {
+ return "", err
+ }
+
+ from = strings.TrimSpace(from)
+ // Empty input remarshals to the empty string; this is not an error.
+ if from == "" {
+ return "", nil
+ }
+
+ fromFormat := metadecoders.Default.FormatFromContentString(from)
+ if fromFormat == "" {
+ return "", errors.New("failed to detect format from content")
+ }
+
+ meta, err = metadecoders.Default.UnmarshalToMap([]byte(from), fromFormat)
+ if err != nil {
+ return "", err
+ }
+ }
+
+ // Make it so 1.0 float64 prints as 1 etc.
+ applyMarshalTypes(meta)
+
+ var result bytes.Buffer
+ if err := parser.InterfaceToConfig(meta, mark, &result); err != nil {
+ return "", err
+ }
+
+ return result.String(), nil
+}
+
+// The unmarshal/marshal dance is extremely type lossy, and we need
+// to make sure that integer types prints as "43" and not "43.0" in
+// all formats, hence this hack.
+// applyMarshalTypes mutates m in place, recursing into nested maps.
+func applyMarshalTypes(m map[string]any) {
+ for k, v := range m {
+ switch t := v.(type) {
+ case map[string]any:
+ applyMarshalTypes(t)
+ case float64:
+ // Replace whole-number floats with int64 so they render
+ // without a fractional part.
+ i := int64(t)
+ if t == float64(i) {
+ m[k] = i
+ }
+ }
+ }
+}
+
+// toFormatMark maps a format name (e.g. "toml") to its metadecoders.Format,
+// or returns an error if the name is not a known serialization format.
+func toFormatMark(format string) (metadecoders.Format, error) {
+ if f := metadecoders.FormatFromString(format); f != "" {
+ return f, nil
+ }
+
+ return "", errors.New("failed to detect target data serialization format")
+}
diff --git a/tpl/transform/remarshal_test.go b/tpl/transform/remarshal_test.go
new file mode 100644
index 000000000..5262db591
--- /dev/null
+++ b/tpl/transform/remarshal_test.go
@@ -0,0 +1,203 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform_test
+
+import (
+ "testing"
+
+ "github.com/gohugoio/hugo/htesting"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/tpl/transform"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestRemarshal(t *testing.T) {
+ t.Parallel()
+
+ // A minimal site build is needed only to obtain a deps.Deps for the
+ // transform namespace.
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+ c := qt.New(t)
+
+ // Converts every fixture to every format (including to itself) and checks
+ // the output matches the canonical fixture for the target format.
+ c.Run("Roundtrip variants", func(c *qt.C) {
+ tomlExample := `title = 'Test Metadata'
+
+[[resources]]
+ src = '**image-4.png'
+ title = 'The Fourth Image!'
+ [resources.params]
+ byline = 'picasso'
+
+[[resources]]
+ name = 'my-cool-image-:counter'
+ src = '**.png'
+ title = 'TOML: The Image #:counter'
+ [resources.params]
+ byline = 'bep'
+`
+
+ yamlExample := `resources:
+- params:
+ byline: picasso
+ src: '**image-4.png'
+ title: The Fourth Image!
+- name: my-cool-image-:counter
+ params:
+ byline: bep
+ src: '**.png'
+ title: 'TOML: The Image #:counter'
+title: Test Metadata
+`
+
+ jsonExample := `{
+ "resources": [
+ {
+ "params": {
+ "byline": "picasso"
+ },
+ "src": "**image-4.png",
+ "title": "The Fourth Image!"
+ },
+ {
+ "name": "my-cool-image-:counter",
+ "params": {
+ "byline": "bep"
+ },
+ "src": "**.png",
+ "title": "TOML: The Image #:counter"
+ }
+ ],
+ "title": "Test Metadata"
+}
+`
+ xmlExample := `<root>
+ <resources>
+ <params>
+ <byline>picasso</byline>
+ </params>
+ <src>**image-4.png</src>
+ <title>The Fourth Image!</title>
+ </resources>
+ <resources>
+ <name>my-cool-image-:counter</name>
+ <params>
+ <byline>bep</byline>
+ </params>
+ <src>**.png</src>
+ <title>TOML: The Image #:counter</title>
+ </resources>
+ <title>Test Metadata</title>
+ </root>
+ `
+
+ variants := []struct {
+ format string
+ data string
+ }{
+ {"yaml", yamlExample},
+ {"json", jsonExample},
+ {"toml", tomlExample},
+ {"TOML", tomlExample},
+ {"Toml", tomlExample},
+ {" TOML ", tomlExample},
+ {"XML", xmlExample},
+ }
+
+ for _, v1 := range variants {
+ for _, v2 := range variants {
+ // Both from and to may be the same here, but that is fine.
+ fromTo := qt.Commentf("%s => %s", v2.format, v1.format)
+
+ converted, err := ns.Remarshal(v1.format, v2.data)
+ c.Assert(err, qt.IsNil, fromTo)
+ diff := htesting.DiffStrings(v1.data, converted)
+ if len(diff) > 0 {
+ t.Errorf("[%s] Expected \n%v\ngot\n%v\ndiff:\n%v", fromTo, v1.data, converted, diff)
+ }
+
+ }
+ }
+ })
+
+ // Comments are dropped by the unmarshal/marshal round-trip.
+ c.Run("Comments", func(c *qt.C) {
+ input := `
+Hugo = "Rules"
+
+# It really does!
+
+[m]
+# A comment
+a = "b"
+
+`
+
+ expected := `Hugo = 'Rules'
+[m]
+a = 'b'
+`
+
+ for _, format := range []string{"json", "yaml", "toml"} {
+ fromTo := qt.Commentf("%s => %s", "toml", format)
+
+ converted := input
+ var err error
+ // Do a round-trip conversion
+ for _, toFormat := range []string{format, "toml"} {
+ converted, err = ns.Remarshal(toFormat, converted)
+ c.Assert(err, qt.IsNil, fromTo)
+ }
+
+ diff := htesting.DiffStrings(expected, converted)
+ if len(diff) > 0 {
+ t.Fatalf("[%s] Expected \n%v\ngot\n>>%v\ndiff:\n%v\n", fromTo, expected, converted, diff)
+ }
+ }
+ })
+
+ // Issue 8850
+ c.Run("TOML Indent", func(c *qt.C) {
+ input := `
+
+[params]
+[params.variables]
+a = "b"
+
+`
+
+ converted, err := ns.Remarshal("toml", input)
+ c.Assert(err, qt.IsNil)
+ c.Assert(converted, qt.Equals, "[params]\n [params.variables]\n a = 'b'\n\n\n")
+ })
+
+ // Maps are passed straight through to the marshaller.
+ c.Run("Map input", func(c *qt.C) {
+ input := map[string]any{
+ "hello": "world",
+ }
+
+ output, err := ns.Remarshal("toml", input)
+ c.Assert(err, qt.IsNil)
+ c.Assert(output, qt.Equals, "hello = 'world'\n")
+ })
+
+ // Unknown target format and undetectable source content both error.
+ c.Run("Error", func(c *qt.C) {
+ _, err := ns.Remarshal("asdf", "asdf")
+ c.Assert(err, qt.Not(qt.IsNil))
+
+ _, err = ns.Remarshal("json", "asdf")
+ c.Assert(err, qt.Not(qt.IsNil))
+ })
+}
diff --git a/tpl/transform/transform.go b/tpl/transform/transform.go
new file mode 100644
index 000000000..36508c428
--- /dev/null
+++ b/tpl/transform/transform.go
@@ -0,0 +1,151 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package transform provides template functions for transforming content.
+package transform
+
+import (
+ "html"
+ "html/template"
+
+ "github.com/alecthomas/chroma/v2/lexers"
+ "github.com/gohugoio/hugo/cache/namedmemcache"
+ "github.com/gohugoio/hugo/markup/converter/hooks"
+ "github.com/gohugoio/hugo/markup/highlight"
+ "github.com/gohugoio/hugo/tpl"
+
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the transform-namespaced template functions.
+// The namespace's cache (used by Unmarshal) is cleared at the start of
+// every build.
+func New(deps *deps.Deps) *Namespace {
+ cache := namedmemcache.New()
+ deps.BuildStartListeners.Add(
+ func() {
+ cache.Clear()
+ })
+
+ return &Namespace{
+ cache: cache,
+ deps: deps,
+ }
+}
+
+// Namespace provides template functions for the "transform" namespace.
+type Namespace struct {
+ cache *namedmemcache.Cache // caches Unmarshal results; cleared on build start and Reset
+ deps *deps.Deps
+}
+
+// Emojify returns a copy of s with all emoji codes replaced with actual emojis.
+//
+// See http://www.emoji-cheat-sheet.com/
+//
+// Returns an error if s cannot be cast to a string.
+func (ns *Namespace) Emojify(s any) (template.HTML, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return template.HTML(helpers.Emojify([]byte(ss))), nil
+}
+
+// Highlight returns a copy of s as an HTML string with syntax
+// highlighting applied.
+// opts optionally carries highlighter options (only the first is used).
+func (ns *Namespace) Highlight(s any, lang string, opts ...any) (template.HTML, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ var optsv any
+ if len(opts) > 0 {
+ optsv = opts[0]
+ }
+
+ hl := ns.deps.ContentSpec.Converters.GetHighlighter()
+ // NOTE(review): the highlighter error is discarded; presumably a
+ // best-effort fallback to unhighlighted output — confirm intent.
+ highlighted, _ := hl.Highlight(ss, lang, optsv)
+ return template.HTML(highlighted), nil
+}
+
+// HighlightCodeBlock highlights a code block on the form received in the codeblock render hooks.
+// opts optionally carries highlighter options (only the first is used).
+func (ns *Namespace) HighlightCodeBlock(ctx hooks.CodeblockContext, opts ...any) (highlight.HightlightResult, error) {
+ var optsv any
+ if len(opts) > 0 {
+ optsv = opts[0]
+ }
+
+ hl := ns.deps.ContentSpec.Converters.GetHighlighter()
+
+ return hl.HighlightCodeBlock(ctx, optsv)
+}
+
+// CanHighlight returns whether the given code language is supported by the Chroma highlighter.
+func (ns *Namespace) CanHighlight(language string) bool {
+ return lexers.Get(language) != nil
+}
+
+// HTMLEscape returns a copy of s with reserved HTML characters escaped.
+// Returns an error if s cannot be cast to a string.
+func (ns *Namespace) HTMLEscape(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return html.EscapeString(ss), nil
+}
+
+// HTMLUnescape returns a copy of s with HTML escape sequences converted to plain
+// text.
+// Returns an error if s cannot be cast to a string.
+func (ns *Namespace) HTMLUnescape(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return html.UnescapeString(ss), nil
+}
+
+// Markdownify renders s from Markdown to HTML.
+// Rendering is delegated to the home page's RenderString, so the site's
+// configured markup handler and settings apply.
+func (ns *Namespace) Markdownify(s any) (template.HTML, error) {
+
+ home := ns.deps.Site.Home()
+ if home == nil {
+ panic("home must not be nil")
+ }
+ ss, err := home.RenderString(s)
+ if err != nil {
+ return "", err
+ }
+
+ // Strip if this is a short inline type of text.
+ bb := ns.deps.ContentSpec.TrimShortHTML([]byte(ss))
+
+ return helpers.BytesToHTML(bb), nil
+}
+
+// Plainify returns a copy of s with all HTML tags removed.
+// Returns an error if s cannot be cast to a string.
+func (ns *Namespace) Plainify(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return tpl.StripHTML(ss), nil
+}
+
+// Reset clears the namespace's cache.
+// For internal use.
+func (ns *Namespace) Reset() {
+ ns.cache.Clear()
+}
diff --git a/tpl/transform/transform_test.go b/tpl/transform/transform_test.go
new file mode 100644
index 000000000..edef4e1bd
--- /dev/null
+++ b/tpl/transform/transform_test.go
@@ -0,0 +1,273 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform_test
+
+import (
+ "html/template"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/common/loggers"
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/tpl/transform"
+ "github.com/spf13/afero"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/hugofs"
+ "github.com/gohugoio/hugo/langs"
+)
+
+// tstNoStringer is an argument that cannot be cast to a string, used to
+// exercise the error paths in the tests below.
+type tstNoStringer struct{}
+
+// TestEmojify covers emoji replacement and the non-stringer error path.
+func TestEmojify(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {":notamoji:", template.HTML(":notamoji:")},
+ {"I :heart: Hugo", template.HTML("I ❤️ Hugo")},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Emojify(test.s)
+
+ // expect == false marks a case that must return an error.
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+// TestHighlight checks that highlighted output contains expected markers;
+// exact Chroma output is not pinned.
+func TestHighlight(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ for _, test := range []struct {
+ s any
+ lang string
+ opts any
+ expect any
+ }{
+ {"func boo() {}", "go", "", "boo"},
+ {"func boo() {}", "go", nil, "boo"},
+ // Issue #4179
+ {`<Foo attr=" &lt; "></Foo>`, "xml", "", `&amp;lt;`},
+ {tstNoStringer{}, "go", "", false},
+ // Issue #9591
+ {strings.Repeat("AAA \n", 10), "bash", template.HTML("linenos=true,noClasses=false"), "line"},
+ } {
+
+ result, err := ns.Highlight(test.s, test.lang, test.opts)
+
+ // expect == false marks a case that must return an error.
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ b.Assert(err, qt.IsNil)
+ b.Assert(string(result), qt.Contains, test.expect.(string))
+ }
+}
+
+// TestCanHighlight needs no deps: CanHighlight only consults Chroma's lexers.
+func TestCanHighlight(t *testing.T) {
+ t.Parallel()
+
+ c := qt.New(t)
+ ns := &transform.Namespace{}
+
+ c.Assert(ns.CanHighlight("go"), qt.Equals, true)
+ c.Assert(ns.CanHighlight("foo"), qt.Equals, false)
+}
+
+// TestHTMLEscape covers escaping of reserved characters and the
+// non-stringer error path.
+func TestHTMLEscape(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {`"Foo & Bar's Diner" <y@z>`, `&#34;Foo &amp; Bar&#39;s Diner&#34; &lt;y@z&gt;`},
+ {"Hugo & Caddy > Wordpress & Apache", "Hugo &amp; Caddy &gt; Wordpress &amp; Apache"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.HTMLEscape(test.s)
+
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+// TestHTMLUnescape covers entity decoding and the non-stringer error path.
+func TestHTMLUnescape(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {`&quot;Foo &amp; Bar&#39;s Diner&quot; &lt;y@z&gt;`, `"Foo & Bar's Diner" <y@z>`},
+ {"Hugo &amp; Caddy &gt; Wordpress &amp; Apache", "Hugo & Caddy > Wordpress & Apache"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.HTMLUnescape(test.s)
+
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+// TestMarkdownify covers string and []byte input plus the non-stringer
+// error path.
+func TestMarkdownify(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"Hello **World!**", template.HTML("Hello <strong>World!</strong>")},
+ {[]byte("Hello Bytes **World!**"), template.HTML("Hello Bytes <strong>World!</strong>")},
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Markdownify(test.s)
+
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+// Issue #3040
+// Multi-paragraph input must keep its block-level wrappers (TrimShortHTML
+// only strips for short inline text).
+func TestMarkdownifyBlocksOfText(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ text := `
+#First
+
+This is some *bold* text.
+
+## Second
+
+This is some more text.
+
+And then some.
+`
+
+ result, err := ns.Markdownify(text)
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, template.HTML(
+ "<p>#First</p>\n<p>This is some <em>bold</em> text.</p>\n<h2 id=\"second\">Second</h2>\n<p>This is some more text.</p>\n<p>And then some.</p>\n"))
+}
+
+// TestPlainify covers tag stripping and the non-stringer error path.
+func TestPlainify(t *testing.T) {
+ t.Parallel()
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ for _, test := range []struct {
+ s any
+ expect any
+ }{
+ {"<em>Note:</em> blah <b>blah</b>", "Note: blah blah"},
+ {"<div data-action='click->my-controller#doThing'>qwe</div>", "qwe"},
+ // errors
+ {tstNoStringer{}, false},
+ } {
+
+ result, err := ns.Plainify(test.s)
+
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ continue
+ }
+
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, test.expect)
+ }
+}
+
+// newDeps builds a minimal deps.Deps from a raw config.
+// NOTE(review): not referenced by the tests visible in this file (they use
+// the integration test builder) — possibly dead code; confirm before removing.
+func newDeps(cfg config.Provider) *deps.Deps {
+ cfg.Set("contentDir", "content")
+ cfg.Set("i18nDir", "i18n")
+
+ l := langs.NewLanguage("en", cfg)
+
+ cs, err := helpers.NewContentSpec(l, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
+ if err != nil {
+ panic(err)
+ }
+
+ return &deps.Deps{
+ Cfg: cfg,
+ Fs: hugofs.NewMem(l),
+ ContentSpec: cs,
+ }
+}
diff --git a/tpl/transform/unmarshal.go b/tpl/transform/unmarshal.go
new file mode 100644
index 000000000..340235fa4
--- /dev/null
+++ b/tpl/transform/unmarshal.go
@@ -0,0 +1,170 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform
+
+import (
+ "fmt"
+ "io/ioutil"
+ "strings"
+
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/common/types"
+
+ "github.com/mitchellh/mapstructure"
+
+ "errors"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/parser/metadecoders"
+
+ "github.com/spf13/cast"
+)
+
+// Unmarshal unmarshals the data given, which can be either a string, json.RawMessage
+// or a Resource. Supported formats are JSON, TOML, YAML, and CSV.
+// You can optionally provide an options map as the first argument.
+func (ns *Namespace) Unmarshal(args ...any) (any, error) {
+ if len(args) < 1 || len(args) > 2 {
+ return nil, errors.New("unmarshal takes 1 or 2 arguments")
+ }
+
+ var data any
+ decoder := metadecoders.Default
+
+ // With two args, the first is an options map (e.g. CSV delimiter/comment)
+ // and the second is the data.
+ if len(args) == 1 {
+ data = args[0]
+ } else {
+ m, ok := args[0].(map[string]any)
+ if !ok {
+ return nil, errors.New("first argument must be a map")
+ }
+
+ var err error
+
+ data = args[1]
+ decoder, err = decodeDecoder(m)
+ if err != nil {
+ return nil, fmt.Errorf("failed to decode options: %w", err)
+ }
+ }
+
+ // Resources are decoded by media type and cached under their Key (plus
+ // the options key when non-default decoder options are in effect).
+ if r, ok := data.(resource.UnmarshableResource); ok {
+ key := r.Key()
+
+ if key == "" {
+ return nil, errors.New("no Key set in Resource")
+ }
+
+ if decoder != metadecoders.Default {
+ key += decoder.OptionsKey()
+ }
+
+ return ns.cache.GetOrCreate(key, func() (any, error) {
+ f := metadecoders.FormatFromMediaType(r.MediaType())
+ if f == "" {
+ return nil, fmt.Errorf("MIME %q not supported", r.MediaType())
+ }
+
+ reader, err := r.ReadSeekCloser()
+ if err != nil {
+ return nil, err
+ }
+ defer reader.Close()
+
+ // NOTE(review): ioutil.ReadAll is deprecated since Go 1.16;
+ // io.ReadAll is the drop-in replacement.
+ b, err := ioutil.ReadAll(reader)
+ if err != nil {
+ return nil, err
+ }
+
+ return decoder.Unmarshal(b, f)
+ })
+ }
+
+ // String-ish data is cached by content hash; the format is sniffed from
+ // the content itself.
+ dataStr, err := types.ToStringE(data)
+ if err != nil {
+ return nil, fmt.Errorf("type %T not supported", data)
+ }
+
+ if dataStr == "" {
+ return nil, errors.New("no data to transform")
+ }
+
+ key := helpers.MD5String(dataStr)
+
+ return ns.cache.GetOrCreate(key, func() (any, error) {
+ f := decoder.FormatFromContentString(dataStr)
+ if f == "" {
+ return nil, errors.New("unknown format")
+ }
+
+ return decoder.Unmarshal([]byte(dataStr), f)
+ })
+}
+
+// decodeDecoder builds a metadecoders.Decoder from an options map.
+// Keys are matched case-insensitively; unrecognized keys are handled by
+// mapstructure's weak decoding.
+func decodeDecoder(m map[string]any) (metadecoders.Decoder, error) {
+ opts := metadecoders.Default
+
+ if m == nil {
+ return opts, nil
+ }
+
+ // mapstructure does not support string to rune conversion, so do that manually.
+ // See https://github.com/mitchellh/mapstructure/issues/151
+ for k, v := range m {
+ if strings.EqualFold(k, "Delimiter") {
+ r, err := stringToRune(v)
+ if err != nil {
+ return opts, err
+ }
+ opts.Delimiter = r
+ // Remove the handled key so WeakDecode below does not see it.
+ delete(m, k)
+
+ } else if strings.EqualFold(k, "Comment") {
+ r, err := stringToRune(v)
+ if err != nil {
+ return opts, err
+ }
+ opts.Comment = r
+ delete(m, k)
+ }
+ }
+
+ err := mapstructure.WeakDecode(m, &opts)
+
+ return opts, err
+}
+
+// stringToRune converts v to a single rune. The empty string yields the
+// zero rune; a string of more than one rune is an error.
+func stringToRune(v any) (rune, error) {
+ s, err := cast.ToStringE(v)
+ if err != nil {
+ return 0, err
+ }
+
+ if len(s) == 0 {
+ return 0, nil
+ }
+
+ var r rune
+
+ // Range decodes UTF-8, so multi-byte runes are handled correctly;
+ // any second rune is rejected.
+ for i, rr := range s {
+ if i == 0 {
+ r = rr
+ } else {
+ return 0, fmt.Errorf("invalid character: %q", v)
+ }
+ }
+
+ return r, nil
+}
diff --git a/tpl/transform/unmarshal_test.go b/tpl/transform/unmarshal_test.go
new file mode 100644
index 000000000..e63f96de2
--- /dev/null
+++ b/tpl/transform/unmarshal_test.go
@@ -0,0 +1,233 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform_test
+
+import (
+ "fmt"
+ "math/rand"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/hugolib"
+ "github.com/gohugoio/hugo/tpl/transform"
+
+ "github.com/gohugoio/hugo/common/hugio"
+ "github.com/gohugoio/hugo/resources/resource"
+
+ "github.com/gohugoio/hugo/media"
+
+ qt "github.com/frankban/quicktest"
+)
+
+const (
+ testJSON = `
+
+{
+ "ROOT_KEY": {
+ "title": "example glossary",
+ "GlossDiv": {
+ "title": "S",
+ "GlossList": {
+ "GlossEntry": {
+ "ID": "SGML",
+ "SortAs": "SGML",
+ "GlossTerm": "Standard Generalized Markup Language",
+ "Acronym": "SGML",
+ "Abbrev": "ISO 8879:1986",
+ "GlossDef": {
+ "para": "A meta-markup language, used to create markup languages such as DocBook.",
+ "GlossSeeAlso": ["GML", "XML"]
+ },
+ "GlossSee": "markup"
+ }
+ }
+ }
+ }
+}
+
+ `
+)
+
+var _ resource.ReadSeekCloserResource = (*testContentResource)(nil)
+
+// testContentResource is a minimal in-memory Resource used to exercise the
+// UnmarshableResource code path in Unmarshal.
+type testContentResource struct {
+ content string
+ mime media.Type
+
+ key string
+}
+
+func (t testContentResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
+ return hugio.NewReadSeekerNoOpCloserFromString(t.content), nil
+}
+
+func (t testContentResource) MediaType() media.Type {
+ return t.mime
+}
+
+// Key is used by Unmarshal as the cache key.
+func (t testContentResource) Key() string {
+ return t.key
+}
+
+// TestUnmarshal covers string and Resource input across JSON/YAML/TOML/XML/CSV,
+// decoder options, and the error paths.
+func TestUnmarshal(t *testing.T) {
+ b := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: t},
+ ).Build()
+
+ ns := transform.New(b.H.Deps)
+
+ assertSlogan := func(m map[string]any) {
+ b.Assert(m["slogan"], qt.Equals, "Hugo Rocks!")
+ }
+
+ for _, test := range []struct {
+ data any
+ options any
+ expect any
+ }{
+ {`{ "slogan": "Hugo Rocks!" }`, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {`slogan: "Hugo Rocks!"`, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {`slogan = "Hugo Rocks!"`, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {testContentResource{key: "r1", content: `slogan: "Hugo Rocks!"`, mime: media.YAMLType}, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {testContentResource{key: "r1", content: `{ "slogan": "Hugo Rocks!" }`, mime: media.JSONType}, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {testContentResource{key: "r1", content: `slogan = "Hugo Rocks!"`, mime: media.TOMLType}, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {testContentResource{key: "r1", content: `<root><slogan>Hugo Rocks!</slogan></root>"`, mime: media.XMLType}, nil, func(m map[string]any) {
+ assertSlogan(m)
+ }},
+ {testContentResource{key: "r1", content: `1997,Ford,E350,"ac, abs, moon",3000.00
+1999,Chevy,"Venture ""Extended Edition""","",4900.00`, mime: media.CSVType}, nil, func(r [][]string) {
+ b.Assert(len(r), qt.Equals, 2)
+ first := r[0]
+ b.Assert(len(first), qt.Equals, 5)
+ b.Assert(first[1], qt.Equals, "Ford")
+ }},
+ {testContentResource{key: "r1", content: `a;b;c`, mime: media.CSVType}, map[string]any{"delimiter": ";"}, func(r [][]string) {
+ b.Assert([][]string{{"a", "b", "c"}}, qt.DeepEquals, r)
+ }},
+ {"a,b,c", nil, func(r [][]string) {
+ b.Assert([][]string{{"a", "b", "c"}}, qt.DeepEquals, r)
+ }},
+ {"a;b;c", map[string]any{"delimiter": ";"}, func(r [][]string) {
+ b.Assert([][]string{{"a", "b", "c"}}, qt.DeepEquals, r)
+ }},
+ {testContentResource{key: "r1", content: `
+% This is a comment
+a;b;c`, mime: media.CSVType}, map[string]any{"DElimiter": ";", "Comment": "%"}, func(r [][]string) {
+ b.Assert([][]string{{"a", "b", "c"}}, qt.DeepEquals, r)
+ }},
+ // errors
+ {"thisisnotavaliddataformat", nil, false},
+ {testContentResource{key: "r1", content: `invalid&toml"`, mime: media.TOMLType}, nil, false},
+ {testContentResource{key: "r1", content: `unsupported: MIME"`, mime: media.CalendarType}, nil, false},
+ {"thisisnotavaliddataformat", nil, false},
+ {`{ notjson }`, nil, false},
+ {tstNoStringer{}, nil, false},
+ } {
+
+ // Reset the cache between cases; many cases reuse the key "r1".
+ ns.Reset()
+
+ var args []any
+
+ if test.options != nil {
+ args = []any{test.options, test.data}
+ } else {
+ args = []any{test.data}
+ }
+
+ result, err := ns.Unmarshal(args...)
+
+ // expect is either false (must error) or a func asserting the result.
+ if bb, ok := test.expect.(bool); ok && !bb {
+ b.Assert(err, qt.Not(qt.IsNil))
+ } else if fn, ok := test.expect.(func(m map[string]any)); ok {
+ b.Assert(err, qt.IsNil)
+ m, ok := result.(map[string]any)
+ b.Assert(ok, qt.Equals, true)
+ fn(m)
+ } else if fn, ok := test.expect.(func(r [][]string)); ok {
+ b.Assert(err, qt.IsNil)
+ r, ok := result.([][]string)
+ b.Assert(ok, qt.Equals, true)
+ fn(r)
+ } else {
+ b.Assert(err, qt.IsNil)
+ b.Assert(result, qt.Equals, test.expect)
+ }
+
+ }
+}
+
+// BenchmarkUnmarshalString measures Unmarshal on cached string input;
+// distinct root keys give distinct cache entries.
+func BenchmarkUnmarshalString(b *testing.B) {
+ bb := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: b},
+ ).Build()
+
+ ns := transform.New(bb.H.Deps)
+
+ const numJsons = 100
+
+ var jsons [numJsons]string
+ for i := 0; i < numJsons; i++ {
+ jsons[i] = strings.Replace(testJSON, "ROOT_KEY", fmt.Sprintf("root%d", i), 1)
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ result, err := ns.Unmarshal(jsons[rand.Intn(numJsons)])
+ if err != nil {
+ b.Fatal(err)
+ }
+ if result == nil {
+ b.Fatal("no result")
+ }
+ }
+}
+
+// BenchmarkUnmarshalResource measures Unmarshal on the Resource code path,
+// which caches by Resource.Key.
+func BenchmarkUnmarshalResource(b *testing.B) {
+ bb := hugolib.NewIntegrationTestBuilder(
+ hugolib.IntegrationTestConfig{T: b},
+ ).Build()
+
+ ns := transform.New(bb.H.Deps)
+
+ const numJsons = 100
+
+ var jsons [numJsons]testContentResource
+ for i := 0; i < numJsons; i++ {
+ key := fmt.Sprintf("root%d", i)
+ jsons[i] = testContentResource{key: key, content: strings.Replace(testJSON, "ROOT_KEY", key, 1), mime: media.JSONType}
+ }
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ result, err := ns.Unmarshal(jsons[rand.Intn(numJsons)])
+ if err != nil {
+ b.Fatal(err)
+ }
+ if result == nil {
+ b.Fatal("no result")
+ }
+ }
+}
diff --git a/tpl/urls/init.go b/tpl/urls/init.go
new file mode 100644
index 000000000..3597e87c5
--- /dev/null
+++ b/tpl/urls/init.go
@@ -0,0 +1,73 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package urls
+
+import (
+ "github.com/gohugoio/hugo/deps"
+ "github.com/gohugoio/hugo/tpl/internal"
+)
+
+const name = "urls"
+
+// init registers the "urls" namespace with the template-function registry.
+func init() {
+ f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
+ ctx := New(d)
+
+ ns := &internal.TemplateFuncsNamespace{
+ Name: name,
+ Context: func(args ...any) (any, error) { return ctx, nil },
+ }
+
+ // Most mappings ship without doc examples (empty [][2]string).
+ ns.AddMethodMapping(ctx.AbsURL,
+ []string{"absURL"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.AbsLangURL,
+ []string{"absLangURL"},
+ [][2]string{},
+ )
+ ns.AddMethodMapping(ctx.Ref,
+ []string{"ref"},
+ [][2]string{},
+ )
+ ns.AddMethodMapping(ctx.RelURL,
+ []string{"relURL"},
+ [][2]string{},
+ )
+ ns.AddMethodMapping(ctx.RelLangURL,
+ []string{"relLangURL"},
+ [][2]string{},
+ )
+ ns.AddMethodMapping(ctx.RelRef,
+ []string{"relref"},
+ [][2]string{},
+ )
+ ns.AddMethodMapping(ctx.URLize,
+ []string{"urlize"},
+ [][2]string{},
+ )
+
+ ns.AddMethodMapping(ctx.Anchorize,
+ []string{"anchorize"},
+ [][2]string{
+ {`{{ "This is a title" | anchorize }}`, `this-is-a-title`},
+ },
+ )
+
+ return ns
+ }
+
+ internal.AddTemplateFuncsNamespace(f)
+}
diff --git a/tpl/urls/urls.go b/tpl/urls/urls.go
new file mode 100644
index 000000000..bfbd7304f
--- /dev/null
+++ b/tpl/urls/urls.go
@@ -0,0 +1,187 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package urls provides template functions to deal with URLs.
+package urls
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "net/url"
+
+ "github.com/gohugoio/hugo/common/urls"
+ "github.com/gohugoio/hugo/deps"
+ "github.com/spf13/cast"
+)
+
+// New returns a new instance of the urls-namespaced template functions.
+func New(deps *deps.Deps) *Namespace {
+ return &Namespace{
+ deps: deps,
+ multihost: deps.Cfg.GetBool("multihost"),
+ }
+}
+
// Namespace provides template functions for the "urls" namespace.
type Namespace struct {
	deps *deps.Deps // site dependencies (PathSpec, ContentSpec, config)
	multihost bool // set from the "multihost" config flag in New
}
+
+// AbsURL takes the string s and converts it to an absolute URL.
+func (ns *Namespace) AbsURL(s any) (template.HTML, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", nil
+ }
+
+ return template.HTML(ns.deps.PathSpec.AbsURL(ss, false)), nil
+}
+
+// Parse parses rawurl into a URL structure. The rawurl may be relative or
+// absolute.
+func (ns *Namespace) Parse(rawurl any) (*url.URL, error) {
+ s, err := cast.ToStringE(rawurl)
+ if err != nil {
+ return nil, fmt.Errorf("Error in Parse: %w", err)
+ }
+
+ return url.Parse(s)
+}
+
+// RelURL takes the string s and prepends the relative path according to a
+// page's position in the project directory structure.
+func (ns *Namespace) RelURL(s any) (template.HTML, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", nil
+ }
+
+ return template.HTML(ns.deps.PathSpec.RelURL(ss, false)), nil
+}
+
+// URLize returns the the strings s formatted as an URL.
+func (ns *Namespace) URLize(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", nil
+ }
+ return ns.deps.PathSpec.URLize(ss), nil
+}
+
+// Anchorize creates sanitized anchor name version of the string s that is compatible
+// with how your configured markdown renderer does it.
+func (ns *Namespace) Anchorize(s any) (string, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", nil
+ }
+ return ns.deps.ContentSpec.SanitizeAnchorName(ss), nil
+}
+
+// Ref returns the absolute URL path to a given content item from Page p.
+func (ns *Namespace) Ref(p any, args any) (template.HTML, error) {
+ pp, ok := p.(urls.RefLinker)
+ if !ok {
+ return "", errors.New("invalid Page received in Ref")
+ }
+ argsm, err := ns.refArgsToMap(args)
+ if err != nil {
+ return "", err
+ }
+ s, err := pp.Ref(argsm)
+ return template.HTML(s), err
+}
+
+// RelRef returns the relative URL path to a given content item from Page p.
+func (ns *Namespace) RelRef(p any, args any) (template.HTML, error) {
+ pp, ok := p.(urls.RefLinker)
+ if !ok {
+ return "", errors.New("invalid Page received in RelRef")
+ }
+ argsm, err := ns.refArgsToMap(args)
+ if err != nil {
+ return "", err
+ }
+
+ s, err := pp.RelRef(argsm)
+ return template.HTML(s), err
+}
+
+func (ns *Namespace) refArgsToMap(args any) (map[string]any, error) {
+ var (
+ s string
+ of string
+ )
+
+ v := args
+ if _, ok := v.([]any); ok {
+ v = cast.ToStringSlice(v)
+ }
+
+ switch v := v.(type) {
+ case map[string]any:
+ return v, nil
+ case map[string]string:
+ m := make(map[string]any)
+ for k, v := range v {
+ m[k] = v
+ }
+ return m, nil
+ case []string:
+ if len(v) == 0 || len(v) > 2 {
+ return nil, fmt.Errorf("invalid number of arguments to ref")
+ }
+ // These where the options before we introduced the map type:
+ s = v[0]
+ if len(v) == 2 {
+ of = v[1]
+ }
+ default:
+ var err error
+ s, err = cast.ToStringE(args)
+ if err != nil {
+ return nil, err
+ }
+
+ }
+
+ return map[string]any{
+ "path": s,
+ "outputFormat": of,
+ }, nil
+}
+
+// RelLangURL takes the string s and prepends the relative path according to a
+// page's position in the project directory structure and the current language.
+func (ns *Namespace) RelLangURL(s any) (template.HTML, error) {
+ ss, err := cast.ToStringE(s)
+ if err != nil {
+ return "", err
+ }
+
+ return template.HTML(ns.deps.PathSpec.RelURL(ss, !ns.multihost)), nil
+}
+
// AbsLangURL takes the string s and converts it to an absolute URL according
// to a page's position in the project directory structure and the current
// language.
func (ns *Namespace) AbsLangURL(s any) (template.HTML, error) {
	ss, err := cast.ToStringE(s)
	if err != nil {
		return "", err
	}

	// The language component is only added when not in multihost mode —
	// presumably each language has its own baseURL then (confirm in
	// PathSpec.AbsURL).
	return template.HTML(ns.deps.PathSpec.AbsURL(ss, !ns.multihost)), nil
}
diff --git a/tpl/urls/urls_test.go b/tpl/urls/urls_test.go
new file mode 100644
index 000000000..73b5cd141
--- /dev/null
+++ b/tpl/urls/urls_test.go
@@ -0,0 +1,70 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package urls
+
+import (
+ "net/url"
+ "testing"
+
+ "github.com/gohugoio/hugo/config"
+
+ "github.com/gohugoio/hugo/htesting/hqt"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/deps"
+)
+
// ns is the shared Namespace under test, backed by an empty config.
var ns = New(&deps.Deps{Cfg: config.New()})

// tstNoStringer is a value cast.ToStringE cannot convert; used to exercise
// error paths.
type tstNoStringer struct{}
+
// TestParse covers absolute URLs, URLs with userinfo, and non-stringable
// input (which must return an error).
func TestParse(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	for _, test := range []struct {
		rawurl any
		expect any // *url.URL on success; false marks an expected error
	}{
		{
			"http://www.google.com",
			&url.URL{
				Scheme: "http",
				Host:   "www.google.com",
			},
		},
		{
			"http://j@ne:password@google.com",
			&url.URL{
				Scheme: "http",
				User:   url.UserPassword("j@ne", "password"),
				Host:   "google.com",
			},
		},
		// errors
		{tstNoStringer{}, false},
	} {

		result, err := ns.Parse(test.rawurl)

		// expect == false marks an error case.
		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil))
			continue
		}

		c.Assert(err, qt.IsNil)
		// url.Userinfo has unexported fields, hence DeepAllowUnexported.
		c.Assert(result,
			qt.CmpEquals(hqt.DeepAllowUnexported(&url.URL{}, url.Userinfo{})), test.expect)
	}
}
diff --git a/transform/chain.go b/transform/chain.go
new file mode 100644
index 000000000..a5f042d96
--- /dev/null
+++ b/transform/chain.go
@@ -0,0 +1,125 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform
+
+import (
+ "bytes"
+ "io"
+ "io/ioutil"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+ "github.com/gohugoio/hugo/common/herrors"
+ "github.com/gohugoio/hugo/hugofs"
+)
+
// Transformer is the func that needs to be implemented by a transformation step.
type Transformer func(ft FromTo) error

// BytesReader wraps the Bytes method, usually implemented by bytes.Buffer, and an
// io.Reader.
type BytesReader interface {
	// The slice given by Bytes is valid for use only until the next buffer modification.
	// That is, if you want to use this value outside of the current transformer step,
	// you need to take a copy.
	Bytes() []byte

	io.Reader
}

// FromTo is sent to each transformation step in the chain.
type FromTo interface {
	// From is the output of the previous step (or the original input).
	From() BytesReader
	// To receives this step's output.
	To() io.Writer
}

// Chain is an ordered processing chain. The next transform operation will
// receive the output from the previous.
type Chain []Transformer
+
+// New creates a content transformer chain given the provided transform funcs.
+func New(trs ...Transformer) Chain {
+ return trs
+}
+
+// NewEmpty creates a new slice of transformers with a capacity of 20.
+func NewEmpty() Chain {
+ return make(Chain, 0, 20)
+}
+
// fromToBuffer is the buffer-backed FromTo implementation used by Chain.Apply.
// Content is read from the from-buffer and written to the to-buffer.
type fromToBuffer struct {
	from *bytes.Buffer
	to *bytes.Buffer
}

func (ft fromToBuffer) From() BytesReader {
	return ft.from
}

func (ft fromToBuffer) To() io.Writer {
	return ft.to
}
+
// Apply passes the given from io.Reader through the transformation chain.
// The result is written to to.
func (c *Chain) Apply(to io.Writer, from io.Reader) error {
	// Fast path: an empty chain just streams input to output.
	if len(*c) == 0 {
		_, err := io.Copy(to, from)
		return err
	}

	// Two pooled buffers are ping-ponged between the steps: each step reads
	// from one and writes into the other.
	b1 := bp.GetBuffer()
	defer bp.PutBuffer(b1)

	if _, err := b1.ReadFrom(from); err != nil {
		return err
	}

	b2 := bp.GetBuffer()
	defer bp.PutBuffer(b2)

	fb := &fromToBuffer{from: b1, to: b2}

	for i, tr := range *c {
		if i > 0 {
			// Swap roles: the previous step's output becomes this step's
			// input, and the other buffer is reset to receive new output.
			if fb.from == b1 {
				fb.from = b2
				fb.to = b1
				fb.to.Reset()
			} else {
				fb.from = b1
				fb.to = b2
				fb.to.Reset()
			}
		}

		if err := tr(fb); err != nil {
			// Write output to a temp file so it can be read by the user for trouble shooting.
			// NOTE(review): ioutil.TempFile is deprecated since Go 1.16;
			// consider os.CreateTemp (would need an "os" import).
			filename := "output.html"
			tempfile, ferr := ioutil.TempFile("", "hugo-transform-error")
			if ferr == nil {
				filename = tempfile.Name()
				defer tempfile.Close()
				_, _ = io.Copy(tempfile, fb.from)
				return herrors.NewFileErrorFromFile(err, filename, hugofs.Os, nil)
			}
			// Fallback when the temp file could not be created.
			return herrors.NewFileErrorFromName(err, filename).UpdateContent(fb.from, nil)

		}
	}

	// Flush the final step's output to the destination.
	_, err := fb.to.WriteTo(to)
	return err
}
diff --git a/transform/chain_test.go b/transform/chain_test.go
new file mode 100644
index 000000000..78b288e97
--- /dev/null
+++ b/transform/chain_test.go
@@ -0,0 +1,70 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transform
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+)
+
+func TestChainZeroTransformers(t *testing.T) {
+ tr := New()
+ in := new(bytes.Buffer)
+ out := new(bytes.Buffer)
+ if err := tr.Apply(in, out); err != nil {
+ t.Errorf("A zero transformer chain returned an error.")
+ }
+}
+
+func TestChainingMultipleTransformers(t *testing.T) {
+ f1 := func(ct FromTo) error {
+ _, err := ct.To().Write(bytes.Replace(ct.From().Bytes(), []byte("f1"), []byte("f1r"), -1))
+ return err
+ }
+ f2 := func(ct FromTo) error {
+ _, err := ct.To().Write(bytes.Replace(ct.From().Bytes(), []byte("f2"), []byte("f2r"), -1))
+ return err
+ }
+ f3 := func(ct FromTo) error {
+ _, err := ct.To().Write(bytes.Replace(ct.From().Bytes(), []byte("f3"), []byte("f3r"), -1))
+ return err
+ }
+
+ f4 := func(ct FromTo) error {
+ _, err := ct.To().Write(bytes.Replace(ct.From().Bytes(), []byte("f4"), []byte("f4r"), -1))
+ return err
+ }
+
+ tr := New(f1, f2, f3, f4)
+
+ out := new(bytes.Buffer)
+ if err := tr.Apply(out, strings.NewReader("Test: f4 f3 f1 f2 f1 The End.")); err != nil {
+ t.Errorf("Multi transformer chain returned an error: %s", err)
+ }
+
+ expected := "Test: f4r f3r f1r f2r f1r The End."
+
+ if out.String() != expected {
+ t.Errorf("Expected %s got %s", expected, out.String())
+ }
+}
+
+func TestNewEmptyTransforms(t *testing.T) {
+ c := qt.New(t)
+ transforms := NewEmpty()
+ c.Assert(cap(transforms), qt.Equals, 20)
+}
diff --git a/transform/livereloadinject/livereloadinject.go b/transform/livereloadinject/livereloadinject.go
new file mode 100644
index 000000000..32ed55f63
--- /dev/null
+++ b/transform/livereloadinject/livereloadinject.go
@@ -0,0 +1,85 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package livereloadinject
+
+import (
+ "bytes"
+ "fmt"
+ "html"
+ "net/url"
+ "strings"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/transform"
+)
+
// tag is a marker in the HTML document where the livereload script can be
// injected. When appendScript is set the script goes right after the marker;
// otherwise right before it (see New).
type tag struct {
	markup []byte
	appendScript bool
}

// tags lists candidate injection points in priority order: an opening head
// tag is preferred, a closing body tag is the fallback.
var tags = []tag{
	{markup: []byte("<head>"), appendScript: true},
	{markup: []byte("<HEAD>"), appendScript: true},
	{markup: []byte("</body>")},
	{markup: []byte("</BODY>")},
}
+
// New creates a function that can be used
// to inject a script tag for the livereload JavaScript in a HTML document.
func New(baseURL url.URL) transform.Transformer {
	return func(ft transform.FromTo) error {
		b := ft.From().Bytes()
		idx := -1
		var match tag
		// We used to insert the livereload script right before the closing body.
		// This does not work when combined with tools such as Turbolinks.
		// So we try to inject the script as early as possible.
		for _, t := range tags {
			idx = bytes.Index(b, t.markup)
			if idx != -1 {
				match = t
				break
			}
		}

		// Build the script URL from the server's base path and port.
		path := strings.TrimSuffix(baseURL.Path, "/")

		src := path + "/livereload.js?mindelay=10&v=2"
		src += "&port=" + baseURL.Port()
		src += "&path=" + strings.TrimPrefix(path+"/livereload", "/")

		// Work on a copy: the From bytes are only valid until the next
		// buffer modification (see transform.BytesReader).
		c := make([]byte, len(b))
		copy(c, b)

		if idx == -1 {
			// No injection point found; pass the document through unchanged.
			_, err := ft.To().Write(c)
			return err
		}

		script := []byte(fmt.Sprintf(`<script src="%s" data-no-instant defer></script>`, html.EscapeString(src)))

		// Insert after an opening tag (<head>), before a closing one (</body>).
		i := idx
		if match.appendScript {
			i += len(match.markup)
		}

		// Splice the script into the copied document at position i.
		c = append(c[:i], append(script, c[i:]...)...)

		if _, err := ft.To().Write(c); err != nil {
			// A failed write is logged, not returned: injection is best-effort.
			helpers.DistinctWarnLog.Println("Failed to inject LiveReload script:", err)
		}
		return nil
	}
}
diff --git a/transform/livereloadinject/livereloadinject_test.go b/transform/livereloadinject/livereloadinject_test.go
new file mode 100644
index 000000000..b2ec4483a
--- /dev/null
+++ b/transform/livereloadinject/livereloadinject_test.go
@@ -0,0 +1,64 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package livereloadinject
+
+import (
+ "bytes"
+ "net/url"
+ "strings"
+ "testing"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/gohugoio/hugo/transform"
+)
+
// TestLiveReloadInject verifies the script is injected after <head>/<HEAD>,
// before </body>/</BODY>, and not at all when no marker exists.
func TestLiveReloadInject(t *testing.T) {
	c := qt.New(t)

	lrurl, err := url.Parse("http://localhost:1234/subpath")
	if err != nil {
		t.Errorf("Parsing test URL failed")
		return
	}
	// The expected tag for the base URL above (& is HTML-escaped to &amp;).
	expectBase := `<script src="/subpath/livereload.js?mindelay=10&amp;v=2&amp;port=1234&amp;path=subpath/livereload" data-no-instant defer></script>`
	// apply runs s through a one-step chain holding the injector.
	apply := func(s string) string {
		out := new(bytes.Buffer)
		in := strings.NewReader(s)

		tr := transform.New(New(*lrurl))
		tr.Apply(out, in)

		return out.String()
	}

	c.Run("Head lower", func(c *qt.C) {
		c.Assert(apply("<html><head>foo"), qt.Equals, "<html><head>"+expectBase+"foo")
	})

	c.Run("Head upper", func(c *qt.C) {
		c.Assert(apply("<html><HEAD>foo"), qt.Equals, "<html><HEAD>"+expectBase+"foo")
	})

	c.Run("Body lower", func(c *qt.C) {
		c.Assert(apply("foo</body>"), qt.Equals, "foo"+expectBase+"</body>")
	})

	c.Run("Body upper", func(c *qt.C) {
		c.Assert(apply("foo</BODY>"), qt.Equals, "foo"+expectBase+"</BODY>")
	})

	c.Run("No match", func(c *qt.C) {
		c.Assert(apply("<h1>No match</h1>"), qt.Equals, "<h1>No match</h1>")
	})
}
diff --git a/transform/metainject/hugogenerator.go b/transform/metainject/hugogenerator.go
new file mode 100644
index 000000000..20f05145b
--- /dev/null
+++ b/transform/metainject/hugogenerator.go
@@ -0,0 +1,56 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metainject
+
+import (
+ "bytes"
+ "fmt"
+ "regexp"
+
+ "github.com/gohugoio/hugo/common/hugo"
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/transform"
+)
+
+var (
+ metaTagsCheck = regexp.MustCompile(`(?i)<meta\s+name=['|"]?generator['|"]?`)
+ hugoGeneratorTag = fmt.Sprintf(`<meta name="generator" content="Hugo %s" />`, hugo.CurrentVersion)
+)
+
// HugoGenerator injects a meta generator tag for Hugo if none present.
func HugoGenerator(ft transform.FromTo) error {
	b := ft.From().Bytes()
	// Respect an existing generator tag; pass the content through unchanged.
	if metaTagsCheck.Match(b) {
		if _, err := ft.To().Write(b); err != nil {
			helpers.DistinctWarnLog.Println("Failed to inject Hugo generator tag:", err)
		}
		return nil
	}

	head := "<head>"
	replace := []byte(fmt.Sprintf("%s\n\t%s", head, hugoGeneratorTag))
	newcontent := bytes.Replace(b, []byte(head), replace, 1)

	// An unchanged length means no lowercase <head> was found; retry with
	// the uppercase variant.
	if len(newcontent) == len(b) {
		head := "<HEAD>"
		replace := []byte(fmt.Sprintf("%s\n\t%s", head, hugoGeneratorTag))
		newcontent = bytes.Replace(b, []byte(head), replace, 1)
	}

	// A failed write is logged, not returned: injection is best-effort.
	if _, err := ft.To().Write(newcontent); err != nil {
		helpers.DistinctWarnLog.Println("Failed to inject Hugo generator tag:", err)
	}

	return nil
}
diff --git a/transform/metainject/hugogenerator_test.go b/transform/metainject/hugogenerator_test.go
new file mode 100644
index 000000000..1d6d7c4b9
--- /dev/null
+++ b/transform/metainject/hugogenerator_test.go
@@ -0,0 +1,60 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metainject
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+
+ "github.com/gohugoio/hugo/transform"
+)
+
// TestHugoGeneratorInject covers injection into lower/upper case heads,
// skipping when a generator tag already exists, and no-head inputs.
func TestHugoGeneratorInject(t *testing.T) {
	// Override the package-level tag with a short marker so expectations
	// don't depend on the current Hugo version.
	hugoGeneratorTag = "META"
	for i, this := range []struct {
		in string
		expect string
	}{
		{`<head>
	<foo />
</head>`, `<head>
	META
	<foo />
</head>`},
		{`<HEAD>
	<foo />
</HEAD>`, `<HEAD>
	META
	<foo />
</HEAD>`},
		{`<head><meta name="generator" content="Jekyll" /></head>`, `<head><meta name="generator" content="Jekyll" /></head>`},
		{`<head><meta name='generator' content='Jekyll' /></head>`, `<head><meta name='generator' content='Jekyll' /></head>`},
		{`<head><meta name=generator content=Jekyll /></head>`, `<head><meta name=generator content=Jekyll /></head>`},
		{`<head><META NAME="GENERATOR" content="Jekyll" /></head>`, `<head><META NAME="GENERATOR" content="Jekyll" /></head>`},
		{"", ""},
		{"</head>", "</head>"},
		{"<head>", "<head>\n\tMETA"},
	} {
		in := strings.NewReader(this.in)
		out := new(bytes.Buffer)

		tr := transform.New(HugoGenerator)
		tr.Apply(out, in)

		if out.String() != this.expect {
			t.Errorf("[%d] Expected \n%q got \n%q", i, this.expect, out.String())
		}
	}
}
diff --git a/transform/urlreplacers/absurl.go b/transform/urlreplacers/absurl.go
new file mode 100644
index 000000000..029d94da2
--- /dev/null
+++ b/transform/urlreplacers/absurl.go
@@ -0,0 +1,36 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package urlreplacers
+
+import "github.com/gohugoio/hugo/transform"
+
// ar is the package-wide replacer. It only holds the quote byte sets for
// HTML and XML (never mutated), so it is shared by all transformers.
var ar = newAbsURLReplacer()

// NewAbsURLTransformer replaces relative URLs with absolute ones
// in HTML files, using the baseURL setting.
func NewAbsURLTransformer(path string) transform.Transformer {
	return func(ft transform.FromTo) error {
		// replaceInHTML has no error return, so the step always succeeds.
		ar.replaceInHTML(path, ft)
		return nil
	}
}

// NewAbsURLInXMLTransformer replaces relative URLs with absolute ones
// in XML files, using the baseURL setting.
func NewAbsURLInXMLTransformer(path string) transform.Transformer {
	return func(ft transform.FromTo) error {
		ar.replaceInXML(path, ft)
		return nil
	}
}
diff --git a/transform/urlreplacers/absurlreplacer.go b/transform/urlreplacers/absurlreplacer.go
new file mode 100644
index 000000000..a875e6fa8
--- /dev/null
+++ b/transform/urlreplacers/absurlreplacer.go
@@ -0,0 +1,260 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package urlreplacers
+
+import (
+ "bytes"
+ "io"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/gohugoio/hugo/transform"
+)
+
// absurllexer scans content for attribute-value URL candidates and rewrites
// root-relative ones ("/...") to start with path instead.
type absurllexer struct {
	// the source to absurlify
	content []byte
	// the target for the new absurlified content
	w io.Writer

	// path may be set to a "." relative path
	path []byte

	pos int // input position
	start int // item start position

	// quotes are the quote byte sequences valid for this document type
	// (plain quotes for HTML, entity-encoded ones for XML).
	quotes [][]byte
}
+
// prefix is one attribute prefix (e.g. "href=") the lexer scans for, paired
// with the handler invoked when it is found.
type prefix struct {
	disabled bool // set once no further occurrences exist in the content
	b []byte
	f func(l *absurllexer)

	// nextPos caches the absolute position just past the next occurrence of
	// b; -1 means the cache is invalid (see find).
	nextPos int
}
+
// find reports whether another occurrence of p.b exists at or after start.
// The absolute position just past that occurrence is cached in p.nextPos
// (until it is reset to -1). A prefix with no further occurrences disables
// itself so later scans skip it.
func (p *prefix) find(bs []byte, start int) bool {
	if p.disabled {
		return false
	}

	if p.nextPos == -1 {
		idx := bytes.Index(bs[start:], p.b)

		if idx == -1 {
			// No more occurrences in the remaining content.
			p.disabled = true
			return false
		}

		p.nextPos = start + idx + len(p.b)
	}

	return true
}
+
+func newPrefixState() []*prefix {
+ return []*prefix{
+ {b: []byte("src="), f: checkCandidateBase},
+ {b: []byte("href="), f: checkCandidateBase},
+ {b: []byte("url="), f: checkCandidateBase},
+ {b: []byte("action="), f: checkCandidateBase},
+ {b: []byte("srcset="), f: checkCandidateSrcset},
+ }
+}
+
// emit flushes the pending content between start and pos to the writer and
// advances start. The write error is ignored — the writer is presumably an
// in-memory buffer from the transform chain (confirm in Chain.Apply).
func (l *absurllexer) emit() {
	l.w.Write(l.content[l.start:l.pos])
	l.start = l.pos
}

var (
	// relURLPrefix marks a root-relative URL candidate.
	relURLPrefix = []byte("/")
	relURLPrefixLen = len(relURLPrefix)
)
+
+func (l *absurllexer) consumeQuote() []byte {
+ for _, q := range l.quotes {
+ if bytes.HasPrefix(l.content[l.pos:], q) {
+ l.pos += len(q)
+ l.emit()
+ return q
+ }
+ }
+ return nil
+}
+
// checkCandidateBase handles a URL candidate in src=, href=, url= or action=.
// On entry l.pos is just past the attribute prefix. When the value is a
// root-relative URL ("/..." but not the schemaless "//..."), the leading
// slash is replaced by l.path.
func checkCandidateBase(l *absurllexer) {
	l.consumeQuote()

	if !bytes.HasPrefix(l.content[l.pos:], relURLPrefix) {
		return
	}

	// check for schemaless URLs
	posAfter := l.pos + relURLPrefixLen
	if posAfter >= len(l.content) {
		return
	}
	r, _ := utf8.DecodeRune(l.content[posAfter:])
	if r == '/' {
		// schemaless: skip
		return
	}
	// Flush everything up to the slash, then write the path in its place.
	if l.pos > l.start {
		l.emit()
	}
	l.pos += relURLPrefixLen
	l.w.Write(l.path)
	l.start = l.pos
}
+
+func (l *absurllexer) posAfterURL(q []byte) int {
+ if len(q) > 0 {
+ // look for end quote
+ return bytes.Index(l.content[l.pos:], q)
+ }
+
+ return bytes.IndexFunc(l.content[l.pos:], func(r rune) bool {
+ return r == '>' || unicode.IsSpace(r)
+ })
+}
+
// checkCandidateSrcset handles srcset= attributes, whose quoted value may
// hold several whitespace-separated URL/descriptor fields. Every field that
// starts with "/" gets l.path prepended. On entry l.pos is just past
// "srcset=".
func checkCandidateSrcset(l *absurllexer) {
	q := l.consumeQuote()
	if q == nil {
		// srcset needs to be quoted.
		return
	}

	// special case, not frequent (me think)
	if !bytes.HasPrefix(l.content[l.pos:], relURLPrefix) {
		return
	}

	// check for schemaless URLs
	posAfter := l.pos + relURLPrefixLen
	if posAfter >= len(l.content) {
		return
	}
	r, _ := utf8.DecodeRune(l.content[posAfter:])
	if r == '/' {
		// schemaless: skip
		return
	}

	posEnd := l.posAfterURL(q)

	// safe guard
	// NOTE(review): 2000 looks like an arbitrary upper bound on srcset
	// length; longer values are passed through untouched.
	if posEnd < 0 || posEnd > 2000 {
		return
	}

	if l.pos > l.start {
		l.emit()
	}

	// section spans the attribute value up to and including the end quote.
	section := l.content[l.pos : l.pos+posEnd+1]

	// Rewrite each whitespace-separated field that starts with "/".
	fields := bytes.Fields(section)
	for i, f := range fields {
		if f[0] == '/' {
			l.w.Write(l.path)
			l.w.Write(f[1:])

		} else {
			l.w.Write(f)
		}

		if i < len(fields)-1 {
			l.w.Write([]byte(" "))
		}
	}

	l.pos += len(section)
	l.start = l.pos
}
+
// replace is the main loop: it repeatedly finds the nearest attribute
// prefix, positions the lexer just past it, and dispatches to that prefix's
// handler, until the content is exhausted.
func (l *absurllexer) replace() {
	contentLength := len(l.content)

	prefixes := newPrefixState()

	for {
		if l.pos >= contentLength {
			break
		}

		var match *prefix

		// Pick the prefix with the earliest next occurrence. Prefixes with
		// no further occurrences disable themselves inside find.
		for _, p := range prefixes {
			if !p.find(l.content, l.pos) {
				continue
			}

			if match == nil || p.nextPos < match.nextPos {
				match = p
			}
		}

		if match == nil {
			// Done!
			l.pos = contentLength
			break
		} else {
			l.pos = match.nextPos
			match.nextPos = -1 // invalidate the cache for the next round
			match.f(l)
		}
	}
	// Done!
	// Flush any remaining content after the last match.
	if l.pos > l.start {
		l.emit()
	}
}
+
+func doReplace(path string, ct transform.FromTo, quotes [][]byte) {
+ lexer := &absurllexer{
+ content: ct.From().Bytes(),
+ w: ct.To(),
+ path: []byte(path),
+ quotes: quotes,
+ }
+
+ lexer.replace()
+}
+
+type absURLReplacer struct {
+ htmlQuotes [][]byte
+ xmlQuotes [][]byte
+}
+
+func newAbsURLReplacer() *absURLReplacer {
+ return &absURLReplacer{
+ htmlQuotes: [][]byte{[]byte("\""), []byte("'")},
+ xmlQuotes: [][]byte{[]byte("&#34;"), []byte("&#39;")},
+ }
+}
+
+func (au *absURLReplacer) replaceInHTML(path string, ct transform.FromTo) {
+ doReplace(path, ct, au.htmlQuotes)
+}
+
+func (au *absURLReplacer) replaceInXML(path string, ct transform.FromTo) {
+ doReplace(path, ct, au.xmlQuotes)
+}
diff --git a/transform/urlreplacers/absurlreplacer_test.go b/transform/urlreplacers/absurlreplacer_test.go
new file mode 100644
index 000000000..f95ee4fc1
--- /dev/null
+++ b/transform/urlreplacers/absurlreplacer_test.go
@@ -0,0 +1,236 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package urlreplacers
+
+import (
+ "path/filepath"
+ "testing"
+
+ bp "github.com/gohugoio/hugo/bufferpool"
+
+ "github.com/gohugoio/hugo/helpers"
+ "github.com/gohugoio/hugo/transform"
+)
+
+const (
+ h5JsContentDoubleQuote = "<!DOCTYPE html><html><head><script src=\"foobar.js\"></script><script src=\"/barfoo.js\"></script></head><body><nav><h1>title</h1></nav><article>content <a href=\"foobar\">foobar</a>. <a href=\"/foobar\">Follow up</a></article></body></html>"
+ h5JsContentSingleQuote = "<!DOCTYPE html><html><head><script src='foobar.js'></script><script src='/barfoo.js'></script></head><body><nav><h1>title</h1></nav><article>content <a href='foobar'>foobar</a>. <a href='/foobar'>Follow up</a></article></body></html>"
+ h5JsContentAbsURL = "<!DOCTYPE html><html><head><script src=\"http://user@host:10234/foobar.js\"></script></head><body><nav><h1>title</h1></nav><article>content <a href=\"https://host/foobar\">foobar</a>. Follow up</article></body></html>"
+ h5JsContentAbsURLSchemaless = "<!DOCTYPE html><html><head><script src=\"//host/foobar.js\"></script><script src='//host2/barfoo.js'></head><body><nav><h1>title</h1></nav><article>content <a href=\"//host/foobar\">foobar</a>. <a href='//host2/foobar'>Follow up</a></article></body></html>"
+ correctOutputSrcHrefDq = "<!DOCTYPE html><html><head><script src=\"foobar.js\"></script><script src=\"http://base/barfoo.js\"></script></head><body><nav><h1>title</h1></nav><article>content <a href=\"foobar\">foobar</a>. <a href=\"http://base/foobar\">Follow up</a></article></body></html>"
+ correctOutputSrcHrefSq = "<!DOCTYPE html><html><head><script src='foobar.js'></script><script src='http://base/barfoo.js'></script></head><body><nav><h1>title</h1></nav><article>content <a href='foobar'>foobar</a>. <a href='http://base/foobar'>Follow up</a></article></body></html>"
+
+ h5XMLContentAbsURL = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\">&lt;p&gt;&lt;a href=&#34;/foobar&#34;&gt;foobar&lt;/a&gt;&lt;/p&gt; &lt;p&gt;A video: &lt;iframe src=&#39;/foo&#39;&gt;&lt;/iframe&gt;&lt;/p&gt;</content></entry></feed>"
+ correctOutputSrcHrefInXML = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\">&lt;p&gt;&lt;a href=&#34;http://base/foobar&#34;&gt;foobar&lt;/a&gt;&lt;/p&gt; &lt;p&gt;A video: &lt;iframe src=&#39;http://base/foo&#39;&gt;&lt;/iframe&gt;&lt;/p&gt;</content></entry></feed>"
+ h5XMLContentGuarded = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\">&lt;p&gt;&lt;a href=&#34;//foobar&#34;&gt;foobar&lt;/a&gt;&lt;/p&gt; &lt;p&gt;A video: &lt;iframe src=&#39;//foo&#39;&gt;&lt;/iframe&gt;&lt;/p&gt;</content></entry></feed>"
+)
+
+const (
+ // additional sanity tests for replacements testing
+ replace1 = "No replacements."
+ replace2 = "ᚠᛇᚻ ᛒᛦᚦ ᚠᚱᚩᚠᚢᚱ\nᚠᛁᚱᚪ ᚷᛖᚻᚹᛦᛚᚳᚢᛗ"
+ replace3 = `End of file: src="/`
+ replace5 = `Srcsett with no closing quote: srcset="/img/small.jpg do be do be do.`
+
+ // Issue: 816, schemaless links combined with others
+ replaceSchemalessHTML = `Pre. src='//schemaless' src='/normal' <a href="//schemaless">Schemaless</a>. <a href="/normal">normal</a>. Post.`
+ replaceSchemalessHTMLCorrect = `Pre. src='//schemaless' src='http://base/normal' <a href="//schemaless">Schemaless</a>. <a href="http://base/normal">normal</a>. Post.`
+ replaceSchemalessXML = `Pre. src=&#39;//schemaless&#39; src=&#39;/normal&#39; <a href=&#39;//schemaless&#39;>Schemaless</a>. <a href=&#39;/normal&#39;>normal</a>. Post.`
+ replaceSchemalessXMLCorrect = `Pre. src=&#39;//schemaless&#39; src=&#39;http://base/normal&#39; <a href=&#39;//schemaless&#39;>Schemaless</a>. <a href=&#39;http://base/normal&#39;>normal</a>. Post.`
+)
+
+const (
+ // srcset=
+ srcsetBasic = `Pre. <img srcset="/img/small.jpg 200w, /img/medium.jpg 300w, /img/big.jpg 700w" alt="text" src="/img/foo.jpg">`
+ srcsetBasicCorrect = `Pre. <img srcset="http://base/img/small.jpg 200w, http://base/img/medium.jpg 300w, http://base/img/big.jpg 700w" alt="text" src="http://base/img/foo.jpg">`
+ srcsetSingleQuote = `Pre. <img srcset='/img/small.jpg 200w, /img/big.jpg 700w' alt="text" src="/img/foo.jpg"> POST.`
+ srcsetSingleQuoteCorrect = `Pre. <img srcset='http://base/img/small.jpg 200w, http://base/img/big.jpg 700w' alt="text" src="http://base/img/foo.jpg"> POST.`
+ srcsetXMLBasic = `Pre. <img srcset=&#34;/img/small.jpg 200w, /img/big.jpg 700w&#34; alt=&#34;text&#34; src=&#34;/img/foo.jpg&#34;>`
+ srcsetXMLBasicCorrect = `Pre. <img srcset=&#34;http://base/img/small.jpg 200w, http://base/img/big.jpg 700w&#34; alt=&#34;text&#34; src=&#34;http://base/img/foo.jpg&#34;>`
+ srcsetXMLSingleQuote = `Pre. <img srcset=&#34;/img/small.jpg 200w, /img/big.jpg 700w&#34; alt=&#34;text&#34; src=&#34;/img/foo.jpg&#34;>`
+ srcsetXMLSingleQuoteCorrect = `Pre. <img srcset=&#34;http://base/img/small.jpg 200w, http://base/img/big.jpg 700w&#34; alt=&#34;text&#34; src=&#34;http://base/img/foo.jpg&#34;>`
+ srcsetVariations = `Pre.
+Missing start quote: <img srcset=/img/small.jpg 200w, /img/big.jpg 700w" alt="text"> src='/img/foo.jpg'> FOO.
+<img srcset='/img.jpg'>
+schemaless: <img srcset='//img.jpg' src='//basic.jpg'>
+schemaless2: <img srcset="//img.jpg" src="//basic.jpg2> POST
+`
+)
+
+const (
+ srcsetVariationsCorrect = `Pre.
+Missing start quote: <img srcset=/img/small.jpg 200w, /img/big.jpg 700w" alt="text"> src='http://base/img/foo.jpg'> FOO.
+<img srcset='http://base/img.jpg'>
+schemaless: <img srcset='//img.jpg' src='//basic.jpg'>
+schemaless2: <img srcset="//img.jpg" src="//basic.jpg2> POST
+`
+ srcsetXMLVariations = `Pre.
+Missing start quote: &lt;img srcset=/img/small.jpg 200w /img/big.jpg 700w&quot; alt=&quot;text&quot;&gt; src=&#39;/img/foo.jpg&#39;&gt; FOO.
+&lt;img srcset=&#39;/img.jpg&#39;&gt;
+schemaless: &lt;img srcset=&#39;//img.jpg&#39; src=&#39;//basic.jpg&#39;&gt;
+schemaless2: &lt;img srcset=&quot;//img.jpg&quot; src=&quot;//basic.jpg2&gt; POST
+`
+ srcsetXMLVariationsCorrect = `Pre.
+Missing start quote: &lt;img srcset=/img/small.jpg 200w /img/big.jpg 700w&quot; alt=&quot;text&quot;&gt; src=&#39;http://base/img/foo.jpg&#39;&gt; FOO.
+&lt;img srcset=&#39;http://base/img.jpg&#39;&gt;
+schemaless: &lt;img srcset=&#39;//img.jpg&#39; src=&#39;//basic.jpg&#39;&gt;
+schemaless2: &lt;img srcset=&quot;//img.jpg&quot; src=&quot;//basic.jpg2&gt; POST
+`
+
+ relPathVariations = `PRE. a href="/img/small.jpg" input action="/foo.html" meta url=/redirect/to/page/ POST.`
+ relPathVariationsCorrect = `PRE. a href="../../img/small.jpg" input action="../../foo.html" meta url=../../redirect/to/page/ POST.`
+
+ testBaseURL = "http://base/"
+)
+
+var (
+ absURLlBenchTests = []test{
+ {h5JsContentDoubleQuote, correctOutputSrcHrefDq},
+ {h5JsContentSingleQuote, correctOutputSrcHrefSq},
+ {h5JsContentAbsURL, h5JsContentAbsURL},
+ {h5JsContentAbsURLSchemaless, h5JsContentAbsURLSchemaless},
+ }
+
+ xmlAbsURLBenchTests = []test{
+ {h5XMLContentAbsURL, correctOutputSrcHrefInXML},
+ {h5XMLContentGuarded, h5XMLContentGuarded},
+ }
+
+ sanityTests = []test{{replace1, replace1}, {replace2, replace2}, {replace3, replace3}, {replace3, replace3}, {replace5, replace5}}
+ extraTestsHTML = []test{{replaceSchemalessHTML, replaceSchemalessHTMLCorrect}}
+ absURLTests = append(absURLlBenchTests, append(sanityTests, extraTestsHTML...)...)
+ extraTestsXML = []test{{replaceSchemalessXML, replaceSchemalessXMLCorrect}}
+ xmlAbsURLTests = append(xmlAbsURLBenchTests, append(sanityTests, extraTestsXML...)...)
+ srcsetTests = []test{{srcsetBasic, srcsetBasicCorrect}, {srcsetSingleQuote, srcsetSingleQuoteCorrect}, {srcsetVariations, srcsetVariationsCorrect}}
+ srcsetXMLTests = []test{
+ {srcsetXMLBasic, srcsetXMLBasicCorrect},
+ {srcsetXMLSingleQuote, srcsetXMLSingleQuoteCorrect},
+ {srcsetXMLVariations, srcsetXMLVariationsCorrect},
+ }
+
+ relurlTests = []test{{relPathVariations, relPathVariationsCorrect}}
+)
+
+func BenchmarkAbsURL(b *testing.B) {
+ tr := transform.New(NewAbsURLTransformer(testBaseURL))
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ apply(b.Errorf, tr, absURLlBenchTests)
+ }
+}
+
+func BenchmarkAbsURLSrcset(b *testing.B) {
+ tr := transform.New(NewAbsURLTransformer(testBaseURL))
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ apply(b.Errorf, tr, srcsetTests)
+ }
+}
+
+func BenchmarkXMLAbsURLSrcset(b *testing.B) {
+ tr := transform.New(NewAbsURLInXMLTransformer(testBaseURL))
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ apply(b.Errorf, tr, srcsetXMLTests)
+ }
+}
+
+func TestAbsURL(t *testing.T) {
+ tr := transform.New(NewAbsURLTransformer(testBaseURL))
+
+ apply(t.Errorf, tr, absURLTests)
+}
+
+func TestAbsURLUnquoted(t *testing.T) {
+ tr := transform.New(NewAbsURLTransformer(testBaseURL))
+
+ apply(t.Errorf, tr, []test{
+ {
+ content: `Link: <a href=/asdf>ASDF</a>`,
+ expected: `Link: <a href=http://base/asdf>ASDF</a>`,
+ },
+ {
+ content: `Link: <a href=/asdf >ASDF</a>`,
+ expected: `Link: <a href=http://base/asdf >ASDF</a>`,
+ },
+ })
+}
+
+func TestRelativeURL(t *testing.T) {
+ tr := transform.New(NewAbsURLTransformer(helpers.GetDottedRelativePath(filepath.FromSlash("/post/sub/"))))
+
+ applyWithPath(t.Errorf, tr, relurlTests)
+}
+
+func TestAbsURLSrcSet(t *testing.T) {
+ tr := transform.New(NewAbsURLTransformer(testBaseURL))
+
+ apply(t.Errorf, tr, srcsetTests)
+}
+
+func TestAbsXMLURLSrcSet(t *testing.T) {
+ tr := transform.New(NewAbsURLInXMLTransformer(testBaseURL))
+
+ apply(t.Errorf, tr, srcsetXMLTests)
+}
+
+func BenchmarkXMLAbsURL(b *testing.B) {
+ tr := transform.New(NewAbsURLInXMLTransformer(testBaseURL))
+
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ apply(b.Errorf, tr, xmlAbsURLBenchTests)
+ }
+}
+
+func TestXMLAbsURL(t *testing.T) {
+ tr := transform.New(NewAbsURLInXMLTransformer(testBaseURL))
+ apply(t.Errorf, tr, xmlAbsURLTests)
+}
+
+func apply(ef errorf, tr transform.Chain, tests []test) {
+ applyWithPath(ef, tr, tests)
+}
+
+func applyWithPath(ef errorf, tr transform.Chain, tests []test) {
+ out := bp.GetBuffer()
+ defer bp.PutBuffer(out)
+
+ in := bp.GetBuffer()
+ defer bp.PutBuffer(in)
+
+ for _, test := range tests {
+ var err error
+ in.WriteString(test.content)
+ err = tr.Apply(out, in)
+ if err != nil {
+ ef("Unexpected error: %s", err)
+ }
+ if test.expected != out.String() {
+ ef("Expected:\n%s\nGot:\n%s", test.expected, out.String())
+ }
+ out.Reset()
+ in.Reset()
+ }
+}
+
+type test struct {
+ content string
+ expected string
+}
+
+type errorf func(string, ...any)
diff --git a/watcher/batcher.go b/watcher/batcher.go
new file mode 100644
index 000000000..718eea73f
--- /dev/null
+++ b/watcher/batcher.go
@@ -0,0 +1,87 @@
+// Copyright 2020 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package watcher
+
+import (
+ "time"
+
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/watcher/filenotify"
+)
+
+// Batcher batches file watch events in a given interval.
+type Batcher struct {
+ filenotify.FileWatcher
+ interval time.Duration
+ done chan struct{}
+
+ Events chan []fsnotify.Event // Events are returned on this channel
+}
+
+// New creates and starts a Batcher with the given time interval.
+// It will fall back to a poll-based watcher if native watching isn't supported.
+// To always use polling, set poll to true.
+func New(intervalBatcher, intervalPoll time.Duration, poll bool) (*Batcher, error) {
+ var err error
+ var watcher filenotify.FileWatcher
+
+ if poll {
+ watcher = filenotify.NewPollingWatcher(intervalPoll)
+ } else {
+ watcher, err = filenotify.New(intervalPoll)
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ batcher := &Batcher{}
+ batcher.FileWatcher = watcher
+ batcher.interval = intervalBatcher
+ batcher.done = make(chan struct{}, 1)
+ batcher.Events = make(chan []fsnotify.Event, 1)
+
+ if err == nil {
+ go batcher.run()
+ }
+
+ return batcher, nil
+}
+
+func (b *Batcher) run() {
+ tick := time.Tick(b.interval)
+ evs := make([]fsnotify.Event, 0)
+OuterLoop:
+ for {
+ select {
+ case ev := <-b.FileWatcher.Events():
+ evs = append(evs, ev)
+ case <-tick:
+ if len(evs) == 0 {
+ continue
+ }
+ b.Events <- evs
+ evs = make([]fsnotify.Event, 0)
+ case <-b.done:
+ break OuterLoop
+ }
+ }
+ close(b.done)
+}
+
+// Close stops the watching of the files.
+func (b *Batcher) Close() {
+ b.done <- struct{}{}
+ b.FileWatcher.Close()
+}
diff --git a/watcher/filenotify/filenotify.go b/watcher/filenotify/filenotify.go
new file mode 100644
index 000000000..b9d0d2e14
--- /dev/null
+++ b/watcher/filenotify/filenotify.go
@@ -0,0 +1,49 @@
+// Package filenotify provides a mechanism for watching file(s) for changes.
+// Generally leans on fsnotify, but provides a poll-based notifier which fsnotify does not support.
+// These are wrapped up in a common interface so that either can be used interchangeably in your code.
+//
+// This package is adapted from https://github.com/moby/moby/tree/master/pkg/filenotify, Apache-2.0 License.
+// Hopefully this can be replaced with an external package sometime in the future, see https://github.com/fsnotify/fsnotify/issues/9
+package filenotify
+
+import (
+ "time"
+
+ "github.com/fsnotify/fsnotify"
+)
+
+// FileWatcher is an interface for implementing file notification watchers
+type FileWatcher interface {
+ Events() <-chan fsnotify.Event
+ Errors() <-chan error
+ Add(name string) error
+ Remove(name string) error
+ Close() error
+}
+
+// New tries to use an fs-event watcher, and falls back to the poller if there is an error
+func New(interval time.Duration) (FileWatcher, error) {
+ if watcher, err := NewEventWatcher(); err == nil {
+ return watcher, nil
+ }
+ return NewPollingWatcher(interval), nil
+}
+
+// NewPollingWatcher returns a poll-based file watcher
+func NewPollingWatcher(interval time.Duration) FileWatcher {
+ return &filePoller{
+ interval: interval,
+ done: make(chan struct{}),
+ events: make(chan fsnotify.Event),
+ errors: make(chan error),
+ }
+}
+
+// NewEventWatcher returns an fs-event based file watcher
+func NewEventWatcher() (FileWatcher, error) {
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ return nil, err
+ }
+ return &fsNotifyWatcher{watcher}, nil
+}
diff --git a/watcher/filenotify/fsnotify.go b/watcher/filenotify/fsnotify.go
new file mode 100644
index 000000000..19534128a
--- /dev/null
+++ b/watcher/filenotify/fsnotify.go
@@ -0,0 +1,20 @@
+// Package filenotify is adapted from https://github.com/moby/moby/tree/master/pkg/filenotify, Apache-2.0 License.
+// Hopefully this can be replaced with an external package sometime in the future, see https://github.com/fsnotify/fsnotify/issues/9
+package filenotify
+
+import "github.com/fsnotify/fsnotify"
+
+// fsNotifyWatcher wraps the fsnotify package to satisfy the FileNotifier interface
+type fsNotifyWatcher struct {
+ *fsnotify.Watcher
+}
+
+// Events returns the fsnotify event channel receiver
+func (w *fsNotifyWatcher) Events() <-chan fsnotify.Event {
+ return w.Watcher.Events
+}
+
+// Errors returns the fsnotify error channel receiver
+func (w *fsNotifyWatcher) Errors() <-chan error {
+ return w.Watcher.Errors
+}
diff --git a/watcher/filenotify/poller.go b/watcher/filenotify/poller.go
new file mode 100644
index 000000000..71d806209
--- /dev/null
+++ b/watcher/filenotify/poller.go
@@ -0,0 +1,326 @@
+// Package filenotify is adapted from https://github.com/moby/moby/tree/master/pkg/filenotify, Apache-2.0 License.
+// Hopefully this can be replaced with an external package sometime in the future, see https://github.com/fsnotify/fsnotify/issues/9
+package filenotify
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "sync"
+ "time"
+
+ "github.com/fsnotify/fsnotify"
+)
+
+var (
+ // errPollerClosed is returned when the poller is closed
+ errPollerClosed = errors.New("poller is closed")
+ // errNoSuchWatch is returned when trying to remove a watch that doesn't exist
+ errNoSuchWatch = errors.New("watch does not exist")
+)
+
+// filePoller is used to poll files for changes, especially in cases where fsnotify
+// can't be run (e.g. when inotify handles are exhausted)
+// filePoller satisfies the FileWatcher interface
+type filePoller struct {
+ // the duration between polls.
+ interval time.Duration
+ // watches is the list of files currently being polled, close the associated channel to stop the watch
+ watches map[string]struct{}
+ // Will be closed when done.
+ done chan struct{}
+ // events is the channel to listen to for watch events
+ events chan fsnotify.Event
+ // errors is the channel to listen to for watch errors
+ errors chan error
+ // mu locks the poller for modification
+ mu sync.Mutex
+ // closed is used to specify when the poller has already closed
+ closed bool
+}
+
+// Add adds a filename to the list of watches
+// once added the file is polled for changes in a separate goroutine
+func (w *filePoller) Add(name string) error {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ if w.closed {
+ return errPollerClosed
+ }
+
+ item, err := newItemToWatch(name)
+ if err != nil {
+ return err
+ }
+ if item.left.FileInfo == nil {
+ return os.ErrNotExist
+ }
+
+ if w.watches == nil {
+ w.watches = make(map[string]struct{})
+ }
+ if _, exists := w.watches[name]; exists {
+ return fmt.Errorf("watch exists")
+ }
+ w.watches[name] = struct{}{}
+
+ go w.watch(item)
+ return nil
+}
+
+// Remove stops and removes watch with the specified name
+func (w *filePoller) Remove(name string) error {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+ return w.remove(name)
+}
+
+func (w *filePoller) remove(name string) error {
+ if w.closed {
+ return errPollerClosed
+ }
+
+ _, exists := w.watches[name]
+ if !exists {
+ return errNoSuchWatch
+ }
+ delete(w.watches, name)
+ return nil
+}
+
+// Events returns the event channel
+// This is used for notifications on events about watched files
+func (w *filePoller) Events() <-chan fsnotify.Event {
+ return w.events
+}
+
+// Errors returns the errors channel
+// This is used for notifications about errors on watched files
+func (w *filePoller) Errors() <-chan error {
+ return w.errors
+}
+
+// Close closes the poller
+// All watches are stopped, removed, and the poller cannot be added to
+func (w *filePoller) Close() error {
+ w.mu.Lock()
+ defer w.mu.Unlock()
+
+ if w.closed {
+ return nil
+ }
+ w.closed = true
+ close(w.done)
+ for name := range w.watches {
+ w.remove(name)
+ }
+
+ return nil
+}
+
+// sendEvent publishes the specified event to the events channel
+func (w *filePoller) sendEvent(e fsnotify.Event) error {
+ select {
+ case w.events <- e:
+ case <-w.done:
+ return fmt.Errorf("closed")
+ }
+ return nil
+}
+
+// sendErr publishes the specified error to the errors channel
+func (w *filePoller) sendErr(e error) error {
+ select {
+ case w.errors <- e:
+ case <-w.done:
+ return fmt.Errorf("closed")
+ }
+ return nil
+}
+
+// watch watches item for changes until done is closed.
+func (w *filePoller) watch(item *itemToWatch) {
+ ticker := time.NewTicker(w.interval)
+ defer ticker.Stop()
+
+ for {
+ select {
+ case <-ticker.C:
+ case <-w.done:
+ return
+ }
+
+ evs, err := item.checkForChanges()
+ if err != nil {
+ if err := w.sendErr(err); err != nil {
+ return
+ }
+ }
+
+ item.left, item.right = item.right, item.left
+
+ for _, ev := range evs {
+ if err := w.sendEvent(ev); err != nil {
+ return
+ }
+ }
+
+ }
+}
+
+// recording records the state of a file or a dir.
+type recording struct {
+ os.FileInfo
+
+ // Set if FileInfo is a dir.
+ entries map[string]os.FileInfo
+}
+
+func (r *recording) clear() {
+ r.FileInfo = nil
+ if r.entries != nil {
+ for k := range r.entries {
+ delete(r.entries, k)
+ }
+ }
+}
+
+func (r *recording) record(filename string) error {
+ r.clear()
+
+ fi, err := os.Stat(filename)
+ if err != nil && !os.IsNotExist(err) {
+ return err
+ }
+
+ if fi == nil {
+ return nil
+ }
+
+ r.FileInfo = fi
+
+ // If fi is a dir, we watch the files inside that directory (not recursively).
+ // This matches the behaviour of fsnotify.
+ if fi.IsDir() {
+ f, err := os.Open(filename)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil
+ }
+ return err
+ }
+ defer f.Close()
+
+ fis, err := f.Readdir(-1)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil
+ }
+ return err
+ }
+
+ for _, fi := range fis {
+ r.entries[fi.Name()] = fi
+ }
+ }
+
+ return nil
+}
+
+// itemToWatch may be a file or a dir.
+type itemToWatch struct {
+ // Full path to the filename.
+ filename string
+
+ // Snapshots of the stat state of this file or dir.
+ left *recording
+ right *recording
+}
+
+func newItemToWatch(filename string) (*itemToWatch, error) {
+ r := &recording{
+ entries: make(map[string]os.FileInfo),
+ }
+ err := r.record(filename)
+ if err != nil {
+ return nil, err
+ }
+
+ return &itemToWatch{filename: filename, left: r}, nil
+
+}
+
+func (item *itemToWatch) checkForChanges() ([]fsnotify.Event, error) {
+ if item.right == nil {
+ item.right = &recording{
+ entries: make(map[string]os.FileInfo),
+ }
+ }
+
+ err := item.right.record(item.filename)
+ if err != nil && !os.IsNotExist(err) {
+ return nil, err
+ }
+
+ dirOp := checkChange(item.left.FileInfo, item.right.FileInfo)
+
+ if dirOp != 0 {
+ evs := []fsnotify.Event{fsnotify.Event{Op: dirOp, Name: item.filename}}
+ return evs, nil
+ }
+
+ if item.left.FileInfo == nil || !item.left.IsDir() {
+ // Done.
+ return nil, nil
+ }
+
+ leftIsIn := false
+ left, right := item.left.entries, item.right.entries
+ if len(right) > len(left) {
+ left, right = right, left
+ leftIsIn = true
+ }
+
+ var evs []fsnotify.Event
+
+ for name, fi1 := range left {
+ fi2 := right[name]
+ fil, fir := fi1, fi2
+ if leftIsIn {
+ fil, fir = fir, fil
+ }
+ op := checkChange(fil, fir)
+ if op != 0 {
+ evs = append(evs, fsnotify.Event{Op: op, Name: filepath.Join(item.filename, name)})
+ }
+
+ }
+
+ return evs, nil
+
+}
+
+func checkChange(fi1, fi2 os.FileInfo) fsnotify.Op {
+ if fi1 == nil && fi2 != nil {
+ return fsnotify.Create
+ }
+ if fi1 != nil && fi2 == nil {
+ return fsnotify.Remove
+ }
+ if fi1 == nil && fi2 == nil {
+ return 0
+ }
+ if fi1.IsDir() || fi2.IsDir() {
+ return 0
+ }
+ if fi1.Mode() != fi2.Mode() {
+ return fsnotify.Chmod
+ }
+ if fi1.ModTime() != fi2.ModTime() || fi1.Size() != fi2.Size() {
+ return fsnotify.Write
+ }
+
+ return 0
+}
diff --git a/watcher/filenotify/poller_test.go b/watcher/filenotify/poller_test.go
new file mode 100644
index 000000000..b4723c758
--- /dev/null
+++ b/watcher/filenotify/poller_test.go
@@ -0,0 +1,304 @@
+// Package filenotify is adapted from https://github.com/moby/moby/tree/master/pkg/filenotify, Apache-2.0 License.
+// Hopefully this can be replaced with an external package sometime in the future, see https://github.com/fsnotify/fsnotify/issues/9
+package filenotify
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "runtime"
+ "testing"
+ "time"
+
+ qt "github.com/frankban/quicktest"
+ "github.com/fsnotify/fsnotify"
+ "github.com/gohugoio/hugo/htesting"
+)
+
+const (
+ subdir1 = "subdir1"
+ subdir2 = "subdir2"
+ watchWaitTime = 200 * time.Millisecond
+)
+
+var (
+ isMacOs = runtime.GOOS == "darwin"
+ isWindows = runtime.GOOS == "windows"
+ isCI = htesting.IsCI()
+)
+
+func TestPollerAddRemove(t *testing.T) {
+ c := qt.New(t)
+ w := NewPollingWatcher(watchWaitTime)
+
+ c.Assert(w.Add("foo"), qt.Not(qt.IsNil))
+ c.Assert(w.Remove("foo"), qt.Not(qt.IsNil))
+
+ f, err := ioutil.TempFile("", "asdf")
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.RemoveAll(f.Name())
+ c.Assert(w.Add(f.Name()), qt.IsNil)
+ c.Assert(w.Remove(f.Name()), qt.IsNil)
+
+}
+
+func TestPollerEvent(t *testing.T) {
+ c := qt.New(t)
+
+ for _, poll := range []bool{true, false} {
+ if !(poll || isMacOs) || isCI {
+ // Only run the fsnotify tests on MacOS locally.
+ continue
+ }
+ method := "fsnotify"
+ if poll {
+ method = "poll"
+ }
+
+ c.Run(fmt.Sprintf("%s, Watch dir", method), func(c *qt.C) {
+ dir, w := preparePollTest(c, poll)
+ subdir := filepath.Join(dir, subdir1)
+ c.Assert(w.Add(subdir), qt.IsNil)
+
+ filename := filepath.Join(subdir, "file1")
+
+ // Write to one file.
+ c.Assert(ioutil.WriteFile(filename, []byte("changed"), 0600), qt.IsNil)
+
+ var expected []fsnotify.Event
+
+ if poll {
+ expected = append(expected, fsnotify.Event{Name: filename, Op: fsnotify.Write})
+ assertEvents(c, w, expected...)
+ } else {
+ // fsnotify sometimes emits Chmod before Write,
+ // which is hard to test, so skip it here.
+ drainEvents(c, w)
+ }
+
+ // Remove one file.
+ filename = filepath.Join(subdir, "file2")
+ c.Assert(os.Remove(filename), qt.IsNil)
+ assertEvents(c, w, fsnotify.Event{Name: filename, Op: fsnotify.Remove})
+
+ // Add one file.
+ filename = filepath.Join(subdir, "file3")
+ c.Assert(ioutil.WriteFile(filename, []byte("new"), 0600), qt.IsNil)
+ assertEvents(c, w, fsnotify.Event{Name: filename, Op: fsnotify.Create})
+
+ // Remove entire directory.
+ subdir = filepath.Join(dir, subdir2)
+ c.Assert(w.Add(subdir), qt.IsNil)
+
+ c.Assert(os.RemoveAll(subdir), qt.IsNil)
+
+ expected = expected[:0]
+
+ // This looks like a bug in fsnotify on MacOS. There are
+ // 3 files in this directory, yet we get Remove events
+ // for one of them + the directory.
+ if !poll {
+ expected = append(expected, fsnotify.Event{Name: filepath.Join(subdir, "file2"), Op: fsnotify.Remove})
+ }
+ expected = append(expected, fsnotify.Event{Name: subdir, Op: fsnotify.Remove})
+ assertEvents(c, w, expected...)
+
+ })
+
+ c.Run(fmt.Sprintf("%s, Add should not trigger event", method), func(c *qt.C) {
+ dir, w := preparePollTest(c, poll)
+ subdir := filepath.Join(dir, subdir1)
+ w.Add(subdir)
+ assertEvents(c, w)
+ // Create a new sub directory and add it to the watcher.
+ subdir = filepath.Join(dir, subdir1, subdir2)
+ c.Assert(os.Mkdir(subdir, 0777), qt.IsNil)
+ w.Add(subdir)
+ // This should create only one event.
+ assertEvents(c, w, fsnotify.Event{Name: subdir, Op: fsnotify.Create})
+ })
+
+ }
+}
+
+func TestPollerClose(t *testing.T) {
+ c := qt.New(t)
+ w := NewPollingWatcher(watchWaitTime)
+ f1, err := ioutil.TempFile("", "f1")
+ c.Assert(err, qt.IsNil)
+ f2, err := ioutil.TempFile("", "f2")
+ c.Assert(err, qt.IsNil)
+ filename1 := f1.Name()
+ filename2 := f2.Name()
+ f1.Close()
+ f2.Close()
+
+ c.Assert(w.Add(filename1), qt.IsNil)
+ c.Assert(w.Add(filename2), qt.IsNil)
+ c.Assert(w.Close(), qt.IsNil)
+ c.Assert(w.Close(), qt.IsNil)
+ c.Assert(ioutil.WriteFile(filename1, []byte("new"), 0600), qt.IsNil)
+ c.Assert(ioutil.WriteFile(filename2, []byte("new"), 0600), qt.IsNil)
+ // No more event as the watchers are closed.
+ assertEvents(c, w)
+
+ f2, err = ioutil.TempFile("", "f2")
+ c.Assert(err, qt.IsNil)
+
+ defer os.Remove(f2.Name())
+
+ c.Assert(w.Add(f2.Name()), qt.Not(qt.IsNil))
+
+}
+
+func TestCheckChange(t *testing.T) {
+ c := qt.New(t)
+
+ dir := prepareTestDirWithSomeFiles(c, "check-change")
+
+ stat := func(s ...string) os.FileInfo {
+ fi, err := os.Stat(filepath.Join(append([]string{dir}, s...)...))
+ c.Assert(err, qt.IsNil)
+ return fi
+ }
+
+ f0, f1, f2 := stat(subdir2, "file0"), stat(subdir2, "file1"), stat(subdir2, "file2")
+ d1 := stat(subdir1)
+
+ // Note that on Windows, only the 0200 bit (owner writable) of mode is used.
+ c.Assert(os.Chmod(filepath.Join(filepath.Join(dir, subdir2, "file1")), 0400), qt.IsNil)
+ f1_2 := stat(subdir2, "file1")
+
+ c.Assert(ioutil.WriteFile(filepath.Join(filepath.Join(dir, subdir2, "file2")), []byte("changed"), 0600), qt.IsNil)
+ f2_2 := stat(subdir2, "file2")
+
+ c.Assert(checkChange(f0, nil), qt.Equals, fsnotify.Remove)
+ c.Assert(checkChange(nil, f0), qt.Equals, fsnotify.Create)
+ c.Assert(checkChange(f1, f1_2), qt.Equals, fsnotify.Chmod)
+ c.Assert(checkChange(f2, f2_2), qt.Equals, fsnotify.Write)
+ c.Assert(checkChange(nil, nil), qt.Equals, fsnotify.Op(0))
+ c.Assert(checkChange(d1, f1), qt.Equals, fsnotify.Op(0))
+ c.Assert(checkChange(f1, d1), qt.Equals, fsnotify.Op(0))
+}
+
+func BenchmarkPoller(b *testing.B) {
+ runBench := func(b *testing.B, item *itemToWatch) {
+ b.ResetTimer()
+ for i := 0; i < b.N; i++ {
+ evs, err := item.checkForChanges()
+ if err != nil {
+ b.Fatal(err)
+ }
+ if len(evs) != 0 {
+ b.Fatal("got events")
+ }
+
+ }
+
+ }
+
+ b.Run("Check for changes in dir", func(b *testing.B) {
+ c := qt.New(b)
+ dir := prepareTestDirWithSomeFiles(c, "bench-check")
+ item, err := newItemToWatch(dir)
+ c.Assert(err, qt.IsNil)
+ runBench(b, item)
+
+ })
+
+ b.Run("Check for changes in file", func(b *testing.B) {
+ c := qt.New(b)
+ dir := prepareTestDirWithSomeFiles(c, "bench-check-file")
+ filename := filepath.Join(dir, subdir1, "file1")
+ item, err := newItemToWatch(filename)
+ c.Assert(err, qt.IsNil)
+ runBench(b, item)
+ })
+
+}
+
+func prepareTestDirWithSomeFiles(c *qt.C, id string) string {
+ dir, err := ioutil.TempDir("", fmt.Sprintf("test-poller-dir-%s", id))
+ c.Assert(err, qt.IsNil)
+ c.Assert(os.MkdirAll(filepath.Join(dir, subdir1), 0777), qt.IsNil)
+ c.Assert(os.MkdirAll(filepath.Join(dir, subdir2), 0777), qt.IsNil)
+
+ for i := 0; i < 3; i++ {
+ c.Assert(ioutil.WriteFile(filepath.Join(dir, subdir1, fmt.Sprintf("file%d", i)), []byte("hello1"), 0600), qt.IsNil)
+ }
+
+ for i := 0; i < 3; i++ {
+ c.Assert(ioutil.WriteFile(filepath.Join(dir, subdir2, fmt.Sprintf("file%d", i)), []byte("hello2"), 0600), qt.IsNil)
+ }
+
+ c.Cleanup(func() {
+ os.RemoveAll(dir)
+ })
+
+ return dir
+}
+
+func preparePollTest(c *qt.C, poll bool) (string, FileWatcher) {
+ var w FileWatcher
+ if poll {
+ w = NewPollingWatcher(watchWaitTime)
+ } else {
+ var err error
+ w, err = NewEventWatcher()
+ c.Assert(err, qt.IsNil)
+ }
+
+ dir := prepareTestDirWithSomeFiles(c, fmt.Sprint(poll))
+
+ c.Cleanup(func() {
+ w.Close()
+ })
+ return dir, w
+}
+
+func assertEvents(c *qt.C, w FileWatcher, evs ...fsnotify.Event) {
+ c.Helper()
+ i := 0
+ check := func() error {
+ for {
+ select {
+ case got := <-w.Events():
+ if i > len(evs)-1 {
+ return fmt.Errorf("got too many event(s): %q", got)
+ }
+ expected := evs[i]
+ i++
+ if expected.Name != got.Name {
+ return fmt.Errorf("got wrong filename, expected %q: %v", expected.Name, got.Name)
+ } else if got.Op&expected.Op != expected.Op {
+ return fmt.Errorf("got wrong event type, expected %q: %v", expected.Op, got.Op)
+ }
+ case e := <-w.Errors():
+ return fmt.Errorf("got unexpected error waiting for events %v", e)
+ case <-time.After(watchWaitTime + (watchWaitTime / 2)):
+ return nil
+ }
+ }
+ }
+ c.Assert(check(), qt.IsNil)
+ c.Assert(i, qt.Equals, len(evs))
+}
+
+func drainEvents(c *qt.C, w FileWatcher) {
+ c.Helper()
+ check := func() error {
+ for {
+ select {
+ case <-w.Events():
+ case e := <-w.Errors():
+ return fmt.Errorf("got unexpected error waiting for events %v", e)
+ case <-time.After(watchWaitTime * 2):
+ return nil
+ }
+ }
+ }
+ c.Assert(check(), qt.IsNil)
+}