diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 0000000..36883d7 --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,11 @@ +codecov: + ci: + - "!ci.appveyor.com" +coverage: + status: + patch: false + project: + default: + target: 90 + +comment: false diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..013dd20 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[report] +show_missing = True diff --git a/.csslintrc b/.csslintrc new file mode 100644 index 0000000..8877d0e --- /dev/null +++ b/.csslintrc @@ -0,0 +1,42 @@ +--exclude-list = mkdocs/themes/mkdocs/css/bootstrap.min.css, + mkdocs/themes/mkdocs/css/font-awesome.min.css, + mkdocs/themes/mkdocs/css/highlight.css, + mkdocs/themes/readthedocs/css/theme.css +--errors = known-properties, + box-sizing, + outline-none, + bulletproof-font-face, + compatible-vendor-prefixes, + errors, + duplicate-background-images, + duplicate-properties, + empty-rules, + selector-max-approaching, + gradients, + floats, + font-faces, + font-sizes, + shorthand, + import, + import-ie-limit, + text-indent, + rules-count, + regex-selectors, + selector-max, + selector-newline, + star-property-hack, + underscore-property-hack, + universal-selector, + unqualified-attributes, + vendor-prefix, + zero-units, + overqualified-elements, + unique-headings, + qualified-headings, + ids, + display-property-grouping, + fallback-colors, + box-model, + important, + adjoining-classes +--ignore = order-alphabetical diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..8c0ef43 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,16 @@ +# editorconfig.org + +root = true + +[*] +charset = utf-8 +indent_size = 4 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.js] +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..d9d23f9 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,10 @@ +mkdocs/contrib/search/lunr-language/** linguist-vendored +mkdocs/themes/mkdocs/js/** linguist-vendored +mkdocs/themes/mkdocs/js/base.js linguist-vendored=false +mkdocs/themes/mkdocs/css/** linguist-vendored +mkdocs/themes/mkdocs/css/base.css linguist-vendored=false +mkdocs/themes/readthedocs/js/** linguist-vendored +mkdocs/themes/readthedocs/js/theme.js linguist-vendored=false +mkdocs/themes/readthedocs/css/** linguist-vendored +mkdocs/themes/readthedocs/css/theme_extra.css linguist-vendored=false +docs/img/plugin-events.svg linguist-generated diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..cebf2ab --- /dev/null +++ b/.gitignore @@ -0,0 +1,70 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +node_modules/ +parts/ +sdist/ +var/ +package*.json +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ + +# Translations +*.mo + +# Scrapy stuff: +.scrapy + +# PyBuilder +target/ + +# IPython Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# virtualenv +venv/ +ENV/ + +# MkDocs documentation +site*/ diff --git a/.jshintignore b/.jshintignore new file mode 100644 index 0000000..72344b0 --- /dev/null +++ b/.jshintignore @@ -0,0 +1,9 @@ +mkdocs/themes/**/js/jquery-**.min.js +mkdocs/themes/mkdocs/js/highlight.pack.js +mkdocs/themes/mkdocs/js/bootstrap.min.js +mkdocs/themes/mkdocs/js/modernizr-**.min.js +mkdocs/themes/readthedocs/js/theme.js +mkdocs/themes/readthedocs/js/html5shiv.min.js +mkdocs/contrib/search/templates/search/lunr.js +mkdocs/contrib/search/lunr-language/lunr.**.js +mkdocs/contrib/search/lunr-language/tinyseg.js diff --git a/.markdownlintrc b/.markdownlintrc new file mode 100644 index 0000000..aa9fc26 --- /dev/null +++ b/.markdownlintrc @@ -0,0 +1,24 @@ +{ + // Enable all markdownlint rules + "default": true, + + // Disable line length check + "MD013": false, + + // Set Ordered list item prefix to "ordered" (use 1. 2. 3. not 1. 1. 1.) + "MD029": { "style": "ordered" }, + + "MD030": { "ul_multi": 3, "ol_multi": 2 }, + + // Set list indent level to 4 which Python-Markdown requires + "MD007": { "indent": 4 }, + + // Code block style + "MD046": { "style": "fenced" }, + + // Multiple headings with the same title + "MD024": { "siblings_only": true }, + + // Allow inline HTML + "MD033": false +} diff --git a/LICENSE b/LICENSE index cf6234d..7bb5073 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright © 2014, Tom Christie. All rights reserved. +Copyright © 2014-present, Tom Christie. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 0a3afb3..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,5 +0,0 @@ -include README.md -include LICENSE -recursive-include mkdocs *.ico *.js *.css *.png *.html *.eot *.svg *.ttf *.woff *.woff2 *.xml *.mustache *mkdocs_theme.yml -recursive-exclude * __pycache__ -recursive-exclude * *.py[co] diff --git a/README.md b/README.md index 8fdaca1..0c294f0 100644 --- a/README.md +++ b/README.md @@ -1,39 +1,79 @@ # MkDocs -Project documentation with Markdown. - ---- +> *Project documentation with Markdown* [![PyPI Version][pypi-v-image]][pypi-v-link] -[![Build Status][travis-image]][travis-link] -[![Windows Build Status][appveyor-image]][appveyor-link] +[![Build Status][GHAction-image]][GHAction-link] [![Coverage Status][codecov-image]][codecov-link] -[![Landscale Code Health][landscape-image]][landscape-link] -- View the [MkDocs documentation][mkdocs]. -- Project [release notes][release-notes]. -- Visit the [MkDocs wiki](https://github.com/mkdocs/mkdocs/wiki) for community - resources, including third party themes and a list of MkDocs users. -- IRC channel: `#mkdocs` on freenode. -- Discussions and support: +MkDocs is a **fast**, **simple** and **downright gorgeous** static site +generator that's geared towards building project documentation. Documentation +source files are written in Markdown, and configured with a single YAML +configuration file. It is designed to be easy to use and can be extended with +third-party themes, plugins, and Markdown extensions. 
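For instance, a complete configuration for a small site can be a handful of lines of YAML (a minimal sketch; real projects add their own pages and settings):

```yaml
site_name: My Project
nav:
  - Home: index.md
theme:
  name: mkdocs
```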
+ +Please see the [Documentation][mkdocs] for an introductory tutorial and a full +user guide. + +## Features + +- Build static HTML files from Markdown files. +- Use Plugins and Markdown Extensions to enhance MkDocs. +- Use the built-in themes, third party themes or create your own. +- Publish your documentation anywhere that static files can be served. +- Much more! + +## Support + +If you need help with MkDocs, do not hesitate to get in contact with us! + +- For questions and high-level discussions, use **[Discussions]** on GitHub. + - For small questions, a good alternative is the **[Chat room]** on + Gitter/Matrix. +- To report a bug or make a feature request, open an **[Issue]** on GitHub. + +Please note that we may only provide +support for problems/questions regarding core features of MkDocs. Any +questions or bug reports about features of third-party themes, plugins, +extensions or similar should be made to their respective projects. +But, such questions are *not* banned from the [chat room]. + +Make sure to stick around to answer some questions as well! + +## Links + +- [Official Documentation][mkdocs] +- [Latest Release Notes][release-notes] +- [Catalog of third-party plugins, themes and recipes][catalog] + +## Contributing to MkDocs + +The MkDocs project welcomes, and depends on, contributions from developers and +users in the open source community. Please see the [Contributing Guide] for +information on how you can help. ## Code of Conduct -Everyone interacting in the MkDocs project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the [PyPA Code of Conduct]. +Everyone interacting in the MkDocs project's codebases, issue trackers, and +discussion forums is expected to follow the [PyPA Code of Conduct]. -[appveyor-image]: https://img.shields.io/appveyor/ci/d0ugal/mkdocs/master.svg -[appveyor-link]: https://ci.appveyor.com/project/d0ugal/mkdocs + [codecov-image]: https://codecov.io/github/mkdocs/mkdocs/coverage.svg?branch=master [codecov-link]: https://codecov.io/github/mkdocs/mkdocs?branch=master -[landscape-image]: https://landscape.io/github/mkdocs/mkdocs/master/landscape.svg?style=flat -[landscape-link]: https://landscape.io/github/mkdocs/mkdocs/master [pypi-v-image]: https://img.shields.io/pypi/v/mkdocs.svg [pypi-v-link]: https://pypi.org/project/mkdocs/ -[travis-image]: https://img.shields.io/travis/mkdocs/mkdocs/master.svg -[travis-link]: https://travis-ci.org/mkdocs/mkdocs - +[GHAction-image]: https://github.com/mkdocs/mkdocs/workflows/CI/badge.svg?branch=master&event=push +[GHAction-link]: https://github.com/mkdocs/mkdocs/actions?query=event%3Apush+branch%3Amaster + [mkdocs]: https://www.mkdocs.org +[Issue]: https://github.com/mkdocs/mkdocs/issues +[Discussions]: https://github.com/mkdocs/mkdocs/discussions +[Chat room]: https://gitter.im/mkdocs/community [release-notes]: https://www.mkdocs.org/about/release-notes/ - +[Contributing Guide]: https://www.mkdocs.org/about/contributing/ [PyPA Code of Conduct]: https://www.pypa.io/en/latest/code-of-conduct/ +[catalog]: https://github.com/mkdocs/catalog + +## License + +[BSD-2-Clause](https://github.com/mkdocs/mkdocs/blob/master/LICENSE) diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 5ed4bf1..0000000 --- a/appveyor.yml +++ /dev/null @@ -1,25 +0,0 @@ -build: false -environment: - matrix: - - TOXENV: py35-integration - - TOXENV: py35-min-req - - TOXENV: py35-unittests - - TOXENV: py36-integration - - TOXENV: py36-min-req - - TOXENV: py36-unittests - - TOXENV: 
py37-integration - - TOXENV: py37-min-req - - TOXENV: py37-unittests - - TOXENV: py38-integration - - TOXENV: py38-min-req - - TOXENV: py38-unittests - - TOXENV: flake8 -init: - - "ECHO %TOXENV%" -install: - - "c:\\python37\\Scripts\\pip install tox" -test_script: - - "git clean -f -d -x" - - "c:\\python37\\Scripts\\tox --version" - - "c:\\python37\\Scripts\\pip --version" - - "c:\\python37\\Scripts\\tox" diff --git a/debian/README.source b/debian/README.source new file mode 100644 index 0000000..6f31faf --- /dev/null +++ b/debian/README.source @@ -0,0 +1,44 @@ +This package is maintained with git-buildpackage(1). It follows DEP-14 for +branch naming (e.g. using debian/master for the current version in Debian +unstable due Debian Python team policy). + +It uses pristine-tar(1) to store enough information in git to generate bit +identical tarballs when building the package without having downloaded an +upstream tarball first. + +When working with patches it is recommended to use "gbp pq import" to import +the patches, modify the source and then use "gbp pq export --commit" to commit +the modifications. + +The changelog is generated using "gbp dch" so if you submit any changes don't +bother to add changelog entries but rather provide a nice git commit message +that can then end up in the changelog. + +It is recommended to build the package with pbuilder using: + + gbp buildpackage --git-pbuilder + +For information on how to set up a pbuilder environment see the git-pbuilder(1) +manpage. In short: + + DIST=sid git-pbuilder create + gbp clone https://salsa.debian.org/python-team/packages/python-mkdocs.git + cd python-mkdocs + gbp buildpackage --git-pbuilder + +The mkdocs source of 1.3.0 comes with new included web fonts from the +readthedocs inclusion. +Namely these are + + Roboto-Slab-Bold.{woff,woff2} + Roboto-Slab-Regular.{woff,woff2} + lato-bold-italic.{woff,woff2} + lato-bold-regular.{woff,woff2} + lato-normal{,-italic}.{woff,woff2} + lato-normal{,-regular}.{woff,woff2} + +These fonts haven't been packaged yet in Debian while writing. +The autopkgtest should be updated once these font files are available as an +package to check if the files are linked to the font package. + + -- Carsten Schoenert Tue, 29 Mar 2022 08:47:00 +0200 diff --git a/debian/changelog b/debian/changelog index 3bf9b68..f158fb7 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,8 +1,188 @@ -python-mkdocs (1.1.2+dfsg-deepin1) unstable; urgency=medium +python-mkdocs (1.5.3+dfsg-1) unstable; urgency=medium + + * Team Upload. + * [efaace8] New upstream version 1.5.3+dfsg + + -- Carsten Schoenert Sun, 01 Oct 2023 19:50:09 +0200 + +python-mkdocs (1.5.2+dfsg-1) unstable; urgency=medium + + * Team Upload. + [ Bas Couwenberg ] + * [a8cf95a] Provide dh-sequence-mkdocs. 
+ (Closes: #1024658) + + [ Carsten Schoenert ] + * [d300f73] d/watch: Add compression typ gz + * [e1a9122] d/gbp.conf: Catch all versions of minimized JQuery files + * [bbf7ba7] New upstream version 1.5.2+dfsg + * [2961680] Rebuild patch queue from patch-queue branch + Adjusted patches: + debian-hacks/Disable-the-Markdown-GitHub-links-extension.patch + debian-hacks/Disable-use_directory_urls-configuration-option.patch + debian-hacks/docs-Disable-Google-Analytics-in-mkdocs-doc.patch + * [f1c643e] d/rules: Add target override_dh_clean + (Closes: #1046685) + * [09b8645] d/control: Add BuildProfileSpec to Build-Depends + * [dbfc01b] d/rules: Catch nodoc DEB_BUILD_OPTIONS to disable docu build + * [c36fb0d] d/control: Bump Standards-Version to 4.6.2 + No further changes needed. + * [38ea797] d/copyright: Update content and year data + + -- Carsten Schoenert Tue, 05 Sep 2023 19:52:06 +0530 + +python-mkdocs (1.4.2+dfsg-2) unstable; urgency=medium + + * Team Upload. + * [3211714] Rebuild patch queue from patch-queue branch + Dropped patches (packages are now within the archive): + debian-hacks/mkdocs.yml-Don-t-use-callout-markdown-extension.patch + debian-hacks/mkdocs.yml-Don-t-use-literate-nav-plugin.patch + * [cebfb2c] d/control: Add new build dependencies to build mkdocs-doc + Adding mkdocs-literate-nav and python3-markdown-callouts to B-D. + * [ed2a332] d/rules: Remove Python version in PYBUILD_DESTDIR + * [5a2bb78] autopkgtest: Make upstream-tests more resilent + + -- Carsten Schoenert Tue, 29 Nov 2022 09:52:02 +0100 + +python-mkdocs (1.4.2+dfsg-1) unstable; urgency=medium + + * Team Upload. + * [e4d93b1] New upstream version 1.4.2+dfsg + * [b0819bf] Rebuild patch queue from patch-queue branch + Just a refreshing of existing patches. + * [bba881f] d/copyright: Update upstream year data + + -- Carsten Schoenert Sat, 05 Nov 2022 10:03:14 +0100 + +python-mkdocs (1.4.1+dfsg-1) unstable; urgency=medium + + * Team Upload. + * [4fb8f7d] New upstream version 1.4.1+dfsg + * [84a3885] Rebuild patch queue from patch-queue branch + Removed patch: + debian-hacks/Add-.md-files-to-the-MANIFEST.patch + * [cd7a450] d/{control,rules}: Move over to dh-sequence-python3 + * [f00cab4] d/control: Adding new required Build-Dependencies + Upstream switched to hatchling build together with a pyproject.toml file. + * [9fb9074] d/rules: Adjust test environment after build + Due to modifications within the folder structure we need to prepare and + copy some testing data into the build folder before running the tests. + * [43895a2] d/copyright: Adding myself to the copyright data + * [992c76e] d/rules: Use verbose mode while mkdocs is running + * [4d6f995] d/watch: Add repacksuffix to opts variable + * [0079d44] d/mkdocs.lintian-overrides: Add more stuff to ignore + * [52f2f5b] d/s/lintian-overrides: Ignore some files and folders + * [64e2689] Salsa CI: Disable unneeded runners + * [f42d4ec] Salsa CI: Rename Yaml file to salsa-ci.yml + + -- Carsten Schoenert Sun, 23 Oct 2022 08:03:05 +0200 + +python-mkdocs (1.4.0+dfsg-3) unstable; urgency=medium + + * Team Upload. + * [b3b226c] Rebuild patch queue from patch-queue branch + Added patch: + debian-hacks/s390x-Work-around-unittest-issue.patch + + -- Carsten Schoenert Sun, 09 Oct 2022 09:12:32 +0200 + +python-mkdocs (1.4.0+dfsg-2) unstable; urgency=medium + + * [a4060d7] autopkgtest: Ensure B-D installed for unittests + + -- Carsten Schoenert Sat, 08 Oct 2022 09:33:10 +0200 + +python-mkdocs (1.4.0+dfsg-1) unstable; urgency=medium + + * Team Upload. 
+ * New upstream version 1.4.0+dfsg + (Closes: 1009702) + * Rebuild patch-queue from patch queue branch + Adjusted/rebuild patches: + debian-hacks/Disable-non-English-search-support.patch + debian-hacks/Disable-the-Markdown-GitHub-links-extension.patch + debian-hacks/Disable-use_directory_urls-configuration-option.patch + Added patches: + debian-hacks/mkdocs.yml-Don-t-use-callout-markdown-extension.patch + debian-hacks/mkdocs.yml-Don-t-use-literate-nav-plugin.patch + Upstream has added the Markdown extension 'callout' and the plugin + 'literate-nav' to the mkdocs.yml file, both parts aren't yet packaged in + Debian. + * d/control: Adding new required Build-Dependencies + MkDocs uses now the plugins autorefs and mkdocstrings together with the + mkdocstrings-python-handlers Add-on and also the Markdown extension + pymdownx which are added to the B-D in order to get the MkDocs + documentation build. + * d/mkdocs.lintian-overrides: Update syntax + * d/mkdocs-doc.lintian-overrides: Update syntax + * autopkgtest: Add upstream tests to the test queue + + -- Carsten Schoenert Fri, 07 Oct 2022 18:23:43 +0200 + +python-mkdocs (1.3.0+dfsg-2) unstable; urgency=medium + + * Team Upload. + [ Debian Janitor ] + * Remove constraints unnecessary since buster: + + mkdocs: Drop versioned constraint on python3-livereload in Depends. + + mkdocs: Drop versioned constraint on mkdocs-bootstrap in Breaks. + * Update standards version to 4.6.1, no changes needed. + + [ Nilesh Patra ] + * Add python3-babel to B-D (Closes: #1020003) + * Add babel to suggests since it is being used for i18n + + -- Nilesh Patra Sat, 01 Oct 2022 14:49:56 +0530 - * Fix compatibility with Python 3.10. +python-mkdocs (1.3.0+dfsg-1) unstable; urgency=medium - -- Changwei Miao Mon, 26 Sep 2022 14:41:49 +0800 + * Team upload. + * New upstream version 1.3.0+dfsg + * d/gbp.conf: Ignore patch numbers while pq export + * Rebuild patch-queue from patch queue branch + Added patch: + debian-hacks/docs-Disable-Google-Analytics-in-mkdocs-doc.patch + Removed patches: + 0002-Remove-privacy-breaches.patch + 0008-Disable-Lunr.py-support.patch + 0011-tests-Fixup-inclusion-for-localization-module.patch + * d/watch: Correct Debian version mangling + * d/control: Add new required package dependency + python3-lunr is now available in the archive so it's usable as an + dependency. + * d/control: (Re-)add package python3-livereload + This dependency was added through a patch in the patch queue which isn't + used any more, to fulfill the need for this package add it to the + package dependencies. + * d/mkdocs.lintian-overrides: Ignore two template files + These two files are not a potential privacy breach as they are just + templates which the user needs to configure later. + * d/mkdocs-doc.lintian-overrides: Ignore files in docu + The documentation is build without usage of Google Analytics. + * d/mkdocs.links: Add more linking to FontAwesome + * d/python-mkdocs.NEWS: Create a new NEWS file + * d/README.source: Adding default data about source + + -- Carsten Schoenert Tue, 29 Mar 2022 08:59:53 +0200 + +python-mkdocs (1.2.3-1) unstable; urgency=medium + + * Team upload. 
+ * d/gbp.conf: Add some more defaults + * New upstream version 1.2.3 + (Closes: #1002198) + * Rebuild patch-queue from patch queue branch + Added patch: + 0011-tests-Fixup-inclusion-for-localization-module.patch + * d/watch: Update to version 4 + * d/copyright: Update content + * d/control: Update build dependencies + * d/control: Restructure and split off build-deps + * d/control: Adding entry Rules-Requires-Root: no + * d/mkdocs.lintian-overrides: Exclude HTML file within themes + + -- Carsten Schoenert Mon, 28 Feb 2022 17:35:14 +0100 python-mkdocs (1.1.2+dfsg-2) unstable; urgency=medium diff --git a/debian/control b/debian/control index 51a00a4..e4a2e37 100644 --- a/debian/control +++ b/debian/control @@ -4,18 +4,32 @@ Priority: optional Maintainer: Debian Python Team Uploaders: Brian May Build-Depends: - debhelper-compat (= 12), - dh-python (>= 3.20180313), - python3-all (>= 3.2), - python3-click, - python3-jinja2 (>= 2.10.1), - python3-livereload (>= 2.5.1-1~), - python3-markdown (>= 3.2.1), - python3-pkg-resources, - python3-setuptools, - python3-tornado, - python3-yaml -Standards-Version: 4.6.0 + debhelper-compat (= 13), + dh-sequence-python3, + pybuild-plugin-pyproject, + python3-all, +Build-Depends-Indep: + ghp-import (>= 1.0) , + mkdocs-autorefs , + mkdocs-click , + mkdocs-literate-nav , + mkdocs-redirects , + mkdocstrings , + mkdocstrings-python-handlers , + python3-babel, + python3-click , + python3-hatchling, + python3-jinja2 , + python3-markdown , + python3-markdown-callouts , + python3-mergedeep , + python3-platformdirs , + python3-pymdownx , + python3-pyyaml-env-tag , + python3-watchdog , + python3-yaml , +Rules-Requires-Root: no +Standards-Version: 4.6.2 Homepage: https://www.mkdocs.org/ Vcs-Browser: https://salsa.debian.org/python-team/packages/python-mkdocs Vcs-Git: https://salsa.debian.org/python-team/packages/python-mkdocs.git @@ -28,12 +42,14 @@ Depends: libjs-jquery, libjs-lunr, libjs-modernizr, + python3-livereload, + python3-lunr, python3-pkg-resources, sphinx-rtd-theme-common, ${misc:Depends}, ${python3:Depends} -Suggests: ghp-import, mkdocs-doc, nodejs -Breaks: mkdocs-bootstrap (<< 0.2) +Suggests: ghp-import, mkdocs-doc, nodejs, python3-babel +Provides: dh-sequence-mkdocs Description: Static site generator geared towards building project documentation MkDocs is a fast, simple and downright gorgeous static site generator that's geared towards building project documentation. Documentation diff --git a/debian/copyright b/debian/copyright index 04db158..16c5c76 100644 --- a/debian/copyright +++ b/debian/copyright @@ -6,34 +6,24 @@ Files-Excluded: mkdocs/contrib/search/lunr-language mkdocs/themes/mkdocs/css/font-awesome.min.css mkdocs/themes/mkdocs/fonts mkdocs/themes/mkdocs/js/bootstrap.min.js - mkdocs/themes/mkdocs/js/jquery-1.10.2.min.js + mkdocs/themes/mkdocs/js/jquery-*.min.js mkdocs/themes/readthedocs/css/theme.css mkdocs/themes/readthedocs/fonts mkdocs/themes/readthedocs/js Files: * -Copyright: 2014 Tom Christie +Copyright: 2014 - 2023 Tom Christie License: BSD-2-Clause Files: debian/* Copyright: 2015 Brian May + 2022-2023 Carsten Schoenert License: BSD-2-Clause Files: mkdocs/contrib/search/templates/search/lunr.js Copyright: 2018 Oliver Nightingale License: Expat -Files: mkdocs/utils/ghp_import.py -Copyright: 2013 Paul Davis -License: Tumbolia - Copying and distribution of this file, with or without modification, are - permitted in any medium without royalty provided the copyright notice and this - notice are preserved. - . 
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - . - 0. opan saurce LOL - Files: mkdocs/utils/meta.py Copyright: 2015 Waylan Limberg License: BSD-3-Clause diff --git a/debian/gbp.conf b/debian/gbp.conf index 3879982..dbac55f 100644 --- a/debian/gbp.conf +++ b/debian/gbp.conf @@ -1,2 +1,26 @@ [DEFAULT] -debian-branch=debian/master +# use pristine-tar +pristine-tar = True +# generate gz compressed orig tarball +compression = gz +debian-branch = debian/master +upstream-branch = upstream + +[pq] +patch-numbers = False + +[import-orig] +# filter out unwanted files/dirs from upstream +filter = [ + 'mkdocs/contrib/search/lunr-language', + 'mkdocs/themes/mkdocs/css/bootstrap.min.css', + 'mkdocs/themes/mkdocs/css/font-awesome.min.css', + 'mkdocs/themes/mkdocs/fonts', + 'mkdocs/themes/mkdocs/js/bootstrap.min.js', + 'mkdocs/themes/mkdocs/js/jquery-*.min.js', + 'mkdocs/themes/readthedocs/css/theme.css', + 'mkdocs/themes/readthedocs/fonts', + 'mkdocs/themes/readthedocs/js', + ] +# filter the files out of the tarball passed to pristine-tar +filter-pristine-tar = True diff --git a/debian/mkdocs-doc.lintian-overrides b/debian/mkdocs-doc.lintian-overrides new file mode 100644 index 0000000..4e40b36 --- /dev/null +++ b/debian/mkdocs-doc.lintian-overrides @@ -0,0 +1,3 @@ +# All the HTML files within mkdocs-doc have set the gtag value to 'None'. +# Due this no Google Analytics will be used. +mkdocs-doc: privacy-breach-generic [ +> > {%- endfor %} +> > ``` +> > +> > This old-style example even uses the obsolete top-level `extra_javascript` list. Please always use `config.extra_javascript` instead. +> > +> > So, a slightly more modern approach is the following, but it is still obsolete because it ignores the extra attributes of the script: +> > +> > ```django +> > {%- for path in config.extra_javascript %} +> > +> > {%- endfor %} +> > ``` +> +> >? EXAMPLE: **New style:** +> > +> > ```django +> > {%- for script in config.extra_javascript %} +> > {{ script | script_tag }} +> > {%- endfor %} +> > ``` +> +> If you wish to be able to pick up the new customizations while keeping your theme compatible with older versions of MkDocs, use this snippet: +> +> >! EXAMPLE: **Backwards-compatible style:** +> > +> > ```django +> > {%- for script in config.extra_javascript %} +> > {%- if script.path %} {# Detected MkDocs 1.5+ which has `script.path` and `script_tag` #} +> > {{ script | script_tag }} +> > {%- else %} {# Fallback - examine the file name directly #} +> > +> > {%- endif %} +> > {%- endfor %} +> > ``` + +## Theme Files + +There are various files which a theme treats special in some way. Any other +files are simply copied from the theme directory to the same path in the +`site_dir` when the site it built. For example image and CSS files have no +special significance and are copied as-is. Note, however, that if the user +provides a file with the same path in their `docs_dir`, then the user's file +will replace the theme file. + +### Template Files + +Any files with the `.html` extension are considered to be template files and are +not copied from the theme directory or any subdirectories. Also, any files +listed in [static_templates] are treated as templates regardless of their file +extension. + +[static_templates]: #static_templates + +### Theme Meta Files + +The various files required for packaging a theme are also ignored. Specifically, +the `mkdocs_theme.yml` configuration file and any Python files. 
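For example, a theme's `mkdocs_theme.yml` might do little more than register extra templates via `static_templates` (the file names here are only illustrative):

```yaml
static_templates:
  - 404.html
  - sitemap.xml
```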
+ +### Dot Files + +Theme authors can explicitly force MkDocs to ignore files by starting a file or +directory name with a dot. Any of the following files would be ignored: + +```text +.ignored.txt +.ignored/file.txt +foo/.ignored.txt +foo/.ignored/file.txt +``` + +### Documentation Files + +All documentation files are ignored. Specifically, any Markdown files (using any +of the file extensions supported by MKDocs). Additionally, any README files +which may exist in the theme directories are ignored. + +## Template Variables + +Each template in a theme is built with a template context. These are the +variables that are available to themes. The context varies depending on the +template that is being built. At the moment templates are either built with +the global context or with a page specific context. The global context is used +for HTML pages that don't represent an individual Markdown document, for +example a 404.html page or search.html. + +### Global Context + +The following variables are available globally on any template. + +#### config + +The `config` variable is an instance of MkDocs' config object generated from the +`mkdocs.yml` config file. While you can use any config option, some commonly +used options include: + +* [config.site_name](../user-guide/configuration.md#site_name) +* [config.site_url](../user-guide/configuration.md#site_url) +* [config.site_author](../user-guide/configuration.md#site_author) +* [config.site_description](../user-guide/configuration.md#site_description) +* [config.theme.locale](../user-guide/configuration.md#locale) (See also [Theme Configuration](#locale) below) +* [config.extra_javascript](../user-guide/configuration.md#extra_javascript) +* [config.extra_css](../user-guide/configuration.md#extra_css) +* [config.repo_url](../user-guide/configuration.md#repo_url) +* [config.repo_name](../user-guide/configuration.md#repo_name) +* [config.copyright](../user-guide/configuration.md#copyright) +* [config.google_analytics](../user-guide/configuration.md#google_analytics) + +#### nav + +The `nav` variable is used to create the navigation for the documentation. The +`nav` object is an iterable of [navigation objects](#navigation-objects) as +defined by the [nav] configuration setting. + +[nav]: ../user-guide/configuration.md#nav + +::: mkdocs.structure.nav.Navigation + options: + show_root_heading: false + show_root_toc_entry: true + members: [] + heading_level: 4 + +In addition to the iterable of [navigation objects](#navigation-objects), the +`nav` object contains the following attributes: + +::: mkdocs.structure.nav.Navigation.homepage + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Navigation.pages + options: + show_root_full_path: false + heading_level: 5 + +This list is not necessarily a complete list of all site pages as it does not contain +pages which are not included in the navigation. This list does match the list +and order of pages used for all "next page" and "previous page" links. For a +list of all pages, use the [pages](#pages) template variable. + +##### Nav Example + +Following is a basic usage example which outputs the first and second level +navigation as a nested list. + +```django +{% if nav|length > 1 %} +
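+{# Top-level items become list entries; items with children get a nested second-level list. #}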
+  <ul>
+  {% for nav_item in nav %}
+    {% if nav_item.children %}
+      <li>{{ nav_item.title }}
+        <ul>
+        {% for child in nav_item.children %}
+          <li><a href="{{ child.url|url }}">{{ child.title }}</a></li>
+        {% endfor %}
+        </ul>
+      </li>
+    {% else %}
+      <li><a href="{{ nav_item.url|url }}">{{ nav_item.title }}</a></li>
+    {% endif %}
+  {% endfor %}
+  </ul>
+{% endif %} +``` + +#### base_url + +The `base_url` provides a relative path to the root of the MkDocs project. While +this can be used directly by prepending it to a local relative URL, it is best +to use the [url](#url) template filter, which is smarter about how it applies +`base_url`. + +#### mkdocs_version + +Contains the current MkDocs version. + +#### build_date_utc + +A Python datetime object that represents the date and time the documentation +was built in UTC. This is useful for showing how recently the documentation +was updated. + +#### pages + +A flat list of `File` objects for *all* pages in the project. This list can +contain pages not included in the global [navigation](#nav) and may not match +the order of pages within that navigation. The [page](#page) object for each +`File` can be accessed from `file.page`. + +#### page + +In templates which are not rendered from a Markdown source file, the `page` +variable is `None`. In templates which are rendered from a Markdown source file, +the `page` variable contains a `page` object. The same `page` objects are used +as `page` [navigation objects](#navigation-objects) in the global +[navigation](#nav) and in the [pages](#pages) template variable. + +::: mkdocs.structure.pages.Page + options: + show_root_heading: false + show_root_toc_entry: true + members: [] + heading_level: 4 + +All `page` objects contain the following attributes: + +::: mkdocs.structure.pages.Page.title + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.content + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.toc + options: + show_root_full_path: false + heading_level: 5 + +The following example would display the top two levels of the Table of Contents +for a page. + +```django + +``` + +::: mkdocs.structure.pages.Page.meta + options: + show_root_full_path: false + heading_level: 5 + +In this example we define a `source` property above the page title: + +```text +source: generics.py + mixins.py + +# Page title + +Content... +``` + +A template can access this metadata for the page with the `meta.source` +variable. This could then be used to link to source files related to the +documentation page. + +```django +{% for filename in page.meta.source %} + + {{ filename }} + +{% endfor %} +``` + +::: mkdocs.structure.pages.Page.url + options: + show_root_full_path: false + heading_level: 5 + +It is expected that this be used with the [url](#url) filter to ensure the URL is relative to the current +page. + +```django +{{ page.title }} +``` + +::: mkdocs.structure.pages.Page.file + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.abs_url + options: + show_root_full_path: false + heading_level: 5 + +For example, if `site_url: https://example.com/`, then the value of +`page.abs_url` for the page `foo.md` would be `/foo/`. However, if +`site_url: https://example.com/bar/`, then the value of `page.abs_url` for the +page `foo.md` would be `/bar/foo/`. + +::: mkdocs.structure.pages.Page.canonical_url + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.edit_url + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.is_homepage + options: + show_root_full_path: false + heading_level: 5 + +This can be used in conjunction with other attributes of the `page` +object to alter the behavior. 
For example, to display a different title +on the homepage: + +```django +{% if not page.is_homepage %}{{ page.title }} - {% endif %}{{ site_name }} +``` + +::: mkdocs.structure.pages.Page.previous_page + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.next_page + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.StructureItem.parent + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.children + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.active + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.is_section + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.is_page + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.pages.Page.is_link + options: + show_root_full_path: false + heading_level: 5 + +#### AnchorLink + +::: mkdocs.structure.toc.AnchorLink + options: + show_root_heading: false + show_root_toc_entry: true + heading_level: 5 + +### Navigation Objects + +Navigation objects contained in the [nav](#nav) template variable may be one of +[section](#section) objects, [page](#page) objects, and [link](#link) objects. +While section objects may contain nested navigation objects, pages and links do +not. + +Page objects are the full page object as used for the current [page](#page) with +all of the same attributes available. Section and Link objects contain a subset +of those attributes as defined below: + +#### Section + +A `section` navigation object defines a named section in the navigation and +contains a list of child navigation objects. Note that sections do not contain +URLs and are not links of any kind. However, by default, MkDocs sorts index +pages to the top and the first child might be used as the URL for a section if a +theme chooses to do so. + +::: mkdocs.structure.nav.Section + options: + show_root_heading: false + show_root_toc_entry: true + members: [] + heading_level: 4 + +The following attributes are available on `section` objects: + +::: mkdocs.structure.nav.Section.title + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.StructureItem.parent + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Section.children + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Section.active + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Section.is_section + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Section.is_page + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Section.is_link + options: + show_root_full_path: false + heading_level: 5 + +#### Link + +A `link` navigation object contains a link which does not point to an internal +MkDocs page. 
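For example, a `nav` entry in `mkdocs.yml` that points to an external URL becomes a link object (the URL below is purely illustrative):

```yaml
nav:
  - Home: index.md
  - MkDocs project: https://www.mkdocs.org/
```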
+ +::: mkdocs.structure.nav.Link + options: + show_root_heading: false + show_root_toc_entry: true + members: [] + heading_level: 4 + +The following attributes are available on `link` objects: + +::: mkdocs.structure.nav.Link.title + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Link.url + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.StructureItem.parent + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Link.children + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Link.active + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Link.is_section + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Link.is_page + options: + show_root_full_path: false + heading_level: 5 + +::: mkdocs.structure.nav.Link.is_link + options: + show_root_full_path: false + heading_level: 5 + +### Extra Context + +Additional variables can be passed to the template with the +[`extra`](../user-guide/configuration.md#extra) configuration option. This is a +set of key value pairs that can make custom templates far more flexible. + +For example, this could be used to include the project version of all pages +and a list of links related to the project. This can be achieved with the +following `extra` configuration: + +```yaml +extra: + version: 0.13.0 + links: + - https://github.com/mkdocs + - https://docs.readthedocs.org/en/latest/builds.html#mkdocs + - https://www.mkdocs.org/ +``` + +And then displayed with this HTML in the custom theme. + +```django +{{ config.extra.version }} + +{% if config.extra.links %} +
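+{# "links" is an arbitrary key defined under "extra" in mkdocs.yml; each entry is a plain URL string. #}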
+  <ul>
+  {% for link in config.extra.links %}
+    <li>{{ link }}</li>
+  {% endfor %}
+  </ul>
+{% endif %} +``` + +## Template Filters + +In addition to [Jinja's default filters], the following custom filters are +available to use in MkDocs templates: + +### url + +Normalizes a URL. Absolute URLs are passed through unaltered. If the URL is +relative and the template context includes a page object, then the URL is +returned relative to the page object. Otherwise, the URL is returned with +[base_url](#base_url) prepended. + +```django +{{ page.title }} +``` + +### tojson + +Safely convert a Python object to a value in a JavaScript script. + +```django + +``` + +### script_tag + +NEW: **New in version 1.5.** + +Convert an item from `extra_javascript` to a ` +``` + +With properly configured settings, the following HTML in a template will add a +full search implementation to your theme. + +```django +
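+{# The plugin's JavaScript only requires the element IDs "mkdocs-search-query" and "mkdocs-search-results" used below; the surrounding markup may vary by theme. #}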

+<h1 id="search">Search Results</h1>
+
+<form action="search.html">
+  <input name="q" id="mkdocs-search-query" type="text">
+</form>
+
+<div id="mkdocs-search-results">
+  Sorry, page not found.
+</div>
+``` + +The JavaScript in the plugin works by looking for the specific ID's used in the +above HTML. The form input for the user to type the search query must be +identified with `id="mkdocs-search-query"` and the div where the results will be +placed must be identified with `id="mkdocs-search-results"`. + +The plugin supports the following options being set in the [theme's +configuration file], `mkdocs_theme.yml`: + +### include_search_page + +Determines whether the search plugin expects the theme to provide a dedicated +search page via a template located at `search/search.html`. + +When `include_search_page` is set to `true`, the search template will be built +and available at `search/search.html`. This method is used by the `readthedocs` +theme. + +When `include_search_page` is set to `false` or not defined, it is expected that +the theme provide some other mechanisms for displaying search results. For +example, the `mkdocs` theme displays results on any page via a modal. + +### search_index_only + +Determines whether the search plugin should only generate a search index or a +complete search solution. + +When `search_index_only` is set to `false`, then the search plugin modifies the +Jinja environment by adding its own `templates` directory (with a lower +precedence than the theme) and adds its scripts to the `extra_javascript` config +setting. + +When `search_index_only` is set to `true` or not defined, the search plugin +makes no modifications to the Jinja environment. A complete solution using the +provided index file is the responsibility of the theme. + +The search index is written to a JSON file at `search/search_index.json` in the +[site_dir]. The JSON object contained within the file may contain up to three +objects. + +```json +{ + config: {...}, + docs: [...], + index: {...} +} +``` + +If present, the `config` object contains the key/value pairs of config options +defined for the plugin in the user's `mkdocs.yml` config file under +`plugings.search`. The `config` object was new in MkDocs version *1.0*. + +The `docs` object contains a list of document objects. Each document object is +made up of a `location` (URL), a `title`, and `text` which can be used to create +a search index and/or display search results. + +If present, the `index` object contains a pre-built index which offers +performance improvements for larger sites. Note that the pre-built index is only +created if the user explicitly enables the [prebuild_index] config option. +Themes should expect the index to not be present, but can choose to use the +index when it is available. The `index` object was new in MkDocs version *1.0*. + +[Jinja2 template]: https://jinja.palletsprojects.com/ +[built-in themes]: https://github.com/mkdocs/mkdocs/tree/master/mkdocs/themes +[theme's configuration file]: #theme-configuration +[lunr.js]: https://lunrjs.com/ +[site_dir]: ../user-guide/configuration.md#site_dir +[prebuild_index]: ../user-guide/configuration.md#prebuild_index +[Jinja's default filters]: https://jinja.palletsprojects.com/en/latest/templates/#builtin-filters + +## Packaging Themes + +MkDocs makes use of [Python packaging] to distribute themes. This comes with a +few requirements. + +To see an example of a package containing one theme, see the [MkDocs Bootstrap +theme] and to see a package that contains many themes, see the [MkDocs +Bootswatch theme]. + +NOTE: +It is not strictly necessary to package a theme, as the entire theme +can be contained in the `custom_dir`. 
If you have created a "one-off theme," +that should be sufficient. However, if you intend to distribute your theme +for others to use, packaging the theme has some advantages. By packaging +your theme, your users can more easily install it, they can rely on a default +[configuration] being defined, and they can then take advantage of the +[custom_dir] to make tweaks to your theme to better suit their needs. + +[Python packaging]: https://packaging.python.org/en/latest/ +[MkDocs Bootstrap theme]: https://mkdocs.github.io/mkdocs-bootstrap/ +[MkDocs Bootswatch theme]: https://mkdocs.github.io/mkdocs-bootswatch/ + +### Package Layout + +The following layout is recommended for themes. Two files at the top level +directory called `MANIFEST.in` and `setup.py` beside the theme directory which +contains an empty `__init__.py` file, a theme configuration file +(`mkdocs_theme.yml`), and your template and media files. + +```text +. +|-- MANIFEST.in +|-- theme_name +| |-- __init__.py +| |-- mkdocs_theme.yml +| |-- main.html +| |-- styles.css +`-- setup.py +``` + +The `MANIFEST.in` file should contain the following contents but with +theme_name updated and any extra file extensions added to the include. + +```text +recursive-include theme_name *.ico *.js *.css *.png *.html *.eot *.svg *.ttf *.woff +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] +``` + +The `setup.py` should include the following text with the modifications +described below. + +```python +from setuptools import setup, find_packages + +VERSION = '0.0.1' + +setup( + name="mkdocs-themename", + version=VERSION, + url='', + license='', + description='', + author='', + author_email='', + packages=find_packages(), + include_package_data=True, + entry_points={ + 'mkdocs.themes': [ + 'themename = theme_name', + ] + }, + zip_safe=False +) +``` + +Fill in the URL, license, description, author and author email address. + +The name should follow the convention `mkdocs-themename` (like +`mkdocs-bootstrap` and `mkdocs-bootswatch`), starting with MkDocs, using +hyphens to separate words and including the name of your theme. + +Most of the rest of the file can be left unedited. The last section we need to +change is the entry_points. This is how MkDocs finds the theme(s) you are +including in the package. The name on the left is the one that users will use +in their mkdocs.yml and the one on the right is the directory containing your +theme files. + +The directory you created at the start of this section with the main.html file +should contain all of the other theme files. The minimum requirement is that +it includes a `main.html` for the theme. It **must** also include a +`__init__.py` file which should be empty, this file tells Python that the +directory is a package. + +### Theme Configuration + +A packaged theme is required to include a configuration file named +`mkdocs_theme.yml` which is placed in the root of your template files. The file +should contain default configuration options for the theme. However, if the +theme offers no configuration options, the file is still required and can be +left blank. A theme which is not packaged does not need a `mkdocs_theme.yml` +file as that file is not loaded from `theme.custom_dir`. + +The theme author is free to define any arbitrary options deemed necessary and +those options will be made available in the templates to control behavior. 
+For example, a theme might want to make a sidebar optional and include the +following in the `mkdocs_theme.yml` file: + +```yaml +show_sidebar: true +``` + +Then in a template, that config option could be referenced: + +```django +{% if config.theme.show_sidebar %} + +{% endif %} +``` + +And the user could override the default in their project's `mkdocs.yml` config +file: + +```yaml +theme: + name: themename + show_sidebar: false +``` + +In addition to arbitrary options defined by the theme, MkDocs defines a few +special options which alters its behavior: + +> BLOCK: +> +> #### locale +> +> This option mirrors the [theme] config option of the same name. If this +> value is not defined in the `mkdocs_theme.yml` file and the user does not +> set it in `mkdocs.yml` then it will default to `en` (English). The value +> is expected to match the language used in the text provided by the theme +> (such a "next" and "previous" links) and should be used as the value of +> the `` tag's `lang` attribute. See [Supporting theme localization/ +> translation](#supporting-theme-localizationtranslation) for more +> information. +> +> Note that during configuration validation, the provided string is converted +> to a `Locale` object. The object contains `Locale.language` and +> `Locale.territory` attributes and will resolve as a string from within a +> template. Therefore, the following will work fine: +> +> ```html +> +> ``` +> +> If the locale was set to `fr_CA` (Canadian French), then the above template +> would render as: +> +> ```html +> +> ``` +> +> If you did not want the territory attribute to be included, then reference +> the `language` attribute directly: +> +> ```html +> +> ``` +> +> That would render as: +> +> ```html +> +> ``` +> +> #### static_templates +> +> This option mirrors the [theme] config option of the same name and allows +> some defaults to be set by the theme. Note that while the user can add +> templates to this list, the user cannot remove templates included in the +> theme's config. +> +> #### extends +> +> Defines a parent theme that this theme inherits from. The value should be +> the string name of the parent theme. Normal [Jinja inheritance rules] +> apply. + +Plugins may also define some options which allow the theme to inform a plugin +about which set of plugin options it expects. See the documentation for any +plugins you may wish to support in your theme. + +### Distributing Themes + +With the above changes, your theme should now be ready to install. This can be +done with pip, using `pip install .` if you are still in the same directory as +the setup.py. + +Most Python packages, including MkDocs, are distributed on PyPI. To do this, +you should run the following command. + +```bash +python setup.py register +``` + +If you don't have an account setup, you should be prompted to create one. + +For a much more detailed guide, see the official Python packaging +documentation for [Packaging and Distributing Projects]. + +[Packaging and Distributing Projects]: https://packaging.python.org/en/latest/distributing/ +[Jinja inheritance rules]: https://jinja.palletsprojects.com/en/latest/templates/#template-inheritance + +## Supporting theme Localization/Translation + +While the built-in themes provide support for [localization/translation] of +templates, custom themes and third-party themes may choose not to. Regardless, +the [`locale`](#locale) setting of the `theme` configuration option is always +present and is relied upon by other parts of the system. 
Therefore, it is +recommended that all third-party themes use the same setting for designating a +language regardless of the system they use for translation. In that way, users +will experience consistent behavior regardless of the theme they may choose. + +The method for managing translations is up to the developers of a theme. +However, if a theme developer chooses to use the same mechanisms used by the +built-in themes, the sections below outline how to enable and make use of the +same commands utilized by MkDocs. + +[localization/translation]: ../user-guide/localizing-your-theme.md + +### Using the Localization/Translation commands + +WARNING: +As **[pybabel] is not installed by default** and most users will not have +pybabel installed, theme developers and/or translators should make sure to +have installed the necessary dependencies +(using `pip install mkdocs[i18n]`) in order for the commands to be +available for use. + +The translation commands should be called from the root of your theme's working tree. + +For an overview of the workflow used by MkDocs to translate the built-in +themes, see the appropriate [section] of the Contributing Guide and the +[Translation Guide]. + +[pybabel]: https://babel.pocoo.org/en/latest/setup.html +[section]: ../about/contributing.md#submitting-changes-to-the-builtin-themes +[Translation Guide]: translations.md + +### Example custom theme Localization/Translation workflow + +> NOTE: If your theme inherits from an existing theme which already provides +> translation catalogs, your theme's translations will be merged with the +> parent theme's translations during a MkDocs build. +> +> This means that you only need to concentrate on the added translations. +> Yet, you will still benefit from the translations of the parent theme. At +> the same time, you may override any of parent theme's translations! + +Let's suppose that you're working on your own fork of the +[mkdocs-basic-theme][basic theme] and want to add translations to it. + +Edit the templates by wrapping text in your HTML sources with +`{% trans %}` and `{% endtrans %}` as follows: + +```diff +--- a/basic_theme/base.html ++++ b/basic_theme/base.html +@@ -88,7 +88,7 @@ + + + +-

+-        <p>This is an example theme for MkDocs.</p>
++        <p>{% trans %}This is an example theme for MkDocs.{% endtrans %}</p>
+ 
+         <p>
+ It is designed to be read by looking at the theme HTML which is heavily +``` + +Then you would follow the [Translation Guide] as usual to get your translations +running. + +### Packaging Translations with your theme + +While the Portable Object Template (`pot`) file created by the +`extract_messages` command and the Portable Object (`po`) files created by the +`init_catalog` and `update_catalog` commands are useful for creating and +editing translations, they are not used by MkDocs directly and do not need to +be included in a packaged release of a theme. When MkDocs builds a site with +translations, it only makes use of the binary `mo` files(s) for the specified +locale. Therefore, when [packaging a theme], make sure to include it in the +"wheels", using a `MANIFEST.in` file or otherwise. + +Then, before building your Python package, you will want to ensure that the +binary `mo` file for each locale is up-to-date by running the `compile_catalog` +command for each locale. MkDocs expects the binary `mo` files to be located at +`locales//LC_MESSAGES/messages.mo`, which the `compile_catalog` +command automatically does for you. See [Testing theme translations] for +details. + +NOTE: +As outlined in our [Translation Guide], the MkDocs project has chosen to +include the `pot` and `po` files in our code repository, but not the +`mo` files. This requires us to always run `compile_catalog` before +packaging a new release regardless of whether any changes were made to a +translation or not. However, you may chose an alternate workflow for your +theme. At a minimum, you need to ensure that up-to-date `mo` files are +included at the correct location in each release. However, you may use a +different process for generating those `mo` files if you chose to do so. + +[packaging a theme]: #packaging-themes +[Testing theme translations]: translations.md#testing-theme-translations diff --git a/docs/dev-guide/translations.md b/docs/dev-guide/translations.md new file mode 100644 index 0000000..13d0226 --- /dev/null +++ b/docs/dev-guide/translations.md @@ -0,0 +1,221 @@ +# Translations + +Theme localization guide. + +--- + +The [built-in themes] that are included with MkDocs provide support for +translations. This is a guide for translators, which documents the process for +contributing new translations and/or updating existing translations. For +guidance on modifying the existing themes, see the [Contributing Guide][update +themes]. To enable a specific translation see the documentation about the +specific theme you are using in the [User Guide][built-in themes]. For +translations of third-party themes, please see the documentation for those +themes. For a third-party theme to make use of MkDocs' translation tools and +methods, that theme must be properly [configured] to make use of those tools. + +NOTE: +Translations only apply to text contained within a theme's template, such +as "next" and "previous" links. The Markdown content of a page is not +translated. If you wish to create multilingual documentation, you need to +combine theme localization with a third-party +internationalization/localization plugin. + +[built-in themes]: ../user-guide/choosing-your-theme.md +[update themes]: ../about/contributing.md#submitting-changes-to-the-builtin-themes +[configured]: themes.md#supporting-theme-localizationtranslation + +## Localization tooling prerequisites + +Theme localization makes use of the [babel][babel] project for generation and +compilation of localization files. 
You will need to be working from the +git working tree on your local machine to make use of the translation commands. + +See the [Contributing Guide] for direction on how to [Install for Development] +and [Submit a Pull Request]. The instructions in this document assume that you +are working from a properly configured development environment. + +Make sure translation requirements are installed in your environment: + +```bash +pip install mkdocs[i18n] +``` + +[babel]: https://babel.pocoo.org/en/latest/cmdline.html +[Contributing Guide]: ../about/contributing.md +[Install for Development]: ../about/contributing.md#installing-for-development +[Submit a Pull Request]: ../about/contributing.md#submitting-pull-requests + +## Adding language translations to themes + +If your favorite language locale is not yet supported on one (or both) of the +built-in themes (`mkdocs` and `readthedocs`), you can easily contribute a +translation by following the steps below. + +Here is a quick summary of what you'll need to do: + +1. [Fork and clone the MkDocs repository](#fork-and-clone-the-mkdocs-repository) and then [install MkDocs for development](../about/contributing.md#installing-for-development) for adding and testing translations. +2. [Initialize new localization catalogs](#initializing-the-localization-catalogs) for your language (if a translation for your locale already exists, follow the instructions for [updating theme localization files](#updating-the-translation-catalogs) instead). +3. [Add a translation](#translating-the-mkdocs-themes) for every text placeholder in the localized catalogs. +4. [Locally serve and test](#testing-theme-translations) the translated themes for your language. +5. [Update the documentation](#updating-theme-documentation) about supported translations for each translated theme. +6. [Contribute your translation](#contributing-translations) through a Pull Request. + +NOTE: +Translation locales are usually identified using the [ISO-639-1] (2-letter) +language codes. While territory/region/county codes are also supported, +location specific translations should only be added after the general +language translation has been completed and the regional dialect requires +use of a term which differs from the general language translation. + +[ISO-639-1]: https://en.wikipedia.org/wiki/ISO_639-1 + +### Fork and clone the MkDocs repository + +In the following steps you'll work with a fork of the MkDocs repository. Follow +the instructions for [forking and cloning the MkDocs +repository](../about/contributing.md#installing-for-development). + +To test the translations you also need to [install MkDocs for +development](../about/contributing.md#installing-for-development) from your fork. + +### Initializing the localization catalogs + +The templates for each theme contain text placeholders that have been extracted +into a Portable Object Template (`messages.pot`) file, which is present in each +theme's folder. + +Initializing a catalog consists of running a command which will create a +directory structure for your desired language and prepare a Portable Object +(`messages.po`) file derived from the `pot` file of the theme. + +Use the `init_catalog` command on each theme's directory and provide the appropriate language code (`-l `). + +The language code is almost always just two lowercase letters, such as `sv`, but in some cases it needs to be further disambiguated. 
+ +See: + +* [Already translated languages for built-in themes](../user-guide/choosing-your-theme.md#mkdocs-locale) +* [ISO 639 Language List](https://www.localeplanet.com/icu/iso639.html) +* [Language subtag registry](https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry) + +In particular, the way to know that the `pt` language should be disambiguated as `pt_PT` and `pt_BR` is that the *Language subtag registry* page contains `pt-` if you search for it. Whereas `sv` should remain just `sv`, because that page does *not* contain `sv-`. + +So, if we pick `es` (Spanish) as our example language code, to add a translation for it to both built-in themes, run these commands: + +```bash +pybabel init --input-file mkdocs/themes/mkdocs/messages.pot --output-dir mkdocs/themes/mkdocs/locales -l es +pybabel init --input-file mkdocs/themes/readthedocs/messages.pot --output-dir mkdocs/themes/readthedocs/locales -l es +``` + +The above command will create a file structure as follows: + +```text +mkdocs/themes/mkdocs/locales +├── es +│   └── LC_MESSAGES +│   └── messages.po +``` + +You can now move on to the next step and [add a +translation](#translating-the-mkdocs-themes) for every text placeholder in the +localized catalog. + +## Updating a theme translation + +If a theme's `messages.pot` template file has been [updated][update themes] +since the `messages.po` was last updated for your locale, follow the steps +below to update the theme's `messages.po` file: + +1. [Update the theme's translation catalog](#updating-the-translation-catalogs) to refresh the translatable text placeholders of each theme. +2. [Translate](#translating-the-mkdocs-themes) the newly added translatable text placeholders on every `messages.po` catalog file language you can. +3. [Locally serve and test](#testing-theme-translations) the translated themes for your language. +4. [Contribute your translation](#contributing-translations) through a Pull Request. + +### Updating the translation catalogs + +This step should be completed after a theme template have been [updated][update +themes] for each language that you are comfortable contributing a translation +for. + +To update the `fr` translation catalog of both built-in themes, use the following commands: + +```bash +pybabel update --ignore-obsolete --update-header-comment --input-file mkdocs/themes/mkdocs/messages.pot --output-dir mkdocs/themes/mkdocs/locales -l fr +pybabel update --ignore-obsolete --update-header-comment --input-file mkdocs/themes/readthedocs/messages.pot --output-dir mkdocs/themes/readthedocs/locales -l fr +``` + +You can now move on to the next step and [add a translation] for every updated +text placeholder in the localized catalog. + +[add a translation]: #translating-the-mkdocs-themes + +### Translating the MkDocs themes + +Now that your localized `messages.po` files are ready, all you need to do is +add a translation in each `msgstr` item for each `msgid` item in the file. + +```text +msgid "Next" +msgstr "Siguiente" +``` + +WARNING: +Do not modify the `msgid` as it is common to all translations. Just add +its translation in the `msgstr` item. + +Once you have finished translating all of the terms listed in the `po` file, +you'll want to [test your localized theme](#testing-theme-translations). + +### Testing theme translations + +To test a theme with translations, you need to first compile the `messages.po` +files of your theme into `messages.mo` files. 
The following commands will compile +the `es` translation for both built-in themes: + +```bash +pybabel compile --statistics --directory mkdocs/themes/mkdocs/locales -l es +pybabel compile --statistics --directory mkdocs/themes/readthedocs/locales -l es +``` + +The above command results in the following file structure: + +```text +mkdocs/themes/mkdocs/locales +├── es +│   └── LC_MESSAGES +│   ├── messages.mo +│   └── messages.po +``` + +Note that the compiled `messages.mo` file was generated based on the +`messages.po` file that you just edited. + +Then modify the `mkdocs.yml` file at the root of the project to test the new +and/or updated locale: + +```yaml +theme: + name: mkdocs + locale: es +``` + +Finally, run `mkdocs serve` to check out your new localized version of the theme. + +> NOTE: +> The build and release process takes care of compiling and distributing +> all locales to end users so you only have to worry about contributing the +> actual text translation `messages.po` files (the rest is ignored by git). +> +> After you have finished testing your work, be sure to undo the change to +> the `locale` setting in the `mkdocs.yml` file before submitting your +> changes. + +## Updating theme documentation + +The page [Choosing your theme](../user-guide/choosing-your-theme.md) updates by itself with all available locale options. + +## Contributing translations + +It is now time for you to [contribute](../about/contributing.md) your nice work +to the project. Thank you! diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 0000000..89c78e3 --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,221 @@ +# Getting Started with MkDocs + +An introductory tutorial! + +--- + +## Installation + +To install MkDocs, run the following command from the command line: + +```bash +pip install mkdocs +``` + +For more details, see the [Installation Guide]. + +## Creating a new project + +Getting started is super easy. To create a new project, run the following +command from the command line: + +```bash +mkdocs new my-project +cd my-project +``` + +Take a moment to review the initial project that has been created for you. + +![The initial MkDocs layout](img/initial-layout.png) + +There's a single configuration file named `mkdocs.yml`, and a folder named +`docs` that will contain your documentation source files (`docs` is +the default value for the [docs_dir] configuration setting). Right now the `docs` +folder just contains a single documentation page, named `index.md`. + +MkDocs comes with a built-in dev-server that lets you preview your documentation +as you work on it. Make sure you're in the same directory as the `mkdocs.yml` +configuration file, and then start the server by running the `mkdocs serve` +command: + +```console +$ mkdocs serve +INFO - Building documentation... +INFO - Cleaning site directory +INFO - Documentation built in 0.22 seconds +INFO - [15:50:43] Watching paths for changes: 'docs', 'mkdocs.yml' +INFO - [15:50:43] Serving on http://127.0.0.1:8000/ +``` + +Open up in your browser, and you'll see the default +home page being displayed: + +![The MkDocs live server](img/screenshot.png) + +The dev-server also supports auto-reloading, and will rebuild your documentation +whenever anything in the configuration file, documentation directory, or theme +directory changes. + +Open the `docs/index.md` document in your text editor of choice, change the +initial heading to `MkLorum`, and save your changes. 
Your browser will +auto-reload and you should see your updated documentation immediately. + +Now try editing the configuration file: `mkdocs.yml`. Change the +[`site_name`][site_name] setting to `MkLorum` and save the file. + +```yaml +site_name: MkLorum +site_url: https://example.com/ +``` + +Your browser should immediately reload, and you'll see your new site name take +effect. + +![The site_name setting](img/site-name.png) + +NOTE: +The [`site_name`][site_name] and [`site_url`][site_url] configuration +options are the only two required options in your configuration file. When +you create a new project, the `site_url` option is assigned the placeholder +value: `https://example.com`. If the final location is known, you can change +the setting now to point to it. Or you may choose to leave it alone for now. +Just be sure to edit it before you deploy your site to a production server. + +## Adding pages + +Now add a second page to your documentation: + +```bash +curl 'https://jaspervdj.be/lorem-markdownum/markdown.txt' > docs/about.md +``` + +As our documentation site will include some navigation headers, you may want to +edit the configuration file and add some information about the order, title, and +nesting of each page in the navigation header by adding a [`nav`][nav] +setting: + +```yaml +site_name: MkLorum +site_url: https://example.com/ +nav: + - Home: index.md + - About: about.md +``` + +Save your changes and you'll now see a navigation bar with `Home` and `About` +items on the left as well as `Search`, `Previous`, and `Next` items on the +right. + +![Screenshot](img/multipage.png) + +Try the menu items and navigate back and forth between pages. Then click on +`Search`. A search dialog will appear, allowing you to search for any text on +any page. Notice that the search results include every occurrence of the search +term on the site and links directly to the section of the page in which the +search term appears. You get all of that with no effort or configuration on your +part! + +![Screenshot](img/search.png) + +## Theming our documentation + +Now change the configuration file to alter how the documentation is displayed by +changing the theme. Edit the `mkdocs.yml` file and add a [`theme`][theme] setting: + +```yaml +site_name: MkLorum +site_url: https://example.com/ +nav: + - Home: index.md + - About: about.md +theme: readthedocs +``` + +Save your changes, and you'll see the ReadTheDocs theme being used. + +![Screenshot](img/readthedocs.png) + +## Changing the Favicon Icon + +By default, MkDocs uses the [MkDocs favicon] icon. To use a different icon, create +an `img` subdirectory in the `docs` directory and copy your custom `favicon.ico` +file to that directory. MkDocs will automatically detect and use that file as your +favicon icon. + +[MkDocs favicon]: img/favicon.ico + +## Building the site + +That's looking good. You're ready to deploy the first pass of your `MkLorum` +documentation. First build the documentation: + +```bash +mkdocs build +``` + +This will create a new directory, named `site`. Take a look inside the +directory: + +```console +$ ls site +about fonts index.html license search.html +css img js mkdocs sitemap.xml +``` + +Notice that your source documentation has been output as two HTML files named +`index.html` and `about/index.html`. You also have various other media that's +been copied into the `site` directory as part of the documentation theme. You +even have a `sitemap.xml` file and `mkdocs/search_index.json`. 
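+Since the generated site is only static files, you can give the build a quick
+sanity check before deploying it. As one option (nothing MkDocs-specific is
+assumed here; any static file server and any free port will do), serve the
+`site` directory with Python's standard library server:
+
+```bash
+# Serve the freshly built static site for a quick local check.
+python -m http.server 8001 --directory site
+```
+
+This is only a convenience check; `mkdocs serve` remains the tool to use while
+you are editing the documentation source files.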
+ +If you're using source code control such as `git` you probably don't want to +check your documentation builds into the repository. Add a line containing +`site/` to your `.gitignore` file. + +```bash +echo "site/" >> .gitignore +``` + +If you're using another source code control tool you'll want to check its +documentation on how to ignore specific directories. + +## Other Commands and Options + +There are various other commands and options available. For a complete list of +commands, use the `--help` flag: + +```bash +mkdocs --help +``` + +To view a list of options available on a given command, use the `--help` flag +with that command. For example, to get a list of all options available for the +`build` command run the following: + +```bash +mkdocs build --help +``` + +## Deploying + +The documentation site that you just built only uses static files so you'll be +able to host it from pretty much anywhere. Simply upload the contents of the +entire `site` directory to wherever you're hosting your website from and +you're done. For specific instructions on a number of common hosts, see the +[Deploying your Docs][deploy] page. + +## Getting help + +See the [User Guide] for more complete documentation of all of MkDocs' features. + +To get help with MkDocs, please use the [GitHub discussions] or [GitHub issues]. + +[Installation Guide]: user-guide/installation.md +[docs_dir]: user-guide/configuration.md#docs_dir +[deploy]: user-guide/deploying-your-docs.md +[nav]: user-guide/configuration.md#nav +[GitHub discussions]: https://github.com/mkdocs/mkdocs/discussions +[GitHub issues]: https://github.com/mkdocs/mkdocs/issues +[site_name]: user-guide/configuration.md#site_name +[site_url]: user-guide/configuration.md#site_url +[theme]: user-guide/configuration.md#theme +[User Guide]: user-guide/README.md diff --git a/docs/hooks.py b/docs/hooks.py new file mode 100644 index 0000000..ad88111 --- /dev/null +++ b/docs/hooks.py @@ -0,0 +1,32 @@ +import re +from pathlib import Path + +from mkdocs.config.defaults import MkDocsConfig +from mkdocs.structure.nav import Page + + +def _get_language_of_translation_file(path: Path) -> str: + with path.open(encoding='utf-8') as f: + translation_line = f.readline() + m = re.search('^# (.+) translations ', translation_line) + assert m + return m[1] + + +def on_page_markdown(markdown: str, page: Page, config: MkDocsConfig, **kwargs): + if page.file.src_uri == 'user-guide/choosing-your-theme.md': + here = Path(config.config_file_path).parent + + def replacement(m: re.Match) -> str: + lines = [] + for d in sorted(here.glob(m[2])): + lang = _get_language_of_translation_file(Path(d, 'LC_MESSAGES', 'messages.po')) + lines.append(f'{m[1]}`{d.name}`: {lang}') + return '\n'.join(lines) + + return re.sub( + r'^( *\* )\(see the list of existing directories `(.+)`\)$', + replacement, + markdown, + flags=re.MULTILINE, + ) diff --git a/docs/img/plugin-events.py b/docs/img/plugin-events.py new file mode 100644 index 0000000..0ad1f98 --- /dev/null +++ b/docs/img/plugin-events.py @@ -0,0 +1,171 @@ +# Run this to re-generate 'plugin-events.svg'. +# Requires `pip install graphviz`. 
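+#
+# The script builds a Graphviz digraph of MkDocs' plugin events: each event
+# becomes a labelled cluster, and the edges trace how config, files, nav and
+# page data flow between the build steps.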
+ +import contextlib +import pathlib +import re + +from graphviz import Digraph + +graph = Digraph("MkDocs", format="svg") +graph.attr(compound="true", bgcolor="transparent") +graph.graph_attr.update(fontname="inherit", tooltip=" ") +graph.node_attr.update(fontname="inherit", tooltip=" ", style="filled") +graph.edge_attr.update(fontname="inherit", tooltip=" ") + + +def strip_suffix(name): + return re.sub(r"_.$", "", name) + + +subgraph_to_first_node = {} +subgraph_to_last_node = {} + + +def node(g, name, **kwargs): + if "_point" in name: + kwargs.setdefault("shape", "point") + else: + kwargs.setdefault("fillcolor", "#77ff7788") + kwargs.setdefault("color", "#00000099") + kwargs.setdefault("label", strip_suffix(name)) + + g.node(name, **kwargs) + + subgraph_to_first_node.setdefault(g.name, name) + subgraph_to_last_node[g.name] = name + + +def edge(g, a, b, dashed=False, **kwargs): + if kwargs.get("style") == "dashed": + kwargs.setdefault("penwidth", "1.5") + + if a in subgraph_to_last_node: + kwargs.setdefault("ltail", a) + a = subgraph_to_last_node[a] + if b in subgraph_to_first_node: + kwargs.setdefault("lhead", b) + b = subgraph_to_first_node[b] + + if a.startswith(("on_", "placeholder_")): + a += ":s" + else: + node(g, a.split(":")[0]) + if b.startswith(("on_", "placeholder_")): + b += ":n" + else: + node(g, b.split(":")[0]) + + g.edge(a, b, **kwargs) + + +def ensure_order(a, b): + edge(graph, a, b, style="invis") + + +@contextlib.contextmanager +def cluster(g, name, **kwargs): + assert name.startswith("cluster_") + kwargs.setdefault("label", strip_suffix(name)[len("cluster_") :]) + kwargs.setdefault("bgcolor", "#dddddd55") + kwargs.setdefault("pencolor", "#00000066") + with g.subgraph(name=name) as c: + c.attr(**kwargs) + yield c + + +def event(g, name, parameters): + with cluster( + g, f"cluster_{name}", href=f"#{name}", bgcolor="#ffff3388", pencolor="#00000088" + ) as c: + label = "|".join(f"<{p}>{p}" for p in parameters.split()) + node(c, name, shape="record" if parameters else "point", label=label, fillcolor="#ffffff55") + + +def placeholder_cluster(g, name): + with cluster(g, name) as c: + node(c, f"placeholder_{name}", label="...", fillcolor="transparent", color="transparent") + + +event(graph, "on_startup", "command dirty") + +with cluster(graph, "cluster_build", bgcolor="#dddddd11") as g: + event(g, "on_config", "config") + event(g, "on_pre_build", "config") + event(g, "on_files", "files config") + event(g, "on_nav", "nav config files") + + edge(g, "load_config", "on_config:config") + edge(g, "on_config:config", "on_pre_build:config") + edge(g, "on_config:config", "get_files") + edge(g, "get_files", "on_files:files") + edge(g, "on_files:files", "get_nav") + edge(g, "get_nav", "on_nav:nav") + edge(g, "on_files:files", "on_nav:files") + + with cluster(g, "cluster_populate_page") as c: + event(c, "on_pre_page", "page config files") + event(c, "on_page_read_source", "page config") + event(c, "on_page_markdown", "markdown page config files") + event(c, "on_page_content", "html page config files") + + edge(c, "on_pre_page:page", "on_page_read_source:page", style="dashed") + edge(c, "cluster_on_page_read_source", "on_page_markdown:markdown", style="dashed") + edge(c, "on_page_markdown:markdown", "render_p", style="dashed") + edge(c, "render_p", "on_page_content:html", style="dashed") + + edge(g, "on_nav:files", "pages_point_a", arrowhead="none") + edge(g, "pages_point_a", "on_pre_page:page", style="dashed") + edge(g, "pages_point_a", "cluster_populate_page") + + for i in 2, 3: + 
+        placeholder_cluster(g, f"cluster_populate_page_{i}")
+        edge(g, "pages_point_a", f"cluster_populate_page_{i}", style="dashed")
+        edge(g, f"cluster_populate_page_{i}", "pages_point_b", style="dashed")
+
+    event(g, "on_env", "env config files")
+
+    edge(g, "on_page_content:html", "pages_point_b", style="dashed")
+    edge(g, "pages_point_b", "on_env:files")
+
+    edge(g, "pages_point_b", "pages_point_c", arrowhead="none")
+    edge(g, "pages_point_c", "on_page_context:page", style="dashed")
+
+    with cluster(g, "cluster_build_page") as c:
+        event(c, "on_page_context", "context page config nav")
+        event(c, "on_post_page", "output page config")
+
+        edge(c, "get_context", "on_page_context:context")
+        edge(c, "on_page_context:context", "render")
+        edge(c, "get_template", "render")
+        edge(c, "render", "on_post_page:output")
+        edge(c, "on_post_page:output", "write_file")
+
+    edge(g, "on_nav:nav", "cluster_build_page")
+    edge(g, "on_env:env", "cluster_build_page")
+
+    for i in 2, 3:
+        placeholder_cluster(g, f"cluster_build_page_{i}")
+        edge(g, "pages_point_c", f"cluster_build_page_{i}", style="dashed")
+
+    event(g, "on_post_build", "config")
+
+event(graph, "on_serve", "server config")
+event(graph, "on_shutdown", "")
+
+
+ensure_order("on_startup", "cluster_build")
+ensure_order("on_pre_build", "on_files")
+ensure_order("on_nav", "cluster_populate_page")
+ensure_order("cluster_populate_page_2", "cluster_populate_page_3")
+ensure_order("on_page_content", "on_env")
+ensure_order("pages_point_c", "cluster_build_page")
+ensure_order("cluster_build_page_2", "cluster_build_page_3")
+ensure_order("cluster_build_page", "on_post_build")
+ensure_order("on_post_build", "on_serve")
+ensure_order("on_serve", "on_shutdown")
+
+
+data = graph.pipe()
+data = data[data.index(b
[remainder of plugin-events.py and the diff of the generated file docs/img/plugin-events.svg (the SVG diagram produced by this script) not shown]
diff --git a/docs/img/readthedocs.png b/docs/img/readthedocs.png
index c88bffd..b5456bb 100644
Binary files a/docs/img/readthedocs.png and b/docs/img/readthedocs.png differ
diff --git a/docs/index.md b/docs/index.md
index 0e2dadb..069d822 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -4,361 +4,86 @@ Project documentation with Markdown.
 
 ---
 
-## Overview
-
 MkDocs is a **fast**, **simple** and **downright gorgeous** static site
 generator that's geared towards building project documentation. Documentation
 source files are written in Markdown, and configured with a single YAML
-configuration file. Start by reading the introduction below, then check the User
-Guide for more info.
-
-### Host anywhere
-
-MkDocs builds completely static HTML sites that you can host on GitHub pages,
-Amazon S3, or [anywhere][deploy] else you choose.
-
-### Great themes available
-
-There's a stack of good looking [themes] available for MkDocs. Choose between
-the built in themes: [mkdocs] and [readthedocs], select one of the 3rd
-party themes listed on the [MkDocs Themes] wiki page, or [build your own].
-
-### Preview your site as you work
-
-The built-in dev-server allows you to preview your documentation as you're
-writing it. It will even auto-reload and refresh your browser whenever you save
-your changes.
-
-### Easy to customize
-
-Get your project documentation looking just the way you want it by customizing
-the [theme] and/or installing some [plugins].
- ---- - -## Installation - -### Install with a Package Manager - -If you have and use a package manager (such as [apt-get], [dnf], [homebrew], -[yum], [chocolatey], etc.) to install packages on your system, then you may -want to search for a "MkDocs" package and, if a recent version is available, -install it with your package manager (check your system's documentation for -details). That's it, you're done! Skip down to [Getting Started](#getting-started). - -If your package manager does not have a recent "MkDocs" package, you can still -use your package manager to install "Python" and "pip". Then you can use pip to -[install MkDocs](#installing-mkdocs). - -[apt-get]: https://help.ubuntu.com/community/AptGet/Howto -[homebrew]: https://brew.sh/ -[dnf]: https://dnf.readthedocs.io/en/latest/index.html -[yum]: http://yum.baseurl.org/ -[chocolatey]: https://chocolatey.org/ - -### Manual Installation - -In order to manually install MkDocs you'll need [Python] installed on your -system, as well as the Python package manager, [pip]. You can check if you have -these already installed from the command line: - -```bash -$ python --version -Python 3.8.2 -$ pip --version -pip 20.0.2 from /usr/local/lib/python3.8/site-packages/pip (python 3.8) -``` - -MkDocs supports Python versions 3.5, 3.6, 3.7, 3.8, and pypy3. - -#### Installing Python - -Install [Python] by downloading an installer appropriate for your system from -[python.org] and running it. - -!!! Note - - If you are installing Python on Windows, be sure to check the box to have - Python added to your PATH if the installer offers such an option (it's - normally off by default). - - ![Add Python to PATH](img/win-py-install.png) - -[python.org]: https://www.python.org/downloads/ - -#### Installing pip - -If you're using a recent version of Python, the Python package manager, [pip], -is most likely installed by default. However, you may need to upgrade pip to the -lasted version: - -```bash -pip install --upgrade pip -``` - -If you need to install [pip] for the first time, download [get-pip.py]. -Then run the following command to install it: - -```bash -python get-pip.py -``` - -#### Installing MkDocs - -Install the `mkdocs` package using pip: - -```bash -pip install mkdocs -``` - -You should now have the `mkdocs` command installed on your system. Run `mkdocs ---version` to check that everything worked okay. - -```bash -$ mkdocs --version -mkdocs, version 0.15.3 -``` - -!!! Note - If you would like manpages installed for MkDocs, the [click-man] tool can - generate and install them for you. Simply run the following two commands: - - pip install click-man - click-man --target path/to/man/pages mkdocs - - See the [click-man documentation] for an explanation of why manpages are - not automatically generated and installed by pip. - -[click-man]: https://github.com/click-contrib/click-man -[click-man documentation]: https://github.com/click-contrib/click-man#automatic-man-page-installation-with-setuptools-and-pip - -!!! Note - If you are using Windows, some of the above commands may not work - out-of-the-box. - - A quick solution may be to preface every Python command with `python -m` - like this: - - python -m pip install mkdocs - python -m mkdocs - - For a more permanent solution, you may need to edit your `PATH` environment - variable to include the `Scripts` directory of your Python installation. - Recent versions of Python include a script to do this for you. 
Navigate to - your Python installation directory (for example `C:\Python38\`), open the - `Tools`, then `Scripts` folder, and run the `win_add2path.py` file by double - clicking on it. Alternatively, you can [download][a2p] the script and run it - (`python win_add2path.py`). - -[a2p]: https://svn.python.org/projects/python/trunk/Tools/scripts/win_add2path.py - ---- - -## Getting Started - -Getting started is super easy. - -```bash -mkdocs new my-project -cd my-project -``` - -Take a moment to review the initial project that has been created for you. - -![The initial MkDocs layout](img/initial-layout.png) - -There's a single configuration file named `mkdocs.yml`, and a folder named -`docs` that will contain your documentation source files. Right now the `docs` -folder just contains a single documentation page, named `index.md`. - -MkDocs comes with a built-in dev-server that lets you preview your documentation -as you work on it. Make sure you're in the same directory as the `mkdocs.yml` -configuration file, and then start the server by running the `mkdocs serve` -command: - -```bash -$ mkdocs serve -INFO - Building documentation... -INFO - Cleaning site directory -[I 160402 15:50:43 server:271] Serving on http://127.0.0.1:8000 -[I 160402 15:50:43 handlers:58] Start watching changes -[I 160402 15:50:43 handlers:60] Start detecting changes -``` - -Open up `http://127.0.0.1:8000/` in your browser, and you'll see the default -home page being displayed: - -![The MkDocs live server](img/screenshot.png) - -The dev-server also supports auto-reloading, and will rebuild your documentation -whenever anything in the configuration file, documentation directory, or theme -directory changes. - -Open the `docs/index.md` document in your text editor of choice, change the -initial heading to `MkLorum`, and save your changes. Your browser will -auto-reload and you should see your updated documentation immediately. - -Now try editing the configuration file: `mkdocs.yml`. Change the -[`site_name`][site_name] setting to `MkLorum` and save the file. - -```yaml -site_name: MkLorum -``` - -Your browser should immediately reload, and you'll see your new site name take -effect. - -![The site_name setting](img/site-name.png) - -## Adding pages - -Now add a second page to your documentation: - -```bash -curl 'https://jaspervdj.be/lorem-markdownum/markdown.txt' > docs/about.md -``` - -As our documentation site will include some navigation headers, you may want to -edit the configuration file and add some information about the order, title, and -nesting of each page in the navigation header by adding a [`nav`][nav] -setting: - -```yaml -site_name: MkLorum -nav: - - Home: index.md - - About: about.md -``` - -Save your changes and you'll now see a navigation bar with `Home` and `About` -items on the left as well as `Search`, `Previous`, and `Next` items on the -right. - -![Screenshot](img/multipage.png) - -Try the menu items and navigate back and forth between pages. Then click on -`Search`. A search dialog will appear, allowing you to search for any text on -any page. Notice that the search results include every occurrence of the search -term on the site and links directly to the section of the page in which the -search term appears. You get all of that with no effort or configuration on your -part! - -![Screenshot](img/search.png) - -## Theming our documentation - -Now change the configuration file to alter how the documentation is displayed by -changing the theme. 
Edit the `mkdocs.yml` file and add a [`theme`][theme] setting: - -```yaml -site_name: MkLorum -nav: - - Home: index.md - - About: about.md -theme: readthedocs -``` - -Save your changes, and you'll see the ReadTheDocs theme being used. - -![Screenshot](img/readthedocs.png) - -## Changing the Favicon Icon - -By default, MkDocs uses the [MkDocs favicon] icon. To use a different icon, create -an `img` subdirectory in your `docs_dir` and copy your custom `favicon.ico` file -to that directory. MkDocs will automatically detect and use that file as your -favicon icon. - -[MkDocs favicon]: /img/favicon.ico - -## Building the site - -That's looking good. You're ready to deploy the first pass of your `MkLorum` -documentation. First build the documentation: - -```bash -mkdocs build -``` - -This will create a new directory, named `site`. Take a look inside the -directory: - -```bash -$ ls site -about fonts index.html license search.html -css img js mkdocs sitemap.xml -``` - -Notice that your source documentation has been output as two HTML files named -`index.html` and `about/index.html`. You also have various other media that's -been copied into the `site` directory as part of the documentation theme. You -even have a `sitemap.xml` file and `mkdocs/search_index.json`. - -If you're using source code control such as `git` you probably don't want to -check your documentation builds into the repository. Add a line containing -`site/` to your `.gitignore` file. - -```bash -echo "site/" >> .gitignore -``` - -If you're using another source code control tool you'll want to check its -documentation on how to ignore specific directories. - -After some time, files may be removed from the documentation but they will still -reside in the `site` directory. To remove those stale files, just run `mkdocs` -with the `--clean` switch. - -```bash -mkdocs build --clean -``` - -## Other Commands and Options - -There are various other commands and options available. For a complete list of -commands, use the `--help` flag: - -```bash -mkdocs --help -``` - -To view a list of options available on a given command, use the `--help` flag -with that command. For example, to get a list of all options available for the -`build` command run the following: - -```bash -mkdocs build --help -``` - -## Deploying - -The documentation site that you just built only uses static files so you'll be -able to host it from pretty much anywhere. [GitHub project pages] and [Amazon -S3] may be good hosting options, depending upon your needs. Upload the contents -of the entire `site` directory to wherever you're hosting your website from and -you're done. For specific instructions on a number of common hosts, see the -[Deploying your Docs][deploy] page. - -## Getting help - -To get help with MkDocs, please use the [discussion group], [GitHub issues] or -the MkDocs IRC channel `#mkdocs` on freenode. 
- -[deploy]: user-guide/deploying-your-docs/ -[mkdocs]: user-guide/styling-your-docs/#mkdocs -[readthedocs]: user-guide/styling-your-docs/#readthedocs -[theme]: user-guide/styling-your-docs/ -[themes]: user-guide/styling-your-docs/ -[plugins]: user-guide/plugins/ -[MkDocs Themes]: https://github.com/mkdocs/mkdocs/wiki/MkDocs-Themes -[build your own]: user-guide/custom-themes/ -[Amazon S3]: https://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteHosting.html -[get-pip.py]: https://bootstrap.pypa.io/get-pip.py -[nav]: user-guide/configuration/#nav -[discussion group]: https://groups.google.com/forum/#!forum/mkdocs -[GitHub issues]: https://github.com/mkdocs/mkdocs/issues -[GitHub project pages]: https://help.github.com/articles/creating-project-pages-manually/ -[pip]: https://pip.readthedocs.io/en/stable/installing/ -[Python]: https://www.python.org/ -[site_name]: user-guide/configuration/#site_name -[theme]: user-guide/configuration/#theme +configuration file. Start by reading the [introductory tutorial], then check the +[User Guide] for more information. + +[introductory tutorial]: getting-started.md +[User Guide]: user-guide/README.md + +

+ +
+

+## Features
+
+### Great themes available
+
+There's a stack of good looking themes available for MkDocs. Choose between
+the built in themes: mkdocs and readthedocs, select one of the third-party
+themes (on the MkDocs Themes wiki page as well as the MkDocs Catalog), or
+build your own.
+
+### Easy to customize
+
+Get your project documentation looking just the way you want it by customizing
+your theme and/or installing some plugins. Modify Markdown's behavior with
+Markdown extensions. Many configuration options are available.
+
+### Preview your site as you work
+
+The built-in dev-server allows you to preview your documentation as you're
+writing it. It will even auto-reload and refresh your browser whenever you
+save your changes.
+
+### Host anywhere
+
+MkDocs builds completely static HTML sites that you can host on GitHub Pages,
+Amazon S3, or anywhere else you choose.
diff --git a/docs/user-guide/README.md b/docs/user-guide/README.md new file mode 100644 index 0000000..0b3cde3 --- /dev/null +++ b/docs/user-guide/README.md @@ -0,0 +1,21 @@ +# User Guide + +Building Documentation with MkDocs + +--- + +The MkDocs Developer Guide provides documentation for users of MkDocs. See +[Getting Started] for an introductory tutorial. You can jump directly to a +page listed below, or use the *next* and *previous* buttons in the navigation +bar at the top of the page to move through the documentation in order. + +- [Installation](installation.md) +- [Writing Your Docs](writing-your-docs.md) +- [Choosing Your Theme](choosing-your-theme.md) +- [Customizing Your Theme](customizing-your-theme.md) +- [Localizing Your Theme](localizing-your-theme.md) +- [Configuration](configuration.md) +- [Command Line Interface](cli.md) +- [Deploying Your Docs](deploying-your-docs.md) + +[Getting Started]: ../getting-started.md diff --git a/docs/user-guide/choosing-your-theme.md b/docs/user-guide/choosing-your-theme.md new file mode 100644 index 0000000..8ababf5 --- /dev/null +++ b/docs/user-guide/choosing-your-theme.md @@ -0,0 +1,209 @@ +# Choosing your Theme + +Selecting and configuring a theme. + +--- + +MkDocs includes two built-in themes ([mkdocs](#mkdocs) and +[readthedocs](#readthedocs)), as documented below. However, many [third party +themes] are available to choose from as well. + +To choose a theme, set the [theme] configuration option in your `mkdocs.yml` +config file. + +```yaml +theme: + name: readthedocs +``` + +## mkdocs + +The default theme, which was built as a custom [Bootstrap] theme, supports almost +every feature of MkDocs. + +![mkdocs](../img/mkdocs.png) + +In addition to the default [theme configuration options][theme], the `mkdocs` theme +supports the following options: + +* __`highlightjs`__: Enables highlighting of source code in code blocks using + the [highlight.js] JavaScript library. Default: `True`. + +* __`hljs_style`__: The highlight.js library provides 79 different [styles] + (color variations) for highlighting source code in code blocks. Set this to + the name of the desired style. Default: `github`. + +* __`hljs_languages`__: By default, highlight.js only supports 23 common + languages. List additional languages here to include support for them. + + ```yaml + theme: + name: mkdocs + highlightjs: true + hljs_languages: + - yaml + - rust + ``` + +* __`analytics`__: Defines configuration options for an analytics service. + Currently, only Google Analytics v4 is supported via the `gtag` option. + + * __`gtag`__: To enable Google Analytics, set to a Google Analytics v4 + tracking ID, which uses the `G-` format. See Google's documentation to + [Set up Analytics for a website and/or app (GA4)][setup-GA4] or to + [Upgrade to a Google Analytics 4 property][upgrade-GA4]. + + ```yaml + theme: + name: mkdocs + analytics: + gtag: G-ABC123 + ``` + + When set to the default (`null`) Google Analytics is disabled for the + site. + +* __`shortcuts`__: Defines keyboard shortcut keys. + + ```yaml + theme: + name: mkdocs + shortcuts: + help: 191 # ? + next: 78 # n + previous: 80 # p + search: 83 # s + ``` + + All values must be numeric key codes. It is best to use keys that are + available on all keyboards. You may use to determine + the key code for a given key. + + * __`help`__: Display a help modal that lists the keyboard shortcuts. + Default: `191` (?) + + * __`next`__: Navigate to the "next" page. 
Default: `78` (n) + + * __`previous`__: Navigate to the "previous" page. Default: `80` (p) + + * __`search`__: Display the search modal. Default: `83` (s) + +* __`navigation_depth`__: The maximum depth of the navigation tree in the + sidebar. Default: `2`. + +* __`nav_style`__: This adjusts the visual style for the top navigation bar; by + default, this is set to `primary` (the default), but it can also be set to + `dark` or `light`. + + ```yaml + theme: + name: mkdocs + nav_style: dark + ``` + +* __`locale`__{ #mkdocs-locale }: The locale (language/location) used to + build the theme. If your locale is not yet supported, it will fall back + to the default. + + The following locales are supported by this theme: + + * `en`: English (default) + * (see the list of existing directories `mkdocs/themes/mkdocs/locales/*/`) + + See the guide on [localizing your theme] for more information. + +## readthedocs + +A clone of the default theme used by the [Read the Docs] service, which offers +the same restricted feature set as its parent theme. Like its parent theme, only +two levels of navigation are supported. + +![ReadTheDocs](../img/readthedocs.png) + +In addition to the default [theme configuration options][theme], the `readthedocs` +theme supports the following options: + +* __`highlightjs`__: Enables highlighting of source code in code blocks using + the [highlight.js] JavaScript library. Default: `True`. + +* __`hljs_languages`__: By default, highlight.js only supports 23 common + languages. List additional languages here to include support for them. + + ```yaml + theme: + name: readthedocs + highlightjs: true + hljs_languages: + - yaml + - rust + ``` + +* __`analytics`__: Defines configuration options for an analytics service. + + * __`gtag`__: To enable Google Analytics, set to a Google Analytics v4 + tracking ID, which uses the `G-` format. See Google's documentation to + [Set up Analytics for a website and/or app (GA4)][setup-GA4] or to + [Upgrade to a Google Analytics 4 property][upgrade-GA4]. + + ```yaml + theme: + name: readthedocs + analytics: + gtag: G-ABC123 + ``` + + When set to the default (`null`) Google Analytics is disabled for the + + * __`anonymize_ip`__: To enable anonymous IP address for Google Analytics, + set this to `True`. Default: `False`. + +* __`include_homepage_in_sidebar`__: Lists the homepage in the sidebar menu. As + MkDocs requires that the homepage be listed in the `nav` configuration + option, this setting allows the homepage to be included or excluded from + the sidebar. Note that the site name/logo always links to the homepage. + Default: `True`. + +* __`prev_next_buttons_location`__: One of `bottom`, `top`, `both` , or `none`. + Displays the “Next” and “Previous” buttons accordingly. Default: `bottom`. + +* __`navigation_depth`__: The maximum depth of the navigation tree in the + sidebar. Default: `4`. + +* __`collapse_navigation`__: Only include the page section headers in the + sidebar for the current page. Default: `True`. + +* __`titles_only`__: Only include page titles in the sidebar, excluding all + section headers for all pages. Default: `False`. + +* __`sticky_navigation`__: If True, causes the sidebar to scroll with the main + page content as you scroll the page. Default: `True`. + +* __`locale`__{ #readthedocs-locale }: The locale (language/location) used to + build the theme. If your locale is not yet supported, it will fall back + to the default. 
+ + The following locales are supported by this theme: + + * `en`: English (default) + * (see the list of existing directories `mkdocs/themes/readthedocs/locales/*/`) + + See the guide on [localizing your theme] for more information. + +* __`logo`__: To set a logo on your project instead of the plain text + `site_name`, set this variable to be the location of your image. Default: `null`. + +## Third Party Themes + +A list of third party themes can be found at the [community wiki] page and [the ranked catalog][catalog]. If you have created your own, please add them there. + +[third party themes]: #third-party-themes +[theme]: configuration.md#theme +[Bootstrap]: https://getbootstrap.com/ +[highlight.js]: https://highlightjs.org/ +[styles]: https://highlightjs.org/static/demo/ +[setup-GA4]: https://support.google.com/analytics/answer/9304153?hl=en&ref_topic=9303319 +[upgrade-GA4]: https://support.google.com/analytics/answer/9744165?hl=en&ref_topic=9303319 +[Read the Docs]: https://readthedocs.org/ +[community wiki]: https://github.com/mkdocs/mkdocs/wiki/MkDocs-Themes +[catalog]: https://github.com/mkdocs/catalog#-theming +[localizing your theme]: localizing-your-theme.md diff --git a/docs/user-guide/cli.md b/docs/user-guide/cli.md new file mode 100644 index 0000000..987c138 --- /dev/null +++ b/docs/user-guide/cli.md @@ -0,0 +1,8 @@ +# Command Line Interface + +::: mkdocs-click + :module: mkdocs.__main__ + :command: cli + :prog_name: mkdocs + :style: table + :list_subcommands: true diff --git a/docs/user-guide/configuration.md b/docs/user-guide/configuration.md index ff703b7..9c6eff4 100644 --- a/docs/user-guide/configuration.md +++ b/docs/user-guide/configuration.md @@ -6,11 +6,11 @@ Guide to all available configuration settings. ## Introduction -Project settings are always configured by using a YAML configuration file in the -project directory named `mkdocs.yml`. +Project settings are configured by default using a YAML configuration file in +the project directory named `mkdocs.yml`. You can specify another path for it +by using the `-f`/`--config-file` option (see `mkdocs build --help`). -As a minimum this configuration file must contain the `site_name` setting. All -other settings are optional. +As a minimum, this configuration file must contain the `site_name`. All other settings are optional. ## Project information @@ -28,8 +28,15 @@ variable. ### site_url -Set the canonical URL of the site. This will add a link tag with the canonical -URL to the generated HTML header. +Set the canonical URL of the site. This will add a `link` tag with the +`canonical` URL to the `head` section of each HTML page. If the 'root' of the +MkDocs site will be within a subdirectory of a domain, be sure to include that +subdirectory in the setting (`https://example.com/foo/`). + +This setting is also used for `mkdocs serve`: the server will be mounted onto a +path taken from the path component of the URL, e.g. `some/page.md` will be +served from `http://127.0.0.1:8000/foo/some/page/` to mimic the expected remote +layout. **default**: `null` @@ -53,7 +60,7 @@ those domains, otherwise the hostname from the `repo_url`. ### edit_uri -Path from the base `repo_url` to the docs directory when directly viewing a +The path from the base `repo_url` to the docs directory when directly viewing a page, accounting for specifics of the repository host (e.g. GitHub, Bitbucket, etc), the branch, and the docs directory itself. MkDocs concatenates `repo_url` and `edit_uri`, and appends the input path of the page. 
@@ -86,37 +93,112 @@ directory. edit_uri: root/path/docs/ ``` -!!! note - On a few known hosts (specifically GitHub, Bitbucket and GitLab), the - `edit_uri` is derived from the 'repo_url' and does not need to be set - manually. Simply defining a `repo_url` will automatically populate the - `edit_uri` configs setting. +For example, having this config: + +```yaml +repo_url: https://example.com/project/repo +edit_uri: blob/main/docs/ +``` - For example, for a GitHub- or GitLab-hosted repository, the `edit_uri` - would be automatically set as `edit/master/docs/` (Note the `edit` path - and `master` branch). +means that a page named 'foo/bar.md' will have its edit link lead to: + - For a Bitbucket-hosted repository, the equivalent `edit_uri` would be - automatically set as `src/default/docs/` (note the `src` path and `default` - branch). +`edit_uri` can actually be just an absolute URL, not necessarily relative to `repo_url`, so this can achieve the same result: - To use a different URI than the default (for example a different branch), - simply set the `edit_uri` to your desired string. If you do not want any - "edit URL link" displayed on your pages, then set `edit_uri` to an empty - string to disable the automatic setting. +```yaml +edit_uri: https://example.com/project/repo/blob/main/docs/ +``` -!!! warning - On GitHub and GitLab, the default "edit" path (`edit/master/docs/`) opens - the page in the online editor. This functionality requires that the user - have and be logged in to a GitHub/GitLab account. Otherwise, the user will - be redirected to a login/signup page. Alternatively, use the "blob" path - (`blob/master/docs/`) to open a read-only view, which supports anonymous - access. +For more flexibility, see [edit_uri_template](#edit_uri_template) below. + +> NOTE: +> On a few known hosts (specifically GitHub, Bitbucket and GitLab), the +> `edit_uri` is derived from the 'repo_url' and does not need to be set +> manually. Simply defining a `repo_url` will automatically populate the +> `edit_uri` configs setting. +> +> For example, for a GitHub- or GitLab-hosted repository, the `edit_uri` +> would be automatically set as `edit/master/docs/` (Note the `edit` path +> and `master` branch). +> +> For a Bitbucket-hosted repository, the equivalent `edit_uri` would be +> automatically set as `src/default/docs/` (note the `src` path and `default` +> branch). +> +> To use a different URI than the default (for example a different branch), +> simply set the `edit_uri` to your desired string. If you do not want any +> "edit URL link" displayed on your pages, then set `edit_uri` to an empty +> string to disable the automatic setting. + +WARNING: +On GitHub and GitLab, the default "edit" path (`edit/master/docs/`) opens +the page in the online editor. This functionality requires that the user +have and be logged in to a GitHub/GitLab account. Otherwise, the user will +be redirected to a login/signup page. Alternatively, use the "blob" path +(`blob/master/docs/`) to open a read-only view, which supports anonymous +access. **default**: `edit/master/docs/` for GitHub and GitLab repos or `src/default/docs/` for a Bitbucket repo, if `repo_url` matches those domains, otherwise `null` +### edit_uri_template + +The more flexible variant of [edit_uri](#edit_uri). These two are equivalent: + +```yaml +edit_uri: 'blob/main/docs/' +edit_uri_template: 'blob/main/docs/{path}' +``` + +(they are also mutually exclusive -- don't specify both). 
+ +Starting from here, you can change the positioning or formatting of the path, in case the default behavior of appending the path isn't enough. + +The contents of `edit_uri_template` are normal [Python format strings](https://docs.python.org/3/library/string.html#formatstrings), with only these fields available: + +* `{path}`, e.g. `foo/bar.md` +* `{path_noext}`, e.g. `foo/bar` + +And the conversion flag `!q` is available, to percent-encode the field: + +* `{path!q}`, e.g. `foo%2Fbar.md` + +>? NOTE: **Suggested useful configurations:** +> +> * GitHub Wiki: +> (e.g. `https://github.com/project/repo/wiki/foo/bar/_edit`) +> +> ```yaml +> repo_url: 'https://github.com/project/repo/wiki' +> edit_uri_template: '{path_noext}/_edit' +> ``` +> +> * BitBucket editor: +> (e.g. `https://bitbucket.org/project/repo/src/master/docs/foo/bar.md?mode=edit`) +> +> ```yaml +> repo_url: 'https://bitbucket.org/project/repo/' +> edit_uri_template: 'src/master/docs/{path}?mode=edit' +> ``` +> +> * GitLab Static Site Editor: +> (e.g. `https://gitlab.com/project/repo/-/sse/master/docs%2Ffoo%2bar.md`) +> +> ```yaml +> repo_url: 'https://gitlab.com/project/repo' +> edit_uri_template: '-/sse/master/docs%2F{path!q}' +> ``` +> +> * GitLab Web IDE: +> (e.g. `https://gitlab.com/-/ide/project/repo/edit/master/-/docs/foo/bar.md`) +> +> ```yaml +> edit_uri_template: 'https://gitlab.com/-/ide/project/repo/edit/master/-/docs/{path}' +> ``` + +**default**: `null` + ### site_description Set the site description. This will add a meta tag to the generated HTML header. @@ -136,26 +218,16 @@ Set the copyright information to be included in the documentation by the theme. **default**: `null` -### google_analytics - -Set the Google analytics tracking configuration. - -```yaml -google_analytics: ['UA-36723568-3', 'mkdocs.org'] -``` - -**default**: `null` - ### remote_branch -Set the remote branch to commit to when using `gh-deploy` to deploy to Github +Set the remote branch to commit to when using `gh-deploy` to deploy to GitHub Pages. This option can be overridden by a command line option in `gh-deploy`. **default**: `gh-pages` ### remote_name -Set the remote name to push to when using `gh-deploy` to deploy to Github Pages. +Set the remote name to push to when using `gh-deploy` to deploy to GitHub Pages. This option can be overridden by a command line option in `gh-deploy`. **default**: `origin` @@ -169,13 +241,14 @@ for the site. A minimal navigation configuration could look like this: ```yaml nav: - - 'index.md' - - 'about.md' + - 'index.md' + - 'about.md' ``` -All paths must be relative to the `mkdocs.yml` configuration file. See the -section on [configuring pages and navigation] for a more detailed breakdown, -including how to create sub-sections. +All paths in the navigation configuration must be relative to the +[`docs_dir`](#docs_dir) configuration option. See the section on [configuring +pages and navigation] for a more detailed breakdown, including how to create +sub-sections. Navigation items may also include links to external sites. While titles are optional for internal links, they are required for external links. An external @@ -185,9 +258,9 @@ how MkDocs determines the page title of a document. ```yaml nav: - - Introduction: 'index.md' - - 'about.md' - - 'Issue Tracker': 'https://example.com/' + - Introduction: 'index.md' + - 'about.md' + - 'Issue Tracker': 'https://example.com/' ``` In the above example, the first two items point to local files while the third @@ -201,15 +274,15 @@ the full domain. 
In that case, you may use an appropriate relative URL. site_url: https://example.com/foo/ nav: - - Home: '../' - - 'User Guide': 'user-guide.md' - - 'Bug Tracker': '/bugs/' + - Home: '../' + - 'User Guide': 'user-guide.md' + - 'Bug Tracker': '/bugs/' ``` -In the above example, two different styles of external links are used. First +In the above example, two different styles of external links are used. First, note that the `site_url` indicates that the MkDocs site is hosted in the `/foo/` subdirectory of the domain. Therefore, the `Home` navigation item is a relative -link which steps up one level to the server root and effectively points to +link that steps up one level to the server root and effectively points to `https://example.com/`. The `Bug Tracker` item uses an absolute path from the server root and effectively points to `https://example.com/bugs/`. Of course, the `User Guide` points to a local MkDocs page. @@ -218,59 +291,192 @@ server root and effectively points to `https://example.com/bugs/`. Of course, th list of all the Markdown files found within the `docs_dir` and its sub-directories. Index files will always be listed first within a sub-section. -## Build directories +### exclude_docs -### theme +NEW: **New in version 1.5.** -Sets the theme and theme specific configuration of your documentation site. -May be either a string or a set of key/value pairs. +This config defines patterns of files (under [`docs_dir`](#docs_dir)) to not be picked up into the built site. -If a string, it must be the string name of a known installed theme. For a list -of available themes visit [styling your docs]. +Example: -An example set of key/value pairs might look something like this: +```yaml +exclude_docs: | + api-config.json # A file with this name anywhere. + drafts/ # A "drafts" directory anywhere. + /requirements.txt # Top-level "docs/requirements.txt". + *.py # Any file with this extension anywhere. + !/foo/example.py # But keep this particular file. +``` + +This follows the [.gitignore pattern format](https://git-scm.com/docs/gitignore#_pattern_format). + +Note that `mkdocs serve` does *not* follow this setting and instead displays excluded documents but with a "DRAFT" mark. To prevent this effect, you can run `mkdocs serve --clean`. + +The following defaults are always implicitly prepended - to exclude dot-files (and directories) as well as the top-level `templates` directory: ```yaml -theme: - name: mkdocs - custom_dir: my_theme_customizations/ - static_templates: - - sitemap.html - include_sidebar: false +exclude_docs: | + .* + /templates/ ``` -If a set of key/value pairs, the following nested keys can be defined: +So, in order to really start this config fresh, you'd need to specify a negated version of these entries first. + +Otherwise you could for example opt only certain dot-files back into the site: + +```yaml +exclude_docs: | + !.assets # Don't exclude '.assets' although all other '.*' are excluded +``` + +### not_in_nav + +NEW: **New in version 1.5.** + +NOTE: This option does *not* actually exclude anything from the nav. + +If you want to include some docs into the site but intentionally exclude them from the nav, normally MkDocs warns about this. -!!! block "" +Adding such patterns of files (relative to [`docs_dir`](#docs_dir)) into the `not_in_nav` config will prevent such warnings. - #### name: +Example: - The string name of a known installed theme. For a list of available themes - visit [styling your docs]. 
+```yaml +nav: + - Foo: foo.md + - Bar: bar.md + +not_in_nav: | + /private.md +``` + +As the previous option, this follows the .gitignore pattern format. + +NOTE: Adding a given file to [`exclude_docs`](#exclude_docs) takes precedence over and implies `not_in_nav`. + +### validation + +NEW: **New in version 1.5.** + +Configure the strictness of MkDocs' diagnostic messages when validating links to documents. + +This is a tree of configs, and for each one the value can be one of the three: `warn`, `info`, `ignore`. Which cause a logging message of the corresponding severity to be produced. The `warn` level is, of course, intended for use with `mkdocs build --strict` (where it becomes an error), which you can employ in continuous testing. + +> EXAMPLE: **Defaults of this config as of MkDocs 1.5:** +> +> ```yaml +> validation: +> nav: +> omitted_files: info +> not_found: warn +> absolute_links: info +> links: +> not_found: warn +> absolute_links: info +> unrecognized_links: info +> ``` +> +> (Note: you shouldn't copy this whole example, because it only duplicates the defaults. Only individual items that differ should be set.) + +The defaults of some of the behaviors already differ from MkDocs 1.4 and below - they were ignored before. + +>? EXAMPLE: **Configure MkDocs 1.5 to behave like MkDocs 1.4 and below (reduce strictness):** +> +> ```yaml +> validation: +> absolute_links: ignore +> unrecognized_links: ignore +> ``` + +>! EXAMPLE: **Recommended settings for most sites (maximal strictness):** +> +> ```yaml +> validation: +> omitted_files: warn +> absolute_links: warn +> unrecognized_links: warn +> ``` + +Note how in the above examples we omitted the 'nav' and 'links' keys. Here `absolute_links:` means setting both `nav: absolute_links:` and `links: absolute_links:`. + +Full list of values and examples of log messages that they can hide or make more prominent: + +* `validation.nav.omitted_files` + * "The following pages exist in the docs directory, but are not included in the "nav" configuration: ..." +* `validation.nav.not_found` + * "A relative path to 'foo/bar.md' is included in the 'nav' configuration, which is not found in the documentation files." + * "A reference to 'foo/bar.md' is included in the 'nav' configuration, but this file is excluded from the built site." +* `validation.nav.absolute_links` + * "An absolute path to '/foo/bar.html' is included in the 'nav' configuration, which presumably points to an external resource." + +* `validation.links.not_found` + * "Doc file 'example.md' contains a relative link '../foo/bar.md', but the target is not found among documentation files." + * "Doc file 'example.md' contains a link to 'foo/bar.md' which is excluded from the built site." +* `validation.links.absolute_links` + * "Doc file 'example.md' contains an absolute link '/foo/bar.html', it was left as is. Did you mean 'foo/bar.md'?" +* `validation.links.unrecognized_links` + * "Doc file 'example.md' contains an unrecognized relative link '../foo/bar/', it was left as is. Did you mean 'foo/bar.md'?" + * "Doc file 'example.md' contains an unrecognized relative link 'mail\@example.com', it was left as is. Did you mean 'mailto:mail\@example.com'?" - #### custom_dir: +## Build directories - A directory containing a custom theme. This can either be a relative - directory, in which case it is resolved relative to the directory containing - your configuration file, or it can be an absolute directory path from the - root of your local file system. 
+### theme - See [styling your docs][theme_dir] for details if you would like to tweak an - existing theme. +Sets the theme and theme specific configuration of your documentation site. +May be either a string or a set of key/value pairs. - See [custom themes] if you would like to build your own theme from the - ground up. +If a string, it must be the string name of a known installed theme. For a list +of available themes visit [Choosing Your Theme]. - #### static_templates: +An example set of key/value pairs might look something like this: - A list of templates to render as static pages. The templates must be located - in either the theme's template directory or in the `custom_dir` defined in - the theme configuration. +```yaml +theme: + name: mkdocs + locale: en + custom_dir: my_theme_customizations/ + static_templates: + - sitemap.html + include_sidebar: false +``` - #### (theme specific keywords) +If a set of key/value pairs, the following nested keys can be defined: - Any additional keywords supported by the theme can also be defined. See the - documentation for the theme you are using for details. +> BLOCK: +> +> #### name +> +> The string name of a known installed theme. For a list of available themes +> visit [Choosing Your Theme]. +> +> #### locale +> +> A code representing the language of your site. See [Localizing your theme] +> for details. +> +> #### custom_dir +> +> A directory containing a custom theme. This can either be a relative +> directory, in which case it is resolved relative to the directory containing +> your configuration file or it can be an absolute directory path from the +> root of your local file system. +> +> See [Customizing Your Theme][theme_dir] for details if you would like to tweak an +> existing theme. +> +> See the [Theme Developer Guide] if you would like to build your own theme +> from the ground up. +> +> #### static_templates +> +> A list of templates to render as static pages. The templates must be located +> in either the theme's template directory or in the `custom_dir` defined in +> the theme configuration. +> +> #### (theme specific keywords) +> +> Any additional keywords supported by the theme can also be defined. See the +> documentation for the theme you are using for details. **default**: `'mkdocs'` @@ -292,50 +498,81 @@ the root of your local file system. **default**: `'site'` -!!! note "Note:" - If you are using source code control you will normally want to ensure that - your *build output* files are not committed into the repository, and only - keep the *source* files under version control. For example, if using `git` - you might add the following line to your `.gitignore` file: - - site/ - - If you're using another source code control tool, you'll want to check its - documentation on how to ignore specific directories. +> NOTE: +> If you are using source code control you will normally want to ensure that +> your *build output* files are not committed into the repository, and only +> keep the *source* files under version control. For example, if using `git` +> you might add the following line to your `.gitignore` file: +> +> ```text +> site/ +> ``` +> +> If you're using another source code control tool, you'll want to check its +> documentation on how to ignore specific directories. ### extra_css -Set a list of CSS files in your `docs_dir` to be included by the theme. For -example, the following example will include the extra.css file within the -css subdirectory in your [docs_dir](#docs_dir). 
+Set a list of CSS files (relative to `docs_dir`) to be included by the theme, typically as `<link>` tags.
+
+Example:
 
 ```yaml
 extra_css:
-    - css/extra.css
-    - css/second_extra.css
+  - css/extra.css
+  - css/second_extra.css
 ```
 
 **default**: `[]` (an empty list).
 
 ### extra_javascript
 
-Set a list of JavaScript files in your `docs_dir` to be included by the theme.
-See the example in [extra_css] for usage.
+Set a list of JavaScript files in your `docs_dir` to be included by the theme, as `<script>` tags.
+
+Example:
+
+```yaml
+extra_javascript:
+  - some_plain_javascript.js        # <script src="..."></script>
+  # New behavior in MkDocs 1.5:
+  - implicitly_as_module.mjs        # <script src="..." type="module"></script>
+  # Config keys only supported since MkDocs 1.5:
+  - path: explicitly_as_module.mjs  # <script src="..." type="module"></script>
+    type: module
+  - path: deferred_plain.js         # <script src="..." defer></script>
+    defer: true
+  - path: scripts/async_module.mjs  # <script src="..." type="module" async></script>
+    type: module
+    async: true
+```
+
+So, each item can be either:
+
+* a plain string, or
+* a mapping that has the required `path` key and 3 optional keys `type` (string), `async` (boolean), `defer` (boolean).
+
+Only the plain string variant detects the `.mjs` extension and adds `type="module"`; otherwise `type: module` must be written out regardless of extension.
 
 **default**: `[]` (an empty list).
 
+NOTE: `*.js` and `*.css` files, just like any other type of file, are always copied from `docs_dir` into the site's deployed copy, regardless of whether they're linked to the pages via the above configs or not.
+
 ### extra_templates
 
 Set a list of templates in your `docs_dir` to be built by MkDocs. To see more
 about writing templates for MkDocs read the documentation about [custom themes]
-and specifically the section about the [variables that are available] to
+and specifically the section about the [available variables] to
 templates. See the example in [extra_css] for usage.
 
 **default**: `[]` (an empty list).
 
 ### extra
 
-A set of key value pairs, where the values can be any valid YAML construct, that
+A set of key-value pairs, where the values can be any valid YAML construct, that
 will be passed to the template. This allows for great flexibility when creating
 custom themes.
 
@@ -344,13 +581,34 @@ version, you can pass it to the theme like this:
 
 ```yaml
 extra:
-    version: 1.0
+  version: 1.0
 ```
 
-**default**: By default `extra` will be an empty key value mapping.
+**default**: By default `extra` will be an empty key-value mapping.
 
 ## Preview controls
 
+## Live Reloading
+
+### watch
+
+Determines additional directories to watch when running `mkdocs serve`.
+Configuration is a YAML list.
+
+```yaml
+watch:
+  - directory_a
+  - directory_b
+```
+
+Allows a custom default to be set without the need to pass it through the `-w`/`--watch`
+option every time the `mkdocs serve` command is called.
+
+> NOTE:
+> The paths provided via the configuration file are relative to the configuration file.
+>
+> The paths provided via the `-w`/`--watch` CLI parameters are not.
+
 ### use_directory_urls
 
 This setting controls the style used for linking to pages within the
@@ -368,9 +626,9 @@ about/license.md | /about/license/ | /about/license.html
 
 The default style of `use_directory_urls: true` creates more user friendly
 URLs, and is usually what you'll want to use.
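+
+If you do want the plain `.html`-file style of links, the option can simply be
+switched off in `mkdocs.yml` (a minimal sketch; no other settings are required):
+
+```yaml
+use_directory_urls: false
+```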
-The alternate style can occasionally be useful if you want your documentation to -remain properly linked when opening pages directly from the file system, because -it creates links that point directly to the target *file* rather than the target +The alternate style can be useful if you want your documentation to remain +properly linked when opening pages directly from the file system, because it +creates links that point directly to the target *file* rather than the target *directory*. **default**: `true` @@ -380,6 +638,8 @@ it creates links that point directly to the target *file* rather than the target Determines how warnings are handled. Set to `true` to halt processing when a warning is raised. Set to `false` to print a warning and continue processing. +This is also available as a command line flag: `--strict`. + **default**: `false` ### dev_addr @@ -392,6 +652,8 @@ Allows a custom default to be set without the need to pass it through the **default**: `'127.0.0.1:8000'` +See also: [site_url](#site_url). + ## Formatting options ### markdown_extensions @@ -406,7 +668,7 @@ For example, to enable the [SmartyPants typography extension][smarty], use: ```yaml markdown_extensions: - - smarty + - smarty ``` Some extensions provide configuration options of their own. If you would like to @@ -419,8 +681,8 @@ For example, to enable permalinks in the (included) `toc` extension, use: ```yaml markdown_extensions: - - toc: - permalink: True + - toc: + permalink: true ``` Note that a colon (`:`) must follow the extension name (`toc`) and then on a new @@ -430,9 +692,9 @@ defined on a separate line: ```yaml markdown_extensions: - - toc: - permalink: True - separator: "_" + - toc: + permalink: true + separator: "_" ``` Add an additional item to the list for each extension. If you have no @@ -441,27 +703,100 @@ for that extension: ```yaml markdown_extensions: - - smarty - - toc: - permalink: True - - sane_lists + - smarty + - toc: + permalink: true + - sane_lists +``` + +> NOTE: **Dynamic config values.** +> +> To dynamically configure the extensions, you can get the config values from [environment variables](#environment-variables) or [obtain paths](#paths-relative-to-the-current-file-or-site) of the currently rendered Markdown file or the overall MkDocs site. + +In the above examples, each extension is a list item (starts with a `-`). As an +alternative, key/value pairs can be used instead. However, in that case an empty +value must be provided for extensions for which no options are defined. +Therefore, the last example above could also be defined as follows: + +```yaml +markdown_extensions: + smarty: {} + toc: + permalink: true + sane_lists: {} ``` -!!! note "See Also:" - The Python-Markdown documentation provides a [list of extensions][exts] - which are available out-of-the-box. For a list of configuration options - available for a given extension, see the documentation for that extension. +This alternative syntax is required if you intend to override some options via +[inheritance]. - You may also install and use various [third party extensions][3rd]. Consult - the documentation provided by those extensions for installation instructions - and available configuration options. +> NOTE: **More extensions.** +> +> The Python-Markdown documentation provides a [list of extensions][exts] +> which are available out-of-the-box. For a list of configuration options +> available for a given extension, see the documentation for that extension. 
+> +> You may also install and use various third party extensions ([Python-Markdown wiki], [MkDocs project catalog][catalog]). Consult +> the documentation provided by those extensions for installation instructions +> and available configuration options. **default**: `[]` (an empty list). +### hooks + +NEW: **New in version 1.4.** + +A list of paths to Python scripts (relative to `mkdocs.yml`) that are loaded and used as [plugin](#plugins) instances. + +For example: + +```yaml +hooks: + - my_hooks.py +``` + +Then the file *my_hooks.py* can contain any [plugin event handlers](../dev-guide/plugins.md#events) (without `self`), e.g.: + +```python +def on_page_markdown(markdown, **kwargs): + return markdown.replace('a', 'z') +``` + +>? EXAMPLE: **Advanced example:** +> +> This produces warnings based on the Markdown content (and warnings are fatal in [strict](#strict) mode): +> +> ```python +> import logging, re +> import mkdocs.plugins +> +> log = logging.getLogger('mkdocs') +> +> @mkdocs.plugins.event_priority(-50) +> def on_page_markdown(markdown, page, **kwargs): +> path = page.file.src_uri +> for m in re.finditer(r'\bhttp://[^) ]+', markdown): +> log.warning(f"Documentation file '{path}' contains a non-HTTPS link: {m[0]}") +> ``` + +This does not enable any new abilities compared to [plugins][], it only simplifies one-off usages, as these don't need to be *installed* like plugins do. + +Note that for `mkdocs serve` the hook module will *not* be reloaded on each build. + +You might have seen this feature in the [mkdocs-simple-hooks plugin](https://github.com/aklajnert/mkdocs-simple-hooks). If using standard method names, it can be directly replaced, e.g.: + +```diff +-plugins: +- - mkdocs-simple-hooks: +- hooks: +- on_page_markdown: 'my_hooks:on_page_markdown' ++hooks: ++ - my_hooks.py +``` + ### plugins A list of plugins (with optional configuration settings) to use when building -the site . See the [Plugins] documentation for full details. +the site. See the [Plugins] documentation for full details. If the `plugins` config setting is defined in the `mkdocs.yml` config file, then any defaults (such as `search`) are ignored and you need to explicitly re-enable @@ -469,10 +804,36 @@ the defaults if you would like to continue using them: ```yaml plugins: - - search - - your_other_plugin + - search + - your_other_plugin +``` + +To define options for a given plugin, use a nested set of key/value pairs: + +```yaml +plugins: + - search + - your_other_plugin: + option1: value + option2: other value ``` +In the above examples, each plugin is a list item (starts with a `-`). As an +alternative, key/value pairs can be used instead. However, in that case an empty +value must be provided for plugins for which no options are defined. Therefore, +the last example above could also be defined as follows: + +```yaml +plugins: + search: {} + your_other_plugin: + option1: value + option2: other value +``` + +This alternative syntax is required if you intend to override some options via +[inheritance]. + To completely disable all plugins, including any defaults, set the `plugins` setting to an empty list: @@ -496,27 +857,27 @@ the dot (`.`) as a word separator you might do this: ```yaml plugins: - - search: - separator: '[\s\-\.]+' + - search: + separator: '[\s\-\.]+' ``` - **default**: `'[\s\-]+'` +**default**: `'[\s\-]+'` ##### **min_search_length** An integer value that defines the minimum length for a search query. 
By default searches shorter than 3 chars in length are ignored as search result quality with -short search terms is poor. However, for some use cases (such as documentation +short search terms are poor. However, for some use cases (such as documentation about Message Queues which might generate searches for 'MQ') it may be preferable to set a shorter limit. ```yaml plugins: - - search: - min_search_length: 2 + - search: + min_search_length: 2 ``` - **default**: 3 +**default**: 3 ##### **lang** @@ -546,67 +907,279 @@ supported: You may [contribute additional languages]. -!!! Warning +WARNING: +While search does support using multiple languages together, it is best not +to add additional languages unless you really need them. Each additional +language adds significant bandwidth requirements and uses more browser +resources. Generally, it is best to keep each instance of MkDocs to a single +language. - While search does support using multiple languages together, it is best not - to add additional languages unless you really need them. Each additional - language adds significant bandwidth requirements and uses more browser - resources. Generally it is best to keep each instance of MkDocs to a single - language. +NOTE: +Lunr Languages does not currently include support for Chinese or other Asian +languages. However, some users have reported decent results using Japanese. -!!! Note - - Lunr Languages does not currently include support for Chinese or other Asian - languages. However, some users have reported decent results using Japanese. - -**default**: `['en']` +**default**: The value of `theme.locale` if set, otherwise `[en]`. ##### **prebuild_index** Optionally generates a pre-built index of all pages, which provides some -performance improvements for larger sites. Before enabling, check that the +performance improvements for larger sites. Before enabling, confirm that the theme you are using explicitly supports using a prebuilt index (the builtin -themes do). +themes do). Set to `true` to enable. -There are two options for prebuilding the index: +WARNING: +This option requires that [Node.js] be installed and the command `node` be +on the system path. If the call to `node` fails for any reason, a warning +is issued and the build continues uninterrupted. You may use the `--strict` +flag when building to cause such a failure to raise an error instead. -Using [Node.js] setting `prebuild_index` to `True` or `node`. This option -requires that Node.js be installed and the command `node` be on the system -path. If this feature is enabled and fails for any reason, a warning is issued. -You may use the `--strict` flag when building to cause such a failure to raise -an error instead. +NOTE: +On smaller sites, using a pre-built index is not recommended as it creates a +significant increase is bandwidth requirements with little to no noticeable +improvement to your users. However, for larger sites (hundreds of pages), +the bandwidth increase is relatively small and your users will notice a +significant improvement in search performance. -Using [Lunr.py] setting `prebuild_index` to `python`. Lunr.py is installed -as part of mkdocs and guarantees compatibility with Lunr.js even on languages -other than english. If you find substantial inconsistencies or problems please -report it on [Lunr.py's issues] and fall back to the Node.js version. +**default**: `False` -!!! 
Note +##### **indexing** - On smaller sites, using a pre-built index is not recommended as it creates a - significant increase is bandwidth requirements with little to no noticeable - improvement to your users. However, for larger sites (hundreds of pages), - the bandwidth increase is relatively small and your users will notice a - significant improvement in search performance. +Configures what strategy the search indexer will use when building the index +for your pages. This property is particularly useful if your project is large +in scale, and the index takes up an enormous amount of disk space. -**default**: `False` +```yaml +plugins: + - search: + indexing: 'full' +``` + +###### Options + +|Option|Description| +|------|-----------| +|`full`|Indexes the title, section headings, and full text of each page.| +|`sections`|Indexes the title and section headings of each page.| +|`titles`|Indexes only the title of each page.| + +**default**: `full` + +## Special YAML tags + +### Environment variables + +In most cases, the value of a configuration option is set directly in the +configuration file. However, as an option, the value of a configuration option +may be set to the value of an environment variable using the `!ENV` tag. For +example, to set the value of the `site_name` option to the value of the +variable `SITE_NAME` the YAML file may contain the following: + +```yaml +site_name: !ENV SITE_NAME +``` + +If the environment variable is not defined, then the configuration setting +would be assigned a `null` (or `None` in Python) value. A default value can be +defined as the last value in a list. Like this: + +```yaml +site_name: !ENV [SITE_NAME, 'My default site name'] +``` + +Multiple fallback variables can be used as well. Note that the last value is +not an environment variable, but must be a value to use as a default if none +of the specified environment variables are defined. + +```yaml +site_name: !ENV [SITE_NAME, OTHER_NAME, 'My default site name'] +``` + +Simple types defined within an environment variable such as string, bool, +integer, float, datestamp and null are parsed as if they were defined directly +in the YAML file, which means that the value will be converted to the +appropriate type. However, complex types such as lists and key/value pairs +cannot be defined within a single environment variable. + +For more details, see the [pyyaml_env_tag](https://github.com/waylan/pyyaml-env-tag) +project. + +### Paths relative to the current file or site + +NEW: **New in version 1.5.** + +Some Markdown extensions can benefit from knowing the path of the Markdown file that's currently being processed, or just the root path of the current site. For that, the special tag `!relative` can be used in most contexts within the config file, though the only known usecases are within [`markdown_extensions`](#markdown_extensions). + +Examples of the possible values are: + +```yaml +- !relative # Relative to the directory of the current Markdown file +- !relative $docs_dir # Path of the docs_dir +- !relative $config_dir # Path of the directory that contains the main mkdocs.yml +- !relative $config_dir/some/child/dir # Some subdirectory of the root config directory +``` + +(Here, `$docs_dir` and `$config_dir` are currently the *only* special prefixes that are recognized.) 
+ +Example: + +```yaml +markdown_extensions: + - pymdownx.snippets: + base_path: !relative # Relative to the current Markdown file +``` + +This allows the [pymdownx.snippets] extension to include files relative to the current Markdown file, which without this tag it would have no way of knowing. + +> NOTE: Even for the default case, any extension's base path is technically the *current working directory* although the assumption is that it's the *directory of mkdocs.yml*. So even if you don't want the paths to be relative, to improve the default behavior, always prefer to use this idiom: +> +> ```yaml +> markdown_extensions: +> - pymdownx.snippets: +> base_path: !relative $config_dir # Relative to the root directory with mkdocs.yml +> ``` + +## Configuration Inheritance + +Generally, a single file would hold the entire configuration for a site. +However, some organizations may maintain multiple sites which all share a common +configuration across them. Rather than maintaining separate configurations for +each, the common configuration options can be defined in a parent configuration +file which each site's primary configuration file inherits. + +To define the parent for a configuration file, set the `INHERIT` (all caps) key +to the path of the parent file. The path must be relative to the location of the +primary file. + +For configuration options to be merged with a parent configuration, those +options must be defined as key/value pairs. Specifically, the +[markdown_extensions] and [plugins](#plugins) options must use the alternative syntax +which does not use list items (lines which start with `-`). + +For example, suppose the common (parent) configuration is defined in `base.yml`: + +```yaml +theme: + name: mkdocs + locale: en + highlightjs: true + +markdown_extensions: + toc: + permalink: true + admonition: {} +``` + +Then, for the "foo" site, the primary configuration file would be defined at +`foo/mkdocs.yml`: + +```yml +INHERIT: ../base.yml +site_name: Foo Project +site_url: https://example.com/foo +``` + +When running `mkdocs build`, the file at `foo/mkdocs.yml` would be passed in as +the configuration file. MkDocs will then parse that file, retrieve and parse the +parent file `base.yml` and deep merge the two. This would result in MkDocs +receiving the following merged configuration: + +```yaml +site_name: Foo Project +site_url: https://example.com/foo + +theme: + name: mkdocs + locale: en + highlightjs: true + +markdown_extensions: + toc: + permalink: true + admonition: {} +``` + +Deep merging allows you to add and/or override various values in your primary +configuration file. For example, suppose for one site you wanted to add support +for definition lists, use a different symbol for permalinks, and define a +different separator. 
In that site's primary configuration file you could do: + +```yaml +INHERIT: ../base.yml +site_name: Bar Project +site_url: https://example.com/bar + +markdown_extensions: + def_list: {} + toc: + permalink:  + separator: "_" +``` + +In that case, the above configuration would be deep merged with `base.yml` and +result in the following configuration: + +```yaml +site_name: Bar Project +site_url: https://example.com/bar + +theme: + name: mkdocs + locale: en + highlightjs: true + +markdown_extensions: + def_list: {} + toc: + permalink:  + separator: "_" + admonition: {} +``` + +Notice that the `admonition` extension was retained from the parent +configuration, the `def_list` extension was added, the value of +`toc.permalink` was replaced, and the value of `toc.separator` was added. + +You can replace or merge the value of any key. However, any non-key is always +replaced. Therefore, you cannot append items to a list. You must redefine the +entire list. + +As the [nav] configuration is made up of nested lists, this means that you +cannot merge navigation items. Of course, you can replace the entire `nav` +configuration with a new one. However, it is generally expected that the entire +navigation would be defined in the primary configuration file for a project. + +WARNING: +As a reminder, all path based configuration options must be relative to the +primary configuration file and MkDocs does not alter the paths when merging. +Therefore, defining paths in a parent file which is inherited by multiple +different sites may not work as expected. It is generally best to define +path based options in the primary configuration file only. + +The inheritance can also be used as a quick way to override keys on the command line - by using stdin as the config file. 
For example: + +```bash +echo '{INHERIT: mkdocs.yml, site_name: "Renamed site"}' | mkdocs build -f - +``` -[custom themes]: custom-themes.md -[variables that are available]: custom-themes.md#template-variables +[Theme Developer Guide]: ../dev-guide/themes.md [pymdk-extensions]: https://python-markdown.github.io/extensions/ [pymkd]: https://python-markdown.github.io/ [smarty]: https://python-markdown.github.io/extensions/smarty/ [exts]: https://python-markdown.github.io/extensions/ -[3rd]: https://github.com/Python-Markdown/markdown/wiki/Third-Party-Extensions +[Python-Markdown wiki]: https://github.com/Python-Markdown/markdown/wiki/Third-Party-Extensions +[catalog]: https://github.com/mkdocs/catalog [configuring pages and navigation]: writing-your-docs.md#configure-pages-and-navigation -[theme_dir]: styling-your-docs.md#using-the-theme_dir -[styling your docs]: styling-your-docs.md +[theme_dir]: customizing-your-theme.md#using-the-theme_dir +[choosing your theme]: choosing-your-theme.md +[Localizing your theme]: localizing-your-theme.md [extra_css]: #extra_css -[Plugins]: plugins.md +[Plugins]: ../dev-guide/plugins.md [lunr.js]: https://lunrjs.com/ [ISO 639-1]: https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes [Lunr Languages]: https://github.com/MihaiValentin/lunr-languages#lunr-languages----- [contribute additional languages]: https://github.com/MihaiValentin/lunr-languages/blob/master/CONTRIBUTING.md [Node.js]: https://nodejs.org/ -[Lunr.py]: http://lunr.readthedocs.io/ -[Lunr.py's issues]: https://github.com/yeraydiazdiaz/lunr.py/issues +[markdown_extensions]: #markdown_extensions +[nav]: #nav +[inheritance]: #configuration-inheritance +[pymdownx.snippets]: https://facelessuser.github.io/pymdown-extensions/extensions/snippets/ diff --git a/docs/user-guide/custom-themes.md b/docs/user-guide/custom-themes.md deleted file mode 100644 index 28a52e5..0000000 --- a/docs/user-guide/custom-themes.md +++ /dev/null @@ -1,869 +0,0 @@ -# Custom themes - -A guide to creating and distributing custom themes. - ---- - -!!! Note - - If you are looking for third party themes, they are listed in the MkDocs - [community wiki](https://github.com/mkdocs/mkdocs/wiki/MkDocs-Themes). If - you want to share a theme you create, you should list it on the Wiki. - -When creating a new theme, you can either follow the steps in this guide to -create one from scratch or you can download the `mkdocs-basic-theme` as a -basic, yet complete, theme with all the boilerplate required. **You can find -this base theme on [GitHub](https://github.com/mkdocs/mkdocs-basic-theme)**. -It contains detailed comments in the code to describe the different features -and their usage. - -## Creating a custom theme - -The bare minimum required for a custom theme is a `main.html` [Jinja2 template] -file which is placed in a directory that is *not* a child of the [docs_dir]. -Within `mkdocs.yml`, set the theme.[custom_dir] option to the path of the -directory containing `main.html`. The path should be relative to the -configuration file. For example, given this example project layout: - -```no-highlight -mkdocs.yml -docs/ - index.md - about.md -custom_theme/ - main.html - ... -``` - -... you would include the following settings in `mkdocs.yml` to use the custom theme -directory: - -```yaml -theme: - name: null - custom_dir: 'custom_theme/' -``` - -!!! Note - - Generally, when building your own custom theme, the theme.[name] - configuration setting would be set to `null`. 
However, if the - theme.[custom_dir] configuration value is used in combination with an - existing theme, the theme.[custom_dir] can be used to replace only specific - parts of a built-in theme. For example, with the above layout and if you set - `name: "mkdocs"` then the `main.html` file in the theme.[custom_dir] would - replace the file of the same name in the `mkdocs` theme but otherwise the - `mkdocs` theme would remain unchanged. This is useful if you want to make - small adjustments to an existing theme. - - For more specific information, see [styling your docs]. - -!!! Warning - - A theme's [configuration] defined in a `mkdocs_theme.yml` file is not loaded - from `theme.custom_dir`. When an entire theme exists in `theme.custom_dir` - and `theme.name` is set to `null`, then the entire theme configuration must - be defined in the [theme] configuration option in the `mkdocs.yml` file. - - However, when a theme is [packaged] up for distribution, and loaded using - the `theme.name` configuration option, then a `mkdocs_theme.yml` file - is required for the theme. - -[styling your docs]: ./styling-your-docs.md#using-the-theme-custom_dir -[custom_dir]: ./configuration.md#custom_dir -[name]: ./configuration.md#name -[docs_dir]:./configuration.md#docs_dir -[configuration]: #theme-configuration -[packaged]: #packaging-themes -[theme]: ./configuration.md#theme - -## Basic theme - -The simplest `main.html` file is the following: - -```django - - - - {% if page.title %}{{ page.title }} - {% endif %}{{ config.site_name }} - - - {{ page.content }} - - -``` - -The body content from each page specified in `mkdocs.yml` is inserted using the -`{{ page.content }}` tag. Style-sheets and scripts can be brought into this -theme as with a normal HTML file. Navbars and tables of contents can also be -generated and included automatically, through the `nav` and `toc` objects, -respectively. If you wish to write your own theme, it is recommended to start -with one of the [built-in themes] and modify it accordingly. - -!!! Note - - As MkDocs uses [Jinja] as its template engine, you have access to all the - power of Jinja, including [template inheritance]. You may notice that the - themes included with MkDocs make extensive use of template inheritance and - blocks, allowing users to easily override small bits and pieces of the - templates from the theme [custom_dir]. Therefore, the built-in themes are - implemented in a `base.html` file, which `main.html` extends. Although not - required, third party template authors are encouraged to follow a similar - pattern and may want to define the same [blocks] as are used in the built-in - themes for consistency. - -[Jinja]: http://jinja.pocoo.org/ -[template inheritance]: http://jinja.pocoo.org/docs/dev/templates/#template-inheritance -[theme_dir]: ./styling-your-docs.md#using-the-theme_dir -[blocks]: ./styling-your-docs.md#overriding-template-blocks - -## Theme Files - -There are various files which a theme treats special in some way. Any other -files are simply copied from the theme directory to the same path in the -`site_dir` when the site it built. For example image and CSS files have no -special significance and are copied as-is. Note, however, that if the user -provides a file with the same path in their `docs_dir`, then the user's file -will replace the theme file. - -### Template Files - -Any files with the `.html` extension are considered to be template files and are -not copied from the theme directory or any subdirectories. 
Also, any files -listed in [static_templates] are treated as templates regardless of their file -extension. - -[static_templates]: #static_templates - -### Theme Meta Files - -The various files required for packaging a theme are also ignored. Specifically, -the `mkdocs_theme.yml` configuration file and any Python files. - -### Dot Files - -Theme authors can explicitly force MkDocs to ignore files by starting a file or -directory name with a dot. Any of the following files would be ignored: - -```text -.ignored.txt -.ignored/file.txt -foo/.ignored.txt -foo/.ignored/file.txt -``` - -### Documentation Files - -All documentation files are ignored. Specifically, any Markdown files (using any -of the file extensions supported by MKDocs). Additionally, any README files -which may exist in the theme directories are ignored. - -## Template Variables - -Each template in a theme is built with a template context. These are the -variables that are available to themes. The context varies depending on the -template that is being built. At the moment templates are either built with -the global context or with a page specific context. The global context is used -for HTML pages that don't represent an individual Markdown document, for -example a 404.html page or search.html. - -### Global Context - -The following variables are available globally on any template. - -#### config - -The `config` variable is an instance of MkDocs' config object generated from the -`mkdocs.yml` config file. While you can use any config option, some commonly -used options include: - -* [config.site_name](./configuration.md#site_name) -* [config.site_url](./configuration.md#site_url) -* [config.site_author](./configuration.md#site_author) -* [config.site_description](./configuration.md#site_description) -* [config.extra_javascript](./configuration.md#extra_javascript) -* [config.extra_css](./configuration.md#extra_css) -* [config.repo_url](./configuration.md#repo_url) -* [config.repo_name](./configuration.md#repo_name) -* [config.copyright](./configuration.md#copyright) -* [config.google_analytics](./configuration.md#google_analytics) - -#### nav - -The `nav` variable is used to create the navigation for the documentation. The -`nav` object is an iterable of [navigation objects](#navigation-objects) as -defined by the [nav] configuration setting. - -[nav]: configuration.md#nav - -In addition to the iterable of [navigation objects](#navigation-objects), the -`nav` object contains the following attributes: - -##### nav.homepage - -The [page](#page) object for the homepage of the site. - -##### nav.pages - -A flat list of all [page](#page) objects contained in the navigation. This list -is not necessarily a complete list of all site pages as it does not contain -pages which are not included in the navigation. This list does match the list -and order of pages used for all "next page" and "previous page" links. For a -list of all pages, use the [pages](#pages) template variable. - -##### Nav Example - -Following is a basic usage example which outputs the first and second level -navigation as a nested list. - -```django -{% if nav|length>1 %} -
    - {% for nav_item in nav %} - {% if nav_item.children %} -
  • {{ nav_item.title }} - -
  • - {% else %} - - {% endif %} - {% endfor %} -
-{% endif %} -``` - -#### base_url - -The `base_url` provides a relative path to the root of the MkDocs project. While -this can be used directly by prepending it to a local relative URL, it is best -to use the [url](#url) template filter, which is smarter about how it applies -`base_url`. - -#### mkdocs_version - -Contains the current MkDocs version. - -#### build_date_utc - -A Python datetime object that represents the date and time the documentation -was built in UTC. This is useful for showing how recently the documentation -was updated. - -#### pages - -A list of [page](#page) objects including *all* pages in the project. The list -is a flat list with all pages sorted alphanumerically by directory and file -name. Note that index pages sort to the top within a directory. This list can -contain pages not included in the global [navigation](#nav) and may not match -the order of pages within that navigation. - -#### page - -In templates which are not rendered from a Markdown source file, the `page` -variable is `None`. In templates which are rendered from a Markdown source file, -the `page` variable contains a `page` object. The same `page` objects are used -as `page` [navigation objects](#navigation-objects) in the global -[navigation](#nav) and in the [pages](#pages) template variable. - -All `page` objects contain the following attributes: - -##### page.title - -Contains the Title for the current page. - -##### page.content - -The rendered Markdown as HTML, this is the contents of the documentation. - -##### page.toc - -An iterable object representing the Table of contents for a page. Each item in -the `toc` is an `AnchorLink` which contains the following attributes: - -* `AnchorLink.title`: The text of the item. -* `AnchorLink.url`: The hash fragment of a URL pointing to the item. -* `AnchorLink.level`: The zero-based level of the item. -* `AnchorLink.children`: An iterable of any child items. - -The following example would display the top two levels of the Table of Contents -for a page. - -```django - -``` - -##### page.meta - -A mapping of the metadata included at the top of the markdown page. In this -example we define a `source` property above the page title. - -```no-highlight -source: generics.py - mixins.py - -# Page title - -Content... -``` - -A template can access this metadata for the page with the `meta.source` -variable. This could then be used to link to source files related to the -documentation page. - -```django -{% for filename in page.meta.source %} - - {{ filename }} - -{% endfor %} -``` - -##### page.url - -The URL of the page relative to the MkDocs `site_dir`. It is expected that this -be used with the [url](#url) filter to ensure the URL is relative to the current -page. - -```django -{{ page.title }} -``` - -[base_url]: #base_url - -##### page.abs_url - -The absolute URL of the page from the server root as determined by the value -assigned to the [site_url] configuration setting. The value includes any -subdirectory included in the `site_url`, but not the domain. [base_url] should -not be used with this variable. - -For example, if `site_url: https://example.com/`, then the value of -`page.abs_url` for the page `foo.md` would be `/foo/`. However, if -`site_url: https://example.com/bar/`, then the value of `page.abs_url` for the -page `foo.md` would be `/bar/foo/`. - -[site_url]: ./configuration.md#site_url - -##### page.canonical_url - -The full, canonical URL to the current page as determined by the value assigned -to the [site_url] configuration setting. 
The value includes the domain and any -subdirectory included in the `site_url`. [base_url] should not be used with this -variable. - -##### page.edit_url - -The full URL to the source page in the source repository. Typically used to -provide a link to edit the source page. [base_url] should not be used with this -variable. - -##### page.is_homepage - -Evaluates to `True` for the homepage of the site and `False` for all other -pages. This can be used in conjunction with other attributes of the `page` -object to alter the behavior. For example, to display a different title -on the homepage: - -```django -{% if not page.is_homepage %}{{ page.title }} - {% endif %}{{ site_name }} -``` - -##### page.previous_page - -The page object for the previous page or `None`. The value will be `None` if the -current page is the first item in the site navigation or if the current page is -not included in the navigation at all. When the value is a page object, the -usage is the same as for `page`. - -##### page.next_page - -The page object for the next page or `None`. The value will be `None` if the -current page is the last item in the site navigation or if the current page is -not included in the navigation at all. When the value is a page object, the -usage is the same as for `page`. - -##### page.parent - -The immediate parent of the page in the [site navigation](#nav). `None` if the -page is at the top level. - -##### page.children - -Pages do not contain children and the attribute is always `None`. - -##### page.active - -When `True`, indicates that this page is the currently viewed page. Defaults -to `False`. - -##### page.is_section - -Indicates that the navigation object is a "section" object. Always `False` for -page objects. - -##### page.is_page - -Indicates that the navigation object is a "page" object. Always `True` for -page objects. - -##### page.is_link - -Indicates that the navigation object is a "link" object. Always `False` for -page objects. - -### Navigation Objects - -Navigation objects contained in the [nav](#nav) template variable may be one of -[section](#section) objects, [page](#page) objects, and [link](#link) objects. -While section objects may contain nested navigation objects, pages and links do -not. - -Page objects are the full page object as used for the current [page](#page) with -all of the same attributes available. Section and Link objects contain a subset -of those attributes as defined below: - -#### Section - -A `section` navigation object defines a named section in the navigation and -contains a list of child navigation objects. Note that sections do not contain -URLs and are not links of any kind. However, by default, MkDocs sorts index -pages to the top and the first child might be used as the URL for a section if a -theme choses to do so. - - The following attributes are available on `section` objects: - -##### section.title - -The title of the section. - -##### section.parent - -The immediate parent of the section or `None` if the section is at the top -level. - -##### section.children - -An iterable of all child navigation objects. Children may include nested -sections, pages and links. - -##### section.active - -When `True`, indicates that a child page of this section is the current page and -can be used to highlight the section as the currently viewed section. Defaults -to `False`. - -##### section.is_section - -Indicates that the navigation object is a "section" object. Always `True` for -section objects. 
- -##### section.is_page - -Indicates that the navigation object is a "page" object. Always `False` for -section objects. - -##### section.is_link - -Indicates that the navigation object is a "link" object. Always `False` for -section objects. - -#### Link - -A `link` navigation object contains a link which does not point to an internal -MkDocs page. The following attributes are available on `link` objects: - -##### link.title - -The title of the link. This would generally be used as the label of the link. - -##### link.url - -The URL that the link points to. The URL should always be an absolute URLs and -should not need to have `base_url` prepened. - -##### link.parent - -The immediate parent of the link. `None` if the link is at the top level. - -##### link.children - -Links do not contain children and the attribute is always `None`. - -##### link.active - -External links cannot be "active" and the attribute is always `False`. - -##### link.is_section - -Indicates that the navigation object is a "section" object. Always `False` for -link objects. - -##### link.is_page - -Indicates that the navigation object is a "page" object. Always `False` for -link objects. - -##### link.is_link - -Indicates that the navigation object is a "link" object. Always `True` for -link objects. - -### Extra Context - -Additional variables can be passed to the template with the -[`extra`](/user-guide/configuration.md#extra) configuration option. This is a -set of key value pairs that can make custom templates far more flexible. - -For example, this could be used to include the project version of all pages -and a list of links related to the project. This can be achieved with the -following `extra` configuration: - -```yaml -extra: - version: 0.13.0 - links: - - https://github.com/mkdocs - - https://docs.readthedocs.org/en/latest/builds.html#mkdocs - - https://www.mkdocs.org/ -``` - -And then displayed with this HTML in the custom theme. - -```django -{{ config.extra.version }} - -{% if config.extra.links %} -
    - {% for link in config.extra.links %} -
  • {{ link }}
  • - {% endfor %} -
-{% endif %} -``` - -## Template Filters - -In addition to Jinja's default filters, the following custom filters are -available to use in MkDocs templates: - -### url - -Normalizes a URL. Absolute URLs are passed through unaltered. If the URL is -relative and the template context includes a page object, then the URL is -returned relative to the page object. Otherwise, the URL is returned with -[base_url](#base_url) prepended. - -```django -{{ page.title }} -``` - -### tojson - -Safety convert a Python object to a value in a JavaScript script. - -```django - -``` - -## Search and themes - -As of MkDocs version *0.17* client side search support has been added to MkDocs -via the `search` plugin. A theme needs to provide a few things for the plugin to -work with the theme. - -While the `search` plugin is activated by default, users can disable the plugin -and themes should account for this. It is recommended that theme templates wrap -search specific markup with a check for the plugin: - -```django -{% if 'search' in config['plugins'] %} - search stuff here... -{% endif %} -``` - -At its most basic functionality, the search plugin will simply provide an index -file which is no more than a JSON file containing the content of all pages. -The theme would need to implement its own search functionality client-side. -However, with a few settings and the necessary templates, the plugin can provide -a complete functioning client-side search tool based on [lunr.js]. - -The following HTML needs to be added to the theme so that the provided -JavaScript is able to properly load the search scripts and make relative links -to the search results from the current page. - -```django - -``` - -With properly configured settings, the following HTML in a template will add a -full search implementation to your theme. - -```django -

Search Results

- -
- -
- -
- Sorry, page not found. -
-``` - -The JavaScript in the plugin works by looking for the specific ID's used in the -above HTML. The form input for the user to type the search query must be -identified with `id="mkdocs-search-query"` and the div where the results will be -placed must be identified with `id="mkdocs-search-results"`. - -The plugin supports the following options being set in the [theme's -configuration file], `mkdocs_theme.yml`: - -### include_search_page - -Determines whether the search plugin expects the theme to provide a dedicated -search page via a template located at `search/search.html`. - -When `include_search_page` is set to `true`, the search template will be built -and available at `search/search.html`. This method is used by the `readthedocs` -theme. - -When `include_search_page` is set to `false` or not defined, it is expected that -the theme provide some other mechanisms for displaying search results. For -example, the `mkdocs` theme displays results on any page via a modal. - -### search_index_only - -Determines whether the search plugin should only generate a search index or a -complete search solution. - -When `search_index_only` is set to `false`, then the search plugin modifies the -Jinja environment by adding its own `templates` directory (with a lower -precedence than the theme) and adds its scripts to the `extra_javascript` config -setting. - -When `search_index_only` is set to `true` or not defined, the search plugin -makes no modifications to the Jinja environment. A complete solution using the -provided index file is the responsibility of the theme. - -The search index is written to a JSON file at `search/search_index.json` in the -[site_dir]. The JSON object contained within the file may contain up to three -objects. - -```json -{ - config: {...}, - data: [...], - index: {...} -} -``` - -If present, the `config` object contains the key/value pairs of config options -defined for the plugin in the user's `mkdocs.yml` config file under -`plugings.search`. The `config` object was new in MkDocs version *1.0*. - -The `data` object contains a list of document objects. Each document object is -made up of a `location` (URL), a `title`, and `text` which can be used to create -a search index and/or display search results. - -If present, the `index` object contains a pre-built index which offers -performance improvements for larger sites. Note that the pre-built index is only -created if the user explicitly enables the [prebuild_index] config option. -Themes should expect the index to not be present, but can choose to use the -index when it is available. The `index` object was new in MkDocs version *1.0*. - -[Jinja2 template]: http://jinja.pocoo.org/docs/dev/ -[built-in themes]: https://github.com/mkdocs/mkdocs/tree/master/mkdocs/themes -[theme's configuration file]: #theme-configuration -[lunr.js]: https://lunrjs.com/ -[site_dir]: configuration.md#site_dir -[prebuild_index]: configuration.md#prebuild_index - -## Packaging Themes - -MkDocs makes use of [Python packaging] to distribute themes. This comes with a -few requirements. - -To see an example of a package containing one theme, see the [MkDocs Bootstrap -theme] and to see a package that contains many themes, see the [MkDocs -Bootswatch theme]. - -!!! Note - - It is not strictly necessary to package a theme, as the entire theme - can be contained in the `custom_dir`. If you have created a "one-off theme," - that should be sufficient. 
However, if you intend to distribute your theme - for others to use, packaging the theme has some advantages. By packaging - your theme, your users can more easily install it, they can rely on a default - [configuration] being defined, and they can then take advantage of the - [custom_dir] to make tweaks to your theme to better suit their needs. - -[Python packaging]: https://packaging.python.org/en/latest/ -[MkDocs Bootstrap theme]: https://mkdocs.github.io/mkdocs-bootstrap/ -[MkDocs Bootswatch theme]: https://mkdocs.github.io/mkdocs-bootswatch/ - -### Package Layout - -The following layout is recommended for themes. Two files at the top level -directory called `MANIFEST.in` and `setup.py` beside the theme directory which -contains an empty `__init__.py` file, a theme configuration file -(`mkdocs-theme.yml`), and your template and media files. - -```no-highlight -. -|-- MANIFEST.in -|-- theme_name -| |-- __init__.py -| |-- mkdocs-theme.yml -| |-- main.html -| |-- styles.css -`-- setup.py -``` - -The `MANIFEST.in` file should contain the following contents but with -theme_name updated and any extra file extensions added to the include. - -```no-highlight -recursive-include theme_name *.ico *.js *.css *.png *.html *.eot *.svg *.ttf *.woff -recursive-exclude * __pycache__ -recursive-exclude * *.py[co] -``` - -The `setup.py` should include the following text with the modifications -described below. - -```python -from setuptools import setup, find_packages - -VERSION = '0.0.1' - - -setup( - name="mkdocs-themename", - version=VERSION, - url='', - license='', - description='', - author='', - author_email='', - packages=find_packages(), - include_package_data=True, - entry_points={ - 'mkdocs.themes': [ - 'themename = theme_name', - ] - }, - zip_safe=False -) -``` - -Fill in the URL, license, description, author and author email address. - -The name should follow the convention `mkdocs-themename` (like `mkdocs- -bootstrap` and `mkdocs-bootswatch`), starting with MkDocs, using hyphens to -separate words and including the name of your theme. - -Most of the rest of the file can be left unedited. The last section we need to -change is the entry_points. This is how MkDocs finds the theme(s) you are -including in the package. The name on the left is the one that users will use -in their mkdocs.yml and the one on the right is the directory containing your -theme files. - -The directory you created at the start of this section with the main.html file -should contain all of the other theme files. The minimum requirement is that -it includes a `main.html` for the theme. It **must** also include a -`__init__.py` file which should be empty, this file tells Python that the -directory is a package. - -### Theme Configuration - -A packaged theme is required to include a configuration file named -`mkdocs_theme.yml` which is placed in the root of your template files. The file -should contain default configuration options for the theme. However, if the -theme offers no configuration options, the file is still required and can be -left blank. A theme which is not packaged does not need a `mkdocs_theme.yml` -file as that file is not loaded from `theme.custom_dir`. - -The theme author is free to define any arbitrary options deemed necessary and -those options will be made available in the templates to control behavior. 
-For example, a theme might want to make a sidebar optional and include the -following in the `mkdocs_theme.yml` file: - -```yaml -show_sidebar: true -``` - -Then in a template, that config option could be referenced: - -```django -{% if config.theme.show_sidebar %} - -{% endif %} -``` - -And the user could override the default in their project's `mkdocs.yml` config -file: - -```yaml -theme: - name: themename - show_sidebar: false -``` - -In addition to arbitrary options defined by the theme, MkDocs defines a few -special options which alters its behavior: - -!!! block "" - - #### static_templates - - This option mirrors the [theme] config option of the same name and allows - some defaults to be set by the theme. Note that while the user can add - templates to this list, the user cannot remove templates included in the - theme's config. - - #### extends - - Defines a parent theme that this theme inherits from. The value should be - the string name of the parent theme. Normal Jinja inheritance rules apply. - -Plugins may also define some options which allow the theme to inform a plugin -about which set of plugin options it expects. See the documentation for any -plugins you may wish to support in your theme. - -### Distributing Themes - -With the above changes, your theme should now be ready to install. This can be -done with pip, using `pip install .` if you are still in the same directory as -the setup.py. - -Most Python packages, including MkDocs, are distributed on PyPI. To do this, -you should run the following command. - -```no-highlight -python setup.py register -``` - -If you don't have an account setup, you should be prompted to create one. - -For a much more detailed guide, see the official Python packaging -documentation for [Packaging and Distributing Projects]. - -[Packaging and Distributing Projects]: https://packaging.python.org/en/latest/distributing/ -[theme]: ./configuration.md#theme diff --git a/docs/user-guide/customizing-your-theme.md b/docs/user-guide/customizing-your-theme.md new file mode 100644 index 0000000..8d9155e --- /dev/null +++ b/docs/user-guide/customizing-your-theme.md @@ -0,0 +1,226 @@ +# Customizing Your Theme + +Altering a theme to suit your needs. + +--- + +If you would like to make a few tweaks to an existing theme, there is no need +to create your own theme from scratch. For minor tweaks which only require +some CSS and/or JavaScript, you can [use the docs_dir](#using-the-docs_dir). +However, for more complex customizations, including overriding templates, you +will need to [use the theme custom_dir](#using-the-theme-custom_dir) setting. + +## Using the docs_dir + +The [extra_css] and [extra_javascript] configuration options can be used to +make tweaks and customizations to existing themes. To use these, you simply +need to include either CSS or JavaScript files within your [documentation +directory]. + +For example, to change the color of the headers in your documentation, create +a file called (for example) `style.css` and place it next to the documentation Markdown. In +that file add the following CSS. + +```css +h1 { + color: red; +} +``` + +Then you need to add it to `mkdocs.yml`: + +```yaml +extra_css: + - style.css +``` + +After making these changes, they should be visible when you run +`mkdocs serve` - if you already had this running, you should see that the CSS +changes were automatically picked up and the documentation will be updated. 
+ +NOTE: +Any extra CSS or JavaScript files will be added to the generated HTML +document after the page content. If you desire to include a JavaScript +library, you may have better success including the library by using the +theme [custom_dir]. + +## Using the theme custom_dir + +The [`theme.custom_dir`][custom_dir] configuration option can be used to point +to a directory of files which override the files in a parent theme. The parent +theme would be the theme defined in the [`theme.name`][name] configuration +option. Any file in the `custom_dir` with the same name as a file in the +parent theme will replace the file of the same name in the parent theme. Any +additional files in the `custom_dir` will be added to the parent theme. The +contents of the `custom_dir` should mirror the directory structure of the +parent theme. You may include templates, JavaScript files, CSS files, images, +fonts, or any other media included in a theme. + +NOTE: +For this to work, the `theme.name` setting must be set to a known +installed theme. If the `name` setting is instead set to `null` (or not +defined), then there is no theme to override and the contents of the +`custom_dir` must be a complete, standalone theme. See the [Theme +Developer Guide][custom theme] for more information. + +For example, the [mkdocs] theme ([browse source]), contains the following +directory structure (in part): + +```nohighlight +- css\ +- fonts\ +- img\ + - favicon.ico + - grid.png +- js\ +- 404.html +- base.html +- content.html +- nav-sub.html +- nav.html +- toc.html +``` + +To override any of the files contained in that theme, create a new directory +next to your `docs_dir`: + +```bash +mkdir custom_theme +``` + +And then point your `mkdocs.yml` configuration file at the new directory: + +```yaml +theme: + name: mkdocs + custom_dir: custom_theme/ +``` + +To override the 404 error page ("file not found"), add a new template file named +`404.html` to the `custom_theme` directory. For information on what can be +included in a template, review the [Theme Developer Guide][custom theme]. + +To override the favicon, you can add a new icon file at +`custom_theme/img/favicon.ico`. + +To include a JavaScript library, copy the library to the `custom_theme/js/` +directory. + +Your directory structure should now look like this: + +```nohighlight +- docs/ + - index.html +- custom_theme/ + - img/ + - favicon.ico + - js/ + - somelib.js + - 404.html +- config.yml +``` + +NOTE: +Any files included in the parent theme (defined in `name`) but not +included in the `custom_dir` will still be utilized. The `custom_dir` will +only override/replace files in the parent theme. If you want to remove +files, or build a theme from scratch, then you should review the [Theme +Developer Guide][custom theme]. + +### Overriding Template Blocks + +The built-in themes implement many of their parts inside template blocks which +can be individually overridden in the `main.html` template. Simply create a +`main.html` template file in your `custom_dir` and define replacement blocks +within that file. Just make sure that the `main.html` extends `base.html`. For +example, to alter the title of the MkDocs theme, your replacement `main.html` +template would contain the following: + +```django +{% extends "base.html" %} + +{% block htmltitle %} +Custom title goes here +{% endblock %} +``` + +In the above example, the `htmltitle` block defined in your custom `main.html` file +will be used in place of the default `htmltitle` block defined in the parent theme. 
+You may re-define as many blocks as you desire, as long as those blocks are +defined in the parent. For example, you could replace the Google Analytics +script with one for a different service or replace the search feature with your +own. You will need to consult the parent theme you are using to determine what +blocks are available to override. The MkDocs and ReadTheDocs themes provide the +following blocks: + +* `site_meta`: Contains meta tags in the document head. +* `htmltitle`: Contains the page title in the document head. +* `styles`: Contains the link tags for stylesheets. +* `libs`: Contains the JavaScript libraries (jQuery, etc) included in the page header. +* `scripts`: Contains JavaScript scripts which should execute after a page loads. +* `analytics`: Contains the analytics script. +* `extrahead`: An empty block in the `` to insert custom tags/scripts/etc. +* `site_name`: Contains the site name in the navigation bar. +* `site_nav`: Contains the site navigation in the navigation bar. +* `search_button`: Contains the search box in the navigation bar. +* `next_prev`: Contains the next and previous buttons in the navigation bar. +* `repo`: Contains the repository link in the navigation bar. +* `content`: Contains the page content and table of contents for the page. +* `footer`: Contains the page footer. + +You may need to view the source template files to ensure your modifications will +work with the structure of the site. See [Template Variables] for a list of +variables you can use within your custom blocks. For a more complete +explanation of blocks, consult the [Jinja documentation]. + +### Combining the custom_dir and Template Blocks + +Adding a JavaScript library to the `custom_dir` will make it available, but +won't include it in the pages generated by MkDocs. Therefore, a link needs to +be added to the library from the HTML. + +Starting the with directory structure above (truncated): + +```nohighlight +- docs/ +- custom_theme/ + - js/ + - somelib.js +- config.yml +``` + +A link to the `custom_theme/js/somelib.js` file needs to be added to the +template. As `somelib.js` is a JavaScript library, it would logically go in the +`libs` block. However, a new `libs` block that only includes the new script will +replace the block defined in the parent template and any links to libraries in +the parent template will be removed. To avoid breaking the template, a +[super block] can be used with a call to `super` from within the block: + +```django +{% extends "base.html" %} + +{% block libs %} + {{ super() }} + +{% endblock %} +``` + +Note that the [base_url] template variable was used to ensure that the link is +always relative to the current page. + +Now the generated pages will include links to the template provided libraries as +well as the library included in the `custom_dir`. The same would be required for +any additional CSS files included in the `custom_dir`. 
+ +[custom theme]: ../dev-guide/themes.md +[extra_css]: ./configuration.md#extra_css +[extra_javascript]: ./configuration.md#extra_javascript +[documentation directory]: ./configuration.md#docs_dir +[custom_dir]: ./configuration.md#custom_dir +[name]: ./configuration.md#name +[mkdocs]: ./choosing-your-theme.md#mkdocs +[browse source]: https://github.com/mkdocs/mkdocs/tree/master/mkdocs/themes/mkdocs +[Template Variables]: ../dev-guide/themes.md#template-variables +[Jinja documentation]: https://jinja.palletsprojects.com/en/latest/templates/#template-inheritance +[super block]: https://jinja.palletsprojects.com/en/latest/templates/#super-blocks +[base_url]: ../dev-guide/themes.md#base_url diff --git a/docs/user-guide/deploying-your-docs.md b/docs/user-guide/deploying-your-docs.md index f4698b5..27b9847 100644 --- a/docs/user-guide/deploying-your-docs.md +++ b/docs/user-guide/deploying-your-docs.md @@ -35,11 +35,14 @@ to GitHub. Therefore, you may want to verify any changes you make to the docs beforehand by using the `build` or `serve` commands and reviewing the built files locally. -!!! warning +WARNING: +You should never edit files in your pages repository by hand if you're using +the `gh-deploy` command because you will lose your work the next time you +run the command. - You should never edit files in your pages repository by hand if you're using - the `gh-deploy` command because you will lose your work the next time you - run the command. +WARNING: +If there are untracked files or uncommitted work in the local repository where +`mkdocs gh-deploy` is run, these will be included in the pages that are deployed. ### Organization and User Pages @@ -49,7 +52,7 @@ with the GitHub account name. Therefore, you need working copies of two repositories on our local system. For example, consider the following file structure: -```no-highlight +```text my-project/ mkdocs.yml docs/ @@ -72,17 +75,6 @@ with the [remote_branch] configuration setting, but if you forget to change directories before running the deploy script, it will commit to the `master` branch of your project, which you probably don't want. -Be aware that you will not be able to review the built site before it is pushed -to GitHub. Therefore, you may want to verify any changes you make to the docs -beforehand by using the `build` or `serve` commands and reviewing the built -files locally. - -!!! warning - - You should never edit files in your pages repository by hand if you're using - the `gh-deploy` command because you will lose your work the next time you - run the command. - ### Custom Domains GitHub Pages includes support for using a [Custom Domain] for your site. In @@ -122,17 +114,16 @@ create an account and point it at your publicly hosted repository. If properly configured, your documentation will update each time you push commits to your public repository. -!!! note - - To benefit from all of the [features] offered by Read the Docs, you will need - to use the [Read the Docs theme][theme] which ships with MkDocs. The various - themes which may be referenced in Read the Docs' documentation are Sphinx - specific themes and will not work with MkDocs. +NOTE: +To benefit from all of the [features] offered by Read the Docs, you will need +to use the [Read the Docs theme][theme] which ships with MkDocs. The various +themes which may be referenced in Read the Docs' documentation are Sphinx +specific themes and will not work with MkDocs. 
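+
+For example, a minimal sketch of the relevant setting in your `mkdocs.yml`
+would be:
+
+```yaml
+theme:
+    name: readthedocs  # the Read the Docs theme bundled with MkDocs
+```
+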
[rtd]: https://readthedocs.org/ [instructions]: https://docs.readthedocs.io/en/stable/intro/getting-started-with-mkdocs.html [features]: https://docs.readthedocs.io/en/latest/features.html -[theme]: ./styling-your-docs.md#readthedocs +[theme]: ./choosing-your-theme.md#readthedocs ## Other Providers @@ -169,6 +160,64 @@ your hosts' file system. See your host's documentation for specifics. You will likely want to search their documentation for "ftp" or "uploading site". +## Local Files + +Rather than hosting your documentation on a server, you may instead distribute +the files directly, which can then be viewed in a browser using the `file://` +scheme. + +Note that, due to the security settings of all modern browsers, some things +will not work the same and some features may not work at all. In fact, a few +settings will need to be customized in very specific ways. + +- [site_url]: + + The `site_url` must be set to an empty string, which instructs MkDocs to + build your site so that it will work with the `file://` scheme. + + ```yaml + site_url: "" + ``` + +- [use_directory_urls]: + + Set `use_directory_urls` to `false`. Otherwise, internal links between + pages will not work properly. + + ```yaml + use_directory_urls: false + ``` + +- [search]: + + You will need to either disable the search plugin, or use a third-party + search plugin which is specifically designed to work with the `file://` + scheme. To disable all plugins, set the `plugins` setting to an empty list. + + ```yaml + plugins: [] + ``` + + If you have other plugins enabled, simply ensure that `search` is not + included in the list. + +When writing your documentation, it is imperative that all internal links use +relative URLs as [documented][internal links]. Remember, each reader of your +documentation will be using a different device and the files will likely be in a +different location on that device. + +If you expect your documentation to be viewed off-line, you may also need to be +careful about which themes you choose. Many themes make use of CDNs for various +support files, which require a live Internet connection. You will need to choose +a theme which includes all support files directly in the theme. + +When you build your site (using the `mkdocs build` command), all of the files +are written to the directory assigned to the [site_dir] configuration option +(defaults to `"site"`) in your `mkdocs.yaml` config file. Generally, you will +simply need to copy the contents of that directory and distribute it to your +readers. Alternatively, you may choose to use a third party tool to convert the +HTML files to some other documentation format. + ## 404 Pages When MkDocs builds the documentation it will include a 404.html file in the @@ -178,3 +227,7 @@ servers may be configured to use it but the feature won't always be available. See the documentation for your server of choice for more information. [site_dir]: ./configuration.md#site_dir +[site_url]: ./configuration.md#site_url +[use_directory_urls]: ./configuration.md#use_directory_urls +[search]: ./configuration.md#search +[internal links]: ./writing-your-docs.md#internal-links diff --git a/docs/user-guide/installation.md b/docs/user-guide/installation.md new file mode 100644 index 0000000..fbf0b91 --- /dev/null +++ b/docs/user-guide/installation.md @@ -0,0 +1,107 @@ +# MkDocs Installation + +A detailed guide. + +--- + +## Requirements + +MkDocs requires a recent version of [Python] and the Python package +manager, [pip], to be installed on your system. 
+ +You can check if you already have these installed from the command line: + +```console +$ python --version +Python 3.8.2 +$ pip --version +pip 20.0.2 from /usr/local/lib/python3.8/site-packages/pip (python 3.8) +``` + +If you already have those packages installed, you may skip down to [Installing +MkDocs](#installing-mkdocs). + +### Installing Python + +Install [Python] using your package manager of choice, or by downloading an +installer appropriate for your system from [python.org] and running it. + +> NOTE: +> If you are installing Python on Windows, be sure to check the box to have +> Python added to your PATH if the installer offers such an option (it's +> normally off by default). +> +> ![Add Python to PATH](../img/win-py-install.png) + +### Installing pip + +If you're using a recent version of Python, the Python package manager, [pip], +is most likely installed by default. However, you may need to upgrade pip to the +lasted version: + +```bash +pip install --upgrade pip +``` + +If you need to install pip for the first time, download [get-pip.py]. +Then run the following command to install it: + +```bash +python get-pip.py +``` + +## Installing MkDocs + +Install the `mkdocs` package using pip: + +```bash +pip install mkdocs +``` + +You should now have the `mkdocs` command installed on your system. Run `mkdocs +--version` to check that everything worked okay. + +```console +$ mkdocs --version +mkdocs, version 1.2.0 from /usr/local/lib/python3.8/site-packages/mkdocs (Python 3.8) +``` + +> NOTE: +> If you would like manpages installed for MkDocs, the [click-man] tool can +> generate and install them for you. Simply run the following two commands: +> +> ```bash +> pip install click-man +> click-man --target path/to/man/pages mkdocs +> ``` +> +> See the [click-man documentation] for an explanation of why manpages are +> not automatically generated and installed by pip. + +> NOTE: +> If you are using Windows, some of the above commands may not work +> out-of-the-box. +> +> A quick solution may be to preface every Python command with `python -m` +> like this: +> +> ```bash +> python -m pip install mkdocs +> python -m mkdocs +> ``` +> +> For a more permanent solution, you may need to edit your `PATH` environment +> variable to include the `Scripts` directory of your Python installation. +> Recent versions of Python include a script to do this for you. Navigate to +> your Python installation directory (for example `C:\Python38\`), open the +> `Tools`, then `Scripts` folder, and run the `win_add2path.py` file by double +> clicking on it. Alternatively, you can download the [script][a2p] and run it +> (`python win_add2path.py`). + +[Python]: https://www.python.org/ +[python.org]: https://www.python.org/downloads/ +[pip]: https://pip.readthedocs.io/en/stable/installing/ +[get-pip.py]: https://bootstrap.pypa.io/get-pip.py +[click-man]: https://github.com/click-contrib/click-man +[click-man documentation]: https://github.com/click-contrib/click-man#automatic-man-page-installation-with-setuptools-and-pip +[a2p]: https://github.com/python/cpython/blob/master/Tools/scripts/win_add2path.py diff --git a/docs/user-guide/localizing-your-theme.md b/docs/user-guide/localizing-your-theme.md new file mode 100644 index 0000000..42c81fc --- /dev/null +++ b/docs/user-guide/localizing-your-theme.md @@ -0,0 +1,63 @@ +# Localizing Your Theme + +Display your theme in your preferred language. 
+ +--- + +NOTE: +Theme localization only translates the text elements of the theme itself +(such as "next" and "previous" links), not the actual content of your +documentation. If you wish to create multilingual documentation, you need +to combine theme localization as described here with a third-party +internationalization/localization plugin. + +## Installation + +For theme localization to work, you must use a theme which supports it and +enable `i18n` (internationalization) support by installing `mkdocs[i18n]`: + +```bash +pip install mkdocs[i18n] +``` + +## Supported locales + +In most cases a locale is designated by the [ISO-639-1] (2-letter) abbreviation +for your language. However, a locale may also include a territory (or region or +county) code as well. The language and territory must be separated by an +underscore. For example, some possible locales for English might include `en`, +`en_AU`, `en_GB`, and `en_US`. + +For a list of locales supported by the theme you are using, see that theme's +documentation. + +- [mkdocs](choosing-your-theme.md#mkdocs-locale) +- [readthedocs](choosing-your-theme.md#readthedocs-locale) + +WARNING: +If you configure a language locale which is not yet supported by the theme +that you are using, MkDocs will fall back to the theme's default locale. + +## Usage + +To specify the locale that MkDocs should use, set the [locale] +parameter of the [theme] configuration option to the appropriate code. + +For example, to build the `mkdocs` theme in French you would use the following +in your `mkdocs.yml` configuration file: + +```yaml +theme: + name: mkdocs + locale: fr +``` + +## Contributing theme translations + +If a theme has not yet been translated into your language, feel free to +contribute a translation using the [Translation Guide]. + +[Translation Guide]: ../dev-guide/translations.md +[locale]: configuration.md#locale +[theme]: configuration.md#theme +[ISO-639-1]: https://en.wikipedia.org/wiki/ISO_639-1 diff --git a/docs/user-guide/plugins.md b/docs/user-guide/plugins.md deleted file mode 100644 index fb7bc8b..0000000 --- a/docs/user-guide/plugins.md +++ /dev/null @@ -1,417 +0,0 @@ -# MkDocs Plugins - -A Guide to installing, using and creating MkDocs Plugins - ---- - -## Installing Plugins - -Before a plugin can be used, it must be installed on the system. If you are -using a plugin which comes with MkDocs, then it was installed when you installed -MkDocs. However, to install third party plugins, you need to determine the -appropriate package name and install it using `pip`: - - pip install mkdocs-foo-plugin - -Once a plugin has been successfully installed, it is ready to use. It just needs -to be [enabled](#using-plugins) in the configuration file. The [MkDocs Plugins] -wiki page has a growing list of plugins that you can install and use. - -## Using Plugins - -The [`plugins`][config] configuration option should contain a list of plugins to -use when building the site. Each "plugin" must be a string name assigned to the -plugin (see the documentation for a given plugin to determine its "name"). A -plugin listed here must already be [installed](#installing-plugins). - -```yaml -plugins: - - search -``` - -Some plugins may provide configuration options of their own. If you would like -to set any configuration options, then you can nest a key/value mapping -(`option_name: option value`) of any options that a given plugin supports. 
Note -that a colon (`:`) must follow the plugin name and then on a new line the option -name and value must be indented and separated by a colon. If you would like to -define multiple options for a single plugin, each option must be defined on a -separate line. - -```yaml -plugins: - - search: - lang: en - foo: bar -``` - -For information regarding the configuration options available for a given plugin, -see that plugin's documentation. - -For a list of default plugins and how to override them, see the -[configuration][config] documentation. - -## Developing Plugins - -Like MkDocs, plugins must be written in Python. It is generally expected that -each plugin would be distributed as a separate Python module, although it is -possible to define multiple plugins in the same module. At a minimum, a MkDocs -Plugin must consist of a [BasePlugin] subclass and an [entry point] which -points to it. - -### BasePlugin - -A subclass of `mkdocs.plugins.BasePlugin` should define the behavior of the plugin. -The class generally consists of actions to perform on specific events in the build -process as well as a configuration scheme for the plugin. - -All `BasePlugin` subclasses contain the following attributes: - -#### config_scheme - -: A tuple of configuration validation instances. Each item must consist of a - two item tuple in which the first item is the string name of the - configuration option and the second item is an instance of - `mkdocs.config.config_options.BaseConfigOption` or any of its subclasses. - - For example, the following `config_scheme` defines three configuration options: `foo`, which accepts a string; `bar`, which accepts an integer; and `baz`, which accepts a boolean value. - - class MyPlugin(mkdocs.plugins.BasePlugin): - config_scheme = ( - ('foo', mkdocs.config.config_options.Type(str, default='a default value')), - ('bar', mkdocs.config.config_options.Type(int, default=0)), - ('baz', mkdocs.config.config_options.Type(bool, default=True)) - ) - - When the user's configuration is loaded, the above scheme will be used to - validate the configuration and fill in any defaults for settings not - provided by the user. The validation classes may be any of the classes - provided in `mkdocs.config.config_options` or a third party subclass defined - in the plugin. - - Any settings provided by the user which fail validation or are not defined - in the `config_scheme` will raise a `mkdocs.config.base.ValidationError`. - -#### config - -: A dictionary of configuration options for the plugin, which is populated by - the `load_config` method after configuration validation has completed. Use - this attribute to access options provided by the user. - - def on_pre_build(self, config): - if self.config['bool_option']: - # implement "bool_option" functionality here... - -All `BasePlugin` subclasses contain the following method(s): - -#### load_config(options) - -: Loads configuration from a dictionary of options. Returns a tuple of - `(errors, warnings)`. This method is called by MkDocs during configuration - validation and should not need to be called by the plugin. - -#### on_<event_name>() - -: Optional methods which define the behavior for specific [events]. The plugin - should define its behavior within these methods. Replace `` with - the actual name of the event. For example, the `pre_build` event would be - defined in the `on_pre_build` method. - - Most events accept one positional argument and various keyword arguments. 
It - is generally expected that the positional argument would be modified (or - replaced) by the plugin and returned. If nothing is returned (the method - returns `None`), then the original, unmodified object is used. The keyword - arguments are simply provided to give context and/or supply data which may - be used to determine how the positional argument should be modified. It is - good practice to accept keyword arguments as `**kwargs`. In the event that - additional keywords are provided to an event in a future version of MkDocs, - there will be no need to alter your plugin. - - For example, the following event would add an additional static_template to - the theme config: - - class MyPlugin(BasePlugin): - def on_config(self, config, **kwargs): - config['theme'].static_templates.add('my_template.html') - return config - -### Events - -There are three kinds of events: [Global Events], [Page Events] and -[Template Events]. - -#### Global Events - -Global events are called once per build at either the beginning or end of the -build process. Any changes made in these events will have a global effect on the -entire site. - -##### on_serve - -: The `serve` event is only called when the `serve` command is used during - development. It is passed the `Server` instance which can be modified before - it is activated. For example, additional files or directories could be added - to the list of "watched" files for auto-reloading. - - Parameters: - : __server:__ `livereload.Server` instance - : __config:__ global configuration object - : __builder:__ a callable which gets passed to each call to `server.watch` - - Returns: - : `livereload.Server` instance - -##### on_config - -: The `config` event is the first event called on build and is run immediately - after the user configuration is loaded and validated. Any alterations to the - config should be made here. - - Parameters: - : __config:__ global configuration object - - Returns: - : global configuration object - -##### on_pre_build - -: The `pre_build` event does not alter any variables. Use this event to call - pre-build scripts. - - Parameters: - : __config:__ global configuration object - -##### on_files - -: The `files` event is called after the files collection is populated from the - `docs_dir`. Use this event to add, remove, or alter files in the - collection. Note that Page objects have not yet been associated with the - file objects in the collection. Use [Page Events] to manipulate page - specific data. - - Parameters: - : __files:__ global files collection - : __config:__ global configuration object - - Returns: - : global files collection - -##### on_nav - -: The `nav` event is called after the site navigation is created and can - be used to alter the site navigation. - - Parameters: - : __nav:__ global navigation object - : __config:__ global configuration object - : __files:__ global files collection - - Returns: - : global navigation object - -##### on_env - -: The `env` event is called after the Jinja template environment is created - and can be used to alter the Jinja environment. - - Parameters: - : __env:__ global Jinja environment - : __config:__ global configuration object - : __files:__ global files collection - - Returns: - : global Jinja Environment - -##### on_post_build - -: The `post_build` event does not alter any variables. Use this event to call - post-build scripts. - - Parameters: - : __config:__ global configuration object - -#### Template Events - -Template events are called once for each non-page template. 
Each template event -will be called for each template defined in the [extra_templates] config setting -as well as any [static_templates] defined in the theme. All template events are -called after the [env] event and before any [page events]. - -##### on_pre_template - -: The `pre_template` event is called immediately after the subject template is - loaded and can be used to alter the content of the template. - - Parameters: - : __template__: the template contents as string - : __template_name__: string filename of template - : __config:__ global configuration object - - Returns: - : template contents as string - -##### on_template_context - -: The `template_context` event is called immediately after the context is created - for the subject template and can be used to alter the context for that specific - template only. - - Parameters: - : __context__: dict of template context variables - : __template_name__: string filename of template - : __config:__ global configuration object - - Returns: - : dict of template context variables - -##### on_post_template - -: The `post_template` event is called after the template is rendered, but before - it is written to disc and can be used to alter the output of the template. - If an empty string is returned, the template is skipped and nothing is is - written to disc. - - Parameters: - : __output_content__: output of rendered template as string - : __template_name__: string filename of template - : __config:__ global configuration object - - Returns: - : output of rendered template as string - -#### Page Events - -Page events are called once for each Markdown page included in the site. All -page events are called after the [post_template] event and before the -[post_build] event. - -##### on_pre_page - -: The `pre_page` event is called before any actions are taken on the subject - page and can be used to alter the `Page` instance. - - Parameters: - : __page:__ `mkdocs.nav.Page` instance - : __config:__ global configuration object - : __files:__ global files collection - - Returns: - : `mkdocs.nav.Page` instance - -##### on_page_read_source - -: The `on_page_read_source` event can replace the default mechanism to read - the contents of a page's source from the filesystem. - - Parameters: - : __page:__ `mkdocs.nav.Page` instance - : __config:__ global configuration object - - Returns: - : The raw source for a page as unicode string. If `None` is returned, the - default loading from a file will be performed. - -##### on_page_markdown - -: The `page_markdown` event is called after the page's markdown is loaded - from file and can be used to alter the Markdown source text. The meta- - data has been stripped off and is available as `page.meta` at this point. - - Parameters: - : __markdown:__ Markdown source text of page as string - : __page:__ `mkdocs.nav.Page` instance - : __config:__ global configuration object - : __files:__ global files collection - - Returns: - : Markdown source text of page as string - -##### on_page_content - -: The `page_content` event is called after the Markdown text is rendered to - HTML (but before being passed to a template) and can be used to alter the - HTML body of the page. 
- - Parameters: - : __html:__ HTML rendered from Markdown source as string - : __page:__ `mkdocs.nav.Page` instance - : __config:__ global configuration object - : __files:__ global files collection - - Returns: - : HTML rendered from Markdown source as string - -##### on_page_context - -: The `page_context` event is called after the context for a page is created - and can be used to alter the context for that specific page only. - - Parameters: - : __context__: dict of template context variables - : __page:__ `mkdocs.nav.Page` instance - : __config:__ global configuration object - : __nav:__ global navigation object - - Returns: - : dict of template context variables - -##### on_post_page - -: The `post_page` event is called after the template is rendered, but - before it is written to disc and can be used to alter the output of the - page. If an empty string is returned, the page is skipped and nothing is - written to disc. - - Parameters: - : __output:__ output of rendered template as string - : __page:__ `mkdocs.nav.Page` instance - : __config:__ global configuration object - - Returns: - : output of rendered template as string - -### Entry Point - -Plugins need to be packaged as Python libraries (distributed on PyPI separate -from MkDocs) and each must register as a Plugin via a setuptools entry_point. -Add the following to your `setup.py` script: - -```python -entry_points={ - 'mkdocs.plugins': [ - 'pluginname = path.to.some_plugin:SomePluginClass', - ] -} -``` - -The `pluginname` would be the name used by users (in the config file) and -`path.to.some_plugin:SomePluginClass` would be the importable plugin itself -(`from path.to.some_plugin import SomePluginClass`) where `SomePluginClass` is a -subclass of [BasePlugin] which defines the plugin behavior. Naturally, multiple -Plugin classes could exist in the same module. Simply define each as a separate -entry_point. - -```python -entry_points={ - 'mkdocs.plugins': [ - 'featureA = path.to.my_plugins:PluginA', - 'featureB = path.to.my_plugins:PluginB' - ] -} -``` - -Note that registering a plugin does not activate it. The user still needs to -tell MkDocs to use if via the config. - -[BasePlugin]:#baseplugin -[config]: configuration.md#plugins -[entry point]: #entry-point -[env]: #on_env -[events]: #events -[extra_templates]: configuration.md#extra_templates -[Global Events]: #global-events -[Page Events]: #page-events -[post_build]: #on_post_build -[post_template]: #on_post_template -[static_templates]: configuration.md#static_templates -[Template Events]: #template-events -[MkDocs Plugins]: https://github.com/mkdocs/mkdocs/wiki/MkDocs-Plugins diff --git a/docs/user-guide/styling-your-docs.md b/docs/user-guide/styling-your-docs.md deleted file mode 100644 index c48f40a..0000000 --- a/docs/user-guide/styling-your-docs.md +++ /dev/null @@ -1,373 +0,0 @@ -# Styling your docs - -How to style and theme your documentation. - ---- - -MkDocs includes a couple [built-in themes] as well as various [third party -themes], all of which can easily be customized with [extra CSS or -JavaScript][docs_dir] or overridden from the theme's [custom_dir]. You can also -create your own [custom theme] from the ground up for your documentation. - -To use a theme that is included in MkDocs, simply add this to your -`mkdocs.yml` config file. - - theme: readthedocs - -Replace [`readthedocs`](#readthedocs) with any of the [built-in themes] listed below. 
- -To create a new custom theme see the [Custom Themes][custom theme] page, or to -more heavily customize an existing theme, see -the [Customizing a Theme][customize] section below. - -## Built-in themes - -### mkdocs - -The default theme, which was built as a custom [Bootstrap] theme, supports most -every feature of MkDocs. - -![mkdocs](../img/mkdocs.png) - -In addition to the default [theme configuration options], the `mkdocs` theme -supports the following options: - -* __`highlightjs`__: Enables highlighting of source code in code blocks using - the [highlight.js] JavaScript library. Default: `True`. - -* __`hljs_style`__: The highlight.js library provides 79 different [styles] - (color variations) for highlighting source code in code blocks. Set this to - the name of the desired style. Default: `github`. - -* __`hljs_languages`__: By default, highlight.js only supports 23 common - languages. List additional languages here to include support for them. - - theme: - name: mkdocs - highlightjs: true - hljs_languages: - - yaml - - rust - -* __`shortcuts`__: Defines keyboard shortcut keys. - - theme: - name: mkdocs - shortcuts: - help: 191 # ? - next: 78 # n - previous: 80 # p - search: 83 # s - - All values much be numeric key codes. It is best to use keys which are - available on all keyboards. You may use to determine - the key code for a given key. - - * __`help`__: Display a help modal which lists the keyboard shortcuts. - Default: `191` (?) - - * __`next`__: Navigate to the "next" page. Default: `78` (n) - - * __`previous`__: Navigate to the "previous" page. Default: `80` (p) - - * __`search`__: Display the search modal. Default: `83` (s) - -* __`navigation_depth`__: The maximum depth of the navigation tree in the - sidebar. Default: `2`. - -* __`nav_style`__: This adjusts the visual style for the top navigation bar; by - default, this is set to `primary` (the default), but it can also be set to - `dark` or `light`. - - theme: - name: mkdocs - nav_style: dark - -[styles]: https://highlightjs.org/static/demo/ - -### readthedocs - -A clone of the default theme used by the [Read the Docs] service, which offers -the same restricted feature-set as its parent theme. Like its parent theme, only -two levels of navigation are supported. - -![ReadTheDocs](../img/readthedocs.png) - -In addition to the default [theme configuration options], the `readthedocs` -theme supports the following options: - -* __`highlightjs`__: Enables highlighting of source code in code blocks using - the [highlight.js] JavaScript library. Default: `True`. - -* __`hljs_languages`__: By default, highlight.js only supports 23 common - languages. List additional languages here to include support for them. - - theme: - name: readthedocs - highlightjs: true - hljs_languages: - - yaml - - rust - -* __`include_homepage_in_sidebar`__: Lists the homepage in the sidebar menu. As - MkDocs requires that the homepage be listed in the `nav` configuration - option, this setting allows the homepage to be included or excluded from - the sidebar. Note that the site name/logo always links to the homepage. - Default: `True`. - -* __`prev_next_buttons_location`__: One of `bottom`, `top`, `both` , or `none`. - Displays the “Next” and “Previous” buttons accordingly. Default: `bottom`. - -* __`navigation_depth`__: The maximum depth of the navigation tree in the - sidebar. Default: `4`. - -* __`collapse_navigation`__: Only include the page section headers in the - sidebar for the current page. Default: `True`. 
- -* __`titles_only`__: Only include page titles in the sidebar, excluding all - section headers for all pages. Default: `False`. - -* __`sticky_navigation`__: If True, causes the sidebar to scroll with the main - page content as you scroll the page. Default: `True`. - -### Third Party Themes - -A list of third party themes can be found in the MkDocs [community wiki]. If you -have created your own, please feel free to add it to the list. - -## Customizing a Theme - -If you would like to make a few tweaks to an existing theme, there is no need to -create your own theme from scratch. For minor tweaks which only require some CSS -and/or JavaScript, you can use the [docs_dir]. However, for more complex -customizations, including overriding templates, you will need to use the theme -[custom_dir] setting. - -### Using the docs_dir - -The [extra_css] and [extra_javascript] configuration options can be used to -make tweaks and customizations to existing themes. To use these, you simply -need to include either CSS or JavaScript files within your [documentation -directory]. - -For example, to change the colour of the headers in your documentation, create -a file called `extra.css` and place it next to the documentation Markdown. In -that file add the following CSS. - -```CSS -h1 { - color: red; -} -``` - -!!! note - - If you are deploying your documentation with [ReadTheDocs]. You will need - to explicitly list the CSS and JavaScript files you want to include in - your config. To do this, add the following to your mkdocs.yml. - - extra_css: [extra.css] - -After making these changes, they should be visible when you run -`mkdocs serve` - if you already had this running, you should see that the CSS -changes were automatically picked up and the documentation will be updated. - -!!! note - - Any extra CSS or JavaScript files will be added to the generated HTML - document after the page content. If you desire to include a JavaScript - library, you may have better success including the library by using the - theme [custom_dir]. - -### Using the theme custom_dir - -The theme.[custom_dir] configuration option can be used to point to a directory -of files which override the files in a parent theme. The parent theme would be -the theme defined in the theme.[name] configuration option. Any file in the -`custom_dir` with the same name as a file in the parent theme will replace the -file of the same name in the parent theme. Any additional files in the -`custom_dir` will be added to the parent theme. The contents of the `custom_dir` -should mirror the directory structure of the parent theme. You may include -templates, JavaScript files, CSS files, images, fonts, or any other media -included in a theme. - -!!! Note - - For this to work, the theme `name` setting must be set to a known installed theme. - If the `name` setting is instead set to `null` (or not defined), then there - is no theme to override and the contents of the `custom_dir` must be a - complete, standalone theme. See [Custom Themes][custom theme] for more - information. 
- -For example, the [mkdocs] theme ([browse source]), contains the following -directory structure (in part): - -```nohighlight -- css\ -- fonts\ -- img\ - - favicon.ico - - grid.png -- js\ -- 404.html -- base.html -- content.html -- nav-sub.html -- nav.html -- toc.html -``` - -To override any of the files contained in that theme, create a new directory -next to your `docs_dir`: - -```bash -mkdir custom_theme -``` - -And then point your `mkdocs.yml` configuration file at the new directory: - -```yaml -theme: - name: mkdocs - custom_dir: custom_theme/ -``` - -To override the 404 error page ("file not found"), add a new template file named -`404.html` to the `custom_theme` directory. For information on what can be -included in a template, review the documentation for building a [custom theme]. - -To override the favicon, you can add a new icon file at -`custom_theme/img/favicon.ico`. - -To include a JavaScript library, copy the library to the `custom_theme/js/` -directory. - -Your directory structure should now look like this: - -```nohighlight -- docs/ - - index.html -- custom_theme/ - - img/ - - favicon.ico - - js/ - - somelib.js - - 404.html -- config.yml -``` - -!!! Note - - Any files included in the parent theme (defined in `name`) but not included - in the `custom_dir` will still be utilized. The `custom_dir` will only - override/replace files in the parent theme. If you want to remove files, or - build a theme from scratch, then you should review the documentation for - building a [custom theme]. - -#### Overriding Template Blocks - -The built-in themes implement many of their parts inside template blocks which -can be individually overridden in the `main.html` template. Simply create a -`main.html` template file in your `custom_dir` and define replacement blocks -within that file. Just make sure that the `main.html` extends `base.html`. For -example, to alter the title of the MkDocs theme, your replacement `main.html` -template would contain the following: - -```django -{% extends "base.html" %} - -{% block htmltitle %} -Custom title goes here -{% endblock %} -``` - -In the above example, the `htmltitle` block defined in your custom `main.html` file -will be used in place of the default `htmltitle` block defined in the parent theme. -You may re-define as many blocks as you desire, as long as those blocks are -defined in the parent. For example, you could replace the Google Analytics -script with one for a different service or replace the search feature with your -own. You will need to consult the parent theme you are using to determine what -blocks are available to override. The MkDocs and ReadTheDocs themes provide the -following blocks: - -* `site_meta`: Contains meta tags in the document head. -* `htmltitle`: Contains the page title in the document head. -* `styles`: Contains the link tags for stylesheets. -* `libs`: Contains the JavaScript libraries (jQuery, etc) included in the page header. -* `scripts`: Contains JavaScript scripts which should execute after a page loads. -* `analytics`: Contains the analytics script. -* `extrahead`: An empty block in the `` to insert custom tags/scripts/etc. -* `site_name`: Contains the site name in the navigation bar. -* `site_nav`: Contains the site navigation in the navigation bar. -* `search_button`: Contains the search box in the navigation bar. -* `next_prev`: Contains the next and previous buttons in the navigation bar. -* `repo`: Contains the repository link in the navigation bar. 
-* `content`: Contains the page content and table of contents for the page. -* `footer`: Contains the page footer. - -You may need to view the source template files to ensure your modifications will -work with the structure of the site. See [Template Variables] for a list of -variables you can use within your custom blocks. For a more complete -explanation of blocks, consult the [Jinja documentation]. - -#### Combining the custom_dir and Template Blocks - -Adding a JavaScript library to the `custom_dir` will make it available, but -won't include it in the pages generated by MkDocs. Therefore, a link needs to -be added to the library from the HTML. - -Starting the with directory structure above (truncated): - -```nohighlight -- docs/ -- custom_theme/ - - js/ - - somelib.js -- config.yml -``` - -A link to the `custom_theme/js/somelib.js` file needs to be added to the -template. As `somelib.js` is a JavaScript library, it would logically go in the -`libs` block. However, a new `libs` block that only includes the new script will -replace the block defined in the parent template and any links to libraries in -the parent template will be removed. To avoid breaking the template, a -[super block] can be used with a call to `super` from within the block: - -```django -{% extends "base.html" %} - -{% block libs %} - {{ super() }} - -{% endblock %} -``` - -Note that the [base_url] template variable was used to ensure that the link is -always relative to the current page. - -Now the generated pages will include links to the template provided libraries as -well as the library included in the `custom_dir`. The same would be required for -any additional CSS files included in the `custom_dir`. - -[browse source]: https://github.com/mkdocs/mkdocs/tree/master/mkdocs/themes/mkdocs -[built-in themes]: #built-in-themes -[Bootstrap]: https://getbootstrap.com/ -[theme configuration options]: ./configuration.md#theme -[Read the Docs]: https://readthedocs.org/ -[community wiki]: https://github.com/mkdocs/mkdocs/wiki/MkDocs-Themes -[custom theme]: ./custom-themes.md -[customize]: #customizing-a-theme -[docs_dir]: #using-the-docs_dir -[documentation directory]: ./configuration.md#docs_dir -[extra_css]: ./configuration.md#extra_css -[extra_javascript]: ./configuration.md#extra_javascript -[Jinja documentation]: http://jinja.pocoo.org/docs/dev/templates/#template-inheritance -[mkdocs]: #mkdocs -[ReadTheDocs]: ./deploying-your-docs.md#readthedocs -[Template Variables]: ./custom-themes.md#template-variables -[custom_dir]: ./configuration.md#custom_dir -[name]: ./configuration.md#name -[third party themes]: #third-party-themes -[super block]: http://jinja.pocoo.org/docs/dev/templates/#super-blocks -[base_url]: ./custom-themes.md#base_url -[highlight.js]: https://highlightjs.org/ diff --git a/docs/user-guide/writing-your-docs.md b/docs/user-guide/writing-your-docs.md index 4a2821e..d26a68f 100644 --- a/docs/user-guide/writing-your-docs.md +++ b/docs/user-guide/writing-your-docs.md @@ -14,29 +14,25 @@ the `mkdocs.yml` configuration file. The simplest project you can create will look something like this: -```no-highlight +```text mkdocs.yml docs/ index.md ``` By convention your project homepage should be named `index.md` (see [Index -pages](#index_pages) below for details). Any of the following file +pages](#index-pages) below for details). Any of the following file extensions may be used for your Markdown source files: `markdown`, `mdown`, `mkdn`, `mkd`, `md`. 
All Markdown files included in your documentation directory will be rendered in the built site regardless of any settings. -!!! note - - Files and directories with names which begin with a dot (for example: - `.foo.md` or `.bar/baz.md`) are ignored by MkDocs, which matches the - behavior of most web servers. There is no option to override this - behavior. +NOTE: +Files and directories with names which begin with a dot (for example: `.foo.md` or `.bar/baz.md`) are ignored by MkDocs. This can be overridden with the [`exclude_docs` config](configuration.md#exclude_docs). You can also create multi-page documentation, by creating several Markdown files: -```no-highlight +```text mkdocs.yml docs/ index.md @@ -47,7 +43,7 @@ docs/ The file layout you use determines the URLs that are used for the generated pages. Given the above layout, pages would be generated for the following URLs: -```no-highlight +```text / /about/ /license/ @@ -56,7 +52,7 @@ pages. Given the above layout, pages would be generated for the following URLs: You can also include your Markdown files in nested directories if that better suits your documentation layout. -```no-highlight +```text docs/ index.md user-guide/getting-started.md @@ -67,7 +63,7 @@ docs/ Source files inside nested directories will cause pages to be generated with nested URLs, like so: -```no-highlight +```text / /user-guide/getting-started/ /user-guide/configuration-options/ @@ -76,8 +72,8 @@ nested URLs, like so: Any files which are not identified as Markdown files (by their file extension) within the [documentation directory](configuration.md#docs_dir) are copied by -MkDocs to the built site unaltered. See [how to link to images and media] -(#linking_to_images_and_media) below for details. +MkDocs to the built site unaltered. See +[how to link to images and media](#linking-to-images-and-media) below for details. ### Index pages @@ -112,10 +108,10 @@ your navigation menu sorted differently. A minimal navigation configuration could look like this: -```no-highlight +```yaml nav: - - 'index.md' - - 'about.md' + - 'index.md' + - 'about.md' ``` All paths in the navigation configuration must be relative to the `docs_dir` @@ -128,10 +124,10 @@ level and with their titles inferred from the contents of the Markdown file or, if no title is defined within the file, of the file name. To override the title in the `nav` setting add a title right before the filename. -```no-highlight +```yaml nav: - - Home: 'index.md' - - About: 'about.md' + - Home: 'index.md' + - About: 'about.md' ``` Note that if a title is defined for a page in the navigation, that title will be @@ -141,15 +137,15 @@ within the page itself. Navigation sub-sections can be created by listing related pages together under a section title. For example: -```no-highlight +```yaml nav: - - Home: 'index.md' - - 'User Guide': - - 'Writing your docs': 'writing-your-docs.md' - - 'Styling your docs': 'styling-your-docs.md' - - About: - - 'License': 'license.md' - - 'Release Notes': 'release-notes.md' + - Home: 'index.md' + - 'User Guide': + - 'Writing your docs': 'writing-your-docs.md' + - 'Styling your docs': 'styling-your-docs.md' + - About: + - 'License': 'license.md' + - 'Release Notes': 'release-notes.md' ``` With the above configuration we have three top level items: "Home", "User Guide" @@ -207,24 +203,24 @@ When linking between pages in the documentation you can simply use the regular Markdown [linking][links] syntax, including the *relative path* to the Markdown document you wish to link to. 
-```no-highlight +```markdown Please see the [project license](license.md) for further details. ``` When the MkDocs build runs, these Markdown links will automatically be transformed into an HTML hyperlink to the appropriate HTML page. -!!! warning - Using absolute paths with links is not officially supported. Relative paths - are adjusted by MkDocs to ensure they are always relative to the page. Absolute - paths are not modified at all. This means that your links using absolute paths - might work fine in your local environment but they might break once you deploy - them to your production server. +WARNING: +Using absolute paths with links is not officially supported. Relative paths +are adjusted by MkDocs to ensure they are always relative to the page. Absolute +paths are not modified at all. This means that your links using absolute paths +might work fine in your local environment but they might break once you deploy +them to your production server. If the target documentation file is in another directory you'll need to make sure to include any relative directory path in the link. -```no-highlight +```markdown Please see the [project license](../about/license.md) for further details. ``` @@ -233,7 +229,7 @@ Markdown documents. You can use that ID to link to a section within a target document by using an anchor link. The generated HTML will correctly transform the path portion of the link, and leave the anchor portion intact. -```no-highlight +```markdown Please see the [project license](about.md#license) for further details. ``` @@ -244,45 +240,51 @@ dashes. Consecutive dashes are then reduced to a single dash. There are a few configuration settings provided by the toc extension which you can set in your `mkdocs.yml` configuration file to alter the default behavior: -`permalink`: +* **`permalink`** -: Generate permanent links at the end of each header. Default: `False`. + Generate permanent links at the end of each header. Default: `False`. When set to True the paragraph symbol (¶ or `¶`) is used as the link text. When set to a string, the provided string is used as the link text. For example, to use the hash symbol (`#`) instead, do: - markdown_extensions: - - toc: - permalink: "#" + ```yaml + markdown_extensions: + - toc: + permalink: "#" + ``` -`baselevel`: +* **`baselevel`** -: Base level for headers. Default: `1`. + Base level for headers. Default: `1`. This setting allows the header levels to be automatically adjusted to fit within the hierarchy of your HTML templates. For example, if the Markdown text for a page should not contain any headers higher than level 2 (`
<h2>
`), do: - markdown_extensions: - - toc: - baselevel: 2 + ```yaml + markdown_extensions: + - toc: + baselevel: 2 + ``` Then any headers in your document would be increased by 1. For example, the header `# Header` would be rendered as a level 2 header (`
<h2>
`) in the HTML output. -`separator`: +* **`separator`** -: Word separator. Default: `-`. + Word separator. Default: `-`. Character which replaces white-space in generated IDs. If you prefer underscores, then do: - markdown_extensions: - - toc: - separator: "_" + ```yaml + markdown_extensions: + - toc: + separator: "_" + ``` Note that if you would like to define multiple of the above settings, you must do so under a single `toc` entry in the `markdown_extensions` configuration @@ -290,10 +292,10 @@ option. ```yml markdown_extensions: - - toc: - permalink: "#" - baselevel: 2 - separator: "_" + - toc: + permalink: "#" + baselevel: 2 + separator: "_" ``` [toc]: https://python-markdown.github.io/extensions/toc/ @@ -304,11 +306,11 @@ As well as the Markdown source files, you can also include other file types in your documentation, which will be copied across when generating your documentation site. These might include images and other media. -For example, if your project documentation needed to include a [GitHub pages +For example, if your project documentation needed to include a [GitHub Pages CNAME file] and a PNG formatted screenshot image then your file layout might look as follows: -```no-highlight +```text mkdocs.yml docs/ CNAME @@ -333,7 +335,7 @@ Cupcake indexer is a snazzy new project for indexing small cakes. Your image will now be embedded when you build the documentation, and should also be previewed if you're working on the documentation with a Markdown editor. -[GitHub pages CNAME file]: https://help.github.com/articles/using-a-custom-domain-with-github-pages/ +[GitHub Pages CNAME file]: https://help.github.com/articles/using-a-custom-domain-with-github-pages/ #### Linking from raw HTML @@ -359,27 +361,30 @@ In addition to displaying information in a template, MkDocs includes support for a few predefined meta-data keys which can alter the behavior of MkDocs for that specific page. The following keys are supported: -`template`: +* **`template`** -: The template to use with the current page. + The template to use with the current page. By default, MkDocs uses the `main.html` template of a theme to render Markdown pages. You can use the `template` meta-data key to define a different template file for that specific page. The template file must be available on the path(s) defined in the theme's environment. -`title`: +* **`title`** -: The "title" to use for the document. + The "title" to use for the document. MkDocs will attempt to determine the title of a document in the following ways, in order: - 1. A title defined in the [nav] configuration setting for a document. - 2. A title defined in the `title` meta-data key of a document. - 3. A level 1 Markdown header on the first line of the document body. - Please note that [Setext-style] headers are not supported. - 4. The filename of a document. + 1. A title defined in the [nav] configuration setting for a document. + + 2. A title defined in the `title` meta-data key of a document. + + 3. A level 1 Markdown header on the first line of the document body. + ([Setext-style] headers are supported *only since MkDocs 1.5*.) + + 4. The filename of a document. Upon finding a title for a page, MkDoc does not continue checking any additional sources in the above list. @@ -389,12 +394,12 @@ specific page. The following keys are supported: #### YAML Style Meta-Data YAML style meta-data consists of [YAML] key/value pairs wrapped in YAML style -deliminators to mark the start and/or end of the meta-data. 
The first line of +delimiters to mark the start and/or end of the meta-data. The first line of a document must be `---`. The meta-data ends at the first line containing an -end deliminator (either `---` or `...`). The content between the deliminators is +end deliminator (either `---` or `...`). The content between the delimiters is parsed as [YAML]. -```no-highlight +```text --- title: My Document summary: A brief description of my document. @@ -423,7 +428,7 @@ MultiMarkdown style meta-data uses a format first introduced by the [MultiMarkdown] project. The data consists of a series of keywords and values defined at the beginning of a Markdown document, like this: -```no-highlight +```text Title: My Document Summary: A brief description of my document. Authors: Waylan Limberg @@ -446,18 +451,17 @@ many lines as desired. All lines are joined into a single string. The first blank line ends all meta-data for the document. Therefore, the first line of a document must not be blank. -!!! note - - MkDocs does not support YAML style deliminators (`---` or `...`) for - MultiMarkdown style meta-data. In fact, MkDocs relies on the the presence or - absence of the deliminators to determine whether YAML style meta-data or - MultiMarkdown style meta-data is being used. If the deliminators are - detected, but the content between the deliminators is not valid YAML - meta-data, MkDocs does not attempt to parse the content as MultiMarkdown - style meta-data. - -[YAML]: http://yaml.org -[MultiMarkdown]: http://fletcherpenney.net/MultiMarkdown_Syntax_Guide#metadata +NOTE: +MkDocs does not support YAML style delimiters (`---` or `...`) for +MultiMarkdown style meta-data. In fact, MkDocs relies on the the presence or +absence of the delimiters to determine whether YAML style meta-data or +MultiMarkdown style meta-data is being used. If the delimiters are +detected, but the content between the delimiters is not valid YAML +meta-data, MkDocs does not attempt to parse the content as MultiMarkdown +style meta-data. + +[YAML]: https://yaml.org +[MultiMarkdown]: https://fletcherpenney.net/MultiMarkdown_Syntax_Guide#metadata [nav]: configuration.md#nav ### Tables @@ -468,7 +472,7 @@ only useful for simple tabular data. A simple table looks like this: -```no-highlight +```markdown First Header | Second Header | Third Header ------------ | ------------- | ------------ Content Cell | Content Cell | Content Cell @@ -477,7 +481,7 @@ Content Cell | Content Cell | Content Cell If you wish, you can add a leading and tailing pipe to each line of the table: -```no-highlight +```markdown | First Header | Second Header | Third Header | | ------------ | ------------- | ------------ | | Content Cell | Content Cell | Content Cell | @@ -486,7 +490,7 @@ If you wish, you can add a leading and tailing pipe to each line of the table: Specify alignment for each column by adding colons to separator lines: -```no-highlight +```markdown First Header | Second Header | Third Header :----------- |:-------------:| -----------: Left | Center | Right @@ -510,7 +514,7 @@ blocks without indentation. The first line should contain 3 or more backtick (`` ` ``) characters, and the last line should contain the same number of backtick characters (`` ` ``): -````no-highlight +````markdown ``` Fenced code blocks are like Standard Markdown’s regular code blocks, except that @@ -523,7 +527,7 @@ code block. 
With this approach, the language can optionally be specified on the first line after the backticks which informs any syntax highlighters of the language used: -````no-highlight +````markdown ```python def fn(): pass diff --git a/hatch_build.py b/hatch_build.py new file mode 100644 index 0000000..9f76ebd --- /dev/null +++ b/hatch_build.py @@ -0,0 +1,14 @@ +import os + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +class CustomBuildHook(BuildHookInterface): + def initialize(self, version, build_data): + from babel.messages.frontend import compile_catalog + + for theme in 'mkdocs', 'readthedocs': + cmd = compile_catalog() + cmd.directory = os.path.join('mkdocs', 'themes', theme, 'locales') + cmd.finalize_options() + cmd.run() diff --git a/mkdocs.yml b/mkdocs.yml index f120128..f857075 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,46 +1,74 @@ site_name: MkDocs -site_url: https://www.mkdocs.org +site_url: https://www.mkdocs.org/ site_description: Project documentation with Markdown. site_author: MkDocs Team repo_url: https://github.com/mkdocs/mkdocs/ -edit_uri: "" +edit_uri: blob/master/docs/ theme: - name: mkdocs - highlightjs: true - hljs_languages: - - yaml - - django + name: mkdocs + locale: en + analytics: {gtag: 'G-274394082'} + highlightjs: true + hljs_languages: + - yaml + - django nav: - - Home: index.md - - User Guide: - - Writing Your Docs: user-guide/writing-your-docs.md - - Styling Your Docs: user-guide/styling-your-docs.md - - Configuration: user-guide/configuration.md - - Deploying Your Docs: user-guide/deploying-your-docs.md - - Custom Themes: user-guide/custom-themes.md - - Plugins: user-guide/plugins.md - - About: - - Release Notes: about/release-notes.md - - Contributing: about/contributing.md - - License: about/license.md + - Home: index.md + - Getting Started: getting-started.md + - User Guide: user-guide/ + - Developer Guide: dev-guide/ + - About: + - Release Notes: about/release-notes.md + - Contributing: about/contributing.md + - License: about/license.md extra_css: - - css/extra.css + - css/extra.css markdown_extensions: - - toc: - permalink:  - - admonition - - def_list - - mdx_gh_links: - user: mkdocs - repo: mkdocs + - toc: + permalink:  + - attr_list + - def_list + - tables + - pymdownx.highlight: + use_pygments: false + - pymdownx.snippets + - pymdownx.superfences + - callouts + - mdx_gh_links: + user: mkdocs + repo: mkdocs + - mkdocs-click -copyright: Copyright © 2014 Tom Christie, Maintained by the MkDocs Team. -google_analytics: ['UA-27795084-5', 'mkdocs.org'] +copyright: Copyright © 2014 Tom Christie, Maintained by the MkDocs Team. 
+ +hooks: + - docs/hooks.py plugins: - - search + - search + - redirects: + redirect_maps: + user-guide/plugins.md: dev-guide/plugins.md + user-guide/custom-themes.md: dev-guide/themes.md + user-guide/styling-your-docs.md: user-guide/choosing-your-theme.md + - autorefs + - literate-nav: + nav_file: README.md + implicit_index: true + - mkdocstrings: + handlers: + python: + options: + docstring_section_style: list + members_order: source + show_root_heading: true + show_source: false + show_signature_annotations: true + +watch: + - mkdocs diff --git a/mkdocs/__init__.py b/mkdocs/__init__.py index 64811e6..c6257ad 100644 --- a/mkdocs/__init__.py +++ b/mkdocs/__init__.py @@ -2,4 +2,4 @@ # For acceptable version formats, see https://www.python.org/dev/peps/pep-0440/ -__version__ = '1.1.2' +__version__ = '1.5.3' diff --git a/mkdocs/__main__.py b/mkdocs/__main__.py index f017432..77618fb 100644 --- a/mkdocs/__main__.py +++ b/mkdocs/__main__.py @@ -1,64 +1,154 @@ #!/usr/bin/env python +from __future__ import annotations + +import logging import os +import shutil import sys -import logging +import textwrap +import traceback +import warnings + import click -# TODO: Remove this check at some point in the future. -# (also remove flake8's 'ignore E402' comments below) -if sys.version_info[0] < 3: # pragma: no cover - raise ImportError('A recent version of Python 3 is required.') +from mkdocs import __version__, config, utils -from mkdocs import __version__ # noqa: E402 -from mkdocs import utils # noqa: E402 -from mkdocs import exceptions # noqa: E402 -from mkdocs import config # noqa: E402 -from mkdocs.commands import build, gh_deploy, new, serve # noqa: E402 +if sys.platform.startswith("win"): + try: + import colorama + except ImportError: + pass + else: + colorama.init() log = logging.getLogger(__name__) +def _showwarning(message, category, filename, lineno, file=None, line=None): + try: + # Last stack frames: + # * ... 
+ # * Location of call to deprecated function <-- include this + # * Location of call to warn() <-- include this + # * (stdlib) Location of call to showwarning function + # * (this function) Location of call to extract_stack() + stack = [frame for frame in traceback.extract_stack() if frame.line][-4:-2] + # Make sure the actual affected file's name is still present (the case of syntax warning): + if not any(frame.filename == filename for frame in stack): + stack = stack[-1:] + [traceback.FrameSummary(filename, lineno, '')] + + tb = ''.join(traceback.format_list(stack)) + except Exception: + tb = f' File "{filename}", line {lineno}' + + log.info(f'{category.__name__}: {message}\n{tb}') + + +def _enable_warnings(): + from mkdocs.commands import build + + build.log.addFilter(utils.DuplicateFilter()) + + warnings.simplefilter('module', DeprecationWarning) + warnings.showwarning = _showwarning + + +class ColorFormatter(logging.Formatter): + colors = { + 'CRITICAL': 'red', + 'ERROR': 'red', + 'WARNING': 'yellow', + 'DEBUG': 'blue', + } + + text_wrapper = textwrap.TextWrapper( + width=shutil.get_terminal_size(fallback=(0, 0)).columns, + replace_whitespace=False, + break_long_words=False, + break_on_hyphens=False, + initial_indent=' ' * 11, + subsequent_indent=' ' * 11, + ) + + def format(self, record): + message = super().format(record) + prefix = f'{record.levelname:<8}- ' + if record.levelname in self.colors: + prefix = click.style(prefix, fg=self.colors[record.levelname]) + if self.text_wrapper.width: + # Only wrap text if a terminal width was detected + msg = '\n'.join(self.text_wrapper.fill(line) for line in message.splitlines()) + # Prepend prefix after wrapping so that color codes don't affect length + return prefix + msg[11:] + return prefix + message + + class State: - ''' Maintain logging level.''' + """Maintain logging level.""" def __init__(self, log_name='mkdocs', level=logging.INFO): self.logger = logging.getLogger(log_name) + # Don't restrict level on logger; use handler + self.logger.setLevel(1) self.logger.propagate = False - stream = logging.StreamHandler() - formatter = logging.Formatter("%(levelname)-7s - %(message)s ") - stream.setFormatter(formatter) - self.logger.addHandler(stream) - self.logger.setLevel(level) + self.stream = logging.StreamHandler() + self.stream.setFormatter(ColorFormatter()) + self.stream.setLevel(level) + self.stream.name = 'MkDocsStreamHandler' + self.logger.addHandler(self.stream) + + def __del__(self): + self.logger.removeHandler(self.stream) pass_state = click.make_pass_decorator(State, ensure=True) clean_help = "Remove old files from the site_dir before building (the default)." -config_help = "Provide a specific MkDocs config" -dev_addr_help = ("IP address and port to serve documentation locally (default: " - "localhost:8000)") -strict_help = ("Enable strict mode. This will cause MkDocs to abort the build " - "on any warnings.") +config_help = ( + "Provide a specific MkDocs config. This can be a file name, or '-' to read from stdin." +) +dev_addr_help = "IP address and port to serve documentation locally (default: localhost:8000)" +strict_help = "Enable strict mode. This will cause MkDocs to abort the build on any warnings." theme_help = "The theme to use when building your documentation." -theme_choices = utils.get_theme_names() +theme_choices = sorted(utils.get_theme_names()) site_dir_help = "The directory to output the result of the documentation build." use_directory_urls_help = "Use directory URLs when building pages (the default)." 
reload_help = "Enable the live reloading in the development server (this is the default)" no_reload_help = "Disable the live reloading in the development server." -dirty_reload_help = "Enable the live reloading in the development server, but only re-build files that have changed" -commit_message_help = ("A commit message to use when committing to the " - "Github Pages remote branch. Commit {sha} and MkDocs {version} are available as expansions") -remote_branch_help = ("The remote branch to commit to for Github Pages. This " - "overrides the value specified in config") -remote_name_help = ("The remote name to commit to for Github Pages. This " - "overrides the value specified in config") +serve_dirty_help = "Only re-build files that have changed." +serve_clean_help = ( + "Build the site without any effects of `mkdocs serve` - pure `mkdocs build`, then serve." +) +commit_message_help = ( + "A commit message to use when committing to the " + "GitHub Pages remote branch. Commit {sha} and MkDocs {version} are available as expansions" +) +remote_branch_help = ( + "The remote branch to commit to for GitHub Pages. This " + "overrides the value specified in config" +) +remote_name_help = ( + "The remote name to commit to for GitHub Pages. This overrides the value specified in config" +) force_help = "Force the push to the repository." -ignore_version_help = "Ignore check that build is not being deployed with an older version of MkDocs." - - -def add_options(opts): +no_history_help = "Replace the whole Git history with one new commit." +ignore_version_help = ( + "Ignore check that build is not being deployed with an older version of MkDocs." +) +watch_theme_help = ( + "Include the theme in list of files to watch for live reloading. " + "Ignored when live reload is not used." +) +shell_help = "Use the shell when invoking Git." +watch_help = "A directory or file to watch for live reloading. Can be supplied multiple times." +projects_file_help = ( + "URL or local path of the registry file that declares all known MkDocs-related projects." 
+) + + +def add_options(*opts): def inner(f): for i in reversed(opts): f = i(f) @@ -71,46 +161,88 @@ def verbose_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if value: - state.logger.setLevel(logging.DEBUG) - return click.option('-v', '--verbose', - is_flag=True, - expose_value=False, - help='Enable verbose output', - callback=callback)(f) + state.stream.setLevel(logging.DEBUG) + + return click.option( + '-v', + '--verbose', + is_flag=True, + expose_value=False, + help='Enable verbose output', + callback=callback, + )(f) def quiet_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if value: - state.logger.setLevel(logging.ERROR) - return click.option('-q', '--quiet', - is_flag=True, - expose_value=False, - help='Silence warnings', - callback=callback)(f) + state.stream.setLevel(logging.ERROR) + + return click.option( + '-q', + '--quiet', + is_flag=True, + expose_value=False, + help='Silence warnings', + callback=callback, + )(f) -common_options = add_options([quiet_option, verbose_option]) -common_config_options = add_options([ +def color_option(f): + def callback(ctx, param, value): + state = ctx.ensure_object(State) + if value is False or ( + value is None + and ( + not sys.stdout.isatty() + or os.environ.get('NO_COLOR') + or os.environ.get('TERM') == 'dumb' + ) + ): + state.stream.setFormatter(logging.Formatter('%(levelname)-8s- %(message)s')) + + return click.option( + '--color/--no-color', + is_flag=True, + default=None, + expose_value=False, + help="Force enable or disable color and wrapping for the output. Default is auto-detect.", + callback=callback, + )(f) + + +common_options = add_options(quiet_option, verbose_option) +common_config_options = add_options( click.option('-f', '--config-file', type=click.File('rb'), help=config_help), # Don't override config value if user did not specify --strict flag # Conveniently, load_config drops None values - click.option('-s', '--strict', is_flag=True, default=None, help=strict_help), + click.option('-s', '--strict/--no-strict', is_flag=True, default=None, help=strict_help), click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help), # As with --strict, set the default to None so that this doesn't incorrectly # override the config file - click.option('--use-directory-urls/--no-directory-urls', is_flag=True, default=None, help=use_directory_urls_help) -]) + click.option( + '--use-directory-urls/--no-directory-urls', + is_flag=True, + default=None, + help=use_directory_urls_help, + ), +) + +PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" -pgk_dir = os.path.dirname(os.path.abspath(__file__)) +PKG_DIR = os.path.dirname(os.path.abspath(__file__)) -@click.group(context_settings={'help_option_names': ['-h', '--help']}) +@click.group(context_settings=dict(help_option_names=['-h', '--help'], max_content_width=120)) @click.version_option( - '{} from {} (Python {})'.format(__version__, pgk_dir, sys.version[:3]), - '-V', '--version') + __version__, + '-V', + '--version', + message=f'%(prog)s, version %(version)s from { PKG_DIR } (Python { PYTHON_VERSION })', +) @common_options +@color_option def cli(): """ MkDocs - Project documentation with Markdown. 
@@ -119,25 +251,23 @@ def cli(): @cli.command(name="serve") @click.option('-a', '--dev-addr', help=dev_addr_help, metavar='') -@click.option('--livereload', 'livereload', flag_value='livereload', help=reload_help, default=True) -@click.option('--no-livereload', 'livereload', flag_value='no-livereload', help=no_reload_help) -@click.option('--dirtyreload', 'livereload', flag_value='dirty', help=dirty_reload_help) +@click.option('--no-livereload', 'livereload', flag_value=False, help=no_reload_help) +@click.option('--livereload', 'livereload', flag_value=True, default=True, hidden=True) +@click.option('--dirtyreload', 'build_type', flag_value='dirty', hidden=True) +@click.option('--dirty', 'build_type', flag_value='dirty', help=serve_dirty_help) +@click.option('-c', '--clean', 'build_type', flag_value='clean', help=serve_clean_help) +@click.option('--watch-theme', help=watch_theme_help, is_flag=True) +@click.option( + '-w', '--watch', help=watch_help, type=click.Path(exists=True), multiple=True, default=[] +) @common_config_options @common_options -def serve_command(dev_addr, livereload, **kwargs): +def serve_command(**kwargs): """Run the builtin development server""" + from mkdocs.commands import serve - logging.getLogger('tornado').setLevel(logging.WARNING) - - try: - serve.serve( - dev_addr=dev_addr, - livereload=livereload, - **kwargs - ) - except (exceptions.ConfigurationError, OSError) as e: # pragma: no cover - # Avoid ugly, unhelpful traceback - raise SystemExit('\n' + str(e)) + _enable_warnings() + serve.serve(**kwargs) @cli.command(name="build") @@ -147,12 +277,15 @@ def serve_command(dev_addr, livereload, **kwargs): @common_options def build_command(clean, **kwargs): """Build the MkDocs documentation""" + from mkdocs.commands import build + _enable_warnings() + cfg = config.load_config(**kwargs) + cfg.plugins.on_startup(command='build', dirty=not clean) try: - build.build(config.load_config(**kwargs), dirty=not clean) - except exceptions.ConfigurationError as e: # pragma: no cover - # Avoid ugly, unhelpful traceback - raise SystemExit('\n' + str(e)) + build.build(cfg, dirty=not clean) + finally: + cfg.plugins.on_shutdown() @cli.command(name="gh-deploy") @@ -161,23 +294,57 @@ def build_command(clean, **kwargs): @click.option('-b', '--remote-branch', help=remote_branch_help) @click.option('-r', '--remote-name', help=remote_name_help) @click.option('--force', is_flag=True, help=force_help) +@click.option('--no-history', is_flag=True, help=no_history_help) @click.option('--ignore-version', is_flag=True, help=ignore_version_help) +@click.option('--shell', is_flag=True, help=shell_help) @common_config_options @click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help) @common_options -def gh_deploy_command(clean, message, remote_branch, remote_name, force, ignore_version, **kwargs): +def gh_deploy_command( + clean, message, remote_branch, remote_name, force, no_history, ignore_version, shell, **kwargs +): """Deploy your documentation to GitHub Pages""" + from mkdocs.commands import build, gh_deploy + + _enable_warnings() + cfg = config.load_config(remote_branch=remote_branch, remote_name=remote_name, **kwargs) + cfg.plugins.on_startup(command='gh-deploy', dirty=not clean) try: - cfg = config.load_config( - remote_branch=remote_branch, - remote_name=remote_name, - **kwargs - ) build.build(cfg, dirty=not clean) - gh_deploy.gh_deploy(cfg, message=message, force=force, ignore_version=ignore_version) - except exceptions.ConfigurationError as e: # pragma: no cover - # Avoid ugly, 
unhelpful traceback - raise SystemExit('\n' + str(e)) + finally: + cfg.plugins.on_shutdown() + gh_deploy.gh_deploy( + cfg, + message=message, + force=force, + no_history=no_history, + ignore_version=ignore_version, + shell=shell, + ) + + +@cli.command(name="get-deps") +@verbose_option +@click.option('-f', '--config-file', type=click.File('rb'), help=config_help) +@click.option( + '-p', + '--projects-file', + default='https://raw.githubusercontent.com/mkdocs/catalog/main/projects.yaml', + help=projects_file_help, + show_default=True, +) +def get_deps_command(config_file, projects_file): + """Show required PyPI packages inferred from plugins in mkdocs.yml""" + from mkdocs.commands import get_deps + + warning_counter = utils.CountHandler() + warning_counter.setLevel(logging.WARNING) + logging.getLogger('mkdocs').addHandler(warning_counter) + + get_deps.get_deps(projects_file_url=projects_file, config_file_path=config_file) + + if warning_counter.get_counts(): + sys.exit(1) @cli.command(name="new") @@ -185,6 +352,8 @@ def gh_deploy_command(clean, message, remote_branch, remote_name, force, ignore_ @common_options def new_command(project_directory): """Create a new MkDocs project""" + from mkdocs.commands import new + new.new(project_directory) diff --git a/mkdocs/commands/build.py b/mkdocs/commands/build.py index a4574bf..4596464 100644 --- a/mkdocs/commands/build.py +++ b/mkdocs/commands/build.py @@ -1,142 +1,146 @@ +from __future__ import annotations + +import gzip import logging import os -import gzip -from urllib.parse import urlparse +import time +from typing import TYPE_CHECKING, Sequence +from urllib.parse import urljoin, urlsplit -from jinja2.exceptions import TemplateNotFound import jinja2 +from jinja2.exceptions import TemplateNotFound -from mkdocs import utils -from mkdocs.structure.files import get_files -from mkdocs.structure.nav import get_navigation import mkdocs +from mkdocs import utils +from mkdocs.exceptions import Abort, BuildError +from mkdocs.structure.files import File, Files, InclusionLevel, _set_exclusions, get_files +from mkdocs.structure.nav import Navigation, get_navigation +from mkdocs.structure.pages import Page +from mkdocs.utils import DuplicateFilter # noqa: F401 - legacy re-export +from mkdocs.utils import templates +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig -class DuplicateFilter: - ''' Avoid logging duplicate messages. ''' - def __init__(self): - self.msgs = set() - - def filter(self, record): - rv = record.msg not in self.msgs - self.msgs.add(record.msg) - return rv - +if TYPE_CHECKING: + from mkdocs.livereload import LiveReloadServer log = logging.getLogger(__name__) -log.addFilter(DuplicateFilter()) -log.addFilter(utils.warning_filter) -def get_context(nav, files, config, page=None, base_url=''): +def get_context( + nav: Navigation, + files: Sequence[File] | Files, + config: MkDocsConfig, + page: Page | None = None, + base_url: str = '', +) -> templates.TemplateContext: """ Return the template context for a given page or template. 
""" - if page is not None: base_url = utils.get_relative_url('.', page.url) - extra_javascript = utils.create_media_urls(config['extra_javascript'], page, base_url) - - extra_css = utils.create_media_urls(config['extra_css'], page, base_url) - - return { - 'nav': nav, - 'pages': files.documentation_pages(), - - 'base_url': base_url, - - 'extra_css': extra_css, - 'extra_javascript': extra_javascript, - - 'mkdocs_version': mkdocs.__version__, - 'build_date_utc': utils.get_build_datetime(), - - 'config': config, - 'page': page, - } + extra_javascript = [ + utils.normalize_url(str(script), page, base_url) for script in config.extra_javascript + ] + extra_css = [utils.normalize_url(path, page, base_url) for path in config.extra_css] + + if isinstance(files, Files): + files = files.documentation_pages() + + return templates.TemplateContext( + nav=nav, + pages=files, + base_url=base_url, + extra_css=extra_css, + extra_javascript=extra_javascript, + mkdocs_version=mkdocs.__version__, + build_date_utc=utils.get_build_datetime(), + config=config, + page=page, + ) -def _build_template(name, template, files, config, nav): +def _build_template( + name: str, template: jinja2.Template, files: Files, config: MkDocsConfig, nav: Navigation +) -> str: """ Return rendered output for given template as a string. """ - # Run `pre_template` plugin events. - template = config['plugins'].run_event( - 'pre_template', template, template_name=name, config=config - ) + template = config.plugins.on_pre_template(template, template_name=name, config=config) if utils.is_error_template(name): # Force absolute URLs in the nav of error pages and account for the - # possability that the docs root might be different than the server root. + # possibility that the docs root might be different than the server root. # See https://github.com/mkdocs/mkdocs/issues/77. # However, if site_url is not set, assume the docs root and server root # are the same. See https://github.com/mkdocs/mkdocs/issues/1598. - base_url = urlparse(config['site_url'] or '/').path + base_url = urlsplit(config.site_url or '/').path else: base_url = utils.get_relative_url('.', name) context = get_context(nav, files, config, base_url=base_url) # Run `template_context` plugin events. - context = config['plugins'].run_event( - 'template_context', context, template_name=name, config=config - ) + context = config.plugins.on_template_context(context, template_name=name, config=config) output = template.render(context) # Run `post_template` plugin events. - output = config['plugins'].run_event( - 'post_template', output, template_name=name, config=config - ) + output = config.plugins.on_post_template(output, template_name=name, config=config) return output -def _build_theme_template(template_name, env, files, config, nav): - """ Build a template using the theme environment. 
""" +def _build_theme_template( + template_name: str, env: jinja2.Environment, files: Files, config: MkDocsConfig, nav: Navigation +) -> None: + """Build a template using the theme environment.""" - log.debug("Building theme template: {}".format(template_name)) + log.debug(f"Building theme template: {template_name}") try: template = env.get_template(template_name) except TemplateNotFound: - log.warning("Template skipped: '{}' not found in theme directories.".format(template_name)) + log.warning(f"Template skipped: '{template_name}' not found in theme directories.") return output = _build_template(template_name, template, files, config, nav) if output.strip(): - output_path = os.path.join(config['site_dir'], template_name) + output_path = os.path.join(config.site_dir, template_name) utils.write_file(output.encode('utf-8'), output_path) if template_name == 'sitemap.xml': - log.debug("Gzipping template: %s", template_name) - gz_filename = '{}.gz'.format(output_path) + log.debug(f"Gzipping template: {template_name}") + gz_filename = f'{output_path}.gz' with open(gz_filename, 'wb') as f: timestamp = utils.get_build_timestamp() - with gzip.GzipFile(fileobj=f, filename=gz_filename, mode='wb', mtime=timestamp) as gz_buf: + with gzip.GzipFile( + fileobj=f, filename=gz_filename, mode='wb', mtime=timestamp + ) as gz_buf: gz_buf.write(output.encode('utf-8')) else: - log.info("Template skipped: '{}' generated empty output.".format(template_name)) + log.info(f"Template skipped: '{template_name}' generated empty output.") -def _build_extra_template(template_name, files, config, nav): - """ Build user templates which are not part of the theme. """ +def _build_extra_template(template_name: str, files: Files, config: MkDocsConfig, nav: Navigation): + """Build user templates which are not part of the theme.""" - log.debug("Building extra template: {}".format(template_name)) + log.debug(f"Building extra template: {template_name}") file = files.get_file_from_path(template_name) if file is None: - log.warning("Template skipped: '{}' not found in docs_dir.".format(template_name)) + log.warning(f"Template skipped: '{template_name}' not found in docs_dir.") return try: - with open(file.abs_src_path, 'r', encoding='utf-8', errors='strict') as f: + with open(file.abs_src_path, encoding='utf-8', errors='strict') as f: template = jinja2.Template(f.read()) except Exception as e: - log.warning("Error reading template '{}': {}".format(template_name, e)) + log.warning(f"Error reading template '{template_name}': {e}") return output = _build_template(template_name, template, files, config, nav) @@ -144,12 +148,13 @@ def _build_extra_template(template_name, files, config, nav): if output.strip(): utils.write_file(output.encode('utf-8'), file.abs_dest_path) else: - log.info("Template skipped: '{}' generated empty output.".format(template_name)) + log.info(f"Template skipped: '{template_name}' generated empty output.") -def _populate_page(page, config, files, dirty=False): - """ Read page content from docs_dir and render Markdown. """ +def _populate_page(page: Page, config: MkDocsConfig, files: Files, dirty: bool = False) -> None: + """Read page content from docs_dir and render Markdown.""" + config._current_page = page try: # When --dirty is used, only read the page if the file has been modified since the # previous build of the output. 
@@ -157,150 +162,217 @@ def _populate_page(page, config, files, dirty=False): return # Run the `pre_page` plugin event - page = config['plugins'].run_event( - 'pre_page', page, config=config, files=files - ) + page = config.plugins.on_pre_page(page, config=config, files=files) page.read_source(config) + assert page.markdown is not None # Run `page_markdown` plugin events. - page.markdown = config['plugins'].run_event( - 'page_markdown', page.markdown, page=page, config=config, files=files + page.markdown = config.plugins.on_page_markdown( + page.markdown, page=page, config=config, files=files ) page.render(config, files) + assert page.content is not None # Run `page_content` plugin events. - page.content = config['plugins'].run_event( - 'page_content', page.content, page=page, config=config, files=files + page.content = config.plugins.on_page_content( + page.content, page=page, config=config, files=files ) except Exception as e: - log.error("Error reading page '{}': {}".format(page.file.src_path, e)) + message = f"Error reading page '{page.file.src_uri}':" + # Prevent duplicated the error message because it will be printed immediately afterwards. + if not isinstance(e, BuildError): + message += f" {e}" + log.error(message) raise - - -def _build_page(page, config, files, nav, env, dirty=False): - """ Pass a Page to theme template and write output to site_dir. """ - + finally: + config._current_page = None + + +def _build_page( + page: Page, + config: MkDocsConfig, + doc_files: Sequence[File], + nav: Navigation, + env: jinja2.Environment, + dirty: bool = False, + excluded: bool = False, +) -> None: + """Pass a Page to theme template and write output to site_dir.""" + + config._current_page = page try: # When --dirty is used, only build the page if the file has been modified since the # previous build of the output. if dirty and not page.file.is_modified(): return - log.debug("Building page {}".format(page.file.src_path)) + log.debug(f"Building page {page.file.src_uri}") # Activate page. Signals to theme that this is the current page. page.active = True - context = get_context(nav, files, config, page) + context = get_context(nav, doc_files, config, page) # Allow 'template:' override in md source files. - if 'template' in page.meta: - template = env.get_template(page.meta['template']) - else: - template = env.get_template('main.html') + template = env.get_template(page.meta.get('template', 'main.html')) # Run `page_context` plugin events. - context = config['plugins'].run_event( - 'page_context', context, page=page, config=config, nav=nav - ) + context = config.plugins.on_page_context(context, page=page, config=config, nav=nav) + + if excluded: + page.content = ( + '
' + 'DRAFT' + '
' + (page.content or '') + ) # Render the template. output = template.render(context) # Run `post_page` plugin events. - output = config['plugins'].run_event( - 'post_page', output, page=page, config=config - ) + output = config.plugins.on_post_page(output, page=page, config=config) # Write the output file. if output.strip(): - utils.write_file(output.encode('utf-8', errors='xmlcharrefreplace'), page.file.abs_dest_path) + utils.write_file( + output.encode('utf-8', errors='xmlcharrefreplace'), page.file.abs_dest_path + ) else: - log.info("Page skipped: '{}'. Generated empty output.".format(page.file.src_path)) + log.info(f"Page skipped: '{page.file.src_uri}'. Generated empty output.") - # Deactivate page - page.active = False except Exception as e: - log.error("Error building page '{}': {}".format(page.file.src_path, e)) + message = f"Error building page '{page.file.src_uri}':" + # Prevent duplicated the error message because it will be printed immediately afterwards. + if not isinstance(e, BuildError): + message += f" {e}" + log.error(message) raise + finally: + # Deactivate page + page.active = False + config._current_page = None -def build(config, live_server=False, dirty=False): - """ Perform a full site build. """ - from time import time - start = time() - - # Run `config` plugin events. - config = config['plugins'].run_event('config', config) - - # Run `pre_build` plugin events. - config['plugins'].run_event('pre_build', config=config) - - if not dirty: - log.info("Cleaning site directory") - utils.clean_directory(config['site_dir']) - else: # pragma: no cover - # Warn user about problems that may occur with --dirty option - log.warning("A 'dirty' build is being performed, this will likely lead to inaccurate navigation and other" - " links within your site. This option is designed for site development purposes only.") - - if not live_server: # pragma: no cover - log.info("Building documentation to directory: %s", config['site_dir']) - if dirty and site_directory_contains_stale_files(config['site_dir']): - log.info("The directory contains stale files. Use --clean to remove them.") - - # First gather all data from all files/pages to ensure all data is consistent across all pages. - - files = get_files(config) - env = config['theme'].get_env() - files.add_files_from_theme(env, config) - - # Run `files` plugin events. - files = config['plugins'].run_event('files', files, config=config) - - nav = get_navigation(files, config) - - # Run `nav` plugin events. - nav = config['plugins'].run_event('nav', nav, config=config, files=files) - - log.debug("Reading markdown pages.") - for file in files.documentation_pages(): - log.debug("Reading: " + file.src_path) - _populate_page(file.page, config, files, dirty) - - # Run `env` plugin events. - env = config['plugins'].run_event( - 'env', env, config=config, files=files - ) - - # Start writing files to site_dir now that all data is gathered. Note that order matters. Files - # with lower precedence get written first so that files with higher precedence can overwrite them. 
- - log.debug("Copying static assets.") - files.copy_static_files(dirty=dirty) +def build( + config: MkDocsConfig, live_server: LiveReloadServer | None = None, dirty: bool = False +) -> None: + """Perform a full site build.""" - for template in config['theme'].static_templates: - _build_theme_template(template, env, files, config, nav) + logger = logging.getLogger('mkdocs') - for template in config['extra_templates']: - _build_extra_template(template, files, config, nav) + # Add CountHandler for strict mode + warning_counter = utils.CountHandler() + warning_counter.setLevel(logging.WARNING) + if config.strict: + logging.getLogger('mkdocs').addHandler(warning_counter) - log.debug("Building markdown pages.") - for file in files.documentation_pages(): - _build_page(file.page, config, files, nav, env, dirty) + inclusion = InclusionLevel.all if live_server else InclusionLevel.is_included - # Run `post_build` plugin events. - config['plugins'].run_event('post_build', config=config) + try: + start = time.monotonic() + + # Run `config` plugin events. + config = config.plugins.on_config(config) + + # Run `pre_build` plugin events. + config.plugins.on_pre_build(config=config) + + if not dirty: + log.info("Cleaning site directory") + utils.clean_directory(config.site_dir) + else: # pragma: no cover + # Warn user about problems that may occur with --dirty option + log.warning( + "A 'dirty' build is being performed, this will likely lead to inaccurate navigation and other" + " links within your site. This option is designed for site development purposes only." + ) + + if not live_server: # pragma: no cover + log.info(f"Building documentation to directory: {config.site_dir}") + if dirty and site_directory_contains_stale_files(config.site_dir): + log.info("The directory contains stale files. Use --clean to remove them.") + + # First gather all data from all files/pages to ensure all data is consistent across all pages. + + files = get_files(config) + env = config.theme.get_env() + files.add_files_from_theme(env, config) + + # Run `files` plugin events. + files = config.plugins.on_files(files, config=config) + # If plugins have added files but haven't set their inclusion level, calculate it again. + _set_exclusions(files._files, config) + + nav = get_navigation(files, config) + + # Run `nav` plugin events. + nav = config.plugins.on_nav(nav, config=config, files=files) + + log.debug("Reading markdown pages.") + excluded = [] + for file in files.documentation_pages(inclusion=inclusion): + log.debug(f"Reading: {file.src_uri}") + if file.page is None and file.inclusion.is_excluded(): + if live_server: + excluded.append(urljoin(live_server.url, file.url)) + Page(None, file, config) + assert file.page is not None + _populate_page(file.page, config, files, dirty) + if excluded: + log.info( + "The following pages are being built only for the preview " + "but will be excluded from `mkdocs build` per `exclude_docs`:\n - " + + "\n - ".join(excluded) + ) + + # Run `env` plugin events. + env = config.plugins.on_env(env, config=config, files=files) + + # Start writing files to site_dir now that all data is gathered. Note that order matters. Files + # with lower precedence get written first so that files with higher precedence can overwrite them. 
+ + log.debug("Copying static assets.") + files.copy_static_files(dirty=dirty, inclusion=inclusion) + + for template in config.theme.static_templates: + _build_theme_template(template, env, files, config, nav) + + for template in config.extra_templates: + _build_extra_template(template, files, config, nav) + + log.debug("Building markdown pages.") + doc_files = files.documentation_pages(inclusion=inclusion) + for file in doc_files: + assert file.page is not None + _build_page( + file.page, config, doc_files, nav, env, dirty, excluded=file.inclusion.is_excluded() + ) + + # Run `post_build` plugin events. + config.plugins.on_post_build(config=config) + + counts = warning_counter.get_counts() + if counts: + msg = ', '.join(f'{v} {k.lower()}s' for k, v in counts) + raise Abort(f'Aborted with {msg} in strict mode!') + + log.info(f'Documentation built in {time.monotonic() - start:.2f} seconds') - if config['strict'] and utils.warning_filter.count: - raise SystemExit('\nExited with {} warnings in strict mode.'.format(utils.warning_filter.count)) + except Exception as e: + # Run `build_error` plugin events. + config.plugins.on_build_error(error=e) + if isinstance(e, BuildError): + log.error(str(e)) + raise Abort('Aborted with a BuildError!') + raise - log.info('Documentation built in %.2f seconds', time() - start) + finally: + logger.removeHandler(warning_counter) -def site_directory_contains_stale_files(site_directory): - """ Check if the site directory contains stale files from a previous build. """ +def site_directory_contains_stale_files(site_directory: str) -> bool: + """Check if the site directory contains stale files from a previous build.""" - return True if os.path.exists(site_directory) and os.listdir(site_directory) else False + return bool(os.path.exists(site_directory) and os.listdir(site_directory)) diff --git a/mkdocs/commands/get_deps.py b/mkdocs/commands/get_deps.py new file mode 100644 index 0000000..0d2e1c3 --- /dev/null +++ b/mkdocs/commands/get_deps.py @@ -0,0 +1,178 @@ +from __future__ import annotations + +import dataclasses +import datetime +import functools +import logging +import sys +from typing import Mapping, Sequence + +if sys.version_info >= (3, 10): + from importlib.metadata import EntryPoint, entry_points +else: + from importlib_metadata import EntryPoint, entry_points + +import yaml + +from mkdocs import utils +from mkdocs.config.base import _open_config_file +from mkdocs.utils.cache import download_and_cache_url + +log = logging.getLogger(__name__) + +# Note: do not rely on functions in this module, it is not public API. + + +class YamlLoader(yaml.SafeLoader): + pass + + +# Prevent errors from trying to access external modules which may not be installed yet. +YamlLoader.add_constructor("!ENV", lambda loader, node: None) # type: ignore +YamlLoader.add_constructor("!relative", lambda loader, node: None) # type: ignore +YamlLoader.add_multi_constructor( + "tag:yaml.org,2002:python/name:", lambda loader, suffix, node: None +) +YamlLoader.add_multi_constructor( + "tag:yaml.org,2002:python/object/apply:", lambda loader, suffix, node: None +) + +NotFound = () + + +def dig(cfg, keys: str): + """Receives a string such as 'foo.bar' and returns `cfg['foo']['bar']`, or `NotFound`. + + A list of single-item dicts gets converted to a flat dict. This is intended for `plugins` config. 
+ """ + key, _, rest = keys.partition('.') + try: + cfg = cfg[key] + except (KeyError, TypeError): + return NotFound + if isinstance(cfg, list): + orig_cfg = cfg + cfg = {} + for item in reversed(orig_cfg): + if isinstance(item, dict) and len(item) == 1: + cfg.update(item) + elif isinstance(item, str): + cfg[item] = {} + if not rest: + return cfg + return dig(cfg, rest) + + +def strings(obj) -> Sequence[str]: + if isinstance(obj, str): + return (obj,) + else: + return tuple(obj) + + +@functools.lru_cache() +def _entry_points(group: str) -> Mapping[str, EntryPoint]: + eps = {ep.name: ep for ep in entry_points(group=group)} + log.debug(f"Available '{group}' entry points: {sorted(eps)}") + return eps + + +@dataclasses.dataclass(frozen=True) +class PluginKind: + projects_key: str + entry_points_key: str + + def __str__(self) -> str: + return self.projects_key.rpartition('_')[-1] + + +def get_deps(projects_file_url: str, config_file_path: str | None = None) -> None: + """ + Print PyPI package dependencies inferred from a mkdocs.yml file based on a reverse mapping of known projects. + + Parameters: + projects_file_url: URL or local path of the registry file that declares all known MkDocs-related projects. + The file is in YAML format and contains `projects: [{mkdocs_theme:, mkdocs_plugin:, markdown_extension:}] + config_file_path: Non-default path to mkdocs.yml. + """ + with _open_config_file(config_file_path) as f: + cfg = utils.yaml_load(f, loader=YamlLoader) # type: ignore + + packages_to_install = set() + + if all(c not in cfg for c in ('site_name', 'theme', 'plugins', 'markdown_extensions')): + log.warning("The passed config file doesn't seem to be a mkdocs.yml config file") + else: + if dig(cfg, 'theme.locale') not in (NotFound, 'en'): + packages_to_install.add('mkdocs[i18n]') + else: + packages_to_install.add('mkdocs') + + try: + theme = cfg['theme']['name'] + except (KeyError, TypeError): + theme = cfg.get('theme') + themes = {theme} if theme else set() + + plugins = set(strings(dig(cfg, 'plugins'))) + extensions = set(strings(dig(cfg, 'markdown_extensions'))) + + wanted_plugins = ( + (PluginKind('mkdocs_theme', 'mkdocs.themes'), themes - {'mkdocs', 'readthedocs'}), + (PluginKind('mkdocs_plugin', 'mkdocs.plugins'), plugins - {'search'}), + (PluginKind('markdown_extension', 'markdown.extensions'), extensions), + ) + for kind, wanted in wanted_plugins: + log.debug(f'Wanted {kind}s: {sorted(wanted)}') + + content = download_and_cache_url(projects_file_url, datetime.timedelta(days=7)) + projects = yaml.safe_load(content)['projects'] + + for project in projects: + for kind, wanted in wanted_plugins: + available = strings(project.get(kind.projects_key, ())) + for entry_name in available: + if ( # Also check theme-namespaced plugin names against the current theme. 
+ '/' in entry_name + and theme is not None + and kind.projects_key == 'mkdocs_plugin' + and entry_name.startswith(f'{theme}/') + and entry_name[len(theme) + 1 :] in wanted + and entry_name not in wanted + ): + entry_name = entry_name[len(theme) + 1 :] + if entry_name in wanted: + if 'pypi_id' in project: + install_name = project['pypi_id'] + elif 'github_id' in project: + install_name = 'git+https://github.com/{github_id}'.format_map(project) + else: + log.error( + f"Can't find how to install {kind} '{entry_name}' although it was identified as {project}" + ) + continue + packages_to_install.add(install_name) + for extra_key, extra_pkgs in project.get('extra_dependencies', {}).items(): + if dig(cfg, extra_key) is not NotFound: + packages_to_install.update(strings(extra_pkgs)) + + wanted.remove(entry_name) + + for kind, wanted in wanted_plugins: + for entry_name in sorted(wanted): + dist_name = None + ep = _entry_points(kind.entry_points_key).get(entry_name) + if ep is not None and ep.dist is not None: + dist_name = ep.dist.name + if dist_name not in ('mkdocs', 'Markdown'): + warning = f"{str(kind).capitalize()} '{entry_name}' is not provided by any registered project" + if ep is not None: + warning += " but is installed locally" + if dist_name: + warning += f" from '{dist_name}'" + log.info(warning) + else: + log.warning(warning) + + for pkg in sorted(packages_to_install): + print(pkg) # noqa: T201 diff --git a/mkdocs/commands/gh_deploy.py b/mkdocs/commands/gh_deploy.py index a9c04d8..f6e5fa0 100644 --- a/mkdocs/commands/gh_deploy.py +++ b/mkdocs/commands/gh_deploy.py @@ -1,96 +1,115 @@ +from __future__ import annotations + import logging -import subprocess import os import re -from pkg_resources import parse_version +import subprocess +from typing import TYPE_CHECKING + +import ghp_import +from packaging import version import mkdocs -from mkdocs.utils import ghp_import +from mkdocs.exceptions import Abort + +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig log = logging.getLogger(__name__) default_message = """Deployed {sha} with MkDocs version: {version}""" -def _is_cwd_git_repo(): +def _is_cwd_git_repo() -> bool: try: proc = subprocess.Popen( ['git', 'rev-parse', '--is-inside-work-tree'], stdout=subprocess.PIPE, - stderr=subprocess.PIPE + stderr=subprocess.PIPE, ) except FileNotFoundError: log.error("Could not find git - is it installed and on your path?") - raise SystemExit(1) + raise Abort('Deployment Aborted!') proc.communicate() return proc.wait() == 0 -def _get_current_sha(repo_path): - - proc = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], cwd=repo_path, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) +def _get_current_sha(repo_path) -> str: + proc = subprocess.Popen( + ['git', 'rev-parse', '--short', 'HEAD'], + cwd=repo_path or None, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) stdout, _ = proc.communicate() sha = stdout.decode('utf-8').strip() return sha -def _get_remote_url(remote_name): - +def _get_remote_url(remote_name: str) -> tuple[str, str] | tuple[None, None]: # No CNAME found. We will use the origin URL to determine the GitHub - # pages location. - remote = "remote.%s.url" % remote_name - proc = subprocess.Popen(["git", "config", "--get", remote], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + # Pages location. 
+ remote = f"remote.{remote_name}.url" + proc = subprocess.Popen( + ["git", "config", "--get", remote], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) stdout, _ = proc.communicate() url = stdout.decode('utf-8').strip() - host = None - path = None if 'github.com/' in url: host, path = url.split('github.com/', 1) elif 'github.com:' in url: host, path = url.split('github.com:', 1) - + else: + return None, None return host, path -def _check_version(branch): - - proc = subprocess.Popen(['git', 'show', '-s', '--format=%s', 'refs/heads/{}'.format(branch)], - stdout=subprocess.PIPE, stderr=subprocess.PIPE) +def _check_version(branch: str) -> None: + proc = subprocess.Popen( + ['git', 'show', '-s', '--format=%s', f'refs/heads/{branch}'], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) stdout, _ = proc.communicate() msg = stdout.decode('utf-8').strip() m = re.search(r'\d+(\.\d+)+((a|b|rc)\d+)?(\.post\d+)?(\.dev\d+)?', msg, re.X | re.I) - previousv = parse_version(m.group()) if m else None - currentv = parse_version(mkdocs.__version__) + previousv = version.parse(m.group()) if m else None + currentv = version.parse(mkdocs.__version__) if not previousv: log.warning('Version check skipped: No version specified in previous deployment.') elif currentv > previousv: log.info( - 'Previous deployment was done with MkDocs version {}; ' - 'you are deploying with a newer version ({})'.format(previousv, currentv) + f'Previous deployment was done with MkDocs version {previousv}; ' + f'you are deploying with a newer version ({currentv})' ) elif currentv < previousv: log.error( - 'Deployment terminated: Previous deployment was made with MkDocs version {}; ' - 'you are attempting to deploy with an older version ({}). Use --ignore-version ' - 'to deploy anyway.'.format(previousv, currentv) + f'Deployment terminated: Previous deployment was made with MkDocs version {previousv}; ' + f'you are attempting to deploy with an older version ({currentv}). Use --ignore-version ' + 'to deploy anyway.' 
) - raise SystemExit(1) + raise Abort('Deployment Aborted!') -def gh_deploy(config, message=None, force=False, ignore_version=False): - +def gh_deploy( + config: MkDocsConfig, + message: str | None = None, + force=False, + no_history=False, + ignore_version=False, + shell=False, +) -> None: if not _is_cwd_git_repo(): - log.error('Cannot deploy - this directory does not appear to be a git ' - 'repository') + log.error('Cannot deploy - this directory does not appear to be a git repository') - remote_branch = config['remote_branch'] - remote_name = config['remote_name'] + remote_branch = config.remote_branch + remote_name = config.remote_name if not ignore_version: _check_version(remote_branch) @@ -100,35 +119,51 @@ def gh_deploy(config, message=None, force=False, ignore_version=False): sha = _get_current_sha(os.path.dirname(config.config_file_path)) message = message.format(version=mkdocs.__version__, sha=sha) - log.info("Copying '%s' to '%s' branch and pushing to GitHub.", - config['site_dir'], config['remote_branch']) + log.info( + "Copying '%s' to '%s' branch and pushing to GitHub.", + config.site_dir, + config.remote_branch, + ) + + try: + ghp_import.ghp_import( + config.site_dir, + mesg=message, + remote=remote_name, + branch=remote_branch, + push=True, + force=force, + use_shell=shell, + no_history=no_history, + nojekyll=True, + ) + except ghp_import.GhpError as e: + log.error(f"Failed to deploy to GitHub with error: \n{e.message}") + raise Abort('Deployment Aborted!') + + cname_file = os.path.join(config.site_dir, 'CNAME') + # Does this repository have a CNAME set for GitHub Pages? + if os.path.isfile(cname_file): + # This GitHub Pages repository has a CNAME configured. + with open(cname_file) as f: + cname_host = f.read().strip() + log.info( + f'Based on your CNAME file, your documentation should be ' + f'available shortly at: http://{cname_host}' + ) + log.info( + 'NOTE: Your DNS records must be configured appropriately for your CNAME URL to work.' + ) + return + + host, path = _get_remote_url(remote_name) - result, error = ghp_import.ghp_import(config['site_dir'], message, remote_name, - remote_branch, force) - if not result: - log.error("Failed to deploy to GitHub with error: \n%s", error) - raise SystemExit(1) + if host is None or path is None: + # This could be a GitHub Enterprise deployment. + log.info('Your documentation should be available shortly.') else: - cname_file = os.path.join(config['site_dir'], 'CNAME') - # Does this repository have a CNAME set for GitHub pages? - if os.path.isfile(cname_file): - # This GitHub pages repository has a CNAME configured. - with(open(cname_file, 'r')) as f: - cname_host = f.read().strip() - log.info('Based on your CNAME file, your documentation should be ' - 'available shortly at: http://%s', cname_host) - log.info('NOTE: Your DNS records must be configured appropriately for ' - 'your CNAME URL to work.') - return - - host, path = _get_remote_url(remote_name) - - if host is None: - # This could be a GitHub Enterprise deployment. 
- log.info('Your documentation should be available shortly.') - else: - username, repo = path.split('/', 1) - if repo.endswith('.git'): - repo = repo[:-len('.git')] - url = 'https://{}.github.io/{}/'.format(username, repo) - log.info('Your documentation should shortly be available at: ' + url) + username, repo = path.split('/', 1) + if repo.endswith('.git'): + repo = repo[: -len('.git')] + url = f'https://{username}.github.io/{repo}/' + log.info(f"Your documentation should shortly be available at: {url}") diff --git a/mkdocs/commands/new.py b/mkdocs/commands/new.py index 264e365..38c3068 100644 --- a/mkdocs/commands/new.py +++ b/mkdocs/commands/new.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import os @@ -24,8 +26,7 @@ log = logging.getLogger(__name__) -def new(output_dir): - +def new(output_dir: str) -> None: docs_dir = os.path.join(output_dir, 'docs') config_path = os.path.join(output_dir, 'mkdocs.yml') index_path = os.path.join(docs_dir, 'index.md') @@ -35,17 +36,17 @@ def new(output_dir): return if not os.path.exists(output_dir): - log.info('Creating project directory: %s', output_dir) + log.info(f'Creating project directory: {output_dir}') os.mkdir(output_dir) - log.info('Writing config file: %s', config_path) + log.info(f'Writing config file: {config_path}') with open(config_path, 'w', encoding='utf-8') as f: f.write(config_text) if os.path.exists(index_path): return - log.info('Writing initial docs: %s', index_path) + log.info(f'Writing initial docs: {index_path}') if not os.path.exists(docs_dir): os.mkdir(docs_dir) with open(index_path, 'w', encoding='utf-8') as f: diff --git a/mkdocs/commands/serve.py b/mkdocs/commands/serve.py index 390f134..c5ae98b 100644 --- a/mkdocs/commands/serve.py +++ b/mkdocs/commands/serve.py @@ -1,109 +1,33 @@ +from __future__ import annotations + import logging import shutil import tempfile -import sys +from os.path import isdir, isfile, join +from typing import TYPE_CHECKING +from urllib.parse import urlsplit + +import jinja2.exceptions -from os.path import isfile, join from mkdocs.commands.build import build from mkdocs.config import load_config +from mkdocs.exceptions import Abort +from mkdocs.livereload import LiveReloadServer -log = logging.getLogger(__name__) - - -def _init_asyncio_patch(): - """ - Select compatible event loop for Tornado 5+. - - As of Python 3.8, the default event loop on Windows is `proactor`, - however Tornado requires the old default "selector" event loop. - As Tornado has decided to leave this to users to set, MkDocs needs - to set it. See https://github.com/tornadoweb/tornado/issues/2608. - """ - if sys.platform.startswith("win") and sys.version_info >= (3, 8): - import asyncio - try: - from asyncio import WindowsSelectorEventLoopPolicy - except ImportError: - pass # Can't assign a policy which doesn't exist. 
- else: - if not isinstance(asyncio.get_event_loop_policy(), WindowsSelectorEventLoopPolicy): - asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy()) - - -def _get_handler(site_dir, StaticFileHandler): - - from tornado.template import Loader - - class WebHandler(StaticFileHandler): - - def write_error(self, status_code, **kwargs): - - if status_code in (404, 500): - error_page = '{}.html'.format(status_code) - if isfile(join(site_dir, error_page)): - self.write(Loader(site_dir).load(error_page).generate()) - else: - super().write_error(status_code, **kwargs) - - return WebHandler - - -def _livereload(host, port, config, builder, site_dir): - - # We are importing here for anyone that has issues with livereload. Even if - # this fails, the --no-livereload alternative should still work. - _init_asyncio_patch() - from livereload import Server - import livereload.handlers - - class LiveReloadServer(Server): - - def get_web_handlers(self, script): - handlers = super().get_web_handlers(script) - # replace livereload handler - return [(handlers[0][0], _get_handler(site_dir, livereload.handlers.StaticFileHandler), handlers[0][2],)] - - server = LiveReloadServer() - - # Watch the documentation files, the config file and the theme files. - server.watch(config['docs_dir'], builder) - server.watch(config['config_file_path'], builder) - - for d in config['theme'].dirs: - server.watch(d, builder) +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig - # Run `serve` plugin events. - server = config['plugins'].run_event('serve', server, config=config, builder=builder) - - server.serve(root=site_dir, host=host, port=port, restart_delay=0) - - -def _static_server(host, port, site_dir): - - # Importing here to separate the code paths from the --livereload - # alternative. - _init_asyncio_patch() - from tornado import ioloop - from tornado import web - - application = web.Application([ - (r"/(.*)", _get_handler(site_dir, web.StaticFileHandler), { - "path": site_dir, - "default_filename": "index.html" - }), - ]) - application.listen(port=port, address=host) - - log.info('Running at: http://%s:%s/', host, port) - log.info('Hold ctrl+c to quit.') - try: - ioloop.IOLoop.instance().start() - except KeyboardInterrupt: - log.info('Stopping server...') +log = logging.getLogger(__name__) -def serve(config_file=None, dev_addr=None, strict=None, theme=None, - theme_dir=None, livereload='livereload', **kwargs): +def serve( + config_file: str | None = None, + livereload: bool = True, + build_type: str | None = None, + watch_theme: bool = False, + watch: list[str] = [], + **kwargs, +) -> None: """ Start the MkDocs development server @@ -111,40 +35,85 @@ def serve(config_file=None, dev_addr=None, strict=None, theme=None, it will rebuild the documentation and refresh the page automatically whenever a file is edited. """ - # Create a temporary build directory, and set some options to serve it # PY2 returns a byte string by default. The Unicode prefix ensures a Unicode # string is returned. And it makes MkDocs temp dirs easier to identify. 
site_dir = tempfile.mkdtemp(prefix='mkdocs_') - def builder(): - log.info("Building documentation...") + def mount_path(config: MkDocsConfig): + return urlsplit(config.site_url or '/').path + + def get_config(): config = load_config( config_file=config_file, - dev_addr=dev_addr, - strict=strict, - theme=theme, - theme_dir=theme_dir, site_dir=site_dir, - **kwargs + **kwargs, ) - # Override a few config settings after validation - config['site_url'] = 'http://{}/'.format(config['dev_addr']) - - live_server = livereload in ['dirty', 'livereload'] - dirty = livereload == 'dirty' - build(config, live_server=live_server, dirty=dirty) + config.watch.extend(watch) + config.site_url = f'http://{config.dev_addr}{mount_path(config)}' return config + is_clean = build_type == 'clean' + is_dirty = build_type == 'dirty' + + config = get_config() + config.plugins.on_startup(command=('build' if is_clean else 'serve'), dirty=is_dirty) + + def builder(config: MkDocsConfig | None = None): + log.info("Building documentation...") + if config is None: + config = get_config() + + build(config, live_server=None if is_clean else server, dirty=is_dirty) + + host, port = config.dev_addr + server = LiveReloadServer( + builder=builder, host=host, port=port, root=site_dir, mount_path=mount_path(config) + ) + + def error_handler(code) -> bytes | None: + if code in (404, 500): + error_page = join(site_dir, f'{code}.html') + if isfile(error_page): + with open(error_page, 'rb') as f: + return f.read() + return None + + server.error_handler = error_handler + try: # Perform the initial build - config = builder() + builder(config) + + if livereload: + # Watch the documentation files, the config file and the theme files. + server.watch(config.docs_dir) + if config.config_file_path: + server.watch(config.config_file_path) - host, port = config['dev_addr'] + if watch_theme: + for d in config.theme.dirs: + server.watch(d) - if livereload in ['livereload', 'dirty']: - _livereload(host, port, config, builder, site_dir) - else: - _static_server(host, port, site_dir) + # Run `serve` plugin events. + server = config.plugins.on_serve(server, config=config, builder=builder) + + for item in config.watch: + server.watch(item) + + try: + server.serve() + except KeyboardInterrupt: + log.info("Shutting down...") + finally: + server.shutdown() + except jinja2.exceptions.TemplateError: + # This is a subclass of OSError, but shouldn't be suppressed. 
+ raise + except OSError as e: # pragma: no cover + # Avoid ugly, unhelpful traceback + raise Abort(f'{type(e).__name__}: {e}') finally: - shutil.rmtree(site_dir) + config.plugins.on_shutdown() + if isdir(site_dir): + shutil.rmtree(site_dir) diff --git a/mkdocs/config/__init__.py b/mkdocs/config/__init__.py index 3f8314f..3fa69c6 100644 --- a/mkdocs/config/__init__.py +++ b/mkdocs/config/__init__.py @@ -1,6 +1,3 @@ -from mkdocs.config.base import load_config, Config -from mkdocs.config.defaults import DEFAULT_SCHEMA +from mkdocs.config.base import Config, load_config -__all__ = [load_config.__name__, - Config.__name__, - 'DEFAULT_SCHEMA'] +__all__ = ['load_config', 'Config'] diff --git a/mkdocs/config/base.py b/mkdocs/config/base.py index 246cb1b..cee32e3 100644 --- a/mkdocs/config/base.py +++ b/mkdocs/config/base.py @@ -1,35 +1,166 @@ +from __future__ import annotations + +import functools import logging import os import sys -from yaml import YAMLError +import warnings from collections import UserDict - -from mkdocs import exceptions -from mkdocs import utils +from contextlib import contextmanager +from typing import ( + IO, + TYPE_CHECKING, + Any, + Generic, + Iterator, + List, + Mapping, + Sequence, + Tuple, + TypeVar, + overload, +) + +from mkdocs import exceptions, utils +from mkdocs.utils import weak_property + +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig log = logging.getLogger('mkdocs.config') +T = TypeVar('T') + + +class BaseConfigOption(Generic[T]): + def __init__(self) -> None: + self.warnings: list[str] = [] + self.default = None + + @property + def default(self): + try: + # ensure no mutable values are assigned + return self._default.copy() + except AttributeError: + return self._default + + @default.setter + def default(self, value): + self._default = value + + def validate(self, value: object) -> T: + return self.run_validation(value) + + def reset_warnings(self) -> None: + self.warnings = [] + + def pre_validation(self, config: Config, key_name: str) -> None: + """ + Before all options are validated, perform a pre-validation process. + + The pre-validation process method should be implemented by subclasses. + """ + + def run_validation(self, value: object): + """ + Perform validation for a value. + + The run_validation method should be implemented by subclasses. + """ + return value + + def post_validation(self, config: Config, key_name: str) -> None: + """ + After all options have passed validation, perform a post-validation + process to do any additional changes dependent on other config values. + + The post-validation process method should be implemented by subclasses. + """ + + def __set_name__(self, owner, name): + if name.endswith('_') and not name.startswith('_'): + name = name[:-1] + self._name = name + + @overload + def __get__(self, obj: Config, type=None) -> T: + ... + + @overload + def __get__(self, obj, type=None) -> BaseConfigOption: + ... 
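# ---------------------------------------------------------------------------
# A minimal sketch of the descriptor behaviour added above: on a Config
# subclass, each BaseConfigOption doubles as an attribute that reads and writes
# the matching dict entry. `SiteConfig` and its fields are hypothetical and only
# assume the class-based config API introduced in this patch.
from mkdocs.config import base
from mkdocs.config import config_options as c

class SiteConfig(base.Config):
    site_name = c.Type(str, default='My Docs')
    use_directory_urls = c.Type(bool, default=True)

cfg = SiteConfig()
cfg.site_name = 'Example'              # routed through BaseConfigOption.__set__
assert cfg['site_name'] == 'Example'   # same entry via the dict interface
# ---------------------------------------------------------------------------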
+ + def __get__(self, obj, type=None): + if not isinstance(obj, Config): + return self + return obj[self._name] + + def __set__(self, obj, value: T): + if not isinstance(obj, Config): + raise AttributeError( + f"can't set attribute ({self._name}) because the parent is a {type(obj)} not a {Config}" + ) + obj[self._name] = value + + class ValidationError(Exception): """Raised during the validation process of the config on errors.""" + def __eq__(self, other): + return type(self) is type(other) and str(self) == str(other) + + +PlainConfigSchemaItem = Tuple[str, BaseConfigOption] +PlainConfigSchema = Sequence[PlainConfigSchemaItem] + +ConfigErrors = List[Tuple[str, Exception]] +ConfigWarnings = List[Tuple[str, str]] + class Config(UserDict): """ - MkDocs Configuration dict + Base class for MkDocs configuration, plugin configuration (and sub-configuration) objects. + + It should be subclassed and have `ConfigOption`s defined as attributes. + For examples, see mkdocs/contrib/search/__init__.py and mkdocs/config/defaults.py. - This is a fairly simple extension of a standard dictionary. It adds methods - for running validation on the structure and contents. + Behavior as it was prior to MkDocs 1.4 is now handled by LegacyConfig. """ - def __init__(self, schema, config_file_path=None): - """ - The schema is a Python dict which maps the config name to a validator. - """ + _schema: PlainConfigSchema + config_file_path: str + + def __init_subclass__(cls): + schema = dict(getattr(cls, '_schema', ())) + for attr_name, attr in cls.__dict__.items(): + if isinstance(attr, BaseConfigOption): + schema[getattr(attr, '_name', attr_name)] = attr + cls._schema = tuple(schema.items()) + + for attr_name, attr in cls._schema: + attr.required = True + if getattr(attr, '_legacy_required', None) is not None: + raise TypeError( + f"{cls.__name__}.{attr_name}: " + "Setting 'required' is unsupported in class-based configs. " + "All values are required, or can be wrapped into config_options.Optional" + ) + + def __new__(cls, *args, **kwargs) -> Config: + """Compatibility: allow referring to `LegacyConfig(...)` constructor as `Config(...)`.""" + if cls is Config: + return LegacyConfig(*args, **kwargs) + return super().__new__(cls) + + def __init__(self, config_file_path: str | bytes | None = None): + super().__init__() + self.__user_configs: list[dict] = [] + self.set_defaults() - self._schema = schema - self._schema_keys = set(dict(schema).keys()) + self._schema_keys = {k for k, v in self._schema} # Ensure config_file_path is a Unicode string if config_file_path is not None and not isinstance(config_file_path, str): try: @@ -37,71 +168,63 @@ def __init__(self, schema, config_file_path=None): config_file_path = config_file_path.decode(encoding=sys.getfilesystemencoding()) except UnicodeDecodeError: raise ValidationError("config_file_path is not a Unicode string.") - self.config_file_path = config_file_path - self.data = {} - - self.user_configs = [] - self.set_defaults() + self.config_file_path = config_file_path or '' - def set_defaults(self): + def set_defaults(self) -> None: """ Set the base config by going through each validator and getting the default if it has one. 
""" - for key, config_option in self._schema: self[key] = config_option.default - def _validate(self): - - failed, warnings = [], [] + def _validate(self) -> tuple[ConfigErrors, ConfigWarnings]: + failed: ConfigErrors = [] + warnings: ConfigWarnings = [] for key, config_option in self._schema: try: value = self.get(key) self[key] = config_option.validate(value) - warnings.extend([(key, w) for w in config_option.warnings]) + warnings.extend((key, w) for w in config_option.warnings) config_option.reset_warnings() except ValidationError as e: failed.append((key, e)) - for key in (set(self.keys()) - self._schema_keys): - warnings.append(( - key, "Unrecognised configuration name: {}".format(key) - )) + for key in set(self.keys()) - self._schema_keys: + warnings.append((key, f"Unrecognised configuration name: {key}")) return failed, warnings - def _pre_validate(self): - - failed, warnings = [], [] + def _pre_validate(self) -> tuple[ConfigErrors, ConfigWarnings]: + failed: ConfigErrors = [] + warnings: ConfigWarnings = [] for key, config_option in self._schema: try: config_option.pre_validation(self, key_name=key) - warnings.extend([(key, w) for w in config_option.warnings]) + warnings.extend((key, w) for w in config_option.warnings) config_option.reset_warnings() except ValidationError as e: failed.append((key, e)) return failed, warnings - def _post_validate(self): - - failed, warnings = [], [] + def _post_validate(self) -> tuple[ConfigErrors, ConfigWarnings]: + failed: ConfigErrors = [] + warnings: ConfigWarnings = [] for key, config_option in self._schema: try: config_option.post_validation(self, key_name=key) - warnings.extend([(key, w) for w in config_option.warnings]) + warnings.extend((key, w) for w in config_option.warnings) config_option.reset_warnings() except ValidationError as e: failed.append((key, e)) return failed, warnings - def validate(self): - + def validate(self) -> tuple[ConfigErrors, ConfigWarnings]: failed, warnings = self._pre_validate() run_failed, run_warnings = self._validate() @@ -118,54 +241,109 @@ def validate(self): return failed, warnings - def load_dict(self, patch): + def load_dict(self, patch: dict) -> None: + """Load config options from a dictionary.""" if not isinstance(patch, dict): raise exceptions.ConfigurationError( - "The configuration is invalid. The expected type was a key " - "value mapping (a python dict) but we got an object of type: " - "{}".format(type(patch))) + "The configuration is invalid. Expected a key-" + f"value mapping (dict) but received: {type(patch)}" + ) - self.user_configs.append(patch) - self.data.update(patch) + self.__user_configs.append(patch) + self.update(patch) - def load_file(self, config_file): - try: - return self.load_dict(utils.yaml_load(config_file)) - except YAMLError as e: - # MkDocs knows and understands ConfigurationErrors - raise exceptions.ConfigurationError( - "MkDocs encountered as error parsing the configuration file: {}".format(e) - ) + def load_file(self, config_file: IO) -> None: + """Load config options from the open file descriptor of a YAML file.""" + warnings.warn( + "Config.load_file is not used since MkDocs 1.5 and will be removed soon. 
" + "Use MkDocsConfig.load_file instead", + DeprecationWarning, + ) + return self.load_dict(utils.yaml_load(config_file)) + @weak_property + def user_configs(self) -> Sequence[Mapping[str, Any]]: + warnings.warn( + "user_configs is never used in MkDocs and will be removed soon.", DeprecationWarning + ) + return self.__user_configs -def _open_config_file(config_file): - # Default to the standard config filename. - if config_file is None: - config_file = os.path.abspath('mkdocs.yml') +@functools.lru_cache(maxsize=None) +def get_schema(cls: type) -> PlainConfigSchema: + """ + Extract ConfigOptions defined in a class (used just as a container) and put them into a schema tuple. + """ + if issubclass(cls, Config): + return cls._schema + return tuple((k, v) for k, v in cls.__dict__.items() if isinstance(v, BaseConfigOption)) - # If closed file descriptor, get file path to reopen later. - if hasattr(config_file, 'closed') and config_file.closed: - config_file = config_file.name - log.debug("Loading configuration file: {}".format(config_file)) +class LegacyConfig(Config): + """ + A configuration object for plugins, as just a dict without type-safe attribute access. + """ + + def __init__(self, schema: PlainConfigSchema, config_file_path: str | None = None): + self._schema = tuple((k, v) for k, v in schema) # Re-create just for validation + super().__init__(config_file_path) + +@contextmanager +def _open_config_file(config_file: str | IO | None) -> Iterator[IO]: + """ + A context manager which yields an open file descriptor ready to be read. + + Accepts a filename as a string, an open or closed file descriptor, or None. + When None, it defaults to `mkdocs.yml` in the CWD. If a closed file descriptor + is received, a new file descriptor is opened for the same file. + + The file descriptor is automatically closed when the context manager block is existed. + """ + # Default to the standard config filename. + if config_file is None: + paths_to_try = ['mkdocs.yml', 'mkdocs.yaml'] # If it is a string, we can assume it is a path and attempt to open it. - if isinstance(config_file, str): - if os.path.exists(config_file): - config_file = open(config_file, 'rb') + elif isinstance(config_file, str): + paths_to_try = [config_file] + # If closed file descriptor, get file path to reopen later. + elif getattr(config_file, 'closed', False): + paths_to_try = [config_file.name] + else: + result_config_file = config_file + paths_to_try = None + + if paths_to_try: + # config_file is not a file descriptor, so open it as a path. 
+ for path in paths_to_try: + path = os.path.abspath(path) + log.debug(f"Loading configuration file: {path}") + try: + result_config_file = open(path, 'rb') + break + except FileNotFoundError: + continue else: - raise exceptions.ConfigurationError( - "Config file '{}' does not exist.".format(config_file)) - - # Ensure file descriptor is at begining - config_file.seek(0) + raise exceptions.ConfigurationError(f"Config file '{paths_to_try[0]}' does not exist.") + else: + log.debug(f"Loading configuration file: {result_config_file}") + # Ensure file descriptor is at beginning + try: + result_config_file.seek(0) + except OSError: + pass - return config_file + try: + yield result_config_file + finally: + if hasattr(result_config_file, 'close'): + result_config_file.close() -def load_config(config_file=None, **kwargs): +def load_config( + config_file: str | IO | None = None, *, config_file_path: str | None = None, **kwargs +) -> MkDocsConfig: """ Load the configuration for a given file object or name @@ -183,35 +361,36 @@ def load_config(config_file=None, **kwargs): if value is None: options.pop(key) - config_file = _open_config_file(config_file) - options['config_file_path'] = getattr(config_file, 'name', '') + with _open_config_file(config_file) as fd: + # Initialize the config with the default schema. + from mkdocs.config.defaults import MkDocsConfig + + if config_file_path is None: + if fd is not sys.stdin.buffer: + config_file_path = getattr(fd, 'name', None) + cfg = MkDocsConfig(config_file_path=config_file_path) + # load the config file + cfg.load_file(fd) - # Initialise the config with the default schema . - from mkdocs import config - cfg = Config(schema=config.DEFAULT_SCHEMA, config_file_path=options['config_file_path']) - # First load the config file - cfg.load_file(config_file) # Then load the options to overwrite anything in the config. cfg.load_dict(options) errors, warnings = cfg.validate() for config_name, warning in warnings: - log.warning("Config value: '%s'. Warning: %s", config_name, warning) + log.warning(f"Config value '{config_name}': {warning}") for config_name, error in errors: - log.error("Config value: '%s'. Error: %s", config_name, error) + log.error(f"Config value '{config_name}': {error}") for key, value in cfg.items(): - log.debug("Config value: '%s' = %r", key, value) + log.debug(f"Config value '{key}' = {value!r}") if len(errors) > 0: - raise exceptions.ConfigurationError( - "Aborted with {} Configuration Errors!".format(len(errors)) - ) - elif cfg['strict'] and len(warnings) > 0: - raise exceptions.ConfigurationError( - "Aborted with {} Configuration Warnings in 'strict' mode!".format(len(warnings)) + raise exceptions.Abort(f"Aborted with {len(errors)} configuration errors!") + elif cfg.strict and len(warnings) > 0: + raise exceptions.Abort( + f"Aborted with {len(warnings)} configuration warnings in 'strict' mode!" 
) return cfg diff --git a/mkdocs/config/config_options.py b/mkdocs/config/config_options.py index 93c62dd..0707312 100644 --- a/mkdocs/config/config_options.py +++ b/mkdocs/config/config_options.py @@ -1,111 +1,167 @@ -import os -from collections import Sequence, namedtuple -from urllib.parse import urlparse +from __future__ import annotations + +import functools import ipaddress -import markdown +import logging +import os +import string +import sys +import traceback +import types +import warnings +from collections import Counter, UserString +from typing import ( + Any, + Callable, + Collection, + Dict, + Generic, + Iterator, + List, + Mapping, + MutableMapping, + NamedTuple, + TypeVar, + Union, + overload, +) +from urllib.parse import quote as urlquote +from urllib.parse import urlsplit, urlunsplit -from mkdocs import utils, theme, plugins -from mkdocs.config.base import Config, ValidationError +import markdown +import pathspec +import pathspec.gitignore +from mkdocs import plugins, theme, utils +from mkdocs.config.base import ( + BaseConfigOption, + Config, + LegacyConfig, + PlainConfigSchemaItem, + ValidationError, +) +from mkdocs.exceptions import ConfigurationError -class BaseConfigOption: +T = TypeVar('T') +SomeConfig = TypeVar('SomeConfig', bound=Config) - def __init__(self): - self.warnings = [] - self.default = None - def is_required(self): - return False +class SubConfig(Generic[SomeConfig], BaseConfigOption[SomeConfig]): + """ + Subconfig Config Option - def validate(self, value): - return self.run_validation(value) + New: If targeting MkDocs 1.4+, please pass a subclass of Config to the + constructor, instead of the old style of a sequence of ConfigOption instances. + Validation is then enabled by default. - def reset_warnings(self): - self.warnings = [] + A set of `config_options` grouped under a single config option. + By default, validation errors and warnings resulting from validating + `config_options` are ignored (`validate=False`). Users should typically + enable validation with `validate=True`. + """ - def pre_validation(self, config, key_name): - """ - Before all options are validated, perform a pre-validation process. + _config_file_path: str | None = None + config_class: type[SomeConfig] - The pre-validation process method should be implemented by subclasses. - """ + @overload + def __init__( + self: SubConfig[SomeConfig], config_class: type[SomeConfig], *, validate: bool = True + ): + """Create a sub-config in a type-safe way, using fields defined in a Config subclass.""" - def run_validation(self, value): - """ - Perform validation for a value. + @overload + def __init__( + self: SubConfig[LegacyConfig], + *config_options: PlainConfigSchemaItem, + validate: bool = False, + ): + """Create an untyped sub-config, using directly passed fields.""" - The run_validation method should be implemented by subclasses. 
- """ - return value + def __init__(self, *config_options, validate=None): + super().__init__() + self.default = {} + self._do_validation = True if validate is None else validate + if type(self) is SubConfig: + if ( + len(config_options) == 1 + and isinstance(config_options[0], type) + and issubclass(config_options[0], Config) + ): + (self.config_class,) = config_options + else: + self.config_class = functools.partial(LegacyConfig, config_options) + self._do_validation = False if validate is None else validate - def post_validation(self, config, key_name): - """ - After all options have passed validation, perform a post-validation - process to do any additional changes dependant on other config values. + def __class_getitem__(cls, config_class: type[Config]): + """Eliminates the need to write `config_class = FooConfig` when subclassing SubConfig[FooConfig]""" + name = f'{cls.__name__}[{config_class.__name__}]' + return type(name, (cls,), dict(config_class=config_class)) - The post-validation process method should be implemented by subclasses. - """ + def pre_validation(self, config: Config, key_name: str): + self._config_file_path = config.config_file_path + def run_validation(self, value: object) -> SomeConfig: + config = self.config_class(config_file_path=self._config_file_path) + try: + config.load_dict(value) # type: ignore + failed, warnings = config.validate() + except ConfigurationError as e: + raise ValidationError(str(e)) -class SubConfig(BaseConfigOption, Config): - def __init__(self, *config_options): - BaseConfigOption.__init__(self) - Config.__init__(self, config_options) - self.default = {} + if self._do_validation: + # Capture errors and warnings + self.warnings.extend(f"Sub-option '{key}': {msg}" for key, msg in warnings) + if failed: + # Get the first failing one + key, err = failed[0] + raise ValidationError(f"Sub-option '{key}': {err}") - def validate(self, value): - self.load_dict(value) - return self.run_validation(value) + return config - def run_validation(self, value): - Config.validate(self) - return self +class PropagatingSubConfig(SubConfig[SomeConfig], Generic[SomeConfig]): + """A SubConfig that must consist of SubConfigs with defined schemas. -class ConfigItems(BaseConfigOption): + Any value set on the top config gets moved to sub-configs with matching keys. """ - Config Items Option - Validates a list of mappings that all must match the same set of - options. + def run_validation(self, value: object): + if isinstance(value, dict): + to_discard = set() + for k1, v1 in self.config_class._schema: + if isinstance(v1, SubConfig): + for k2, _ in v1.config_class._schema: + if k2 in value: + subdict = value.setdefault(k1, {}) + if isinstance(subdict, dict): + to_discard.add(k2) + subdict.setdefault(k2, value[k2]) + for k in to_discard: + del value[k] + return super().run_validation(value) + + +class OptionallyRequired(Generic[T], BaseConfigOption[T]): """ - def __init__(self, *config_options, **kwargs): - BaseConfigOption.__init__(self) - self.item_config = SubConfig(*config_options) - self.required = kwargs.get('required', False) - - def __repr__(self): - return '{}: {}'.format(self.__class__.__name__, self.item_config) - - def run_validation(self, value): - if value is None: - if self.required: - raise ValidationError("Required configuration not provided.") - else: - return () - - if not isinstance(value, Sequence): - raise ValidationError('Expected a sequence of mappings, but a %s ' - 'was given.' 
% type(value)) - result = [] - for item in value: - result.append(self.item_config.validate(item)) - return result - + Soft-deprecated, do not use. -class OptionallyRequired(BaseConfigOption): - """ A subclass of BaseConfigOption that adds support for default values and required values. It is a base class for config options. """ - def __init__(self, default=None, required=False): + @overload + def __init__(self, default=None): + ... + + @overload + def __init__(self, default=None, *, required: bool): + ... + + def __init__(self, default=None, required=None): super().__init__() self.default = default - self.required = required - - def is_required(self): - return self.required + self._legacy_required = required + self.required = bool(required) def validate(self, value): """ @@ -115,126 +171,306 @@ def validate(self, value): it is empty but has a default, use that. Finally, call the run_validation method on the subclass unless. """ - if value is None: if self.default is not None: - if hasattr(self.default, 'copy'): - # ensure no mutable values are assigned - value = self.default.copy() - else: - value = self.default + value = self.default elif not self.required: - return + return None elif self.required: raise ValidationError("Required configuration not provided.") return self.run_validation(value) -class Type(OptionallyRequired): +class ListOfItems(Generic[T], BaseConfigOption[List[T]]): + """ + Validates a homogeneous list of items. + + E.g. for `config_options.ListOfItems(config_options.Type(int))` a valid item is `[1, 2, 3]`. + """ + + required: bool | None = None # Only for subclasses to set. + + def __init__(self, option_type: BaseConfigOption[T], default=None) -> None: + super().__init__() + self.default = default + self.option_type = option_type + self.option_type.warnings = self.warnings + + def __repr__(self) -> str: + return f'{type(self).__name__}: {self.option_type}' + + def pre_validation(self, config: Config, key_name: str): + self._config = config + self._key_name = key_name + + def run_validation(self, value: object) -> list[T]: + if value is None: + if self.required or self.default is None: + raise ValidationError("Required configuration not provided.") + value = self.default + if not isinstance(value, list): + raise ValidationError(f'Expected a list of items, but a {type(value)} was given.') + if not value: # Optimization for empty list + return value + + fake_config = LegacyConfig(()) + try: + fake_config.config_file_path = self._config.config_file_path + except AttributeError: + pass + + # Emulate a config-like environment for pre_validation and post_validation. + parent_key_name = getattr(self, '_key_name', '') + fake_keys = [f'{parent_key_name}[{i}]' for i in range(len(value))] + fake_config.data = dict(zip(fake_keys, value)) + + self.option_type.warnings = self.warnings + for key_name in fake_config: + self.option_type.pre_validation(fake_config, key_name) + for key_name in fake_config: + # Specifically not running `validate` to avoid the OptionallyRequired effect. + fake_config[key_name] = self.option_type.run_validation(fake_config[key_name]) + for key_name in fake_config: + self.option_type.post_validation(fake_config, key_name) + + return [fake_config[k] for k in fake_keys] + + +class DictOfItems(Generic[T], BaseConfigOption[Dict[str, T]]): + """ + Validates a dict of items. Keys are always strings. + + E.g. for `config_options.DictOfItems(config_options.Type(int))` a valid item is `{"a": 1, "b": 2}`. 
+ """ + + required: bool | None = None # Only for subclasses to set. + + def __init__(self, option_type: BaseConfigOption[T], default=None) -> None: + super().__init__() + self.default = default + self.option_type = option_type + self.option_type.warnings = self.warnings + + def __repr__(self) -> str: + return f"{type(self).__name__}: {self.option_type}" + + def pre_validation(self, config: Config, key_name: str): + self._config = config + self._key_name = key_name + + def run_validation(self, value: object) -> dict[str, T]: + if value is None: + if self.required or self.default is None: + raise ValidationError("Required configuration not provided.") + value = self.default + if not isinstance(value, dict): + raise ValidationError(f"Expected a dict of items, but a {type(value)} was given.") + if not value: # Optimization for empty list + return value + + fake_config = LegacyConfig(()) + try: + fake_config.config_file_path = self._config.config_file_path + except AttributeError: + pass + + # Emulate a config-like environment for pre_validation and post_validation. + fake_config.data = value + + for key in fake_config: + self.option_type.pre_validation(fake_config, key) + for key in fake_config: + if not isinstance(key, str): + raise ValidationError( + f"Expected type: {str} for keys, but received: {type(key)} (key={key})" + ) + for key in fake_config: + # Specifically not running `validate` to avoid the OptionallyRequired effect. + fake_config[key] = self.option_type.run_validation(fake_config[key]) + for key in fake_config: + self.option_type.post_validation(fake_config, key) + + return value + + +class ConfigItems(ListOfItems[LegacyConfig]): + """ + Deprecated: Use `ListOfItems(SubConfig(...))` instead of `ConfigItems(...)`. + + Validates a list of mappings that all must match the same set of + options. + """ + + @overload + def __init__(self, *config_options: PlainConfigSchemaItem): + ... + + @overload + def __init__(self, *config_options: PlainConfigSchemaItem, required: bool): + ... + + def __init__(self, *config_options: PlainConfigSchemaItem, required=None) -> None: + super().__init__(SubConfig(*config_options), default=[]) + self._legacy_required = required + self.required = bool(required) + + +class Type(Generic[T], OptionallyRequired[T]): """ Type Config Option Validate the type of a config option against a given Python type. """ - def __init__(self, type_, length=None, **kwargs): + @overload + def __init__(self, type_: type[T], length: int | None = None, **kwargs): + ... + + @overload + def __init__(self, type_: tuple[type[T], ...], length: int | None = None, **kwargs): + ... 
+ + def __init__(self, type_, length=None, **kwargs) -> None: super().__init__(**kwargs) self._type = type_ self.length = length - def run_validation(self, value): - + def run_validation(self, value: object) -> T: if not isinstance(value, self._type): - msg = ("Expected type: {} but received: {}" - .format(self._type, type(value))) + msg = f"Expected type: {self._type} but received: {type(value)}" elif self.length is not None and len(value) != self.length: - msg = ("Expected type: {0} with length {2} but received: {1} with " - "length {3}").format(self._type, value, self.length, - len(value)) + msg = ( + f"Expected type: {self._type} with length {self.length}" + f" but received: {value!r} with length {len(value)}" + ) else: return value raise ValidationError(msg) -class Choice(OptionallyRequired): +class Choice(Generic[T], OptionallyRequired[T]): """ Choice Config Option Validate the config option against a strict set of values. """ - def __init__(self, choices, **kwargs): - super().__init__(**kwargs) + def __init__(self, choices: Collection[T], default: T | None = None, **kwargs) -> None: + super().__init__(default=default, **kwargs) try: length = len(choices) except TypeError: length = 0 if not length or isinstance(choices, str): - raise ValueError('Expected iterable of choices, got {}', choices) + raise ValueError(f'Expected iterable of choices, got {choices}') + if default is not None and default not in choices: + raise ValueError(f'{default!r} is not one of {choices!r}') self.choices = choices - def run_validation(self, value): + def run_validation(self, value: object) -> T: if value not in self.choices: - msg = ("Expected one of: {} but received: {}" - .format(self.choices, value)) - else: - return value - - raise ValidationError(msg) + raise ValidationError(f"Expected one of: {self.choices} but received: {value!r}") + return value # type: ignore class Deprecated(BaseConfigOption): + """ + Deprecated Config Option - def __init__(self, moved_to=None): + Raises a warning as the option is deprecated. Uses `message` for the + warning. If `move_to` is set to the name of a new config option, the value + is moved to the new option on pre_validation. If `option_type` is set to a + ConfigOption instance, then the value is validated against that type. + """ + + def __init__( + self, + moved_to: str | None = None, + message: str | None = None, + removed: bool = False, + option_type: BaseConfigOption | None = None, + ) -> None: super().__init__() self.default = None self.moved_to = moved_to + if not message: + if removed: + message = "The configuration option '{}' was removed from MkDocs." + else: + message = ( + "The configuration option '{}' has been deprecated and " + "will be removed in a future release." + ) + if moved_to: + message += f" Use '{moved_to}' instead." - def pre_validation(self, config, key_name): + self.message = message + self.removed = removed + self.option = option_type or BaseConfigOption() - if config.get(key_name) is None or self.moved_to is None: - return + self.warnings = self.option.warnings - warning = ('The configuration option {} has been deprecated and ' - 'will be removed in a future release of MkDocs.' - ''.format(key_name)) - self.warnings.append(warning) + def pre_validation(self, config: Config, key_name: str): + self.option.pre_validation(config, key_name) - if '.' 
not in self.moved_to: - target = config - target_key = self.moved_to - else: - move_to, target_key = self.moved_to.rsplit('.', 1) + if config.get(key_name) is not None: + if self.removed: + raise ValidationError(self.message.format(key_name)) + self.warnings.append(self.message.format(key_name)) + + if self.moved_to is not None: + *parent_keys, target_key = self.moved_to.split('.') + target: Any = config + + for key in parent_keys: + if target.get(key) is None: + target[key] = {} + target = target[key] + + if not isinstance(target, dict): + # We can't move it for the user + return - target = config - for key in move_to.split('.'): - target = target.setdefault(key, {}) + target[target_key] = config.pop(key_name) + + def validate(self, value): + return self.option.validate(value) + + def post_validation(self, config: Config, key_name: str): + self.option.post_validation(config, key_name) + + def reset_warnings(self): + self.option.reset_warnings() + self.warnings = self.option.warnings - if not isinstance(target, dict): - # We can't move it for the user - return - target[target_key] = config.pop(key_name) +class _IpAddressValue(NamedTuple): + host: str + port: int + def __str__(self) -> str: + return f'{self.host}:{self.port}' -class IpAddress(OptionallyRequired): + +class IpAddress(OptionallyRequired[_IpAddressValue]): """ IpAddress Config Option - Validate that an IP address is in an apprioriate format + Validate that an IP address is in an appropriate format """ - def run_validation(self, value): - try: - host, port = value.rsplit(':', 1) - except Exception: + def run_validation(self, value: object) -> _IpAddressValue: + if not isinstance(value, str) or ':' not in value: raise ValidationError("Must be a string of format 'IP:PORT'") + host, port_str = value.rsplit(':', 1) if host != 'localhost': + if host.startswith('[') and host.endswith(']'): + host = host[1:-1] try: # Validate and normalize IP Address host = str(ipaddress.ip_address(host)) @@ -242,63 +478,108 @@ def run_validation(self, value): raise ValidationError(e) try: - port = int(port) + port = int(port_str) except Exception: - raise ValidationError("'{}' is not a valid port".format(port)) - - class Address(namedtuple('Address', 'host port')): - def __str__(self): - return '{}:{}'.format(self.host, self.port) + raise ValidationError(f"'{port_str}' is not a valid port") - return Address(host, port) + return _IpAddressValue(host, port) - def post_validation(self, config, key_name): + def post_validation(self, config: Config, key_name: str): host = config[key_name].host if key_name == 'dev_addr' and host in ['0.0.0.0', '::']: self.warnings.append( - ("The use of the IP address '{}' suggests a production environment " - "or the use of a proxy to connect to the MkDocs server. However, " - "the MkDocs' server is intended for local development purposes only. " - "Please use a third party production-ready server instead.").format(host) + f"The use of the IP address '{host}' suggests a production environment " + "or the use of a proxy to connect to the MkDocs server. However, " + "the MkDocs' server is intended for local development purposes only. " + "Please use a third party production-ready server instead." ) -class URL(OptionallyRequired): +class URL(OptionallyRequired[str]): """ URL Config Option Validate a URL by requiring a scheme is present. """ - def __init__(self, default='', required=False): - super().__init__(default, required) + @overload + def __init__(self, default=None, *, is_dir: bool = False): + ... 
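# ---------------------------------------------------------------------------
# A small sketch of the reworked Deprecated option above, in the class-based
# config style from this patch. `ExampleConfig` and its option names are
# hypothetical; only the `removed`/`message` wiring mirrors the code added here.
from mkdocs.config import base
from mkdocs.config import config_options as c

class ExampleConfig(base.Config):
    # Supplying a value for the removed key fails pre-validation with the
    # formatted message instead of merely warning.
    old_flag = c.Deprecated(removed=True, message="'{}' was removed, use 'new_flag'.")
    new_flag = c.Type(bool, default=False)

cfg = ExampleConfig()
cfg.load_dict({'old_flag': True})
errors, warnings = cfg.validate()   # errors carries the removal message for 'old_flag'
# ---------------------------------------------------------------------------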
- def run_validation(self, value): + @overload + def __init__(self, default=None, *, required: bool, is_dir: bool = False): + ... + + def __init__(self, default=None, required=None, is_dir: bool = False) -> None: + self.is_dir = is_dir + super().__init__(default, required=required) + + def run_validation(self, value: object) -> str: + if not isinstance(value, str): + raise ValidationError(f"Expected a string, got {type(value)}") if value == '': return value - try: - parsed_url = urlparse(value) + parsed_url = urlsplit(value) except (AttributeError, TypeError): raise ValidationError("Unable to parse the URL.") - if parsed_url.scheme: - return value + if parsed_url.scheme and parsed_url.netloc: + if self.is_dir and not parsed_url.path.endswith('/'): + parsed_url = parsed_url._replace(path=f'{parsed_url.path}/') + return urlunsplit(parsed_url) - raise ValidationError( - "The URL isn't valid, it should include the http:// (scheme)") + raise ValidationError("The URL isn't valid, it should include the http:// (scheme)") -class RepoURL(URL): - """ - Repo URL Config Option +class Optional(Generic[T], BaseConfigOption[Union[T, None]]): + """Wraps a field and makes a None value possible for it when no value is set. - A small extension to the URL config that sets the repo_name and edit_uri, - based on the url if they haven't already been provided. + E.g. `my_field = config_options.Optional(config_options.Type(str))` """ - def post_validation(self, config, key_name): - repo_host = urlparse(config['repo_url']).netloc.lower() + def __init__(self, config_option: BaseConfigOption[T]) -> None: + if config_option.default is not None: + raise ValueError( + f"This option already has a default ({config_option.default!r}) " + f"and doesn't need to be wrapped into Optional" + ) + super().__init__() + self.option = config_option + self.warnings = config_option.warnings + + def __getattr__(self, key): + if key in ('option', 'warnings'): + raise AttributeError + return getattr(self.option, key) + + def pre_validation(self, config: Config, key_name: str): + return self.option.pre_validation(config, key_name) + + def run_validation(self, value: object) -> T | None: + if value is None: + return None + return self.option.validate(value) + + def post_validation(self, config: Config, key_name: str): + result = self.option.post_validation(config, key_name) # type: ignore + self.warnings = self.option.warnings + return result + + def reset_warnings(self): + self.option.reset_warnings() + self.warnings = self.option.warnings + + +class RepoURL(URL): + def __init__(self, *args, **kwargs): + warnings.warn( + "RepoURL is no longer used in MkDocs and will be removed.", DeprecationWarning + ) + super().__init__(*args, **kwargs) + + def post_validation(self, config: Config, key_name: str): + repo_host = urlsplit(config['repo_url']).netloc.lower() edit_uri = config.get('edit_uri') # derive repo_name from repo_url if unset @@ -322,38 +603,119 @@ def post_validation(self, config, key_name): edit_uri = '' # ensure a well-formed edit_uri - if edit_uri: - if not edit_uri.startswith(('?', '#')) \ - and not config['repo_url'].endswith('/'): - config['repo_url'] += '/' - if not edit_uri.endswith('/'): - edit_uri += '/' + if edit_uri and not edit_uri.endswith('/'): + edit_uri += '/' config['edit_uri'] = edit_uri -class FilesystemObject(Type): +class EditURI(Type[str]): + def __init__(self, repo_url_key: str) -> None: + super().__init__(str) + self.repo_url_key = repo_url_key + + def post_validation(self, config: Config, key_name: str): + 
edit_uri = config.get(key_name) + repo_url = config.get(self.repo_url_key) + + if edit_uri is None and repo_url is not None: + repo_host = urlsplit(repo_url).netloc.lower() + if repo_host == 'github.com' or repo_host == 'gitlab.com': + edit_uri = 'edit/master/docs/' + elif repo_host == 'bitbucket.org': + edit_uri = 'src/default/docs/' + + # ensure a well-formed edit_uri + if edit_uri and not edit_uri.endswith('/'): + edit_uri += '/' + + config[key_name] = edit_uri + + +class EditURITemplate(BaseConfigOption[str]): + class Formatter(string.Formatter): + def convert_field(self, value, conversion): + if conversion == 'q': + return urlquote(value, safe='') + return super().convert_field(value, conversion) + + class Template(UserString): + def __init__(self, formatter, data) -> None: + super().__init__(data) + self.formatter = formatter + try: + self.format('', '') + except KeyError as e: + raise ValueError(f"Unknown template substitute: {e}") + + def format(self, path, path_noext): + return self.formatter.format(self.data, path=path, path_noext=path_noext) + + def __init__(self, edit_uri_key: str | None = None) -> None: + super().__init__() + self.edit_uri_key = edit_uri_key + + def run_validation(self, value: object): + try: + return self.Template(self.Formatter(), value) + except Exception as e: + raise ValidationError(e) + + def post_validation(self, config: Config, key_name: str): + if self.edit_uri_key and config.get(key_name) and config.get(self.edit_uri_key): + self.warnings.append( + f"The option '{self.edit_uri_key}' has no effect when '{key_name}' is set." + ) + + +class RepoName(Type[str]): + def __init__(self, repo_url_key: str) -> None: + super().__init__(str) + self.repo_url_key = repo_url_key + + def post_validation(self, config: Config, key_name: str): + repo_name = config.get(key_name) + repo_url = config.get(self.repo_url_key) + + # derive repo_name from repo_url if unset + if repo_url is not None and repo_name is None: + repo_host = urlsplit(config['repo_url']).netloc.lower() + if repo_host == 'github.com': + repo_name = 'GitHub' + elif repo_host == 'bitbucket.org': + repo_name = 'Bitbucket' + elif repo_host == 'gitlab.com': + repo_name = 'GitLab' + else: + repo_name = repo_host.split('.')[0].title() + config[key_name] = repo_name + + +class FilesystemObject(Type[str]): """ Base class for options that point to filesystem objects. """ - def __init__(self, exists=False, **kwargs): + + existence_test: Callable[[str], bool] = staticmethod(os.path.exists) + name = 'file or directory' + + def __init__(self, exists: bool = False, **kwargs) -> None: super().__init__(type_=str, **kwargs) self.exists = exists - self.config_dir = None + self.config_dir: str | None = None - def pre_validation(self, config, key_name): - self.config_dir = os.path.dirname(config.config_file_path) if config.config_file_path else None + def pre_validation(self, config: Config, key_name: str): + self.config_dir = ( + os.path.dirname(config.config_file_path) if config.config_file_path else None + ) - def run_validation(self, value): + def run_validation(self, value: object) -> str: value = super().run_validation(value) if self.config_dir and not os.path.isabs(value): value = os.path.join(self.config_dir, value) if self.exists and not self.existence_test(value): - raise ValidationError("The path {path} isn't an existing {name}.". 
- format(path=value, name=self.name)) - value = os.path.abspath(value) - assert isinstance(value, str) - return value + raise ValidationError(f"The path '{value}' isn't an existing {self.name}.") + return os.path.abspath(value) class Dir(FilesystemObject): @@ -362,19 +724,23 @@ class Dir(FilesystemObject): Validate a path to a directory, optionally verifying that it exists. """ + existence_test = staticmethod(os.path.isdir) name = 'directory' - def post_validation(self, config, key_name): - if config.config_file_path is None: + +class DocsDir(Dir): + def post_validation(self, config: Config, key_name: str): + if not config.config_file_path: return # Validate that the dir is not the parent dir of the config file. if os.path.dirname(config.config_file_path) == config[key_name]: raise ValidationError( - ("The '{0}' should not be the parent directory of the config " - "file. Use a child directory instead so that the '{0}' " - "is a sibling of the config file.").format(key_name)) + f"The '{key_name}' should not be the parent directory of the" + f" config file. Use a child directory instead so that the" + f" '{key_name}' is a sibling of the config file." + ) class File(FilesystemObject): @@ -383,10 +749,35 @@ class File(FilesystemObject): Validate a path to a file, optionally verifying that it exists. """ + existence_test = staticmethod(os.path.isfile) name = 'file' +class ListOfPaths(ListOfItems[str]): + """ + List of Paths Config Option + + A list of file system paths. Raises an error if one of the paths does not exist. + + For greater flexibility, prefer ListOfItems, e.g. to require files specifically: + + config_options.ListOfItems(config_options.File(exists=True)) + """ + + @overload + def __init__(self, default=[]): + ... + + @overload + def __init__(self, default=[], *, required: bool): + ... + + def __init__(self, default=[], required=None) -> None: + super().__init__(FilesystemObject(exists=True), default) + self.required = required + + class SiteDir(Dir): """ SiteDir Config Option @@ -394,240 +785,439 @@ class SiteDir(Dir): Validates the site_dir and docs_dir directories do not contain each other. """ - def post_validation(self, config, key_name): - + def post_validation(self, config: Config, key_name: str): super().post_validation(config, key_name) + docs_dir = config['docs_dir'] + site_dir = config['site_dir'] # Validate that the docs_dir and site_dir don't contain the # other as this will lead to copying back and forth on each # and eventually make a deep nested mess. - if (config['docs_dir'] + os.sep).startswith(config['site_dir'].rstrip(os.sep) + os.sep): + if (docs_dir + os.sep).startswith(site_dir.rstrip(os.sep) + os.sep): raise ValidationError( - ("The 'docs_dir' should not be within the 'site_dir' as this " - "can mean the source files are overwritten by the output or " - "it will be deleted if --clean is passed to mkdocs build." - "(site_dir: '{}', docs_dir: '{}')" - ).format(config['site_dir'], config['docs_dir'])) - elif (config['site_dir'] + os.sep).startswith(config['docs_dir'].rstrip(os.sep) + os.sep): + f"The 'docs_dir' should not be within the 'site_dir' as this " + f"can mean the source files are overwritten by the output or " + f"it will be deleted if --clean is passed to mkdocs build. 
" + f"(site_dir: '{site_dir}', docs_dir: '{docs_dir}')" + ) + elif (site_dir + os.sep).startswith(docs_dir.rstrip(os.sep) + os.sep): raise ValidationError( - ("The 'site_dir' should not be within the 'docs_dir' as this " - "leads to the build directory being copied into itself and " - "duplicate nested files in the 'site_dir'." - "(site_dir: '{}', docs_dir: '{}')" - ).format(config['site_dir'], config['docs_dir'])) + f"The 'site_dir' should not be within the 'docs_dir' as this " + f"leads to the build directory being copied into itself and " + f"duplicate nested files in the 'site_dir'. " + f"(site_dir: '{site_dir}', docs_dir: '{docs_dir}')" + ) -class Theme(BaseConfigOption): +class Theme(BaseConfigOption[theme.Theme]): """ Theme Config Option Validate that the theme exists and build Theme instance. """ - def __init__(self, default=None): + def __init__(self, default=None) -> None: super().__init__() self.default = default - def validate(self, value): - if value is None and self.default is not None: - value = {'name': self.default} + def pre_validation(self, config: Config, key_name: str): + self.config_file_path = config.config_file_path - if isinstance(value, str): - value = {'name': value} + def run_validation(self, value: object) -> theme.Theme: + if value is None and self.default is not None: + theme_config = {'name': self.default} + elif isinstance(value, str): + theme_config = {'name': value} + elif isinstance(value, dict): + if 'name' not in value: + raise ValidationError("No theme name set.") + theme_config = value + else: + raise ValidationError( + f'Invalid type {type(value)}. Expected a string or key/value pairs.' + ) themes = utils.get_theme_names() - - if isinstance(value, dict): - if 'name' in value: - if value['name'] is None or value['name'] in themes: - return value - - raise ValidationError( - "Unrecognised theme name: '{}'. The available installed themes " - "are: {}".format(value['name'], ', '.join(themes)) - ) - - raise ValidationError("No theme name set.") - - raise ValidationError('Invalid type "{}". Expected a string or key/value pairs.'.format(type(value))) - - def post_validation(self, config, key_name): - theme_config = config[key_name] + if theme_config['name'] is not None and theme_config['name'] not in themes: + raise ValidationError( + f"Unrecognised theme name: '{theme_config['name']}'. " + f"The available installed themes are: {', '.join(themes)}" + ) if not theme_config['name'] and 'custom_dir' not in theme_config: - raise ValidationError("At least one of 'theme.name' or 'theme.custom_dir' must be defined.") + raise ValidationError("At least one of 'name' or 'custom_dir' must be defined.") # Ensure custom_dir is an absolute path if 'custom_dir' in theme_config and not os.path.isabs(theme_config['custom_dir']): - config_dir = os.path.dirname(config.config_file_path) + config_dir = os.path.dirname(self.config_file_path) theme_config['custom_dir'] = os.path.join(config_dir, theme_config['custom_dir']) if 'custom_dir' in theme_config and not os.path.isdir(theme_config['custom_dir']): - raise ValidationError("The path set in {name}.custom_dir ('{path}') does not exist.". 
- format(path=theme_config['custom_dir'], name=key_name)) + raise ValidationError( + "The path set in custom_dir ('{path}') does not exist.".format( + path=theme_config['custom_dir'] + ) + ) + + if 'locale' in theme_config and not isinstance(theme_config['locale'], str): + raise ValidationError("'locale' must be a string.") - config[key_name] = theme.Theme(**theme_config) + return theme.Theme(**theme_config) class Nav(OptionallyRequired): """ Nav Config Option - Validate the Nav config. Automatically add all markdown files if empty. + Validate the Nav config. """ - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.file_match = utils.is_markdown_file - - def run_validation(self, value): + def run_validation(self, value: object, *, top=True): + if isinstance(value, list): + for subitem in value: + self._validate_nav_item(subitem) + if top and not value: + value = None + elif isinstance(value, dict) and value and not top: + # TODO: this should be an error. + self.warnings.append(f"Expected nav to be a list, got {self._repr_item(value)}") + for subitem in value.values(): + self.run_validation(subitem, top=False) + elif isinstance(value, str) and not top: + pass + else: + raise ValidationError(f"Expected nav to be a list, got {self._repr_item(value)}") + return value - if not isinstance(value, list): + def _validate_nav_item(self, value): + if isinstance(value, str): + pass + elif isinstance(value, dict): + if len(value) != 1: + raise ValidationError( + f"Expected nav item to be a dict of size 1, got {self._repr_item(value)}" + ) + for subnav in value.values(): + self.run_validation(subnav, top=False) + else: raise ValidationError( - "Expected a list, got {}".format(type(value))) + f"Expected nav item to be a string or dict, got {self._repr_item(value)}" + ) - if len(value) == 0: - return + @classmethod + def _repr_item(cls, value) -> str: + if isinstance(value, dict) and value: + return f"dict with keys {tuple(value.keys())}" + elif isinstance(value, (str, type(None))): + return repr(value) + else: + return f"a {type(value).__name__}: {value!r}" - config_types = {type(item) for item in value} - if config_types.issubset({str, dict}): - return value - raise ValidationError("Invalid pages config. {} {}".format( - config_types, {str, dict} - )) +class Private(Generic[T], BaseConfigOption[T]): + """A config option that can only be populated programmatically. Raises an error if set by the user.""" - def post_validation(self, config, key_name): - # TODO: remove this when `pages` config setting is fully deprecated. - if key_name == 'pages' and config['pages'] is not None: - if config['nav'] is None: - # copy `pages` config to new 'nav' config setting - config['nav'] = config['pages'] - warning = ("The 'pages' configuration option has been deprecated and will " - "be removed in a future release of MkDocs. Use 'nav' instead.") - self.warnings.append(warning) + def run_validation(self, value: object) -> None: + if value is not None: + raise ValidationError('For internal use only.') -class Private(OptionallyRequired): - """ - Private Config Option +class ExtraScriptValue(Config): + """An extra script to be added to the page. The `extra_javascript` config is a list of these.""" - A config option only for internal use. Raises an error if set by the user. 
- """ + path = Type(str) + """The value of the `src` tag of the script.""" + type = Type(str, default='') + """The value of the `type` tag of the script.""" + defer = Type(bool, default=False) + """Whether to add the `defer` tag to the script.""" + async_ = Type(bool, default=False) + """Whether to add the `async` tag to the script.""" + + def __init__(self, path: str = '', config_file_path=None): + super().__init__(config_file_path=config_file_path) + self.path = path + + def __str__(self): + return self.path + + def __fspath__(self): + return self.path - def run_validation(self, value): - raise ValidationError('For internal use only.') +class ExtraScript(BaseConfigOption[Union[ExtraScriptValue, str]]): + def __init__(self): + super().__init__() + self.option_type = SubConfig[ExtraScriptValue]() + + def run_validation(self, value: object) -> ExtraScriptValue | str: + self.option_type.warnings = self.warnings + if isinstance(value, str): + if value.endswith('.mjs'): + return self.option_type.run_validation({'path': value, 'type': 'module'}) + return value + return self.option_type.run_validation(value) -class MarkdownExtensions(OptionallyRequired): + +class MarkdownExtensions(OptionallyRequired[List[str]]): """ Markdown Extensions Config Option - A list of extensions. If a list item contains extension configs, - those are set on the private setting passed to `configkey`. The - `builtins` keyword accepts a list of extensions which cannot be - overriden by the user. However, builtins can be duplicated to define - config options for them if desired. - """ - def __init__(self, builtins=None, configkey='mdx_configs', **kwargs): - super().__init__(**kwargs) + A list or dict of extensions. Each list item may contain either a string or a one item dict. + A string must be a valid Markdown extension name with no config options defined. The key of + a dict item must be a valid Markdown extension name and the value must be a dict of config + options for that extension. Extension configs are set on the private setting passed to + `configkey`. The `builtins` keyword accepts a list of extensions which cannot be overridden by + the user. 
However, builtins can be duplicated to define config options for them if desired.""" + + def __init__( + self, + builtins: list[str] | None = None, + configkey: str = 'mdx_configs', + default: list[str] = [], + **kwargs, + ) -> None: + super().__init__(default=default, **kwargs) self.builtins = builtins or [] self.configkey = configkey - self.configdata = {} - def run_validation(self, value): - if not isinstance(value, (list, tuple)): + def validate_ext_cfg(self, ext: object, cfg: object) -> None: + if not isinstance(ext, str): + raise ValidationError(f"'{ext}' is not a valid Markdown Extension name.") + if not cfg: + return + if not isinstance(cfg, dict): + raise ValidationError(f"Invalid config options for Markdown Extension '{ext}'.") + self.configdata[ext] = cfg + + def run_validation(self, value: object) -> list[str]: + self.configdata: dict[str, dict] = {} + if not isinstance(value, (list, tuple, dict)): raise ValidationError('Invalid Markdown Extensions configuration') extensions = [] - for item in value: - if isinstance(item, dict): - if len(item) > 1: - raise ValidationError('Invalid Markdown Extensions configuration') - ext, cfg = item.popitem() + if isinstance(value, dict): + for ext, cfg in value.items(): + self.validate_ext_cfg(ext, cfg) extensions.append(ext) - if cfg is None: - continue - if not isinstance(cfg, dict): - raise ValidationError('Invalid config options for Markdown ' - "Extension '{}'.".format(ext)) - self.configdata[ext] = cfg - elif isinstance(item, str): - extensions.append(item) - else: - raise ValidationError('Invalid Markdown Extensions configuration') + else: + for item in value: + if isinstance(item, dict): + if len(item) > 1: + raise ValidationError('Invalid Markdown Extensions configuration') + ext, cfg = item.popitem() + self.validate_ext_cfg(ext, cfg) + extensions.append(ext) + elif isinstance(item, str): + extensions.append(item) + else: + raise ValidationError('Invalid Markdown Extensions configuration') extensions = utils.reduce_list(self.builtins + extensions) # Confirm that Markdown considers extensions to be valid - try: - markdown.Markdown(extensions=extensions, extension_configs=self.configdata) - except Exception as e: - raise ValidationError(e.args[0]) + md = markdown.Markdown() + for ext in extensions: + try: + md.registerExtensions((ext,), self.configdata) + except Exception as e: + stack: list = [] + for frame in reversed(traceback.extract_tb(sys.exc_info()[2])): + if not frame.line: # Ignore frames before + break + stack.insert(0, frame) + tb = ''.join(traceback.format_list(stack)) + + raise ValidationError( + f"Failed to load extension '{ext}'.\n{tb}{type(e).__name__}: {e}" + ) return extensions - def post_validation(self, config, key_name): + def post_validation(self, config: Config, key_name: str): config[self.configkey] = self.configdata -class Plugins(OptionallyRequired): +class Plugins(OptionallyRequired[plugins.PluginCollection]): """ Plugins config option. - A list of plugins. If a plugin defines config options those are used when + A list or dict of plugins. If a plugin defines config options those are used when initializing the plugin class. 
""" - def __init__(self, **kwargs): + def __init__(self, theme_key: str | None = None, **kwargs) -> None: super().__init__(**kwargs) self.installed_plugins = plugins.get_plugins() - self.config_file_path = None + self.theme_key = theme_key + self._config: Config | None = None + self.plugin_cache: dict[str, plugins.BasePlugin] = {} def pre_validation(self, config, key_name): - self.config_file_path = config.config_file_path + self._config = config + + def run_validation(self, value: object) -> plugins.PluginCollection: + if not isinstance(value, (list, tuple, dict)): + raise ValidationError('Invalid Plugins configuration. Expected a list or dict.') + self.plugins = plugins.PluginCollection() + self._instance_counter: MutableMapping[str, int] = Counter() + for name, cfg in self._parse_configs(value): + self.load_plugin_with_namespace(name, cfg) + return self.plugins + + @classmethod + def _parse_configs(cls, value: list | tuple | dict) -> Iterator[tuple[str, dict]]: + if isinstance(value, dict): + for name, cfg in value.items(): + if not isinstance(name, str): + raise ValidationError(f"'{name}' is not a valid plugin name.") + yield name, cfg + else: + for item in value: + if isinstance(item, dict): + if len(item) != 1: + raise ValidationError('Invalid Plugins configuration') + name, cfg = item.popitem() + else: + name = item + cfg = {} + if not isinstance(name, str): + raise ValidationError(f"'{name}' is not a valid plugin name.") + yield name, cfg + + def load_plugin_with_namespace(self, name: str, config) -> tuple[str, plugins.BasePlugin]: + if '/' in name: # It's already specified with a namespace. + # Special case: allow to explicitly skip namespaced loading: + if name.startswith('/'): + name = name[1:] + else: + # Attempt to load with prepended namespace for the current theme. + if self.theme_key and self._config: + current_theme = self._config[self.theme_key] + if not isinstance(current_theme, str): + current_theme = current_theme['name'] + if current_theme: + expanded_name = f'{current_theme}/{name}' + if expanded_name in self.installed_plugins: + name = expanded_name + return (name, self.load_plugin(name, config)) + + def load_plugin(self, name: str, config) -> plugins.BasePlugin: + if name not in self.installed_plugins: + raise ValidationError(f'The "{name}" plugin is not installed') - def run_validation(self, value): - if not isinstance(value, (list, tuple)): - raise ValidationError('Invalid Plugins configuration. 
Expected a list of plugins') - plgins = plugins.PluginCollection() - for item in value: - if isinstance(item, dict): - if len(item) > 1: - raise ValidationError('Invalid Plugins configuration') - name, cfg = item.popitem() - cfg = cfg or {} # Users may define a null (None) config - if not isinstance(cfg, dict): - raise ValidationError('Invalid config options for ' - 'the "{}" plugin.'.format(name)) - item = name - else: - cfg = {} + config = config or {} # Users may define a null (None) config + if not isinstance(config, dict): + raise ValidationError(f"Invalid config options for the '{name}' plugin.") - if not isinstance(item, str): - raise ValidationError('Invalid Plugins configuration') + self._instance_counter[name] += 1 + inst_number = self._instance_counter[name] + inst_name = name + if inst_number > 1: + inst_name += f' #{inst_number}' - plgins[item] = self.load_plugin(item, cfg) + plugin = self.plugin_cache.get(inst_name) + if plugin is None: + plugin_cls = self.installed_plugins[name].load() - return plgins + if not issubclass(plugin_cls, plugins.BasePlugin): + raise ValidationError( + f'{plugin_cls.__module__}.{plugin_cls.__name__} must be a subclass of' + f' {plugins.BasePlugin.__module__}.{plugins.BasePlugin.__name__}' + ) - def load_plugin(self, name, config): - if name not in self.installed_plugins: - raise ValidationError('The "{}" plugin is not installed'.format(name)) + plugin = plugin_cls() - Plugin = self.installed_plugins[name].load() + if hasattr(plugin, 'on_startup') or hasattr(plugin, 'on_shutdown'): + self.plugin_cache[inst_name] = plugin - if not issubclass(Plugin, plugins.BasePlugin): - raise ValidationError('{}.{} must be a subclass of {}.{}'.format( - Plugin.__module__, Plugin.__name__, plugins.BasePlugin.__module__, - plugins.BasePlugin.__name__)) + if inst_number > 1 and not getattr(plugin, 'supports_multiple_instances', False): + self.warnings.append( + f"Plugin '{name}' was specified multiple times - this is likely a mistake, " + "because the plugin doesn't declare `supports_multiple_instances`." + ) - plugin = Plugin() - errors, warnings = plugin.load_config(config, self.config_file_path) - self.warnings.extend(warnings) - errors_message = '\n'.join( - "Plugin value: '{}'. 
Error: {}".format(x, y) - for x, y in errors + errors, warns = plugin.load_config( + config, self._config.config_file_path if self._config else None ) + for warning in warns: + if isinstance(warning, str): + self.warnings.append(f"Plugin '{inst_name}': {warning}") + else: + key, msg = warning + self.warnings.append(f"Plugin '{inst_name}' option '{key}': {msg}") + + errors_message = '\n'.join(f"Plugin '{name}' option '{key}': {msg}" for key, msg in errors) if errors_message: raise ValidationError(errors_message) + self.plugins[inst_name] = plugin return plugin + + +class Hooks(BaseConfigOption[List[types.ModuleType]]): + """A list of Python scripts to be treated as instances of plugins.""" + + def __init__(self, plugins_key: str) -> None: + super().__init__() + self.default = [] + self.plugins_key = plugins_key + + def pre_validation(self, config: Config, key_name: str): + self._base_option = ListOfItems(File(exists=True)) + self._base_option.pre_validation(config, key_name) + + def run_validation(self, value: object) -> Mapping[str, Any]: + paths = self._base_option.validate(value) + self.warnings.extend(self._base_option.warnings) + assert isinstance(value, list) + + hooks = {} + for name, path in zip(value, paths): + hooks[name] = self._load_hook(name, path) + return hooks + + @functools.lru_cache(maxsize=None) + def _load_hook(self, name, path): + import importlib.util + + spec = importlib.util.spec_from_file_location(name, path) + if spec is None: + raise ValidationError(f"Cannot import path '{path}' as a Python module") + module = importlib.util.module_from_spec(spec) + sys.modules[name] = module + if spec.loader is None: + raise ValidationError(f"Cannot import path '{path}' as a Python module") + spec.loader.exec_module(module) + return module + + def post_validation(self, config: Config, key_name: str): + plugins = config[self.plugins_key] + for name, hook in config[key_name].items(): + plugins[name] = hook + + +class PathSpec(BaseConfigOption[pathspec.gitignore.GitIgnoreSpec]): + """A path pattern based on gitignore-like syntax.""" + + def run_validation(self, value: object) -> pathspec.gitignore.GitIgnoreSpec: + if not isinstance(value, str): + raise ValidationError(f'Expected a multiline string, but a {type(value)} was given.') + try: + return pathspec.gitignore.GitIgnoreSpec.from_lines(lines=value.splitlines()) + except ValueError as e: + raise ValidationError(str(e)) + + +class _LogLevel(OptionallyRequired[int]): + levels: Mapping[str, int] = { + "warn": logging.WARNING, + "info": logging.INFO, + "ignore": logging.DEBUG, + } + + def run_validation(self, value: object) -> int: + if not isinstance(value, str): + raise ValidationError(f'Expected a string, but a {type(value)} was given.') + try: + return self.levels[value] + except KeyError: + raise ValidationError(f'Expected one of {list(self.levels)}, got {value!r}') diff --git a/mkdocs/config/defaults.py b/mkdocs/config/defaults.py index 103e5fa..e4b6df6 100644 --- a/mkdocs/config/defaults.py +++ b/mkdocs/config/defaults.py @@ -1,114 +1,190 @@ -from mkdocs.config import config_options +from __future__ import annotations + +from typing import IO, TYPE_CHECKING, Dict + +from mkdocs.config import base +from mkdocs.config import config_options as c +from mkdocs.utils.yaml import get_yaml_loader, yaml_load + +if TYPE_CHECKING: + import mkdocs.structure.pages + # NOTE: The order here is important. During validation some config options # depend on others. 
So, if config option A depends on B, then A should be # listed higher in the schema. +class MkDocsConfig(base.Config): + """The configuration of MkDocs itself (the root object of mkdocs.yml).""" + + config_file_path: str = c.Type(str) # type: ignore[assignment] + """The path to the mkdocs.yml config file. Can't be populated from the config.""" + + site_name = c.Type(str) + """The title to use for the documentation.""" + + nav = c.Optional(c.Nav()) + """Defines the structure of the navigation.""" + pages = c.Deprecated(removed=True, moved_to='nav') + + exclude_docs = c.Optional(c.PathSpec()) + """Gitignore-like patterns of files (relative to docs dir) to exclude from the site.""" + + not_in_nav = c.Optional(c.PathSpec()) + """Gitignore-like patterns of files (relative to docs dir) that are not intended to be in the nav. + + This marks doc files that are expected not to be in the nav, otherwise they will cause a log message + (see also `validation.nav.omitted_files`). + """ + + site_url = c.Optional(c.URL(is_dir=True)) + """The full URL to where the documentation will be hosted.""" + + site_description = c.Optional(c.Type(str)) + """A description for the documentation project that will be added to the + HTML meta tags.""" + site_author = c.Optional(c.Type(str)) + """The name of the author to add to the HTML meta tags.""" + + theme = c.Theme(default='mkdocs') + """The MkDocs theme for the documentation.""" + + docs_dir = c.DocsDir(default='docs', exists=True) + """The directory containing the documentation markdown.""" + + site_dir = c.SiteDir(default='site') + """The directory where the site will be built to""" + + copyright = c.Optional(c.Type(str)) + """A copyright notice to add to the footer of documentation.""" + + google_analytics = c.Deprecated( + message=( + 'The configuration option {} has been deprecated and ' + 'will be removed in a future release of MkDocs. See the ' + 'options available on your theme for an alternative.' + ), + option_type=c.Type(list, length=2), + ) + """set of values for Google analytics containing the account IO and domain + this should look like, ['UA-27795084-5', 'mkdocs.org']""" + + dev_addr = c.IpAddress(default='127.0.0.1:8000') + """The address on which to serve the live reloading docs server.""" + + use_directory_urls = c.Type(bool, default=True) + """If `True`, use `/index.hmtl` style files with hyperlinks to + the directory.If `False`, use `.html style file with + hyperlinks to the file. + True generates nicer URLs, but False is useful if browsing the output on + a filesystem.""" + + repo_url = c.Optional(c.URL()) + """Specify a link to the project source repo to be included + in the documentation pages.""" + + repo_name = c.Optional(c.RepoName('repo_url')) + """A name to use for the link to the project source repo. + Default, If repo_url is unset then None, otherwise + "GitHub", "Bitbucket" or "GitLab" for known url or Hostname + for unknown urls.""" + + edit_uri_template = c.Optional(c.EditURITemplate('edit_uri')) + edit_uri = c.Optional(c.EditURI('repo_url')) + """Specify a URI to the docs dir in the project source repo, relative to the + repo_url. When set, a link directly to the page in the source repo will + be added to the generated HTML. 
If repo_url is not set also, this option + is ignored.""" + + extra_css = c.Type(list, default=[]) + extra_javascript = c.ListOfItems(c.ExtraScript(), default=[]) + """Specify which css or javascript files from the docs directory should be + additionally included in the site.""" + + extra_templates = c.Type(list, default=[]) + """Similar to the above, but each template (HTML or XML) will be build with + Jinja2 and the global context.""" + + markdown_extensions = c.MarkdownExtensions( + builtins=['toc', 'tables', 'fenced_code'], configkey='mdx_configs' + ) + """PyMarkdown extension names.""" + + mdx_configs = c.Private[Dict[str, dict]]() + """PyMarkdown extension configs. Populated from `markdown_extensions`.""" + + strict = c.Type(bool, default=False) + """Enabling strict mode causes MkDocs to stop the build when a problem is + encountered rather than display an error.""" + + remote_branch = c.Type(str, default='gh-pages') + """The remote branch to commit to when using gh-deploy.""" + + remote_name = c.Type(str, default='origin') + """The remote name to push to when using gh-deploy.""" + + extra = c.SubConfig() + """extra is a mapping/dictionary of data that is passed to the template. + This allows template authors to require extra configuration that not + relevant to all themes and doesn't need to be explicitly supported by + MkDocs itself. A good example here would be including the current + project version.""" + + plugins = c.Plugins(theme_key='theme', default=['search']) + """A list of plugins. Each item may contain a string name or a key value pair. + A key value pair should be the string name (as the key) and a dict of config + options (as the value).""" + + hooks = c.Hooks('plugins') + """A list of filenames that will be imported as Python modules and used as + an instance of a plugin each.""" + + watch = c.ListOfPaths(default=[]) + """A list of extra paths to watch while running `mkdocs serve`.""" + + class Validation(base.Config): + class NavValidation(base.Config): + omitted_files = c._LogLevel(default='info') + """Warning level for when a doc file is never mentioned in the navigation. + For granular configuration, see `not_in_nav`.""" + + not_found = c._LogLevel(default='warn') + """Warning level for when the navigation links to a relative path that isn't an existing page on the site.""" + + absolute_links = c._LogLevel(default='info') + """Warning level for when the navigation links to an absolute path (starting with `/`).""" + + nav = c.SubConfig(NavValidation) + + class LinksValidation(base.Config): + not_found = c._LogLevel(default='warn') + """Warning level for when a Markdown doc links to a relative path that isn't an existing document on the site.""" + + absolute_links = c._LogLevel(default='info') + """Warning level for when a Markdown doc links to an absolute path (starting with `/`).""" + + unrecognized_links = c._LogLevel(default='info') + """Warning level for when a Markdown doc links to a relative path that doesn't look like + it could be a valid internal link. For example, if the link ends with `/`.""" + + links = c.SubConfig(LinksValidation) + + validation = c.PropagatingSubConfig[Validation]() + + _current_page: mkdocs.structure.pages.Page | None = None + """The currently rendered page. 
Please do not access this and instead + rely on the `page` argument to event handlers.""" + + def load_dict(self, patch: dict) -> None: + super().load_dict(patch) + if 'config_file_path' in patch: + raise base.ValidationError("Can't set config_file_path in config") -# Once we drop Python 2.6 support, this could be an OrderedDict, however, it -# isn't really needed either as we always sequentially process the schema other -# than at initialisation when we grab the full set of keys for convenience. + def load_file(self, config_file: IO) -> None: + """Load config options from the open file descriptor of a YAML file.""" + loader = get_yaml_loader(config=self) + self.load_dict(yaml_load(config_file, loader)) -DEFAULT_SCHEMA = ( - # Reserved for internal use, stores the mkdocs.yml config file. - ('config_file_path', config_options.Type(str)), - - # The title to use for the documentation - ('site_name', config_options.Type(str, required=True)), - - # Defines the structure of the navigation. - ('nav', config_options.Nav()), - # TODO: remove this when the `pages` config setting is fully deprecated. - ('pages', config_options.Nav()), - - # The full URL to where the documentation will be hosted - ('site_url', config_options.URL()), - - # A description for the documentation project that will be added to the - # HTML meta tags. - ('site_description', config_options.Type(str)), - # The name of the author to add to the HTML meta tags - ('site_author', config_options.Type(str)), - - # The MkDocs theme for the documentation. - ('theme', config_options.Theme(default='mkdocs')), - - # The directory containing the documentation markdown. - ('docs_dir', config_options.Dir(default='docs', exists=True)), - - # The directory where the site will be built to - ('site_dir', config_options.SiteDir(default='site')), - - # A copyright notice to add to the footer of documentation. - ('copyright', config_options.Type(str)), - - # set of values for Google analytics containing the account IO and domain, - # this should look like, ['UA-27795084-5', 'mkdocs.org'] - ('google_analytics', config_options.Type(list, length=2)), - - # The address on which to serve the live reloading docs server. - ('dev_addr', config_options.IpAddress(default='127.0.0.1:8000')), - - # If `True`, use `/index.hmtl` style files with hyperlinks to - # the directory.If `False`, use `.html style file with - # hyperlinks to the file. - # True generates nicer URLs, but False is useful if browsing the output on - # a filesystem. - ('use_directory_urls', config_options.Type(bool, default=True)), - - # Specify a link to the project source repo to be included - # in the documentation pages. - ('repo_url', config_options.RepoURL()), - - # A name to use for the link to the project source repo. - # Default, If repo_url is unset then None, otherwise - # "GitHub", "Bitbucket" or "GitLab" for known url or Hostname - # for unknown urls. - ('repo_name', config_options.Type(str)), - - # Specify a URI to the docs dir in the project source repo, relative to the - # repo_url. When set, a link directly to the page in the source repo will - # be added to the generated HTML. If repo_url is not set also, this option - # is ignored. - ('edit_uri', config_options.Type(str)), - - # Specify which css or javascript files from the docs directory should be - # additionally included in the site. 
- ('extra_css', config_options.Type(list, default=[])), - ('extra_javascript', config_options.Type(list, default=[])), - - # Similar to the above, but each template (HTML or XML) will be build with - # Jinja2 and the global context. - ('extra_templates', config_options.Type(list, default=[])), - - # PyMarkdown extension names. - ('markdown_extensions', config_options.MarkdownExtensions( - builtins=['toc', 'tables', 'fenced_code'], - configkey='mdx_configs', default=[])), - - # PyMarkdown Extension Configs. For internal use only. - ('mdx_configs', config_options.Private()), - - # enabling strict mode causes MkDocs to stop the build when a problem is - # encountered rather than display an error. - ('strict', config_options.Type(bool, default=False)), - - # the remote branch to commit to when using gh-deploy - ('remote_branch', config_options.Type( - str, default='gh-pages')), - - # the remote name to push to when using gh-deploy - ('remote_name', config_options.Type(str, default='origin')), - - # extra is a mapping/dictionary of data that is passed to the template. - # This allows template authors to require extra configuration that not - # relevant to all themes and doesn't need to be explicitly supported by - # MkDocs itself. A good example here would be including the current - # project version. - ('extra', config_options.SubConfig()), - - # a list of plugins. Each item may contain a string name or a key value pair. - # A key value pair should be the string name (as the key) and a dict of config - # options (as the value). - ('plugins', config_options.Plugins(default=['search'])), -) +def get_schema() -> base.PlainConfigSchema: + """Soft-deprecated, do not use.""" + return MkDocsConfig._schema diff --git a/mkdocs/contrib/search/__init__.py b/mkdocs/contrib/search/__init__.py index a2c7f7c..5b8f3d8 100644 --- a/mkdocs/contrib/search/__init__.py +++ b/mkdocs/contrib/search/__init__.py @@ -1,82 +1,117 @@ -import os +from __future__ import annotations + import logging +import os +from typing import TYPE_CHECKING, List + from mkdocs import utils -from mkdocs.plugins import BasePlugin -from mkdocs.config import config_options +from mkdocs.config import base +from mkdocs.config import config_options as c from mkdocs.contrib.search.search_index import SearchIndex +from mkdocs.plugins import BasePlugin +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig + from mkdocs.structure.pages import Page + from mkdocs.util.templates import TemplateContext log = logging.getLogger(__name__) base_path = os.path.dirname(os.path.abspath(__file__)) -class LangOption(config_options.OptionallyRequired): - """ Validate Language(s) provided in config are known languages. 
""" +class LangOption(c.OptionallyRequired[List[str]]): + """Validate Language(s) provided in config are known languages.""" - def lang_file_exists(self, lang): - path = os.path.join(base_path, 'lunr-language', 'lunr.{}.js'.format(lang)) - return os.path.isfile(path) + def get_lunr_supported_lang(self, lang): + fallback = {'uk': 'ru'} + for lang_part in lang.split("_"): + lang_part = lang_part.lower() + lang_part = fallback.get(lang_part, lang_part) + if os.path.isfile(os.path.join(base_path, 'lunr-language', f'lunr.{lang_part}.js')): + return lang_part - def run_validation(self, value): + def run_validation(self, value: object): if isinstance(value, str): value = [value] - elif not isinstance(value, (list, tuple)): - raise config_options.ValidationError('Expected a list of language codes.') - for lang in value: - if lang != 'en' and not self.lang_file_exists(lang): - raise config_options.ValidationError( - '"{}" is not a supported language code.'.format(lang) - ) + if not isinstance(value, list): + raise c.ValidationError('Expected a list of language codes.') + for lang in value[:]: + if lang != 'en': + lang_detected = self.get_lunr_supported_lang(lang) + if not lang_detected: + log.info(f"Option search.lang '{lang}' is not supported, falling back to 'en'") + value.remove(lang) + if 'en' not in value: + value.append('en') + elif lang_detected != lang: + value.remove(lang) + value.append(lang_detected) + log.info(f"Option search.lang '{lang}' switched to '{lang_detected}'") return value -class SearchPlugin(BasePlugin): - """ Add a search feature to MkDocs. """ +class _PluginConfig(base.Config): + lang = c.Optional(LangOption()) + separator = c.Type(str, default=r'[\s\-]+') + min_search_length = c.Type(int, default=3) + prebuild_index = c.Choice((False, True, 'node', 'python'), default=False) + indexing = c.Choice(('full', 'sections', 'titles'), default='full') + - config_scheme = ( - ('lang', LangOption(default=['en'])), - ('separator', config_options.Type(str, default=r'[\s\-]+')), - ('min_search_length', config_options.Type(int, default=3)), - ('prebuild_index', config_options.Choice((False, True, 'node', 'python'), default=False)), - ) +class SearchPlugin(BasePlugin[_PluginConfig]): + """Add a search feature to MkDocs.""" - def on_config(self, config, **kwargs): + def on_config(self, config: MkDocsConfig, **kwargs) -> MkDocsConfig: "Add plugin templates and scripts to config." - if 'include_search_page' in config['theme'] and config['theme']['include_search_page']: - config['theme'].static_templates.add('search.html') - if not ('search_index_only' in config['theme'] and config['theme']['search_index_only']): + if config.theme.get('include_search_page'): + config.theme.static_templates.add('search.html') + if not config.theme.get('search_index_only'): path = os.path.join(base_path, 'templates') - config['theme'].dirs.append(path) - if 'search/main.js' not in config['extra_javascript']: - config['extra_javascript'].append('search/main.js') + config.theme.dirs.append(path) + if 'search/main.js' not in config.extra_javascript: + config.extra_javascript.append('search/main.js') # type: ignore + if self.config.lang is None: + # lang setting undefined. Set default based on theme locale + validate = _PluginConfig.lang.run_validation + self.config.lang = validate(config.theme.locale.language) + # The `python` method of `prebuild_index` is pending deprecation as of version 1.2. + # TODO: Raise a deprecation warning in a future release (1.3?). 
+ if self.config.prebuild_index == 'python': + log.info( + "The 'python' method of the search plugin's 'prebuild_index' config option " + "is pending deprecation and will not be supported in a future release." + ) return config - def on_pre_build(self, config, **kwargs): + def on_pre_build(self, config: MkDocsConfig, **kwargs) -> None: "Create search index instance for later use." self.search_index = SearchIndex(**self.config) - def on_page_context(self, context, **kwargs): + def on_page_context(self, context: TemplateContext, page: Page, **kwargs) -> None: "Add page to search index." - self.search_index.add_entry_from_context(context['page']) + self.search_index.add_entry_from_context(page) - def on_post_build(self, config, **kwargs): + def on_post_build(self, config: MkDocsConfig, **kwargs) -> None: "Build search index." - output_base_path = os.path.join(config['site_dir'], 'search') + output_base_path = os.path.join(config.site_dir, 'search') search_index = self.search_index.generate_search_index() json_output_path = os.path.join(output_base_path, 'search_index.json') utils.write_file(search_index.encode('utf-8'), json_output_path) - if not ('search_index_only' in config['theme'] and config['theme']['search_index_only']): + assert self.config.lang is not None + if not config.theme.get('search_index_only'): # Include language support files in output. Copy them directly # so that only the needed files are included. files = [] - if len(self.config['lang']) > 1 or 'en' not in self.config['lang']: + if len(self.config.lang) > 1 or 'en' not in self.config.lang: files.append('lunr.stemmer.support.js') - if len(self.config['lang']) > 1: + if len(self.config.lang) > 1: files.append('lunr.multi.js') - for lang in self.config['lang']: - if (lang != 'en'): - files.append('lunr.{}.js'.format(lang)) + if 'ja' in self.config.lang or 'jp' in self.config.lang: + files.append('tinyseg.js') + for lang in self.config.lang: + if lang != 'en': + files.append(f'lunr.{lang}.js') for filename in files: from_path = os.path.join(base_path, 'lunr-language', filename) diff --git a/mkdocs/contrib/search/prebuild-index.js b/mkdocs/contrib/search/prebuild-index.js index ae26da4..835d17b 100644 --- a/mkdocs/contrib/search/prebuild-index.js +++ b/mkdocs/contrib/search/prebuild-index.js @@ -22,6 +22,9 @@ stdin.on('end', function () { if (lang.length > 1) { require('./lunr-language/lunr.multi')(lunr); } + if (lang.includes("ja") || lang.includes("jp")) { + require('./lunr-language/tinyseg')(lunr); + } for (var i=0; i < lang.length; i++) { if (lang[i] != 'en') { require('./lunr-language/lunr.' + lang[i])(lunr); diff --git a/mkdocs/contrib/search/search_index.py b/mkdocs/contrib/search/search_index.py index 1aa9fa7..f736121 100644 --- a/mkdocs/contrib/search/search_index.py +++ b/mkdocs/contrib/search/search_index.py @@ -1,12 +1,23 @@ -import os -import re +from __future__ import annotations + import json import logging +import os +import re import subprocess +from html.parser import HTMLParser +from typing import TYPE_CHECKING -from lunr import lunr +if TYPE_CHECKING: + from mkdocs.structure.pages import Page + from mkdocs.structure.toc import AnchorLink, TableOfContents -from html.parser import HTMLParser +try: + from lunr import lunr + + haslunrpy = True +except ImportError: + haslunrpy = False log = logging.getLogger(__name__) @@ -17,11 +28,11 @@ class SearchIndex: tags and their following content are sections). 
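    How much text is stored per entry depends on the plugin's `indexing` setting: 'full' keeps
    page and section text, 'sections' keeps the page and section entries but drops their body
    text, and 'titles' records only a single, text-less entry per page.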
""" - def __init__(self, **config): - self._entries = [] + def __init__(self, **config) -> None: + self._entries: list[dict] = [] self.config = config - def _find_toc_by_id(self, toc, id_): + def _find_toc_by_id(self, toc, id_: str | None) -> AnchorLink | None: """ Given a table of contents and HTML ID, iterate through and return the matched item in the TOC. @@ -32,32 +43,28 @@ def _find_toc_by_id(self, toc, id_): toc_item_r = self._find_toc_by_id(toc_item.children, id_) if toc_item_r is not None: return toc_item_r + return None - def _add_entry(self, title, text, loc): + def _add_entry(self, title: str | None, text: str, loc: str) -> None: """ - A simple wrapper to add an entry and ensure the contents - is UTF8 encoded. + A simple wrapper to add an entry, dropping bad characters. """ text = text.replace('\u00a0', ' ') text = re.sub(r'[ \t\n\r\f\v]+', ' ', text.strip()) - self._entries.append({ - 'title': title, - 'text': str(text.encode('utf-8'), encoding='utf-8'), - 'location': loc - }) + self._entries.append({'title': title, 'text': text, 'location': loc}) - def add_entry_from_context(self, page): + def add_entry_from_context(self, page: Page) -> None: """ Create a set of entries in the index for a page. One for the page itself and then one for each of its' heading tags. """ - # Create the content parser and feed in the HTML for the # full page. This handles all the parsing and prepares # us to iterate through it. parser = ContentParser() + assert page.content is not None parser.feed(page.content) parser.close() @@ -66,113 +73,95 @@ def add_entry_from_context(self, page): url = page.url # Create an entry for the full page. - self._add_entry( - title=page.title, - text=self.strip_tags(page.content).rstrip('\n'), - loc=url - ) + text = parser.stripped_html.rstrip('\n') if self.config['indexing'] == 'full' else '' + self._add_entry(title=page.title, text=text, loc=url) - for section in parser.data: - self.create_entry_for_section(section, page.toc, url) + if self.config['indexing'] in ['full', 'sections']: + for section in parser.data: + self.create_entry_for_section(section, page.toc, url) - def create_entry_for_section(self, section, toc, abs_url): + def create_entry_for_section( + self, section: ContentSection, toc: TableOfContents, abs_url: str + ) -> None: """ Given a section on the page, the table of contents and the absolute url for the page create an entry in the index """ - toc_item = self._find_toc_by_id(toc, section.id) + text = ' '.join(section.text) if self.config['indexing'] == 'full' else '' if toc_item is not None: - self._add_entry( - title=toc_item.title, - text=" ".join(section.text), - loc=abs_url + toc_item.url - ) + self._add_entry(title=toc_item.title, text=text, loc=abs_url + toc_item.url) - def generate_search_index(self): + def generate_search_index(self) -> str: """python to json conversion""" - page_dicts = { - 'docs': self._entries, - 'config': self.config - } - data = json.dumps(page_dicts, sort_keys=True, separators=(',', ':')) + page_dicts = {'docs': self._entries, 'config': self.config} + data = json.dumps(page_dicts, sort_keys=True, separators=(',', ':'), default=str) if self.config['prebuild_index'] in (True, 'node'): try: - script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'prebuild-index.js') + script_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), 'prebuild-index.js' + ) p = subprocess.Popen( ['node', script_path], stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE + stderr=subprocess.PIPE, + 
encoding='utf-8', ) - idx, err = p.communicate(data.encode('utf-8')) + idx, err = p.communicate(data) if not err: - idx = idx.decode('utf-8') if hasattr(idx, 'decode') else idx page_dicts['index'] = json.loads(idx) data = json.dumps(page_dicts, sort_keys=True, separators=(',', ':')) log.debug('Pre-built search index created successfully.') else: - log.warning('Failed to pre-build search index. Error: {}'.format(err)) + log.warning(f'Failed to pre-build search index. Error: {err}') except (OSError, ValueError) as e: - log.warning('Failed to pre-build search index. Error: {}'.format(e)) + log.warning(f'Failed to pre-build search index. Error: {e}') elif self.config['prebuild_index'] == 'python': - idx = lunr( - ref='location', fields=('title', 'text'), documents=self._entries, - languages=self.config['lang']) - page_dicts['index'] = idx.serialize() - data = json.dumps(page_dicts, sort_keys=True, separators=(',', ':')) + if haslunrpy: + lunr_idx = lunr( + ref='location', + fields=('title', 'text'), + documents=self._entries, + languages=self.config['lang'], + ) + page_dicts['index'] = lunr_idx.serialize() + data = json.dumps(page_dicts, sort_keys=True, separators=(',', ':')) + else: + log.warning( + "Failed to pre-build search index. The 'python' method was specified; " + "however, the 'lunr.py' library does not appear to be installed. Try " + "installing it with 'pip install lunr'. If you are using any language " + "other than English you will also need to install 'lunr[languages]'." + ) return data - def strip_tags(self, html): - """strip html tags from data""" - s = HTMLStripper() - s.feed(html) - return s.get_data() - - -class HTMLStripper(HTMLParser): - """ - A simple HTML parser that stores all of the data within tags - but ignores the tags themselves and thus strips them from the - content. - """ - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.data = [] - - def handle_data(self, d): - """ - Called for the text contents of each tag. - """ - self.data.append(d) - - def get_data(self): - return '\n'.join(self.data) - class ContentSection: """ Used by the ContentParser class to capture the information we - need when it is parsing the HMTL. + need when it is parsing the HTML. """ - def __init__(self, text=None, id_=None, title=None): + def __init__( + self, + text: list[str] | None = None, + id_: str | None = None, + title: str | None = None, + ) -> None: self.text = text or [] self.id = id_ self.title = title def __eq__(self, other): - return all([ - self.text == other.text, - self.id == other.id, - self.title == other.title - ]) + return self.text == other.text and self.id == other.id and self.title == other.title + + +_HEADER_TAGS = tuple(f"h{x}" for x in range(1, 7)) class ContentParser(HTMLParser): @@ -182,19 +171,19 @@ class ContentParser(HTMLParser): for that section. """ - def __init__(self, *args, **kwargs): - + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self.data = [] - self.section = None + self.data: list[ContentSection] = [] + self.section: ContentSection | None = None self.is_header_tag = False + self._stripped_html: list[str] = [] - def handle_starttag(self, tag, attrs): + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: """Called at the start of every HTML tag.""" # We only care about the opening tag for headings. 
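        # (_HEADER_TAGS is ("h1", ..., "h6"); e.g. an `<h2 id="usage">` start tag opens a new
        # ContentSection with id "usage", while `<p>` or `<div>` tags are ignored here.)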
- if tag not in (["h%d" % x for x in range(1, 7)]): + if tag not in _HEADER_TAGS: return # We are dealing with a new header, create a new section @@ -207,19 +196,20 @@ def handle_starttag(self, tag, attrs): if attr[0] == "id": self.section.id = attr[1] - def handle_endtag(self, tag): + def handle_endtag(self, tag: str) -> None: """Called at the end of every HTML tag.""" # We only care about the opening tag for headings. - if tag not in (["h%d" % x for x in range(1, 7)]): + if tag not in _HEADER_TAGS: return self.is_header_tag = False - def handle_data(self, data): + def handle_data(self, data: str) -> None: """ Called for the text contents of each tag. """ + self._stripped_html.append(data) if self.section is None: # This means we have some content at the start of the @@ -235,3 +225,7 @@ def handle_data(self, data): self.section.title = data else: self.section.text.append(data.rstrip('\n')) + + @property + def stripped_html(self) -> str: + return '\n'.join(self._stripped_html) diff --git a/mkdocs/contrib/search/templates/search/lunr.js b/mkdocs/contrib/search/templates/search/lunr.js index c353765..aca0a16 100644 --- a/mkdocs/contrib/search/templates/search/lunr.js +++ b/mkdocs/contrib/search/templates/search/lunr.js @@ -1,6 +1,6 @@ /** - * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.8 - * Copyright (C) 2019 Oliver Nightingale + * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 2.3.9 + * Copyright (C) 2020 Oliver Nightingale * @license MIT */ @@ -54,10 +54,10 @@ var lunr = function (config) { return builder.build() } -lunr.version = "2.3.8" +lunr.version = "2.3.9" /*! * lunr.utils - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -177,7 +177,7 @@ lunr.FieldRef.prototype.toString = function () { } /*! * lunr.Set - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -211,8 +211,8 @@ lunr.Set.complete = { return other }, - union: function (other) { - return other + union: function () { + return this }, contains: function () { @@ -389,7 +389,7 @@ lunr.Token.prototype.clone = function (fn) { } /*! * lunr.tokenizer - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -465,7 +465,7 @@ lunr.tokenizer = function (obj, metadata) { lunr.tokenizer.separator = /[\s\-]+/ /*! * lunr.Pipeline - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -732,7 +732,7 @@ lunr.Pipeline.prototype.toJSON = function () { } /*! * lunr.Vector - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -929,7 +929,7 @@ lunr.Vector.prototype.toJSON = function () { /* eslint-disable */ /*! * lunr.stemmer - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale * Includes code from - http://tartarus.org/~martin/PorterStemmer/js.txt */ @@ -1151,7 +1151,7 @@ lunr.stemmer = (function(){ lunr.Pipeline.registerFunction(lunr.stemmer, 'stemmer') /*! * lunr.stopWordFilter - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -1316,7 +1316,7 @@ lunr.stopWordFilter = lunr.generateStopWordFilter([ lunr.Pipeline.registerFunction(lunr.stopWordFilter, 'stopWordFilter') /*! * lunr.trimmer - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -1343,7 +1343,7 @@ lunr.trimmer = function (token) { lunr.Pipeline.registerFunction(lunr.trimmer, 'trimmer') /*! 
* lunr.TokenSet - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -1827,7 +1827,7 @@ lunr.TokenSet.Builder.prototype.minimize = function (downTo) { } /*! * lunr.Index - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -1994,7 +1994,7 @@ lunr.Index.prototype.query = function (fn) { */ var clause = query.clauses[i], terms = null, - clauseMatches = lunr.Set.complete + clauseMatches = lunr.Set.empty if (clause.usePipeline) { terms = this.pipeline.runString(clause.term, { @@ -2319,7 +2319,7 @@ lunr.Index.load = function (serializedIndex) { } /*! * lunr.Builder - * Copyright (C) 2019 Oliver Nightingale + * Copyright (C) 2020 Oliver Nightingale */ /** @@ -3456,7 +3456,7 @@ lunr.QueryParser.parseBoost = function (parser) { } else if (typeof exports === 'object') { /** * Node. Does not work with strict CommonJS, but - * only CommonJS-like enviroments that support module.exports, + * only CommonJS-like environments that support module.exports, * like Node. */ module.exports = factory() diff --git a/mkdocs/contrib/search/templates/search/main.js b/mkdocs/contrib/search/templates/search/main.js index 0e1fc81..a5e469d 100644 --- a/mkdocs/contrib/search/templates/search/main.js +++ b/mkdocs/contrib/search/templates/search/main.js @@ -21,8 +21,15 @@ function joinUrl (base, path) { return base + "/" + path; } +function escapeHtml (value) { + return value.replace(/&/g, '&') + .replace(/"/g, '"') + .replace(//g, '>'); +} + function formatResult (location, title, summary) { - return ''; + return ''; } function displayResults (results) { @@ -37,7 +44,11 @@ function displayResults (results) { search_results.insertAdjacentHTML('beforeend', html); } } else { - search_results.insertAdjacentHTML('beforeend', "

<p>No results found</p>"); + var noResultsText = search_results.getAttribute('data-no-results-text'); + if (!noResultsText) { + noResultsText = "No results found"; + } + search_results.insertAdjacentHTML('beforeend', '<p>' + noResultsText + '</p>
'); } } diff --git a/mkdocs/contrib/search/templates/search/worker.js b/mkdocs/contrib/search/templates/search/worker.js index 9cce2f7..8628dbc 100644 --- a/mkdocs/contrib/search/templates/search/worker.js +++ b/mkdocs/contrib/search/templates/search/worker.js @@ -44,6 +44,9 @@ function onJSONLoaded () { if (lang.length > 1) { scriptsToLoad.push('lunr.multi.js'); } + if (lang.includes("ja") || lang.includes("jp")) { + scriptsToLoad.push('tinyseg.js'); + } for (var i=0; i < lang.length; i++) { if (lang[i] != 'en') { scriptsToLoad.push(['lunr', lang[i], 'js'].join('.')); diff --git a/mkdocs/exceptions.py b/mkdocs/exceptions.py index f968df3..d28005e 100644 --- a/mkdocs/exceptions.py +++ b/mkdocs/exceptions.py @@ -1,9 +1,33 @@ -from click import ClickException +from __future__ import annotations + +from click import ClickException, echo class MkDocsException(ClickException): - """Base exceptions for all MkDocs Exceptions""" + """The base class which all MkDocs exceptions inherit from. This should + not be raised directly. One of the subclasses should be raised instead.""" + + +class Abort(MkDocsException, SystemExit): + """Abort the build""" + + code = 1 + + def show(self, *args, **kwargs) -> None: + echo('\n' + self.format_message()) class ConfigurationError(MkDocsException): - """Error in configuration""" + """This error is raised by configuration validation when a validation error + is encountered. This error should be raised by any configuration options + defined in a plugin's [config_scheme][].""" + + +class BuildError(MkDocsException): + """This error may be raised by MkDocs during the build process. Plugins should + not raise this error.""" + + +class PluginError(BuildError): + """A subclass of [`mkdocs.exceptions.BuildError`][] which can be raised by plugin + events.""" diff --git a/mkdocs/livereload/__init__.py b/mkdocs/livereload/__init__.py new file mode 100644 index 0000000..1057c9c --- /dev/null +++ b/mkdocs/livereload/__init__.py @@ -0,0 +1,364 @@ +from __future__ import annotations + +import functools +import io +import ipaddress +import logging +import mimetypes +import os +import os.path +import pathlib +import posixpath +import re +import socket +import socketserver +import string +import sys +import threading +import time +import traceback +import urllib.parse +import warnings +import wsgiref.simple_server +import wsgiref.util +from typing import Any, BinaryIO, Callable, Iterable + +import watchdog.events +import watchdog.observers.polling + +_SCRIPT_TEMPLATE_STR = """ +var livereload = function(epoch, requestId) { + var req = new XMLHttpRequest(); + req.onloadend = function() { + window.removeEventListener("beforeunload", abort); + if (parseFloat(this.responseText) > epoch) { + location.reload(); + return; + } + var launchNext = livereload.bind(this, epoch, requestId); + if (this.status === 200) { + launchNext(); + } else { + setTimeout(launchNext, 3000); + } + }; + var abort = req.abort.bind(req); + window.addEventListener("beforeunload", abort); + req.open("GET", "/livereload/" + epoch + "/" + requestId); + req.send(); + + console.log('Enabled live reload'); +} +livereload(${epoch}, ${request_id}); +""" +_SCRIPT_TEMPLATE = string.Template(_SCRIPT_TEMPLATE_STR) + + +class _LoggerAdapter(logging.LoggerAdapter): + def process(self, msg: str, kwargs: dict) -> tuple[str, dict]: # type: ignore[override] + return time.strftime("[%H:%M:%S] ") + msg, kwargs + + +log = _LoggerAdapter(logging.getLogger(__name__), {}) + + +class LiveReloadServer(socketserver.ThreadingMixIn, 
wsgiref.simple_server.WSGIServer): + daemon_threads = True + poll_response_timeout = 60 + + def __init__( + self, + builder: Callable[[], None], + host: str, + port: int, + root: str, + mount_path: str = "/", + polling_interval: float = 0.5, + shutdown_delay: float = 0.25, + ) -> None: + self.builder = builder + self.server_name = host + self.server_port = port + try: + if isinstance(ipaddress.ip_address(host), ipaddress.IPv6Address): + self.address_family = socket.AF_INET6 + except Exception: + pass + self.root = os.path.abspath(root) + self.mount_path = ("/" + mount_path.lstrip("/")).rstrip("/") + "/" + self.url = f"http://{self.server_name}:{self.server_port}{self.mount_path}" + self.build_delay = 0.1 + self.shutdown_delay = shutdown_delay + # To allow custom error pages. + self.error_handler: Callable[[int], bytes | None] = lambda code: None + + super().__init__((host, port), _Handler, bind_and_activate=False) + self.set_app(self.serve_request) + + self._wanted_epoch = _timestamp() # The version of the site that started building. + self._visible_epoch = self._wanted_epoch # Latest fully built version of the site. + self._epoch_cond = threading.Condition() # Must be held when accessing _visible_epoch. + + self._to_rebuild: dict[ + Callable[[], None], bool + ] = {} # Used as an ordered set of functions to call. + self._rebuild_cond = threading.Condition() # Must be held when accessing _to_rebuild. + + self._shutdown = False + self.serve_thread = threading.Thread(target=lambda: self.serve_forever(shutdown_delay)) + self.observer = watchdog.observers.polling.PollingObserver(timeout=polling_interval) + + self._watched_paths: dict[str, int] = {} + self._watch_refs: dict[str, Any] = {} + + def watch( + self, path: str, func: Callable[[], None] | None = None, recursive: bool = True + ) -> None: + """Add the 'path' to watched paths, call the function and reload when any file changes under it.""" + path = os.path.abspath(path) + if func is None or func is self.builder: + funct = self.builder + else: + funct = func + warnings.warn( + "Plugins should not pass the 'func' parameter of watch(). " + "The ability to execute custom callbacks will be removed soon.", + DeprecationWarning, + stacklevel=2, + ) + + if path in self._watched_paths: + self._watched_paths[path] += 1 + return + self._watched_paths[path] = 1 + + def callback(event): + if event.is_directory: + return + log.debug(str(event)) + with self._rebuild_cond: + self._to_rebuild[funct] = True + self._rebuild_cond.notify_all() + + handler = watchdog.events.FileSystemEventHandler() + handler.on_any_event = callback # type: ignore[method-assign] + log.debug(f"Watching '{path}'") + self._watch_refs[path] = self.observer.schedule(handler, path, recursive=recursive) + + def unwatch(self, path: str) -> None: + """Stop watching file changes for path. 
Raises if there was no corresponding `watch` call.""" + path = os.path.abspath(path) + + self._watched_paths[path] -= 1 + if self._watched_paths[path] <= 0: + self._watched_paths.pop(path) + self.observer.unschedule(self._watch_refs.pop(path)) + + def serve(self): + self.server_bind() + self.server_activate() + + if self._watched_paths: + self.observer.start() + + paths_str = ", ".join(f"'{_try_relativize_path(path)}'" for path in self._watched_paths) + log.info(f"Watching paths for changes: {paths_str}") + + log.info(f"Serving on {self.url}") + self.serve_thread.start() + + self._build_loop() + + def _build_loop(self): + while True: + with self._rebuild_cond: + while not self._rebuild_cond.wait_for( + lambda: self._to_rebuild or self._shutdown, timeout=self.shutdown_delay + ): + # We could have used just one wait instead of a loop + timeout, but we need + # occasional breaks, otherwise on Windows we can't receive KeyboardInterrupt. + pass + if self._shutdown: + break + log.info("Detected file changes") + while self._rebuild_cond.wait(timeout=self.build_delay): + log.debug("Waiting for file changes to stop happening") + + self._wanted_epoch = _timestamp() + funcs = list(self._to_rebuild) + self._to_rebuild.clear() + + try: + for func in funcs: + func() + except Exception as e: + if isinstance(e, SystemExit): + print(e, file=sys.stderr) # noqa: T201 + else: + traceback.print_exc() + log.error( + "An error happened during the rebuild. The server will appear stuck until build errors are resolved." + ) + continue + + with self._epoch_cond: + log.info("Reloading browsers") + self._visible_epoch = self._wanted_epoch + self._epoch_cond.notify_all() + + def shutdown(self, wait=False) -> None: + self.observer.stop() + with self._rebuild_cond: + self._shutdown = True + self._rebuild_cond.notify_all() + + if self.serve_thread.is_alive(): + super().shutdown() + self.server_close() + if wait: + self.serve_thread.join() + self.observer.join() + + def serve_request(self, environ, start_response) -> Iterable[bytes]: + try: + result = self._serve_request(environ, start_response) + except Exception: + code = 500 + msg = "500 Internal Server Error" + log.exception(msg) + else: + if result is not None: + return result + code = 404 + msg = "404 Not Found" + + error_content = None + try: + error_content = self.error_handler(code) + except Exception: + log.exception("Failed to render an error message!") + if error_content is None: + error_content = msg.encode() + + start_response(msg, [("Content-Type", "text/html")]) + return [error_content] + + def _serve_request(self, environ, start_response) -> Iterable[bytes] | None: + # https://bugs.python.org/issue16679 + # https://github.com/bottlepy/bottle/blob/f9b1849db4/bottle.py#L984 + path = environ["PATH_INFO"].encode("latin-1").decode("utf-8", "ignore") + + if path.startswith("/livereload/"): + m = re.fullmatch(r"/livereload/([0-9]+)/[0-9]+", path) + if m: + epoch = int(m[1]) + start_response("200 OK", [("Content-Type", "text/plain")]) + + def condition(): + return self._visible_epoch > epoch + + with self._epoch_cond: + if not condition(): + # Stall the browser, respond as soon as there's something new. + # If there's not, respond anyway after a minute. 
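                    # ("a minute" here is self.poll_response_timeout, which defaults to 60 seconds.)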
+ self._log_poll_request(environ.get("HTTP_REFERER"), request_id=path) + self._epoch_cond.wait_for(condition, timeout=self.poll_response_timeout) + return [b"%d" % self._visible_epoch] + + if (path + "/").startswith(self.mount_path): + rel_file_path = path[len(self.mount_path) :] + + if path.endswith("/"): + rel_file_path += "index.html" + # Prevent directory traversal - normalize the path. + rel_file_path = posixpath.normpath("/" + rel_file_path).lstrip("/") + file_path = os.path.join(self.root, rel_file_path) + elif path == "/": + start_response("302 Found", [("Location", urllib.parse.quote(self.mount_path))]) + return [] + else: + return None # Not found + + # Wait until the ongoing rebuild (if any) finishes, so we're not serving a half-built site. + with self._epoch_cond: + self._epoch_cond.wait_for(lambda: self._visible_epoch == self._wanted_epoch) + epoch = self._visible_epoch + + try: + file: BinaryIO = open(file_path, "rb") + except OSError: + if not path.endswith("/") and os.path.isfile(os.path.join(file_path, "index.html")): + start_response("302 Found", [("Location", urllib.parse.quote(path) + "/")]) + return [] + return None # Not found + + if self._watched_paths and file_path.endswith(".html"): + with file: + content = file.read() + content = self._inject_js_into_html(content, epoch) + file = io.BytesIO(content) + content_length = len(content) + else: + content_length = os.path.getsize(file_path) + + content_type = self._guess_type(file_path) + start_response( + "200 OK", [("Content-Type", content_type), ("Content-Length", str(content_length))] + ) + return wsgiref.util.FileWrapper(file) + + def _inject_js_into_html(self, content, epoch): + try: + body_end = content.rindex(b"") + except ValueError: + body_end = len(content) + # The page will reload if the livereload poller returns a newer epoch than what it knows. + # The other timestamp becomes just a unique identifier for the initiating page. + script = _SCRIPT_TEMPLATE.substitute(epoch=epoch, request_id=_timestamp()) + return b"%b%b" % ( + content[:body_end], + script.encode(), + content[body_end:], + ) + + @classmethod + @functools.lru_cache() # "Cache" to not repeat the same message for the same browser tab. + def _log_poll_request(cls, url, request_id): + log.info(f"Browser connected: {url}") + + @classmethod + def _guess_type(cls, path): + # MkDocs only ensures a few common types (as seen in livereload_tests.py::test_mime_types). + # Other uncommon types will not be accepted. 
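        # For example (an illustrative sketch of the expected behaviour, not part of the upstream change):
        #   _guess_type("js/base.mjs")    -> "application/javascript"
        #   _guess_type("sitemap.xml.gz") -> "application/gzip"
        #   _guess_type("img/grid.png")   -> "image/png" (from mimetypes.guess_type)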
+ if path.endswith((".js", ".JS", ".mjs")): + return "application/javascript" + if path.endswith(".gz"): + return "application/gzip" + + guess, _ = mimetypes.guess_type(path) + if guess: + return guess + return "application/octet-stream" + + +class _Handler(wsgiref.simple_server.WSGIRequestHandler): + def log_request(self, code="-", size="-"): + level = logging.DEBUG if str(code) == "200" else logging.WARNING + log.log(level, f'"{self.requestline}" code {code}') + + def log_message(self, format, *args): + log.debug(format, *args) + + +def _timestamp() -> int: + return round(time.monotonic() * 1000) + + +def _try_relativize_path(path: str) -> str: + """Make the path relative to current directory if it's under that directory.""" + p = pathlib.Path(path) + try: + p = p.relative_to(os.getcwd()) + except ValueError: + pass + return str(p) diff --git a/mkdocs/localization.py b/mkdocs/localization.py new file mode 100644 index 0000000..87a2756 --- /dev/null +++ b/mkdocs/localization.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +import logging +import os +from typing import TYPE_CHECKING, Sequence + +from jinja2.ext import Extension, InternationalizationExtension + +from mkdocs.config.base import ValidationError + +if TYPE_CHECKING: + import jinja2 + +try: + from babel.core import Locale, UnknownLocaleError + from babel.support import NullTranslations, Translations + + has_babel = True +except ImportError: # pragma: no cover + from mkdocs.utils.babel_stub import Locale, UnknownLocaleError + + has_babel = False + + +log = logging.getLogger(__name__) +base_path = os.path.dirname(os.path.abspath(__file__)) + + +class NoBabelExtension(InternationalizationExtension): # pragma: no cover + def __init__(self, environment): + Extension.__init__(self, environment) + environment.extend( + install_null_translations=self._install_null, + newstyle_gettext=False, + ) + + +def parse_locale(locale: str) -> Locale: + try: + return Locale.parse(locale, sep='_') + except (ValueError, UnknownLocaleError, TypeError) as e: + raise ValidationError(f'Invalid value for locale: {e}') + + +def install_translations( + env: jinja2.Environment, locale: Locale, theme_dirs: Sequence[str] +) -> None: + if has_babel: + env.add_extension('jinja2.ext.i18n') + translations = _get_merged_translations(theme_dirs, 'locales', locale) + if translations is not None: + env.install_gettext_translations(translations) + else: + env.install_null_translations() + if locale.language != 'en': + log.warning( + f"No translations could be found for the locale '{locale}'. " + 'Defaulting to English.' 
+ ) + else: # pragma: no cover + # no babel installed, add dummy support for trans/endtrans blocks + env.add_extension(NoBabelExtension) + env.install_null_translations() + + +def _get_merged_translations( + theme_dirs: Sequence[str], locales_dir: str, locale: Locale +) -> Translations | None: + merged_translations: Translations | None = None + + log.debug(f"Looking for translations for locale '{locale}'") + if locale.territory: + locale_str = f"{locale.language}_{locale.territory}" + else: + locale_str = locale.language + for theme_dir in reversed(theme_dirs): + dirname = os.path.join(theme_dir, locales_dir) + translations = Translations.load(dirname, [locale_str]) + + if type(translations) is NullTranslations: + log.debug(f"No translations found here: '{dirname}'") + continue + + log.debug(f"Translations found here: '{dirname}'") + if merged_translations is None: + merged_translations = translations + else: + merged_translations.merge(translations) + + return merged_translations diff --git a/mkdocs/plugins.py b/mkdocs/plugins.py index f2e5fbb..d25e61e 100644 --- a/mkdocs/plugins.py +++ b/mkdocs/plugins.py @@ -2,53 +2,456 @@ Implements the plugin API for MkDocs. """ +from __future__ import annotations - -import pkg_resources import logging -from collections import OrderedDict +import sys +from typing import TYPE_CHECKING, Any, Callable, Generic, MutableMapping, TypeVar, overload + +if sys.version_info >= (3, 10): + from importlib.metadata import EntryPoint, entry_points +else: + from importlib_metadata import EntryPoint, entry_points + +if TYPE_CHECKING: + import jinja2.environment -from mkdocs.config.base import Config + if sys.version_info >= (3, 8): + from typing import Literal + else: + from typing_extensions import Literal + +from mkdocs import utils +from mkdocs.config.base import Config, ConfigErrors, ConfigWarnings, LegacyConfig, PlainConfigSchema + +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig + from mkdocs.livereload import LiveReloadServer + from mkdocs.structure.files import Files + from mkdocs.structure.nav import Navigation + from mkdocs.structure.pages import Page + from mkdocs.utils.templates import TemplateContext log = logging.getLogger('mkdocs.plugins') -EVENTS = ( - 'config', 'pre_build', 'files', 'nav', 'env', 'pre_template', 'template_context', - 'post_template', 'pre_page', 'page_read_source', 'page_markdown', - 'page_content', 'page_context', 'post_page', 'post_build', 'serve' -) +def get_plugins() -> dict[str, EntryPoint]: + """Return a dict of all installed Plugins as {name: EntryPoint}.""" + + plugins = entry_points(group='mkdocs.plugins') + # Allow third-party plugins to override core plugins + pluginmap = {} + for plugin in plugins: + if plugin.name in pluginmap and plugin.value.startswith("mkdocs.contrib."): + continue -def get_plugins(): - """ Return a dict of all installed Plugins by name. """ + pluginmap[plugin.name] = plugin - plugins = pkg_resources.iter_entry_points(group='mkdocs.plugins') + return pluginmap - return {plugin.name: plugin for plugin in plugins} +SomeConfig = TypeVar('SomeConfig', bound=Config) -class BasePlugin: + +class BasePlugin(Generic[SomeConfig]): """ Plugin base class. All plugins should subclass this class. 
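    A minimal sketch of a subclass with a typed config (a hypothetical plugin, using only the
    APIs defined in this module and in mkdocs.config):

        from mkdocs.config import base, config_options as c
        from mkdocs.plugins import BasePlugin

        class MyPluginConfig(base.Config):
            enabled = c.Type(bool, default=True)

        class MyPlugin(BasePlugin[MyPluginConfig]):
            def on_pre_build(self, *, config):
                if self.config.enabled:
                    ...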
""" - config_scheme = () - config = {} + config_class: type[SomeConfig] = LegacyConfig # type: ignore[assignment] + config_scheme: PlainConfigSchema = () + config: SomeConfig = {} # type: ignore[assignment] + + supports_multiple_instances: bool = False + """Set to true in subclasses to declare support for adding the same plugin multiple times.""" + + def __class_getitem__(cls, config_class: type[Config]): + """Eliminates the need to write `config_class = FooConfig` when subclassing BasePlugin[FooConfig]""" + name = f'{cls.__name__}[{config_class.__name__}]' + return type(name, (cls,), dict(config_class=config_class)) + + def __init_subclass__(cls): + if not issubclass(cls.config_class, Config): + raise TypeError( + f"config_class {cls.config_class} must be a subclass of `mkdocs.config.base.Config`" + ) + if cls.config_class is not LegacyConfig: + cls.config_scheme = cls.config_class._schema # For compatibility. - def load_config(self, options, config_file_path=None): - """ Load config from a dict of options. Returns a tuple of (errors, warnings).""" + def load_config( + self, options: dict[str, Any], config_file_path: str | None = None + ) -> tuple[ConfigErrors, ConfigWarnings]: + """Load config from a dict of options. Returns a tuple of (errors, warnings).""" + + if self.config_class is LegacyConfig: + self.config = LegacyConfig(self.config_scheme, config_file_path=config_file_path) # type: ignore + else: + self.config = self.config_class(config_file_path=config_file_path) - self.config = Config(schema=self.config_scheme, config_file_path=config_file_path) self.config.load_dict(options) return self.config.validate() + # One-time events + + def on_startup(self, *, command: Literal['build', 'gh-deploy', 'serve'], dirty: bool) -> None: + """ + The `startup` event runs once at the very beginning of an `mkdocs` invocation. + + New in MkDocs 1.4. + + The presence of an `on_startup` method (even if empty) migrates the plugin to the new + system where the plugin object is kept across builds within one `mkdocs serve`. + + Note that for initializing variables, the `__init__` method is still preferred. + For initializing per-build variables (and whenever in doubt), use the `on_config` event. + + Parameters: + command: the command that MkDocs was invoked with, e.g. "serve" for `mkdocs serve`. + dirty: whether `--dirty` flag was passed. + """ + + def on_shutdown(self) -> None: + """ + The `shutdown` event runs once at the very end of an `mkdocs` invocation, before exiting. + + This event is relevant only for support of `mkdocs serve`, otherwise within a + single build it's undistinguishable from `on_post_build`. + + New in MkDocs 1.4. + + The presence of an `on_shutdown` method (even if empty) migrates the plugin to the new + system where the plugin object is kept across builds within one `mkdocs serve`. + + Note the `on_post_build` method is still preferred for cleanups, when possible, as it has + a much higher chance of actually triggering. `on_shutdown` is "best effort" because it + relies on detecting a graceful shutdown of MkDocs. + """ + + def on_serve( + self, server: LiveReloadServer, *, config: MkDocsConfig, builder: Callable + ) -> LiveReloadServer | None: + """ + The `serve` event is only called when the `serve` command is used during + development. It runs only once, after the first build finishes. + It is passed the `Server` instance which can be modified before + it is activated. 
For example, additional files or directories could be added + to the list of "watched" files for auto-reloading. + + Parameters: + server: `livereload.Server` instance + config: global configuration object + builder: a callable which gets passed to each call to `server.watch` + + Returns: + `livereload.Server` instance + """ + return server + + # Global events + + def on_config(self, config: MkDocsConfig) -> MkDocsConfig | None: + """ + The `config` event is the first event called on build and is run immediately + after the user configuration is loaded and validated. Any alterations to the + config should be made here. + + Parameters: + config: global configuration object + + Returns: + global configuration object + """ + return config + + def on_pre_build(self, *, config: MkDocsConfig) -> None: + """ + The `pre_build` event does not alter any variables. Use this event to call + pre-build scripts. + + Parameters: + config: global configuration object + """ + + def on_files(self, files: Files, *, config: MkDocsConfig) -> Files | None: + """ + The `files` event is called after the files collection is populated from the + `docs_dir`. Use this event to add, remove, or alter files in the + collection. Note that Page objects have not yet been associated with the + file objects in the collection. Use [Page Events](plugins.md#page-events) to manipulate page + specific data. + + Parameters: + files: global files collection + config: global configuration object + + Returns: + global files collection + """ + return files + + def on_nav(self, nav: Navigation, *, config: MkDocsConfig, files: Files) -> Navigation | None: + """ + The `nav` event is called after the site navigation is created and can + be used to alter the site navigation. + + Parameters: + nav: global navigation object + config: global configuration object + files: global files collection + + Returns: + global navigation object + """ + return nav + + def on_env( + self, env: jinja2.Environment, *, config: MkDocsConfig, files: Files + ) -> jinja2.Environment | None: + """ + The `env` event is called after the Jinja template environment is created + and can be used to alter the + [Jinja environment](https://jinja.palletsprojects.com/en/latest/api/#jinja2.Environment). + + Parameters: + env: global Jinja environment + config: global configuration object + files: global files collection + + Returns: + global Jinja Environment + """ + return env + + def on_post_build(self, *, config: MkDocsConfig) -> None: + """ + The `post_build` event does not alter any variables. Use this event to call + post-build scripts. + + Parameters: + config: global configuration object + """ + + def on_build_error(self, *, error: Exception) -> None: + """ + The `build_error` event is called after an exception of any kind + is caught by MkDocs during the build process. + Use this event to clean things up before MkDocs terminates. Note that any other + events which were scheduled to run after the error will have been skipped. See + [Handling Errors](plugins.md#handling-errors) for more details. + + Parameters: + error: exception raised + """ + + # Template events + + def on_pre_template( + self, template: jinja2.Template, *, template_name: str, config: MkDocsConfig + ) -> jinja2.Template | None: + """ + The `pre_template` event is called immediately after the subject template is + loaded and can be used to alter the template. 
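# Illustrative sketch, not part of the MkDocs sources above: handlers for two
# of the global events described here. The "drafts/" folder convention and
# the "shout" filter name are invented for the example.
from mkdocs.plugins import BasePlugin


class GlobalEventsPlugin(BasePlugin):
    def on_files(self, files, *, config):
        # Drop draft pages from the collection before the nav is built.
        for file in list(files):
            if file.src_uri.startswith('drafts/'):
                files.remove(file)
        return files

    def on_env(self, env, *, config, files):
        # Expose an extra Jinja filter to the theme templates.
        env.filters['shout'] = lambda text: str(text).upper()
        return env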
+ + Parameters: + template: a Jinja2 [Template](https://jinja.palletsprojects.com/en/latest/api/#jinja2.Template) object + template_name: string filename of template + config: global configuration object + + Returns: + a Jinja2 [Template](https://jinja.palletsprojects.com/en/latest/api/#jinja2.Template) object + """ + return template + + def on_template_context( + self, context: TemplateContext, *, template_name: str, config: MkDocsConfig + ) -> TemplateContext | None: + """ + The `template_context` event is called immediately after the context is created + for the subject template and can be used to alter the context for that specific + template only. + + Parameters: + context: dict of template context variables + template_name: string filename of template + config: global configuration object + + Returns: + dict of template context variables + """ + return context + + def on_post_template( + self, output_content: str, *, template_name: str, config: MkDocsConfig + ) -> str | None: + """ + The `post_template` event is called after the template is rendered, but before + it is written to disc and can be used to alter the output of the template. + If an empty string is returned, the template is skipped and nothing is is + written to disc. + + Parameters: + output_content: output of rendered template as string + template_name: string filename of template + config: global configuration object + + Returns: + output of rendered template as string + """ + return output_content + + # Page events + + def on_pre_page(self, page: Page, *, config: MkDocsConfig, files: Files) -> Page | None: + """ + The `pre_page` event is called before any actions are taken on the subject + page and can be used to alter the `Page` instance. + + Parameters: + page: `mkdocs.structure.pages.Page` instance + config: global configuration object + files: global files collection -class PluginCollection(OrderedDict): + Returns: + `mkdocs.structure.pages.Page` instance + """ + return page + + def on_page_read_source(self, *, page: Page, config: MkDocsConfig) -> str | None: + """ + The `on_page_read_source` event can replace the default mechanism to read + the contents of a page's source from the filesystem. + + Parameters: + page: `mkdocs.structure.pages.Page` instance + config: global configuration object + + Returns: + The raw source for a page as unicode string. If `None` is returned, the + default loading from a file will be performed. + """ + return None + + def on_page_markdown( + self, markdown: str, *, page: Page, config: MkDocsConfig, files: Files + ) -> str | None: + """ + The `page_markdown` event is called after the page's markdown is loaded + from file and can be used to alter the Markdown source text. The meta- + data has been stripped off and is available as `page.meta` at this point. + + Parameters: + markdown: Markdown source text of page as string + page: `mkdocs.structure.pages.Page` instance + config: global configuration object + files: global files collection + + Returns: + Markdown source text of page as string + """ + return markdown + + def on_page_content( + self, html: str, *, page: Page, config: MkDocsConfig, files: Files + ) -> str | None: + """ + The `page_content` event is called after the Markdown text is rendered to + HTML (but before being passed to a template) and can be used to alter the + HTML body of the page. 
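# Illustrative sketch, not part of the MkDocs sources above: a page event that
# rewrites the Markdown before it is rendered. The "status" meta key is an
# invented convention read from the page's front matter.
from mkdocs.plugins import BasePlugin


class StatusNotePlugin(BasePlugin):
    def on_page_markdown(self, markdown, *, page, config, files):
        if page.meta.get('status') == 'draft':
            return '**This page is a draft and may change.**\n\n' + markdown
        return markdown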
+ + Parameters: + html: HTML rendered from Markdown source as string + page: `mkdocs.structure.pages.Page` instance + config: global configuration object + files: global files collection + + Returns: + HTML rendered from Markdown source as string + """ + return html + + def on_page_context( + self, context: TemplateContext, *, page: Page, config: MkDocsConfig, nav: Navigation + ) -> TemplateContext | None: + """ + The `page_context` event is called after the context for a page is created + and can be used to alter the context for that specific page only. + + Parameters: + context: dict of template context variables + page: `mkdocs.structure.pages.Page` instance + config: global configuration object + nav: global navigation object + + Returns: + dict of template context variables + """ + return context + + def on_post_page(self, output: str, *, page: Page, config: MkDocsConfig) -> str | None: + """ + The `post_page` event is called after the template is rendered, but + before it is written to disc and can be used to alter the output of the + page. If an empty string is returned, the page is skipped and nothing is + written to disc. + + Parameters: + output: output of rendered template as string + page: `mkdocs.structure.pages.Page` instance + config: global configuration object + + Returns: + output of rendered template as string + """ + return output + + +EVENTS = tuple(k[3:] for k in BasePlugin.__dict__ if k.startswith("on_")) + +# The above definitions were just for docs and type checking, we don't actually want them. +for k in EVENTS: + delattr(BasePlugin, 'on_' + k) + + +T = TypeVar('T') + + +def event_priority(priority: float) -> Callable[[T], T]: + """A decorator to set an event priority for an event handler method. + + Recommended priority values: + `100` "first", `50` "early", `0` "default", `-50` "late", `-100` "last". + As different plugins discover more precise relations to each other, the values should be further tweaked. + + ```python + @plugins.event_priority(-100) # Wishing to run this after all other plugins' `on_files` events. + def on_files(self, files, config, **kwargs): + ... + ``` + + New in MkDocs 1.4. + Recommended shim for backwards compatibility: + + ```python + try: + from mkdocs.plugins import event_priority + except ImportError: + event_priority = lambda priority: lambda f: f # No-op fallback + ``` + """ + + def decorator(event_method): + event_method.mkdocs_priority = priority + return event_method + + return decorator + + +class PluginCollection(dict, MutableMapping[str, BasePlugin]): """ A collection of plugins. @@ -57,28 +460,36 @@ class PluginCollection(OrderedDict): by calling `run_event`. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) - self.events = {x: [] for x in EVENTS} + self.events: dict[str, list[Callable]] = {k: [] for k in EVENTS} - def _register_event(self, event_name, method): - """ Register a method for an event. 
""" - self.events[event_name].append(method) + def _register_event(self, event_name: str, method: Callable) -> None: + """Register a method for an event.""" + utils.insort( + self.events[event_name], method, key=lambda m: -getattr(m, 'mkdocs_priority', 0) + ) - def __setitem__(self, key, value, **kwargs): - if not isinstance(value, BasePlugin): - raise TypeError( - '{0}.{1} only accepts values which are instances of {2}.{3} ' - 'sublcasses'.format(self.__module__, self.__name__, - BasePlugin.__module__, BasePlugin.__name__)) - super().__setitem__(key, value, **kwargs) + def __getitem__(self, key: str) -> BasePlugin: + return super().__getitem__(key) + + def __setitem__(self, key: str, value: BasePlugin) -> None: + super().__setitem__(key, value) # Register all of the event methods defined for this Plugin. for event_name in (x for x in dir(value) if x.startswith('on_')): - method = getattr(value, event_name) + method = getattr(value, event_name, None) if callable(method): self._register_event(event_name[3:], method) - def run_event(self, name, item=None, **kwargs): + @overload + def run_event(self, name: str, **kwargs) -> Any: + ... + + @overload + def run_event(self, name: str, item: T, **kwargs) -> T: + ... + + def run_event(self, name: str, item=None, **kwargs): """ Run all registered methods of an event. @@ -87,9 +498,11 @@ def run_event(self, name, item=None, **kwargs): All other keywords are variables for context, but would not generally be modified by the event method. """ - pass_item = item is not None - for method in self.events[name]: + events = self.events[name] + if events: + log.debug(f'Running {len(events)} `{name}` events') + for method in events: if pass_item: result = method(item, **kwargs) else: @@ -98,3 +511,126 @@ def run_event(self, name, item=None, **kwargs): if result is not None: item = result return item + + def on_startup(self, *, command: Literal['build', 'gh-deploy', 'serve'], dirty: bool) -> None: + return self.run_event('startup', command=command, dirty=dirty) + + def on_shutdown(self) -> None: + return self.run_event('shutdown') + + def on_serve( + self, server: LiveReloadServer, *, config: MkDocsConfig, builder: Callable + ) -> LiveReloadServer: + return self.run_event('serve', server, config=config, builder=builder) + + def on_config(self, config: MkDocsConfig) -> MkDocsConfig: + return self.run_event('config', config) + + def on_pre_build(self, *, config: MkDocsConfig) -> None: + return self.run_event('pre_build', config=config) + + def on_files(self, files: Files, *, config: MkDocsConfig) -> Files: + return self.run_event('files', files, config=config) + + def on_nav(self, nav: Navigation, *, config: MkDocsConfig, files: Files) -> Navigation: + return self.run_event('nav', nav, config=config, files=files) + + def on_env(self, env: jinja2.Environment, *, config: MkDocsConfig, files: Files): + return self.run_event('env', env, config=config, files=files) + + def on_post_build(self, *, config: MkDocsConfig) -> None: + return self.run_event('post_build', config=config) + + def on_build_error(self, *, error: Exception) -> None: + return self.run_event('build_error', error=error) + + def on_pre_template( + self, template: jinja2.Template, *, template_name: str, config: MkDocsConfig + ) -> jinja2.Template: + return self.run_event('pre_template', template, template_name=template_name, config=config) + + def on_template_context( + self, context: TemplateContext, *, template_name: str, config: MkDocsConfig + ) -> TemplateContext: + return self.run_event( + 
'template_context', context, template_name=template_name, config=config + ) + + def on_post_template( + self, output_content: str, *, template_name: str, config: MkDocsConfig + ) -> str: + return self.run_event( + 'post_template', output_content, template_name=template_name, config=config + ) + + def on_pre_page(self, page: Page, *, config: MkDocsConfig, files: Files) -> Page: + return self.run_event('pre_page', page, config=config, files=files) + + def on_page_read_source(self, *, page: Page, config: MkDocsConfig) -> str | None: + return self.run_event('page_read_source', page=page, config=config) + + def on_page_markdown( + self, markdown: str, *, page: Page, config: MkDocsConfig, files: Files + ) -> str: + return self.run_event('page_markdown', markdown, page=page, config=config, files=files) + + def on_page_content(self, html: str, *, page: Page, config: MkDocsConfig, files: Files) -> str: + return self.run_event('page_content', html, page=page, config=config, files=files) + + def on_page_context( + self, context: TemplateContext, *, page: Page, config: MkDocsConfig, nav: Navigation + ) -> TemplateContext: + return self.run_event('page_context', context, page=page, config=config, nav=nav) + + def on_post_page(self, output: str, *, page: Page, config: MkDocsConfig) -> str: + return self.run_event('post_page', output, page=page, config=config) + + +class PrefixedLogger(logging.LoggerAdapter): + """A logger adapter to prefix log messages.""" + + def __init__(self, prefix: str, logger: logging.Logger) -> None: + """ + Initialize the logger adapter. + + Arguments: + prefix: The string to insert in front of every message. + logger: The logger instance. + """ + super().__init__(logger, {}) + self.prefix = prefix + + def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, Any]: + """ + Process the message. + + Arguments: + msg: The message: + kwargs: Remaining arguments. + + Returns: + The processed message. + """ + return f"{self.prefix}: {msg}", kwargs + + +def get_plugin_logger(name: str) -> PrefixedLogger: + """Return a logger for plugins. + + Arguments: + name: The name to use with `logging.getLogger`. + + Returns: + A logger configured to work well in MkDocs, + prefixing each message with the plugin package name. + + Example: + ```python + from mkdocs.plugins import get_plugin_logger + + log = get_plugin_logger(__name__) + log.info("My plugin message") + ``` + """ + logger = logging.getLogger(f"mkdocs.plugins.{name}") + return PrefixedLogger(name.split(".", 1)[0], logger) diff --git a/mkdocs/py.typed b/mkdocs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/mkdocs/structure/__init__.py b/mkdocs/structure/__init__.py index e69de29..c99b657 100644 --- a/mkdocs/structure/__init__.py +++ b/mkdocs/structure/__init__.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +import abc +from typing import TYPE_CHECKING, Iterable + +if TYPE_CHECKING: + from mkdocs.structure.nav import Section + + +class StructureItem(metaclass=abc.ABCMeta): + """An item in MkDocs structure - see concrete subclasses Section, Page or Link.""" + + @abc.abstractmethod + def __init__(self): + ... + + parent: Section | None = None + """The immediate parent of the item in the site navigation. 
`None` if it's at the top level.""" + + @property + def is_top_level(self) -> bool: + return self.parent is None + + title: str | None + is_section: bool = False + is_page: bool = False + is_link: bool = False + + @property + def ancestors(self) -> Iterable[StructureItem]: + if self.parent is None: + return [] + return [self.parent, *self.parent.ancestors] + + def _indent_print(self, depth: int = 0) -> str: + return (' ' * depth) + repr(self) diff --git a/mkdocs/structure/files.py b/mkdocs/structure/files.py index b0a7120..a918cb1 100644 --- a/mkdocs/structure/files.py +++ b/mkdocs/structure/files.py @@ -1,85 +1,160 @@ +from __future__ import annotations + +import enum import fnmatch -import os import logging -from functools import cmp_to_key +import os +import posixpath +import shutil +import warnings +from pathlib import PurePath +from typing import TYPE_CHECKING, Callable, Iterable, Iterator, Sequence from urllib.parse import quote as urlquote +import pathspec +import pathspec.gitignore +import pathspec.util + from mkdocs import utils +if TYPE_CHECKING: + import jinja2.environment + + from mkdocs.config.defaults import MkDocsConfig + from mkdocs.structure.pages import Page + log = logging.getLogger(__name__) -log.addFilter(utils.warning_filter) + + +class InclusionLevel(enum.Enum): + EXCLUDED = -2 + """The file is excluded from the final site, but will still be populated during `mkdocs serve`.""" + NOT_IN_NAV = -1 + """The file is part of the site, but doesn't produce nav warnings.""" + UNDEFINED = 0 + """Still needs to be computed based on the config. If the config doesn't kick in, acts the same as `included`.""" + INCLUDED = 1 + """The file is part of the site. Documentation pages that are omitted from the nav will produce warnings.""" + + def all(self): + return True + + def is_included(self): + return self.value > self.EXCLUDED.value + + def is_excluded(self): + return self.value <= self.EXCLUDED.value + + def is_in_nav(self): + return self.value > self.NOT_IN_NAV.value + + def is_not_in_nav(self): + return self.value <= self.NOT_IN_NAV.value class Files: - """ A collection of File objects. """ - def __init__(self, files): + """A collection of [File][mkdocs.structure.files.File] objects.""" + + def __init__(self, files: list[File]) -> None: self._files = files - self.src_paths = {file.src_path: file for file in files} + self._src_uris: dict[str, File] | None = None - def __iter__(self): + def __iter__(self) -> Iterator[File]: + """Iterate over the files within.""" return iter(self._files) - def __len__(self): + def __len__(self) -> int: + """The number of files within.""" return len(self._files) - def __contains__(self, path): - return path in self.src_paths - - def get_file_from_path(self, path): - """ Return a File instance with File.src_path equal to path. """ - return self.src_paths.get(os.path.normpath(path)) - - def append(self, file): - """ Append file to Files collection. 
""" + def __contains__(self, path: str) -> bool: + """Whether the file with this `src_uri` is in the collection.""" + return PurePath(path).as_posix() in self.src_uris + + @property + def src_paths(self) -> dict[str, File]: + """Soft-deprecated, prefer `src_uris`.""" + return {file.src_path: file for file in self._files} + + @property + def src_uris(self) -> dict[str, File]: + """A mapping containing every file, with the keys being their + [`src_uri`][mkdocs.structure.files.File.src_uri].""" + if self._src_uris is None: + self._src_uris = {file.src_uri: file for file in self._files} + return self._src_uris + + def get_file_from_path(self, path: str) -> File | None: + """Return a File instance with File.src_uri equal to path.""" + return self.src_uris.get(PurePath(path).as_posix()) + + def append(self, file: File) -> None: + """Append file to Files collection.""" + self._src_uris = None self._files.append(file) - self.src_paths[file.src_path] = file - def copy_static_files(self, dirty=False): - """ Copy static files from source to destination. """ + def remove(self, file: File) -> None: + """Remove file from Files collection.""" + self._src_uris = None + self._files.remove(file) + + def copy_static_files( + self, + dirty: bool = False, + *, + inclusion: Callable[[InclusionLevel], bool] = InclusionLevel.is_included, + ) -> None: + """Copy static files from source to destination.""" for file in self: - if not file.is_documentation_page(): + if not file.is_documentation_page() and inclusion(file.inclusion): file.copy_file(dirty) - def documentation_pages(self): - """ Return iterable of all Markdown page file objects. """ - return [file for file in self if file.is_documentation_page()] + def documentation_pages( + self, *, inclusion: Callable[[InclusionLevel], bool] = InclusionLevel.is_included + ) -> Sequence[File]: + """Return iterable of all Markdown page file objects.""" + return [file for file in self if file.is_documentation_page() and inclusion(file.inclusion)] - def static_pages(self): - """ Return iterable of all static page file objects. """ + def static_pages(self) -> Sequence[File]: + """Return iterable of all static page file objects.""" return [file for file in self if file.is_static_page()] - def media_files(self): - """ Return iterable of all file objects which are not documentation or static pages. """ + def media_files(self) -> Sequence[File]: + """Return iterable of all file objects which are not documentation or static pages.""" return [file for file in self if file.is_media_file()] - def javascript_files(self): - """ Return iterable of all javascript file objects. """ + def javascript_files(self) -> Sequence[File]: + """Return iterable of all javascript file objects.""" return [file for file in self if file.is_javascript()] - def css_files(self): - """ Return iterable of all CSS file objects. """ + def css_files(self) -> Sequence[File]: + """Return iterable of all CSS file objects.""" return [file for file in self if file.is_css()] - def add_files_from_theme(self, env, config): - """ Retrieve static files from Jinja environment and add to collection. 
""" + def add_files_from_theme(self, env: jinja2.Environment, config: MkDocsConfig) -> None: + """Retrieve static files from Jinja environment and add to collection.""" + def filter(name): # '.*' filters dot files/dirs at root level whereas '*/.*' filters nested levels patterns = ['.*', '*/.*', '*.py', '*.pyc', '*.html', '*readme*', 'mkdocs_theme.yml'] - patterns.extend('*{}'.format(x) for x in utils.markdown_extensions) - patterns.extend(config['theme'].static_templates) + # Exclude translation files + patterns.append("locales/*") + patterns.extend(f'*{x}' for x in utils.markdown_extensions) + patterns.extend(config.theme.static_templates) for pattern in patterns: if fnmatch.fnmatch(name.lower(), pattern): return False return True + for path in env.list_templates(filter_func=filter): # Theme files do not override docs_dir files - path = os.path.normpath(path) - if path not in self: - for dir in config['theme'].dirs: + path = PurePath(path).as_posix() + if path not in self.src_uris: + for dir in config.theme.dirs: # Find the first theme dir which contains path if os.path.isfile(os.path.join(dir, path)): - self.append(File(path, dir, config['site_dir'], config['use_directory_urls'])) + self.append(File(path, dir, config.site_dir, config.use_directory_urls)) break @@ -99,165 +174,226 @@ class File: `use_directory_urls` argument has no effect on non-Markdown files. File objects have the following properties, which are Unicode strings: + """ - File.src_path - The pure path of the source file relative to the source directory. + src_uri: str + """The pure path (always '/'-separated) of the source file relative to the source directory.""" - File.abs_src_path - The absolute concrete path of the source file. + abs_src_path: str + """The absolute concrete path of the source file. Will use backslashes on Windows.""" - File.dest_path - The pure path of the destination file relative to the destination directory. + dest_uri: str + """The pure path (always '/'-separated) of the destination file relative to the destination directory.""" - File.abs_dest_path - The absolute concrete path of the destination file. + abs_dest_path: str + """The absolute concrete path of the destination file. Will use backslashes on Windows.""" - File.url - The url of the destination file relative to the destination directory as a string. - """ - def __init__(self, path, src_dir, dest_dir, use_directory_urls): - self.page = None - self.src_path = os.path.normpath(path) - self.abs_src_path = os.path.normpath(os.path.join(src_dir, self.src_path)) - self.name = self._get_stem() - self.dest_path = self._get_dest_path(use_directory_urls) - self.abs_dest_path = os.path.normpath(os.path.join(dest_dir, self.dest_path)) - self.url = self._get_url(use_directory_urls) + url: str + """The URI of the destination file relative to the destination directory as a string.""" + + inclusion: InclusionLevel = InclusionLevel.UNDEFINED + """Whether the file will be excluded from the built site.""" + + @property + def src_path(self) -> str: + """Same as `src_uri` (and synchronized with it) but will use backslashes on Windows. Discouraged.""" + return os.path.normpath(self.src_uri) - def __eq__(self, other): + @src_path.setter + def src_path(self, value): + self.src_uri = PurePath(value).as_posix() - def sub_dict(d): - return {key: value for key, value in d.items() if key in ['src_path', 'abs_src_path', 'url']} + @property + def dest_path(self) -> str: + """Same as `dest_uri` (and synchronized with it) but will use backslashes on Windows. 
Discouraged.""" + return os.path.normpath(self.dest_uri) - return (isinstance(other, self.__class__) and sub_dict(self.__dict__) == sub_dict(other.__dict__)) + @dest_path.setter + def dest_path(self, value): + self.dest_uri = PurePath(value).as_posix() + + page: Page | None + + def __init__( + self, + path: str, + src_dir: str, + dest_dir: str, + use_directory_urls: bool, + *, + dest_uri: str | None = None, + inclusion: InclusionLevel = InclusionLevel.UNDEFINED, + ) -> None: + self.page = None + self.src_path = path + self.name = self._get_stem() + if dest_uri is None: + dest_uri = self._get_dest_path(use_directory_urls) + self.dest_uri = dest_uri + self.url = self._get_url(use_directory_urls) + self.abs_src_path = os.path.normpath(os.path.join(src_dir, self.src_uri)) + self.abs_dest_path = os.path.normpath(os.path.join(dest_dir, self.dest_uri)) + self.inclusion = inclusion + + def __eq__(self, other) -> bool: + return ( + isinstance(other, self.__class__) + and self.src_uri == other.src_uri + and self.abs_src_path == other.abs_src_path + and self.url == other.url + ) - def __ne__(self, other): - return not self.__eq__(other) + def __repr__(self): + return ( + f"File(src_uri='{self.src_uri}', dest_uri='{self.dest_uri}'," + f" name='{self.name}', url='{self.url}')" + ) - def _get_stem(self): - """ Return the name of the file without it's extension. """ - filename = os.path.basename(self.src_path) - stem, ext = os.path.splitext(filename) - return 'index' if stem in ('index', 'README') else stem + def _get_stem(self) -> str: + """Return the name of the file without its extension.""" + filename = posixpath.basename(self.src_uri) + stem, ext = posixpath.splitext(filename) + return 'index' if stem == 'README' else stem - def _get_dest_path(self, use_directory_urls): - """ Return destination path based on source path. """ + def _get_dest_path(self, use_directory_urls: bool) -> str: + """Return destination path based on source path.""" if self.is_documentation_page(): - parent, filename = os.path.split(self.src_path) + parent, filename = posixpath.split(self.src_uri) if not use_directory_urls or self.name == 'index': # index.md or README.md => index.html # foo.md => foo.html - return os.path.join(parent, self.name + '.html') + return posixpath.join(parent, self.name + '.html') else: # foo.md => foo/index.html - return os.path.join(parent, self.name, 'index.html') - return self.src_path + return posixpath.join(parent, self.name, 'index.html') + return self.src_uri - def _get_url(self, use_directory_urls): - """ Return url based in destination path. """ - url = self.dest_path.replace(os.path.sep, '/') - dirname, filename = os.path.split(url) + def _get_url(self, use_directory_urls: bool) -> str: + """Return url based in destination path.""" + url = self.dest_uri + dirname, filename = posixpath.split(url) if use_directory_urls and filename == 'index.html': - if dirname == '': - url = '.' - else: - url = dirname + '/' + url = (dirname or '.') + '/' return urlquote(url) - def url_relative_to(self, other): - """ Return url for file relative to other file. """ + def url_relative_to(self, other: File | str) -> str: + """Return url for file relative to other file.""" return utils.get_relative_url(self.url, other.url if isinstance(other, File) else other) - def copy_file(self, dirty=False): - """ Copy source file to destination, ensuring parent directories exist. 
""" + def copy_file(self, dirty: bool = False) -> None: + """Copy source file to destination, ensuring parent directories exist.""" if dirty and not self.is_modified(): - log.debug("Skip copying unmodified file: '{}'".format(self.src_path)) + log.debug(f"Skip copying unmodified file: '{self.src_uri}'") else: - log.debug("Copying media file: '{}'".format(self.src_path)) - utils.copy_file(self.abs_src_path, self.abs_dest_path) + log.debug(f"Copying media file: '{self.src_uri}'") + try: + utils.copy_file(self.abs_src_path, self.abs_dest_path) + except shutil.SameFileError: + pass # Let plugins write directly into site_dir. - def is_modified(self): + def is_modified(self) -> bool: if os.path.isfile(self.abs_dest_path): return os.path.getmtime(self.abs_dest_path) < os.path.getmtime(self.abs_src_path) return True - def is_documentation_page(self): - """ Return True if file is a Markdown page. """ - return os.path.splitext(self.src_path)[1] in utils.markdown_extensions - - def is_static_page(self): - """ Return True if file is a static page (html, xml, json). """ - return os.path.splitext(self.src_path)[1] in ( - '.html', - '.htm', - '.xml', - '.json', - ) + def is_documentation_page(self) -> bool: + """Return True if file is a Markdown page.""" + return utils.is_markdown_file(self.src_uri) - def is_media_file(self): - """ Return True if file is not a documentation or static page. """ + def is_static_page(self) -> bool: + """Return True if file is a static page (HTML, XML, JSON).""" + return self.src_uri.endswith(('.html', '.htm', '.xml', '.json')) + + def is_media_file(self) -> bool: + """Return True if file is not a documentation or static page.""" return not (self.is_documentation_page() or self.is_static_page()) - def is_javascript(self): - """ Return True if file is a JavaScript file. """ - return os.path.splitext(self.src_path)[1] in ( - '.js', - '.javascript', - ) + def is_javascript(self) -> bool: + """Return True if file is a JavaScript file.""" + return self.src_uri.endswith(('.js', '.javascript', '.mjs')) - def is_css(self): - """ Return True if file is a CSS file. """ - return os.path.splitext(self.src_path)[1] in ( - '.css', - ) + def is_css(self) -> bool: + """Return True if file is a CSS file.""" + return self.src_uri.endswith('.css') -def get_files(config): - """ Walk the `docs_dir` and return a Files collection. 
""" - files = [] - exclude = ['.*', '/templates'] +_default_exclude = pathspec.gitignore.GitIgnoreSpec.from_lines(['.*', '/templates/']) + +def _set_exclusions(files: Iterable[File], config: MkDocsConfig) -> None: + """Re-calculate which files are excluded, based on the patterns in the config.""" + exclude: pathspec.gitignore.GitIgnoreSpec | None = config.get('exclude_docs') + exclude = _default_exclude + exclude if exclude else _default_exclude + nav_exclude: pathspec.gitignore.GitIgnoreSpec | None = config.get('not_in_nav') + + for file in files: + if file.inclusion == InclusionLevel.UNDEFINED: + if exclude.match_file(file.src_uri): + file.inclusion = InclusionLevel.EXCLUDED + elif nav_exclude and nav_exclude.match_file(file.src_uri): + file.inclusion = InclusionLevel.NOT_IN_NAV + else: + file.inclusion = InclusionLevel.INCLUDED + + +def get_files(config: MkDocsConfig) -> Files: + """Walk the `docs_dir` and return a Files collection.""" + files: list[File] = [] + conflicting_files: list[tuple[File, File]] = [] for source_dir, dirnames, filenames in os.walk(config['docs_dir'], followlinks=True): relative_dir = os.path.relpath(source_dir, config['docs_dir']) - - for dirname in list(dirnames): - path = os.path.normpath(os.path.join(relative_dir, dirname)) - # Skip any excluded directories - if _filter_paths(basename=dirname, path=path, is_dir=True, exclude=exclude): - dirnames.remove(dirname) dirnames.sort() - - for filename in _sort_files(filenames): - path = os.path.normpath(os.path.join(relative_dir, filename)) - # Skip any excluded files - if _filter_paths(basename=filename, path=path, is_dir=False, exclude=exclude): - continue - # Skip README.md if an index file also exists in dir - if filename.lower() == 'readme.md' and 'index.md' in filenames: - log.warning("Both index.md and readme.md found. Skipping readme.md from {}".format(source_dir)) - continue - files.append(File(path, config['docs_dir'], config['site_dir'], config['use_directory_urls'])) + filenames.sort(key=_file_sort_key) + + files_by_dest: dict[str, File] = {} + for filename in filenames: + file = File( + os.path.join(relative_dir, filename), + config['docs_dir'], + config['site_dir'], + config['use_directory_urls'], + ) + # Skip README.md if an index file also exists in dir (part 1) + prev_file = files_by_dest.setdefault(file.dest_uri, file) + if prev_file is not file: + conflicting_files.append((prev_file, file)) + files.append(file) + prev_file = file + + _set_exclusions(files, config) + # Skip README.md if an index file also exists in dir (part 2) + for a, b in conflicting_files: + if b.inclusion.is_included(): + if a.inclusion.is_included(): + log.warning( + f"Excluding '{a.src_uri}' from the site because it conflicts with '{b.src_uri}'." + ) + try: + files.remove(a) + except ValueError: + pass # Catching this to avoid errors if attempting to remove the same file twice. + else: + try: + files.remove(b) + except ValueError: + pass return Files(files) -def _sort_files(filenames): - """ Always sort `index` or `README` as first filename in list. 
""" +def _file_sort_key(f: str): + """Always sort `index` or `README` as first filename in list.""" + return (os.path.splitext(f)[0] not in ('index', 'README'), f) - def compare(x, y): - if x == y: - return 0 - if os.path.splitext(y)[0] in ['index', 'README']: - return 1 - if os.path.splitext(x)[0] in ['index', 'README'] or x < y: - return -1 - return 1 - return sorted(filenames, key=cmp_to_key(compare)) +def _sort_files(filenames: Iterable[str]) -> list[str]: + return sorted(filenames, key=_file_sort_key) -def _filter_paths(basename, path, is_dir, exclude): - """ .gitignore style file filtering. """ +def _filter_paths(basename: str, path: str, is_dir: bool, exclude: Iterable[str]) -> bool: + warnings.warn( + "_filter_paths is not used since MkDocs 1.5 and will be removed soon.", DeprecationWarning + ) for item in exclude: # Items ending in '/' apply only to directories. if item.endswith('/') and not is_dir: diff --git a/mkdocs/structure/nav.py b/mkdocs/structure/nav.py index 077ec19..018794e 100644 --- a/mkdocs/structure/nav.py +++ b/mkdocs/structure/nav.py @@ -1,15 +1,24 @@ +from __future__ import annotations + import logging -from urllib.parse import urlparse +import warnings +from typing import TYPE_CHECKING, Iterator, TypeVar +from urllib.parse import urlsplit +from mkdocs.structure import StructureItem from mkdocs.structure.pages import Page -from mkdocs.utils import nest_paths, warning_filter +from mkdocs.utils import nest_paths + +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig + from mkdocs.structure.files import Files + log = logging.getLogger(__name__) -log.addFilter(warning_filter) class Navigation: - def __init__(self, items, pages): + def __init__(self, items: list, pages: list[Page]) -> None: self.items = items # Nested List with full navigation of Sections, Pages, and Links. self.pages = pages # Flat List of subset of Pages in nav, in order. @@ -19,86 +28,108 @@ def __init__(self, items, pages): self.homepage = page break - def __repr__(self): - return '\n'.join([item._indent_print() for item in self]) + homepage: Page | None + """The [page][mkdocs.structure.pages.Page] object for the homepage of the site.""" - def __iter__(self): + pages: list[Page] + """A flat list of all [page][mkdocs.structure.pages.Page] objects contained in the navigation.""" + + def __str__(self) -> str: + return '\n'.join(item._indent_print() for item in self) + + def __iter__(self) -> Iterator: return iter(self.items) - def __len__(self): + def __len__(self) -> int: return len(self.items) -class Section: - def __init__(self, title, children): +class Section(StructureItem): + def __init__(self, title: str, children: list[StructureItem]) -> None: self.title = title self.children = children - self.parent = None self.active = False - self.is_section = True - self.is_page = False - self.is_link = False - def __repr__(self): - return "Section(title='{}')".format(self.title) + name = self.__class__.__name__ + return f"{name}(title={self.title!r})" - def _get_active(self): - """ Return active status of section. """ + title: str + """The title of the section.""" + + children: list[StructureItem] + """An iterable of all child navigation objects. Children may include nested sections, pages and links.""" + + @property + def active(self) -> bool: + """ + When `True`, indicates that a child page of this section is the current page and + can be used to highlight the section as the currently viewed section. Defaults + to `False`. 
+ """ return self.__active - def _set_active(self, value): - """ Set active status of section and ancestors. """ + @active.setter + def active(self, value: bool): + """Set active status of section and ancestors.""" self.__active = bool(value) if self.parent is not None: self.parent.active = bool(value) - active = property(_get_active, _set_active) + is_section: bool = True + """Indicates that the navigation object is a "section" object. Always `True` for section objects.""" - @property - def ancestors(self): - if self.parent is None: - return [] - return [self.parent] + self.parent.ancestors + is_page: bool = False + """Indicates that the navigation object is a "page" object. Always `False` for section objects.""" + + is_link: bool = False + """Indicates that the navigation object is a "link" object. Always `False` for section objects.""" - def _indent_print(self, depth=0): - ret = ['{}{}'.format(' ' * depth, repr(self))] + def _indent_print(self, depth: int = 0) -> str: + ret = [super()._indent_print(depth)] for item in self.children: ret.append(item._indent_print(depth + 1)) return '\n'.join(ret) -class Link: - def __init__(self, title, url): +class Link(StructureItem): + def __init__(self, title: str, url: str): self.title = title self.url = url - self.parent = None - - # These should never change but are included for consistency with sections and pages. - self.children = None - self.active = False - self.is_section = False - self.is_page = False - self.is_link = True def __repr__(self): - title = "'{}'".format(self.title) if (self.title is not None) else '[blank]' - return "Link(title={}, url='{}')".format(title, self.url) + name = self.__class__.__name__ + title = f"{self.title!r}" if self.title is not None else '[blank]' + return f"{name}(title={title}, url={self.url!r})" - @property - def ancestors(self): - if self.parent is None: - return [] - return [self.parent] + self.parent.ancestors + title: str + """The title of the link. This would generally be used as the label of the link.""" + + url: str + """The URL that the link points to. The URL should always be an absolute URLs and + should not need to have `base_url` prepended.""" + + children: None = None + """Links do not contain children and the attribute is always `None`.""" + + active: bool = False + """External links cannot be "active" and the attribute is always `False`.""" + + is_section: bool = False + """Indicates that the navigation object is a "section" object. Always `False` for link objects.""" + + is_page: bool = False + """Indicates that the navigation object is a "page" object. Always `False` for link objects.""" - def _indent_print(self, depth=0): - return '{}{}'.format(' ' * depth, repr(self)) + is_link: bool = True + """Indicates that the navigation object is a "link" object. 
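# Illustrative sketch, not part of the MkDocs sources above: the navigation
# objects described here, wired together by hand. Real builds construct them
# through get_navigation(); the titles and URL are invented.
from mkdocs.structure.nav import Link, Navigation, Section

link = Link('Project home', 'https://example.com/')
section = Section('About', children=[link])
link.parent = section
nav = Navigation(items=[section], pages=[])

assert section.is_section and not section.is_page
assert link.is_link and link.ancestors == [section]
assert len(nav) == 1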
Always `True` for link objects.""" -def get_navigation(files, config): - """ Build site navigation from config and files.""" - nav_config = config['nav'] or nest_paths(f.src_path for f in files.documentation_pages()) +def get_navigation(files: Files, config: MkDocsConfig) -> Navigation: + """Build site navigation from config and files.""" + documentation_pages = files.documentation_pages() + nav_config = config['nav'] or nest_paths(f.src_uri for f in documentation_pages) items = _data_to_navigation(nav_config, files, config) if not isinstance(items, list): items = [items] @@ -110,74 +141,100 @@ def get_navigation(files, config): _add_previous_and_next_links(pages) _add_parent_links(items) - missing_from_config = [file for file in files.documentation_pages() if file.page is None] + missing_from_config = [] + for file in documentation_pages: + if file.page is None: + # Any documentation files not found in the nav should still have an associated page, so we + # create them here. The Page object will automatically be assigned to `file.page` during + # its creation (and this is the only way in which these page objects are accessible). + Page(None, file, config) + if file.inclusion.is_in_nav(): + missing_from_config.append(file.src_path) if missing_from_config: - log.info( + log.log( + config.validation.nav.omitted_files, 'The following pages exist in the docs directory, but are not ' - 'included in the "nav" configuration:\n - {}'.format( - '\n - '.join([file.src_path for file in missing_from_config])) + 'included in the "nav" configuration:\n - ' + '\n - '.join(missing_from_config), ) - # Any documentation files not found in the nav should still have an associated page, so we - # create them here. The Page object will automatically be assigned to `file.page` during - # its creation (and this is the only way in which these page objects are accessable). 
- for file in missing_from_config: - Page(None, file, config) links = _get_by_type(items, Link) for link in links: - scheme, netloc, path, params, query, fragment = urlparse(link.url) + scheme, netloc, path, query, fragment = urlsplit(link.url) if scheme or netloc: - log.debug( - "An external link to '{}' is included in " - "the 'nav' configuration.".format(link.url) - ) + log.debug(f"An external link to '{link.url}' is included in the 'nav' configuration.") elif link.url.startswith('/'): - log.debug( - "An absolute path to '{}' is included in the 'nav' configuration, " - "which presumably points to an external resource.".format(link.url) + log.log( + config.validation.nav.absolute_links, + f"An absolute path to '{link.url}' is included in the 'nav' " + "configuration, which presumably points to an external resource.", ) else: - msg = ( - "A relative path to '{}' is included in the 'nav' configuration, " - "which is not found in the documentation files".format(link.url) + log.log( + config.validation.nav.not_found, + f"A relative path to '{link.url}' is included in the 'nav' " + "configuration, which is not found in the documentation files.", ) - log.warning(msg) return Navigation(items, pages) -def _data_to_navigation(data, files, config): +def _data_to_navigation(data, files: Files, config: MkDocsConfig): if isinstance(data, dict): return [ _data_to_navigation((key, value), files, config) - if isinstance(value, str) else - Section(title=key, children=_data_to_navigation(value, files, config)) + if isinstance(value, str) + else Section(title=key, children=_data_to_navigation(value, files, config)) for key, value in data.items() ] elif isinstance(data, list): return [ _data_to_navigation(item, files, config)[0] - if isinstance(item, dict) and len(item) == 1 else - _data_to_navigation(item, files, config) + if isinstance(item, dict) and len(item) == 1 + else _data_to_navigation(item, files, config) for item in data ] title, path = data if isinstance(data, tuple) else (None, data) file = files.get_file_from_path(path) if file: + if file.inclusion.is_excluded(): + log.log( + min(logging.INFO, config.validation.nav.not_found), + f"A reference to '{file.src_path}' is included in the 'nav' " + "configuration, but this file is excluded from the built site.", + ) + page = file.page + if page is not None: + if isinstance(page, Page): + if type(page) is not Page: # Strict subclass + return page + warnings.warn( + "A plugin has set File.page to an instance of Page and it got overwritten. " + "The behavior of this will change in MkDocs 1.6.", + DeprecationWarning, + ) + else: + warnings.warn( # type: ignore[unreachable] + "A plugin has set File.page to a type other than Page. 
" + "This will be an error in MkDocs 1.6.", + DeprecationWarning, + ) return Page(title, file, config) return Link(title, path) -def _get_by_type(nav, T): +T = TypeVar('T') + + +def _get_by_type(nav, t: type[T]) -> list[T]: ret = [] for item in nav: - if isinstance(item, T): + if isinstance(item, t): ret.append(item) - elif item.children: - ret.extend(_get_by_type(item.children, T)) + if item.children: + ret.extend(_get_by_type(item.children, t)) return ret -def _add_parent_links(nav): +def _add_parent_links(nav) -> None: for item in nav: if item.is_section: for child in item.children: @@ -185,8 +242,8 @@ def _add_parent_links(nav): _add_parent_links(item.children) -def _add_previous_and_next_links(pages): - bookended = [None] + pages + [None] - zipped = zip(bookended[:-2], bookended[1:-1], bookended[2:]) +def _add_previous_and_next_links(pages: list[Page]) -> None: + bookended = [None, *pages, None] + zipped = zip(bookended[:-2], pages, bookended[2:]) for page0, page1, page2 in zipped: page1.previous_page, page1.next_page = page0, page2 diff --git a/mkdocs/structure/pages.py b/mkdocs/structure/pages.py index bdb64eb..e51ca2b 100644 --- a/mkdocs/structure/pages.py +++ b/mkdocs/structure/pages.py @@ -1,137 +1,226 @@ -import os +from __future__ import annotations + +import copy import logging -from urllib.parse import urlparse, urlunparse, urljoin +import posixpath +import warnings +from typing import TYPE_CHECKING, Any, Callable, Iterator, MutableMapping, Sequence from urllib.parse import unquote as urlunquote +from urllib.parse import urljoin, urlsplit, urlunsplit import markdown -from markdown.extensions import Extension -from markdown.treeprocessors import Treeprocessor +import markdown.postprocessors +import markdown.treeprocessors from markdown.util import AMP_SUBSTITUTE +from mkdocs import utils +from mkdocs.structure import StructureItem from mkdocs.structure.toc import get_toc -from mkdocs.utils import meta, get_build_date, get_markdown_title, warning_filter +from mkdocs.utils import _removesuffix, get_build_date, get_markdown_title, meta, weak_property + +if TYPE_CHECKING: + from xml.etree import ElementTree as etree + + from mkdocs.config.defaults import MkDocsConfig + from mkdocs.structure.files import File, Files + from mkdocs.structure.toc import TableOfContents + log = logging.getLogger(__name__) -log.addFilter(warning_filter) -class Page: - def __init__(self, title, file, config): +class Page(StructureItem): + def __init__(self, title: str | None, file: File, config: MkDocsConfig) -> None: file.page = self self.file = file - self.title = title + if title is not None: + self.title = title # Navigation attributes - self.parent = None self.children = None self.previous_page = None self.next_page = None self.active = False - self.is_section = False - self.is_page = True - self.is_link = False - self.update_date = get_build_date() self._set_canonical_url(config.get('site_url', None)) - self._set_edit_url(config.get('repo_url', None), config.get('edit_uri', None)) + self._set_edit_url( + config.get('repo_url', None), config.get('edit_uri'), config.get('edit_uri_template') + ) # Placeholders to be filled in later in the build process. 
self.markdown = None + self._title_from_render: str | None = None self.content = None - self.toc = [] + self.toc = [] # type: ignore self.meta = {} - def __eq__(self, other): + def __eq__(self, other) -> bool: + return ( + isinstance(other, self.__class__) + and self.title == other.title + and self.file == other.file + ) - def sub_dict(d): - return {key: value for key, value in d.items() if key in ['title', 'file']} + def __repr__(self): + name = self.__class__.__name__ + title = f"{self.title!r}" if self.title is not None else '[blank]' + url = self.abs_url or self.file.url + return f"{name}(title={title}, url={url!r})" - return (isinstance(other, self.__class__) and sub_dict(self.__dict__) == sub_dict(other.__dict__)) + markdown: str | None + """The original Markdown content from the file.""" - def __ne__(self, other): - return not self.__eq__(other) + content: str | None + """The rendered Markdown as HTML, this is the contents of the documentation.""" - def __repr__(self): - title = "'{}'".format(self.title) if (self.title is not None) else '[blank]' - return "Page(title={}, url='{}')".format(title, self.abs_url or self.file.url) + toc: TableOfContents + """An iterable object representing the Table of contents for a page. Each item in + the `toc` is an [`AnchorLink`][mkdocs.structure.toc.AnchorLink].""" + + meta: MutableMapping[str, Any] + """A mapping of the metadata included at the top of the markdown page.""" - def _indent_print(self, depth=0): - return '{}{}'.format(' ' * depth, repr(self)) + @property + def url(self) -> str: + """The URL of the page relative to the MkDocs `site_dir`.""" + url = self.file.url + if url in ('.', './'): + return '' + return url + + file: File + """The documentation [`File`][mkdocs.structure.files.File] that the page is being rendered from.""" + + abs_url: str | None + """The absolute URL of the page from the server root as determined by the value + assigned to the [site_url][] configuration setting. The value includes any + subdirectory included in the `site_url`, but not the domain. [base_url][] should + not be used with this variable.""" + + canonical_url: str | None + """The full, canonical URL to the current page as determined by the value assigned + to the [site_url][] configuration setting. The value includes the domain and any + subdirectory included in the `site_url`. [base_url][] should not be used with this + variable.""" - def _get_active(self): - """ Return active status of page. """ + @property + def active(self) -> bool: + """When `True`, indicates that this page is the currently viewed page. Defaults to `False`.""" return self.__active - def _set_active(self, value): - """ Set active status of page and ancestors. """ + @active.setter + def active(self, value: bool): + """Set active status of page and ancestors.""" self.__active = bool(value) if self.parent is not None: self.parent.active = bool(value) - active = property(_get_active, _set_active) - @property - def is_index(self): + def is_index(self) -> bool: return self.file.name == 'index' - @property - def is_top_level(self): - return self.parent is None + edit_url: str | None + """The full URL to the source page in the source repository. Typically used to + provide a link to edit the source page. [base_url][] should not be used with this + variable.""" @property - def is_homepage(self): - return self.is_top_level and self.is_index and self.file.url == '.' 
+ def is_homepage(self) -> bool: + """Evaluates to `True` for the homepage of the site and `False` for all other pages.""" + return self.is_top_level and self.is_index and self.file.url in ('.', './', 'index.html') - @property - def url(self): - return '' if self.file.url == '.' else self.file.url + previous_page: Page | None + """The [page][mkdocs.structure.pages.Page] object for the previous page or `None`. + The value will be `None` if the current page is the first item in the site navigation + or if the current page is not included in the navigation at all.""" - @property - def ancestors(self): - if self.parent is None: - return [] - return [self.parent] + self.parent.ancestors + next_page: Page | None + """The [page][mkdocs.structure.pages.Page] object for the next page or `None`. + The value will be `None` if the current page is the last item in the site navigation + or if the current page is not included in the navigation at all.""" + + children: None = None + """Pages do not contain children and the attribute is always `None`.""" + + is_section: bool = False + """Indicates that the navigation object is a "section" object. Always `False` for page objects.""" + + is_page: bool = True + """Indicates that the navigation object is a "page" object. Always `True` for page objects.""" - def _set_canonical_url(self, base): + is_link: bool = False + """Indicates that the navigation object is a "link" object. Always `False` for page objects.""" + + def _set_canonical_url(self, base: str | None) -> None: if base: if not base.endswith('/'): base += '/' - self.canonical_url = urljoin(base, self.url) - self.abs_url = urlparse(self.canonical_url).path + self.canonical_url = canonical_url = urljoin(base, self.url) + self.abs_url = urlsplit(canonical_url).path else: self.canonical_url = None self.abs_url = None - def _set_edit_url(self, repo_url, edit_uri): - if repo_url and edit_uri: - src_path = self.file.src_path.replace('\\', '/') - self.edit_url = urljoin(repo_url, edit_uri + src_path) + def _set_edit_url( + self, + repo_url: str | None, + edit_uri: str | None = None, + edit_uri_template: str | None = None, + ) -> None: + if edit_uri or edit_uri_template: + src_uri = self.file.src_uri + if edit_uri_template: + noext = posixpath.splitext(src_uri)[0] + edit_uri = edit_uri_template.format(path=src_uri, path_noext=noext) + else: + assert edit_uri is not None and edit_uri.endswith('/') + edit_uri += src_uri + if repo_url: + # Ensure urljoin behavior is correct + if not edit_uri.startswith(('?', '#')) and not repo_url.endswith('/'): + repo_url += '/' + else: + try: + parsed_url = urlsplit(edit_uri) + if not parsed_url.scheme or not parsed_url.netloc: + log.warning( + f"edit_uri: {edit_uri!r} is not a valid URL, it should include the http:// (scheme)" + ) + except ValueError as e: + log.warning(f"edit_uri: {edit_uri!r} is not a valid URL: {e}") + + self.edit_url = urljoin(repo_url or '', edit_uri) else: self.edit_url = None - def read_source(self, config): - source = config['plugins'].run_event( - 'page_read_source', page=self, config=config - ) + def read_source(self, config: MkDocsConfig) -> None: + source = config.plugins.on_page_read_source(page=self, config=config) if source is None: try: - with open(self.file.abs_src_path, 'r', encoding='utf-8-sig', errors='strict') as f: + with open(self.file.abs_src_path, encoding='utf-8-sig', errors='strict') as f: source = f.read() except OSError: - log.error('File not found: {}'.format(self.file.src_path)) + log.error(f'File not found: 
{self.file.src_path}') raise except ValueError: - log.error('Encoding error reading file: {}'.format(self.file.src_path)) + log.error(f'Encoding error reading file: {self.file.src_path}') raise self.markdown, self.meta = meta.get_data(source) - self._set_title() - def _set_title(self): + def _set_title(self) -> None: + warnings.warn( + "_set_title is no longer used in MkDocs and will be removed soon.", DeprecationWarning + ) + + @weak_property + def title(self) -> str | None: # type: ignore[override] """ - Set the title for a Markdown document. + Returns the title for the current page. + + Before calling `read_source()`, this value is empty. It can also be updated by `render()`. Check these in order and use the first that returns a valid title: - value provided on init (passed in from config) @@ -139,49 +228,58 @@ def _set_title(self): - content of the first H1 in Markdown content - convert filename to title """ - if self.title is not None: - return + if self.markdown is None: + return None if 'title' in self.meta: - self.title = self.meta['title'] - return + return self.meta['title'] - title = get_markdown_title(self.markdown) + if self._title_from_render: + return self._title_from_render + elif self.content is None: # Preserve legacy behavior only for edge cases in plugins. + title_from_md = get_markdown_title(self.markdown) + if title_from_md is not None: + return title_from_md - if title is None: - if self.is_homepage: - title = 'Home' - else: - title = self.file.name.replace('-', ' ').replace('_', ' ') - # Capitalize if the filename was all lowercase, otherwise leave it as-is. - if title.lower() == title: - title = title.capitalize() + if self.is_homepage: + return 'Home' - self.title = title + title = self.file.name.replace('-', ' ').replace('_', ' ') + # Capitalize if the filename was all lowercase, otherwise leave it as-is. + if title.lower() == title: + title = title.capitalize() + return title - def render(self, config, files): + def render(self, config: MkDocsConfig, files: Files) -> None: """ Convert the Markdown source file to HTML as per the config. 
""" - - extensions = [ - _RelativePathExtension(self.file, files) - ] + config['markdown_extensions'] + if self.markdown is None: + raise RuntimeError("`markdown` field hasn't been set (via `read_source`)") md = markdown.Markdown( - extensions=extensions, - extension_configs=config['mdx_configs'] or {} + extensions=config['markdown_extensions'], + extension_configs=config['mdx_configs'] or {}, ) + + relative_path_ext = _RelativePathTreeprocessor(self.file, files, config) + relative_path_ext._register(md) + + extract_title_ext = _ExtractTitleTreeprocessor() + extract_title_ext._register(md) + self.content = md.convert(self.markdown) self.toc = get_toc(getattr(md, 'toc_tokens', [])) + self._title_from_render = extract_title_ext.title -class _RelativePathTreeprocessor(Treeprocessor): - def __init__(self, file, files): +class _RelativePathTreeprocessor(markdown.treeprocessors.Treeprocessor): + def __init__(self, file: File, files: Files, config: MkDocsConfig) -> None: self.file = file self.files = files + self.config = config - def run(self, root): + def run(self, root: etree.Element) -> etree.Element: """ Update urls on anchors and images to make them relative @@ -197,48 +295,164 @@ def run(self, root): continue url = element.get(key) + assert url is not None new_url = self.path_to_url(url) element.set(key, new_url) return root - def path_to_url(self, url): - scheme, netloc, path, params, query, fragment = urlparse(url) + @classmethod + def _target_uri(cls, src_path: str, dest_path: str): + return posixpath.normpath( + posixpath.join(posixpath.dirname(src_path), dest_path).lstrip('/') + ) - if (scheme or netloc or not path or url.startswith('/') or url.startswith('\\') - or AMP_SUBSTITUTE in url or '.' not in os.path.split(path)[-1]): - # Ignore URLs unless they are a relative link to a source file. - # AMP_SUBSTITUTE is used internally by Markdown only for email. - # No '.' in the last part of a path indicates path does not point to a file. + @classmethod + def _possible_target_uris( + cls, file: File, path: str, use_directory_urls: bool + ) -> Iterator[str]: + """First yields the resolved file uri for the link, then proceeds to yield guesses for possible mistakes.""" + target_uri = cls._target_uri(file.src_uri, path) + yield target_uri + + if posixpath.normpath(path) == '.': + # Explicitly link to current file. + yield file.src_uri + return + tried = {target_uri} + + prefixes = [target_uri, cls._target_uri(file.url, path)] + if prefixes[0] == prefixes[1]: + prefixes.pop() + + suffixes: list[Callable[[str], str]] = [] + if use_directory_urls: + suffixes.append(lambda p: p) + if not posixpath.splitext(target_uri)[-1]: + suffixes.append(lambda p: posixpath.join(p, 'index.md')) + suffixes.append(lambda p: posixpath.join(p, 'README.md')) + if ( + not target_uri.endswith('.') + and not path.endswith('.md') + and (use_directory_urls or not path.endswith('/')) + ): + suffixes.append(lambda p: _removesuffix(p, '.html') + '.md') + + for pref in prefixes: + for suf in suffixes: + guess = posixpath.normpath(suf(pref)) + if guess not in tried and not guess.startswith('../'): + yield guess + tried.add(guess) + + def path_to_url(self, url: str) -> str: + scheme, netloc, path, query, fragment = urlsplit(url) + + warning_level, warning = 0, '' + + # Ignore URLs unless they are a relative link to a source file. + if scheme or netloc: # External link. return url - - # Determine the filepath of the target. 
- target_path = os.path.join(os.path.dirname(self.file.src_path), urlunquote(path)) - target_path = os.path.normpath(target_path).lstrip(os.sep) - - # Validate that the target exists in files collection. - if target_path not in self.files: - log.warning( - "Documentation file '{}' contains a link to '{}' which is not found " - "in the documentation files.".format(self.file.src_path, target_path) - ) + elif url.startswith(('/', '\\')): # Absolute link. + warning_level = self.config.validation.links.absolute_links + warning = f"Doc file '{self.file.src_uri}' contains an absolute link '{url}', it was left as is." + elif AMP_SUBSTITUTE in url: # AMP_SUBSTITUTE is used internally by Markdown only for email. + return url + elif not path: # Self-link containing only query or fragment. return url - target_file = self.files.get_file_from_path(target_path) - path = target_file.url_relative_to(self.file) - components = (scheme, netloc, path, params, query, fragment) - return urlunparse(components) + path = urlunquote(path) + # Determine the filepath of the target. + possible_target_uris = self._possible_target_uris( + self.file, path, self.config.use_directory_urls + ) -class _RelativePathExtension(Extension): - """ - The Extension class is what we pass to markdown, it then - registers the Treeprocessor. - """ + if warning: + # For absolute path (already has a warning), the primary lookup path should be preserved as a tip option. + target_uri = url + target_file = None + else: + # Validate that the target exists in files collection. + target_uri = next(possible_target_uris) + target_file = self.files.get_file_from_path(target_uri) + + if target_file is None and not warning: + # Primary lookup path had no match, definitely produce a warning, just choose which one. + if not posixpath.splitext(path)[-1]: + # No '.' in the last part of a path indicates path does not point to a file. + warning_level = self.config.validation.links.unrecognized_links + warning = ( + f"Doc file '{self.file.src_uri}' contains an unrecognized relative link '{url}', " + f"it was left as is." + ) + else: + target = f" '{target_uri}'" if target_uri != url else "" + warning_level = self.config.validation.links.not_found + warning = ( + f"Doc file '{self.file.src_uri}' contains a relative link '{url}', " + f"but the target{target} is not found among documentation files." + ) + + if warning: + # There was no match, so try to guess what other file could've been intended. + if warning_level > logging.DEBUG: + suggest_url = '' + for path in possible_target_uris: + if self.files.get_file_from_path(path) is not None: + if fragment and path == self.file.src_uri: + path = '' + else: + path = utils.get_relative_url(path, self.file.src_uri) + suggest_url = urlunsplit(('', '', path, query, fragment)) + break + else: + if '@' in url and '.' in url and '/' not in url: + suggest_url = f'mailto:{url}' + if suggest_url: + warning += f" Did you mean '{suggest_url}'?" + log.log(warning_level, warning) + return url - def __init__(self, file, files): - self.file = file - self.files = files + assert target_uri is not None + assert target_file is not None + if target_file.inclusion.is_excluded(): + warning_level = min(logging.INFO, self.config.validation.links.not_found) + warning = ( + f"Doc file '{self.file.src_uri}' contains a link to " + f"'{target_uri}' which is excluded from the built site." 
+ ) + log.log(warning_level, warning) + path = utils.get_relative_url(target_file.url, self.file.url) + return urlunsplit(('', '', path, query, fragment)) + + def _register(self, md: markdown.Markdown) -> None: + md.treeprocessors.register(self, "relpath", 0) + + +class _ExtractTitleTreeprocessor(markdown.treeprocessors.Treeprocessor): + title: str | None = None + postprocessors: Sequence[markdown.postprocessors.Postprocessor] = () + + def run(self, root: etree.Element) -> etree.Element: + for el in root: + if el.tag == 'h1': + # Drop anchorlink from the element, if present. + if len(el) > 0 and el[-1].tag == 'a' and not (el[-1].tail or '').strip(): + el = copy.copy(el) + del el[-1] + # Extract the text only, recursively. + title = ''.join(el.itertext()) + # Unescape per Markdown implementation details. + for pp in self.postprocessors: + title = pp.run(title) + self.title = title + break + return root - def extendMarkdown(self, md): - relpath = _RelativePathTreeprocessor(self.file, self.files) - md.treeprocessors.register(relpath, "relpath", 0) + def _register(self, md: markdown.Markdown) -> None: + self.postprocessors = tuple(md.postprocessors) + md.treeprocessors.register( + self, + "mkdocs_extract_title", + priority=-1, # After the end. + ) diff --git a/mkdocs/structure/toc.py b/mkdocs/structure/toc.py index 143292a..d52a57d 100644 --- a/mkdocs/structure/toc.py +++ b/mkdocs/structure/toc.py @@ -5,57 +5,85 @@ generate a list of dicts for each toc item, and then store it as AnchorLinks to maintain compatibility with older versions of MkDocs. """ +from __future__ import annotations +import sys +from typing import Iterable, Iterator -def get_toc(toc_tokens): - toc = [_parse_toc_token(i) for i in toc_tokens] - # For the table of contents, always mark the first element as active - if len(toc): - toc[0].active = True - return TableOfContents(toc) +if sys.version_info >= (3, 8): + from typing import TypedDict +else: + from typing_extensions import TypedDict -class TableOfContents: - """ - Represents the table of contents for a given page. - """ - def __init__(self, items): - self.items = items - - def __iter__(self): - return iter(self.items) +class _TocToken(TypedDict): + level: int + id: str + name: str + children: list[_TocToken] - def __len__(self): - return len(self.items) - def __str__(self): - return ''.join([str(item) for item in self]) +def get_toc(toc_tokens: list[_TocToken]) -> TableOfContents: + toc = [_parse_toc_token(i) for i in toc_tokens] + # For the table of contents, always mark the first element as active + if len(toc): + toc[0].active = True # type: ignore[attr-defined] + return TableOfContents(toc) class AnchorLink: """ A single entry in the table of contents. 
""" - def __init__(self, title, id, level): + + def __init__(self, title: str, id: str, level: int) -> None: self.title, self.id, self.level = title, id, level self.children = [] + title: str + """The text of the item.""" + @property - def url(self): + def url(self) -> str: + """The hash fragment of a URL pointing to the item.""" return '#' + self.id - def __str__(self): + level: int + """The zero-based level of the item.""" + + children: list[AnchorLink] + """An iterable of any child items.""" + + def __str__(self) -> str: return self.indent_print() - def indent_print(self, depth=0): + def indent_print(self, depth: int = 0) -> str: indent = ' ' * depth - ret = '{}{} - {}\n'.format(indent, self.title, self.url) + ret = f'{indent}{self.title} - {self.url}\n' for item in self.children: ret += item.indent_print(depth + 1) return ret -def _parse_toc_token(token): +class TableOfContents(Iterable[AnchorLink]): + """ + Represents the table of contents for a given page. + """ + + def __init__(self, items: list[AnchorLink]) -> None: + self.items = items + + def __iter__(self) -> Iterator[AnchorLink]: + return iter(self.items) + + def __len__(self) -> int: + return len(self.items) + + def __str__(self) -> str: + return ''.join(str(item) for item in self) + + +def _parse_toc_token(token: _TocToken) -> AnchorLink: anchor = AnchorLink(token['name'], token['id'], token['level']) for i in token['children']: anchor.children.append(_parse_toc_token(i)) diff --git a/mkdocs/templates/sitemap.xml b/mkdocs/templates/sitemap.xml index fee00ef..e60c7d0 100644 --- a/mkdocs/templates/sitemap.xml +++ b/mkdocs/templates/sitemap.xml @@ -1,22 +1,12 @@ -{%- macro nav_item(item) -%} - {%- if item.children -%} - {%- for child in item.children -%} - {{ nav_item(child) }} - {%- endfor -%} - {%- else %} - {%- if not item.is_link -%} + + +{%- for file in pages -%} + {% if not file.page.is_link and (file.page.abs_url or file.page.canonical_url) %} - {% if item.canonical_url %}{{ item.canonical_url|e }}{% else %}{{ item.abs_url|e }}{% endif %} - {% if item.update_date %}{{item.update_date}}{% endif %} - daily + {% if file.page.canonical_url %}{{ file.page.canonical_url|e }}{% else %}{{ file.page.abs_url|e }}{% endif %} + {% if file.page.update_date %}{{file.page.update_date}}{% endif %} + daily - {%- endif -%} {%- endif -%} -{%- endmacro -%} - - - -{%- for item in nav -%} - {{ nav_item(item) }} -{%- endfor %} +{% endfor %} diff --git a/mkdocs/tests/__init__.py b/mkdocs/tests/__init__.py old mode 100755 new mode 100644 index e69de29..e024878 --- a/mkdocs/tests/__init__.py +++ b/mkdocs/tests/__init__.py @@ -0,0 +1,16 @@ +import logging +import unittest.util + +unittest.util._MAX_LENGTH = 100000 + + +class DisallowLogsHandler(logging.Handler): + def __init__(self, level=logging.WARNING): + super().__init__(level=level) + self.formatter = logging.Formatter("%(levelname)s:%(name)s:%(message)s") + + def emit(self, record): + raise AssertionError(f'Unexpected log: {self.format(record)!r}') + + +logging.lastResort = DisallowLogsHandler() # type: ignore diff --git a/mkdocs/tests/base.py b/mkdocs/tests/base.py index 1ea373a..c480eaa 100644 --- a/mkdocs/tests/base.py +++ b/mkdocs/tests/base.py @@ -1,11 +1,15 @@ -import textwrap -import markdown +from __future__ import annotations + +import contextlib import os +import textwrap from functools import wraps from tempfile import TemporaryDirectory -from mkdocs import config +import markdown + from mkdocs import utils +from mkdocs.config.defaults import MkDocsConfig def dedent(text): @@ 
-13,30 +17,27 @@ def dedent(text): def get_markdown_toc(markdown_source): - """ Return TOC generated by Markdown parser from Markdown source text. """ + """Return TOC generated by Markdown parser from Markdown source text.""" md = markdown.Markdown(extensions=['toc']) md.convert(markdown_source) return md.toc_tokens -def load_config(**cfg): - """ Helper to build a simple config for testing. """ - path_base = os.path.join( - os.path.abspath(os.path.dirname(__file__)), 'integration', 'minimal' - ) - cfg = cfg or {} +def load_config(config_file_path: str | None = None, **cfg) -> MkDocsConfig: + """Helper to build a simple config for testing.""" + path_base = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'integration', 'minimal') if 'site_name' not in cfg: cfg['site_name'] = 'Example' - if 'config_file_path' not in cfg: - cfg['config_file_path'] = os.path.join(path_base, 'mkdocs.yml') if 'docs_dir' not in cfg: # Point to an actual dir to avoid a 'does not exist' error on validation. cfg['docs_dir'] = os.path.join(path_base, 'docs') - conf = config.Config(schema=config.DEFAULT_SCHEMA, config_file_path=cfg['config_file_path']) + if 'plugins' not in cfg: + cfg['plugins'] = [] + conf = MkDocsConfig(config_file_path=config_file_path or os.path.join(path_base, 'mkdocs.yml')) conf.load_dict(cfg) errors_warnings = conf.validate() - assert(errors_warnings == ([], [])), errors_warnings + assert errors_warnings == ([], []), errors_warnings return conf @@ -67,8 +68,7 @@ def example(self, tdir): """ files = {f: '' for f in files} if isinstance(files, (list, tuple)) else files or {} - if 'prefix' not in kw: - kw['prefix'] = 'mkdocs_test-' + kw['prefix'] = 'mkdocs_test-' + kw.get('prefix', '') def decorator(fn): @wraps(fn) @@ -78,10 +78,22 @@ def wrapper(self, *args): pth = os.path.join(td, path) utils.write_file(content.encode(encoding='utf-8'), pth) return fn(self, td, *args) + return wrapper + return decorator +@contextlib.contextmanager +def change_dir(path): + old_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(old_cwd) + + class PathAssertionMixin: """ Assertion methods for testing paths. 
@@ -90,40 +102,28 @@ class PathAssertionMixin: """ def assertPathsEqual(self, a, b, msg=None): - self.assertEqual(a.replace('\\', '/'), b.replace('\\', '/')) + self.assertEqual(a.replace(os.sep, '/'), b.replace(os.sep, '/')) def assertPathExists(self, *parts): path = os.path.join(*parts) if not os.path.exists(path): - msg = self._formatMessage(None, "The path '{}' does not exist".format(path)) + msg = self._formatMessage(None, f"The path '{path}' does not exist") raise self.failureException(msg) def assertPathNotExists(self, *parts): path = os.path.join(*parts) if os.path.exists(path): - msg = self._formatMessage(None, "The path '{}' does exist".format(path)) + msg = self._formatMessage(None, f"The path '{path}' does exist") raise self.failureException(msg) def assertPathIsFile(self, *parts): path = os.path.join(*parts) if not os.path.isfile(path): - msg = self._formatMessage(None, "The path '{}' is not a file that exists".format(path)) - raise self.failureException(msg) - - def assertPathNotFile(self, *parts): - path = os.path.join(*parts) - if os.path.isfile(path): - msg = self._formatMessage(None, "The path '{}' is a file that exists".format(path)) + msg = self._formatMessage(None, f"The path '{path}' is not a file that exists") raise self.failureException(msg) def assertPathIsDir(self, *parts): path = os.path.join(*parts) if not os.path.isdir(path): - msg = self._formatMessage(None, "The path '{}' is not a directory that exists".format(path)) - raise self.failureException(msg) - - def assertPathNotDir(self, *parts): - path = os.path.join(*parts) - if os.path.isfile(path): - msg = self._formatMessage(None, "The path '{}' is a directory that exists".format(path)) + msg = self._formatMessage(None, f"The path '{path}' is not a directory that exists") raise self.failureException(msg) diff --git a/mkdocs/tests/build_tests.py b/mkdocs/tests/build_tests.py index 4a8bb4b..ff62a82 100644 --- a/mkdocs/tests/build_tests.py +++ b/mkdocs/tests/build_tests.py @@ -1,64 +1,76 @@ #!/usr/bin/env python +from __future__ import annotations -from unittest import mock +import contextlib +import io +import os.path +import re +import textwrap import unittest +from pathlib import Path +from typing import TYPE_CHECKING +from unittest import mock -from mkdocs.structure.pages import Page +import markdown.preprocessors + +from mkdocs.commands import build +from mkdocs.config import base +from mkdocs.exceptions import PluginError +from mkdocs.livereload import LiveReloadServer from mkdocs.structure.files import File, Files from mkdocs.structure.nav import get_navigation -from mkdocs.commands import build -from mkdocs.tests.base import load_config, tempdir, PathAssertionMixin +from mkdocs.structure.pages import Page +from mkdocs.tests.base import PathAssertionMixin, load_config, tempdir from mkdocs.utils import meta +if TYPE_CHECKING: + from mkdocs.config.defaults import MkDocsConfig + def build_page(title, path, config, md_src=''): - """ Helper which returns a Page object. 
""" + """Helper which returns a Page object.""" - files = Files([File(path, config['docs_dir'], config['site_dir'], config['use_directory_urls'])]) + files = Files([File(path, config.docs_dir, config.site_dir, config.use_directory_urls)]) page = Page(title, list(files)[0], config) # Fake page.read_source() page.markdown, page.meta = meta.get_data(md_src) return page, files -class BuildTests(PathAssertionMixin, unittest.TestCase): +def testing_server(root, builder=lambda: None, mount_path="/"): + with mock.patch("socket.socket"): + return LiveReloadServer( + builder, host="localhost", port=123, root=root, mount_path=mount_path + ) - def assert_mock_called_once(self, mock): - """assert that the mock was called only once. - The `mock.assert_called_once()` method was added in PY36. - TODO: Remove this when PY35 support is dropped. - """ - try: - mock.assert_called_once() - except AttributeError: - if not mock.call_count == 1: - msg = ("Expected '%s' to have been called once. Called %s times." % - (mock._mock_name or 'mock', self.call_count)) - raise AssertionError(msg) +class BuildTests(PathAssertionMixin, unittest.TestCase): + def _get_env_with_null_translations(self, config): + env = config.theme.get_env() + env.add_extension('jinja2.ext.i18n') + env.install_null_translations() + return env # Test build.get_context def test_context_base_url_homepage(self): nav_cfg = [ - {'Home': 'index.md'} + {'Home': 'index.md'}, ] cfg = load_config(nav=nav_cfg, use_directory_urls=False) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[0]) self.assertEqual(context['base_url'], '.') def test_context_base_url_homepage_use_directory_urls(self): nav_cfg = [ - {'Home': 'index.md'} + {'Home': 'index.md'}, ] cfg = load_config(nav=nav_cfg) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[0]) self.assertEqual(context['base_url'], '.') @@ -66,13 +78,14 @@ def test_context_base_url_homepage_use_directory_urls(self): def test_context_base_url_nested_page(self): nav_cfg = [ {'Home': 'index.md'}, - {'Nested': 'foo/bar.md'} + {'Nested': 'foo/bar.md'}, ] cfg = load_config(nav=nav_cfg, use_directory_urls=False) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('foo/bar.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File('foo/bar.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[1]) self.assertEqual(context['base_url'], '..') @@ -80,13 +93,14 @@ def test_context_base_url_nested_page(self): def test_context_base_url_nested_page_use_directory_urls(self): nav_cfg = [ {'Home': 'index.md'}, - {'Nested': 'foo/bar.md'} + {'Nested': 'foo/bar.md'}, ] cfg = load_config(nav=nav_cfg) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('foo/bar.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + 
File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File('foo/bar.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[1]) self.assertEqual(context['base_url'], '../..') @@ -123,17 +137,16 @@ def test_context_base_url__absolute_nested_no_page_use_directory_urls(self): def test_context_extra_css_js_from_homepage(self): nav_cfg = [ - {'Home': 'index.md'} + {'Home': 'index.md'}, ] cfg = load_config( nav=nav_cfg, extra_css=['style.css'], extra_javascript=['script.js'], - use_directory_urls=False + use_directory_urls=False, ) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[0]) self.assertEqual(context['extra_css'], ['style.css']) @@ -142,18 +155,19 @@ def test_context_extra_css_js_from_homepage(self): def test_context_extra_css_js_from_nested_page(self): nav_cfg = [ {'Home': 'index.md'}, - {'Nested': 'foo/bar.md'} + {'Nested': 'foo/bar.md'}, ] cfg = load_config( nav=nav_cfg, extra_css=['style.css'], extra_javascript=['script.js'], - use_directory_urls=False + use_directory_urls=False, ) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('foo/bar.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File('foo/bar.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[1]) self.assertEqual(context['extra_css'], ['../style.css']) @@ -162,22 +176,46 @@ def test_context_extra_css_js_from_nested_page(self): def test_context_extra_css_js_from_nested_page_use_directory_urls(self): nav_cfg = [ {'Home': 'index.md'}, - {'Nested': 'foo/bar.md'} + {'Nested': 'foo/bar.md'}, ] cfg = load_config( nav=nav_cfg, extra_css=['style.css'], - extra_javascript=['script.js'] + extra_javascript=['script.js'], ) - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('foo/bar.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File('foo/bar.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) nav = get_navigation(files, cfg) context = build.get_context(nav, files, cfg, nav.pages[1]) self.assertEqual(context['extra_css'], ['../../style.css']) self.assertEqual(context['extra_javascript'], ['../../script.js']) + # TODO: This shouldn't pass on Linux + # @unittest.skipUnless(sys.platform.startswith("win"), "requires Windows") + def test_context_extra_css_path_warning(self): + nav_cfg = [ + {'Home': 'index.md'}, + ] + cfg = load_config( + nav=nav_cfg, + extra_css=['assets\\style.css'], + use_directory_urls=False, + ) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + nav = get_navigation(files, cfg) + with self.assertLogs('mkdocs') as cm: + context = build.get_context(nav, files, cfg, nav.pages[0]) + self.assertEqual(context['extra_css'], ['assets/style.css']) + self.assertEqual( + '\n'.join(cm.output), + "WARNING:mkdocs.utils:Path 'assets\\style.css' uses 
OS-specific separator '\\'. " + "That will be unsupported in a future release. Please change it to '/'.", + ) + def test_context_extra_css_js_no_page(self): cfg = load_config(extra_css=['style.css'], extra_javascript=['script.js']) context = build.get_context(mock.Mock(), mock.Mock(), cfg, base_url='..') @@ -195,32 +233,35 @@ def test_extra_context(self): @mock.patch('mkdocs.commands.build._build_template', return_value='some content') def test_build_theme_template(self, mock_build_template, mock_write_file): cfg = load_config() - env = cfg['theme'].get_env() + env = cfg.theme.get_env() build._build_theme_template('main.html', env, mock.Mock(), cfg, mock.Mock()) - self.assert_mock_called_once(mock_write_file) - self.assert_mock_called_once(mock_build_template) + mock_write_file.assert_called_once() + mock_build_template.assert_called_once() @mock.patch('mkdocs.utils.write_file') @mock.patch('mkdocs.commands.build._build_template', return_value='some content') @mock.patch('gzip.GzipFile') - def test_build_sitemap_template(self, mock_gzip_gzipfile, mock_build_template, mock_write_file): - cfg = load_config() - env = cfg['theme'].get_env() + @tempdir() + def test_build_sitemap_template( + self, site_dir, mock_gzip_gzipfile, mock_build_template, mock_write_file + ): + cfg = load_config(site_dir=site_dir) + env = cfg.theme.get_env() build._build_theme_template('sitemap.xml', env, mock.Mock(), cfg, mock.Mock()) - self.assert_mock_called_once(mock_write_file) - self.assert_mock_called_once(mock_build_template) - self.assert_mock_called_once(mock_gzip_gzipfile) + mock_write_file.assert_called_once() + mock_build_template.assert_called_once() + mock_gzip_gzipfile.assert_called_once() @mock.patch('mkdocs.utils.write_file') @mock.patch('mkdocs.commands.build._build_template', return_value='') def test_skip_missing_theme_template(self, mock_build_template, mock_write_file): cfg = load_config() - env = cfg['theme'].get_env() - with self.assertLogs('mkdocs', level='WARN') as cm: + env = cfg.theme.get_env() + with self.assertLogs('mkdocs') as cm: build._build_theme_template('missing.html', env, mock.Mock(), cfg, mock.Mock()) self.assertEqual( - cm.output, - ["WARNING:mkdocs.commands.build:Template skipped: 'missing.html' not found in theme directories."] + '\n'.join(cm.output), + "WARNING:mkdocs.commands.build:Template skipped: 'missing.html' not found in theme directories.", ) mock_write_file.assert_not_called() mock_build_template.assert_not_called() @@ -229,63 +270,60 @@ def test_skip_missing_theme_template(self, mock_build_template, mock_write_file) @mock.patch('mkdocs.commands.build._build_template', return_value='') def test_skip_theme_template_empty_output(self, mock_build_template, mock_write_file): cfg = load_config() - env = cfg['theme'].get_env() - with self.assertLogs('mkdocs', level='INFO') as cm: + env = cfg.theme.get_env() + with self.assertLogs('mkdocs') as cm: build._build_theme_template('main.html', env, mock.Mock(), cfg, mock.Mock()) self.assertEqual( - cm.output, - ["INFO:mkdocs.commands.build:Template skipped: 'main.html' generated empty output."] + '\n'.join(cm.output), + "INFO:mkdocs.commands.build:Template skipped: 'main.html' generated empty output.", ) mock_write_file.assert_not_called() - self.assert_mock_called_once(mock_build_template) + mock_build_template.assert_called_once() # Test build._build_extra_template + @tempdir() @mock.patch('mkdocs.commands.build.open', mock.mock_open(read_data='template content')) - def test_build_extra_template(self): - cfg = load_config() - 
files = Files([ - File('foo.html', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + def test_build_extra_template(self, site_dir): + cfg = load_config(site_dir=site_dir) + fs = [File('foo.html', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) build._build_extra_template('foo.html', files, cfg, mock.Mock()) @mock.patch('mkdocs.commands.build.open', mock.mock_open(read_data='template content')) def test_skip_missing_extra_template(self): cfg = load_config() - files = Files([ - File('foo.html', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) - with self.assertLogs('mkdocs', level='INFO') as cm: + fs = [File('foo.html', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + with self.assertLogs('mkdocs') as cm: build._build_extra_template('missing.html', files, cfg, mock.Mock()) self.assertEqual( - cm.output, - ["WARNING:mkdocs.commands.build:Template skipped: 'missing.html' not found in docs_dir."] + '\n'.join(cm.output), + "WARNING:mkdocs.commands.build:Template skipped: 'missing.html' not found in docs_dir.", ) @mock.patch('mkdocs.commands.build.open', side_effect=OSError('Error message.')) def test_skip_ioerror_extra_template(self, mock_open): cfg = load_config() - files = Files([ - File('foo.html', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) - with self.assertLogs('mkdocs', level='INFO') as cm: + fs = [File('foo.html', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + with self.assertLogs('mkdocs') as cm: build._build_extra_template('foo.html', files, cfg, mock.Mock()) self.assertEqual( - cm.output, - ["WARNING:mkdocs.commands.build:Error reading template 'foo.html': Error message."] + '\n'.join(cm.output), + "WARNING:mkdocs.commands.build:Error reading template 'foo.html': Error message.", ) @mock.patch('mkdocs.commands.build.open', mock.mock_open(read_data='')) def test_skip_extra_template_empty_output(self): cfg = load_config() - files = Files([ - File('foo.html', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) - with self.assertLogs('mkdocs', level='INFO') as cm: + fs = [File('foo.html', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + with self.assertLogs('mkdocs') as cm: build._build_extra_template('foo.html', files, cfg, mock.Mock()) self.assertEqual( - cm.output, - ["INFO:mkdocs.commands.build:Template skipped: 'foo.html' generated empty output."] + '\n'.join(cm.output), + "INFO:mkdocs.commands.build:Template skipped: 'foo.html' generated empty output.", ) # Test build._populate_page @@ -293,7 +331,7 @@ def test_skip_extra_template_empty_output(self): @tempdir(files={'index.md': 'page content'}) def test_populate_page(self, docs_dir): cfg = load_config(docs_dir=docs_dir) - file = File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + file = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) page = Page('Foo', file, cfg) build._populate_page(page, cfg, Files([file])) self.assertEqual(page.content, '

<p>page content</p>
') @@ -301,17 +339,19 @@ def test_populate_page(self, docs_dir): @tempdir(files={'testing.html': '

<p>page content</p>
'}) def test_populate_page_dirty_modified(self, site_dir): cfg = load_config(site_dir=site_dir) - file = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + file = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) page = Page('Foo', file, cfg) build._populate_page(page, cfg, Files([file]), dirty=True) self.assertTrue(page.markdown.startswith('# Welcome to MkDocs')) - self.assertTrue(page.content.startswith('

<h1 id="welcome-to-mkdocs">Welcome to MkDocs</h1>
')) + self.assertTrue( + page.content.startswith('

<h1 id="welcome-to-mkdocs">Welcome to MkDocs</h1>
') + ) @tempdir(files={'index.md': 'page content'}) @tempdir(files={'index.html': '

<p>page content</p>
'}) def test_populate_page_dirty_not_modified(self, site_dir, docs_dir): cfg = load_config(docs_dir=docs_dir, site_dir=site_dir) - file = File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + file = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) page = Page('Foo', file, cfg) build._populate_page(page, cfg, Files([file]), dirty=True) # Content is empty as file read was skipped @@ -322,88 +362,112 @@ def test_populate_page_dirty_not_modified(self, site_dir, docs_dir): @mock.patch('mkdocs.structure.pages.open', side_effect=OSError('Error message.')) def test_populate_page_read_error(self, docs_dir, mock_open): cfg = load_config(docs_dir=docs_dir) - file = File('missing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + file = File('missing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) page = Page('Foo', file, cfg) - with self.assertLogs('mkdocs', level='ERROR') as cm: - self.assertRaises(OSError, build._populate_page, page, cfg, Files([file])) + with self.assertLogs('mkdocs') as cm: + with self.assertRaises(OSError): + build._populate_page(page, cfg, Files([file])) self.assertEqual( - cm.output, [ + cm.output, + [ 'ERROR:mkdocs.structure.pages:File not found: missing.md', - "ERROR:mkdocs.commands.build:Error reading page 'missing.md': Error message." - ] + "ERROR:mkdocs.commands.build:Error reading page 'missing.md': Error message.", + ], + ) + mock_open.assert_called_once() + + @tempdir(files={'index.md': 'page content'}) + def test_populate_page_read_plugin_error(self, docs_dir): + def on_page_markdown(*args, **kwargs): + raise PluginError('Error message.') + + cfg = load_config(docs_dir=docs_dir) + cfg.plugins.events['page_markdown'].append(on_page_markdown) + + file = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + page = Page('Foo', file, cfg) + with self.assertLogs('mkdocs') as cm: + with self.assertRaises(PluginError): + build._populate_page(page, cfg, Files([file])) + self.assertEqual( + '\n'.join(cm.output), + "ERROR:mkdocs.commands.build:Error reading page 'index.md':", ) - self.assert_mock_called_once(mock_open) # Test build._build_page @tempdir() def test_build_page(self, site_dir): - cfg = load_config(site_dir=site_dir, nav=['index.md'], plugins=[]) - files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) + cfg = load_config(site_dir=site_dir, nav=['index.md']) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) page = files.documentation_pages()[0].page # Fake populate page page.title = 'Title' page.markdown = 'page content' page.content = '

<p>page content</p>
' - build._build_page(page, cfg, files, nav, cfg['theme'].get_env()) + build._build_page(page, cfg, files, nav, self._get_env_with_null_translations(cfg)) self.assertPathIsFile(site_dir, 'index.html') - # TODO: fix this. It seems that jinja2 chokes on the mock object. Not sure how to resolve. - # @tempdir() - # @mock.patch('jinja2.environment.Template') - # def test_build_page_empty(self, site_dir, mock_template): - # mock_template.render = mock.Mock(return_value='') - # cfg = load_config(site_dir=site_dir, nav=['index.md'], plugins=[]) - # files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) - # nav = get_navigation(files, cfg) - # page = files.documentation_pages()[0].page - # # Fake populate page - # page.title = '' - # page.markdown = '' - # page.content = '' - # with self.assertLogs('mkdocs', level='INFO') as cm: - # build._build_page(page, cfg, files, nav, cfg['theme'].get_env()) - # self.assertEqual( - # cm.output, - # ["INFO:mkdocs.commands.build:Page skipped: 'index.md'. Generated empty output."] - # ) - # self.assert_mock_called_once(mock_template.render) - # self.assertPathNotFile(site_dir, 'index.html') + @tempdir() + @mock.patch('jinja2.environment.Template.render', return_value='') + def test_build_page_empty(self, site_dir, render_mock): + cfg = load_config(site_dir=site_dir, nav=['index.md']) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + nav = get_navigation(files, cfg) + with self.assertLogs('mkdocs') as cm: + build._build_page( + files.documentation_pages()[0].page, cfg, files, nav, cfg.theme.get_env() + ) + self.assertEqual( + '\n'.join(cm.output), + "INFO:mkdocs.commands.build:Page skipped: 'index.md'. Generated empty output.", + ) + self.assertPathNotExists(site_dir, 'index.html') + render_mock.assert_called_once() @tempdir(files={'index.md': 'page content'}) @tempdir(files={'index.html': '

<p>page content</p>
'}) @mock.patch('mkdocs.utils.write_file') def test_build_page_dirty_modified(self, site_dir, docs_dir, mock_write_file): - cfg = load_config(docs_dir=docs_dir, site_dir=site_dir, nav=['index.md'], plugins=[]) - files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) + cfg = load_config(docs_dir=docs_dir, site_dir=site_dir, nav=['index.md']) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) page = files.documentation_pages()[0].page # Fake populate page page.title = 'Title' page.markdown = 'new page content' page.content = '

<p>new page content</p>
' - build._build_page(page, cfg, files, nav, cfg['theme'].get_env(), dirty=True) + build._build_page( + page, cfg, files, nav, self._get_env_with_null_translations(cfg), dirty=True + ) mock_write_file.assert_not_called() @tempdir(files={'testing.html': '

<p>page content</p>
'}) @mock.patch('mkdocs.utils.write_file') def test_build_page_dirty_not_modified(self, site_dir, mock_write_file): - cfg = load_config(site_dir=site_dir, nav=['testing.md'], plugins=[]) - files = Files([File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) + cfg = load_config(site_dir=site_dir, nav=['testing.md']) + fs = [File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) page = files.documentation_pages()[0].page # Fake populate page page.title = 'Title' page.markdown = 'page content' page.content = '

<p>page content</p>
' - build._build_page(page, cfg, files, nav, cfg['theme'].get_env(), dirty=True) - self.assert_mock_called_once(mock_write_file) + build._build_page( + page, cfg, files, nav, self._get_env_with_null_translations(cfg), dirty=True + ) + mock_write_file.assert_called_once() @tempdir() def test_build_page_custom_template(self, site_dir): - cfg = load_config(site_dir=site_dir, nav=['index.md'], plugins=[]) - files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) + cfg = load_config(site_dir=site_dir, nav=['index.md']) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) page = files.documentation_pages()[0].page # Fake populate page @@ -411,44 +475,71 @@ def test_build_page_custom_template(self, site_dir): page.meta = {'template': '404.html'} page.markdown = 'page content' page.content = '

<p>page content</p>
' - build._build_page(page, cfg, files, nav, cfg['theme'].get_env()) + build._build_page(page, cfg, files, nav, self._get_env_with_null_translations(cfg)) self.assertPathIsFile(site_dir, 'index.html') @tempdir() @mock.patch('mkdocs.utils.write_file', side_effect=OSError('Error message.')) def test_build_page_error(self, site_dir, mock_write_file): - cfg = load_config(site_dir=site_dir, nav=['index.md'], plugins=[]) - files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) + cfg = load_config(site_dir=site_dir, nav=['index.md']) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) nav = get_navigation(files, cfg) page = files.documentation_pages()[0].page # Fake populate page page.title = 'Title' page.markdown = 'page content' page.content = '

<p>page content</p>
' - with self.assertLogs('mkdocs', level='ERROR') as cm: - self.assertRaises(OSError, build._build_page, page, cfg, files, nav, cfg['theme'].get_env()) + with self.assertLogs('mkdocs') as cm: + with self.assertRaises(OSError): + build._build_page(page, cfg, files, nav, self._get_env_with_null_translations(cfg)) self.assertEqual( - cm.output, - ["ERROR:mkdocs.commands.build:Error building page 'index.md': Error message."] + '\n'.join(cm.output), + "ERROR:mkdocs.commands.build:Error building page 'index.md': Error message.", + ) + mock_write_file.assert_called_once() + + @tempdir() + def test_build_page_plugin_error(self, site_dir): + def on_page_context(*args, **kwargs): + raise PluginError('Error message.') + + cfg = load_config(site_dir=site_dir, nav=['index.md']) + cfg.plugins.events['page_context'].append(on_page_context) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + nav = get_navigation(files, cfg) + page = files.documentation_pages()[0].page + # Fake populate page + page.title = 'Title' + page.markdown = 'page content' + page.content = '

<p>page content</p>
' + with self.assertLogs('mkdocs') as cm: + with self.assertRaises(PluginError): + build._build_page(page, cfg, files, nav, cfg.theme.get_env()) + self.assertEqual( + '\n'.join(cm.output), + "ERROR:mkdocs.commands.build:Error building page 'index.md':", ) - self.assert_mock_called_once(mock_write_file) # Test build.build - @tempdir(files={ - 'index.md': 'page content', - 'empty.md': '', - 'img.jpg': '', - 'static.html': 'content', - '.hidden': 'content', - '.git/hidden': 'content' - }) + @tempdir( + files={ + 'index.md': 'page content', + 'empty.md': '', + 'img.jpg': '', + 'static.html': 'content', + '.hidden': 'content', + '.git/hidden': 'content', + } + ) @tempdir() def test_copying_media(self, site_dir, docs_dir): cfg = load_config(docs_dir=docs_dir, site_dir=site_dir) build.build(cfg) - # Verify that only non-empty md file (coverted to html), static HTML file and image are copied. + # Verify that only non-empty md file (converted to html), static HTML file and image are copied. self.assertPathIsFile(site_dir, 'index.html') self.assertPathIsFile(site_dir, 'img.jpg') self.assertPathIsFile(site_dir, 'static.html') @@ -462,7 +553,7 @@ def test_copy_theme_files(self, site_dir, docs_dir): cfg = load_config(docs_dir=docs_dir, site_dir=site_dir) build.build(cfg) - # Verify only theme media are copied, not templates or Python files. + # Verify only theme media are copied, not templates, Python or localization files. self.assertPathIsFile(site_dir, 'index.html') self.assertPathIsFile(site_dir, '404.html') self.assertPathIsDir(site_dir, 'js') @@ -474,6 +565,228 @@ def test_copy_theme_files(self, site_dir, docs_dir): self.assertPathNotExists(site_dir, 'base.html') self.assertPathNotExists(site_dir, 'content.html') self.assertPathNotExists(site_dir, 'main.html') + self.assertPathNotExists(site_dir, 'locales') + + @contextlib.contextmanager + def _assert_build_logs(self, expected): + with self.assertLogs('mkdocs') as cm: + yield + msgs = [f'{r.levelname}:{r.message}' for r in cm.records] + if msgs and msgs[0].startswith('INFO:Cleaning site directory'): + del msgs[0] + if msgs and msgs[0].startswith('INFO:Building documentation to directory'): + del msgs[0] + if msgs and msgs[-1].startswith('INFO:Documentation built'): + del msgs[-1] + self.assertEqual('\n'.join(msgs), textwrap.dedent(expected).strip('\n')) + + @tempdir( + files={ + 'test/foo.md': 'page1 content, [bar](bar.md)', + 'test/bar.md': 'page2 content, [baz](baz.md)', + 'test/baz.md': 'page3 content, [foo](foo.md)', + '.zoo.md': 'page4 content', + } + ) + @tempdir() + def test_exclude_pages_with_invalid_links(self, site_dir, docs_dir): + cfg = load_config( + docs_dir=docs_dir, + site_dir=site_dir, + use_directory_urls=False, + exclude_docs='ba*.md', + ) + + with self.subTest(live_server=None): + expected_logs = ''' + INFO:Doc file 'test/foo.md' contains a link to 'test/bar.md' which is excluded from the built site. + ''' + with self._assert_build_logs(expected_logs): + build.build(cfg) + self.assertPathIsFile(site_dir, 'test', 'foo.html') + self.assertPathNotExists(site_dir, 'test', 'baz.html') + self.assertPathNotExists(site_dir, '.zoo.html') + + server = testing_server(site_dir, mount_path='/documentation/') + with self.subTest(live_server=server): + expected_logs = ''' + INFO:Doc file 'test/bar.md' contains a link to 'test/baz.md' which is excluded from the built site. + INFO:Doc file 'test/foo.md' contains a link to 'test/bar.md' which is excluded from the built site. 
+ INFO:The following pages are being built only for the preview but will be excluded from `mkdocs build` per `exclude_docs`: + - http://localhost:123/documentation/.zoo.html + - http://localhost:123/documentation/test/bar.html + - http://localhost:123/documentation/test/baz.html + ''' + with self._assert_build_logs(expected_logs): + build.build(cfg, live_server=server) + + foo_path = Path(site_dir, 'test', 'foo.html') + self.assertTrue(foo_path.is_file()) + self.assertNotIn('DRAFT', foo_path.read_text()) + + baz_path = Path(site_dir, 'test', 'baz.html') + self.assertPathIsFile(baz_path) + self.assertIn('DRAFT', baz_path.read_text()) + + self.assertPathIsFile(site_dir, '.zoo.html') + + @tempdir( + files={ + 'foo/README.md': 'page1 content', + 'foo/index.md': 'page2 content', + } + ) + @tempdir() + def test_conflicting_readme_and_index(self, site_dir, docs_dir): + cfg = load_config(docs_dir=docs_dir, site_dir=site_dir, use_directory_urls=False) + + for server in None, testing_server(site_dir): + with self.subTest(live_server=server): + expected_logs = ''' + WARNING:Excluding 'foo/README.md' from the site because it conflicts with 'foo/index.md'. + ''' + with self._assert_build_logs(expected_logs): + build.build(cfg, live_server=server) + + index_path = Path(site_dir, 'foo', 'index.html') + self.assertPathIsFile(index_path) + self.assertRegex(index_path.read_text(), r'page2 content') + + @tempdir( + files={ + 'foo/README.md': 'page1 content', + 'foo/index.md': 'page2 content', + } + ) + @tempdir() + def test_exclude_readme_and_index(self, site_dir, docs_dir): + cfg = load_config( + docs_dir=docs_dir, site_dir=site_dir, use_directory_urls=False, exclude_docs='index.md' + ) + + for server in None, testing_server(site_dir): + with self.subTest(live_server=server): + with self._assert_build_logs(''): + build.build(cfg, live_server=server) + + index_path = Path(site_dir, 'foo', 'index.html') + self.assertPathIsFile(index_path) + self.assertRegex(index_path.read_text(), r'page1 content') + + @tempdir( + files={ + 'foo.md': 'page1 content', + 'bar.md': 'page2 content', + } + ) + @tempdir() + @tempdir() + def test_plugins_adding_files_and_interacting(self, tmp_dir, site_dir, docs_dir): + def on_files_1(files: Files, config: MkDocsConfig) -> Files: + # Plugin 1 generates a file. + Path(tmp_dir, 'SUMMARY.md').write_text('foo.md\nbar.md\n') + files.append(File('SUMMARY.md', tmp_dir, config.site_dir, config.use_directory_urls)) + return files + + def on_files_2(files: Files, config: MkDocsConfig) -> None: + # Plugin 2 reads that file and uses it to configure the nav. 
+ f = files.get_file_from_path('SUMMARY.md') + assert f is not None + config.nav = Path(f.abs_src_path).read_text().splitlines() + + for server in None, testing_server(site_dir): + for exclude in 'full', 'nav', None: + with self.subTest(live_server=server, exclude=exclude): + cfg = load_config( + docs_dir=docs_dir, + site_dir=site_dir, + use_directory_urls=False, + exclude_docs='SUMMARY.md' if exclude == 'full' else '', + not_in_nav='SUMMARY.md' if exclude == 'nav' else '', + ) + cfg.plugins.events['files'] += [on_files_1, on_files_2] + + expected_logs = '' + if exclude is None: + expected_logs = ''' + INFO:The following pages exist in the docs directory, but are not included in the "nav" configuration: + - SUMMARY.md + ''' + if exclude == 'full' and server: + expected_logs = ''' + INFO:The following pages are being built only for the preview but will be excluded from `mkdocs build` per `exclude_docs`: + - http://localhost:123/SUMMARY.html + ''' + with self._assert_build_logs(expected_logs): + build.build(cfg, live_server=server) + + foo_path = Path(site_dir, 'foo.html') + self.assertPathIsFile(foo_path) + self.assertRegex( + foo_path.read_text(), + r'href="foo.html"[\s\S]+href="bar.html"', # Nav order is respected + ) + + summary_path = Path(site_dir, 'SUMMARY.html') + if exclude == 'full' and not server: + self.assertPathNotExists(summary_path) + else: + self.assertPathExists(summary_path) + + @tempdir( + files={ + 'README.md': 'CONFIG_README\n', + 'docs/foo.md': 'ROOT_FOO\n', + 'docs/test/bar.md': 'TEST_BAR\n', + 'docs/main/foo.md': 'MAIN_FOO\n', + 'docs/main/main.md': ( + '--8<-- "README.md"\n\n' + '--8<-- "foo.md"\n\n' + '--8<-- "test/bar.md"\n\n' + '--8<-- "../foo.md"\n\n' + ), + } + ) + def test_markdown_extension_with_relative(self, config_dir): + for base_path, expected in { + '!relative': ''' +

<p>(Failed to read 'README.md')</p>
+ <p>MAIN_FOO</p>
+ <p>(Failed to read 'test/bar.md')</p>
+ <p>ROOT_FOO</p>
''', + '!relative $docs_dir': ''' +

<p>(Failed to read 'README.md')</p>
+ <p>ROOT_FOO</p>
+ <p>TEST_BAR</p>
+ <p>(Failed to read '../foo.md')</p>
''', + '!relative $config_dir/docs': ''' +

<p>(Failed to read 'README.md')</p>
+ <p>ROOT_FOO</p>
+ <p>TEST_BAR</p>
+ <p>(Failed to read '../foo.md')</p>
''', + '!relative $config_dir': ''' +

<p>CONFIG_README</p>
+ <p>(Failed to read 'foo.md')</p>
+ <p>(Failed to read 'test/bar.md')</p>
+ <p>(Failed to read '../foo.md')</p>
''', + }.items(): + with self.subTest(base_path=base_path): + cfg = f''' + site_name: test + use_directory_urls: false + markdown_extensions: + - mkdocs.tests.build_tests: + base_path: {base_path} + ''' + config = base.load_config( + io.StringIO(cfg), config_file_path=os.path.join(config_dir, 'mkdocs.yml') + ) + + with self._assert_build_logs(''): + build.build(config) + main_path = Path(config_dir, 'site', 'main', 'main.html') + self.assertTrue(main_path.is_file()) + self.assertIn(textwrap.dedent(expected), main_path.read_text()) # Test build.site_directory_contains_stale_files @@ -484,3 +797,29 @@ def test_site_dir_contains_stale_files(self, site_dir): @tempdir() def test_not_site_dir_contains_stale_files(self, site_dir): self.assertFalse(build.site_directory_contains_stale_files(site_dir)) + + +class _TestPreprocessor(markdown.preprocessors.Preprocessor): + def __init__(self, base_path: str) -> None: + self.base_path = base_path + + def run(self, lines: list[str]) -> list[str]: + for i, line in enumerate(lines): + m = re.search(r'^--8<-- "(.+)"$', line) + if m: + try: + lines[i] = Path(self.base_path, m[1]).read_text() + except OSError: + lines[i] = f"(Failed to read {m[1]!r})\n" + return lines + + +class _TestExtension(markdown.extensions.Extension): + def __init__(self, base_path: str) -> None: + self.base_path = base_path + + def extendMarkdown(self, md: markdown.Markdown) -> None: + md.preprocessors.register(_TestPreprocessor(self.base_path), "mkdocs_test", priority=32) + + +makeExtension = _TestExtension diff --git a/mkdocs/tests/cli_tests.py b/mkdocs/tests/cli_tests.py index c27015a..6ce03db 100644 --- a/mkdocs/tests/cli_tests.py +++ b/mkdocs/tests/cli_tests.py @@ -1,9 +1,9 @@ #!/usr/bin/env python +import io +import logging import unittest from unittest import mock -import logging -import io from click.testing import CliRunner @@ -11,31 +11,31 @@ class CLITests(unittest.TestCase): - def setUp(self): self.runner = CliRunner() @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_default(self, mock_serve): - - result = self.runner.invoke( - cli.cli, ["serve"], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ["serve"], catch_exceptions=False) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=None + use_directory_urls=None, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_config_file(self, mock_serve): - result = self.runner.invoke( - cli.cli, ["serve", "--config-file", "mkdocs.yml"], catch_exceptions=False) + cli.cli, ["serve", "--config-file", "mkdocs.yml"], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_serve.call_count, 1) @@ -46,138 +46,169 @@ def test_serve_config_file(self, mock_serve): @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_dev_addr(self, mock_serve): - result = self.runner.invoke( - cli.cli, ["serve", '--dev-addr', '0.0.0.0:80'], catch_exceptions=False) + cli.cli, ["serve", '--dev-addr', '0.0.0.0:80'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr='0.0.0.0:80', - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=None + use_directory_urls=None, + watch_theme=False, + watch=(), ) 
@mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_strict(self, mock_serve): - - result = self.runner.invoke( - cli.cli, ["serve", '--strict'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ["serve", '--strict'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=True, theme=None, - use_directory_urls=None + use_directory_urls=None, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_theme(self, mock_serve): - result = self.runner.invoke( - cli.cli, ["serve", '--theme', 'readthedocs'], catch_exceptions=False) + cli.cli, ["serve", '--theme', 'readthedocs'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=None, theme='readthedocs', - use_directory_urls=None + use_directory_urls=None, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_use_directory_urls(self, mock_serve): - result = self.runner.invoke( - cli.cli, ["serve", '--use-directory-urls'], catch_exceptions=False) + cli.cli, ["serve", '--use-directory-urls'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=True + use_directory_urls=True, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_no_directory_urls(self, mock_serve): - result = self.runner.invoke( - cli.cli, ["serve", '--no-directory-urls'], catch_exceptions=False) + cli.cli, ["serve", '--no-directory-urls'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=False + use_directory_urls=False, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_livereload(self, mock_serve): - - result = self.runner.invoke( - cli.cli, ["serve", '--livereload'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ["serve", '--livereload'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='livereload', + livereload=True, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=None + use_directory_urls=None, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_no_livereload(self, mock_serve): - - result = self.runner.invoke( - cli.cli, ["serve", '--no-livereload'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ["serve", '--no-livereload'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='no-livereload', + livereload=False, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=None + use_directory_urls=None, + watch_theme=False, + watch=(), ) @mock.patch('mkdocs.commands.serve.serve', autospec=True) def test_serve_dirtyreload(self, mock_serve): + result = 
self.runner.invoke(cli.cli, ["serve", '--dirty'], catch_exceptions=False) - result = self.runner.invoke( - cli.cli, ["serve", '--dirtyreload'], catch_exceptions=False) + self.assertEqual(result.exit_code, 0) + mock_serve.assert_called_once_with( + dev_addr=None, + livereload=True, + build_type='dirty', + config_file=None, + strict=None, + theme=None, + use_directory_urls=None, + watch_theme=False, + watch=(), + ) + + @mock.patch('mkdocs.commands.serve.serve', autospec=True) + def test_serve_watch_theme(self, mock_serve): + result = self.runner.invoke(cli.cli, ["serve", '--watch-theme'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) mock_serve.assert_called_once_with( dev_addr=None, - livereload='dirty', + livereload=True, + build_type=None, config_file=None, strict=None, theme=None, - use_directory_urls=None + use_directory_urls=None, + watch_theme=True, + watch=(), ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_defaults(self, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['build'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['build'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -189,17 +220,15 @@ def test_build_defaults(self, mock_build, mock_load_config): strict=None, theme=None, use_directory_urls=None, - site_dir=None + site_dir=None, ) - logger = logging.getLogger('mkdocs') - self.assertEqual(logger.level, logging.INFO) + handler = logging._handlers.get('MkDocsStreamHandler') + self.assertEqual(handler.level, logging.INFO) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_clean(self, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['build', '--clean'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['build', '--clean'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -210,9 +239,7 @@ def test_build_clean(self, mock_build, mock_load_config): @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_dirty(self, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['build', '--dirty'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['build', '--dirty'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -223,9 +250,9 @@ def test_build_dirty(self, mock_build, mock_load_config): @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_config_file(self, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['build', '--config-file', 'mkdocs.yml'], catch_exceptions=False) + cli.cli, ['build', '--config-file', 'mkdocs.yml'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -238,9 +265,7 @@ def test_build_config_file(self, mock_build, mock_load_config): @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_strict(self, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['build', '--strict'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['build', '--strict'], 
catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -249,15 +274,15 @@ def test_build_strict(self, mock_build, mock_load_config): strict=True, theme=None, use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_theme(self, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['build', '--theme', 'readthedocs'], catch_exceptions=False) + cli.cli, ['build', '--theme', 'readthedocs'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -266,15 +291,15 @@ def test_build_theme(self, mock_build, mock_load_config): strict=None, theme='readthedocs', use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_use_directory_urls(self, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['build', '--use-directory-urls'], catch_exceptions=False) + cli.cli, ['build', '--use-directory-urls'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -283,15 +308,15 @@ def test_build_use_directory_urls(self, mock_build, mock_load_config): strict=None, theme=None, use_directory_urls=True, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_no_directory_urls(self, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['build', '--no-directory-urls'], catch_exceptions=False) + cli.cli, ['build', '--no-directory-urls'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -300,15 +325,15 @@ def test_build_no_directory_urls(self, mock_build, mock_load_config): strict=None, theme=None, use_directory_urls=False, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_site_dir(self, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['build', '--site-dir', 'custom'], catch_exceptions=False) + cli.cli, ['build', '--site-dir', 'custom'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) @@ -323,32 +348,26 @@ def test_build_site_dir(self, mock_build, mock_load_config): @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_verbose(self, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['build', '--verbose'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['build', '--verbose'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) - logger = logging.getLogger('mkdocs') - self.assertEqual(logger.level, logging.DEBUG) + handler = logging._handlers.get('MkDocsStreamHandler') + self.assertEqual(handler.level, logging.DEBUG) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) def test_build_quiet(self, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['build', '--quiet'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, 
['build', '--quiet'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_build.call_count, 1) - logger = logging.getLogger('mkdocs') - self.assertEqual(logger.level, logging.ERROR) + handler = logging._handlers.get('MkDocsStreamHandler') + self.assertEqual(handler.level, logging.ERROR) @mock.patch('mkdocs.commands.new.new', autospec=True) def test_new(self, mock_new): - - result = self.runner.invoke( - cli.cli, ["new", "project"], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ["new", "project"], catch_exceptions=False) self.assertEqual(result.exit_code, 0) mock_new.assert_called_once_with('project') @@ -357,9 +376,7 @@ def test_new(self, mock_new): @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_defaults(self, mock_gh_deploy, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['gh-deploy'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['gh-deploy'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -381,16 +398,14 @@ def test_gh_deploy_defaults(self, mock_gh_deploy, mock_build, mock_load_config): strict=None, theme=None, use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_clean(self, mock_gh_deploy, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--clean'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['gh-deploy', '--clean'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -403,9 +418,7 @@ def test_gh_deploy_clean(self, mock_gh_deploy, mock_build, mock_load_config): @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_dirty(self, mock_gh_deploy, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--dirty'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['gh-deploy', '--dirty'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -418,9 +431,9 @@ def test_gh_deploy_dirty(self, mock_gh_deploy, mock_build, mock_load_config): @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_config_file(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--config-file', 'mkdocs.yml'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--config-file', 'mkdocs.yml'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -435,9 +448,9 @@ def test_gh_deploy_config_file(self, mock_gh_deploy, mock_build, mock_load_confi @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_message(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--message', 'A commit message'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--message', 'A commit message'], catch_exceptions=False + ) 
self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -451,9 +464,9 @@ def test_gh_deploy_message(self, mock_gh_deploy, mock_build, mock_load_config): @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_remote_branch(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--remote-branch', 'foo'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--remote-branch', 'foo'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -465,16 +478,16 @@ def test_gh_deploy_remote_branch(self, mock_gh_deploy, mock_build, mock_load_con strict=None, theme=None, use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_remote_name(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--remote-name', 'foo'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--remote-name', 'foo'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -486,16 +499,14 @@ def test_gh_deploy_remote_name(self, mock_gh_deploy, mock_build, mock_load_confi strict=None, theme=None, use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_force(self, mock_gh_deploy, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--force'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['gh-deploy', '--force'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -509,9 +520,9 @@ def test_gh_deploy_force(self, mock_gh_deploy, mock_build, mock_load_config): @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_ignore_version(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--ignore-version'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--ignore-version'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -525,9 +536,7 @@ def test_gh_deploy_ignore_version(self, mock_gh_deploy, mock_build, mock_load_co @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_strict(self, mock_gh_deploy, mock_build, mock_load_config): - - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--strict'], catch_exceptions=False) + result = self.runner.invoke(cli.cli, ['gh-deploy', '--strict'], catch_exceptions=False) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -539,16 +548,16 @@ def test_gh_deploy_strict(self, mock_gh_deploy, mock_build, mock_load_config): strict=True, theme=None, use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) 
@mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_theme(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--theme', 'readthedocs'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--theme', 'readthedocs'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -560,16 +569,16 @@ def test_gh_deploy_theme(self, mock_gh_deploy, mock_build, mock_load_config): strict=None, theme='readthedocs', use_directory_urls=None, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_use_directory_urls(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--use-directory-urls'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--use-directory-urls'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -581,16 +590,16 @@ def test_gh_deploy_use_directory_urls(self, mock_gh_deploy, mock_build, mock_loa strict=None, theme=None, use_directory_urls=True, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_no_directory_urls(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--no-directory-urls'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--no-directory-urls'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -602,16 +611,16 @@ def test_gh_deploy_no_directory_urls(self, mock_gh_deploy, mock_build, mock_load strict=None, theme=None, use_directory_urls=False, - site_dir=None + site_dir=None, ) @mock.patch('mkdocs.config.load_config', autospec=True) @mock.patch('mkdocs.commands.build.build', autospec=True) @mock.patch('mkdocs.commands.gh_deploy.gh_deploy', autospec=True) def test_gh_deploy_site_dir(self, mock_gh_deploy, mock_build, mock_load_config): - result = self.runner.invoke( - cli.cli, ['gh-deploy', '--site-dir', 'custom'], catch_exceptions=False) + cli.cli, ['gh-deploy', '--site-dir', 'custom'], catch_exceptions=False + ) self.assertEqual(result.exit_code, 0) self.assertEqual(mock_gh_deploy.call_count, 1) @@ -623,5 +632,5 @@ def test_gh_deploy_site_dir(self, mock_gh_deploy, mock_build, mock_load_config): strict=None, theme=None, use_directory_urls=None, - site_dir='custom' + site_dir='custom', ) diff --git a/mkdocs/tests/config/base_tests.py b/mkdocs/tests/config/base_tests.py index 6bfc8bc..2590b71 100644 --- a/mkdocs/tests/config/base_tests.py +++ b/mkdocs/tests/config/base_tests.py @@ -1,231 +1,233 @@ import os -import tempfile import unittest -from tempfile import TemporaryDirectory from mkdocs import exceptions -from mkdocs.config import base, defaults -from mkdocs.config.config_options import BaseConfigOption +from mkdocs.config import base +from mkdocs.config import config_options as c +from mkdocs.config import defaults +from mkdocs.config.base import ValidationError +from mkdocs.tests.base import change_dir, tempdir class ConfigBaseTests(unittest.TestCase): - def test_unrecognised_keys(self): - - c = base.Config(schema=defaults.DEFAULT_SCHEMA) - 
c.load_dict({ - 'not_a_valid_config_option': "test" - }) - - failed, warnings = c.validate() - - self.assertEqual(warnings, [ - ('not_a_valid_config_option', - 'Unrecognised configuration name: not_a_valid_config_option') - ]) + conf = defaults.MkDocsConfig() + conf.load_dict( + { + 'not_a_valid_config_option': "test", + } + ) + + failed, warnings = conf.validate() + + self.assertEqual( + warnings, + [ + ( + 'not_a_valid_config_option', + 'Unrecognised configuration name: not_a_valid_config_option', + ) + ], + ) def test_missing_required(self): + conf = defaults.MkDocsConfig() - c = base.Config(schema=defaults.DEFAULT_SCHEMA) - - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0][0], 'site_name') - self.assertEqual(str(errors[0][1]), 'Required configuration not provided.') + self.assertEqual( + errors, [('site_name', ValidationError('Required configuration not provided.'))] + ) + self.assertEqual(warnings, []) - self.assertEqual(len(warnings), 0) - - def test_load_from_file(self): + @tempdir() + def test_load_from_file(self, temp_dir): """ Users can explicitly set the config file using the '--config' option. Allows users to specify a config other than the default `mkdocs.yml`. """ + with open(os.path.join(temp_dir, 'mkdocs.yml'), 'w') as config_file: + config_file.write("site_name: MkDocs Test\n") + os.mkdir(os.path.join(temp_dir, 'docs')) - temp_dir = TemporaryDirectory() - config_file = open(os.path.join(temp_dir.name, 'mkdocs.yml'), 'w') - os.mkdir(os.path.join(temp_dir.name, 'docs')) - try: + cfg = base.load_config(config_file=config_file.name) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test') + + @tempdir() + def test_load_default_file(self, temp_dir): + """ + test that `mkdocs.yml` will be loaded when '--config' is not set. + """ + with open(os.path.join(temp_dir, 'mkdocs.yml'), 'w') as config_file: + config_file.write("site_name: MkDocs Test\n") + os.mkdir(os.path.join(temp_dir, 'docs')) + with change_dir(temp_dir): + cfg = base.load_config(config_file=None) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test') + + @tempdir + def test_load_default_file_with_yaml(self, temp_dir): + """ + test that `mkdocs.yml` will be loaded when '--config' is not set. + """ + with open(os.path.join(temp_dir, 'mkdocs.yaml'), 'w') as config_file: config_file.write("site_name: MkDocs Test\n") - config_file.flush() - config_file.close() + os.mkdir(os.path.join(temp_dir, 'docs')) + with change_dir(temp_dir): + cfg = base.load_config(config_file=None) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test') + + @tempdir() + def test_load_default_file_prefer_yml(self, temp_dir): + """ + test that `mkdocs.yml` will be loaded when '--config' is not set. 
+ """ + with open(os.path.join(temp_dir, 'mkdocs.yml'), 'w') as config_file1: + config_file1.write("site_name: MkDocs Test1\n") + with open(os.path.join(temp_dir, 'mkdocs.yaml'), 'w') as config_file2: + config_file2.write("site_name: MkDocs Test2\n") - cfg = base.load_config(config_file=config_file.name) - self.assertTrue(isinstance(cfg, base.Config)) - self.assertEqual(cfg['site_name'], 'MkDocs Test') - finally: - os.remove(config_file.name) - temp_dir.cleanup() + os.mkdir(os.path.join(temp_dir, 'docs')) + with change_dir(temp_dir): + cfg = base.load_config(config_file=None) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test1') def test_load_from_missing_file(self): + with self.assertRaisesRegex( + exceptions.ConfigurationError, "Config file 'missing_file.yml' does not exist." + ): + base.load_config(config_file='missing_file.yml') - self.assertRaises(exceptions.ConfigurationError, - base.load_config, config_file='missing_file.yml') - - def test_load_from_open_file(self): + @tempdir() + def test_load_from_open_file(self, temp_path): """ `load_config` can accept an open file descriptor. """ - - temp_dir = TemporaryDirectory() - temp_path = temp_dir.name config_fname = os.path.join(temp_path, 'mkdocs.yml') - config_file = open(config_fname, 'w+') + config_file.write("site_name: MkDocs Test\n") + config_file.flush() os.mkdir(os.path.join(temp_path, 'docs')) - try: - config_file.write("site_name: MkDocs Test\n") - config_file.flush() - cfg = base.load_config(config_file=config_file) - self.assertTrue(isinstance(cfg, base.Config)) - self.assertEqual(cfg['site_name'], 'MkDocs Test') - # load_config will always close the file - self.assertTrue(config_file.closed) - finally: - temp_dir.cleanup() + cfg = base.load_config(config_file=config_file) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test') + # load_config will always close the file + self.assertTrue(config_file.closed) - def test_load_from_closed_file(self): + @tempdir() + def test_load_from_closed_file(self, temp_dir): """ The `serve` command with auto-reload may pass in a closed file descriptor. Ensure `load_config` reloads the closed file. """ - - temp_dir = TemporaryDirectory() - config_file = open(os.path.join(temp_dir.name, 'mkdocs.yml'), 'w') - os.mkdir(os.path.join(temp_dir.name, 'docs')) - - try: + with open(os.path.join(temp_dir, 'mkdocs.yml'), 'w') as config_file: config_file.write("site_name: MkDocs Test\n") - config_file.flush() - config_file.close() + os.mkdir(os.path.join(temp_dir, 'docs')) - cfg = base.load_config(config_file=config_file) - self.assertTrue(isinstance(cfg, base.Config)) - self.assertEqual(cfg['site_name'], 'MkDocs Test') - finally: - temp_dir.cleanup() + cfg = base.load_config(config_file=config_file) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test') - def test_load_from_deleted_file(self): - """ - Deleting the config file could trigger a server reload. - """ - - config_file = tempfile.NamedTemporaryFile('w', delete=False) - try: - config_file.write("site_name: MkDocs Test\n") - config_file.flush() - config_file.close() - finally: - os.remove(config_file.name) - self.assertRaises(exceptions.ConfigurationError, - base.load_config, config_file=config_file) - - def test_load_missing_required(self): + @tempdir + def test_load_missing_required(self, temp_dir): """ `site_name` is a required setting. 
""" - - config_file = tempfile.NamedTemporaryFile('w', delete=False) - try: - config_file.write( - "site_dir: output\nsite_uri: https://www.mkdocs.org\n") - config_file.flush() - config_file.close() - - self.assertRaises(exceptions.ConfigurationError, - base.load_config, config_file=config_file.name) - finally: - os.remove(config_file.name) + with open(os.path.join(temp_dir, 'mkdocs.yml'), 'w') as config_file: + config_file.write("site_dir: output\nsite_url: https://www.mkdocs.org\n") + os.mkdir(os.path.join(temp_dir, 'docs')) + + with self.assertLogs('mkdocs') as cm: + with self.assertRaises(exceptions.Abort): + base.load_config(config_file=config_file.name) + self.assertEqual( + '\n'.join(cm.output), + "ERROR:mkdocs.config:Config value 'site_name': Required configuration not provided.", + ) def test_pre_validation_error(self): - class InvalidConfigOption(BaseConfigOption): + class InvalidConfigOption(c.BaseConfigOption): def pre_validation(self, config, key_name): - raise base.ValidationError('pre_validation error') + raise ValidationError('pre_validation error') - c = base.Config(schema=(('invalid_option', InvalidConfigOption()), )) + conf = base.Config(schema=(('invalid_option', InvalidConfigOption()),)) - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0][0], 'invalid_option') - self.assertEqual(str(errors[0][1]), 'pre_validation error') - self.assertTrue(isinstance(errors[0][1], base.ValidationError)) - self.assertEqual(len(warnings), 0) + self.assertEqual(errors, [('invalid_option', ValidationError('pre_validation error'))]) + self.assertEqual(warnings, []) def test_run_validation_error(self): - class InvalidConfigOption(BaseConfigOption): + class InvalidConfigOption(c.BaseConfigOption): def run_validation(self, value): - raise base.ValidationError('run_validation error') + raise ValidationError('run_validation error') - c = base.Config(schema=(('invalid_option', InvalidConfigOption()), )) + conf = base.Config(schema=(('invalid_option', InvalidConfigOption()),)) - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0][0], 'invalid_option') - self.assertEqual(str(errors[0][1]), 'run_validation error') - self.assertTrue(isinstance(errors[0][1], base.ValidationError)) - self.assertEqual(len(warnings), 0) + self.assertEqual(errors, [('invalid_option', ValidationError('run_validation error'))]) + self.assertEqual(warnings, []) def test_post_validation_error(self): - class InvalidConfigOption(BaseConfigOption): + class InvalidConfigOption(c.BaseConfigOption): def post_validation(self, config, key_name): - raise base.ValidationError('post_validation error') + raise ValidationError('post_validation error') - c = base.Config(schema=(('invalid_option', InvalidConfigOption()), )) + conf = base.Config(schema=(('invalid_option', InvalidConfigOption()),)) - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0][0], 'invalid_option') - self.assertEqual(str(errors[0][1]), 'post_validation error') - self.assertTrue(isinstance(errors[0][1], base.ValidationError)) - self.assertEqual(len(warnings), 0) + self.assertEqual(errors, [('invalid_option', ValidationError('post_validation error'))]) + self.assertEqual(warnings, []) def test_pre_and_run_validation_errors(self): - """ A pre_validation error does not stop run_validation from running. 
""" - class InvalidConfigOption(BaseConfigOption): + """A pre_validation error does not stop run_validation from running.""" + + class InvalidConfigOption(c.BaseConfigOption): def pre_validation(self, config, key_name): - raise base.ValidationError('pre_validation error') + raise ValidationError('pre_validation error') def run_validation(self, value): - raise base.ValidationError('run_validation error') + raise ValidationError('run_validation error') - c = base.Config(schema=(('invalid_option', InvalidConfigOption()), )) + conf = base.Config(schema=(('invalid_option', InvalidConfigOption()),)) - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 2) - self.assertEqual(errors[0][0], 'invalid_option') - self.assertEqual(str(errors[0][1]), 'pre_validation error') - self.assertTrue(isinstance(errors[0][1], base.ValidationError)) - self.assertEqual(errors[1][0], 'invalid_option') - self.assertEqual(str(errors[1][1]), 'run_validation error') - self.assertTrue(isinstance(errors[1][1], base.ValidationError)) - self.assertEqual(len(warnings), 0) + self.assertEqual( + errors, + [ + ('invalid_option', ValidationError('pre_validation error')), + ('invalid_option', ValidationError('run_validation error')), + ], + ) + self.assertEqual(warnings, []) def test_run_and_post_validation_errors(self): - """ A run_validation error stops post_validation from running. """ - class InvalidConfigOption(BaseConfigOption): + """A run_validation error stops post_validation from running.""" + + class InvalidConfigOption(c.BaseConfigOption): def run_validation(self, value): - raise base.ValidationError('run_validation error') + raise ValidationError('run_validation error') def post_validation(self, config, key_name): - raise base.ValidationError('post_validation error') + raise ValidationError('post_validation error') - c = base.Config(schema=(('invalid_option', InvalidConfigOption()), )) + conf = base.Config(schema=(('invalid_option', InvalidConfigOption()),)) - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0][0], 'invalid_option') - self.assertEqual(str(errors[0][1]), 'run_validation error') - self.assertTrue(isinstance(errors[0][1], base.ValidationError)) - self.assertEqual(len(warnings), 0) + self.assertEqual(errors, [('invalid_option', ValidationError('run_validation error'))]) + self.assertEqual(warnings, []) def test_validation_warnings(self): - class InvalidConfigOption(BaseConfigOption): + class InvalidConfigOption(c.BaseConfigOption): def pre_validation(self, config, key_name): self.warnings.append('pre_validation warning') @@ -235,40 +237,48 @@ def run_validation(self, value): def post_validation(self, config, key_name): self.warnings.append('post_validation warning') - c = base.Config(schema=(('invalid_option', InvalidConfigOption()), )) + conf = base.Config(schema=(('invalid_option', InvalidConfigOption()),)) - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 0) - self.assertEqual(warnings, [ - ('invalid_option', 'pre_validation warning'), - ('invalid_option', 'run_validation warning'), - ('invalid_option', 'post_validation warning'), - ]) + self.assertEqual(errors, []) + self.assertEqual( + warnings, + [ + ('invalid_option', 'pre_validation warning'), + ('invalid_option', 'run_validation warning'), + ('invalid_option', 'post_validation warning'), + ], + ) - def test_load_from_file_with_relative_paths(self): + @tempdir() + def 
test_load_from_file_with_relative_paths(self, config_dir): """ When explicitly setting a config file, paths should be relative to the config file, not the working directory. """ - - config_dir = TemporaryDirectory() - config_fname = os.path.join(config_dir.name, 'mkdocs.yml') - docs_dir = os.path.join(config_dir.name, 'src') + config_fname = os.path.join(config_dir, 'mkdocs.yml') + with open(config_fname, 'w') as config_file: + config_file.write("docs_dir: src\nsite_name: MkDocs Test\n") + docs_dir = os.path.join(config_dir, 'src') os.mkdir(docs_dir) - config_file = open(config_fname, 'w') - - try: - config_file.write("docs_dir: src\nsite_name: MkDocs Test\n") - config_file.flush() - config_file.close() - - cfg = base.load_config(config_file=config_file) - self.assertTrue(isinstance(cfg, base.Config)) - self.assertEqual(cfg['site_name'], 'MkDocs Test') - self.assertEqual(cfg['docs_dir'], docs_dir) - self.assertEqual(cfg.config_file_path, config_fname) - self.assertIsInstance(cfg.config_file_path, str) - finally: - config_dir.cleanup() + cfg = base.load_config(config_file=config_file) + self.assertTrue(isinstance(cfg, defaults.MkDocsConfig)) + self.assertEqual(cfg.site_name, 'MkDocs Test') + self.assertEqual(cfg.docs_dir, docs_dir) + self.assertEqual(cfg.config_file_path, config_fname) + self.assertIsInstance(cfg.config_file_path, str) + + def test_get_schema(self): + class FooConfig: + z = c.URL() + aa = c.Type(int) + + self.assertEqual( + base.get_schema(FooConfig), + ( + ('z', FooConfig.z), + ('aa', FooConfig.aa), + ), + ) diff --git a/mkdocs/tests/config/config_options_legacy_tests.py b/mkdocs/tests/config/config_options_legacy_tests.py new file mode 100644 index 0000000..0f76bf6 --- /dev/null +++ b/mkdocs/tests/config/config_options_legacy_tests.py @@ -0,0 +1,1657 @@ +from __future__ import annotations + +import contextlib +import copy +import io +import os +import re +import sys +import textwrap +import unittest +from typing import Any +from unittest.mock import patch + +import mkdocs +from mkdocs.config import base +from mkdocs.config import config_options as c +from mkdocs.tests.base import tempdir +from mkdocs.utils import write_file, yaml_load + + +class UnexpectedError(Exception): + pass + + +class TestCase(unittest.TestCase): + @contextlib.contextmanager + def expect_error(self, **kwargs): + [(key, msg)] = kwargs.items() + with self.assertRaises(UnexpectedError) as cm: + yield + if isinstance(msg, re.Pattern): + self.assertRegex(str(cm.exception), f'^{key}="{msg.pattern}"$') + else: + self.assertEqual(f'{key}="{msg}"', str(cm.exception)) + + def get_config( + self, + schema: type, + cfg: dict[str, Any], + warnings: dict[str, str] = {}, + config_file_path=None, + ): + config = base.LegacyConfig(base.get_schema(schema), config_file_path=config_file_path) + config.load_dict(cfg) + actual_errors, actual_warnings = config.validate() + if actual_errors: + raise UnexpectedError(', '.join(f'{key}="{msg}"' for key, msg in actual_errors)) + self.assertEqual(warnings, dict(actual_warnings)) + return config + + +class OptionallyRequiredTest(TestCase): + def test_empty(self): + class Schema: + option = c.OptionallyRequired() + + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf['option'], None) + + self.assertEqual(Schema.option.required, False) + + def test_required(self): + class Schema: + option = c.OptionallyRequired(required=True) + + with self.expect_error(option="Required configuration not provided."): + self.get_config(Schema, {'option': None}) + + 
self.assertEqual(Schema.option.required, True)
+
+    def test_required_no_default(self):
+        class Schema:
+            option = c.OptionallyRequired(required=True)
+
+        conf = self.get_config(Schema, {'option': 2})
+        self.assertEqual(conf['option'], 2)
+
+    def test_default(self):
+        class Schema:
+            option = c.OptionallyRequired(default=1)
+
+        conf = self.get_config(Schema, {'option': None})
+        self.assertEqual(conf['option'], 1)
+
+    def test_replace_default(self):
+        class Schema:
+            option = c.OptionallyRequired(default=1)
+
+        conf = self.get_config(Schema, {'option': 2})
+        self.assertEqual(conf['option'], 2)
+
+
+class TypeTest(TestCase):
+    def test_single_type(self):
+        class Schema:
+            option = c.Type(str)
+
+        conf = self.get_config(Schema, {'option': "Testing"})
+        self.assertEqual(conf['option'], "Testing")
+
+    def test_multiple_types(self):
+        class Schema:
+            option = c.Type((list, tuple))
+
+        conf = self.get_config(Schema, {'option': [1, 2, 3]})
+        self.assertEqual(conf['option'], [1, 2, 3])
+
+        conf = self.get_config(Schema, {'option': (1, 2, 3)})
+        self.assertEqual(conf['option'], (1, 2, 3))
+
+        with self.expect_error(
+            option="Expected type: (<class 'list'>, <class 'tuple'>) but received: <class 'dict'>"
+        ):
+            self.get_config(Schema, {'option': {'a': 1}})
+
+    def test_length(self):
+        class Schema:
+            option = c.Type(str, length=7)
+
+        conf = self.get_config(Schema, {'option': "Testing"})
+        self.assertEqual(conf['option'], "Testing")
+
+        with self.expect_error(
+            option="Expected type: <class 'str'> with length 7 but received: 'Testing Long' with length 12"
+        ):
+            self.get_config(Schema, {'option': "Testing Long"})
+
+
+class ChoiceTest(TestCase):
+    def test_required(self):
+        class Schema:
+            option = c.Choice(('python', 'node'), required=True)
+
+        conf = self.get_config(Schema, {'option': 'python'})
+        self.assertEqual(conf['option'], 'python')
+
+    def test_optional(self):
+        class Schema:
+            option = c.Choice(('python', 'node'))
+
+        conf = self.get_config(Schema, {'option': 'python'})
+        self.assertEqual(conf['option'], 'python')
+
+        conf = self.get_config(Schema, {})
+        self.assertEqual(conf['option'], None)
+
+        conf = self.get_config(Schema, {'option': None})
+        self.assertEqual(conf['option'], None)
+
+    def test_default(self):
+        class Schema:
+            option = c.Choice(('a', 'b', 'c'), default='b')
+
+        conf = self.get_config(Schema, {})
+        self.assertEqual(conf['option'], 'b')
+
+        conf = self.get_config(Schema, {'option': None})
+        self.assertEqual(conf['option'], 'b')
+
+        with self.expect_error(option="Expected one of: ('a', 'b', 'c') but received: 'go'"):
+            self.get_config(Schema, {'option': 'go'})
+
+    def test_invalid_default(self):
+        with self.assertRaises(ValueError):
+            c.Choice(('a', 'b'), default='c')
+        with self.assertRaises(ValueError):
+            c.Choice(('a', 'b'), default='c', required=True)
+
+    def test_invalid_choice(self):
+        class Schema:
+            option = c.Choice(('python', 'node'))
+
+        with self.expect_error(option="Expected one of: ('python', 'node') but received: 'go'"):
+            self.get_config(Schema, {'option': 'go'})
+
+    def test_invalid_choices(self):
+        with self.assertRaises(ValueError):
+            c.Choice('')
+        with self.assertRaises(ValueError):
+            c.Choice([])
+        with self.assertRaises(ValueError):
+            c.Choice(5)
+
+
+class DeprecatedTest(TestCase):
+    def test_deprecated_option_simple(self):
+        class Schema:
+            d = c.Deprecated()
+
+        self.get_config(
+            Schema,
+            {'d': 'value'},
+            warnings=dict(
+                d="The configuration option 'd' has been deprecated and will be removed in a "
+                "future release."
+            ),
+        )
+
+    def test_deprecated_option_message(self):
+        class Schema:
+            d = c.Deprecated(message='custom message for {} key')
+
+        self.get_config(Schema, {'d': 'value'}, warnings={'d': 'custom message for d key'})
+
+    def test_deprecated_option_with_type(self):
+        class Schema:
+            d = c.Deprecated(option_type=c.Type(str))
+
+        self.get_config(
+            Schema,
+            {'d': 'value'},
+            warnings=dict(
+                d="The configuration option 'd' has been deprecated and will be removed in a "
+                "future release."
+            ),
+        )
+
+    def test_deprecated_option_with_invalid_type(self):
+        class Schema:
+            d = c.Deprecated(option_type=c.Type(list))
+
+        with self.expect_error(d="Expected type: <class 'list'> but received: <class 'str'>"):
+            self.get_config(
+                Schema,
+                {'d': 'value'},
+                warnings=dict(
+                    d="The configuration option 'd' has been deprecated and will be removed in a "
+                    "future release."
+                ),
+            )
+
+    def test_removed_option(self):
+        class Schema:
+            d = c.Deprecated(removed=True, moved_to='foo')
+
+        with self.expect_error(
+            d="The configuration option 'd' was removed from MkDocs. Use 'foo' instead.",
+        ):
+            self.get_config(Schema, {'d': 'value'})
+
+    def test_deprecated_option_with_type_undefined(self):
+        class Schema:
+            option = c.Deprecated(option_type=c.Type(str))
+
+        self.get_config(Schema, {'option': None})
+
+    def test_deprecated_option_move(self):
+        class Schema:
+            new = c.Type(str)
+            old = c.Deprecated(moved_to='new')
+
+        conf = self.get_config(
+            Schema,
+            {'old': 'value'},
+            warnings=dict(
+                old="The configuration option 'old' has been deprecated and will be removed in a "
+                "future release. Use 'new' instead."
+            ),
+        )
+        self.assertEqual(conf, {'new': 'value', 'old': None})
+
+    def test_deprecated_option_move_complex(self):
+        class Schema:
+            foo = c.Type(dict)
+            old = c.Deprecated(moved_to='foo.bar')
+
+        conf = self.get_config(
+            Schema,
+            {'old': 'value'},
+            warnings=dict(
+                old="The configuration option 'old' has been deprecated and will be removed in a "
+                "future release. Use 'foo.bar' instead."
+            ),
+        )
+        self.assertEqual(conf, {'foo': {'bar': 'value'}, 'old': None})
+
+    def test_deprecated_option_move_existing(self):
+        class Schema:
+            foo = c.Type(dict)
+            old = c.Deprecated(moved_to='foo.bar')
+
+        conf = self.get_config(
+            Schema,
+            {'old': 'value', 'foo': {'existing': 'existing'}},
+            warnings=dict(
+                old="The configuration option 'old' has been deprecated and will be removed in a "
+                "future release. Use 'foo.bar' instead."
+            ),
+        )
+        self.assertEqual(conf, {'foo': {'existing': 'existing', 'bar': 'value'}, 'old': None})
+
+    def test_deprecated_option_move_invalid(self):
+        class Schema:
+            foo = c.Type(dict)
+            old = c.Deprecated(moved_to='foo.bar')
+
+        with self.expect_error(foo="Expected type: <class 'dict'> but received: <class 'str'>"):
+            self.get_config(
+                Schema,
+                {'old': 'value', 'foo': 'wrong type'},
+                warnings=dict(
+                    old="The configuration option 'old' has been deprecated and will be removed in a "
+                    "future release. Use 'foo.bar' instead."
+ ), + ) + + +class IpAddressTest(TestCase): + class Schema: + option = c.IpAddress() + + def test_valid_address(self): + addr = '127.0.0.1:8000' + + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(str(conf['option']), addr) + self.assertEqual(conf['option'].host, '127.0.0.1') + self.assertEqual(conf['option'].port, 8000) + + def test_valid_IPv6_address(self): + addr = '::1:8000' + + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(str(conf['option']), addr) + self.assertEqual(conf['option'].host, '::1') + self.assertEqual(conf['option'].port, 8000) + + def test_valid_full_IPv6_address(self): + addr = '[2001:db8:85a3::8a2e:370:7334]:123' + + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(conf['option'].host, '2001:db8:85a3::8a2e:370:7334') + self.assertEqual(conf['option'].port, 123) + + def test_named_address(self): + addr = 'localhost:8000' + + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(str(conf['option']), addr) + self.assertEqual(conf['option'].host, 'localhost') + self.assertEqual(conf['option'].port, 8000) + + def test_default_address(self): + addr = '127.0.0.1:8000' + + class Schema: + option = c.IpAddress(default=addr) + + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(str(conf['option']), addr) + self.assertEqual(conf['option'].host, '127.0.0.1') + self.assertEqual(conf['option'].port, 8000) + + @unittest.skipIf( + sys.version_info < (3, 9, 5), + "Leading zeros allowed in IP addresses before Python3.9.5", + ) + def test_invalid_leading_zeros(self): + with self.expect_error( + option="'127.000.000.001' does not appear to be an IPv4 or IPv6 address" + ): + self.get_config(self.Schema, {'option': '127.000.000.001:8000'}) + + def test_invalid_address_range(self): + with self.expect_error(option="'277.0.0.1' does not appear to be an IPv4 or IPv6 address"): + self.get_config(self.Schema, {'option': '277.0.0.1:8000'}) + + def test_invalid_address_format(self): + with self.expect_error(option="Must be a string of format 'IP:PORT'"): + self.get_config(self.Schema, {'option': '127.0.0.18000'}) + + def test_invalid_address_type(self): + with self.expect_error(option="Must be a string of format 'IP:PORT'"): + self.get_config(self.Schema, {'option': 123}) + + def test_invalid_address_port(self): + with self.expect_error(option="'foo' is not a valid port"): + self.get_config(self.Schema, {'option': '127.0.0.1:foo'}) + + def test_invalid_address_missing_port(self): + with self.expect_error(option="Must be a string of format 'IP:PORT'"): + self.get_config(self.Schema, {'option': '127.0.0.1'}) + + def test_unsupported_address(self): + class Schema: + dev_addr = c.IpAddress() + + self.get_config( + Schema, + {'dev_addr': '0.0.0.0:8000'}, + warnings=dict( + dev_addr="The use of the IP address '0.0.0.0' suggests a production " + "environment or the use of a proxy to connect to the MkDocs " + "server. However, the MkDocs' server is intended for local " + "development purposes only. Please use a third party " + "production-ready server instead." + ), + ) + + def test_unsupported_IPv6_address(self): + class Schema: + dev_addr = c.IpAddress() + + self.get_config( + Schema, + {'dev_addr': ':::8000'}, + warnings=dict( + dev_addr="The use of the IP address '::' suggests a production environment " + "or the use of a proxy to connect to the MkDocs server. However, " + "the MkDocs' server is intended for local development purposes " + "only. 
Please use a third party production-ready server instead."
+            ),
+        )
+
+
+class URLTest(TestCase):
+    def test_valid_url(self):
+        class Schema:
+            option = c.URL()
+
+        conf = self.get_config(Schema, {'option': "https://mkdocs.org"})
+        self.assertEqual(conf['option'], "https://mkdocs.org")
+
+        conf = self.get_config(Schema, {'option': ""})
+        self.assertEqual(conf['option'], "")
+
+    def test_valid_url_is_dir(self):
+        class Schema:
+            option = c.URL(is_dir=True)
+
+        conf = self.get_config(Schema, {'option': "http://mkdocs.org/"})
+        self.assertEqual(conf['option'], "http://mkdocs.org/")
+
+        conf = self.get_config(Schema, {'option': "https://mkdocs.org"})
+        self.assertEqual(conf['option'], "https://mkdocs.org/")
+
+    def test_optional(self):
+        class Schema:
+            option = c.URL(is_dir=True)
+
+        conf = self.get_config(Schema, {'option': ''})
+        self.assertEqual(conf['option'], '')
+
+        conf = self.get_config(Schema, {'option': None})
+        self.assertEqual(conf['option'], None)
+
+    def test_invalid_url(self):
+        class Schema:
+            option = c.URL(required=True)
+
+        with self.expect_error(option="Required configuration not provided."):
+            self.get_config(Schema, {'option': None})
+
+        for url in "www.mkdocs.org", "//mkdocs.org/test", "http:/mkdocs.org/", "/hello/":
+            with self.subTest(url=url):
+                with self.expect_error(
+                    option="The URL isn't valid, it should include the http:// (scheme)"
+                ):
+                    self.get_config(Schema, {'option': url})
+
+    def test_invalid_type(self):
+        class Schema:
+            option = c.URL()
+
+        with self.expect_error(option="Expected a string, got <class 'int'>"):
+            self.get_config(Schema, {'option': 1})
+
+
+class EditURITest(TestCase):
+    class Schema:
+        repo_url = c.URL()
+        repo_name = c.RepoName('repo_url')
+        edit_uri_template = c.EditURITemplate('edit_uri')
+        edit_uri = c.EditURI('repo_url')
+
+    def test_repo_name_github(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://github.com/mkdocs/mkdocs"},
+        )
+        self.assertEqual(conf['repo_name'], "GitHub")
+
+    def test_repo_name_bitbucket(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://bitbucket.org/gutworth/six/"},
+        )
+        self.assertEqual(conf['repo_name'], "Bitbucket")
+
+    def test_repo_name_gitlab(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://gitlab.com/gitlab-org/gitlab-ce/"},
+        )
+        self.assertEqual(conf['repo_name'], "GitLab")
+
+    def test_repo_name_custom(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://launchpad.net/python-tuskarclient"},
+        )
+        self.assertEqual(conf['repo_name'], "Launchpad")
+
+    def test_edit_uri_github(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://github.com/mkdocs/mkdocs"},
+        )
+        self.assertEqual(conf['edit_uri'], 'edit/master/docs/')
+        self.assertEqual(conf['repo_url'], "https://github.com/mkdocs/mkdocs")
+
+    def test_edit_uri_bitbucket(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://bitbucket.org/gutworth/six/"},
+        )
+        self.assertEqual(conf['edit_uri'], 'src/default/docs/')
+        self.assertEqual(conf['repo_url'], "https://bitbucket.org/gutworth/six/")
+
+    def test_edit_uri_gitlab(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://gitlab.com/gitlab-org/gitlab-ce/"},
+        )
+        self.assertEqual(conf['edit_uri'], 'edit/master/docs/')
+
+    def test_edit_uri_custom(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://launchpad.net/python-tuskarclient"},
+        )
+        self.assertEqual(conf['edit_uri'], None)
+        self.assertEqual(conf['repo_url'], "https://launchpad.net/python-tuskarclient")
+
+    def test_repo_name_custom_and_empty_edit_uri(self):
+        conf = self.get_config(
+            self.Schema,
+            {'repo_url': "https://github.com/mkdocs/mkdocs", 'repo_name': 'mkdocs'},
+        )
+        self.assertEqual(conf['edit_uri'], 'edit/master/docs/')
+
+    def test_edit_uri_template_ok(self):
+        conf = self.get_config(
+            self.Schema,
+            {
+                'repo_url': "https://github.com/mkdocs/mkdocs",
+                'edit_uri_template': 'edit/foo/docs/{path}',
+            },
+        )
+        self.assertEqual(conf['edit_uri_template'], 'edit/foo/docs/{path}')
+
+    def test_edit_uri_template_errors(self):
+        with self.expect_error(
+            edit_uri_template=re.compile(r'.*[{}].*')  # Complains about unclosed '{' or missing '}'
+        ):
+            self.get_config(
+                self.Schema,
+                {
+                    'repo_url': "https://github.com/mkdocs/mkdocs",
+                    'edit_uri_template': 'edit/master/{path',
+                },
+            )
+
+        with self.expect_error(edit_uri_template=re.compile(r'.*\bz\b.*')):
+            self.get_config(
+                self.Schema,
+                {
+                    'repo_url': "https://github.com/mkdocs/mkdocs",
+                    'edit_uri_template': 'edit/master/{path!z}',
+                },
+            )
+
+        with self.expect_error(edit_uri_template="Unknown template substitute: 'foo'"):
+            self.get_config(
+                self.Schema,
+                {
+                    'repo_url': "https://github.com/mkdocs/mkdocs",
+                    'edit_uri_template': 'edit/master/{foo}',
+                },
+            )
+
+    def test_edit_uri_template_warning(self):
+        conf = self.get_config(
+            self.Schema,
+            {
+                'repo_url': "https://github.com/mkdocs/mkdocs",
+                'edit_uri': 'edit',
+                'edit_uri_template': 'edit/master/{path}',
+            },
+            warnings=dict(
+                edit_uri_template="The option 'edit_uri' has no effect when 'edit_uri_template' is set."
+            ),
+        )
+        self.assertEqual(conf['edit_uri_template'], 'edit/master/{path}')
+
+
+class ListOfItemsTest(TestCase):
+    def test_int_type(self):
+        class Schema:
+            option = c.ListOfItems(c.Type(int))
+
+        conf = self.get_config(Schema, {'option': [1, 2, 3]})
+        self.assertEqual(conf['option'], [1, 2, 3])
+
+        with self.expect_error(
+            option="Expected type: <class 'int'> but received: <class 'NoneType'>"
+        ):
+            conf = self.get_config(Schema, {'option': [1, None, 3]})
+
+    def test_combined_float_type(self):
+        class Schema:
+            option = c.ListOfItems(c.Type((int, float)))
+
+        conf = self.get_config(Schema, {'option': [1.4, 2, 3]})
+        self.assertEqual(conf['option'], [1.4, 2, 3])
+
+        with self.expect_error(
+            option="Expected type: (<class 'int'>, <class 'float'>) but received: <class 'str'>"
+        ):
+            self.get_config(Schema, {'option': ['a']})
+
+    def test_list_default(self):
+        class Schema:
+            option = c.ListOfItems(c.Type(int), default=[])
+
+        conf = self.get_config(Schema, {})
+        self.assertEqual(conf['option'], [])
+
+        conf = self.get_config(Schema, {'option': None})
+        self.assertEqual(conf['option'], [])
+
+    def test_none_without_default(self):
+        class Schema:
+            option = c.ListOfItems(c.Type(str))
+
+        with self.expect_error(option="Required configuration not provided."):
+            conf = self.get_config(Schema, {})
+
+        with self.expect_error(option="Required configuration not provided."):
+            conf = self.get_config(Schema, {'option': None})
+
+        conf = self.get_config(Schema, {'option': ['foo']})
+        self.assertEqual(conf['option'], ['foo'])
+
+    def test_string_not_a_list_of_strings(self):
+        class Schema:
+            option = c.ListOfItems(c.Type(str))
+
+        with self.expect_error(option="Expected a list of items, but a <class 'str'> was given."):
+            self.get_config(Schema, {'option': 'foo'})
+
+    def test_post_validation_error(self):
+        class Schema:
+            option = c.ListOfItems(c.IpAddress())
+
+        with self.expect_error(option="'asdf' is not a valid port"):
+            self.get_config(Schema, {'option': ["localhost:8000", "1.2.3.4:asdf"]})
+
+
+class FilesystemObjectTest(TestCase):
+    def test_valid_dir(self):
+        for cls in c.Dir, c.FilesystemObject:
+            with self.subTest(cls):
+                d = os.path.dirname(__file__)
+
+                class Schema:
+                    option = cls(exists=True)
+
+                conf = self.get_config(Schema, {'option': d})
+                self.assertEqual(conf['option'], d)
+
+    def test_valid_file(self):
+        for cls in c.File, c.FilesystemObject:
+            with self.subTest(cls):
+                f = __file__
+
+                class Schema:
+                    option = cls(exists=True)
+
+                conf = self.get_config(Schema, {'option': f})
+                self.assertEqual(conf['option'], f)
+
+    def test_missing_without_exists(self):
+        for cls in c.Dir, c.File, c.FilesystemObject:
+            with self.subTest(cls):
+                d = os.path.join("not", "a", "real", "path", "I", "hope")
+
+                class Schema:
+                    option = cls()
+
+                conf = self.get_config(Schema, {'option': d})
+                self.assertEqual(conf['option'], os.path.abspath(d))
+
+    def test_missing_but_required(self):
+        for cls in c.Dir, c.File, c.FilesystemObject:
+            with self.subTest(cls):
+                d = os.path.join("not", "a", "real", "path", "I", "hope")
+
+                class Schema:
+                    option = cls(exists=True)
+
+                with self.expect_error(option=re.compile(r"The path '.+' isn't an existing .+")):
+                    self.get_config(Schema, {'option': d})
+
+    def test_not_a_dir(self):
+        d = __file__
+
+        class Schema:
+            option = c.Dir(exists=True)
+
+        with self.expect_error(option=re.compile(r"The path '.+' isn't an existing directory.")):
+            self.get_config(Schema, {'option': d})
+
+    def test_not_a_file(self):
+        d = os.path.dirname(__file__)
+
+        class Schema:
+            option = c.File(exists=True)
+
+        with self.expect_error(option=re.compile(r"The path '.+' isn't an existing file.")):
+            self.get_config(Schema, {'option': d})
+
+    def test_incorrect_type_error(self):
+        for cls in c.Dir, c.File, c.FilesystemObject:
+            with self.subTest(cls):
+
+                class Schema:
+                    option = cls()
+
+                with self.expect_error(
+                    option="Expected type: <class 'str'> but received: <class 'int'>"
+                ):
+                    self.get_config(Schema, {'option': 1})
+                with self.expect_error(
+                    option="Expected type: <class 'str'> but received: <class 'list'>"
+                ):
+                    self.get_config(Schema, {'option': []})
+
+    def test_with_unicode(self):
+        for cls in c.Dir, c.File, c.FilesystemObject:
+            with self.subTest(cls):
+
+                class Schema:
+                    dir = cls()
+
+                conf = self.get_config(Schema, {'dir': 'юникод'})
+                self.assertIsInstance(conf['dir'], str)
+
+    def test_dir_bytes(self):
+        class Schema:
+            dir = c.Dir()
+
+        with self.expect_error(dir="Expected type: <class 'str'> but received: <class 'bytes'>"):
+            self.get_config(Schema, {'dir': b'foo'})
+
+    def test_config_dir_prepended(self):
+        for cls in c.Dir, c.File, c.FilesystemObject:
+            with self.subTest(cls):
+                base_path = os.path.dirname(os.path.abspath(__file__))
+
+                class Schema:
+                    dir = cls()
+
+                conf = self.get_config(
+                    Schema,
+                    {'dir': 'foo'},
+                    config_file_path=os.path.join(base_path, 'mkdocs.yml'),
+                )
+                self.assertEqual(conf['dir'], os.path.join(base_path, 'foo'))
+
+    def test_site_dir_is_config_dir_fails(self):
+        class Schema:
+            dir = c.DocsDir()
+
+        with self.expect_error(
+            dir="The 'dir' should not be the parent directory of the config file. "
+            "Use a child directory instead so that the 'dir' is a sibling of the config file."
+        ):
+            self.get_config(
+                Schema,
+                {'dir': '.'},
+                config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml'),
+            )
+
+
+class ListOfPathsTest(TestCase):
+    def test_valid_path(self):
+        paths = [os.path.dirname(__file__)]
+
+        class Schema:
+            option = c.ListOfPaths()
+
+        self.get_config(Schema, {'option': paths})
+
+    def test_missing_path(self):
+        paths = [os.path.join("does", "not", "exist", "i", "hope")]
+
+        class Schema:
+            option = c.ListOfPaths()
+
+        with self.expect_error(
+            option=f"The path '{paths[0]}' isn't an existing file or directory."
+        ):
+            self.get_config(Schema, {'option': paths})
+
+    def test_non_path(self):
+        paths = [os.path.dirname(__file__), None]
+
+        class Schema:
+            option = c.ListOfPaths()
+
+        with self.expect_error(
+            option="Expected type: <class 'str'> but received: <class 'NoneType'>"
+        ):
+            self.get_config(Schema, {'option': paths})
+
+    def test_empty_list(self):
+        class Schema:
+            option = c.ListOfPaths()
+
+        conf = self.get_config(Schema, {'option': []})
+        self.assertEqual(conf['option'], [])
+
+    def test_non_list(self):
+        paths = os.path.dirname(__file__)
+
+        class Schema:
+            option = c.ListOfPaths()
+
+        with self.expect_error(option="Expected a list of items, but a <class 'str'> was given."):
+            self.get_config(Schema, {'option': paths})
+
+    def test_file(self):
+        paths = [__file__]
+
+        class Schema:
+            option = c.ListOfPaths()
+
+        self.get_config(Schema, {'option': paths})
+
+    @tempdir()
+    def test_paths_localized_to_config(self, base_path):
+        with open(os.path.join(base_path, 'foo'), 'w') as f:
+            f.write('hi')
+
+        class Schema:
+            watch = c.ListOfPaths()
+
+        conf = self.get_config(
+            Schema,
+            {'watch': ['foo']},
+            config_file_path=os.path.join(base_path, 'mkdocs.yml'),
+        )
+
+        self.assertEqual(conf['watch'], [os.path.join(base_path, 'foo')])
+
+
+class SiteDirTest(TestCase):
+    class Schema:
+        site_dir = c.SiteDir()
+        docs_dir = c.Dir()
+
+    def test_doc_dir_in_site_dir(self):
+        j = os.path.join
+        # The parent dir is not the same on every system, so use the actual dir name
+        parent_dir = mkdocs.__file__.split(os.sep)[-3]
+
+        test_configs = (
+            {'docs_dir': j('site', 'docs'), 'site_dir': 'site'},
+            {'docs_dir': 'docs', 'site_dir': '.'},
+            {'docs_dir': '.', 'site_dir': '.'},
+            {'docs_dir': 'docs', 'site_dir': ''},
+            {'docs_dir': '', 'site_dir': ''},
+            {'docs_dir': j('..', parent_dir, 'docs'), 'site_dir': 'docs'},
+            {'docs_dir': 'docs', 'site_dir': '/'},
+        )
+
+        for test_config in test_configs:
+            with self.subTest(test_config):
+                with self.expect_error(
+                    site_dir=re.compile(r"The 'docs_dir' should not be within the 'site_dir'.*")
+                ):
+                    self.get_config(self.Schema, test_config)
+
+    def test_site_dir_in_docs_dir(self):
+        j = os.path.join
+
+        test_configs = (
+            {'docs_dir': 'docs', 'site_dir': j('docs', 'site')},
+            {'docs_dir': '.', 'site_dir': 'site'},
+            {'docs_dir': '', 'site_dir': 'site'},
+            {'docs_dir': '/', 'site_dir': 'site'},
+        )
+
+        for test_config in test_configs:
+            with self.subTest(test_config):
+                with self.expect_error(
+                    site_dir=re.compile(r"The 'site_dir' should not be within the 'docs_dir'.*")
+                ):
+                    self.get_config(self.Schema, test_config)
+
+    def test_common_prefix(self):
+        """Legitimate settings with common prefixes should not fail validation."""
+
+        test_configs = (
+            {'docs_dir': 'docs', 'site_dir': 'docs-site'},
+            {'docs_dir': 'site-docs', 'site_dir': 'site'},
+        )
+
+        for test_config in test_configs:
+            with self.subTest(test_config):
+                self.get_config(self.Schema, test_config)
+
+
+class ThemeTest(TestCase):
+    def test_theme_as_string(self):
+        class Schema:
+            option = c.Theme()
+
+        conf = self.get_config(Schema, {'option': "mkdocs"})
+        self.assertEqual(conf['option'].name, 'mkdocs')
+
+    def test_uninstalled_theme_as_string(self):
+        class Schema:
+            option = c.Theme()
+
+        with self.expect_error(
+            option=re.compile(
+                r"Unrecognised theme name: 'mkdocs2'. The available installed themes are: .+"
+            )
+        ):
+            self.get_config(Schema, {'option': "mkdocs2"})
+
+    def test_theme_default(self):
+        class Schema:
+            option = c.Theme(default='mkdocs')
+
+        conf = self.get_config(Schema, {'option': None})
+        self.assertEqual(conf['option'].name, 'mkdocs')
+
+    def test_theme_as_simple_config(self):
+        config = {
+            'name': 'mkdocs',
+        }
+
+        class Schema:
+            option = c.Theme()
+
+        conf = self.get_config(Schema, {'option': config})
+        self.assertEqual(conf['option'].name, 'mkdocs')
+
+    @tempdir()
+    def test_theme_as_complex_config(self, custom_dir):
+        config = {
+            'name': 'mkdocs',
+            'custom_dir': custom_dir,
+            'static_templates': ['sitemap.html'],
+            'show_sidebar': False,
+        }
+
+        class Schema:
+            option = c.Theme()
+
+        conf = self.get_config(Schema, {'option': config})
+        self.assertEqual(conf['option'].name, 'mkdocs')
+        self.assertIn(custom_dir, conf['option'].dirs)
+        self.assertEqual(
+            conf['option'].static_templates,
+            {'404.html', 'sitemap.xml', 'sitemap.html'},
+        )
+        self.assertEqual(conf['option']['show_sidebar'], False)
+
+    def test_theme_name_is_none(self):
+        config = {
+            'name': None,
+        }
+
+        class Schema:
+            option = c.Theme()
+
+        with self.expect_error(option="At least one of 'name' or 'custom_dir' must be defined."):
+            self.get_config(Schema, {'option': config})
+
+    def test_theme_config_missing_name(self):
+        config = {
+            'custom_dir': 'custom',
+        }
+
+        class Schema:
+            option = c.Theme()
+
+        with self.expect_error(option="No theme name set."):
+            self.get_config(Schema, {'option': config})
+
+    def test_uninstalled_theme_as_config(self):
+        config = {
+            'name': 'mkdocs2',
+        }
+
+        class Schema:
+            option = c.Theme()
+
+        with self.expect_error(
+            option=re.compile(
+                r"Unrecognised theme name: 'mkdocs2'. The available installed themes are: .+"
+            )
+        ):
+            self.get_config(Schema, {'option': config})
+
+    def test_theme_invalid_type(self):
+        config = ['mkdocs2']
+
+        class Schema:
+            option = c.Theme()
+
+        with self.expect_error(
+            option="Invalid type <class 'list'>. Expected a string or key/value pairs."
+ ): + self.get_config(Schema, {'option': config}) + + def test_post_validation_none_theme_name_and_missing_custom_dir(self): + config = { + 'theme': { + 'name': None, + }, + } + + class Schema: + theme = c.Theme() + + with self.expect_error(theme="At least one of 'name' or 'custom_dir' must be defined."): + self.get_config(Schema, config) + + @tempdir() + def test_post_validation_inexisting_custom_dir(self, abs_base_path): + path = os.path.join(abs_base_path, 'inexisting_custom_dir') + config = { + 'theme': { + 'name': None, + 'custom_dir': path, + }, + } + + class Schema: + theme = c.Theme() + + with self.expect_error(theme=f"The path set in custom_dir ('{path}') does not exist."): + self.get_config(Schema, config) + + def test_post_validation_locale_none(self): + config = { + 'theme': { + 'name': 'mkdocs', + 'locale': None, + }, + } + + class Schema: + theme = c.Theme() + + with self.expect_error(theme="'locale' must be a string."): + self.get_config(Schema, config) + + def test_post_validation_locale_invalid_type(self): + config = { + 'theme': { + 'name': 'mkdocs', + 'locale': 0, + }, + } + + class Schema: + theme = c.Theme() + + with self.expect_error(theme="'locale' must be a string."): + self.get_config(Schema, config) + + def test_post_validation_locale(self): + config = { + 'theme': { + 'name': 'mkdocs', + 'locale': 'fr', + }, + } + + class Schema: + theme = c.Theme() + + conf = self.get_config(Schema, config) + self.assertEqual(conf['theme']['locale'].language, 'fr') + + +class NavTest(TestCase): + class Schema: + option = c.Nav() + + def test_old_format(self): + with self.expect_error( + option="Expected nav item to be a string or dict, got a list: ['index.md']" + ): + self.get_config(self.Schema, {'option': [['index.md']]}) + + def test_provided_dict(self): + conf = self.get_config(self.Schema, {'option': ['index.md', {"Page": "page.md"}]}) + self.assertEqual(conf['option'], ['index.md', {'Page': 'page.md'}]) + + def test_provided_empty(self): + conf = self.get_config(self.Schema, {'option': []}) + self.assertEqual(conf['option'], None) + + def test_normal_nav(self): + nav_yaml = textwrap.dedent( + '''\ + - Home: index.md + - getting-started.md + - User Guide: + - Overview: user-guide/index.md + - Installation: user-guide/installation.md + ''' + ) + nav = yaml_load(io.StringIO(nav_yaml)) + + conf = self.get_config(self.Schema, {'option': nav}) + self.assertEqual(conf['option'], nav) + + def test_invalid_type_dict(self): + with self.expect_error(option="Expected nav to be a list, got a dict: {}"): + self.get_config(self.Schema, {'option': {}}) + + def test_invalid_type_int(self): + with self.expect_error(option="Expected nav to be a list, got a int: 5"): + self.get_config(self.Schema, {'option': 5}) + + def test_invalid_item_int(self): + with self.expect_error(option="Expected nav item to be a string or dict, got a int: 1"): + self.get_config(self.Schema, {'option': [1]}) + + def test_invalid_item_none(self): + with self.expect_error(option="Expected nav item to be a string or dict, got None"): + self.get_config(self.Schema, {'option': [None]}) + + def test_invalid_children_config_int(self): + with self.expect_error(option="Expected nav to be a list, got a int: 1"): + self.get_config(self.Schema, {'option': [{"foo.md": [{"bar.md": 1}]}]}) + + def test_invalid_children_config_none(self): + with self.expect_error(option="Expected nav to be a list, got None"): + self.get_config(self.Schema, {'option': [{"foo.md": None}]}) + + def test_invalid_children_empty_dict(self): + nav = 
['foo', {}] + with self.expect_error(option="Expected nav item to be a dict of size 1, got a dict: {}"): + self.get_config(self.Schema, {'option': nav}) + + def test_invalid_nested_list(self): + nav = [{'aaa': [[{"bbb": "user-guide/index.md"}]]}] + with self.expect_error( + option="Expected nav item to be a string or dict, got a list: [{'bbb': 'user-guide/index.md'}]" + ): + self.get_config(self.Schema, {'option': nav}) + + def test_invalid_children_oversized_dict(self): + nav = [{"aaa": [{"bbb": "user-guide/index.md", "ccc": "user-guide/installation.md"}]}] + with self.expect_error( + option="Expected nav item to be a dict of size 1, got dict with keys ('bbb', 'ccc')" + ): + self.get_config(self.Schema, {'option': nav}) + + def test_warns_for_dict(self): + self.get_config( + self.Schema, + {'option': [{"a": {"b": "c.md", "d": "e.md"}}]}, + warnings=dict(option="Expected nav to be a list, got dict with keys ('b', 'd')"), + ) + + +class PrivateTest(TestCase): + def test_defined(self): + class Schema: + option = c.Private() + + with self.expect_error(option="For internal use only."): + self.get_config(Schema, {'option': 'somevalue'}) + + +class SubConfigTest(TestCase): + def test_subconfig_wrong_type(self): + # Test that an error is raised if subconfig does not receive a dict + class Schema: + option = c.SubConfig() + + for val in "not_a_dict", ("not_a_dict",), ["not_a_dict"]: + with self.subTest(val): + with self.expect_error( + option=re.compile( + r"The configuration is invalid. Expected a key-value mapping " + r"\(dict\) but received: .+" + ) + ): + self.get_config(Schema, {'option': val}) + + def test_subconfig_ignored(self): + """Default behaviour of subconfig: validation is ignored""" + + # Nominal + class Schema1: + option = c.SubConfig(('cc', c.Choice(('foo', 'bar')))) + + conf = self.get_config(Schema1, {'option': {'cc': 'foo'}}) + self.assertEqual(conf, {'option': {'cc': 'foo'}}) + + # Invalid option: No error + class Schema2: + option = c.SubConfig(('cc', c.Choice(('foo', 'bar')))) + + conf = self.get_config(Schema2, {'option': {'cc': True}}) + self.assertEqual(conf, {'option': {'cc': True}}) + + # Missing option: Will be considered optional with default None + class Schema3: + option = c.SubConfig(('cc', c.Choice(('foo', 'bar')))) + + conf = self.get_config(Schema3, {'option': {}}) + self.assertEqual(conf, {'option': {'cc': None}}) + + # Unknown option: No warning + class Schema4: + option = c.SubConfig(('cc', c.Choice(('foo', 'bar')))) + + conf = self.get_config(Schema4, {'option': {'unknown_key_is_ok': 0}}) + self.assertEqual(conf, {'option': {'cc': None, 'unknown_key_is_ok': 0}}) + + def test_subconfig_unknown_option(self): + class Schema: + option = c.SubConfig(validate=True) + + conf = self.get_config( + Schema, + {'option': {'unknown': 0}}, + warnings=dict(option="Sub-option 'unknown': Unrecognised configuration name: unknown"), + ) + self.assertEqual(conf['option'], {"unknown": 0}) + + def test_subconfig_invalid_option(self): + class Schema: + option = c.SubConfig( + ('cc', c.Choice(('foo', 'bar'))), + validate=True, + ) + + with self.expect_error( + option="Sub-option 'cc': Expected one of: ('foo', 'bar') but received: True" + ): + self.get_config(Schema, {'option': {'cc': True}}) + + def test_subconfig_normal(self): + class Schema: + option = c.SubConfig( + ('cc', c.Choice(('foo', 'bar'))), + ) + + conf = self.get_config(Schema, {'option': {'cc': 'foo'}}) + self.assertEqual(conf['option'], {'cc': 'foo'}) + + def test_config_file_path_pass_through(self): + """Necessary 
to ensure FilesystemObject validates the correct path""" + + passed_config_path = None + + class SubType(c.BaseConfigOption): + def pre_validation(self, config, key_name): + nonlocal passed_config_path + passed_config_path = config.config_file_path + + class Schema: + sub = c.SubConfig(('opt', SubType())) + + config_path = "foo/mkdocs.yaml" + self.get_config(Schema, {"sub": {"opt": "bar"}}, config_file_path=config_path) + self.assertEqual(passed_config_path, config_path) + + +class ConfigItemsTest(TestCase): + def test_subconfig_with_multiple_items(self): + # This had a bug where subsequent items would get merged into the same dict. + class Schema: + the_items = c.ConfigItems( + ("value", c.Type(str)), + ) + + conf = self.get_config( + Schema, + { + 'the_items': [ + {'value': 'a'}, + {'value': 'b'}, + ] + }, + ) + self.assertEqual(conf['the_items'], [{'value': 'a'}, {'value': 'b'}]) + + def test_optional(self): + class Schema: + sub = c.ListOfItems( + c.SubConfig( + ('opt', c.Type(int)), + validate=True, + ), + default=[], + ) + + conf = self.get_config(Schema, {}) + self.assertEqual(conf['sub'], []) + + conf = self.get_config(Schema, {'sub': None}) + self.assertEqual(conf['sub'], []) + + conf = self.get_config(Schema, {'sub': [{'opt': 1}, {}]}) + self.assertEqual(conf['sub'], [{'opt': 1}, {'opt': None}]) + + conf = self.get_config(Schema, {'sub': []}) + + conf = self.get_config(Schema, {'sub': [{'opt': 1}, {'opt': 2}]}) + self.assertEqual(conf['sub'], [{'opt': 1}, {'opt': 2}]) + + def test_required(self): + class Schema: + sub = c.ListOfItems( + c.SubConfig( + ('opt', c.Type(int, required=True)), + validate=True, + ) + ) + + with self.expect_error(sub="Required configuration not provided."): + conf = self.get_config(Schema, {}) + + with self.expect_error(sub="Required configuration not provided."): + conf = self.get_config(Schema, {'sub': None}) + + with self.expect_error( + sub="Sub-option 'opt': Expected type: <class 'int'> but received: <class 'str'>" + ): + conf = self.get_config(Schema, {'sub': [{'opt': 'asdf'}, {}]}) + + conf = self.get_config(Schema, {'sub': []}) + + conf = self.get_config(Schema, {'sub': [{'opt': 1}, {'opt': 2}]}) + self.assertEqual(conf['sub'], [{'opt': 1}, {'opt': 2}]) + + with self.expect_error( + sub="Sub-option 'opt': Expected type: <class 'int'> but received: <class 'str'>" + ): + self.get_config(Schema, {'sub': [{'opt': 'z'}, {'opt': 2}]}) + + with self.expect_error( + sub="Sub-option 'opt': Expected type: <class 'int'> but received: <class 'str'>" + ): + conf = self.get_config(Schema, {'sub': [{'opt': 'z'}, {'opt': 2}]}) + + with self.expect_error( + sub="The configuration is invalid. 
Expected a key-value mapping " + "(dict) but received: " + ): + conf = self.get_config(Schema, {'sub': [1, 2]}) + + +class MarkdownExtensionsTest(TestCase): + @patch('markdown.Markdown') + def test_simple_list(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private() + + config = { + 'markdown_extensions': ['foo', 'bar'], + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['foo', 'bar']) + self.assertEqual(conf['mdx_configs'], {}) + + @patch('markdown.Markdown') + def test_list_dicts(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private() + + config = { + 'markdown_extensions': [ + {'foo': {'foo_option': 'foo value'}}, + {'bar': {'bar_option': 'bar value'}}, + {'baz': None}, + ] + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['foo', 'bar', 'baz']) + self.assertEqual( + conf['mdx_configs'], + { + 'foo': {'foo_option': 'foo value'}, + 'bar': {'bar_option': 'bar value'}, + }, + ) + + @patch('markdown.Markdown') + def test_mixed_list(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private() + + config = { + 'markdown_extensions': [ + 'foo', + {'bar': {'bar_option': 'bar value'}}, + ] + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['foo', 'bar']) + self.assertEqual( + conf['mdx_configs'], + { + 'bar': {'bar_option': 'bar value'}, + }, + ) + + @patch('markdown.Markdown') + def test_dict_of_dicts(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private() + + config = { + 'markdown_extensions': { + 'foo': {'foo_option': 'foo value'}, + 'bar': {'bar_option': 'bar value'}, + 'baz': {}, + } + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['foo', 'bar', 'baz']) + self.assertEqual( + conf['mdx_configs'], + { + 'foo': {'foo_option': 'foo value'}, + 'bar': {'bar_option': 'bar value'}, + }, + ) + + @patch('markdown.Markdown') + def test_builtins(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions(builtins=['meta', 'toc']) + mdx_configs = c.Private() + + config = { + 'markdown_extensions': ['foo', 'bar'], + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['meta', 'toc', 'foo', 'bar']) + self.assertEqual(conf['mdx_configs'], {}) + + def test_duplicates(self): + class Schema: + markdown_extensions = c.MarkdownExtensions(builtins=['meta', 'toc']) + mdx_configs = c.Private() + + config = { + 'markdown_extensions': ['meta', 'toc'], + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['meta', 'toc']) + self.assertEqual(conf['mdx_configs'], {}) + + def test_builtins_config(self): + class Schema: + markdown_extensions = c.MarkdownExtensions(builtins=['meta', 'toc']) + mdx_configs = c.Private() + + config = { + 'markdown_extensions': [ + {'toc': {'permalink': True}}, + ], + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], ['meta', 'toc']) + self.assertEqual(conf['mdx_configs'], {'toc': {'permalink': True}}) + + @patch('markdown.Markdown') + def test_configkey(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions(configkey='bar') + bar = c.Private() + + config = { + 'markdown_extensions': [ + {'foo': {'foo_option': 'foo value'}}, + ] + } + conf = self.get_config(Schema, 
config) + self.assertEqual(conf['markdown_extensions'], ['foo']) + self.assertEqual( + conf['bar'], + { + 'foo': {'foo_option': 'foo value'}, + }, + ) + + def test_missing_default(self): + class Schema: + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private() + + conf = self.get_config(Schema, {}) + self.assertEqual(conf['markdown_extensions'], []) + self.assertEqual(conf['mdx_configs'], {}) + + def test_none(self): + class Schema: + markdown_extensions = c.MarkdownExtensions(default=[]) + mdx_configs = c.Private() + + config = { + 'markdown_extensions': None, + } + conf = self.get_config(Schema, config) + self.assertEqual(conf['markdown_extensions'], []) + self.assertEqual(conf['mdx_configs'], {}) + + @patch('markdown.Markdown') + def test_not_list(self, mock_md): + class Schema: + option = c.MarkdownExtensions() + + with self.expect_error(option="Invalid Markdown Extensions configuration"): + self.get_config(Schema, {'option': 'not a list'}) + + @patch('markdown.Markdown') + def test_invalid_config_option(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + + config = { + 'markdown_extensions': [ + {'foo': 'not a dict'}, + ], + } + with self.expect_error( + markdown_extensions="Invalid config options for Markdown Extension 'foo'." + ): + self.get_config(Schema, config) + + @patch('markdown.Markdown') + def test_invalid_config_item(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + + config = { + 'markdown_extensions': [ + ['not a dict'], + ], + } + with self.expect_error(markdown_extensions="Invalid Markdown Extensions configuration"): + self.get_config(Schema, config) + + @patch('markdown.Markdown') + def test_invalid_dict_item(self, mock_md): + class Schema: + markdown_extensions = c.MarkdownExtensions() + + config = { + 'markdown_extensions': [ + {'key1': 'value', 'key2': 'too many keys'}, + ], + } + with self.expect_error(markdown_extensions="Invalid Markdown Extensions configuration"): + self.get_config(Schema, config) + + def test_unknown_extension(self): + class Schema: + markdown_extensions = c.MarkdownExtensions() + + config = { + 'markdown_extensions': ['unknown'], + } + with self.expect_error( + markdown_extensions=re.compile(r"Failed to load extension 'unknown'.\n.+") + ): + self.get_config(Schema, config) + + def test_multiple_markdown_config_instances(self): + # This had a bug where an extension config would persist to separate + # config instances that didn't specify extensions. 
+ class Schema: + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private() + + conf = self.get_config( + Schema, + { + 'markdown_extensions': [{'toc': {'permalink': '##'}}], + }, + ) + self.assertEqual(conf['mdx_configs']['toc'], {'permalink': '##'}) + + conf = self.get_config( + Schema, + {}, + ) + self.assertIsNone(conf['mdx_configs'].get('toc')) + + +class HooksTest(TestCase): + class Schema: + plugins = c.Plugins(default=[]) + hooks = c.Hooks('plugins') + + @tempdir() + def test_hooks(self, src_dir): + write_file( + b'def on_page_markdown(markdown, **kwargs): return markdown.replace("f", "z")', + os.path.join(src_dir, 'hooks', 'my_hook.py'), + ) + write_file( + b'foo foo', + os.path.join(src_dir, 'docs', 'index.md'), + ) + conf = self.get_config( + self.Schema, + {'hooks': ['hooks/my_hook.py']}, + config_file_path=os.path.join(src_dir, 'mkdocs.yml'), + ) + self.assertIn('hooks/my_hook.py', conf['plugins']) + hook = conf['plugins']['hooks/my_hook.py'] + self.assertTrue(hasattr(hook, 'on_page_markdown')) + self.assertEqual( + {**conf['plugins'].events, 'page_markdown': [hook.on_page_markdown]}, + conf['plugins'].events, + ) + self.assertEqual(hook.on_page_markdown('foo foo'), 'zoo zoo') + self.assertFalse(hasattr(hook, 'on_nav')) + + +class SchemaTest(TestCase): + def test_copy(self): + copy.deepcopy( + base.LegacyConfig( + (('foo', c.MarkdownExtensions()),), + ), + ) + + copy.deepcopy(self.get_config(IpAddressTest.Schema, {'option': '1.2.3.4:5678'})) + copy.deepcopy(IpAddressTest.Schema) + copy.deepcopy(base.get_schema(IpAddressTest.Schema)) + + copy.deepcopy(self.get_config(EditURITest.Schema, {})) + copy.deepcopy(EditURITest.Schema) + copy.deepcopy(base.get_schema(EditURITest.Schema)) diff --git a/mkdocs/tests/config/config_options_tests.py b/mkdocs/tests/config/config_options_tests.py index 7d567fb..7fd2149 100644 --- a/mkdocs/tests/config/config_options_tests.py +++ b/mkdocs/tests/config/config_options_tests.py @@ -1,447 +1,1079 @@ +from __future__ import annotations + +import contextlib +import copy +import io +import logging import os +import re import sys +import textwrap import unittest +from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypeVar, Union from unittest.mock import patch +if TYPE_CHECKING: + from typing_extensions import assert_type +else: + + def assert_type(val, typ): + return None + + import mkdocs -from mkdocs.config import config_options +from mkdocs.config import config_options as c +from mkdocs.config import defaults from mkdocs.config.base import Config +from mkdocs.plugins import BasePlugin, PluginCollection +from mkdocs.tests.base import tempdir +from mkdocs.theme import Theme +from mkdocs.utils import write_file, yaml_load + +SomeConfig = TypeVar('SomeConfig', bound=Config) + + +class UnexpectedError(Exception): + pass + + +class TestCase(unittest.TestCase): + @contextlib.contextmanager + def expect_error(self, **kwargs): + [(key, msg)] = kwargs.items() + with self.assertRaises(UnexpectedError) as cm: + yield + if isinstance(msg, re.Pattern): + self.assertRegex(str(cm.exception), f'^{key}="{msg.pattern}"$') + else: + self.assertEqual(f'{key}="{msg}"', str(cm.exception)) + def get_config( + self, + config_class: type[SomeConfig], + cfg: dict[str, Any], + warnings: dict[str, str] = {}, + config_file_path=None, + ) -> SomeConfig: + config = config_class(config_file_path=config_file_path) + config.load_dict(cfg) + actual_errors, actual_warnings = config.validate() + if actual_errors: + raise UnexpectedError(', '.join(f'{key}="{msg}"' 
for key, msg in actual_errors)) + self.assertEqual(warnings, dict(actual_warnings)) + return config + + +class TypeTest(TestCase): + def test_single_type(self) -> None: + class Schema(Config): + option = c.Type(str) + + conf = self.get_config(Schema, {'option': "Testing"}) + assert_type(conf.option, str) + self.assertEqual(conf.option, "Testing") + + def test_multiple_types(self) -> None: + class Schema(Config): + option = c.Type((list, tuple)) + + conf = self.get_config(Schema, {'option': [1, 2, 3]}) + self.assertEqual(conf.option, [1, 2, 3]) + + conf = self.get_config(Schema, {'option': (1, 2, 3)}) + self.assertEqual(conf.option, (1, 2, 3)) + + with self.expect_error( + option="Expected type: (, ) but received: " + ): + self.get_config(Schema, {'option': {'a': 1}}) + + def test_length(self) -> None: + class Schema(Config): + option = c.Type(str, length=7) + + conf = self.get_config(Schema, {'option': "Testing"}) + assert_type(conf.option, str) + self.assertEqual(conf.option, "Testing") + + with self.expect_error( + option="Expected type: with length 7 but received: 'Testing Long' with length 12" + ): + self.get_config(Schema, {'option': "Testing Long"}) + + def test_optional_with_default(self) -> None: + with self.assertRaisesRegex(ValueError, "doesn't need to be wrapped into Optional"): + c.Optional(c.Type(int, default=5)) + + +class ChoiceTest(TestCase): + def test_required(self) -> None: + class Schema(Config): + option = c.Choice(('python', 'node')) + + conf = self.get_config(Schema, {'option': 'python'}) + assert_type(conf.option, str) + self.assertEqual(conf.option, 'python') + + def test_optional(self) -> None: + class Schema(Config): + option = c.Optional(c.Choice(('python', 'node'))) + + conf = self.get_config(Schema, {'option': 'python'}) + assert_type(conf.option, Optional[str]) + self.assertEqual(conf.option, 'python') + + conf = self.get_config(Schema, {}) + self.assertEqual(conf.option, None) + + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf.option, None) + + def test_default(self) -> None: + class Schema(Config): + option = c.Choice(('a', 'b', 'c'), default='b') + + conf = self.get_config(Schema, {}) + assert_type(conf.option, str) + self.assertEqual(conf.option, 'b') + + conf = self.get_config(Schema, {}) + self.assertEqual(conf.option, 'b') + + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf.option, 'b') + + with self.expect_error(option="Expected one of: ('a', 'b', 'c') but received: 'go'"): + self.get_config(Schema, {'option': 'go'}) + + def test_invalid_default(self) -> None: + with self.assertRaises(ValueError): + c.Choice(('a', 'b'), default='c') + with self.assertRaises(ValueError): + c.Choice(('a', 'b'), default='c', required=True) + + def test_invalid_choice(self) -> None: + class Schema(Config): + option = c.Choice(('python', 'node')) -class OptionallyRequiredTest(unittest.TestCase): + with self.expect_error(option="Expected one of: ('python', 'node') but received: 'go'"): + self.get_config(Schema, {'option': 'go'}) - def test_empty(self): + def test_invalid_choices(self) -> None: + with self.assertRaises(ValueError): + c.Choice('') + with self.assertRaises(ValueError): + c.Choice([]) - option = config_options.OptionallyRequired() - value = option.validate(None) - self.assertEqual(value, None) - self.assertEqual(option.is_required(), False) +class DeprecatedTest(TestCase): + def test_deprecated_option_simple(self) -> None: + class Schema(Config): + d = c.Deprecated() - def test_required(self): + 
self.get_config( + Schema, + {'d': 'value'}, + warnings=dict( + d="The configuration option 'd' has been deprecated and will be removed in a " + "future release." + ), + ) - option = config_options.OptionallyRequired(required=True) - self.assertRaises(config_options.ValidationError, - option.validate, None) + def test_deprecated_option_message(self) -> None: + class Schema(Config): + d = c.Deprecated(message='custom message for {} key') - self.assertEqual(option.is_required(), True) + self.get_config(Schema, {'d': 'value'}, warnings={'d': 'custom message for d key'}) - def test_required_no_default(self): + def test_deprecated_option_with_type(self) -> None: + class Schema(Config): + d = c.Deprecated(option_type=c.Type(str)) - option = config_options.OptionallyRequired(required=True) - value = option.validate(2) - self.assertEqual(2, value) + self.get_config( + Schema, + {'d': 'value'}, + warnings=dict( + d="The configuration option 'd' has been deprecated and will be removed in a " + "future release." + ), + ) - def test_default(self): + def test_deprecated_option_with_invalid_type(self) -> None: + class Schema(Config): + d = c.Deprecated(option_type=c.Type(list)) + + with self.expect_error(d="Expected type: but received: "): + self.get_config( + Schema, + {'d': 'value'}, + warnings=dict( + d="The configuration option 'd' has been deprecated and will be removed in a " + "future release." + ), + ) + + def test_removed_option(self) -> None: + class Schema(Config): + d = c.Deprecated(removed=True, moved_to='foo') + + with self.expect_error( + d="The configuration option 'd' was removed from MkDocs. Use 'foo' instead.", + ): + self.get_config(Schema, {'d': 'value'}) + + def test_deprecated_option_with_type_undefined(self) -> None: + class Schema(Config): + option = c.Deprecated(option_type=c.Type(str)) + + self.get_config(Schema, {'option': None}) + + def test_deprecated_option_move(self) -> None: + class Schema(Config): + new = c.Type(str) + old = c.Deprecated(moved_to='new') + + conf = self.get_config( + Schema, + {'old': 'value'}, + warnings=dict( + old="The configuration option 'old' has been deprecated and will be removed in a " + "future release. Use 'new' instead." + ), + ) + self.assertEqual(conf, {'new': 'value', 'old': None}) + + def test_deprecated_option_move_complex(self) -> None: + class Schema(Config): + foo = c.Type(dict) + old = c.Deprecated(moved_to='foo.bar') + + conf = self.get_config( + Schema, + {'old': 'value'}, + warnings=dict( + old="The configuration option 'old' has been deprecated and will be removed in a " + "future release. Use 'foo.bar' instead." + ), + ) + self.assertEqual(conf, {'foo': {'bar': 'value'}, 'old': None}) + + def test_deprecated_option_move_existing(self) -> None: + class Schema(Config): + foo = c.Type(dict) + old = c.Deprecated(moved_to='foo.bar') + + conf = self.get_config( + Schema, + {'old': 'value', 'foo': {'existing': 'existing'}}, + warnings=dict( + old="The configuration option 'old' has been deprecated and will be removed in a " + "future release. Use 'foo.bar' instead." 
+ ), + ) + self.assertEqual(conf, {'foo': {'existing': 'existing', 'bar': 'value'}, 'old': None}) - option = config_options.OptionallyRequired(default=1) - value = option.validate(None) - self.assertEqual(1, value) + def test_deprecated_option_move_invalid(self) -> None: + class Schema(Config): + foo = c.Type(dict) + old = c.Deprecated(moved_to='foo.bar') - def test_replace_default(self): + with self.expect_error(foo="Expected type: but received: "): + self.get_config( + Schema, + {'old': 'value', 'foo': 'wrong type'}, + warnings=dict( + old="The configuration option 'old' has been deprecated and will be removed in a " + "future release. Use 'foo.bar' instead." + ), + ) - option = config_options.OptionallyRequired(default=1) - value = option.validate(2) - self.assertEqual(2, value) +class IpAddressTest(TestCase): + class Schema(Config): + option = c.IpAddress() -class TypeTest(unittest.TestCase): + def test_valid_address(self) -> None: + addr = '127.0.0.1:8000' - def test_single_type(self): + conf = self.get_config(self.Schema, {'option': addr}) - option = config_options.Type(str) - value = option.validate("Testing") - self.assertEqual(value, "Testing") + assert_type(conf.option, c._IpAddressValue) + assert_type(conf.option.host, str) + assert_type(conf.option.port, int) - def test_multiple_types(self): - option = config_options.Type((list, tuple)) + self.assertEqual(str(conf.option), addr) + self.assertEqual(conf.option.host, '127.0.0.1') + self.assertEqual(conf.option.port, 8000) - value = option.validate([1, 2, 3]) - self.assertEqual(value, [1, 2, 3]) + def test_valid_IPv6_address(self) -> None: + addr = '::1:8000' - value = option.validate((1, 2, 3)) - self.assertEqual(value, (1, 2, 3)) + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(str(conf.option), addr) + self.assertEqual(conf.option.host, '::1') + self.assertEqual(conf.option.port, 8000) - self.assertRaises(config_options.ValidationError, - option.validate, {'a': 1}) + def test_valid_full_IPv6_address(self) -> None: + addr = '[2001:db8:85a3::8a2e:370:7334]:123' - def test_length(self): - option = config_options.Type(str, length=7) + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(conf.option.host, '2001:db8:85a3::8a2e:370:7334') + self.assertEqual(conf.option.port, 123) - value = option.validate("Testing") - self.assertEqual(value, "Testing") + def test_named_address(self) -> None: + addr = 'localhost:8000' - self.assertRaises(config_options.ValidationError, - option.validate, "Testing Long") + conf = self.get_config(self.Schema, {'option': addr}) + self.assertEqual(str(conf.option), addr) + self.assertEqual(conf.option.host, 'localhost') + self.assertEqual(conf.option.port, 8000) + def test_default_address(self) -> None: + addr = '127.0.0.1:8000' -class ChoiceTest(unittest.TestCase): + class Schema(Config): + option = c.IpAddress(default=addr) + + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(str(conf.option), addr) + self.assertEqual(conf.option.host, '127.0.0.1') + self.assertEqual(conf.option.port, 8000) + + @unittest.skipIf( + sys.version_info < (3, 9, 5), + "Leading zeros allowed in IP addresses before Python3.9.5", + ) + def test_invalid_leading_zeros(self) -> None: + with self.expect_error( + option="'127.000.000.001' does not appear to be an IPv4 or IPv6 address" + ): + self.get_config(self.Schema, {'option': '127.000.000.001:8000'}) + + def test_invalid_address_range(self) -> None: + with self.expect_error(option="'277.0.0.1' does not appear to be 
an IPv4 or IPv6 address"): + self.get_config(self.Schema, {'option': '277.0.0.1:8000'}) + + def test_invalid_address_format(self) -> None: + with self.expect_error(option="Must be a string of format 'IP:PORT'"): + self.get_config(self.Schema, {'option': '127.0.0.18000'}) + + def test_invalid_address_type(self) -> None: + with self.expect_error(option="Must be a string of format 'IP:PORT'"): + self.get_config(self.Schema, {'option': 123}) + + def test_invalid_address_port(self) -> None: + with self.expect_error(option="'foo' is not a valid port"): + self.get_config(self.Schema, {'option': '127.0.0.1:foo'}) + + def test_invalid_address_missing_port(self) -> None: + with self.expect_error(option="Must be a string of format 'IP:PORT'"): + self.get_config(self.Schema, {'option': '127.0.0.1'}) + + def test_unsupported_address(self) -> None: + class Schema(Config): + dev_addr = c.IpAddress() + + self.get_config( + Schema, + {'dev_addr': '0.0.0.0:8000'}, + warnings=dict( + dev_addr="The use of the IP address '0.0.0.0' suggests a production " + "environment or the use of a proxy to connect to the MkDocs " + "server. However, the MkDocs' server is intended for local " + "development purposes only. Please use a third party " + "production-ready server instead." + ), + ) - def test_valid_choice(self): - option = config_options.Choice(('python', 'node')) - value = option.validate('python') - self.assertEqual(value, 'python') + def test_unsupported_IPv6_address(self) -> None: + class Schema(Config): + dev_addr = c.IpAddress() + + self.get_config( + Schema, + {'dev_addr': ':::8000'}, + warnings=dict( + dev_addr="The use of the IP address '::' suggests a production environment " + "or the use of a proxy to connect to the MkDocs server. However, " + "the MkDocs' server is intended for local development purposes " + "only. Please use a third party production-ready server instead." 
+ ), + ) - def test_invalid_choice(self): - option = config_options.Choice(('python', 'node')) - self.assertRaises( - config_options.ValidationError, option.validate, 'go') - def test_invalid_choices(self): - self.assertRaises(ValueError, config_options.Choice, '') - self.assertRaises(ValueError, config_options.Choice, []) - self.assertRaises(ValueError, config_options.Choice, 5) +class URLTest(TestCase): + def test_valid_url(self) -> None: + class Schema(Config): + option = c.URL() + conf = self.get_config(Schema, {'option': "https://mkdocs.org"}) + assert_type(conf.option, str) + self.assertEqual(conf.option, "https://mkdocs.org") -class IpAddressTest(unittest.TestCase): + conf = self.get_config(Schema, {'option': ""}) + self.assertEqual(conf.option, "") - def test_valid_address(self): - addr = '127.0.0.1:8000' + def test_valid_url_is_dir(self) -> None: + class Schema(Config): + option = c.URL(is_dir=True) - option = config_options.IpAddress() - value = option.validate(addr) - self.assertEqual(str(value), addr) - self.assertEqual(value.host, '127.0.0.1') - self.assertEqual(value.port, 8000) + conf = self.get_config(Schema, {'option': "http://mkdocs.org/"}) + self.assertEqual(conf.option, "http://mkdocs.org/") - def test_valid_IPv6_address(self): - addr = '::1:8000' + conf = self.get_config(Schema, {'option': "https://mkdocs.org"}) + self.assertEqual(conf.option, "https://mkdocs.org/") - option = config_options.IpAddress() - value = option.validate(addr) - self.assertEqual(str(value), addr) - self.assertEqual(value.host, '::1') - self.assertEqual(value.port, 8000) + def test_optional(self): + class Schema(Config): + option = c.Optional(c.URL(is_dir=True)) - def test_named_address(self): - addr = 'localhost:8000' + conf = self.get_config(Schema, {'option': ''}) + self.assertEqual(conf.option, '') - option = config_options.IpAddress() - value = option.validate(addr) - self.assertEqual(str(value), addr) - self.assertEqual(value.host, 'localhost') - self.assertEqual(value.port, 8000) + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf.option, None) - def test_default_address(self): - addr = '127.0.0.1:8000' + def test_invalid_url(self) -> None: + class Schema(Config): + option = c.URL() + + with self.expect_error(option="Required configuration not provided."): + self.get_config(Schema, {'option': None}) + + for url in "www.mkdocs.org", "//mkdocs.org/test", "http:/mkdocs.org/", "/hello/": + with self.subTest(url=url): + with self.expect_error( + option="The URL isn't valid, it should include the http:// (scheme)" + ): + self.get_config(Schema, {'option': url}) + + def test_invalid_type(self) -> None: + class Schema(Config): + option = c.URL() + + with self.expect_error(option="Expected a string, got "): + self.get_config(Schema, {'option': 1}) - option = config_options.IpAddress(default=addr) - value = option.validate(None) - self.assertEqual(str(value), addr) - self.assertEqual(value.host, '127.0.0.1') - self.assertEqual(value.port, 8000) - def test_IP_normalization(self): - addr = '127.000.000.001:8000' - option = config_options.IpAddress(default=addr) - value = option.validate(None) - self.assertEqual(str(value), '127.0.0.1:8000') - self.assertEqual(value.host, '127.0.0.1') - self.assertEqual(value.port, 8000) +class EditURITest(TestCase): + class Schema(Config): + repo_url = c.Optional(c.URL()) + repo_name = c.Optional(c.RepoName('repo_url')) + edit_uri_template = c.Optional(c.EditURITemplate('edit_uri')) + edit_uri = c.Optional(c.EditURI('repo_url')) - def 
test_invalid_address_range(self): - option = config_options.IpAddress() - self.assertRaises( - config_options.ValidationError, - option.validate, '277.0.0.1:8000' + def test_repo_name_github(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://github.com/mkdocs/mkdocs"}, ) + assert_type(conf.repo_name, Optional[str]) + self.assertEqual(conf.repo_name, "GitHub") - def test_invalid_address_format(self): - option = config_options.IpAddress() - self.assertRaises( - config_options.ValidationError, - option.validate, '127.0.0.18000' + def test_repo_name_bitbucket(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://bitbucket.org/gutworth/six/"}, ) + self.assertEqual(conf.repo_name, "Bitbucket") - def test_invalid_address_type(self): - option = config_options.IpAddress() - self.assertRaises( - config_options.ValidationError, - option.validate, 123 + def test_repo_name_gitlab(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://gitlab.com/gitlab-org/gitlab-ce/"}, ) + self.assertEqual(conf.repo_name, "GitLab") - def test_invalid_address_port(self): - option = config_options.IpAddress() - self.assertRaises( - config_options.ValidationError, - option.validate, '127.0.0.1:foo' + def test_repo_name_custom(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://launchpad.net/python-tuskarclient"}, ) + self.assertEqual(conf.repo_name, "Launchpad") - def test_invalid_address_missing_port(self): - option = config_options.IpAddress() - self.assertRaises( - config_options.ValidationError, - option.validate, '127.0.0.1' + def test_edit_uri_github(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://github.com/mkdocs/mkdocs"}, ) + assert_type(conf.edit_uri, Optional[str]) + assert_type(conf.repo_url, Optional[str]) + self.assertEqual(conf.edit_uri, 'edit/master/docs/') + self.assertEqual(conf.repo_url, "https://github.com/mkdocs/mkdocs") + + def test_edit_uri_bitbucket(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://bitbucket.org/gutworth/six/"}, + ) + self.assertEqual(conf.edit_uri, 'src/default/docs/') + self.assertEqual(conf.repo_url, "https://bitbucket.org/gutworth/six/") - def test_unsupported_address(self): - option = config_options.IpAddress() - value = option.validate('0.0.0.0:8000') - option.post_validation({'dev_addr': value}, 'dev_addr') - self.assertEqual(len(option.warnings), 1) + def test_edit_uri_gitlab(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://gitlab.com/gitlab-org/gitlab-ce/"}, + ) + self.assertEqual(conf.edit_uri, 'edit/master/docs/') - def test_unsupported_IPv6_address(self): - option = config_options.IpAddress() - value = option.validate(':::8000') - option.post_validation({'dev_addr': value}, 'dev_addr') - self.assertEqual(len(option.warnings), 1) + def test_edit_uri_custom(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://launchpad.net/python-tuskarclient"}, + ) + self.assertEqual(conf.edit_uri, None) + self.assertEqual(conf.repo_url, "https://launchpad.net/python-tuskarclient") - def test_invalid_IPv6_address(self): - # The server will error out with this so we treat it as invalid. 
- option = config_options.IpAddress() - self.assertRaises( - config_options.ValidationError, - option.validate, '[::1]:8000' + def test_repo_name_custom_and_empty_edit_uri(self) -> None: + conf = self.get_config( + self.Schema, + {'repo_url': "https://github.com/mkdocs/mkdocs", 'repo_name': 'mkdocs'}, + ) + self.assertEqual(conf.edit_uri, 'edit/master/docs/') + + def test_edit_uri_template_ok(self) -> None: + conf = self.get_config( + self.Schema, + { + 'repo_url': "https://github.com/mkdocs/mkdocs", + 'edit_uri_template': 'edit/foo/docs/{path}', + }, ) + assert_type(conf.edit_uri_template, Optional[str]) + self.assertEqual(conf.edit_uri_template, 'edit/foo/docs/{path}') + + def test_edit_uri_template_errors(self) -> None: + with self.expect_error( + edit_uri_template=re.compile(r'.*[{}].*') # Complains about unclosed '{' or missing '}' + ): + self.get_config( + self.Schema, + { + 'repo_url': "https://github.com/mkdocs/mkdocs", + 'edit_uri_template': 'edit/master/{path', + }, + ) + + with self.expect_error(edit_uri_template=re.compile(r'.*\bz\b.*')): + self.get_config( + self.Schema, + { + 'repo_url': "https://github.com/mkdocs/mkdocs", + 'edit_uri_template': 'edit/master/{path!z}', + }, + ) + + with self.expect_error(edit_uri_template="Unknown template substitute: 'foo'"): + self.get_config( + self.Schema, + { + 'repo_url': "https://github.com/mkdocs/mkdocs", + 'edit_uri_template': 'edit/master/{foo}', + }, + ) + + def test_edit_uri_template_warning(self) -> None: + conf = self.get_config( + self.Schema, + { + 'repo_url': "https://github.com/mkdocs/mkdocs", + 'edit_uri': 'edit', + 'edit_uri_template': 'edit/master/{path}', + }, + warnings=dict( + edit_uri_template="The option 'edit_uri' has no effect when 'edit_uri_template' is set." + ), + ) + self.assertEqual(conf.edit_uri_template, 'edit/master/{path}') + +class ListOfItemsTest(TestCase): + def test_int_type(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Type(int)) -class URLTest(unittest.TestCase): + conf = self.get_config(Schema, {'option': [1, 2, 3]}) + assert_type(conf.option, List[int]) + self.assertEqual(conf.option, [1, 2, 3]) - def test_valid_url(self): + with self.expect_error( + option="Expected type: but received: " + ): + conf = self.get_config(Schema, {'option': [1, None, 3]}) - url = "https://mkdocs.org" + def test_combined_float_type(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Type((int, float))) - option = config_options.URL() - value = option.validate(url) - self.assertEqual(value, url) + conf = self.get_config(Schema, {'option': [1.4, 2, 3]}) + self.assertEqual(conf.option, [1.4, 2, 3]) - def test_invalid_url(self): + with self.expect_error( + option="Expected type: (, ) but received: " + ): + self.get_config(Schema, {'option': ['a']}) - option = config_options.URL() - self.assertRaises(config_options.ValidationError, - option.validate, "www.mkdocs.org") + def test_list_default(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Type(int), default=[]) - def test_invalid(self): + conf = self.get_config(Schema, {}) + assert_type(conf.option, List[int]) + self.assertEqual(conf.option, []) - option = config_options.URL() - self.assertRaises(config_options.ValidationError, - option.validate, 1) + with self.expect_error(option="Required configuration not provided."): + conf = self.get_config(Schema, {'option': None}) + def test_none_without_default(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Type(str)) -class RepoURLTest(unittest.TestCase): + with 
self.expect_error(option="Required configuration not provided."): + conf = self.get_config(Schema, {}) - def test_repo_name_github(self): + with self.expect_error(option="Required configuration not provided."): + conf = self.get_config(Schema, {'option': None}) - option = config_options.RepoURL() - config = {'repo_url': "https://github.com/mkdocs/mkdocs"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['repo_name'], "GitHub") + conf = self.get_config(Schema, {'option': ['foo']}) + self.assertEqual(conf.option, ['foo']) - def test_repo_name_bitbucket(self): + def test_optional(self) -> None: + class Schema(Config): + option = c.Optional(c.ListOfItems(c.Type(str))) - option = config_options.RepoURL() - config = {'repo_url': "https://bitbucket.org/gutworth/six/"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['repo_name'], "Bitbucket") + conf = self.get_config(Schema, {}) + assert_type(conf.option, Optional[List[str]]) + self.assertEqual(conf.option, None) - def test_repo_name_gitlab(self): + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf.option, None) - option = config_options.RepoURL() - config = {'repo_url': "https://gitlab.com/gitlab-org/gitlab-ce/"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['repo_name'], "GitLab") + conf = self.get_config(Schema, {'option': ['foo']}) + self.assertEqual(conf.option, ['foo']) - def test_repo_name_custom(self): + def test_list_of_optional(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Optional(c.Type(int)), default=[]) - option = config_options.RepoURL() - config = {'repo_url': "https://launchpad.net/python-tuskarclient"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['repo_name'], "Launchpad") + conf = self.get_config(Schema, {}) + assert_type(conf.option, List[Optional[int]]) + self.assertEqual(conf.option, []) - def test_edit_uri_github(self): + conf = self.get_config(Schema, {'option': [4, None]}) + self.assertEqual(conf.option, [4, None]) - option = config_options.RepoURL() - config = {'repo_url': "https://github.com/mkdocs/mkdocs"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['edit_uri'], 'edit/master/docs/') + with self.expect_error(option="Expected type: but received: "): + conf = self.get_config(Schema, {'option': ['foo']}) + self.assertEqual(conf.option, ['foo']) - def test_edit_uri_bitbucket(self): + def test_string_not_a_list_of_strings(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Type(str)) - option = config_options.RepoURL() - config = {'repo_url': "https://bitbucket.org/gutworth/six/"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['edit_uri'], 'src/default/docs/') + with self.expect_error(option="Expected a list of items, but a was given."): + self.get_config(Schema, {'option': 'foo'}) - def test_edit_uri_gitlab(self): + def test_post_validation_error(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.IpAddress()) - option = config_options.RepoURL() - config = {'repo_url': "https://gitlab.com/gitlab-org/gitlab-ce/"} - option.post_validation(config, 'repo_url') - self.assertEqual(config['edit_uri'], 'edit/master/docs/') + with self.expect_error(option="'asdf' is not a valid port"): + self.get_config(Schema, {'option': ["localhost:8000", "1.2.3.4:asdf"]}) - def test_edit_uri_custom(self): + def test_warning(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.Deprecated()) + + self.get_config( + Schema, 
+ {'option': ['a']}, + warnings=dict( + option="The configuration option 'option[0]' has been " + "deprecated and will be removed in a future release." + ), + ) - option = config_options.RepoURL() - config = {'repo_url': "https://launchpad.net/python-tuskarclient"} - option.post_validation(config, 'repo_url') - self.assertEqual(config.get('edit_uri'), '') - def test_repo_name_custom_and_empty_edit_uri(self): +class ExtraScriptsTest(TestCase): + def test_js_async(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.ExtraScript(), default=[]) + + conf = self.get_config(Schema, {'option': ['foo.js', {'path': 'bar.js', 'async': True}]}) + assert_type(conf.option, List[Union[c.ExtraScriptValue, str]]) + self.assertEqual(len(conf.option), 2) + self.assertIsInstance(conf.option[1], c.ExtraScriptValue) + self.assertEqual( + conf.option, + [ + 'foo.js', + {'path': 'bar.js', 'type': '', 'defer': False, 'async': True}, + ], + ) - option = config_options.RepoURL() - config = {'repo_url': "https://github.com/mkdocs/mkdocs", - 'repo_name': 'mkdocs'} - option.post_validation(config, 'repo_url') - self.assertEqual(config.get('edit_uri'), 'edit/master/docs/') + def test_mjs(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.ExtraScript(), default=[]) + conf = self.get_config( + Schema, {'option': ['foo.mjs', {'path': 'bar.js', 'type': 'module'}]} + ) + assert_type(conf.option, List[Union[c.ExtraScriptValue, str]]) + self.assertEqual(len(conf.option), 2) + self.assertIsInstance(conf.option[0], c.ExtraScriptValue) + self.assertEqual( + conf.option, + [ + {'path': 'foo.mjs', 'type': 'module', 'defer': False, 'async': False}, + {'path': 'bar.js', 'type': 'module', 'defer': False, 'async': False}, + ], + ) -class DirTest(unittest.TestCase): + def test_wrong_type(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.ExtraScript(), default=[]) - def test_valid_dir(self): + with self.expect_error( + option="The configuration is invalid. 
Expected a key-value mapping (dict) but received: <class 'int'>" + ): + self.get_config(Schema, {'option': [1]}) - d = os.path.dirname(__file__) - option = config_options.Dir(exists=True) - value = option.validate(d) - self.assertEqual(d, value) + def test_unknown_key(self) -> None: + class Schema(Config): + option = c.ListOfItems(c.ExtraScript(), default=[]) + + conf = self.get_config( + Schema, + {'option': [{'path': 'foo.js', 'foo': 'bar'}]}, + warnings=dict(option="Sub-option 'foo': Unrecognised configuration name: foo"), + ) + self.assertEqual( + conf.option, + [{'path': 'foo.js', 'type': '', 'defer': False, 'async': False, 'foo': 'bar'}], + ) + + +class DictOfItemsTest(TestCase): + def test_int_type(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Type(int)) + + conf = self.get_config(Schema, {'option': {"a": 1, "b": 2}}) + assert_type(conf.option, Dict[str, int]) + self.assertEqual(conf.option, {"a": 1, "b": 2}) + + with self.expect_error( + option="Expected type: <class 'int'> but received: <class 'NoneType'>" + ): + conf = self.get_config(Schema, {'option': {"a": 1, "b": None}}) + + def test_combined_float_type(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Type((int, float))) + + conf = self.get_config(Schema, {'option': {"a": 1, "b": 2.3}}) + self.assertEqual(conf.option, {"a": 1, "b": 2.3}) + + with self.expect_error( + option="Expected type: (<class 'int'>, <class 'float'>) but received: <class 'str'>" + ): + self.get_config(Schema, {'option': {"a": 1, "b": "2"}}) + + def test_dict_default(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Type(int), default={}) + + conf = self.get_config(Schema, {}) + assert_type(conf.option, Dict[str, int]) + self.assertEqual(conf.option, {}) + + with self.expect_error(option="Required configuration not provided."): + conf = self.get_config(Schema, {'option': None}) + + def test_none_without_default(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Type(str)) + + with self.expect_error(option="Required configuration not provided."): + conf = self.get_config(Schema, {}) + + with self.expect_error(option="Required configuration not provided."): + conf = self.get_config(Schema, {'option': None}) + + conf = self.get_config(Schema, {'option': {"foo": "bar"}}) + self.assertEqual(conf.option, {"foo": "bar"}) + + def test_optional(self) -> None: + class Schema(Config): + option = c.Optional(c.DictOfItems(c.Type(str))) + + conf = self.get_config(Schema, {}) + assert_type(conf.option, Optional[Dict[str, str]]) + self.assertEqual(conf.option, None) + + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf.option, None) + + conf = self.get_config(Schema, {'option': {"foo": "bar"}}) + self.assertEqual(conf.option, {"foo": "bar"}) + + def test_dict_of_optional(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Optional(c.Type(int)), default={}) + + conf = self.get_config(Schema, {}) + assert_type(conf.option, Dict[str, Optional[int]]) + self.assertEqual(conf.option, {}) + + conf = self.get_config(Schema, {'option': {"a": 1, "b": None}}) + self.assertEqual(conf.option, {"a": 1, "b": None}) + + with self.expect_error(option="Expected type: <class 'int'> but received: <class 'str'>"): + conf = self.get_config(Schema, {'option': {"foo": "bar"}}) + self.assertEqual(conf.option, {"foo": "bar"}) + + def test_string_not_a_dict_of_strings(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Type(str)) + + with self.expect_error(option="Expected a dict of items, but a <class 'str'> was given."): + self.get_config(Schema, {'option': 'foo'}) + + def 
test_post_validation_error(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.IpAddress()) + + with self.expect_error(option="'asdf' is not a valid port"): + self.get_config( + Schema, {'option': {"ip_foo": "localhost:8000", "ip_bar": "1.2.3.4:asdf"}} + ) + + def test_all_keys_are_strings(self) -> None: + class Schema(Config): + option = c.DictOfItems(c.Type(int)) + + with self.expect_error( + option="Expected type: for keys, but received: (key=2)" + ): + self.get_config(Schema, {'option': {"a": 1, 2: 3}}) + + +class FilesystemObjectTest(TestCase): + def test_valid_dir(self) -> None: + for cls in c.Dir, c.FilesystemObject: + with self.subTest(cls): + d = os.path.dirname(__file__) + + class Schema(Config): + option = cls(exists=True) + + conf = self.get_config(Schema, {'option': d}) + assert_type(conf.option, str) + self.assertEqual(conf.option, d) - def test_missing_dir(self): + def test_valid_file(self) -> None: + for cls in c.File, c.FilesystemObject: + with self.subTest(cls): + f = __file__ - d = os.path.join("not", "a", "real", "path", "I", "hope") - option = config_options.Dir() - value = option.validate(d) - self.assertEqual(os.path.abspath(d), value) + class Schema(Config): + option = cls(exists=True) - def test_missing_dir_but_required(self): + conf = self.get_config(Schema, {'option': f}) + assert_type(conf.option, str) + self.assertEqual(conf.option, f) - d = os.path.join("not", "a", "real", "path", "I", "hope") - option = config_options.Dir(exists=True) - self.assertRaises(config_options.ValidationError, - option.validate, d) + def test_missing_without_exists(self) -> None: + for cls in c.Dir, c.File, c.FilesystemObject: + with self.subTest(cls): + d = os.path.join("not", "a", "real", "path", "I", "hope") - def test_file(self): + class Schema(Config): + option = cls() + + conf = self.get_config(Schema, {'option': d}) + assert_type(conf.option, str) + self.assertEqual(conf.option, os.path.abspath(d)) + + def test_missing_but_required(self) -> None: + for cls in c.Dir, c.File, c.FilesystemObject: + with self.subTest(cls): + d = os.path.join("not", "a", "real", "path", "I", "hope") + + class Schema(Config): + option = cls(exists=True) + + with self.expect_error(option=re.compile(r"The path '.+' isn't an existing .+")): + self.get_config(Schema, {'option': d}) + + def test_not_a_dir(self) -> None: d = __file__ - option = config_options.Dir(exists=True) - self.assertRaises(config_options.ValidationError, - option.validate, d) - def test_incorrect_type_attribute_error(self): - option = config_options.Dir() - self.assertRaises(config_options.ValidationError, - option.validate, 1) + class Schema(Config): + option = c.Dir(exists=True) - def test_incorrect_type_type_error(self): - option = config_options.Dir() - self.assertRaises(config_options.ValidationError, - option.validate, []) + with self.expect_error(option=re.compile(r"The path '.+' isn't an existing directory.")): + self.get_config(Schema, {'option': d}) - def test_dir_unicode(self): - cfg = Config( - [('dir', config_options.Dir())], - config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml'), - ) + def test_not_a_file(self) -> None: + d = os.path.dirname(__file__) - test_config = { - 'dir': 'юникод' - } + class Schema(Config): + option = c.File(exists=True) - cfg.load_dict(test_config) + with self.expect_error(option=re.compile(r"The path '.+' isn't an existing file.")): + self.get_config(Schema, {'option': d}) - fails, warns = cfg.validate() + def test_incorrect_type_error(self) -> None: + for cls in 
c.Dir, c.File, c.FilesystemObject: + with self.subTest(cls): - self.assertEqual(len(fails), 0) - self.assertEqual(len(warns), 0) - self.assertIsInstance(cfg['dir'], str) + class Schema(Config): + option = cls() - def test_dir_filesystemencoding(self): - cfg = Config( - [('dir', config_options.Dir())], - config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml'), - ) + with self.expect_error( + option="Expected type: but received: " + ): + self.get_config(Schema, {'option': 1}) + with self.expect_error( + option="Expected type: but received: " + ): + self.get_config(Schema, {'option': []}) - test_config = { - 'dir': 'Übersicht'.encode(encoding=sys.getfilesystemencoding()) - } + def test_with_unicode(self) -> None: + for cls in c.Dir, c.File, c.FilesystemObject: + with self.subTest(cls): - cfg.load_dict(test_config) + class Schema(Config): + dir = cls() - fails, warns = cfg.validate() + conf = self.get_config(Schema, {'dir': 'юникод'}) + self.assertIsInstance(conf.dir, str) - # str does not include byte strings so validation fails - self.assertEqual(len(fails), 1) - self.assertEqual(len(warns), 0) + def test_dir_bytes(self) -> None: + class Schema(Config): + dir = c.Dir() - def test_dir_bad_encoding_fails(self): - cfg = Config( - [('dir', config_options.Dir())], - config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml'), - ) + with self.expect_error(dir="Expected type: but received: "): + self.get_config(Schema, {'dir': b'foo'}) - test_config = { - 'dir': 'юникод'.encode(encoding='ISO 8859-5') - } + def test_config_dir_prepended(self) -> None: + for cls in c.Dir, c.File, c.FilesystemObject: + with self.subTest(cls): + base_path = os.path.dirname(os.path.abspath(__file__)) - cfg.load_dict(test_config) + class Schema(Config): + dir = cls() - fails, warns = cfg.validate() + conf = self.get_config( + Schema, + {'dir': 'foo'}, + config_file_path=os.path.join(base_path, 'mkdocs.yml'), + ) + self.assertEqual(conf.dir, os.path.join(base_path, 'foo')) - self.assertEqual(len(fails), 1) - self.assertEqual(len(warns), 0) + def test_site_dir_is_config_dir_fails(self) -> None: + class Schema(Config): + dir = c.DocsDir() - def test_config_dir_prepended(self): - base_path = os.path.abspath('.') - cfg = Config( - [('dir', config_options.Dir())], - config_file_path=os.path.join(base_path, 'mkdocs.yml'), - ) + with self.expect_error( + dir="The 'dir' should not be the parent directory of the config file. " + "Use a child directory instead so that the 'dir' is a sibling of the config file." + ): + self.get_config( + Schema, + {'dir': '.'}, + config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml'), + ) - test_config = { - 'dir': 'foo' - } - cfg.load_dict(test_config) +class ListOfPathsTest(TestCase): + def test_valid_path(self) -> None: + paths = [os.path.dirname(__file__)] - fails, warns = cfg.validate() + class Schema(Config): + option = c.ListOfPaths() - self.assertEqual(len(fails), 0) - self.assertEqual(len(warns), 0) - self.assertIsInstance(cfg['dir'], str) - self.assertEqual(cfg['dir'], os.path.join(base_path, 'foo')) + self.get_config(Schema, {'option': paths}) - def test_dir_is_config_dir_fails(self): - cfg = Config( - [('dir', config_options.Dir())], - config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml'), - ) + def test_missing_path(self) -> None: + paths = [os.path.join("does", "not", "exist", "i", "hope")] - test_config = { - 'dir': '.' 
- } + class Schema(Config): + option = c.ListOfPaths() - cfg.load_dict(test_config) + with self.expect_error( + option=f"The path '{paths[0]}' isn't an existing file or directory." + ): + self.get_config(Schema, {'option': paths}) - fails, warns = cfg.validate() + def test_non_path(self) -> None: + paths = [os.path.dirname(__file__), None] - self.assertEqual(len(fails), 1) - self.assertEqual(len(warns), 0) + class Schema(Config): + option = c.ListOfPaths() + with self.expect_error( + option="Expected type: but received: " + ): + self.get_config(Schema, {'option': paths}) -class SiteDirTest(unittest.TestCase): + def test_empty_list(self) -> None: + class Schema(Config): + option = c.ListOfPaths() - def validate_config(self, config): - """ Given a config with values for site_dir and doc_dir, run site_dir post_validation. """ - site_dir = config_options.SiteDir() - docs_dir = config_options.Dir() + conf = self.get_config(Schema, {'option': []}) + assert_type(conf.option, List[str]) + self.assertEqual(conf.option, []) - fname = os.path.join(os.path.abspath('..'), 'mkdocs.yml') + def test_none(self) -> None: + class Schema(Config): + option = c.ListOfPaths() - config['docs_dir'] = docs_dir.validate(config['docs_dir']) - config['site_dir'] = site_dir.validate(config['site_dir']) + with self.expect_error(option="Required configuration not provided."): + self.get_config(Schema, {'option': None}) - schema = [ - ('site_dir', site_dir), - ('docs_dir', docs_dir), - ] - cfg = Config(schema, fname) - cfg.load_dict(config) - failed, warned = cfg.validate() + def test_non_list(self) -> None: + paths = os.path.dirname(__file__) - if failed: - raise config_options.ValidationError(failed) + class Schema(Config): + option = c.ListOfPaths() - return True + with self.expect_error(option="Expected a list of items, but a was given."): + self.get_config(Schema, {'option': paths}) - def test_doc_dir_in_site_dir(self): + def test_file(self) -> None: + paths = [__file__] + class Schema(Config): + option = c.ListOfPaths() + + conf = self.get_config(Schema, {'option': paths}) + assert_type(conf.option, List[str]) + + @tempdir() + def test_paths_localized_to_config(self, base_path) -> None: + with open(os.path.join(base_path, 'foo'), 'w') as f: + f.write('hi') + + class Schema(Config): + watch = c.ListOfPaths() + + conf = self.get_config( + Schema, + {'watch': ['foo']}, + config_file_path=os.path.join(base_path, 'mkdocs.yml'), + ) + + self.assertEqual(conf.watch, [os.path.join(base_path, 'foo')]) + + +class SiteDirTest(TestCase): + class Schema(Config): + site_dir = c.SiteDir() + docs_dir = c.Dir() + + def test_doc_dir_in_site_dir(self) -> None: j = os.path.join # The parent dir is not the same on every system, so use the actual dir name parent_dir = mkdocs.__file__.split(os.sep)[-3] @@ -453,15 +1085,17 @@ def test_doc_dir_in_site_dir(self): {'docs_dir': 'docs', 'site_dir': ''}, {'docs_dir': '', 'site_dir': ''}, {'docs_dir': j('..', parent_dir, 'docs'), 'site_dir': 'docs'}, - {'docs_dir': 'docs', 'site_dir': '/'} + {'docs_dir': 'docs', 'site_dir': '/'}, ) for test_config in test_configs: - self.assertRaises(config_options.ValidationError, - self.validate_config, test_config) - - def test_site_dir_in_docs_dir(self): + with self.subTest(test_config): + with self.expect_error( + site_dir=re.compile(r"The 'docs_dir' should not be within the 'site_dir'.*") + ): + self.get_config(self.Schema, test_config) + def test_site_dir_in_docs_dir(self) -> None: j = os.path.join test_configs = ( @@ -472,11 +1106,14 @@ def 
test_site_dir_in_docs_dir(self): ) for test_config in test_configs: - self.assertRaises(config_options.ValidationError, - self.validate_config, test_config) + with self.subTest(test_config): + with self.expect_error( + site_dir=re.compile(r"The 'site_dir' should not be within the 'docs_dir'.*") + ): + self.get_config(self.Schema, test_config) - def test_common_prefix(self): - """ Legitimate settings with common prefixes should not fail validation. """ + def test_common_prefix(self) -> None: + """Legitimate settings with common prefixes should not fail validation.""" test_configs = ( {'docs_dir': 'docs', 'site_dir': 'docs-site'}, @@ -484,308 +1121,1254 @@ def test_common_prefix(self): ) for test_config in test_configs: - assert self.validate_config(test_config) + with self.subTest(test_config): + self.get_config(self.Schema, test_config) -class ThemeTest(unittest.TestCase): +class ThemeTest(TestCase): + def test_theme_as_string(self) -> None: + class Schema(Config): + option = c.Theme() - def test_theme_as_string(self): + conf = self.get_config(Schema, {'option': "mkdocs"}) + assert_type(conf.option, Theme) + assert_type(conf.option.name, Optional[str]) + self.assertEqual(conf.option.name, 'mkdocs') - option = config_options.Theme() - value = option.validate("mkdocs") - self.assertEqual({'name': 'mkdocs'}, value) + def test_uninstalled_theme_as_string(self) -> None: + class Schema(Config): + theme = c.Theme() + plugins = c.Plugins(theme_key='theme') - def test_uninstalled_theme_as_string(self): + with self.expect_error( + theme=re.compile( + r"Unrecognised theme name: 'mkdocs2'. The available installed themes are: .+" + ) + ): + self.get_config(Schema, {'theme': "mkdocs2", 'plugins': "search"}) - option = config_options.Theme() - self.assertRaises(config_options.ValidationError, - option.validate, "mkdocs2") + def test_theme_default(self) -> None: + class Schema(Config): + option = c.Theme(default='mkdocs') - def test_theme_default(self): - option = config_options.Theme(default='mkdocs') - value = option.validate(None) - self.assertEqual({'name': 'mkdocs'}, value) - - def test_theme_as_simple_config(self): + conf = self.get_config(Schema, {'option': None}) + self.assertEqual(conf.option.name, 'mkdocs') + def test_theme_as_simple_config(self) -> None: config = { - 'name': 'mkdocs' + 'name': 'mkdocs', } - option = config_options.Theme() - value = option.validate(config) - self.assertEqual(config, value) - def test_theme_as_complex_config(self): + class Schema(Config): + option = c.Theme() + conf = self.get_config(Schema, {'option': config}) + self.assertEqual(conf.option.name, 'mkdocs') + + @tempdir() + def test_theme_as_complex_config(self, custom_dir) -> None: config = { 'name': 'mkdocs', - 'custom_dir': 'custom', + 'custom_dir': custom_dir, 'static_templates': ['sitemap.html'], - 'show_sidebar': False + 'show_sidebar': False, } - option = config_options.Theme() - value = option.validate(config) - self.assertEqual(config, value) - def test_theme_name_is_none(self): + class Schema(Config): + option = c.Theme() + + conf = self.get_config(Schema, {'option': config}) + self.assertEqual(conf.option.name, 'mkdocs') + self.assertEqual(conf.option.custom_dir, custom_dir) + self.assertIn(custom_dir, conf.option.dirs) + self.assertEqual( + conf.option.static_templates, + {'404.html', 'sitemap.xml', 'sitemap.html'}, + ) + self.assertEqual(conf.option['show_sidebar'], False) + def test_theme_name_is_none(self) -> None: config = { - 'name': None + 'name': None, } - option = config_options.Theme() - 
value = option.validate(config) - self.assertEqual(config, value) - def test_theme_config_missing_name(self): + class Schema(Config): + option = c.Theme() + with self.expect_error(option="At least one of 'name' or 'custom_dir' must be defined."): + self.get_config(Schema, {'option': config}) + + def test_theme_config_missing_name(self) -> None: config = { 'custom_dir': 'custom', } - option = config_options.Theme() - self.assertRaises(config_options.ValidationError, - option.validate, config) - def test_uninstalled_theme_as_config(self): + class Schema(Config): + option = c.Theme() + + with self.expect_error(option="No theme name set."): + self.get_config(Schema, {'option': config}) + def test_uninstalled_theme_as_config(self) -> None: config = { - 'name': 'mkdocs2' + 'name': 'mkdocs2', } - option = config_options.Theme() - self.assertRaises(config_options.ValidationError, - option.validate, config) - def test_theme_invalid_type(self): + class Schema(Config): + option = c.Theme() + + with self.expect_error( + option=re.compile( + r"Unrecognised theme name: 'mkdocs2'. The available installed themes are: .+" + ) + ): + self.get_config(Schema, {'option': config}) + def test_theme_invalid_type(self) -> None: config = ['mkdocs2'] - option = config_options.Theme() - self.assertRaises(config_options.ValidationError, - option.validate, config) + class Schema(Config): + option = c.Theme() + + with self.expect_error( + option="Invalid type . Expected a string or key/value pairs." + ): + self.get_config(Schema, {'option': config}) + + def test_post_validation_none_theme_name_and_missing_custom_dir(self) -> None: + config = { + 'theme': { + 'name': None, + }, + } + + class Schema(Config): + theme = c.Theme() + + with self.expect_error(theme="At least one of 'name' or 'custom_dir' must be defined."): + self.get_config(Schema, config) -class NavTest(unittest.TestCase): + @tempdir() + def test_post_validation_inexisting_custom_dir(self, abs_base_path) -> None: + path = os.path.join(abs_base_path, 'inexisting_custom_dir') + config = { + 'theme': { + 'name': None, + 'custom_dir': path, + }, + } + + class Schema(Config): + theme = c.Theme() - def test_old_format(self): + with self.expect_error(theme=f"The path set in custom_dir ('{path}') does not exist."): + self.get_config(Schema, config) - option = config_options.Nav() - self.assertRaises( - config_options.ValidationError, - option.validate, - [['index.md', ], ] + def test_post_validation_locale_none(self) -> None: + config = { + 'theme': { + 'name': 'mkdocs', + 'locale': None, + }, + } + + class Schema(Config): + theme = c.Theme() + + with self.expect_error(theme="'locale' must be a string."): + self.get_config(Schema, config) + + def test_post_validation_locale_invalid_type(self) -> None: + config = { + 'theme': { + 'name': 'mkdocs', + 'locale': 0, + }, + } + + class Schema(Config): + theme = c.Theme() + + with self.expect_error(theme="'locale' must be a string."): + self.get_config(Schema, config) + + def test_post_validation_locale(self) -> None: + config = { + 'theme': { + 'name': 'mkdocs', + 'locale': 'fr', + }, + } + + class Schema(Config): + theme = c.Theme() + + conf = self.get_config(Schema, config) + self.assertEqual(conf.theme.locale.language, 'fr') + + +class NavTest(TestCase): + class Schema(Config): + option = c.Nav() + + def test_old_format(self) -> None: + with self.expect_error( + option="Expected nav item to be a string or dict, got a list: ['index.md']" + ): + self.get_config(self.Schema, {'option': [['index.md']]}) + + def 
test_provided_dict(self) -> None: + conf = self.get_config(self.Schema, {'option': ['index.md', {"Page": "page.md"}]}) + self.assertEqual(conf.option, ['index.md', {'Page': 'page.md'}]) + + def test_provided_empty(self) -> None: + conf = self.get_config(self.Schema, {'option': []}) + self.assertEqual(conf.option, None) + + def test_normal_nav(self) -> None: + nav_yaml = textwrap.dedent( + '''\ + - Home: index.md + - getting-started.md + - User Guide: + - Overview: user-guide/index.md + - Installation: user-guide/installation.md + ''' + ) + nav = yaml_load(io.StringIO(nav_yaml)) + + conf = self.get_config(self.Schema, {'option': nav}) + self.assertEqual(conf.option, nav) + + def test_invalid_type_dict(self) -> None: + with self.expect_error(option="Expected nav to be a list, got a dict: {}"): + self.get_config(self.Schema, {'option': {}}) + + def test_invalid_type_int(self) -> None: + with self.expect_error(option="Expected nav to be a list, got a int: 5"): + self.get_config(self.Schema, {'option': 5}) + + def test_invalid_item_int(self) -> None: + with self.expect_error(option="Expected nav item to be a string or dict, got a int: 1"): + self.get_config(self.Schema, {'option': [1]}) + + def test_invalid_item_none(self) -> None: + with self.expect_error(option="Expected nav item to be a string or dict, got None"): + self.get_config(self.Schema, {'option': [None]}) + + def test_invalid_children_config_int(self) -> None: + with self.expect_error(option="Expected nav to be a list, got a int: 1"): + self.get_config(self.Schema, {'option': [{"foo.md": [{"bar.md": 1}]}]}) + + def test_invalid_children_config_none(self) -> None: + with self.expect_error(option="Expected nav to be a list, got None"): + self.get_config(self.Schema, {'option': [{"foo.md": None}]}) + + def test_invalid_children_empty_dict(self) -> None: + nav = ['foo', {}] + with self.expect_error(option="Expected nav item to be a dict of size 1, got a dict: {}"): + self.get_config(self.Schema, {'option': nav}) + + def test_invalid_nested_list(self) -> None: + nav = [{'aaa': [[{"bbb": "user-guide/index.md"}]]}] + with self.expect_error( + option="Expected nav item to be a string or dict, got a list: [{'bbb': 'user-guide/index.md'}]" + ): + self.get_config(self.Schema, {'option': nav}) + + def test_invalid_children_oversized_dict(self) -> None: + nav = [{"aaa": [{"bbb": "user-guide/index.md", "ccc": "user-guide/installation.md"}]}] + with self.expect_error( + option="Expected nav item to be a dict of size 1, got dict with keys ('bbb', 'ccc')" + ): + self.get_config(self.Schema, {'option': nav}) + + def test_warns_for_dict(self) -> None: + self.get_config( + self.Schema, + {'option': [{"a": {"b": "c.md", "d": "e.md"}}]}, + warnings=dict(option="Expected nav to be a list, got dict with keys ('b', 'd')"), ) - def test_provided_dict(self): - option = config_options.Nav() - value = option.validate([ - 'index.md', - {"Page": "page.md"} - ]) - self.assertEqual(['index.md', {'Page': 'page.md'}], value) +class PrivateTest(TestCase): + def test_defined(self) -> None: + class Schema(Config): + option = c.Private[Any]() - option.post_validation({'extra_stuff': []}, 'extra_stuff') + with self.expect_error(option="For internal use only."): + self.get_config(Schema, {'option': 'somevalue'}) - def test_provided_empty(self): - option = config_options.Nav() - value = option.validate([]) - self.assertEqual(None, value) +class SubConfigTest(TestCase): + def test_subconfig_wrong_type(self) -> None: + # Test that an error is raised if subconfig does not 
receive a dict + class Schema(Config): + option = c.SubConfig() - option.post_validation({'extra_stuff': []}, 'extra_stuff') + for val in "not_a_dict", ("not_a_dict",), ["not_a_dict"]: + with self.subTest(val): + with self.expect_error( + option=re.compile( + r"The configuration is invalid. Expected a key-value mapping " + r"\(dict\) but received: .+" + ) + ): + self.get_config(Schema, {'option': val}) - def test_invalid_type(self): + def test_subconfig_unknown_option(self) -> None: + class Schema(Config): + option = c.SubConfig(validate=True) - option = config_options.Nav() - self.assertRaises(config_options.ValidationError, - option.validate, {}) + conf = self.get_config( + Schema, + {'option': {'unknown': 0}}, + warnings=dict(option="Sub-option 'unknown': Unrecognised configuration name: unknown"), + ) + self.assertEqual(conf.option, {"unknown": 0}) + + def test_subconfig_invalid_option(self) -> None: + class Sub(Config): + cc = c.Choice(('foo', 'bar')) + + class Schema(Config): + option = c.SubConfig(Sub) + + with self.expect_error( + option="Sub-option 'cc': Expected one of: ('foo', 'bar') but received: True" + ): + self.get_config(Schema, {'option': {'cc': True}}) + + def test_subconfig_normal(self) -> None: + class Sub(Config): + cc = c.Choice(('foo', 'bar')) + + class Schema(Config): + option = c.SubConfig(Sub) + + conf = self.get_config(Schema, {'option': {'cc': 'foo'}}) + assert_type(conf.option, Sub) + self.assertEqual(conf.option, {'cc': 'foo'}) + assert_type(conf.option.cc, str) + self.assertEqual(conf.option.cc, 'foo') + + def test_subconfig_with_multiple_items(self) -> None: + # This had a bug where subsequent items would get merged into the same dict. + class Sub(Config): + value = c.Type(str) + + class Schema(Config): + the_items = c.ListOfItems(c.SubConfig(Sub)) + + conf = self.get_config( + Schema, + { + 'the_items': [ + {'value': 'a'}, + {'value': 'b'}, + ] + }, + ) + assert_type(conf.the_items, List[Sub]) + self.assertEqual(conf.the_items, [{'value': 'a'}, {'value': 'b'}]) + assert_type(conf.the_items[1].value, str) + self.assertEqual(conf.the_items[1].value, 'b') - def test_invalid_config(self): + def test_optional(self) -> None: + class Sub(Config): + opt = c.Optional(c.Type(int)) - option = config_options.Nav() - self.assertRaises(config_options.ValidationError, - option.validate, [[], 1]) + class Schema(Config): + sub = c.Optional(c.ListOfItems(c.SubConfig(Sub))) + conf = self.get_config(Schema, {}) + self.assertEqual(conf.sub, None) -class PrivateTest(unittest.TestCase): + conf = self.get_config(Schema, {'sub': None}) + self.assertEqual(conf.sub, None) - def test_defined(self): + conf = self.get_config(Schema, {'sub': [{'opt': 1}, {}]}) + assert_type(conf.sub, Optional[List[Sub]]) + self.assertEqual(conf.sub, [{'opt': 1}, {'opt': None}]) + assert conf.sub is not None + assert_type(conf.sub[0].opt, Optional[int]) + self.assertEqual(conf.sub[0].opt, 1) + + conf = self.get_config(Schema, {'sub': []}) + + conf = self.get_config(Schema, {'sub': [{'opt': 1}, {'opt': 2}]}) + self.assertEqual(conf.sub, [{'opt': 1}, {'opt': 2}]) + + def test_required(self) -> None: + class Sub(Config): + opt = c.Type(int) + + class Schema(Config): + sub = c.ListOfItems(c.SubConfig(Sub)) + + with self.expect_error(sub="Required configuration not provided."): + conf = self.get_config(Schema, {}) + + with self.expect_error(sub="Required configuration not provided."): + conf = self.get_config(Schema, {'sub': None}) + + with self.expect_error( + sub="Sub-option 'opt': Expected type: but 
received: " + ): + conf = self.get_config(Schema, {'sub': [{'opt': 'asdf'}, {}]}) + + conf = self.get_config(Schema, {'sub': []}) + + conf = self.get_config(Schema, {'sub': [{'opt': 1}, {'opt': 2}]}) + assert_type(conf.sub, List[Sub]) + self.assertEqual(conf.sub, [{'opt': 1}, {'opt': 2}]) + assert_type(conf.sub[0].opt, int) + self.assertEqual(conf.sub[0].opt, 1) + + with self.expect_error( + sub="Sub-option 'opt': Expected type: but received: " + ): + self.get_config(Schema, {'sub': [{'opt': 'z'}, {'opt': 2}]}) + + with self.expect_error( + sub="Sub-option 'opt': Expected type: but received: " + ): + conf = self.get_config(Schema, {'sub': [{'opt': 'z'}, {'opt': 2}]}) + + with self.expect_error( + sub="The configuration is invalid. Expected a key-value mapping " + "(dict) but received: " + ): + conf = self.get_config(Schema, {'sub': [1, 2]}) + + def test_default(self) -> None: + class Sub(Config): + opt = c.Type(int) + + class Schema(Config): + sub = c.ListOfItems(c.SubConfig(Sub), default=[]) + + conf = self.get_config(Schema, {}) + assert_type(conf.sub, List[Sub]) + self.assertEqual(conf.sub, []) + + with self.expect_error(sub="Required configuration not provided."): + conf = self.get_config(Schema, {'sub': None}) + + def test_config_file_path_pass_through(self) -> None: + """Necessary to ensure FilesystemObject validates the correct path""" + + passed_config_path = None + + class SubType(c.BaseConfigOption): + def pre_validation(self, config: Config, key_name: str) -> None: + nonlocal passed_config_path + passed_config_path = config.config_file_path + + class Sub(Config): + opt = SubType() + + class Schema(Config): + sub = c.ListOfItems(c.SubConfig(Sub), default=[]) - option = config_options.Private() - self.assertRaises(config_options.ValidationError, - option.validate, 'somevalue') + config_path = "foo/mkdocs.yaml" + self.get_config(Schema, {"sub": [{"opt": "bar"}]}, config_file_path=config_path) + self.assertEqual(passed_config_path, config_path) -class MarkdownExtensionsTest(unittest.TestCase): +class NestedSubConfigTest(TestCase): + def defaults(self): + return { + 'nav': { + 'omitted_files': logging.INFO, + 'not_found': logging.WARNING, + 'absolute_links': logging.INFO, + }, + 'links': { + 'not_found': logging.WARNING, + 'absolute_links': logging.INFO, + 'unrecognized_links': logging.INFO, + }, + } + class Schema(Config): + validation = c.PropagatingSubConfig[defaults.MkDocsConfig.Validation]() + + def test_unspecified(self) -> None: + for cfg in {}, {'validation': {}}: + with self.subTest(cfg): + conf = self.get_config( + self.Schema, + {}, + ) + self.assertEqual(conf.validation, self.defaults()) + + def test_sets_nested_and_not_nested(self) -> None: + conf = self.get_config( + self.Schema, + {'validation': {'not_found': 'ignore', 'links': {'absolute_links': 'warn'}}}, + ) + expected = self.defaults() + expected['nav']['not_found'] = logging.DEBUG + expected['links']['not_found'] = logging.DEBUG + expected['links']['absolute_links'] = logging.WARNING + self.assertEqual(conf.validation, expected) + + def test_sets_nested_different(self) -> None: + conf = self.get_config( + self.Schema, + {'validation': {'not_found': 'ignore', 'links': {'not_found': 'warn'}}}, + ) + expected = self.defaults() + expected['nav']['not_found'] = logging.DEBUG + expected['links']['not_found'] = logging.WARNING + self.assertEqual(conf.validation, expected) + + def test_sets_only_one_nested(self) -> None: + conf = self.get_config( + self.Schema, + {'validation': {'omitted_files': 'ignore'}}, + ) + expected 
= self.defaults() + expected['nav']['omitted_files'] = logging.DEBUG + self.assertEqual(conf.validation, expected) + + def test_sets_nested_not_dict(self) -> None: + with self.expect_error( + validation="Sub-option 'links': Sub-option 'unrecognized_links': Expected a string, but a was given." + ): + self.get_config( + self.Schema, + {'validation': {'unrecognized_links': [], 'links': {'absolute_links': 'warn'}}}, + ) + + def test_wrong_key_nested(self) -> None: + conf = self.get_config( + self.Schema, + {'validation': {'foo': 'warn', 'not_found': 'warn'}}, + warnings=dict(validation="Sub-option 'foo': Unrecognised configuration name: foo"), + ) + expected = self.defaults() + expected['nav']['not_found'] = logging.WARNING + expected['links']['not_found'] = logging.WARNING + expected['foo'] = 'warn' + self.assertEqual(conf.validation, expected) + + def test_wrong_type_nested(self) -> None: + with self.expect_error( + validation="Sub-option 'nav': Sub-option 'omitted_files': Expected one of ['warn', 'info', 'ignore'], got 'hi'" + ): + self.get_config( + self.Schema, + {'validation': {'omitted_files': 'hi'}}, + ) + + +class MarkdownExtensionsTest(TestCase): @patch('markdown.Markdown') - def test_simple_list(self, mockMd): - option = config_options.MarkdownExtensions() + def test_simple_list(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private[Dict[str, dict]]() + config = { - 'markdown_extensions': ['foo', 'bar'] - } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ 'markdown_extensions': ['foo', 'bar'], - 'mdx_configs': {} - }, config) + } + conf = self.get_config(Schema, config) + assert_type(conf.markdown_extensions, List[str]) + assert_type(conf.mdx_configs, Dict[str, dict]) + self.assertEqual(conf.markdown_extensions, ['foo', 'bar']) + self.assertEqual(conf.mdx_configs, {}) @patch('markdown.Markdown') - def test_list_dicts(self, mockMd): - option = config_options.MarkdownExtensions() + def test_list_dicts(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private[Dict[str, dict]]() + config = { 'markdown_extensions': [ {'foo': {'foo_option': 'foo value'}}, {'bar': {'bar_option': 'bar value'}}, - {'baz': None} + {'baz': None}, ] } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ - 'markdown_extensions': ['foo', 'bar', 'baz'], - 'mdx_configs': { + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['foo', 'bar', 'baz']) + self.assertEqual( + conf.mdx_configs, + { 'foo': {'foo_option': 'foo value'}, - 'bar': {'bar_option': 'bar value'} - } - }, config) + 'bar': {'bar_option': 'bar value'}, + }, + ) @patch('markdown.Markdown') - def test_mixed_list(self, mockMd): - option = config_options.MarkdownExtensions() + def test_mixed_list(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private[Dict[str, dict]]() + config = { 'markdown_extensions': [ 'foo', - {'bar': {'bar_option': 'bar value'}} + {'bar': {'bar_option': 'bar value'}}, ] } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ - 'markdown_extensions': ['foo', 'bar'], - 'mdx_configs': { - 'bar': 
{'bar_option': 'bar value'} - } - }, config) + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['foo', 'bar']) + self.assertEqual( + conf.mdx_configs, + { + 'bar': {'bar_option': 'bar value'}, + }, + ) @patch('markdown.Markdown') - def test_builtins(self, mockMd): - option = config_options.MarkdownExtensions(builtins=['meta', 'toc']) + def test_dict_of_dicts(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private[Dict[str, dict]]() + config = { - 'markdown_extensions': ['foo', 'bar'] + 'markdown_extensions': { + 'foo': {'foo_option': 'foo value'}, + 'bar': {'bar_option': 'bar value'}, + 'baz': {}, + } } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ - 'markdown_extensions': ['meta', 'toc', 'foo', 'bar'], - 'mdx_configs': {} - }, config) + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['foo', 'bar', 'baz']) + self.assertEqual( + conf.mdx_configs, + { + 'foo': {'foo_option': 'foo value'}, + 'bar': {'bar_option': 'bar value'}, + }, + ) + + @patch('markdown.Markdown') + def test_builtins(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions(builtins=['meta', 'toc']) + mdx_configs = c.Private[Dict[str, dict]]() - def test_duplicates(self): - option = config_options.MarkdownExtensions(builtins=['meta', 'toc']) config = { - 'markdown_extensions': ['meta', 'toc'] + 'markdown_extensions': ['foo', 'bar'], } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['meta', 'toc', 'foo', 'bar']) + self.assertEqual(conf.mdx_configs, {}) + + def test_duplicates(self) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions(builtins=['meta', 'toc']) + mdx_configs = c.Private[Dict[str, dict]]() + + config = { 'markdown_extensions': ['meta', 'toc'], - 'mdx_configs': {} - }, config) + } + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['meta', 'toc']) + self.assertEqual(conf.mdx_configs, {}) + + def test_builtins_config(self) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions(builtins=['meta', 'toc']) + mdx_configs = c.Private[Dict[str, dict]]() - def test_builtins_config(self): - option = config_options.MarkdownExtensions(builtins=['meta', 'toc']) config = { 'markdown_extensions': [ - {'toc': {'permalink': True}} - ] + {'toc': {'permalink': True}}, + ], } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ - 'markdown_extensions': ['meta', 'toc'], - 'mdx_configs': {'toc': {'permalink': True}} - }, config) + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['meta', 'toc']) + self.assertEqual(conf.mdx_configs, {'toc': {'permalink': True}}) @patch('markdown.Markdown') - def test_configkey(self, mockMd): - option = config_options.MarkdownExtensions(configkey='bar') + def test_configkey(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions(configkey='bar') + bar = c.Private[Dict[str, dict]]() + config = { 'markdown_extensions': [ - {'foo': {'foo_option': 'foo value'}} + {'foo': 
{'foo_option': 'foo value'}}, ] } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ - 'markdown_extensions': ['foo'], - 'bar': { - 'foo': {'foo_option': 'foo value'} - } - }, config) + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, ['foo']) + self.assertEqual( + conf.bar, + { + 'foo': {'foo_option': 'foo value'}, + }, + ) + + def test_missing_default(self) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private[Dict[str, dict]]() + + conf = self.get_config(Schema, {}) + self.assertEqual(conf.markdown_extensions, []) + self.assertEqual(conf.mdx_configs, {}) + + def test_none(self) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions(default=[]) + mdx_configs = c.Private[Dict[str, dict]]() - def test_none(self): - option = config_options.MarkdownExtensions(default=[]) config = { - 'markdown_extensions': None + 'markdown_extensions': None, } - config['markdown_extensions'] = option.validate(config['markdown_extensions']) - option.post_validation(config, 'markdown_extensions') - self.assertEqual({ - 'markdown_extensions': [], - 'mdx_configs': {} - }, config) + conf = self.get_config(Schema, config) + self.assertEqual(conf.markdown_extensions, []) + self.assertEqual(conf.mdx_configs, {}) @patch('markdown.Markdown') - def test_not_list(self, mockMd): - option = config_options.MarkdownExtensions() - self.assertRaises(config_options.ValidationError, - option.validate, 'not a list') + def test_not_list(self, mock_md) -> None: + class Schema(Config): + option = c.MarkdownExtensions() + + with self.expect_error(option="Invalid Markdown Extensions configuration"): + self.get_config(Schema, {'option': 'not a list'}) @patch('markdown.Markdown') - def test_invalid_config_option(self, mockMd): - option = config_options.MarkdownExtensions() + def test_invalid_config_option(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + config = { 'markdown_extensions': [ - {'foo': 'not a dict'} - ] + {'foo': 'not a dict'}, + ], } - self.assertRaises( - config_options.ValidationError, - option.validate, config['markdown_extensions'] - ) + with self.expect_error( + markdown_extensions="Invalid config options for Markdown Extension 'foo'." 
+ ): + self.get_config(Schema, config) @patch('markdown.Markdown') - def test_invalid_config_item(self, mockMd): - option = config_options.MarkdownExtensions() + def test_invalid_config_item(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + config = { 'markdown_extensions': [ - ['not a dict'] - ] + ['not a dict'], + ], } - self.assertRaises( - config_options.ValidationError, - option.validate, config['markdown_extensions'] - ) + with self.expect_error(markdown_extensions="Invalid Markdown Extensions configuration"): + self.get_config(Schema, config) @patch('markdown.Markdown') - def test_invalid_dict_item(self, mockMd): - option = config_options.MarkdownExtensions() + def test_invalid_dict_item(self, mock_md) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + config = { 'markdown_extensions': [ - {'key1': 'value', 'key2': 'too many keys'} - ] + {'key1': 'value', 'key2': 'too many keys'}, + ], } - self.assertRaises( - config_options.ValidationError, - option.validate, config['markdown_extensions'] - ) + with self.expect_error(markdown_extensions="Invalid Markdown Extensions configuration"): + self.get_config(Schema, config) + + def test_unknown_extension(self) -> None: + class Schema(Config): + markdown_extensions = c.MarkdownExtensions() - def test_unknown_extension(self): - option = config_options.MarkdownExtensions() config = { - 'markdown_extensions': ['unknown'] + 'markdown_extensions': ['unknown'], } - self.assertRaises( - config_options.ValidationError, - option.validate, config['markdown_extensions'] + with self.expect_error( + markdown_extensions=re.compile(r"Failed to load extension 'unknown'.\n.+") + ): + self.get_config(Schema, config) + + def test_multiple_markdown_config_instances(self) -> None: + # This had a bug where an extension config would persist to separate + # config instances that didn't specify extensions. 
+ class Schema(Config): + markdown_extensions = c.MarkdownExtensions() + mdx_configs = c.Private[Dict[str, dict]]() + + conf = self.get_config( + Schema, + { + 'markdown_extensions': [{'toc': {'permalink': '##'}}], + }, + ) + self.assertEqual(conf.mdx_configs['toc'], {'permalink': '##'}) + + conf = self.get_config( + Schema, + {}, ) + self.assertIsNone(conf.mdx_configs.get('toc')) + + +class _FakePluginConfig(Config): + foo = c.Type(str, default='default foo') + bar = c.Type(int, default=0) + dir = c.Optional(c.Dir(exists=False)) + + +class FakePlugin(BasePlugin[_FakePluginConfig]): + pass + + +class _FakePlugin2Config(_FakePluginConfig): + depr = c.Deprecated() + + +class FakePlugin2(BasePlugin[_FakePlugin2Config]): + supports_multiple_instances = True + + +class ThemePlugin(BasePlugin[_FakePluginConfig]): + pass + + +class ThemePlugin2(BasePlugin[_FakePluginConfig]): + pass + + +class FakeEntryPoint: + def __init__(self, name, cls): + self.name = name + self.cls = cls + + def load(self): + return self.cls + + +@patch( + 'mkdocs.plugins.entry_points', + return_value=[ + FakeEntryPoint('sample', FakePlugin), + FakeEntryPoint('sample2', FakePlugin2), + FakeEntryPoint('readthedocs/sub_plugin', ThemePlugin), + FakeEntryPoint('overridden', FakePlugin2), + FakeEntryPoint('readthedocs/overridden', ThemePlugin2), + ], +) +class PluginsTest(TestCase): + def test_plugin_config_without_options(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': ['sample'], + } + conf = self.get_config(Schema, cfg) + + assert_type(conf.plugins, PluginCollection) + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertIn('sample', conf.plugins) + + plugin = conf.plugins['sample'] + assert_type(plugin, BasePlugin) + self.assertIsInstance(plugin, FakePlugin) + self.assertIsInstance(plugin.config, _FakePluginConfig) + expected = { + 'foo': 'default foo', + 'bar': 0, + 'dir': None, + } + self.assertEqual(plugin.config, expected) + + def test_plugin_config_with_options(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': [ + { + 'sample': { + 'foo': 'foo value', + 'bar': 42, + }, + } + ], + } + conf = self.get_config(Schema, cfg) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertIn('sample', conf.plugins) + self.assertIsInstance(conf.plugins['sample'], BasePlugin) + expected = { + 'foo': 'foo value', + 'bar': 42, + 'dir': None, + } + self.assertEqual(conf.plugins['sample'].config, expected) + + def test_plugin_config_as_dict(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': { + 'sample': { + 'foo': 'foo value', + 'bar': 42, + }, + }, + } + conf = self.get_config(Schema, cfg) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertIn('sample', conf.plugins) + self.assertIsInstance(conf.plugins['sample'], BasePlugin) + expected = { + 'foo': 'foo value', + 'bar': 42, + 'dir': None, + } + self.assertEqual(conf.plugins['sample'].config, expected) + + def test_plugin_config_with_explicit_theme_namespace(self, mock_class) -> None: + class Schema(Config): + theme = c.Theme(default='mkdocs') + plugins = c.Plugins(theme_key='theme') + + cfg = {'theme': 'readthedocs', 'plugins': ['readthedocs/sub_plugin']} + conf = self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'readthedocs/sub_plugin'}) + self.assertIsInstance(conf.plugins['readthedocs/sub_plugin'], ThemePlugin) + + cfg = {'plugins': ['readthedocs/sub_plugin']} + conf = 
self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'readthedocs/sub_plugin'}) + self.assertIsInstance(conf.plugins['readthedocs/sub_plugin'], ThemePlugin) + + def test_plugin_config_with_deduced_theme_namespace(self, mock_class) -> None: + class Schema(Config): + theme = c.Theme(default='mkdocs') + plugins = c.Plugins(theme_key='theme') + + cfg = {'theme': 'readthedocs', 'plugins': ['sub_plugin']} + conf = self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'readthedocs/sub_plugin'}) + self.assertIsInstance(conf.plugins['readthedocs/sub_plugin'], ThemePlugin) + + cfg = {'plugins': ['sub_plugin']} + with self.expect_error(plugins='The "sub_plugin" plugin is not installed'): + self.get_config(Schema, cfg) + + def test_plugin_config_with_deduced_theme_namespace_overridden(self, mock_class) -> None: + class Schema(Config): + theme = c.Theme(default='mkdocs') + plugins = c.Plugins(theme_key='theme') + + cfg = {'theme': 'readthedocs', 'plugins': ['overridden']} + conf = self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'readthedocs/overridden'}) + self.assertIsInstance(next(iter(conf.plugins.values())), ThemePlugin2) + + cfg = {'plugins': ['overridden']} + conf = self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'overridden'}) + self.assertIsInstance(conf.plugins['overridden'], FakePlugin2) + + def test_plugin_config_with_explicit_empty_namespace(self, mock_class) -> None: + class Schema(Config): + theme = c.Theme(default='mkdocs') + plugins = c.Plugins(theme_key='theme') + + cfg = {'theme': 'readthedocs', 'plugins': ['/overridden']} + conf = self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'overridden'}) + self.assertIsInstance(next(iter(conf.plugins.values())), FakePlugin2) + + cfg = {'plugins': ['/overridden']} + conf = self.get_config(Schema, cfg) + + self.assertEqual(set(conf.plugins), {'overridden'}) + self.assertIsInstance(conf.plugins['overridden'], FakePlugin2) + + def test_plugin_config_with_multiple_instances(self, mock_class) -> None: + class Schema(Config): + theme = c.Theme(default='mkdocs') + plugins = c.Plugins(theme_key='theme') + + cfg = { + 'plugins': [ + {'sample2': {'foo': 'foo value', 'bar': 42}}, + {'sample2': {'foo': 'foo2 value'}}, + ], + } + conf = self.get_config(Schema, cfg) + + self.assertEqual( + set(conf.plugins), + {'sample2', 'sample2 #2'}, + ) + self.assertEqual(conf.plugins['sample2'].config['bar'], 42) + self.assertEqual(conf.plugins['sample2 #2'].config['bar'], 0) + + def test_plugin_config_with_multiple_instances_and_warning(self, mock_class) -> None: + class Schema(Config): + theme = c.Theme(default='mkdocs') + plugins = c.Plugins(theme_key='theme') + + test_cfgs: list[dict[str, Any]] = [ + { + 'theme': 'readthedocs', + 'plugins': [{'sub_plugin': {}}, {'sample2': {}}, {'sub_plugin': {}}, 'sample2'], + }, + { + 'theme': 'readthedocs', + 'plugins': ['sub_plugin', 'sample2', 'sample2', 'sub_plugin'], + }, + ] + + for cfg in test_cfgs: + conf = self.get_config( + Schema, + cfg, + warnings=dict( + plugins="Plugin 'readthedocs/sub_plugin' was specified multiple times - " + "this is likely a mistake, because the plugin doesn't declare " + "`supports_multiple_instances`." 
+ ), + ) + self.assertEqual( + set(conf.plugins), + {'readthedocs/sub_plugin', 'readthedocs/sub_plugin #2', 'sample2', 'sample2 #2'}, + ) + + def test_plugin_config_empty_list_with_empty_default(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins(default=[]) + + cfg: dict[str, Any] = {'plugins': []} + conf = self.get_config(Schema, cfg) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertEqual(len(conf.plugins), 0) + + def test_plugin_config_empty_list_with_default(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins(default=['sample']) + + # Default is ignored + cfg: dict[str, Any] = {'plugins': []} + conf = self.get_config(Schema, cfg) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertEqual(len(conf.plugins), 0) + + def test_plugin_config_none_with_empty_default(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins(default=[]) + + cfg = {'plugins': None} + conf = self.get_config(Schema, cfg) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertEqual(len(conf.plugins), 0) + + def test_plugin_config_none_with_default(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins(default=['sample']) + + # Default is used. + cfg = {'plugins': None} + conf = self.get_config(Schema, cfg) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertIn('sample', conf.plugins) + self.assertIsInstance(conf.plugins['sample'], BasePlugin) + expected = { + 'foo': 'default foo', + 'bar': 0, + 'dir': None, + } + self.assertEqual(conf.plugins['sample'].config, expected) + + def test_plugin_config_uninstalled(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = {'plugins': ['uninstalled']} + with self.expect_error(plugins='The "uninstalled" plugin is not installed'): + self.get_config(Schema, cfg) + + def test_plugin_config_not_list(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = {'plugins': 'sample'} + with self.expect_error(plugins="Invalid Plugins configuration. 
Expected a list or dict."): + self.get_config(Schema, cfg) + + def test_plugin_config_multivalue_dict(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': [ + { + 'sample': { + 'foo': 'foo value', + 'bar': 42, + }, + 'extra_key': 'baz', + } + ], + } + with self.expect_error(plugins="Invalid Plugins configuration"): + self.get_config(Schema, cfg) + + cfg = { + 'plugins': [ + {}, + ], + } + with self.expect_error(plugins="Invalid Plugins configuration"): + self.get_config(Schema, cfg) + + def test_plugin_config_not_string_or_dict(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': [('not a string or dict',)], + } + with self.expect_error(plugins="'('not a string or dict',)' is not a valid plugin name."): + self.get_config(Schema, cfg) + + def test_plugin_config_options_not_dict(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': [{'sample': 'not a dict'}], + } + with self.expect_error(plugins="Invalid config options for the 'sample' plugin."): + self.get_config(Schema, cfg) + + def test_plugin_config_sub_error(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins(default=['sample']) + + cfg = { + 'plugins': { + 'sample': {'bar': 'not an int'}, + } + } + with self.expect_error( + plugins="Plugin 'sample' option 'bar': Expected type: but received: " + ): + self.get_config(Schema, cfg) + + def test_plugin_config_sub_warning(self, mock_class) -> None: + class Schema(Config): + plugins = c.Plugins() + + cfg = { + 'plugins': { + 'sample2': {'depr': 'deprecated value'}, + } + } + conf = self.get_config( + Schema, + cfg, + warnings=dict( + plugins="Plugin 'sample2' option 'depr': The configuration option " + "'depr' has been deprecated and will be removed in a future release." 
+ ), + ) + + self.assertIsInstance(conf.plugins, PluginCollection) + self.assertIn('sample2', conf.plugins) + + +class HooksTest(TestCase): + class Schema(Config): + plugins = c.Plugins(default=[]) + hooks = c.Hooks('plugins') + + @tempdir() + def test_hooks(self, src_dir) -> None: + write_file( + b'def on_page_markdown(markdown, **kwargs): return markdown.replace("f", "z")', + os.path.join(src_dir, 'hooks', 'my_hook.py'), + ) + write_file( + b'foo foo', + os.path.join(src_dir, 'docs', 'index.md'), + ) + conf = self.get_config( + self.Schema, + {'hooks': ['hooks/my_hook.py']}, + config_file_path=os.path.join(src_dir, 'mkdocs.yml'), + ) + self.assertIn('hooks/my_hook.py', conf.plugins) + hook = conf.plugins['hooks/my_hook.py'] + self.assertTrue(hasattr(hook, 'on_page_markdown')) + self.assertEqual( + {**conf.plugins.events, 'page_markdown': [hook.on_page_markdown]}, + conf.plugins.events, + ) + self.assertEqual(hook.on_page_markdown('foo foo'), 'zoo zoo') # type: ignore[call-arg] + self.assertFalse(hasattr(hook, 'on_nav')) + + def test_hooks_wrong_type(self) -> None: + with self.expect_error(hooks="Expected a list of items, but a was given."): + self.get_config(self.Schema, {'hooks': 6}) + + with self.expect_error(hooks="Expected type: but received: "): + self.get_config(self.Schema, {'hooks': [7]}) + + +class SchemaTest(TestCase): + def test_copy(self) -> None: + class Schema(Config): + foo = c.MarkdownExtensions() + + copy.deepcopy(Schema()) + + copy.deepcopy(self.get_config(IpAddressTest.Schema, {'option': '1.2.3.4:5678'})) + copy.deepcopy(IpAddressTest.Schema) + copy.deepcopy(IpAddressTest.Schema()) + + copy.deepcopy(self.get_config(EditURITest.Schema, {})) + copy.deepcopy(EditURITest.Schema) + copy.deepcopy(EditURITest.Schema()) + + def test_subclass(self) -> None: + class A(Config): + foo = c.Type(int) + bar = c.Optional(c.Type(str)) + + class B(A): + baz = c.ListOfItems(c.Type(str)) + + conf = self.get_config(B, {'foo': 1, 'baz': ['b']}) + assert_type(conf.foo, int) + self.assertEqual(conf.foo, 1) + assert_type(conf.bar, Optional[str]) + self.assertEqual(conf.bar, None) + assert_type(conf.baz, List[str]) + self.assertEqual(conf.baz, ['b']) + + with self.expect_error(baz="Required configuration not provided."): + self.get_config(B, {'foo': 1}) + with self.expect_error(foo="Required configuration not provided."): + self.get_config(B, {'baz': ['b']}) + + bconf = self.get_config(A, {'foo': 2, 'bar': 'a'}) + assert_type(bconf.foo, int) + self.assertEqual(bconf.foo, 2) + self.assertEqual(bconf.bar, 'a') diff --git a/mkdocs/tests/config/config_tests.py b/mkdocs/tests/config/config_tests.py index 8d562dc..5469318 100644 --- a/mkdocs/tests/config/config_tests.py +++ b/mkdocs/tests/config/config_tests.py @@ -1,243 +1,246 @@ #!/usr/bin/env python import os -import tempfile import unittest -from tempfile import TemporaryDirectory import mkdocs from mkdocs import config -from mkdocs.config import config_options +from mkdocs.config import config_options as c +from mkdocs.config import defaults +from mkdocs.config.base import ValidationError from mkdocs.exceptions import ConfigurationError -from mkdocs.tests.base import dedent +from mkdocs.localization import parse_locale +from mkdocs.tests.base import dedent, tempdir class ConfigTests(unittest.TestCase): def test_missing_config_file(self): - - def load_missing_config(): + with self.assertRaises(ConfigurationError): config.load_config(config_file='bad_filename.yaml') - self.assertRaises(ConfigurationError, load_missing_config) def 
test_missing_site_name(self): - c = config.Config(schema=config.DEFAULT_SCHEMA) - c.load_dict({}) - errors, warnings = c.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(errors[0][0], 'site_name') - self.assertEqual(str(errors[0][1]), 'Required configuration not provided.') - - self.assertEqual(len(warnings), 0) - - def test_empty_config(self): - def load_empty_config(): - config.load_config(config_file='/dev/null') - self.assertRaises(ConfigurationError, load_empty_config) + conf = defaults.MkDocsConfig() + conf.load_dict({}) + errors, warnings = conf.validate() + self.assertEqual( + errors, [('site_name', ValidationError("Required configuration not provided."))] + ) + self.assertEqual(warnings, []) def test_nonexistant_config(self): - def load_empty_config(): + with self.assertRaises(ConfigurationError): config.load_config(config_file='/path/that/is/not/real') - self.assertRaises(ConfigurationError, load_empty_config) - def test_invalid_config(self): - file_contents = dedent(""" - - ['index.md', 'Introduction'] - - ['index.md', 'Introduction'] - - ['index.md', 'Introduction'] - """) - config_file = tempfile.NamedTemporaryFile('w', delete=False) - try: + @tempdir() + def test_invalid_config(self, temp_path): + file_contents = dedent( + """ + - ['index.md', 'Introduction'] + - ['index.md', 'Introduction'] + - ['index.md', 'Introduction'] + """ + ) + config_path = os.path.join(temp_path, 'foo.yml') + with open(config_path, 'w') as config_file: config_file.write(file_contents) - config_file.flush() - config_file.close() - self.assertRaises( - ConfigurationError, - config.load_config, config_file=open(config_file.name, 'rb') - ) - finally: - os.remove(config_file.name) + with self.assertRaises(ConfigurationError): + config.load_config(config_file=open(config_file.name, 'rb')) - def test_config_option(self): + @tempdir() + def test_config_option(self, temp_path): """ Users can explicitly set the config file using the '--config' option. Allows users to specify a config other than the default `mkdocs.yml`. 
""" expected_result = { 'site_name': 'Example', - 'pages': [ - {'Introduction': 'index.md'} - ], + 'nav': [{'Introduction': 'index.md'}], } - file_contents = dedent(""" - site_name: Example - pages: - - 'Introduction': 'index.md' - """) - with TemporaryDirectory() as temp_path: - os.mkdir(os.path.join(temp_path, 'docs')) - config_path = os.path.join(temp_path, 'mkdocs.yml') - config_file = open(config_path, 'w') - + file_contents = dedent( + """ + site_name: Example + nav: + - 'Introduction': 'index.md' + """ + ) + config_path = os.path.join(temp_path, 'mkdocs.yml') + with open(config_path, 'w') as config_file: config_file.write(file_contents) - config_file.flush() - config_file.close() + os.mkdir(os.path.join(temp_path, 'docs')) - result = config.load_config(config_file=config_file.name) - self.assertEqual(result['site_name'], expected_result['site_name']) - self.assertEqual(result['pages'], expected_result['pages']) + result = config.load_config(config_file=config_file.name) + self.assertEqual(result['site_name'], expected_result['site_name']) + self.assertEqual(result['nav'], expected_result['nav']) - def test_theme(self): - with TemporaryDirectory() as mytheme, TemporaryDirectory() as custom: - configs = [ - dict(), # default theme - {"theme": "readthedocs"}, # builtin theme - {"theme": {'name': 'readthedocs'}}, # builtin as complex - {"theme": {'name': None, 'custom_dir': mytheme}}, # custom only as complex - {"theme": {'name': 'readthedocs', 'custom_dir': custom}}, # builtin and custom as complex - { # user defined variables - 'theme': { - 'name': 'mkdocs', - 'static_templates': ['foo.html'], - 'show_sidebar': False, - 'some_var': 'bar' - } + @tempdir() + @tempdir() + def test_theme(self, mytheme, custom): + configs = [ + {}, # default theme + {"theme": "readthedocs"}, # builtin theme + {"theme": {'name': 'readthedocs'}}, # builtin as complex + {"theme": {'name': None, 'custom_dir': mytheme}}, # custom only as complex + { + "theme": {'name': 'readthedocs', 'custom_dir': custom} + }, # builtin and custom as complex + { # user defined variables + 'theme': { + 'name': 'mkdocs', + 'locale': 'fr', + 'static_templates': ['foo.html'], + 'show_sidebar': False, + 'some_var': 'bar', } - ] - - mkdocs_dir = os.path.abspath(os.path.dirname(mkdocs.__file__)) - mkdocs_templates_dir = os.path.join(mkdocs_dir, 'templates') - theme_dir = os.path.abspath(os.path.join(mkdocs_dir, 'themes')) + }, + ] - results = ( - { - 'dirs': [os.path.join(theme_dir, 'mkdocs'), mkdocs_templates_dir], - 'static_templates': ['404.html', 'sitemap.xml'], - 'vars': { - 'include_search_page': False, - 'search_index_only': False, - 'highlightjs': True, - 'hljs_style': 'github', - 'hljs_languages': [], - 'navigation_depth': 2, - 'nav_style': 'primary', - 'shortcuts': {'help': 191, 'next': 78, 'previous': 80, 'search': 83} - } - }, { - 'dirs': [os.path.join(theme_dir, 'readthedocs'), mkdocs_templates_dir], - 'static_templates': ['404.html', 'sitemap.xml'], - 'vars': { - 'include_search_page': True, - 'search_index_only': False, - 'highlightjs': True, - 'hljs_languages': [], - 'include_homepage_in_sidebar': True, - 'prev_next_buttons_location': 'bottom', - 'navigation_depth': 4, - 'sticky_navigation': True, - 'titles_only': False, - 'collapse_navigation': True - } - }, { - 'dirs': [os.path.join(theme_dir, 'readthedocs'), mkdocs_templates_dir], - 'static_templates': ['404.html', 'sitemap.xml'], - 'vars': { - 'include_search_page': True, - 'search_index_only': False, - 'highlightjs': True, - 'hljs_languages': [], - 
'include_homepage_in_sidebar': True, - 'prev_next_buttons_location': 'bottom', - 'navigation_depth': 4, - 'sticky_navigation': True, - 'titles_only': False, - 'collapse_navigation': True - } - }, { - 'dirs': [mytheme, mkdocs_templates_dir], - 'static_templates': ['sitemap.xml'], - 'vars': {} - }, { - 'dirs': [custom, os.path.join(theme_dir, 'readthedocs'), mkdocs_templates_dir], - 'static_templates': ['404.html', 'sitemap.xml'], - 'vars': { - 'include_search_page': True, - 'search_index_only': False, - 'highlightjs': True, - 'hljs_languages': [], - 'include_homepage_in_sidebar': True, - 'prev_next_buttons_location': 'bottom', - 'navigation_depth': 4, - 'sticky_navigation': True, - 'titles_only': False, - 'collapse_navigation': True - } - }, { - 'dirs': [os.path.join(theme_dir, 'mkdocs'), mkdocs_templates_dir], - 'static_templates': ['404.html', 'sitemap.xml', 'foo.html'], - 'vars': { - 'show_sidebar': False, - 'some_var': 'bar', - 'include_search_page': False, - 'search_index_only': False, - 'highlightjs': True, - 'hljs_style': 'github', - 'hljs_languages': [], - 'navigation_depth': 2, - 'nav_style': 'primary', - 'shortcuts': {'help': 191, 'next': 78, 'previous': 80, 'search': 83} - } - } - ) + mkdocs_dir = os.path.abspath(os.path.dirname(mkdocs.__file__)) + mkdocs_templates_dir = os.path.join(mkdocs_dir, 'templates') + theme_dir = os.path.abspath(os.path.join(mkdocs_dir, 'themes')) - for config_contents, result in zip(configs, results): + results = ( + { + 'dirs': [os.path.join(theme_dir, 'mkdocs'), mkdocs_templates_dir], + 'static_templates': ['404.html', 'sitemap.xml'], + 'vars': { + 'name': 'mkdocs', + 'locale': parse_locale('en'), + 'include_search_page': False, + 'search_index_only': False, + 'analytics': {'gtag': None}, + 'highlightjs': True, + 'hljs_style': 'github', + 'hljs_languages': [], + 'navigation_depth': 2, + 'nav_style': 'primary', + 'shortcuts': {'help': 191, 'next': 78, 'previous': 80, 'search': 83}, + }, + }, + { + 'dirs': [os.path.join(theme_dir, 'readthedocs'), mkdocs_templates_dir], + 'static_templates': ['404.html', 'sitemap.xml'], + 'vars': { + 'name': 'readthedocs', + 'locale': parse_locale('en'), + 'include_search_page': True, + 'search_index_only': False, + 'analytics': {'anonymize_ip': False, 'gtag': None}, + 'highlightjs': True, + 'hljs_languages': [], + 'hljs_style': 'github', + 'include_homepage_in_sidebar': True, + 'prev_next_buttons_location': 'bottom', + 'navigation_depth': 4, + 'sticky_navigation': True, + 'logo': None, + 'titles_only': False, + 'collapse_navigation': True, + }, + }, + { + 'dirs': [os.path.join(theme_dir, 'readthedocs'), mkdocs_templates_dir], + 'static_templates': ['404.html', 'sitemap.xml'], + 'vars': { + 'name': 'readthedocs', + 'locale': parse_locale('en'), + 'include_search_page': True, + 'search_index_only': False, + 'analytics': {'anonymize_ip': False, 'gtag': None}, + 'highlightjs': True, + 'hljs_languages': [], + 'hljs_style': 'github', + 'include_homepage_in_sidebar': True, + 'prev_next_buttons_location': 'bottom', + 'navigation_depth': 4, + 'sticky_navigation': True, + 'logo': None, + 'titles_only': False, + 'collapse_navigation': True, + }, + }, + { + 'dirs': [mytheme, mkdocs_templates_dir], + 'static_templates': ['sitemap.xml'], + 'vars': {'name': None, 'locale': parse_locale('en')}, + }, + { + 'dirs': [custom, os.path.join(theme_dir, 'readthedocs'), mkdocs_templates_dir], + 'static_templates': ['404.html', 'sitemap.xml'], + 'vars': { + 'name': 'readthedocs', + 'locale': parse_locale('en'), + 'include_search_page': True, + 
'search_index_only': False, + 'analytics': {'anonymize_ip': False, 'gtag': None}, + 'highlightjs': True, + 'hljs_languages': [], + 'hljs_style': 'github', + 'include_homepage_in_sidebar': True, + 'prev_next_buttons_location': 'bottom', + 'navigation_depth': 4, + 'sticky_navigation': True, + 'logo': None, + 'titles_only': False, + 'collapse_navigation': True, + }, + }, + { + 'dirs': [os.path.join(theme_dir, 'mkdocs'), mkdocs_templates_dir], + 'static_templates': ['404.html', 'sitemap.xml', 'foo.html'], + 'vars': { + 'name': 'mkdocs', + 'locale': parse_locale('fr'), + 'show_sidebar': False, + 'some_var': 'bar', + 'include_search_page': False, + 'search_index_only': False, + 'analytics': {'gtag': None}, + 'highlightjs': True, + 'hljs_style': 'github', + 'hljs_languages': [], + 'navigation_depth': 2, + 'nav_style': 'primary', + 'shortcuts': {'help': 191, 'next': 78, 'previous': 80, 'search': 83}, + }, + }, + ) - c = config.Config(schema=(('theme', config_options.Theme(default='mkdocs')),)) - c.load_dict(config_contents) - errors, warnings = c.validate() - self.assertEqual(len(errors), 0) - self.assertEqual(c['theme'].dirs, result['dirs']) - self.assertEqual(c['theme'].static_templates, set(result['static_templates'])) - self.assertEqual({k: c['theme'][k] for k in iter(c['theme'])}, result['vars']) + for config_contents, result in zip(configs, results): + with self.subTest(config_contents): + conf = config.Config(schema=(('theme', c.Theme(default='mkdocs')),)) + conf.load_dict(config_contents) + errors, warnings = conf.validate() + self.assertEqual(errors, []) + self.assertEqual(warnings, []) + self.assertEqual(conf['theme'].dirs, result['dirs']) + self.assertEqual(conf['theme'].static_templates, set(result['static_templates'])) + self.assertEqual(dict(conf['theme']), result['vars']) def test_empty_nav(self): - conf = config.Config(schema=config.DEFAULT_SCHEMA) - conf.load_dict({ - 'site_name': 'Example', - 'config_file_path': os.path.join(os.path.abspath('.'), 'mkdocs.yml') - }) + conf = defaults.MkDocsConfig( + config_file_path=os.path.join(os.path.abspath('.'), 'mkdocs.yml') + ) + conf.load_dict({'site_name': 'Example'}) conf.validate() self.assertEqual(conf['nav'], None) - def test_copy_pages_to_nav(self): - # TODO: remove this when pages config setting is fully deprecated. - conf = config.Config(schema=config.DEFAULT_SCHEMA) - conf.load_dict({ - 'site_name': 'Example', - 'pages': ['index.md', 'about.md'], - 'config_file_path': os.path.join(os.path.abspath('.'), 'mkdocs.yml') - }) - conf.validate() - self.assertEqual(conf['nav'], ['index.md', 'about.md']) - - def test_dont_overwrite_nav_with_pages(self): - # TODO: remove this when pages config setting is fully deprecated. - conf = config.Config(schema=config.DEFAULT_SCHEMA) - conf.load_dict({ - 'site_name': 'Example', - 'pages': ['index.md', 'about.md'], - 'nav': ['foo.md', 'bar.md'], - 'config_file_path': os.path.join(os.path.abspath('.'), 'mkdocs.yml') - }) - conf.validate() - self.assertEqual(conf['nav'], ['foo.md', 'bar.md']) + def test_error_on_pages(self): + conf = defaults.MkDocsConfig() + conf.load_dict( + { + 'site_name': 'Example', + 'pages': ['index.md', 'about.md'], + } + ) + errors, warnings = conf.validate() + exp_error = "The configuration option 'pages' was removed from MkDocs. Use 'nav' instead." 
+ self.assertEqual(errors, [('pages', ValidationError(exp_error))]) + self.assertEqual(warnings, []) def test_doc_dir_in_site_dir(self): - - j = os.path.join - test_configs = ( - {'docs_dir': j('site', 'docs'), 'site_dir': 'site'}, + {'docs_dir': os.path.join('site', 'docs'), 'site_dir': 'site'}, {'docs_dir': 'docs', 'site_dir': '.'}, {'docs_dir': '.', 'site_dir': '.'}, {'docs_dir': 'docs', 'site_dir': ''}, @@ -245,24 +248,19 @@ def test_doc_dir_in_site_dir(self): {'docs_dir': 'docs', 'site_dir': 'docs'}, ) - conf = { - 'config_file_path': j(os.path.abspath('..'), 'mkdocs.yml') - } - for test_config in test_configs: + with self.subTest(test_config): + # Same as the default schema, but don't verify the docs_dir exists. + conf = config.Config( + schema=( + ('docs_dir', c.Dir(default='docs')), + ('site_dir', c.SiteDir(default='site')), + ), + config_file_path=os.path.join(os.path.abspath('..'), 'mkdocs.yml'), + ) + conf.load_dict(test_config) - patch = conf.copy() - patch.update(test_config) - - # Same as the default schema, but don't verify the docs_dir exists. - c = config.Config(schema=( - ('docs_dir', config_options.Dir(default='docs')), - ('site_dir', config_options.SiteDir(default='site')), - ('config_file_path', config_options.Type(str)) - )) - c.load_dict(patch) - - errors, warnings = c.validate() + errors, warnings = conf.validate() - self.assertEqual(len(errors), 1) - self.assertEqual(warnings, []) + self.assertEqual(len(errors), 1) + self.assertEqual(warnings, []) diff --git a/mkdocs/tests/get_deps_tests.py b/mkdocs/tests/get_deps_tests.py new file mode 100644 index 0000000..58a95af --- /dev/null +++ b/mkdocs/tests/get_deps_tests.py @@ -0,0 +1,172 @@ +import contextlib +import io +import os +import textwrap +import unittest + +from mkdocs.commands.get_deps import get_deps +from mkdocs.tests.base import tempdir + +_projects_file_path = os.path.join( + os.path.abspath(os.path.dirname(__file__)), 'integration', 'projects.yaml' +) + + +class TestGetDeps(unittest.TestCase): + @contextlib.contextmanager + def _assert_logs(self, expected): + with self.assertLogs('mkdocs.commands.get_deps') as cm: + yield + msgs = [f'{r.levelname}:{r.message}' for r in cm.records] + self.assertEqual('\n'.join(msgs), textwrap.dedent(expected).strip('\n')) + + @tempdir() + def _test_get_deps(self, tempdir, yml, expected): + if yml: + yml = 'site_name: Test\n' + textwrap.dedent(yml) + projects_path = os.path.join(tempdir, 'projects.yaml') + with open(projects_path, 'w') as f: + f.write(yml) + buf = io.StringIO() + with contextlib.redirect_stdout(buf): + get_deps(_projects_file_path, projects_path) + self.assertEqual(buf.getvalue().split(), expected) + + def test_empty_config(self): + expected_logs = "WARNING:The passed config file doesn't seem to be a mkdocs.yml config file" + with self._assert_logs(expected_logs): + self._test_get_deps('', []) + + def test_just_search(self): + cfg = ''' + plugins: [search] + ''' + self._test_get_deps(cfg, ['mkdocs']) + + def test_mkdocs_config(self): + cfg = ''' + site_name: MkDocs + theme: + name: mkdocs + locale: en + markdown_extensions: + - toc: + permalink:  + - attr_list + - def_list + - tables + - pymdownx.highlight: + use_pygments: false + - pymdownx.snippets + - pymdownx.superfences + - callouts + - mdx_gh_links: + user: mkdocs + repo: mkdocs + - mkdocs-click + plugins: + - search + - redirects: + - autorefs + - literate-nav: + nav_file: README.md + implicit_index: true + - mkdocstrings: + handlers: + python: + options: + docstring_section_style: list + ''' + 
self._test_get_deps( + cfg, + [ + 'markdown-callouts', + 'mdx-gh-links', + 'mkdocs', + 'mkdocs-autorefs', + 'mkdocs-click', + 'mkdocs-literate-nav', + 'mkdocs-redirects', + 'mkdocstrings', + 'mkdocstrings-python', + 'pymdown-extensions', + ], + ) + + def test_dict_keys_and_ignores_env(self): + cfg = ''' + theme: + name: material + plugins: + code-validator: + enabled: !ENV [LINT, false] + markdown_extensions: + pymdownx.emoji: + emoji_index: !!python/name:materialx.emoji.twemoji + emoji_generator: !!python/name:materialx.emoji.to_svg + ''' + self._test_get_deps( + cfg, ['mkdocs', 'mkdocs-code-validator', 'mkdocs-material', 'pymdown-extensions'] + ) + + def test_theme_precedence(self): + cfg = ''' + plugins: + - tags + theme: material + ''' + self._test_get_deps(cfg, ['mkdocs', 'mkdocs-material']) + + cfg = ''' + plugins: + - material/tags + ''' + self._test_get_deps(cfg, ['mkdocs', 'mkdocs-material']) + + cfg = ''' + plugins: + - tags + ''' + self._test_get_deps(cfg, ['mkdocs', 'mkdocs-plugin-tags']) + + def test_nonexistent(self): + cfg = ''' + plugins: + - taglttghhmdu + - syyisjupkbpo + - redirects + theme: qndyakplooyh + markdown_extensions: + - saqdhyndpvpa + ''' + expected_logs = """ + WARNING:Theme 'qndyakplooyh' is not provided by any registered project + WARNING:Plugin 'syyisjupkbpo' is not provided by any registered project + WARNING:Plugin 'taglttghhmdu' is not provided by any registered project + WARNING:Extension 'saqdhyndpvpa' is not provided by any registered project + """ + with self._assert_logs(expected_logs): + self._test_get_deps(cfg, ['mkdocs', 'mkdocs-redirects']) + + def test_git_and_shadowed(self): + cfg = ''' + theme: bootstrap4 + plugins: [blog] + ''' + self._test_get_deps( + cfg, ['git+https://github.com/andyoakley/mkdocs-blog', 'mkdocs', 'mkdocs-bootstrap4'] + ) + + def test_multi_theme(self): + cfg = ''' + theme: minty + ''' + self._test_get_deps(cfg, ['mkdocs', 'mkdocs-bootswatch']) + + def test_with_locale(self): + cfg = ''' + theme: + name: mkdocs + locale: uk + ''' + self._test_get_deps(cfg, ['mkdocs[i18n]']) diff --git a/mkdocs/tests/gh_deploy_tests.py b/mkdocs/tests/gh_deploy_tests.py index 65f0c66..093e659 100644 --- a/mkdocs/tests/gh_deploy_tests.py +++ b/mkdocs/tests/gh_deploy_tests.py @@ -1,54 +1,38 @@ import unittest from unittest import mock -from mkdocs.tests.base import load_config -from mkdocs.commands import gh_deploy +from ghp_import import GhpError + from mkdocs import __version__ +from mkdocs.commands import gh_deploy +from mkdocs.exceptions import Abort +from mkdocs.tests.base import load_config class TestGitHubDeploy(unittest.TestCase): - - def assert_mock_called_once(self, mock): - """assert that the mock was called only once. - - The `mock.assert_called_once()` method was added in PY36. - TODO: Remove this when PY35 support is dropped. - """ - try: - mock.assert_called_once() - except AttributeError: - if not mock.call_count == 1: - msg = ("Expected '%s' to have been called once. Called %s times." 
% - (mock._mock_name or 'mock', self.call_count)) - raise AssertionError(msg) - @mock.patch('subprocess.Popen') def test_is_cwd_git_repo(self, mock_popeno): - mock_popeno().wait.return_value = 0 self.assertTrue(gh_deploy._is_cwd_git_repo()) @mock.patch('subprocess.Popen') def test_is_cwd_not_git_repo(self, mock_popeno): - mock_popeno().wait.return_value = 1 self.assertFalse(gh_deploy._is_cwd_git_repo()) @mock.patch('subprocess.Popen') def test_get_current_sha(self, mock_popeno): - mock_popeno().communicate.return_value = (b'6d98394\n', b'') self.assertEqual(gh_deploy._get_current_sha('.'), '6d98394') @mock.patch('subprocess.Popen') def test_get_remote_url_ssh(self, mock_popeno): - mock_popeno().communicate.return_value = ( b'git@github.com:mkdocs/mkdocs.git\n', - b'' + b'', ) expected = ('git@', 'mkdocs/mkdocs.git') @@ -56,10 +40,9 @@ def test_get_remote_url_ssh(self, mock_popeno): @mock.patch('subprocess.Popen') def test_get_remote_url_http(self, mock_popeno): - mock_popeno().communicate.return_value = ( b'https://github.com/mkdocs/mkdocs.git\n', - b'' + b'', ) expected = ('https://', 'mkdocs/mkdocs.git') @@ -67,10 +50,9 @@ def test_get_remote_url_http(self, mock_popeno): @mock.patch('subprocess.Popen') def test_get_remote_url_enterprise(self, mock_popeno): - mock_popeno().communicate.return_value = ( b'https://notgh.com/mkdocs/mkdocs.git\n', - b'' + b'', ) expected = (None, None) @@ -80,9 +62,8 @@ def test_get_remote_url_enterprise(self, mock_popeno): @mock.patch('mkdocs.commands.gh_deploy._get_current_sha', return_value='shashas') @mock.patch('mkdocs.commands.gh_deploy._get_remote_url', return_value=(None, None)) @mock.patch('mkdocs.commands.gh_deploy._check_version') - @mock.patch('mkdocs.commands.gh_deploy.ghp_import.ghp_import', return_value=(True, '')) + @mock.patch('ghp_import.ghp_import') def test_deploy(self, mock_import, check_version, get_remote, get_sha, is_repo): - config = load_config( remote_branch='test', ) @@ -92,11 +73,11 @@ def test_deploy(self, mock_import, check_version, get_remote, get_sha, is_repo): @mock.patch('mkdocs.commands.gh_deploy._get_current_sha', return_value='shashas') @mock.patch('mkdocs.commands.gh_deploy._get_remote_url', return_value=(None, None)) @mock.patch('mkdocs.commands.gh_deploy._check_version') - @mock.patch('mkdocs.commands.gh_deploy.ghp_import.ghp_import', return_value=(True, '')) + @mock.patch('ghp_import.ghp_import') @mock.patch('os.path.isfile', return_value=False) - def test_deploy_no_cname(self, mock_isfile, mock_import, check_version, get_remote, - get_sha, is_repo): - + def test_deploy_no_cname( + self, mock_isfile, mock_import, check_version, get_remote, get_sha, is_repo + ): config = load_config( remote_branch='test', ) @@ -104,12 +85,12 @@ def test_deploy_no_cname(self, mock_isfile, mock_import, check_version, get_remo @mock.patch('mkdocs.commands.gh_deploy._is_cwd_git_repo', return_value=True) @mock.patch('mkdocs.commands.gh_deploy._get_current_sha', return_value='shashas') - @mock.patch('mkdocs.commands.gh_deploy._get_remote_url', return_value=( - 'git@', 'mkdocs/mkdocs.git')) + @mock.patch( + 'mkdocs.commands.gh_deploy._get_remote_url', return_value=('git@', 'mkdocs/mkdocs.git') + ) @mock.patch('mkdocs.commands.gh_deploy._check_version') - @mock.patch('mkdocs.commands.gh_deploy.ghp_import.ghp_import', return_value=(True, '')) + @mock.patch('ghp_import.ghp_import') def test_deploy_hostname(self, mock_import, check_version, get_remote, get_sha, is_repo): - config = load_config( remote_branch='test', ) @@ -119,22 +100,22 @@ def 
test_deploy_hostname(self, mock_import, check_version, get_remote, get_sha, @mock.patch('mkdocs.commands.gh_deploy._get_current_sha', return_value='shashas') @mock.patch('mkdocs.commands.gh_deploy._get_remote_url', return_value=(None, None)) @mock.patch('mkdocs.commands.gh_deploy._check_version') - @mock.patch('mkdocs.commands.gh_deploy.ghp_import.ghp_import', return_value=(True, '')) - def test_deploy_ignore_version_default(self, mock_import, check_version, get_remote, get_sha, is_repo): - + @mock.patch('ghp_import.ghp_import') + def test_deploy_ignore_version_default( + self, mock_import, check_version, get_remote, get_sha, is_repo + ): config = load_config( remote_branch='test', ) gh_deploy.gh_deploy(config) - self.assert_mock_called_once(check_version) + check_version.assert_called_once() @mock.patch('mkdocs.commands.gh_deploy._is_cwd_git_repo', return_value=True) @mock.patch('mkdocs.commands.gh_deploy._get_current_sha', return_value='shashas') @mock.patch('mkdocs.commands.gh_deploy._get_remote_url', return_value=(None, None)) @mock.patch('mkdocs.commands.gh_deploy._check_version') - @mock.patch('mkdocs.commands.gh_deploy.ghp_import.ghp_import', return_value=(True, '')) + @mock.patch('ghp_import.ghp_import') def test_deploy_ignore_version(self, mock_import, check_version, get_remote, get_sha, is_repo): - config = load_config( remote_branch='test', ) @@ -144,56 +125,66 @@ def test_deploy_ignore_version(self, mock_import, check_version, get_remote, get @mock.patch('mkdocs.commands.gh_deploy._is_cwd_git_repo', return_value=True) @mock.patch('mkdocs.commands.gh_deploy._get_current_sha', return_value='shashas') @mock.patch('mkdocs.commands.gh_deploy._check_version') - @mock.patch('mkdocs.utils.ghp_import.ghp_import') - @mock.patch('mkdocs.commands.gh_deploy.log') - def test_deploy_error(self, mock_log, mock_import, check_version, get_sha, is_repo): - error_string = 'TestError123' - mock_import.return_value = (False, error_string) + @mock.patch('ghp_import.ghp_import') + def test_deploy_error(self, mock_import, check_version, get_sha, is_repo): + mock_import.side_effect = GhpError('TestError123') config = load_config( remote_branch='test', ) - self.assertRaises(SystemExit, gh_deploy.gh_deploy, config) - mock_log.error.assert_called_once_with('Failed to deploy to GitHub with error: \n%s', - error_string) + with self.assertLogs('mkdocs', level='ERROR') as cm: + with self.assertRaises(Abort): + gh_deploy.gh_deploy(config) + self.assertEqual( + cm.output, + [ + 'ERROR:mkdocs.commands.gh_deploy:Failed to deploy to GitHub with error: \n' + 'TestError123' + ], + ) class TestGitHubDeployLogs(unittest.TestCase): - @mock.patch('subprocess.Popen') def test_mkdocs_newer(self, mock_popeno): + mock_popeno().communicate.return_value = ( + b'Deployed 12345678 with MkDocs version: 0.1.2\n', + b'', + ) - mock_popeno().communicate.return_value = (b'Deployed 12345678 with MkDocs version: 0.1.2\n', b'') - - with self.assertLogs('mkdocs', level='INFO') as cm: + with self.assertLogs('mkdocs') as cm: gh_deploy._check_version('gh-pages') self.assertEqual( - cm.output, ['INFO:mkdocs.commands.gh_deploy:Previous deployment was done with MkDocs ' - 'version 0.1.2; you are deploying with a newer version ({})'.format(__version__)] + '\n'.join(cm.output), + f'INFO:mkdocs.commands.gh_deploy:Previous deployment was done with MkDocs ' + f'version 0.1.2; you are deploying with a newer version ({__version__})', ) @mock.patch('subprocess.Popen') def test_mkdocs_older(self, mock_popeno): - - mock_popeno().communicate.return_value 
= (b'Deployed 12345678 with MkDocs version: 10.1.2\n', b'') + mock_popeno().communicate.return_value = ( + b'Deployed 12345678 with MkDocs version: 10.1.2\n', + b'', + ) with self.assertLogs('mkdocs', level='ERROR') as cm: - self.assertRaises(SystemExit, gh_deploy._check_version, 'gh-pages') + with self.assertRaises(Abort): + gh_deploy._check_version('gh-pages') self.assertEqual( - cm.output, ['ERROR:mkdocs.commands.gh_deploy:Deployment terminated: Previous deployment was made with ' - 'MkDocs version 10.1.2; you are attempting to deploy with an older version ({}). Use ' - '--ignore-version to deploy anyway.'.format(__version__)] + '\n'.join(cm.output), + f'ERROR:mkdocs.commands.gh_deploy:Deployment terminated: Previous deployment was made with ' + f'MkDocs version 10.1.2; you are attempting to deploy with an older version ({__version__}).' + f' Use --ignore-version to deploy anyway.', ) @mock.patch('subprocess.Popen') def test_version_unknown(self, mock_popeno): - mock_popeno().communicate.return_value = (b'No version specified\n', b'') - with self.assertLogs('mkdocs', level='WARNING') as cm: + with self.assertLogs('mkdocs') as cm: gh_deploy._check_version('gh-pages') self.assertEqual( - cm.output, - ['WARNING:mkdocs.commands.gh_deploy:Version check skipped: No version specified in previous deployment.'] + '\n'.join(cm.output), + 'WARNING:mkdocs.commands.gh_deploy:Version check skipped: No version specified in previous deployment.', ) diff --git a/mkdocs/tests/integration.py b/mkdocs/tests/integration.py index f4a3f2a..a956933 100644 --- a/mkdocs/tests/integration.py +++ b/mkdocs/tests/integration.py @@ -15,41 +15,44 @@ """ -import click import logging import os import subprocess +import tempfile -from mkdocs import utils +import click log = logging.getLogger('mkdocs') DIR = os.path.dirname(__file__) MKDOCS_CONFIG = os.path.abspath(os.path.join(DIR, '../../mkdocs.yml')) -MKDOCS_THEMES = utils.get_theme_names() +MKDOCS_THEMES = ['mkdocs', 'readthedocs'] TEST_PROJECTS = os.path.abspath(os.path.join(DIR, 'integration')) @click.command() -@click.option('--output', - help="The output directory to use when building themes", - type=click.Path(file_okay=False, writable=True), - required=True) +@click.option( + '--output', + help="The output directory to use when building themes", + type=click.Path(file_okay=False, writable=True), +) def main(output=None): + if output is None: + directory = tempfile.TemporaryDirectory(prefix='mkdocs_integration-') + output = directory.name log.propagate = False stream = logging.StreamHandler() - formatter = logging.Formatter( - "\033[1m\033[1;32m *** %(message)s *** \033[0m") + formatter = logging.Formatter("\033[1m\033[1;32m *** %(message)s *** \033[0m") stream.setFormatter(formatter) log.addHandler(stream) log.setLevel(logging.DEBUG) - base_cmd = ['mkdocs', 'build', '-s', '-v', '--site-dir', ] + base_cmd = ['mkdocs', 'build', '-q', '-s', '--site-dir'] log.debug("Building installed themes.") for theme in sorted(MKDOCS_THEMES): - log.debug("Building theme: {}".format(theme)) + log.debug(f"Building theme: {theme}") project_dir = os.path.dirname(MKDOCS_CONFIG) out = os.path.join(output, theme) command = base_cmd + [out, '--theme', theme] @@ -57,13 +60,15 @@ def main(output=None): log.debug("Building test projects.") for project in os.listdir(TEST_PROJECTS): - log.debug("Building test project: {}".format(project)) project_dir = os.path.join(TEST_PROJECTS, project) + if not os.path.isdir(project_dir): + continue + log.debug(f"Building test project: {project}") out = 
os.path.join(output, project) - command = base_cmd + [out, ] + command = base_cmd + [out] subprocess.check_call(command, cwd=project_dir) - log.debug("Theme and integration builds are in {}".format(output)) + log.debug(f"Theme and integration builds are in {output}") if __name__ == '__main__': diff --git a/mkdocs/tests/integration/complicated_config/mkdocs.yml b/mkdocs/tests/integration/complicated_config/mkdocs.yml index d1076df..d654cc7 100644 --- a/mkdocs/tests/integration/complicated_config/mkdocs.yml +++ b/mkdocs/tests/integration/complicated_config/mkdocs.yml @@ -1,26 +1,26 @@ site_name: My Docs nav: - - Home: index.md - - User Guide: - - Writing your docs: index.md - - About: - - License: index.md - - Release Notes: - - Version 1: index.md - - Version 2: index.md - - Version 3: index.md + - Home: index.md + - User Guide: + - Writing your docs: index.md + - About: + - License: index.md + - Release Notes: + - Version 1: index.md + - Version 2: index.md + - Version 3: index.md site_url: http://www.mkdocs.org/ docs_dir: documentation site_dir: output theme: - name: mkdocs - custom_dir: theme_tweaks + name: mkdocs + custom_dir: theme_tweaks + analytics: {gtag: 'G-ABC123'} copyright: "Dougal Matthews" -google_analytics: ["1", "2"] dev_addr: ::1:8000 use_directory_urls: false @@ -34,13 +34,13 @@ extra_templates: ["custom.html"] markdown_extensions: - - toc: - permalink:  - - admonition: + - toc: + permalink:  + - admonition: strict: true remote_branch: none remote_name: upstream extra: - some value: 1 + some value: 1 diff --git a/mkdocs/tests/integration/minimal/mkdocs.yml b/mkdocs/tests/integration/minimal/mkdocs.yml index ff21753..022d60c 100644 --- a/mkdocs/tests/integration/minimal/mkdocs.yml +++ b/mkdocs/tests/integration/minimal/mkdocs.yml @@ -1,6 +1,6 @@ site_name: MyTest nav: - - 'testing.md' + - 'testing.md' site_author: "Tom Christie & Dougal Matthews" diff --git a/mkdocs/tests/integration/projects.yaml b/mkdocs/tests/integration/projects.yaml new file mode 100644 index 0000000..4cca01e --- /dev/null +++ b/mkdocs/tests/integration/projects.yaml @@ -0,0 +1,82 @@ +# DO NOT UPDATE THIS FILE, only for tests. 
+# This is an intentionally small subset of https://github.com/mkdocs/catalog +projects: +- name: Material for MkDocs + mkdocs_theme: material + mkdocs_plugin: [material/info, material/offline, material/search, material/social, material/tags] + github_id: squidfunk/mkdocs-material + pypi_id: mkdocs-material +- name: Bootstrap4 + mkdocs_theme: bootstrap4 + github_id: byrnereese/mkdocs-bootstrap4 + pypi_id: mkdocs-bootstrap4 +- name: Bootstrap 4 + mkdocs_theme: bootstrap4 + shadowed: [mkdocs_theme] + github_id: LukeCarrier/mkdocs-theme-bootstrap4 + pypi_id: mkdocs-theme-bootstrap4 +- name: Bootswatch + mkdocs_theme: [cerulean, cosmo, cyborg, darkly, flatly, journal, litera, lumen, lux, materia, minty, pulse, sandstone, simplex, slate, solar, spacelab, superhero, united, yeti] + github_id: mkdocs/mkdocs-bootswatch + pypi_id: mkdocs-bootswatch +- name: mkdocstrings + mkdocs_plugin: mkdocstrings + extra_dependencies: + plugins.mkdocstrings.handlers.crystal: mkdocstrings-crystal + plugins.mkdocstrings.handlers.python: mkdocstrings-python + github_id: mkdocstrings/mkdocstrings + pypi_id: mkdocstrings +- name: mkdocs-click + markdown_extension: mkdocs-click + github_id: DataDog/mkdocs-click + pypi_id: mkdocs-click +- name: blog + mkdocs_plugin: blog + github_id: andyoakley/mkdocs-blog +- name: Blogs for MkDocs + shadowed: [mkdocs_plugin] + mkdocs_plugin: blog + github_id: fmaida/mkdocs-blog-plugin +- name: foo + homepage: foo + gitlab_id: bar/foo +- name: Termage + mkdocs_plugin: termage + github_id: bczsalba/Termage +- name: Github-Links + markdown_extension: mdx_gh_links + github_id: Python-Markdown/github-links + pypi_id: mdx-gh-links +- name: autorefs + mkdocs_plugin: autorefs + github_id: mkdocstrings/autorefs + pypi_id: mkdocs-autorefs +- name: mkdocs-redirects + mkdocs_plugin: redirects + github_id: mkdocs/mkdocs-redirects + pypi_id: mkdocs-redirects +- name: markdown-callouts + markdown_extension: callouts + github_id: oprypin/markdown-callouts + pypi_id: markdown-callouts +- name: PyMdown Extensions + markdown_extension: [pymdownx.arithmatex, pymdownx.b64, pymdownx.betterem, pymdownx.caret, pymdownx.critic, pymdownx.details, pymdownx.emoji, pymdownx.escapeall, pymdownx.extra, pymdownx.highlight, pymdownx.inlinehilite, pymdownx.keys, pymdownx.magiclink, pymdownx.mark, pymdownx.pathconverter, pymdownx.progressbar, pymdownx.saneheaders, pymdownx.smartsymbols, pymdownx.snippets, pymdownx.striphtml, pymdownx.superfences, pymdownx.tabbed, pymdownx.tasklist, pymdownx.tilde] + github_id: facelessuser/pymdown-extensions + pypi_id: pymdown-extensions +- name: literate-nav + mkdocs_plugin: literate-nav + github_id: oprypin/mkdocs-literate-nav + pypi_id: mkdocs-literate-nav +- name: mkdocs-code-validator + mkdocs_plugin: code-validator + github_id: oprypin/mkdocs-code-validator + pypi_id: mkdocs-code-validator +- name: tags + mkdocs_plugin: tags + description: Processes tags in yaml metadata + github_id: jldiaz/mkdocs-plugin-tags + pypi_id: mkdocs-plugin-tags +- name: tags + mkdocs_plugin: autotag + github_id: six-two/mkdocs-auto-tag-plugin + pypi_id: mkdocs-auto-tag-plugin diff --git a/mkdocs/tests/livereload_tests.py b/mkdocs/tests/livereload_tests.py new file mode 100644 index 0000000..db6b791 --- /dev/null +++ b/mkdocs/tests/livereload_tests.py @@ -0,0 +1,646 @@ +#!/usr/bin/env python + +import contextlib +import email +import io +import sys +import threading +import time +import unittest +from pathlib import Path +from unittest import mock + +from mkdocs.livereload import LiveReloadServer +from 
mkdocs.tests.base import change_dir, tempdir + + +class FakeRequest: + def __init__(self, content): + self.in_file = io.BytesIO(content.encode()) + self.out_file = io.BytesIO() + self.out_file.close = lambda: None + + def makefile(self, *args, **kwargs): + return self.in_file + + def sendall(self, data): + self.out_file.write(data) + + +@contextlib.contextmanager +def testing_server(root, builder=lambda: None, mount_path="/"): + """Create the server and start most of its parts, but don't listen on a socket.""" + with mock.patch("socket.socket"): + server = LiveReloadServer( + builder, + host="localhost", + port=0, + root=root, + mount_path=mount_path, + polling_interval=0.2, + ) + server.setup_environ() + server.observer.start() + thread = threading.Thread(target=server._build_loop, daemon=True) + thread.start() + yield server + server.shutdown() + thread.join() + + +def do_request(server, content): + request = FakeRequest(content + " HTTP/1.1") + server.RequestHandlerClass(request, ("127.0.0.1", 0), server) + response = request.out_file.getvalue() + + headers, _, content = response.partition(b"\r\n\r\n") + status, _, headers = headers.partition(b"\r\n") + status = status.split(None, 1)[1].decode() + + headers = email.message_from_bytes(headers) + headers["_status"] = status + return headers, content.decode() + + +SCRIPT_REGEX = r'' + + +class BuildTests(unittest.TestCase): + @tempdir({"test.css": "div { color: red; }"}) + def test_serves_normal_file(self, site_dir): + with testing_server(site_dir) as server: + headers, output = do_request(server, "GET /test.css") + self.assertEqual(output, "div { color: red; }") + self.assertEqual(headers["_status"], "200 OK") + self.assertEqual(headers.get("content-length"), str(len(output))) + + @tempdir({"docs/foo.docs": "docs1", "mkdocs.yml": "yml1"}) + @tempdir({"foo.site": "original"}) + def test_basic_rebuild(self, site_dir, origin_dir): + docs_dir = Path(origin_dir, "docs") + + started_building = threading.Event() + + def rebuild(): + started_building.set() + Path(site_dir, "foo.site").write_text( + Path(docs_dir, "foo.docs").read_text() + Path(origin_dir, "mkdocs.yml").read_text() + ) + + with testing_server(site_dir, rebuild) as server: + server.watch(docs_dir, rebuild) + server.watch(Path(origin_dir, "mkdocs.yml"), rebuild) + time.sleep(0.01) + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "original") + + Path(docs_dir, "foo.docs").write_text("docs2") + self.assertTrue(started_building.wait(timeout=10)) + started_building.clear() + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "docs2yml1") + + Path(origin_dir, "mkdocs.yml").write_text("yml2") + self.assertTrue(started_building.wait(timeout=10)) + started_building.clear() + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "docs2yml2") + + @tempdir({"foo.docs": "a"}) + @tempdir({"foo.site": "original"}) + def test_rebuild_after_delete(self, site_dir, docs_dir): + started_building = threading.Event() + + def rebuild(): + started_building.set() + Path(site_dir, "foo.site").unlink() + + with testing_server(site_dir, rebuild) as server: + server.watch(docs_dir, rebuild) + time.sleep(0.01) + + Path(docs_dir, "foo.docs").write_text("b") + self.assertTrue(started_building.wait(timeout=10)) + + with self.assertLogs("mkdocs.livereload"): + _, output = do_request(server, "GET /foo.site") + + self.assertIn("404", output) + + @tempdir({"aaa": "something"}) + def test_rebuild_after_rename(self, site_dir): + 
started_building = threading.Event() + + with testing_server(site_dir, started_building.set) as server: + server.watch(site_dir) + time.sleep(0.01) + + Path(site_dir, "aaa").rename(Path(site_dir, "bbb")) + self.assertTrue(started_building.wait(timeout=10)) + + @tempdir() + def test_rebuild_on_edit(self, site_dir): + started_building = threading.Event() + + with open(Path(site_dir, "test"), "wb") as f: + time.sleep(0.01) + + with testing_server(site_dir, started_building.set) as server: + server.watch(site_dir) + time.sleep(0.01) + + f.write(b"hi\n") + f.flush() + + self.assertTrue(started_building.wait(timeout=10)) + + @tempdir() + def test_unwatch(self, site_dir): + started_building = threading.Event() + + with testing_server(site_dir, started_building.set) as server: + with self.assertRaises(KeyError): + server.unwatch(site_dir) + + server.watch(site_dir) + server.watch(site_dir) + server.unwatch(site_dir) + time.sleep(0.01) + + Path(site_dir, "foo").write_text("foo") + self.assertTrue(started_building.wait(timeout=10)) + started_building.clear() + + server.unwatch(site_dir) + Path(site_dir, "foo").write_text("bar") + self.assertFalse(started_building.wait(timeout=0.5)) + + with self.assertRaises(KeyError): + server.unwatch(site_dir) + + @tempdir({"foo.docs": "a"}) + @tempdir({"foo.site": "original"}) + def test_custom_action_warns(self, site_dir, docs_dir): + started_building = threading.Event() + + def rebuild(): + started_building.set() + content = Path(docs_dir, "foo.docs").read_text() + Path(site_dir, "foo.site").write_text(content * 5) + + with testing_server(site_dir) as server: + with self.assertWarnsRegex(DeprecationWarning, "func") as cm: + server.watch(docs_dir, rebuild) + time.sleep(0.01) + self.assertIn("livereload_tests.py", cm.filename) + + Path(docs_dir, "foo.docs").write_text("b") + self.assertTrue(started_building.wait(timeout=10)) + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "bbbbb") + + @tempdir({"foo.docs": "docs1"}) + @tempdir({"foo.extra": "extra1"}) + @tempdir({"foo.site": "original"}) + def test_multiple_dirs_can_cause_rebuild(self, site_dir, extra_dir, docs_dir): + started_building = threading.Barrier(2) + + def rebuild(): + started_building.wait(timeout=10) + content1 = Path(docs_dir, "foo.docs").read_text() + content2 = Path(extra_dir, "foo.extra").read_text() + Path(site_dir, "foo.site").write_text(content1 + content2) + + with testing_server(site_dir, rebuild) as server: + server.watch(docs_dir) + server.watch(extra_dir) + time.sleep(0.01) + + Path(docs_dir, "foo.docs").write_text("docs2") + started_building.wait(timeout=10) + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "docs2extra1") + + Path(extra_dir, "foo.extra").write_text("extra2") + started_building.wait(timeout=10) + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "docs2extra2") + + @tempdir({"foo.docs": "docs1"}) + @tempdir({"foo.extra": "extra1"}) + @tempdir({"foo.site": "original"}) + def test_multiple_dirs_changes_rebuild_only_once(self, site_dir, extra_dir, docs_dir): + started_building = threading.Event() + + def rebuild(): + self.assertFalse(started_building.is_set()) + started_building.set() + content1 = Path(docs_dir, "foo.docs").read_text() + content2 = Path(extra_dir, "foo.extra").read_text() + Path(site_dir, "foo.site").write_text(content1 + content2) + + with testing_server(site_dir, rebuild) as server: + server.watch(docs_dir) + server.watch(extra_dir) + time.sleep(0.01) + + _, output = 
do_request(server, "GET /foo.site") + Path(docs_dir, "foo.docs").write_text("docs2") + Path(extra_dir, "foo.extra").write_text("extra2") + self.assertTrue(started_building.wait(timeout=10)) + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "docs2extra2") + + @tempdir({"foo.docs": "a"}) + @tempdir({"foo.site": "original"}) + def test_change_is_detected_while_building(self, site_dir, docs_dir): + before_finished_building = threading.Barrier(2) + can_finish_building = threading.Event() + + def rebuild(): + content = Path(docs_dir, "foo.docs").read_text() + Path(site_dir, "foo.site").write_text(content * 5) + before_finished_building.wait(timeout=10) + self.assertTrue(can_finish_building.wait(timeout=10)) + + with testing_server(site_dir, rebuild) as server: + server.watch(docs_dir) + time.sleep(0.01) + + Path(docs_dir, "foo.docs").write_text("b") + before_finished_building.wait(timeout=10) + Path(docs_dir, "foo.docs").write_text("c") + can_finish_building.set() + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "bbbbb") + + before_finished_building.wait(timeout=10) + + _, output = do_request(server, "GET /foo.site") + self.assertEqual(output, "ccccc") + + @tempdir({"foo.docs": "a"}) + @tempdir({"foo.site": "original"}) + def test_recovers_from_build_error(self, site_dir, docs_dir): + started_building = threading.Event() + build_count = 0 + + def rebuild(): + started_building.set() + nonlocal build_count + build_count += 1 + if build_count == 1: + raise ValueError("oh no") + else: + content = Path(docs_dir, "foo.docs").read_text() + Path(site_dir, "foo.site").write_text(content * 5) + + with testing_server(site_dir, rebuild) as server: + server.watch(docs_dir) + time.sleep(0.01) + + err = io.StringIO() + with contextlib.redirect_stderr(err), self.assertLogs("mkdocs.livereload") as cm: + Path(docs_dir, "foo.docs").write_text("b") + started_building.wait(timeout=10) + + Path(docs_dir, "foo.docs").write_text("c") + + _, output = do_request(server, "GET /foo.site") + + self.assertIn("ValueError: oh no", err.getvalue()) + self.assertRegex( + "\n".join(cm.output), + r".*Detected file changes\n" + r".*An error happened during the rebuild.*\n" + r".*Detected file changes\n", + ) + self.assertEqual(output, "ccccc") + + @tempdir( + { + "normal.html": "hello", + "no_body.html": "

hi", + "empty.html": "", + "multi_body.html": "foobar", + } + ) + def test_serves_modified_html(self, site_dir): + with testing_server(site_dir) as server: + server.watch(site_dir) + + headers, output = do_request(server, "GET /normal.html") + self.assertRegex(output, fr"^hello{SCRIPT_REGEX}$") + self.assertEqual(headers.get("content-type"), "text/html") + self.assertEqual(headers.get("content-length"), str(len(output))) + + _, output = do_request(server, "GET /no_body.html") + self.assertRegex(output, fr"^

hi{SCRIPT_REGEX}$") + + headers, output = do_request(server, "GET /empty.html") + self.assertRegex(output, fr"^{SCRIPT_REGEX}$") + self.assertEqual(headers.get("content-length"), str(len(output))) + + _, output = do_request(server, "GET /multi_body.html") + self.assertRegex(output, fr"^foobar{SCRIPT_REGEX}$") + + @tempdir({"index.html": "aaa", "foo/index.html": "bbb"}) + def test_serves_directory_index(self, site_dir): + with testing_server(site_dir) as server: + headers, output = do_request(server, "GET /") + self.assertRegex(output, r"^aaa$") + self.assertEqual(headers["_status"], "200 OK") + self.assertEqual(headers.get("content-type"), "text/html") + self.assertEqual(headers.get("content-length"), str(len(output))) + + for path in "/foo/", "/foo/index.html": + _, output = do_request(server, f"GET {path}") + self.assertRegex(output, r"^bbb$") + + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /foo/index.html/") + self.assertEqual(headers["_status"], "404 Not Found") + + @tempdir( + { + "foo/bar/index.html": "aaa", + "foo/測試/index.html": "bbb", + } + ) + def test_redirects_to_directory(self, site_dir): + with testing_server(site_dir, mount_path="/sub") as server: + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /sub/foo/bar") + self.assertEqual(headers["_status"], "302 Found") + self.assertEqual(headers.get("location"), "/sub/foo/bar/") + + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /sub/foo/測試") + self.assertEqual(headers["_status"], "302 Found") + self.assertEqual(headers.get("location"), "/sub/foo/%E6%B8%AC%E8%A9%A6/") + + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /sub/foo/%E6%B8%AC%E8%A9%A6") + self.assertEqual(headers["_status"], "302 Found") + self.assertEqual(headers.get("location"), "/sub/foo/%E6%B8%AC%E8%A9%A6/") + + @tempdir({"я.html": "aaa", "测试2/index.html": "bbb"}) + def test_serves_with_unicode_characters(self, site_dir): + with testing_server(site_dir) as server: + _, output = do_request(server, "GET /я.html") + self.assertRegex(output, r"^aaa$") + _, output = do_request(server, "GET /%D1%8F.html") + self.assertRegex(output, r"^aaa$") + + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /%D1.html") + self.assertEqual(headers["_status"], "404 Not Found") + + _, output = do_request(server, "GET /测试2/") + self.assertRegex(output, r"^bbb$") + _, output = do_request(server, "GET /%E6%B5%8B%E8%AF%952/index.html") + self.assertRegex(output, r"^bbb$") + + @tempdir() + def test_serves_polling_instantly(self, site_dir): + with testing_server(site_dir) as server: + _, output = do_request(server, "GET /livereload/0/0") + self.assertTrue(output.isdigit()) + + @tempdir() + def test_serves_polling_with_mount_path(self, site_dir): + with testing_server(site_dir, mount_path="/test/f*o") as server: + _, output = do_request(server, "GET /livereload/0/0") + self.assertTrue(output.isdigit()) + + @tempdir() + @tempdir() + def test_serves_polling_after_event(self, site_dir, docs_dir): + with testing_server(site_dir) as server: + initial_epoch = server._visible_epoch + + server.watch(docs_dir) + time.sleep(0.01) + + Path(docs_dir, "foo.docs").write_text("b") + + _, output = do_request(server, f"GET /livereload/{initial_epoch}/0") + + self.assertNotEqual(server._visible_epoch, initial_epoch) + self.assertEqual(output, str(server._visible_epoch)) + + @tempdir() + def test_serves_polling_with_timeout(self, site_dir): + 
with testing_server(site_dir) as server: + server.poll_response_timeout = 0.2 + initial_epoch = server._visible_epoch + + start_time = time.monotonic() + _, output = do_request(server, f"GET /livereload/{initial_epoch}/0") + self.assertGreaterEqual(time.monotonic(), start_time + 0.2) + self.assertEqual(output, str(initial_epoch)) + + @tempdir() + def test_error_handler(self, site_dir): + with testing_server(site_dir) as server: + server.error_handler = lambda code: b"[%d]" % code + with self.assertLogs("mkdocs.livereload") as cm: + headers, output = do_request(server, "GET /missing") + + self.assertEqual(headers["_status"], "404 Not Found") + self.assertEqual(output, "[404]") + self.assertRegex( + "\n".join(cm.output), + r'^WARNING:mkdocs.livereload:.*"GET /missing HTTP/1.1" code 404', + ) + + @tempdir() + def test_bad_error_handler(self, site_dir): + with testing_server(site_dir) as server: + server.error_handler = lambda code: 0 / 0 + with self.assertLogs("mkdocs.livereload") as cm: + headers, output = do_request(server, "GET /missing") + + self.assertEqual(headers["_status"], "404 Not Found") + self.assertIn("404", output) + self.assertRegex( + "\n".join(cm.output), r"Failed to render an error message[\s\S]+/missing.+code 404" + ) + + @tempdir( + { + "test.html": "\nhi", + "test.xml": '\n', + "test.css": "div { color: red; }", + "test.js": "use strict;", + "test.json": '{"a": "b"}', + } + ) + def test_mime_types(self, site_dir): + with testing_server(site_dir) as server: + headers, _ = do_request(server, "GET /test.html") + self.assertEqual(headers.get("content-type"), "text/html") + + headers, _ = do_request(server, "GET /test.xml") + self.assertIn(headers.get("content-type"), ["text/xml", "application/xml"]) + + headers, _ = do_request(server, "GET /test.css") + self.assertEqual(headers.get("content-type"), "text/css") + + headers, _ = do_request(server, "GET /test.js") + self.assertEqual(headers.get("content-type"), "application/javascript") + + headers, _ = do_request(server, "GET /test.json") + self.assertEqual(headers.get("content-type"), "application/json") + + @tempdir({"index.html": "aaa", "sub/sub.html": "bbb"}) + def test_serves_from_mount_path(self, site_dir): + with testing_server(site_dir, mount_path="/sub") as server: + headers, output = do_request(server, "GET /sub/") + self.assertRegex(output, r"^aaa$") + self.assertEqual(headers.get("content-type"), "text/html") + + _, output = do_request(server, "GET /sub/sub/sub.html") + self.assertRegex(output, r"^bbb$") + + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /sub/sub.html") + self.assertEqual(headers["_status"], "404 Not Found") + + @tempdir() + def test_redirects_to_mount_path(self, site_dir): + with testing_server(site_dir, mount_path="/mount/path") as server: + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /") + self.assertEqual(headers["_status"], "302 Found") + self.assertEqual(headers.get("location"), "/mount/path/") + + @tempdir() + def test_redirects_to_unicode_mount_path(self, site_dir): + with testing_server(site_dir, mount_path="/mount/測試") as server: + with self.assertLogs("mkdocs.livereload"): + headers, _ = do_request(server, "GET /") + self.assertEqual(headers["_status"], "302 Found") + self.assertEqual(headers.get("location"), "/mount/%E6%B8%AC%E8%A9%A6/") + + @tempdir({"mkdocs.yml": "original", "mkdocs2.yml": "original"}, prefix="tmp_dir") + @tempdir(prefix="origin_dir") + @tempdir({"subdir/foo.md": "original"}, 
prefix="dest_docs_dir") + def test_watches_direct_symlinks(self, dest_docs_dir, origin_dir, tmp_dir): + try: + Path(origin_dir, "docs").symlink_to(dest_docs_dir, target_is_directory=True) + Path(origin_dir, "mkdocs.yml").symlink_to(Path(tmp_dir, "mkdocs.yml")) + except NotImplementedError: # PyPy on Windows + self.skipTest("Creating symlinks not supported") + + started_building = threading.Event() + + def wait_for_build(): + result = started_building.wait(timeout=10) + started_building.clear() + with self.assertLogs("mkdocs.livereload"): + do_request(server, "GET /") + return result + + with testing_server(tmp_dir, started_building.set) as server: + server.watch(Path(origin_dir, "docs")) + server.watch(Path(origin_dir, "mkdocs.yml")) + time.sleep(0.01) + + Path(origin_dir, "unrelated.md").write_text("foo") + self.assertFalse(started_building.wait(timeout=0.5)) + + Path(tmp_dir, "mkdocs.yml").write_text("edited") + self.assertTrue(wait_for_build()) + + Path(dest_docs_dir, "subdir", "foo.md").write_text("edited") + self.assertTrue(wait_for_build()) + + @tempdir(["file_dest_1.md", "file_dest_2.md", "file_dest_unused.md"], prefix="tmp_dir") + @tempdir(["file_under.md"], prefix="dir_to_link_to") + @tempdir() + def test_watches_through_symlinks(self, docs_dir, dir_to_link_to, tmp_dir): + try: + Path(docs_dir, "link1.md").symlink_to(Path(tmp_dir, "file_dest_1.md")) + Path(docs_dir, "linked_dir").symlink_to(dir_to_link_to, target_is_directory=True) + + Path(dir_to_link_to, "sublink.md").symlink_to(Path(tmp_dir, "file_dest_2.md")) + except NotImplementedError: # PyPy on Windows + self.skipTest("Creating symlinks not supported") + + started_building = threading.Event() + + def wait_for_build(): + result = started_building.wait(timeout=10) + started_building.clear() + with self.assertLogs("mkdocs.livereload"): + do_request(server, "GET /") + return result + + with testing_server(docs_dir, started_building.set) as server: + server.watch(docs_dir) + time.sleep(0.01) + + Path(tmp_dir, "file_dest_1.md").write_text("edited") + self.assertTrue(wait_for_build()) + + Path(dir_to_link_to, "file_under.md").write_text("edited") + self.assertTrue(wait_for_build()) + + Path(tmp_dir, "file_dest_2.md").write_text("edited") + self.assertTrue(wait_for_build()) + + Path(docs_dir, "link1.md").unlink() + self.assertTrue(wait_for_build()) + + Path(tmp_dir, "file_dest_unused.md").write_text("edited") + self.assertFalse(started_building.wait(timeout=0.5)) + + @tempdir(prefix="site_dir") + @tempdir(["docs/unused.md", "README.md"], prefix="origin_dir") + def test_watches_through_relative_symlinks(self, origin_dir, site_dir): + docs_dir = Path(origin_dir, "docs") + with change_dir(docs_dir): + try: + Path(docs_dir, "README.md").symlink_to(Path("..", "README.md")) + except NotImplementedError: # PyPy on Windows + self.skipTest("Creating symlinks not supported") + + started_building = threading.Event() + + with testing_server(docs_dir, started_building.set) as server: + server.watch(docs_dir) + time.sleep(0.01) + + Path(origin_dir, "README.md").write_text("edited") + self.assertTrue(started_building.wait(timeout=10)) + + @tempdir() + def test_watch_with_broken_symlinks(self, docs_dir): + Path(docs_dir, "subdir").mkdir() + + try: + if sys.platform != "win32": + Path(docs_dir, "subdir", "circular").symlink_to(Path(docs_dir)) + + Path(docs_dir, "broken_1").symlink_to(Path(docs_dir, "oh no")) + Path(docs_dir, "broken_2").symlink_to(Path(docs_dir, "oh no"), target_is_directory=True) + Path(docs_dir, 
"broken_3").symlink_to(Path(docs_dir, "broken_2")) + except NotImplementedError: # PyPy on Windows + self.skipTest("Creating symlinks not supported") + + started_building = threading.Event() + with testing_server(docs_dir, started_building.set) as server: + server.watch(docs_dir) + time.sleep(0.01) + + Path(docs_dir, "subdir", "test").write_text("test") + self.assertTrue(started_building.wait(timeout=10)) diff --git a/mkdocs/tests/localization_tests.py b/mkdocs/tests/localization_tests.py new file mode 100644 index 0000000..05a966c --- /dev/null +++ b/mkdocs/tests/localization_tests.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python + + +import unittest +from unittest import mock + +from mkdocs.config.base import ValidationError +from mkdocs.localization import install_translations, parse_locale +from mkdocs.tests.base import tempdir + + +class LocalizationTests(unittest.TestCase): + def setUp(self): + self.env = mock.Mock() + + def test_jinja_extension_installed(self): + install_translations(self.env, parse_locale('en'), []) + self.env.add_extension.assert_called_once_with('jinja2.ext.i18n') + + def test_valid_language(self): + locale = parse_locale('en') + self.assertEqual(locale.language, 'en') + + def test_valid_language_territory(self): + locale = parse_locale('en_US') + self.assertEqual(locale.language, 'en') + self.assertEqual(locale.territory, 'US') + self.assertEqual(str(locale), 'en_US') + + def test_unknown_locale(self): + self.assertRaises(ValidationError, parse_locale, 'foo') + + def test_invalid_locale(self): + self.assertRaises(ValidationError, parse_locale, '42') + + @tempdir() + def test_no_translations_found(self, dir_without_translations): + with self.assertLogs('mkdocs') as cm: + install_translations(self.env, parse_locale('fr_CA'), [dir_without_translations]) + self.assertEqual( + '\n'.join(cm.output), + "WARNING:mkdocs.localization:No translations could be found for the locale 'fr_CA'. 
" + "Defaulting to English.", + ) + self.env.install_null_translations.assert_called_once() + + @tempdir + def test_translations_found(self, tdir): + translations = mock.Mock() + + with mock.patch('mkdocs.localization.Translations.load', return_value=translations): + install_translations(self.env, parse_locale('en'), [tdir]) + + self.env.install_gettext_translations.assert_called_once_with(translations) + + @tempdir() + @tempdir() + def test_merge_translations(self, custom_dir, theme_dir): + custom_dir_translations = mock.Mock() + theme_dir_translations = mock.Mock() + + def side_effet(*args, **kwargs): + dirname = args[0] + if dirname.startswith(custom_dir): + return custom_dir_translations + elif dirname.startswith(theme_dir): + return theme_dir_translations + else: + self.fail() + + with mock.patch('mkdocs.localization.Translations.load', side_effect=side_effet): + install_translations(self.env, parse_locale('en'), [custom_dir, theme_dir]) + + theme_dir_translations.merge.assert_called_once_with(custom_dir_translations) diff --git a/mkdocs/tests/new_tests.py b/mkdocs/tests/new_tests.py index 735fdf5..120cfa5 100644 --- a/mkdocs/tests/new_tests.py +++ b/mkdocs/tests/new_tests.py @@ -1,27 +1,24 @@ #!/usr/bin/env python -import tempfile -import unittest import os +import unittest from mkdocs.commands import new +from mkdocs.tests.base import change_dir, tempdir class NewTests(unittest.TestCase): - - def test_new(self): - - tempdir = tempfile.mkdtemp() - os.chdir(tempdir) - - new.new("myproject") - - expected_paths = [ - os.path.join(tempdir, "myproject"), - os.path.join(tempdir, "myproject", "mkdocs.yml"), - os.path.join(tempdir, "myproject", "docs"), - os.path.join(tempdir, "myproject", "docs", "index.md"), - ] - - for expected_path in expected_paths: - self.assertTrue(os.path.exists(expected_path)) + @tempdir() + def test_new(self, temp_dir): + with change_dir(temp_dir): + new.new("myproject") + + expected_paths = [ + os.path.join(temp_dir, "myproject"), + os.path.join(temp_dir, "myproject", "mkdocs.yml"), + os.path.join(temp_dir, "myproject", "docs"), + os.path.join(temp_dir, "myproject", "docs", "index.md"), + ] + + for expected_path in expected_paths: + self.assertTrue(os.path.exists(expected_path)) diff --git a/mkdocs/tests/plugin_tests.py b/mkdocs/tests/plugin_tests.py index 4c84716..78e5287 100644 --- a/mkdocs/tests/plugin_tests.py +++ b/mkdocs/tests/plugin_tests.py @@ -1,41 +1,53 @@ #!/usr/bin/env python +from __future__ import annotations - -import unittest -from unittest import mock import os +import unittest +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from typing_extensions import assert_type +else: + + def assert_type(val, typ): + return None + from mkdocs import plugins -from mkdocs import config +from mkdocs.commands import build +from mkdocs.config import base +from mkdocs.config import config_options as c +from mkdocs.config.base import ValidationError +from mkdocs.exceptions import Abort, BuildError, PluginError +from mkdocs.tests.base import load_config, tempdir -class DummyPlugin(plugins.BasePlugin): - config_scheme = ( - ('foo', config.config_options.Type(str, default='default foo')), - ('bar', config.config_options.Type(int, default=0)), - ('dir', config.config_options.Dir(exists=False)), - ) +class _DummyPluginConfig(base.Config): + foo = c.Type(str, default='default foo') + bar = c.Type(int, default=0) + dir = c.Optional(c.Dir(exists=False)) - def on_pre_page(self, content, **kwargs): - """ modify page content by prepending `foo` config value. 
""" - return '{} {}'.format(self.config['foo'], content) - def on_nav(self, item, **kwargs): - """ do nothing (return None) to not modify item. """ +class DummyPlugin(plugins.BasePlugin[_DummyPluginConfig]): + def on_page_content(self, html, **kwargs) -> str: + """modify page content by prepending `foo` config value.""" + return f'{self.config.foo} {html}' + + def on_nav(self, nav, **kwargs) -> None: + """do nothing (return None) to not modify item.""" return None - def on_page_read_source(self, **kwargs): - """ create new source by prepending `foo` config value to 'source'. """ - return '{} {}'.format(self.config['foo'], 'source') + def on_page_read_source(self, **kwargs) -> str: + """create new source by prepending `foo` config value to 'source'.""" + return f'{self.config.foo} source' - def on_pre_build(self, **kwargs): - """ do nothing (return None). """ + def on_pre_build(self, **kwargs) -> None: + """do nothing (return None).""" return None class TestPluginClass(unittest.TestCase): - - def test_valid_plugin_options(self): + def test_valid_plugin_options(self) -> None: test_dir = 'test' options = { @@ -47,46 +59,121 @@ def test_valid_plugin_options(self): cfg_fname = os.path.abspath(cfg_fname) cfg_dirname = os.path.dirname(cfg_fname) - expected = os.path.join(cfg_dirname, test_dir) - expected = { 'foo': 'some value', 'bar': 0, - 'dir': expected, + 'dir': os.path.join(cfg_dirname, test_dir), } plugin = DummyPlugin() errors, warnings = plugin.load_config(options, config_file_path=cfg_fname) - self.assertEqual(plugin.config, expected) self.assertEqual(errors, []) self.assertEqual(warnings, []) - def test_invalid_plugin_options(self): + assert_type(plugin.config, _DummyPluginConfig) + self.assertEqual(plugin.config, expected) + assert_type(plugin.config.bar, int) + self.assertEqual(plugin.config.bar, 0) + assert_type(plugin.config.dir, Optional[str]) + + def test_invalid_plugin_options(self): plugin = DummyPlugin() errors, warnings = plugin.load_config({'foo': 42}) - self.assertEqual(len(errors), 1) - self.assertIn('foo', errors[0]) + self.assertEqual( + errors, + [('foo', ValidationError("Expected type: but received: "))], + ) self.assertEqual(warnings, []) errors, warnings = plugin.load_config({'bar': 'a string'}) - self.assertEqual(len(errors), 1) - self.assertIn('bar', errors[0]) + self.assertEqual( + errors, + [('bar', ValidationError("Expected type: but received: "))], + ) self.assertEqual(warnings, []) errors, warnings = plugin.load_config({'invalid_key': 'value'}) self.assertEqual(errors, []) - self.assertEqual(len(warnings), 1) - self.assertIn('invalid_key', warnings[0]) + self.assertEqual( + warnings, [('invalid_key', "Unrecognised configuration name: invalid_key")] + ) class TestPluginCollection(unittest.TestCase): + def test_correct_events_registered(self): + collection = plugins.PluginCollection() + plugin = DummyPlugin() + collection['foo'] = plugin + self.assertEqual( + collection.events, + { + 'startup': [], + 'shutdown': [], + 'serve': [], + 'config': [], + 'pre_build': [plugin.on_pre_build], + 'files': [], + 'nav': [plugin.on_nav], + 'env': [], + 'post_build': [], + 'build_error': [], + 'pre_template': [], + 'template_context': [], + 'post_template': [], + 'pre_page': [], + 'page_read_source': [plugin.on_page_read_source], + 'page_markdown': [], + 'page_content': [plugin.on_page_content], + 'page_context': [], + 'post_page': [], + }, + ) + + def test_event_priorities(self) -> None: + class PrioPlugin(plugins.BasePlugin): + config_scheme = 
base.get_schema(_DummyPluginConfig) + + @plugins.event_priority(100) + def on_page_content(self, html, **kwargs) -> None: + pass + + @plugins.event_priority(-100) + def on_nav(self, nav, **kwargs) -> None: + pass + + def on_page_read_source(self, **kwargs) -> None: + pass + + @plugins.event_priority(-50) + def on_post_build(self, **kwargs) -> None: + pass + + collection = plugins.PluginCollection() + collection['dummy'] = dummy = DummyPlugin() + collection['prio'] = prio = PrioPlugin() + self.assertEqual( + collection.events['page_content'], + [prio.on_page_content, dummy.on_page_content], + ) + self.assertEqual( + collection.events['nav'], + [dummy.on_nav, prio.on_nav], + ) + self.assertEqual( + collection.events['page_read_source'], + [dummy.on_page_read_source, prio.on_page_read_source], + ) + self.assertEqual( + collection.events['post_build'], + [prio.on_post_build], + ) def test_set_plugin_on_collection(self): collection = plugins.PluginCollection() plugin = DummyPlugin() collection['foo'] = plugin - self.assertEqual([(k, v) for k, v in collection.items()], [('foo', plugin)]) + self.assertEqual(list(collection.items()), [('foo', plugin)]) def test_set_multiple_plugins_on_collection(self): collection = plugins.PluginCollection() @@ -94,14 +181,17 @@ def test_set_multiple_plugins_on_collection(self): collection['foo'] = plugin1 plugin2 = DummyPlugin() collection['bar'] = plugin2 - self.assertEqual([(k, v) for k, v in collection.items()], [('foo', plugin1), ('bar', plugin2)]) + self.assertEqual(list(collection.items()), [('foo', plugin1), ('bar', plugin2)]) def test_run_event_on_collection(self): collection = plugins.PluginCollection() plugin = DummyPlugin() plugin.load_config({'foo': 'new'}) collection['foo'] = plugin - self.assertEqual(collection.run_event('pre_page', 'page content'), 'new page content') + self.assertEqual( + collection.on_page_content('page content', page=None, config={}, files=[]), + 'new page content', + ) def test_run_event_twice_on_collection(self): collection = plugins.PluginCollection() @@ -111,166 +201,104 @@ def test_run_event_twice_on_collection(self): plugin2 = DummyPlugin() plugin2.load_config({'foo': 'second'}) collection['bar'] = plugin2 - self.assertEqual(collection.run_event('pre_page', 'page content'), - 'second new page content') + self.assertEqual( + collection.on_page_content('page content', page=None, config={}, files=[]), + 'second new page content', + ) def test_event_returns_None(self): collection = plugins.PluginCollection() plugin = DummyPlugin() plugin.load_config({'foo': 'new'}) collection['foo'] = plugin - self.assertEqual(collection.run_event('nav', 'nav item'), 'nav item') + self.assertEqual(collection.on_nav(['nav item'], config={}, files=[]), ['nav item']) def test_event_empty_item(self): collection = plugins.PluginCollection() plugin = DummyPlugin() plugin.load_config({'foo': 'new'}) collection['foo'] = plugin - self.assertEqual(collection.run_event('page_read_source'), 'new source') + self.assertEqual(collection.on_page_read_source(page=None, config={}), 'new source') def test_event_empty_item_returns_None(self): collection = plugins.PluginCollection() plugin = DummyPlugin() plugin.load_config({'foo': 'new'}) collection['foo'] = plugin - self.assertEqual(collection.run_event('pre_build'), None) + self.assertEqual(collection.on_pre_build(config={}), None) def test_run_undefined_event_on_collection(self): collection = plugins.PluginCollection() - self.assertEqual(collection.run_event('pre_page', 'page content'), 'page content') + 
self.assertEqual( + collection.on_page_markdown('page markdown', page=None, config={}, files=[]), + 'page markdown', + ) def test_run_unknown_event_on_collection(self): collection = plugins.PluginCollection() - self.assertRaises(KeyError, collection.run_event, 'unknown', 'page content') - - -MockEntryPoint = mock.Mock() -MockEntryPoint.configure_mock(**{'name': 'sample', 'load.return_value': DummyPlugin}) - - -@mock.patch('pkg_resources.iter_entry_points', return_value=[MockEntryPoint]) -class TestPluginConfig(unittest.TestCase): - - def test_plugin_config_without_options(self, mock_class): - - cfg = {'plugins': ['sample']} - option = config.config_options.Plugins() - cfg['plugins'] = option.validate(cfg['plugins']) - - self.assertIsInstance(cfg['plugins'], plugins.PluginCollection) - self.assertIn('sample', cfg['plugins']) - self.assertIsInstance(cfg['plugins']['sample'], plugins.BasePlugin) - expected = { - 'foo': 'default foo', - 'bar': 0, - 'dir': None, - } - self.assertEqual(cfg['plugins']['sample'].config, expected) - - def test_plugin_config_with_options(self, mock_class): - - cfg = { - 'plugins': [{ - 'sample': { - 'foo': 'foo value', - 'bar': 42 - } - }] - } - option = config.config_options.Plugins() - cfg['plugins'] = option.validate(cfg['plugins']) - - self.assertIsInstance(cfg['plugins'], plugins.PluginCollection) - self.assertIn('sample', cfg['plugins']) - self.assertIsInstance(cfg['plugins']['sample'], plugins.BasePlugin) - expected = { - 'foo': 'foo value', - 'bar': 42, - 'dir': None, - } - self.assertEqual(cfg['plugins']['sample'].config, expected) - - def test_plugin_config_empty_list_with_empty_default(self, mock_class): - cfg = {'plugins': []} - option = config.config_options.Plugins(default=[]) - cfg['plugins'] = option.validate(cfg['plugins']) - - self.assertIsInstance(cfg['plugins'], plugins.PluginCollection) - self.assertEqual(len(cfg['plugins']), 0) - - def test_plugin_config_empty_list_with_default(self, mock_class): - # Default is ignored - cfg = {'plugins': []} - option = config.config_options.Plugins(default=['sample']) - cfg['plugins'] = option.validate(cfg['plugins']) - - self.assertIsInstance(cfg['plugins'], plugins.PluginCollection) - self.assertEqual(len(cfg['plugins']), 0) - - def test_plugin_config_none_with_empty_default(self, mock_class): - cfg = {'plugins': None} - option = config.config_options.Plugins(default=[]) - cfg['plugins'] = option.validate(cfg['plugins']) - - self.assertIsInstance(cfg['plugins'], plugins.PluginCollection) - self.assertEqual(len(cfg['plugins']), 0) - - def test_plugin_config_none_with_default(self, mock_class): - # Default is used. 
- cfg = {'plugins': None} - option = config.config_options.Plugins(default=['sample']) - cfg['plugins'] = option.validate(cfg['plugins']) - - self.assertIsInstance(cfg['plugins'], plugins.PluginCollection) - self.assertIn('sample', cfg['plugins']) - self.assertIsInstance(cfg['plugins']['sample'], plugins.BasePlugin) - expected = { - 'foo': 'default foo', - 'bar': 0, - 'dir': None, - } - self.assertEqual(cfg['plugins']['sample'].config, expected) - - def test_plugin_config_uninstalled(self, mock_class): - - cfg = {'plugins': ['uninstalled']} - option = config.config_options.Plugins() - self.assertRaises(config.base.ValidationError, option.validate, cfg['plugins']) - - def test_plugin_config_not_list(self, mock_class): - - cfg = {'plugins': 'sample'} # should be a list - option = config.config_options.Plugins() - self.assertRaises(config.base.ValidationError, option.validate, cfg['plugins']) - - def test_plugin_config_multivalue_dict(self, mock_class): - - cfg = { - 'plugins': [{ - 'sample': { - 'foo': 'foo value', - 'bar': 42 - }, - 'extra_key': 'baz' - }] - } - option = config.config_options.Plugins() - self.assertRaises(config.base.ValidationError, option.validate, cfg['plugins']) - - def test_plugin_config_not_string_or_dict(self, mock_class): - - cfg = { - 'plugins': [('not a string or dict',)] - } - option = config.config_options.Plugins() - self.assertRaises(config.base.ValidationError, option.validate, cfg['plugins']) - - def test_plugin_config_options_not_dict(self, mock_class): - - cfg = { - 'plugins': [{ - 'sample': 'not a dict' - }] - } - option = config.config_options.Plugins() - self.assertRaises(config.base.ValidationError, option.validate, cfg['plugins']) + with self.assertRaises(KeyError): + collection.run_event('unknown', 'page content') + + @tempdir() + def test_run_build_error_event(self, site_dir): + build_errors = [] + + class PluginRaisingError(plugins.BasePlugin): + def __init__(self, error_on): + self.error_on = error_on + + def on_pre_page(self, page, **kwargs): + if self.error_on == 'pre_page': + raise BuildError('pre page error') + return page + + def on_page_markdown(self, markdown, **kwargs): + if self.error_on == 'page_markdown': + raise BuildError('page markdown error') + return markdown + + def on_page_content(self, html, **kwargs): + if self.error_on == 'page_content': + raise PluginError('page content error') + return html + + def on_post_page(self, html, **kwargs): + if self.error_on == 'post_page': + raise ValueError('post page error') + + def on_build_error(self, error, **kwargs): + build_errors.append(error) + + cfg = load_config(site_dir=site_dir) + cfg.plugins['errorplugin'] = PluginRaisingError(error_on='pre_page') + with self.assertLogs('mkdocs', level='ERROR'): + self.assertRaises(Abort, build.build, cfg) + + cfg = load_config(site_dir=site_dir) + cfg.plugins['errorplugin'] = PluginRaisingError(error_on='page_markdown') + with self.assertLogs('mkdocs', level='ERROR'): + self.assertRaises(Abort, build.build, cfg) + + cfg = load_config(site_dir=site_dir) + cfg.plugins['errorplugin'] = PluginRaisingError(error_on='page_content') + with self.assertLogs('mkdocs', level='ERROR'): + self.assertRaises(Abort, build.build, cfg) + + cfg = load_config(site_dir=site_dir) + cfg.plugins['errorplugin'] = PluginRaisingError(error_on='post_page') + with self.assertLogs('mkdocs', level='ERROR'): + self.assertRaises(ValueError, build.build, cfg) + + cfg = load_config(site_dir=site_dir) + cfg.plugins['errorplugin'] = PluginRaisingError(error_on='') + build.build(cfg) + 
+ self.assertEqual(len(build_errors), 4) + self.assertIs(build_errors[0].__class__, BuildError) + self.assertEqual(str(build_errors[0]), 'pre page error') + self.assertIs(build_errors[1].__class__, BuildError) + self.assertEqual(str(build_errors[1]), 'page markdown error') + self.assertIs(build_errors[2].__class__, PluginError) + self.assertEqual(str(build_errors[2]), 'page content error') + self.assertIs(build_errors[3].__class__, ValueError) + self.assertEqual(str(build_errors[3]), 'post page error') diff --git a/mkdocs/tests/search_tests.py b/mkdocs/tests/search_tests.py index 9953762..b8ece67 100644 --- a/mkdocs/tests/search_tests.py +++ b/mkdocs/tests/search_tests.py @@ -1,15 +1,15 @@ #!/usr/bin/env python +import json import unittest from unittest import mock -import json +from mkdocs.config.config_options import ValidationError +from mkdocs.contrib import search +from mkdocs.contrib.search import search_index from mkdocs.structure.files import File from mkdocs.structure.pages import Page from mkdocs.structure.toc import get_toc -from mkdocs.contrib import search -from mkdocs.contrib.search import search_index -from mkdocs.config.config_options import ValidationError from mkdocs.tests.base import dedent, get_markdown_toc, load_config @@ -18,7 +18,6 @@ def strip_whitespace(string): class SearchConfigTests(unittest.TestCase): - def test_lang_default(self): option = search.LangOption(default=['en']) value = option.validate(None) @@ -39,27 +38,50 @@ def test_lang_multi_list(self): value = option.validate(['en', 'es', 'fr']) self.assertEqual(['en', 'es', 'fr'], value) + def test_lang_no_default_none(self): + option = search.LangOption() + value = option.validate(None) + self.assertIsNone(value) + + def test_lang_no_default_str(self): + option = search.LangOption(default=[]) + value = option.validate('en') + self.assertEqual(['en'], value) + + def test_lang_no_default_list(self): + option = search.LangOption(default=[]) + value = option.validate(['en']) + self.assertEqual(['en'], value) + def test_lang_bad_type(self): option = search.LangOption() - self.assertRaises(ValidationError, option.validate, {}) + with self.assertRaises(ValidationError): + option.validate({}) def test_lang_bad_code(self): option = search.LangOption() - self.assertRaises(ValidationError, option.validate, ['foo']) + value = option.validate(['foo']) + self.assertEqual(['en'], value) def test_lang_good_and_bad_code(self): option = search.LangOption() - self.assertRaises(ValidationError, option.validate, ['en', 'foo']) + value = option.validate(['en', 'foo']) + self.assertEqual(['en'], value) + def test_lang_missing_and_with_territory(self): + option = search.LangOption() + value = option.validate(['cs_CZ', 'pt_BR', 'fr']) + self.assertEqual(['fr', 'en', 'pt'], value) -class SearchPluginTests(unittest.TestCase): +class SearchPluginTests(unittest.TestCase): def test_plugin_config_defaults(self): expected = { - 'lang': ['en'], + 'lang': None, 'separator': r'[\s\-]+', 'min_search_length': 3, - 'prebuild_index': False + 'prebuild_index': False, + 'indexing': 'full', } plugin = search.SearchPlugin() errors, warnings = plugin.load_config({}) @@ -72,7 +94,8 @@ def test_plugin_config_lang(self): 'lang': ['es'], 'separator': r'[\s\-]+', 'min_search_length': 3, - 'prebuild_index': False + 'prebuild_index': False, + 'indexing': 'full', } plugin = search.SearchPlugin() errors, warnings = plugin.load_config({'lang': 'es'}) @@ -82,10 +105,11 @@ def test_plugin_config_lang(self): def test_plugin_config_separator(self): expected = { - 
'lang': ['en'], + 'lang': None, 'separator': r'[\s\-\.]+', 'min_search_length': 3, - 'prebuild_index': False + 'prebuild_index': False, + 'indexing': 'full', } plugin = search.SearchPlugin() errors, warnings = plugin.load_config({'separator': r'[\s\-\.]+'}) @@ -95,10 +119,11 @@ def test_plugin_config_separator(self): def test_plugin_config_min_search_length(self): expected = { - 'lang': ['en'], + 'lang': None, 'separator': r'[\s\-]+', 'min_search_length': 2, - 'prebuild_index': False + 'prebuild_index': False, + 'indexing': 'full', } plugin = search.SearchPlugin() errors, warnings = plugin.load_config({'min_search_length': 2}) @@ -108,10 +133,11 @@ def test_plugin_config_min_search_length(self): def test_plugin_config_prebuild_index(self): expected = { - 'lang': ['en'], + 'lang': None, 'separator': r'[\s\-]+', 'min_search_length': 3, - 'prebuild_index': True + 'prebuild_index': True, + 'indexing': 'full', } plugin = search.SearchPlugin() errors, warnings = plugin.load_config({'prebuild_index': True}) @@ -119,6 +145,20 @@ def test_plugin_config_prebuild_index(self): self.assertEqual(errors, []) self.assertEqual(warnings, []) + def test_plugin_config_indexing(self): + expected = { + 'lang': None, + 'separator': r'[\s\-]+', + 'min_search_length': 3, + 'prebuild_index': False, + 'indexing': 'titles', + } + plugin = search.SearchPlugin() + errors, warnings = plugin.load_config({'indexing': 'titles'}) + self.assertEqual(plugin.config, expected) + self.assertEqual(errors, []) + self.assertEqual(warnings, []) + def test_event_on_config_defaults(self): plugin = search.SearchPlugin() plugin.load_config({}) @@ -128,22 +168,53 @@ def test_event_on_config_defaults(self): self.assertEqual(result['theme'].static_templates, {'404.html', 'sitemap.xml'}) self.assertEqual(len(result['theme'].dirs), 3) self.assertEqual(result['extra_javascript'], ['search/main.js']) + self.assertEqual(plugin.config.lang, [result['theme']['locale'].language]) + + def test_event_on_config_lang(self): + plugin = search.SearchPlugin() + plugin.load_config({'lang': 'es'}) + result = plugin.on_config(load_config(theme='mkdocs', extra_javascript=[])) + self.assertFalse(result['theme']['search_index_only']) + self.assertFalse(result['theme']['include_search_page']) + self.assertEqual(result['theme'].static_templates, {'404.html', 'sitemap.xml'}) + self.assertEqual(len(result['theme'].dirs), 3) + self.assertEqual(result['extra_javascript'], ['search/main.js']) + self.assertEqual(plugin.config.lang, ['es']) + + def test_event_on_config_theme_locale(self): + plugin = search.SearchPlugin() + plugin.load_config({}) + result = plugin.on_config( + load_config(theme={'name': 'mkdocs', 'locale': 'fr'}, extra_javascript=[]) + ) + self.assertFalse(result['theme']['search_index_only']) + self.assertFalse(result['theme']['include_search_page']) + self.assertEqual(result['theme'].static_templates, {'404.html', 'sitemap.xml'}) + self.assertEqual(len(result['theme'].dirs), 3) + self.assertEqual(result['extra_javascript'], ['search/main.js']) + self.assertEqual(plugin.config.lang, [result['theme']['locale'].language]) def test_event_on_config_include_search_page(self): plugin = search.SearchPlugin() plugin.load_config({}) - config = load_config(theme={'name': 'mkdocs', 'include_search_page': True}, extra_javascript=[]) + config = load_config( + theme={'name': 'mkdocs', 'include_search_page': True}, extra_javascript=[] + ) result = plugin.on_config(config) self.assertFalse(result['theme']['search_index_only']) 
self.assertTrue(result['theme']['include_search_page']) - self.assertEqual(result['theme'].static_templates, {'404.html', 'sitemap.xml', 'search.html'}) + self.assertEqual( + result['theme'].static_templates, {'404.html', 'sitemap.xml', 'search.html'} + ) self.assertEqual(len(result['theme'].dirs), 3) self.assertEqual(result['extra_javascript'], ['search/main.js']) def test_event_on_config_search_index_only(self): plugin = search.SearchPlugin() plugin.load_config({}) - config = load_config(theme={'name': 'mkdocs', 'search_index_only': True}, extra_javascript=[]) + config = load_config( + theme={'name': 'mkdocs', 'search_index_only': True}, extra_javascript=[] + ) result = plugin.on_config(config) self.assertTrue(result['theme']['search_index_only']) self.assertFalse(result['theme']['include_search_page']) @@ -157,6 +228,7 @@ def test_event_on_post_build_defaults(self, mock_copy_file, mock_write_file): plugin = search.SearchPlugin() plugin.load_config({}) config = load_config(theme='mkdocs') + plugin.on_config(config) plugin.on_pre_build(config) plugin.on_post_build(config) self.assertEqual(mock_copy_file.call_count, 0) @@ -197,56 +269,44 @@ def test_event_on_post_build_search_index_only(self, mock_copy_file, mock_write_ class SearchIndexTests(unittest.TestCase): - - def test_html_stripper(self): - - stripper = search_index.HTMLStripper() + def test_html_stripping(self): + stripper = search_index.ContentParser() stripper.feed("
<div><h1>Testing</h1><div>Content</div></div>
") - self.assertEqual(stripper.data, ["Testing", "Content"]) + self.assertEqual(stripper.stripped_html, "Testing\nContent") def test_content_parser(self): - parser = search_index.ContentParser() parser.feed('
<h1 id="title">Title</h1>
TEST') parser.close() - self.assertEqual(parser.data, [search_index.ContentSection( - text=["TEST"], - id_="title", - title="Title" - )]) + self.assertEqual( + parser.data, [search_index.ContentSection(text=["TEST"], id_="title", title="Title")] + ) def test_content_parser_no_id(self): - parser = search_index.ContentParser() parser.feed("
<h1>Title</h1>
TEST") parser.close() - self.assertEqual(parser.data, [search_index.ContentSection( - text=["TEST"], - id_=None, - title="Title" - )]) + self.assertEqual( + parser.data, [search_index.ContentSection(text=["TEST"], id_=None, title="Title")] + ) def test_content_parser_content_before_header(self): - parser = search_index.ContentParser() parser.feed("Content Before H1
<h1>Title</h1>
TEST") parser.close() - self.assertEqual(parser.data, [search_index.ContentSection( - text=["TEST"], - id_=None, - title="Title" - )]) + self.assertEqual( + parser.data, [search_index.ContentSection(text=["TEST"], id_=None, title="Title")] + ) def test_content_parser_no_sections(self): - parser = search_index.ContentParser() parser.feed("No H1 or H2TitleTEST") @@ -257,14 +317,15 @@ def test_find_toc_by_id(self): """ Test finding the relevant TOC item by the tag ID. """ - index = search_index.SearchIndex() - md = dedent(""" - # Heading 1 - ## Heading 2 - ### Heading 3 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + ### Heading 3 + """ + ) toc = get_toc(get_markdown_toc(md)) toc_item = index._find_toc_by_id(toc, "heading-1") @@ -280,7 +341,6 @@ def test_find_toc_by_id(self): self.assertEqual(toc_item3.title, "Heading 3") def test_create_search_index(self): - html_content = """
<h1 id="heading-1">Heading 1</h1>
<p>Content 1</p>
@@ -290,20 +350,33 @@ def test_create_search_index(self):
<p>Content 3</p>
""" - cfg = load_config() + base_cfg = load_config() pages = [ - Page('Home', File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg), - Page('About', File('about.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg) + Page( + 'Home', + File('index.md', base_cfg.docs_dir, base_cfg.site_dir, base_cfg.use_directory_urls), + base_cfg, + ), + Page( + 'About', + File('about.md', base_cfg.docs_dir, base_cfg.site_dir, base_cfg.use_directory_urls), + base_cfg, + ), ] - md = dedent(""" - # Heading 1 - ## Heading 2 - ### Heading 3 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + ### Heading 3 + """ + ) toc = get_toc(get_markdown_toc(md)) - full_content = ''.join("""Heading{0}Content{0}""".format(i) for i in range(1, 4)) + full_content = ''.join(f"Heading{i}Content{i}" for i in range(1, 4)) + + plugin = search.SearchPlugin() + errors, warnings = plugin.load_config({}) for page in pages: # Fake page.read_source() and page.render() @@ -311,7 +384,7 @@ def test_create_search_index(self): page.toc = toc page.content = html_content - index = search_index.SearchIndex() + index = search_index.SearchIndex(**plugin.config) index.add_entry_from_context(page) self.assertEqual(len(index._entries), 4) @@ -324,15 +397,88 @@ def test_create_search_index(self): self.assertEqual(index._entries[1]['title'], "Heading 1") self.assertEqual(index._entries[1]['text'], "Content 1") - self.assertEqual(index._entries[1]['location'], "{}#heading-1".format(loc)) + self.assertEqual(index._entries[1]['location'], f"{loc}#heading-1") self.assertEqual(index._entries[2]['title'], "Heading 2") self.assertEqual(strip_whitespace(index._entries[2]['text']), "Content2") - self.assertEqual(index._entries[2]['location'], "{}#heading-2".format(loc)) + self.assertEqual(index._entries[2]['location'], f"{loc}#heading-2") self.assertEqual(index._entries[3]['title'], "Heading 3") self.assertEqual(strip_whitespace(index._entries[3]['text']), "Content3") - self.assertEqual(index._entries[3]['location'], "{}#heading-3".format(loc)) + self.assertEqual(index._entries[3]['location'], f"{loc}#heading-3") + + def test_search_indexing_options(self): + def test_page(title, filename, config): + test_page = Page( + title, + File(filename, config.docs_dir, config.site_dir, config.use_directory_urls), + config, + ) + test_page.content = """ +
<h1 id="heading-1">Heading 1</h1>
+ <p>Content 1</p>
+ <h2 id="heading-2">Heading 2</h2>
+ <p>Content 2</p>
+ <h3 id="heading-3">Heading 3</h3>
+ <p>Content 3</p>
""" + test_page.markdown = dedent( + """ + # Heading 1 + ## Heading 2 + ### Heading 3""" + ) + test_page.toc = get_toc(get_markdown_toc(test_page.markdown)) + return test_page + + def validate_full(data, page): + self.assertEqual(len(data), 4) + for x in data: + self.assertTrue(x['title']) + self.assertTrue(x['text']) + + def validate_sections(data, page): + # Sanity + self.assertEqual(len(data), 4) + # Page + self.assertEqual(data[0]['title'], page.title) + self.assertFalse(data[0]['text']) + # Headings + for x in data[1:]: + self.assertTrue(x['title']) + self.assertFalse(x['text']) + + def validate_titles(data, page): + # Sanity + self.assertEqual(len(data), 1) + for x in data: + self.assertFalse(x['text']) + + for option, validate in { + 'full': validate_full, + 'sections': validate_sections, + 'titles': validate_titles, + }.items(): + with self.subTest(option): + plugin = search.SearchPlugin() + + # Load plugin config, overriding indexing for test case + errors, warnings = plugin.load_config({'indexing': option}) + self.assertEqual(errors, []) + self.assertEqual(warnings, []) + + base_cfg = load_config(plugins=['search']) + base_cfg.plugins['search'].config.indexing = option + + pages = [ + test_page('Home', 'index.md', base_cfg), + test_page('About', 'about.md', base_cfg), + ] + + for page in pages: + index = search_index.SearchIndex(**plugin.config) + index.add_entry_from_context(page) + data = index.generate_search_index() + validate(json.loads(data)['docs'], page) @mock.patch('subprocess.Popen', autospec=True) def test_prebuild_index(self, mock_popen): @@ -346,7 +492,7 @@ def test_prebuild_index(self, mock_popen): expected = { 'docs': [], 'config': {'prebuild_index': True}, - 'index': {'mock': 'index'} + 'index': {'mock': 'index'}, } result = json.loads(index.generate_search_index()) self.assertEqual(mock_popen.call_count, 1) @@ -364,9 +510,15 @@ def test_prebuild_index_returns_error(self, mock_popen): index = search_index.SearchIndex(prebuild_index=True) expected = { 'docs': [], - 'config': {'prebuild_index': True} + 'config': {'prebuild_index': True}, } - result = json.loads(index.generate_search_index()) + with self.assertLogs('mkdocs') as cm: + result = json.loads(index.generate_search_index()) + self.assertEqual( + '\n'.join(cm.output), + 'WARNING:mkdocs.contrib.search.search_index:Failed to pre-build search index. Error: Some Error', + ) + self.assertEqual(mock_popen.call_count, 1) self.assertEqual(mock_popen_obj.communicate.call_count, 1) self.assertEqual(result, expected) @@ -382,9 +534,15 @@ def test_prebuild_index_raises_ioerror(self, mock_popen): index = search_index.SearchIndex(prebuild_index=True) expected = { 'docs': [], - 'config': {'prebuild_index': True} + 'config': {'prebuild_index': True}, } - result = json.loads(index.generate_search_index()) + with self.assertLogs('mkdocs') as cm: + result = json.loads(index.generate_search_index()) + self.assertEqual( + '\n'.join(cm.output), + 'WARNING:mkdocs.contrib.search.search_index:Failed to pre-build search index. 
Error: ', + ) + self.assertEqual(mock_popen.call_count, 1) self.assertEqual(mock_popen_obj.communicate.call_count, 1) self.assertEqual(result, expected) @@ -394,15 +552,21 @@ def test_prebuild_index_raises_oserror(self, mock_popen): # See https://stackoverflow.com/a/36501078/866026 mock_popen.return_value = mock.Mock() mock_popen_obj = mock_popen.return_value - mock_popen_obj.communicate.return_value = ('', '') + mock_popen_obj.communicate.return_value = ('foo', 'bar') mock_popen_obj.returncode = 0 index = search_index.SearchIndex(prebuild_index=True) expected = { 'docs': [], - 'config': {'prebuild_index': True} + 'config': {'prebuild_index': True}, } - result = json.loads(index.generate_search_index()) + with self.assertLogs('mkdocs') as cm: + result = json.loads(index.generate_search_index()) + self.assertEqual( + '\n'.join(cm.output), + 'WARNING:mkdocs.contrib.search.search_index:Failed to pre-build search index. Error: ', + ) + self.assertEqual(mock_popen.call_count, 1) self.assertEqual(mock_popen_obj.communicate.call_count, 0) self.assertEqual(result, expected) @@ -418,13 +582,14 @@ def test_prebuild_index_false(self, mock_popen): index = search_index.SearchIndex(prebuild_index=False) expected = { 'docs': [], - 'config': {'prebuild_index': False} + 'config': {'prebuild_index': False}, } result = json.loads(index.generate_search_index()) self.assertEqual(mock_popen.call_count, 0) self.assertEqual(mock_popen_obj.communicate.call_count, 0) self.assertEqual(result, expected) + @unittest.skipUnless(search_index.haslunrpy, 'lunr.py is not installed') @mock.patch('mkdocs.contrib.search.search_index.lunr', autospec=True) def test_prebuild_index_python(self, mock_lunr): mock_lunr.return_value.serialize.return_value = {'mock': 'index'} @@ -432,12 +597,24 @@ def test_prebuild_index_python(self, mock_lunr): expected = { 'docs': [], 'config': {'prebuild_index': 'python', 'lang': 'en'}, - 'index': {'mock': 'index'} + 'index': {'mock': 'index'}, } result = json.loads(index.generate_search_index()) self.assertEqual(mock_lunr.call_count, 1) self.assertEqual(result, expected) + @unittest.skipIf(search_index.haslunrpy, 'lunr.py is installed') + def test_prebuild_index_python_missing_lunr(self): + # When the lunr.py dependencies are not installed no prebuilt index is created. 
+ index = search_index.SearchIndex(prebuild_index='python', lang='en') + expected = { + 'docs': [], + 'config': {'prebuild_index': 'python', 'lang': 'en'}, + } + with self.assertLogs('mkdocs', level='WARNING'): + result = json.loads(index.generate_search_index()) + self.assertEqual(result, expected) + @mock.patch('subprocess.Popen', autospec=True) def test_prebuild_index_node(self, mock_popen): # See https://stackoverflow.com/a/36501078/866026 @@ -450,7 +627,7 @@ def test_prebuild_index_node(self, mock_popen): expected = { 'docs': [], 'config': {'prebuild_index': 'node'}, - 'index': {'mock': 'index'} + 'index': {'mock': 'index'}, } result = json.loads(index.generate_search_index()) self.assertEqual(mock_popen.call_count, 1) diff --git a/mkdocs/tests/structure/file_tests.py b/mkdocs/tests/structure/file_tests.py index 8aa2e20..86c1071 100644 --- a/mkdocs/tests/structure/file_tests.py +++ b/mkdocs/tests/structure/file_tests.py @@ -1,62 +1,88 @@ -import unittest import os +import sys +import unittest from unittest import mock -from mkdocs.structure.files import Files, File, get_files, _sort_files, _filter_paths -from mkdocs.tests.base import load_config, tempdir, PathAssertionMixin +from mkdocs.structure.files import File, Files, _sort_files, get_files +from mkdocs.tests.base import PathAssertionMixin, load_config, tempdir class TestFiles(PathAssertionMixin, unittest.TestCase): - def test_file_eq(self): file = File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertTrue(file == File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)) + self.assertTrue( + file == File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) + ) def test_file_ne(self): file = File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) # Different filename - self.assertTrue(file != File('b.md', '/path/to/docs', '/path/to/site', use_directory_urls=False)) + self.assertTrue( + file != File('b.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) + ) # Different src_path - self.assertTrue(file != File('a.md', '/path/to/other', '/path/to/site', use_directory_urls=False)) + self.assertTrue( + file != File('a.md', '/path/to/other', '/path/to/site', use_directory_urls=False) + ) # Different URL - self.assertTrue(file != File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=True)) + self.assertTrue( + file != File('a.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) + ) + + @unittest.skipUnless(sys.platform.startswith("win"), "requires Windows") + def test_src_path_windows(self): + f = File('foo\\a.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(f.src_uri, 'foo/a.md') + self.assertEqual(f.src_path, 'foo\\a.md') + f.src_uri = 'foo/b.md' + self.assertEqual(f.src_uri, 'foo/b.md') + self.assertEqual(f.src_path, 'foo\\b.md') + f.src_path = 'foo/c.md' + self.assertEqual(f.src_uri, 'foo/c.md') + self.assertEqual(f.src_path, 'foo\\c.md') + f.src_path = 'foo\\d.md' + self.assertEqual(f.src_uri, 'foo/d.md') + self.assertEqual(f.src_path, 'foo\\d.md') + f.src_uri = 'foo\\e.md' + self.assertEqual(f.src_uri, 'foo\\e.md') + self.assertEqual(f.src_path, 'foo\\e.md') def test_sort_files(self): self.assertEqual( _sort_files(['b.md', 'bb.md', 'a.md', 'index.md', 'aa.md']), - ['index.md', 'a.md', 'aa.md', 'b.md', 'bb.md'] + ['index.md', 'a.md', 'aa.md', 'b.md', 'bb.md'], ) self.assertEqual( _sort_files(['b.md', 'index.html', 'a.md', 'index.md']), - ['index.html', 'index.md', 'a.md', 'b.md'] + 
['index.html', 'index.md', 'a.md', 'b.md'], ) self.assertEqual( _sort_files(['a.md', 'index.md', 'b.md', 'index.html']), - ['index.md', 'index.html', 'a.md', 'b.md'] + ['index.html', 'index.md', 'a.md', 'b.md'], ) self.assertEqual( _sort_files(['.md', '_.md', 'a.md', 'index.md', '1.md']), - ['index.md', '.md', '1.md', '_.md', 'a.md'] + ['index.md', '.md', '1.md', '_.md', 'a.md'], ) self.assertEqual( _sort_files(['a.md', 'b.md', 'a.md']), - ['a.md', 'a.md', 'b.md'] + ['a.md', 'a.md', 'b.md'], ) self.assertEqual( _sort_files(['A.md', 'B.md', 'README.md']), - ['README.md', 'A.md', 'B.md'] + ['README.md', 'A.md', 'B.md'], ) def test_md_file(self): f = File('foo.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo.md') + self.assertEqual(f.src_uri, 'foo.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo.md') - self.assertPathsEqual(f.dest_path, 'foo.html') + self.assertEqual(f.dest_uri, 'foo.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo.html') self.assertEqual(f.url, 'foo.html') self.assertEqual(f.name, 'foo') @@ -68,9 +94,9 @@ def test_md_file(self): def test_md_file_use_directory_urls(self): f = File('foo.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo.md') + self.assertEqual(f.src_uri, 'foo.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo.md') - self.assertPathsEqual(f.dest_path, 'foo/index.html') + self.assertEqual(f.dest_uri, 'foo/index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/index.html') self.assertEqual(f.url, 'foo/') self.assertEqual(f.name, 'foo') @@ -82,9 +108,9 @@ def test_md_file_use_directory_urls(self): def test_md_file_nested(self): f = File('foo/bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo/bar.md') + self.assertEqual(f.src_uri, 'foo/bar.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.md') - self.assertPathsEqual(f.dest_path, 'foo/bar.html') + self.assertEqual(f.dest_uri, 'foo/bar.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.html') self.assertEqual(f.url, 'foo/bar.html') self.assertEqual(f.name, 'bar') @@ -96,9 +122,9 @@ def test_md_file_nested(self): def test_md_file_nested_use_directory_urls(self): f = File('foo/bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo/bar.md') + self.assertEqual(f.src_uri, 'foo/bar.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.md') - self.assertPathsEqual(f.dest_path, 'foo/bar/index.html') + self.assertEqual(f.dest_uri, 'foo/bar/index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar/index.html') self.assertEqual(f.url, 'foo/bar/') self.assertEqual(f.name, 'bar') @@ -110,9 +136,9 @@ def test_md_file_nested_use_directory_urls(self): def test_md_index_file(self): f = File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'index.md') + self.assertEqual(f.src_uri, 'index.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/index.md') - self.assertPathsEqual(f.dest_path, 'index.html') + self.assertEqual(f.dest_uri, 'index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/index.html') self.assertEqual(f.url, 'index.html') self.assertEqual(f.name, 'index') @@ -124,9 +150,9 @@ def test_md_index_file(self): def test_md_readme_index_file(self): f = File('README.md', '/path/to/docs', '/path/to/site', 
use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'README.md') + self.assertEqual(f.src_uri, 'README.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/README.md') - self.assertPathsEqual(f.dest_path, 'index.html') + self.assertEqual(f.dest_uri, 'index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/index.html') self.assertEqual(f.url, 'index.html') self.assertEqual(f.name, 'index') @@ -138,11 +164,11 @@ def test_md_readme_index_file(self): def test_md_index_file_use_directory_urls(self): f = File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'index.md') + self.assertEqual(f.src_uri, 'index.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/index.md') - self.assertPathsEqual(f.dest_path, 'index.html') + self.assertEqual(f.dest_uri, 'index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/index.html') - self.assertEqual(f.url, '.') + self.assertEqual(f.url, './') self.assertEqual(f.name, 'index') self.assertTrue(f.is_documentation_page()) self.assertFalse(f.is_static_page()) @@ -152,11 +178,11 @@ def test_md_index_file_use_directory_urls(self): def test_md_readme_index_file_use_directory_urls(self): f = File('README.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'README.md') + self.assertEqual(f.src_uri, 'README.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/README.md') - self.assertPathsEqual(f.dest_path, 'index.html') + self.assertEqual(f.dest_uri, 'index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/index.html') - self.assertEqual(f.url, '.') + self.assertEqual(f.url, './') self.assertEqual(f.name, 'index') self.assertTrue(f.is_documentation_page()) self.assertFalse(f.is_static_page()) @@ -166,9 +192,9 @@ def test_md_readme_index_file_use_directory_urls(self): def test_md_index_file_nested(self): f = File('foo/index.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo/index.md') + self.assertEqual(f.src_uri, 'foo/index.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/index.md') - self.assertPathsEqual(f.dest_path, 'foo/index.html') + self.assertEqual(f.dest_uri, 'foo/index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/index.html') self.assertEqual(f.url, 'foo/index.html') self.assertEqual(f.name, 'index') @@ -180,9 +206,9 @@ def test_md_index_file_nested(self): def test_md_index_file_nested_use_directory_urls(self): f = File('foo/index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo/index.md') + self.assertEqual(f.src_uri, 'foo/index.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/index.md') - self.assertPathsEqual(f.dest_path, 'foo/index.html') + self.assertEqual(f.dest_uri, 'foo/index.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/index.html') self.assertEqual(f.url, 'foo/') self.assertEqual(f.name, 'index') @@ -194,9 +220,9 @@ def test_md_index_file_nested_use_directory_urls(self): def test_static_file(self): f = File('foo/bar.html', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo/bar.html') + self.assertEqual(f.src_uri, 'foo/bar.html') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.html') - self.assertPathsEqual(f.dest_path, 'foo/bar.html') + self.assertEqual(f.dest_uri, 'foo/bar.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.html') 
self.assertEqual(f.url, 'foo/bar.html') self.assertEqual(f.name, 'bar') @@ -208,9 +234,9 @@ def test_static_file(self): def test_static_file_use_directory_urls(self): f = File('foo/bar.html', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo/bar.html') + self.assertEqual(f.src_uri, 'foo/bar.html') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.html') - self.assertPathsEqual(f.dest_path, 'foo/bar.html') + self.assertEqual(f.dest_uri, 'foo/bar.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.html') self.assertEqual(f.url, 'foo/bar.html') self.assertEqual(f.name, 'bar') @@ -222,9 +248,9 @@ def test_static_file_use_directory_urls(self): def test_media_file(self): f = File('foo/bar.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo/bar.jpg') + self.assertEqual(f.src_uri, 'foo/bar.jpg') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.jpg') - self.assertPathsEqual(f.dest_path, 'foo/bar.jpg') + self.assertEqual(f.dest_uri, 'foo/bar.jpg') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.jpg') self.assertEqual(f.url, 'foo/bar.jpg') self.assertEqual(f.name, 'bar') @@ -236,9 +262,9 @@ def test_media_file(self): def test_media_file_use_directory_urls(self): f = File('foo/bar.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo/bar.jpg') + self.assertEqual(f.src_uri, 'foo/bar.jpg') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.jpg') - self.assertPathsEqual(f.dest_path, 'foo/bar.jpg') + self.assertEqual(f.dest_uri, 'foo/bar.jpg') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.jpg') self.assertEqual(f.url, 'foo/bar.jpg') self.assertEqual(f.name, 'bar') @@ -250,9 +276,9 @@ def test_media_file_use_directory_urls(self): def test_javascript_file(self): f = File('foo/bar.js', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo/bar.js') + self.assertEqual(f.src_uri, 'foo/bar.js') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.js') - self.assertPathsEqual(f.dest_path, 'foo/bar.js') + self.assertEqual(f.dest_uri, 'foo/bar.js') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.js') self.assertEqual(f.url, 'foo/bar.js') self.assertEqual(f.name, 'bar') @@ -264,9 +290,9 @@ def test_javascript_file(self): def test_javascript_file_use_directory_urls(self): f = File('foo/bar.js', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo/bar.js') + self.assertEqual(f.src_uri, 'foo/bar.js') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.js') - self.assertPathsEqual(f.dest_path, 'foo/bar.js') + self.assertEqual(f.dest_uri, 'foo/bar.js') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.js') self.assertEqual(f.url, 'foo/bar.js') self.assertEqual(f.name, 'bar') @@ -278,9 +304,9 @@ def test_javascript_file_use_directory_urls(self): def test_css_file(self): f = File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo/bar.css') + self.assertEqual(f.src_uri, 'foo/bar.css') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.css') - self.assertPathsEqual(f.dest_path, 'foo/bar.css') + self.assertEqual(f.dest_uri, 'foo/bar.css') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.css') self.assertEqual(f.url, 'foo/bar.css') self.assertEqual(f.name, 'bar') @@ -292,9 
+318,9 @@ def test_css_file(self): def test_css_file_use_directory_urls(self): f = File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertPathsEqual(f.src_path, 'foo/bar.css') + self.assertEqual(f.src_uri, 'foo/bar.css') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo/bar.css') - self.assertPathsEqual(f.dest_path, 'foo/bar.css') + self.assertEqual(f.dest_uri, 'foo/bar.css') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo/bar.css') self.assertEqual(f.url, 'foo/bar.css') self.assertEqual(f.name, 'bar') @@ -306,13 +332,33 @@ def test_css_file_use_directory_urls(self): def test_file_name_with_space(self): f = File('foo bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertPathsEqual(f.src_path, 'foo bar.md') + self.assertEqual(f.src_uri, 'foo bar.md') self.assertPathsEqual(f.abs_src_path, '/path/to/docs/foo bar.md') - self.assertPathsEqual(f.dest_path, 'foo bar.html') + self.assertEqual(f.dest_uri, 'foo bar.html') self.assertPathsEqual(f.abs_dest_path, '/path/to/site/foo bar.html') self.assertEqual(f.url, 'foo%20bar.html') self.assertEqual(f.name, 'foo bar') + def test_file_name_with_custom_dest_uri(self): + for use_directory_urls in True, False: + with self.subTest(use_directory_urls=use_directory_urls): + f = File( + 'stuff/foo.md', + src_dir='/path/to/docs', + dest_dir='/path/to/site', + use_directory_urls=use_directory_urls, + dest_uri='stuff/1-foo/index.html', + ) + self.assertEqual(f.src_uri, 'stuff/foo.md') + self.assertPathsEqual(f.abs_src_path, '/path/to/docs/stuff/foo.md') + self.assertEqual(f.dest_uri, 'stuff/1-foo/index.html') + self.assertPathsEqual(f.abs_dest_path, '/path/to/site/stuff/1-foo/index.html') + if use_directory_urls: + self.assertEqual(f.url, 'stuff/1-foo/') + else: + self.assertEqual(f.url, 'stuff/1-foo/index.html') + self.assertEqual(f.name, 'foo') + def test_files(self): fs = [ File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True), @@ -320,10 +366,10 @@ def test_files(self): File('foo/bar.html', '/path/to/docs', '/path/to/site', use_directory_urls=True), File('foo/bar.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True), File('foo/bar.js', '/path/to/docs', '/path/to/site', use_directory_urls=True), - File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=True) + File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=True), ] files = Files(fs) - self.assertEqual([f for f in files], fs) + self.assertEqual(list(files), fs) self.assertEqual(len(files), 6) self.assertEqual(files.documentation_pages(), [fs[0], fs[1]]) self.assertEqual(files.static_pages(), [fs[2]]) @@ -333,77 +379,54 @@ def test_files(self): self.assertEqual(files.get_file_from_path('foo/bar.jpg'), fs[3]) self.assertEqual(files.get_file_from_path('foo/bar.jpg'), fs[3]) self.assertEqual(files.get_file_from_path('missing.jpg'), None) - self.assertTrue(fs[2].src_path in files) - self.assertTrue(fs[2].src_path in files) + self.assertTrue(fs[2].src_uri in files.src_uris) extra_file = File('extra.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertFalse(extra_file.src_path in files) + self.assertFalse(extra_file.src_uri in files.src_uris) files.append(extra_file) self.assertEqual(len(files), 7) - self.assertTrue(extra_file.src_path in files) + self.assertTrue(extra_file.src_uri in files.src_uris) self.assertEqual(files.documentation_pages(), [fs[0], fs[1], extra_file]) + files.remove(fs[1]) + 
self.assertEqual(files.documentation_pages(), [fs[0], extra_file]) - @tempdir(files=[ - 'favicon.ico', - 'index.md' - ]) - @tempdir(files=[ - 'base.html', - 'favicon.ico', - 'style.css', - 'foo.md', - 'README', - '.ignore.txt', - '.ignore/file.txt', - 'foo/.ignore.txt', - 'foo/.ignore/file.txt' - ]) + @tempdir( + files=[ + 'favicon.ico', + 'index.md', + ] + ) + @tempdir( + files=[ + 'base.html', + 'favicon.ico', + 'style.css', + 'foo.md', + 'README', + '.ignore.txt', + '.ignore/file.txt', + 'foo/.ignore.txt', + 'foo/.ignore/file.txt', + ] + ) def test_add_files_from_theme(self, tdir, ddir): config = load_config(docs_dir=ddir, theme={'name': None, 'custom_dir': tdir}) - env = config['theme'].get_env() + env = config.theme.get_env() files = get_files(config) self.assertEqual( [file.src_path for file in files], - ['index.md', 'favicon.ico'] + ['index.md', 'favicon.ico'], ) files.add_files_from_theme(env, config) self.assertEqual( [file.src_path for file in files], - ['index.md', 'favicon.ico', 'style.css'] + ['index.md', 'favicon.ico', 'style.css'], ) # Ensure theme file does not override docs_dir file self.assertEqual( files.get_file_from_path('favicon.ico').abs_src_path, - os.path.normpath(os.path.join(ddir, 'favicon.ico')) + os.path.normpath(os.path.join(ddir, 'favicon.ico')), ) - def test_filter_paths(self): - # Root level file - self.assertFalse(_filter_paths('foo.md', 'foo.md', False, ['bar.md'])) - self.assertTrue(_filter_paths('foo.md', 'foo.md', False, ['foo.md'])) - - # Nested file - self.assertFalse(_filter_paths('foo.md', 'baz/foo.md', False, ['bar.md'])) - self.assertTrue(_filter_paths('foo.md', 'baz/foo.md', False, ['foo.md'])) - - # Wildcard - self.assertFalse(_filter_paths('foo.md', 'foo.md', False, ['*.txt'])) - self.assertTrue(_filter_paths('foo.md', 'foo.md', False, ['*.md'])) - - # Root level dir - self.assertFalse(_filter_paths('bar', 'bar', True, ['/baz'])) - self.assertFalse(_filter_paths('bar', 'bar', True, ['/baz/'])) - self.assertTrue(_filter_paths('bar', 'bar', True, ['/bar'])) - self.assertTrue(_filter_paths('bar', 'bar', True, ['/bar/'])) - - # Nested dir - self.assertFalse(_filter_paths('bar', 'foo/bar', True, ['/bar'])) - self.assertFalse(_filter_paths('bar', 'foo/bar', True, ['/bar/'])) - self.assertTrue(_filter_paths('bar', 'foo/bar', True, ['bar/'])) - - # Files that look like dirs (no extension). Note that `is_dir` is `False`. - self.assertFalse(_filter_paths('bar', 'bar', False, ['bar/'])) - self.assertFalse(_filter_paths('bar', 'foo/bar', False, ['bar/'])) - def test_get_relative_url_use_directory_urls(self): to_files = [ 'index.md', @@ -412,87 +435,86 @@ def test_get_relative_url_use_directory_urls(self): 'foo/bar/baz/index.md', 'foo.md', 'foo/bar.md', - 'foo/bar/baz.md' + 'foo/bar/baz.md', ] - to_file_urls = [ - '.', + './', 'foo/', 'foo/bar/', 'foo/bar/baz/', 'foo/', 'foo/bar/', - 'foo/bar/baz/' + 'foo/bar/baz/', ] from_file = File('img.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True) + self.assertEqual(from_file.url, 'img.jpg') + expected = [ - 'img.jpg', # img.jpg relative to . - '../img.jpg', # img.jpg relative to foo/ - '../../img.jpg', # img.jpg relative to foo/bar/ + 'img.jpg', # img.jpg relative to . 
+ '../img.jpg', # img.jpg relative to foo/ + '../../img.jpg', # img.jpg relative to foo/bar/ '../../../img.jpg', # img.jpg relative to foo/bar/baz/ - '../img.jpg', # img.jpg relative to foo - '../../img.jpg', # img.jpg relative to foo/bar - '../../../img.jpg' # img.jpg relative to foo/bar/baz + '../img.jpg', # img.jpg relative to foo + '../../img.jpg', # img.jpg relative to foo/bar + '../../../img.jpg', # img.jpg relative to foo/bar/baz ] - for i, filename in enumerate(to_files): file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertEqual(from_file.url, 'img.jpg') self.assertEqual(file.url, to_file_urls[i]) self.assertEqual(from_file.url_relative_to(file.url), expected[i]) self.assertEqual(from_file.url_relative_to(file), expected[i]) from_file = File('foo/img.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True) + self.assertEqual(from_file.url, 'foo/img.jpg') + expected = [ - 'foo/img.jpg', # foo/img.jpg relative to . - 'img.jpg', # foo/img.jpg relative to foo/ - '../img.jpg', # foo/img.jpg relative to foo/bar/ + 'foo/img.jpg', # foo/img.jpg relative to . + 'img.jpg', # foo/img.jpg relative to foo/ + '../img.jpg', # foo/img.jpg relative to foo/bar/ '../../img.jpg', # foo/img.jpg relative to foo/bar/baz/ - 'img.jpg', # foo/img.jpg relative to foo - '../img.jpg', # foo/img.jpg relative to foo/bar - '../../img.jpg' # foo/img.jpg relative to foo/bar/baz + 'img.jpg', # foo/img.jpg relative to foo + '../img.jpg', # foo/img.jpg relative to foo/bar + '../../img.jpg', # foo/img.jpg relative to foo/bar/baz ] - for i, filename in enumerate(to_files): file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertEqual(from_file.url, 'foo/img.jpg') self.assertEqual(file.url, to_file_urls[i]) self.assertEqual(from_file.url_relative_to(file.url), expected[i]) self.assertEqual(from_file.url_relative_to(file), expected[i]) from_file = File('index.html', '/path/to/docs', '/path/to/site', use_directory_urls=True) + self.assertEqual(from_file.url, './') + expected = [ - '.', # . relative to . - '..', # . relative to foo/ - '../..', # . relative to foo/bar/ - '../../..', # . relative to foo/bar/baz/ - '..', # . relative to foo - '../..', # . relative to foo/bar - '../../..' # . relative to foo/bar/baz + './', # . relative to . + '../', # . relative to foo/ + '../../', # . relative to foo/bar/ + '../../../', # . relative to foo/bar/baz/ + '../', # . relative to foo + '../../', # . relative to foo/bar + '../../../', # . relative to foo/bar/baz ] - for i, filename in enumerate(to_files): file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertEqual(from_file.url, '.') self.assertEqual(file.url, to_file_urls[i]) self.assertEqual(from_file.url_relative_to(file.url), expected[i]) self.assertEqual(from_file.url_relative_to(file), expected[i]) from_file = File('file.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) + self.assertEqual(from_file.url, 'file/') + expected = [ - 'file/', # file relative to . - '../file/', # file relative to foo/ - '../../file/', # file relative to foo/bar/ + 'file/', # file relative to . 
+ '../file/', # file relative to foo/ + '../../file/', # file relative to foo/bar/ '../../../file/', # file relative to foo/bar/baz/ - '../file/', # file relative to foo - '../../file/', # file relative to foo/bar - '../../../file/' # file relative to foo/bar/baz + '../file/', # file relative to foo + '../../file/', # file relative to foo/bar + '../../../file/', # file relative to foo/bar/baz ] - for i, filename in enumerate(to_files): file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=True) - self.assertEqual(from_file.url, 'file/') self.assertEqual(file.url, to_file_urls[i]) self.assertEqual(from_file.url_relative_to(file.url), expected[i]) self.assertEqual(from_file.url_relative_to(file), expected[i]) @@ -505,9 +527,8 @@ def test_get_relative_url(self): 'foo/bar/baz/index.md', 'foo.md', 'foo/bar.md', - 'foo/bar/baz.md' + 'foo/bar/baz.md', ] - to_file_urls = [ 'index.html', 'foo/index.html', @@ -515,123 +536,141 @@ def test_get_relative_url(self): 'foo/bar/baz/index.html', 'foo.html', 'foo/bar.html', - 'foo/bar/baz.html' + 'foo/bar/baz.html', ] from_file = File('img.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(from_file.url, 'img.jpg') + expected = [ - 'img.jpg', # img.jpg relative to . - '../img.jpg', # img.jpg relative to foo/ - '../../img.jpg', # img.jpg relative to foo/bar/ + 'img.jpg', # img.jpg relative to . + '../img.jpg', # img.jpg relative to foo/ + '../../img.jpg', # img.jpg relative to foo/bar/ '../../../img.jpg', # img.jpg relative to foo/bar/baz/ - 'img.jpg', # img.jpg relative to foo.html - '../img.jpg', # img.jpg relative to foo/bar.html - '../../img.jpg' # img.jpg relative to foo/bar/baz.html + 'img.jpg', # img.jpg relative to foo.html + '../img.jpg', # img.jpg relative to foo/bar.html + '../../img.jpg', # img.jpg relative to foo/bar/baz.html ] - for i, filename in enumerate(to_files): - file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertEqual(from_file.url, 'img.jpg') - self.assertEqual(file.url, to_file_urls[i]) - self.assertEqual(from_file.url_relative_to(file.url), expected[i]) - self.assertEqual(from_file.url_relative_to(file), expected[i]) + with self.subTest(from_file=from_file.src_path, to_file=filename): + file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(file.url, to_file_urls[i]) + self.assertEqual(from_file.url_relative_to(file.url), expected[i]) + self.assertEqual(from_file.url_relative_to(file), expected[i]) from_file = File('foo/img.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(from_file.url, 'foo/img.jpg') + expected = [ - 'foo/img.jpg', # foo/img.jpg relative to . - 'img.jpg', # foo/img.jpg relative to foo/ - '../img.jpg', # foo/img.jpg relative to foo/bar/ + 'foo/img.jpg', # foo/img.jpg relative to . 
+ 'img.jpg', # foo/img.jpg relative to foo/ + '../img.jpg', # foo/img.jpg relative to foo/bar/ '../../img.jpg', # foo/img.jpg relative to foo/bar/baz/ - 'foo/img.jpg', # foo/img.jpg relative to foo.html - 'img.jpg', # foo/img.jpg relative to foo/bar.html - '../img.jpg' # foo/img.jpg relative to foo/bar/baz.html + 'foo/img.jpg', # foo/img.jpg relative to foo.html + 'img.jpg', # foo/img.jpg relative to foo/bar.html + '../img.jpg', # foo/img.jpg relative to foo/bar/baz.html ] - for i, filename in enumerate(to_files): - file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertEqual(from_file.url, 'foo/img.jpg') - self.assertEqual(file.url, to_file_urls[i]) - self.assertEqual(from_file.url_relative_to(file.url), expected[i]) - self.assertEqual(from_file.url_relative_to(file), expected[i]) + with self.subTest(from_file=from_file.src_path, to_file=filename): + file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(file.url, to_file_urls[i]) + self.assertEqual(from_file.url_relative_to(file.url), expected[i]) + self.assertEqual(from_file.url_relative_to(file), expected[i]) from_file = File('index.html', '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(from_file.url, 'index.html') + expected = [ - 'index.html', # index.html relative to . - '../index.html', # index.html relative to foo/ - '../../index.html', # index.html relative to foo/bar/ + 'index.html', # index.html relative to . + '../index.html', # index.html relative to foo/ + '../../index.html', # index.html relative to foo/bar/ '../../../index.html', # index.html relative to foo/bar/baz/ - 'index.html', # index.html relative to foo.html - '../index.html', # index.html relative to foo/bar.html - '../../index.html' # index.html relative to foo/bar/baz.html + 'index.html', # index.html relative to foo.html + '../index.html', # index.html relative to foo/bar.html + '../../index.html', # index.html relative to foo/bar/baz.html ] - for i, filename in enumerate(to_files): - file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertEqual(from_file.url, 'index.html') - self.assertEqual(file.url, to_file_urls[i]) - self.assertEqual(from_file.url_relative_to(file.url), expected[i]) - self.assertEqual(from_file.url_relative_to(file), expected[i]) + with self.subTest(from_file=from_file.src_path, to_file=filename): + file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(file.url, to_file_urls[i]) + self.assertEqual(from_file.url_relative_to(file.url), expected[i]) + self.assertEqual(from_file.url_relative_to(file), expected[i]) from_file = File('file.html', '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(from_file.url, 'file.html') + expected = [ - 'file.html', # file.html relative to . - '../file.html', # file.html relative to foo/ - '../../file.html', # file.html relative to foo/bar/ + 'file.html', # file.html relative to . 
+ '../file.html', # file.html relative to foo/ + '../../file.html', # file.html relative to foo/bar/ '../../../file.html', # file.html relative to foo/bar/baz/ - 'file.html', # file.html relative to foo.html - '../file.html', # file.html relative to foo/bar.html - '../../file.html' # file.html relative to foo/bar/baz.html + 'file.html', # file.html relative to foo.html + '../file.html', # file.html relative to foo/bar.html + '../../file.html', # file.html relative to foo/bar/baz.html ] - for i, filename in enumerate(to_files): - file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) - self.assertEqual(from_file.url, 'file.html') - self.assertEqual(file.url, to_file_urls[i]) - self.assertEqual(from_file.url_relative_to(file.url), expected[i]) - self.assertEqual(from_file.url_relative_to(file), expected[i]) - - @tempdir(files=[ - 'index.md', - 'bar.css', - 'bar.html', - 'bar.jpg', - 'bar.js', - 'bar.md', - '.dotfile', - 'templates/foo.html' - ]) + with self.subTest(from_file=from_file.src_path, to_file=filename): + file = File(filename, '/path/to/docs', '/path/to/site', use_directory_urls=False) + self.assertEqual(file.url, to_file_urls[i]) + self.assertEqual(from_file.url_relative_to(file.url), expected[i]) + self.assertEqual(from_file.url_relative_to(file), expected[i]) + + @tempdir( + files=[ + 'index.md', + 'readme.md', + 'bar.css', + 'bar.html', + 'bar.jpg', + 'bar.js', + 'bar.md', + '.dotfile', + 'templates/foo.html', + ] + ) def test_get_files(self, tdir): config = load_config(docs_dir=tdir, extra_css=['bar.css'], extra_javascript=['bar.js']) files = get_files(config) - expected = ['index.md', 'bar.css', 'bar.html', 'bar.jpg', 'bar.js', 'bar.md'] self.assertIsInstance(files, Files) - self.assertEqual(len(files), len(expected)) - self.assertEqual([f.src_path for f in files], expected) + self.assertEqual( + [f.src_path for f in files if f.inclusion.is_included()], + ['index.md', 'bar.css', 'bar.html', 'bar.jpg', 'bar.js', 'bar.md', 'readme.md'], + ) + self.assertEqual( + [f.src_path for f in files if f.inclusion.is_excluded()], + ['.dotfile', 'templates/foo.html'], + ) - @tempdir(files=[ - 'README.md', - 'foo.md' - ]) + @tempdir( + files=[ + 'README.md', + 'foo.md', + ] + ) def test_get_files_include_readme_without_index(self, tdir): config = load_config(docs_dir=tdir) files = get_files(config) - expected = ['README.md', 'foo.md'] self.assertIsInstance(files, Files) - self.assertEqual(len(files), len(expected)) - self.assertEqual([f.src_path for f in files], expected) - - @tempdir(files=[ - 'index.md', - 'README.md', - 'foo.md' - ]) + self.assertEqual([f.src_path for f in files], ['README.md', 'foo.md']) + + @tempdir( + files=[ + 'index.md', + 'README.md', + 'foo.md', + ] + ) def test_get_files_exclude_readme_with_index(self, tdir): config = load_config(docs_dir=tdir) - files = get_files(config) - expected = ['index.md', 'foo.md'] + with self.assertLogs('mkdocs') as cm: + files = get_files(config) + self.assertRegex( + '\n'.join(cm.output), + r"^WARNING:mkdocs.structure.files:" + r"Excluding 'README.md' from the site because it conflicts with 'index.md'.$", + ) self.assertIsInstance(files, Files) - self.assertEqual(len(files), len(expected)) - self.assertEqual([f.src_path for f in files], expected) + self.assertEqual([f.src_path for f in files], ['index.md', 'foo.md']) @tempdir() @tempdir(files={'test.txt': 'source content'}) @@ -642,6 +681,15 @@ def test_copy_file(self, src_dir, dest_dir): file.copy_file() self.assertPathIsFile(dest_path) + 
@tempdir(files={'test.txt': 'source content'}) + def test_copy_file_same_file(self, dest_dir): + file = File('test.txt', dest_dir, dest_dir, use_directory_urls=False) + dest_path = os.path.join(dest_dir, 'test.txt') + file.copy_file() + self.assertPathIsFile(dest_path) + with open(dest_path, encoding='utf-8') as f: + self.assertEqual(f.read(), 'source content') + @tempdir(files={'test.txt': 'destination content'}) @tempdir(files={'test.txt': 'source content'}) def test_copy_file_clean_modified(self, src_dir, dest_dir): @@ -650,7 +698,7 @@ def test_copy_file_clean_modified(self, src_dir, dest_dir): dest_path = os.path.join(dest_dir, 'test.txt') file.copy_file(dirty=False) self.assertPathIsFile(dest_path) - with open(dest_path, 'r', encoding='utf-8') as f: + with open(dest_path, encoding='utf-8') as f: self.assertEqual(f.read(), 'source content') @tempdir(files={'test.txt': 'destination content'}) @@ -661,7 +709,7 @@ def test_copy_file_dirty_modified(self, src_dir, dest_dir): dest_path = os.path.join(dest_dir, 'test.txt') file.copy_file(dirty=True) self.assertPathIsFile(dest_path) - with open(dest_path, 'r', encoding='utf-8') as f: + with open(dest_path, encoding='utf-8') as f: self.assertEqual(f.read(), 'source content') @tempdir(files={'test.txt': 'destination content'}) @@ -672,5 +720,28 @@ def test_copy_file_dirty_not_modified(self, src_dir, dest_dir): dest_path = os.path.join(dest_dir, 'test.txt') file.copy_file(dirty=True) self.assertPathIsFile(dest_path) - with open(dest_path, 'r', encoding='utf-8') as f: + with open(dest_path, encoding='utf-8') as f: self.assertEqual(f.read(), 'destination content') + + def test_files_append_remove_src_paths(self): + fs = [ + File('index.md', '/path/to/docs', '/path/to/site', use_directory_urls=True), + File('foo/bar.md', '/path/to/docs', '/path/to/site', use_directory_urls=True), + File('foo/bar.html', '/path/to/docs', '/path/to/site', use_directory_urls=True), + File('foo/bar.jpg', '/path/to/docs', '/path/to/site', use_directory_urls=True), + File('foo/bar.js', '/path/to/docs', '/path/to/site', use_directory_urls=True), + File('foo/bar.css', '/path/to/docs', '/path/to/site', use_directory_urls=True), + ] + files = Files(fs) + self.assertEqual(len(files), 6) + self.assertEqual(len(files.src_uris), 6) + extra_file = File('extra.md', '/path/to/docs', '/path/to/site', use_directory_urls=True) + self.assertFalse(extra_file.src_uri in files.src_uris) + files.append(extra_file) + self.assertEqual(len(files), 7) + self.assertEqual(len(files.src_uris), 7) + self.assertTrue(extra_file.src_uri in files.src_uris) + files.remove(extra_file) + self.assertEqual(len(files), 6) + self.assertEqual(len(files.src_uris), 6) + self.assertFalse(extra_file.src_uri in files.src_uris) diff --git a/mkdocs/tests/structure/nav_tests.py b/mkdocs/tests/structure/nav_tests.py index ae130fc..44ae2ad 100644 --- a/mkdocs/tests/structure/nav_tests.py +++ b/mkdocs/tests/structure/nav_tests.py @@ -3,30 +3,32 @@ import sys import unittest -from mkdocs.structure.nav import get_navigation from mkdocs.structure.files import File, Files +from mkdocs.structure.nav import Section, _get_by_type, get_navigation from mkdocs.structure.pages import Page from mkdocs.tests.base import dedent, load_config class SiteNavigationTests(unittest.TestCase): - maxDiff = None def test_simple_nav(self): nav_cfg = [ {'Home': 'index.md'}, - {'About': 'about.md'} + {'About': 'about.md'}, ] - expected = dedent(""" - Page(title='Home', url='/') - Page(title='About', url='/about/') - """) - cfg = 
load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files( - [File(list(item.values())[0], cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - for item in nav_cfg] + expected = dedent( + """ + Page(title='Home', url='/') + Page(title='About', url='/about/') + """ ) + cfg = load_config(nav=nav_cfg, site_url='http://example.com/') + fs = [ + File(list(item.values())[0], cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + for item in nav_cfg + ] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 2) @@ -36,34 +38,41 @@ def test_simple_nav(self): def test_nav_no_directory_urls(self): nav_cfg = [ {'Home': 'index.md'}, - {'About': 'about.md'} + {'About': 'about.md'}, ] - expected = dedent(""" - Page(title='Home', url='/index.html') - Page(title='About', url='/about.html') - """) - cfg = load_config(nav=nav_cfg, use_directory_urls=False, site_url='http://example.com/') - files = Files( - [File(list(item.values())[0], cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - for item in nav_cfg] + expected = dedent( + """ + Page(title='Home', url='/index.html') + Page(title='About', url='/about.html') + """ ) + cfg = load_config(nav=nav_cfg, use_directory_urls=False, site_url='http://example.com/') + fs = [ + File(list(item.values())[0], cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + for item in nav_cfg + ] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 2) self.assertEqual(len(site_navigation.pages), 2) + self.assertEqual(repr(site_navigation.homepage), "Page(title='Home', url='/index.html')") def test_nav_missing_page(self): nav_cfg = [ - {'Home': 'index.md'} + {'Home': 'index.md'}, ] - expected = dedent(""" - Page(title='Home', url='/') - """) + expected = dedent( + """ + Page(title='Home', url='/') + """ + ) cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('page_not_in_nav.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File('page_not_in_nav.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 1) @@ -74,17 +83,20 @@ def test_nav_missing_page(self): def test_nav_no_title(self): nav_cfg = [ 'index.md', - {'About': 'about.md'} + {'About': 'about.md'}, ] - expected = dedent(""" - Page(title=[blank], url='/') - Page(title='About', url='/about/') - """) + expected = dedent( + """ + Page(title=[blank], url='/') + Page(title='About', url='/about/') + """ + ) cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files([ - File(nav_cfg[0], cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File(nav_cfg[1]['About'], cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + File(nav_cfg[0], cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File(nav_cfg[1]['About'], cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 2) 
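Every navigation test in this module follows the same construction pattern; as a minimal sketch, assuming only the File, Files, get_navigation and load_config helpers already imported here and the example.com site_url these tests use:

from mkdocs.structure.files import File, Files
from mkdocs.structure.nav import get_navigation
from mkdocs.tests.base import load_config


def build_site_navigation(src_paths):
    # Wrap each documentation source path in a File, collect them into a
    # Files container, and let get_navigation() derive the nav tree from the
    # validated config.
    cfg = load_config(site_url='http://example.com/')
    files = Files(
        [File(path, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for path in src_paths]
    )
    return get_navigation(files, cfg)

str(build_site_navigation(['index.md', 'about.md'])).strip() then yields the Page/Section/Link listing that the dedented expected blocks in these tests compare against.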
@@ -94,25 +106,26 @@ def test_nav_external_links(self): nav_cfg = [ {'Home': 'index.md'}, {'Local': '/local.html'}, - {'External': 'http://example.com/external.html'} + {'External': 'http://example.com/external.html'}, ] - expected = dedent(""" - Page(title='Home', url='/') - Link(title='Local', url='/local.html') - Link(title='External', url='http://example.com/external.html') - """) + expected = dedent( + """ + Page(title='Home', url='/') + Link(title='Local', url='/local.html') + Link(title='External', url='http://example.com/external.html') + """ + ) cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) with self.assertLogs('mkdocs', level='DEBUG') as cm: site_navigation = get_navigation(files, cfg) self.assertEqual( cm.output, [ - "DEBUG:mkdocs.structure.nav:An absolute path to '/local.html' is included in the " - "'nav' configuration, which presumably points to an external resource.", - "DEBUG:mkdocs.structure.nav:An external link to 'http://example.com/external.html' " - "is included in the 'nav' configuration." - ] + "INFO:mkdocs.structure.nav:An absolute path to '/local.html' is included in the 'nav' configuration, which presumably points to an external resource.", + "DEBUG:mkdocs.structure.nav:An external link to 'http://example.com/external.html' is included in the 'nav' configuration.", + ], ) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 3) @@ -122,25 +135,26 @@ def test_nav_bad_links(self): nav_cfg = [ {'Home': 'index.md'}, {'Missing': 'missing.html'}, - {'Bad External': 'example.com'} + {'Bad External': 'example.com'}, ] - expected = dedent(""" - Page(title='Home', url='/') - Link(title='Missing', url='missing.html') - Link(title='Bad External', url='example.com') - """) + expected = dedent( + """ + Page(title='Home', url='/') + Link(title='Missing', url='missing.html') + Link(title='Bad External', url='example.com') + """ + ) cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files([File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])]) - with self.assertLogs('mkdocs', level='WARNING') as cm: + fs = [File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + with self.assertLogs('mkdocs') as cm: site_navigation = get_navigation(files, cfg) self.assertEqual( cm.output, [ - "WARNING:mkdocs.structure.nav:A relative path to 'missing.html' is included " - "in the 'nav' configuration, which is not found in the documentation files", - "WARNING:mkdocs.structure.nav:A relative path to 'example.com' is included " - "in the 'nav' configuration, which is not found in the documentation files" - ] + "WARNING:mkdocs.structure.nav:A relative path to 'missing.html' is included in the 'nav' configuration, which is not found in the documentation files.", + "WARNING:mkdocs.structure.nav:A relative path to 'example.com' is included in the 'nav' configuration, which is not found in the documentation files.", + ], ) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 3) @@ -149,42 +163,51 @@ def test_nav_bad_links(self): def test_indented_nav(self): nav_cfg = [ {'Home': 'index.md'}, - {'API Guide': [ - {'Running': 'api-guide/running.md'}, - {'Testing': 'api-guide/testing.md'}, - {'Debugging': 
'api-guide/debugging.md'}, - {'Advanced': [ - {'Part 1': 'api-guide/advanced/part-1.md'}, - ]}, - ]}, - {'About': [ - {'Release notes': 'about/release-notes.md'}, - {'License': '/license.html'} - ]}, - {'External': 'https://example.com/'} + { + 'API Guide': [ + {'Running': 'api-guide/running.md'}, + {'Testing': 'api-guide/testing.md'}, + {'Debugging': 'api-guide/debugging.md'}, + { + 'Advanced': [ + {'Part 1': 'api-guide/advanced/part-1.md'}, + ] + }, + ] + }, + { + 'About': [ + {'Release notes': 'about/release-notes.md'}, + {'License': '/license.html'}, + ] + }, + {'External': 'https://example.com/'}, ] - expected = dedent(""" - Page(title='Home', url='/') - Section(title='API Guide') - Page(title='Running', url='/api-guide/running/') - Page(title='Testing', url='/api-guide/testing/') - Page(title='Debugging', url='/api-guide/debugging/') - Section(title='Advanced') - Page(title='Part 1', url='/api-guide/advanced/part-1/') - Section(title='About') - Page(title='Release notes', url='/about/release-notes/') - Link(title='License', url='/license.html') - Link(title='External', url='https://example.com/') - """) + expected = dedent( + """ + Page(title='Home', url='/') + Section(title='API Guide') + Page(title='Running', url='/api-guide/running/') + Page(title='Testing', url='/api-guide/testing/') + Page(title='Debugging', url='/api-guide/debugging/') + Section(title='Advanced') + Page(title='Part 1', url='/api-guide/advanced/part-1/') + Section(title='About') + Page(title='Release notes', url='/about/release-notes/') + Link(title='License', url='/license.html') + Link(title='External', url='https://example.com/') + """ + ) cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/running.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/debugging.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/advanced/part-1.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('about/release-notes.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + fs = [ + 'index.md', + 'api-guide/running.md', + 'api-guide/testing.md', + 'api-guide/debugging.md', + 'api-guide/advanced/part-1.md', + 'about/release-notes.md', + ] + files = Files([File(s, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for s in fs]) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 4) @@ -195,23 +218,40 @@ def test_indented_nav(self): self.assertIsNone(site_navigation.items[1].parent) self.assertEqual(site_navigation.items[1].ancestors, []) self.assertEqual(len(site_navigation.items[1].children), 4) - self.assertEqual(repr(site_navigation.items[1].children[0].parent), "Section(title='API Guide')") + self.assertEqual( + repr(site_navigation.items[1].children[0].parent), "Section(title='API Guide')" + ) self.assertEqual(site_navigation.items[1].children[0].ancestors, [site_navigation.items[1]]) - self.assertEqual(repr(site_navigation.items[1].children[1].parent), "Section(title='API Guide')") + self.assertEqual( + repr(site_navigation.items[1].children[1].parent), "Section(title='API Guide')" + ) self.assertEqual(site_navigation.items[1].children[1].ancestors, [site_navigation.items[1]]) - 
self.assertEqual(repr(site_navigation.items[1].children[2].parent), "Section(title='API Guide')") + self.assertEqual( + repr(site_navigation.items[1].children[2].parent), "Section(title='API Guide')" + ) self.assertEqual(site_navigation.items[1].children[2].ancestors, [site_navigation.items[1]]) - self.assertEqual(repr(site_navigation.items[1].children[3].parent), "Section(title='API Guide')") + self.assertEqual( + repr(site_navigation.items[1].children[3].parent), "Section(title='API Guide')" + ) self.assertEqual(site_navigation.items[1].children[3].ancestors, [site_navigation.items[1]]) self.assertEqual(len(site_navigation.items[1].children[3].children), 1) - self.assertEqual(repr(site_navigation.items[1].children[3].children[0].parent), "Section(title='Advanced')") - self.assertEqual(site_navigation.items[1].children[3].children[0].ancestors, - [site_navigation.items[1].children[3], site_navigation.items[1]]) + self.assertEqual( + repr(site_navigation.items[1].children[3].children[0].parent), + "Section(title='Advanced')", + ) + self.assertEqual( + site_navigation.items[1].children[3].children[0].ancestors, + [site_navigation.items[1].children[3], site_navigation.items[1]], + ) self.assertIsNone(site_navigation.items[2].parent) self.assertEqual(len(site_navigation.items[2].children), 2) - self.assertEqual(repr(site_navigation.items[2].children[0].parent), "Section(title='About')") + self.assertEqual( + repr(site_navigation.items[2].children[0].parent), "Section(title='About')" + ) self.assertEqual(site_navigation.items[2].children[0].ancestors, [site_navigation.items[2]]) - self.assertEqual(repr(site_navigation.items[2].children[1].parent), "Section(title='About')") + self.assertEqual( + repr(site_navigation.items[2].children[1].parent), "Section(title='About')" + ) self.assertEqual(site_navigation.items[2].children[1].ancestors, [site_navigation.items[2]]) self.assertIsNone(site_navigation.items[3].parent) self.assertEqual(site_navigation.items[3].ancestors, []) @@ -223,16 +263,19 @@ def test_nested_ungrouped_nav(self): {'Contact': 'about/contact.md'}, {'License Title': 'about/sub/license.md'}, ] - expected = dedent(""" - Page(title='Home', url='/') - Page(title='Contact', url='/about/contact/') - Page(title='License Title', url='/about/sub/license/') - """) - cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files( - [File(list(item.values())[0], cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - for item in nav_cfg] + expected = dedent( + """ + Page(title='Home', url='/') + Page(title='Contact', url='/about/contact/') + Page(title='License Title', url='/about/sub/license/') + """ ) + cfg = load_config(nav=nav_cfg, site_url='http://example.com/') + fs = [ + File(list(item.values())[0], cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + for item in nav_cfg + ] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 3) @@ -242,18 +285,19 @@ def test_nested_ungrouped_nav_no_titles(self): nav_cfg = [ 'index.md', 'about/contact.md', - 'about/sub/license.md' + 'about/sub/license.md', ] - expected = dedent(""" - Page(title=[blank], url='/') - Page(title=[blank], url='/about/contact/') - Page(title=[blank], url='/about/sub/license/') - """) + expected = dedent( + """ + Page(title=[blank], url='/') + Page(title=[blank], url='/about/contact/') + Page(title=[blank], url='/about/sub/license/') + """ + ) cfg = load_config(nav=nav_cfg, 
site_url='http://example.com/') - files = Files( - [File(item, cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) for item in nav_cfg] - ) + fs = [File(item, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for item in nav_cfg] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 3) @@ -267,31 +311,35 @@ def test_nested_ungrouped_no_titles_windows(self): 'about\\contact.md', 'about\\sub\\license.md', ] - expected = dedent(""" - Page(title=[blank], url='/') - Page(title=[blank], url='/about/contact/') - Page(title=[blank], url='/about/sub/license/') - """) + expected = dedent( + """ + Page(title=[blank], url='/') + Page(title=[blank], url='/about/contact/') + Page(title=[blank], url='/about/sub/license/') + """ + ) cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files( - [File(item, cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) for item in nav_cfg] - ) + fs = [File(item, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for item in nav_cfg] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 3) self.assertEqual(len(site_navigation.pages), 3) def test_nav_from_files(self): - expected = dedent(""" - Page(title=[blank], url='/') - Page(title=[blank], url='/about/') - """) + expected = dedent( + """ + Page(title=[blank], url='/') + Page(title=[blank], url='/about/') + """ + ) cfg = load_config(site_url='http://example.com/') - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('about.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - ]) + fs = [ + File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + File('about.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls), + ] + files = Files(fs) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 2) @@ -299,60 +347,98 @@ def test_nav_from_files(self): self.assertEqual(repr(site_navigation.homepage), "Page(title=[blank], url='/')") def test_nav_from_nested_files(self): - expected = dedent(""" - Page(title=[blank], url='/') - Section(title='About') - Page(title=[blank], url='/about/license/') - Page(title=[blank], url='/about/release-notes/') - Section(title='Api guide') - Page(title=[blank], url='/api-guide/debugging/') - Page(title=[blank], url='/api-guide/running/') - Page(title=[blank], url='/api-guide/testing/') - Section(title='Advanced') - Page(title=[blank], url='/api-guide/advanced/part-1/') - """) + expected = dedent( + """ + Page(title=[blank], url='/') + Section(title='About') + Page(title=[blank], url='/about/license/') + Page(title=[blank], url='/about/release-notes/') + Section(title='Api guide') + Page(title=[blank], url='/api-guide/debugging/') + Page(title=[blank], url='/api-guide/running/') + Page(title=[blank], url='/api-guide/testing/') + Section(title='Advanced') + Page(title=[blank], url='/api-guide/advanced/part-1/') + """ + ) cfg = load_config(site_url='http://example.com/') - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('about/license.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('about/release-notes.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - 
File('api-guide/debugging.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/running.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/advanced/part-1.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + fs = [ + 'index.md', + 'about/license.md', + 'about/release-notes.md', + 'api-guide/debugging.md', + 'api-guide/running.md', + 'api-guide/testing.md', + 'api-guide/advanced/part-1.md', + ] + files = Files([File(s, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for s in fs]) site_navigation = get_navigation(files, cfg) self.assertEqual(str(site_navigation).strip(), expected) self.assertEqual(len(site_navigation.items), 3) self.assertEqual(len(site_navigation.pages), 7) self.assertEqual(repr(site_navigation.homepage), "Page(title=[blank], url='/')") + def test_nav_page_subclass(self): + class PageSubclass(Page): + pass + + nav_cfg = [ + {'Home': 'index.md'}, + {'About': 'about.md'}, + ] + expected = dedent( + """ + PageSubclass(title=[blank], url='/') + PageSubclass(title=[blank], url='/about/') + """ + ) + cfg = load_config(nav=nav_cfg, site_url='http://example.com/') + fs = [ + File(list(item.values())[0], cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + for item in nav_cfg + ] + files = Files(fs) + for file in files: + PageSubclass(None, file, cfg) + site_navigation = get_navigation(files, cfg) + self.assertEqual(str(site_navigation).strip(), expected) + self.assertEqual(len(site_navigation.items), 2) + self.assertEqual(len(site_navigation.pages), 2) + self.assertEqual(repr(site_navigation.homepage), "PageSubclass(title=[blank], url='/')") + def test_active(self): nav_cfg = [ {'Home': 'index.md'}, - {'API Guide': [ - {'Running': 'api-guide/running.md'}, - {'Testing': 'api-guide/testing.md'}, - {'Debugging': 'api-guide/debugging.md'}, - {'Advanced': [ - {'Part 1': 'api-guide/advanced/part-1.md'}, - ]}, - ]}, - {'About': [ - {'Release notes': 'about/release-notes.md'}, - {'License': 'about/license.md'} - ]} + { + 'API Guide': [ + {'Running': 'api-guide/running.md'}, + {'Testing': 'api-guide/testing.md'}, + {'Debugging': 'api-guide/debugging.md'}, + { + 'Advanced': [ + {'Part 1': 'api-guide/advanced/part-1.md'}, + ] + }, + ] + }, + { + 'About': [ + {'Release notes': 'about/release-notes.md'}, + {'License': 'about/license.md'}, + ] + }, ] cfg = load_config(nav=nav_cfg, site_url='http://example.com/') - files = Files([ - File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/running.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/debugging.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('api-guide/advanced/part-1.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('about/release-notes.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - File('about/license.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), - ]) + fs = [ + 'index.md', + 'api-guide/running.md', + 'api-guide/testing.md', + 'api-guide/debugging.md', + 'api-guide/advanced/part-1.md', + 'about/release-notes.md', + 'about/license.md', + ] + files = Files([File(s, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for s in fs]) site_navigation = get_navigation(files, cfg) # Confirm nothing is 
active self.assertTrue(all(page.active is False for page in site_navigation.pages)) @@ -377,3 +463,21 @@ def test_active(self): self.assertFalse(site_navigation.items[1].children[3].children[0].active) self.assertFalse(site_navigation.items[1].children[3].active) self.assertFalse(site_navigation.items[1].active) + + def test_get_by_type_nested_sections(self): + nav_cfg = [ + { + 'Section 1': [ + { + 'Section 2': [ + {'Page': 'page.md'}, + ] + }, + ] + }, + ] + cfg = load_config(nav=nav_cfg, site_url='http://example.com/') + fs = [File('page.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls)] + files = Files(fs) + site_navigation = get_navigation(files, cfg) + self.assertEqual(len(_get_by_type(site_navigation, Section)), 2) diff --git a/mkdocs/tests/structure/page_tests.py b/mkdocs/tests/structure/page_tests.py index ae35d6c..97a79c4 100644 --- a/mkdocs/tests/structure/page_tests.py +++ b/mkdocs/tests/structure/page_tests.py @@ -1,21 +1,40 @@ -import unittest +from __future__ import annotations + import os import sys +import textwrap +import unittest from unittest import mock -from tempfile import TemporaryDirectory -from mkdocs.structure.pages import Page +from mkdocs.config.defaults import MkDocsConfig from mkdocs.structure.files import File, Files -from mkdocs.tests.base import load_config, dedent +from mkdocs.structure.pages import Page, _RelativePathTreeprocessor +from mkdocs.tests.base import dedent, tempdir +DOCS_DIR = os.path.join( + os.path.abspath(os.path.dirname(__file__)), '..', 'integration', 'subpages', 'docs' +) -class PageTests(unittest.TestCase): - DOCS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../integration/subpages/docs') +def load_config(**cfg) -> MkDocsConfig: + cfg.setdefault('site_name', 'Example') + cfg.setdefault( + 'docs_dir', + os.path.join( + os.path.abspath(os.path.dirname(__file__)), '..', 'integration', 'minimal', 'docs' + ), + ) + conf = MkDocsConfig() + conf.load_dict(cfg) + errors_warnings = conf.validate() + assert errors_warnings == ([], []), errors_warnings + return conf + +class PageTests(unittest.TestCase): def test_homepage(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) self.assertIsNone(fl.page) pg = Page('Foo', fl, cfg) self.assertEqual(fl.page, pg) @@ -39,8 +58,8 @@ def test_homepage(self): self.assertEqual(pg.toc, []) def test_nested_index_page(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('sub1/index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('sub1/index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) pg.parent = 'foo' self.assertEqual(pg.url, 'sub1/') @@ -63,8 +82,8 @@ def test_nested_index_page(self): self.assertEqual(pg.toc, []) def test_nested_index_page_no_parent(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('sub1/index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('sub1/index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) pg.parent = None # non-homepage at nav root level; see #1919. 
self.assertEqual(pg.url, 'sub1/') @@ -87,8 +106,8 @@ def test_nested_index_page_no_parent(self): self.assertEqual(pg.toc, []) def test_nested_index_page_no_parent_no_directory_urls(self): - cfg = load_config(docs_dir=self.DOCS_DIR, use_directory_urls=False) - fl = File('sub1/index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR, use_directory_urls=False) + fl = File('sub1/index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) pg.parent = None # non-homepage at nav root level; see #1919. self.assertEqual(pg.url, 'sub1/index.html') @@ -111,8 +130,8 @@ def test_nested_index_page_no_parent_no_directory_urls(self): self.assertEqual(pg.toc, []) def test_nested_nonindex_page(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('sub1/non-index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('sub1/non-index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) pg.parent = 'foo' self.assertEqual(pg.url, 'sub1/non-index/') @@ -136,7 +155,7 @@ def test_nested_nonindex_page(self): def test_page_defaults(self): cfg = load_config() - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertRegex(pg.update_date, r'\d{4}-\d{2}-\d{2}') self.assertEqual(pg.url, 'testing/') @@ -160,7 +179,7 @@ def test_page_defaults(self): def test_page_no_directory_url(self): cfg = load_config(use_directory_urls=False) - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertEqual(pg.url, 'testing.html') self.assertEqual(pg.abs_url, None) @@ -183,7 +202,7 @@ def test_page_no_directory_url(self): def test_page_canonical_url(self): cfg = load_config(site_url='http://example.com') - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertEqual(pg.url, 'testing/') self.assertEqual(pg.abs_url, '/testing/') @@ -206,7 +225,7 @@ def test_page_canonical_url(self): def test_page_canonical_url_nested(self): cfg = load_config(site_url='http://example.com/foo/') - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertEqual(pg.url, 'testing/') self.assertEqual(pg.abs_url, '/foo/testing/') @@ -229,7 +248,7 @@ def test_page_canonical_url_nested(self): def test_page_canonical_url_nested_no_slash(self): cfg = load_config(site_url='http://example.com/foo') - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertEqual(pg.url, 'testing/') self.assertEqual(pg.abs_url, '/foo/testing/') @@ -252,7 +271,7 @@ def test_page_canonical_url_nested_no_slash(self): def test_predefined_page_title(self): cfg = load_config() - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Page Title', fl, cfg) pg.read_source(cfg) 
self.assertEqual(pg.url, 'testing/') @@ -276,7 +295,7 @@ def test_predefined_page_title(self): def test_page_title_from_markdown(self): cfg = load_config() - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page(None, fl, cfg) pg.read_source(cfg) self.assertEqual(pg.url, 'testing/') @@ -296,11 +315,89 @@ def test_page_title_from_markdown(self): self.assertEqual(pg.parent, None) self.assertEqual(pg.previous_page, None) self.assertEqual(pg.title, 'Welcome to MkDocs') - self.assertEqual(pg.toc, []) + pg.render(cfg, fl) + self.assertEqual(pg.title, 'Welcome to MkDocs') + + _SETEXT_CONTENT = dedent( + ''' + Welcome to MkDocs Setext + ======================== + + This tests extracting a setext style title. + ''' + ) + + @tempdir(files={'testing_setext_title.md': _SETEXT_CONTENT}) + def test_page_title_from_setext_markdown(self, docs_dir): + cfg = load_config() + fl = File('testing_setext_title.md', docs_dir, docs_dir, use_directory_urls=True) + pg = Page(None, fl, cfg) + self.assertIsNone(pg.title) + pg.read_source(cfg) + self.assertEqual(pg.title, 'Testing setext title') + pg.render(cfg, fl) + self.assertEqual(pg.title, 'Welcome to MkDocs Setext') + + @tempdir(files={'testing_setext_title.md': _SETEXT_CONTENT}) + def test_page_title_from_markdown_stripped_anchorlinks(self, docs_dir): + cfg = MkDocsConfig() + cfg.site_name = 'example' + cfg.markdown_extensions = {'toc': {'permalink': '&'}} + self.assertEqual(cfg.validate(), ([], [])) + fl = File('testing_setext_title.md', docs_dir, docs_dir, use_directory_urls=True) + pg = Page(None, fl, cfg) + pg.read_source(cfg) + pg.render(cfg, fl) + self.assertEqual(pg.title, 'Welcome to MkDocs Setext') + + _FORMATTING_CONTENT = dedent( + ''' + # \\*Hello --- *beautiful* `world` + + Hi. + ''' + ) + + @tempdir(files={'testing_formatting.md': _FORMATTING_CONTENT}) + def test_page_title_from_markdown_strip_formatting(self, docs_dir): + cfg = load_config() + cfg.markdown_extensions.append('smarty') + fl = File('testing_formatting.md', docs_dir, docs_dir, use_directory_urls=True) + pg = Page(None, fl, cfg) + pg.read_source(cfg) + pg.render(cfg, fl) + self.assertEqual(pg.title, '*Hello — beautiful world') + + _ATTRLIST_CONTENT = dedent( + ''' + # Welcome to MkDocs Attr { #welcome } + + This tests extracting the title, with enabled attr_list markdown_extension. 
+ ''' + ) + + @tempdir(files={'testing_attr_list.md': _ATTRLIST_CONTENT}) + def test_page_title_from_markdown_stripped_attr_list(self, docs_dir): + cfg = load_config() + cfg.markdown_extensions.append('attr_list') + fl = File('testing_attr_list.md', docs_dir, docs_dir, use_directory_urls=True) + pg = Page(None, fl, cfg) + pg.read_source(cfg) + pg.render(cfg, fl) + self.assertEqual(pg.title, 'Welcome to MkDocs Attr') + + @tempdir(files={'testing_attr_list.md': _ATTRLIST_CONTENT}) + def test_page_title_from_markdown_preserved_attr_list(self, docs_dir): + cfg = load_config() + fl = File('testing_attr_list.md', docs_dir, docs_dir, use_directory_urls=True) + pg = Page(None, fl, cfg) + pg.read_source(cfg) + pg.render(cfg, fl) + self.assertEqual(pg.title, 'Welcome to MkDocs Attr { #welcome }') def test_page_title_from_meta(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('metadata.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('metadata.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page(None, fl, cfg) pg.read_source(cfg) self.assertEqual(pg.url, 'metadata/') @@ -321,10 +418,12 @@ def test_page_title_from_meta(self): self.assertEqual(pg.previous_page, None) self.assertEqual(pg.title, 'A Page Title') self.assertEqual(pg.toc, []) + pg.render(cfg, fl) + self.assertEqual(pg.title, 'A Page Title') def test_page_title_from_filename(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('page-title.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('page-title.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page(None, fl, cfg) pg.read_source(cfg) self.assertEqual(pg.url, 'page-title/') @@ -344,11 +443,12 @@ def test_page_title_from_filename(self): self.assertEqual(pg.parent, None) self.assertEqual(pg.previous_page, None) self.assertEqual(pg.title, 'Page title') - self.assertEqual(pg.toc, []) + pg.render(cfg, fl) + self.assertEqual(pg.title, 'Page title') def test_page_title_from_capitalized_filename(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('pageTitle.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('pageTitle.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page(None, fl, cfg) pg.read_source(cfg) self.assertEqual(pg.url, 'pageTitle/') @@ -368,11 +468,10 @@ def test_page_title_from_capitalized_filename(self): self.assertEqual(pg.parent, None) self.assertEqual(pg.previous_page, None) self.assertEqual(pg.title, 'pageTitle') - self.assertEqual(pg.toc, []) def test_page_title_from_homepage_filename(self): - cfg = load_config(docs_dir=self.DOCS_DIR) - fl = File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + cfg = load_config(docs_dir=DOCS_DIR) + fl = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page(None, fl, cfg) pg.read_source(cfg) self.assertEqual(pg.url, '') @@ -396,287 +495,230 @@ def test_page_title_from_homepage_filename(self): def test_page_eq(self): cfg = load_config() - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertTrue(pg == Page('Foo', fl, cfg)) def test_page_ne(self): cfg = load_config() - f1 = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - f2 = File('index.md', 
cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + f1 = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + f2 = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', f1, cfg) # Different Title self.assertTrue(pg != Page('Bar', f1, cfg)) # Different File self.assertTrue(pg != Page('Foo', f2, cfg)) - def test_BOM(self): + @tempdir() + def test_BOM(self, docs_dir): md_src = '# An UTF-8 encoded file with a BOM' - with TemporaryDirectory() as docs_dir: - # We don't use mkdocs.tests.base.tempdir decorator here due to uniqueness of this test. - cfg = load_config(docs_dir=docs_dir) - fl = File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - pg = Page(None, fl, cfg) - # Create an UTF-8 Encoded file with BOM (as Micorsoft editors do). See #1186 - with open(fl.abs_src_path, 'w', encoding='utf-8-sig') as f: - f.write(md_src) - # Now read the file. - pg.read_source(cfg) - # Ensure the BOM (`\ufeff`) is removed - self.assertNotIn('\ufeff', pg.markdown) - self.assertEqual(pg.markdown, md_src) - self.assertEqual(pg.meta, {}) - - def test_page_edit_url(self): - configs = [ - { - 'repo_url': 'http://github.com/mkdocs/mkdocs' - }, - { - 'repo_url': 'https://github.com/mkdocs/mkdocs/' - }, { - 'repo_url': 'http://example.com' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': 'edit/master' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '/edit/master' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': '/edit/master/' - }, { - 'repo_url': 'http://example.com/foo', - 'edit_uri': '/edit/master/' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': '/edit/master' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': 'edit/master/' - }, { - 'repo_url': 'http://example.com/foo', - 'edit_uri': 'edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '?query=edit/master' - }, { - 'repo_url': 'http://example.com/', - 'edit_uri': '?query=edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '#edit/master' - }, { - 'repo_url': 'http://example.com/', - 'edit_uri': '#edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '' # Set to blank value - }, { - # Nothing defined - } - ] - - expected = [ - 'http://github.com/mkdocs/mkdocs/edit/master/docs/testing.md', - 'https://github.com/mkdocs/mkdocs/edit/master/docs/testing.md', - None, - 'http://example.com/edit/master/testing.md', - 'http://example.com/edit/master/testing.md', - 'http://example.com/edit/master/testing.md', - 'http://example.com/edit/master/testing.md', - 'http://example.com/edit/master/testing.md', - 'http://example.com/foo/edit/master/testing.md', - 'http://example.com/foo/edit/master/testing.md', - 'http://example.com?query=edit/master/testing.md', - 'http://example.com/?query=edit/master/testing.md', - 'http://example.com#edit/master/testing.md', - 'http://example.com/#edit/master/testing.md', - None, - None - ] - - for i, c in enumerate(configs): - cfg = load_config(**c) - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - pg = Page('Foo', fl, cfg) - self.assertEqual(pg.url, 'testing/') - self.assertEqual(pg.edit_url, expected[i]) - - def test_nested_page_edit_url(self): - configs = [ - { - 'repo_url': 'http://github.com/mkdocs/mkdocs' - }, - { - 'repo_url': 'https://github.com/mkdocs/mkdocs/' - }, { - 'repo_url': 'http://example.com' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': 'edit/master' - }, { - 'repo_url': 
'http://example.com', - 'edit_uri': '/edit/master' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': '/edit/master/' - }, { - 'repo_url': 'http://example.com/foo', - 'edit_uri': '/edit/master/' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': '/edit/master' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': 'edit/master/' - }, { - 'repo_url': 'http://example.com/foo', - 'edit_uri': 'edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '?query=edit/master' - }, { - 'repo_url': 'http://example.com/', - 'edit_uri': '?query=edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '#edit/master' - }, { - 'repo_url': 'http://example.com/', - 'edit_uri': '#edit/master/' - } - ] - - expected = [ - 'http://github.com/mkdocs/mkdocs/edit/master/docs/sub1/non-index.md', - 'https://github.com/mkdocs/mkdocs/edit/master/docs/sub1/non-index.md', - None, - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/foo/edit/master/sub1/non-index.md', - 'http://example.com/foo/edit/master/sub1/non-index.md', - 'http://example.com?query=edit/master/sub1/non-index.md', - 'http://example.com/?query=edit/master/sub1/non-index.md', - 'http://example.com#edit/master/sub1/non-index.md', - 'http://example.com/#edit/master/sub1/non-index.md' - ] - - for i, c in enumerate(configs): - c['docs_dir'] = self.DOCS_DIR - cfg = load_config(**c) - fl = File('sub1/non-index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - pg = Page('Foo', fl, cfg) - self.assertEqual(pg.url, 'sub1/non-index/') - self.assertEqual(pg.edit_url, expected[i]) + cfg = load_config(docs_dir=docs_dir) + fl = File('index.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + pg = Page(None, fl, cfg) + # Create an UTF-8 Encoded file with BOM (as Microsoft editors do). See #1186 + with open(fl.abs_src_path, 'w', encoding='utf-8-sig') as f: + f.write(md_src) + # Now read the file. 
+ pg.read_source(cfg) + # Ensure the BOM (`\ufeff`) is removed + self.assertNotIn('\ufeff', pg.markdown) + self.assertEqual(pg.markdown, md_src) + self.assertEqual(pg.meta, {}) + + def test_page_edit_url( + self, paths={'testing.md': 'testing/', 'sub1/non-index.md': 'sub1/non-index/'} + ): + for case in [ + dict( + config={'repo_url': 'http://github.com/mkdocs/mkdocs'}, + edit_url='http://github.com/mkdocs/mkdocs/edit/master/docs/testing.md', + edit_url2='http://github.com/mkdocs/mkdocs/edit/master/docs/sub1/non-index.md', + ), + dict( + config={'repo_url': 'https://github.com/mkdocs/mkdocs/'}, + edit_url='https://github.com/mkdocs/mkdocs/edit/master/docs/testing.md', + edit_url2='https://github.com/mkdocs/mkdocs/edit/master/docs/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com'}, + edit_url=None, + edit_url2=None, + ), + dict( + config={'repo_url': 'http://example.com', 'edit_uri': 'edit/master'}, + edit_url='http://example.com/edit/master/testing.md', + edit_url2='http://example.com/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com', 'edit_uri': '/edit/master'}, + edit_url='http://example.com/edit/master/testing.md', + edit_url2='http://example.com/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/foo/', 'edit_uri': '/edit/master/'}, + edit_url='http://example.com/edit/master/testing.md', + edit_url2='http://example.com/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/foo', 'edit_uri': '/edit/master/'}, + edit_url='http://example.com/edit/master/testing.md', + edit_url2='http://example.com/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/foo/', 'edit_uri': '/edit/master'}, + edit_url='http://example.com/edit/master/testing.md', + edit_url2='http://example.com/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/foo/', 'edit_uri': 'edit/master/'}, + edit_url='http://example.com/foo/edit/master/testing.md', + edit_url2='http://example.com/foo/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/foo', 'edit_uri': 'edit/master/'}, + edit_url='http://example.com/foo/edit/master/testing.md', + edit_url2='http://example.com/foo/edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com', 'edit_uri': '?query=edit/master'}, + edit_url='http://example.com?query=edit/master/testing.md', + edit_url2='http://example.com?query=edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/', 'edit_uri': '?query=edit/master/'}, + edit_url='http://example.com/?query=edit/master/testing.md', + edit_url2='http://example.com/?query=edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com', 'edit_uri': '#edit/master'}, + edit_url='http://example.com#edit/master/testing.md', + edit_url2='http://example.com#edit/master/sub1/non-index.md', + ), + dict( + config={'repo_url': 'http://example.com/', 'edit_uri': '#edit/master/'}, + edit_url='http://example.com/#edit/master/testing.md', + edit_url2='http://example.com/#edit/master/sub1/non-index.md', + ), + dict( + config={'edit_uri': 'http://example.com/edit/master'}, + edit_url='http://example.com/edit/master/testing.md', + edit_url2='http://example.com/edit/master/sub1/non-index.md', + ), + dict( + config={'edit_uri_template': 'https://github.com/project/repo/wiki/{path_noext}'}, + edit_url='https://github.com/project/repo/wiki/testing', + 
edit_url2='https://github.com/project/repo/wiki/sub1/non-index', + ), + dict( + config={ + 'repo_url': 'https://github.com/project/repo/wiki', + 'edit_uri_template': '{path_noext}/_edit', + }, + edit_url='https://github.com/project/repo/wiki/testing/_edit', + edit_url2='https://github.com/project/repo/wiki/sub1/non-index/_edit', + ), + dict( + config={ + 'repo_url': 'https://gitlab.com/project/repo', + 'edit_uri_template': '-/sse/master/docs%2F{path!q}', + }, + edit_url='https://gitlab.com/project/repo/-/sse/master/docs%2Ftesting.md', + edit_url2='https://gitlab.com/project/repo/-/sse/master/docs%2Fsub1%2Fnon-index.md', + ), + dict( + config={ + 'repo_url': 'https://bitbucket.org/project/repo/', + 'edit_uri_template': 'src/master/docs/{path}?mode=edit', + }, + edit_url='https://bitbucket.org/project/repo/src/master/docs/testing.md?mode=edit', + edit_url2='https://bitbucket.org/project/repo/src/master/docs/sub1/non-index.md?mode=edit', + ), + dict( + config={ + 'repo_url': 'http://example.com', + 'edit_uri': '', + 'edit_uri_template': '', + }, # Set to blank value + edit_url=None, + edit_url2=None, + ), + dict(config={}, edit_url=None, edit_url2=None), # Nothing defined + ]: + for i, path in enumerate(paths, 1): + edit_url_key = f'edit_url{i}' if i > 1 else 'edit_url' + with self.subTest(case['config'], path=path): + cfg = load_config(**case['config']) + fl = File(path, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + pg = Page('Foo', fl, cfg) + self.assertEqual(pg.url, paths[path]) + self.assertEqual(pg.edit_url, case[edit_url_key]) @unittest.skipUnless(sys.platform.startswith("win"), "requires Windows") - def test_nested_page_edit_url_windows(self): - configs = [ - { - 'repo_url': 'http://github.com/mkdocs/mkdocs' - }, - { - 'repo_url': 'https://github.com/mkdocs/mkdocs/' - }, { - 'repo_url': 'http://example.com' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': 'edit/master' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '/edit/master' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': '/edit/master/' - }, { - 'repo_url': 'http://example.com/foo', - 'edit_uri': '/edit/master/' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': '/edit/master' - }, { - 'repo_url': 'http://example.com/foo/', - 'edit_uri': 'edit/master/' - }, { - 'repo_url': 'http://example.com/foo', - 'edit_uri': 'edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '?query=edit/master' - }, { - 'repo_url': 'http://example.com/', - 'edit_uri': '?query=edit/master/' - }, { - 'repo_url': 'http://example.com', - 'edit_uri': '#edit/master' - }, { - 'repo_url': 'http://example.com/', - 'edit_uri': '#edit/master/' - } - ] - - expected = [ - 'http://github.com/mkdocs/mkdocs/edit/master/docs/sub1/non-index.md', - 'https://github.com/mkdocs/mkdocs/edit/master/docs/sub1/non-index.md', - None, - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/edit/master/sub1/non-index.md', - 'http://example.com/foo/edit/master/sub1/non-index.md', - 'http://example.com/foo/edit/master/sub1/non-index.md', - 'http://example.com?query=edit/master/sub1/non-index.md', - 'http://example.com/?query=edit/master/sub1/non-index.md', - 'http://example.com#edit/master/sub1/non-index.md', - 'http://example.com/#edit/master/sub1/non-index.md' - ] - - for i, c in enumerate(configs): - c['docs_dir'] = self.DOCS_DIR - cfg 
= load_config(**c) - fl = File('sub1\\non-index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) - pg = Page('Foo', fl, cfg) - self.assertEqual(pg.url, 'sub1/non-index/') - self.assertEqual(pg.edit_url, expected[i]) + def test_page_edit_url_windows(self): + self.test_page_edit_url( + paths={'testing.md': 'testing/', 'sub1\\non-index.md': 'sub1/non-index/'} + ) + + def test_page_edit_url_warning(self): + for case in [ + dict( + config={'edit_uri': 'edit/master'}, + edit_url='edit/master/testing.md', + warning="WARNING:mkdocs.structure.pages:edit_uri: " + "'edit/master/testing.md' is not a valid URL, it should include the http:// (scheme)", + ), + ]: + with self.subTest(case['config']): + with self.assertLogs('mkdocs') as cm: + cfg = load_config(**case['config']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) + pg = Page('Foo', fl, cfg) + self.assertEqual(pg.url, 'testing/') + self.assertEqual(pg.edit_url, case['edit_url']) + self.assertEqual(cm.output, [case['warning']]) def test_page_render(self): cfg = load_config() - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) pg.read_source(cfg) self.assertEqual(pg.content, None) self.assertEqual(pg.toc, []) pg.render(cfg, [fl]) - self.assertTrue(pg.content.startswith( - '

<h1 id="welcome-to-mkdocs">Welcome to MkDocs</h1>
\n' - )) - self.assertEqual(str(pg.toc).strip(), dedent(""" - Welcome to MkDocs - #welcome-to-mkdocs - Commands - #commands - Project layout - #project-layout - """)) + self.assertTrue( + pg.content.startswith('

<h1 id="welcome-to-mkdocs">Welcome to MkDocs</h1>
\n') + ) + self.assertEqual( + str(pg.toc).strip(), + dedent( + """ + Welcome to MkDocs - #welcome-to-mkdocs + Commands - #commands + Project layout - #project-layout + """ + ), + ) def test_missing_page(self): cfg = load_config() - fl = File('missing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('missing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) - self.assertRaises(OSError, pg.read_source, cfg) + with self.assertLogs('mkdocs') as cm: + with self.assertRaises(OSError): + pg.read_source(cfg) + self.assertEqual( + '\n'.join(cm.output), 'ERROR:mkdocs.structure.pages:File not found: missing.md' + ) class SourceDateEpochTests(unittest.TestCase): - def setUp(self): self.default = os.environ.get('SOURCE_DATE_EPOCH', None) os.environ['SOURCE_DATE_EPOCH'] = '0' def test_source_date_epoch(self): cfg = load_config() - fl = File('testing.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']) + fl = File('testing.md', cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) pg = Page('Foo', fl, cfg) self.assertEqual(pg.update_date, '1970-01-01') @@ -688,165 +730,520 @@ def tearDown(self): class RelativePathExtensionTests(unittest.TestCase): + def get_rendered_result( + self, *, content: str, files: list[str], logs: str = '', **kwargs + ) -> str: + cfg = load_config(docs_dir=DOCS_DIR, **kwargs) + fs = [File(f, cfg.docs_dir, cfg.site_dir, cfg.use_directory_urls) for f in files] + pg = Page('Foo', fs[0], cfg) - DOCS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../integration/subpages/docs') + with mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data=content)): + pg.read_source(cfg) + if logs: + with self.assertLogs('mkdocs.structure.pages') as cm: + pg.render(cfg, Files(fs)) + msgs = [f'{r.levelname}:{r.message}' for r in cm.records] + self.assertEqual('\n'.join(msgs), textwrap.dedent(logs).strip('\n')) + elif sys.version_info >= (3, 10): + with self.assertNoLogs('mkdocs.structure.pages'): + pg.render(cfg, Files(fs)) + else: + pg.render(cfg, Files(fs)) - def get_rendered_result(self, files): - cfg = load_config(docs_dir=self.DOCS_DIR) - fs = [] - for f in files: - fs.append(File(f.replace('/', os.sep), cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls'])) - pg = Page('Foo', fs[0], cfg) - pg.read_source(cfg) - pg.render(cfg, Files(fs)) - return pg.content + assert pg.content is not None + content = pg.content + if content.startswith('

<p>') and content.endswith('</p>
'): + content = content[3:-4] + return content - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](non-index.md)')) def test_relative_html_link(self): self.assertEqual( - self.get_rendered_result(['index.md', 'non-index.md']), - '

<p><a href="non-index">link</a></p>
' # No trailing / + self.get_rendered_result( + content='[link](non-index.md)', files=['index.md', 'non-index.md'] + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](non-index.md)', + files=['index.md', 'non-index.md'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](index.md)')) def test_relative_html_link_index(self): self.assertEqual( - self.get_rendered_result(['non-index.md', 'index.md']), - '

<p><a href="..">link</a></p>
' + self.get_rendered_result( + content='[link](index.md)', files=['non-index.md', 'index.md'] + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](index.md)', + files=['non-index.md', 'index.md'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](sub2/index.md)')) def test_relative_html_link_sub_index(self): self.assertEqual( - self.get_rendered_result(['index.md', 'sub2/index.md']), - '

<p><a href="sub2">link</a></p>
' # No trailing / + self.get_rendered_result( + content='[link](sub2/index.md)', files=['index.md', 'sub2/index.md'] + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](sub2/index.md)', + files=['index.md', 'sub2/index.md'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](sub2/non-index.md)')) def test_relative_html_link_sub_page(self): self.assertEqual( - self.get_rendered_result(['index.md', 'sub2/non-index.md']), - '

<p><a href="sub2/non-index">link</a></p>
' # No trailing / + self.get_rendered_result( + content='[link](sub2/non-index.md)', files=['index.md', 'sub2/non-index.md'] + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](sub2/non-index.md)', + files=['index.md', 'sub2/non-index.md'], + ), + 'link', + ) + + def test_relative_doc_link_without_extension(self): + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](bar/Dockerfile)', + files=['foo/bar.md', 'foo/bar/Dockerfile'], + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + content='[link](bar/Dockerfile)', + files=['foo/bar.md', 'foo/bar/Dockerfile'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](file%20name.md)')) def test_relative_html_link_with_encoded_space(self): self.assertEqual( - self.get_rendered_result(['index.md', 'file name.md']), - '

link

' + self.get_rendered_result( + content='[link](file%20name.md)', files=['index.md', 'file name.md'] + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](file name.md)')) def test_relative_html_link_with_unencoded_space(self): self.assertEqual( - self.get_rendered_result(['index.md', 'file name.md']), - '

link

' + self.get_rendered_result( + use_directory_urls=False, + content='[link](file name.md)', + files=['index.md', 'file name.md'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](../index.md)')) def test_relative_html_link_parent_index(self): self.assertEqual( - self.get_rendered_result(['sub2/non-index.md', 'index.md']), - '

<p><a href="../..">link</a></p>
' + self.get_rendered_result( + content='[link](../index.md)', files=['sub2/non-index.md', 'index.md'] + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](../index.md)', + files=['sub2/non-index.md', 'index.md'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](non-index.md#hash)')) def test_relative_html_link_hash(self): self.assertEqual( - self.get_rendered_result(['index.md', 'non-index.md']), - '

link

' + self.get_rendered_result( + content='[link](non-index.md#hash)', files=['index.md', 'non-index.md'] + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](sub2/index.md#hash)')) def test_relative_html_link_sub_index_hash(self): self.assertEqual( - self.get_rendered_result(['index.md', 'sub2/index.md']), - '

link

' + self.get_rendered_result( + content='[link](sub2/index.md#hash)', files=['index.md', 'sub2/index.md'] + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[link](sub2/index.md#hash)', + files=['index.md', 'sub2/index.md'], + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](sub2/non-index.md#hash)')) def test_relative_html_link_sub_page_hash(self): self.assertEqual( - self.get_rendered_result(['index.md', 'sub2/non-index.md']), - '

link

' + self.get_rendered_result( + content='[link](sub2/non-index.md#hash)', files=['index.md', 'sub2/non-index.md'] + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](#hash)')) def test_relative_html_link_hash_only(self): - self.assertEqual( - self.get_rendered_result(['index.md']), - '

<p><a href="#hash">link</a></p>
' - ) + for use_directory_urls in True, False: + self.assertEqual( + self.get_rendered_result( + use_directory_urls=use_directory_urls, + content='[link](#hash)', + files=['index.md'], + ), + 'link', + ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='![image](image.png)')) def test_relative_image_link_from_homepage(self): - self.assertEqual( - self.get_rendered_result(['index.md', 'image.png']), - '

<p><img alt="image" src="image.png" /></p>
' # no opening ./ - ) + for use_directory_urls in True, False: + self.assertEqual( + self.get_rendered_result( + use_directory_urls=use_directory_urls, + content='![image](image.png)', + files=['index.md', 'image.png'], + ), + 'image', # no opening ./ + ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='![image](../image.png)')) def test_relative_image_link_from_subpage(self): self.assertEqual( - self.get_rendered_result(['sub2/non-index.md', 'image.png']), - '

<p><img alt="image" src="../../image.png" /></p>

' + self.get_rendered_result( + content='![image](../image.png)', files=['sub2/non-index.md', 'image.png'] + ), + 'image', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='![image](image.png)')) def test_relative_image_link_from_sibling(self): self.assertEqual( - self.get_rendered_result(['non-index.md', 'image.png']), - '

<p><img alt="image" src="../image.png" /></p>

' + self.get_rendered_result( + content='![image](image.png)', files=['non-index.md', 'image.png'] + ), + 'image', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='![image](image.png)', + files=['non-index.md', 'image.png'], + ), + 'image', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='*__not__ a link*.')) def test_no_links(self): self.assertEqual( - self.get_rendered_result(['index.md']), - '

<p><em><strong>not</strong> a link</em>.</p>

' + self.get_rendered_result(content='*__not__ a link*.', files=['index.md']), + 'not a link.', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[link](non-existant.md)')) - def test_bad_relative_html_link(self): - with self.assertLogs('mkdocs', level='WARNING') as cm: - self.assertEqual( - self.get_rendered_result(['index.md']), - '

<p><a href="non-existant.md">link</a></p>

' - ) + def test_bad_relative_doc_link(self): + self.assertEqual( + self.get_rendered_result( + content='[link](non-existent.md)', + files=['index.md'], + logs="WARNING:Doc file 'index.md' contains a relative link 'non-existent.md', but the target is not found among documentation files.", + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + validation=dict(links=dict(not_found='info')), + content='[link](../non-existent.md)', + files=['sub/index.md'], + logs="INFO:Doc file 'sub/index.md' contains a relative link '../non-existent.md', but the target 'non-existent.md' is not found among documentation files.", + ), + 'link', + ) + + def test_relative_slash_link_with_suggestion(self): self.assertEqual( - cm.output, - ["WARNING:mkdocs.structure.pages:Documentation file 'index.md' contains a link " - "to 'non-existant.md' which is not found in the documentation files."] + self.get_rendered_result( + content='[link](../about/)', + files=['foo/index.md', 'about.md'], + logs="INFO:Doc file 'foo/index.md' contains an unrecognized relative link '../about/', it was left as is. Did you mean '../about.md'?", + ), + 'link', + ) + self.assertEqual( + self.get_rendered_result( + validation=dict(links=dict(unrecognized_links='warn')), + content='[link](../#example)', + files=['foo/bar.md', 'index.md'], + logs="WARNING:Doc file 'foo/bar.md' contains an unrecognized relative link '../#example', it was left as is. Did you mean '../index.md#example'?", + ), + 'link', + ) + + def test_self_anchor_link_with_suggestion(self): + self.assertEqual( + self.get_rendered_result( + content='[link](./#test)', + files=['index.md'], + logs="INFO:Doc file 'index.md' contains an unrecognized relative link './#test', it was left as is. Did you mean '#test'?", + ), + 'link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[external](http://example.com/index.md)')) def test_external_link(self): self.assertEqual( - self.get_rendered_result(['index.md']), - '

<p><a href="http://example.com/index.md">external</a></p>

' + self.get_rendered_result( + content='[external](http://example.com/index.md)', files=['index.md'] + ), + 'external', + ) + + def test_absolute_link_with_suggestion(self): + self.assertEqual( + self.get_rendered_result( + content='[absolute link](/path/to/file.md)', + files=['index.md', 'path/to/file.md'], + logs="INFO:Doc file 'index.md' contains an absolute link '/path/to/file.md', it was left as is. Did you mean 'path/to/file.md'?", + ), + 'absolute link', + ) + self.assertEqual( + self.get_rendered_result( + use_directory_urls=False, + content='[absolute link](/path/to/file/)', + files=['path/index.md', 'path/to/file.md'], + logs="INFO:Doc file 'path/index.md' contains an absolute link '/path/to/file/', it was left as is.", + ), + 'absolute link', + ) + self.assertEqual( + self.get_rendered_result( + content='[absolute link](/path/to/file)', + files=['path/index.md', 'path/to/file.md'], + logs="INFO:Doc file 'path/index.md' contains an absolute link '/path/to/file', it was left as is. Did you mean 'to/file.md'?", + ), + 'absolute link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[absolute link](/path/to/file.md)')) def test_absolute_link(self): self.assertEqual( - self.get_rendered_result(['index.md']), - '

<p><a href="/path/to/file.md">absolute link</a></p>

' + self.get_rendered_result( + validation=dict(links=dict(absolute_links='warn')), + content='[absolute link](/path/to/file.md)', + files=['index.md'], + logs="WARNING:Doc file 'index.md' contains an absolute link '/path/to/file.md', it was left as is.", + ), + 'absolute link', + ) + self.assertEqual( + self.get_rendered_result( + validation=dict(links=dict(absolute_links='ignore')), + content='[absolute link](/path/to/file.md)', + files=['index.md'], + ), + 'absolute link', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='[absolute local path](\\image.png)')) - def test_absolute_win_local_path(self): + def test_image_link_with_suggestion(self): + self.assertEqual( + self.get_rendered_result( + content='![image](../image.png)', + files=['foo/bar.md', 'foo/image.png'], + logs="WARNING:Doc file 'foo/bar.md' contains a relative link '../image.png', but the target 'image.png' is not found among documentation files. Did you mean 'image.png'?", + ), + 'image', + ) self.assertEqual( - self.get_rendered_result(['index.md']), - '

<p><a href="\\image.png">absolute local path</a></p>

' + self.get_rendered_result( + content='![image](/image.png)', + files=['foo/bar.md', 'image.png'], + logs="INFO:Doc file 'foo/bar.md' contains an absolute link '/image.png', it was left as is. Did you mean '../image.png'?", + ), + 'image', ) - @mock.patch('mkdocs.structure.pages.open', mock.mock_open(read_data='')) + def test_absolute_win_local_path(self): + for use_directory_urls in True, False: + self.assertEqual( + self.get_rendered_result( + use_directory_urls=use_directory_urls, + content='[absolute local path](\\image.png)', + files=['index.md'], + logs="INFO:Doc file 'index.md' contains an absolute link '\\image.png', it was left as is.", + ), + 'absolute local path', + ) + def test_email_link(self): self.assertEqual( - self.get_rendered_result(['index.md']), + self.get_rendered_result(content='', files=['index.md']), # Markdown's default behavior is to obscure email addresses by entity-encoding them. - # The following is equivalent to: '

mail@example.com

' - '

mail@example.com' + 'mail@' - 'example.com

' + 'example.com', + ) + + def test_invalid_email_link(self): + self.assertEqual( + self.get_rendered_result( + content='[contact](mail@example.com)', + files=['index.md'], + logs="WARNING:Doc file 'index.md' contains a relative link 'mail@example.com', but the target is not found among documentation files. Did you mean 'mailto:mail@example.com'?", + ), + 'contact', + ) + + def test_possible_target_uris(self): + def test(paths, expected='', exp_true=None, exp_false=None): + """Test that `possible_target_uris` yields expected values, for use_directory_urls = true and false""" + for use_directory_urls, expected_paths in ( + (True, exp_true or expected), + (False, exp_false or expected), + ): + with self.subTest(paths, use_directory_urls=use_directory_urls): + src_path, dest_path = paths + f = File(src_path, '', '', use_directory_urls) + actual = _RelativePathTreeprocessor._possible_target_uris( + f, dest_path, use_directory_urls + ) + self.assertEqual(list(actual), expected_paths.split(', ')) + + test(('index.md', 'index.md'), expected='index.md') + test(('index.md', 'foo/bar.md'), expected='foo/bar.md') + test( + ('index.md', 'foo/bar'), + expected='foo/bar, foo/bar/index.md, foo/bar/README.md, foo/bar.md', + ) + + test(('index.md', 'foo/bar.html'), expected='foo/bar.html, foo/bar.md') + test( + ('foo.md', 'foo/bar.html'), + exp_true='foo/bar.html, foo/bar.md, foo/foo/bar.html, foo/foo/bar.md', + exp_false='foo/bar.html, foo/bar.md', + ) + + test(('foo.md', 'index.md'), exp_true='index.md, foo/index.md', exp_false='index.md') + test(('foo.md', 'foo.md'), exp_true='foo.md, foo/foo.md', exp_false='foo.md') + test(('foo.md', 'bar.md'), exp_true='bar.md, foo/bar.md', exp_false='bar.md') + test( + ('foo.md', 'foo/bar.md'), exp_true='foo/bar.md, foo/foo/bar.md', exp_false='foo/bar.md' + ) + test( + ('foo.md', 'foo'), + exp_true='foo, foo/index.md, foo/README.md, foo.md, foo/foo, foo/foo/index.md, foo/foo/README.md, foo/foo.md', + exp_false='foo, foo/index.md, foo/README.md, foo.md', + ) + test( + ('foo.md', 'foo/bar'), + exp_true='foo/bar, foo/bar/index.md, foo/bar/README.md, foo/bar.md, foo/foo/bar, foo/foo/bar/index.md, foo/foo/bar/README.md, foo/foo/bar.md', + exp_false='foo/bar, foo/bar/index.md, foo/bar/README.md, foo/bar.md', + ) + test( + ('foo.md', 'foo/bar/'), + exp_true='foo/bar, foo/bar/index.md, foo/bar/README.md, foo/bar.md, foo/foo/bar, foo/foo/bar/index.md, foo/foo/bar/README.md, foo/foo/bar.md', + exp_false='foo/bar, foo/bar/index.md, foo/bar/README.md', + ) + + test( + ('foo.md', 'foo.html'), + exp_true='foo.html, foo.md, foo/foo.html, foo/foo.md', + exp_false='foo.html, foo.md', + ) + test( + ('foo.md', '../foo/'), + exp_true='../foo, foo, foo/index.md, foo/README.md, foo.md', + exp_false='../foo', + ) + test( + ('foo.md', 'foo/'), + exp_true='foo, foo/index.md, foo/README.md, foo.md, foo/foo, foo/foo/index.md, foo/foo/README.md, foo/foo.md', + exp_false='foo, foo/index.md, foo/README.md', + ) + test(('foo/index.md', 'index.md'), expected='foo/index.md') + test(('foo/index.md', 'foo/bar.html'), expected='foo/foo/bar.html, foo/foo/bar.md') + test(('foo/index.md', '../foo.html'), expected='foo.html, foo.md') + test(('foo/index.md', '../'), expected='., index.md, README.md') + test( + ('foo/bar.md', 'index.md'), + exp_true='foo/index.md, foo/bar/index.md', + exp_false='foo/index.md', + ) + test( + ('foo/bar.md', 'foo.md'), + exp_true='foo/foo.md, foo/bar/foo.md', + exp_false='foo/foo.md', + ) + test( + ('foo/bar.md', 'bar.md'), + exp_true='foo/bar.md, foo/bar/bar.md', + 
exp_false='foo/bar.md', + ) + test( + ('foo/bar.md', 'foo/bar.md'), + exp_true='foo/foo/bar.md, foo/bar/foo/bar.md', + exp_false='foo/foo/bar.md', + ) + test( + ('foo/bar.md', 'foo'), + exp_true='foo/foo, foo/foo/index.md, foo/foo/README.md, foo/foo.md, foo/bar/foo, foo/bar/foo/index.md, foo/bar/foo/README.md, foo/bar/foo.md', + exp_false='foo/foo, foo/foo/index.md, foo/foo/README.md, foo/foo.md', + ) + test( + ('foo/bar.md', 'foo/bar'), + exp_true='foo/foo/bar, foo/foo/bar/index.md, foo/foo/bar/README.md, foo/foo/bar.md, foo/bar/foo/bar, foo/bar/foo/bar/index.md, foo/bar/foo/bar/README.md, foo/bar/foo/bar.md', + exp_false='foo/foo/bar, foo/foo/bar/index.md, foo/foo/bar/README.md, foo/foo/bar.md', + ) + test( + ('foo/bar.md', 'foo.html'), + exp_true='foo/foo.html, foo/foo.md, foo/bar/foo.html, foo/bar/foo.md', + exp_false='foo/foo.html, foo/foo.md', + ) + test( + ('foo/bar.md', 'foo/bar.html'), + exp_true='foo/foo/bar.html, foo/foo/bar.md, foo/bar/foo/bar.html, foo/bar/foo/bar.md', + exp_false='foo/foo/bar.html, foo/foo/bar.md', + ) + test( + ('foo/bar.md', '../foo/bar.html'), + exp_true='foo/bar.html, foo/bar.md, foo/foo/bar.html, foo/foo/bar.md', + exp_false='foo/bar.html, foo/bar.md', + ) + test( + ('foo/bar.md', '../foo'), + exp_true='foo, foo/index.md, foo/README.md, foo.md, foo/foo, foo/foo/index.md, foo/foo/README.md, foo/foo.md', + exp_false='foo, foo/index.md, foo/README.md, foo.md', + ) + test( + ('foo/bar.md', '../'), + exp_true='., index.md, README.md, foo, foo/index.md, foo/README.md', + exp_false='., index.md, README.md', + ) + + for src in 'foo/bar.md', 'foo.md', 'foo/index.md': + test((src, '/foo'), expected='foo, foo/index.md, foo/README.md, foo.md') + test((src, '/foo/bar.md'), expected='foo/bar.md') + test((src, '/foo/bar.html'), expected='foo/bar.html, foo/bar.md') + + for dest in '', '.', './': + test(('index.md', dest), expected='., index.md') + test(('foo/bar.md', dest), expected='foo, foo/bar.md') + + test( + ('foo/bar.md', '../test.png'), + exp_true='test.png, test.png.md, foo/test.png, foo/test.png.md', + exp_false='test.png, test.png.md', ) diff --git a/mkdocs/tests/structure/toc_tests.py b/mkdocs/tests/structure/toc_tests.py index 591d4f7..2f83e1c 100644 --- a/mkdocs/tests/structure/toc_tests.py +++ b/mkdocs/tests/structure/toc_tests.py @@ -1,140 +1,172 @@ #!/usr/bin/env python import unittest + from mkdocs.structure.toc import get_toc from mkdocs.tests.base import dedent, get_markdown_toc class TableOfContentsTests(unittest.TestCase): - def test_indented_toc(self): - md = dedent(""" - # Heading 1 - ## Heading 2 - ### Heading 3 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + ### Heading 3 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 1) def test_indented_toc_html(self): - md = dedent(""" - # Heading 1 - ## Heading 2 - ## Heading 3 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + ## Heading 3 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 1) def test_flat_toc(self): - md = 
dedent(""" - # Heading 1 - # Heading 2 - # Heading 3 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - """) + md = dedent( + """ + # Heading 1 + # Heading 2 + # Heading 3 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 3) def test_flat_h2_toc(self): - md = dedent(""" - ## Heading 1 - ## Heading 2 - ## Heading 3 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - """) + md = dedent( + """ + ## Heading 1 + ## Heading 2 + ## Heading 3 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 3) def test_mixed_toc(self): - md = dedent(""" - # Heading 1 - ## Heading 2 - # Heading 3 - ### Heading 4 - ### Heading 5 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - Heading 4 - #heading-4 - Heading 5 - #heading-5 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + # Heading 3 + ### Heading 4 + ### Heading 5 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + Heading 4 - #heading-4 + Heading 5 - #heading-5 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 2) def test_mixed_html(self): - md = dedent(""" - # Heading 1 - ## Heading 2 - # Heading 3 - ### Heading 4 - ### Heading 5 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - Heading 4 - #heading-4 - Heading 5 - #heading-5 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + # Heading 3 + ### Heading 4 + ### Heading 5 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + Heading 4 - #heading-4 + Heading 5 - #heading-5 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 2) def test_nested_anchor(self): - md = dedent(""" - # Heading 1 - ## Heading 2 - # Heading 3 - ### Heading 4 - ### Heading 5 - """) - expected = dedent(""" - Heading 1 - #heading-1 - Heading 2 - #heading-2 - Heading 3 - #heading-3 - Heading 4 - #heading-4 - Heading 5 - #heading-5 - """) + md = dedent( + """ + # Heading 1 + ## Heading 2 + # Heading 3 + ### Heading 4 + ### Heading 5 + """ + ) + expected = dedent( + """ + Heading 1 - #heading-1 + Heading 2 - #heading-2 + Heading 3 - #heading-3 + Heading 4 - #heading-4 + Heading 5 - #heading-5 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 2) def test_entityref(self): - md = dedent(""" - # Heading & 1 - ## Heading > 2 - ### Heading < 3 - """) - expected = dedent(""" - Heading & 1 - #heading-1 - Heading > 2 - #heading-2 - Heading < 3 - #heading-3 - """) + md = dedent( + """ + # Heading & 1 + ## Heading > 2 + ### Heading < 3 + """ + ) + expected = dedent( + """ + Heading & 1 - #heading-1 + Heading > 2 - #heading-2 + Heading < 3 - #heading-3 + """ + ) toc = get_toc(get_markdown_toc(md)) self.assertEqual(str(toc).strip(), expected) self.assertEqual(len(toc), 1) @@ -147,13 +179,15 @@ def test_charref(self): self.assertEqual(len(toc), 1) 
def test_level(self): - md = dedent(""" - # Heading 1 - ## Heading 1.1 - ### Heading 1.1.1 - ### Heading 1.1.2 - ## Heading 1.2 - """) + md = dedent( + """ + # Heading 1 + ## Heading 1.1 + ### Heading 1.1.1 + ### Heading 1.1.2 + ## Heading 1.2 + """ + ) toc = get_toc(get_markdown_toc(md)) def get_level_sequence(items): diff --git a/mkdocs/tests/theme_tests.py b/mkdocs/tests/theme_tests.py index 24e0ebb..4d123c6 100644 --- a/mkdocs/tests/theme_tests.py +++ b/mkdocs/tests/theme_tests.py @@ -1,9 +1,10 @@ import os -import tempfile import unittest from unittest import mock import mkdocs +from mkdocs.localization import parse_locale +from mkdocs.tests.base import tempdir from mkdocs.theme import Theme abs_path = os.path.abspath(os.path.dirname(__file__)) @@ -12,56 +13,56 @@ theme_dir = os.path.abspath(os.path.join(mkdocs_dir, 'themes')) -def get_vars(theme): - """ Return dict of theme vars. """ - return {k: theme[k] for k in iter(theme)} - - class ThemeTests(unittest.TestCase): - def test_simple_theme(self): theme = Theme(name='mkdocs') self.assertEqual( theme.dirs, - [os.path.join(theme_dir, 'mkdocs'), mkdocs_templates_dir] + [os.path.join(theme_dir, 'mkdocs'), mkdocs_templates_dir], ) self.assertEqual(theme.static_templates, {'404.html', 'sitemap.xml'}) - self.assertEqual(get_vars(theme), { - 'include_search_page': False, - 'search_index_only': False, - 'highlightjs': True, - 'hljs_style': 'github', - 'hljs_languages': [], - 'navigation_depth': 2, - 'nav_style': 'primary', - 'shortcuts': {'help': 191, 'next': 78, 'previous': 80, 'search': 83} - }) + self.assertEqual( + dict(theme), + { + 'name': 'mkdocs', + 'locale': parse_locale('en'), + 'include_search_page': False, + 'search_index_only': False, + 'analytics': {'gtag': None}, + 'highlightjs': True, + 'hljs_style': 'github', + 'hljs_languages': [], + 'navigation_depth': 2, + 'nav_style': 'primary', + 'shortcuts': {'help': 191, 'next': 78, 'previous': 80, 'search': 83}, + }, + ) - def test_custom_dir(self): - custom = tempfile.mkdtemp() + @tempdir() + def test_custom_dir(self, custom): theme = Theme(name='mkdocs', custom_dir=custom) self.assertEqual( theme.dirs, [ custom, os.path.join(theme_dir, 'mkdocs'), - mkdocs_templates_dir - ] + mkdocs_templates_dir, + ], ) - def test_custom_dir_only(self): - custom = tempfile.mkdtemp() + @tempdir() + def test_custom_dir_only(self, custom): theme = Theme(name=None, custom_dir=custom) self.assertEqual( theme.dirs, - [custom, mkdocs_templates_dir] + [custom, mkdocs_templates_dir], ) def static_templates(self): theme = Theme(name='mkdocs', static_templates='foo.html') self.assertEqual( theme.static_templates, - {'404.html', 'sitemap.xml', 'foo.html'} + {'404.html', 'sitemap.xml', 'foo.html'}, ) def test_vars(self): @@ -69,22 +70,25 @@ def test_vars(self): self.assertEqual(theme['foo'], 'bar') self.assertEqual(theme['baz'], True) self.assertTrue('new' not in theme) - self.assertRaises(KeyError, lambda t, k: t[k], theme, 'new') + with self.assertRaises(KeyError): + theme['new'] theme['new'] = 42 self.assertTrue('new' in theme) self.assertEqual(theme['new'], 42) - @mock.patch('mkdocs.utils.yaml_load', return_value=None) + @mock.patch('mkdocs.utils.yaml_load', return_value={}) def test_no_theme_config(self, m): theme = Theme(name='mkdocs') self.assertEqual(m.call_count, 1) self.assertEqual(theme.static_templates, {'sitemap.xml'}) def test_inherited_theme(self): - m = mock.Mock(side_effect=[ - {'extends': 'readthedocs', 'static_templates': ['child.html']}, - {'static_templates': ['parent.html']} - ]) + m = 
mock.Mock( + side_effect=[ + {'extends': 'readthedocs', 'static_templates': ['child.html']}, + {'static_templates': ['parent.html']}, + ] + ) with mock.patch('mkdocs.utils.yaml_load', m) as m: theme = Theme(name='mkdocs') self.assertEqual(m.call_count, 2) @@ -93,9 +97,7 @@ def test_inherited_theme(self): [ os.path.join(theme_dir, 'mkdocs'), os.path.join(theme_dir, 'readthedocs'), - mkdocs_templates_dir - ] - ) - self.assertEqual( - theme.static_templates, {'sitemap.xml', 'child.html', 'parent.html'} + mkdocs_templates_dir, + ], ) + self.assertEqual(theme.static_templates, {'sitemap.xml', 'child.html', 'parent.html'}) diff --git a/mkdocs/tests/utils/babel_stub_tests.py b/mkdocs/tests/utils/babel_stub_tests.py new file mode 100644 index 0000000..ec81047 --- /dev/null +++ b/mkdocs/tests/utils/babel_stub_tests.py @@ -0,0 +1,55 @@ +import unittest + +from mkdocs.utils.babel_stub import Locale, UnknownLocaleError + + +class BabelStubTests(unittest.TestCase): + def test_locale_language_only(self): + locale = Locale('es') + self.assertEqual(locale.language, 'es') + self.assertEqual(locale.territory, '') + self.assertEqual(str(locale), 'es') + + def test_locale_language_territory(self): + locale = Locale('es', 'ES') + self.assertEqual(locale.language, 'es') + self.assertEqual(locale.territory, 'ES') + self.assertEqual(str(locale), 'es_ES') + + def test_parse_locale_language_only(self): + locale = Locale.parse('fr', '_') + self.assertEqual(locale.language, 'fr') + self.assertEqual(locale.territory, '') + self.assertEqual(str(locale), 'fr') + + def test_parse_locale_language_territory(self): + locale = Locale.parse('fr_FR', '_') + self.assertEqual(locale.language, 'fr') + self.assertEqual(locale.territory, 'FR') + self.assertEqual(str(locale), 'fr_FR') + + def test_parse_locale_language_territory_sep(self): + locale = Locale.parse('fr-FR', '-') + self.assertEqual(locale.language, 'fr') + self.assertEqual(locale.territory, 'FR') + self.assertEqual(str(locale), 'fr_FR') + + def test_parse_locale_bad_type(self): + with self.assertRaises(TypeError): + Locale.parse(['list'], '_') + + def test_parse_locale_invalid_characters(self): + with self.assertRaises(ValueError): + Locale.parse('42', '_') + + def test_parse_locale_bad_format(self): + with self.assertRaises(ValueError): + Locale.parse('en-GB', '_') + + def test_parse_locale_bad_format_sep(self): + with self.assertRaises(ValueError): + Locale.parse('en_GB', '-') + + def test_parse_locale_unknown_locale(self): + with self.assertRaises(UnknownLocaleError): + Locale.parse('foo', '_') diff --git a/mkdocs/tests/utils/ghp_import_tests.py b/mkdocs/tests/utils/ghp_import_tests.py deleted file mode 100644 index 84b4451..0000000 --- a/mkdocs/tests/utils/ghp_import_tests.py +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/env python - - -from unittest import mock -import os -import subprocess -import tempfile -import unittest -import shutil - -from mkdocs.utils import ghp_import - - -class UtilsTests(unittest.TestCase): - - @mock.patch('subprocess.call', auto_spec=True) - @mock.patch('subprocess.Popen', auto_spec=True) - def test_try_rebase(self, mock_popen, mock_call): - - popen = mock.Mock() - mock_popen.return_value = popen - popen.communicate.return_value = ( - '4c82346e4b1b816be89dd709d35a6b169aa3df61\n', '') - popen.wait.return_value = 0 - - ghp_import.try_rebase('origin', 'gh-pages') - - mock_popen.assert_called_once_with( - ['git', 'rev-list', '--max-count=1', 'origin/gh-pages'], - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - 
mock_call.assert_called_once_with( - ['git', 'update-ref', 'refs/heads/gh-pages', - '4c82346e4b1b816be89dd709d35a6b169aa3df61']) - - @mock.patch('subprocess.Popen', auto_spec=True) - def test_get_prev_commit(self, mock_popen): - - popen = mock.Mock() - mock_popen.return_value = popen - popen.communicate.return_value = ( - b'4c82346e4b1b816be89dd709d35a6b169aa3df61\n', '') - popen.wait.return_value = 0 - - result = ghp_import.get_prev_commit('test-branch') - - self.assertEqual(result, '4c82346e4b1b816be89dd709d35a6b169aa3df61') - mock_popen.assert_called_once_with( - ['git', 'rev-list', '--max-count=1', 'test-branch', '--'], - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - @mock.patch('subprocess.Popen', auto_spec=True) - def test_get_config(self, mock_popen): - - popen = mock.Mock() - mock_popen.return_value = popen - popen.communicate.return_value = ( - b'Dougal Matthews\n', '') - - result = ghp_import.get_config('user.name') - - self.assertEqual(result, 'Dougal Matthews') - mock_popen.assert_called_once_with( - ['git', 'config', 'user.name'], - stdout=subprocess.PIPE, stdin=subprocess.PIPE) - - @mock.patch('mkdocs.utils.ghp_import.get_prev_commit') - @mock.patch('mkdocs.utils.ghp_import.get_config') - def test_start_commit(self, mock_get_config, mock_get_prev_commit): - - pipe = mock.Mock() - mock_get_config.side_effect = ['username', 'email'] - mock_get_prev_commit.return_value = 'SHA' - - ghp_import.start_commit(pipe, 'test-branch', 'test-message') - - mock_get_prev_commit.assert_called_once_with('test-branch') - self.assertEqual(pipe.stdin.write.call_count, 5) - - @mock.patch('mkdocs.utils.ghp_import.try_rebase', return_value=True) - @mock.patch('mkdocs.utils.ghp_import.get_prev_commit', return_value='sha') - @mock.patch('mkdocs.utils.ghp_import.get_config', return_value='config') - @mock.patch('subprocess.call', auto_spec=True) - @mock.patch('subprocess.Popen', auto_spec=True) - def test_ghp_import(self, mock_popen, mock_call, mock_get_config, - mock_get_prev_commit, mock_try_rebase): - - directory = tempfile.mkdtemp() - open(os.path.join(directory, 'file'), 'a').close() - - try: - popen = mock.Mock() - mock_popen.return_value = popen - popen.communicate.return_value = ('', '') - popen.wait.return_value = 0 - - ghp_import.ghp_import(directory, "test message", - remote='fake-remote-name', - branch='fake-branch-name') - - self.assertEqual(mock_popen.call_count, 2) - self.assertEqual(mock_call.call_count, 0) - finally: - shutil.rmtree(directory) - - @mock.patch('mkdocs.utils.ghp_import.try_rebase', return_value=True) - @mock.patch('mkdocs.utils.ghp_import.get_prev_commit', return_value='sha') - @mock.patch('mkdocs.utils.ghp_import.get_config', return_value='config') - @mock.patch('mkdocs.utils.ghp_import.run_import') - @mock.patch('subprocess.call', auto_spec=True) - @mock.patch('subprocess.Popen', auto_spec=True) - def test_ghp_import_error(self, mock_popen, mock_call, mock_get_config, - mock_run_import, mock_get_prev_commit, mock_try_rebase): - - directory = tempfile.mkdtemp() - open(os.path.join(directory, 'file'), 'a').close() - - try: - popen = mock.Mock() - mock_popen.return_value = popen - - error_string = 'TestError123' - popen.communicate.return_value = ('', error_string) - popen.wait.return_value = 1 - - result, ghp_error = ghp_import.ghp_import(directory, "test message", - remote='fake-remote-name', - branch='fake-branch-name') - - self.assertEqual(result, False) - self.assertEqual(ghp_error, error_string) - finally: - shutil.rmtree(directory) 
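# A minimal usage sketch for the script_tag filter exercised by the new
# mkdocs/tests/utils/templates_tests.py below. It reuses only names that file itself
# imports (mkdocs.tests.base.load_config, mkdocs.utils.templates); the markup described
# in the loop comment is an expectation based on standard <script> attributes, not a
# verbatim quote from this patch.
import yaml

from mkdocs.tests.base import load_config
from mkdocs.utils import templates

cfg_yaml = '''
extra_javascript:
  - some_plain_javascript.js
  - path: scripts/async_module.mjs
    type: module
    async: true
'''

config = load_config(**yaml.safe_load(cfg_yaml))
for item in config.extra_javascript:
    # Each entry should render as a <script> tag whose src is resolved against
    # base_url (e.g. "here/some_plain_javascript.js"), with type="module", async
    # and defer added only when the entry declares them.
    print(templates.script_tag_filter({'page': None, 'base_url': 'here'}, item))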
diff --git a/mkdocs/tests/utils/templates_tests.py b/mkdocs/tests/utils/templates_tests.py new file mode 100644 index 0000000..c6a8daa --- /dev/null +++ b/mkdocs/tests/utils/templates_tests.py @@ -0,0 +1,49 @@ +import unittest +from textwrap import dedent + +import yaml + +from mkdocs.tests.base import load_config +from mkdocs.utils import templates + + +class UtilsTemplatesTests(unittest.TestCase): + def test_script_tag(self): + cfg_yaml = dedent( + ''' + extra_javascript: + - some_plain_javascript.js + - implicitly_as_module.mjs + - path: explicitly_as_module.mjs + type: module + - path: deferred_plain.js + defer: true + - path: scripts/async_module.mjs + type: module + async: true + - path: 'aaaaaa/"my script".mjs' + type: module + async: true + defer: true + - path: plain.mjs + ''' + ) + config = load_config(**yaml.safe_load(cfg_yaml)) + config.extra_javascript.append('plain_string.mjs') + + self.assertEqual( + [ + str(templates.script_tag_filter({'page': None, 'base_url': 'here'}, item)) + for item in config.extra_javascript + ], + [ + '', + '', + '', + '', + '', + '', + '', + '', + ], + ) diff --git a/mkdocs/tests/utils/utils_tests.py b/mkdocs/tests/utils/utils_tests.py index 609b9f0..a72044e 100644 --- a/mkdocs/tests/utils/utils_tests.py +++ b/mkdocs/tests/utils/utils_tests.py @@ -1,298 +1,324 @@ #!/usr/bin/env python - -from unittest import mock +import dataclasses +import datetime +import logging import os -import unittest -import tempfile -import shutil +import posixpath import stat -import datetime +import unittest +from unittest import mock -from mkdocs import utils, exceptions -from mkdocs.structure.files import File -from mkdocs.structure.pages import Page -from mkdocs.tests.base import dedent, load_config +from mkdocs import exceptions, utils +from mkdocs.tests.base import dedent, tempdir +from mkdocs.utils import meta + +BASEYML = """ +INHERIT: parent.yml +foo: bar +baz: + sub1: replaced + sub3: new +deep1: + deep2-1: + deep3-1: replaced +""" +PARENTYML = """ +foo: foo +baz: + sub1: 1 + sub2: 2 +deep1: + deep2-1: + deep3-1: foo + deep3-2: bar + deep2-2: baz +""" class UtilsTests(unittest.TestCase): - def test_html_path(self): - expected_results = { - 'index.md': 'index.html', - 'api-guide.md': 'api-guide/index.html', - 'api-guide/index.md': 'api-guide/index.html', - 'api-guide/testing.md': 'api-guide/testing/index.html', - } - for file_path, expected_html_path in expected_results.items(): - html_path = utils.get_html_path(file_path) - self.assertEqual(html_path, expected_html_path) - - def test_url_path(self): - expected_results = { - 'index.md': '/', - 'api-guide.md': '/api-guide/', - 'api-guide/index.md': '/api-guide/', - 'api-guide/testing.md': '/api-guide/testing/', - } - for file_path, expected_html_path in expected_results.items(): - html_path = utils.get_url_path(file_path) - self.assertEqual(html_path, expected_html_path) - def test_is_markdown_file(self): expected_results = { 'index.md': True, - 'index.MARKDOWN': True, + 'index.markdown': True, + 'index.MARKDOWN': False, 'index.txt': False, - 'indexmd': False + 'indexmd': False, } for path, expected_result in expected_results.items(): - is_markdown = utils.is_markdown_file(path) - self.assertEqual(is_markdown, expected_result) - - def test_is_html_file(self): - expected_results = { - 'index.htm': True, - 'index.HTML': True, - 'index.txt': False, - 'indexhtml': False - } - for path, expected_result in expected_results.items(): - is_html = utils.is_html_file(path) - self.assertEqual(is_html, expected_result) - - def 
test_create_media_urls(self): - - expected_results = { - 'https://media.cdn.org/jq.js': [ - 'https://media.cdn.org/jq.js', - 'https://media.cdn.org/jq.js', - 'https://media.cdn.org/jq.js' - ], - 'http://media.cdn.org/jquery.js': [ - 'http://media.cdn.org/jquery.js', - 'http://media.cdn.org/jquery.js', - 'http://media.cdn.org/jquery.js' - ], - '//media.cdn.org/jquery.js': [ - '//media.cdn.org/jquery.js', - '//media.cdn.org/jquery.js', - '//media.cdn.org/jquery.js' - ], - 'media.cdn.org/jquery.js': [ - 'media.cdn.org/jquery.js', - 'media.cdn.org/jquery.js', - '../media.cdn.org/jquery.js' - ], - 'local/file/jquery.js': [ - 'local/file/jquery.js', - 'local/file/jquery.js', - '../local/file/jquery.js' - ], - 'local\\windows\\file\\jquery.js': [ - 'local/windows/file/jquery.js', - 'local/windows/file/jquery.js', - '../local/windows/file/jquery.js' - ], - 'image.png': [ - 'image.png', - 'image.png', - '../image.png' - ], - 'style.css?v=20180308c': [ - 'style.css?v=20180308c', - 'style.css?v=20180308c', - '../style.css?v=20180308c' - ], - '#some_id': [ - '#some_id', - '#some_id', - '#some_id' - ] - } - - cfg = load_config(use_directory_urls=False) - pages = [ - Page('Home', File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg), - Page('About', File('about.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg), - Page('FooBar', File('foo/bar.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg) - ] - - for i, page in enumerate(pages): - urls = utils.create_media_urls(expected_results.keys(), page) - self.assertEqual([v[i] for v in expected_results.values()], urls) - - def test_create_media_urls_use_directory_urls(self): - - expected_results = { - 'https://media.cdn.org/jq.js': [ - 'https://media.cdn.org/jq.js', - 'https://media.cdn.org/jq.js', - 'https://media.cdn.org/jq.js' - ], - 'http://media.cdn.org/jquery.js': [ - 'http://media.cdn.org/jquery.js', - 'http://media.cdn.org/jquery.js', - 'http://media.cdn.org/jquery.js' - ], - '//media.cdn.org/jquery.js': [ - '//media.cdn.org/jquery.js', - '//media.cdn.org/jquery.js', - '//media.cdn.org/jquery.js' - ], - 'media.cdn.org/jquery.js': [ - 'media.cdn.org/jquery.js', - '../media.cdn.org/jquery.js', - '../../media.cdn.org/jquery.js' - ], - 'local/file/jquery.js': [ - 'local/file/jquery.js', - '../local/file/jquery.js', - '../../local/file/jquery.js' - ], - 'local\\windows\\file\\jquery.js': [ - 'local/windows/file/jquery.js', - '../local/windows/file/jquery.js', - '../../local/windows/file/jquery.js' - ], - 'image.png': [ - 'image.png', - '../image.png', - '../../image.png' - ], - 'style.css?v=20180308c': [ - 'style.css?v=20180308c', - '../style.css?v=20180308c', - '../../style.css?v=20180308c' - ], - '#some_id': [ - '#some_id', - '#some_id', - '#some_id' - ] - } - - cfg = load_config(use_directory_urls=True) - pages = [ - Page('Home', File('index.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg), - Page('About', File('about.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg), - Page('FooBar', File('foo/bar.md', cfg['docs_dir'], cfg['site_dir'], cfg['use_directory_urls']), cfg) - ] - - for i, page in enumerate(pages): - urls = utils.create_media_urls(expected_results.keys(), page) - self.assertEqual([v[i] for v in expected_results.values()], urls) + with self.subTest(path): + is_markdown = utils.is_markdown_file(path) + self.assertEqual(is_markdown, expected_result) + + def test_get_relative_url(self): + for case in [ + dict(url='foo/bar', 
other='foo', expected='bar'), + dict(url='foo/bar.txt', other='foo', expected='bar.txt'), + dict(url='foo', other='foo/bar', expected='..'), + dict(url='foo', other='foo/bar.txt', expected='.'), + dict(url='foo/../../bar', other='.', expected='bar'), + dict(url='foo/../../bar', other='foo', expected='../bar'), + dict(url='foo//./bar/baz', other='foo/bar/baz', expected='.'), + dict(url='a/b/.././../c', other='.', expected='c'), + dict(url='a/b/c/d/ee', other='a/b/c/d/e', expected='../ee'), + dict(url='a/b/c/d/ee', other='a/b/z/d/e', expected='../../../c/d/ee'), + dict(url='foo', other='bar.', expected='foo'), + dict(url='foo', other='bar./', expected='../foo'), + dict(url='foo', other='foo/bar./', expected='..'), + dict(url='foo', other='foo/bar./.', expected='..'), + dict(url='foo', other='foo/bar././', expected='..'), + dict(url='foo/', other='foo/bar././', expected='../'), + dict(url='foo', other='foo', expected='.'), + dict(url='.foo', other='.foo', expected='.foo'), + dict(url='.foo/', other='.foo', expected='.foo/'), + dict(url='.foo', other='.foo/', expected='.'), + dict(url='.foo/', other='.foo/', expected='./'), + dict(url='///', other='', expected='./'), + dict(url='a///', other='', expected='a/'), + dict(url='a///', other='a', expected='./'), + dict(url='.', other='here', expected='..'), + dict(url='..', other='here', expected='..'), + dict(url='../..', other='here', expected='..'), + dict(url='../../a', other='here', expected='../a'), + dict(url='..', other='here.txt', expected='.'), + dict(url='a', other='', expected='a'), + dict(url='a', other='..', expected='a'), + dict(url='a', other='b', expected='../a'), + # The dots are considered a file. Documenting a long-standing bug: + dict(url='a', other='b/..', expected='../a'), + dict(url='a', other='b/../..', expected='a'), + dict(url='a/..../b', other='a/../b', expected='../a/..../b'), + dict(url='a/я/b', other='a/я/c', expected='../b'), + dict(url='a/я/b', other='a/яя/c', expected='../../я/b'), + ]: + url, other, expected = case['url'], case['other'], case['expected'] + with self.subTest(url=url, other=other): + # Leading slash intentionally ignored + self.assertEqual(utils.get_relative_url(url, other), expected) + self.assertEqual(utils.get_relative_url('/' + url, other), expected) + self.assertEqual(utils.get_relative_url(url, '/' + other), expected) + self.assertEqual(utils.get_relative_url('/' + url, '/' + other), expected) + + def test_get_relative_url_empty(self): + for url in ['', '.', '/.']: + for other in ['', '.', '/', '/.']: + with self.subTest(url=url, other=other): + self.assertEqual(utils.get_relative_url(url, other), '.') + + self.assertEqual(utils.get_relative_url('/', ''), './') + self.assertEqual(utils.get_relative_url('/', '/'), './') + self.assertEqual(utils.get_relative_url('/', '.'), './') + self.assertEqual(utils.get_relative_url('/', '/.'), './') + + def test_normalize_url(self): + def test(path, base, expected): + self.assertEqual(utils.normalize_url(path, _Page(base)), expected) + + # Absolute paths and anchors are unaffected. 
+ for base in 'about.html', 'foo/bar.html', 'index.html', '', 'about/', 'foo/bar/': + test('https://media.cdn.org/jq.js', base, 'https://media.cdn.org/jq.js') + test('http://media.cdn.org/jquery.js', base, 'http://media.cdn.org/jquery.js') + test('//media.cdn.org/jquery.js', base, '//media.cdn.org/jquery.js') + test('#some_id', base, '#some_id') + + path = 'media.cdn.org/jquery.js' + test(path, '', 'media.cdn.org/jquery.js') + test(path, 'index.html', 'media.cdn.org/jquery.js') + test(path, 'about.html', 'media.cdn.org/jquery.js') + test(path, 'about/', '../media.cdn.org/jquery.js') + test(path, 'foo/bar.html', '../media.cdn.org/jquery.js') + test(path, 'foo/bar/', '../../media.cdn.org/jquery.js') + + path = 'local/file/jquery.js' + test(path, '', 'local/file/jquery.js') + test(path, 'index.html', 'local/file/jquery.js') + test(path, 'about.html', 'local/file/jquery.js') + test(path, 'about/', '../local/file/jquery.js') + test(path, 'foo/bar.html', '../local/file/jquery.js') + test(path, 'foo/bar/', '../../local/file/jquery.js') + + path = '../../../../above/jquery.js' + test(path, '', '../../../../above/jquery.js') + test(path, 'index.html', '../../../../above/jquery.js') + test(path, 'about.html', '../../../../above/jquery.js') + test(path, 'about/', '../../../../../above/jquery.js') + test(path, 'foo/bar.html', '../../../../../above/jquery.js') + test(path, 'foo/bar/', '../../../../../../above/jquery.js') + + path = '../some/dir/' + test(path, '', '../some/dir/') + test(path, 'index.html', '../some/dir/') + test(path, 'about.html', '../some/dir/') + test(path, 'about/', '../../some/dir/') + test(path, 'foo/bar.html', '../../some/dir/') + test(path, 'foo/bar/', '../../../some/dir/') + + path = 'image.png' + test(path, '', 'image.png') + test(path, 'index.html', 'image.png') + test(path, 'about.html', 'image.png') + test(path, 'about/', '../image.png') + test(path, 'foo/bar.html', '../image.png') + test(path, 'foo/bar/', '../../image.png') + + path = 'style.css?v=20180308c' + test(path, '', 'style.css?v=20180308c') + test(path, 'index.html', 'style.css?v=20180308c') + test(path, 'about.html', 'style.css?v=20180308c') + test(path, 'about/', '../style.css?v=20180308c') + test(path, 'foo/bar.html', '../style.css?v=20180308c') + test(path, 'foo/bar/', '../../style.css?v=20180308c') + + # TODO: This shouldn't pass on Linux + # @unittest.skipUnless(sys.platform.startswith("win"), "requires Windows") + def test_normalize_url_windows(self): + def test(path, base, expected): + self.assertEqual(utils.normalize_url(path, _Page(base)), expected) + + with self.assertLogs('mkdocs', level='WARNING'): + path = 'local\\windows\\file\\jquery.js' + test(path, '', 'local/windows/file/jquery.js') + test(path, 'about/', '../local/windows/file/jquery.js') + test(path, 'foo/bar/', '../../local/windows/file/jquery.js') def test_reduce_list(self): self.assertEqual( utils.reduce_list([1, 2, 3, 4, 5, 5, 2, 4, 6, 7, 8]), - [1, 2, 3, 4, 5, 6, 7, 8] + [1, 2, 3, 4, 5, 6, 7, 8], ) - def test_get_themes(self): + def test_insort(self): + a = [1, 2, 3] + utils.insort(a, 5) + self.assertEqual(a, [1, 2, 3, 5]) + utils.insort(a, -1) + self.assertEqual(a, [-1, 1, 2, 3, 5]) + utils.insort(a, 2) + self.assertEqual(a, [-1, 1, 2, 2, 3, 5]) + utils.insort(a, 4) + self.assertEqual(a, [-1, 1, 2, 2, 3, 4, 5]) + + def test_insort_key(self): + a = [(1, 'a'), (1, 'b'), (2, 'c')] + utils.insort(a, (1, 'a'), key=lambda v: v[0]) + self.assertEqual(a, [(1, 'a'), (1, 'b'), (1, 'a'), (2, 'c')]) - self.assertEqual( - 
sorted(utils.get_theme_names()), - ['mkdocs', 'readthedocs']) + def test_get_themes(self): + themes = utils.get_theme_names() + self.assertIn('mkdocs', themes) + self.assertIn('readthedocs', themes) - @mock.patch('pkg_resources.iter_entry_points', autospec=True) + @mock.patch('mkdocs.utils.entry_points', autospec=True) def test_get_theme_dir(self, mock_iter): - path = 'some/path' theme = mock.Mock() theme.name = 'mkdocs2' - theme.dist.key = 'mkdocs2' + theme.dist.name = 'mkdocs2' theme.load().__file__ = os.path.join(path, '__init__.py') - mock_iter.return_value = iter([theme]) + mock_iter.return_value = [theme] self.assertEqual(utils.get_theme_dir(theme.name), os.path.abspath(path)) def test_get_theme_dir_keyerror(self): + with self.assertRaises(KeyError): + utils.get_theme_dir('nonexistanttheme') - self.assertRaises(KeyError, utils.get_theme_dir, 'nonexistanttheme') - - @mock.patch('pkg_resources.iter_entry_points', autospec=True) + @mock.patch('mkdocs.utils.entry_points', autospec=True) def test_get_theme_dir_importerror(self, mock_iter): - theme = mock.Mock() theme.name = 'mkdocs2' - theme.dist.key = 'mkdocs2' + theme.dist.name = 'mkdocs2' theme.load.side_effect = ImportError() - mock_iter.return_value = iter([theme]) + mock_iter.return_value = [theme] - self.assertRaises(ImportError, utils.get_theme_dir, theme.name) + with self.assertRaises(ImportError): + utils.get_theme_dir(theme.name) - @mock.patch('pkg_resources.iter_entry_points', autospec=True) + @mock.patch('mkdocs.utils.entry_points', autospec=True) def test_get_themes_warning(self, mock_iter): - theme1 = mock.Mock() theme1.name = 'mkdocs2' - theme1.dist.key = 'mkdocs2' + theme1.dist.name = 'mkdocs2' theme1.load().__file__ = "some/path1" theme2 = mock.Mock() theme2.name = 'mkdocs2' - theme2.dist.key = 'mkdocs3' + theme2.dist.name = 'mkdocs3' theme2.load().__file__ = "some/path2" - mock_iter.return_value = iter([theme1, theme2]) + mock_iter.return_value = [theme1, theme2] + with self.assertLogs('mkdocs') as cm: + theme_names = utils.get_theme_names() self.assertEqual( - sorted(utils.get_theme_names()), - sorted(['mkdocs2', ])) - - @mock.patch('pkg_resources.iter_entry_points', autospec=True) - @mock.patch('pkg_resources.get_entry_map', autospec=True) - def test_get_themes_error(self, mock_get, mock_iter): + '\n'.join(cm.output), + "WARNING:mkdocs.utils:A theme named 'mkdocs2' is provided by the Python " + "packages 'mkdocs3' and 'mkdocs2'. 
The one in 'mkdocs3' will be used.", + ) + self.assertCountEqual(theme_names, ['mkdocs2']) + @mock.patch('mkdocs.utils.entry_points', autospec=True) + def test_get_themes_error(self, mock_iter): theme1 = mock.Mock() theme1.name = 'mkdocs' - theme1.dist.key = 'mkdocs' + theme1.dist.name = 'mkdocs' theme1.load().__file__ = "some/path1" theme2 = mock.Mock() theme2.name = 'mkdocs' - theme2.dist.key = 'mkdocs2' + theme2.dist.name = 'mkdocs2' theme2.load().__file__ = "some/path2" - mock_iter.return_value = iter([theme1, theme2]) - mock_get.return_value = {'mkdocs': theme1, } - - self.assertRaises(exceptions.ConfigurationError, utils.get_theme_names) - - def test_nest_paths(self): + mock_iter.return_value = [theme1, theme2] - j = os.path.join + with self.assertRaisesRegex( + exceptions.ConfigurationError, + "The theme 'mkdocs' is a builtin theme but the package 'mkdocs2' " + "attempts to provide a theme with the same name.", + ): + utils.get_theme_names() - result = utils.nest_paths([ - 'index.md', - j('user-guide', 'configuration.md'), - j('user-guide', 'styling-your-docs.md'), - j('user-guide', 'writing-your-docs.md'), - j('about', 'contributing.md'), - j('about', 'license.md'), - j('about', 'release-notes.md'), - ]) + def test_nest_paths(self, j=posixpath.join): + result = utils.nest_paths( + [ + 'index.md', + j('user-guide', 'configuration.md'), + j('user-guide', 'styling-your-docs.md'), + j('user-guide', 'writing-your-docs.md'), + j('about', 'contributing.md'), + j('about', 'license.md'), + j('about', 'release-notes.md'), + ] + ) self.assertEqual( result, [ 'index.md', - {'User guide': [ - j('user-guide', 'configuration.md'), - j('user-guide', 'styling-your-docs.md'), - j('user-guide', 'writing-your-docs.md')]}, - {'About': [ - j('about', 'contributing.md'), - j('about', 'license.md'), - j('about', 'release-notes.md')]} - ] + { + 'User guide': [ + j('user-guide', 'configuration.md'), + j('user-guide', 'styling-your-docs.md'), + j('user-guide', 'writing-your-docs.md'), + ] + }, + { + 'About': [ + j('about', 'contributing.md'), + j('about', 'license.md'), + j('about', 'release-notes.md'), + ] + }, + ], ) - def test_unicode_yaml(self): + def test_nest_paths_native(self): + self.test_nest_paths(os.path.join) + def test_unicode_yaml(self): yaml_src = dedent( ''' key: value @@ -305,73 +331,113 @@ def test_unicode_yaml(self): self.assertTrue(isinstance(config['key'], str)) self.assertTrue(isinstance(config['key2'][0], str)) - def test_copy_files(self): - src_paths = [ - 'foo.txt', - 'bar.txt', - 'baz.txt', - ] - dst_paths = [ - 'foo.txt', - 'foo/', # ensure src filename is appended - 'foo/bar/baz.txt' # ensure missing dirs are created - ] - expected = [ - 'foo.txt', - 'foo/bar.txt', - 'foo/bar/baz.txt', + @mock.patch.dict(os.environ, {'VARNAME': 'Hello, World!', 'BOOLVAR': 'false'}) + def test_env_var_in_yaml(self): + yaml_src = dedent( + ''' + key1: !ENV VARNAME + key2: !ENV UNDEFINED + key3: !ENV [UNDEFINED, default] + key4: !ENV [UNDEFINED, VARNAME, default] + key5: !ENV BOOLVAR + ''' + ) + config = utils.yaml_load(yaml_src) + self.assertEqual(config['key1'], 'Hello, World!') + self.assertIsNone(config['key2']) + self.assertEqual(config['key3'], 'default') + self.assertEqual(config['key4'], 'Hello, World!') + self.assertIs(config['key5'], False) + + @tempdir(files={'base.yml': BASEYML, 'parent.yml': PARENTYML}) + def test_yaml_inheritance(self, tdir): + expected = { + 'foo': 'bar', + 'baz': { + 'sub1': 'replaced', + 'sub2': 2, + 'sub3': 'new', + }, + 'deep1': { + 'deep2-1': { + 'deep3-1': 
'replaced', + 'deep3-2': 'bar', + }, + 'deep2-2': 'baz', + }, + } + with open(os.path.join(tdir, 'base.yml')) as fd: + result = utils.yaml_load(fd) + self.assertEqual(result, expected) + + @tempdir(files={'base.yml': BASEYML}) + def test_yaml_inheritance_missing_parent(self, tdir): + with open(os.path.join(tdir, 'base.yml')) as fd: + with self.assertRaises(exceptions.ConfigurationError): + utils.yaml_load(fd) + + @tempdir() + @tempdir() + def test_copy_files(self, src_dir, dst_dir): + cases = [ + dict( + src_path='foo.txt', + dst_path='foo.txt', + expected='foo.txt', + ), + dict( + src_path='bar.txt', + dst_path='foo/', # ensure src filename is appended + expected='foo/bar.txt', + ), + dict( + src_path='baz.txt', + dst_path='foo/bar/baz.txt', # ensure missing dirs are created + expected='foo/bar/baz.txt', + ), ] - src_dir = tempfile.mkdtemp() - dst_dir = tempfile.mkdtemp() - - try: - for i, src in enumerate(src_paths): + for case in cases: + src, dst, expected = case['src_path'], case['dst_path'], case['expected'] + with self.subTest(src): src = os.path.join(src_dir, src) with open(src, 'w') as f: f.write('content') - dst = os.path.join(dst_dir, dst_paths[i]) + dst = os.path.join(dst_dir, dst) utils.copy_file(src, dst) - self.assertTrue(os.path.isfile(os.path.join(dst_dir, expected[i]))) - finally: - shutil.rmtree(src_dir) - shutil.rmtree(dst_dir) - - def test_copy_files_without_permissions(self): - src_paths = [ - 'foo.txt', - 'bar.txt', - 'baz.txt', - ] - expected = [ - 'foo.txt', - 'bar.txt', - 'baz.txt', + self.assertTrue(os.path.isfile(os.path.join(dst_dir, expected))) + + @tempdir() + @tempdir() + def test_copy_files_without_permissions(self, src_dir, dst_dir): + cases = [ + dict(src_path='foo.txt', expected='foo.txt'), + dict(src_path='bar.txt', expected='bar.txt'), + dict(src_path='baz.txt', expected='baz.txt'), ] - src_dir = tempfile.mkdtemp() - dst_dir = tempfile.mkdtemp() - try: - for i, src in enumerate(src_paths): - src = os.path.join(src_dir, src) - with open(src, 'w') as f: - f.write('content') - # Set src file to read-only - os.chmod(src, stat.S_IRUSR) - utils.copy_file(src, dst_dir) - self.assertTrue(os.path.isfile(os.path.join(dst_dir, expected[i]))) - self.assertNotEqual(os.stat(src).st_mode, os.stat(os.path.join(dst_dir, expected[i])).st_mode) - # While src was read-only, dst must remain writable - self.assertTrue(os.access(os.path.join(dst_dir, expected[i]), os.W_OK)) + for case in cases: + src, expected = case['src_path'], case['expected'] + with self.subTest(src): + src = os.path.join(src_dir, src) + with open(src, 'w') as f: + f.write('content') + # Set src file to read-only + os.chmod(src, stat.S_IRUSR) + utils.copy_file(src, dst_dir) + self.assertTrue(os.path.isfile(os.path.join(dst_dir, expected))) + self.assertNotEqual( + os.stat(src).st_mode, os.stat(os.path.join(dst_dir, expected)).st_mode + ) + # While src was read-only, dst must remain writable + self.assertTrue(os.access(os.path.join(dst_dir, expected), os.W_OK)) finally: - for src in src_paths: + for case in cases: # Undo read-only so we can delete temp files - src = os.path.join(src_dir, src) + src = os.path.join(src_dir, case['src_path']) if os.path.exists(src): os.chmod(src, stat.S_IRUSR | stat.S_IWUSR) - shutil.rmtree(src_dir) - shutil.rmtree(dst_dir) def test_mm_meta_data(self): doc = dedent( @@ -387,21 +453,21 @@ def test_mm_meta_data(self): """ ) self.assertEqual( - utils.meta.get_data(doc), + meta.get_data(doc), ( "Doc body", { 'title': 'Foo Bar', 'date': '2018-07-10', 'summary': 'Line one Line 
two', - 'tags': 'foo bar' - } - ) + 'tags': 'foo bar', + }, + ), ) def test_mm_meta_data_blank_first_line(self): doc = '\nfoo: bar\nDoc body' - self.assertEqual(utils.meta.get_data(doc), (doc.lstrip(), {})) + self.assertEqual(meta.get_data(doc), (doc.lstrip(), {})) def test_yaml_meta_data(self): doc = dedent( @@ -419,16 +485,16 @@ def test_yaml_meta_data(self): """ ) self.assertEqual( - utils.meta.get_data(doc), + meta.get_data(doc), ( "Doc body", { 'Title': 'Foo Bar', 'Date': datetime.date(2018, 7, 10), 'Summary': 'Line one Line two', - 'Tags': ['foo', 'bar'] - } - ) + 'Tags': ['foo', 'bar'], + }, + ), ) def test_yaml_meta_data_not_dict(self): @@ -440,7 +506,7 @@ def test_yaml_meta_data_not_dict(self): Doc body """ ) - self.assertEqual(utils.meta.get_data(doc), (doc, {})) + self.assertEqual(meta.get_data(doc), (doc, {})) def test_yaml_meta_data_invalid(self): doc = dedent( @@ -451,7 +517,7 @@ def test_yaml_meta_data_invalid(self): Doc body """ ) - self.assertEqual(utils.meta.get_data(doc), (doc, {})) + self.assertEqual(meta.get_data(doc), (doc, {})) def test_no_meta_data(self): doc = dedent( @@ -459,4 +525,73 @@ def test_no_meta_data(self): Doc body """ ) - self.assertEqual(utils.meta.get_data(doc), (doc, {})) + self.assertEqual(meta.get_data(doc), (doc, {})) + + +class LogCounterTests(unittest.TestCase): + def setUp(self): + self.log = logging.getLogger('dummy') + self.log.propagate = False + self.log.setLevel(1) + self.counter = utils.CountHandler() + self.log.addHandler(self.counter) + + def tearDown(self): + self.log.removeHandler(self.counter) + + def test_default_values(self): + self.assertEqual(self.counter.get_counts(), []) + + def test_count_critical(self): + self.assertEqual(self.counter.get_counts(), []) + self.log.critical('msg') + self.assertEqual(self.counter.get_counts(), [('CRITICAL', 1)]) + + def test_count_error(self): + self.assertEqual(self.counter.get_counts(), []) + self.log.error('msg') + self.assertEqual(self.counter.get_counts(), [('ERROR', 1)]) + + def test_count_warning(self): + self.assertEqual(self.counter.get_counts(), []) + self.log.warning('msg') + self.assertEqual(self.counter.get_counts(), [('WARNING', 1)]) + + def test_count_info(self): + self.assertEqual(self.counter.get_counts(), []) + self.log.info('msg') + self.assertEqual(self.counter.get_counts(), [('INFO', 1)]) + + def test_count_debug(self): + self.assertEqual(self.counter.get_counts(), []) + self.log.debug('msg') + self.assertEqual(self.counter.get_counts(), [('DEBUG', 1)]) + + def test_count_multiple(self): + self.assertEqual(self.counter.get_counts(), []) + self.log.warning('msg 1') + self.assertEqual(self.counter.get_counts(), [('WARNING', 1)]) + self.log.warning('msg 2') + self.assertEqual(self.counter.get_counts(), [('WARNING', 2)]) + self.log.debug('msg 3') + self.assertEqual(self.counter.get_counts(), [('WARNING', 2), ('DEBUG', 1)]) + self.log.error('mdg 4') + self.assertEqual(self.counter.get_counts(), [('ERROR', 1), ('WARNING', 2), ('DEBUG', 1)]) + + def test_log_level(self): + self.assertEqual(self.counter.get_counts(), []) + self.counter.setLevel(logging.ERROR) + self.log.error('counted') + self.log.warning('not counted') + self.log.info('not counted') + self.assertEqual(self.counter.get_counts(), [('ERROR', 1)]) + self.counter.setLevel(logging.WARNING) + self.log.error('counted') + self.log.warning('counted') + self.log.info('not counted') + self.assertEqual(self.counter.get_counts(), [('ERROR', 2), ('WARNING', 1)]) + + +@dataclasses.dataclass +class _Page: + url: str diff --git 
a/mkdocs/theme.py b/mkdocs/theme.py index 3f1c380..467d26e 100644 --- a/mkdocs/theme.py +++ b/mkdocs/theme.py @@ -1,34 +1,44 @@ +from __future__ import annotations + +import logging import os +from typing import Any, Collection, MutableMapping + import jinja2 -import logging -from mkdocs import utils -from mkdocs.utils import filters +from mkdocs import localization, utils from mkdocs.config.base import ValidationError +from mkdocs.utils import templates log = logging.getLogger(__name__) -log.addFilter(utils.warning_filter) -class Theme: +class Theme(MutableMapping[str, Any]): """ A Theme object. - Keywords: - + Parameters: name: The name of the theme as defined by its entrypoint. - custom_dir: User defined directory for custom templates. - static_templates: A list of templates to render as static pages. All other keywords are passed as-is and made available as a key/value mapping. - """ - def __init__(self, name=None, **user_config): + def __init__( + self, + name: str | None = None, + *, + custom_dir: str | None = None, + static_templates: Collection[str] = (), + locale: str | None = None, + **user_config, + ) -> None: self.name = name - self._vars = {} + self._custom_dir = custom_dir + _vars: dict[str, Any] = {'name': name, 'locale': 'en'} + # _vars is soft-deprecated, intentionally hide it from mypy. + setattr(self, '_vars', _vars) # MkDocs provided static templates are always included package_dir = os.path.abspath(os.path.dirname(__file__)) @@ -38,39 +48,67 @@ def __init__(self, name=None, **user_config): # Build self.dirs from various sources in order of precedence self.dirs = [] - if 'custom_dir' in user_config: - self.dirs.append(user_config.pop('custom_dir')) + if custom_dir is not None: + self.dirs.append(custom_dir) - if self.name: + if name: self._load_theme_config(name) # Include templates provided directly by MkDocs (outside any theme) self.dirs.append(mkdocs_templates) # Handle remaining user configs. 
Override theme configs (if set) - self.static_templates.update(user_config.pop('static_templates', [])) - self._vars.update(user_config) + self.static_templates.update(static_templates) + _vars.update(user_config) - def __repr__(self): - return "{}(name='{}', dirs={}, static_templates={}, {})".format( - self.__class__.__name__, self.name, self.dirs, list(self.static_templates), - ', '.join('{}={}'.format(k, repr(v)) for k, v in self._vars.items()) + # Validate locale and convert to Locale object + _vars['locale'] = localization.parse_locale( + locale if locale is not None else _vars['locale'] ) - def __getitem__(self, key): - return self._vars[key] + name: str | None + + @property + def locale(self) -> localization.Locale: + return self['locale'] + + @property + def custom_dir(self) -> str | None: + return self._custom_dir + + dirs: list[str] + + static_templates: set[str] + + def __repr__(self) -> str: + return "{}(name={!r}, dirs={!r}, static_templates={!r}, {})".format( + self.__class__.__name__, + self.name, + self.dirs, + self.static_templates, + ', '.join(f'{k}={v!r}' for k, v in self.items()), + ) + + def __getitem__(self, key: str) -> Any: + return self._vars[key] # type: ignore[attr-defined] + + def __setitem__(self, key: str, value): + self._vars[key] = value # type: ignore[attr-defined] + + def __delitem__(self, key: str): + del self._vars[key] # type: ignore[attr-defined] - def __setitem__(self, key, value): - self._vars[key] = value + def __contains__(self, item: object) -> bool: + return item in self._vars # type: ignore[attr-defined] - def __contains__(self, item): - return item in self._vars + def __len__(self): + return len(self._vars) # type: ignore[attr-defined] def __iter__(self): - return iter(self._vars) + return iter(self._vars) # type: ignore[attr-defined] - def _load_theme_config(self, name): - """ Recursively load theme and any parent themes. """ + def _load_theme_config(self, name: str) -> None: + """Recursively load theme and any parent themes.""" theme_dir = utils.get_theme_dir(name) self.dirs.append(theme_dir) @@ -79,35 +117,35 @@ def _load_theme_config(self, name): file_path = os.path.join(theme_dir, 'mkdocs_theme.yml') with open(file_path, 'rb') as f: theme_config = utils.yaml_load(f) - if theme_config is None: - theme_config = {} except OSError as e: log.debug(e) raise ValidationError( - "The theme '{}' does not appear to have a configuration file. " - "Please upgrade to a current version of the theme.".format(name) + f"The theme '{name}' does not appear to have a configuration file. " + f"Please upgrade to a current version of the theme." ) - log.debug("Loaded theme configuration for '%s' from '%s': %s", name, file_path, theme_config) + log.debug(f"Loaded theme configuration for '{name}' from '{file_path}': {theme_config}") parent_theme = theme_config.pop('extends', None) if parent_theme: themes = utils.get_theme_names() if parent_theme not in themes: raise ValidationError( - "The theme '{}' inherits from '{}', which does not appear to be installed. " - "The available installed themes are: {}".format(name, parent_theme, ', '.join(themes)) + f"The theme '{name}' inherits from '{parent_theme}', which does not appear to be installed. 
" + f"The available installed themes are: {', '.join(themes)}" ) self._load_theme_config(parent_theme) self.static_templates.update(theme_config.pop('static_templates', [])) - self._vars.update(theme_config) + self._vars.update(theme_config) # type: ignore[attr-defined] - def get_env(self): - """ Return a Jinja environment for the theme. """ + def get_env(self) -> jinja2.Environment: + """Return a Jinja environment for the theme.""" loader = jinja2.FileSystemLoader(self.dirs) - env = jinja2.Environment(loader=loader) - env.filters['tojson'] = filters.tojson - env.filters['url'] = filters.url_filter + # No autoreload because editing a template in the middle of a build is not useful. + env = jinja2.Environment(loader=loader, auto_reload=False) + env.filters['url'] = templates.url_filter + env.filters['script_tag'] = templates.script_tag_filter + localization.install_translations(env, self.locale, self.dirs) return env diff --git a/mkdocs/themes/babel.cfg b/mkdocs/themes/babel.cfg new file mode 100644 index 0000000..6e03abf --- /dev/null +++ b/mkdocs/themes/babel.cfg @@ -0,0 +1,6 @@ +[jinja2: **.html] +ignore_tags = script,style +include_attrs = alt title summary + +[extractors] +jinja2 = jinja2.ext:babel_extract diff --git a/mkdocs/themes/mkdocs/404.html b/mkdocs/themes/mkdocs/404.html index c45fda8..c0503d7 100644 --- a/mkdocs/themes/mkdocs/404.html +++ b/mkdocs/themes/mkdocs/404.html @@ -5,7 +5,7 @@

404

-

Page not found

+

{% trans %}Page not found{% endtrans %}

diff --git a/mkdocs/themes/mkdocs/base.html b/mkdocs/themes/mkdocs/base.html index 1961aa4..2b077bd 100644 --- a/mkdocs/themes/mkdocs/base.html +++ b/mkdocs/themes/mkdocs/base.html @@ -1,11 +1,11 @@ - + {%- block site_meta %} - {% if page and page.is_homepage %}{% endif %} + {% if page and page.is_homepage %}{% endif %} {% if config.site_author %}{% endif %} {% if page and page.canonical_url %}{% endif %} {% if config.site_favicon %} @@ -21,35 +21,41 @@ {%- if config.theme.highlightjs %} - + {%- endif %} - {%- for path in config['extra_css'] %} + {%- for path in config.extra_css %} {%- endfor %} {%- endblock %} {%- block libs %} - - - {%- if config.theme.highlightjs %} - + {%- for lang in config.theme.hljs_languages %} - + {%- endfor %} - + {%- endif %} {%- endblock %} {%- block analytics %} - {%- if config.google_analytics %} + {%- if config.theme.analytics.gtag %} + + + {%- elif config.google_analytics %} {%- endif %} @@ -101,10 +107,10 @@ +

{{ nav_item.title }}

+ + {%- for nav_item in nav_item.children %} + + {%- endfor %} + {%- elif config.theme.include_homepage_in_sidebar or (not nav_item == nav.homepage) %} - - - + + + {%- endif %} - {%- endfor %} - {%- endblock %} + {%- endfor %} + {%- endblock %}
- {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #} -