github.com/sphinx-doc/sphinx.git
-rw-r--r--  CHANGES                                      |  10
-rw-r--r--  sphinx/addnodes.py                           |  34
-rw-r--r--  sphinx/application.py                        | 199
-rw-r--r--  sphinx/config.py                             |  93
-rw-r--r--  sphinx/deprecation.py                        |  35
-rw-r--r--  sphinx/environment/__init__.py               |   2
-rw-r--r--  sphinx/environment/adapters/indexentries.py  |   5
-rw-r--r--  sphinx/environment/collectors/__init__.py    |  19
-rw-r--r--  sphinx/errors.py                             |  22
-rw-r--r--  sphinx/events.py                             |  23
-rw-r--r--  sphinx/ext/autosummary/generate.py           |   5
-rw-r--r--  sphinx/extension.py                          |  16
-rw-r--r--  sphinx/highlighting.py                       |  30
-rw-r--r--  sphinx/io.py                                 |  64
-rw-r--r--  sphinx/jinja2glue.py                         |  60
-rw-r--r--  sphinx/locale/__init__.py                    |  81
-rw-r--r--  sphinx/parsers.py                            |  24
-rw-r--r--  sphinx/registry.py                           | 157
-rw-r--r--  sphinx/roles.py                              | 117
-rw-r--r--  sphinx/search/__init__.py                    |  85
-rw-r--r--  sphinx/search/da.py                          |  12
-rw-r--r--  sphinx/search/de.py                          |  12
-rw-r--r--  sphinx/search/en.py                          |  12
-rw-r--r--  sphinx/search/es.py                          |  12
-rw-r--r--  sphinx/search/fi.py                          |  12
-rw-r--r--  sphinx/search/fr.py                          |  12
-rw-r--r--  sphinx/search/hu.py                          |  12
-rw-r--r--  sphinx/search/it.py                          |  12
-rw-r--r--  sphinx/search/ja.py                          |  58
-rw-r--r--  sphinx/search/nl.py                          |  12
-rw-r--r--  sphinx/search/no.py                          |  12
-rw-r--r--  sphinx/search/pt.py                          |  12
-rw-r--r--  sphinx/search/ro.py                          |  13
-rw-r--r--  sphinx/search/ru.py                          |  12
-rw-r--r--  sphinx/search/sv.py                          |  13
-rw-r--r--  sphinx/search/tr.py                          |  13
-rw-r--r--  sphinx/search/zh.py                          |  18
-rw-r--r--  sphinx/theming.py                            |  57
-rw-r--r--  sphinx/transforms/__init__.py                |   3
-rw-r--r--  sphinx/util/__init__.py                      |   3
-rw-r--r--  sphinx/versioning.py                         |  26
41 files changed, 560 insertions, 869 deletions
diff --git a/CHANGES b/CHANGES
index 4204b361c..e8c0561ab 100644
--- a/CHANGES
+++ b/CHANGES
@@ -68,7 +68,7 @@ Bugs fixed
Testing
--------
-Release 2.3.1 (in development)
+Release 2.3.2 (in development)
==============================
Dependencies
@@ -89,6 +89,14 @@ Bugs fixed
Testing
--------
+Release 2.3.1 (released Dec 22, 2019)
+=====================================
+
+Bugs fixed
+----------
+
+* #6936: sphinx-autogen: raises AttributeError
+
Release 2.3.0 (released Dec 15, 2019)
=====================================
diff --git a/sphinx/addnodes.py b/sphinx/addnodes.py
index e0e49a9b9..7b1edc018 100644
--- a/sphinx/addnodes.py
+++ b/sphinx/addnodes.py
@@ -9,6 +9,7 @@
"""
import warnings
+from typing import Any, Dict, List, Sequence
from docutils import nodes
@@ -16,8 +17,7 @@ from sphinx.deprecation import RemovedInSphinx40Warning
if False:
# For type annotation
- from typing import Any, Dict, List, Sequence # NOQA
- from sphinx.application import Sphinx # NOQA
+ from sphinx.application import Sphinx
class translatable(nodes.Node):
@@ -34,18 +34,15 @@ class translatable(nodes.Node):
Because they are used at final step; extraction.
"""
- def preserve_original_messages(self):
- # type: () -> None
+ def preserve_original_messages(self) -> None:
"""Preserve original translatable messages."""
raise NotImplementedError
- def apply_translated_message(self, original_message, translated_message):
- # type: (str, str) -> None
+ def apply_translated_message(self, original_message: str, translated_message: str) -> None:
"""Apply translated message."""
raise NotImplementedError
- def extract_original_messages(self):
- # type: () -> Sequence[str]
+ def extract_original_messages(self) -> Sequence[str]:
"""Extract translation messages.
:returns: list of extracted messages or messages generator
@@ -61,8 +58,7 @@ class not_smartquotable:
class toctree(nodes.General, nodes.Element, translatable):
"""Node for inserting a "TOC tree"."""
- def preserve_original_messages(self):
- # type: () -> None
+ def preserve_original_messages(self) -> None:
# toctree entries
rawentries = self.setdefault('rawentries', [])
for title, docname in self['entries']:
@@ -73,8 +69,7 @@ class toctree(nodes.General, nodes.Element, translatable):
if self.get('caption'):
self['rawcaption'] = self['caption']
- def apply_translated_message(self, original_message, translated_message):
- # type: (str, str) -> None
+ def apply_translated_message(self, original_message: str, translated_message: str) -> None:
# toctree entries
for i, (title, docname) in enumerate(self['entries']):
if title == original_message:
@@ -84,8 +79,7 @@ class toctree(nodes.General, nodes.Element, translatable):
if self.get('rawcaption') == original_message:
self['caption'] = translated_message
- def extract_original_messages(self):
- # type: () -> List[str]
+ def extract_original_messages(self) -> List[str]:
messages = [] # type: List[str]
# toctree entries
@@ -143,8 +137,7 @@ class desc_type(nodes.Part, nodes.Inline, nodes.FixedTextElement):
class desc_returns(desc_type):
"""Node for a "returns" annotation (a la -> in Python)."""
- def astext(self):
- # type: () -> str
+ def astext(self) -> str:
return ' -> ' + super().astext()
@@ -165,8 +158,7 @@ class desc_optional(nodes.Part, nodes.Inline, nodes.FixedTextElement):
"""Node for marking optional parts of the parameter list."""
child_text_separator = ', '
- def astext(self):
- # type: () -> str
+ def astext(self) -> str:
return '[' + super().astext() + ']'
@@ -313,8 +305,7 @@ class abbreviation(nodes.abbreviation):
.. deprecated:: 2.0
"""
- def __init__(self, rawsource='', text='', *children, **attributes):
- # type: (str, str, *nodes.Node, **Any) -> None
+ def __init__(self, rawsource: str = '', text: str = '', *children, **attributes) -> None:
warnings.warn("abbrevition node for Sphinx was replaced by docutils'.",
RemovedInSphinx40Warning, stacklevel=2)
@@ -325,8 +316,7 @@ class manpage(nodes.Inline, nodes.FixedTextElement):
"""Node for references to manpages."""
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.add_node(toctree)
app.add_node(desc)
app.add_node(desc_signature)
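
The addnodes.py hunks above show the pattern this commit applies throughout: Python 2 style ``# type:`` comments become inline PEP 484 annotations, typing imports move from the ``if False:`` type-checking block to module level, and only imports that would be circular at runtime (here sphinx.application) stay behind the guard and are written as quoted forward references. A minimal sketch of that before/after shape, using a hypothetical Messages class rather than code taken from the diff:

# Before: type comments, with all typing names hidden in an ``if False:``
# block that only a type checker ever evaluates:
#
#     if False:
#         # For type annotation
#         from typing import Sequence  # NOQA
#
#     def extract_original_messages(self):
#         # type: () -> Sequence[str]
#         ...
#
# After: real annotations.  Typing imports are unconditional; only the
# circular-at-runtime import (sphinx.application) stays guarded and is
# referenced as a string.  The Messages class is hypothetical.
from typing import Any, Dict, Sequence

if False:
    # For type annotation
    from sphinx.application import Sphinx


class Messages:
    def __init__(self, messages: Sequence[str]) -> None:
        self.messages = list(messages)

    def extract_original_messages(self) -> Sequence[str]:
        return self.messages


def setup(app: "Sphinx") -> Dict[str, Any]:
    # "Sphinx" stays quoted because the class is imported only for typing.
    return {'version': '1.0', 'parallel_read_safe': True}
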
diff --git a/sphinx/application.py b/sphinx/application.py
index 99490cecd..65b6c4eb2 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -18,8 +18,12 @@ import warnings
from collections import deque
from io import StringIO
from os import path
+from typing import Any, Callable, Dict, IO, List, Tuple, Union
+from docutils import nodes
+from docutils.nodes import Element, TextElement
from docutils.parsers.rst import Directive, roles
+from docutils.transforms import Transform
from pygments.lexer import Lexer
import sphinx
@@ -27,12 +31,16 @@ from sphinx import package_dir, locale
from sphinx.config import Config
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.environment import BuildEnvironment
+from sphinx.environment.collectors import EnvironmentCollector
from sphinx.errors import ApplicationError, ConfigError, VersionRequirementError
from sphinx.events import EventManager
+from sphinx.extension import Extension
from sphinx.highlighting import lexer_classes, lexers
from sphinx.locale import __
from sphinx.project import Project
from sphinx.registry import SphinxComponentRegistry
+from sphinx.roles import XRefRole
+from sphinx.theming import Theme
from sphinx.util import docutils
from sphinx.util import logging
from sphinx.util import progress_message
@@ -42,21 +50,14 @@ from sphinx.util.i18n import CatalogRepository
from sphinx.util.logging import prefixed_warnings
from sphinx.util.osutil import abspath, ensuredir, relpath
from sphinx.util.tags import Tags
+from sphinx.util.typing import RoleFunction, TitleGetter
if False:
# For type annotation
- from typing import Any, Callable, Dict, IO, Iterable, Iterator, List, Tuple, Union # NOQA
+ from docutils.nodes import Node # NOQA
from typing import Type # for python3.5.1
- from docutils import nodes # NOQA
- from docutils.parsers import Parser # NOQA
- from docutils.transforms import Transform # NOQA
- from sphinx.builders import Builder # NOQA
- from sphinx.domains import Domain, Index # NOQA
- from sphinx.environment.collectors import EnvironmentCollector # NOQA
- from sphinx.extension import Extension # NOQA
- from sphinx.roles import XRefRole # NOQA
- from sphinx.theming import Theme # NOQA
- from sphinx.util.typing import RoleFunction, TitleGetter # NOQA
+ from sphinx.builders import Builder
+
builtin_extensions = (
'sphinx.addnodes',
@@ -132,11 +133,11 @@ class Sphinx:
:ivar outdir: Directory for storing build documents.
"""
- def __init__(self, srcdir, confdir, outdir, doctreedir, buildername,
- confoverrides=None, status=sys.stdout, warning=sys.stderr,
- freshenv=False, warningiserror=False, tags=None, verbosity=0,
- parallel=0, keep_going=False):
- # type: (str, str, str, str, str, Dict, IO, IO, bool, bool, List[str], int, int, bool) -> None # NOQA
+ def __init__(self, srcdir: str, confdir: str, outdir: str, doctreedir: str,
+ buildername: str, confoverrides: Dict = None,
+ status: IO = sys.stdout, warning: IO = sys.stderr,
+ freshenv: bool = False, warningiserror: bool = False, tags: List[str] = None,
+ verbosity: int = 0, parallel: int = 0, keep_going: bool = False) -> None:
self.phase = BuildPhase.INITIALIZATION
self.verbosity = verbosity
self.extensions = {} # type: Dict[str, Extension]
@@ -270,8 +271,7 @@ class Sphinx:
# set up the builder
self._init_builder()
- def _init_i18n(self):
- # type: () -> None
+ def _init_i18n(self) -> None:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
"""
@@ -296,8 +296,7 @@ class Sphinx:
else:
logger.info(__('not available for built-in messages'))
- def _init_env(self, freshenv):
- # type: (bool) -> None
+ def _init_env(self, freshenv: bool) -> None:
filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if freshenv or not os.path.exists(filename):
self.env = BuildEnvironment()
@@ -313,28 +312,24 @@ class Sphinx:
logger.info(__('failed: %s'), err)
self._init_env(freshenv=True)
- def preload_builder(self, name):
- # type: (str) -> None
+ def preload_builder(self, name: str) -> None:
self.registry.preload_builder(self, name)
- def create_builder(self, name):
- # type: (str) -> Builder
+ def create_builder(self, name: str) -> "Builder":
if name is None:
logger.info(__('No builder selected, using default: html'))
name = 'html'
return self.registry.create_builder(self, name)
- def _init_builder(self):
- # type: () -> None
+ def _init_builder(self) -> None:
self.builder.set_environment(self.env)
self.builder.init()
self.events.emit('builder-inited')
# ---- main "build" method -------------------------------------------------
- def build(self, force_all=False, filenames=None):
- # type: (bool, List[str]) -> None
+ def build(self, force_all: bool = False, filenames: List[str] = None) -> None:
self.phase = BuildPhase.READING
try:
if force_all:
@@ -385,8 +380,7 @@ class Sphinx:
# ---- general extensibility interface -------------------------------------
- def setup_extension(self, extname):
- # type: (str) -> None
+ def setup_extension(self, extname: str) -> None:
"""Import and setup a Sphinx extension module.
Load the extension given by the module *name*. Use this if your
@@ -396,8 +390,7 @@ class Sphinx:
logger.debug('[app] setting up extension: %r', extname)
self.registry.load_extension(self, extname)
- def require_sphinx(self, version):
- # type: (str) -> None
+ def require_sphinx(self, version: str) -> None:
"""Check the Sphinx version if requested.
Compare *version* (which must be a ``major.minor`` version string, e.g.
@@ -410,8 +403,7 @@ class Sphinx:
raise VersionRequirementError(version)
# event interface
- def connect(self, event, callback):
- # type: (str, Callable) -> int
+ def connect(self, event: str, callback: Callable) -> int:
"""Register *callback* to be called when *event* is emitted.
For details on available core events and the arguments of callback
@@ -424,14 +416,12 @@ class Sphinx:
logger.debug('[app] connecting event %r: %r [id=%s]', event, callback, listener_id)
return listener_id
- def disconnect(self, listener_id):
- # type: (int) -> None
+ def disconnect(self, listener_id: int) -> None:
"""Unregister callback by *listener_id*."""
logger.debug('[app] disconnecting event: [id=%s]', listener_id)
self.events.disconnect(listener_id)
- def emit(self, event, *args):
- # type: (str, Any) -> List
+ def emit(self, event: str, *args) -> List:
"""Emit *event* and pass *arguments* to the callback functions.
Return the return values of all callbacks as a list. Do not emit core
@@ -439,8 +429,7 @@ class Sphinx:
"""
return self.events.emit(event, *args)
- def emit_firstresult(self, event, *args):
- # type: (str, Any) -> Any
+ def emit_firstresult(self, event: str, *args) -> Any:
"""Emit *event* and pass *arguments* to the callback functions.
Return the result of the first callback that doesn't return ``None``.
@@ -451,8 +440,7 @@ class Sphinx:
# registering addon parts
- def add_builder(self, builder, override=False):
- # type: (Type[Builder], bool) -> None
+ def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
"""Register a new builder.
*builder* must be a class that inherits from
@@ -464,8 +452,8 @@ class Sphinx:
self.registry.add_builder(builder, override=override)
# TODO(stephenfin): Describe 'types' parameter
- def add_config_value(self, name, default, rebuild, types=()):
- # type: (str, Any, Union[bool, str], Any) -> None
+ def add_config_value(self, name: str, default: Any, rebuild: Union[bool, str],
+ types: Any = ()) -> None:
"""Register a configuration value.
This is necessary for Sphinx to recognize new values and set default
@@ -497,8 +485,7 @@ class Sphinx:
rebuild = 'env' if rebuild else ''
self.config.add(name, default, rebuild, types)
- def add_event(self, name):
- # type: (str) -> None
+ def add_event(self, name: str) -> None:
"""Register an event called *name*.
This is needed to be able to emit it.
@@ -506,8 +493,8 @@ class Sphinx:
logger.debug('[app] adding event: %r', name)
self.events.add(name)
- def set_translator(self, name, translator_class, override=False):
- # type: (str, Type[nodes.NodeVisitor], bool) -> None
+ def set_translator(self, name: str, translator_class: "Type[nodes.NodeVisitor]",
+ override: bool = False) -> None:
"""Register or override a Docutils translator class.
This is used to register a custom output translator or to replace a
@@ -520,8 +507,7 @@ class Sphinx:
"""
self.registry.add_translator(name, translator_class, override=override)
- def add_node(self, node, override=False, **kwds):
- # type: (Type[nodes.Element], bool, Any) -> None
+ def add_node(self, node: "Type[Element]", override: bool = False, **kwds) -> None:
"""Register a Docutils node class.
This is necessary for Docutils internals. It may also be used in the
@@ -559,8 +545,9 @@ class Sphinx:
docutils.register_node(node)
self.registry.add_translation_handlers(node, **kwds)
- def add_enumerable_node(self, node, figtype, title_getter=None, override=False, **kwds):
- # type: (Type[nodes.Element], str, TitleGetter, bool, Any) -> None
+ def add_enumerable_node(self, node: "Type[Element]", figtype: str,
+ title_getter: TitleGetter = None, override: bool = False,
+ **kwds) -> None:
"""Register a Docutils node class as a numfig target.
Sphinx numbers the node automatically. And then the users can refer it
@@ -587,8 +574,7 @@ class Sphinx:
self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
self.add_node(node, override=override, **kwds)
- def add_directive(self, name, cls, override=False):
- # type: (str, Type[Directive], bool) -> None
+ def add_directive(self, name: str, cls: "Type[Directive]", override: bool = False):
"""Register a Docutils directive.
*name* must be the prospective directive name. *cls* is a directive
@@ -632,8 +618,7 @@ class Sphinx:
docutils.register_directive(name, cls)
- def add_role(self, name, role, override=False):
- # type: (str, Any, bool) -> None
+ def add_role(self, name: str, role: Any, override: bool = False) -> None:
"""Register a Docutils role.
*name* must be the role name that occurs in the source, *role* the role
@@ -650,8 +635,7 @@ class Sphinx:
name, type='app', subtype='add_role')
docutils.register_role(name, role)
- def add_generic_role(self, name, nodeclass, override=False):
- # type: (str, Any, bool) -> None
+ def add_generic_role(self, name: str, nodeclass: Any, override: bool = False) -> None:
"""Register a generic Docutils role.
Register a Docutils role that does nothing but wrap its contents in the
@@ -670,8 +654,7 @@ class Sphinx:
role = roles.GenericRole(name, nodeclass)
docutils.register_role(name, role)
- def add_domain(self, domain, override=False):
- # type: (Type[Domain], bool) -> None
+ def add_domain(self, domain: "Type[Domain]", override: bool = False) -> None:
"""Register a domain.
Make the given *domain* (which must be a class; more precisely, a
@@ -683,8 +666,8 @@ class Sphinx:
"""
self.registry.add_domain(domain, override=override)
- def add_directive_to_domain(self, domain, name, cls, override=False):
- # type: (str, str, Type[Directive], bool) -> None
+ def add_directive_to_domain(self, domain: str, name: str,
+ cls: "Type[Directive]", override: bool = False) -> None:
"""Register a Docutils directive in a domain.
Like :meth:`add_directive`, but the directive is added to the domain
@@ -696,8 +679,8 @@ class Sphinx:
"""
self.registry.add_directive_to_domain(domain, name, cls, override=override)
- def add_role_to_domain(self, domain, name, role, override=False):
- # type: (str, str, Union[RoleFunction, XRefRole], bool) -> None
+ def add_role_to_domain(self, domain: str, name: str, role: Union[RoleFunction, XRefRole],
+ override: bool = False) -> None:
"""Register a Docutils role in a domain.
Like :meth:`add_role`, but the role is added to the domain named
@@ -709,8 +692,8 @@ class Sphinx:
"""
self.registry.add_role_to_domain(domain, name, role, override=override)
- def add_index_to_domain(self, domain, index, override=False):
- # type: (str, Type[Index], bool) -> None
+ def add_index_to_domain(self, domain: str, index: "Type[Index]", override: bool = False
+ ) -> None:
"""Register a custom index for a domain.
Add a custom *index* class to the domain named *domain*. *index* must
@@ -722,10 +705,10 @@ class Sphinx:
"""
self.registry.add_index_to_domain(domain, index)
- def add_object_type(self, directivename, rolename, indextemplate='',
- parse_node=None, ref_nodeclass=None, objname='',
- doc_field_types=[], override=False):
- # type: (str, str, str, Callable, Type[nodes.TextElement], str, List, bool) -> None
+ def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
+ parse_node: Callable = None, ref_nodeclass: "Type[TextElement]" = None,
+ objname: str = '', doc_field_types: List = [], override: bool = False
+ ) -> None:
"""Register a new object type.
This method is a very convenient way to add a new :term:`object` type
@@ -786,9 +769,9 @@ class Sphinx:
ref_nodeclass, objname, doc_field_types,
override=override)
- def add_crossref_type(self, directivename, rolename, indextemplate='',
- ref_nodeclass=None, objname='', override=False):
- # type: (str, str, str, Type[nodes.TextElement], str, bool) -> None
+ def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
+ ref_nodeclass: "Type[TextElement]" = None, objname: str = '',
+ override: bool = False) -> None:
"""Register a new crossref object type.
This method is very similar to :meth:`add_object_type` except that the
@@ -822,8 +805,7 @@ class Sphinx:
indextemplate, ref_nodeclass, objname,
override=override)
- def add_transform(self, transform):
- # type: (Type[Transform]) -> None
+ def add_transform(self, transform: "Type[Transform]") -> None:
"""Register a Docutils transform to be applied after parsing.
Add the standard docutils :class:`Transform` subclass *transform* to
@@ -856,8 +838,7 @@ class Sphinx:
""" # NOQA
self.registry.add_transform(transform)
- def add_post_transform(self, transform):
- # type: (Type[Transform]) -> None
+ def add_post_transform(self, transform: "Type[Transform]") -> None:
"""Register a Docutils transform to be applied before writing.
Add the standard docutils :class:`Transform` subclass *transform* to
@@ -866,16 +847,14 @@ class Sphinx:
"""
self.registry.add_post_transform(transform)
- def add_javascript(self, filename, **kwargs):
- # type: (str, **str) -> None
+ def add_javascript(self, filename: str, **kwargs: str) -> None:
"""An alias of :meth:`add_js_file`."""
warnings.warn('The app.add_javascript() is deprecated. '
'Please use app.add_js_file() instead.',
RemovedInSphinx40Warning, stacklevel=2)
self.add_js_file(filename, **kwargs)
- def add_js_file(self, filename, **kwargs):
- # type: (str, **str) -> None
+ def add_js_file(self, filename: str, **kwargs: str) -> None:
"""Register a JavaScript file to include in the HTML output.
Add *filename* to the list of JavaScript files that the default HTML
@@ -901,8 +880,7 @@ class Sphinx:
if hasattr(self.builder, 'add_js_file'):
self.builder.add_js_file(filename, **kwargs) # type: ignore
- def add_css_file(self, filename, **kwargs):
- # type: (str, **str) -> None
+ def add_css_file(self, filename: str, **kwargs: str) -> None:
"""Register a stylesheet to include in the HTML output.
Add *filename* to the list of CSS files that the default HTML template
@@ -941,8 +919,8 @@ class Sphinx:
if hasattr(self.builder, 'add_css_file'):
self.builder.add_css_file(filename, **kwargs) # type: ignore
- def add_stylesheet(self, filename, alternate=False, title=None):
- # type: (str, bool, str) -> None
+ def add_stylesheet(self, filename: str, alternate: bool = False, title: str = None
+ ) -> None:
"""An alias of :meth:`add_css_file`."""
warnings.warn('The app.add_stylesheet() is deprecated. '
'Please use app.add_css_file() instead.',
@@ -959,8 +937,7 @@ class Sphinx:
self.add_css_file(filename, **attributes)
- def add_latex_package(self, packagename, options=None):
- # type: (str, str) -> None
+ def add_latex_package(self, packagename: str, options: str = None) -> None:
r"""Register a package to include in the LaTeX source code.
Add *packagename* to the list of packages that LaTeX source code will
@@ -978,8 +955,7 @@ class Sphinx:
"""
self.registry.add_latex_package(packagename, options)
- def add_lexer(self, alias, lexer):
- # type: (str, Union[Lexer, Type[Lexer]]) -> None
+ def add_lexer(self, alias: str, lexer: Union[Lexer, "Type[Lexer]"]) -> None:
"""Register a new lexer for source code.
Use *lexer* to highlight code blocks with the given language *alias*.
@@ -998,8 +974,7 @@ class Sphinx:
else:
lexer_classes[alias] = lexer
- def add_autodocumenter(self, cls, override=False):
- # type: (Any, bool) -> None
+ def add_autodocumenter(self, cls: Any, override: bool = False) -> None:
"""Register a new documenter class for the autodoc extension.
Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
@@ -1019,8 +994,8 @@ class Sphinx:
self.registry.add_documenter(cls.objtype, cls)
self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
- def add_autodoc_attrgetter(self, typ, getter):
- # type: (Type, Callable[[Any, str, Any], Any]) -> None
+ def add_autodoc_attrgetter(self, typ: "Type", getter: Callable[[Any, str, Any], Any]
+ ) -> None:
"""Register a new ``getattr``-like function for the autodoc extension.
Add *getter*, which must be a function with an interface compatible to
@@ -1034,8 +1009,7 @@ class Sphinx:
logger.debug('[app] adding autodoc attrgetter: %r', (typ, getter))
self.registry.add_autodoc_attrgetter(typ, getter)
- def add_search_language(self, cls):
- # type: (Any) -> None
+ def add_search_language(self, cls: Any) -> None:
"""Register a new language for the HTML search index.
Add *cls*, which must be a subclass of
@@ -1051,8 +1025,7 @@ class Sphinx:
assert issubclass(cls, SearchLanguage)
languages[cls.lang] = cls
- def add_source_suffix(self, suffix, filetype, override=False):
- # type: (str, str, bool) -> None
+ def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
"""Register a suffix of source files.
Same as :confval:`source_suffix`. The users can override this
@@ -1062,8 +1035,7 @@ class Sphinx:
"""
self.registry.add_source_suffix(suffix, filetype, override=override)
- def add_source_parser(self, *args, **kwargs):
- # type: (Any, Any) -> None
+ def add_source_parser(self, *args, **kwargs) -> None:
"""Register a parser class.
.. versionadded:: 1.4
@@ -1075,8 +1047,7 @@ class Sphinx:
"""
self.registry.add_source_parser(*args, **kwargs)
- def add_env_collector(self, collector):
- # type: (Type[EnvironmentCollector]) -> None
+ def add_env_collector(self, collector: "Type[EnvironmentCollector]") -> None:
"""Register an environment collector class.
Refer to :ref:`collector-api`.
@@ -1086,8 +1057,7 @@ class Sphinx:
logger.debug('[app] adding environment collector: %r', collector)
collector().enable(self)
- def add_html_theme(self, name, theme_path):
- # type: (str, str) -> None
+ def add_html_theme(self, name: str, theme_path: str) -> None:
"""Register a HTML Theme.
The *name* is a name of theme, and *path* is a full path to the theme
@@ -1098,8 +1068,9 @@ class Sphinx:
logger.debug('[app] adding HTML theme: %r, %r', name, theme_path)
self.html_themes[name] = theme_path
- def add_html_math_renderer(self, name, inline_renderers=None, block_renderers=None):
- # type: (str, Tuple[Callable, Callable], Tuple[Callable, Callable]) -> None
+ def add_html_math_renderer(self, name: str,
+ inline_renderers: Tuple[Callable, Callable] = None,
+ block_renderers: Tuple[Callable, Callable] = None) -> None:
"""Register a math renderer for HTML.
The *name* is a name of math renderer. Both *inline_renderers* and
@@ -1113,8 +1084,7 @@ class Sphinx:
"""
self.registry.add_html_math_renderer(name, inline_renderers, block_renderers)
- def add_message_catalog(self, catalog, locale_dir):
- # type: (str, str) -> None
+ def add_message_catalog(self, catalog: str, locale_dir: str) -> None:
"""Register a message catalog.
The *catalog* is a name of catalog, and *locale_dir* is a base path
@@ -1127,8 +1097,7 @@ class Sphinx:
locale.init_console(locale_dir, catalog)
# ---- other methods -------------------------------------------------
- def is_parallel_allowed(self, typ):
- # type: (str) -> bool
+ def is_parallel_allowed(self, typ: str) -> bool:
"""Check parallel processing is allowed or not.
``typ`` is a type of processing; ``'read'`` or ``'write'``.
@@ -1170,8 +1139,7 @@ class TemplateBridge:
that renders templates given a template name and a context.
"""
- def init(self, builder, theme=None, dirs=None):
- # type: (Builder, Theme, List[str]) -> None
+ def init(self, builder: "Builder", theme: Theme = None, dirs: List[str] = None) -> None:
"""Called by the builder to initialize the template system.
*builder* is the builder object; you'll probably want to look at the
@@ -1182,23 +1150,20 @@ class TemplateBridge:
"""
raise NotImplementedError('must be implemented in subclasses')
- def newest_template_mtime(self):
- # type: () -> float
+ def newest_template_mtime(self) -> float:
"""Called by the builder to determine if output files are outdated
because of template changes. Return the mtime of the newest template
file that was changed. The default implementation returns ``0``.
"""
return 0
- def render(self, template, context):
- # type: (str, Dict) -> None
+ def render(self, template: str, context: Dict) -> None:
"""Called by the builder to render a template given as a filename with
a specified context (a Python dictionary).
"""
raise NotImplementedError('must be implemented in subclasses')
- def render_string(self, template, context):
- # type: (str, Dict) -> str
+ def render_string(self, template: str, context: Dict) -> str:
"""Called by the builder to render a template given as a string with a
specified context (a Python dictionary).
"""
diff --git a/sphinx/config.py b/sphinx/config.py
index c12841acb..ab00a0555 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -14,7 +14,9 @@ import types
import warnings
from collections import OrderedDict
from os import path, getenv
-from typing import Any, NamedTuple, Union
+from typing import (
+ Any, Callable, Dict, Generator, Iterator, List, NamedTuple, Set, Tuple, Union
+)
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.errors import ConfigError, ExtensionError
@@ -23,14 +25,13 @@ from sphinx.util import logging
from sphinx.util.i18n import format_date
from sphinx.util.osutil import cd
from sphinx.util.pycompat import execfile_
+from sphinx.util.tags import Tags
from sphinx.util.typing import NoneType
if False:
# For type annotation
- from typing import Callable, Dict, Generator, Iterator, List, Set, Tuple # NOQA
- from sphinx.application import Sphinx # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
- from sphinx.util.tags import Tags # NOQA
+ from sphinx.application import Sphinx
+ from sphinx.environment import BuildEnvironment
logger = logging.getLogger(__name__)
@@ -43,8 +44,7 @@ ConfigValue = NamedTuple('ConfigValue', [('name', str),
('rebuild', Union[bool, str])])
-def is_serializable(obj):
- # type: (Any) -> bool
+def is_serializable(obj: Any) -> bool:
"""Check if object is serializable or not."""
if isinstance(obj, UNSERIALIZABLE_TYPES):
return False
@@ -64,12 +64,10 @@ class ENUM:
Example:
app.add_config_value('latex_show_urls', 'no', None, ENUM('no', 'footnote', 'inline'))
"""
- def __init__(self, *candidates):
- # type: (str) -> None
+ def __init__(self, *candidates: str) -> None:
self.candidates = candidates
- def match(self, value):
- # type: (Union[str, List, Tuple]) -> bool
+ def match(self, value: Union[str, List, Tuple]) -> bool:
if isinstance(value, (list, tuple)):
return all(item in self.candidates for item in value)
else:
@@ -156,8 +154,7 @@ class Config:
'env', []),
} # type: Dict[str, Tuple]
- def __init__(self, config={}, overrides={}):
- # type: (Dict[str, Any], Dict[str, Any]) -> None
+ def __init__(self, config: Dict[str, Any] = {}, overrides: Dict[str, Any] = {}) -> None:
self.overrides = dict(overrides)
self.values = Config.config_values.copy()
self._raw_config = config
@@ -171,15 +168,13 @@ class Config:
self.extensions = config.get('extensions', []) # type: List[str]
@classmethod
- def read(cls, confdir, overrides=None, tags=None):
- # type: (str, Dict, Tags) -> Config
+ def read(cls, confdir: str, overrides: Dict = None, tags: Tags = None) -> "Config":
"""Create a Config object from configuration file."""
filename = path.join(confdir, CONFIG_FILENAME)
namespace = eval_config_file(filename, tags)
return cls(namespace, overrides or {})
- def convert_overrides(self, name, value):
- # type: (str, Any) -> Any
+ def convert_overrides(self, name: str, value: Any) -> Any:
if not isinstance(value, str):
return value
else:
@@ -212,8 +207,7 @@ class Config:
else:
return value
- def pre_init_values(self):
- # type: () -> None
+ def pre_init_values(self) -> None:
"""
Initialize some limited config variables before initialize i18n and loading extensions
"""
@@ -227,8 +221,7 @@ class Config:
except ValueError as exc:
logger.warning("%s", exc)
- def init_values(self):
- # type: () -> None
+ def init_values(self) -> None:
config = self._raw_config
for valname, value in self.overrides.items():
try:
@@ -250,8 +243,7 @@ class Config:
if name in self.values:
self.__dict__[name] = config[name]
- def __getattr__(self, name):
- # type: (str) -> Any
+ def __getattr__(self, name: str) -> Any:
if name.startswith('_'):
raise AttributeError(name)
if name not in self.values:
@@ -261,42 +253,34 @@ class Config:
return default(self)
return default
- def __getitem__(self, name):
- # type: (str) -> str
+ def __getitem__(self, name: str) -> str:
return getattr(self, name)
- def __setitem__(self, name, value):
- # type: (str, Any) -> None
+ def __setitem__(self, name: str, value: Any) -> None:
setattr(self, name, value)
- def __delitem__(self, name):
- # type: (str) -> None
+ def __delitem__(self, name: str) -> None:
delattr(self, name)
- def __contains__(self, name):
- # type: (str) -> bool
+ def __contains__(self, name: str) -> bool:
return name in self.values
- def __iter__(self):
- # type: () -> Generator[ConfigValue, None, None]
+ def __iter__(self) -> Generator[ConfigValue, None, None]:
for name, value in self.values.items():
yield ConfigValue(name, getattr(self, name), value[1])
- def add(self, name, default, rebuild, types):
- # type: (str, Any, Union[bool, str], Any) -> None
+ def add(self, name: str, default: Any, rebuild: Union[bool, str], types: Any) -> None:
if name in self.values:
raise ExtensionError(__('Config value %r already present') % name)
else:
self.values[name] = (default, rebuild, types)
- def filter(self, rebuild):
- # type: (Union[str, List[str]]) -> Iterator[ConfigValue]
+ def filter(self, rebuild: Union[str, List[str]]) -> Iterator[ConfigValue]:
if isinstance(rebuild, str):
rebuild = [rebuild]
return (value for value in self if value.rebuild in rebuild)
- def __getstate__(self):
- # type: () -> Dict
+ def __getstate__(self) -> Dict:
"""Obtains serializable data for pickling."""
# remove potentially pickling-problematic values from config
__dict__ = {}
@@ -319,13 +303,11 @@ class Config:
return __dict__
- def __setstate__(self, state):
- # type: (Dict) -> None
+ def __setstate__(self, state: Dict) -> None:
self.__dict__.update(state)
-def eval_config_file(filename, tags):
- # type: (str, Tags) -> Dict[str, Any]
+def eval_config_file(filename: str, tags: Tags) -> Dict[str, Any]:
"""Evaluate a config file."""
namespace = {} # type: Dict[str, Any]
namespace['__file__'] = filename
@@ -349,8 +331,7 @@ def eval_config_file(filename, tags):
return namespace
-def convert_source_suffix(app, config):
- # type: (Sphinx, Config) -> None
+def convert_source_suffix(app: "Sphinx", config: Config) -> None:
"""This converts old styled source_suffix to new styled one.
* old style: str or list
@@ -375,8 +356,7 @@ def convert_source_suffix(app, config):
"But `%r' is given." % source_suffix))
-def init_numfig_format(app, config):
- # type: (Sphinx, Config) -> None
+def init_numfig_format(app: "Sphinx", config: Config) -> None:
"""Initialize :confval:`numfig_format`."""
numfig_format = {'section': _('Section %s'),
'figure': _('Fig. %s'),
@@ -388,8 +368,7 @@ def init_numfig_format(app, config):
config.numfig_format = numfig_format # type: ignore
-def correct_copyright_year(app, config):
- # type: (Sphinx, Config) -> None
+def correct_copyright_year(app: "Sphinx", config: Config) -> None:
"""correct values of copyright year that are not coherent with
the SOURCE_DATE_EPOCH environment variable (if set)
@@ -402,8 +381,7 @@ def correct_copyright_year(app, config):
config[k] = copyright_year_re.sub(replace, config[k])
-def check_confval_types(app, config):
- # type: (Sphinx, Config) -> None
+def check_confval_types(app: "Sphinx", config: Config) -> None:
"""check all values for deviation from the default value's type, since
that can result in TypeErrors all over the place NB.
"""
@@ -458,8 +436,7 @@ def check_confval_types(app, config):
default=type(default)))
-def check_unicode(config):
- # type: (Config) -> None
+def check_unicode(config: Config) -> None:
"""check all string values for non-ASCII characters in bytestrings,
since that can result in UnicodeErrors all over the place
"""
@@ -475,16 +452,15 @@ def check_unicode(config):
'Please use Unicode strings, e.g. %r.'), name, 'Content')
-def check_primary_domain(app, config):
- # type: (Sphinx, Config) -> None
+def check_primary_domain(app: "Sphinx", config: Config) -> None:
primary_domain = config.primary_domain
if primary_domain and not app.registry.has_domain(primary_domain):
logger.warning(__('primary_domain %r not found, ignored.'), primary_domain)
config.primary_domain = None # type: ignore
-def check_master_doc(app, env, added, changed, removed):
- # type: (Sphinx, BuildEnvironment, Set[str], Set[str], Set[str]) -> Set[str]
+def check_master_doc(app: "Sphinx", env: "BuildEnvironment", added: Set[str],
+ changed: Set[str], removed: Set[str]) -> Set[str]:
"""Adjust master_doc to 'contents' to support an old project which does not have
no master_doc setting.
"""
@@ -498,8 +474,7 @@ def check_master_doc(app, env, added, changed, removed):
return changed
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.connect('config-inited', convert_source_suffix)
app.connect('config-inited', init_numfig_format)
app.connect('config-inited', correct_copyright_year)
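
The config.py hunks likewise annotate Config's mapping-style interface (__getitem__, __iter__, filter, add) and the checker callbacks wired up in setup() without changing them. A small sketch of driving Config directly with hypothetical values, assuming only the interface shown above:

from sphinx.config import Config

# Raw values as a conf.py would provide them, plus one override
# (as given on the sphinx-build command line with -D).
config = Config({'project': 'demo', 'copyright': '2019, demo authors'},
                {'project': 'overridden'})
config.init_values()

print(config.project)       # 'overridden' -- overrides win over the raw config
print(config['copyright'])  # '2019, demo authors', via __getitem__
for value in config.filter('env'):
    # ConfigValue tuples whose rebuild policy is 'env'
    print(value.name, value.value)
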
diff --git a/sphinx/deprecation.py b/sphinx/deprecation.py
index 38cd00d70..855e41599 100644
--- a/sphinx/deprecation.py
+++ b/sphinx/deprecation.py
@@ -11,11 +11,8 @@
import sys
import warnings
from importlib import import_module
-
-if False:
- # For type annotation
- from typing import Any, Dict # NOQA
- from typing import Type # for python3.5.1
+from typing import Any, Dict
+from typing import Type # for python3.5.1
class RemovedInSphinx40Warning(DeprecationWarning):
@@ -29,22 +26,20 @@ class RemovedInSphinx50Warning(PendingDeprecationWarning):
RemovedInNextVersionWarning = RemovedInSphinx40Warning
-def deprecated_alias(modname, objects, warning):
- # type: (str, Dict, Type[Warning]) -> None
+def deprecated_alias(modname: str, objects: Dict, warning: Type[Warning]) -> None:
module = import_module(modname)
sys.modules[modname] = _ModuleWrapper(module, modname, objects, warning) # type: ignore
class _ModuleWrapper:
- def __init__(self, module, modname, objects, warning):
- # type: (Any, str, Dict, Type[Warning]) -> None
+ def __init__(self, module: Any, modname: str, objects: Dict, warning: Type[Warning]
+ ) -> None:
self._module = module
self._modname = modname
self._objects = objects
self._warning = warning
- def __getattr__(self, name):
- # type: (str) -> Any
+ def __getattr__(self, name: str) -> Any:
if name in self._objects:
warnings.warn("%s.%s is deprecated. Check CHANGES for Sphinx "
"API modifications." % (self._modname, name),
@@ -57,33 +52,27 @@ class _ModuleWrapper:
class DeprecatedDict(dict):
"""A deprecated dict which warns on each access."""
- def __init__(self, data, message, warning):
- # type: (Dict, str, Type[Warning]) -> None
+ def __init__(self, data: Dict, message: str, warning: Type[Warning]) -> None:
self.message = message
self.warning = warning
super().__init__(data)
- def __setitem__(self, key, value):
- # type: (str, Any) -> None
+ def __setitem__(self, key: str, value: Any) -> None:
warnings.warn(self.message, self.warning, stacklevel=2)
super().__setitem__(key, value)
- def setdefault(self, key, default=None):
- # type: (str, Any) -> None
+ def setdefault(self, key: str, default: Any = None) -> Any:
warnings.warn(self.message, self.warning, stacklevel=2)
return super().setdefault(key, default)
- def __getitem__(self, key):
- # type: (str) -> None
+ def __getitem__(self, key: str) -> None:
warnings.warn(self.message, self.warning, stacklevel=2)
return super().__getitem__(key)
- def get(self, key, default=None):
- # type: (str, Any) -> None
+ def get(self, key: str, default: Any = None) -> Any:
warnings.warn(self.message, self.warning, stacklevel=2)
return super().get(key, default)
- def update(self, other=None): # type: ignore
- # type: (Dict) -> None
+ def update(self, other: Dict = None) -> None: # type: ignore
warnings.warn(self.message, self.warning, stacklevel=2)
super().update(other)
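
deprecation.py now imports its typing names unconditionally. Its two helpers are unchanged: deprecated_alias wraps a module so that importing a moved or renamed attribute keeps working but emits the given warning, and DeprecatedDict does the same for dict access. A sketch of keeping an old name alive, using a hypothetical module and helper:

# At the bottom of a hypothetical module ``mypkg.helpers`` whose function
# ``old_name`` has been renamed to ``new_name``:
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias


def new_name() -> str:
    return 'renamed helper'


deprecated_alias('mypkg.helpers',          # module whose attribute lookups to wrap
                 {'old_name': new_name},   # old attribute name -> current object
                 RemovedInSphinx40Warning)
# ``from mypkg.helpers import old_name`` still resolves, but warns that the
# name is deprecated and points readers at CHANGES.
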
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 25648e38b..aa2bfed99 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -640,7 +640,7 @@ class BuildEnvironment:
@property
def indexentries(self) -> Dict[str, List[Tuple[str, str, str, str, str]]]:
warnings.warn('env.indexentries() is deprecated. Please use IndexDomain instead.',
- RemovedInSphinx40Warning)
+ RemovedInSphinx40Warning, stacklevel=2)
from sphinx.domains.index import IndexDomain
domain = cast(IndexDomain, self.get_domain('index'))
return domain.entries
diff --git a/sphinx/environment/adapters/indexentries.py b/sphinx/environment/adapters/indexentries.py
index 68198040d..9394f92bc 100644
--- a/sphinx/environment/adapters/indexentries.py
+++ b/sphinx/environment/adapters/indexentries.py
@@ -12,8 +12,10 @@ import re
import unicodedata
from itertools import groupby
from typing import Any, Dict, Pattern, List, Tuple
+from typing import cast
from sphinx.builders import Builder
+from sphinx.domains.index import IndexDomain
from sphinx.environment import BuildEnvironment
from sphinx.errors import NoUri
from sphinx.locale import _, __
@@ -53,7 +55,8 @@ class IndexEntries:
# maintain links in sorted/deterministic order
bisect.insort(entry[0], (main, uri))
- for fn, entries in self.env.indexentries.items():
+ domain = cast(IndexDomain, self.env.get_domain('index'))
+ for fn, entries in domain.entries.items():
# new entry types must be listed in directives/other.py!
for type, value, tid, main, index_key in entries:
try:
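
This adapter change matches the deprecation of env.indexentries in the environment/__init__.py hunk above: the same data now lives on the index domain. Extensions that read the old attribute can switch the same way; a sketch, assuming a BuildEnvironment from a running build:

from typing import Iterator, List, Tuple, cast

from sphinx.domains.index import IndexDomain
from sphinx.environment import BuildEnvironment


def iter_index_entries(env: BuildEnvironment) -> Iterator[Tuple[str, List]]:
    # Replacement for iterating the deprecated ``env.indexentries`` mapping.
    domain = cast(IndexDomain, env.get_domain('index'))
    yield from domain.entries.items()
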
diff --git a/sphinx/environment/collectors/__init__.py b/sphinx/environment/collectors/__init__.py
index eb16a9f25..137a10302 100644
--- a/sphinx/environment/collectors/__init__.py
+++ b/sphinx/environment/collectors/__init__.py
@@ -12,9 +12,12 @@ from typing import Dict, List, Set
from docutils import nodes
-from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
+if False:
+ # For type annotation
+ from sphinx.application import Sphinx
+
class EnvironmentCollector:
"""An EnvironmentCollector is a specific data collector from each document.
@@ -27,7 +30,7 @@ class EnvironmentCollector:
listener_ids = None # type: Dict[str, int]
- def enable(self, app: Sphinx) -> None:
+ def enable(self, app: "Sphinx") -> None:
assert self.listener_ids is None
self.listener_ids = {
'doctree-read': app.connect('doctree-read', self.process_doc),
@@ -37,38 +40,38 @@ class EnvironmentCollector:
'env-get-outdated': app.connect('env-get-outdated', self.get_outdated_docs),
}
- def disable(self, app: Sphinx) -> None:
+ def disable(self, app: "Sphinx") -> None:
assert self.listener_ids is not None
for listener_id in self.listener_ids.values():
app.disconnect(listener_id)
self.listener_ids = None
- def clear_doc(self, app: Sphinx, env: BuildEnvironment, docname: str) -> None:
+ def clear_doc(self, app: "Sphinx", env: BuildEnvironment, docname: str) -> None:
"""Remove specified data of a document.
This method is called on the removal of the document."""
raise NotImplementedError
- def merge_other(self, app: Sphinx, env: BuildEnvironment,
+ def merge_other(self, app: "Sphinx", env: BuildEnvironment,
docnames: Set[str], other: BuildEnvironment) -> None:
"""Merge in specified data regarding docnames from a different `BuildEnvironment`
object which coming from a subprocess in parallel builds."""
raise NotImplementedError
- def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
+ def process_doc(self, app: "Sphinx", doctree: nodes.document) -> None:
"""Process a document and gather specific data from it.
This method is called after the document is read."""
raise NotImplementedError
- def get_updated_docs(self, app: Sphinx, env: BuildEnvironment) -> List[str]:
+ def get_updated_docs(self, app: "Sphinx", env: BuildEnvironment) -> List[str]:
"""Return a list of docnames to re-read.
This methods is called after reading the whole of documents (experimental).
"""
return []
- def get_outdated_docs(self, app: Sphinx, env: BuildEnvironment,
+ def get_outdated_docs(self, app: "Sphinx", env: BuildEnvironment,
added: Set[str], changed: Set[str], removed: Set[str]) -> List[str]:
"""Return a list of docnames to re-read.
diff --git a/sphinx/errors.py b/sphinx/errors.py
index 64036721f..d67d6d1f7 100644
--- a/sphinx/errors.py
+++ b/sphinx/errors.py
@@ -9,9 +9,7 @@
:license: BSD, see LICENSE for details.
"""
-if False:
- # For type annotation
- from typing import Any # NOQA
+from typing import Any
class SphinxError(Exception):
@@ -51,21 +49,18 @@ class ExtensionError(SphinxError):
"""Extension error."""
category = 'Extension error'
- def __init__(self, message, orig_exc=None):
- # type: (str, Exception) -> None
+ def __init__(self, message: str, orig_exc: Exception = None) -> None:
super().__init__(message)
self.message = message
self.orig_exc = orig_exc
- def __repr__(self):
- # type: () -> str
+ def __repr__(self) -> str:
if self.orig_exc:
return '%s(%r, %r)' % (self.__class__.__name__,
self.message, self.orig_exc)
return '%s(%r)' % (self.__class__.__name__, self.message)
- def __str__(self):
- # type: () -> str
+ def __str__(self) -> str:
parent_str = super().__str__()
if self.orig_exc:
return '%s (exception: %s)' % (parent_str, self.orig_exc)
@@ -102,21 +97,18 @@ class SphinxParallelError(SphinxError):
category = 'Sphinx parallel build error'
- def __init__(self, message, traceback):
- # type: (str, Any) -> None
+ def __init__(self, message: str, traceback: Any) -> None:
self.message = message
self.traceback = traceback
- def __str__(self):
- # type: () -> str
+ def __str__(self) -> str:
return self.message
class PycodeError(Exception):
"""Pycode Python source code analyser error."""
- def __str__(self):
- # type: () -> str
+ def __str__(self) -> str:
res = self.args[0]
if len(self.args) > 1:
res += ' (exception was: %r)' % self.args[1]
diff --git a/sphinx/events.py b/sphinx/events.py
index df72f8f21..52a87439b 100644
--- a/sphinx/events.py
+++ b/sphinx/events.py
@@ -12,6 +12,7 @@
import warnings
from collections import OrderedDict, defaultdict
+from typing import Any, Callable, Dict, List
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.errors import ExtensionError
@@ -20,8 +21,8 @@ from sphinx.util import logging
if False:
# For type annotation
- from typing import Any, Callable, Dict, List # NOQA
- from sphinx.application import Sphinx # NOQA
+ from sphinx.application import Sphinx
+
logger = logging.getLogger(__name__)
@@ -50,8 +51,7 @@ core_events = {
class EventManager:
"""Event manager for Sphinx."""
- def __init__(self, app=None):
- # type: (Sphinx) -> None
+ def __init__(self, app: "Sphinx" = None) -> None:
if app is None:
warnings.warn('app argument is required for EventManager.',
RemovedInSphinx40Warning)
@@ -60,15 +60,13 @@ class EventManager:
self.listeners = defaultdict(OrderedDict) # type: Dict[str, Dict[int, Callable]]
self.next_listener_id = 0
- def add(self, name):
- # type: (str) -> None
+ def add(self, name: str) -> None:
"""Register a custom Sphinx event."""
if name in self.events:
raise ExtensionError(__('Event %r already present') % name)
self.events[name] = ''
- def connect(self, name, callback):
- # type: (str, Callable) -> int
+ def connect(self, name: str, callback: Callable) -> int:
"""Connect a handler to specific event."""
if name not in self.events:
raise ExtensionError(__('Unknown event name: %s') % name)
@@ -78,14 +76,12 @@ class EventManager:
self.listeners[name][listener_id] = callback
return listener_id
- def disconnect(self, listener_id):
- # type: (int) -> None
+ def disconnect(self, listener_id: int) -> None:
"""Disconnect a handler."""
for event in self.listeners.values():
event.pop(listener_id, None)
- def emit(self, name, *args):
- # type: (str, Any) -> List
+ def emit(self, name: str, *args) -> List:
"""Emit a Sphinx event."""
try:
logger.debug('[app] emitting event: %r%s', name, repr(args)[:100])
@@ -103,8 +99,7 @@ class EventManager:
results.append(callback(self.app, *args))
return results
- def emit_firstresult(self, name, *args):
- # type: (str, Any) -> Any
+ def emit_firstresult(self, name: str, *args) -> Any:
"""Emit a Sphinx event and returns first result.
This returns the result of the first handler that doesn't return ``None``.
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index 28a5f1fbf..d72c12529 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -59,6 +59,11 @@ class DummyApplication:
self.registry = SphinxComponentRegistry()
self.messagelog = [] # type: List[str]
self.verbosity = 0
+ self._warncount = 0
+ self.warningiserror = False
+
+ def emit_firstresult(self, *args) -> None:
+ pass
def setup_documenters(app: Any) -> None:
diff --git a/sphinx/extension.py b/sphinx/extension.py
index 7565cbc85..3cab20c2f 100644
--- a/sphinx/extension.py
+++ b/sphinx/extension.py
@@ -8,22 +8,22 @@
:license: BSD, see LICENSE for details.
"""
+from typing import Any, Dict
+
+from sphinx.config import Config
from sphinx.errors import VersionRequirementError
from sphinx.locale import __
from sphinx.util import logging
if False:
# For type annotation
- from typing import Any, Dict # NOQA
- from sphinx.application import Sphinx # NOQA
- from sphinx.config import Config # NOQA
+ from sphinx.application import Sphinx
logger = logging.getLogger(__name__)
class Extension:
- def __init__(self, name, module, **kwargs):
- # type: (str, Any, Any) -> None
+ def __init__(self, name: str, module: Any, **kwargs) -> None:
self.name = name
self.module = module
self.metadata = kwargs
@@ -40,8 +40,7 @@ class Extension:
self.parallel_write_safe = kwargs.pop('parallel_write_safe', True)
-def verify_needs_extensions(app, config):
- # type: (Sphinx, Config) -> None
+def verify_needs_extensions(app: "Sphinx", config: Config) -> None:
"""Verify the required Sphinx extensions are loaded."""
if config.needs_extensions is None:
return
@@ -60,8 +59,7 @@ def verify_needs_extensions(app, config):
(extname, reqversion, extension.version))
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.connect('config-inited', verify_needs_extensions)
return {
diff --git a/sphinx/highlighting.py b/sphinx/highlighting.py
index c659114d8..9ef753253 100644
--- a/sphinx/highlighting.py
+++ b/sphinx/highlighting.py
@@ -10,14 +10,17 @@
from functools import partial
from importlib import import_module
+from typing import Any, Dict
from pygments import highlight
from pygments.filters import ErrorToken
+from pygments.formatter import Formatter
from pygments.formatters import HtmlFormatter, LatexFormatter
from pygments.lexer import Lexer
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.lexers import PythonLexer, Python3Lexer, PythonConsoleLexer, \
CLexer, TextLexer, RstLexer
+from pygments.style import Style
from pygments.styles import get_style_by_name
from pygments.util import ClassNotFound
@@ -25,12 +28,6 @@ from sphinx.locale import __
from sphinx.pygments_styles import SphinxStyle, NoneStyle
from sphinx.util import logging, texescape
-if False:
- # For type annotation
- from typing import Any, Dict # NOQA
- from pygments.formatter import Formatter # NOQA
- from pygments.style import Style # NOQA
-
logger = logging.getLogger(__name__)
@@ -63,8 +60,8 @@ class PygmentsBridge:
html_formatter = HtmlFormatter
latex_formatter = LatexFormatter
- def __init__(self, dest='html', stylename='sphinx', latex_engine=None):
- # type: (str, str, str) -> None
+ def __init__(self, dest: str = 'html', stylename: str = 'sphinx',
+ latex_engine: str = None) -> None:
self.dest = dest
self.latex_engine = latex_engine
@@ -76,8 +73,7 @@ class PygmentsBridge:
self.formatter = self.latex_formatter
self.formatter_args['commandprefix'] = 'PYG'
- def get_style(self, stylename):
- # type: (str) -> Style
+ def get_style(self, stylename: str) -> Style:
if stylename is None or stylename == 'sphinx':
return SphinxStyle
elif stylename == 'none':
@@ -88,13 +84,12 @@ class PygmentsBridge:
else:
return get_style_by_name(stylename)
- def get_formatter(self, **kwargs):
- # type: (Any) -> Formatter
+ def get_formatter(self, **kwargs) -> Formatter:
kwargs.update(self.formatter_args)
return self.formatter(**kwargs)
- def get_lexer(self, source, lang, opts=None, force=False, location=None):
- # type: (str, str, Dict, bool, Any) -> Lexer
+ def get_lexer(self, source: str, lang: str, opts: Dict = None,
+ force: bool = False, location: Any = None) -> Lexer:
if not opts:
opts = {}
@@ -137,8 +132,8 @@ class PygmentsBridge:
return lexer
- def highlight_block(self, source, lang, opts=None, force=False, location=None, **kwargs):
- # type: (str, str, Dict, bool, Any, Any) -> str
+ def highlight_block(self, source: str, lang: str, opts: Dict = None,
+ force: bool = False, location: Any = None, **kwargs) -> str:
if not isinstance(source, str):
source = source.decode()
@@ -167,8 +162,7 @@ class PygmentsBridge:
# MEMO: this is done to escape Unicode chars with non-Unicode engines
return texescape.hlescape(hlsource, self.latex_engine)
- def get_stylesheet(self):
- # type: () -> str
+ def get_stylesheet(self) -> str:
formatter = self.get_formatter()
if self.dest == 'html':
return formatter.get_style_defs('.highlight')
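
highlighting.py follows the same conversion and PygmentsBridge itself is unchanged. A quick sketch of calling the bridge directly, outside a build, on a hypothetical snippet:

from sphinx.highlighting import PygmentsBridge

bridge = PygmentsBridge(dest='html', stylename='sphinx')
html = bridge.highlight_block("print('hello')", 'python')  # highlighted HTML markup
css = bridge.get_stylesheet()                              # CSS rules for the .highlight class
print(html)
print(css)
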
diff --git a/sphinx/io.py b/sphinx/io.py
index 060ba59ce..a5a715716 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -9,16 +9,23 @@
"""
import codecs
import warnings
-from typing import Any
+from typing import Any, List, Tuple
+from typing import Type # for python3.5.1
+from docutils import nodes
from docutils.core import Publisher
-from docutils.io import FileInput, NullOutput
+from docutils.frontend import Values
+from docutils.io import FileInput, Input, NullOutput
+from docutils.parsers import Parser
from docutils.parsers.rst import Parser as RSTParser
from docutils.readers import standalone
+from docutils.statemachine import StringList, string2lines
+from docutils.transforms import Transform
from docutils.transforms.references import DanglingReferences
from docutils.writers import UnfilteredWriter
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
+from sphinx.environment import BuildEnvironment
from sphinx.errors import FiletypeNotFoundError
from sphinx.transforms import (
AutoIndexUpgrader, DoctreeReadEvent, FigureAligner, SphinxTransformer
@@ -34,15 +41,7 @@ from sphinx.versioning import UIDTransform
if False:
# For type annotation
- from typing import Dict, List, Tuple # NOQA
- from typing import Type # for python3.5.1
- from docutils import nodes # NOQA
- from docutils.frontend import Values # NOQA
- from docutils.io import Input # NOQA
- from docutils.parsers import Parser # NOQA
- from docutils.transforms import Transform # NOQA
- from sphinx.application import Sphinx # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
+ from sphinx.application import Sphinx
logger = logging.getLogger(__name__)
@@ -57,8 +56,7 @@ class SphinxBaseReader(standalone.Reader):
transforms = [] # type: List[Type[Transform]]
- def __init__(self, *args, **kwargs):
- # type: (Any, Any) -> None
+ def __init__(self, *args, **kwargs) -> None:
from sphinx.application import Sphinx
if len(args) > 0 and isinstance(args[0], Sphinx):
self._app = args[0]
@@ -68,26 +66,22 @@ class SphinxBaseReader(standalone.Reader):
super().__init__(*args, **kwargs)
@property
- def app(self):
- # type: () -> Sphinx
+ def app(self) -> "Sphinx":
warnings.warn('SphinxBaseReader.app is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
return self._app
@property
- def env(self):
- # type: () -> BuildEnvironment
+ def env(self) -> BuildEnvironment:
warnings.warn('SphinxBaseReader.env is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
return self._env
- def setup(self, app):
- # type: (Sphinx) -> None
+ def setup(self, app: "Sphinx") -> None:
self._app = app # hold application object only for compatibility
self._env = app.env
- def get_transforms(self):
- # type: () -> List[Type[Transform]]
+ def get_transforms(self) -> List[Type[Transform]]:
transforms = super().get_transforms() + self.transforms
# remove transforms which is not needed for Sphinx
@@ -98,8 +92,7 @@ class SphinxBaseReader(standalone.Reader):
return transforms
- def new_document(self):
- # type: () -> nodes.document
+ def new_document(self) -> nodes.document:
"""Creates a new document object which having a special reporter object good
for logging.
"""
@@ -121,13 +114,11 @@ class SphinxStandaloneReader(SphinxBaseReader):
A basic document reader for Sphinx.
"""
- def setup(self, app):
- # type: (Sphinx) -> None
+ def setup(self, app: "Sphinx") -> None:
self.transforms = self.transforms + app.registry.get_transforms()
super().setup(app)
- def read(self, source, parser, settings):
- # type: (Input, Parser, Values) -> nodes.document
+ def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document:
self.source = source
if not self.parser:
self.parser = parser
@@ -136,8 +127,7 @@ class SphinxStandaloneReader(SphinxBaseReader):
self.parse()
return self.document
- def read_source(self, env):
- # type: (BuildEnvironment) -> str
+ def read_source(self, env: BuildEnvironment) -> str:
"""Read content from source and do post-process."""
content = self.source.read()
@@ -156,8 +146,7 @@ class SphinxI18nReader(SphinxBaseReader):
Because the translated texts are partial and they don't have correct line numbers.
"""
- def setup(self, app):
- # type: (Sphinx) -> None
+ def setup(self, app: "Sphinx") -> None:
super().setup(app)
self.transforms = self.transforms + app.registry.get_transforms()
@@ -174,27 +163,24 @@ class SphinxDummyWriter(UnfilteredWriter):
supported = ('html',) # needed to keep "meta" nodes
- def translate(self):
- # type: () -> None
+ def translate(self) -> None:
pass
-def SphinxDummySourceClass(source, *args, **kwargs):
- # type: (Any, Any, Any) -> Any
+def SphinxDummySourceClass(source: Any, *args, **kwargs) -> Any:
"""Bypass source object as is to cheat Publisher."""
return source
class SphinxFileInput(FileInput):
"""A basic FileInput for Sphinx."""
- def __init__(self, *args, **kwargs):
- # type: (Any, Any) -> None
+ def __init__(self, *args, **kwargs) -> None:
kwargs['error_handler'] = 'sphinx'
super().__init__(*args, **kwargs)
-def read_doc(app, env, filename):
- # type: (Sphinx, BuildEnvironment, str) -> nodes.document
+
+def read_doc(app: "Sphinx", env: BuildEnvironment, filename: str) -> nodes.document:
"""Parse a document and convert to doctree."""
# set up error_handler for the target document
error_handler = UnicodeDecodeErrorHandler(env.docname)
diff --git a/sphinx/jinja2glue.py b/sphinx/jinja2glue.py
index 1a356c021..060b9f1bd 100644
--- a/sphinx/jinja2glue.py
+++ b/sphinx/jinja2glue.py
@@ -10,42 +10,37 @@
from os import path
from pprint import pformat
-from typing import Any, Callable, Iterator, Tuple # NOQA
+from typing import Any, Callable, Dict, Iterator, List, Tuple, Union
-from jinja2 import FileSystemLoader, BaseLoader, TemplateNotFound, \
- contextfunction
+from jinja2 import FileSystemLoader, BaseLoader, TemplateNotFound, contextfunction
+from jinja2.environment import Environment
from jinja2.sandbox import SandboxedEnvironment
from jinja2.utils import open_if_exists
from sphinx.application import TemplateBridge
+from sphinx.theming import Theme
from sphinx.util import logging
from sphinx.util.osutil import mtimes_of_files
if False:
# For type annotation
- from typing import Dict, List, Union # NOQA
- from jinja2.environment import Environment # NOQA
- from sphinx.builders import Builder # NOQA
- from sphinx.theming import Theme # NOQA
+ from sphinx.builders import Builder
-def _tobool(val):
- # type: (str) -> bool
+def _tobool(val: str) -> bool:
if isinstance(val, str):
return val.lower() in ('true', '1', 'yes', 'on')
return bool(val)
-def _toint(val):
- # type: (str) -> int
+def _toint(val: str) -> int:
try:
return int(val)
except ValueError:
return 0
-def _todim(val):
- # type: (Union[int, str]) -> str
+def _todim(val: Union[int, str]) -> str:
"""
Make val a css dimension. In particular the following transformations
are performed:
@@ -63,8 +58,7 @@ def _todim(val):
return val # type: ignore
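Only the tail of _todim is visible in this hunk; the docstring's transformation list covers the interesting cases. Two illustrative expectations, assuming the documented behaviour (bare integers become pixel values, anything already dimension-like passes through):

assert _todim(100) == '100px'   # plain numbers are treated as pixels (assumed)
assert _todim('50%') == '50%'   # existing CSS dimensions pass through unchanged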
-def _slice_index(values, slices):
- # type: (List, int) -> Iterator[List]
+def _slice_index(values: List, slices: int) -> Iterator[List]:
seq = list(values)
length = 0
for value in values:
@@ -85,8 +79,7 @@ def _slice_index(values, slices):
yield seq[start:offset]
-def accesskey(context, key):
- # type: (Any, str) -> str
+def accesskey(context: Any, key: str) -> str:
"""Helper to output each access key only once."""
if '_accesskeys' not in context:
context.vars['_accesskeys'] = {}
@@ -97,24 +90,20 @@ def accesskey(context, key):
class idgen:
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
self.id = 0
- def current(self):
- # type: () -> int
+ def current(self) -> int:
return self.id
- def __next__(self):
- # type: () -> int
+ def __next__(self) -> int:
self.id += 1
return self.id
next = __next__ # Python 2/Jinja compatibility
@contextfunction
-def warning(context, message, *args, **kwargs):
- # type: (Dict, str, Any, Any) -> str
+def warning(context: Dict, message: str, *args, **kwargs) -> str:
if 'pagename' in context:
filename = context.get('pagename') + context.get('file_suffix', '')
message = 'in rendering %s: %s' % (filename, message)
@@ -129,8 +118,7 @@ class SphinxFileSystemLoader(FileSystemLoader):
template names.
"""
- def get_source(self, environment, template):
- # type: (Environment, str) -> Tuple[str, str, Callable]
+ def get_source(self, environment: Environment, template: str) -> Tuple[str, str, Callable]:
for searchpath in self.searchpath:
filename = path.join(searchpath, template)
f = open_if_exists(filename)
@@ -141,8 +129,7 @@ class SphinxFileSystemLoader(FileSystemLoader):
mtime = path.getmtime(filename)
- def uptodate():
- # type: () -> bool
+ def uptodate() -> bool:
try:
return path.getmtime(filename) == mtime
except OSError:
@@ -158,8 +145,7 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
# TemplateBridge interface
- def init(self, builder, theme=None, dirs=None):
- # type: (Builder, Theme, List[str]) -> None
+ def init(self, builder: "Builder", theme: Theme = None, dirs: List[str] = None) -> None:
# create a chain of paths to search
if theme:
# the theme's own dir and its bases' dirs
@@ -202,22 +188,18 @@ class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
if use_i18n:
self.environment.install_gettext_translations(builder.app.translator) # type: ignore # NOQA
- def render(self, template, context): # type: ignore
- # type: (str, Dict) -> str
+ def render(self, template: str, context: Dict) -> str: # type: ignore
return self.environment.get_template(template).render(context)
- def render_string(self, source, context):
- # type: (str, Dict) -> str
+ def render_string(self, source: str, context: Dict) -> str:
return self.environment.from_string(source).render(context)
- def newest_template_mtime(self):
- # type: () -> float
+ def newest_template_mtime(self) -> float:
return max(mtimes_of_files(self.pathchain, '.html'))
# Loader interface
- def get_source(self, environment, template):
- # type: (Environment, str) -> Tuple[str, str, Callable]
+ def get_source(self, environment: Environment, template: str) -> Tuple[str, str, Callable]:
loaders = self.loaders
# exclamation mark starts search from theme
if template.startswith('!'):
diff --git a/sphinx/locale/__init__.py b/sphinx/locale/__init__.py
index 2ef565236..df4f4f51d 100644
--- a/sphinx/locale/__init__.py
+++ b/sphinx/locale/__init__.py
@@ -12,10 +12,9 @@ import gettext
import locale
from collections import UserString, defaultdict
from gettext import NullTranslations
+from typing import Any, Callable, Dict, Iterable, List, Tuple, Union
-if False:
- # For type annotation
- from typing import Any, Callable, Dict, Iterable, List, Tuple, Union # NOQA
+from sphinx.deprecation import RemovedInSphinx30Warning
class _TranslationProxy(UserString):
@@ -32,32 +31,27 @@ class _TranslationProxy(UserString):
"""
__slots__ = ('_func', '_args')
- def __new__(cls, func, *args): # type: ignore
- # type: (Callable, str) -> object
+ def __new__(cls, func: Callable, *args: str) -> object: # type: ignore
if not args:
# not called with "function" and "arguments", but a plain string
return str(func)
return object.__new__(cls)
- def __getnewargs__(self):
- # type: () -> Tuple[str]
+ def __getnewargs__(self) -> Tuple[str]:
return (self._func,) + self._args # type: ignore
- def __init__(self, func, *args):
- # type: (Callable, str) -> None
+ def __init__(self, func: Callable, *args: str) -> None:
self._func = func
self._args = args
@property
- def data(self): # type: ignore
- # type: () -> str
+ def data(self) -> str: # type: ignore
return self._func(*self._args)
# replace function from UserString; it instantiates a self.__class__
# for the encoding result
- def encode(self, encoding=None, errors=None): # type: ignore
- # type: (str, str) -> bytes
+ def encode(self, encoding: str = None, errors: str = None) -> bytes: # type: ignore
if encoding:
if errors:
return self.data.encode(encoding, errors)
@@ -66,58 +60,45 @@ class _TranslationProxy(UserString):
else:
return self.data.encode()
- def __dir__(self):
- # type: () -> List[str]
+ def __dir__(self) -> List[str]:
return dir(str)
- def __str__(self):
- # type: () -> str
+ def __str__(self) -> str:
return str(self.data)
- def __add__(self, other): # type: ignore
- # type: (str) -> str
+ def __add__(self, other: str) -> str: # type: ignore
return self.data + other
- def __radd__(self, other):
- # type: (str) -> str
+ def __radd__(self, other: str) -> str:
return other + self.data
- def __mod__(self, other): # type: ignore
- # type: (str) -> str
+ def __mod__(self, other: str) -> str: # type: ignore
return self.data % other
- def __rmod__(self, other):
- # type: (str) -> str
+ def __rmod__(self, other: str) -> str:
return other % self.data
- def __mul__(self, other): # type: ignore
- # type: (Any) -> str
+ def __mul__(self, other: Any) -> str: # type: ignore
return self.data * other
- def __rmul__(self, other):
- # type: (Any) -> str
+ def __rmul__(self, other: Any) -> str:
return other * self.data
- def __getattr__(self, name):
- # type: (str) -> Any
+ def __getattr__(self, name: str) -> Any:
if name == '__members__':
return self.__dir__()
return getattr(self.data, name)
- def __getstate__(self):
- # type: () -> Tuple[Callable, Tuple[str, ...]]
+ def __getstate__(self) -> Tuple[Callable, Tuple[str, ...]]:
return self._func, self._args
- def __setstate__(self, tup):
- # type: (Tuple[Callable, Tuple[str]]) -> None
+ def __setstate__(self, tup: Tuple[Callable, Tuple[str]]) -> None:
self._func, self._args = tup
- def __copy__(self):
- # type: () -> _TranslationProxy
+ def __copy__(self) -> "_TranslationProxy":
return self
- def __repr__(self):
- # type: () -> str
+ def __repr__(self) -> str:
try:
return 'i' + repr(str(self.data))
except Exception:
@@ -127,8 +108,8 @@ class _TranslationProxy(UserString):
translators = defaultdict(NullTranslations) # type: Dict[Tuple[str, str], NullTranslations]
-def init(locale_dirs, language, catalog='sphinx', namespace='general'):
- # type: (List[str], str, str, str) -> Tuple[NullTranslations, bool]
+def init(locale_dirs: List[str], language: str,
+ catalog: str = 'sphinx', namespace: str = 'general') -> Tuple[NullTranslations, bool]:
"""Look for message catalogs in `locale_dirs` and *ensure* that there is at
least a NullTranslations catalog set in `translators`. If called multiple
times or if several ``.mo`` files are found, their contents are merged
@@ -167,8 +148,7 @@ def init(locale_dirs, language, catalog='sphinx', namespace='general'):
return translator, has_translation
-def setlocale(category, value=None):
- # type: (int, Union[str, Iterable[str]]) -> None
+def setlocale(category: int, value: Union[str, Iterable[str]] = None) -> None:
"""Update locale settings.
    This never raises an exception, even if the update fails.
@@ -188,8 +168,7 @@ def setlocale(category, value=None):
pass
-def init_console(locale_dir, catalog):
- # type: (str, str) -> Tuple[NullTranslations, bool]
+def init_console(locale_dir: str, catalog: str) -> Tuple[NullTranslations, bool]:
"""Initialize locale for console.
.. versionadded:: 1.8
@@ -204,18 +183,15 @@ def init_console(locale_dir, catalog):
return init([locale_dir], language, catalog, 'console')
-def get_translator(catalog='sphinx', namespace='general'):
- # type: (str, str) -> NullTranslations
+def get_translator(catalog: str = 'sphinx', namespace: str = 'general') -> NullTranslations:
return translators[(namespace, catalog)]
-def is_translator_registered(catalog='sphinx', namespace='general'):
- # type: (str, str) -> bool
+def is_translator_registered(catalog: str = 'sphinx', namespace: str = 'general') -> bool:
return (namespace, catalog) in translators
-def _lazy_translate(catalog, namespace, message):
- # type: (str, str, str) -> str
+def _lazy_translate(catalog: str, namespace: str, message: str) -> str:
"""Used instead of _ when creating TranslationProxy, because _ is
not bound yet at that time.
"""
@@ -248,8 +224,7 @@ def get_translation(catalog, namespace='general'):
.. versionadded:: 1.8
"""
- def gettext(message, *args):
- # type: (str, *Any) -> str
+ def gettext(message: str, *args) -> str:
if not is_translator_registered(catalog, namespace):
# not initialized yet
return _TranslationProxy(_lazy_translate, catalog, namespace, message) # type: ignore # NOQA
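This is the machinery behind the documented extension i18n pattern: get_translation() returns a gettext that falls back to a lazy _TranslationProxy until init() has loaded a catalog. A minimal sketch (the catalog name is a placeholder):

from sphinx.locale import get_translation

MESSAGE_CATALOG_NAME = 'myextension'      # placeholder catalog name
_ = get_translation(MESSAGE_CATALOG_NAME)

label = _('Hello, world')                 # safe at import time; resolves lazily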
diff --git a/sphinx/parsers.py b/sphinx/parsers.py
index f228fb31b..6c09d65b2 100644
--- a/sphinx/parsers.py
+++ b/sphinx/parsers.py
@@ -8,8 +8,11 @@
:license: BSD, see LICENSE for details.
"""
+from typing import Any, Dict, List, Union
+
import docutils.parsers
import docutils.parsers.rst
+from docutils import nodes
from docutils.parsers.rst import states
from docutils.statemachine import StringList
from docutils.transforms.universal import SmartQuotes
@@ -18,11 +21,9 @@ from sphinx.util.rst import append_epilog, prepend_prolog
if False:
# For type annotation
- from typing import Any, Dict, List, Union # NOQA
- from typing import Type # for python3.5.1
- from docutils import nodes # NOQA
from docutils.transforms import Transform # NOQA
- from sphinx.application import Sphinx # NOQA
+ from typing import Type # NOQA # for python3.5.1
+ from sphinx.application import Sphinx
class Parser(docutils.parsers.Parser):
@@ -48,8 +49,7 @@ class Parser(docutils.parsers.Parser):
    ``warn()`` and ``info()`` are deprecated. Use :mod:`sphinx.util.logging` instead.
"""
- def set_application(self, app):
- # type: (Sphinx) -> None
+ def set_application(self, app: "Sphinx") -> None:
"""set_application will be called from Sphinx to set app and other instance variables
:param sphinx.application.Sphinx app: Sphinx application object
@@ -62,8 +62,7 @@ class Parser(docutils.parsers.Parser):
class RSTParser(docutils.parsers.rst.Parser, Parser):
"""A reST parser for Sphinx."""
- def get_transforms(self):
- # type: () -> List[Type[Transform]]
+ def get_transforms(self) -> List["Type[Transform]"]:
"""Sphinx's reST parser replaces a transform class for smart-quotes by own's
refs: sphinx.io.SphinxStandaloneReader
@@ -72,8 +71,7 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
transforms.remove(SmartQuotes)
return transforms
- def parse(self, inputstring, document):
- # type: (Union[str, StringList], nodes.document) -> None
+ def parse(self, inputstring: Union[str, StringList], document: nodes.document) -> None:
"""Parse text and generate a document tree."""
self.setup_parse(inputstring, document) # type: ignore
self.statemachine = states.RSTStateMachine(
@@ -95,15 +93,13 @@ class RSTParser(docutils.parsers.rst.Parser, Parser):
self.statemachine.run(inputlines, document, inliner=self.inliner)
self.finish_parse()
- def decorate(self, content):
- # type: (StringList) -> None
+ def decorate(self, content: StringList) -> None:
"""Preprocess reST content before parsing."""
prepend_prolog(content, self.config.rst_prolog)
append_epilog(content, self.config.rst_epilog)
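decorate() is where rst_prolog and rst_epilog from conf.py get spliced around every source file before parsing. An example conf.py fragment exercising both (values are illustrative):

# conf.py -- example values only
rst_prolog = """
.. |product| replace:: MyProduct
"""

rst_epilog = """
.. _issue tracker: https://example.com/issues
"""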
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.add_source_parser(RSTParser)
return {
diff --git a/sphinx/registry.py b/sphinx/registry.py
index 51c240b0e..d2f9da3a1 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -11,36 +11,37 @@
import traceback
from importlib import import_module
from types import MethodType
+from typing import Any, Callable, Dict, Iterator, List, Tuple, Union
+from docutils import nodes
+from docutils.io import Input
+from docutils.nodes import Element, Node, TextElement
+from docutils.parsers import Parser
from docutils.parsers.rst import Directive
+from docutils.transforms import Transform
from pkg_resources import iter_entry_points
-from sphinx.domains import ObjType
+from sphinx.builders import Builder
+from sphinx.config import Config
+from sphinx.deprecation import RemovedInSphinx30Warning
+from sphinx.domains import Domain, Index, ObjType
from sphinx.domains.std import GenericObject, Target
+from sphinx.environment import BuildEnvironment
from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError
from sphinx.extension import Extension
+from sphinx.io import SphinxFileInput
from sphinx.locale import __
from sphinx.parsers import Parser as SphinxParser
from sphinx.roles import XRefRole
from sphinx.util import logging
from sphinx.util.logging import prefixed_warnings
+from sphinx.util.typing import RoleFunction, TitleGetter
if False:
# For type annotation
- from typing import Any, Callable, Dict, Iterator, List, Tuple, Union # NOQA
from typing import Type # for python3.5.1
- from docutils import nodes # NOQA
- from docutils.io import Input # NOQA
- from docutils.parsers import Parser # NOQA
- from docutils.transforms import Transform # NOQA
- from sphinx.application import Sphinx # NOQA
- from sphinx.builders import Builder # NOQA
- from sphinx.config import Config # NOQA
- from sphinx.domains import Domain, Index # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
- from sphinx.ext.autodoc import Documenter # NOQA
- from sphinx.io import SphinxFileInput # NOQA
- from sphinx.util.typing import RoleFunction, TitleGetter # NOQA
+ from sphinx.application import Sphinx
+ from sphinx.ext.autodoc import Documenter
logger = logging.getLogger(__name__)
@@ -52,8 +53,7 @@ EXTENSION_BLACKLIST = {
class SphinxComponentRegistry:
- def __init__(self):
- # type: () -> None
+ def __init__(self) -> None:
#: special attrgetter for autodoc; class object -> attrgetter
self.autodoc_attrgettrs = {} # type: Dict[Type, Callable[[Any, str, Any], Any]]
@@ -87,7 +87,7 @@ class SphinxComponentRegistry:
#: additional enumerable nodes
#: a dict of node class -> tuple of figtype and title_getter function
- self.enumerable_nodes = {} # type: Dict[Type[nodes.Node], Tuple[str, TitleGetter]]
+ self.enumerable_nodes = {} # type: Dict[Type[Node], Tuple[str, TitleGetter]]
#: HTML inline and block math renderers
#: a dict of name -> tuple of visit function and depart function
@@ -122,8 +122,7 @@ class SphinxComponentRegistry:
#: additional transforms; list of transforms
self.transforms = [] # type: List[Type[Transform]]
- def add_builder(self, builder, override=False):
- # type: (Type[Builder], bool) -> None
+ def add_builder(self, builder: "Type[Builder]", override: bool = False) -> None:
logger.debug('[app] adding builder: %r', builder)
if not hasattr(builder, 'name'):
raise ExtensionError(__('Builder class %s has no "name" attribute') % builder)
@@ -132,8 +131,7 @@ class SphinxComponentRegistry:
(builder.name, self.builders[builder.name].__module__))
self.builders[builder.name] = builder
- def preload_builder(self, app, name):
- # type: (Sphinx, str) -> None
+ def preload_builder(self, app: "Sphinx", name: str) -> None:
if name is None:
return
@@ -147,26 +145,22 @@ class SphinxComponentRegistry:
self.load_extension(app, entry_point.module_name)
- def create_builder(self, app, name):
- # type: (Sphinx, str) -> Builder
+ def create_builder(self, app: "Sphinx", name: str) -> Builder:
if name not in self.builders:
raise SphinxError(__('Builder name %s not registered') % name)
return self.builders[name](app)
- def add_domain(self, domain, override=False):
- # type: (Type[Domain], bool) -> None
+ def add_domain(self, domain: "Type[Domain]", override: bool = False) -> None:
logger.debug('[app] adding domain: %r', domain)
if domain.name in self.domains and not override:
raise ExtensionError(__('domain %s already registered') % domain.name)
self.domains[domain.name] = domain
- def has_domain(self, domain):
- # type: (str) -> bool
+ def has_domain(self, domain: str) -> bool:
return domain in self.domains
- def create_domains(self, env):
- # type: (BuildEnvironment) -> Iterator[Domain]
+ def create_domains(self, env: BuildEnvironment) -> Iterator[Domain]:
for DomainClass in self.domains.values():
domain = DomainClass(env)
@@ -179,8 +173,8 @@ class SphinxComponentRegistry:
yield domain
- def add_directive_to_domain(self, domain, name, cls, override=False):
- # type: (str, str, Type[Directive], bool) -> None
+ def add_directive_to_domain(self, domain: str, name: str,
+ cls: "Type[Directive]", override: bool = False) -> None:
logger.debug('[app] adding directive to domain: %r', (domain, name, cls))
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)
@@ -191,8 +185,9 @@ class SphinxComponentRegistry:
(name, domain))
directives[name] = cls
- def add_role_to_domain(self, domain, name, role, override=False):
- # type: (str, str, Union[RoleFunction, XRefRole], bool) -> None
+ def add_role_to_domain(self, domain: str, name: str,
+ role: Union[RoleFunction, XRefRole], override: bool = False
+ ) -> None:
logger.debug('[app] adding role to domain: %r', (domain, name, role))
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)
@@ -202,8 +197,8 @@ class SphinxComponentRegistry:
(name, domain))
roles[name] = role
- def add_index_to_domain(self, domain, index, override=False):
- # type: (str, Type[Index], bool) -> None
+ def add_index_to_domain(self, domain: str, index: "Type[Index]",
+ override: bool = False) -> None:
logger.debug('[app] adding index to domain: %r', (domain, index))
if domain not in self.domains:
raise ExtensionError(__('domain %s not yet registered') % domain)
@@ -213,10 +208,10 @@ class SphinxComponentRegistry:
(index.name, domain))
indices.append(index)
- def add_object_type(self, directivename, rolename, indextemplate='',
- parse_node=None, ref_nodeclass=None, objname='',
- doc_field_types=[], override=False):
- # type: (str, str, str, Callable, Type[nodes.TextElement], str, List, bool) -> None
+ def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
+ parse_node: Callable = None, ref_nodeclass: "Type[TextElement]" = None,
+ objname: str = '', doc_field_types: List = [], override: bool = False
+ ) -> None:
logger.debug('[app] adding object type: %r',
(directivename, rolename, indextemplate, parse_node,
ref_nodeclass, objname, doc_field_types))
@@ -237,9 +232,9 @@ class SphinxComponentRegistry:
directivename)
object_types[directivename] = ObjType(objname or directivename, rolename)
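add_object_type() is the registry half of the public Sphinx.add_object_type() API, which creates a paired directive and cross-reference role on the std domain. A typical extension-side call (directive and role names are illustrative):

def setup(app):
    # adds a 'config-value' directive and a matching 'confval' role
    app.add_object_type('config-value', 'confval',
                        indextemplate='pair: %s; configuration value')
    return {'version': '0.1'}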
- def add_crossref_type(self, directivename, rolename, indextemplate='',
- ref_nodeclass=None, objname='', override=False):
- # type: (str, str, str, Type[nodes.TextElement], str, bool) -> None
+ def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
+ ref_nodeclass: "Type[TextElement]" = None, objname: str = '',
+ override: bool = False) -> None:
logger.debug('[app] adding crossref type: %r',
(directivename, rolename, indextemplate, ref_nodeclass, objname))
@@ -257,17 +252,16 @@ class SphinxComponentRegistry:
directivename)
object_types[directivename] = ObjType(objname or directivename, rolename)
- def add_source_suffix(self, suffix, filetype, override=False):
- # type: (str, str, bool) -> None
+ def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
logger.debug('[app] adding source_suffix: %r, %r', suffix, filetype)
if suffix in self.source_suffix and not override:
raise ExtensionError(__('source_suffix %r is already registered') % suffix)
else:
self.source_suffix[suffix] = filetype
- def add_source_parser(self, parser, **kwargs):
- # type: (Type[Parser], bool) -> None
+ def add_source_parser(self, parser: "Type[Parser]", **kwargs) -> None:
logger.debug('[app] adding search source_parser: %r', parser)
+
# create a map from filetype to parser
for filetype in parser.supported:
if filetype in self.source_parsers and not kwargs.get('override'):
@@ -276,27 +270,23 @@ class SphinxComponentRegistry:
else:
self.source_parsers[filetype] = parser
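add_source_suffix() and add_source_parser() are how an extension teaches Sphinx a new input format; the public Sphinx.add_source_suffix()/add_source_parser() methods delegate here. A hedged sketch with a made-up parser class:

from docutils import nodes
from sphinx.parsers import Parser

class MyFormatParser(Parser):              # hypothetical parser
    supported = ('myformat',)

    def parse(self, inputstring, document):
        # toy behaviour: put the whole file into a single paragraph
        document += nodes.paragraph(text=str(inputstring))

def setup(app):
    app.add_source_suffix('.my', 'myformat')
    app.add_source_parser(MyFormatParser)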
- def get_source_parser(self, filetype):
- # type: (str) -> Type[Parser]
+ def get_source_parser(self, filetype: str) -> "Type[Parser]":
try:
return self.source_parsers[filetype]
except KeyError:
raise SphinxError(__('Source parser for %s not registered') % filetype)
- def get_source_parsers(self):
- # type: () -> Dict[str, Type[Parser]]
+ def get_source_parsers(self) -> Dict[str, "Type[Parser]"]:
return self.source_parsers
- def create_source_parser(self, app, filename):
- # type: (Sphinx, str) -> Parser
+ def create_source_parser(self, app: "Sphinx", filename: str) -> Parser:
parser_class = self.get_source_parser(filename)
parser = parser_class()
if isinstance(parser, SphinxParser):
parser.set_application(app)
return parser
- def get_source_input(self, filetype):
- # type: (str) -> Type[Input]
+ def get_source_input(self, filetype: str) -> "Type[Input]":
try:
return self.source_inputs[filetype]
except KeyError:
@@ -306,15 +296,14 @@ class SphinxComponentRegistry:
except KeyError:
return None
- def add_translator(self, name, translator, override=False):
- # type: (str, Type[nodes.NodeVisitor], bool) -> None
+ def add_translator(self, name: str, translator: "Type[nodes.NodeVisitor]",
+ override: bool = False) -> None:
logger.debug('[app] Change of translator for the %s builder.' % name)
if name in self.translators and not override:
raise ExtensionError(__('Translator for %r already exists') % name)
self.translators[name] = translator
- def add_translation_handlers(self, node, **kwargs):
- # type: (Type[nodes.Element], Any) -> None
+ def add_translation_handlers(self, node: "Type[Element]", **kwargs) -> None:
logger.debug('[app] adding translation_handlers: %r, %r', node, kwargs)
for builder_name, handlers in kwargs.items():
translation_handlers = self.translation_handlers.setdefault(builder_name, {})
@@ -325,13 +314,11 @@ class SphinxComponentRegistry:
raise ExtensionError(__('kwargs for add_node() must be a (visit, depart) '
                                    'function tuple: %r=%r') % (builder_name, handlers))
- def get_translator_class(self, builder):
- # type: (Builder) -> Type[nodes.NodeVisitor]
+ def get_translator_class(self, builder: Builder) -> "Type[nodes.NodeVisitor]":
return self.translators.get(builder.name,
builder.default_translator_class)
- def create_translator(self, builder, *args):
- # type: (Builder, Any) -> nodes.NodeVisitor
+ def create_translator(self, builder: Builder, *args) -> nodes.NodeVisitor:
translator_class = self.get_translator_class(builder)
assert translator_class, "translator not found for %s" % builder.name
translator = translator_class(*args)
@@ -349,54 +336,48 @@ class SphinxComponentRegistry:
return translator
- def add_transform(self, transform):
- # type: (Type[Transform]) -> None
+ def add_transform(self, transform: "Type[Transform]") -> None:
logger.debug('[app] adding transform: %r', transform)
self.transforms.append(transform)
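add_transform() queues a docutils transform to run on every doctree after reading; get_transforms() below is what SphinxStandaloneReader picks up. A hedged sketch of a custom transform (name and behaviour invented):

from docutils import nodes
from sphinx.transforms import SphinxTransform

class UppercaseTitles(SphinxTransform):    # hypothetical
    default_priority = 400

    def apply(self):
        for title in self.document.traverse(nodes.title):
            # replace the title's children with an upper-cased text node
            title[:] = [nodes.Text(title.astext().upper())]

def setup(app):
    app.add_transform(UppercaseTitles)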
- def get_transforms(self):
- # type: () -> List[Type[Transform]]
+ def get_transforms(self) -> List["Type[Transform]"]:
return self.transforms
- def add_post_transform(self, transform):
- # type: (Type[Transform]) -> None
+ def add_post_transform(self, transform: "Type[Transform]") -> None:
logger.debug('[app] adding post transform: %r', transform)
self.post_transforms.append(transform)
- def get_post_transforms(self):
- # type: () -> List[Type[Transform]]
+ def get_post_transforms(self) -> List["Type[Transform]"]:
return self.post_transforms
- def add_documenter(self, objtype, documenter):
- # type: (str, Type[Documenter]) -> None
+ def add_documenter(self, objtype: str, documenter: "Type[Documenter]") -> None:
self.documenters[objtype] = documenter
- def add_autodoc_attrgetter(self, typ, attrgetter):
- # type: (Type, Callable[[Any, str, Any], Any]) -> None
+ def add_autodoc_attrgetter(self, typ: "Type",
+ attrgetter: Callable[[Any, str, Any], Any]) -> None:
self.autodoc_attrgettrs[typ] = attrgetter
def add_css_files(self, filename, **attributes):
self.css_files.append((filename, attributes))
- def add_js_file(self, filename, **attributes):
- # type: (str, **str) -> None
+ def add_js_file(self, filename: str, **attributes: str) -> None:
logger.debug('[app] adding js_file: %r, %r', filename, attributes)
self.js_files.append((filename, attributes))
- def add_latex_package(self, name, options):
- # type: (str, str) -> None
+ def add_latex_package(self, name: str, options: str) -> None:
logger.debug('[app] adding latex package: %r', name)
self.latex_packages.append((name, options))
- def add_enumerable_node(self, node, figtype, title_getter=None, override=False):
- # type: (Type[nodes.Node], str, TitleGetter, bool) -> None
+ def add_enumerable_node(self, node: "Type[Node]", figtype: str,
+ title_getter: TitleGetter = None, override: bool = False) -> None:
logger.debug('[app] adding enumerable node: (%r, %r, %r)', node, figtype, title_getter)
if node in self.enumerable_nodes and not override:
raise ExtensionError(__('enumerable_node %r already registered') % node)
self.enumerable_nodes[node] = (figtype, title_getter)
- def add_html_math_renderer(self, name, inline_renderers, block_renderers):
- # type: (str, Tuple[Callable, Callable], Tuple[Callable, Callable]) -> None
+ def add_html_math_renderer(self, name: str,
+ inline_renderers: Tuple[Callable, Callable],
+ block_renderers: Tuple[Callable, Callable]) -> None:
logger.debug('[app] adding html_math_renderer: %s, %r, %r',
name, inline_renderers, block_renderers)
if name in self.html_inline_math_renderers:
@@ -405,8 +386,7 @@ class SphinxComponentRegistry:
self.html_inline_math_renderers[name] = inline_renderers
self.html_block_math_renderers[name] = block_renderers
- def load_extension(self, app, extname):
- # type: (Sphinx, str) -> None
+ def load_extension(self, app: "Sphinx", extname: str) -> None:
"""Load a Sphinx extension."""
        if extname in app.extensions:  # already loaded
return
@@ -451,8 +431,7 @@ class SphinxComponentRegistry:
app.extensions[extname] = Extension(extname, mod, **metadata)
- def get_envversion(self, app):
- # type: (Sphinx) -> Dict[str, str]
+ def get_envversion(self, app: "Sphinx") -> Dict[str, str]:
from sphinx.environment import ENV_VERSION
envversion = {ext.name: ext.metadata['env_version'] for ext in app.extensions.values()
if ext.metadata.get('env_version')}
@@ -460,8 +439,7 @@ class SphinxComponentRegistry:
return envversion
-def merge_source_suffix(app, config):
- # type: (Sphinx, Config) -> None
+def merge_source_suffix(app: "Sphinx", config: Config) -> None:
"""Merge source_suffix which specified by user and added by extensions."""
for suffix, filetype in app.registry.source_suffix.items():
if suffix not in app.config.source_suffix:
@@ -475,8 +453,7 @@ def merge_source_suffix(app, config):
app.registry.source_suffix = app.config.source_suffix
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.connect('config-inited', merge_source_suffix)
return {
diff --git a/sphinx/roles.py b/sphinx/roles.py
index 381b9b6f0..5757183ce 100644
--- a/sphinx/roles.py
+++ b/sphinx/roles.py
@@ -10,25 +10,27 @@
import re
import warnings
+from typing import Any, Dict, List, Tuple
+from typing import Type # for python3.5.1
from docutils import nodes, utils
+from docutils.nodes import Element, Node, TextElement, system_message
+from docutils.parsers.rst.states import Inliner
from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx40Warning
from sphinx.locale import _
from sphinx.util import ws_re
from sphinx.util.docutils import ReferenceRole, SphinxRole
-from sphinx.util.nodes import split_explicit_title, process_index_entry, \
- set_role_source_info
+from sphinx.util.nodes import (
+ split_explicit_title, process_index_entry, set_role_source_info
+)
+from sphinx.util.typing import RoleFunction
if False:
# For type annotation
- from typing import Any, Dict, List, Tuple # NOQA
- from typing import Type # for python3.5.1
- from docutils.parsers.rst.states import Inliner # NOQA
- from sphinx.application import Sphinx # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
- from sphinx.util.typing import RoleFunction # NOQA
+ from sphinx.application import Sphinx
+ from sphinx.environment import BuildEnvironment
generic_docroles = {
@@ -71,12 +73,12 @@ class XRefRole(ReferenceRole):
* Subclassing and overwriting `process_link()` and/or `result_nodes()`.
"""
- nodeclass = addnodes.pending_xref # type: Type[nodes.Element]
- innernodeclass = nodes.literal # type: Type[nodes.TextElement]
+ nodeclass = addnodes.pending_xref # type: Type[Element]
+ innernodeclass = nodes.literal # type: Type[TextElement]
- def __init__(self, fix_parens=False, lowercase=False,
- nodeclass=None, innernodeclass=None, warn_dangling=False):
- # type: (bool, bool, Type[nodes.Element], Type[nodes.TextElement], bool) -> None
+ def __init__(self, fix_parens: bool = False, lowercase: bool = False,
+ nodeclass: Type[Element] = None, innernodeclass: Type[TextElement] = None,
+ warn_dangling: bool = False) -> None:
self.fix_parens = fix_parens
self.lowercase = lowercase
self.warn_dangling = warn_dangling
@@ -87,8 +89,8 @@ class XRefRole(ReferenceRole):
super().__init__()
- def _fix_parens(self, env, has_explicit_title, title, target):
- # type: (BuildEnvironment, bool, str, str) -> Tuple[str, str]
+ def _fix_parens(self, env: "BuildEnvironment", has_explicit_title: bool, title: str,
+ target: str) -> Tuple[str, str]:
warnings.warn('XRefRole._fix_parens() is deprecated.',
RemovedInSphinx40Warning, stacklevel=2)
if not has_explicit_title:
@@ -103,8 +105,7 @@ class XRefRole(ReferenceRole):
target = target[:-2]
return title, target
- def update_title_and_target(self, title, target):
- # type: (str, str) -> Tuple[str, str]
+ def update_title_and_target(self, title: str, target: str) -> Tuple[str, str]:
if not self.has_explicit_title:
if title.endswith('()'):
# remove parentheses
@@ -117,8 +118,7 @@ class XRefRole(ReferenceRole):
target = target[:-2]
return title, target
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
if ':' not in self.name:
self.refdomain, self.reftype = '', self.name
self.classes = ['xref', self.reftype]
@@ -132,8 +132,7 @@ class XRefRole(ReferenceRole):
else:
return self.create_xref_node()
- def create_non_xref_node(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def create_non_xref_node(self) -> Tuple[List[Node], List[system_message]]:
text = utils.unescape(self.text[1:])
if self.fix_parens:
self.has_explicit_title = False # treat as implicit
@@ -142,8 +141,7 @@ class XRefRole(ReferenceRole):
node = self.innernodeclass(self.rawtext, text, classes=self.classes)
return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)
- def create_xref_node(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def create_xref_node(self) -> Tuple[List[Node], List[system_message]]:
target = self.target
title = self.title
if self.lowercase:
@@ -170,8 +168,8 @@ class XRefRole(ReferenceRole):
# methods that can be overwritten
- def process_link(self, env, refnode, has_explicit_title, title, target):
- # type: (BuildEnvironment, nodes.Element, bool, str, str) -> Tuple[str, str]
+ def process_link(self, env: "BuildEnvironment", refnode: Element, has_explicit_title: bool,
+ title: str, target: str) -> Tuple[str, str]:
"""Called after parsing title and target text, and creating the
reference node (given in *refnode*). This method can alter the
reference node and must return a new (or the same) ``(title, target)``
@@ -179,8 +177,8 @@ class XRefRole(ReferenceRole):
"""
return title, ws_re.sub(' ', target)
- def result_nodes(self, document, env, node, is_ref):
- # type: (nodes.document, BuildEnvironment, nodes.Element, bool) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+ def result_nodes(self, document: nodes.document, env: "BuildEnvironment", node: Element,
+ is_ref: bool) -> Tuple[List[Node], List[system_message]]:
"""Called before returning the finished nodes. *node* is the reference
node if one was created (*is_ref* is then true), else the content node.
This method can add other nodes and must return a ``(nodes, messages)``
@@ -190,16 +188,17 @@ class XRefRole(ReferenceRole):
class AnyXRefRole(XRefRole):
- def process_link(self, env, refnode, has_explicit_title, title, target):
- # type: (BuildEnvironment, nodes.Element, bool, str, str) -> Tuple[str, str]
+ def process_link(self, env: "BuildEnvironment", refnode: Element, has_explicit_title: bool,
+ title: str, target: str) -> Tuple[str, str]:
result = super().process_link(env, refnode, has_explicit_title, title, target)
# add all possible context info (i.e. std:program, py:module etc.)
refnode.attributes.update(env.ref_context)
return result
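AnyXRefRole above is the pattern to copy: subclass XRefRole and override process_link() (and/or result_nodes()). A hedged sketch of a role that normalises its targets (class name invented):

from sphinx.roles import XRefRole

class CaseInsensitiveXRefRole(XRefRole):   # hypothetical
    def process_link(self, env, refnode, has_explicit_title, title, target):
        title, target = super().process_link(
            env, refnode, has_explicit_title, title, target)
        return title, target.lower()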
-def indexmarkup_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+def indexmarkup_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+ options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
"""Role for PEP/RFC references that generate an index entry."""
warnings.warn('indexmarkup_role() is deprecated. Please use PEP or RFC class instead.',
RemovedInSphinx40Warning, stacklevel=2)
@@ -267,8 +266,7 @@ def indexmarkup_role(typ, rawtext, text, lineno, inliner, options={}, content=[]
class PEP(ReferenceRole):
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
target_id = 'index-%s' % self.env.new_serialno('index')
entries = [('single', _('Python Enhancement Proposals; PEP %s') % self.target,
target_id, '', None)]
@@ -293,8 +291,7 @@ class PEP(ReferenceRole):
return [index, target, reference], []
- def build_uri(self):
- # type: () -> str
+ def build_uri(self) -> str:
base_url = self.inliner.document.settings.pep_base_url
ret = self.target.split('#', 1)
if len(ret) == 2:
@@ -304,8 +301,7 @@ class PEP(ReferenceRole):
class RFC(ReferenceRole):
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+ def run(self) -> Tuple[List[Node], List[system_message]]:
target_id = 'index-%s' % self.env.new_serialno('index')
entries = [('single', 'RFC; RFC %s' % self.target, target_id, '', None)]
@@ -329,8 +325,7 @@ class RFC(ReferenceRole):
return [index, target, reference], []
- def build_uri(self):
- # type: () -> str
+ def build_uri(self) -> str:
base_url = self.inliner.document.settings.rfc_base_url
ret = self.target.split('#', 1)
if len(ret) == 2:
@@ -342,8 +337,9 @@ class RFC(ReferenceRole):
_amp_re = re.compile(r'(?<!&)&(?![&\s])')
-def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+def menusel_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+ options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
warnings.warn('menusel_role() is deprecated. '
'Please use MenuSelection or GUILabel class instead.',
RemovedInSphinx40Warning, stacklevel=2)
@@ -382,8 +378,7 @@ def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
class GUILabel(SphinxRole):
amp_re = re.compile(r'(?<!&)&(?![&\s])')
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
node = nodes.inline(rawtext=self.rawtext, classes=[self.name])
spans = self.amp_re.split(self.text)
node += nodes.Text(spans.pop(0))
@@ -399,8 +394,7 @@ class GUILabel(SphinxRole):
class MenuSelection(GUILabel):
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
self.text = self.text.replace('-->', '\N{TRIANGULAR BULLET}')
return super().run()
@@ -409,9 +403,9 @@ _litvar_re = re.compile('{([^}]+)}')
parens_re = re.compile(r'(\\*{|\\*})')
-def emph_literal_role(typ, rawtext, text, lineno, inliner,
- options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+def emph_literal_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+ options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
warnings.warn('emph_literal_role() is deprecated. '
'Please use EmphasizedLiteral class instead.',
RemovedInSphinx40Warning, stacklevel=2)
@@ -465,17 +459,15 @@ def emph_literal_role(typ, rawtext, text, lineno, inliner,
class EmphasizedLiteral(SphinxRole):
parens_re = re.compile(r'(\\\\|\\{|\\}|{|})')
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
children = self.parse(self.text)
node = nodes.literal(self.rawtext, '', *children,
role=self.name.lower(), classes=[self.name])
return [node], []
- def parse(self, text):
- # type: (str) -> List[nodes.Node]
- result = [] # type: List[nodes.Node]
+ def parse(self, text: str) -> List[Node]:
+ result = [] # type: List[Node]
stack = ['']
for part in self.parens_re.split(text):
@@ -517,8 +509,9 @@ class EmphasizedLiteral(SphinxRole):
_abbr_re = re.compile(r'\((.*)\)$', re.S)
-def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+def abbr_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+ options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
    warnings.warn('abbr_role() is deprecated. Please use Abbreviation class instead.',
RemovedInSphinx40Warning, stacklevel=2)
text = utils.unescape(text)
@@ -535,8 +528,7 @@ def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
class Abbreviation(SphinxRole):
abbr_re = re.compile(r'\((.*)\)$', re.S)
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
matched = self.abbr_re.search(self.text)
if matched:
text = self.text[:matched.start()].strip()
@@ -547,8 +539,9 @@ class Abbreviation(SphinxRole):
return [nodes.abbreviation(self.rawtext, text, **self.options)], []
-def index_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
- # type: (str, str, str, int, Inliner, Dict, List[str]) -> Tuple[List[nodes.Node], List[nodes.system_message]] # NOQA
+def index_role(typ: str, rawtext: str, text: str, lineno: int, inliner: Inliner,
+ options: Dict = {}, content: List[str] = []
+ ) -> Tuple[List[Node], List[system_message]]:
warnings.warn('index_role() is deprecated. Please use Index class instead.',
RemovedInSphinx40Warning, stacklevel=2)
# create new reference target
@@ -579,8 +572,7 @@ def index_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
class Index(ReferenceRole):
- def run(self):
- # type: () -> Tuple[List[nodes.Node], List[nodes.system_message]]
+ def run(self) -> Tuple[List[Node], List[system_message]]:
target_id = 'index-%s' % self.env.new_serialno('index')
if self.has_explicit_title:
# if an explicit target is given, process it as a full entry
@@ -619,8 +611,7 @@ specific_docroles = {
} # type: Dict[str, RoleFunction]
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
from docutils.parsers.rst import roles
for rolename, nodeclass in generic_docroles.items():
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index 22ba14d40..54a1aad92 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -13,21 +13,21 @@ import re
import warnings
from importlib import import_module
from os import path
+from typing import Any, Dict, IO, Iterable, List, Tuple, Set
from docutils import nodes
+from docutils.nodes import Node
from sphinx import addnodes
from sphinx import package_dir
from sphinx.deprecation import RemovedInSphinx40Warning
+from sphinx.environment import BuildEnvironment
from sphinx.search.jssplitter import splitter_code
from sphinx.util import jsdump, rpartition
if False:
# For type annotation
- from typing import Any, Dict, IO, Iterable, List, Tuple, Set # NOQA
from typing import Type # for python3.5.1
- from docutils import nodes # NOQA
- from sphinx.environment import BuildEnvironment # NOQA
class SearchLanguage:
@@ -69,19 +69,16 @@ var Stemmer = function() {
_word_re = re.compile(r'(?u)\w+')
- def __init__(self, options):
- # type: (Dict) -> None
+ def __init__(self, options: Dict) -> None:
self.options = options
self.init(options)
- def init(self, options):
- # type: (Dict) -> None
+ def init(self, options: Dict) -> None:
"""
Initialize the class with the options the user has given.
"""
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
"""
        This method splits a sentence into words.  The default splitter splits the
        input at white spaces, which should be enough for most languages except CJK
@@ -89,8 +86,7 @@ var Stemmer = function() {
"""
return self._word_re.findall(input)
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
"""
        This method implements the stemming algorithm of the Python version.
@@ -103,8 +99,7 @@ var Stemmer = function() {
"""
return word
- def word_filter(self, word):
- # type: (str) -> bool
+ def word_filter(self, word: str) -> bool:
"""
Return true if the target word should be registered in the search index.
This method is called after stemming.
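split(), stem() and word_filter() are the three hooks a language has to provide; the concrete modules further down (da.py, de.py, ...) are thin Snowball-backed implementations of exactly this interface. A minimal hedged sketch (language code and stopwords are placeholders):

from sphinx.search import SearchLanguage

class SearchExample(SearchLanguage):       # hypothetical language support
    lang = 'xx'
    language_name = 'Example'
    stopwords = {'the', 'a', 'an'}

    def stem(self, word: str) -> str:
        # no real stemming algorithm here, just case folding
        return word.lower()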
@@ -121,8 +116,7 @@ var Stemmer = function() {
from sphinx.search.en import SearchEnglish
-def parse_stop_word(source):
- # type: (str) -> Set[str]
+def parse_stop_word(source: str) -> Set[str]:
"""
    Parse a Snowball-style word list like this:
@@ -166,24 +160,20 @@ class _JavaScriptIndex:
PREFIX = 'Search.setIndex('
SUFFIX = ')'
- def dumps(self, data):
- # type: (Any) -> str
+ def dumps(self, data: Any) -> str:
return self.PREFIX + jsdump.dumps(data) + self.SUFFIX
- def loads(self, s):
- # type: (str) -> Any
+ def loads(self, s: str) -> Any:
data = s[len(self.PREFIX):-len(self.SUFFIX)]
if not data or not s.startswith(self.PREFIX) or not \
s.endswith(self.SUFFIX):
raise ValueError('invalid data')
return jsdump.loads(data)
- def dump(self, data, f):
- # type: (Any, IO) -> None
+ def dump(self, data: Any, f: IO) -> None:
f.write(self.dumps(data))
- def load(self, f):
- # type: (IO) -> Any
+ def load(self, f: IO) -> Any:
return self.loads(f.read())
@@ -195,15 +185,13 @@ class WordCollector(nodes.NodeVisitor):
A special visitor that collects words for the `IndexBuilder`.
"""
- def __init__(self, document, lang):
- # type: (nodes.document, SearchLanguage) -> None
+ def __init__(self, document: nodes.document, lang: SearchLanguage) -> None:
super().__init__(document)
self.found_words = [] # type: List[str]
self.found_title_words = [] # type: List[str]
self.lang = lang
- def is_meta_keywords(self, node, nodetype=None):
- # type: (addnodes.meta, Any) -> bool
+ def is_meta_keywords(self, node: addnodes.meta, nodetype: Any = None) -> bool:
if nodetype is not None:
warnings.warn('"nodetype" argument for WordCollector.is_meta_keywords() '
'is deprecated.', RemovedInSphinx40Warning)
@@ -217,8 +205,7 @@ class WordCollector(nodes.NodeVisitor):
return False
- def dispatch_visit(self, node):
- # type: (nodes.Node) -> None
+ def dispatch_visit(self, node: Node) -> None:
if isinstance(node, nodes.comment):
raise nodes.SkipNode
elif isinstance(node, nodes.raw):
@@ -251,8 +238,7 @@ class IndexBuilder:
'pickle': pickle
}
- def __init__(self, env, lang, options, scoring):
- # type: (BuildEnvironment, str, Dict, str) -> None
+ def __init__(self, env: BuildEnvironment, lang: str, options: Dict, scoring: str) -> None:
self.env = env
self._titles = {} # type: Dict[str, str]
# docname -> title
@@ -292,8 +278,7 @@ class IndexBuilder:
self.js_scorer_code = ''
self.js_splitter_code = splitter_code
- def load(self, stream, format):
- # type: (IO, Any) -> None
+ def load(self, stream: IO, format: Any) -> None:
"""Reconstruct from frozen data."""
if isinstance(format, str):
format = self.formats[format]
@@ -306,8 +291,7 @@ class IndexBuilder:
self._filenames = dict(zip(index2fn, frozen['filenames']))
self._titles = dict(zip(index2fn, frozen['titles']))
- def load_terms(mapping):
- # type: (Dict[str, Any]) -> Dict[str, Set[str]]
+ def load_terms(mapping: Dict[str, Any]) -> Dict[str, Set[str]]:
rv = {}
for k, v in mapping.items():
if isinstance(v, int):
@@ -320,15 +304,14 @@ class IndexBuilder:
self._title_mapping = load_terms(frozen['titleterms'])
# no need to load keywords/objtypes
- def dump(self, stream, format):
- # type: (IO, Any) -> None
+ def dump(self, stream: IO, format: Any) -> None:
"""Dump the frozen index to a stream."""
if isinstance(format, str):
format = self.formats[format]
format.dump(self.freeze(), stream)
- def get_objects(self, fn2index):
- # type: (Dict[str, int]) -> Dict[str, Dict[str, Tuple[int, int, int, str]]]
+ def get_objects(self, fn2index: Dict[str, int]
+ ) -> Dict[str, Dict[str, Tuple[int, int, int, str]]]:
rv = {} # type: Dict[str, Dict[str, Tuple[int, int, int, str]]]
otypes = self._objtypes
onames = self._objnames
@@ -364,8 +347,7 @@ class IndexBuilder:
pdict[name] = (fn2index[docname], typeindex, prio, shortanchor)
return rv
- def get_terms(self, fn2index):
- # type: (Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]
+ def get_terms(self, fn2index: Dict) -> Tuple[Dict[str, List[str]], Dict[str, List[str]]]:
rvs = {}, {} # type: Tuple[Dict[str, List[str]], Dict[str, List[str]]]
for rv, mapping in zip(rvs, (self._mapping, self._title_mapping)):
for k, v in mapping.items():
@@ -377,8 +359,7 @@ class IndexBuilder:
rv[k] = sorted([fn2index[fn] for fn in v if fn in fn2index])
return rvs
- def freeze(self):
- # type: () -> Dict[str, Any]
+ def freeze(self) -> Dict[str, Any]:
"""Create a usable data structure for serializing."""
docnames, titles = zip(*sorted(self._titles.items()))
filenames = [self._filenames.get(docname) for docname in docnames]
@@ -392,12 +373,10 @@ class IndexBuilder:
objects=objects, objtypes=objtypes, objnames=objnames,
titleterms=title_terms, envversion=self.env.version)
- def label(self):
- # type: () -> str
+ def label(self) -> str:
return "%s (code: %s)" % (self.lang.language_name, self.lang.lang)
- def prune(self, docnames):
- # type: (Iterable[str]) -> None
+ def prune(self, docnames: Iterable[str]) -> None:
"""Remove data for all docnames not in the list."""
new_titles = {}
new_filenames = {}
@@ -412,8 +391,7 @@ class IndexBuilder:
for wordnames in self._title_mapping.values():
wordnames.intersection_update(docnames)
- def feed(self, docname, filename, title, doctree):
- # type: (str, str, str, nodes.document) -> None
+ def feed(self, docname: str, filename: str, title: str, doctree: nodes.document) -> None:
"""Feed a doctree to the index."""
self._titles[docname] = title
self._filenames[docname] = filename
@@ -422,8 +400,7 @@ class IndexBuilder:
doctree.walk(visitor)
# memoize self.lang.stem
- def stem(word):
- # type: (str) -> str
+ def stem(word: str) -> str:
try:
return self._stem_cache[word]
except KeyError:
@@ -447,8 +424,7 @@ class IndexBuilder:
if _filter(stemmed_word) and not already_indexed:
self._mapping.setdefault(stemmed_word, set()).add(docname)
- def context_for_searchtool(self):
- # type: () -> Dict[str, Any]
+ def context_for_searchtool(self) -> Dict[str, Any]:
return {
'search_language_stemming_code': self.lang.js_stemmer_code,
'search_language_stop_words': jsdump.dumps(sorted(self.lang.stopwords)),
@@ -456,8 +432,7 @@ class IndexBuilder:
'search_word_splitter_code': self.js_splitter_code,
}
- def get_js_stemmer_rawcode(self):
- # type: () -> str
+ def get_js_stemmer_rawcode(self) -> str:
if self.lang.js_stemmer_rawcode:
return path.join(package_dir, 'search', 'non-minified-js',
self.lang.js_stemmer_rawcode)
diff --git a/sphinx/search/da.py b/sphinx/search/da.py
index 228fdf086..b04679e10 100644
--- a/sphinx/search/da.py
+++ b/sphinx/search/da.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
danish_stopwords = parse_stop_word('''
@@ -128,10 +126,8 @@ class SearchDanish(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = danish_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('danish')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
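The Danish module above sets the pattern every Snowball-backed language below repeats: instantiate a stemmer from the snowballstemmer package and lower-case before stemming. A quick interactive check (output shown is indicative):

import snowballstemmer

stemmer = snowballstemmer.stemmer('danish')
print(stemmer.stemWord('huset'))           # e.g. 'hus'
print(stemmer.stemWords(['huse', 'husene']))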
diff --git a/sphinx/search/de.py b/sphinx/search/de.py
index 5413e0732..ae1827bf9 100644
--- a/sphinx/search/de.py
+++ b/sphinx/search/de.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
german_stopwords = parse_stop_word('''
@@ -311,10 +309,8 @@ class SearchGerman(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = german_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('german')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/en.py b/sphinx/search/en.py
index fa8e1f66b..1fabef78d 100644
--- a/sphinx/search/en.py
+++ b/sphinx/search/en.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
+from typing import Dict
+
from sphinx.search import SearchLanguage
from sphinx.util.stemmer import get_stemmer
-if False:
- # For type annotation
- from typing import Dict # NOQA
-
english_stopwords = set("""
a and are as at
be but by
@@ -220,10 +218,8 @@ class SearchEnglish(SearchLanguage):
js_stemmer_code = js_porter_stemmer
stopwords = english_stopwords
- def init(self, options):
- # type: (Dict) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = get_stemmer()
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stem(word.lower())
diff --git a/sphinx/search/es.py b/sphinx/search/es.py
index c6f0dae9c..1009961c8 100644
--- a/sphinx/search/es.py
+++ b/sphinx/search/es.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
spanish_stopwords = parse_stop_word('''
@@ -371,10 +369,8 @@ class SearchSpanish(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = spanish_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('spanish')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py
index b8ff1d1f8..67bee89fe 100644
--- a/sphinx/search/fi.py
+++ b/sphinx/search/fi.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
finnish_stopwords = parse_stop_word('''
@@ -121,10 +119,8 @@ class SearchFinnish(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = finnish_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('finnish')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py
index 0848843f3..b15271888 100644
--- a/sphinx/search/fr.py
+++ b/sphinx/search/fr.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
french_stopwords = parse_stop_word('''
@@ -207,10 +205,8 @@ class SearchFrench(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = french_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('french')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py
index 973475cb3..085773383 100644
--- a/sphinx/search/hu.py
+++ b/sphinx/search/hu.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
hungarian_stopwords = parse_stop_word('''
@@ -235,10 +233,8 @@ class SearchHungarian(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = hungarian_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('hungarian')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/it.py b/sphinx/search/it.py
index 41039818b..e76cd99dd 100644
--- a/sphinx/search/it.py
+++ b/sphinx/search/it.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
italian_stopwords = parse_stop_word('''
@@ -324,10 +322,8 @@ class SearchItalian(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = italian_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('italian')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/ja.py b/sphinx/search/ja.py
index e1f18209a..1b0a0e865 100644
--- a/sphinx/search/ja.py
+++ b/sphinx/search/ja.py
@@ -19,6 +19,7 @@
import os
import re
import sys
+from typing import Any, Dict, List
try:
import MeCab
@@ -36,21 +37,13 @@ from sphinx.errors import SphinxError, ExtensionError
from sphinx.search import SearchLanguage
from sphinx.util import import_object
-if False:
- # For type annotation
- from typing import Any, Dict, List # NOQA
-
class BaseSplitter:
-
- def __init__(self, options):
- # type: (Dict) -> None
+ def __init__(self, options: Dict) -> None:
self.options = options
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
"""
-
:param str input:
:return:
:rtype: list[str]
@@ -59,8 +52,7 @@ class BaseSplitter:
class MecabSplitter(BaseSplitter):
- def __init__(self, options):
- # type: (Dict) -> None
+ def __init__(self, options: Dict) -> None:
super().__init__(options)
self.ctypes_libmecab = None # type: Any
self.ctypes_mecab = None # type: Any
@@ -70,8 +62,7 @@ class MecabSplitter(BaseSplitter):
self.init_native(options)
self.dict_encode = options.get('dic_enc', 'utf-8')
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
if native_module:
result = self.native.parse(input)
else:
@@ -79,16 +70,14 @@ class MecabSplitter(BaseSplitter):
self.ctypes_mecab, input.encode(self.dict_encode))
return result.split(' ')
- def init_native(self, options):
- # type: (Dict) -> None
+ def init_native(self, options: Dict) -> None:
param = '-Owakati'
dict = options.get('dict')
if dict:
param += ' -d %s' % dict
self.native = MeCab.Tagger(param)
- def init_ctypes(self, options):
- # type: (Dict) -> None
+ def init_ctypes(self, options: Dict) -> None:
import ctypes.util
lib = options.get('lib')
@@ -124,8 +113,7 @@ class MecabSplitter(BaseSplitter):
if self.ctypes_mecab is None:
raise SphinxError('mecab initialization failed')
- def __del__(self):
- # type: () -> None
+ def __del__(self) -> None:
if self.ctypes_libmecab:
self.ctypes_libmecab.mecab_destroy(self.ctypes_mecab)
@@ -133,21 +121,18 @@ MeCabBinder = MecabSplitter # keep backward compatibility until Sphinx-1.6
class JanomeSplitter(BaseSplitter):
- def __init__(self, options):
- # type: (Dict) -> None
+ def __init__(self, options: Dict) -> None:
super().__init__(options)
self.user_dict = options.get('user_dic')
self.user_dict_enc = options.get('user_dic_enc', 'utf8')
self.init_tokenizer()
- def init_tokenizer(self):
- # type: () -> None
+ def init_tokenizer(self) -> None:
if not janome_module:
raise RuntimeError('Janome is not available')
self.tokenizer = janome.tokenizer.Tokenizer(udic=self.user_dict, udic_enc=self.user_dict_enc)
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
result = ' '.join(token.surface for token in self.tokenizer.tokenize(input))
return result.split(' ')
@@ -423,23 +408,20 @@ class DefaultSplitter(BaseSplitter):
'郎': 1082, '1': -270, 'E1': 306, 'ル': -673, 'ン': -496}
# ctype_
- def ctype_(self, char):
- # type: (str) -> str
+ def ctype_(self, char: str) -> str:
for pattern, value in self.patterns_.items():
if pattern.match(char):
return value
return 'O'
# ts_
- def ts_(self, dict, key):
- # type: (Dict[str, int], str) -> int
+ def ts_(self, dict: Dict[str, int], key: str) -> int:
if key in dict:
return dict[key]
return 0
# segment
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
if not input:
return []
@@ -542,8 +524,7 @@ class SearchJapanese(SearchLanguage):
lang = 'ja'
language_name = 'Japanese'
- def init(self, options):
- # type: (Dict) -> None
+ def init(self, options: Dict) -> None:
dotted_path = options.get('type', 'sphinx.search.ja.DefaultSplitter')
try:
self.splitter = import_object(dotted_path)(options)
@@ -551,14 +532,11 @@ class SearchJapanese(SearchLanguage):
raise ExtensionError("Splitter module %r can't be imported" %
dotted_path)
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
return self.splitter.split(input)
- def word_filter(self, stemmed_word):
- # type: (str) -> bool
+ def word_filter(self, stemmed_word: str) -> bool:
return len(stemmed_word) > 1
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return word
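
(Editorial note, not part of the patch.) In the ja.py hunks above, SearchJapanese.init() resolves the splitter from options.get('type', 'sphinx.search.ja.DefaultSplitter') via import_object(), and JanomeSplitter reads 'user_dic' / 'user_dic_enc' from the same dict. A hedged example of how such an options dict is typically supplied from a project's conf.py, assuming the usual html_search_language / html_search_options configuration values and an installed Janome package; the file names are illustrative only:

# conf.py (example configuration, not taken from this commit)
html_search_language = 'ja'
html_search_options = {
    'type': 'sphinx.search.ja.JanomeSplitter',  # dotted path handed to import_object()
    'user_dic': 'userdict.csv',                 # hypothetical Janome user dictionary
    'user_dic_enc': 'utf8',                     # matches options.get('user_dic_enc', 'utf8')
}
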
diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py
index 076c190b2..0e2e2ef23 100644
--- a/sphinx/search/nl.py
+++ b/sphinx/search/nl.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
dutch_stopwords = parse_stop_word('''
@@ -135,10 +133,8 @@ class SearchDutch(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = dutch_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('dutch')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/no.py b/sphinx/search/no.py
index 106c6b670..68c1ac207 100644
--- a/sphinx/search/no.py
+++ b/sphinx/search/no.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
norwegian_stopwords = parse_stop_word('''
@@ -210,10 +208,8 @@ class SearchNorwegian(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = norwegian_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('norwegian')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py
index 143759387..2538511f7 100644
--- a/sphinx/search/pt.py
+++ b/sphinx/search/pt.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
portuguese_stopwords = parse_stop_word('''
@@ -270,10 +268,8 @@ class SearchPortuguese(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = portuguese_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('portuguese')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py
index e385d6f01..cfae772c9 100644
--- a/sphinx/search/ro.py
+++ b/sphinx/search/ro.py
@@ -8,13 +8,12 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage
+from typing import Dict, Set
import snowballstemmer
-if False:
- # For type annotation
- from typing import Dict, Set # NOQA
+from sphinx.search import SearchLanguage
+
js_stemmer = """
var JSX={};(function(j){function l(b,e){var a=function(){};a.prototype=e.prototype;var c=new a;for(var d in b){b[d].prototype=c}}function L(c,b){for(var a in b.prototype)if(b.prototype.hasOwnProperty(a))c.prototype[a]=b.prototype[a]}function h(a,b,d){function c(a,b,c){delete a[b];a[b]=c;return c}Object.defineProperty(a,b,{get:function(){return c(a,b,d())},set:function(d){c(a,b,d)},enumerable:true,configurable:true})}function M(a,b,c){return a[b]=a[b]/c|0}var E=parseInt;var C=parseFloat;function N(a){return a!==a}var A=isFinite;var z=encodeURIComponent;var y=decodeURIComponent;var x=encodeURI;var w=decodeURI;var u=Object.prototype.toString;var D=Object.prototype.hasOwnProperty;function k(){}j.require=function(b){var a=r[b];return a!==undefined?a:null};j.profilerIsRunning=function(){return k.getResults!=null};j.getProfileResults=function(){return(k.getResults||function(){return{}})()};j.postProfileResults=function(a,b){if(k.postResults==null)throw new Error('profiler has not been turned on');return k.postResults(a,b)};j.resetProfileResults=function(){if(k.resetResults==null)throw new Error('profiler has not been turned on');return k.resetResults()};j.DEBUG=false;function t(){};l([t],Error);function a(a,b,c){this.F=a.length;this.K=a;this.L=b;this.I=c;this.H=null;this.P=null};l([a],Object);function n(){};l([n],Object);function g(){var a;var b;var c;this.G={};a=this.E='';b=this._=0;c=this.A=a.length;this.D=0;this.B=b;this.C=c};l([g],n);function v(a,b){a.E=b.E;a._=b._;a.A=b.A;a.D=b.D;a.B=b.B;a.C=b.C};function d(b,d,c,e){var a;if(b._>=b.A){return false}a=b.E.charCodeAt(b._);if(a>e||a<c){return false}a-=c;if((d[a>>>3]&1<<(a&7))===0){return false}b._++;return true};function e(a,d,c,e){var b;if(a._>=a.A){return false}b=a.E.charCodeAt(a._);if(b>e||b<c){a._++;return true}b-=c;if((d[b>>>3]&1<<(b&7))===0){a._++;return true}return false};function p(a,d,c,e){var b;if(a._<=a.D){return false}b=a.E.charCodeAt(a._-1);if(b>e||b<c){a._--;return true}b-=c;if((d[b>>>3]&1<<(b&7))===0){a._--;return true}return false};function m(a,b,d){var c;if(a.A-a._<b){return false}if(a.E.slice(c=a._,c+b)!==d){return false}a._+=b;return true};function i(a,b,d){var c;if(a._-a.D<b){return false}if(a.E.slice((c=a._)-b,c)!==d){return false}a._-=b;return true};function q(f,m,p){var b;var d;var e;var n;var g;var k;var l;var i;var h;var c;var a;var j;var o;b=0;d=p;e=f._;n=f.A;g=0;k=0;l=false;while(true){i=b+(d-b>>>1);h=0;c=g<k?g:k;a=m[i];for(j=c;j<a.F;j++){if(e+c===n){h=-1;break}h=f.E.charCodeAt(e+c)-a.K.charCodeAt(j);if(h!==0){break}c++}if(h<0){d=i;k=c}else{b=i;g=c}if(d-b<=1){if(b>0){break}if(d===b){break}if(l){break}l=true}}while(true){a=m[b];if(g>=a.F){f._=e+a.F|0;if(a.H==null){return a.I}o=a.H(a.P);f._=e+a.F|0;if(o){return a.I}}b=a.L;if(b<0){return 0}}return-1};function f(d,m,p){var b;var g;var e;var n;var f;var k;var l;var i;var h;var c;var a;var j;var o;b=0;g=p;e=d._;n=d.D;f=0;k=0;l=false;while(true){i=b+(g-b>>1);h=0;c=f<k?f:k;a=m[i];for(j=a.F-1-c;j>=0;j--){if(e-c===n){h=-1;break}h=d.E.charCodeAt(e-1-c)-a.K.charCodeAt(j);if(h!==0){break}c++}if(h<0){g=i;k=c}else{b=i;f=c}if(g-b<=1){if(b>0){break}if(g===b){break}if(l){break}l=true}}while(true){a=m[b];if(f>=a.F){d._=e-a.F|0;if(a.H==null){return a.I}o=a.H(d);d._=e-a.F|0;if(o){return a.I}}b=a.L;if(b<0){return 0}}return-1};function s(a,b,d,e){var c;c=e.length-(d-b);a.E=a.E.slice(0,b)+e+a.E.slice(d);a.A+=c|0;if(a._>=d){a._+=c|0}else if(a._>b){a._=b}return c|0};function c(a,f){var b;var c;var d;var 
e;b=false;if((c=a.B)<0||c>(d=a.C)||d>(e=a.A)||e>a.E.length?false:true){s(a,a.B,a.C,f);b=true}return b};g.prototype.J=function(){return false};g.prototype.b=function(b){var a;var c;var d;var e;a=this.G['.'+b];if(a==null){c=this.E=b;d=this._=0;e=this.A=c.length;this.D=0;this.B=d;this.C=e;this.J();a=this.E;this.G['.'+b]=a}return a};g.prototype.stemWord=g.prototype.b;g.prototype.c=function(e){var d;var b;var c;var a;var f;var g;var h;d=[];for(b=0;b<e.length;b++){c=e[b];a=this.G['.'+c];if(a==null){f=this.E=c;g=this._=0;h=this.A=f.length;this.D=0;this.B=g;this.C=h;this.J();a=this.E;this.G['.'+c]=a}d.push(a)}return d};g.prototype.stemWords=g.prototype.c;function b(){g.call(this);this.B_standard_suffix_removed=false;this.I_p2=0;this.I_p1=0;this.I_pV=0};l([b],g);b.prototype.M=function(a){this.B_standard_suffix_removed=a.B_standard_suffix_removed;this.I_p2=a.I_p2;this.I_p1=a.I_p1;this.I_pV=a.I_pV;v(this,a)};b.prototype.copy_from=b.prototype.M;b.prototype.W=function(){var i;var a;var j;var e;var f;var g;var h;var k;b:while(true){i=this._;e=true;d:while(e===true){e=false;e:while(true){a=this._;f=true;a:while(f===true){f=false;if(!d(this,b.g_v,97,259)){break a}this.B=this._;g=true;f:while(g===true){g=false;j=this._;h=true;c:while(h===true){h=false;if(!m(this,1,'u')){break c}this.C=this._;if(!d(this,b.g_v,97,259)){break c}if(!c(this,'U')){return false}break f}this._=j;if(!m(this,1,'i')){break a}this.C=this._;if(!d(this,b.g_v,97,259)){break a}if(!c(this,'I')){return false}}this._=a;break e}k=this._=a;if(k>=this.A){break d}this._++}continue b}this._=i;break b}return true};b.prototype.r_prelude=b.prototype.W;function G(a){var j;var e;var k;var f;var g;var h;var i;var l;b:while(true){j=a._;f=true;d:while(f===true){f=false;e:while(true){e=a._;g=true;a:while(g===true){g=false;if(!d(a,b.g_v,97,259)){break a}a.B=a._;h=true;f:while(h===true){h=false;k=a._;i=true;c:while(i===true){i=false;if(!m(a,1,'u')){break c}a.C=a._;if(!d(a,b.g_v,97,259)){break c}if(!c(a,'U')){return false}break f}a._=k;if(!m(a,1,'i')){break a}a.C=a._;if(!d(a,b.g_v,97,259)){break a}if(!c(a,'I')){return false}}a._=e;break e}l=a._=e;if(l>=a.A){break d}a._++}continue b}a._=j;break b}return true};b.prototype.U=function(){var u;var w;var x;var y;var t;var l;var f;var g;var h;var i;var c;var j;var k;var a;var m;var n;var o;var p;var q;var r;var s;var v;this.I_pV=s=this.A;this.I_p1=s;this.I_p2=s;u=this._;l=true;a:while(l===true){l=false;f=true;g:while(f===true){f=false;w=this._;g=true;b:while(g===true){g=false;if(!d(this,b.g_v,97,259)){break b}h=true;f:while(h===true){h=false;x=this._;i=true;c:while(i===true){i=false;if(!e(this,b.g_v,97,259)){break c}d:while(true){c=true;e:while(c===true){c=false;if(!d(this,b.g_v,97,259)){break e}break d}if(this._>=this.A){break c}this._++}break f}this._=x;if(!d(this,b.g_v,97,259)){break b}c:while(true){j=true;d:while(j===true){j=false;if(!e(this,b.g_v,97,259)){break d}break c}if(this._>=this.A){break b}this._++}}break g}this._=w;if(!e(this,b.g_v,97,259)){break a}k=true;c:while(k===true){k=false;y=this._;a=true;b:while(a===true){a=false;if(!e(this,b.g_v,97,259)){break b}e:while(true){m=true;d:while(m===true){m=false;if(!d(this,b.g_v,97,259)){break d}break e}if(this._>=this.A){break b}this._++}break c}this._=y;if(!d(this,b.g_v,97,259)){break a}if(this._>=this.A){break a}this._++}}this.I_pV=this._}v=this._=u;t=v;n=true;a:while(n===true){n=false;b:while(true){o=true;c:while(o===true){o=false;if(!d(this,b.g_v,97,259)){break c}break b}if(this._>=this.A){break 
a}this._++}b:while(true){p=true;c:while(p===true){p=false;if(!e(this,b.g_v,97,259)){break c}break b}if(this._>=this.A){break a}this._++}this.I_p1=this._;b:while(true){q=true;c:while(q===true){q=false;if(!d(this,b.g_v,97,259)){break c}break b}if(this._>=this.A){break a}this._++}c:while(true){r=true;b:while(r===true){r=false;if(!e(this,b.g_v,97,259)){break b}break c}if(this._>=this.A){break a}this._++}this.I_p2=this._}this._=t;return true};b.prototype.r_mark_regions=b.prototype.U;function H(a){var x;var y;var z;var u;var v;var l;var f;var g;var h;var i;var j;var k;var c;var m;var n;var o;var p;var q;var r;var s;var t;var w;a.I_pV=t=a.A;a.I_p1=t;a.I_p2=t;x=a._;l=true;a:while(l===true){l=false;f=true;g:while(f===true){f=false;y=a._;g=true;b:while(g===true){g=false;if(!d(a,b.g_v,97,259)){break b}h=true;f:while(h===true){h=false;z=a._;i=true;c:while(i===true){i=false;if(!e(a,b.g_v,97,259)){break c}d:while(true){j=true;e:while(j===true){j=false;if(!d(a,b.g_v,97,259)){break e}break d}if(a._>=a.A){break c}a._++}break f}a._=z;if(!d(a,b.g_v,97,259)){break b}c:while(true){k=true;d:while(k===true){k=false;if(!e(a,b.g_v,97,259)){break d}break c}if(a._>=a.A){break b}a._++}}break g}a._=y;if(!e(a,b.g_v,97,259)){break a}c=true;c:while(c===true){c=false;u=a._;m=true;b:while(m===true){m=false;if(!e(a,b.g_v,97,259)){break b}e:while(true){n=true;d:while(n===true){n=false;if(!d(a,b.g_v,97,259)){break d}break e}if(a._>=a.A){break b}a._++}break c}a._=u;if(!d(a,b.g_v,97,259)){break a}if(a._>=a.A){break a}a._++}}a.I_pV=a._}w=a._=x;v=w;o=true;a:while(o===true){o=false;b:while(true){p=true;c:while(p===true){p=false;if(!d(a,b.g_v,97,259)){break c}break b}if(a._>=a.A){break a}a._++}b:while(true){q=true;c:while(q===true){q=false;if(!e(a,b.g_v,97,259)){break c}break b}if(a._>=a.A){break a}a._++}a.I_p1=a._;b:while(true){r=true;c:while(r===true){r=false;if(!d(a,b.g_v,97,259)){break c}break b}if(a._>=a.A){break a}a._++}c:while(true){s=true;b:while(s===true){s=false;if(!e(a,b.g_v,97,259)){break b}break c}if(a._>=a.A){break a}a._++}a.I_p2=a._}a._=v;return true};b.prototype.V=function(){var a;var e;var d;b:while(true){e=this._;d=true;a:while(d===true){d=false;this.B=this._;a=q(this,b.a_0,3);if(a===0){break a}this.C=this._;switch(a){case 0:break a;case 1:if(!c(this,'i')){return false}break;case 2:if(!c(this,'u')){return false}break;case 3:if(this._>=this.A){break a}this._++;break}continue b}this._=e;break b}return true};b.prototype.r_postlude=b.prototype.V;function I(a){var d;var f;var e;b:while(true){f=a._;e=true;a:while(e===true){e=false;a.B=a._;d=q(a,b.a_0,3);if(d===0){break a}a.C=a._;switch(d){case 0:break a;case 1:if(!c(a,'i')){return false}break;case 2:if(!c(a,'u')){return false}break;case 3:if(a._>=a.A){break a}a._++;break}continue b}a._=f;break b}return true};b.prototype.S=function(){return!(this.I_pV<=this._)?false:true};b.prototype.r_RV=b.prototype.S;b.prototype.Q=function(){return!(this.I_p1<=this._)?false:true};b.prototype.r_R1=b.prototype.Q;b.prototype.R=function(){return!(this.I_p2<=this._)?false:true};b.prototype.r_R2=b.prototype.R;b.prototype.Y=function(){var a;var e;var d;var g;this.C=this._;a=f(this,b.a_1,16);if(a===0){return false}this.B=g=this._;if(!(!(this.I_p1<=g)?false:true)){return false}switch(a){case 0:return false;case 1:if(!c(this,'')){return false}break;case 2:if(!c(this,'a')){return false}break;case 3:if(!c(this,'e')){return false}break;case 4:if(!c(this,'i')){return false}break;case 5:e=this.A-this._;d=true;a:while(d===true){d=false;if(!i(this,2,'ab')){break a}return 
false}this._=this.A-e;if(!c(this,'i')){return false}break;case 6:if(!c(this,'at')){return false}break;case 7:if(!c(this,'aţi')){return false}break}return true};b.prototype.r_step_0=b.prototype.Y;function J(a){var d;var g;var e;var h;a.C=a._;d=f(a,b.a_1,16);if(d===0){return false}a.B=h=a._;if(!(!(a.I_p1<=h)?false:true)){return false}switch(d){case 0:return false;case 1:if(!c(a,'')){return false}break;case 2:if(!c(a,'a')){return false}break;case 3:if(!c(a,'e')){return false}break;case 4:if(!c(a,'i')){return false}break;case 5:g=a.A-a._;e=true;a:while(e===true){e=false;if(!i(a,2,'ab')){break a}return false}a._=a.A-g;if(!c(a,'i')){return false}break;case 6:if(!c(a,'at')){return false}break;case 7:if(!c(a,'aţi')){return false}break}return true};b.prototype.T=function(){var a;var d;var e;var g;d=this.A-(e=this._);this.C=e;a=f(this,b.a_2,46);if(a===0){return false}this.B=g=this._;if(!(!(this.I_p1<=g)?false:true)){return false}switch(a){case 0:return false;case 1:if(!c(this,'abil')){return false}break;case 2:if(!c(this,'ibil')){return false}break;case 3:if(!c(this,'iv')){return false}break;case 4:if(!c(this,'ic')){return false}break;case 5:if(!c(this,'at')){return false}break;case 6:if(!c(this,'it')){return false}break}this.B_standard_suffix_removed=true;this._=this.A-d;return true};b.prototype.r_combo_suffix=b.prototype.T;function o(a){var d;var e;var g;var h;e=a.A-(g=a._);a.C=g;d=f(a,b.a_2,46);if(d===0){return false}a.B=h=a._;if(!(!(a.I_p1<=h)?false:true)){return false}switch(d){case 0:return false;case 1:if(!c(a,'abil')){return false}break;case 2:if(!c(a,'ibil')){return false}break;case 3:if(!c(a,'iv')){return false}break;case 4:if(!c(a,'ic')){return false}break;case 5:if(!c(a,'at')){return false}break;case 6:if(!c(a,'it')){return false}break}a.B_standard_suffix_removed=true;a._=a.A-e;return true};b.prototype.X=function(){var a;var e;var d;var g;this.B_standard_suffix_removed=false;a:while(true){e=this.A-this._;d=true;b:while(d===true){d=false;if(!o(this)){break b}continue a}this._=this.A-e;break a}this.C=this._;a=f(this,b.a_3,62);if(a===0){return false}this.B=g=this._;if(!(!(this.I_p2<=g)?false:true)){return false}switch(a){case 0:return false;case 1:if(!c(this,'')){return false}break;case 2:if(!i(this,1,'ţ')){return false}this.B=this._;if(!c(this,'t')){return false}break;case 3:if(!c(this,'ist')){return false}break}this.B_standard_suffix_removed=true;return true};b.prototype.r_standard_suffix=b.prototype.X;function K(a){var d;var g;var e;var h;a.B_standard_suffix_removed=false;a:while(true){g=a.A-a._;e=true;b:while(e===true){e=false;if(!o(a)){break b}continue a}a._=a.A-g;break a}a.C=a._;d=f(a,b.a_3,62);if(d===0){return false}a.B=h=a._;if(!(!(a.I_p2<=h)?false:true)){return false}switch(d){case 0:return false;case 1:if(!c(a,'')){return false}break;case 2:if(!i(a,1,'ţ')){return false}a.B=a._;if(!c(a,'t')){return false}break;case 3:if(!c(a,'ist')){return false}break}a.B_standard_suffix_removed=true;return true};b.prototype.Z=function(){var d;var h;var a;var j;var e;var g;var k;var l;var m;h=this.A-(k=this._);if(k<this.I_pV){return false}l=this._=this.I_pV;a=this.D;this.D=l;m=this._=this.A-h;this.C=m;d=f(this,b.a_4,94);if(d===0){this.D=a;return false}this.B=this._;switch(d){case 0:this.D=a;return false;case 1:e=true;a:while(e===true){e=false;j=this.A-this._;g=true;b:while(g===true){g=false;if(!p(this,b.g_v,97,259)){break b}break a}this._=this.A-j;if(!i(this,1,'u')){this.D=a;return false}}if(!c(this,'')){return false}break;case 2:if(!c(this,'')){return false}break}this.D=a;return 
true};b.prototype.r_verb_suffix=b.prototype.Z;function F(a){var e;var l;var d;var j;var g;var h;var m;var n;var k;l=a.A-(m=a._);if(m<a.I_pV){return false}n=a._=a.I_pV;d=a.D;a.D=n;k=a._=a.A-l;a.C=k;e=f(a,b.a_4,94);if(e===0){a.D=d;return false}a.B=a._;switch(e){case 0:a.D=d;return false;case 1:g=true;a:while(g===true){g=false;j=a.A-a._;h=true;b:while(h===true){h=false;if(!p(a,b.g_v,97,259)){break b}break a}a._=a.A-j;if(!i(a,1,'u')){a.D=d;return false}}if(!c(a,'')){return false}break;case 2:if(!c(a,'')){return false}break}a.D=d;return true};b.prototype.a=function(){var a;var d;this.C=this._;a=f(this,b.a_5,5);if(a===0){return false}this.B=d=this._;if(!(!(this.I_pV<=d)?false:true)){return false}switch(a){case 0:return false;case 1:if(!c(this,'')){return false}break}return true};b.prototype.r_vowel_suffix=b.prototype.a;function B(a){var d;var e;a.C=a._;d=f(a,b.a_5,5);if(d===0){return false}a.B=e=a._;if(!(!(a.I_pV<=e)?false:true)){return false}switch(d){case 0:return false;case 1:if(!c(a,'')){return false}break}return true};b.prototype.J=function(){var n;var j;var k;var l;var m;var o;var p;var b;var c;var d;var e;var f;var a;var g;var h;var i;var r;var s;var t;var u;var v;var w;var x;var y;var q;n=this._;b=true;a:while(b===true){b=false;if(!G(this)){break a}}r=this._=n;j=r;c=true;a:while(c===true){c=false;if(!H(this)){break a}}s=this._=j;this.D=s;u=this._=t=this.A;k=t-u;d=true;a:while(d===true){d=false;if(!J(this)){break a}}w=this._=(v=this.A)-k;l=v-w;e=true;a:while(e===true){e=false;if(!K(this)){break a}}y=this._=(x=this.A)-l;m=x-y;f=true;a:while(f===true){f=false;a=true;b:while(a===true){a=false;o=this.A-this._;g=true;c:while(g===true){g=false;if(!this.B_standard_suffix_removed){break c}break b}this._=this.A-o;if(!F(this)){break a}}}this._=this.A-m;h=true;a:while(h===true){h=false;if(!B(this)){break a}}q=this._=this.D;p=q;i=true;a:while(i===true){i=false;if(!I(this)){break a}}this._=p;return true};b.prototype.stem=b.prototype.J;b.prototype.N=function(a){return a instanceof b};b.prototype.equals=b.prototype.N;b.prototype.O=function(){var c;var a;var b;var d;c='RomanianStemmer';a=0;for(b=0;b<c.length;b++){d=c.charCodeAt(b);a=(a<<5)-a+d;a=a&a}return a|0};b.prototype.hashCode=b.prototype.O;b.serialVersionUID=1;h(b,'methodObject',function(){return new b});h(b,'a_0',function(){return[new a('',-1,3),new a('I',0,1),new a('U',0,2)]});h(b,'a_1',function(){return[new a('ea',-1,3),new a('aţia',-1,7),new a('aua',-1,2),new a('iua',-1,4),new a('aţie',-1,7),new a('ele',-1,3),new a('ile',-1,5),new a('iile',6,4),new a('iei',-1,4),new a('atei',-1,6),new a('ii',-1,4),new a('ului',-1,1),new a('ul',-1,1),new a('elor',-1,3),new a('ilor',-1,4),new a('iilor',14,4)]});h(b,'a_2',function(){return[new a('icala',-1,4),new a('iciva',-1,4),new a('ativa',-1,5),new a('itiva',-1,6),new a('icale',-1,4),new a('aţiune',-1,5),new a('iţiune',-1,6),new a('atoare',-1,5),new a('itoare',-1,6),new a('ătoare',-1,5),new a('icitate',-1,4),new a('abilitate',-1,1),new a('ibilitate',-1,2),new a('ivitate',-1,3),new a('icive',-1,4),new a('ative',-1,5),new a('itive',-1,6),new a('icali',-1,4),new a('atori',-1,5),new a('icatori',18,4),new a('itori',-1,6),new a('ători',-1,5),new a('icitati',-1,4),new a('abilitati',-1,1),new a('ivitati',-1,3),new a('icivi',-1,4),new a('ativi',-1,5),new a('itivi',-1,6),new a('icităi',-1,4),new a('abilităi',-1,1),new a('ivităi',-1,3),new a('icităţi',-1,4),new a('abilităţi',-1,1),new a('ivităţi',-1,3),new a('ical',-1,4),new a('ator',-1,5),new a('icator',35,4),new a('itor',-1,6),new a('ător',-1,5),new a('iciv',-1,4),new 
a('ativ',-1,5),new a('itiv',-1,6),new a('icală',-1,4),new a('icivă',-1,4),new a('ativă',-1,5),new a('itivă',-1,6)]});h(b,'a_3',function(){return[new a('ica',-1,1),new a('abila',-1,1),new a('ibila',-1,1),new a('oasa',-1,1),new a('ata',-1,1),new a('ita',-1,1),new a('anta',-1,1),new a('ista',-1,3),new a('uta',-1,1),new a('iva',-1,1),new a('ic',-1,1),new a('ice',-1,1),new a('abile',-1,1),new a('ibile',-1,1),new a('isme',-1,3),new a('iune',-1,2),new a('oase',-1,1),new a('ate',-1,1),new a('itate',17,1),new a('ite',-1,1),new a('ante',-1,1),new a('iste',-1,3),new a('ute',-1,1),new a('ive',-1,1),new a('ici',-1,1),new a('abili',-1,1),new a('ibili',-1,1),new a('iuni',-1,2),new a('atori',-1,1),new a('osi',-1,1),new a('ati',-1,1),new a('itati',30,1),new a('iti',-1,1),new a('anti',-1,1),new a('isti',-1,3),new a('uti',-1,1),new a('işti',-1,3),new a('ivi',-1,1),new a('ităi',-1,1),new a('oşi',-1,1),new a('ităţi',-1,1),new a('abil',-1,1),new a('ibil',-1,1),new a('ism',-1,3),new a('ator',-1,1),new a('os',-1,1),new a('at',-1,1),new a('it',-1,1),new a('ant',-1,1),new a('ist',-1,3),new a('ut',-1,1),new a('iv',-1,1),new a('ică',-1,1),new a('abilă',-1,1),new a('ibilă',-1,1),new a('oasă',-1,1),new a('ată',-1,1),new a('ită',-1,1),new a('antă',-1,1),new a('istă',-1,3),new a('ută',-1,1),new a('ivă',-1,1)]});h(b,'a_4',function(){return[new a('ea',-1,1),new a('ia',-1,1),new a('esc',-1,1),new a('ăsc',-1,1),new a('ind',-1,1),new a('ând',-1,1),new a('are',-1,1),new a('ere',-1,1),new a('ire',-1,1),new a('âre',-1,1),new a('se',-1,2),new a('ase',10,1),new a('sese',10,2),new a('ise',10,1),new a('use',10,1),new a('âse',10,1),new a('eşte',-1,1),new a('ăşte',-1,1),new a('eze',-1,1),new a('ai',-1,1),new a('eai',19,1),new a('iai',19,1),new a('sei',-1,2),new a('eşti',-1,1),new a('ăşti',-1,1),new a('ui',-1,1),new a('ezi',-1,1),new a('âi',-1,1),new a('aşi',-1,1),new a('seşi',-1,2),new a('aseşi',29,1),new a('seseşi',29,2),new a('iseşi',29,1),new a('useşi',29,1),new a('âseşi',29,1),new a('işi',-1,1),new a('uşi',-1,1),new a('âşi',-1,1),new a('aţi',-1,2),new a('eaţi',38,1),new a('iaţi',38,1),new a('eţi',-1,2),new a('iţi',-1,2),new a('âţi',-1,2),new a('arăţi',-1,1),new a('serăţi',-1,2),new a('aserăţi',45,1),new a('seserăţi',45,2),new a('iserăţi',45,1),new a('userăţi',45,1),new a('âserăţi',45,1),new a('irăţi',-1,1),new a('urăţi',-1,1),new a('ârăţi',-1,1),new a('am',-1,1),new a('eam',54,1),new a('iam',54,1),new a('em',-1,2),new a('asem',57,1),new a('sesem',57,2),new a('isem',57,1),new a('usem',57,1),new a('âsem',57,1),new a('im',-1,2),new a('âm',-1,2),new a('ăm',-1,2),new a('arăm',65,1),new a('serăm',65,2),new a('aserăm',67,1),new a('seserăm',67,2),new a('iserăm',67,1),new a('userăm',67,1),new a('âserăm',67,1),new a('irăm',65,1),new a('urăm',65,1),new a('ârăm',65,1),new a('au',-1,1),new a('eau',76,1),new a('iau',76,1),new a('indu',-1,1),new a('ându',-1,1),new a('ez',-1,1),new a('ească',-1,1),new a('ară',-1,1),new a('seră',-1,2),new a('aseră',84,1),new a('seseră',84,2),new a('iseră',84,1),new a('useră',84,1),new a('âseră',84,1),new a('iră',-1,1),new a('ură',-1,1),new a('âră',-1,1),new a('ează',-1,1)]});h(b,'a_5',function(){return[new a('a',-1,1),new a('e',-1,1),new a('ie',1,1),new a('i',-1,1),new a('ă',-1,1)]});h(b,'g_v',function(){return[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4]});var r={'src/stemmer.jsx':{Stemmer:n},'src/romanian-stemmer.jsx':{RomanianStemmer:b}}}(JSX))
@@ -29,10 +28,8 @@ class SearchRomanian(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = set() # type: Set[str]
- def init(self, options):
- # type: (Dict) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('romanian')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py
index 8719ef1d2..9c0e30394 100644
--- a/sphinx/search/ru.py
+++ b/sphinx/search/ru.py
@@ -8,13 +8,11 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any # NOQA
+from sphinx.search import SearchLanguage, parse_stop_word
russian_stopwords = parse_stop_word('''
@@ -259,10 +257,8 @@ class SearchRussian(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = russian_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('russian')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py
index cfdd15f92..4af7f7835 100644
--- a/sphinx/search/sv.py
+++ b/sphinx/search/sv.py
@@ -8,13 +8,12 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage, parse_stop_word
+from typing import Dict
import snowballstemmer
-if False:
- # For type annotation
- from typing import Any
+from sphinx.search import SearchLanguage, parse_stop_word
+
swedish_stopwords = parse_stop_word('''
| source: http://snowball.tartarus.org/algorithms/swedish/stop.txt
@@ -147,10 +146,8 @@ class SearchSwedish(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = swedish_stopwords
- def init(self, options):
- # type: (Any) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('swedish')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
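
(Editorial note, not part of the patch.) For the snowball-backed languages touched above (es, fi, fr, hu, it, nl, no, pt, ro, ru, sv, and tr below), init() and stem() reduce to the snowballstemmer calls visible in the context lines. A small standalone sketch of that behaviour, assuming the snowballstemmer package is installed; the sample word is illustrative:

# sketch only: what init()/stem() of the snowball-backed SearchLanguage classes do
import snowballstemmer

stemmer = snowballstemmer.stemmer('swedish')   # same call SearchSwedish.init() makes
print(stemmer.stemWord('Bilarna'.lower()))     # stem() lowercases, then stems one word
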
diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py
index ba2cdf951..8db42f676 100644
--- a/sphinx/search/tr.py
+++ b/sphinx/search/tr.py
@@ -8,13 +8,12 @@
:license: BSD, see LICENSE for details.
"""
-from sphinx.search import SearchLanguage
+from typing import Dict, Set
import snowballstemmer
-if False:
- # For type annotation
- from typing import Dict, Set # NOQA
+from sphinx.search import SearchLanguage
+
js_stemmer = """
var JSX={};(function(q){function r(b,e){var a=function(){};a.prototype=e.prototype;var c=new a;for(var d in b){b[d].prototype=c}}function Q(c,b){for(var a in b.prototype)if(b.prototype.hasOwnProperty(a))c.prototype[a]=b.prototype[a]}function j(a,b,d){function c(a,b,c){delete a[b];a[b]=c;return c}Object.defineProperty(a,b,{get:function(){return c(a,b,d())},set:function(d){c(a,b,d)},enumerable:true,configurable:true})}function R(a,b,c){return a[b]=a[b]/c|0}var M=parseInt;var K=parseFloat;function P(a){return a!==a}var A=isFinite;var G=encodeURIComponent;var F=decodeURIComponent;var E=encodeURI;var D=decodeURI;var C=Object.prototype.toString;var H=Object.prototype.hasOwnProperty;function p(){}q.require=function(b){var a=y[b];return a!==undefined?a:null};q.profilerIsRunning=function(){return p.getResults!=null};q.getProfileResults=function(){return(p.getResults||function(){return{}})()};q.postProfileResults=function(a,b){if(p.postResults==null)throw new Error('profiler has not been turned on');return p.postResults(a,b)};q.resetProfileResults=function(){if(p.resetResults==null)throw new Error('profiler has not been turned on');return p.resetResults()};q.DEBUG=false;function I(){};r([I],Error);function d(a,b,c){this.G=a.length;this.A_=a;this.D_=b;this.J=c;this.I=null;this.E_=null};r([d],Object);function u(){};r([u],Object);function m(){var a;var b;var c;this.F={};a=this.E='';b=this._=0;c=this.A=a.length;this.D=0;this.B=b;this.C=c};r([m],u);function B(a,b){a.E=b.E;a._=b._;a.A=b.A;a.D=b.D;a.B=b.B;a.C=b.C};function v(b,d,c,e){var a;if(b._>=b.A){return false}a=b.E.charCodeAt(b._);if(a>e||a<c){return false}a-=c;if((d[a>>>3]&1<<(a&7))===0){return false}b._++;return true};function f(b,d,c,e){var a;if(b._<=b.D){return false}a=b.E.charCodeAt(b._-1);if(a>e||a<c){return false}a-=c;if((d[a>>>3]&1<<(a&7))===0){return false}b._--;return true};function t(a,d,c,e){var b;if(a._<=a.D){return false}b=a.E.charCodeAt(a._-1);if(b>e||b<c){a._--;return true}b-=c;if((d[b>>>3]&1<<(b&7))===0){a._--;return true}return false};function s(a,b,d){var c;if(a.A-a._<b){return false}if(a.E.slice(c=a._,c+b)!==d){return false}a._+=b;return true};function g(a,b,d){var c;if(a._-a.D<b){return false}if(a.E.slice((c=a._)-b,c)!==d){return false}a._-=b;return true};function b(d,m,p){var b;var g;var e;var n;var f;var k;var l;var i;var h;var c;var a;var j;var o;b=0;g=p;e=d._;n=d.D;f=0;k=0;l=false;while(true){i=b+(g-b>>1);h=0;c=f<k?f:k;a=m[i];for(j=a.G-1-c;j>=0;j--){if(e-c===n){h=-1;break}h=d.E.charCodeAt(e-1-c)-a.A_.charCodeAt(j);if(h!==0){break}c++}if(h<0){g=i;k=c}else{b=i;f=c}if(g-b<=1){if(b>0){break}if(g===b){break}if(l){break}l=true}}while(true){a=m[b];if(f>=a.G){d._=e-a.G|0;if(a.I==null){return a.J}o=a.I(d);d._=e-a.G|0;if(o){return a.J}}b=a.D_;if(b<0){return 0}}return-1};function n(a,b,d,e){var c;c=e.length-(d-b);a.E=a.E.slice(0,b)+e+a.E.slice(d);a.A+=c|0;if(a._>=d){a._+=c|0}else if(a._>b){a._=b}return c|0};function e(a,f){var b;var c;var d;var e;b=false;if((c=a.B)<0||c>(d=a.C)||d>(e=a.A)||e>a.E.length?false:true){n(a,a.B,a.C,f);b=true}return b};m.prototype.H=function(){return false};m.prototype.B_=function(b){var a;var c;var d;var e;a=this.F['.'+b];if(a==null){c=this.E=b;d=this._=0;e=this.A=c.length;this.D=0;this.B=d;this.C=e;this.H();a=this.E;this.F['.'+b]=a}return a};m.prototype.stemWord=m.prototype.B_;m.prototype.C_=function(e){var d;var b;var c;var a;var f;var g;var 
h;d=[];for(b=0;b<e.length;b++){c=e[b];a=this.F['.'+c];if(a==null){f=this.E=c;g=this._=0;h=this.A=f.length;this.D=0;this.B=g;this.C=h;this.H();a=this.E;this.F['.'+c]=a}d.push(a)}return d};m.prototype.stemWords=m.prototype.C_;function a(){m.call(this);this.B_continue_stemming_noun_suffixes=false;this.I_strlen=0};r([a],m);a.prototype.K=function(a){this.B_continue_stemming_noun_suffixes=a.B_continue_stemming_noun_suffixes;this.I_strlen=a.I_strlen;B(this,a)};a.prototype.copy_from=a.prototype.K;a.prototype.O=function(){var E;var q;var b;var e;var h;var i;var j;var k;var l;var m;var n;var o;var p;var c;var r;var s;var t;var u;var d;var v;var w;var x;var y;var z;var A;var B;var C;var D;var G;var H;var I;var J;var K;var L;var M;var N;var F;E=this.A-this._;b:while(true){q=this.A-this._;o=true;a:while(o===true){o=false;if(!f(this,a.g_vowel,97,305)){break a}this._=this.A-q;break b}G=this._=this.A-q;if(G<=this.D){return false}this._--}p=true;a:while(p===true){p=false;b=this.A-this._;c=true;b:while(c===true){c=false;if(!g(this,1,'a')){break b}c:while(true){e=this.A-this._;r=true;d:while(r===true){r=false;if(!f(this,a.g_vowel1,97,305)){break d}this._=this.A-e;break c}H=this._=this.A-e;if(H<=this.D){break b}this._--}break a}this._=this.A-b;s=true;b:while(s===true){s=false;if(!g(this,1,'e')){break b}c:while(true){h=this.A-this._;t=true;d:while(t===true){t=false;if(!f(this,a.g_vowel2,101,252)){break d}this._=this.A-h;break c}I=this._=this.A-h;if(I<=this.D){break b}this._--}break a}this._=this.A-b;u=true;b:while(u===true){u=false;if(!g(this,1,'ı')){break b}c:while(true){i=this.A-this._;d=true;d:while(d===true){d=false;if(!f(this,a.g_vowel3,97,305)){break d}this._=this.A-i;break c}J=this._=this.A-i;if(J<=this.D){break b}this._--}break a}this._=this.A-b;v=true;b:while(v===true){v=false;if(!g(this,1,'i')){break b}c:while(true){j=this.A-this._;w=true;d:while(w===true){w=false;if(!f(this,a.g_vowel4,101,105)){break d}this._=this.A-j;break c}K=this._=this.A-j;if(K<=this.D){break b}this._--}break a}this._=this.A-b;x=true;b:while(x===true){x=false;if(!g(this,1,'o')){break b}c:while(true){k=this.A-this._;y=true;d:while(y===true){y=false;if(!f(this,a.g_vowel5,111,117)){break d}this._=this.A-k;break c}L=this._=this.A-k;if(L<=this.D){break b}this._--}break a}this._=this.A-b;z=true;b:while(z===true){z=false;if(!g(this,1,'ö')){break b}c:while(true){l=this.A-this._;A=true;d:while(A===true){A=false;if(!f(this,a.g_vowel6,246,252)){break d}this._=this.A-l;break c}M=this._=this.A-l;if(M<=this.D){break b}this._--}break a}this._=this.A-b;B=true;b:while(B===true){B=false;if(!g(this,1,'u')){break b}c:while(true){m=this.A-this._;C=true;d:while(C===true){C=false;if(!f(this,a.g_vowel5,111,117)){break d}this._=this.A-m;break c}N=this._=this.A-m;if(N<=this.D){break b}this._--}break a}this._=this.A-b;if(!g(this,1,'ü')){return false}b:while(true){n=this.A-this._;D=true;c:while(D===true){D=false;if(!f(this,a.g_vowel6,246,252)){break c}this._=this.A-n;break b}F=this._=this.A-n;if(F<=this.D){return false}this._--}}this._=this.A-E;return true};a.prototype.r_check_vowel_harmony=a.prototype.O;function c(b){var F;var r;var c;var e;var h;var i;var j;var k;var l;var m;var n;var o;var p;var q;var d;var s;var t;var u;var v;var w;var x;var y;var z;var A;var B;var C;var D;var E;var H;var I;var J;var K;var L;var M;var N;var O;var G;F=b.A-b._;b:while(true){r=b.A-b._;o=true;a:while(o===true){o=false;if(!f(b,a.g_vowel,97,305)){break a}b._=b.A-r;break b}H=b._=b.A-r;if(H<=b.D){return 
false}b._--}p=true;a:while(p===true){p=false;c=b.A-b._;q=true;b:while(q===true){q=false;if(!g(b,1,'a')){break b}c:while(true){e=b.A-b._;d=true;d:while(d===true){d=false;if(!f(b,a.g_vowel1,97,305)){break d}b._=b.A-e;break c}I=b._=b.A-e;if(I<=b.D){break b}b._--}break a}b._=b.A-c;s=true;b:while(s===true){s=false;if(!g(b,1,'e')){break b}c:while(true){h=b.A-b._;t=true;d:while(t===true){t=false;if(!f(b,a.g_vowel2,101,252)){break d}b._=b.A-h;break c}J=b._=b.A-h;if(J<=b.D){break b}b._--}break a}b._=b.A-c;u=true;b:while(u===true){u=false;if(!g(b,1,'ı')){break b}c:while(true){i=b.A-b._;v=true;d:while(v===true){v=false;if(!f(b,a.g_vowel3,97,305)){break d}b._=b.A-i;break c}K=b._=b.A-i;if(K<=b.D){break b}b._--}break a}b._=b.A-c;w=true;b:while(w===true){w=false;if(!g(b,1,'i')){break b}c:while(true){j=b.A-b._;x=true;d:while(x===true){x=false;if(!f(b,a.g_vowel4,101,105)){break d}b._=b.A-j;break c}L=b._=b.A-j;if(L<=b.D){break b}b._--}break a}b._=b.A-c;y=true;b:while(y===true){y=false;if(!g(b,1,'o')){break b}c:while(true){k=b.A-b._;z=true;d:while(z===true){z=false;if(!f(b,a.g_vowel5,111,117)){break d}b._=b.A-k;break c}M=b._=b.A-k;if(M<=b.D){break b}b._--}break a}b._=b.A-c;A=true;b:while(A===true){A=false;if(!g(b,1,'ö')){break b}c:while(true){l=b.A-b._;B=true;d:while(B===true){B=false;if(!f(b,a.g_vowel6,246,252)){break d}b._=b.A-l;break c}N=b._=b.A-l;if(N<=b.D){break b}b._--}break a}b._=b.A-c;C=true;b:while(C===true){C=false;if(!g(b,1,'u')){break b}c:while(true){m=b.A-b._;D=true;d:while(D===true){D=false;if(!f(b,a.g_vowel5,111,117)){break d}b._=b.A-m;break c}O=b._=b.A-m;if(O<=b.D){break b}b._--}break a}b._=b.A-c;if(!g(b,1,'ü')){return false}b:while(true){n=b.A-b._;E=true;c:while(E===true){E=false;if(!f(b,a.g_vowel6,246,252)){break c}b._=b.A-n;break b}G=b._=b.A-n;if(G<=b.D){return false}b._--}}b._=b.A-F;return true};a.prototype.j=function(){var k;var h;var l;var i;var m;var j;var b;var e;var d;var n;var o;var p;var q;var c;b=true;b:while(b===true){b=false;k=this.A-this._;e=true;a:while(e===true){e=false;h=this.A-this._;if(!g(this,1,'n')){break a}n=this._=this.A-h;if(n<=this.D){break a}this._--;l=this.A-this._;if(!f(this,a.g_vowel,97,305)){break a}this._=this.A-l;break b}p=this._=(o=this.A)-k;i=o-p;d=true;a:while(d===true){d=false;m=this.A-this._;if(!g(this,1,'n')){break a}this._=this.A-m;return false}c=this._=(q=this.A)-i;j=q-c;if(c<=this.D){return false}this._--;if(!f(this,a.g_vowel,97,305)){return false}this._=this.A-j}return true};a.prototype.r_mark_suffix_with_optional_n_consonant=a.prototype.j;function o(b){var i;var m;var l;var j;var n;var k;var c;var e;var d;var o;var p;var q;var r;var h;c=true;b:while(c===true){c=false;i=b.A-b._;e=true;a:while(e===true){e=false;m=b.A-b._;if(!g(b,1,'n')){break a}o=b._=b.A-m;if(o<=b.D){break a}b._--;l=b.A-b._;if(!f(b,a.g_vowel,97,305)){break a}b._=b.A-l;break b}q=b._=(p=b.A)-i;j=p-q;d=true;a:while(d===true){d=false;n=b.A-b._;if(!g(b,1,'n')){break a}b._=b.A-n;return false}h=b._=(r=b.A)-j;k=r-h;if(h<=b.D){return false}b._--;if(!f(b,a.g_vowel,97,305)){return false}b._=b.A-k}return true};a.prototype.k=function(){var k;var h;var l;var i;var m;var j;var b;var e;var d;var n;var o;var p;var q;var c;b=true;b:while(b===true){b=false;k=this.A-this._;e=true;a:while(e===true){e=false;h=this.A-this._;if(!g(this,1,'s')){break a}n=this._=this.A-h;if(n<=this.D){break a}this._--;l=this.A-this._;if(!f(this,a.g_vowel,97,305)){break a}this._=this.A-l;break b}p=this._=(o=this.A)-k;i=o-p;d=true;a:while(d===true){d=false;m=this.A-this._;if(!g(this,1,'s')){break a}this._=this.A-m;return 
false}c=this._=(q=this.A)-i;j=q-c;if(c<=this.D){return false}this._--;if(!f(this,a.g_vowel,97,305)){return false}this._=this.A-j}return true};a.prototype.r_mark_suffix_with_optional_s_consonant=a.prototype.k;function l(b){var i;var m;var l;var j;var n;var k;var c;var e;var d;var o;var p;var q;var r;var h;c=true;b:while(c===true){c=false;i=b.A-b._;e=true;a:while(e===true){e=false;m=b.A-b._;if(!g(b,1,'s')){break a}o=b._=b.A-m;if(o<=b.D){break a}b._--;l=b.A-b._;if(!f(b,a.g_vowel,97,305)){break a}b._=b.A-l;break b}q=b._=(p=b.A)-i;j=p-q;d=true;a:while(d===true){d=false;n=b.A-b._;if(!g(b,1,'s')){break a}b._=b.A-n;return false}h=b._=(r=b.A)-j;k=r-h;if(h<=b.D){return false}b._--;if(!f(b,a.g_vowel,97,305)){return false}b._=b.A-k}return true};a.prototype.l=function(){var k;var h;var l;var i;var m;var j;var b;var e;var d;var n;var o;var p;var q;var c;b=true;b:while(b===true){b=false;k=this.A-this._;e=true;a:while(e===true){e=false;h=this.A-this._;if(!g(this,1,'y')){break a}n=this._=this.A-h;if(n<=this.D){break a}this._--;l=this.A-this._;if(!f(this,a.g_vowel,97,305)){break a}this._=this.A-l;break b}p=this._=(o=this.A)-k;i=o-p;d=true;a:while(d===true){d=false;m=this.A-this._;if(!g(this,1,'y')){break a}this._=this.A-m;return false}c=this._=(q=this.A)-i;j=q-c;if(c<=this.D){return false}this._--;if(!f(this,a.g_vowel,97,305)){return false}this._=this.A-j}return true};a.prototype.r_mark_suffix_with_optional_y_consonant=a.prototype.l;function h(b){var i;var m;var l;var j;var n;var k;var c;var e;var d;var o;var p;var q;var r;var h;c=true;b:while(c===true){c=false;i=b.A-b._;e=true;a:while(e===true){e=false;m=b.A-b._;if(!g(b,1,'y')){break a}o=b._=b.A-m;if(o<=b.D){break a}b._--;l=b.A-b._;if(!f(b,a.g_vowel,97,305)){break a}b._=b.A-l;break b}q=b._=(p=b.A)-i;j=p-q;d=true;a:while(d===true){d=false;n=b.A-b._;if(!g(b,1,'y')){break a}b._=b.A-n;return false}h=b._=(r=b.A)-j;k=r-h;if(h<=b.D){return false}b._--;if(!f(b,a.g_vowel,97,305)){return false}b._=b.A-k}return true};a.prototype.i=function(){var j;var g;var k;var h;var l;var i;var b;var e;var d;var m;var n;var o;var p;var c;b=true;b:while(b===true){b=false;j=this.A-this._;e=true;a:while(e===true){e=false;g=this.A-this._;if(!f(this,a.g_U,105,305)){break a}m=this._=this.A-g;if(m<=this.D){break a}this._--;k=this.A-this._;if(!t(this,a.g_vowel,97,305)){break a}this._=this.A-k;break b}o=this._=(n=this.A)-j;h=n-o;d=true;a:while(d===true){d=false;l=this.A-this._;if(!f(this,a.g_U,105,305)){break a}this._=this.A-l;return false}c=this._=(p=this.A)-h;i=p-c;if(c<=this.D){return false}this._--;if(!t(this,a.g_vowel,97,305)){return false}this._=this.A-i}return true};a.prototype.r_mark_suffix_with_optional_U_vowel=a.prototype.i;function k(b){var h;var l;var k;var i;var m;var j;var c;var e;var d;var n;var o;var p;var q;var g;c=true;b:while(c===true){c=false;h=b.A-b._;e=true;a:while(e===true){e=false;l=b.A-b._;if(!f(b,a.g_U,105,305)){break a}n=b._=b.A-l;if(n<=b.D){break a}b._--;k=b.A-b._;if(!t(b,a.g_vowel,97,305)){break a}b._=b.A-k;break b}p=b._=(o=b.A)-h;i=o-p;d=true;a:while(d===true){d=false;m=b.A-b._;if(!f(b,a.g_U,105,305)){break a}b._=b.A-m;return false}g=b._=(q=b.A)-i;j=q-g;if(g<=b.D){return false}b._--;if(!t(b,a.g_vowel,97,305)){return false}b._=b.A-j}return true};a.prototype.e=function(){return b(this,a.a_0,10)===0?false:!k(this)?false:true};a.prototype.r_mark_possessives=a.prototype.e;a.prototype.f=function(){return!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true};a.prototype.r_mark_sU=a.prototype.f;a.prototype.W=function(){return 
b(this,a.a_1,2)===0?false:true};a.prototype.r_mark_lArI=a.prototype.W;a.prototype.o=function(){return!c(this)?false:!f(this,a.g_U,105,305)?false:!h(this)?false:true};a.prototype.r_mark_yU=a.prototype.o;a.prototype.Y=function(){return!c(this)?false:b(this,a.a_2,4)===0?false:true};a.prototype.r_mark_nU=a.prototype.Y;a.prototype.Z=function(){return!c(this)?false:b(this,a.a_3,4)===0?false:!o(this)?false:true};a.prototype.r_mark_nUn=a.prototype.Z;a.prototype.m=function(){return!c(this)?false:b(this,a.a_4,2)===0?false:!h(this)?false:true};a.prototype.r_mark_yA=a.prototype.m;a.prototype.X=function(){return!c(this)?false:b(this,a.a_5,2)===0?false:true};a.prototype.r_mark_nA=a.prototype.X;a.prototype.Q=function(){return!c(this)?false:b(this,a.a_6,4)===0?false:true};a.prototype.r_mark_DA=a.prototype.Q;a.prototype.c=function(){return!c(this)?false:b(this,a.a_7,2)===0?false:true};a.prototype.r_mark_ndA=a.prototype.c;a.prototype.R=function(){return!c(this)?false:b(this,a.a_8,4)===0?false:true};a.prototype.r_mark_DAn=a.prototype.R;a.prototype.d=function(){return!c(this)?false:b(this,a.a_9,2)===0?false:true};a.prototype.r_mark_ndAn=a.prototype.d;a.prototype.s=function(){return!c(this)?false:b(this,a.a_10,2)===0?false:!h(this)?false:true};a.prototype.r_mark_ylA=a.prototype.s;a.prototype.U=function(){return!g(this,2,'ki')?false:true};a.prototype.r_mark_ki=a.prototype.U;a.prototype.b=function(){return!c(this)?false:b(this,a.a_11,2)===0?false:!o(this)?false:true};a.prototype.r_mark_ncA=a.prototype.b;a.prototype.p=function(){return!c(this)?false:b(this,a.a_12,4)===0?false:!h(this)?false:true};a.prototype.r_mark_yUm=a.prototype.p;a.prototype.g=function(){return!c(this)?false:b(this,a.a_13,4)===0?false:true};a.prototype.r_mark_sUn=a.prototype.g;a.prototype.q=function(){return!c(this)?false:b(this,a.a_14,4)===0?false:!h(this)?false:true};a.prototype.r_mark_yUz=a.prototype.q;a.prototype.h=function(){return b(this,a.a_15,4)===0?false:true};a.prototype.r_mark_sUnUz=a.prototype.h;a.prototype.V=function(){return!c(this)?false:b(this,a.a_16,2)===0?false:true};a.prototype.r_mark_lAr=a.prototype.V;a.prototype.a=function(){return!c(this)?false:b(this,a.a_17,4)===0?false:true};a.prototype.r_mark_nUz=a.prototype.a;a.prototype.S=function(){return!c(this)?false:b(this,a.a_18,8)===0?false:true};a.prototype.r_mark_DUr=a.prototype.S;a.prototype.T=function(){return b(this,a.a_19,2)===0?false:true};a.prototype.r_mark_cAsInA=a.prototype.T;a.prototype.n=function(){return!c(this)?false:b(this,a.a_20,32)===0?false:!h(this)?false:true};a.prototype.r_mark_yDU=a.prototype.n;a.prototype.u=function(){return b(this,a.a_21,8)===0?false:!h(this)?false:true};a.prototype.r_mark_ysA=a.prototype.u;a.prototype.t=function(){return!c(this)?false:b(this,a.a_22,4)===0?false:!h(this)?false:true};a.prototype.r_mark_ymUs_=a.prototype.t;a.prototype.r=function(){return!g(this,3,'ken')?false:!h(this)?false:true};a.prototype.r_mark_yken=a.prototype.r;a.prototype.y=function(){var i;var j;var d;var Y;var k;var X;var l;var W;var V;var f;var r;var s;var t;var u;var v;var w;var x;var y;var z;var A;var B;var C;var m;var E;var F;var G;var H;var I;var J;var K;var L;var M;var N;var O;var P;var Q;var R;var S;var T;var U;var p;var o;var D;var n;var q;this.C=this._;this.B_continue_stemming_noun_suffixes=true;r=true;a:while(r===true){r=false;i=this.A-this._;s=true;d:while(s===true){s=false;t=true;b:while(t===true){t=false;j=this.A-this._;u=true;c:while(u===true){u=false;if(!(!c(this)?false:b(this,a.a_22,4)===0?false:!h(this)?false:true)){break c}break 
b}this._=this.A-j;v=true;c:while(v===true){v=false;if(!(!c(this)?false:b(this,a.a_20,32)===0?false:!h(this)?false:true)){break c}break b}this._=this.A-j;w=true;c:while(w===true){w=false;if(!(b(this,a.a_21,8)===0?false:!h(this)?false:true)){break c}break b}this._=this.A-j;if(!(!g(this,3,'ken')?false:!h(this)?false:true)){break d}}break a}this._=this.A-i;x=true;c:while(x===true){x=false;if(!(b(this,a.a_19,2)===0?false:true)){break c}y=true;b:while(y===true){y=false;d=this.A-this._;z=true;d:while(z===true){z=false;if(!(b(this,a.a_15,4)===0?false:true)){break d}break b}this._=this.A-d;A=true;d:while(A===true){A=false;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break d}break b}this._=this.A-d;B=true;d:while(B===true){B=false;if(!(!c(this)?false:b(this,a.a_12,4)===0?false:!h(this)?false:true)){break d}break b}this._=this.A-d;C=true;d:while(C===true){C=false;if(!(!c(this)?false:b(this,a.a_13,4)===0?false:true)){break d}break b}this._=this.A-d;m=true;d:while(m===true){m=false;if(!(!c(this)?false:b(this,a.a_14,4)===0?false:!h(this)?false:true)){break d}break b}this._=this.A-d}if(!(!c(this)?false:b(this,a.a_22,4)===0?false:!h(this)?false:true)){break c}break a}this._=this.A-i;E=true;c:while(E===true){E=false;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break c}this.B=this._;if(!e(this,'')){return false}Y=this.A-this._;F=true;d:while(F===true){F=false;this.C=this._;G=true;b:while(G===true){G=false;k=this.A-this._;H=true;e:while(H===true){H=false;if(!(!c(this)?false:b(this,a.a_18,8)===0?false:true)){break e}break b}this._=this.A-k;I=true;e:while(I===true){I=false;if(!(!c(this)?false:b(this,a.a_20,32)===0?false:!h(this)?false:true)){break e}break b}this._=this.A-k;J=true;e:while(J===true){J=false;if(!(b(this,a.a_21,8)===0?false:!h(this)?false:true)){break e}break b}this._=this.A-k;if(!(!c(this)?false:b(this,a.a_22,4)===0?false:!h(this)?false:true)){this._=this.A-Y;break d}}}this.B_continue_stemming_noun_suffixes=false;break a}this._=this.A-i;K=true;b:while(K===true){K=false;if(!(!c(this)?false:b(this,a.a_17,4)===0?false:true)){break b}L=true;c:while(L===true){L=false;X=this.A-this._;M=true;d:while(M===true){M=false;if(!(!c(this)?false:b(this,a.a_20,32)===0?false:!h(this)?false:true)){break d}break c}this._=this.A-X;if(!(b(this,a.a_21,8)===0?false:!h(this)?false:true)){break b}}break a}this._=this.A-i;N=true;c:while(N===true){N=false;O=true;b:while(O===true){O=false;l=this.A-this._;P=true;d:while(P===true){P=false;if(!(b(this,a.a_15,4)===0?false:true)){break d}break b}this._=this.A-l;Q=true;d:while(Q===true){Q=false;if(!(!c(this)?false:b(this,a.a_14,4)===0?false:!h(this)?false:true)){break d}break b}this._=this.A-l;R=true;d:while(R===true){R=false;if(!(!c(this)?false:b(this,a.a_13,4)===0?false:true)){break d}break b}this._=this.A-l;if(!(!c(this)?false:b(this,a.a_12,4)===0?false:!h(this)?false:true)){break c}}this.B=this._;if(!e(this,'')){return false}W=this.A-this._;S=true;b:while(S===true){S=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_22,4)===0?false:!h(this)?false:true)){this._=this.A-W;break b}}break a}this._=this.A-i;if(!(!c(this)?false:b(this,a.a_18,8)===0?false:true)){return false}this.B=this._;if(!e(this,'')){return false}V=this.A-this._;T=true;d:while(T===true){T=false;this.C=this._;U=true;b:while(U===true){U=false;f=this.A-this._;p=true;c:while(p===true){p=false;if(!(b(this,a.a_15,4)===0?false:true)){break c}break b}this._=this.A-f;o=true;c:while(o===true){o=false;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break c}break 
b}this._=this.A-f;D=true;c:while(D===true){D=false;if(!(!c(this)?false:b(this,a.a_12,4)===0?false:!h(this)?false:true)){break c}break b}this._=this.A-f;n=true;c:while(n===true){n=false;if(!(!c(this)?false:b(this,a.a_13,4)===0?false:true)){break c}break b}this._=this.A-f;q=true;c:while(q===true){q=false;if(!(!c(this)?false:b(this,a.a_14,4)===0?false:!h(this)?false:true)){break c}break b}this._=this.A-f}if(!(!c(this)?false:b(this,a.a_22,4)===0?false:!h(this)?false:true)){this._=this.A-V;break d}}}this.B=this._;return!e(this,'')?false:true};a.prototype.r_stem_nominal_verb_suffixes=a.prototype.y;function J(d){var f;var k;var i;var Z;var l;var Y;var m;var X;var W;var j;var s;var t;var u;var v;var w;var x;var y;var z;var A;var B;var C;var n;var E;var F;var G;var H;var I;var J;var K;var L;var M;var N;var O;var P;var Q;var R;var S;var T;var U;var V;var q;var p;var D;var o;var r;d.C=d._;d.B_continue_stemming_noun_suffixes=true;s=true;a:while(s===true){s=false;f=d.A-d._;t=true;d:while(t===true){t=false;u=true;b:while(u===true){u=false;k=d.A-d._;v=true;c:while(v===true){v=false;if(!(!c(d)?false:b(d,a.a_22,4)===0?false:!h(d)?false:true)){break c}break b}d._=d.A-k;w=true;c:while(w===true){w=false;if(!(!c(d)?false:b(d,a.a_20,32)===0?false:!h(d)?false:true)){break c}break b}d._=d.A-k;x=true;c:while(x===true){x=false;if(!(b(d,a.a_21,8)===0?false:!h(d)?false:true)){break c}break b}d._=d.A-k;if(!(!g(d,3,'ken')?false:!h(d)?false:true)){break d}}break a}d._=d.A-f;y=true;c:while(y===true){y=false;if(!(b(d,a.a_19,2)===0?false:true)){break c}z=true;b:while(z===true){z=false;i=d.A-d._;A=true;d:while(A===true){A=false;if(!(b(d,a.a_15,4)===0?false:true)){break d}break b}d._=d.A-i;B=true;d:while(B===true){B=false;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break d}break b}d._=d.A-i;C=true;d:while(C===true){C=false;if(!(!c(d)?false:b(d,a.a_12,4)===0?false:!h(d)?false:true)){break d}break b}d._=d.A-i;n=true;d:while(n===true){n=false;if(!(!c(d)?false:b(d,a.a_13,4)===0?false:true)){break d}break b}d._=d.A-i;E=true;d:while(E===true){E=false;if(!(!c(d)?false:b(d,a.a_14,4)===0?false:!h(d)?false:true)){break d}break b}d._=d.A-i}if(!(!c(d)?false:b(d,a.a_22,4)===0?false:!h(d)?false:true)){break c}break a}d._=d.A-f;F=true;c:while(F===true){F=false;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break c}d.B=d._;if(!e(d,'')){return false}Z=d.A-d._;G=true;d:while(G===true){G=false;d.C=d._;H=true;b:while(H===true){H=false;l=d.A-d._;I=true;e:while(I===true){I=false;if(!(!c(d)?false:b(d,a.a_18,8)===0?false:true)){break e}break b}d._=d.A-l;J=true;e:while(J===true){J=false;if(!(!c(d)?false:b(d,a.a_20,32)===0?false:!h(d)?false:true)){break e}break b}d._=d.A-l;K=true;e:while(K===true){K=false;if(!(b(d,a.a_21,8)===0?false:!h(d)?false:true)){break e}break b}d._=d.A-l;if(!(!c(d)?false:b(d,a.a_22,4)===0?false:!h(d)?false:true)){d._=d.A-Z;break d}}}d.B_continue_stemming_noun_suffixes=false;break a}d._=d.A-f;L=true;b:while(L===true){L=false;if(!(!c(d)?false:b(d,a.a_17,4)===0?false:true)){break b}M=true;c:while(M===true){M=false;Y=d.A-d._;N=true;d:while(N===true){N=false;if(!(!c(d)?false:b(d,a.a_20,32)===0?false:!h(d)?false:true)){break d}break c}d._=d.A-Y;if(!(b(d,a.a_21,8)===0?false:!h(d)?false:true)){break b}}break a}d._=d.A-f;O=true;c:while(O===true){O=false;P=true;b:while(P===true){P=false;m=d.A-d._;Q=true;d:while(Q===true){Q=false;if(!(b(d,a.a_15,4)===0?false:true)){break d}break b}d._=d.A-m;R=true;d:while(R===true){R=false;if(!(!c(d)?false:b(d,a.a_14,4)===0?false:!h(d)?false:true)){break d}break 
b}d._=d.A-m;S=true;d:while(S===true){S=false;if(!(!c(d)?false:b(d,a.a_13,4)===0?false:true)){break d}break b}d._=d.A-m;if(!(!c(d)?false:b(d,a.a_12,4)===0?false:!h(d)?false:true)){break c}}d.B=d._;if(!e(d,'')){return false}X=d.A-d._;T=true;b:while(T===true){T=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_22,4)===0?false:!h(d)?false:true)){d._=d.A-X;break b}}break a}d._=d.A-f;if(!(!c(d)?false:b(d,a.a_18,8)===0?false:true)){return false}d.B=d._;if(!e(d,'')){return false}W=d.A-d._;U=true;d:while(U===true){U=false;d.C=d._;V=true;b:while(V===true){V=false;j=d.A-d._;q=true;c:while(q===true){q=false;if(!(b(d,a.a_15,4)===0?false:true)){break c}break b}d._=d.A-j;p=true;c:while(p===true){p=false;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break c}break b}d._=d.A-j;D=true;c:while(D===true){D=false;if(!(!c(d)?false:b(d,a.a_12,4)===0?false:!h(d)?false:true)){break c}break b}d._=d.A-j;o=true;c:while(o===true){o=false;if(!(!c(d)?false:b(d,a.a_13,4)===0?false:true)){break c}break b}d._=d.A-j;r=true;c:while(r===true){r=false;if(!(!c(d)?false:b(d,a.a_14,4)===0?false:!h(d)?false:true)){break c}break b}d._=d.A-j}if(!(!c(d)?false:b(d,a.a_22,4)===0?false:!h(d)?false:true)){d._=d.A-W;break d}}}d.B=d._;return!e(d,'')?false:true};a.prototype.__=function(){var z;var N;var M;var L;var p;var K;var r;var J;var t;var u;var v;var w;var x;var y;var d;var A;var B;var C;var D;var E;var F;var G;var H;var I;var s;var q;var n;var m;var j;var h;this.C=this._;if(!(!g(this,2,'ki')?false:true)){return false}w=true;b:while(w===true){w=false;z=this.A-this._;x=true;c:while(x===true){x=false;if(!(!c(this)?false:b(this,a.a_6,4)===0?false:true)){break c}this.B=this._;if(!e(this,'')){return false}N=this.A-this._;y=true;f:while(y===true){y=false;this.C=this._;d=true;e:while(d===true){d=false;M=this.A-this._;A=true;d:while(A===true){A=false;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break d}this.B=this._;if(!e(this,'')){return false}L=this.A-this._;B=true;a:while(B===true){B=false;if(!i(this)){this._=this.A-L;break a}}break e}this._=this.A-M;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){this._=this.A-N;break f}this.B=this._;if(!e(this,'')){return false}p=this.A-this._;C=true;a:while(C===true){C=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-p;break a}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-p;break a}}}}break b}this._=this.A-z;D=true;d:while(D===true){D=false;if(!(!c(this)?false:b(this,a.a_3,4)===0?false:!o(this)?false:true)){break d}this.B=this._;if(!e(this,'')){return false}K=this.A-this._;E=true;e:while(E===true){E=false;this.C=this._;F=true;a:while(F===true){F=false;r=this.A-this._;G=true;c:while(G===true){G=false;if(!(b(this,a.a_1,2)===0?false:true)){break c}this.B=this._;if(!e(this,'')){return false}break a}this._=this.A-r;H=true;f:while(H===true){H=false;this.C=this._;I=true;g:while(I===true){I=false;J=this.A-this._;s=true;c:while(s===true){s=false;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){break c}break g}this._=this.A-J;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){break f}}this.B=this._;if(!e(this,'')){return false}t=this.A-this._;q=true;c:while(q===true){q=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-t;break c}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-t;break c}}break a}this._=this.A-r;if(!i(this)){this._=this.A-K;break e}}}break b}this._=this.A-z;if(!(!c(this)?false:b(this,a.a_7,2)===0?false:true)){return 
false}n=true;a:while(n===true){n=false;u=this.A-this._;m=true;c:while(m===true){m=false;if(!(b(this,a.a_1,2)===0?false:true)){break c}this.B=this._;if(!e(this,'')){return false}break a}this._=this.A-u;j=true;d:while(j===true){j=false;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){break d}this.B=this._;if(!e(this,'')){return false}v=this.A-this._;h=true;c:while(h===true){h=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-v;break c}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-v;break c}}break a}this._=this.A-u;if(!i(this)){return false}}}return true};a.prototype.r_stem_suffix_chain_before_ki=a.prototype.__;function i(d){var j;var O;var N;var M;var q;var L;var s;var K;var u;var v;var w;var x;var y;var z;var h;var B;var C;var D;var E;var F;var G;var H;var I;var J;var t;var r;var p;var n;var m;var A;d.C=d._;if(!(!g(d,2,'ki')?false:true)){return false}x=true;b:while(x===true){x=false;j=d.A-d._;y=true;c:while(y===true){y=false;if(!(!c(d)?false:b(d,a.a_6,4)===0?false:true)){break c}d.B=d._;if(!e(d,'')){return false}O=d.A-d._;z=true;f:while(z===true){z=false;d.C=d._;h=true;e:while(h===true){h=false;N=d.A-d._;B=true;d:while(B===true){B=false;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break d}d.B=d._;if(!e(d,'')){return false}M=d.A-d._;C=true;a:while(C===true){C=false;if(!i(d)){d._=d.A-M;break a}}break e}d._=d.A-N;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){d._=d.A-O;break f}d.B=d._;if(!e(d,'')){return false}q=d.A-d._;D=true;a:while(D===true){D=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-q;break a}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-q;break a}}}}break b}d._=d.A-j;E=true;d:while(E===true){E=false;if(!(!c(d)?false:b(d,a.a_3,4)===0?false:!o(d)?false:true)){break d}d.B=d._;if(!e(d,'')){return false}L=d.A-d._;F=true;e:while(F===true){F=false;d.C=d._;G=true;a:while(G===true){G=false;s=d.A-d._;H=true;c:while(H===true){H=false;if(!(b(d,a.a_1,2)===0?false:true)){break c}d.B=d._;if(!e(d,'')){return false}break a}d._=d.A-s;I=true;f:while(I===true){I=false;d.C=d._;J=true;g:while(J===true){J=false;K=d.A-d._;t=true;c:while(t===true){t=false;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){break c}break g}d._=d.A-K;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){break f}}d.B=d._;if(!e(d,'')){return false}u=d.A-d._;r=true;c:while(r===true){r=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-u;break c}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-u;break c}}break a}d._=d.A-s;if(!i(d)){d._=d.A-L;break e}}}break b}d._=d.A-j;if(!(!c(d)?false:b(d,a.a_7,2)===0?false:true)){return false}p=true;a:while(p===true){p=false;v=d.A-d._;n=true;c:while(n===true){n=false;if(!(b(d,a.a_1,2)===0?false:true)){break c}d.B=d._;if(!e(d,'')){return false}break a}d._=d.A-v;m=true;d:while(m===true){m=false;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){break d}d.B=d._;if(!e(d,'')){return false}w=d.A-d._;A=true;c:while(A===true){A=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-w;break c}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-w;break c}}break a}d._=d.A-v;if(!i(d)){return false}}}return true};a.prototype.z=function(){var d;var ar;var S;var j;var av;var m;var aq;var n;var p;var ax;var ay;var q;var ap;var r;var s;var as;var at;var au;var t;var aw;var u;var v;var w;var aA;var aB;var ao;var x;var y;var z;var A;var B;var C;var D;var E;var F;var G;var H;var I;var J;var K;var L;var M;var N;var O;var P;var Q;var R;var g;var 
T;var U;var V;var W;var X;var Y;var Z;var _;var $;var a0;var a1;var a2;var a3;var a4;var a5;var a6;var a7;var a8;var a9;var aa;var ab;var ac;var ad;var ae;var af;var ag;var ah;var ai;var aj;var ak;var al;var am;var an;var aC;var az;y=true;a:while(y===true){y=false;d=this.A-this._;z=true;b:while(z===true){z=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break b}this.B=this._;if(!e(this,'')){return false}ar=this.A-this._;A=true;c:while(A===true){A=false;if(!i(this)){this._=this.A-ar;break c}}break a}this._=this.A-d;B=true;g:while(B===true){B=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_11,2)===0?false:!o(this)?false:true)){break g}this.B=this._;if(!e(this,'')){return false}S=this.A-this._;C=true;b:while(C===true){C=false;D=true;c:while(D===true){D=false;j=this.A-this._;E=true;d:while(E===true){E=false;this.C=this._;if(!(b(this,a.a_1,2)===0?false:true)){break d}this.B=this._;if(!e(this,'')){return false}break c}this._=this.A-j;F=true;f:while(F===true){F=false;this.C=this._;G=true;d:while(G===true){G=false;av=this.A-this._;H=true;e:while(H===true){H=false;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){break e}break d}this._=this.A-av;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){break f}}this.B=this._;if(!e(this,'')){return false}m=this.A-this._;I=true;d:while(I===true){I=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-m;break d}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-m;break d}}break c}aC=this._=this.A-j;this.C=aC;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-S;break b}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-S;break b}}}break a}this._=this.A-d;J=true;b:while(J===true){J=false;this.C=this._;K=true;d:while(K===true){K=false;aq=this.A-this._;L=true;c:while(L===true){L=false;if(!(!c(this)?false:b(this,a.a_7,2)===0?false:true)){break c}break d}this._=this.A-aq;if(!(!c(this)?false:b(this,a.a_5,2)===0?false:true)){break b}}M=true;c:while(M===true){M=false;n=this.A-this._;N=true;d:while(N===true){N=false;if(!(b(this,a.a_1,2)===0?false:true)){break d}this.B=this._;if(!e(this,'')){return false}break c}this._=this.A-n;O=true;e:while(O===true){O=false;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){break e}this.B=this._;if(!e(this,'')){return false}p=this.A-this._;P=true;d:while(P===true){P=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-p;break d}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-p;break d}}break c}this._=this.A-n;if(!i(this)){break b}}break a}this._=this.A-d;Q=true;c:while(Q===true){Q=false;this.C=this._;R=true;b:while(R===true){R=false;ax=this.A-this._;g=true;d:while(g===true){g=false;if(!(!c(this)?false:b(this,a.a_9,2)===0?false:true)){break d}break b}this._=this.A-ax;if(!(!c(this)?false:b(this,a.a_2,4)===0?false:true)){break c}}T=true;d:while(T===true){T=false;ay=this.A-this._;U=true;e:while(U===true){U=false;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){break e}this.B=this._;if(!e(this,'')){return false}q=this.A-this._;V=true;b:while(V===true){V=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-q;break b}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-q;break b}}break d}this._=this.A-ay;if(!(b(this,a.a_1,2)===0?false:true)){break c}}break 
a}this._=this.A-d;W=true;d:while(W===true){W=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_8,4)===0?false:true)){break d}this.B=this._;if(!e(this,'')){return false}ap=this.A-this._;X=true;e:while(X===true){X=false;this.C=this._;Y=true;c:while(Y===true){Y=false;r=this.A-this._;Z=true;f:while(Z===true){Z=false;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){break f}this.B=this._;if(!e(this,'')){return false}s=this.A-this._;_=true;b:while(_===true){_=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-s;break b}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-s;break b}}break c}this._=this.A-r;$=true;b:while($===true){$=false;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break b}this.B=this._;if(!e(this,'')){return false}as=this.A-this._;a0=true;f:while(a0===true){a0=false;if(!i(this)){this._=this.A-as;break f}}break c}this._=this.A-r;if(!i(this)){this._=this.A-ap;break e}}}break a}this._=this.A-d;a1=true;d:while(a1===true){a1=false;this.C=this._;a2=true;b:while(a2===true){a2=false;at=this.A-this._;a3=true;c:while(a3===true){a3=false;if(!(!c(this)?false:b(this,a.a_3,4)===0?false:!o(this)?false:true)){break c}break b}this._=this.A-at;if(!(!c(this)?false:b(this,a.a_10,2)===0?false:!h(this)?false:true)){break d}}this.B=this._;if(!e(this,'')){return false}au=this.A-this._;a4=true;e:while(a4===true){a4=false;a5=true;c:while(a5===true){a5=false;t=this.A-this._;a6=true;b:while(a6===true){a6=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){break b}this.B=this._;if(!e(this,'')){return false}if(!i(this)){break b}break c}this._=this.A-t;a7=true;f:while(a7===true){a7=false;this.C=this._;a8=true;b:while(a8===true){a8=false;aw=this.A-this._;a9=true;g:while(a9===true){a9=false;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){break g}break b}this._=this.A-aw;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){break f}}this.B=this._;if(!e(this,'')){return false}u=this.A-this._;aa=true;b:while(aa===true){aa=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-u;break b}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-u;break b}}break c}this._=this.A-t;if(!i(this)){this._=this.A-au;break e}}}break a}this._=this.A-d;ab=true;b:while(ab===true){ab=false;this.C=this._;if(!(b(this,a.a_1,2)===0?false:true)){break b}this.B=this._;if(!e(this,'')){return false}break a}this._=this.A-d;ac=true;b:while(ac===true){ac=false;if(!i(this)){break b}break a}this._=this.A-d;ad=true;c:while(ad===true){ad=false;this.C=this._;ae=true;b:while(ae===true){ae=false;v=this.A-this._;af=true;d:while(af===true){af=false;if(!(!c(this)?false:b(this,a.a_6,4)===0?false:true)){break d}break b}this._=this.A-v;ag=true;d:while(ag===true){ag=false;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!h(this)?false:true)){break d}break b}this._=this.A-v;if(!(!c(this)?false:b(this,a.a_4,2)===0?false:!h(this)?false:true)){break c}}this.B=this._;if(!e(this,'')){return false}w=this.A-this._;ah=true;b:while(ah===true){ah=false;this.C=this._;ai=true;d:while(ai===true){ai=false;aA=this.A-this._;aj=true;e:while(aj===true){aj=false;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){break e}this.B=this._;if(!e(this,'')){return false}aB=this.A-this._;ak=true;f:while(ak===true){ak=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-aB;break f}}break d}this._=this.A-aA;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-w;break 
b}}this.B=this._;if(!e(this,'')){return false}this.C=this._;if(!i(this)){this._=this.A-w;break b}}break a}az=this._=this.A-d;this.C=az;al=true;b:while(al===true){al=false;ao=this.A-this._;am=true;c:while(am===true){am=false;if(!(b(this,a.a_0,10)===0?false:!k(this)?false:true)){break c}break b}this._=this.A-ao;if(!(!c(this)?false:!f(this,a.g_U,105,305)?false:!l(this)?false:true)){return false}}this.B=this._;if(!e(this,'')){return false}x=this.A-this._;an=true;b:while(an===true){an=false;this.C=this._;if(!(!c(this)?false:b(this,a.a_16,2)===0?false:true)){this._=this.A-x;break b}this.B=this._;if(!e(this,'')){return false}if(!i(this)){this._=this.A-x;break b}}}return true};a.prototype.r_stem_noun_suffixes=a.prototype.z;function L(d){var g;var as;var S;var m;var aw;var n;var ar;var p;var q;var ay;var az;var r;var aq;var s;var t;var at;var au;var av;var u;var ax;var v;var w;var x;var aB;var aC;var ap;var y;var z;var A;var B;var C;var D;var E;var F;var G;var H;var I;var J;var K;var L;var M;var N;var O;var P;var Q;var R;var j;var T;var U;var V;var W;var X;var Y;var Z;var _;var $;var a0;var a1;var a2;var a3;var a4;var a5;var a6;var a7;var a8;var a9;var aa;var ab;var ac;var ad;var ae;var af;var ag;var ah;var ai;var aj;var ak;var al;var am;var an;var ao;var aD;var aA;z=true;a:while(z===true){z=false;g=d.A-d._;A=true;b:while(A===true){A=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break b}d.B=d._;if(!e(d,'')){return false}as=d.A-d._;B=true;c:while(B===true){B=false;if(!i(d)){d._=d.A-as;break c}}break a}d._=d.A-g;C=true;g:while(C===true){C=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_11,2)===0?false:!o(d)?false:true)){break g}d.B=d._;if(!e(d,'')){return false}S=d.A-d._;D=true;b:while(D===true){D=false;E=true;c:while(E===true){E=false;m=d.A-d._;F=true;d:while(F===true){F=false;d.C=d._;if(!(b(d,a.a_1,2)===0?false:true)){break d}d.B=d._;if(!e(d,'')){return false}break c}d._=d.A-m;G=true;f:while(G===true){G=false;d.C=d._;H=true;d:while(H===true){H=false;aw=d.A-d._;I=true;e:while(I===true){I=false;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){break e}break d}d._=d.A-aw;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){break f}}d.B=d._;if(!e(d,'')){return false}n=d.A-d._;J=true;d:while(J===true){J=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-n;break d}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-n;break d}}break c}aD=d._=d.A-m;d.C=aD;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-S;break b}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-S;break b}}}break a}d._=d.A-g;K=true;b:while(K===true){K=false;d.C=d._;L=true;d:while(L===true){L=false;ar=d.A-d._;M=true;c:while(M===true){M=false;if(!(!c(d)?false:b(d,a.a_7,2)===0?false:true)){break c}break d}d._=d.A-ar;if(!(!c(d)?false:b(d,a.a_5,2)===0?false:true)){break b}}N=true;c:while(N===true){N=false;p=d.A-d._;O=true;d:while(O===true){O=false;if(!(b(d,a.a_1,2)===0?false:true)){break d}d.B=d._;if(!e(d,'')){return false}break c}d._=d.A-p;P=true;e:while(P===true){P=false;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){break e}d.B=d._;if(!e(d,'')){return false}q=d.A-d._;Q=true;d:while(Q===true){Q=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-q;break d}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-q;break d}}break c}d._=d.A-p;if(!i(d)){break b}}break a}d._=d.A-g;R=true;c:while(R===true){R=false;d.C=d._;j=true;b:while(j===true){j=false;ay=d.A-d._;T=true;d:while(T===true){T=false;if(!(!c(d)?false:b(d,a.a_9,2)===0?false:true)){break d}break 
b}d._=d.A-ay;if(!(!c(d)?false:b(d,a.a_2,4)===0?false:true)){break c}}U=true;d:while(U===true){U=false;az=d.A-d._;V=true;e:while(V===true){V=false;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){break e}d.B=d._;if(!e(d,'')){return false}r=d.A-d._;W=true;b:while(W===true){W=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-r;break b}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-r;break b}}break d}d._=d.A-az;if(!(b(d,a.a_1,2)===0?false:true)){break c}}break a}d._=d.A-g;X=true;d:while(X===true){X=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_8,4)===0?false:true)){break d}d.B=d._;if(!e(d,'')){return false}aq=d.A-d._;Y=true;e:while(Y===true){Y=false;d.C=d._;Z=true;c:while(Z===true){Z=false;s=d.A-d._;_=true;f:while(_===true){_=false;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){break f}d.B=d._;if(!e(d,'')){return false}t=d.A-d._;$=true;b:while($===true){$=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-t;break b}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-t;break b}}break c}d._=d.A-s;a0=true;b:while(a0===true){a0=false;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break b}d.B=d._;if(!e(d,'')){return false}at=d.A-d._;a1=true;f:while(a1===true){a1=false;if(!i(d)){d._=d.A-at;break f}}break c}d._=d.A-s;if(!i(d)){d._=d.A-aq;break e}}}break a}d._=d.A-g;a2=true;d:while(a2===true){a2=false;d.C=d._;a3=true;b:while(a3===true){a3=false;au=d.A-d._;a4=true;c:while(a4===true){a4=false;if(!(!c(d)?false:b(d,a.a_3,4)===0?false:!o(d)?false:true)){break c}break b}d._=d.A-au;if(!(!c(d)?false:b(d,a.a_10,2)===0?false:!h(d)?false:true)){break d}}d.B=d._;if(!e(d,'')){return false}av=d.A-d._;a5=true;e:while(a5===true){a5=false;a6=true;c:while(a6===true){a6=false;u=d.A-d._;a7=true;b:while(a7===true){a7=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){break b}d.B=d._;if(!e(d,'')){return false}if(!i(d)){break b}break c}d._=d.A-u;a8=true;f:while(a8===true){a8=false;d.C=d._;a9=true;b:while(a9===true){a9=false;ax=d.A-d._;aa=true;g:while(aa===true){aa=false;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){break g}break b}d._=d.A-ax;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){break f}}d.B=d._;if(!e(d,'')){return false}v=d.A-d._;ab=true;b:while(ab===true){ab=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-v;break b}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-v;break b}}break c}d._=d.A-u;if(!i(d)){d._=d.A-av;break e}}}break a}d._=d.A-g;ac=true;b:while(ac===true){ac=false;d.C=d._;if(!(b(d,a.a_1,2)===0?false:true)){break b}d.B=d._;if(!e(d,'')){return false}break a}d._=d.A-g;ad=true;b:while(ad===true){ad=false;if(!i(d)){break b}break a}d._=d.A-g;ae=true;c:while(ae===true){ae=false;d.C=d._;af=true;b:while(af===true){af=false;w=d.A-d._;ag=true;d:while(ag===true){ag=false;if(!(!c(d)?false:b(d,a.a_6,4)===0?false:true)){break d}break b}d._=d.A-w;ah=true;d:while(ah===true){ah=false;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!h(d)?false:true)){break d}break b}d._=d.A-w;if(!(!c(d)?false:b(d,a.a_4,2)===0?false:!h(d)?false:true)){break c}}d.B=d._;if(!e(d,'')){return false}x=d.A-d._;ai=true;b:while(ai===true){ai=false;d.C=d._;aj=true;d:while(aj===true){aj=false;aB=d.A-d._;ak=true;e:while(ak===true){ak=false;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){break e}d.B=d._;if(!e(d,'')){return false}aC=d.A-d._;al=true;f:while(al===true){al=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-aC;break f}}break d}d._=d.A-aB;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-x;break 
b}}d.B=d._;if(!e(d,'')){return false}d.C=d._;if(!i(d)){d._=d.A-x;break b}}break a}aA=d._=d.A-g;d.C=aA;am=true;b:while(am===true){am=false;ap=d.A-d._;an=true;c:while(an===true){an=false;if(!(b(d,a.a_0,10)===0?false:!k(d)?false:true)){break c}break b}d._=d.A-ap;if(!(!c(d)?false:!f(d,a.g_U,105,305)?false:!l(d)?false:true)){return false}}d.B=d._;if(!e(d,'')){return false}y=d.A-d._;ao=true;b:while(ao===true){ao=false;d.C=d._;if(!(!c(d)?false:b(d,a.a_16,2)===0?false:true)){d._=d.A-y;break b}d.B=d._;if(!e(d,'')){return false}if(!i(d)){d._=d.A-y;break b}}}return true};a.prototype.w=function(){var c;this.C=this._;c=b(this,a.a_23,4);if(c===0){return false}this.B=this._;switch(c){case 0:return false;case 1:if(!e(this,'p')){return false}break;case 2:if(!e(this,'ç')){return false}break;case 3:if(!e(this,'t')){return false}break;case 4:if(!e(this,'k')){return false}break}return true};a.prototype.r_post_process_last_consonants=a.prototype.w;function w(c){var d;c.C=c._;d=b(c,a.a_23,4);if(d===0){return false}c.B=c._;switch(d){case 0:return false;case 1:if(!e(c,'p')){return false}break;case 2:if(!e(c,'ç')){return false}break;case 3:if(!e(c,'t')){return false}break;case 4:if(!e(c,'k')){return false}break}return true};a.prototype.N=function(){var L;var _;var i;var Y;var B;var W;var K;var l;var S;var Q;var p;var O;var M;var s;var U;var u;var v;var w;var x;var y;var z;var A;var b;var C;var D;var j;var F;var G;var H;var I;var J;var E;var t;var r;var N;var q;var P;var o;var R;var m;var T;var k;var V;var h;var X;var e;var Z;var d;var $;var a0;var a1;var c;L=this.A-this._;u=true;a:while(u===true){u=false;_=this.A-this._;v=true;b:while(v===true){v=false;if(!g(this,1,'d')){break b}break a}this._=this.A-_;if(!g(this,1,'g')){return false}}this._=this.A-L;w=true;a:while(w===true){w=false;i=this.A-this._;x=true;b:while(x===true){x=false;Y=this.A-this._;d:while(true){B=this.A-this._;y=true;c:while(y===true){y=false;if(!f(this,a.g_vowel,97,305)){break c}this._=this.A-B;break d}V=this._=this.A-B;if(V<=this.D){break b}this._--}z=true;c:while(z===true){z=false;W=this.A-this._;A=true;d:while(A===true){A=false;if(!g(this,1,'a')){break d}break c}this._=this.A-W;if(!g(this,1,'ı')){break b}}h=this._=this.A-Y;b=h;N=h;q=n(this,h,h,'ı');if(h<=this.B){this.B+=q|0}if(N<=this.C){this.C+=q|0}this._=b;break a}this._=this.A-i;C=true;b:while(C===true){C=false;K=this.A-this._;c:while(true){l=this.A-this._;D=true;d:while(D===true){D=false;if(!f(this,a.g_vowel,97,305)){break d}this._=this.A-l;break c}X=this._=this.A-l;if(X<=this.D){break b}this._--}j=true;c:while(j===true){j=false;S=this.A-this._;F=true;d:while(F===true){F=false;if(!g(this,1,'e')){break d}break c}this._=this.A-S;if(!g(this,1,'i')){break b}}e=this._=this.A-K;b=e;P=e;o=n(this,e,e,'i');if(e<=this.B){this.B+=o|0}if(P<=this.C){this.C+=o|0}this._=b;break a}this._=this.A-i;G=true;b:while(G===true){G=false;Q=this.A-this._;c:while(true){p=this.A-this._;H=true;d:while(H===true){H=false;if(!f(this,a.g_vowel,97,305)){break d}this._=this.A-p;break c}Z=this._=this.A-p;if(Z<=this.D){break b}this._--}I=true;c:while(I===true){I=false;O=this.A-this._;J=true;d:while(J===true){J=false;if(!g(this,1,'o')){break d}break c}this._=this.A-O;if(!g(this,1,'u')){break b}}d=this._=this.A-Q;b=d;R=d;m=n(this,d,d,'u');if(d<=this.B){this.B+=m|0}if(R<=this.C){this.C+=m|0}this._=b;break a}a1=this._=(a0=this.A)-i;M=a0-a1;b:while(true){s=this.A-this._;E=true;c:while(E===true){E=false;if(!f(this,a.g_vowel,97,305)){break c}this._=this.A-s;break b}$=this._=this.A-s;if($<=this.D){return 
false}this._--}t=true;b:while(t===true){t=false;U=this.A-this._;r=true;c:while(r===true){r=false;if(!g(this,1,'ö')){break c}break b}this._=this.A-U;if(!g(this,1,'ü')){return false}}c=this._=this.A-M;b=c;T=c;k=n(this,c,c,'ü');if(c<=this.B){this.B+=k|0}if(T<=this.C){this.C+=k|0}this._=b}return true};a.prototype.r_append_U_to_stems_ending_with_d_or_g=a.prototype.N;function z(b){var $;var Z;var j;var X;var F;var L;var T;var m;var R;var P;var q;var N;var V;var t;var M;var v;var w;var x;var y;var z;var A;var B;var c;var D;var E;var C;var G;var H;var I;var J;var K;var u;var s;var r;var O;var p;var Q;var o;var S;var l;var U;var k;var W;var i;var Y;var h;var _;var e;var a0;var a1;var a2;var d;$=b.A-b._;v=true;a:while(v===true){v=false;Z=b.A-b._;w=true;b:while(w===true){w=false;if(!g(b,1,'d')){break b}break a}b._=b.A-Z;if(!g(b,1,'g')){return false}}b._=b.A-$;x=true;a:while(x===true){x=false;j=b.A-b._;y=true;b:while(y===true){y=false;X=b.A-b._;d:while(true){F=b.A-b._;z=true;c:while(z===true){z=false;if(!f(b,a.g_vowel,97,305)){break c}b._=b.A-F;break d}W=b._=b.A-F;if(W<=b.D){break b}b._--}A=true;c:while(A===true){A=false;L=b.A-b._;B=true;d:while(B===true){B=false;if(!g(b,1,'a')){break d}break c}b._=b.A-L;if(!g(b,1,'ı')){break b}}i=b._=b.A-X;c=i;O=i;p=n(b,i,i,'ı');if(i<=b.B){b.B+=p|0}if(O<=b.C){b.C+=p|0}b._=c;break a}b._=b.A-j;D=true;b:while(D===true){D=false;T=b.A-b._;c:while(true){m=b.A-b._;E=true;d:while(E===true){E=false;if(!f(b,a.g_vowel,97,305)){break d}b._=b.A-m;break c}Y=b._=b.A-m;if(Y<=b.D){break b}b._--}C=true;c:while(C===true){C=false;R=b.A-b._;G=true;d:while(G===true){G=false;if(!g(b,1,'e')){break d}break c}b._=b.A-R;if(!g(b,1,'i')){break b}}h=b._=b.A-T;c=h;Q=h;o=n(b,h,h,'i');if(h<=b.B){b.B+=o|0}if(Q<=b.C){b.C+=o|0}b._=c;break a}b._=b.A-j;H=true;b:while(H===true){H=false;P=b.A-b._;c:while(true){q=b.A-b._;I=true;d:while(I===true){I=false;if(!f(b,a.g_vowel,97,305)){break d}b._=b.A-q;break c}_=b._=b.A-q;if(_<=b.D){break b}b._--}J=true;c:while(J===true){J=false;N=b.A-b._;K=true;d:while(K===true){K=false;if(!g(b,1,'o')){break d}break c}b._=b.A-N;if(!g(b,1,'u')){break b}}e=b._=b.A-P;c=e;S=e;l=n(b,e,e,'u');if(e<=b.B){b.B+=l|0}if(S<=b.C){b.C+=l|0}b._=c;break a}a2=b._=(a1=b.A)-j;V=a1-a2;b:while(true){t=b.A-b._;u=true;c:while(u===true){u=false;if(!f(b,a.g_vowel,97,305)){break c}b._=b.A-t;break b}a0=b._=b.A-t;if(a0<=b.D){return false}b._--}s=true;b:while(s===true){s=false;M=b.A-b._;r=true;c:while(r===true){r=false;if(!g(b,1,'ö')){break c}break b}b._=b.A-M;if(!g(b,1,'ü')){return false}}d=b._=b.A-V;c=d;U=d;k=n(b,d,d,'ü');if(d<=b.B){b.B+=k|0}if(U<=b.C){b.C+=k|0}b._=c}return true};a.prototype.v=function(){var e;var f;var b;var c;var d;e=this._;b=2;a:while(true){f=this._;c=true;b:while(c===true){c=false;c:while(true){d=true;d:while(d===true){d=false;if(!v(this,a.g_vowel,97,305)){break d}break c}if(this._>=this.A){break b}this._++}b--;continue a}this._=f;break a}if(b>0){return false}this._=e;return true};a.prototype.r_more_than_one_syllable_word=a.prototype.v;function N(b){var f;var g;var c;var d;var e;f=b._;c=2;a:while(true){g=b._;d=true;b:while(d===true){d=false;c:while(true){e=true;d:while(e===true){e=false;if(!v(b,a.g_vowel,97,305)){break d}break c}if(b._>=b.A){break b}b._++}c--;continue a}b._=g;break a}if(c>0){return false}b._=f;return true};a.prototype.P=function(){var f;var g;var h;var b;var a;var c;var d;var i;var j;var e;b=true;b:while(b===true){b=false;f=this._;a=true;a:while(a===true){a=false;g=this._;c:while(true){c=true;d:while(c===true){c=false;if(!s(this,2,'ad')){break d}break 
c}if(this._>=this.A){break a}this._++}i=this.I_strlen=2;if(!(i===this.A)){break a}this._=g;break b}j=this._=f;h=j;a:while(true){d=true;c:while(d===true){d=false;if(!s(this,5,'soyad')){break c}break a}if(this._>=this.A){return false}this._++}e=this.I_strlen=5;if(!(e===this.A)){return false}this._=h}return true};a.prototype.r_is_reserved_word=a.prototype.P;function x(a){var g;var h;var i;var c;var b;var d;var e;var j;var k;var f;c=true;b:while(c===true){c=false;g=a._;b=true;a:while(b===true){b=false;h=a._;c:while(true){d=true;d:while(d===true){d=false;if(!s(a,2,'ad')){break d}break c}if(a._>=a.A){break a}a._++}j=a.I_strlen=2;if(!(j===a.A)){break a}a._=h;break b}k=a._=g;i=k;a:while(true){e=true;c:while(e===true){e=false;if(!s(a,5,'soyad')){break c}break a}if(a._>=a.A){return false}a._++}f=a.I_strlen=5;if(!(f===a.A)){return false}a._=i}return true};a.prototype.x=function(){var d;var e;var a;var b;var c;var f;var g;var h;d=this._;a=true;a:while(a===true){a=false;if(!x(this)){break a}return false}f=this._=d;this.D=f;h=this._=g=this.A;e=g-h;b=true;a:while(b===true){b=false;if(!z(this)){break a}}this._=this.A-e;c=true;a:while(c===true){c=false;if(!w(this)){break a}}this._=this.D;return true};a.prototype.r_postlude=a.prototype.x;function O(a){var e;var f;var b;var c;var d;var g;var h;var i;e=a._;b=true;a:while(b===true){b=false;if(!x(a)){break a}return false}g=a._=e;a.D=g;i=a._=h=a.A;f=h-i;c=true;a:while(c===true){c=false;if(!z(a)){break a}}a._=a.A-f;d=true;a:while(d===true){d=false;if(!w(a)){break a}}a._=a.D;return true};a.prototype.H=function(){var c;var a;var b;var d;var e;if(!N(this)){return false}this.D=this._;e=this._=d=this.A;c=d-e;a=true;a:while(a===true){a=false;if(!J(this)){break a}}this._=this.A-c;if(!this.B_continue_stemming_noun_suffixes){return false}b=true;a:while(b===true){b=false;if(!L(this)){break a}}this._=this.D;return!O(this)?false:true};a.prototype.stem=a.prototype.H;a.prototype.L=function(b){return b instanceof a};a.prototype.equals=a.prototype.L;a.prototype.M=function(){var c;var a;var b;var d;c='TurkishStemmer';a=0;for(b=0;b<c.length;b++){d=c.charCodeAt(b);a=(a<<5)-a+d;a=a&a}return a|0};a.prototype.hashCode=a.prototype.M;a.serialVersionUID=1;j(a,'methodObject',function(){return new a});j(a,'a_0',function(){return[new d('m',-1,-1),new d('n',-1,-1),new d('miz',-1,-1),new d('niz',-1,-1),new d('muz',-1,-1),new d('nuz',-1,-1),new d('müz',-1,-1),new d('nüz',-1,-1),new d('mız',-1,-1),new d('nız',-1,-1)]});j(a,'a_1',function(){return[new d('leri',-1,-1),new d('ları',-1,-1)]});j(a,'a_2',function(){return[new d('ni',-1,-1),new d('nu',-1,-1),new d('nü',-1,-1),new d('nı',-1,-1)]});j(a,'a_3',function(){return[new d('in',-1,-1),new d('un',-1,-1),new d('ün',-1,-1),new d('ın',-1,-1)]});j(a,'a_4',function(){return[new d('a',-1,-1),new d('e',-1,-1)]});j(a,'a_5',function(){return[new d('na',-1,-1),new d('ne',-1,-1)]});j(a,'a_6',function(){return[new d('da',-1,-1),new d('ta',-1,-1),new d('de',-1,-1),new d('te',-1,-1)]});j(a,'a_7',function(){return[new d('nda',-1,-1),new d('nde',-1,-1)]});j(a,'a_8',function(){return[new d('dan',-1,-1),new d('tan',-1,-1),new d('den',-1,-1),new d('ten',-1,-1)]});j(a,'a_9',function(){return[new d('ndan',-1,-1),new d('nden',-1,-1)]});j(a,'a_10',function(){return[new d('la',-1,-1),new d('le',-1,-1)]});j(a,'a_11',function(){return[new d('ca',-1,-1),new d('ce',-1,-1)]});j(a,'a_12',function(){return[new d('im',-1,-1),new d('um',-1,-1),new d('üm',-1,-1),new d('ım',-1,-1)]});j(a,'a_13',function(){return[new d('sin',-1,-1),new d('sun',-1,-1),new d('sün',-1,-1),new 
d('sın',-1,-1)]});j(a,'a_14',function(){return[new d('iz',-1,-1),new d('uz',-1,-1),new d('üz',-1,-1),new d('ız',-1,-1)]});j(a,'a_15',function(){return[new d('siniz',-1,-1),new d('sunuz',-1,-1),new d('sünüz',-1,-1),new d('sınız',-1,-1)]});j(a,'a_16',function(){return[new d('lar',-1,-1),new d('ler',-1,-1)]});j(a,'a_17',function(){return[new d('niz',-1,-1),new d('nuz',-1,-1),new d('nüz',-1,-1),new d('nız',-1,-1)]});j(a,'a_18',function(){return[new d('dir',-1,-1),new d('tir',-1,-1),new d('dur',-1,-1),new d('tur',-1,-1),new d('dür',-1,-1),new d('tür',-1,-1),new d('dır',-1,-1),new d('tır',-1,-1)]});j(a,'a_19',function(){return[new d('casına',-1,-1),new d('cesine',-1,-1)]});j(a,'a_20',function(){return[new d('di',-1,-1),new d('ti',-1,-1),new d('dik',-1,-1),new d('tik',-1,-1),new d('duk',-1,-1),new d('tuk',-1,-1),new d('dük',-1,-1),new d('tük',-1,-1),new d('dık',-1,-1),new d('tık',-1,-1),new d('dim',-1,-1),new d('tim',-1,-1),new d('dum',-1,-1),new d('tum',-1,-1),new d('düm',-1,-1),new d('tüm',-1,-1),new d('dım',-1,-1),new d('tım',-1,-1),new d('din',-1,-1),new d('tin',-1,-1),new d('dun',-1,-1),new d('tun',-1,-1),new d('dün',-1,-1),new d('tün',-1,-1),new d('dın',-1,-1),new d('tın',-1,-1),new d('du',-1,-1),new d('tu',-1,-1),new d('dü',-1,-1),new d('tü',-1,-1),new d('dı',-1,-1),new d('tı',-1,-1)]});j(a,'a_21',function(){return[new d('sa',-1,-1),new d('se',-1,-1),new d('sak',-1,-1),new d('sek',-1,-1),new d('sam',-1,-1),new d('sem',-1,-1),new d('san',-1,-1),new d('sen',-1,-1)]});j(a,'a_22',function(){return[new d('miş',-1,-1),new d('muş',-1,-1),new d('müş',-1,-1),new d('mış',-1,-1)]});j(a,'a_23',function(){return[new d('b',-1,1),new d('c',-1,2),new d('d',-1,3),new d('ğ',-1,4)]});j(a,'g_vowel',function(){return[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1]});j(a,'g_U',function(){return[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1]});j(a,'g_vowel1',function(){return[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1]});j(a,'g_vowel2',function(){return[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130]});j(a,'g_vowel3',function(){return[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1]});j(a,'g_vowel4',function(){return[17]});j(a,'g_vowel5',function(){return[65]});j(a,'g_vowel6',function(){return[65]});var y={'src/stemmer.jsx':{Stemmer:u},'src/turkish-stemmer.jsx':{TurkishStemmer:a}}}(JSX))
@@ -29,10 +28,8 @@ class SearchTurkish(SearchLanguage):
js_stemmer_code = js_stemmer
stopwords = set() # type: Set[str]
- def init(self, options):
- # type: (Dict) -> None
+ def init(self, options: Dict) -> None:
self.stemmer = snowballstemmer.stemmer('turkish')
- def stem(self, word):
- # type: (str) -> str
+ def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
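
For reference, the newly annotated init()/stem() pair above just wraps snowballstemmer; a minimal sketch of the same calls outside the class (the sample words are illustrative assumptions, not taken from this changeset):

    import snowballstemmer

    # Mirrors SearchTurkish.init() and SearchTurkish.stem() above.
    stemmer = snowballstemmer.stemmer('turkish')
    for word in ['Kitaplar', 'evler']:            # hypothetical inputs
        print(stemmer.stemWord(word.lower()))     # stemmed lowercase form
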
diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py
index ed5f52265..62a6d1206 100644
--- a/sphinx/search/zh.py
+++ b/sphinx/search/zh.py
@@ -10,6 +10,7 @@
import os
import re
+from typing import Dict, List
from sphinx.search import SearchLanguage
from sphinx.util.stemmer import get_stemmer
@@ -20,10 +21,6 @@ try:
except ImportError:
JIEBA = False
-if False:
- # For type annotation
- from typing import Dict, List # NOQA
-
english_stopwords = set("""
a and are as at
be but by
@@ -235,8 +232,7 @@ class SearchChinese(SearchLanguage):
latin1_letters = re.compile(r'[a-zA-Z0-9_]+')
latin_terms = [] # type: List[str]
- def init(self, options):
- # type: (Dict) -> None
+ def init(self, options: Dict) -> None:
if JIEBA:
dict_path = options.get('dict')
if dict_path and os.path.isfile(dict_path):
@@ -244,8 +240,7 @@ class SearchChinese(SearchLanguage):
self.stemmer = get_stemmer()
- def split(self, input):
- # type: (str) -> List[str]
+ def split(self, input: str) -> List[str]:
chinese = [] # type: List[str]
if JIEBA:
chinese = list(jieba.cut_for_search(input))
@@ -255,13 +250,10 @@ class SearchChinese(SearchLanguage):
self.latin_terms.extend(latin1)
return chinese + latin1
- def word_filter(self, stemmed_word):
- # type: (str) -> bool
+ def word_filter(self, stemmed_word: str) -> bool:
return len(stemmed_word) > 1
- def stem(self, word):
- # type: (str) -> str
-
+ def stem(self, word: str) -> str:
# Don't stem Latin words that are long enough to be relevant for search
# if not stemmed, but would be too short after being stemmed
# avoids some issues with acronyms
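
The split() hunk above combines jieba segmentation with the latin1_letters regex declared earlier in the class; a small sketch of the regex half only, since jieba is an optional dependency (the input string is an illustrative assumption):

    import re

    # Same pattern as SearchChinese.latin1_letters in the hunk above; the full
    # split() additionally runs jieba.cut_for_search() when jieba is installed.
    latin1_letters = re.compile(r'[a-zA-Z0-9_]+')
    text = 'Sphinx 搜索 index_html'               # hypothetical mixed input
    print(latin1_letters.findall(text))           # ['Sphinx', 'index_html']
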
diff --git a/sphinx/theming.py b/sphinx/theming.py
index 669e71dde..bbbac5488 100644
--- a/sphinx/theming.py
+++ b/sphinx/theming.py
@@ -13,6 +13,7 @@ import os
import shutil
import tempfile
from os import path
+from typing import Any, Dict, List
from zipfile import ZipFile
import pkg_resources
@@ -23,19 +24,18 @@ from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.osutil import ensuredir
-logger = logging.getLogger(__name__)
-
if False:
# For type annotation
- from typing import Any, Dict, List # NOQA
- from sphinx.application import Sphinx # NOQA
+ from sphinx.application import Sphinx
+
+
+logger = logging.getLogger(__name__)
NODEFAULT = object()
THEMECONF = 'theme.conf'
-def extract_zip(filename, targetdir):
- # type: (str, str) -> None
+def extract_zip(filename: str, targetdir: str) -> None:
"""Extract zip file to target directory."""
ensuredir(targetdir)
@@ -54,8 +54,7 @@ class Theme:
This class supports both theme directories and theme archives (zipped themes)."""
- def __init__(self, name, theme_path, factory):
- # type: (str, str, HTMLThemeFactory) -> None
+ def __init__(self, name: str, theme_path: str, factory: "HTMLThemeFactory") -> None:
self.name = name
self.base = None
self.rootdir = None
@@ -87,8 +86,7 @@ class Theme:
raise ThemeError(__('no theme named %r found, inherited by %r') %
(inherit, name))
- def get_theme_dirs(self):
- # type: () -> List[str]
+ def get_theme_dirs(self) -> List[str]:
"""Return a list of theme directories, beginning with this theme's,
then the base theme's, then that one's base theme's, etc.
"""
@@ -97,8 +95,7 @@ class Theme:
else:
return [self.themedir] + self.base.get_theme_dirs()
- def get_config(self, section, name, default=NODEFAULT):
- # type: (str, str, Any) -> Any
+ def get_config(self, section: str, name: str, default: Any = NODEFAULT) -> Any:
"""Return the value for a theme configuration setting, searching the
base theme chain.
"""
@@ -114,8 +111,7 @@ class Theme:
else:
return default
- def get_options(self, overrides={}):
- # type: (Dict[str, Any]) -> Dict[str, Any]
+ def get_options(self, overrides: Dict[str, Any] = {}) -> Dict[str, Any]:
"""Return a dictionary of theme options and their values."""
if self.base:
options = self.base.get_options()
@@ -135,8 +131,7 @@ class Theme:
return options
- def cleanup(self):
- # type: () -> None
+ def cleanup(self) -> None:
"""Remove temporary directories."""
if self.rootdir:
try:
@@ -147,8 +142,7 @@ class Theme:
self.base.cleanup()
-def is_archived_theme(filename):
- # type: (str) -> bool
+def is_archived_theme(filename: str) -> bool:
"""Check the specified file is an archived theme file or not."""
try:
with ZipFile(filename) as f:
@@ -160,23 +154,20 @@ def is_archived_theme(filename):
class HTMLThemeFactory:
"""A factory class for HTML Themes."""
- def __init__(self, app):
- # type: (Sphinx) -> None
+ def __init__(self, app: "Sphinx") -> None:
self.app = app
self.themes = app.html_themes
self.load_builtin_themes()
if getattr(app.config, 'html_theme_path', None):
self.load_additional_themes(app.config.html_theme_path)
- def load_builtin_themes(self):
- # type: () -> None
+ def load_builtin_themes(self) -> None:
"""Load built-in themes."""
themes = self.find_themes(path.join(package_dir, 'themes'))
for name, theme in themes.items():
self.themes[name] = theme
- def load_additional_themes(self, theme_paths):
- # type: (str) -> None
+ def load_additional_themes(self, theme_paths: str) -> None:
"""Load additional themes placed at specified directories."""
for theme_path in theme_paths:
abs_theme_path = path.abspath(path.join(self.app.confdir, theme_path))
@@ -184,8 +175,7 @@ class HTMLThemeFactory:
for name, theme in themes.items():
self.themes[name] = theme
- def load_extra_theme(self, name):
- # type: (str) -> None
+ def load_extra_theme(self, name: str) -> None:
"""Try to load a theme having specifed name."""
if name == 'alabaster':
self.load_alabaster_theme()
@@ -194,14 +184,12 @@ class HTMLThemeFactory:
else:
self.load_external_theme(name)
- def load_alabaster_theme(self):
- # type: () -> None
+ def load_alabaster_theme(self) -> None:
"""Load alabaster theme."""
import alabaster
self.themes['alabaster'] = path.join(alabaster.get_path(), 'alabaster')
- def load_sphinx_rtd_theme(self):
- # type: () -> None
+ def load_sphinx_rtd_theme(self) -> None:
"""Load sphinx_rtd_theme theme (if exists)."""
try:
import sphinx_rtd_theme
@@ -210,8 +198,7 @@ class HTMLThemeFactory:
except ImportError:
pass
- def load_external_theme(self, name):
- # type: (str) -> None
+ def load_external_theme(self, name: str) -> None:
"""Try to load a theme using entry_points.
Sphinx refers to ``sphinx_themes`` entry_points.
@@ -225,8 +212,7 @@ class HTMLThemeFactory:
except StopIteration:
pass
- def find_themes(self, theme_path):
- # type: (str) -> Dict[str, str]
+ def find_themes(self, theme_path: str) -> Dict[str, str]:
"""Search themes from specified directory."""
themes = {} # type: Dict[str, str]
if not path.isdir(theme_path):
@@ -247,8 +233,7 @@ class HTMLThemeFactory:
return themes
- def create(self, name):
- # type: (str) -> Theme
+ def create(self, name: str) -> Theme:
"""Create an instance of theme."""
if name not in self.themes:
self.load_extra_theme(name)
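
Taken together, the annotated signatures above describe the public Theme/HTMLThemeFactory API; a hedged usage sketch, written as a helper that takes an already-initialised Sphinx application (the helper name and the 'alabaster' default are illustrative assumptions):

    from sphinx.application import Sphinx
    from sphinx.theming import HTMLThemeFactory, Theme

    def describe_theme(app: Sphinx, name: str = 'alabaster') -> None:
        """Print the directory chain and inheritance of a named theme."""
        factory = HTMLThemeFactory(app)
        theme: Theme = factory.create(name)                 # -> Theme
        print(theme.get_theme_dirs())                       # this theme's dir, then its bases'
        print(theme.get_config('theme', 'inherit', default=None))
        theme.cleanup()                                     # drop any temporary unpack dirs
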
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index b6119d8da..df94aca49 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -273,8 +273,7 @@ class DoctestTransform(SphinxTransform):
"""Set "doctest" style to each doctest_block node"""
default_priority = 500
- def apply(self, **kwargs):
- # type: (Any) -> None
+ def apply(self, **kwargs) -> None:
for node in self.document.traverse(nodes.doctest_block):
node['classes'].append('doctest')
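
The DoctestTransform hunk above shows the new bare **kwargs signature for apply(); a minimal custom transform written against the same pattern (the class name, priority, and CSS class are illustrative assumptions):

    from docutils import nodes
    from sphinx.transforms import SphinxTransform

    class MarkLiteralBlocks(SphinxTransform):               # hypothetical example
        """Append a custom class to every literal_block node."""
        default_priority = 500

        def apply(self, **kwargs) -> None:
            for node in self.document.traverse(nodes.literal_block):
                node['classes'].append('my-literal')
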
diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py
index 2032b94ac..e8fdcfe4c 100644
--- a/sphinx/util/__init__.py
+++ b/sphinx/util/__init__.py
@@ -119,8 +119,7 @@ def get_matching_docs(dirname: str, suffixes: List[str],
break
-def get_filetype(source_suffix, filename):
- # type: (Dict[str, str], str) -> str
+def get_filetype(source_suffix: Dict[str, str], filename: str) -> str:
for suffix, filetype in source_suffix.items():
if filename.endswith(suffix):
# If default filetype (None), considered as restructuredtext.
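
A quick usage sketch for the newly typed get_filetype(); the suffix mapping below is an illustrative source_suffix configuration, not one taken from this changeset:

    from sphinx.util import get_filetype

    source_suffix = {'.rst': 'restructuredtext', '.md': 'markdown'}   # assumed config
    print(get_filetype(source_suffix, 'index.rst'))    # 'restructuredtext'
    print(get_filetype(source_suffix, 'README.md'))    # 'markdown'
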
diff --git a/sphinx/versioning.py b/sphinx/versioning.py
index 56c126da2..0142dfb66 100644
--- a/sphinx/versioning.py
+++ b/sphinx/versioning.py
@@ -12,15 +12,17 @@ import pickle
from itertools import product, zip_longest
from operator import itemgetter
from os import path
+from typing import Any, Dict, Iterator
from uuid import uuid4
+from docutils import nodes
+from docutils.nodes import Node
+
from sphinx.transforms import SphinxTransform
if False:
# For type annotation
- from typing import Any, Dict, Iterator # NOQA
- from docutils import nodes # NOQA
- from sphinx.application import Sphinx # NOQA
+ from sphinx.application import Sphinx
try:
import Levenshtein
@@ -32,8 +34,7 @@ except ImportError:
VERSIONING_RATIO = 65
-def add_uids(doctree, condition):
- # type: (nodes.Node, Any) -> Iterator[nodes.Node]
+def add_uids(doctree: Node, condition: Any) -> Iterator[Node]:
"""Add a unique id to every node in the `doctree` which matches the
condition and yield the nodes.
@@ -48,8 +49,7 @@ def add_uids(doctree, condition):
yield node
-def merge_doctrees(old, new, condition):
- # type: (nodes.Node, nodes.Node, Any) -> Iterator[nodes.Node]
+def merge_doctrees(old: Node, new: Node, condition: Any) -> Iterator[Node]:
"""Merge the `old` doctree with the `new` one while looking at nodes
matching the `condition`.
@@ -116,8 +116,7 @@ def merge_doctrees(old, new, condition):
yield new_node
-def get_ratio(old, new):
- # type: (str, str) -> float
+def get_ratio(old: str, new: str) -> float:
"""Return a "similiarity ratio" (in percent) representing the similarity
between the two strings where 0 is equal and anything above less than equal.
"""
@@ -130,8 +129,7 @@ def get_ratio(old, new):
return levenshtein_distance(old, new) / (len(old) / 100.0)
-def levenshtein_distance(a, b):
- # type: (str, str) -> int
+def levenshtein_distance(a: str, b: str) -> int:
"""Return the Levenshtein edit distance between two strings *a* and *b*."""
if a == b:
return 0
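
The two hunks above cover get_ratio() and levenshtein_distance(); a hedged sketch of how they relate, with the get_ratio() value computed from the pure-Python fallback formula shown above (the word pairs are illustrative):

    from sphinx.versioning import get_ratio, levenshtein_distance

    print(levenshtein_distance('kitten', 'sitting'))   # 3 edits
    # Fallback path above: levenshtein_distance(old, new) / (len(old) / 100.0)
    print(get_ratio('flaw', 'lawn'))                   # 2 / (4 / 100.0) == 50.0
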
@@ -155,8 +153,7 @@ class UIDTransform(SphinxTransform):
"""Add UIDs to doctree for versioning."""
default_priority = 880
- def apply(self, **kwargs):
- # type: (Any) -> None
+ def apply(self, **kwargs) -> None:
env = self.env
old_doctree = None
if not env.versioning_condition:
@@ -178,8 +175,7 @@ class UIDTransform(SphinxTransform):
list(merge_doctrees(old_doctree, self.document, env.versioning_condition))
-def setup(app):
- # type: (Sphinx) -> Dict[str, Any]
+def setup(app: "Sphinx") -> Dict[str, Any]:
app.add_transform(UIDTransform)
return {