diff --git a/Makefile b/Makefile index 69d3d946..3ab35af7 100644 --- a/Makefile +++ b/Makefile @@ -40,4 +40,5 @@ install: ln -s $(DOTFILES_DIR)/cd-project $(HOME)/go/bin/ sublime-mac: + rm -rf "$(HOME)/Library/Application Support/Sublime Text 3/Packages" ln -s "$(DOTFILES_DIR)/SublimeText" "$(HOME)/Library/Application Support/Sublime Text 3/Packages" diff --git a/SublimeText/User/Package Control.sublime-settings b/SublimeText/User/Package Control.sublime-settings index 14f743cf..b76121c8 100644 --- a/SublimeText/User/Package Control.sublime-settings +++ b/SublimeText/User/Package Control.sublime-settings @@ -4,4 +4,8 @@ [ ], + "installed_packages": + [ + "Package Control" + ], } diff --git a/SublimeText/User/Preferences.sublime-settings b/SublimeText/User/Preferences.sublime-settings index 81c43813..5dd49c57 100644 --- a/SublimeText/User/Preferences.sublime-settings +++ b/SublimeText/User/Preferences.sublime-settings @@ -1,16 +1,16 @@ { - "font_size": 21, + "font_size": 17, "ignored_packages": [ "Vintage", ], "hot_exit": "disabled", "index_files": true, - "theme": "Material One Dark.sublime-theme", + "theme": "Adaptive.sublime-theme", "show_errors_inline": false, "color_scheme": "Packages/Materialize/schemes/Material One Dark.tmTheme", "word_wrap": true, - "auto_complete": false, + // "auto_complete": false, "highlight_line": true, "caret_style": "smooth", } diff --git a/SublimeText/bracex/.gitignore b/SublimeText/bracex/.gitignore new file mode 100644 index 00000000..c821c84a --- /dev/null +++ b/SublimeText/bracex/.gitignore @@ -0,0 +1,131 @@ +.DS_Store + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/SublimeText/bracex/.sublime-dependency b/SublimeText/bracex/.sublime-dependency new file mode 100644 index 00000000..e69de29b diff --git a/SublimeText/bracex/LICENSE.md b/SublimeText/bracex/LICENSE.md new file mode 100644 index 00000000..d75277a5 --- /dev/null +++ b/SublimeText/bracex/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 - 2020 facelessuser + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/SublimeText/bracex/README.md b/SublimeText/bracex/README.md new file mode 100644 index 00000000..bfa58857 --- /dev/null +++ b/SublimeText/bracex/README.md @@ -0,0 +1,7 @@ +# BraceX + +Current Version: 2.1.1 + +Creates arbitrary strings via brace expansion much like Bash's. + +To learn more about what bracex can do, read the [official documentation](https://facelessuser.github.io/bracex/). diff --git a/SublimeText/bracex/dependency-metadata.json b/SublimeText/bracex/dependency-metadata.json new file mode 100644 index 00000000..269ea664 --- /dev/null +++ b/SublimeText/bracex/dependency-metadata.json @@ -0,0 +1 @@ +{"sublime_text": ">=3000", "description": "Bracex creates arbitrary strings via brace expansion much like Bash's.", "version": "2.1.1", "platforms": ["*"], "url": "https://github.com/facelessuser/sublime-bracex/issues"} \ No newline at end of file diff --git a/SublimeText/bracex/st3/bracex/__init__.py b/SublimeText/bracex/st3/bracex/__init__.py new file mode 100644 index 00000000..a31de20c --- /dev/null +++ b/SublimeText/bracex/st3/bracex/__init__.py @@ -0,0 +1,446 @@ +""" +A Bash like brace expander. 
+ +Licensed under MIT +Copyright (c) 2018 - 2020 Isaac Muse + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" +import itertools +import math +import re +from .__meta__ import __version_info__, __version__ # noqa: F401 + +__all__ = ('expand', 'iexpand') + +_alpha = [chr(x) if x != 0x5c else '' for x in range(ord('A'), ord('z') + 1)] +_nalpha = list(reversed(_alpha)) + +RE_INT_ITER = re.compile(r'(-?\d+)\.{2}(-?\d+)(?:\.{2}(-?\d+))?(?=\})') +RE_CHR_ITER = re.compile(r'([A-Za-z])\.{2}([A-Za-z])(?:\.{2}(-?\d+))?(?=\})') + +DEFAULT_LIMIT = 1000 + + +class ExpansionLimitException(Exception): + """Brace expansion limit exception.""" + + +def expand(string, keep_escapes=False, limit=DEFAULT_LIMIT): + """Expand braces.""" + + return list(iexpand(string, keep_escapes, limit)) + + +def iexpand(string, keep_escapes=False, limit=DEFAULT_LIMIT): + """Expand braces and return an iterator.""" + + if isinstance(string, bytes): + is_bytes = True + string = string.decode('latin-1') + + else: + is_bytes = False + + for count, entry in enumerate(ExpandBrace(keep_escapes, limit).expand(string), 1): + yield entry.encode('latin-1') if is_bytes else entry + + +class StringIter(object): + """Preprocess replace tokens.""" + + def __init__(self, string): + """Initialize.""" + + self._string = string + self._index = 0 + + def __iter__(self): + """Iterate.""" + + return self + + def __next__(self): + """Python 3 iterator compatible next.""" + + return self.iternext() + + def match(self, pattern): + """Perform regex match at index.""" + + m = pattern.match(self._string, self._index) + if m: + self._index = m.end() + return m + + @property + def index(self): + """Get current index.""" + + return self._index + + def previous(self): # pragma: no cover + """Get previous char.""" + + return self._string[self._index - 1] + + def advance(self, count): + """Advanced the index.""" + + self._index += count + + def rewind(self, count): + """Rewind index.""" + + if count > self._index: # pragma: no cover + raise ValueError("Can't rewind past beginning!") + + self._index -= count + + def iternext(self): + """Iterate through characters of the string.""" + + try: + char = self._string[self._index] + self._index += 1 + except IndexError: # pragma: no cover + raise StopIteration + + return char + + +class ExpandBrace(object): + """Expand braces like in Bash.""" + + def __init__(self, keep_escapes=False, limit=DEFAULT_LIMIT): + """Initialize.""" + + self.max_limit = limit + self.count = 0 + self.expanding = False + self.keep_escapes = keep_escapes + + def 
update_count(self, count): + """Update the count and assert if count exceeds the max limit.""" + + if isinstance(count, int): + self.count += count + else: + self.count -= sum(count) + prod = 1 + for c in count: + prod *= c + self.count += prod + if self.max_limit > 0 and self.count > self.max_limit: + raise ExpansionLimitException( + 'Brace expansion has exceeded the limit of {:d}'.format(self.max_limit) + ) + + def set_expanding(self): + """Set that we are expanding a sequence, and return whether a release is required by the caller.""" + + status = not self.expanding + if status: + self.expanding = True + return status + + def is_expanding(self): + """Get status of whether we are expanding.""" + + return self.expanding + + def release_expanding(self, release): + """Release the expand status.""" + + if release: + self.expanding = False + + def get_escape(self, c, i): + """Get an escape.""" + + try: + escaped = next(i) + except StopIteration: + escaped = '' + return c + escaped if self.keep_escapes else escaped + + def squash(self, a, b): + """ + Returns a generator that squashes two iterables into one. + + ``` + ['this', 'that'], [[' and', ' or']] => ['this and', 'this or', 'that and', 'that or'] + ``` + """ + + return ((''.join(x) if isinstance(x, tuple) else x) for x in itertools.product(a, b)) + + def get_literals(self, c, i, depth): + """ + Get a string literal. + + Gather all the literal chars up to opening curly or closing brace. + Also gather chars between braces and commas within a group (is_expanding). + """ + + result = [''] + is_dollar = False + + count = True + seq_count = [] + + try: + while c: + ignore_brace = is_dollar + is_dollar = False + + if c == '$': + is_dollar = True + + elif c == '\\': + c = [self.get_escape(c, i)] + + elif not ignore_brace and c == '{': + # Try and get the group + index = i.index + try: + if self.max_limit > 0: + current_count = self.count + seq = self.get_sequence(next(i), i, depth + 1) + if seq: + if self.max_limit > 0: + diff = self.count - current_count + seq_count.append(diff) + count = False + c = seq + except StopIteration: + # Searched to end of string + # and still didn't find it. + i.rewind(i.index - index) + + elif self.is_expanding() and c in (',', '}'): + # We are Expanding within a group and found a group delimiter + # Return what we gathered before the group delimiters. + i.rewind(1) + self.update_count(1 if count else seq_count) + return (x for x in result) + + # Squash the current set of literals. + result = self.squash(result, [c] if isinstance(c, str) else c) + + c = next(i) + except StopIteration: + if self.is_expanding(): + return None + + self.update_count(1 if count else seq_count) + return (x for x in result) + + def combine(self, a, b): + """A generator that combines two iterables.""" + + for l in (a, b): + for x in l: + yield x + + def get_sequence(self, c, i, depth): + """ + Get the sequence. + + Get sequence between `{}`, such as: `{a,b}`, `{1..2[..inc]}`, etc. + It will basically crawl to the end or find a valid series. + """ + + result = [] + release = self.set_expanding() + has_comma = False # Used to indicate validity of group (`{1..2}` are an exception). + is_empty = True # Tracks whether the current slot is empty `{slot,slot,slot}`. + + # Detect numerical and alphabetic series: `{1..2}` etc. + i.rewind(1) + item = self.get_range(i) + i.advance(1) + if item is not None: + self.release_expanding(release) + return (x for x in item) + + try: + while c: + # Bash has some special top level logic. 
if `}` follows `{` but hasn't matched + # a group yet, keep going except when the first 2 bytes are `{}` which gets + # completely ignored. + keep_looking = depth == 1 and not has_comma # and i.index not in self.skip_index + if (c == '}' and (not keep_looking or i.index == 2)): + # If there is no comma, we know the sequence is bogus. + if is_empty: + result = (x for x in self.combine(result, [''])) + if not has_comma: + result = ('{' + literal + '}' for literal in result) + self.release_expanding(release) + return (x for x in result) + + elif c == ',': + # Must be the first element in the list. + has_comma = True + if is_empty: + result = (x for x in self.combine(result, [''])) + else: + is_empty = True + + else: + if c == '}': + # Top level: If we didn't find a comma, we haven't + # completed the top level group. Request more and + # append to what we already have for the first slot. + if not result: + result = (x for x in self.combine(result, [c])) + else: + result = self.squash(result, [c]) + value = self.get_literals(next(i), i, depth) + if value is not None: + result = self.squash(result, value) + is_empty = False + else: + # Lower level: Try to find group, but give up if cannot acquire. + value = self.get_literals(c, i, depth) + if value is not None: + result = (x for x in self.combine(result, value)) + is_empty = False + + c = next(i) + except StopIteration: + self.release_expanding(release) + raise + + def get_range(self, i): + """ + Check and retrieve range if value is a valid range. + + Here we are looking to see if the value is series or range. + We look for `{1..2[..inc]}` or `{a..z[..inc]}` (negative numbers are fine). + """ + + try: + m = i.match(RE_INT_ITER) + if m: + return self.get_int_range(*m.groups()) + + m = i.match(RE_CHR_ITER) + if m: + return self.get_char_range(*m.groups()) + except ExpansionLimitException: + raise + except Exception: # pragma: no cover + # TODO: We really should never fail here, + # but if we do, assume the sequence range + # was invalid. This catch can probably + # be removed in the future with more testing. 
+ pass + + return None + + def format_value(self, value, padding): + """Get padding adjusting for negative values.""" + + if padding: + return "{:0{pad}d}".format(value, pad=padding) + + else: + return str(value) + + def get_int_range(self, start, end, increment=None): + """Get an integer range between start and end and increments of increment.""" + + first, last = int(start), int(end) + increment = int(increment) if increment is not None else 1 + max_length = max(len(start), len(end)) + + # Zero doesn't make sense as an incrementer + # but like bash, just assume one + if increment == 0: + increment = 1 + + if start[0] == '-': + start = start[1:] + + if end[0] == '-': + end = end[1:] + + if (len(start) > 1 and start[0] == '0') or (len(end) > 1 and end[0] == '0'): + padding = max_length + + else: + padding = 0 + + if first < last: + self.update_count(math.ceil(abs(((last + 1) - first) / increment))) + r = range(first, last + 1, -increment if increment < 0 else increment) + else: + self.update_count(math.ceil(abs(((first + 1) - last) / increment))) + r = range(first, last - 1, increment if increment < 0 else -increment) + + return (self.format_value(value, padding) for value in r) + + def get_char_range(self, start, end, increment=None): + """Get a range of alphabetic characters.""" + + increment = int(increment) if increment else 1 + if increment < 0: + increment = -increment + + # Zero doesn't make sense as an incrementer + # but like bash, just assume one + if increment == 0: + increment = 1 + + inverse = start > end + alpha = _nalpha if inverse else _alpha + + start = alpha.index(start) + end = alpha.index(end) + + if start < end: + self.update_count(math.ceil(((end + 1) - start) / increment)) + return (c for c in alpha[start:end + 1:increment]) + + else: + self.update_count(math.ceil(((start + 1) - end) / increment)) + return (c for c in alpha[end:start + 1:increment]) + + def expand(self, string): + """Expand.""" + + self.expanding = False + empties = [] + found_literal = False + if string: + i = iter(StringIter(string)) + value = self.get_literals(next(i), i, 0) + if value is not None: + for x in value: + # We don't want to return trailing empty strings. + # Store empty strings and output only when followed by a literal. + if not x: + empties.append(x) + continue + found_literal = True + while empties: + yield empties.pop(0) + yield x + empties = [] + + # We found no literals so return an empty string + if not found_literal: + yield "" diff --git a/SublimeText/bracex/st3/bracex/__meta__.py b/SublimeText/bracex/st3/bracex/__meta__.py new file mode 100644 index 00000000..72a8e1dc --- /dev/null +++ b/SublimeText/bracex/st3/bracex/__meta__.py @@ -0,0 +1,190 @@ +"""Meta related things.""" +from __future__ import unicode_literals +from collections import namedtuple +import re + +RE_VER = re.compile( + r'''(?x) + (?P\d+)(?:\.(?P\d+))?(?:\.(?P\d+))? + (?:(?Pa|b|rc)(?P
<pre>\d+))?
+    (?:\.post(?P<post>\d+))?
+    (?:\.dev(?P<dev>\d+))?
+    '''
+)
+
+REL_MAP = {
+    ".dev": "",
+    ".dev-alpha": "a",
+    ".dev-beta": "b",
+    ".dev-candidate": "rc",
+    "alpha": "a",
+    "beta": "b",
+    "candidate": "rc",
+    "final": ""
+}
+
+DEV_STATUS = {
+    ".dev": "2 - Pre-Alpha",
+    ".dev-alpha": "2 - Pre-Alpha",
+    ".dev-beta": "2 - Pre-Alpha",
+    ".dev-candidate": "2 - Pre-Alpha",
+    "alpha": "3 - Alpha",
+    "beta": "4 - Beta",
+    "candidate": "4 - Beta",
+    "final": "5 - Production/Stable"
+}
+
+PRE_REL_MAP = {"a": 'alpha', "b": 'beta', "rc": 'candidate'}
+
+
+class Version(namedtuple("Version", ["major", "minor", "micro", "release", "pre", "post", "dev"])):
+    """
+    Get the version (PEP 440).
+
+    A biased approach to the PEP 440 semantic version.
+
+    Provides a tuple structure which is sorted for comparisons `v1 > v2` etc.
+      (major, minor, micro, release type, pre-release build, post-release build, development release build)
+    Release types are named in such a way that they are comparable with ease.
+    Accessors check whether this is a development, pre-release, or post-release build. Also provides an accessor
+    to get the development status for setup files.
+
+    How it works (currently):
+
+    - You must specify a release type as either `final`, `alpha`, `beta`, or `candidate`.
+    - To define a development release, you can use either `.dev`, `.dev-alpha`, `.dev-beta`, or `.dev-candidate`.
+      The dot is used to ensure all development specifiers are sorted before `alpha`.
+      You can specify a `dev` number for development builds, but do not have to as implicit development releases
+      are allowed.
+    - You must specify a `pre` value greater than zero if using a prerelease as this project (not PEP 440) does not
+      allow implicit prereleases.
+    - You can optionally set `post` to a value greater than zero to make the build a post release. While post releases
+      are technically allowed in prereleases, they are strongly discouraged, so we reject them. It should be
+      noted that we do not allow `post0` even though PEP 440 does not restrict this. This project specifically
+      does not allow implicit post releases.
+    - It should be noted that we do not support epochs `1!` or local versions `+some-custom.version-1`.
+
+    Acceptable version releases:
+
+    ```
+    Version(1, 0, 0, "final")                    1.0
+    Version(1, 2, 0, "final")                    1.2
+    Version(1, 2, 3, "final")                    1.2.3
+    Version(1, 2, 0, ".dev-alpha", pre=4)        1.2a4
+    Version(1, 2, 0, ".dev-beta", pre=4)         1.2b4
+    Version(1, 2, 0, ".dev-candidate", pre=4)    1.2rc4
+    Version(1, 2, 0, "final", post=1)            1.2.post1
+    Version(1, 2, 3, ".dev")                     1.2.3.dev0
+    Version(1, 2, 3, ".dev", dev=1)              1.2.3.dev1
+    ```
+
+    """
+
+    def __new__(cls, major, minor, micro, release="final", pre=0, post=0, dev=0):
+        """Validate version info."""
+
+        # Ensure all parts are positive integers.
+        for value in (major, minor, micro, pre, post):
+            if not (isinstance(value, int) and value >= 0):
+                raise ValueError("All version parts except 'release' should be integers.")
+
+        if release not in REL_MAP:
+            raise ValueError("'{}' is not a valid release type.".format(release))
+
+        # Ensure valid pre-release (we do not allow implicit pre-releases).
+        if ".dev-candidate" < release < "final":
+            if pre == 0:
+                raise ValueError("Implicit pre-releases not allowed.")
+            elif dev:
+                raise ValueError("Version is not a development release.")
+            elif post:
+                raise ValueError("Post-releases are not allowed with pre-releases.")
+
+        # Ensure valid development or development/pre release
+        elif release < "alpha":
+            if release > ".dev" and pre == 0:
+                raise ValueError("Implicit pre-release not allowed.")
+            elif post:
+                raise ValueError("Post-releases are not allowed with pre-releases.")
+
+        # Ensure a valid normal release
+        else:
+            if pre:
+                raise ValueError("Version is not a pre-release.")
+            elif dev:
+                raise ValueError("Version is not a development release.")
+
+        return super(Version, cls).__new__(cls, major, minor, micro, release, pre, post, dev)
+
+    def _is_pre(self):
+        """Is prerelease."""
+
+        return self.pre > 0
+
+    def _is_dev(self):
+        """Is development."""
+
+        return bool(self.release < "alpha")
+
+    def _is_post(self):
+        """Is post."""
+
+        return self.post > 0
+
+    def _get_dev_status(self):  # pragma: no cover
+        """Get development status string."""
+
+        return DEV_STATUS[self.release]
+
+    def _get_canonical(self):
+        """Get the canonical output string."""
+
+        # Assemble the major, minor, and micro version and append `pre`, `post`, or `dev` if needed.
+        if self.micro == 0:
+            ver = "{}.{}".format(self.major, self.minor)
+        else:
+            ver = "{}.{}.{}".format(self.major, self.minor, self.micro)
+        if self._is_pre():
+            ver += '{}{}'.format(REL_MAP[self.release], self.pre)
+        if self._is_post():
+            ver += ".post{}".format(self.post)
+        if self._is_dev():
+            ver += ".dev{}".format(self.dev)
+
+        return ver
+
+
+def parse_version(ver, pre=False):
+    """Parse version into a comparable Version tuple."""
+
+    m = RE_VER.match(ver)
+
+    # Handle major, minor, micro
+    major = int(m.group('major'))
+    minor = int(m.group('minor')) if m.group('minor') else 0
+    micro = int(m.group('micro')) if m.group('micro') else 0
+
+    # Handle pre releases
+    if m.group('type'):
+        release = PRE_REL_MAP[m.group('type')]
+        pre = int(m.group('pre'))
+    else:
+        release = "final"
+        pre = 0
+
+    # Handle development releases
+    dev = m.group('dev') if m.group('dev') else 0
+    if m.group('dev'):
+        dev = int(m.group('dev'))
+        release = '.dev-' + release if pre else '.dev'
+    else:
+        dev = 0
+
+    # Handle post
+    post = int(m.group('post')) if m.group('post') else 0
+
+    return Version(major, minor, micro, release, pre, post, dev)
+
+
+__version_info__ = Version(2, 1, 1, "final")
+__version__ = __version_info__._get_canonical()
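
For reference, a minimal sketch of what the vendored bracex dependency provides, assuming it is importable as `bracex` (as it is once installed as a Package Control dependency); the expected output shown in comments is illustrative:

```python
# Sketch: Bash-like brace expansion as described in the bracex README above.
import bracex

print(bracex.__version__)  # '2.1.1', from __meta__.__version_info__
print(bracex.expand('icon-{a..c}.{png,jpg}'))
# ['icon-a.png', 'icon-a.jpg', 'icon-b.png', 'icon-b.jpg', 'icon-c.png', 'icon-c.jpg']
```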
diff --git a/SublimeText/lsp_utils/.sublime-dependency b/SublimeText/lsp_utils/.sublime-dependency
new file mode 100644
index 00000000..f599e28b
--- /dev/null
+++ b/SublimeText/lsp_utils/.sublime-dependency
@@ -0,0 +1 @@
+10
diff --git a/SublimeText/lsp_utils/LICENSE b/SublimeText/lsp_utils/LICENSE
new file mode 100644
index 00000000..4f0707c5
--- /dev/null
+++ b/SublimeText/lsp_utils/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 SublimeLSP
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/SublimeText/lsp_utils/README.md b/SublimeText/lsp_utils/README.md
new file mode 100644
index 00000000..d5afecc7
--- /dev/null
+++ b/SublimeText/lsp_utils/README.md
@@ -0,0 +1,24 @@
+# LSP utilities for Package Control
+
+Module with LSP-related utilities for Sublime Text.
+
+📘 [Documentation](https://sublimelsp.github.io/lsp_utils/)
+
+## How to use
+
+1. Create a `dependencies.json` file in your package root with the following contents:
+
+```js
+{
+   "*": {
+      "*": [
+         "lsp_utils",
+         "sublime_lib"
+      ]
+   }
+}
+```
+
+2. Run the **Package Control: Satisfy Dependencies** command via the _Command Palette_.
+
+See also [Documentation on Dependencies](https://packagecontrol.io/docs/dependencies)
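
For context, a hedged sketch of the consuming side: the package name `LSP-foo` and the `foo-language-server` command are invented, but the calls mirror the `GenericClientHandler` API vendored later in this change.

```python
# plugin.py of a hypothetical LSP-foo package that depends on lsp_utils.
from lsp_utils import GenericClientHandler
from LSP.plugin.core.typing import List


class LspFooPlugin(GenericClientHandler):
    # Must match the package name and its LSP-foo.sublime-settings file.
    package_name = __package__

    @classmethod
    def get_command(cls) -> List[str]:
        # Assumes the server binary is already on the PATH.
        return ['foo-language-server', '--stdio']


def plugin_loaded() -> None:
    LspFooPlugin.setup()


def plugin_unloaded() -> None:
    LspFooPlugin.cleanup()
```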
diff --git a/SublimeText/lsp_utils/dependency-metadata.json b/SublimeText/lsp_utils/dependency-metadata.json
new file mode 100644
index 00000000..c3129e86
--- /dev/null
+++ b/SublimeText/lsp_utils/dependency-metadata.json
@@ -0,0 +1 @@
+{"sublime_text": ">=4070", "description": "Module with LSP-related utilities", "version": "3.1.1", "platforms": ["*"], "url": "https://github.com/sublimelsp/lsp_utils/issues"}
\ No newline at end of file
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/__init__.py b/SublimeText/lsp_utils/st3/lsp_utils/__init__.py
new file mode 100644
index 00000000..5733aa5e
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/__init__.py
@@ -0,0 +1,27 @@
+from ._client_handler import ClientHandler
+from ._client_handler import notification_handler
+from ._client_handler import request_handler
+from .api_wrapper_interface import ApiWrapperInterface
+from .constants import SETTINGS_FILENAME
+from .generic_client_handler import GenericClientHandler
+from .node_runtime import NodeRuntime
+from .npm_client_handler import NpmClientHandler
+from .server_npm_resource import ServerNpmResource
+from .server_pip_resource import ServerPipResource
+from .server_resource_interface import ServerResourceInterface
+from .server_resource_interface import ServerStatus
+
+__all__ = [
+    'ApiWrapperInterface',
+    'ClientHandler',
+    'SETTINGS_FILENAME',
+    'GenericClientHandler',
+    'NodeRuntime',
+    'NpmClientHandler',
+    'ServerResourceInterface',
+    'ServerStatus',
+    'ServerNpmResource',
+    'ServerPipResource',
+    'notification_handler',
+    'request_handler',
+]
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/__init__.py b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/__init__.py
new file mode 100644
index 00000000..6c03556e
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/__init__.py
@@ -0,0 +1,9 @@
+from .abstract_plugin import ClientHandler
+from .api_decorator import notification_handler
+from .api_decorator import request_handler
+
+__all__ = [
+    'ClientHandler',
+    'notification_handler',
+    'request_handler',
+]
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/abstract_plugin.py b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/abstract_plugin.py
new file mode 100644
index 00000000..d59cb4ea
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/abstract_plugin.py
@@ -0,0 +1,162 @@
+from .._util import weak_method
+from ..api_wrapper_interface import ApiWrapperInterface
+from ..server_resource_interface import ServerStatus
+from .api_decorator import register_decorated_handlers
+from .interface import ClientHandlerInterface
+from functools import partial
+from LSP.plugin import AbstractPlugin
+from LSP.plugin import ClientConfig
+from LSP.plugin import Notification
+from LSP.plugin import register_plugin
+from LSP.plugin import Request
+from LSP.plugin import Response
+from LSP.plugin import Session
+from LSP.plugin import unregister_plugin
+from LSP.plugin import WorkspaceFolder
+from LSP.plugin.core.rpc import method2attr
+from LSP.plugin.core.typing import Any, Callable, Dict, List, Optional, Tuple, TypedDict
+from os import path
+from weakref import ref
+import sublime
+
+__all__ = ['ClientHandler']
+
+LanguagesDict = TypedDict('LanguagesDict', {
+    'document_selector': Optional[str],
+    'languageId': Optional[str],
+    'scopes': Optional[List[str]],
+    'syntaxes': Optional[List[str]],
+}, total=False)
+ApiNotificationHandler = Callable[[Any], None]
+ApiRequestHandler = Callable[[Any, Callable[[Any], None]], None]
+
+
+class ApiWrapper(ApiWrapperInterface):
+    def __init__(self, plugin: 'ref[AbstractPlugin]'):
+        self.__plugin = plugin
+
+    def __session(self) -> Optional[Session]:
+        plugin = self.__plugin()
+        return plugin.weaksession() if plugin else None
+
+    # --- ApiWrapperInterface -----------------------------------------------------------------------------------------
+
+    def on_notification(self, method: str, handler: ApiNotificationHandler) -> None:
+        def handle_notification(weak_handler: ApiNotificationHandler, params: Any) -> None:
+            weak_handler(params)
+
+        plugin = self.__plugin()
+        if plugin:
+            setattr(plugin, method2attr(method), partial(handle_notification, weak_method(handler)))
+
+    def on_request(self, method: str, handler: ApiRequestHandler) -> None:
+        def send_response(request_id: Any, result: Any) -> None:
+            session = self.__session()
+            if session:
+                session.send_response(Response(request_id, result))
+
+        def on_response(weak_handler: ApiRequestHandler, params: Any, request_id: Any) -> None:
+            weak_handler(params, lambda result: send_response(request_id, result))
+
+        plugin = self.__plugin()
+        if plugin:
+            setattr(plugin, method2attr(method), partial(on_response, weak_method(handler)))
+
+    def send_notification(self, method: str, params: Any) -> None:
+        session = self.__session()
+        if session:
+            session.send_notification(Notification(method, params))
+
+    def send_request(self, method: str, params: Any, handler: Callable[[Any, bool], None]) -> None:
+        session = self.__session()
+        if session:
+            session.send_request(
+                Request(method, params), lambda result: handler(result, False), lambda result: handler(result, True))
+        else:
+            handler(None, True)
+
+
+class ClientHandler(AbstractPlugin, ClientHandlerInterface):
+    """
+    The base class for creating an LSP plugin.
+    """
+
+    # --- AbstractPlugin handlers -------------------------------------------------------------------------------------
+
+    @classmethod
+    def name(cls) -> str:
+        return cls.get_displayed_name()
+
+    @classmethod
+    def configuration(cls) -> Tuple[sublime.Settings, str]:
+        return cls.read_settings()
+
+    @classmethod
+    def additional_variables(cls) -> Dict[str, str]:
+        return cls.get_additional_variables()
+
+    @classmethod
+    def needs_update_or_installation(cls) -> bool:
+        if cls.manages_server():
+            server = cls.get_server()
+            return bool(server and server.needs_installation())
+        return False
+
+    @classmethod
+    def install_or_update(cls) -> None:
+        server = cls.get_server()
+        if server:
+            server.install_or_update()
+
+    @classmethod
+    def can_start(cls, window: sublime.Window, initiating_view: sublime.View,
+                  workspace_folders: List[WorkspaceFolder], configuration: ClientConfig) -> Optional[str]:
+        if cls.manages_server():
+            server = cls.get_server()
+            if not server or server.get_status() == ServerStatus.ERROR:
+                return "{}: Error installing server dependencies.".format(cls.package_name)
+            if server.get_status() != ServerStatus.READY:
+                return "{}: Server installation in progress...".format(cls.package_name)
+        message = cls.is_allowed_to_start(window, initiating_view, workspace_folders, configuration)
+        if message:
+            return message
+        # Lazily update command after server has initialized if not set manually by the user.
+        if not configuration.command:
+            configuration.command = cls.get_command()
+        return None
+
+    @classmethod
+    def on_pre_start(cls, window: sublime.Window, initiating_view: sublime.View,
+                     workspace_folders: List[WorkspaceFolder], configuration: ClientConfig) -> Optional[str]:
+        extra_paths = cls.get_additional_paths()
+        if extra_paths:
+            original_path_raw = configuration.env.get('PATH') or ''
+            if isinstance(original_path_raw, str):
+                original_paths = original_path_raw.split(path.pathsep)
+            else:
+                original_paths = original_path_raw
+            # To fix https://github.com/TerminalFi/LSP-copilot/issues/163 ,
+            # We don't want to add the same path multiple times whenever a new server session is created.
+            # Note that additional paths should be prepended to the original paths.
+            wanted_paths = [path for path in extra_paths if path not in original_paths]
+            wanted_paths.extend(original_paths)
+            configuration.env['PATH'] = path.pathsep.join(wanted_paths)
+        return None
+
+    # --- ClientHandlerInterface --------------------------------------------------------------------------------------
+
+    @classmethod
+    def setup(cls) -> None:
+        register_plugin(cls)
+
+    @classmethod
+    def cleanup(cls) -> None:
+        unregister_plugin(cls)
+
+    # --- Internals ---------------------------------------------------------------------------------------------------
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        api = ApiWrapper(ref(self))  # type: ignore
+        register_decorated_handlers(self, api)
+        self.on_ready(api)
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/api_decorator.py b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/api_decorator.py
new file mode 100644
index 00000000..7af3b31e
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/api_decorator.py
@@ -0,0 +1,86 @@
+from ..api_wrapper_interface import ApiWrapperInterface
+from .interface import ClientHandlerInterface
+from LSP.plugin.core.typing import Any, Callable, List, Optional, TypeVar, Union
+import inspect
+
+__all__ = [
+    "notification_handler",
+    "request_handler",
+    "register_decorated_handlers",
+]
+
+T = TypeVar('T')
+# the first argument is always "self"
+NotificationHandler = Callable[[Any, Any], None]
+RequestHandler = Callable[[Any, Any, Callable[[Any], None]], None]
+MessageMethods = Union[str, List[str]]
+
+_HANDLER_MARKS = {
+    "notification": "__handle_notification_message_methods",
+    "request": "__handle_request_message_methods",
+}
+
+
+def notification_handler(notification_methods: MessageMethods) -> Callable[[NotificationHandler], NotificationHandler]:
+    """
+    Marks the decorated function as a "notification" message handler.
+
+    When the server sends the notification, the decorated function will be called with a `params` argument that
+    contains the payload.
+    """
+
+    return _create_handler("notification", notification_methods)
+
+
+def request_handler(request_methods: MessageMethods) -> Callable[[RequestHandler], RequestHandler]:
+    """
+    Marks the decorated function as a "request" message handler.
+
+    When the server sends the request, the decorated function will be called with two arguments (`params` and `respond`).
+    The first argument (`params`) is the payload of the request, and the second argument (`respond`) is the function that
+    must be used to respond to the request. The `respond` function takes any data that should be sent back to the
+    server.
+    """
+
+    return _create_handler("request", request_methods)
+
+
+def _create_handler(client_event: str, message_methods: MessageMethods) -> Callable[[T], T]:
+    """ Marks the decorated function as a message handler. """
+
+    message_methods = [message_methods] if isinstance(message_methods, str) else message_methods
+
+    def decorator(func: T) -> T:
+        setattr(func, _HANDLER_MARKS[client_event], message_methods)
+        return func
+
+    return decorator
+
+
+def register_decorated_handlers(client_handler: ClientHandlerInterface, api: ApiWrapperInterface) -> None:
+    """
+    Register decorator-style custom message handlers.
+
+    This method works as follows:
+
+    1. Scan through all methods of `client_handler`.
+    2. If a method is decorated, it will have a "handler mark" attribute which is set by the decorator.
+    3. Register the method with wanted message methods, which are stored in the "handler mark" attribute.
+
+    :param client_handler: The instance of the client handler.
+    :param api: The API instance for interacting with the server.
+    """
+    for _, func in inspect.getmembers(client_handler, predicate=inspect.isroutine):
+        for client_event, handler_mark in _HANDLER_MARKS.items():
+            message_methods = getattr(func, handler_mark, None)  # type: Optional[List[str]]
+            if message_methods is None:
+                continue
+
+            event_registrator = getattr(api, "on_" + client_event, None)
+            if callable(event_registrator):
+                for message_method in message_methods:
+                    event_registrator(message_method, func)
+
+                # it makes no sense that a handler handles both "notification" and "request"
+                # so we do early break once we've registered a handler
+                break
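
A hedged usage sketch of the decorators above (the message methods and handler names are invented); `register_decorated_handlers()` picks these methods up when the handler instance is constructed:

```python
# Sketch: decorator-style handlers on a handler class; methods and payloads are illustrative.
from lsp_utils import GenericClientHandler, notification_handler, request_handler


class LspFooPlugin(GenericClientHandler):
    package_name = __package__

    @notification_handler('foo/logMessage')
    def on_foo_log(self, params) -> None:
        print('[LSP-foo]', params)

    @request_handler('foo/confirm')
    def on_foo_confirm(self, params, respond) -> None:
        respond({'confirmed': True})  # respond() sends the payload back to the server
```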
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/interface.py b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/interface.py
new file mode 100644
index 00000000..151fda87
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/_client_handler/interface.py
@@ -0,0 +1,99 @@
+from ..api_wrapper_interface import ApiWrapperInterface
+from ..server_resource_interface import ServerResourceInterface
+from abc import ABCMeta
+from abc import abstractmethod
+from LSP.plugin import ClientConfig
+from LSP.plugin import DottedDict
+from LSP.plugin import WorkspaceFolder
+from LSP.plugin.core.typing import Dict, List, Optional, Tuple
+import sublime
+
+__all__ = ['ClientHandlerInterface']
+
+
+class ClientHandlerInterface(metaclass=ABCMeta):
+    package_name = ''
+
+    @classmethod
+    @abstractmethod
+    def setup(cls) -> None:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def cleanup(cls) -> None:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_displayed_name(cls) -> str:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def package_storage(cls) -> str:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_additional_variables(cls) -> Dict[str, str]:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_additional_paths(cls) -> List[str]:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def manages_server(cls) -> bool:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_command(cls) -> List[str]:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def binary_path(cls) -> str:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_server(cls) -> Optional[ServerResourceInterface]:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_binary_arguments(cls) -> List[str]:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def read_settings(cls) -> Tuple[sublime.Settings, str]:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def on_settings_read(cls, settings: sublime.Settings) -> bool:
+        ...
+
+    @classmethod
+    @abstractmethod
+    def is_allowed_to_start(
+        cls,
+        window: sublime.Window,
+        initiating_view: Optional[sublime.View] = None,
+        workspace_folders: Optional[List[WorkspaceFolder]] = None,
+        configuration: Optional[ClientConfig] = None
+    ) -> Optional[str]:
+        ...
+
+    @abstractmethod
+    def on_ready(self, api: ApiWrapperInterface) -> None:
+        ...
+
+    @abstractmethod
+    def on_settings_changed(self, settings: DottedDict) -> None:
+        ...
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/_util/__init__.py b/SublimeText/lsp_utils/st3/lsp_utils/_util/__init__.py
new file mode 100644
index 00000000..7eda31c7
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/_util/__init__.py
@@ -0,0 +1,5 @@
+from .weak_method import weak_method
+
+__all__ = [
+    'weak_method',
+]
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/_util/weak_method.py b/SublimeText/lsp_utils/st3/lsp_utils/_util/weak_method.py
new file mode 100644
index 00000000..dbd2899b
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/_util/weak_method.py
@@ -0,0 +1,34 @@
+from LSP.plugin.core.typing import Any, Callable
+from types import MethodType
+import weakref
+
+
+__all__ = ['weak_method']
+
+
+# An implementation of weak method borrowed from sublime_lib [1]
+#
+# We need it to be able to weak reference bound methods as `weakref.WeakMethod` is not available in
+# 3.3 runtime.
+#
+# The reason this is necessary is explained in the documentation of `weakref.WeakMethod`:
+# > A custom ref subclass which simulates a weak reference to a bound method (i.e., a method defined
+# > on a class and looked up on an instance). Since a bound method is ephemeral, a standard weak
+# > reference cannot keep hold of it.
+#
+# [1] https://github.com/SublimeText/sublime_lib/blob/master/st3/sublime_lib/_util/weak_method.py
+
+def weak_method(method: Callable[..., Any]) -> Callable[..., Any]:
+    assert isinstance(method, MethodType)
+    self_ref = weakref.ref(method.__self__)
+    function_ref = weakref.ref(method.__func__)
+
+    def wrapped(*args: Any, **kwargs: Any) -> Any:
+        self = self_ref()
+        fn = function_ref()
+        if self is None or fn is None:
+            print('[lsp_utils] Error: weak_method not called due to a deleted reference', [self, fn])
+            return
+        return fn(self, *args, **kwargs)  # type: ignore
+
+    return wrapped
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/api_wrapper_interface.py b/SublimeText/lsp_utils/st3/lsp_utils/api_wrapper_interface.py
new file mode 100644
index 00000000..e13a2687
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/api_wrapper_interface.py
@@ -0,0 +1,46 @@
+from abc import ABCMeta, abstractmethod
+from LSP.plugin.core.typing import Any, Callable
+
+__all__ = ['ApiWrapperInterface']
+
+
+NotificationHandler = Callable[[Any], None]
+RequestHandler = Callable[[Any, Callable[[Any], None]], None]
+
+
+class ApiWrapperInterface(metaclass=ABCMeta):
+    """
+    An interface for sending and receiving requests and notifications from and to the server. An implementation of it
+    is available through the :func:`GenericClientHandler.on_ready()` override.
+    """
+
+    @abstractmethod
+    def on_notification(self, method: str, handler: NotificationHandler) -> None:
+        """
+        Registers a handler for given notification name. The handler will be called with optional params.
+        """
+        ...
+
+    @abstractmethod
+    def on_request(self, method: str, handler: RequestHandler) -> None:
+        """
+        Registers a handler for given request name. The handler will be called with two arguments - first the params
+        sent with the request and second the function that must be used to respond to the request. The response
+        function takes params to respond with.
+        """
+        ...
+
+    @abstractmethod
+    def send_notification(self, method: str, params: Any) -> None:
+        """
+        Sends a notification to the server.
+        """
+        ...
+
+    @abstractmethod
+    def send_request(self, method: str, params: Any, handler: Callable[[Any, bool], None]) -> None:
+        """
+        Sends a request to the server. The handler will be called with the result received from the server and
+        a boolean value that is `False` if the request has succeeded and `True` if it returned an error.
+        """
+        ...
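
As an illustration (the `foo/...` methods and payloads are invented), the `ApiWrapperInterface` instance handed to `on_ready()` is typically used like this:

```python
# Sketch: talking to the server from on_ready() via the ApiWrapperInterface.
from lsp_utils import ApiWrapperInterface, GenericClientHandler


class LspFooPlugin(GenericClientHandler):
    package_name = __package__

    def on_ready(self, api: ApiWrapperInterface) -> None:
        def on_status(result, is_error: bool) -> None:
            # is_error is True when the server answered the request with an error.
            print('[LSP-foo] status request failed' if is_error else result)

        api.send_request('foo/status', {}, on_status)
        api.send_notification('foo/didConfigure', {'verbose': True})
```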
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/constants.py b/SublimeText/lsp_utils/st3/lsp_utils/constants.py
new file mode 100644
index 00000000..d924eb14
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/constants.py
@@ -0,0 +1 @@
+SETTINGS_FILENAME = 'lsp_utils.sublime-settings'
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/generic_client_handler.py b/SublimeText/lsp_utils/st3/lsp_utils/generic_client_handler.py
new file mode 100644
index 00000000..85c625fe
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/generic_client_handler.py
@@ -0,0 +1,204 @@
+from ._client_handler import ClientHandler
+from .api_wrapper_interface import ApiWrapperInterface
+from .helpers import rmtree_ex
+from .server_resource_interface import ServerResourceInterface
+from abc import ABCMeta
+from LSP.plugin import ClientConfig
+from LSP.plugin import DottedDict
+from LSP.plugin import WorkspaceFolder
+from LSP.plugin.core.typing import Any, Dict, List, Optional, Tuple
+from package_control import events  # type: ignore
+import os
+import sublime
+
+__all__ = ['GenericClientHandler']
+
+
+class GenericClientHandler(ClientHandler, metaclass=ABCMeta):
+    """
+    A generic implementation of an LSP plugin handler.
+    """
+
+    package_name = ''
+    """
+    The name of the released package. Also used for the name of the LSP client and for reading package settings.
+
+    This name must be set and must match the basename of the corresponding `*.sublime-settings` file.
+    It's also used as a directory name for package storage when implementing a server resource interface.
+    It is recommended to use the `__package__` value for this. If you need to override the handler name in the UI,
+    also override :meth:`get_displayed_name()`.
+
+    :required: Yes
+    """
+
+    # --- ClientHandler handlers --------------------------------------------------------------------------------------
+
+    @classmethod
+    def setup(cls) -> None:
+        if not cls.package_name:
+            raise Exception('ERROR: [lsp_utils] package_name is required to instantiate an instance of {}'.format(cls))
+        super().setup()
+
+    @classmethod
+    def cleanup(cls) -> None:
+
+        def run_async() -> None:
+            if os.path.isdir(cls.package_storage()):
+                rmtree_ex(cls.package_storage())
+
+        if events.remove(cls.package_name):
+            sublime.set_timeout_async(run_async, 1000)
+        super().cleanup()
+
+    @classmethod
+    def get_displayed_name(cls) -> str:
+        """
+        Returns the name that will be shown in the ST UI (for example in the status field).
+
+        Defaults to the value of :attr:`package_name`.
+        """
+        return cls.package_name
+
+    @classmethod
+    def storage_path(cls) -> str:
+        """
+        The storage path. Use this as your base directory to install server files. Its path is '$DATA/Package Storage'.
+        """
+        return super().storage_path()
+
+    @classmethod
+    def package_storage(cls) -> str:
+        """
+        The storage path for this package. Its path is '$DATA/Package Storage/[Package_Name]'.
+        """
+        return os.path.join(cls.storage_path(), cls.package_name)
+
+    @classmethod
+    def get_command(cls) -> List[str]:
+        """
+        Returns a list of arguments to use to start the server. The default implementation returns the combined result of
+        :meth:`binary_path()` and :meth:`get_binary_arguments()`.
+        """
+        return [cls.binary_path()] + cls.get_binary_arguments()
+
+    @classmethod
+    def binary_path(cls) -> str:
+        """
+        The filesystem path to the server executable.
+
+        The default implementation returns `binary_path` property of the server instance (returned from
+        :meth:`get_server()`), if available.
+        """
+        if cls.manages_server():
+            server = cls.get_server()
+            if server:
+                return server.binary_path
+        return ''
+
+    @classmethod
+    def get_binary_arguments(cls) -> List[str]:
+        """
+        Returns a list of extra arguments to append to the `command` when starting the server.
+
+        See :meth:`get_command()`.
+        """
+        return []
+
+    @classmethod
+    def read_settings(cls) -> Tuple[sublime.Settings, str]:
+        filename = "{}.sublime-settings".format(cls.package_name)
+        loaded_settings = sublime.load_settings(filename)
+        changed = cls.on_settings_read(loaded_settings)
+        if changed:
+            sublime.save_settings(filename)
+        filepath = "Packages/{}/{}".format(cls.package_name, filename)
+        return (loaded_settings, filepath)
+
+    @classmethod
+    def get_additional_variables(cls) -> Dict[str, str]:
+        """
+        Override to add more variables here to be expanded when reading settings.
+
+        Default implementation adds a `${server_path}` variable that holds filesystem path to the server
+        binary (only when :meth:`manages_server` is `True`).
+
+        Remember to call the super class and merge the results if overriding.
+        """
+        return {
+            'pathsep': os.pathsep,
+            'server_path': cls.binary_path(),
+        }
+
+    @classmethod
+    def get_additional_paths(cls) -> List[str]:
+        """
+        Override to prepend additional paths to the default PATH environment variable.
+
+        Remember to call the super class and merge the results if overriding.
+        """
+        return []
+
+    @classmethod
+    def manages_server(cls) -> bool:
+        """
+        Whether this handler manages a server. If this returns `True` then :meth:`get_server()` should also be
+        implemented.
+        """
+        return False
+
+    @classmethod
+    def get_server(cls) -> Optional[ServerResourceInterface]:
+        """
+        :returns: The instance of the server managed by this plugin. Only used when :meth:`manages_server()`
+                  returns `True`.
+        """
+        return None
+
+    @classmethod
+    def on_settings_read(cls, settings: sublime.Settings) -> bool:
+        """
+        Called when package settings were read. Receives a `sublime.Settings` object.
+
+        It's recommended to use :meth:`on_settings_changed()` instead if you don't need to persist your changes to
+        disk.
+
+        :returns: `True` to save modifications back into the settings file.
+        """
+        return False
+
+    @classmethod
+    def is_allowed_to_start(
+        cls,
+        window: sublime.Window,
+        initiating_view: Optional[sublime.View] = None,
+        workspace_folders: Optional[List[WorkspaceFolder]] = None,
+        configuration: Optional[ClientConfig] = None
+    ) -> Optional[str]:
+        """
+        Determines if the session is allowed to start.
+
+        :returns: A string describing the reason why we should not start a language server session, or `None` if we
+                  should go ahead and start a session.
+        """
+        return None
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        # Seems unnecessary to override but it's to hide the original argument from the documentation.
+        super().__init__(*args, **kwargs)
+
+    def on_ready(self, api: ApiWrapperInterface) -> None:
+        """
+        Called when the instance is ready.
+
+        :param api: The API instance for interacting with the server.
+        """
+        pass
+
+    def on_settings_changed(self, settings: DottedDict) -> None:
+        """
+        Override this method to alter the settings that are returned to the server for the
+        workspace/didChangeConfiguration notification and the workspace/configuration requests.
+
+        :param settings: The settings that the server should receive.
+        """
+        pass
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/helpers.py b/SublimeText/lsp_utils/st3/lsp_utils/helpers.py
new file mode 100644
index 00000000..c87f1df3
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/helpers.py
@@ -0,0 +1,95 @@
+from LSP.plugin.core.typing import Any, Callable, Dict, List, Optional, Tuple
+import os
+import shutil
+import sublime
+import subprocess
+import threading
+
+StringCallback = Callable[[str], None]
+SemanticVersion = Tuple[int, int, int]
+
+is_windows = sublime.platform() == 'windows'
+
+
+def run_command_sync(
+    args: List[str],
+    cwd: Optional[str] = None,
+    extra_env: Optional[Dict[str, str]] = None,
+    extra_paths: List[str] = [],
+    shell: bool = is_windows,
+) -> Tuple[str, Optional[str]]:
+    """
+    Runs the given command synchronously.
+
+    :returns: A two-element tuple with the returned value and an optional error. If running the command has failed, the
+              first tuple element will be an empty string and the second will contain the potential `stderr` output. If the
+              command has succeeded then the second tuple element will be `None`.
+    """
+    try:
+        env = None
+        if extra_env or extra_paths:
+            env = os.environ.copy()
+            if extra_env:
+                env.update(extra_env)
+            if extra_paths:
+                env['PATH'] = os.path.pathsep.join(extra_paths) + os.path.pathsep + env['PATH']
+        startupinfo = None
+        if is_windows:
+            startupinfo = subprocess.STARTUPINFO()  # type: ignore
+            startupinfo.dwFlags |= subprocess.SW_HIDE | subprocess.STARTF_USESHOWWINDOW  # type: ignore
+        output = subprocess.check_output(
+            args, cwd=cwd, shell=shell, stderr=subprocess.STDOUT, env=env, startupinfo=startupinfo)
+        return (decode_bytes(output).strip(), None)
+    except subprocess.CalledProcessError as error:
+        return ('', decode_bytes(error.output).strip())
+
+
+def run_command_async(args: List[str], on_success: StringCallback, on_error: StringCallback, **kwargs: Any) -> None:
+    """
+    Runs the given command asynchronously.
+
+    On success, calls the provided `on_success` callback with the value that the command has returned.
+    On error calls the provided `on_error` callback with the potential `stderr` output.
+    """
+
+    def execute(on_success: StringCallback, on_error: StringCallback, args: List[str]) -> None:
+        result, error = run_command_sync(args, **kwargs)
+        on_error(error) if error is not None else on_success(result)
+
+    thread = threading.Thread(target=execute, args=(on_success, on_error, args))
+    thread.start()
+
+
+def decode_bytes(data: bytes) -> str:
+    """
+    Decodes provided bytes using `utf-8` decoding, ignoring potential decoding errors.
+    """
+    return data.decode('utf-8', 'ignore')
+
+
+def rmtree_ex(path: str, ignore_errors: bool = False) -> None:
+    # On Windows, "shutil.rmtree" will raise file not found errors when deleting a long path (>255 chars).
+    # See https://stackoverflow.com/a/14076169/4643765
+    # See https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation
+    path = R'\\?\{}'.format(path) if sublime.platform() == 'windows' else path
+    shutil.rmtree(path, ignore_errors)
+
+
+def version_to_string(version: SemanticVersion) -> str:
+    """
+    Returns a string representation of a version tuple.
+    """
+    return '.'.join([str(c) for c in version])
+
+
+def log_and_show_message(message: str, additional_logs: Optional[str] = None, show_in_status: bool = True) -> None:
+    """
+    Logs the message in the console and optionally sets it as a status message on the window.
+
+    :param message: The message to log or show in the status.
+    :param additional_logs: The extra value to log on a separate line.
+    :param show_in_status: Whether to briefly show the message in the status bar of the current window.
+    """
+    print(message, '\n', additional_logs) if additional_logs else print(message)
+    if show_in_status:
+        sublime.active_window().status_message(message)
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/node_runtime.py b/SublimeText/lsp_utils/st3/lsp_utils/node_runtime.py
new file mode 100644
index 00000000..824c9161
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/node_runtime.py
@@ -0,0 +1,511 @@
+from .constants import SETTINGS_FILENAME
+from .helpers import rmtree_ex
+from .helpers import run_command_sync
+from .helpers import SemanticVersion
+from .helpers import version_to_string
+from .third_party.semantic_version import NpmSpec, Version
+from contextlib import contextmanager
+from LSP.plugin.core.logging import debug
+from LSP.plugin.core.typing import cast, Any, Dict, Generator, List, Optional, Tuple, Union
+from os import path
+from os import remove
+from sublime_lib import ActivityIndicator
+import os
+import shutil
+import sublime
+import subprocess
+import sys
+import tarfile
+import urllib.request
+import zipfile
+
+__all__ = ['NodeRuntime']
+
+IS_WINDOWS_7_OR_LOWER = sys.platform == 'win32' and sys.getwindowsversion()[:2] <= (6, 1)  # type: ignore
+
+NODE_RUNTIME_VERSION = '18.18.1'
+NODE_DIST_URL = 'https://nodejs.org/dist/v{version}/{filename}'
+
+ELECTRON_RUNTIME_VERSION = '27.0.0'  # includes Node.js v18.17.1
+ELECTRON_NODE_VERSION = '18.17.1'
+ELECTRON_DIST_URL = 'https://github.com/electron/electron/releases/download/v{version}/{filename}'
+YARN_URL = 'https://github.com/yarnpkg/yarn/releases/download/v1.22.21/yarn-1.22.21.js'
+
+NO_NODE_FOUND_MESSAGE = 'Could not start {package_name} due to not being able to resolve a suitable Node.js \
+runtime on the PATH. Press the "Download Node.js" button to get the required Node.js version \
+(note that it will be used only by LSP and will not affect your system otherwise).'
+
+
+class NodeRuntime:
+    _node_runtime_resolved = False
+    _node_runtime = None  # Optional[NodeRuntime]
+    """
+    Cached instance of resolved Node.js runtime. This is only done once per-session to avoid unnecessary IO.
+    """
+
+    @classmethod
+    def get(
+        cls, package_name: str, storage_path: str, required_node_version: Union[str, SemanticVersion]
+    ) -> Optional['NodeRuntime']:
+        if isinstance(required_node_version, tuple):
+            required_semantic_version = NpmSpec('>={}'.format(version_to_string(required_node_version)))
+        else:
+            required_semantic_version = NpmSpec(required_node_version)
+        if cls._node_runtime_resolved:
+            if cls._node_runtime:
+                cls._node_runtime.check_satisfies_version(required_semantic_version)
+            return cls._node_runtime
+        cls._node_runtime_resolved = True
+        cls._node_runtime = cls._resolve_node_runtime(package_name, storage_path, required_semantic_version)
+        debug('Resolved Node.js Runtime for package {}: {}'.format(package_name, cls._node_runtime))
+        return cls._node_runtime
+
+    @classmethod
+    def _resolve_node_runtime(
+        cls, package_name: str, storage_path: str, required_node_version: NpmSpec
+    ) -> 'NodeRuntime':
+        resolved_runtime = None  # type: Optional[NodeRuntime]
+        default_runtimes = ['system', 'local']
+        settings = sublime.load_settings(SETTINGS_FILENAME)
+        selected_runtimes = cast(List[str], settings.get('nodejs_runtime') or default_runtimes)
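+        # The "nodejs_runtime" setting may list "system" and/or "local"; each entry is tried in order
+        # until one provides a Node.js version that satisfies the requirement.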
+        log_lines = ['--- lsp_utils Node.js resolving start ---']
+        for runtime_type in selected_runtimes:
+            if runtime_type == 'system':
+                log_lines.append('Resolving Node.js Runtime in env PATH for package {}...'.format(package_name))
+                path_runtime = NodeRuntimePATH()
+                try:
+                    path_runtime.check_binary_present()
+                except Exception as ex:
+                    log_lines.append(' * Failed: {}'.format(ex))
+                    continue
+                try:
+                    path_runtime.check_satisfies_version(required_node_version)
+                    resolved_runtime = path_runtime
+                    break
+                except Exception as ex:
+                    log_lines.append(' * {}'.format(ex))
+            elif runtime_type == 'local':
+                log_lines.append('Resolving Node.js Runtime from lsp_utils for package {}...'.format(package_name))
+                use_electron = cast(bool, settings.get('local_use_electron') or False)
+                runtime_dir = path.join(storage_path, 'lsp_utils', 'node-runtime')
+                local_runtime = ElectronRuntimeLocal(runtime_dir) if use_electron else NodeRuntimeLocal(runtime_dir)
+                try:
+                    local_runtime.check_binary_present()
+                except Exception as ex:
+                    log_lines.append(' * Binaries check failed: {}'.format(ex))
+                    if selected_runtimes[0] != 'local':
+                        if not sublime.ok_cancel_dialog(
+                                NO_NODE_FOUND_MESSAGE.format(package_name=package_name), 'Download Node.js'):
+                            log_lines.append(' * Download skipped')
+                            continue
+                    # Remove outdated runtimes.
+                    if path.isdir(runtime_dir):
+                        for directory in next(os.walk(runtime_dir))[1]:
+                            old_dir = path.join(runtime_dir, directory)
+                            print('[lsp_utils] Deleting outdated Node.js runtime directory "{}"'.format(old_dir))
+                            try:
+                                rmtree_ex(old_dir)
+                            except Exception as ex:
+                                log_lines.append(' * Failed deleting: {}'.format(ex))
+                    try:
+                        local_runtime.install_node()
+                    except Exception as ex:
+                        log_lines.append(' * Failed downloading: {}'.format(ex))
+                        continue
+                    try:
+                        local_runtime.check_binary_present()
+                    except Exception as ex:
+                        log_lines.append(' * Failed: {}'.format(ex))
+                        continue
+                try:
+                    local_runtime.check_satisfies_version(required_node_version)
+                    resolved_runtime = local_runtime
+                    break
+                except Exception as ex:
+                    log_lines.append(' * {}'.format(ex))
+        if not resolved_runtime:
+            log_lines.append('--- lsp_utils Node.js resolving end ---')
+            print('\n'.join(log_lines))
+            raise Exception('Failed resolving Node.js Runtime. Please check in the console for more details.')
+        return resolved_runtime
+
+    def __init__(self) -> None:
+        self._node = None  # type: Optional[str]
+        self._npm = None  # type: Optional[str]
+        self._version = None  # type: Optional[Version]
+        self._additional_paths = []  # type: List[str]
+
+    def __repr__(self) -> str:
+        return '{}(node: {}, npm: {}, version: {})'.format(
+            self.__class__.__name__, self._node, self._npm, self._version if self._version else None)
+
+    def install_node(self) -> None:
+        raise Exception('Not supported!')
+
+    def node_bin(self) -> Optional[str]:
+        return self._node
+
+    def npm_bin(self) -> Optional[str]:
+        return self._npm
+
+    def node_env(self) -> Dict[str, str]:
+        if IS_WINDOWS_7_OR_LOWER:
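+            # Newer Node.js releases refuse to run on Windows 7 unless the platform check is skipped.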
+            return {'NODE_SKIP_PLATFORM_CHECK': '1'}
+        return {}
+
+    def check_binary_present(self) -> None:
+        if self._node is None:
+            raise Exception('"node" binary not found')
+        if self._npm is None:
+            raise Exception('"npm" binary not found')
+
+    def check_satisfies_version(self, required_node_version: NpmSpec) -> None:
+        node_version = self.resolve_version()
+        if node_version not in required_node_version:
+            raise Exception(
+                'Node.js version requirement failed. Expected {}, got {}.'.format(required_node_version, node_version))
+
+    def resolve_version(self) -> Version:
+        if self._version:
+            return self._version
+        if not self._node:
+            raise Exception('Node.js not initialized')
+        # In this case we have fully resolved binary path already so shouldn't need `shell` on Windows.
+        version, error = run_command_sync([self._node, '--version'], extra_env=self.node_env(), shell=False)
+        if error is None:
+            self._version = Version(version.replace('v', ''))
+        else:
+            raise Exception('Failed resolving Node.js version. Error:\n{}'.format(error))
+        return self._version
+
+    def run_node(
+        self,
+        args: List[str],
+        stdin: int = subprocess.PIPE,
+        stdout: int = subprocess.PIPE,
+        stderr: int = subprocess.PIPE,
+        env: Dict[str, Any] = {}
+    ) -> Optional['subprocess.Popen[bytes]']:
+        node_bin = self.node_bin()
+        if node_bin is None:
+            return None
+        os_env = os.environ.copy()
+        os_env.update(self.node_env())
+        os_env.update(env)
+        startupinfo = None
+        if sys.platform == 'win32':
+            startupinfo = subprocess.STARTUPINFO()
+            startupinfo.dwFlags |= subprocess.SW_HIDE | subprocess.STARTF_USESHOWWINDOW
+        return subprocess.Popen(
+            [node_bin] + args, stdin=stdin, stdout=stdout, stderr=stderr, env=os_env, startupinfo=startupinfo)
+
+    def run_install(self, cwd: str) -> None:
+        if not path.isdir(cwd):
+            raise Exception('Specified working directory "{}" does not exist'.format(cwd))
+        if not self._node:
+            raise Exception('Node.js not installed. Use NodeInstaller to install it first.')
+        args = [
+            'ci',
+            '--omit=dev',
+            '--scripts-prepend-node-path=true',
+            '--verbose',
+        ]
+        stdout, error = run_command_sync(
+            self.npm_command() + args, cwd=cwd, extra_env=self.node_env(), extra_paths=self._additional_paths,
+            shell=False
+        )
+        print('[lsp_utils] START output of command: "{}"'.format(' '.join(args)))
+        print(stdout)
+        print('[lsp_utils] Command output END')
+        if error is not None:
+            raise Exception('Failed to run npm command "{}":\n{}'.format(' '.join(args), error))
+
+    def npm_command(self) -> List[str]:
+        if self._npm is None:
+            raise Exception('Npm command not initialized')
+        return [self._npm]
+
+
+class NodeRuntimePATH(NodeRuntime):
+    def __init__(self) -> None:
+        super().__init__()
+        self._node = shutil.which('node')
+        self._npm = shutil.which('npm')
+
+
+class NodeRuntimeLocal(NodeRuntime):
+    def __init__(self, base_dir: str, node_version: str = NODE_RUNTIME_VERSION):
+        super().__init__()
+        self._base_dir = path.abspath(path.join(base_dir, node_version))
+        self._node_version = node_version
+        self._node_dir = path.join(self._base_dir, 'node')
+        self._install_in_progress_marker_file = path.join(self._base_dir, '.installing')
+        self._resolve_paths()
+
+    # --- NodeRuntime overrides ----------------------------------------------------------------------------------------
+
+    def npm_command(self) -> List[str]:
+        if not self._node or not self._npm:
+            raise Exception('Node.js or Npm command not initialized')
+        return [self._node, self._npm]
+
+    def install_node(self) -> None:
+        os.makedirs(os.path.dirname(self._install_in_progress_marker_file), exist_ok=True)
+        open(self._install_in_progress_marker_file, 'a').close()
+        with ActivityIndicator(sublime.active_window(), 'Downloading Node.js'):
+            install_node = NodeInstaller(self._base_dir, self._node_version)
+            install_node.run()
+            self._resolve_paths()
+        remove(self._install_in_progress_marker_file)
+        self._resolve_paths()
+
+    # --- private methods ----------------------------------------------------------------------------------------------
+
+    def _resolve_paths(self) -> None:
+        if path.isfile(self._install_in_progress_marker_file):
+            # Will trigger re-installation.
+            return
+        self._node = self._resolve_binary()
+        self._node_lib = self._resolve_lib()
+        self._npm = path.join(self._node_lib, 'npm', 'bin', 'npm-cli.js')
+        self._additional_paths = [path.dirname(self._node)] if self._node else []
+
+    def _resolve_binary(self) -> Optional[str]:
+        exe_path = path.join(self._node_dir, 'node.exe')
+        binary_path = path.join(self._node_dir, 'bin', 'node')
+        if path.isfile(exe_path):
+            return exe_path
+        if path.isfile(binary_path):
+            return binary_path
+        return None
+
+    def _resolve_lib(self) -> str:
+        lib_path = path.join(self._node_dir, 'lib', 'node_modules')
+        if not path.isdir(lib_path):
+            lib_path = path.join(self._node_dir, 'node_modules')
+        return lib_path
+
+
+class NodeInstaller:
+    '''Command to install a local copy of Node.js'''
+
+    def __init__(self, base_dir: str, node_version: str = NODE_RUNTIME_VERSION) -> None:
+        """
+        :param base_dir: The base directory for storing the given Node.js runtime version
+        :param node_version: The Node.js version to install
+        """
+        self._base_dir = base_dir
+        self._node_version = node_version
+        self._cache_dir = path.join(self._base_dir, 'cache')
+
+    def run(self) -> None:
+        archive, url = self._node_archive()
+        print('[lsp_utils] Downloading Node.js {} from {}'.format(self._node_version, url))
+        if not self._archive_exists(archive):
+            self._download_node(url, archive)
+        self._install_node(archive)
+
+    def _node_archive(self) -> Tuple[str, str]:
+        platform = sublime.platform()
+        arch = sublime.arch()
+        if platform == 'windows' and arch == 'x64':
+            node_os = 'win'
+            archive = 'zip'
+        elif platform == 'linux':
+            node_os = 'linux'
+            archive = 'tar.gz'
+        elif platform == 'osx':
+            node_os = 'darwin'
+            archive = 'tar.gz'
+        else:
+            raise Exception('{} {} is not supported'.format(arch, platform))
+        filename = 'node-v{}-{}-{}.{}'.format(self._node_version, node_os, arch, archive)
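+        # e.g. "node-v18.18.1-darwin-x64.tar.gz"; the download URL is built from NODE_DIST_URL below.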
+        dist_url = NODE_DIST_URL.format(version=self._node_version, filename=filename)
+        return filename, dist_url
+
+    def _archive_exists(self, filename: str) -> bool:
+        archive = path.join(self._cache_dir, filename)
+        return path.isfile(archive)
+
+    def _download_node(self, url: str, filename: str) -> None:
+        if not path.isdir(self._cache_dir):
+            os.makedirs(self._cache_dir)
+        archive = path.join(self._cache_dir, filename)
+        with urllib.request.urlopen(url) as response:
+            with open(archive, 'wb') as f:
+                shutil.copyfileobj(response, f)
+
+    def _install_node(self, filename: str) -> None:
+        archive = path.join(self._cache_dir, filename)
+        opener = zipfile.ZipFile if filename.endswith('.zip') else tarfile.open  # type: Any
+        try:
+            with opener(archive) as f:
+                names = f.namelist() if hasattr(f, 'namelist') else f.getnames()
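+                # The archive contains a single top-level directory (e.g. "node-v18.18.1-darwin-x64"); it is renamed to "node" below.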
+                install_dir, _ = next(x for x in names if '/' in x).split('/', 1)
+                bad_members = [x for x in names if x.startswith('/') or x.startswith('..')]
+                if bad_members:
+                    raise Exception('{} appears to be malicious, bad filenames: {}'.format(filename, bad_members))
+                f.extractall(self._base_dir)
+                with chdir(self._base_dir):
+                    os.rename(install_dir, 'node')
+        except Exception as ex:
+            raise ex
+        finally:
+            remove(archive)
+
+
+class ElectronRuntimeLocal(NodeRuntime):
+    def __init__(self, base_dir: str):
+        super().__init__()
+        self._base_dir = path.abspath(path.join(base_dir, ELECTRON_NODE_VERSION))
+        self._yarn = path.join(self._base_dir, 'yarn.js')
+        self._install_in_progress_marker_file = path.join(self._base_dir, '.installing')
+        if not path.isfile(self._install_in_progress_marker_file):
+            self._resolve_paths()
+
+    # --- NodeRuntime overrides ----------------------------------------------------------------------------------------
+
+    def node_env(self) -> Dict[str, str]:
+        extra_env = super().node_env()
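+        # ELECTRON_RUN_AS_NODE makes the Electron binary behave like a plain Node.js executable.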
+        extra_env.update({'ELECTRON_RUN_AS_NODE': 'true'})
+        return extra_env
+
+    def install_node(self) -> None:
+        os.makedirs(os.path.dirname(self._install_in_progress_marker_file), exist_ok=True)
+        open(self._install_in_progress_marker_file, 'a').close()
+        with ActivityIndicator(sublime.active_window(), 'Downloading Node.js'):
+            install_node = ElectronInstaller(self._base_dir)
+            install_node.run()
+            self._resolve_paths()
+        remove(self._install_in_progress_marker_file)
+
+    def run_install(self, cwd: str) -> None:
+        self._run_yarn(['import'], cwd)
+        args = [
+            'install',
+            '--production',
+            '--frozen-lockfile',
+            '--scripts-prepend-node-path=true',
+            '--cache-folder={}'.format(path.join(self._base_dir, 'cache', 'yarn')),
+            # '--verbose',
+        ]
+        self._run_yarn(args, cwd)
+
+    # --- private methods ----------------------------------------------------------------------------------------------
+
+    def _resolve_paths(self) -> None:
+        self._node = self._resolve_binary()
+        self._npm = path.join(self._base_dir, 'yarn.js')
+
+    def _resolve_binary(self) -> Optional[str]:
+        binary_path = None
+        platform = sublime.platform()
+        if platform == 'osx':
+            binary_path = path.join(self._base_dir, 'Electron.app', 'Contents', 'MacOS', 'Electron')
+        elif platform == 'windows':
+            binary_path = path.join(self._base_dir, 'electron.exe')
+        else:
+            binary_path = path.join(self._base_dir, 'electron')
+        return binary_path if binary_path and path.isfile(binary_path) else None
+
+    def _run_yarn(self, args: List[str], cwd: str) -> None:
+        if not path.isdir(cwd):
+            raise Exception('Specified working directory "{}" does not exist'.format(cwd))
+        if not self._node:
+            raise Exception('Node.js not installed. Use NodeInstaller to install it first.')
+        stdout, error = run_command_sync(
+            [self._node, self._yarn] + args, cwd=cwd, extra_env=self.node_env(), shell=False
+        )
+        print('[lsp_utils] START output of command: "{}"'.format(' '.join(args)))
+        print(stdout)
+        print('[lsp_utils] Command output END')
+        if error is not None:
+            raise Exception('Failed to run yarn command "{}":\n{}'.format(' '.join(args), error))
+
+
+class ElectronInstaller:
+    '''Command to install a local copy of Node.js'''
+
+    def __init__(self, base_dir: str) -> None:
+        """
+        :param base_dir: The base directory for storing the given Node.js runtime version
+        """
+        self._base_dir = base_dir
+        self._cache_dir = path.join(self._base_dir, 'cache')
+
+    def run(self) -> None:
+        archive, url = self._node_archive()
+        print(
+            '[lsp_utils] Downloading Electron {} (Node.js runtime {}) from {}'.format(
+                ELECTRON_RUNTIME_VERSION, ELECTRON_NODE_VERSION, url
+            )
+        )
+        if not self._archive_exists(archive):
+            self._download(url, archive)
+        self._install(archive)
+        self._download_yarn()
+
+    def _node_archive(self) -> Tuple[str, str]:
+        platform = sublime.platform()
+        arch = sublime.arch()
+        if platform == 'windows':
+            platform_code = 'win32'
+        elif platform == 'linux':
+            platform_code = 'linux'
+        elif platform == 'osx':
+            platform_code = 'darwin'
+        else:
+            raise Exception('{} {} is not supported'.format(arch, platform))
+        filename = 'electron-v{}-{}-{}.zip'.format(ELECTRON_RUNTIME_VERSION, platform_code, arch)
+        dist_url = ELECTRON_DIST_URL.format(version=ELECTRON_RUNTIME_VERSION, filename=filename)
+        return filename, dist_url
+
+    def _archive_exists(self, filename: str) -> bool:
+        archive = path.join(self._cache_dir, filename)
+        return path.isfile(archive)
+
+    def _download(self, url: str, filename: str) -> None:
+        if not path.isdir(self._cache_dir):
+            os.makedirs(self._cache_dir)
+        archive = path.join(self._cache_dir, filename)
+        with urllib.request.urlopen(url) as response:
+            with open(archive, 'wb') as f:
+                shutil.copyfileobj(response, f)
+
+    def _install(self, filename: str) -> None:
+        archive = path.join(self._cache_dir, filename)
+        try:
+            if sublime.platform() == 'windows':
+                with zipfile.ZipFile(archive) as f:
+                    names = f.namelist()
+                    _, _ = next(x for x in names if '/' in x).split('/', 1)
+                    bad_members = [x for x in names if x.startswith('/') or x.startswith('..')]
+                    if bad_members:
+                        raise Exception('{} appears to be malicious, bad filenames: {}'.format(filename, bad_members))
+                    f.extractall(self._base_dir)
+            else:
+                # ZipFile doesn't handle symlinks and permissions correctly on Linux and Mac. Use unzip instead.
+                _, error = run_command_sync(['unzip', archive, '-d', self._base_dir], cwd=self._cache_dir)
+                if error:
+                    raise Exception('Error unzipping electron archive: {}'.format(error))
+        except Exception as ex:
+            raise ex
+        finally:
+            remove(archive)
+
+    def _download_yarn(self) -> None:
+        archive = path.join(self._base_dir, 'yarn.js')
+        with urllib.request.urlopen(YARN_URL) as response:
+            with open(archive, 'wb') as f:
+                shutil.copyfileobj(response, f)
+
+
+@contextmanager
+def chdir(new_dir: str) -> Generator[None, None, None]:
+    '''Context Manager for changing the working directory'''
+    cur_dir = os.getcwd()
+    os.chdir(new_dir)
+    try:
+        yield
+    finally:
+        os.chdir(cur_dir)
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/npm_client_handler.py b/SublimeText/lsp_utils/st3/lsp_utils/npm_client_handler.py
new file mode 100644
index 00000000..b9334bff
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/npm_client_handler.py
@@ -0,0 +1,168 @@
+from .generic_client_handler import GenericClientHandler
+from .server_npm_resource import ServerNpmResource
+from .server_resource_interface import ServerResourceInterface
+from LSP.plugin import ClientConfig
+from LSP.plugin import WorkspaceFolder
+from LSP.plugin.core.typing import Dict, List, Optional, Tuple
+from os import path
+import sublime
+
+__all__ = ['NpmClientHandler']
+
+
+class NpmClientHandler(GenericClientHandler):
+    """
+    An implementation of :class:`GenericClientHandler` for handling NPM-based LSP plugins.
+
+    Automatically manages an NPM-based server by installing and updating it in the package storage directory.
+    """
+    __server = None  # type: Optional[ServerNpmResource]
+
+    server_directory = ''
+    """
+    The path to the server source directory, relative to the root directory of this package.
+
+    :required: Yes
+    """
+
+    server_binary_path = ''
+    """
+    The path to the server "binary", relative to plugin's storage directory.
+
+    :required: Yes
+    """
+
+    skip_npm_install = False
+    """
+    Whether to skip the step that runs "npm install" in case the server doesn't need any dependencies.
+
+    :required: No
+    """
+
+    # --- NpmClientHandler handlers -----------------------------------------------------------------------------------
+
+    @classmethod
+    def minimum_node_version(cls) -> Tuple[int, int, int]:
+        """
+        .. deprecated:: 2.1.0
+           Use :meth:`required_node_version` instead.
+
+        The minimum Node version required for this plugin.
+
+        :returns: The semantic version tuple with the minimum required version. Defaults to :code:`(8, 0, 0)`.
+        """
+        return (8, 0, 0)
+
+    @classmethod
+    def required_node_version(cls) -> str:
+        """
+        The NPM semantic version (typically a range) specifying which version of Node is required for this plugin.
+
+        Examples:
+         - `16.1.1` - only allows a single version
+         - `16.x` - allows any build for major version 16
+         - `>=16` - allows version 16 and above
+         - `16 - 18` - allows any version between version 16 and 18 (inclusive). It's important to have spaces around
+           the dash in this case.
+
+        Also see more examples and a testing playground at https://semver.npmjs.com/ .
+
+        :returns: Required NPM semantic version. Defaults to :code:`0.0.0` which means "no restrictions".
+        """
+        return '0.0.0'
+
+    @classmethod
+    def get_additional_variables(cls) -> Dict[str, str]:
+        """
+        Overrides :meth:`GenericClientHandler.get_additional_variables`, providing additional variable for use in the
+        settings.
+
+        The additional variables are:
+
+        - `${node_bin}` - holds the binary path of the currently used Node.js runtime. This can resolve to just `node`
+          when using the Node.js runtime from the PATH or to a full filesystem path if using the local Node.js runtime.
+        - `${server_directory_path}` - holds filesystem path to the server directory (only
+          when :meth:`GenericClientHandler.manages_server()` is `True`).
+
+        Remember to call the super class and merge the results if overriding.
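+
+        An illustrative override in a hypothetical subclass:
+
+        .. code::
+
+            @classmethod
+            def get_additional_variables(cls) -> Dict[str, str]:
+                variables = super().get_additional_variables()
+                variables.update({'extra_var': 'value'})
+                return variables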
+        """
+        variables = super().get_additional_variables()
+        variables.update({
+            'node_bin': cls._node_bin(),
+            'server_directory_path': cls._server_directory_path(),
+        })
+        return variables
+
+    @classmethod
+    def get_additional_paths(cls) -> List[str]:
+        node_bin = cls._node_bin()
+        if node_bin:
+            node_path = path.dirname(node_bin)
+            if node_path:
+                return [node_path]
+        return []
+
+    # --- GenericClientHandler handlers -------------------------------------------------------------------------------
+
+    @classmethod
+    def get_command(cls) -> List[str]:
+        return [cls._node_bin(), cls.binary_path()] + cls.get_binary_arguments()
+
+    @classmethod
+    def get_binary_arguments(cls) -> List[str]:
+        return ['--stdio']
+
+    @classmethod
+    def manages_server(cls) -> bool:
+        return True
+
+    @classmethod
+    def get_server(cls) -> Optional[ServerResourceInterface]:
+        if not cls.__server:
+            cls.__server = ServerNpmResource.create({
+                'package_name': cls.package_name,
+                'server_directory': cls.server_directory,
+                'server_binary_path': cls.server_binary_path,
+                'package_storage': cls.package_storage(),
+                'minimum_node_version': cls.minimum_node_version(),
+                'required_node_version': cls.required_node_version(),
+                'storage_path': cls.storage_path(),
+                'skip_npm_install': cls.skip_npm_install,
+            })
+        return cls.__server
+
+    @classmethod
+    def cleanup(cls) -> None:
+        cls.__server = None
+        super().cleanup()
+
+    @classmethod
+    def can_start(cls, window: sublime.Window, initiating_view: sublime.View,
+                  workspace_folders: List[WorkspaceFolder], configuration: ClientConfig) -> Optional[str]:
+        reason = super().can_start(window, initiating_view, workspace_folders, configuration)
+        if reason:
+            return reason
+        node_env = cls._node_env()
+        if node_env:
+            configuration.env.update(node_env)
+        return None
+
+    # --- Internal ----------------------------------------------------------------------------------------------------
+
+    @classmethod
+    def _server_directory_path(cls) -> str:
+        if cls.__server:
+            return cls.__server.server_directory_path
+        return ''
+
+    @classmethod
+    def _node_bin(cls) -> str:
+        if cls.__server:
+            return cls.__server.node_bin
+        return ''
+
+    @classmethod
+    def _node_env(cls) -> Optional[Dict[str, str]]:
+        if cls.__server:
+            return cls.__server.node_env
+        return None
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/pip_client_handler.py b/SublimeText/lsp_utils/st3/lsp_utils/pip_client_handler.py
new file mode 100644
index 00000000..391037eb
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/pip_client_handler.py
@@ -0,0 +1,77 @@
+from .generic_client_handler import GenericClientHandler
+from .server_pip_resource import ServerPipResource
+from .server_resource_interface import ServerResourceInterface
+from LSP.plugin.core.typing import List, Optional
+from os import path
+import shutil
+import sublime
+
+__all__ = ['PipClientHandler']
+
+
+class PipClientHandler(GenericClientHandler):
+    """
+    An implementation of :class:`GenericClientHandler` for handling pip-based LSP plugins.
+
+    Automatically manages a pip-based server by installing and updating dependencies based on provided
+    `requirements.txt` file.
+    """
+    __server = None  # type: Optional[ServerPipResource]
+
+    requirements_txt_path = ''
+    """
+    The path to the `requirements.txt` file containing a list of dependencies required by the server.
+
+    If the package `LSP-foo` has a `requirements.txt` file at the root then the path will be just `requirements.txt`.
+
+    The file format is `dependency_name==dependency_version` or just a direct path to the dependency (for example, to
+    a GitHub repo). For example:
+
+    .. code::
+
+        pyls==0.1.2
+        colorama==1.2.2
+        git+https://github.com/tomv564/pyls-mypy.git
+
+    :required: Yes
+    """
+
+    server_filename = ''
+    """
+    The file name of the binary used to start the server.
+
+    :required: Yes
+    """
+
+    @classmethod
+    def get_python_binary(cls) -> str:
+        """
+        Returns a binary name or a full path to the Python interpreter used to create environment for the server.
+
+        The default implementation returns `python` on Windows and `python3` on other platforms. When only the binary
+        name is specified, it is expected to be found on the PATH.
+        """
+        return 'python' if sublime.platform() == 'windows' else 'python3'
+
+    # --- GenericClientHandler handlers -------------------------------------------------------------------------------
+
+    @classmethod
+    def manages_server(cls) -> bool:
+        return True
+
+    @classmethod
+    def get_server(cls) -> Optional[ServerResourceInterface]:
+        if not cls.__server:
+            python_binary = cls.get_python_binary()
+            if not shutil.which(python_binary):
+                raise Exception('Python binary "{}" not found!'.format(python_binary))
+            cls.__server = ServerPipResource(
+                cls.storage_path(), cls.package_name, cls.requirements_txt_path, cls.server_filename, python_binary)
+        return cls.__server
+
+    @classmethod
+    def get_additional_paths(cls) -> List[str]:
+        server = cls.get_server()
+        if server:
+            return [path.dirname(server.binary_path)]
+        return []
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/server_npm_resource.py b/SublimeText/lsp_utils/st3/lsp_utils/server_npm_resource.py
new file mode 100644
index 00000000..ea1ea0c4
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/server_npm_resource.py
@@ -0,0 +1,140 @@
+from .helpers import rmtree_ex
+from .helpers import SemanticVersion
+from .node_runtime import NodeRuntime
+from .server_resource_interface import ServerResourceInterface
+from .server_resource_interface import ServerStatus
+from hashlib import md5
+from LSP.plugin.core.typing import Dict, Optional, TypedDict, Union
+from os import makedirs
+from os import path
+from os import remove
+from os import walk
+from sublime_lib import ResourcePath
+
+__all__ = ['ServerNpmResource']
+
+ServerNpmResourceCreateOptions = TypedDict('ServerNpmResourceCreateOptions', {
+    'package_name': str,
+    'server_directory': str,
+    'server_binary_path': str,
+    'package_storage': str,
+    'storage_path': str,
+    'minimum_node_version': SemanticVersion,
+    'required_node_version': str,
+    'skip_npm_install': bool,
+})
+
+
+class ServerNpmResource(ServerResourceInterface):
+    """
+    An implementation of :class:`lsp_utils.ServerResourceInterface` implementing server management for
+    node-based servers. Handles installation and updates of the server in the package storage.
+    """
+
+    @classmethod
+    def create(cls, options: ServerNpmResourceCreateOptions) -> 'ServerNpmResource':
+        package_name = options['package_name']
+        server_directory = options['server_directory']
+        server_binary_path = options['server_binary_path']
+        package_storage = options['package_storage']
+        storage_path = options['storage_path']
+        minimum_node_version = options['minimum_node_version']
+        required_node_version = options['required_node_version']  # type: Union[str, SemanticVersion]
+        skip_npm_install = options['skip_npm_install']
+        # Fallback to "minimum_node_version" if "required_node_version" is 0.0.0 (not overridden).
+        if '0.0.0' == required_node_version:
+            required_node_version = minimum_node_version
+        node_runtime = NodeRuntime.get(package_name, storage_path, required_node_version)
+        if not node_runtime:
+            raise Exception('Failed resolving Node.js Runtime. Please see Sublime Text console for more information.')
+        return ServerNpmResource(
+            package_name, server_directory, server_binary_path, package_storage, node_runtime, skip_npm_install)
+
+    def __init__(self, package_name: str, server_directory: str, server_binary_path: str,
+                 package_storage: str, node_runtime: NodeRuntime, skip_npm_install: bool) -> None:
+        if not package_name or not server_directory or not server_binary_path or not node_runtime:
+            raise Exception('ServerNpmResource could not initialize due to wrong input')
+        self._status = ServerStatus.UNINITIALIZED
+        self._package_name = package_name
+        self._package_storage = package_storage
+        self._server_src = 'Packages/{}/{}/'.format(self._package_name, server_directory)
+        node_version = str(node_runtime.resolve_version())
+        self._node_version = node_version
+        self._server_dest = path.join(package_storage, node_version, server_directory)
+        self._binary_path = path.join(package_storage, node_version, server_binary_path)
+        self._installation_marker_file = path.join(package_storage, node_version, '.installing')
+        self._node_runtime = node_runtime
+        self._skip_npm_install = skip_npm_install
+
+    @property
+    def server_directory_path(self) -> str:
+        return self._server_dest
+
+    @property
+    def node_bin(self) -> str:
+        node_bin = self._node_runtime.node_bin()
+        if node_bin is None:
+            raise Exception('Failed to resolve path to the Node.js runtime')
+        return node_bin
+
+    @property
+    def node_env(self) -> Optional[Dict[str, str]]:
+        return self._node_runtime.node_env()
+
+    # --- ServerResourceInterface -------------------------------------------------------------------------------------
+
+    @property
+    def binary_path(self) -> str:
+        return self._binary_path
+
+    def get_status(self) -> int:
+        return self._status
+
+    def needs_installation(self) -> bool:
+        installed = False
+        if self._skip_npm_install or path.isdir(path.join(self._server_dest, 'node_modules')):
+            # Server already installed. Check if version has changed or last installation did not complete.
+            src_package_json = ResourcePath(self._server_src, 'package.json')
+            if not src_package_json.exists():
+                raise Exception('Missing required "package.json" in {}'.format(self._server_src))
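+            # Compare the hash of the bundled package.json with the installed copy to detect server version changes.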
+            src_hash = md5(src_package_json.read_bytes()).hexdigest()
+            try:
+                with open(path.join(self._server_dest, 'package.json'), 'rb') as file:
+                    dst_hash = md5(file.read()).hexdigest()
+                if src_hash == dst_hash and not path.isfile(self._installation_marker_file):
+                    installed = True
+            except FileNotFoundError:
+                # Needs to be re-installed.
+                pass
+        if installed:
+            self._status = ServerStatus.READY
+            return False
+        return True
+
+    def install_or_update(self) -> None:
+        try:
+            self._cleanup_package_storage()
+            makedirs(path.dirname(self._installation_marker_file), exist_ok=True)
+            open(self._installation_marker_file, 'a').close()
+            if path.isdir(self._server_dest):
+                rmtree_ex(self._server_dest)
+            ResourcePath(self._server_src).copytree(self._server_dest, exist_ok=True)
+            if not self._skip_npm_install:
+                self._node_runtime.run_install(cwd=self._server_dest)
+            remove(self._installation_marker_file)
+        except Exception as error:
+            self._status = ServerStatus.ERROR
+            raise Exception('Error installing the server:\n{}'.format(error))
+        self._status = ServerStatus.READY
+
+    def _cleanup_package_storage(self) -> None:
+        """Clean up subdirectories of package storage that belong to other node versions."""
+        if not path.isdir(self._package_storage):
+            return
+        subdirectories = next(walk(self._package_storage))[1]
+        for directory in subdirectories:
+            if directory == self._node_version:
+                continue
+            node_storage_path = path.join(self._package_storage, directory)
+            print('[lsp_utils] Deleting outdated storage directory "{}"'.format(node_storage_path))
+            rmtree_ex(node_storage_path)
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/server_pip_resource.py b/SublimeText/lsp_utils/st3/lsp_utils/server_pip_resource.py
new file mode 100644
index 00000000..de150388
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/server_pip_resource.py
@@ -0,0 +1,109 @@
+from .helpers import rmtree_ex
+from .helpers import run_command_sync
+from .server_resource_interface import ServerResourceInterface
+from .server_resource_interface import ServerStatus
+from hashlib import md5
+from LSP.plugin.core.typing import Any, Optional
+from os import path
+from sublime_lib import ResourcePath
+import os
+import sublime
+
+__all__ = ['ServerPipResource']
+
+
+class ServerPipResource(ServerResourceInterface):
+    """
+    An implementation of :class:`lsp_utils.ServerResourceInterface` implementing server management for
+    pip-based servers. Handles installation and updates of the server in the package storage.
+
+    :param storage_path: The path to the package storage (pass :meth:`lsp_utils.GenericClientHandler.storage_path()`)
+    :param package_name: The package name (used as a directory name for storage)
+    :param requirements_path: The path to the `requirements.txt` file, relative to the package directory.
+           If the package `LSP-foo` has a `requirements.txt` file at the root then the path will be `requirements.txt`.
+    :param server_binary_filename: The name of the file used to start the server.
+    """
+
+    @classmethod
+    def file_extension(cls) -> str:
+        return '.exe' if sublime.platform() == 'windows' else ''
+
+    @classmethod
+    def run(cls, *args: Any, cwd: Optional[str] = None) -> str:
+        output, error = run_command_sync(list(args), cwd=cwd)
+        if error:
+            raise Exception(error)
+        return output
+
+    def __init__(self, storage_path: str, package_name: str, requirements_path: str,
+                 server_binary_filename: str, python_binary: str) -> None:
+        self._storage_path = storage_path
+        self._package_name = package_name
+        self._requirements_path_relative = requirements_path
+        self._requirements_path = 'Packages/{}/{}'.format(self._package_name, requirements_path)
+        self._server_binary_filename = server_binary_filename
+        self._python_binary = python_binary
+        self._status = ServerStatus.UNINITIALIZED
+
+    def basedir(self) -> str:
+        return path.join(self._storage_path, self._package_name)
+
+    def bindir(self) -> str:
+        bin_dir = 'Scripts' if sublime.platform() == 'windows' else 'bin'
+        return path.join(self.basedir(), bin_dir)
+
+    def server_binary(self) -> str:
+        return path.join(self.bindir(), self._server_binary_filename + self.file_extension())
+
+    def pip_binary(self) -> str:
+        return path.join(self.bindir(), 'pip' + self.file_extension())
+
+    def python_version(self) -> str:
+        return path.join(self.basedir(), 'python_version')
+
+    # --- ServerResourceInterface handlers ----------------------------------------------------------------------------
+
+    @property
+    def binary_path(self) -> str:
+        return self.server_binary()
+
+    def get_status(self) -> int:
+        return self._status
+
+    def needs_installation(self) -> bool:
+        if not path.exists(self.server_binary()) or not path.exists(self.pip_binary()):
+            return True
+        if not path.exists(self.python_version()):
+            return True
+        with open(self.python_version(), 'r') as f:
+            if f.readline().strip() != self.run(self._python_binary, '--version').strip():
+                return True
+        src_requirements_resource = ResourcePath(self._requirements_path)
+        if not src_requirements_resource.exists():
+            raise Exception('Missing required "requirements.txt" in {}'.format(self._requirements_path))
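+        # Compare the hash of the bundled requirements.txt with the copy saved during the last install to detect dependency changes.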
+        src_requirements_hash = md5(src_requirements_resource.read_bytes()).hexdigest()
+        try:
+            with open(path.join(self.basedir(), self._requirements_path_relative), 'rb') as file:
+                dst_requirements_hash = md5(file.read()).hexdigest()
+            if src_requirements_hash != dst_requirements_hash:
+                return True
+        except FileNotFoundError:
+            # Needs to be re-installed.
+            return True
+        self._status = ServerStatus.READY
+        return False
+
+    def install_or_update(self) -> None:
+        rmtree_ex(self.basedir(), ignore_errors=True)
+        try:
+            os.makedirs(self.basedir(), exist_ok=True)
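+            # Create a virtual environment for the server inside the shared package storage directory.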
+            self.run(self._python_binary, '-m', 'venv', self._package_name, cwd=self._storage_path)
+            dest_requirements_txt_path = path.join(self._storage_path, self._package_name, 'requirements.txt')
+            ResourcePath(self._requirements_path).copy(dest_requirements_txt_path)
+            self.run(self.pip_binary(), 'install', '-r', dest_requirements_txt_path, '--disable-pip-version-check')
+            with open(self.python_version(), 'w') as f:
+                f.write(self.run(self._python_binary, '--version'))
+        except Exception as error:
+            self._status = ServerStatus.ERROR
+            raise Exception('Error installing the server:\n{}'.format(error))
+        self._status = ServerStatus.READY
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/server_resource_interface.py b/SublimeText/lsp_utils/st3/lsp_utils/server_resource_interface.py
new file mode 100644
index 00000000..48990713
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/server_resource_interface.py
@@ -0,0 +1,66 @@
+from abc import ABCMeta
+from abc import abstractmethod
+from abc import abstractproperty
+
+__all__ = ['ServerStatus', 'ServerResourceInterface']
+
+
+class ServerStatus():
+    """
+    A :class:`ServerStatus` enum for use as a return value from :func:`ServerResourceInterface.get_status()`.
+    """
+
+    UNINITIALIZED = 1
+    """Initial status of the server."""
+    ERROR = 2
+    """Initiallation or update has failed."""
+    READY = 3
+    """Server is ready to provide resources."""
+
+
+class ServerResourceInterface(metaclass=ABCMeta):
+    """
+    An interface for implementing server resource handlers. Use this interface in plugins that manage their own
+    server binary (:func:`GenericClientHandler.manages_server` returns `True`).
+
+    After implementing this interface, return an instance of implemented class from
+    :meth:`GenericClientHandler.get_server()`.
+    """
+
+    @abstractmethod
+    def needs_installation(self) -> bool:
+        """
+        This is the place to check whether the binary needs an update, or whether it needs to be installed before
+        starting the language server.
+
+        :returns: `True` if the server needs to be installed or updated. This will result in calling
+                  :meth:`install_or_update()`.
+        """
+        ...
+
+    @abstractmethod
+    def install_or_update(self) -> None:
+        """
+        Do the actual update/installation of the server binary. Don't start extra threads to do the work as everything
+        is handled automatically.
+        """
+        ...
+
+    @abstractmethod
+    def get_status(self) -> int:
+        """
+        Determines the current status of the server. The state changes as the server is being installed, updated or
+        runs into an error while doing so. Initialize with :attr:`ServerStatus.UNINITIALIZED` and set it to either
+        :attr:`ServerStatus.ERROR` or :attr:`ServerStatus.READY`, depending on whether the server was installed
+        correctly or is already installed.
+
+        :returns: A number corresponding to the :class:`ServerStatus` class members.
+        """
+        ...
+
+    @abstractproperty
+    def binary_path(self) -> str:
+        """
+        Returns a filesystem path to the server binary.
+        """
+        ...
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/LICENSE b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/LICENSE
new file mode 100644
index 00000000..66aba18c
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) The python-semanticversion project
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met: 
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer. 
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution. 
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/README.rst b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/README.rst
new file mode 100644
index 00000000..c50163a4
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/README.rst
@@ -0,0 +1,279 @@
+Introduction
+============
+
+This small python library provides a few tools to handle `SemVer`_ in Python.
+It follows strictly the 2.0.0 version of the SemVer scheme.
+
+.. image:: https://github.com/rbarrois/python-semanticversion/actions/workflows/test.yml/badge.svg
+    :target: https://github.com/rbarrois/python-semanticversion/actions/workflows/test.yml
+
+.. image:: https://img.shields.io/pypi/v/semantic_version.svg
+    :target: https://python-semanticversion.readthedocs.io/en/latest/changelog.html
+    :alt: Latest Version
+
+.. image:: https://img.shields.io/pypi/pyversions/semantic_version.svg
+    :target: https://pypi.python.org/pypi/semantic_version/
+    :alt: Supported Python versions
+
+.. image:: https://img.shields.io/pypi/wheel/semantic_version.svg
+    :target: https://pypi.python.org/pypi/semantic_version/
+    :alt: Wheel status
+
+.. image:: https://img.shields.io/pypi/l/semantic_version.svg
+    :target: https://pypi.python.org/pypi/semantic_version/
+    :alt: License
+
+Links
+-----
+
+- Package on `PyPI`_: https://pypi.org/project/semantic-version/
+- Doc on ReadTheDocs: https://python-semanticversion.readthedocs.io/
+- Source on GitHub: http://github.com/rbarrois/python-semanticversion/
+- Build on Github Actions: https://github.com/rbarrois/python-semanticversion/actions
+- Semantic Version specification: `SemVer`_
+
+
+Getting started
+===============
+
+Install the package from `PyPI`_, using pip:
+
+.. code-block:: sh
+
+    pip install semantic-version
+
+Or from GitHub:
+
+.. code-block:: sh
+
+    $ git clone git://github.com/rbarrois/python-semanticversion.git
+
+
+Import it in your code:
+
+
+.. code-block:: python
+
+    import semantic_version
+
+
+This module provides classes to handle semantic versions:
+
+- ``Version`` represents a version number (``0.1.1-alpha+build.2012-05-15``)
+- ``BaseSpec``-derived classes represent requirement specifications (``>=0.1.1,<0.3.0``):
+
+  - ``SimpleSpec`` describes a natural description syntax
+  - ``NpmSpec`` is used for NPM-style range descriptions.
+
+Versions
+--------
+
+Defining a ``Version`` is quite simple:
+
+
+.. code-block:: pycon
+
+    >>> import semantic_version
+    >>> v = semantic_version.Version('0.1.1')
+    >>> v.major
+    0
+    >>> v.minor
+    1
+    >>> v.patch
+    1
+    >>> v.prerelease
+    []
+    >>> v.build
+    []
+    >>> list(v)
+    [0, 1, 1, [], []]
+
+If the provided version string is invalid, a ``ValueError`` will be raised:
+
+.. code-block:: pycon
+
+    >>> semantic_version.Version('0.1')
+    Traceback (most recent call last):
+      File "", line 1, in 
+      File "/Users/rbarrois/dev/semantic_version/src/semantic_version/base.py", line 64, in __init__
+        major, minor, patch, prerelease, build = self.parse(version_string, partial)
+      File "/Users/rbarrois/dev/semantic_version/src/semantic_version/base.py", line 86, in parse
+        raise ValueError('Invalid version string: %r' % version_string)
+    ValueError: Invalid version string: '0.1'
+
+
+One may also create a ``Version`` with named components:
+
+.. code-block:: pycon
+
+    >>> semantic_version.Version(major=0, minor=1, patch=2)
+    Version('0.1.2')
+
+In that case, ``major``, ``minor`` and ``patch`` are mandatory, and must be integers.
+``prerelease`` and ``build``, if provided, must be tuples of strings:
+
+.. code-block:: pycon
+
+    >>> semantic_version.Version(major=0, minor=1, patch=2, prerelease=('alpha', '2'))
+    Version('0.1.2-alpha.2')
+
+
+Some user-supplied input might not match the semantic version scheme.
+For such cases, the ``Version.coerce`` method will try to convert any
+version-like string into a valid semver version:
+
+.. code-block:: pycon
+
+    >>> Version.coerce('0')
+    Version('0.0.0')
+    >>> Version.coerce('0.1.2.3.4')
+    Version('0.1.2+3.4')
+    >>> Version.coerce('0.1.2a3')
+    Version('0.1.2-a3')
+
+Working with versions
+"""""""""""""""""""""
+
+Obviously, versions can be compared:
+
+
+.. code-block:: pycon
+
+    >>> semantic_version.Version('0.1.1') < semantic_version.Version('0.1.2')
+    True
+    >>> semantic_version.Version('0.1.1') > semantic_version.Version('0.1.1-alpha')
+    True
+    >>> semantic_version.Version('0.1.1') <= semantic_version.Version('0.1.1-alpha')
+    False
+
+You can also get a new version that represents a bump in one of the version levels:
+
+.. code-block:: pycon
+
+    >>> v = semantic_version.Version('0.1.1+build')
+    >>> new_v = v.next_major()
+    >>> str(new_v)
+    '1.0.0'
+    >>> v = semantic_version.Version('1.1.1+build')
+    >>> new_v = v.next_minor()
+    >>> str(new_v)
+    '1.2.0'
+    >>> v = semantic_version.Version('1.1.1+build')
+    >>> new_v = v.next_patch()
+    >>> str(new_v)
+    '1.1.2'
+
+
+
+Requirement specification
+-------------------------
+
+python-semanticversion provides a couple of ways to describe a range of accepted
+versions:
+
+- The ``SimpleSpec`` class provides a simple, easily understood scheme --
+  somewhat inspired from PyPI range notations;
+- The ``NpmSpec`` class supports the whole NPM range specification scheme:
+
+  .. code-block:: pycon
+
+      >>> Version('0.1.2') in NpmSpec('0.1.0-alpha.2 .. 0.2.4')
+      True
+      >>> Version('0.1.2') in NpmSpec('>=0.1.1 <0.1.3 || 2.x')
+      True
+      >>> Version('2.3.4') in NpmSpec('>=0.1.1 <0.1.3 || 2.x')
+      True
+
+The ``SimpleSpec`` scheme
+"""""""""""""""""""""""""
+
+Basic usage is simply a comparator and a base version:
+
+.. code-block:: pycon
+
+    >>> s = SimpleSpec('>=0.1.1')  # At least 0.1.1
+    >>> s.match(Version('0.1.1'))
+    True
+    >>> s.match(Version('0.1.1-alpha1'))  # pre-release doesn't satisfy version spec
+    False
+    >>> s.match(Version('0.1.0'))
+    False
+
+Combining specifications can be expressed as follows:
+
+  .. code-block:: pycon
+
+      >>> SimpleSpec('>=0.1.1,<0.3.0')
+
+Simpler test syntax is also available using the ``in`` keyword:
+
+.. code-block:: pycon
+
+    >>> s = SimpleSpec('==0.1.1')
+    >>> Version('0.1.1+git7ccc72') in s  # build variants are equivalent to full versions
+    True
+    >>> Version('0.1.1-alpha1') in s     # pre-release variants don't match the full version.
+    False
+    >>> Version('0.1.2') in s
+    False
+
+
+Refer to the full documentation at
+https://python-semanticversion.readthedocs.io/en/latest/ for more details on the
+``SimpleSpec`` scheme.
+
+
+
+Using a specification
+"""""""""""""""""""""
+
+The ``SimpleSpec.filter`` method filters an iterable of ``Version``:
+
+.. code-block:: pycon
+
+    >>> s = SimpleSpec('>=0.1.0,<0.4.0')
+    >>> versions = (Version('0.%d.0' % i) for i in range(6))
+    >>> for v in s.filter(versions):
+    ...     print(v)
+    0.1.0
+    0.2.0
+    0.3.0
+
+It is also possible to select the 'best' version from such iterables:
+
+
+.. code-block:: pycon
+
+    >>> s = SimpleSpec('>=0.1.0,<0.4.0')
+    >>> versions = (Version('0.%d.0' % i) for i in range(6))
+    >>> s.select(versions)
+    Version('0.3.0')
+
+
+
+Contributing
+============
+
+In order to contribute to the source code:
+
+- Open an issue on `GitHub`_: https://github.com/rbarrois/python-semanticversion/issues
+- Fork the `repository <https://github.com/rbarrois/python-semanticversion>`_
+  and submit a pull request on `GitHub`_
+- Or send me a patch (mailto:raphael.barrois+semver@polytechnique.org)
+
+When submitting patches or pull requests, you should respect the following rules:
+
+- Coding conventions are based on :pep:`8`
+- The whole test suite must pass after adding the changes
+- The test coverage for a new feature must be 100%
+- New features and methods should be documented in the ``reference`` section
+  and included in the ``changelog``
+- Include your name in the ``contributors`` section
+
+.. note:: All files should contain the following header::
+
+          # -*- encoding: utf-8 -*-
+          # Copyright (c) The python-semanticversion project
+
+.. _SemVer: http://semver.org/
+.. _PyPI: http://pypi.python.org/
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/__init__.py b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/__init__.py
new file mode 100644
index 00000000..c50f3050
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/__init__.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) The python-semanticversion project
+# This code is distributed under the two-clause BSD License.
+
+
+from .base import compare, match, validate, SimpleSpec, NpmSpec, Spec, SpecItem, Version
+
+
+__author__ = "Raphaël Barrois <raphael.barrois+semver@polytechnique.org>"
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/base.py b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/base.py
new file mode 100644
index 00000000..777c27ac
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/third_party/semantic_version/base.py
@@ -0,0 +1,1449 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) The python-semanticversion project
+# This code is distributed under the two-clause BSD License.
+
+import functools
+import re
+import warnings
+
+
+def _has_leading_zero(value):
+    return (value
+            and value[0] == '0'
+            and value.isdigit()
+            and value != '0')
+
+
+class MaxIdentifier(object):
+    __slots__ = []
+
+    def __repr__(self):
+        return 'MaxIdentifier()'
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+
+@functools.total_ordering
+class NumericIdentifier(object):
+    __slots__ = ['value']
+
+    def __init__(self, value):
+        self.value = int(value)
+
+    def __repr__(self):
+        return 'NumericIdentifier(%r)' % self.value
+
+    def __eq__(self, other):
+        if isinstance(other, NumericIdentifier):
+            return self.value == other.value
+        return NotImplemented
+
+    def __lt__(self, other):
+        if isinstance(other, MaxIdentifier):
+            return True
+        elif isinstance(other, AlphaIdentifier):
+            return True
+        elif isinstance(other, NumericIdentifier):
+            return self.value < other.value
+        else:
+            return NotImplemented
+
+
+@functools.total_ordering
+class AlphaIdentifier(object):
+    __slots__ = ['value']
+
+    def __init__(self, value):
+        self.value = value.encode('ascii')
+
+    def __repr__(self):
+        return 'AlphaIdentifier(%r)' % self.value
+
+    def __eq__(self, other):
+        if isinstance(other, AlphaIdentifier):
+            return self.value == other.value
+        return NotImplemented
+
+    def __lt__(self, other):
+        if isinstance(other, MaxIdentifier):
+            return True
+        elif isinstance(other, NumericIdentifier):
+            return False
+        elif isinstance(other, AlphaIdentifier):
+            return self.value < other.value
+        else:
+            return NotImplemented
+
+
+class Version(object):
+
+    version_re = re.compile(r'^(\d+)\.(\d+)\.(\d+)(?:-([0-9a-zA-Z.-]+))?(?:\+([0-9a-zA-Z.-]+))?$')
+    partial_version_re = re.compile(r'^(\d+)(?:\.(\d+)(?:\.(\d+))?)?(?:-([0-9a-zA-Z.-]*))?(?:\+([0-9a-zA-Z.-]*))?$')
+
+    def __init__(
+            self,
+            version_string=None,
+            major=None,
+            minor=None,
+            patch=None,
+            prerelease=None,
+            build=None,
+            partial=False):
+        if partial:
+            warnings.warn(
+                "Partial versions will be removed in 3.0; use SimpleSpec('1.x.x') instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        has_text = version_string is not None
+        has_parts = not (major is minor is patch is prerelease is build is None)
+        if not has_text ^ has_parts:
+            raise ValueError("Call either Version('1.2.3') or Version(major=1, ...).")
+
+        if has_text:
+            major, minor, patch, prerelease, build = self.parse(version_string, partial)
+        else:
+            # Convenience: allow to omit prerelease/build.
+            prerelease = tuple(prerelease or ())
+            if not partial:
+                build = tuple(build or ())
+            self._validate_kwargs(major, minor, patch, prerelease, build, partial)
+
+        self.major = major
+        self.minor = minor
+        self.patch = patch
+        self.prerelease = prerelease
+        self.build = build
+
+        self.partial = partial
+
+        # Cached precedence keys
+        # _cmp_precedence_key is used for semver-precedence comparison
+        self._cmp_precedence_key = self._build_precedence_key(with_build=False)
+        # _sort_precedence_key is used for self.precedence_key, esp. for sorted(...)
+        self._sort_precedence_key = self._build_precedence_key(with_build=True)
+
+    @classmethod
+    def _coerce(cls, value, allow_none=False):
+        if value is None and allow_none:
+            return value
+        return int(value)
+
+    def next_major(self):
+        if self.prerelease and self.minor == self.patch == 0:
+            return Version(
+                major=self.major,
+                minor=0,
+                patch=0,
+                partial=self.partial,
+            )
+        else:
+            return Version(
+                major=self.major + 1,
+                minor=0,
+                patch=0,
+                partial=self.partial,
+            )
+
+    def next_minor(self):
+        if self.prerelease and self.patch == 0:
+            return Version(
+                major=self.major,
+                minor=self.minor,
+                patch=0,
+                partial=self.partial,
+            )
+        else:
+            return Version(
+                major=self.major,
+                minor=self.minor + 1,
+                patch=0,
+                partial=self.partial,
+            )
+
+    def next_patch(self):
+        if self.prerelease:
+            return Version(
+                major=self.major,
+                minor=self.minor,
+                patch=self.patch,
+                partial=self.partial,
+            )
+        else:
+            return Version(
+                major=self.major,
+                minor=self.minor,
+                patch=self.patch + 1,
+                partial=self.partial,
+            )
+
+    def truncate(self, level='patch'):
+        """Return a new Version object, truncated up to the selected level."""
+        if level == 'build':
+            return self
+        elif level == 'prerelease':
+            return Version(
+                major=self.major,
+                minor=self.minor,
+                patch=self.patch,
+                prerelease=self.prerelease,
+                partial=self.partial,
+            )
+        elif level == 'patch':
+            return Version(
+                major=self.major,
+                minor=self.minor,
+                patch=self.patch,
+                partial=self.partial,
+            )
+        elif level == 'minor':
+            return Version(
+                major=self.major,
+                minor=self.minor,
+                patch=None if self.partial else 0,
+                partial=self.partial,
+            )
+        elif level == 'major':
+            return Version(
+                major=self.major,
+                minor=None if self.partial else 0,
+                patch=None if self.partial else 0,
+                partial=self.partial,
+            )
+        else:
+            raise ValueError("Invalid truncation level `%s`." % level)
+
+    @classmethod
+    def coerce(cls, version_string, partial=False):
+        """Coerce an arbitrary version string into a semver-compatible one.
+
+        The rule is:
+        - If not enough components, fill minor/patch with zeroes; unless
+          partial=True
+        - If more than 3 dot-separated components, extra components are "build"
+          data. If some "build" data already appeared, append it to the
+          extra components
+
+        Examples:
+            >>> Version.coerce('0.1')
+            Version(0, 1, 0)
+            >>> Version.coerce('0.1.2.3')
+            Version(0, 1, 2, (), ('3',))
+            >>> Version.coerce('0.1.2.3+4')
+            Version(0, 1, 2, (), ('3', '4'))
+            >>> Version.coerce('0.1+2-3+4_5')
+            Version(0, 1, 0, (), ('2-3', '4-5'))
+        """
+        base_re = re.compile(r'^\d+(?:\.\d+(?:\.\d+)?)?')
+
+        match = base_re.match(version_string)
+        if not match:
+            raise ValueError(
+                "Version string lacks a numerical component: %r"
+                % version_string
+            )
+
+        version = version_string[:match.end()]
+        if not partial:
+            # We need a not-partial version.
+            while version.count('.') < 2:
+                version += '.0'
+
+        # Strip leading zeros in components
+        # Version is of the form nn, nn.pp or nn.pp.qq
+        version = '.'.join(
+            # If the part was '0', we end up with an empty string.
+            part.lstrip('0') or '0'
+            for part in version.split('.')
+        )
+
+        if match.end() == len(version_string):
+            return Version(version, partial=partial)
+
+        rest = version_string[match.end():]
+
+        # Cleanup the 'rest'
+        rest = re.sub(r'[^a-zA-Z0-9+.-]', '-', rest)
+
+        if rest[0] == '+':
+            # A 'build' component
+            prerelease = ''
+            build = rest[1:]
+        elif rest[0] == '.':
+            # An extra version component, probably 'build'
+            prerelease = ''
+            build = rest[1:]
+        elif rest[0] == '-':
+            rest = rest[1:]
+            if '+' in rest:
+                prerelease, build = rest.split('+', 1)
+            else:
+                prerelease, build = rest, ''
+        elif '+' in rest:
+            prerelease, build = rest.split('+', 1)
+        else:
+            prerelease, build = rest, ''
+
+        build = build.replace('+', '.')
+
+        if prerelease:
+            version = '%s-%s' % (version, prerelease)
+        if build:
+            version = '%s+%s' % (version, build)
+
+        return cls(version, partial=partial)
+
+    @classmethod
+    def parse(cls, version_string, partial=False, coerce=False):
+        """Parse a version string into a tuple of components:
+           (major, minor, patch, prerelease, build).
+
+        Args:
+            version_string (str), the version string to parse
+            partial (bool), whether to accept incomplete input
+            coerce (bool), whether to try to map the passed in string into a
+                valid Version.
+        """
+        if not version_string:
+            raise ValueError('Invalid empty version string: %r' % version_string)
+
+        if partial:
+            version_re = cls.partial_version_re
+        else:
+            version_re = cls.version_re
+
+        match = version_re.match(version_string)
+        if not match:
+            raise ValueError('Invalid version string: %r' % version_string)
+
+        major, minor, patch, prerelease, build = match.groups()
+
+        if _has_leading_zero(major):
+            raise ValueError("Invalid leading zero in major: %r" % version_string)
+        if _has_leading_zero(minor):
+            raise ValueError("Invalid leading zero in minor: %r" % version_string)
+        if _has_leading_zero(patch):
+            raise ValueError("Invalid leading zero in patch: %r" % version_string)
+
+        major = int(major)
+        minor = cls._coerce(minor, partial)
+        patch = cls._coerce(patch, partial)
+
+        if prerelease is None:
+            if partial and (build is None):
+                # No build info, strip here
+                return (major, minor, patch, None, None)
+            else:
+                prerelease = ()
+        elif prerelease == '':
+            prerelease = ()
+        else:
+            prerelease = tuple(prerelease.split('.'))
+            cls._validate_identifiers(prerelease, allow_leading_zeroes=False)
+
+        if build is None:
+            if partial:
+                build = None
+            else:
+                build = ()
+        elif build == '':
+            build = ()
+        else:
+            build = tuple(build.split('.'))
+            cls._validate_identifiers(build, allow_leading_zeroes=True)
+
+        return (major, minor, patch, prerelease, build)
+
+    @classmethod
+    def _validate_identifiers(cls, identifiers, allow_leading_zeroes=False):
+        for item in identifiers:
+            if not item:
+                raise ValueError(
+                    "Invalid empty identifier %r in %r"
+                    % (item, '.'.join(identifiers))
+                )
+
+            if item[0] == '0' and item.isdigit() and item != '0' and not allow_leading_zeroes:
+                raise ValueError("Invalid leading zero in identifier %r" % item)
+
+    @classmethod
+    def _validate_kwargs(cls, major, minor, patch, prerelease, build, partial):
+        if (
+                major != int(major)
+                or minor != cls._coerce(minor, partial)
+                or patch != cls._coerce(patch, partial)
+                or prerelease is None and not partial
+                or build is None and not partial
+        ):
+            raise ValueError(
+                "Invalid kwargs to Version(major=%r, minor=%r, patch=%r, "
+                "prerelease=%r, build=%r, partial=%r" % (
+                    major, minor, patch, prerelease, build, partial
+                ))
+        if prerelease is not None:
+            cls._validate_identifiers(prerelease, allow_leading_zeroes=False)
+        if build is not None:
+            cls._validate_identifiers(build, allow_leading_zeroes=True)
+
+    def __iter__(self):
+        return iter((self.major, self.minor, self.patch, self.prerelease, self.build))
+
+    def __str__(self):
+        version = '%d' % self.major
+        if self.minor is not None:
+            version = '%s.%d' % (version, self.minor)
+        if self.patch is not None:
+            version = '%s.%d' % (version, self.patch)
+
+        if self.prerelease or (self.partial and self.prerelease == () and self.build is None):
+            version = '%s-%s' % (version, '.'.join(self.prerelease))
+        if self.build or (self.partial and self.build == ()):
+            version = '%s+%s' % (version, '.'.join(self.build))
+        return version
+
+    def __repr__(self):
+        return '%s(%r%s)' % (
+            self.__class__.__name__,
+            str(self),
+            ', partial=True' if self.partial else '',
+        )
+
+    def __hash__(self):
+        # We don't include 'partial', since this is strictly equivalent to having
+        # at least a field being `None`.
+        return hash((self.major, self.minor, self.patch, self.prerelease, self.build))
+
+    def _build_precedence_key(self, with_build=False):
+        """Build a precedence key.
+
+        The "build" component should only be used when sorting an iterable
+        of versions.
+        """
+        if self.prerelease:
+            prerelease_key = tuple(
+                NumericIdentifier(part) if part.isdigit() else AlphaIdentifier(part)
+                for part in self.prerelease
+            )
+        else:
+            prerelease_key = (
+                MaxIdentifier(),
+            )
+
+        if not with_build:
+            return (
+                self.major,
+                self.minor,
+                self.patch,
+                prerelease_key,
+            )
+
+        build_key = tuple(
+            NumericIdentifier(part) if part.isdigit() else AlphaIdentifier(part)
+            for part in self.build or ()
+        )
+
+        return (
+            self.major,
+            self.minor,
+            self.patch,
+            prerelease_key,
+            build_key,
+        )
+
+    @property
+    def precedence_key(self):
+        return self._sort_precedence_key
+
+    def __cmp__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        if self < other:
+            return -1
+        elif self > other:
+            return 1
+        elif self == other:
+            return 0
+        else:
+            return NotImplemented
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return (
+            self.major == other.major
+            and self.minor == other.minor
+            and self.patch == other.patch
+            and (self.prerelease or ()) == (other.prerelease or ())
+            and (self.build or ()) == (other.build or ())
+        )
+
+    def __ne__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return tuple(self) != tuple(other)
+
+    def __lt__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return self._cmp_precedence_key < other._cmp_precedence_key
+
+    def __le__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return self._cmp_precedence_key <= other._cmp_precedence_key
+
+    def __gt__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return self._cmp_precedence_key > other._cmp_precedence_key
+
+    def __ge__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        return self._cmp_precedence_key >= other._cmp_precedence_key
+
+
+class SpecItem(object):
+    """A requirement specification."""
+
+    KIND_ANY = '*'
+    KIND_LT = '<'
+    KIND_LTE = '<='
+    KIND_EQUAL = '=='
+    KIND_SHORTEQ = '='
+    KIND_EMPTY = ''
+    KIND_GTE = '>='
+    KIND_GT = '>'
+    KIND_NEQ = '!='
+    KIND_CARET = '^'
+    KIND_TILDE = '~'
+    KIND_COMPATIBLE = '~='
+
+    # Map a kind alias to its full version
+    KIND_ALIASES = {
+        KIND_SHORTEQ: KIND_EQUAL,
+        KIND_EMPTY: KIND_EQUAL,
+    }
+
+    re_spec = re.compile(r'^(<|<=||=|==|>=|>|!=|\^|~|~=)(\d.*)$')
+
+    def __init__(self, requirement_string, _warn=True):
+        if _warn:
+            warnings.warn(
+                "The `SpecItem` class will be removed in 3.0.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        kind, spec = self.parse(requirement_string)
+        self.kind = kind
+        self.spec = spec
+        self._clause = Spec(requirement_string).clause
+
+    @classmethod
+    def parse(cls, requirement_string):
+        if not requirement_string:
+            raise ValueError("Invalid empty requirement specification: %r" % requirement_string)
+
+        # Special case: the 'any' version spec.
+        if requirement_string == '*':
+            return (cls.KIND_ANY, '')
+
+        match = cls.re_spec.match(requirement_string)
+        if not match:
+            raise ValueError("Invalid requirement specification: %r" % requirement_string)
+
+        kind, version = match.groups()
+        if kind in cls.KIND_ALIASES:
+            kind = cls.KIND_ALIASES[kind]
+
+        spec = Version(version, partial=True)
+        if spec.build is not None and kind not in (cls.KIND_EQUAL, cls.KIND_NEQ):
+            raise ValueError(
+                "Invalid requirement specification %r: build numbers have no ordering."
+                % requirement_string
+            )
+        return (kind, spec)
+
+    @classmethod
+    def from_matcher(cls, matcher):
+        if matcher == Always():
+            return cls('*', _warn=False)
+        elif matcher == Never():
+            return cls('<0.0.0-', _warn=False)
+        elif isinstance(matcher, Range):
+            return cls('%s%s' % (matcher.operator, matcher.target), _warn=False)
+
+    def match(self, version):
+        return self._clause.match(version)
+
+    def __str__(self):
+        return '%s%s' % (self.kind, self.spec)
+
+    def __repr__(self):
+        return '<SpecItem: %s %r>' % (self.kind, self.spec)
+
+    def __eq__(self, other):
+        if not isinstance(other, SpecItem):
+            return NotImplemented
+        return self.kind == other.kind and self.spec == other.spec
+
+    def __hash__(self):
+        return hash((self.kind, self.spec))
+
+
+def compare(v1, v2):
+    return Version(v1).__cmp__(Version(v2))
+
+
+def match(spec, version):
+    return Spec(spec).match(Version(version))
+
+
+def validate(version_string):
+    """Validates a version string againt the SemVer specification."""
+    try:
+        Version.parse(version_string)
+        return True
+    except ValueError:
+        return False
+
+
+DEFAULT_SYNTAX = 'simple'
+
+
+class BaseSpec(object):
+    """A specification of compatible versions.
+
+    Usage:
+    >>> Spec('>=1.0.0', syntax='npm')
+
+    A version matches a specification if it matches any
+    of the clauses of that specification.
+
+    Internally, a Spec is AnyOf(
+        AllOf(Matcher, Matcher, Matcher),
+        AllOf(...),
+    )
+    """
+    SYNTAXES = {}
+
+    @classmethod
+    def register_syntax(cls, subclass):
+        syntax = subclass.SYNTAX
+        if syntax is None:
+            raise ValueError("A Spec needs its SYNTAX field to be set.")
+        elif syntax in cls.SYNTAXES:
+            raise ValueError(
+                "Duplicate syntax for %s: %r, %r"
+                % (syntax, cls.SYNTAXES[syntax], subclass)
+            )
+        cls.SYNTAXES[syntax] = subclass
+        return subclass
+
+    def __init__(self, expression):
+        super(BaseSpec, self).__init__()
+        self.expression = expression
+        self.clause = self._parse_to_clause(expression)
+
+    @classmethod
+    def parse(cls, expression, syntax=DEFAULT_SYNTAX):
+        """Convert a syntax-specific expression into a BaseSpec instance."""
+        return cls.SYNTAXES[syntax](expression)
+
+    @classmethod
+    def _parse_to_clause(cls, expression):
+        """Converts an expression to a clause."""
+        raise NotImplementedError()
+
+    def filter(self, versions):
+        """Filter an iterable of versions satisfying the Spec."""
+        for version in versions:
+            if self.match(version):
+                yield version
+
+    def match(self, version):
+        """Check whether a Version satisfies the Spec."""
+        return self.clause.match(version)
+
+    def select(self, versions):
+        """Select the best compatible version among an iterable of options."""
+        options = list(self.filter(versions))
+        if options:
+            return max(options)
+        return None
+
+    def __contains__(self, version):
+        """Whether `version in self`."""
+        if isinstance(version, Version):
+            return self.match(version)
+        return False
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self.clause == other.clause
+
+    def __hash__(self):
+        return hash(self.clause)
+
+    def __str__(self):
+        return self.expression
+
+    def __repr__(self):
+        return '<%s: %r>' % (self.__class__.__name__, self.expression)
+
+
+class Clause(object):
+    __slots__ = []
+
+    def match(self, version):
+        raise NotImplementedError()
+
+    def __and__(self, other):
+        raise NotImplementedError()
+
+    def __or__(self, other):
+        raise NotImplementedError()
+
+    def __eq__(self, other):
+        raise NotImplementedError()
+
+    def prettyprint(self, indent='\t'):
+        """Pretty-print the clause.
+        """
+        return '\n'.join(self._pretty()).replace('\t', indent)
+
+    def _pretty(self):
+        """Actual pretty-printing logic.
+
+        Yields:
+            A list of string. Indentation is performed with \t.
+        """
+        yield repr(self)
+
+    def __ne__(self, other):
+        return not self == other
+
+    def simplify(self):
+        return self
+
+
+class AnyOf(Clause):
+    __slots__ = ['clauses']
+
+    def __init__(self, *clauses):
+        super(AnyOf, self).__init__()
+        self.clauses = frozenset(clauses)
+
+    def match(self, version):
+        return any(c.match(version) for c in self.clauses)
+
+    def simplify(self):
+        subclauses = set()
+        for clause in self.clauses:
+            simplified = clause.simplify()
+            if isinstance(simplified, AnyOf):
+                subclauses |= simplified.clauses
+            elif simplified == Never():
+                continue
+            else:
+                subclauses.add(simplified)
+        if len(subclauses) == 1:
+            return subclauses.pop()
+        return AnyOf(*subclauses)
+
+    def __hash__(self):
+        return hash((AnyOf, self.clauses))
+
+    def __iter__(self):
+        return iter(self.clauses)
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__) and self.clauses == other.clauses
+
+    def __and__(self, other):
+        if isinstance(other, AllOf):
+            return other & self
+        elif isinstance(other, Matcher) or isinstance(other, AnyOf):
+            return AllOf(self, other)
+        else:
+            return NotImplemented
+
+    def __or__(self, other):
+        if isinstance(other, AnyOf):
+            clauses = list(self.clauses | other.clauses)
+        elif isinstance(other, Matcher) or isinstance(other, AllOf):
+            clauses = list(self.clauses | set([other]))
+        else:
+            return NotImplemented
+        return AnyOf(*clauses)
+
+    def __repr__(self):
+        return 'AnyOf(%s)' % ', '.join(sorted(repr(c) for c in self.clauses))
+
+    def _pretty(self):
+        yield 'AnyOf('
+        for clause in self.clauses:
+            lines = list(clause._pretty())
+            for line in lines[:-1]:
+                yield '\t' + line
+            yield '\t' + lines[-1] + ','
+        yield ')'
+
+
+class AllOf(Clause):
+    __slots__ = ['clauses']
+
+    def __init__(self, *clauses):
+        super(AllOf, self).__init__()
+        self.clauses = frozenset(clauses)
+
+    def match(self, version):
+        return all(clause.match(version) for clause in self.clauses)
+
+    def simplify(self):
+        subclauses = set()
+        for clause in self.clauses:
+            simplified = clause.simplify()
+            if isinstance(simplified, AllOf):
+                subclauses |= simplified.clauses
+            elif simplified == Always():
+                continue
+            else:
+                subclauses.add(simplified)
+        if len(subclauses) == 1:
+            return subclauses.pop()
+        return AllOf(*subclauses)
+
+    def __hash__(self):
+        return hash((AllOf, self.clauses))
+
+    def __iter__(self):
+        return iter(self.clauses)
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__) and self.clauses == other.clauses
+
+    def __and__(self, other):
+        if isinstance(other, Matcher) or isinstance(other, AnyOf):
+            clauses = list(self.clauses | set([other]))
+        elif isinstance(other, AllOf):
+            clauses = list(self.clauses | other.clauses)
+        else:
+            return NotImplemented
+        return AllOf(*clauses)
+
+    def __or__(self, other):
+        if isinstance(other, AnyOf):
+            return other | self
+        elif isinstance(other, Matcher):
+            return AnyOf(self, AllOf(other))
+        elif isinstance(other, AllOf):
+            return AnyOf(self, other)
+        else:
+            return NotImplemented
+
+    def __repr__(self):
+        return 'AllOf(%s)' % ', '.join(sorted(repr(c) for c in self.clauses))
+
+    def _pretty(self):
+        yield 'AllOf('
+        for clause in self.clauses:
+            lines = list(clause._pretty())
+            for line in lines[:-1]:
+                yield '\t' + line
+            yield '\t' + lines[-1] + ','
+        yield ')'
+
+
+class Matcher(Clause):
+    __slots__ = []
+
+    def __and__(self, other):
+        if isinstance(other, AllOf):
+            return other & self
+        elif isinstance(other, Matcher) or isinstance(other, AnyOf):
+            return AllOf(self, other)
+        else:
+            return NotImplemented
+
+    def __or__(self, other):
+        if isinstance(other, AnyOf):
+            return other | self
+        elif isinstance(other, Matcher) or isinstance(other, AllOf):
+            return AnyOf(self, other)
+        else:
+            return NotImplemented
+
+
+class Never(Matcher):
+    __slots__ = []
+
+    def match(self, version):
+        return False
+
+    def __hash__(self):
+        return hash((Never,))
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __and__(self, other):
+        return self
+
+    def __or__(self, other):
+        return other
+
+    def __repr__(self):
+        return 'Never()'
+
+
+class Always(Matcher):
+    __slots__ = []
+
+    def match(self, version):
+        return True
+
+    def __hash__(self):
+        return hash((Always,))
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __and__(self, other):
+        return other
+
+    def __or__(self, other):
+        return self
+
+    def __repr__(self):
+        return 'Always()'
+
+
+class Range(Matcher):
+    OP_EQ = '=='
+    OP_GT = '>'
+    OP_GTE = '>='
+    OP_LT = '<'
+    OP_LTE = '<='
+    OP_NEQ = '!='
+
+    # <1.2.3 matches 1.2.3-a1
+    PRERELEASE_ALWAYS = 'always'
+    # <1.2.3 does not match 1.2.3-a1
+    PRERELEASE_NATURAL = 'natural'
+    # 1.2.3-a1 is only considered if target == 1.2.3-xxx
+    PRERELEASE_SAMEPATCH = 'same-patch'
+
+    # 1.2.3 matches 1.2.3+*
+    BUILD_IMPLICIT = 'implicit'
+    # 1.2.3 matches only 1.2.3, not 1.2.3+4
+    BUILD_STRICT = 'strict'
+
+    __slots__ = ['operator', 'target', 'prerelease_policy', 'build_policy']
+
+    def __init__(self, operator, target, prerelease_policy=PRERELEASE_NATURAL, build_policy=BUILD_IMPLICIT):
+        super(Range, self).__init__()
+        if target.build and operator not in (self.OP_EQ, self.OP_NEQ):
+            raise ValueError(
+                "Invalid range %s%s: build numbers have no ordering."
+                % (operator, target))
+        self.operator = operator
+        self.target = target
+        self.prerelease_policy = prerelease_policy
+        self.build_policy = self.BUILD_STRICT if target.build else build_policy
+
+    def match(self, version):
+        if self.build_policy != self.BUILD_STRICT:
+            version = version.truncate('prerelease')
+
+        if version.prerelease:
+            same_patch = self.target.truncate() == version.truncate()
+
+            if self.prerelease_policy == self.PRERELEASE_SAMEPATCH and not same_patch:
+                return False
+
+        if self.operator == self.OP_EQ:
+            if self.build_policy == self.BUILD_STRICT:
+                return (
+                    self.target.truncate('prerelease') == version.truncate('prerelease')
+                    and version.build == self.target.build
+                )
+            return version == self.target
+        elif self.operator == self.OP_GT:
+            return version > self.target
+        elif self.operator == self.OP_GTE:
+            return version >= self.target
+        elif self.operator == self.OP_LT:
+            if (
+                version.prerelease
+                and self.prerelease_policy == self.PRERELEASE_NATURAL
+                and version.truncate() == self.target.truncate()
+                and not self.target.prerelease
+            ):
+                return False
+            return version < self.target
+        elif self.operator == self.OP_LTE:
+            return version <= self.target
+        else:
+            assert self.operator == self.OP_NEQ
+            if self.build_policy == self.BUILD_STRICT:
+                return not (
+                    self.target.truncate('prerelease') == version.truncate('prerelease')
+                    and version.build == self.target.build
+                )
+
+            if (
+                version.prerelease
+                and self.prerelease_policy == self.PRERELEASE_NATURAL
+                and version.truncate() == self.target.truncate()
+                and not self.target.prerelease
+            ):
+                return False
+            return version != self.target
+
+    def __hash__(self):
+        return hash((Range, self.operator, self.target, self.prerelease_policy))
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, self.__class__)
+            and self.operator == other.operator
+            and self.target == other.target
+            and self.prerelease_policy == other.prerelease_policy
+        )
+
+    def __str__(self):
+        return '%s%s' % (self.operator, self.target)
+
+    def __repr__(self):
+        policy_part = (
+            '' if self.prerelease_policy == self.PRERELEASE_NATURAL
+            else ', prerelease_policy=%r' % self.prerelease_policy
+        ) + (
+            '' if self.build_policy == self.BUILD_IMPLICIT
+            else ', build_policy=%r' % self.build_policy
+        )
+        return 'Range(%r, %r%s)' % (
+            self.operator,
+            self.target,
+            policy_part,
+        )
+
+
+@BaseSpec.register_syntax
+class SimpleSpec(BaseSpec):
+
+    SYNTAX = 'simple'
+
+    @classmethod
+    def _parse_to_clause(cls, expression):
+        return cls.Parser.parse(expression)
+
+    class Parser:
+        NUMBER = r'\*|0|[1-9][0-9]*'
+        NAIVE_SPEC = re.compile(r"""^
+            (?P<op><|<=||=|==|>=|>|!=|\^|~|~=)
+            (?P<major>{nb})(?:\.(?P<minor>{nb})(?:\.(?P<patch>{nb}))?)?
+            (?:-(?P<prerel>[a-z0-9A-Z.-]*))?
+            (?:\+(?P<build>[a-z0-9A-Z.-]*))?
+            $
+            """.format(nb=NUMBER),
+            re.VERBOSE,
+        )
+
+        @classmethod
+        def parse(cls, expression):
+            blocks = expression.split(',')
+            clause = Always()
+            for block in blocks:
+                if not cls.NAIVE_SPEC.match(block):
+                    raise ValueError("Invalid simple block %r" % block)
+                clause &= cls.parse_block(block)
+
+            return clause
+
+        PREFIX_CARET = '^'
+        PREFIX_TILDE = '~'
+        PREFIX_COMPATIBLE = '~='
+        PREFIX_EQ = '=='
+        PREFIX_NEQ = '!='
+        PREFIX_GT = '>'
+        PREFIX_GTE = '>='
+        PREFIX_LT = '<'
+        PREFIX_LTE = '<='
+
+        PREFIX_ALIASES = {
+            '=': PREFIX_EQ,
+            '': PREFIX_EQ,
+        }
+
+        EMPTY_VALUES = ['*', 'x', 'X', None]
+
+        @classmethod
+        def parse_block(cls, expr):
+            if not cls.NAIVE_SPEC.match(expr):
+                raise ValueError("Invalid simple spec component: %r" % expr)
+            prefix, major_t, minor_t, patch_t, prerel, build = cls.NAIVE_SPEC.match(expr).groups()
+            prefix = cls.PREFIX_ALIASES.get(prefix, prefix)
+
+            major = None if major_t in cls.EMPTY_VALUES else int(major_t)
+            minor = None if minor_t in cls.EMPTY_VALUES else int(minor_t)
+            patch = None if patch_t in cls.EMPTY_VALUES else int(patch_t)
+
+            if major is None:  # '*'
+                target = Version(major=0, minor=0, patch=0)
+                if prefix not in (cls.PREFIX_EQ, cls.PREFIX_GTE):
+                    raise ValueError("Invalid simple spec: %r" % expr)
+            elif minor is None:
+                target = Version(major=major, minor=0, patch=0)
+            elif patch is None:
+                target = Version(major=major, minor=minor, patch=0)
+            else:
+                target = Version(
+                    major=major,
+                    minor=minor,
+                    patch=patch,
+                    prerelease=prerel.split('.') if prerel else (),
+                    build=build.split('.') if build else (),
+                )
+
+            if (major is None or minor is None or patch is None) and (prerel or build):
+                raise ValueError("Invalid simple spec: %r" % expr)
+
+            if build is not None and prefix not in (cls.PREFIX_EQ, cls.PREFIX_NEQ):
+                raise ValueError("Invalid simple spec: %r" % expr)
+
+            if prefix == cls.PREFIX_CARET:
+                # Accept anything with the same most-significant digit
+                if target.major:
+                    high = target.next_major()
+                elif target.minor:
+                    high = target.next_minor()
+                else:
+                    high = target.next_patch()
+                return Range(Range.OP_GTE, target) & Range(Range.OP_LT, high)
+
+            elif prefix == cls.PREFIX_TILDE:
+                assert major is not None
+                # Accept any higher patch in the same minor
+                # Might go higher if the initial version was a partial
+                if minor is None:
+                    high = target.next_major()
+                else:
+                    high = target.next_minor()
+                return Range(Range.OP_GTE, target) & Range(Range.OP_LT, high)
+
+            elif prefix == cls.PREFIX_COMPATIBLE:
+                assert major is not None
+                # ~1 is 1.0.0..2.0.0; ~=2.2 is 2.2.0..3.0.0; ~=1.4.5 is 1.4.5..1.5.0
+                if minor is None or patch is None:
+                    # We got a partial version
+                    high = target.next_major()
+                else:
+                    high = target.next_minor()
+                return Range(Range.OP_GTE, target) & Range(Range.OP_LT, high)
+
+            elif prefix == cls.PREFIX_EQ:
+                if major is None:
+                    return Range(Range.OP_GTE, target)
+                elif minor is None:
+                    return Range(Range.OP_GTE, target) & Range(Range.OP_LT, target.next_major())
+                elif patch is None:
+                    return Range(Range.OP_GTE, target) & Range(Range.OP_LT, target.next_minor())
+                elif build == '':
+                    return Range(Range.OP_EQ, target, build_policy=Range.BUILD_STRICT)
+                else:
+                    return Range(Range.OP_EQ, target)
+
+            elif prefix == cls.PREFIX_NEQ:
+                assert major is not None
+                if minor is None:
+                    # !=1.x => <1.0.0 || >=2.0.0
+                    return Range(Range.OP_LT, target) | Range(Range.OP_GTE, target.next_major())
+                elif patch is None:
+                    # !=1.2.x => <1.2.0 || >=1.3.0
+                    return Range(Range.OP_LT, target) | Range(Range.OP_GTE, target.next_minor())
+                elif prerel == '':
+                    # !=1.2.3-
+                    return Range(Range.OP_NEQ, target, prerelease_policy=Range.PRERELEASE_ALWAYS)
+                elif build == '':
+                    # !=1.2.3+ or !=1.2.3-a2+
+                    return Range(Range.OP_NEQ, target, build_policy=Range.BUILD_STRICT)
+                else:
+                    return Range(Range.OP_NEQ, target)
+
+            elif prefix == cls.PREFIX_GT:
+                assert major is not None
+                if minor is None:
+                    # >1.x => >=2.0
+                    return Range(Range.OP_GTE, target.next_major())
+                elif patch is None:
+                    return Range(Range.OP_GTE, target.next_minor())
+                else:
+                    return Range(Range.OP_GT, target)
+
+            elif prefix == cls.PREFIX_GTE:
+                return Range(Range.OP_GTE, target)
+
+            elif prefix == cls.PREFIX_LT:
+                assert major is not None
+                if prerel == '':
+                    # <1.2.3-
+                    return Range(Range.OP_LT, target, prerelease_policy=Range.PRERELEASE_ALWAYS)
+                return Range(Range.OP_LT, target)
+
+            else:
+                assert prefix == cls.PREFIX_LTE
+                assert major is not None
+                if minor is None:
+                    # <=1.x => <2.0
+                    return Range(Range.OP_LT, target.next_major())
+                elif patch is None:
+                    return Range(Range.OP_LT, target.next_minor())
+                else:
+                    return Range(Range.OP_LTE, target)
+
+
+class LegacySpec(SimpleSpec):
+    def __init__(self, *expressions):
+        warnings.warn(
+            "The Spec() class will be removed in 3.1; use SimpleSpec() instead.",
+            PendingDeprecationWarning,
+            stacklevel=2,
+        )
+
+        if len(expressions) > 1:
+            warnings.warn(
+                "Passing 2+ arguments to SimpleSpec will be removed in 3.0; concatenate them with ',' instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        expression = ','.join(expressions)
+        super(LegacySpec, self).__init__(expression)
+
+    @property
+    def specs(self):
+        return list(self)
+
+    def __iter__(self):
+        warnings.warn(
+            "Iterating over the components of a SimpleSpec object will be removed in 3.0.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        try:
+            clauses = list(self.clause)
+        except TypeError:  # Not an iterable
+            clauses = [self.clause]
+        for clause in clauses:
+            yield SpecItem.from_matcher(clause)
+
+
+Spec = LegacySpec
+
+
+@BaseSpec.register_syntax
+class NpmSpec(BaseSpec):
+    SYNTAX = 'npm'
+
+    @classmethod
+    def _parse_to_clause(cls, expression):
+        return cls.Parser.parse(expression)
+
+    class Parser:
+        JOINER = '||'
+        HYPHEN = ' - '
+
+        NUMBER = r'x|X|\*|0|[1-9][0-9]*'
+        PART = r'[a-zA-Z0-9.-]*'
+        NPM_SPEC_BLOCK = re.compile(r"""
+            ^(?:v)?                     # Strip optional initial v
+            (?P<op><|<=|>=|>|=|\^|~|)   # Operator, can be empty
+            (?P<major>{nb})(?:\.(?P<minor>{nb})(?:\.(?P<patch>{nb}))?)?
+            (?:-(?P<prerel>{part}))?    # Optional pre-release
+            (?:\+(?P<build>{part}))?    # Optional build
+            $""".format(nb=NUMBER, part=PART),
+            re.VERBOSE,
+        )
+
+        @classmethod
+        def range(cls, operator, target):
+            return Range(operator, target, prerelease_policy=Range.PRERELEASE_SAMEPATCH)
+
+        @classmethod
+        def parse(cls, expression):
+            result = Never()
+            groups = expression.split(cls.JOINER)
+            for group in groups:
+                group = group.strip()
+                if not group:
+                    group = '>=0.0.0'
+
+                subclauses = []
+                if cls.HYPHEN in group:
+                    low, high = group.split(cls.HYPHEN, 2)
+                    subclauses = cls.parse_simple('>=' + low) + cls.parse_simple('<=' + high)
+
+                else:
+                    blocks = group.split(' ')
+                    for block in blocks:
+                        if not cls.NPM_SPEC_BLOCK.match(block):
+                            raise ValueError("Invalid NPM block in %r: %r" % (expression, block))
+
+                        subclauses.extend(cls.parse_simple(block))
+
+                prerelease_clauses = []
+                non_prerel_clauses = []
+                for clause in subclauses:
+                    if clause.target.prerelease:
+                        if clause.operator in (Range.OP_GT, Range.OP_GTE):
+                            prerelease_clauses.append(Range(
+                                operator=Range.OP_LT,
+                                target=Version(
+                                    major=clause.target.major,
+                                    minor=clause.target.minor,
+                                    patch=clause.target.patch + 1,
+                                ),
+                                prerelease_policy=Range.PRERELEASE_ALWAYS,
+                            ))
+                        elif clause.operator in (Range.OP_LT, Range.OP_LTE):
+                            prerelease_clauses.append(Range(
+                                operator=Range.OP_GTE,
+                                target=Version(
+                                    major=clause.target.major,
+                                    minor=clause.target.minor,
+                                    patch=0,
+                                    prerelease=(),
+                                ),
+                                prerelease_policy=Range.PRERELEASE_ALWAYS,
+                            ))
+                        prerelease_clauses.append(clause)
+                        non_prerel_clauses.append(cls.range(
+                            operator=clause.operator,
+                            target=clause.target.truncate(),
+                        ))
+                    else:
+                        non_prerel_clauses.append(clause)
+                if prerelease_clauses:
+                    result |= AllOf(*prerelease_clauses)
+                result |= AllOf(*non_prerel_clauses)
+
+            return result
+
+        PREFIX_CARET = '^'
+        PREFIX_TILDE = '~'
+        PREFIX_EQ = '='
+        PREFIX_GT = '>'
+        PREFIX_GTE = '>='
+        PREFIX_LT = '<'
+        PREFIX_LTE = '<='
+
+        PREFIX_ALIASES = {
+            '': PREFIX_EQ,
+        }
+
+        PREFIX_TO_OPERATOR = {
+            PREFIX_EQ: Range.OP_EQ,
+            PREFIX_LT: Range.OP_LT,
+            PREFIX_LTE: Range.OP_LTE,
+            PREFIX_GTE: Range.OP_GTE,
+            PREFIX_GT: Range.OP_GT,
+        }
+
+        EMPTY_VALUES = ['*', 'x', 'X', None]
+
+        @classmethod
+        def parse_simple(cls, simple):
+            match = cls.NPM_SPEC_BLOCK.match(simple)
+
+            prefix, major_t, minor_t, patch_t, prerel, build = match.groups()
+
+            prefix = cls.PREFIX_ALIASES.get(prefix, prefix)
+            major = None if major_t in cls.EMPTY_VALUES else int(major_t)
+            minor = None if minor_t in cls.EMPTY_VALUES else int(minor_t)
+            patch = None if patch_t in cls.EMPTY_VALUES else int(patch_t)
+
+            if build is not None and prefix not in [cls.PREFIX_EQ]:
+                # Ignore the 'build' part when not comparing to a specific part.
+                build = None
+
+            if major is None:  # '*', 'x', 'X'
+                target = Version(major=0, minor=0, patch=0)
+                if prefix not in [cls.PREFIX_EQ, cls.PREFIX_GTE]:
+                    raise ValueError("Invalid expression %r" % simple)
+                prefix = cls.PREFIX_GTE
+            elif minor is None:
+                target = Version(major=major, minor=0, patch=0)
+            elif patch is None:
+                target = Version(major=major, minor=minor, patch=0)
+            else:
+                target = Version(
+                    major=major,
+                    minor=minor,
+                    patch=patch,
+                    prerelease=prerel.split('.') if prerel else (),
+                    build=build.split('.') if build else (),
+                )
+
+            if (major is None or minor is None or patch is None) and (prerel or build):
+                raise ValueError("Invalid NPM spec: %r" % simple)
+
+            if prefix == cls.PREFIX_CARET:
+                if target.major:  # ^1.2.4 => >=1.2.4 <2.0.0 ; ^1.x => >=1.0.0 <2.0.0
+                    high = target.truncate().next_major()
+                elif target.minor:  # ^0.1.2 => >=0.1.2 <0.2.0
+                    high = target.truncate().next_minor()
+                elif minor is None:  # ^0.x => >=0.0.0 <1.0.0
+                    high = target.truncate().next_major()
+                elif patch is None:  # ^0.2.x => >=0.2.0 <0.3.0
+                    high = target.truncate().next_minor()
+                else:  # ^0.0.1 => >=0.0.1 <0.0.2
+                    high = target.truncate().next_patch()
+                return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, high)]
+
+            elif prefix == cls.PREFIX_TILDE:
+                assert major is not None
+                if minor is None:  # ~1.x => >=1.0.0 <2.0.0
+                    high = target.next_major()
+                else:  # ~1.2.x => >=1.2.0 <1.3.0; ~1.2.3 => >=1.2.3 <1.3.0
+                    high = target.next_minor()
+                return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, high)]
+
+            elif prefix == cls.PREFIX_EQ:
+                if major is None:
+                    return [cls.range(Range.OP_GTE, target)]
+                elif minor is None:
+                    return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, target.next_major())]
+                elif patch is None:
+                    return [cls.range(Range.OP_GTE, target), cls.range(Range.OP_LT, target.next_minor())]
+                else:
+                    return [cls.range(Range.OP_EQ, target)]
+
+            elif prefix == cls.PREFIX_GT:
+                assert major is not None
+                if minor is None:  # >1.x
+                    return [cls.range(Range.OP_GTE, target.next_major())]
+                elif patch is None:  # >1.2.x => >=1.3.0
+                    return [cls.range(Range.OP_GTE, target.next_minor())]
+                else:
+                    return [cls.range(Range.OP_GT, target)]
+
+            elif prefix == cls.PREFIX_GTE:
+                return [cls.range(Range.OP_GTE, target)]
+
+            elif prefix == cls.PREFIX_LT:
+                assert major is not None
+                return [cls.range(Range.OP_LT, target)]
+
+            else:
+                assert prefix == cls.PREFIX_LTE
+                assert major is not None
+                if minor is None:  # <=1.x => <2.0.0
+                    return [cls.range(Range.OP_LT, target.next_major())]
+                elif patch is None:  # <=1.2.x => <1.3.0
+                    return [cls.range(Range.OP_LT, target.next_minor())]
+                else:
+                    return [cls.range(Range.OP_LTE, target)]
diff --git a/SublimeText/lsp_utils/st3/lsp_utils/third_party/update-info.log b/SublimeText/lsp_utils/st3/lsp_utils/third_party/update-info.log
new file mode 100644
index 00000000..6d6b8066
--- /dev/null
+++ b/SublimeText/lsp_utils/st3/lsp_utils/third_party/update-info.log
@@ -0,0 +1,2 @@
+ref: 2.10.0
+272a363824b1e09ae4e494ad00092f8782248821
diff --git a/SublimeText/markupsafe/README.md b/SublimeText/markupsafe/README.md
new file mode 100644
index 00000000..18c3c86c
--- /dev/null
+++ b/SublimeText/markupsafe/README.md
@@ -0,0 +1,78 @@
+# *MarkupSafe* module for Package Control
+
+This is the *[MarkupSafe][]* module
+bundled for usage with [Package Control][],
+a package manager
+for the [Sublime Text][] text editor.
+
+
+this repo | pypi
+---- | ----
+![latest tag](https://img.shields.io/github/tag/packagecontrol/MarkupSafe.svg) | [![pypi](https://img.shields.io/pypi/v/MarkupSafe.svg)][pypi]
+
+
+## How to use *MarkupSafe* as a dependency
+
+In order to tell Package Control
+that you are using the *MarkupSafe* module
+in your ST package,
+create a `dependencies.json` file
+in your package root
+with the following contents:
+
+```js
+{
+   "*": {
+      "*": [
+         "python-markupsafe"
+      ]
+   }
+}
+```
+
+If the file exists already,
+add `"python-markupsafe"` to every dependency list.
+
+Then run the **Package Control: Satisfy Dependencies** command
+to make Package Control
+install the module for you locally
+(if you don't have it already).
+
+After all this
+you can use `import markupsafe`
+in any of your Python plugins.
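+
+As a rough, hypothetical illustration (not something shipped with this
+dependency), a plugin in your own package might use it along these lines;
+the command name and the popup markup below are placeholders:
+
+```python
+import sublime_plugin
+
+import markupsafe
+
+
+class ShowEscapedSelectionCommand(sublime_plugin.TextCommand):
+    """Hypothetical example: show the current selection in a popup,
+    escaping it first so any HTML in the text renders literally."""
+
+    def run(self, edit):
+        region = self.view.sel()[0]
+        text = self.view.substr(region)
+        # markupsafe.escape() HTML-escapes special characters and returns
+        # a Markup string that is safe to embed in the popup HTML.
+        self.view.show_popup("<p>%s</p>" % markupsafe.escape(text))
+```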
+
+See also:
+[Documentation on Dependencies](https://packagecontrol.io/docs/dependencies)
+
+
+## How to update this repository (for contributors)
+
+1. Download the latest tarball
+   from [pypi][].
+2. Delete everything inside the `all/` folder.
+3. Copy the `markupsafe` folder (except for the `.c` file)
+   and everything related to copyright/licensing
+   from the tarball
+   to the `all/` folder.
+4. Commit changes
+   and either create a pull request
+   or create a tag directly
+   in the format `v<version>`
+   (in case you have push access).
+
+
+## License
+
+The contents of the root folder
+in this repository
+are released
+under the *public domain*.
+The contents of the `all/` folder
+fall under *their own bundled licenses*.
+
+
+[MarkupSafe]: https://palletsprojects.com/p/MarkupSafe/
+[Package Control]: https://packagecontrol.io/
+[Sublime Text]: https://sublimetext.com/
+[pypi]: https://pypi.python.org/pypi/MarkupSafe
diff --git a/SublimeText/markupsafe/all/CHANGES.rst b/SublimeText/markupsafe/all/CHANGES.rst
new file mode 100644
index 00000000..63ecd670
--- /dev/null
+++ b/SublimeText/markupsafe/all/CHANGES.rst
@@ -0,0 +1,97 @@
+Version 1.1.1
+-------------
+
+Released 2019-02-23
+
+-   Fix segfault when ``__html__`` method raises an exception when using
+    the C speedups. The exception is now propagated correctly. (`#109`_)
+
+.. _#109: https://github.com/pallets/markupsafe/pull/109
+
+
+Version 1.1.0
+-------------
+
+Released 2018-11-05
+
+-   Drop support for Python 2.6 and 3.3.
+-   Build wheels for Linux, Mac, and Windows, allowing systems without
+    a compiler to take advantage of the C extension speedups. (`#104`_)
+-   Use newer CPython API on Python 3, resulting in a 1.5x speedup.
+    (`#64`_)
+-   ``escape`` wraps ``__html__`` result in ``Markup``, consistent with
+    documented behavior. (`#69`_)
+
+.. _#64: https://github.com/pallets/markupsafe/pull/64
+.. _#69: https://github.com/pallets/markupsafe/pull/69
+.. _#104: https://github.com/pallets/markupsafe/pull/104
+
+
+Version 1.0
+-----------
+
+Released 2017-03-07
+
+-   Fixed custom types not invoking ``__unicode__`` when used with
+    ``format()``.
+-   Added ``__version__`` module attribute.
+-   Improve unescape code to leave lone ampersands alone.
+
+
+Version 0.18
+------------
+
+Released 2013-05-22
+
+-   Fixed ``__mul__`` and string splitting on Python 3.
+
+
+Version 0.17
+------------
+
+Released 2013-05-21
+
+-   Fixed a bug with broken interpolation on tuples.
+
+
+Version 0.16
+------------
+
+Released 2013-05-20
+
+-   Improved Python 3 Support and removed 2to3.
+-   Removed support for Python 3.2 and 2.5.
+
+
+Version 0.15
+------------
+
+Released 2011-07-20
+
+-   Fixed a typo that caused the library to fail to install on pypy and
+    jython.
+
+
+Version 0.14
+------------
+
+Released 2011-07-20
+
+-   Release fix for 0.13.
+
+
+Version 0.13
+------------
+
+Released 2011-07-20
+
+-   Do not attempt to compile extension for PyPy or Jython.
+-   Work around some 64bit Windows issues.
+
+
+Version 0.12
+------------
+
+Released 2011-02-17
+
+-   Improved PyPy compatibility.
diff --git a/SublimeText/markupsafe/all/LICENSE.rst b/SublimeText/markupsafe/all/LICENSE.rst
new file mode 100644
index 00000000..9d227a0c
--- /dev/null
+++ b/SublimeText/markupsafe/all/LICENSE.rst
@@ -0,0 +1,28 @@
+Copyright 2010 Pallets
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1.  Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+2.  Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+3.  Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/SublimeText/markupsafe/all/README.rst b/SublimeText/markupsafe/all/README.rst
new file mode 100644
index 00000000..3548b8d1
--- /dev/null
+++ b/SublimeText/markupsafe/all/README.rst
@@ -0,0 +1,69 @@
+MarkupSafe
+==========
+
+MarkupSafe implements a text object that escapes characters so it is
+safe to use in HTML and XML. Characters that have special meanings are
+replaced so that they display as the actual characters. This mitigates
+injection attacks, meaning untrusted user input can safely be displayed
+on a page.
+
+
+Installing
+----------
+
+Install and update using `pip`_:
+
+.. code-block:: text
+
+    pip install -U MarkupSafe
+
+.. _pip: https://pip.pypa.io/en/stable/quickstart/
+
+
+Examples
+--------
+
+.. code-block:: pycon
+
+    >>> from markupsafe import Markup, escape
+    >>> # escape replaces special characters and wraps in Markup
+    >>> escape('<script>alert(document.cookie);</script>')
+    Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+    >>> # wrap in Markup to mark text "safe" and prevent escaping
+    >>> Markup('<strong>Hello</strong>')
+    Markup('<strong>hello</strong>')
+    >>> escape(Markup('<strong>Hello</strong>'))
+    Markup('<strong>hello</strong>')
+    >>> # Markup is a text subclass (str on Python 3, unicode on Python 2)
+    >>> # methods and operators escape their arguments
+    >>> template = Markup("Hello <em>%s</em>")
+    >>> template % '"World"'
+    Markup('Hello <em>&#34;World&#34;</em>')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+libraries that use it. In order to grow the community of contributors
+and users, and allow the maintainers to devote more time to the
+projects, `please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+*   Website: https://palletsprojects.com/p/markupsafe/
+*   Documentation: https://markupsafe.palletsprojects.com/
+*   License: `BSD-3-Clause <https://github.com/pallets/markupsafe/blob/master/LICENSE.rst>`_
+*   Releases: https://pypi.org/project/MarkupSafe/
+*   Code: https://github.com/pallets/markupsafe
+*   Issue tracker: https://github.com/pallets/markupsafe/issues
+*   Test status:
+
+    *   Linux, Mac: https://travis-ci.org/pallets/markupsafe
+    *   Windows: https://ci.appveyor.com/project/pallets/markupsafe
+
+*   Test coverage: https://codecov.io/gh/pallets/markupsafe
diff --git a/SublimeText/markupsafe/all/markupsafe/__init__.py b/SublimeText/markupsafe/all/markupsafe/__init__.py
new file mode 100644
index 00000000..da05ed32
--- /dev/null
+++ b/SublimeText/markupsafe/all/markupsafe/__init__.py
@@ -0,0 +1,327 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe
+~~~~~~~~~~
+
+Implements an escape function and a Markup string to replace HTML
+special characters with safe representations.
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+import re
+import string
+
+from ._compat import int_types
+from ._compat import iteritems
+from ._compat import Mapping
+from ._compat import PY2
+from ._compat import string_types
+from ._compat import text_type
+from ._compat import unichr
+
+__version__ = "1.1.1"
+
+__all__ = ["Markup", "soft_unicode", "escape", "escape_silent"]
+
+_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
+_entity_re = re.compile(r"&([^& ;]+);")
+
+
+class Markup(text_type):
+    """A string that is ready to be safely inserted into an HTML or XML
+    document, either because it was escaped or because it was marked
+    safe.
+
+    Passing an object to the constructor converts it to text and wraps
+    it to mark it safe without escaping. To escape the text, use the
+    :meth:`escape` class method instead.
+
+    >>> Markup('Hello, <em>World</em>!')
+    Markup('Hello, <em>World</em>!')
+    >>> Markup(42)
+    Markup('42')
+    >>> Markup.escape('Hello, <em>World</em>!')
+    Markup('Hello &lt;em&gt;World&lt;/em&gt;!')
+
+    This implements the ``__html__()`` interface that some frameworks
+    use. Passing an object that implements ``__html__()`` will wrap the
+    output of that method, marking it safe.
+
+    >>> class Foo:
+    ...     def __html__(self):
+    ...         return '<a href="/foo">foo</a>'
+    ...
+    >>> Markup(Foo())
+    Markup('<a href="/foo">foo</a>')
+
+    This is a subclass of the text type (``str`` in Python 3,
+    ``unicode`` in Python 2). It has the same methods as that type, but
+    all methods escape their arguments and return a ``Markup`` instance.
+
+    >>> Markup('<em>%s</em>') % 'foo & bar'
+    Markup('<em>foo &amp; bar</em>')
+    >>> Markup('<em>Hello</em> ') + '<foo>'
+    Markup('<em>Hello</em> &lt;foo&gt;')
+    """
+
+    __slots__ = ()
+
+    def __new__(cls, base=u"", encoding=None, errors="strict"):
+        if hasattr(base, "__html__"):
+            base = base.__html__()
+        if encoding is None:
+            return text_type.__new__(cls, base)
+        return text_type.__new__(cls, base, encoding, errors)
+
+    def __html__(self):
+        return self
+
+    def __add__(self, other):
+        if isinstance(other, string_types) or hasattr(other, "__html__"):
+            return self.__class__(super(Markup, self).__add__(self.escape(other)))
+        return NotImplemented
+
+    def __radd__(self, other):
+        if hasattr(other, "__html__") or isinstance(other, string_types):
+            return self.escape(other).__add__(self)
+        return NotImplemented
+
+    def __mul__(self, num):
+        if isinstance(num, int_types):
+            return self.__class__(text_type.__mul__(self, num))
+        return NotImplemented
+
+    __rmul__ = __mul__
+
+    def __mod__(self, arg):
+        if isinstance(arg, tuple):
+            arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
+        else:
+            arg = _MarkupEscapeHelper(arg, self.escape)
+        return self.__class__(text_type.__mod__(self, arg))
+
+    def __repr__(self):
+        return "%s(%s)" % (self.__class__.__name__, text_type.__repr__(self))
+
+    def join(self, seq):
+        return self.__class__(text_type.join(self, map(self.escape, seq)))
+
+    join.__doc__ = text_type.join.__doc__
+
+    def split(self, *args, **kwargs):
+        return list(map(self.__class__, text_type.split(self, *args, **kwargs)))
+
+    split.__doc__ = text_type.split.__doc__
+
+    def rsplit(self, *args, **kwargs):
+        return list(map(self.__class__, text_type.rsplit(self, *args, **kwargs)))
+
+    rsplit.__doc__ = text_type.rsplit.__doc__
+
+    def splitlines(self, *args, **kwargs):
+        return list(map(self.__class__, text_type.splitlines(self, *args, **kwargs)))
+
+    splitlines.__doc__ = text_type.splitlines.__doc__
+
+    def unescape(self):
+        """Convert escaped markup back into a text string. This replaces
+        HTML entities with the characters they represent.
+
+        >>> Markup('Main &raquo; <em>About</em>').unescape()
+        'Main » <em>About</em>'
+        """
+        from ._constants import HTML_ENTITIES
+
+        def handle_match(m):
+            name = m.group(1)
+            if name in HTML_ENTITIES:
+                return unichr(HTML_ENTITIES[name])
+            try:
+                if name[:2] in ("#x", "#X"):
+                    return unichr(int(name[2:], 16))
+                elif name.startswith("#"):
+                    return unichr(int(name[1:]))
+            except ValueError:
+                pass
+            # Don't modify unexpected input.
+            return m.group()
+
+        return _entity_re.sub(handle_match, text_type(self))
+
+    def striptags(self):
+        """:meth:`unescape` the markup, remove tags, and normalize
+        whitespace to single spaces.
+
+        >>> Markup('Main &raquo;\t<em>About</em>').striptags()
+        'Main » About'
+        """
+        stripped = u" ".join(_striptags_re.sub("", self).split())
+        return Markup(stripped).unescape()
+
+    @classmethod
+    def escape(cls, s):
+        """Escape a string. Calls :func:`escape` and ensures that for
+        subclasses the correct type is returned.
+        """
+        rv = escape(s)
+        if rv.__class__ is not cls:
+            return cls(rv)
+        return rv
+
+    def make_simple_escaping_wrapper(name):  # noqa: B902
+        orig = getattr(text_type, name)
+
+        def func(self, *args, **kwargs):
+            args = _escape_argspec(list(args), enumerate(args), self.escape)
+            _escape_argspec(kwargs, iteritems(kwargs), self.escape)
+            return self.__class__(orig(self, *args, **kwargs))
+
+        func.__name__ = orig.__name__
+        func.__doc__ = orig.__doc__
+        return func
+
+    for method in (
+        "__getitem__",
+        "capitalize",
+        "title",
+        "lower",
+        "upper",
+        "replace",
+        "ljust",
+        "rjust",
+        "lstrip",
+        "rstrip",
+        "center",
+        "strip",
+        "translate",
+        "expandtabs",
+        "swapcase",
+        "zfill",
+    ):
+        locals()[method] = make_simple_escaping_wrapper(method)
+
+    def partition(self, sep):
+        return tuple(map(self.__class__, text_type.partition(self, self.escape(sep))))
+
+    def rpartition(self, sep):
+        return tuple(map(self.__class__, text_type.rpartition(self, self.escape(sep))))
+
+    def format(self, *args, **kwargs):
+        formatter = EscapeFormatter(self.escape)
+        kwargs = _MagicFormatMapping(args, kwargs)
+        return self.__class__(formatter.vformat(self, args, kwargs))
+
+    def __html_format__(self, format_spec):
+        if format_spec:
+            raise ValueError("Unsupported format specification " "for Markup.")
+        return self
+
+    # not in python 3
+    if hasattr(text_type, "__getslice__"):
+        __getslice__ = make_simple_escaping_wrapper("__getslice__")
+
+    del method, make_simple_escaping_wrapper
+
+
+class _MagicFormatMapping(Mapping):
+    """This class implements a dummy wrapper to fix a bug in the Python
+    standard library for string formatting.
+
+    See http://bugs.python.org/issue13598 for information about why
+    this is necessary.
+    """
+
+    def __init__(self, args, kwargs):
+        self._args = args
+        self._kwargs = kwargs
+        self._last_index = 0
+
+    def __getitem__(self, key):
+        if key == "":
+            idx = self._last_index
+            self._last_index += 1
+            try:
+                return self._args[idx]
+            except LookupError:
+                pass
+            key = str(idx)
+        return self._kwargs[key]
+
+    def __iter__(self):
+        return iter(self._kwargs)
+
+    def __len__(self):
+        return len(self._kwargs)
+
+
+if hasattr(text_type, "format"):
+
+    class EscapeFormatter(string.Formatter):
+        def __init__(self, escape):
+            self.escape = escape
+
+        def format_field(self, value, format_spec):
+            if hasattr(value, "__html_format__"):
+                rv = value.__html_format__(format_spec)
+            elif hasattr(value, "__html__"):
+                if format_spec:
+                    raise ValueError(
+                        "Format specifier {0} given, but {1} does not"
+                        " define __html_format__. A class that defines"
+                        " __html__ must define __html_format__ to work"
+                        " with format specifiers.".format(format_spec, type(value))
+                    )
+                rv = value.__html__()
+            else:
+                # We need to make sure the format spec is unicode here as
+                # otherwise the wrong callback methods are invoked.  For
+                # instance a byte string there would invoke __str__ and
+                # not __unicode__.
+                rv = string.Formatter.format_field(self, value, text_type(format_spec))
+            return text_type(self.escape(rv))
+
+
+def _escape_argspec(obj, iterable, escape):
+    """Helper for various string-wrapped functions."""
+    for key, value in iterable:
+        if hasattr(value, "__html__") or isinstance(value, string_types):
+            obj[key] = escape(value)
+    return obj
+
+
+class _MarkupEscapeHelper(object):
+    """Helper for Markup.__mod__"""
+
+    def __init__(self, obj, escape):
+        self.obj = obj
+        self.escape = escape
+
+    def __getitem__(self, item):
+        return _MarkupEscapeHelper(self.obj[item], self.escape)
+
+    def __str__(self):
+        return text_type(self.escape(self.obj))
+
+    __unicode__ = __str__
+
+    def __repr__(self):
+        return str(self.escape(repr(self.obj)))
+
+    def __int__(self):
+        return int(self.obj)
+
+    def __float__(self):
+        return float(self.obj)
+
+
+# we have to import it down here as the speedups and native
+# modules imports the markup type which is define above.
+try:
+    from ._speedups import escape, escape_silent, soft_unicode
+except ImportError:
+    from ._native import escape, escape_silent, soft_unicode
+
+if not PY2:
+    soft_str = soft_unicode
+    __all__.append("soft_str")
diff --git a/SublimeText/markupsafe/all/markupsafe/_compat.py b/SublimeText/markupsafe/all/markupsafe/_compat.py
new file mode 100644
index 00000000..bc05090f
--- /dev/null
+++ b/SublimeText/markupsafe/all/markupsafe/_compat.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe._compat
+~~~~~~~~~~~~~~~~~~
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+import sys
+
+PY2 = sys.version_info[0] == 2
+
+if not PY2:
+    text_type = str
+    string_types = (str,)
+    unichr = chr
+    int_types = (int,)
+
+    def iteritems(x):
+        return iter(x.items())
+
+    from collections.abc import Mapping
+
+else:
+    text_type = unicode
+    string_types = (str, unicode)
+    unichr = unichr
+    int_types = (int, long)
+
+    def iteritems(x):
+        return x.iteritems()
+
+    from collections import Mapping
diff --git a/SublimeText/markupsafe/all/markupsafe/_constants.py b/SublimeText/markupsafe/all/markupsafe/_constants.py
new file mode 100644
index 00000000..7c57c2d2
--- /dev/null
+++ b/SublimeText/markupsafe/all/markupsafe/_constants.py
@@ -0,0 +1,264 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe._constants
+~~~~~~~~~~~~~~~~~~~~~
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+
+HTML_ENTITIES = {
+    "AElig": 198,
+    "Aacute": 193,
+    "Acirc": 194,
+    "Agrave": 192,
+    "Alpha": 913,
+    "Aring": 197,
+    "Atilde": 195,
+    "Auml": 196,
+    "Beta": 914,
+    "Ccedil": 199,
+    "Chi": 935,
+    "Dagger": 8225,
+    "Delta": 916,
+    "ETH": 208,
+    "Eacute": 201,
+    "Ecirc": 202,
+    "Egrave": 200,
+    "Epsilon": 917,
+    "Eta": 919,
+    "Euml": 203,
+    "Gamma": 915,
+    "Iacute": 205,
+    "Icirc": 206,
+    "Igrave": 204,
+    "Iota": 921,
+    "Iuml": 207,
+    "Kappa": 922,
+    "Lambda": 923,
+    "Mu": 924,
+    "Ntilde": 209,
+    "Nu": 925,
+    "OElig": 338,
+    "Oacute": 211,
+    "Ocirc": 212,
+    "Ograve": 210,
+    "Omega": 937,
+    "Omicron": 927,
+    "Oslash": 216,
+    "Otilde": 213,
+    "Ouml": 214,
+    "Phi": 934,
+    "Pi": 928,
+    "Prime": 8243,
+    "Psi": 936,
+    "Rho": 929,
+    "Scaron": 352,
+    "Sigma": 931,
+    "THORN": 222,
+    "Tau": 932,
+    "Theta": 920,
+    "Uacute": 218,
+    "Ucirc": 219,
+    "Ugrave": 217,
+    "Upsilon": 933,
+    "Uuml": 220,
+    "Xi": 926,
+    "Yacute": 221,
+    "Yuml": 376,
+    "Zeta": 918,
+    "aacute": 225,
+    "acirc": 226,
+    "acute": 180,
+    "aelig": 230,
+    "agrave": 224,
+    "alefsym": 8501,
+    "alpha": 945,
+    "amp": 38,
+    "and": 8743,
+    "ang": 8736,
+    "apos": 39,
+    "aring": 229,
+    "asymp": 8776,
+    "atilde": 227,
+    "auml": 228,
+    "bdquo": 8222,
+    "beta": 946,
+    "brvbar": 166,
+    "bull": 8226,
+    "cap": 8745,
+    "ccedil": 231,
+    "cedil": 184,
+    "cent": 162,
+    "chi": 967,
+    "circ": 710,
+    "clubs": 9827,
+    "cong": 8773,
+    "copy": 169,
+    "crarr": 8629,
+    "cup": 8746,
+    "curren": 164,
+    "dArr": 8659,
+    "dagger": 8224,
+    "darr": 8595,
+    "deg": 176,
+    "delta": 948,
+    "diams": 9830,
+    "divide": 247,
+    "eacute": 233,
+    "ecirc": 234,
+    "egrave": 232,
+    "empty": 8709,
+    "emsp": 8195,
+    "ensp": 8194,
+    "epsilon": 949,
+    "equiv": 8801,
+    "eta": 951,
+    "eth": 240,
+    "euml": 235,
+    "euro": 8364,
+    "exist": 8707,
+    "fnof": 402,
+    "forall": 8704,
+    "frac12": 189,
+    "frac14": 188,
+    "frac34": 190,
+    "frasl": 8260,
+    "gamma": 947,
+    "ge": 8805,
+    "gt": 62,
+    "hArr": 8660,
+    "harr": 8596,
+    "hearts": 9829,
+    "hellip": 8230,
+    "iacute": 237,
+    "icirc": 238,
+    "iexcl": 161,
+    "igrave": 236,
+    "image": 8465,
+    "infin": 8734,
+    "int": 8747,
+    "iota": 953,
+    "iquest": 191,
+    "isin": 8712,
+    "iuml": 239,
+    "kappa": 954,
+    "lArr": 8656,
+    "lambda": 955,
+    "lang": 9001,
+    "laquo": 171,
+    "larr": 8592,
+    "lceil": 8968,
+    "ldquo": 8220,
+    "le": 8804,
+    "lfloor": 8970,
+    "lowast": 8727,
+    "loz": 9674,
+    "lrm": 8206,
+    "lsaquo": 8249,
+    "lsquo": 8216,
+    "lt": 60,
+    "macr": 175,
+    "mdash": 8212,
+    "micro": 181,
+    "middot": 183,
+    "minus": 8722,
+    "mu": 956,
+    "nabla": 8711,
+    "nbsp": 160,
+    "ndash": 8211,
+    "ne": 8800,
+    "ni": 8715,
+    "not": 172,
+    "notin": 8713,
+    "nsub": 8836,
+    "ntilde": 241,
+    "nu": 957,
+    "oacute": 243,
+    "ocirc": 244,
+    "oelig": 339,
+    "ograve": 242,
+    "oline": 8254,
+    "omega": 969,
+    "omicron": 959,
+    "oplus": 8853,
+    "or": 8744,
+    "ordf": 170,
+    "ordm": 186,
+    "oslash": 248,
+    "otilde": 245,
+    "otimes": 8855,
+    "ouml": 246,
+    "para": 182,
+    "part": 8706,
+    "permil": 8240,
+    "perp": 8869,
+    "phi": 966,
+    "pi": 960,
+    "piv": 982,
+    "plusmn": 177,
+    "pound": 163,
+    "prime": 8242,
+    "prod": 8719,
+    "prop": 8733,
+    "psi": 968,
+    "quot": 34,
+    "rArr": 8658,
+    "radic": 8730,
+    "rang": 9002,
+    "raquo": 187,
+    "rarr": 8594,
+    "rceil": 8969,
+    "rdquo": 8221,
+    "real": 8476,
+    "reg": 174,
+    "rfloor": 8971,
+    "rho": 961,
+    "rlm": 8207,
+    "rsaquo": 8250,
+    "rsquo": 8217,
+    "sbquo": 8218,
+    "scaron": 353,
+    "sdot": 8901,
+    "sect": 167,
+    "shy": 173,
+    "sigma": 963,
+    "sigmaf": 962,
+    "sim": 8764,
+    "spades": 9824,
+    "sub": 8834,
+    "sube": 8838,
+    "sum": 8721,
+    "sup": 8835,
+    "sup1": 185,
+    "sup2": 178,
+    "sup3": 179,
+    "supe": 8839,
+    "szlig": 223,
+    "tau": 964,
+    "there4": 8756,
+    "theta": 952,
+    "thetasym": 977,
+    "thinsp": 8201,
+    "thorn": 254,
+    "tilde": 732,
+    "times": 215,
+    "trade": 8482,
+    "uArr": 8657,
+    "uacute": 250,
+    "uarr": 8593,
+    "ucirc": 251,
+    "ugrave": 249,
+    "uml": 168,
+    "upsih": 978,
+    "upsilon": 965,
+    "uuml": 252,
+    "weierp": 8472,
+    "xi": 958,
+    "yacute": 253,
+    "yen": 165,
+    "yuml": 255,
+    "zeta": 950,
+    "zwj": 8205,
+    "zwnj": 8204,
+}
diff --git a/SublimeText/markupsafe/all/markupsafe/_native.py b/SublimeText/markupsafe/all/markupsafe/_native.py
new file mode 100644
index 00000000..cd08752c
--- /dev/null
+++ b/SublimeText/markupsafe/all/markupsafe/_native.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+"""
+markupsafe._native
+~~~~~~~~~~~~~~~~~~
+
+Native Python implementation used when the C module is not compiled.
+
+:copyright: 2010 Pallets
+:license: BSD-3-Clause
+"""
+from . import Markup
+from ._compat import text_type
+
+
+def escape(s):
+    """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
+    the string with HTML-safe sequences. Use this if you need to display
+    text that might contain such characters in HTML.
+
+    If the object has an ``__html__`` method, it is called and the
+    return value is assumed to already be safe for HTML.
+
+    :param s: An object to be converted to a string and escaped.
+    :return: A :class:`Markup` string with the escaped text.
+    """
+    if hasattr(s, "__html__"):
+        return Markup(s.__html__())
+    return Markup(
+        text_type(s)
+        .replace("&", "&")
+        .replace(">", ">")
+        .replace("<", "<")
+        .replace("'", "'")
+        .replace('"', """)
+    )
+
+
+def escape_silent(s):
+    """Like :func:`escape` but treats ``None`` as the empty string.
+    Useful with optional values, as otherwise you get the string
+    ``'None'`` when the value is ``None``.
+
+    >>> escape(None)
+    Markup('None')
+    >>> escape_silent(None)
+    Markup('')
+    """
+    if s is None:
+        return Markup()
+    return escape(s)
+
+
+def soft_unicode(s):
+    """Convert an object to a string if it isn't already. This preserves
+    a :class:`Markup` string rather than converting it back to a basic
+    string, so it will still be marked as safe and won't be escaped
+    again.
+
+    >>> value = escape('<User 1>')
+    >>> value
+    Markup('&lt;User 1&gt;')
+    >>> escape(str(value))
+    Markup('&amp;lt;User 1&amp;gt;')
+    >>> escape(soft_unicode(value))
+    Markup('&lt;User 1&gt;')
+    """
+    if not isinstance(s, text_type):
+        s = text_type(s)
+    return s
diff --git a/SublimeText/markupsafe/dependency-metadata.json b/SublimeText/markupsafe/dependency-metadata.json
new file mode 100644
index 00000000..5f7124ec
--- /dev/null
+++ b/SublimeText/markupsafe/dependency-metadata.json
@@ -0,0 +1 @@
+{"sublime_text": "*", "description": "Python MarkupSafe module", "version": "1.1.1", "platforms": ["*"], "url": "https://github.com/packagecontrol/MarkupSafe/issues"}
\ No newline at end of file
diff --git a/SublimeText/mdpopups/.gitattributes b/SublimeText/mdpopups/.gitattributes
new file mode 100644
index 00000000..98432faa
--- /dev/null
+++ b/SublimeText/mdpopups/.gitattributes
@@ -0,0 +1,2 @@
+docs/ export-ignore
+.github/ export-ignore
diff --git a/SublimeText/mdpopups/.gitignore b/SublimeText/mdpopups/.gitignore
new file mode 100644
index 00000000..e9b60da8
--- /dev/null
+++ b/SublimeText/mdpopups/.gitignore
@@ -0,0 +1,65 @@
+.DS_Store
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# C extensions
+*.so
+
+.DS_Store
+
+# Distribution / packaging
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*,cover
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+site/*
+*.patch
+dictionary.bin
diff --git a/SublimeText/mdpopups/.pyspelling.yml b/SublimeText/mdpopups/.pyspelling.yml
new file mode 100644
index 00000000..eb513f83
--- /dev/null
+++ b/SublimeText/mdpopups/.pyspelling.yml
@@ -0,0 +1,112 @@
+matrix:
+- name: settings
+  sources:
+  - '**/*.sublime-settings'
+  aspell:
+    lang: en
+  dictionary:
+    wordlists:
+    - docs/src/dictionary/en-custom.txt
+    output: build/dictionary/settings.dic
+  pipeline:
+  - pyspelling.filters.cpp:
+      prefix: 'st'
+      group_comments: true
+      line_comments: false
+  - pyspelling.filters.context:
+      context_visible_first: true
+      escapes: \\[\\`~]
+      delimiters:
+      - open: (?P<open>`+)
+        content: .*?
+        close: (?P=open)
+      - open: (?s)^(?P<open>\s*~{3,})
+        content: .*?
+        close: ^(?P=open)$
+  - pyspelling.filters.url:
+
+- name: mkdocs
+  sources:
+  - site/**/*.html
+  aspell:
+    lang: en
+  dictionary:
+    wordlists:
+    - docs/src/dictionary/en-custom.txt
+    output: build/dictionary/mkdocs.dic
+  pipeline:
+  - pyspelling.filters.html:
+      comments: false
+      attributes:
+      - title
+      - alt
+      ignores:
+      - code
+      - pre
+      - a.magiclink-compare
+      - a.magiclink-commit
+      - a.magiclink-issue
+      - span.keys
+      - .MathJax_Preview
+      - .md-nav__link
+      - .md-footer-custom-text
+      - .md-source__repository
+      - .headerlink
+      - .md-icon
+      - .md-social__link
+  - pyspelling.filters.url:
+
+- name: markdown
+  sources:
+  - README.md
+  aspell:
+    lang: en
+  dictionary:
+    wordlists:
+    - docs/src/dictionary/en-custom.txt
+    output: build/dictionary/mkdocs.dic
+  pipeline:
+  - pyspelling.filters.markdown:
+  - pyspelling.filters.html:
+      comments: false
+      attributes:
+      - title
+      - alt
+      ignores:
+      - code
+      - pre
+  - pyspelling.filters.url:
+
+- name: python
+  sources:
+  - "{st3,tests}/**/*.py|!**/png.py|!**/{coloraide,yaml,markdown,pygments,markupsafe,jinja2,pygments,pymdownx}/**"
+  aspell:
+    lang: en
+  dictionary:
+    wordlists:
+    - docs/src/dictionary/en-custom.txt
+    output: build/dictionary/python.dic
+  pipeline:
+  - pyspelling.filters.python:
+      group_comments: True
+  - pyspelling.flow_control.wildcard:
+      allow:
+      - py-comment
+  - pyspelling.filters.context:
+      context_visible_first: true
+      delimiters:
+      # Ignore lint (noqa) and coverage (pragma) as well as shebang (#!)
+      - open: ^(?:(?:noqa|pragma)\b|!)
+        close: $
+      # Ignore Python encoding string -*- encoding stuff -*-
+      - open: ^ *-\*-
+        close: -\*-$
+  - pyspelling.filters.context:
+      context_visible_first: true
+      escapes: \\[\\`~]
+      delimiters:
+        - open: (?P<open>`+)
+          close: (?P=open)
+        - open: (?s)^(?P<open>\s*~{3,})
+          close: ^(?P=open)$
+  - pyspelling.filters.url:
diff --git a/SublimeText/mdpopups/.sublime-dependency b/SublimeText/mdpopups/.sublime-dependency
new file mode 100644
index 00000000..e69de29b
diff --git a/SublimeText/mdpopups/LICENSE.md b/SublimeText/mdpopups/LICENSE.md
new file mode 100644
index 00000000..de6d4226
--- /dev/null
+++ b/SublimeText/mdpopups/LICENSE.md
@@ -0,0 +1,23 @@
+# License
+
+The MIT License (MIT)
+
+Copyright (c) 2015 - 2022 Isaac Muse
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/SublimeText/mdpopups/Preferences.sublime-settings b/SublimeText/mdpopups/Preferences.sublime-settings
new file mode 100644
index 00000000..709bc758
--- /dev/null
+++ b/SublimeText/mdpopups/Preferences.sublime-settings
@@ -0,0 +1,44 @@
+{
+    // Global kill switch to prevent popups (created by MdPopups) from appearing.
+    "mdpopups.disable": false,
+
+    // Controls which information is dumped out to the console.
+    // This is more useful for plugin developers. It works by specifying an error level.
+    // "0" don't print any message
+    // "1" print errors
+    // "2" print errors, warnings
+    // "3" print errors, warnings, debug, information
+    "mdpopups.debug": 0,
+
+    // Control how long a CSS theme file will be in the cache before being refreshed.
+    // Value should be a positive integer greater than 0.
+    // Units are in minutes. Default is 30.
+    "mdpopups.cache_refresh_time": 30,
+
+    // Control how many CSS theme files will be kept in cache at any given time.
+    // Value should be a positive integer greater than or equal to 0.
+    "mdpopups.cache_limit": 10,
+
+    // Controls whether the Pygments or the native Sublime syntax highlighter
+    // is used for code highlighting. This affects code highlighting in Markdown
+    // conversion and when code is directly processed using syntax_highlight.
+    // Valid values are:
+    //   "true": use Sublime Text
+    //   "false": use Pygments
+    "mdpopups.use_sublime_highlighter": true,
+
+    // This setting is for the Sublime Syntax Highlighter and allows
+    // the mapping of personal Sublime syntax languages which are not yet included,
+    // or will not be included, in the official mapping table.
+    // You can either define your own new entry, or use the same language name
+    // of an existing entry to extend the language mapping_alias or syntax languages.
+    // When extending, the user mappings will be cycled through first.
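+    // Purely illustrative sketch (the language name, alias, and syntax path
+    // below are hypothetical): each value is a pair of lists, the first with
+    // extra language aliases and the second with syntax paths given relative
+    // to Packages and without the file extension, e.g.
+    //
+    //   "mdpopups.sublime_user_lang_map": {
+    //     "javascript": [["js"], ["JavaScript/JavaScript"]]
+    //   },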
+    "mdpopups.sublime_user_lang_map": {},
+
+    // Controls whether MdPopups' default styling (contained in `default.css`) will be applied or not.
+    "mdpopups.default_style": true,
+
+    // Relative path of the user defined stylesheet.
+    // Overrides the default CSS and/or CSS of plugins.
+    "mdpopups.user_css": "Packages/User/mdpopups.css"
+}
diff --git a/SublimeText/mdpopups/README.md b/SublimeText/mdpopups/README.md
new file mode 100644
index 00000000..9109784d
--- /dev/null
+++ b/SublimeText/mdpopups/README.md
@@ -0,0 +1,40 @@
+[![Donate via PayPal][donate-image]][donate-link]
+[![Discord][discord-image]][discord-link]
+[![Build][github-ci-image]][github-ci-link]
+![License][license-image]
+# Sublime Markdown Popups
+
+Sublime Markdown Popups (mdpopups) is a library for Sublime Text plugins.  It utilizes the new plugin API found in ST3
+3080+ for generating tooltip popups. It also provides API methods for generating and styling the new phantom elements
+introduced in ST3 3118+.  Mdpopups utilizes Python Markdown with a couple of special extensions to convert Markdown to
+HTML that can be used to create the popups and/or phantoms.  It also provides a number of other helpful API commands to
+aid in creating great tooltips and phantoms.
+
+Mdpopups will use your color scheme to create popups/phantoms that fit your editor's look.
+
+![Screenshot](docs/src/markdown/images/tooltips_test.png)
+
+## Features
+
+- Can take Markdown or HTML and create nice looking popup tooltips and phantoms.
+- Dynamically creates popup and phantom themes from your current Sublime color scheme.
+- Can create syntax highlighted code blocks easily using either Pygments or the built-in Sublime Text syntax highlighter
+  automatically in the Markdown environment or outside via API calls.
+- Can create color preview boxes via API calls.
+- A CSS template environment that allows users to override and tweak the overall look of the tooltip and phantom themes
+  to better fit their preferred look.  Using the template filters, users can generically access color scheme colors and
+  manipulate them.
+- Plugins can extend the current CSS to inject plugin specific class styling.  Extended CSS will be run through the
+  template environment.
+
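+As a rough usage sketch (the command name and Markdown content below are invented for this
+example; `show_popup` and its `md`/`max_width` parameters come from the mdpopups source), a
+plugin can render a popup like this:
+
+```python
+import sublime_plugin
+
+import mdpopups
+
+
+class MdpopupsHelloCommand(sublime_plugin.TextCommand):
+    """Hypothetical example: render Markdown as a color-scheme-aware popup."""
+
+    def run(self, edit):
+        mdpopups.show_popup(
+            self.view,
+            "## Hello\nStyled to match the **current color scheme**.",
+            md=True,        # treat the content as Markdown
+            max_width=640,  # same meaning as in view.show_popup
+        )
+```
+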
+# Documentation
+
+https://facelessuser.github.io/sublime-markdown-popups
+
+[github-ci-image]: https://github.com/facelessuser/sublime-markdown-popups/workflows/build/badge.svg?branch=master&event=push
+[github-ci-link]: https://github.com/facelessuser/sublime-markdown-popups/actions?query=workflow%3Abuild+branch%3Amaster
+[discord-image]: https://img.shields.io/discord/678289859768745989?logo=discord&logoColor=aaaaaa&color=mediumpurple&labelColor=333333
+[discord-link]: https://discord.gg/TWs8Tgr
+[license-image]: https://img.shields.io/badge/license-MIT-blue.svg?labelColor=333333
+[donate-image]: https://img.shields.io/badge/Donate-PayPal-3fabd1?logo=paypal
+[donate-link]: https://www.paypal.me/facelessuser
diff --git a/SublimeText/mdpopups/dependency-metadata.json b/SublimeText/mdpopups/dependency-metadata.json
new file mode 100644
index 00000000..59192419
--- /dev/null
+++ b/SublimeText/mdpopups/dependency-metadata.json
@@ -0,0 +1 @@
+{"sublime_text": ">=3124", "description": "Markdown Popups for Sublime", "version": "4.2.2", "platforms": ["*"], "url": "https://github.com/facelessuser/sublime-markdown-popups/issues"}
\ No newline at end of file
diff --git a/SublimeText/mdpopups/mkdocs.yml b/SublimeText/mdpopups/mkdocs.yml
new file mode 100644
index 00000000..5b24881c
--- /dev/null
+++ b/SublimeText/mdpopups/mkdocs.yml
@@ -0,0 +1,124 @@
+site_name: Sublime Markdown Popups Documentation
+site_url: https://facelessuser.github.io/sublime-markdown-popups
+repo_url: https://github.com/facelessuser/sublime-markdown-popups
+edit_uri: ""
+site_description: Popup tooltips for Sublime generated with Markdown.
+copyright: |
+  Copyright © 2015 - 2022 Isaac Muse
+
+docs_dir: docs/src/markdown
+theme:
+  name: material
+  custom_dir: docs/theme
+  icon:
+    logo: material/book-open-page-variant
+  palette:
+    scheme: dracula
+    primary: deep purple
+    accent: deep purple
+  font:
+    text: Roboto
+    code: Roboto Mono
+  features:
+    - navigation.tabs
+    - navigation.top
+    - navigation.instant
+    - content.tabs.link
+  pymdownx:
+    sponsor: "https://github.com/sponsors/facelessuser"
+
+nav:
+  - Home:
+    - Markdown Popups: index.md
+    - Installation: installation.md
+    - User Settings: settings.md
+    - API: api.md
+    - CSS Styling: styling.md
+  - F.A.Q:
+    - Frequently Asked Questions: faq.md
+  - About:
+    - Contributing & Support: about/contributing.md
+    - Changelog: about/changelog.md
+    - License: about/license.md
+
+markdown_extensions:
+  - markdown.extensions.toc:
+      slugify: !!python/name:pymdownx.slugs.uslugify
+      permalink: ""
+  - markdown.extensions.admonition:
+  - markdown.extensions.smarty:
+      smart_quotes: false
+  - pymdownx.betterem:
+  - markdown.extensions.attr_list:
+  - markdown.extensions.def_list:
+  - markdown.extensions.tables:
+  - markdown.extensions.abbr:
+  - markdown.extensions.footnotes:
+  - markdown.extensions.md_in_html:
+  - pymdownx.superfences:
+      preserve_tabs: true
+      custom_fences:
+        - name: flow
+          class: uml-flowchart
+          format: !!python/name:pymdownx.superfences.fence_code_format
+        - name: sequence
+          class: uml-sequence-diagram
+          format: !!python/name:pymdownx.superfences.fence_code_format
+        - name: math
+          class: arithmatex
+          format: !!python/name:pymdownx.arithmatex.fence_mathjax_format
+  - pymdownx.highlight:
+      extend_pygments_lang:
+        - name: pycon3
+          lang: pycon
+          options:
+            python3: true
+  - pymdownx.inlinehilite:
+      custom_inline:
+        - name: math
+          class: arithmatex
+          format: !!python/name:pymdownx.arithmatex.inline_mathjax_format
+  - pymdownx.magiclink:
+      repo_url_shortener: true
+      repo_url_shorthand: true
+      social_url_shorthand: true
+      user: facelessuser
+      repo: sublime-markdown-popups
+  - pymdownx.tilde:
+  - pymdownx.caret:
+  - pymdownx.smartsymbols:
+  - pymdownx.emoji:
+      emoji_index: !!python/name:materialx.emoji.twemoji
+      emoji_generator: !!python/name:materialx.emoji.to_svg
+  - pymdownx.escapeall:
+      hardbreak: true
+      nbsp: true
+  - pymdownx.tasklist:
+      custom_checkbox: true
+  - pymdownx.progressbar:
+  - pymdownx.striphtml:
+  - pymdownx.snippets:
+      base_path:
+      - docs/src/markdown/.snippets
+      - LICENSE.md
+      auto_append:
+      - refs.md
+  - pymdownx.keys:
+      separator: "\uff0b"
+  - pymdownx.details:
+  - pymdownx.tabbed:
+  - pymdownx.saneheaders:
+
+extra:
+  social:
+    - icon: fontawesome/brands/github
+      link: https://github.com/facelessuser
+    - icon: fontawesome/brands/discord
+      link: https://discord.gg/TWs8Tgr
+
+plugins:
+  - search
+  - git-revision-date-localized
+  - mkdocs_pymdownx_material_extras
+  - minify:
+      minify_html: true
diff --git a/SublimeText/mdpopups/st3/mdpopups/__init__.py b/SublimeText/mdpopups/st3/mdpopups/__init__.py
new file mode 100644
index 00000000..e3945966
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/__init__.py
@@ -0,0 +1,1056 @@
+# -*- coding: utf-8 -*-
+"""
+Markdown popup.
+
+Markdown tooltips and phantoms for SublimeText.
+
+TextMate theme to CSS.
+
+https://manual.macromates.com/en/language_grammars#naming_conventions
+"""
+import sublime
+import sublime_api
+from . import markdown
+from . import jinja2
+import traceback
+import time
+import codecs
+import html
+import html.parser
+import urllib
+import functools
+import base64
+from . import version as ver
+from . import colorbox
+from collections import OrderedDict
+from .st_scheme_template import SchemeTemplate, POPUP, PHANTOM, SHEET
+from .st_clean_css import clean_css
+from .st_pygments_highlight import syntax_hl as pyg_syntax_hl
+from .st_code_highlight import SublimeHighlight
+from .st_mapping import lang_map
+from . import imagetint
+import re
+import os
+from . import frontmatter
+try:
+    import bs4
+except Exception:
+    bs4 = None
+
+HTML_SHEET_SUPPORT = int(sublime.version()) >= 4074
+
+LOCATION = os.path.dirname(os.path.abspath(__file__))
+DEFAULT_CSS_PATH = os.path.join(LOCATION, 'css', 'default.css')
+
+DEFAULT_CSS = 'Packages/mdpopups/mdpopups_css/default.css'
+OLD_DEFAULT_CSS = 'Packages/mdpopups/css/default.css'
+DEFAULT_USER_CSS = 'Packages/User/mdpopups.css'
+IDK = '''
+
+

¯\\_(ツ)_/¯

+

+MdPopups failed to create
+the popup/phantom!

+Check the console to see if
+there are helpful errors.

+''' +HL_SETTING = 'mdpopups.use_sublime_highlighter' +STYLE_SETTING = 'mdpopups.default_style' +RE_BAD_ENTITIES = re.compile(r'(&(?!amp;|lt;|gt;|nbsp;)(?:\w+;|#\d+;))') + +NODEBUG = 0 +ERROR = 1 +WARNING = 2 +INFO = 3 + + +def _log(msg): + """Log.""" + + print('mdpopups: {}'.format(str(msg))) + + +def _debug(msg, level): + """Debug log.""" + + if int(_get_setting('mdpopups.debug', NODEBUG)) >= level: + _log(msg) + + +def _get_setting(name, default=None): + """Get the Sublime setting.""" + + return sublime.load_settings('Preferences.sublime-settings').get(name, default) + + +def _can_show(view, location=-1): + """ + Check if popup can be shown. + + I have seen Sublime can sometimes crash if trying + to do a popup off screen. Normally it should just not show, + but sometimes it can crash. We will check if popup + can/should be attempted. + """ + + can_show = True + sel = view.sel() + if location >= 0: + region = view.visible_region() + if region.begin() > location or region.end() < location: + can_show = False + elif len(sel) >= 1: + region = view.visible_region() + if region.begin() > sel[0].b or region.end() < sel[0].b: + can_show = False + else: + can_show = False + + return can_show + + +############################## +# Theme/Scheme cache management +############################## +_scheme_cache = OrderedDict() +_highlighter_cache = OrderedDict() + + +def _clear_cache(): + """Clear the CSS cache.""" + + global _scheme_cache + global _highlighter_cache + _scheme_cache = OrderedDict() + _highlighter_cache = OrderedDict() + + +def _is_cache_expired(cache_time): + """Check if the cache entry is expired.""" + + delta_time = _get_setting('mdpopups.cache_refresh_time', 30) + if not isinstance(delta_time, int) or delta_time < 0: + delta_time = 30 + return delta_time == 0 or (time.time() - cache_time) >= (delta_time * 60) + + +def _prune_cache(): + """Prune older items in cache (related to when they were inserted).""" + + limit = _get_setting('mdpopups.cache_limit', 10) + if limit is None or not isinstance(limit, int) or limit <= 0: + limit = 10 + while len(_scheme_cache) >= limit: + _scheme_cache.popitem(last=True) + while len(_highlighter_cache) >= limit: + _highlighter_cache.popitem(last=True) + + +def _get_sublime_highlighter(view): + """Get the `SublimeHighlighter` object.""" + + scheme = view.settings().get('color_scheme') + obj = None + if scheme is not None: + if scheme in _highlighter_cache: + obj, t = _highlighter_cache[scheme] + if _is_cache_expired(t): + obj = None + if obj is None: + try: + obj = SublimeHighlight(scheme) + _prune_cache() + _highlighter_cache[scheme] = (obj, time.time()) + except Exception: + _log('Failed to get Sublime highlighter object!') + _debug(traceback.format_exc(), ERROR) + pass + return obj + + +def _get_scheme(scheme): + """Get the scheme object and user CSS.""" + + settings = sublime.load_settings("Preferences.sublime-settings") + obj = None + user_css = '' + default_css = '' + if scheme is not None: + if scheme in _scheme_cache: + obj, user_css, default_css, t = _scheme_cache[scheme] + # Check if cache expired or user changed Pygments setting. 
+ if ( + _is_cache_expired(t) or + obj.use_pygments != (not settings.get(HL_SETTING, True)) or + obj.default_style != settings.get(STYLE_SETTING, True) + ): + obj = None + user_css = '' + default_css = '' + if obj is None: + try: + obj = SchemeTemplate(scheme) + _prune_cache() + user_css = _get_user_css() + default_css = _get_default_css() + _scheme_cache[scheme] = (obj, user_css, default_css, time.time()) + except Exception: + _log('Failed to convert/retrieve scheme to CSS!') + _debug(traceback.format_exc(), ERROR) + return obj, user_css, default_css + + +def _get_default_css(): + """Get default CSS.""" + + css = '' + try: + with codecs.open(DEFAULT_CSS_PATH, encoding='utf-8') as f: + css = clean_css(f.read()) + except Exception: + pass + + return css + + +def _get_user_css(): + """Get user CSS.""" + + css = None + + user_css = _get_setting('mdpopups.user_css', DEFAULT_USER_CSS) + if user_css == OLD_DEFAULT_CSS: + user_css = DEFAULT_CSS + if user_css == DEFAULT_CSS: + css = _get_default_css() + else: + try: + css = clean_css(sublime.load_resource(user_css)) + except Exception: + pass + return css if css else '' + + +############################## +# Markdown parsing +############################## +class _MdWrapper(markdown.Markdown): + """ + Wrapper around Python Markdown's class. + + This allows us to gracefully continue when a module doesn't load. + """ + + Meta = {} + + def __init__(self, *args, **kwargs): + """Call original initialization.""" + + if 'allow_code_wrap' in kwargs: + self.sublime_wrap = kwargs['allow_code_wrap'] + del kwargs['allow_code_wrap'] + if 'language_map' in kwargs: + self.plugin_map = kwargs['language_map'] + del kwargs['language_map'] + if 'sublime_hl' in kwargs: + self.sublime_hl = kwargs['sublime_hl'] + del kwargs['sublime_hl'] + + super(_MdWrapper, self).__init__(*args, **kwargs) + + def registerExtensions(self, extensions, configs): # noqa + """ + Register extensions with this instance of Markdown. + + Keyword arguments: + + * `extensions`: A list of extensions, which can either + be strings or objects. See the docstring on Markdown. + * `configs`: A dictionary mapping module names to configuration options. + + """ + + from .markdown import util + from .markdown.extensions import Extension + + for ext in extensions: + try: + if isinstance(ext, util.string_type): + ext = self.build_extension(ext, configs.get(ext, {})) + if isinstance(ext, Extension): + ext._extendMarkdown(self) + elif ext is not None: + raise TypeError( + 'Extension "{}.{}" must be of type: "markdown.Extension"'.format( + ext.__class__.__module__, ext.__class__.__name__ + ) + ) + except Exception: + # We want to gracefully continue even if an extension fails. 
+ _log('Failed to load markdown module!') + _debug(traceback.format_exc(), ERROR) + + return self + + +def _get_theme(view, css=None, css_type=POPUP, template_vars=None): + """Get the theme.""" + + obj, user_css, default_css = _get_scheme(view.settings().get('color_scheme')) + try: + return obj.apply_template( + view, + default_css + '\n' + + ((clean_css(css) + '\n') if css else '') + + user_css, + css_type, + template_vars + ) if obj is not None else '' + except Exception: + _log('Failed to retrieve scheme CSS!') + _debug(traceback.format_exc(), ERROR) + return '' + + +def _remove_entities(text): + """Remove unsupported HTML entities.""" + + p = html.parser.HTMLParser() + + def repl(m): + """Replace entities except &, <, >, and `nbsp`.""" + return p.unescape(m.group(1)) + + return RE_BAD_ENTITIES.sub(repl, text) + + +def _create_html( + view, content, md=True, css=None, debug=False, css_type=POPUP, + wrapper_class=None, template_vars=None, template_env_options=None +): + """Create HTML from content.""" + + debug = _get_setting('mdpopups.debug', NODEBUG) + + if css is None or not isinstance(css, str): + css = '' + + style = _get_theme(view, css, css_type, template_vars) + + if debug: + _debug('=====CSS=====', INFO) + _debug(style, INFO) + + if md: + content = md2html( + view, content, template_vars=template_vars, + template_env_options=template_env_options + ) + else: + # Strip out frontmatter if found as we don't currently + # do anything with it when content is just HTML. + content = _markup_template(frontmatter.get_frontmatter(content)[1], template_vars, template_env_options) + + if debug: + _debug('=====HTML OUTPUT=====', INFO) + if bs4: + soup = bs4.BeautifulSoup(content, "html.parser") + _debug('\n' + soup.prettify(), INFO) + else: + _debug('\n' + content, INFO) + + if wrapper_class: + wrapper = ('
'.format(wrapper_class)) + '{}
' + else: + wrapper = '
{}
' + + html = "".format(style) + html += _remove_entities(wrapper.format(content)) + return html + + +def _markup_template(markup, variables, options): + """Template for markup.""" + + if variables: + if options is None: + options = {} + env = jinja2.Environment(**options) + return env.from_string(markup).render(plugin=variables) + return markup + + +############################## +# Public functions +############################## +def version(): + """Get the current version.""" + + return ver.version() + + +def md2html( + view, markup, template_vars=None, template_env_options=None, **kwargs +): + """Convert Markdown to HTML.""" + + if _get_setting('mdpopups.use_sublime_highlighter', True): + sublime_hl = (True, _get_sublime_highlighter(view)) + else: + sublime_hl = (False, None) + + fm, markup = frontmatter.get_frontmatter(markup) + + # We always include these + extensions = [ + "mdpopups.mdx.highlight", + "pymdownx.inlinehilite", + "pymdownx.superfences" + ] + + configs = { + "mdpopups.mdx.highlight": { + "guess_lang": False + }, + "pymdownx.inlinehilite": { + "style_plain_text": True + }, + "pymdownx.superfences": { + "custom_fences": fm.get('custom_fences', []) + } + } + + # Check if plugin is overriding extensions + md_exts = fm.get('markdown_extensions', None) + if md_exts is None: + # No extension override, use defaults + extensions.extend( + [ + "markdown.extensions.admonition", + "markdown.extensions.attr_list", + "markdown.extensions.def_list", + "pymdownx.betterem", + "pymdownx.magiclink", + "markdown.extensions.md_in_html", + "markdown.extensions.nl2br" + ] + ) + else: + for ext in md_exts: + if isinstance(ext, (dict, OrderedDict)): + k, v = next(iter(ext.items())) + # We don't allow plugins to overrides the internal color + if not k.startswith('mdpopups.'): + if k == "pymdownx.extrarawhtml": + k = 'markdown.extensions.md_in_html' + _debug( + "Warning: 'pymdownx.extrarawhtml' no longer exists. 'markdown.extensions.md_in_html'" + " will be used instead. Plugins should migrate as mdpopups will not redirect in the " + "future.", + WARNING + ) + extensions.append(k) + if v is not None: + configs[k] = v + elif isinstance(ext, str): + if not ext.startswith('mdpopups.'): + if ext == "pymdownx.extrarawhtml": + ext = 'markdown.extensions.md_in_html' + _debug( + "Warning: 'pymdownx.extrarawhtml' no longer exists. 'markdown.extensions.md_in_html'" + " will be used instead. 
Plugins should migrate as mdpopups will not redirect in the" + " future.", + WARNING + ) + extensions.append(ext) + + return _MdWrapper( + extensions=extensions, + extension_configs=configs, + sublime_hl=sublime_hl, + allow_code_wrap=fm.get('allow_code_wrap', False), + language_map=fm.get('language_map', {}) + ).convert(_markup_template(markup, template_vars, template_env_options)) + + +def color_box( + colors, border="#000000ff", border2=None, height=32, width=32, + border_size=1, check_size=4, max_colors=5, alpha=False, border_map=0xF +): + """Color box.""" + + return colorbox.color_box( + colors, border, border2, height, width, + border_size, check_size, max_colors, alpha, border_map + ) + + +def color_box_raw( + colors, border="#000000ff", border2=None, height=32, width=32, + border_size=1, check_size=4, max_colors=5, alpha=False, border_map=0xF +): + """Color box raw.""" + + return colorbox.color_box_raw( + colors, border, border2, height, width, + border_size, check_size, max_colors, alpha, border_map + ) + + +def tint(img, color, opacity=255, height=None, width=None): + """Tint the image.""" + + if isinstance(img, str): + try: + img = sublime.load_binary_resource(img) + except Exception: + _log('Could not open binary file!') + _debug(traceback.format_exc(), ERROR) + return '' + return imagetint.tint(img, color, opacity, height, width) + + +def tint_raw(img, color, opacity=255): + """Tint the image.""" + + if isinstance(img, str): + try: + img = sublime.load_binary_resource(img) + except Exception: + _log('Could not open binary file!') + _debug(traceback.format_exc(), ERROR) + return '' + return imagetint.tint_raw(img, color, opacity) + + +def get_language_from_view(view): + """Guess current language from view.""" + + lang = None + user_map = sublime.load_settings('Preferences.sublime-settings').get('mdpopups.sublime_user_lang_map', {}) + syntax = os.path.splitext(view.settings().get('syntax').replace('Packages/', '', 1))[0] + keys = set(list(lang_map.keys()) + list(user_map.keys())) + for key in keys: + v1 = lang_map.get(key, (tuple(), tuple()))[1] + v2 = user_map.get(key, (tuple(), tuple()))[1] + if syntax in (tuple(v2) + v1): + lang = key + break + return lang + + +def syntax_highlight(view, src, language=None, inline=False, allow_code_wrap=False, language_map=None): + """Syntax highlighting for code.""" + + try: + if _get_setting('mdpopups.use_sublime_highlighter', True): + highlighter = _get_sublime_highlighter(view) + code = highlighter.syntax_highlight( + src, language, inline=inline, code_wrap=(not inline and allow_code_wrap), plugin_map=language_map + ) + else: + code = pyg_syntax_hl( + src, language, inline=inline, code_wrap=(not inline and allow_code_wrap) + ) + except Exception: + code = src + _log('Failed to highlight code!') + _debug(traceback.format_exc(), ERROR) + + return code + + +def tabs2spaces(text, tab_size=4): + """ + Convert tabs to spaces on tab stops. + + Does not account for char width. 
+ """ + + return text.expandtabs(tab_size) + + +def scope2style(view, scope, selected=False, explicit_background=False): + """Convert the scope to a style.""" + + style = { + 'color': None, + 'background': None, + 'style': '' + } + obj = _get_scheme(view.settings().get('color_scheme'))[0] + style_obj = obj.guess_style(view, scope, selected, explicit_background) + style['color'] = style_obj['foreground'] + style['background'] = style_obj['background'] + font = [] + if style_obj['bold']: + font.append('bold') + if style_obj['italic']: + font.append('italic') + if style_obj['underline']: + font.append('underline') + if style_obj['glow']: + font.append('glow') + style['style'] = ' '.join(font) + + return style + + +def clear_cache(): + """Clear cache.""" + + _clear_cache() + + +def hide_popup(view): + """Hide the popup.""" + + view.hide_popup() + + +def update_popup( + view, content, md=True, css=None, wrapper_class=None, + template_vars=None, template_env_options=None, **kwargs +): + """Update the popup.""" + + disabled = _get_setting('mdpopups.disable', False) + if disabled: + _debug('Popups disabled', WARNING) + return + + try: + html = _create_html( + view, content, md, css, css_type=POPUP, wrapper_class=wrapper_class, + template_vars=template_vars, template_env_options=template_env_options + ) + except Exception: + _log(traceback.format_exc()) + html = IDK + + view.update_popup(html) + + +def show_popup( + view, content, md=True, css=None, + flags=0, location=-1, max_width=320, max_height=240, + on_navigate=None, on_hide=None, wrapper_class=None, + template_vars=None, template_env_options=None, **kwargs +): + """Parse the color scheme if needed and show the styled pop-up.""" + + disabled = _get_setting('mdpopups.disable', False) + if disabled: + _debug('Popups disabled', WARNING) + return + + if not _can_show(view, location): + return + + try: + html = _create_html( + view, content, md, css, css_type=POPUP, wrapper_class=wrapper_class, + template_vars=template_vars, template_env_options=template_env_options + ) + except Exception: + _log(traceback.format_exc()) + html = IDK + + view.show_popup( + html, flags=flags, location=location, max_width=max_width, + max_height=max_height, on_navigate=on_navigate, on_hide=on_hide + ) + + +def is_popup_visible(view): + """Check if popup is visible.""" + + return view.is_popup_visible() + + +def add_phantom( + view, key, region, content, layout, md=True, + css=None, on_navigate=None, wrapper_class=None, + template_vars=None, template_env_options=None, **kwargs +): + """Add a phantom and return phantom id.""" + + disabled = _get_setting('mdpopups.disable', False) + if disabled: + _debug('Phantoms disabled', WARNING) + return + + try: + html = _create_html( + view, content, md, css, css_type=PHANTOM, wrapper_class=wrapper_class, + template_vars=template_vars, template_env_options=template_env_options + ) + except Exception: + _log(traceback.format_exc()) + html = IDK + + return view.add_phantom(key, region, html, layout, on_navigate) + + +def erase_phantoms(view, key): + """Erase phantoms.""" + + view.erase_phantoms(key) + + +def erase_phantom_by_id(view, pid): + """Erase phantom by ID.""" + + view.erase_phantom_by_id(pid) + + +def query_phantom(view, pid): + """Query phantom.""" + + return view.query_phantom(pid) + + +def query_phantoms(view, pids): + """Query phantoms.""" + + return view.query_phantoms(pids) + + +if HTML_SHEET_SUPPORT: + def new_html_sheet( + window, name, contents, md=True, css=None, flags=0, group=-1, + wrapper_class=None, 
template_vars=None, template_env_options=None, **kwargs + ): + """Create new HTML sheet.""" + + view = window.create_output_panel('mdpopups-dummy', unlisted=True) + try: + html = _create_html( + view, contents, md, css, css_type=SHEET, wrapper_class=wrapper_class, + template_vars=template_vars, template_env_options=template_env_options + ) + except Exception: + _log(traceback.format_exc()) + html = IDK + + return window.new_html_sheet(name, html, flags, group) + + def update_html_sheet( + sheet, contents, md=True, css=None, wrapper_class=None, + template_vars=None, template_env_options=None, **kwargs + ): + """Update an HTML sheet.""" + + window = sheet.window() + + # Probably a transient sheet, just get a window + if window is None: + window = sublime.active_window() + + view = window.create_output_panel('mdpopups-dummy', unlisted=True) + + try: + html = _create_html( + view, contents, md, css, css_type=SHEET, wrapper_class=wrapper_class, + template_vars=template_vars, template_env_options=template_env_options + ) + except Exception: + _log(traceback.format_exc()) + html = IDK + + sublime_api.html_sheet_set_contents(sheet.id(), html) + + +class Phantom(sublime.Phantom): + """A phantom object.""" + + def __init__( + self, region, content, layout, md=True, + css=None, on_navigate=None, wrapper_class=None, + template_vars=None, template_env_options=None, **kwargs + ): + """Initialize.""" + + super().__init__(region, content, layout, on_navigate) + self.md = md + self.css = css + self.wrapper_class = wrapper_class + self.template_vars = template_vars + self.template_env_options = template_env_options + + def __eq__(self, rhs): + """Check if phantoms are equal.""" + + # Note that self.id is not considered + return ( + self.region == rhs.region and self.content == rhs.content and + self.layout == rhs.layout and self.on_navigate == rhs.on_navigate and + self.md == rhs.md and self.css == rhs.css and + self.wrapper_class == rhs.wrapper_class and self.template_vars == rhs.template_vars and + self.template_env_options == rhs.template_env_options + ) + + +class PhantomSet(sublime.PhantomSet): + """Object that allows easy updating of phantoms.""" + + def __init__(self, view, key=""): + """Initialize.""" + + super().__init__(view, key) + + def __del__(self): + """Delete phantoms.""" + + for p in self.phantoms: + erase_phantom_by_id(self.view, p.id) + + def update(self, new_phantoms): + """Update the list of phantoms that exist in the text buffer with their current location.""" + + regions = query_phantoms(self.view, [p.id for p in self.phantoms]) + for i in range(len(regions)): + self.phantoms[i].region = regions[i] + + count = 0 + for p in new_phantoms: + if not isinstance(p, Phantom): + # Convert sublime.Phantom to mdpopups.Phantom + p = Phantom( + p.region, p.content, p.layout, + md=False, css=None, on_navigate=p.on_navigate, wrapper_class=None, + template_vars=None, template_env_options=None + ) + new_phantoms[count] = p + try: + # Phantom already exists, copy the id from the current one + idx = self.phantoms.index(p) + p.id = self.phantoms[idx].id + except ValueError: + p.id = add_phantom( + self.view, + self.key, + p.region, + p.content, + p.layout, + p.md, + p.css, + p.on_navigate, + p.wrapper_class, + p.template_vars, + p.template_env_options + ) + count += 1 + + for p in self.phantoms: + # if the region is -1, then it's already been deleted, no need to call erase + if p not in new_phantoms and p.region != sublime.Region(-1): + erase_phantom_by_id(self.view, p.id) + + self.phantoms = 
new_phantoms
+
+
+def format_frontmatter(values):
+    """Format values as frontmatter."""
+
+    return frontmatter.dump_frontmatter(values)
+
+
+RE_TAG_HTML = re.compile(
+    r'''(?xus)
+    (?:
+        (?P<avoid>
+            <\s*(?P<script>script|style)[^>]*>.*?</\s*(?P=script)\s*> |
+            (?:(\r?\n?\s*)<!--[\s\S]*?-->(\s*)(?=\r?\n)|<!--[\s\S]*?-->)
+        )|
+        (?P<open><\s*(?P<tag>img))
+        (?P<attr>(?:\s+[\w\-:]+(?:\s*=\s*(?:"[^"]*"|'[^']*'))?)*)
+        (?P<close>\s*(?:\/?)>)
+    )
+    '''
+)
+
+RE_TAG_LINK_ATTR = re.compile(
+    r'''(?xus)
+    (?P<attr>
+        (?:
+            (?P<name>\s+src\s*=\s*)
+            (?P<path>"[^"]*"|'[^']*')
+        )
+    )
+    '''
+)
+
+
+def _image_parser(text):
+    """Retrieve image source whose attribute `src` URL has scheme 'http' or 'https'."""
+
+    images = {}
+    for m in RE_TAG_HTML.finditer(text):
+        if m.group('avoid'):
+            continue
+        start = m.start('attr')
+        m2 = RE_TAG_LINK_ATTR.search(m.group('attr'))
+        if m2:
+            src = m2.group('path')[1:-1]
+            src = html.parser.HTMLParser().unescape(src)
+            if urllib.parse.urlparse(src).scheme in ("http", "https"):
+                s = start + m2.start('path') + 1
+                e = start + m2.end('path') - 1
+                images.setdefault(src, []).append((s, e))
+    return images
+
+
+class _ImageResolver:
+    """
+    Keeps track of which images are downloaded, and builds the final html after all of them have been downloaded.
+
+    Note that this entire class is a workaround for not having a scatter-gather function and not having a promise type.
+    In an asynchronous world, we would of course use `asyncio.gather`.
+    """
+
+    def __init__(self, minihtml, resolver, done_callback, images_to_resolve):
+        """The constructor."""
+        self.minihtml = minihtml
+        self.done_callback = done_callback
+        self.images_to_resolve = images_to_resolve
+        self.resolved = {}
+        for url in self.images_to_resolve.keys():
+            resolver(url, functools.partial(self.on_image_resolved, url))
+
+    def on_image_resolved(self, url, data, mime, exception):
+        """
+        Called by a resolver when an image has been downloaded.
+
+        The `data` is a bytes object.
+        The `mime` is the mime-type, e.g. image/png.
+        When the resolver function encountered an exception, the exception is passed in via the last
+        argument. So its type is Optional[Exception].
+        """
+        if exception:
+            value = (exception, None)
+        else:
+            value = (base64.b64encode(data).decode("ascii"), mime)
+        self.resolved[url] = value
+        if len(self.resolved) == len(self.images_to_resolve):
+            self.finalize()
+
+    def finalize(self):
+        """
+        Called when all necessary images have been downloaded.
+
+        This method reconstructs the final html to be presented.
+
+        It invokes the `done_callback` from the `resolve_urls` function in the main thread of Sublime Text.
+ """ + + def flattened(): + for url, positions in self.images_to_resolve.items(): + for position in positions: + yield url, position[0], position[1] + + todo = sorted(flattened(), key=lambda t: (t[1], t[2])) + chunks = [self.minihtml[:todo[0][1]]] + for index in range(0, len(todo)): + next_index = index + 1 + if next_index >= len(todo): + next_start = len(self.minihtml) + else: + next_start = todo[next_index][1] + data, mime = self.resolved[todo[index][0]] + current_end = todo[index][2] + if isinstance(data, Exception): + # keep the minihtml unchanged + current_start = todo[index][1] + chunks.append(self.minihtml[current_start:current_end]) + else: + # replace the URL with the base64 data + chunks.append("data:") + chunks.append(mime) + chunks.append(";base64,") + chunks.append(data) + chunks.append(self.minihtml[current_end:next_start]) + finalhtml = "".join(chunks) + sublime.set_timeout(lambda: self.done_callback(finalhtml)) + + +@functools.lru_cache(maxsize=8) +def _retrieve(url): + """ + Actually download the image pointed to by the passed URL. + + The most recently used images (8 at most) are kept in a cache. + """ + import urllib.request + with urllib.request.urlopen(url) as response: + # We provide some basic protection against absurdly large images. + # 32MB is chosen as an arbitrary upper limit. This can be raised if desired. + length = response.headers.get("content-length") + if length is None: + raise ValueError("missing content-length header") + length = int(length) + if length == 0: + raise ValueError("empty payload") + elif length >= 32 * 1024 * 1024: + raise ValueError("refusing to read payloads larger than or equal to 32MB") + mime = response.headers.get("content-type", "image/png").lower() + return response.readall(), mime + + +def blocking_resolver(url, done): + """A simple URL resolver that will block the caller.""" + exception = None + payload = None + mime = None + try: + payload, mime = _retrieve(url) + except Exception as ex: + exception = ex + if exception: + done(None, None, exception) + elif payload and mime: + done(payload, mime, None) + else: + done(None, None, RuntimeError("failed to retrieve image")) + + +def ui_thread_resolver(url, done): + """A URL resolver that runs on the main thread.""" + sublime.set_timeout(lambda: blocking_resolver(url, done)) + + +def worker_thread_resolver(url, done): + """A URL resolver that runs on the worker ("async") thread of Sublime Text.""" + sublime.set_timeout_async(lambda: blocking_resolver(url, done)) + + +def resolve_images(minihtml, resolver, on_done): + """ + Download images from the internet. + + Given minihtml containing `` tags with a `src` attribute that points to an image located on the internet, + download those images and replace the `src` attribute with embedded base64-encoded image data. + + The first argument is minihtml as returned by the `md2html` function. + + The second argument is a callable that shall take two arguments. + + - The first argument is a URL to be downloaded. + - The second argument is a callable that shall take one argument: An object of type `bytes`: the raw image data. + The result of downloading the image. + + The third argument is a callable that shall take one argument: + + - A string that is the final minihtml containing embedded base64 encoded images, ready to be presented to a view. + + This function is non-blocking. + It will invoke the passed-in `done_callback` on the UI thread. 
+    It returns an opaque object that should be kept alive for as long as the passed-in `done_callback` is not yet
+    invoked.
+    """
+    images = _image_parser(minihtml)
+    if images:
+        return _ImageResolver(minihtml, resolver, on_done, images)
+    else:
+        sublime.set_timeout(lambda: on_done(minihtml))
+    return None
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/__init__.py
new file mode 100644
index 00000000..861fa795
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/__init__.py
@@ -0,0 +1,8 @@
+"""ColorAide Library."""
+from .__meta__ import __version_info__, __version__ # noqa: F401
+from .color import Color
+from .color.match import ColorMatch
+from .color.interpolate import Piecewise, Lerp
+from .util import NaN
+
+__all__ = ("Color", "ColorMatch", "NaN", "Piecewise", "Lerp")
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/__meta__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/__meta__.py
new file mode 100644
index 00000000..57cc1da6
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/__meta__.py
@@ -0,0 +1,192 @@
+"""Meta related things."""
+from collections import namedtuple
+import re
+
+RE_VER = re.compile(
+    r'''(?x)
+    (?P<major>\d+)(?:\.(?P<minor>\d+))?(?:\.(?P<micro>\d+))?
+    (?:(?P<type>a|b|rc)(?P<pre>\d+))?
+    (?:\.post(?P<post>\d+))?
+    (?:\.dev(?P<dev>\d+))?
+    '''
+)
+
+REL_MAP = {
+    ".dev": "",
+    ".dev-alpha": "a",
+    ".dev-beta": "b",
+    ".dev-candidate": "rc",
+    "alpha": "a",
+    "beta": "b",
+    "candidate": "rc",
+    "final": ""
+}
+
+DEV_STATUS = {
+    ".dev": "2 - Pre-Alpha",
+    ".dev-alpha": "2 - Pre-Alpha",
+    ".dev-beta": "2 - Pre-Alpha",
+    ".dev-candidate": "2 - Pre-Alpha",
+    "alpha": "3 - Alpha",
+    "beta": "4 - Beta",
+    "candidate": "4 - Beta",
+    "final": "5 - Production/Stable"
+}
+
+PRE_REL_MAP = {"a": 'alpha', "b": 'beta', "rc": 'candidate'}
+
+
+class Version(namedtuple("Version", ["major", "minor", "micro", "release", "pre", "post", "dev"])):
+    """
+    Get the version (PEP 440).
+
+    A biased approach to the PEP 440 semantic version.
+
+    Provides a tuple structure which is sorted for comparisons `v1 > v2` etc.
+      (major, minor, micro, release type, pre-release build, post-release build, development release build)
+    Release types are named in such a way that they are easy to compare.
+    Accessors are provided to check whether the build is a development, pre-release, or post-release build,
+      and to get the development status for setup files.
+
+    How it works (currently):
+
+    - You must specify a release type as either `final`, `alpha`, `beta`, or `candidate`.
+    - To define a development release, you can use either `.dev`, `.dev-alpha`, `.dev-beta`, or `.dev-candidate`.
+      The dot is used to ensure all development specifiers are sorted before `alpha`.
+      You can specify a `dev` number for development builds, but do not have to as implicit development releases
+      are allowed.
+    - You must specify a `pre` value greater than zero if using a prerelease as this project (not PEP 440) does not
+      allow implicit prereleases.
+    - You can optionally set `post` to a value greater than zero to make the build a post release. While post releases
+      are technically allowed in prereleases, it is strongly discouraged, so we are rejecting them. It should be
+      noted that we do not allow `post0` even though PEP 440 does not restrict this. This project specifically
+      does not allow implicit post releases.
+    - It should be noted that we do not support epochs `1!` or local versions `+some-custom.version-1`.
+
+    Acceptable version releases:
+
+    ```
+    Version(1, 0, 0, "final")                    1.0
+    Version(1, 2, 0, "final")                    1.2
+    Version(1, 2, 3, "final")                    1.2.3
+    Version(1, 2, 0, ".dev-alpha", pre=4)        1.2a4
+    Version(1, 2, 0, ".dev-beta", pre=4)         1.2b4
+    Version(1, 2, 0, ".dev-candidate", pre=4)    1.2rc4
+    Version(1, 2, 0, "final", post=1)            1.2.post1
+    Version(1, 2, 3, ".dev")                     1.2.3.dev0
+    Version(1, 2, 3, ".dev", dev=1)              1.2.3.dev1
+    ```
+
+    """
+
+    def __new__(cls, major, minor, micro, release="final", pre=0, post=0, dev=0):
+        """Validate version info."""
+
+        # Ensure all parts are positive integers.
+        for value in (major, minor, micro, pre, post):
+            if not (isinstance(value, int) and value >= 0):
+                raise ValueError("All version parts except 'release' should be integers.")
+
+        if release not in REL_MAP:
+            raise ValueError("'{}' is not a valid release type.".format(release))
+
+        # Ensure valid pre-release (we do not allow implicit pre-releases).
+        if ".dev-candidate" < release < "final":
+            if pre == 0:
+                raise ValueError("Implicit pre-releases not allowed.")
+            elif dev:
+                raise ValueError("Version is not a development release.")
+            elif post:
+                raise ValueError("Post-releases are not allowed with pre-releases.")
+
+        # Ensure valid development or development/pre release
+        elif release < "alpha":
+            if release > ".dev" and pre == 0:
+                raise ValueError("Implicit pre-release not allowed.")
+            elif post:
+                raise ValueError("Post-releases are not allowed with pre-releases.")
+
+        # Ensure a valid normal release
+        else:
+            if pre:
+                raise ValueError("Version is not a pre-release.")
+            elif dev:
+                raise ValueError("Version is not a development release.")
+
+        return super().__new__(cls, major, minor, micro, release, pre, post, dev)
+
+    def _is_pre(self):
+        """Is prerelease."""
+
+        return self.pre > 0
+
+    def _is_dev(self):
+        """Is development."""
+
+        return bool(self.release < "alpha")
+
+    def _is_post(self):
+        """Is post."""
+
+        return self.post > 0
+
+    def _get_dev_status(self):  # pragma: no cover
+        """Get development status string."""
+
+        return DEV_STATUS[self.release]
+
+    def _get_canonical(self):
+        """Get the canonical output string."""
+
+        # Assemble major, minor, micro version and append `pre`, `post`, or `dev` if needed.
+        if self.micro == 0 and self.major != 0:
+            ver = "{}.{}".format(self.major, self.minor)
+        else:
+            ver = "{}.{}.{}".format(self.major, self.minor, self.micro)
+        if self._is_pre():
+            ver += '{}{}'.format(REL_MAP[self.release], self.pre)
+        if self._is_post():
+            ver += ".post{}".format(self.post)
+        if self._is_dev():
+            ver += ".dev{}".format(self.dev)
+
+        return ver
+
+
+def parse_version(ver):
+    """Parse version into a comparable Version tuple."""
+
+    m = RE_VER.match(ver)
+
+    if m is None:
+        raise ValueError("'{}' is not a valid version".format(ver))
+
+    # Handle major, minor, micro
+    major = int(m.group('major'))
+    minor = int(m.group('minor')) if m.group('minor') else 0
+    micro = int(m.group('micro')) if m.group('micro') else 0
+
+    # Handle pre releases
+    if m.group('type'):
+        release = PRE_REL_MAP[m.group('type')]
+        pre = int(m.group('pre'))
+    else:
+        release = "final"
+        pre = 0
+
+    # Handle development releases
+    dev = m.group('dev') if m.group('dev') else 0
+    if m.group('dev'):
+        dev = int(m.group('dev'))
+        release = '.dev-' + release if pre else '.dev'
+    else:
+        dev = 0
+
+    # Handle post
+    post = int(m.group('post')) if m.group('post') else 0
+
+    return Version(major, minor, micro, release, pre, post, dev)
+
+
+__version_info__ = Version(0, 1, 0, "alpha", 24)
+__version__ = __version_info__._get_canonical()
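A quick sanity check of the version helpers above, written against the `Version` and `parse_version` definitions in this hunk (the dotted import path is only a guess at how the vendored `st3` tree ends up on `sys.path`; adjust as needed):

# Round-trip a few version strings through the vendored helpers.
from mdpopups.coloraide.__meta__ import Version, parse_version

assert parse_version("1.2b4") == Version(1, 2, 0, "beta", pre=4)
assert parse_version("1.2.3.dev1") == Version(1, 2, 3, ".dev", dev=1)
assert Version(1, 2, 0, "final", post=1)._get_canonical() == "1.2.post1"
# Because Version is a sorted namedtuple, releases also compare naturally.
assert parse_version("1.2b4") < parse_version("1.2")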
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/__init__.py
new file mode 100644
index 00000000..119743c6
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/__init__.py
@@ -0,0 +1,223 @@
+"""Colors."""
+from collections.abc import Sequence
+from . import distance
+from . import convert
+from . import gamut
+from . import compositing
+from . import interpolate
+from . import contrast
+from . import match
+from .. import util
+from ..spaces.hsv import HSV
+from ..spaces.srgb.css import SRGB
+from ..spaces.srgb_linear import SRGBLinear
+from ..spaces.hsl.css import HSL
+from ..spaces.hwb.css import HWB
+from ..spaces.lab.css import Lab
+from ..spaces.lch.css import Lch
+from ..spaces.lab_d65 import LabD65
+from ..spaces.lch_d65 import LchD65
+from ..spaces.display_p3 import DisplayP3
+from ..spaces.a98_rgb import A98RGB
+from ..spaces.prophoto_rgb import ProPhotoRGB
+from ..spaces.rec2020 import Rec2020
+from ..spaces.xyz import XYZ
+from ..spaces.xyz_d65 import XYZD65
+from ..spaces.oklab import Oklab
+from ..spaces.oklch import Oklch
+from ..spaces.jzazbz import Jzazbz
+from ..spaces.jzczhz import JzCzhz
+from ..spaces.ictcp import ICtCp
+from ..spaces.luv import Luv
+from ..spaces.lchuv import Lchuv
+
+
+SUPPORTED = (
+    HSL, HWB, Lab, Lch, LabD65, LchD65, SRGB, SRGBLinear, HSV,
+    DisplayP3, A98RGB, ProPhotoRGB, Rec2020, XYZ, XYZD65,
+    Oklab, Oklch, Jzazbz, JzCzhz, ICtCp, Luv, Lchuv
+)
+
+
+class Color(
+    convert.Convert,
+    gamut.Gamut,
+    compositing.Compose,
+    interpolate.Interpolate,
+    distance.Distance,
+    contrast.Contrast,
+    match.Match
+):
+    """Color class object which provides access and manipulation of color spaces."""
+
+    CS_MAP = {obj.space(): obj for obj in SUPPORTED}
+
+    PRECISION = util.DEF_PREC
+    FIT = util.DEF_FIT
+    DELTA_E = util.DEF_DELTA_E
+    CHROMATIC_ADAPTATION = 'bradford'
+
+    def __init__(self, color, data=None, alpha=util.DEF_ALPHA, *, filters=None, **kwargs):
+        """Initialize."""
+
+        self._attach(self._parse(color, data, alpha, filters=filters, **kwargs))
+
+    def __eq__(self, other):
+        """Compare equal."""
+
+        return (
+            other.space() == self.space() and
+            util.cmp_coords(other.coords(), self.coords()) and
+            util.cmp_coords(other.alpha, self.alpha)
+        )
+
+    def _parse(self, color, data=None, alpha=util.DEF_ALPHA, filters=None, **kwargs):
+        """Parse the color."""
+
+        obj = None
+        if data is not None:
+            filters = set(filters) if filters is not None else set()
+            for space, space_class in self.CS_MAP.items():
+                s = color.lower()
+                if space == s and (not filters or s in filters):
+                    if len(data) < space_class.NUM_COLOR_CHANNELS:
+                        data = list(data) + [util.NaN] * (space_class.NUM_COLOR_CHANNELS - len(data))
+                    obj = space_class(data[:space_class.NUM_COLOR_CHANNELS], alpha)
+                    return obj
+        elif isinstance(color, Color):
+            if not filters or color.space() in filters:
+                obj = self.CS_MAP[color.space()](color._space)
+        else:
+            m = self._match(color, fullmatch=True, filters=filters)
+            if m is None:
+                raise ValueError("'{}' is not a valid color".format(color))
+            obj = m.color
+        if obj is None:
+            raise ValueError("Could not process the provided color")
+        return obj
+
+    def is_nan(self, name):
+        """Check if channel is NaN."""
+
+        return util.is_nan(self.get(name))
+
+    def _is_this_color(self, obj):
+        """Test if the input is "this" Color, not a subclass."""
+
+        return type(obj) is type(self)
+
+    def _is_color(self, obj):
+        """Test if the input is a Color."""
+
+        return isinstance(obj, Color)
+
+    def _attach(self, space):
+        """Attach the this objects convert space to the color."""
+
+        self._space = space
+
+    def _handle_color_input(self, color, sequence=False):
+        """Handle color input."""
+
+        if isinstance(color, str) or (self._is_color(color) and not self._is_this_color(color)):
+            color = self.new(color)
+        elif sequence and isinstance(color, Sequence):
+            color = [self._handle_color_input(c) for c in color]
+        elif not self._is_color(color):
+            raise TypeError("Unexpected type '{}'".format(type(color)))
+        return color
+
+    def space(self):
+        """The current color space."""
+
+        return self._space.space()
+
+    def coords(self):
+        """Coordinates."""
+
+        return self._space.coords()
+
+    def new(self, color, data=None, alpha=util.DEF_ALPHA, *, filters=None, **kwargs):
+        """
+        Create new color object.
+
+        TODO: maybe allow `currentcolor` here? It would basically clone the current object.
+        """
+
+        return type(self)(color, data, alpha, filters=filters, **kwargs)
+
+    def clone(self):
+        """Clone."""
+
+        return self.new(self.space(), self.coords(), self.alpha)
+
+    def to_string(self, **kwargs):
+        """To string."""
+
+        return self._space.to_string(self, **kwargs)
+
+    def __repr__(self):
+        """Representation."""
+
+        return repr(self._space)
+
+    __str__ = __repr__
+
+    def get(self, name):
+        """Get channel."""
+
+        # Handle space.attribute
+        if '.' in name:
+            parts = name.split('.')
+            if len(parts) != 2:
+                raise ValueError("Could not resolve attribute '{}'".format(name))
+            obj = self.convert(parts[0])
+            return obj.get(parts[1])
+
+        return self._space.get(name)
+
+    def set(self, name, value):  # noqa: A003
+        """Set channel."""
+
+        # Handle space.attribute
+        if '.' in name:
+            parts = name.split('.')
+            if len(parts) != 2:
+                raise ValueError("Could not resolve attribute '{}'".format(name))
+            obj = self.convert(parts[0])
+            obj.set(parts[1], value)
+            return self.update(obj)
+
+        # Handle a function that modifies the value or a direct value
+        if callable(value):
+            self.set(name, value(self.get(name)))
+        else:
+            self._space.set(name, value)
+        return self
+
+    def __getattr__(self, name):
+        """Get attribute."""
+
+        # Don't test `_space` as it is used to get Space channel attributes.
+        if name != "_space":
+            # Get channel names
+            names = set()
+            result = getattr(self, "_space")
+            if result is not None:
+                names = result.CHANNEL_NAMES
+            # If requested attribute is a channel name, return the attribute from the Space instance.
+            if name in names:
+                return getattr(result, name)
+
+    def __setattr__(self, name, value):
+        """Set attribute."""
+
+        try:
+            # See if we need to set the space specific channel attributes.
+            if name in self._space.CHANNEL_NAMES:
+                setattr(self._space, name, value)
+                return
+        except AttributeError:
+            pass
+        # Set all attributes on the Color class.
+        super().__setattr__(name, value)
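For orientation, a minimal sketch of how the `Color` object defined above is driven; channel names such as 'blue' come from the space classes added elsewhere in this patch, so treat them as assumptions here:

c = Color('srgb', [1.0, 0.0, 0.0])   # space name plus channel coords; alpha defaults to opaque
print(c.space())                      # 'srgb'
print(c.coords())                     # [1.0, 0.0, 0.0]
lab = c.convert('lab')                # conversion falls back to the XYZ route when no direct one exists
c.set('blue', lambda b: b + 1)        # set() accepts a plain value or a callable that rewrites the channel
print(c.get('srgb.blue'))             # 1.0; the 'space.channel' form routes through convert()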
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/__init__.py
new file mode 100644
index 00000000..4dc1ec57
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/__init__.py
@@ -0,0 +1,124 @@
+"""
+Compositing and RGB blend modes.
+
+https://www.w3.org/TR/compositing/
+"""
+from collections.abc import Sequence
+from . import porter_duff
+from . import blend_modes
+from ... import util
+from ...spaces import GamutBound
+
+
+def clip_channel(coord, gamut):
+    """Clipping channel."""
+
+    a, b = gamut
+    is_bound = isinstance(gamut, GamutBound)
+
+    # These parameters are unbounded
+    if not is_bound:  # pragma: no cover
+        # Will not execute unless we have a space that defines some coordinates
+        # as bound and others as not. We do not currently have such spaces.
+        a = None
+        b = None
+
+    # Fit value in bounds.
+    return util.clamp(coord, a, b)
+
+
+def compose(color1, color2, blend, operator, non_seperable):
+    """Blend colors using the specified blend mode."""
+
+    # Get the color coordinates
+    csa = util.no_nan(color1.alpha)
+    cba = util.no_nan(color2.alpha)
+    coords1 = util.no_nan(color1.coords())
+    coords2 = util.no_nan(color2.coords())
+
+    # Setup blend mode.
+    if blend is None:
+        blend = 'normal'
+    if blend is not False:
+        blend = blend.lower()
+        blender = blend_modes.get_blender(blend)
+    else:
+        blender = None
+
+    # Setup compositing
+    if operator is None:
+        operator = 'source-over'
+    if operator is not False:
+        compositor = porter_duff.compositor(operator)(cba, csa)
+        cra = compositor.ao()
+    else:
+        cra = csa
+        compositor = None
+
+    # Perform compositing
+    gamut = color1._space.RANGE
+    coords = []
+    if not non_seperable:
+        # Blend each channel. Afterward, clip and apply alpha compositing.
+        i = 0
+        for cb, cs in zip(coords2, coords1):
+            cr = (1 - cba) * cs + cba * blender(cb, cs) if blender is not None else cs
+            cr = clip_channel(cr, gamut[i])
+            coords.append(compositor.co(cb, cr) if compositor is not None else cr)
+            i += 1
+    else:
+        # Convert to a hue, saturation, luminosity space and apply the requested blending.
+        # Afterwards, clip and apply alpha compositing.
+        i = 0
+        blended = blender(coords2, coords1) if blender is not None else coords1
+        for cb, cr in zip(coords2, blended):
+            cr = (1 - cba) * cr + cba * cr if blender is not None else cr
+            cr = clip_channel(cr, gamut[i])
+            coords.append(compositor.co(cb, cr) if compositor is not None else cr)
+            i += 1
+
+    return color1.update(color1.space(), coords, cra)
+
+
+class Compose:
+    """Handle compositing."""
+
+    def compose(self, backdrop, *, blend=None, operator=None, space=None, out_space=None, in_place=False):
+        """Blend colors using the specified blend mode."""
+
+        backdrop = self._handle_color_input(backdrop, sequence=True)
+
+        # If we are doing non-separable, we are converting to a special space that
+        # can only be done from sRGB, so we have to force sRGB anyway.
+        non_seperable = blend_modes.is_non_seperable(blend)
+        if non_seperable:
+            space = 'srgb'
+        space = 'srgb' if space is None else space.lower()
+        outspace = self.space() if out_space is None else out_space.lower()
+
+        if not isinstance(backdrop, Sequence):
+            backdrop = [backdrop]
+
+        if len(backdrop) == 0:
+            return self.convert(outspace)
+
+        if len(backdrop) > 1:
+            dest = backdrop[-1].convert(space, fit=True)
+            for x in range(len(backdrop) - 2, -1, -1):
+                src = backdrop[x].convert(space, fit=True)
+                dest = compose(src, dest, blend, operator, non_seperable)
+        else:
+            dest = backdrop[0].convert(space, fit=True)
+
+        src = self.convert(space, fit=True)
+        dest = compose(src, dest, blend, operator, non_seperable)
+
+        return self.mutate(dest.convert(outspace)) if in_place else dest.convert(outspace)
+
+    @util.deprecated("'overlay' is deprecated, 'compose' should be used instead.")
+    def overlay(self, backdrop, *, space=None, in_place=False):
+        """Redirect to compose."""
+
+        if space is None:
+            space = self.space()
+        return self.compose(backdrop, space=space, out_space=None, in_place=in_place)
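A small sketch of the `compose` API defined above, assuming the `Color` class from this patch; `blend` defaults to 'normal' and `operator` to 'source-over', as handled in the module-level `compose` helper:

src = Color('srgb', [1, 0, 0], 0.5)                            # half-transparent red
over_white = src.compose(Color('srgb', [1, 1, 1]))             # plain source-over alpha compositing
multiplied = src.compose(Color('srgb', [1, 1, 1]), blend='multiply')
# A backdrop list is composited bottom-up: the last entry is the bottom-most layer.
stacked = src.compose([Color('srgb', [0, 1, 0], 0.5), Color('srgb', [1, 1, 1])])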
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/blend_modes.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/blend_modes.py
new file mode 100644
index 00000000..f155dad3
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/blend_modes.py
@@ -0,0 +1,201 @@
+"""Blend modes."""
+import math
+from operator import itemgetter
+
+SUPPORTED = frozenset(
+    [
+        'normal', 'multiply', 'darken', 'lighten', 'color-burn', 'color-dodge', 'screen',
+        'overlay', 'hard-light', 'exclusion', 'difference', 'soft-light',
+        'hue', 'saturation', 'luminosity', 'color'
+    ]
+)
+
+NON_SEPARABLE = frozenset(['color', 'hue', 'saturation', 'luminosity'])
+
+
+def is_non_seperable(mode):
+    """Check if blend mode is non-separable."""
+
+    return mode in frozenset(['color', 'hue', 'saturation', 'luminosity'])
+
+
+# -----------------------------------------
+# Non-separable blending helper functions
+# -----------------------------------------
+def lum(rgb):
+    """Get luminosity."""
+
+    return 0.299 * rgb[0] + 0.587 * rgb[1] + 0.114 * rgb[2]
+
+
+def clip_color(rgb):
+    """Clip color."""
+
+    l = lum(rgb)
+    n = min(*rgb)
+    x = max(*rgb)
+    if n < 0:
+        rgb = [l + (((c - l) * l) / (l - n)) for c in rgb]
+
+    if x > 1:
+        rgb = [l + (((c - l) * (1 - l)) / (x - l)) for c in rgb]
+
+    return rgb
+
+
+def set_lum(rgb, l):
+    """Set luminosity."""
+
+    d = l - lum(rgb)
+    rgb = [c + d for c in rgb]
+    return clip_color(rgb)
+
+
+def sat(rgb):
+    """Saturation."""
+
+    return max(*rgb) - min(*rgb)
+
+
+def set_sat(rgb, s):
+    """Set saturation."""
+
+    final = [0] * 3
+    indices, rgb = zip(*sorted(enumerate(rgb), key=itemgetter(1)))
+    if rgb[2] > rgb[0]:
+        final[indices[1]] = (((rgb[1] - rgb[0]) * s) / (rgb[2] - rgb[0]))
+        final[indices[2]] = s
+    else:
+        final[indices[1]] = 0
+        final[indices[2]] = 0
+    final[indices[0]] = 0
+    return final
+
+
+# -----------------------------------------
+# Blend modes
+# -----------------------------------------
+def blend_normal(cb, cs):
+    """Blend mode 'normal'."""
+
+    return cs
+
+
+def blend_multiply(cb, cs):
+    """Blend mode 'multiply'."""
+
+    return cb * cs
+
+
+def blend_screen(cb, cs):
+    """Blend mode 'screen'."""
+
+    return cb + cs - (cb * cs)
+
+
+def blend_darken(cb, cs):
+    """Blend mode 'darken'."""
+
+    return min(cb, cs)
+
+
+def blend_lighten(cb, cs):
+    """Blend mode 'lighten'."""
+
+    return max(cb, cs)
+
+
+def blend_color_dodge(cb, cs):
+    """Blend mode 'dodge'."""
+
+    if cb == 0:
+        return 0
+    elif cs == 1:
+        return 1
+    else:
+        return min(1, cb / (1 - cs))
+
+
+def blend_color_burn(cb, cs):
+    """Blend mode 'burn'."""
+
+    if cb == 1:
+        return 1
+    elif cs == 0:
+        return 0
+    else:
+        return 1 - min(1, (1 - cb) / cs)
+
+
+def blend_overlay(cb, cs):
+    """Blend mode 'overlay'."""
+
+    if cb >= 0.5:
+        return blend_screen(cb, 2 * cs - 1)
+    else:
+        return blend_multiply(cb, cs * 2)
+
+
+def blend_difference(cb, cs):
+    """Blend mode 'difference'."""
+
+    return abs(cb - cs)
+
+
+def blend_exclusion(cb, cs):
+    """Blend mode 'exclusion'."""
+
+    return cb + cs - 2 * cb * cs
+
+
+def blend_hard_light(cb, cs):
+    """Blend mode 'hard-light'."""
+
+    if cs <= 0.5:
+        return blend_multiply(cb, cs * 2)
+    else:
+        return blend_screen(cb, 2 * cs - 1)
+
+
+def blend_soft_light(cb, cs):
+    """Blend mode 'soft-light'."""
+
+    if cs <= 0.5:
+        return cb - (1 - 2 * cs) * cb * (1 - cb)
+    else:
+        if cb <= 0.25:
+            d = ((16 * cb - 12) * cb + 4) * cb
+        else:
+            d = math.sqrt(cb)
+        return cb + (2 * cs - 1) * (d - cb)
+
+
+def blend_hue(cb, cs):
+    """Blend mode 'hue'."""
+
+    return set_lum(set_sat(cs, sat(cb)), lum(cb))
+
+
+def blend_saturation(cb, cs):
+    """Blend mode 'saturation'."""
+
+    return set_lum(set_sat(cb, sat(cs)), lum(cb))
+
+
+def blend_luminosity(cb, cs):
+    """Blend mode 'luminosity'."""
+    return set_lum(cb, lum(cs))
+
+
+def blend_color(cb, cs):
+    """Blend mode 'color'."""
+
+    return set_lum(cs, lum(cb))
+
+
+def get_blender(blend):
+    """Get desired blend mode."""
+
+    if blend not in SUPPORTED:
+        raise ValueError("'{}' is not a recognized blend mode".format(blend))
+    return globals()['blend_{}'.format(blend.replace('-', '_'))]
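A couple of worked values for the separable blend functions above; each operates on a single channel pair in [0, 1], while the non-separable modes ('hue', 'saturation', 'luminosity', 'color') take whole RGB triplets:

assert get_blender('multiply')(0.5, 0.5) == 0.25   # cb * cs
assert get_blender('screen')(0.5, 0.5) == 0.75     # cb + cs - cb * cs
assert get_blender('lighten')(0.2, 0.6) == 0.6     # max(cb, cs)
assert is_non_seperable('hue') and not is_non_seperable('multiply')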
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/porter_duff.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/porter_duff.py
new file mode 100644
index 00000000..83ff397f
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/compositing/porter_duff.py
@@ -0,0 +1,242 @@
+"""Porter Duff compositing."""
+from abc import ABCMeta, abstractmethod
+
+
+class PorterDuff(metaclass=ABCMeta):
+    """Porter Duff compositing."""
+
+    def __init__(self, cba, csa):
+        """Initialize."""
+
+        self.cba = cba
+        self.csa = csa
+
+    @abstractmethod
+    def _fa(self):  # pragma: no cover
+        """Calculate `Fa`."""
+
+        raise NotImplementedError('fa is not implemented')
+
+    @abstractmethod
+    def _fb(self):  # pragma: no cover
+        """Calculate `Fb`."""
+
+        raise NotImplementedError('fb is not implemented')
+
+    def co(self, cb, cs):
+        """Calculate premultiplied coordinate."""
+
+        return self.csa * self._fa() * cs + self.cba * self._fb() * cb
+
+    def ao(self):
+        """Calculate output alpha."""
+
+        return self.csa * self._fa() + self.cba * self._fb()
+
+
+class Clear(PorterDuff):
+    """Clear."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 0
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 0
+
+
+class Copy(PorterDuff):
+    """Copy."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 0
+
+
+class Destination(PorterDuff):
+    """Destination."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 0
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1
+
+
+class SourceOver(PorterDuff):
+    """Source over."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1 - self.csa
+
+
+class DestinationOver(PorterDuff):
+    """Destination over."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1 - self.cba
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1
+
+
+class SourceIn(PorterDuff):
+    """Source in."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return self.cba
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 0
+
+
+class DestinationeIn(PorterDuff):
+    """Destination in."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 0
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return self.csa
+
+
+class SourceOut(PorterDuff):
+    """Source out."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1 - self.cba
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 0
+
+
+class DestinationOut(PorterDuff):
+    """Destination out."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 0
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1 - self.csa
+
+
+class SourceAtop(PorterDuff):
+    """Source atop."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return self.cba
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1 - self.csa
+
+
+class DestinationAtop(PorterDuff):
+    """Destination atop."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1 - self.cba
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return self.csa
+
+
+class XOR(PorterDuff):
+    """XOR."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1 - self.cba
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1 - self.csa
+
+
+class Lighter(PorterDuff):
+    """Lighter."""
+
+    def _fa(self):
+        """Calculate `Fa`."""
+
+        return 1
+
+    def _fb(self):
+        """Calculate `Fb`."""
+
+        return 1
+
+
+SUPPORTED = {
+    'clear': Clear,
+    'copy': Copy,
+    'destination': Destination,
+    'source-over': SourceOver,
+    'destination-over': DestinationOver,
+    'source-in': SourceIn,
+    'destination-in': DestinationeIn,
+    'source-out': SourceOut,
+    'destination-out': DestinationOut,
+    'source-atop': SourceAtop,
+    'destination-atop': DestinationAtop,
+    'xor': XOR,
+    'lighter': Lighter
+}
+
+
+def compositor(name):
+    """Get the requested compositor."""
+
+    name = name.lower()
+    if name not in SUPPORTED:
+        raise ValueError("'{}' compositing is not supported".format(name))
+    return SUPPORTED[name]
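A worked 'source-over' example for the Porter-Duff classes above, with an opaque backdrop (cba = 1.0) and a half-transparent source (csa = 0.5):

so = compositor('source-over')(1.0, 0.5)   # arguments are (cba, csa)
# Fa = 1 and Fb = 1 - csa, so the output alpha is 0.5 * 1 + 1.0 * 0.5 = 1.0
assert so.ao() == 1.0
# co() returns the premultiplied channel: 0.5 * 1 * cs + 1.0 * 0.5 * cb
assert so.co(0.0, 1.0) == 0.5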
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/contrast.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/contrast.py
new file mode 100644
index 00000000..23de783c
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/contrast.py
@@ -0,0 +1,18 @@
+"""Color contrast."""
+
+
+class Contrast:
+    """Contrast."""
+
+    def luminance(self):
+        """Get color's luminance."""
+
+        return self.convert("xyz-d65").y
+
+    def contrast(self, color):
+        """Compare the contrast ratio of this color and the provided color."""
+
+        color = self._handle_color_input(color)
+        lum1 = self.luminance()
+        lum2 = color.luminance()
+        return (lum1 + 0.05) / (lum2 + 0.05) if (lum1 > lum2) else (lum2 + 0.05) / (lum1 + 0.05)
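The ratio above is the familiar WCAG-style (Lmax + 0.05) / (Lmin + 0.05); a quick sketch, assuming the `Color` class and the sRGB/XYZ-D65 spaces from this patch:

white = Color('srgb', [1, 1, 1])
black = Color('srgb', [0, 0, 0])
print(white.contrast(black))   # ~21.0, since white's Y is ~1 and black's is 0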
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/convert/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/convert/__init__.py
new file mode 100644
index 00000000..8d377bbf
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/convert/__init__.py
@@ -0,0 +1,68 @@
+"""Convert utilities."""
+from ... import util
+from . import cat
+
+
+class Convert:
+    """Conversion methods."""
+
+    def chromatic_adaptation(self, w1, w2, xyz):
+        """Apply chromatic adaption to XYZ coordinates."""
+
+        method = self.CHROMATIC_ADAPTATION
+        return cat.chromatic_adaptation(w1, w2, xyz, method=method)
+
+    def convert(self, space, *, fit=False, in_place=False):
+        """Convert to color space."""
+
+        space = space.lower()
+
+        if fit:
+            method = None if not isinstance(fit, str) else fit
+            if not self.in_gamut(space, tolerance=0.0):
+                converted = self.convert(space, in_place=in_place)
+                return converted.fit(space, method=method, in_place=True)
+
+        convert_to = '_to_{}'.format(space)
+        convert_from = '_from_{}'.format(self.space())
+
+        obj = self.CS_MAP.get(space)
+        if obj is None:
+            raise ValueError("'{}' is not a valid color space".format(space))
+
+        # See if there is a direct conversion route
+        func = None
+        coords = self.coords()
+        if hasattr(self._space, convert_to):
+            func = getattr(self._space, convert_to)
+            coords = func(self, coords)
+        elif hasattr(obj, convert_from):
+            func = getattr(obj, convert_from)
+            coords = func(self, coords)
+
+        # See if there is an XYZ route
+        if func is None and self.space() != space:
+            func = getattr(self._space, '_to_xyz')
+            coords = func(self, coords)
+
+            if space != 'xyz':
+                func = getattr(obj, '_from_xyz')
+                coords = func(self, coords)
+
+        return self.mutate(space, coords, self.alpha) if in_place else self.new(space, coords, self.alpha)
+
+    def mutate(self, color, data=None, alpha=util.DEF_ALPHA, *, filters=None, **kwargs):
+        """Mutate the current color to a new color."""
+
+        c = self.new(color, data=data, alpha=alpha, filters=filters, **kwargs)
+        self._attach(c._space)
+        return self
+
+    def update(self, color, data=None, alpha=util.DEF_ALPHA, *, filters=None, **kwargs):
+        """Update the existing color space with the provided color."""
+
+        c = self.new(color, data=data, alpha=alpha, filters=filters, **kwargs)
+        if c.space() != self.space():
+            c.convert(self.space(), in_place=True)
+        self._attach(c._space)
+        return self
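The difference between `update` and `mutate` above is easy to miss, so a short sketch (space names and channel scales are those of the vendored space classes, which live elsewhere in this patch):

c = Color('srgb', [1, 0, 0])
c.update(Color('hsl', [240, 100, 50]))   # keeps 'srgb', converts the incoming color into it
assert c.space() == 'srgb'
c.mutate(Color('hsl', [240, 100, 50]))   # switches this object to the incoming color's space
assert c.space() == 'hsl'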
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/convert/cat.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/convert/cat.py
new file mode 100644
index 00000000..5a5cfd68
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/convert/cat.py
@@ -0,0 +1,115 @@
+"""Chromatic adaptation transforms."""
+from ... import util
+from ... spaces import WHITES
+from functools import lru_cache
+
+# Conversion matrices
+CATS = {
+    "bradford": [
+        # http://brucelindbloom.com/Eqn_ChromAdapt.html
+        # https://hrcak.srce.hr/file/95370
+        [0.8951000, 0.2664000, -0.1614000],
+        [-0.7502000, 1.7135000, 0.0367000],
+        [0.0389000, -0.0685000, 1.0296000]
+    ],
+    "von-kries": [
+        # http://brucelindbloom.com/Eqn_ChromAdapt.html
+        # https://hrcak.srce.hr/file/95370
+        [0.4002400, 0.7076000, -0.0808100],
+        [-0.2263000, 1.1653200, 0.0457000],
+        [0.0000000, 0.0000000, 0.9182200]
+    ],
+    "xyz-scaling": [
+        # http://brucelindbloom.com/Eqn_ChromAdapt.html
+        # https://hrcak.srce.hr/file/95370
+        [1, 0, 0],
+        [0, 1, 0],
+        [0, 0, 1]
+    ],
+    "cat02": [
+        # https://en.wikipedia.org/wiki/CIECAM02#CAT02
+        [0.7328000, 0.4296000, -0.1624000],
+        [-0.7036000, 1.6975000, 0.0061000],
+        [0.0030000, 0.0136000, 0.9834000]
+    ],
+    "cmccat97": [
+        # https://hrcak.srce.hr/file/95370
+        [0.8951000, -0.7502000, 0.0389000],
+        [0.2664000, 1.7135000, 0.0685000],
+        [-0.1614000, 0.0367000, 1.0296000],
+    ],
+    "sharp": [
+        # https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.14.918&rep=rep1&type=pdf
+        [1.2694000, -0.0988000, -0.1706000],
+        [-0.8364000, 1.8006000, 0.0357000],
+        [0.0297000, -0.0315000, 1.0018000]
+    ],
+    'cmccat2000': [
+        # https://hrcak.srce.hr/file/95370
+        # https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.14.918&rep=rep1&type=pdf
+        [0.7982000, 0.3389000, -0.1371000],
+        [-0.5918000, 1.5512000, 0.0406000],
+        [0.0008000, 0.0239000, 0.9753000]
+    ]
+}
+
+
+@lru_cache(maxsize=20)
+def calc_adaptation_matrices(w1, w2, method='bradford'):
+    """
+    Get the von Kries based adaptation matrix based on the method and illuminants.
+
+    Since these calculated matrices are cached, this greatly reduces the
+    performance hit, as the initial matrices only have to be calculated
+    once for a given pair of white points and CAT.
+
+    Granted, we are currently capped at 20 in the cache, but the average user
+    isn't going to be swapping between over 20 methods and white points in a
+    short period of time. We could always increase the cache if necessary.
+    """
+
+    try:
+        m = CATS[method]
+    except KeyError:  # pragma: no cover
+        raise ValueError('Unknown chromatic adaptation method encountered: {}'.format(method))
+    mi = util.inv(m)
+
+    try:
+        first = util.dot(m, WHITES[w1])
+    except KeyError:  # pragma: no cover
+        raise ValueError('Unknown white point encountered: {}'.format(w1))
+
+    try:
+        second = util.dot(m, WHITES[w2])
+    except KeyError:  # pragma: no cover
+        raise ValueError('Unknown white point encountered: {}'.format(w2))
+
+    m2 = util.diag(util.divide(first, second))
+    adapt = util.dot(mi, util.dot(m2, m))
+
+    return adapt, util.inv(adapt)
+
+
+def get_adaptation_matrix(w1, w2, method):
+    """
+    Get the appropriate matrix for chromatic adaptation.
+
+    If the required matrices are not in the cache, they will be calculated.
+    Since white points are sorted by name, regardless of the requested
+    conversion direction, the same matrices will be retrieved from the cache.
+    """
+
+    a, b = sorted([w1, w2])
+    m, mi = calc_adaptation_matrices(a, b, method)
+    return mi if a != w2 else m
+
+
+def chromatic_adaptation(w1, w2, xyz, method='bradford'):
+    """Chromatic adaptation."""
+
+    if w1 == w2:
+        # No adaptation is needed if the white points are identical.
+        return xyz
+    else:
+        # Get the appropriate chromatic adaptation matrix and apply.
+        return util.dot(get_adaptation_matrix(w1, w2, method), xyz)
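A sketch of the chromatic adaptation entry point above; the 'D65'/'D50' keys are assumed to match the `WHITES` table imported at the top of this file (defined elsewhere in the patch):

xyz = [0.2, 0.3, 0.4]
assert chromatic_adaptation('D65', 'D65', xyz) == xyz   # identical white points short-circuit
adapted = chromatic_adaptation('D65', 'D50', xyz, method='bradford')
restored = chromatic_adaptation('D50', 'D65', adapted, method='bradford')
# Both directions reuse the same cached matrix pair, so the round trip is numerically the identity.
assert all(abs(a - b) < 1e-9 for a, b in zip(restored, xyz))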
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/__init__.py
new file mode 100644
index 00000000..2b58c905
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/__init__.py
@@ -0,0 +1,33 @@
+"""Distance and Delta E."""
+from . import distance_euclidean
+from . import delta_e_76  # noqa: F401
+from . import delta_e_94  # noqa: F401
+from . import delta_e_cmc  # noqa: F401
+from . import delta_e_2000  # noqa: F401
+from . import delta_e_itp  # noqa: F401
+
+
+class Distance:
+    """Distance."""
+
+    def delta_e(self, color, *, method=None, **kwargs):
+        """Delta E distance."""
+
+        color = self._handle_color_input(color)
+        if method is None:
+            method = self.DELTA_E
+
+        algorithm = method.lower()
+
+        try:
+            de = globals()['delta_e_{}'.format(algorithm.replace('-', '_'))]
+        except KeyError:
+            raise ValueError("'{}' is not currently a supported distancing algorithm.".format(algorithm))
+
+        return de.distance(self, color, **kwargs)
+
+    def distance(self, color, *, space="lab"):
+        """Delta."""
+
+        color = self._handle_color_input(color)
+        return distance_euclidean.distance(self, color, space=space)
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_2000.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_2000.py
new file mode 100644
index 00000000..d4459b84
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_2000.py
@@ -0,0 +1,120 @@
+"""Delta E 2000."""
+import math
+from ... import util
+
+G_CONST = 25 ** 7
+
+
+def distance(color1, color2, kl=1, kc=1, kh=1, **kwargs):
+    """
+    Calculate distance doing a direct translation of the algorithm from the CIE Delta E 2000 paper.
+
+    Primes (L') are denoted with a trailing 'p', and means with a trailing 'm'.
+    Delta has a preceding 'd'. I'm not sure I was completely consistent.
+
+    http://www2.ece.rochester.edu/~gsharma/ciede2000/ciede2000noteCRNA.pdf
+    """
+
+    l1, a1, b1 = util.no_nan(color1.convert("lab").coords())
+    l2, a2, b2 = util.no_nan(color2.convert("lab").coords())
+
+    # Equation (2)
+    c1 = math.sqrt(a1 ** 2 + b1 ** 2)
+    c2 = math.sqrt(a2 ** 2 + b2 ** 2)
+
+    # Equation (3)
+    cm = (c1 + c2) / 2
+
+    # Equation (4)
+    c7 = cm ** 7
+    g = 0.5 * (1 - math.sqrt(c7 / (c7 + G_CONST)))
+
+    # Equation (5)
+    ap1 = (1 + g) * a1
+    ap2 = (1 + g) * a2
+
+    # Equation (6)
+    cp1 = math.sqrt(ap1 ** 2 + b1 ** 2)
+    cp2 = math.sqrt(ap2 ** 2 + b2 ** 2)
+
+    # Equation (7)
+    hp1 = 0 if (ap1 == 0 and b1 == 0) else math.atan2(b1, ap1)
+    hp2 = 0 if (ap2 == 0 and b2 == 0) else math.atan2(b2, ap2)
+    hp1 = math.degrees(hp1 + 2 * math.pi if hp1 < 0.0 else hp1)
+    hp2 = math.degrees(hp2 + 2 * math.pi if hp2 < 0.0 else hp2)
+
+    # Equation (8)
+    dl = l2 - l1
+
+    # Equation (9)
+    dc = cp2 - cp1
+
+    # Equation (10)
+    hdiff = hp2 - hp1
+    if cp1 * cp2 == 0.0:
+        dh = 0.0
+    elif abs(hdiff) <= 180.0:
+        dh = hdiff
+    else:
+        # If not `hdiff > 180.0` and not `abs(hdiff) <= 180.0`
+        # then it must be `hdiff < -180`
+        offset = -360 if hdiff > 180.0 else 360
+        dh = hdiff + offset
+
+    # Equation (11)
+    dh = 2 * math.sqrt(cp2 * cp1) * math.sin(math.radians(dh / 2))
+
+    # Equation (12)
+    lpm = (l1 + l2) / 2
+
+    # Equation (13)
+    cpm = (cp1 + cp2) / 2
+
+    # Equation (14)
+    hsum = hp1 + hp2
+    if cp1 * cp2 == 0:
+        hpm = hsum
+    elif abs(hp1 - hp2) > 180:
+        # if not `hsum < 360`
+        # then it must be `hsum >= 360`
+        offset = 360 if hsum < 360 else -360
+        hpm = (hsum + offset) / 2
+    else:  # `abs(hp1 - hp2) <= 180`
+        hpm = hsum / 2
+
+    # Equation (15)
+    t = (
+        1 -
+        (0.17 * math.cos(math.radians(hpm - 30))) +
+        (0.24 * math.cos(math.radians(2 * hpm))) +
+        (0.32 * math.cos(math.radians((3 * hpm) + 6))) -
+        (0.20 * math.cos(math.radians((4 * hpm) - 63)))
+    )
+
+    # Equation (16)
+    dt = 30 * math.exp(-1 * ((hpm - 275) / 25) ** 2)
+
+    # Equation (17)
+    cpm7 = cpm ** 7
+    rc = 2 * math.sqrt(cpm7 / (cpm7 + G_CONST))
+
+    # Equation (18)
+    l_temp = (lpm - 50) ** 2
+    sl = 1 + ((0.015 * l_temp) / math.sqrt(20 + l_temp))
+
+    # Equation (19)
+    sc = 1 + 0.045 * cpm
+
+    # Equation (20)
+    sh = 1 + 0.015 * cpm * t
+
+    # Equation (21)
+    rt = -1 * math.sin(math.radians(2 * dt)) * rc
+
+    # Equation (22)
+    return math.sqrt(
+        (dl / (kl * sl)) ** 2 +
+        (dc / (kc * sc)) ** 2 +
+        (dh / (kh * sh)) ** 2 +
+        rt * (dc / (kc * sc)) * (dh / (kh * sh))
+    )
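Two properties that follow directly from the equations above make a cheap sanity check: identical Lab inputs zero out every delta term, and distinct colors give a positive distance. The same `method=` switch selects the other vendored formulas ('76', '94', 'cmc', 'itp'). A sketch, assuming the `Color` class from this patch:

red = Color('srgb', [1, 0, 0])
assert red.delta_e(red.clone(), method='2000') == 0.0
assert red.delta_e(Color('srgb', [0, 0, 1]), method='2000') > 0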
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_76.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_76.py
new file mode 100644
index 00000000..a5d450d9
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_76.py
@@ -0,0 +1,16 @@
+"""Delta E 76."""
+
+from . import distance_euclidean
+
+
+def distance(color1, color2, **kwargs):
+    """
+    Delta E 1976 color distance formula.
+
+    http://www.brucelindbloom.com/index.html?Eqn_DeltaE_CIE76.html
+
+    Basically this is Euclidean distance in the Lab space.
+    """
+
+    # Equation (1)
+    return distance_euclidean.distance(color1, color2, space="lab")
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_94.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_94.py
new file mode 100644
index 00000000..92c6695f
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_94.py
@@ -0,0 +1,67 @@
+"""Delta E 94."""
+import math
+from ... import util
+
+
+def distance(color1, color2, kl=1, k1=0.045, k2=0.015):
+    """
+    Delta E 1994 color distance formula.
+
+    http://www.brucelindbloom.com/Eqn_DeltaE_CIE94.html
+    """
+
+    l1, a1, b1 = util.no_nan(color1.convert("lab").coords())
+    l2, a2, b2 = util.no_nan(color2.convert("lab").coords())
+
+    # Equation (5)
+    c1 = math.sqrt(a1 ** 2 + b1 ** 2)
+
+    # Equation (6)
+    c2 = math.sqrt(a2 ** 2 + b2 ** 2)
+
+    # Equation  (2)
+    dl = l1 - l2
+
+    # Equation  (3)
+    dc = c1 - c2
+
+    # Equation (7)
+    da = a1 - a2
+
+    # Equation  (8)
+    db = b1 - b2
+
+    # Equation (4)
+    # We never reference `dh` until the very end, and when we do, we square it
+    # before using it, so we don't need the square root as described in the
+    # algorithm. Instead we can just leave the result as is.
+    dh = da ** 2 + db ** 2 - dc ** 2
+
+    # Equation (9)
+    sl = 1
+
+    # Equation (10)
+    sc = 1 + k1 * c1
+
+    # Equation (11)
+    sh = 1 + k2 * c1
+
+    # Equation (12)
+    # Provided by `kl`
+
+    # Equation (13)
+    kc = 1
+
+    # Equation (14)
+    kh = 1
+
+    # Equation (15) and Equation (16)
+    # Provided by `k1` and `k2`
+
+    # Equation (1)
+    return math.sqrt(
+        (dl / (kl * sl)) ** 2 +
+        (dc / (kc * sc)) ** 2 +
+        # Square root just the denominator as `dh` is already squared.
+        dh / ((kh * sh) ** 2)
+    )
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_cmc.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_cmc.py
new file mode 100644
index 00000000..f4b5eb23
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_cmc.py
@@ -0,0 +1,77 @@
+"""Delta E CMC."""
+import math
+from ... import util
+
+
+def distance(color1, color2, l=2, c=1):
+    """
+    Delta E CMC.
+
+    http://www.brucelindbloom.com/index.html?Eqn_DeltaE_CMC.html
+    """
+
+    l1, a1, b1 = util.no_nan(color1.convert("lab").coords())
+    l2, a2, b2 = util.no_nan(color2.convert("lab").coords())
+
+    # Equation (3)
+    c1 = math.sqrt(a1 ** 2 + b1 ** 2)
+
+    # Equation (4)
+    c2 = math.sqrt(a2 ** 2 + b2 ** 2)
+
+    # Equation (2)
+    dc = c1 - c2
+
+    # Equation (6)
+    dl = l1 - l2
+
+    # Equation (7)
+    da = a1 - a2
+
+    # Equation (8)
+    db = b1 - b2
+
+    # Equation (5)
+    # We never reference `dh` until the very end, and when we do, we square it
+    # before using it, so we don't need the square root as described in the
+    # algorithm. Instead we can just leave the result as is.
+    dh = da ** 2 + db ** 2 - dc ** 2
+
+    # Equation (9)
+    if l1 < 16:
+        sl = 0.511
+    else:
+        sl = (0.040975 * l1) / (1 + 0.01765 * l1)
+
+    # Equation (10)
+    sc = ((0.0638 * c1) / (1 + 0.0131 * c1)) + 0.638
+
+    # Equation (14)
+    h = math.degrees(math.atan2(b1, a1))
+
+    # Equation (15)
+    if h >= 0:
+        h1 = h
+    else:
+        h1 = h + 360
+
+    # Equation (12)
+    if 164 <= h1 <= 345:
+        t = 0.56 + abs(0.2 * math.cos(math.radians(h1 + 168)))
+    else:
+        t = 0.36 + abs(0.4 * math.cos(math.radians(h1 + 35)))
+
+    # Equation (13)
+    c1_4 = c1 ** 4
+    f = math.sqrt(c1_4 / (c1_4 + 1900))
+
+    # Equation (11)
+    sh = sc * (f * t + 1 - f)
+
+    # Equation (1)
+    return math.sqrt(
+        (dl / (l * sl)) ** 2 +
+        (dc / (c * sc)) ** 2 +
+        # Square root just the denominator as `dh` is already squared.
+        dh / (sh ** 2)
+    )
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_itp.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_itp.py
new file mode 100644
index 00000000..101912be
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/delta_e_itp.py
@@ -0,0 +1,16 @@
+"""
+Delta E ITP.
+
+https://kb.portrait.com/help/ictcp-color-difference-metric
+"""
+import math
+
+
+def distance(color1, color2, scalar=720, **kwargs):
+    """Delta E ITP color distance formula."""
+
+    i1, t1, p1 = color1.convert('ictcp').coords()
+    i2, t2, p2 = color2.convert('ictcp').coords()
+
+    # Equation (1)
+    return scalar * math.sqrt((i2 - i1) ** 2 + 0.25 * (t2 - t1) ** 2 + (p2 - p1) ** 2)
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/distance_euclidean.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/distance_euclidean.py
new file mode 100644
index 00000000..8250e546
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/distance/distance_euclidean.py
@@ -0,0 +1,16 @@
+"""Euclidean distance."""
+import math
+from ... import util
+
+
+def distance(color1, color2, space="lab", **kwargs):
+    """
+    Euclidean distance.
+
+    https://en.wikipedia.org/wiki/Euclidean_distance
+    """
+
+    coords1 = util.no_nan(color1.convert(space).coords())
+    coords2 = util.no_nan(color2.convert(space).coords())
+
+    return math.sqrt(sum((x - y) ** 2.0 for x, y in zip(coords2, coords1)))
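The helper above is a plain root-sum-of-squares over the requested space's coordinates, so a 3-4-5 triangle in Lab is a handy check (a sketch, assuming the `Color` class from this patch):

a = Color('lab', [50, 0, 0])
b = Color('lab', [53, 4, 0])
assert a.distance(b, space='lab') == 5.0   # sqrt(3**2 + 4**2)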
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/__init__.py
new file mode 100644
index 00000000..96d4336b
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/__init__.py
@@ -0,0 +1,99 @@
+"""Gamut handling."""
+from ... import util
+from ... spaces import Angle, GamutBound
+from . import clip
+from . import lch_chroma
+
+
+def norm_angles(color):
+    """Normalize angles."""
+
+    channels = util.no_nan(color.coords())
+    gamut = color._space.RANGE
+    fit = []
+    for i, value in enumerate(channels):
+        a = gamut[i][0]
+
+        # Wrap the angle
+        if isinstance(a, Angle):
+            fit.append(value % 360.0)
+            continue
+
+        # Fit value in bounds.
+        fit.append(value)
+    return fit
+
+
+class Gamut:
+    """Handle gamut related functions."""
+
+    def fit(self, space=None, *, method=None, in_place=False):
+        """Fit the gamut using the provided method."""
+
+        if space is None:
+            space = self.space()
+
+        if method is None:
+            method = self.FIT
+
+        this = self.clone() if not in_place else self
+
+        # Select appropriate mapping algorithm
+        if method == "clip":
+            func = clip.fit
+        elif method == "lch-chroma":
+            func = lch_chroma.fit
+        else:
+            # Unknown fit method
+            raise ValueError("'{}' gamut mapping is not currently supported".format(method))
+
+        # Convert to desired space
+        c = self.convert(space)
+
+        # If we are perfectly in gamut, don't waste time fitting, just normalize hues.
+        # If out of gamut, apply mapping/clipping/etc.
+        c._space._coords, c._space._alpha = (
+            c._space.null_adjust(norm_angles(c) if c.in_gamut(tolerance=0.0) else func(c), self.alpha)
+        )
+
+        # Adjust "this" color
+        return this.update(c)
+
+    def in_gamut(self, space=None, *, tolerance=util.DEF_FIT_TOLERANCE):
+        """Check if current color is in gamut."""
+
+        space = space.lower() if space is not None else self.space()
+
+        # Check gamut in the provided space
+        if space is not None and space != self.space():
+            c = self.convert(space)
+            return c.in_gamut(tolerance=tolerance)
+
+        # Check the color space specified for gamut checking.
+        # If it proves to be in gamut, we will then test if the current
+        # space is constrained properly.
+        if self._space.GAMUT_CHECK is not None:
+            c = self.convert(self._space.GAMUT_CHECK)
+            if not c.in_gamut(tolerance=tolerance):
+                return False
+
+        # Verify the values are in bound
+        channels = util.no_nan(self.coords())
+        for i, value in enumerate(channels):
+            a, b = self._space.RANGE[i]
+            is_bound = isinstance(self._space.RANGE[i], GamutBound)
+
+            # Angles will wrap, so no sense checking them
+            if isinstance(a, Angle):
+                continue
+
+            # These parameters are unbounded
+            if not is_bound:
+                a = None
+                b = None
+
+            # Check if bounded values are in bounds
+            if (a is not None and value < (a - tolerance)) or (b is not None and value > (b + tolerance)):
+                return False
+
+        return True
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/clip.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/clip.py
new file mode 100644
index 00000000..a1f50994
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/clip.py
@@ -0,0 +1,31 @@
+"""Clip to fit in gamut."""
+from ... import util
+from ... spaces import Angle, GamutBound
+
+
+def fit(color):
+    """Gamut clipping."""
+
+    channels = util.no_nan(color.coords())
+    gamut = color._space.RANGE
+    fit = []
+
+    for i, value in enumerate(channels):
+        a, b = gamut[i]
+        is_bound = isinstance(gamut[i], GamutBound)
+
+        # Wrap the angle. Not technically out of gamut, but we will clean it up.
+        if isinstance(a, Angle) and isinstance(b, Angle):
+            fit.append(value % 360.0)
+            continue
+
+        # These parameters are unbounded
+        if not is_bound:  # pragma: no cover
+            # Will not execute unless we have a space that defines some coordinates
+            # as bound and others as not. We do not currently have such spaces.
+            a = None
+            b = None
+
+        # Fit value in bounds.
+        fit.append(util.clamp(value, a, b))
+    return fit
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/lch_chroma.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/lch_chroma.py
new file mode 100644
index 00000000..3a472177
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/gamut/lch_chroma.py
@@ -0,0 +1,62 @@
+"""Fit by compressing chroma in Lch."""
+
+EPSILON = 0.001
+
+
+def fit(color):
+    """
+    Gamut mapping via chroma Lch.
+
+    Algorithm originally came from https://colorjs.io/docs/gamut-mapping.html.
+    Some things have been optimized and fixed though to better perform as intended.
+
+    The idea is to hold hue and lightness constant and decrease chroma until
+    color comes under gamut.
+
+    We'll use a binary search and, after each stage, clip the color and compare
+    the distance between the clipped color and the current color from the search.
+    If the distance is less than two, we can return the clipped color.
+
+    ---
+    Original Authors: Lea Verou, Chris Lilley
+    License: MIT (As noted in https://github.com/LeaVerou/color.js/blob/master/package.json)
+    """
+
+    space = color.space()
+
+    # If flooring chroma doesn't work, just clip the floored color
+    # because there is no optimal compression.
+    floor = color.clone().set('lch.chroma', 0)
+    if not floor.in_gamut(tolerance=0):
+        return floor.fit(method="clip").coords()
+
+    # If we are already below the JND, just clip as we will gain no
+    # noticeable difference moving forward.
+    clipped = color.fit(method="clip")
+    if color.delta_e(clipped, method="2000") < 2:
+        return clipped.coords()
+
+    # Convert to CIELCH and set our boundaries
+    mapcolor = color.convert("lch")
+    low = 0.0
+    high = mapcolor.chroma
+
+    # Adjust chroma (using binary search).
+    # This helps preserve the other attributes of the color.
+    # Each time, we compare the compressed color to its clipped form
+    # to see how close we are. A delta less than 2 is our target.
+    while (high - low) > EPSILON:
+        delta = mapcolor.delta_e(
+            mapcolor.fit(space, method="clip"),
+            method="2000"
+        )
+
+        if (delta - 2) < EPSILON:
+            low = mapcolor.chroma
+        else:
+            high = mapcolor.chroma
+
+        mapcolor.chroma = (high + low) * 0.5
+
+    # Update and clip off noise
+    return color.update(mapcolor).fit(space, method="clip", in_place=True).coords()
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/interpolate.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/interpolate.py
new file mode 100644
index 00000000..61adb72f
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/interpolate.py
@@ -0,0 +1,523 @@
+"""
+Interpolation methods.
+
+Originally, the base code for `interpolate`, `mix` and `steps` was ported from the
+https://colorjs.io project. Since that time, there has been significant modifications
+that add additional features etc. The base logic though is attributed to the original
+authors.
+
+In general, the logic mimics in many ways the `color-mix` function as outlined in the Level 5
+color draft (Oct 2020), but the initial approach was modeled directly off of the work done in
+color.js.
+---
+Original Authors: Lea Verou, Chris Lilley
+License: MIT (As noted in https://github.com/LeaVerou/color.js/blob/master/package.json)
+"""
+import math
+from abc import ABCMeta, abstractmethod
+from collections.abc import Sequence, Mapping, Callable
+from collections import namedtuple
+from .. import util
+from ..spaces import Cylindrical, Angle
+
+
+class Lerp:
+    """Linear interpolation."""
+
+    def __init__(self, progress):
+        """Initialize."""
+
+        self.progress = progress
+
+    def __call__(self, a, b, t):
+        """Interpolate with period."""
+
+        return a + (b - a) * (t if not isinstance(self.progress, Callable) else self.progress(t))
+
+
+class Piecewise(namedtuple('Piecewise', ['color', 'stop', 'progress', 'hue', 'premultiplied'])):
+    """Piecewise interpolation input."""
+
+    __slots__ = ()
+
+    def __new__(cls, color, stop=None, progress=None, hue=util.DEF_HUE_ADJ, premultiplied=False):
+        """Initialize."""
+
+        return super().__new__(cls, color, stop, progress, hue, premultiplied)
+
+
+class Interpolator(metaclass=ABCMeta):
+    """Interpolator."""
+
+    @abstractmethod
+    def __init__(self):
+        """Initialize."""
+
+    @abstractmethod
+    def __call__(self, p):
+        """Call the interpolator."""
+
+    @abstractmethod
+    def get_delta(self):
+        """Initialize."""
+
+    def steps(self, steps=2, max_steps=1000, max_delta_e=0):
+        """Steps."""
+
+        return color_steps(self, steps, max_steps, max_delta_e)
+
+
+class InterpolateSingle(Interpolator):
+    """Interpolate a single range of two colors."""
+
+    def __init__(self, channels1, channels2, names, create, progress, space, outspace, premultiplied):
+        """Initialize."""
+
+        self.names = names
+        self.channels1 = channels1
+        self.channels2 = channels2
+        self.create = create
+        self.progress = progress
+        self.space = space
+        self.outspace = outspace
+        self.premultiplied = premultiplied
+
+    def get_delta(self):
+        """Get the delta."""
+
+        return self.create(self.space, self.channels1).delta_e(self.create(self.space, self.channels2))
+
+    def __call__(self, p):
+        """Run through the coordinates and run the interpolation on them."""
+
+        channels = []
+        for i, c1 in enumerate(self.channels1):
+            name = self.names[i]
+            c2 = self.channels2[i]
+            if util.is_nan(c1) and util.is_nan(c2):
+                value = 0.0
+            elif util.is_nan(c1):
+                value = c2
+            elif util.is_nan(c2):
+                value = c1
+            else:
+                progress = None
+                if isinstance(self.progress, Mapping):
+                    progress = self.progress.get(name, self.progress.get('all'))
+                else:
+                    progress = self.progress
+                lerp = progress if isinstance(progress, Lerp) else Lerp(progress)
+                value = lerp(c1, c2, p)
+            channels.append(value)
+        color = self.create(self.space, channels[:-1], channels[-1])
+        if self.premultiplied:
+            postdivide(color)
+        return color.convert(self.outspace, in_place=True) if self.outspace != color.space() else color
+
+
+class InterpolatePiecewise(Interpolator):
+    """Interpolate multiple ranges of colors."""
+
+    def __init__(self, stops, interpolators):
+        """Initialize."""
+
+        self.start = stops[0]
+        self.end = stops[len(stops) - 1]
+        self.stops = stops
+        self.interpolators = interpolators
+
+    def get_delta(self):
+        """Get the delta total."""
+
+        return [i.get_delta() for i in self.interpolators]
+
+    def __call__(self, p):
+        """Interpolate."""
+
+        percent = p
+        if percent > self.end:
+            # Beyond range, just interpolate the last colors
+            return self.interpolators[-1](1 + abs(p - self.end) if p > 1 else 1)
+
+        elif percent < self.start:
+            # Beyond range, just interpolate the first colors
+            return self.interpolators[0](0 - abs(self.start - p) if p < 0 else 0)
+
+        else:
+            last = self.start
+            for i, interpolator in enumerate(self.interpolators, 1):
+                stop = self.stops[i]
+                if percent <= stop:
+                    r = stop - last
+                    p2 = (percent - last) / r if r else 1
+                    return interpolator(p2)
+                last = stop
+
+
+def calc_stops(stops, count):
+    """Calculate stops."""
+
+    # Ensure the first stop is set to zero if not explicitly set
+    if 0 not in stops:
+        stops[0] = 0
+
+    last = stops[0] * 100
+    highest = last
+    empty = None
+    final = {}
+
+    # Build up normalized stops
+    for i in range(count):
+        value = stops.get(i)
+        if value is not None:
+            value *= 100
+
+        # Found an empty hole, track the start
+        if value is None and empty is None:
+            empty = i - 1
+            continue
+        elif value is None:
+            continue
+
+        # We can't have a stop decrease in progression
+        if value < last:
+            value = last
+
+        # Track the largest explicit value set
+        if value > highest:
+            highest = value
+
+        # Fill in hole if one exists.
+        # Holes will be evenly spaced between the
+        # current and last stop.
+        if empty is not None:
+            r = i - empty
+            increment = (value - last) / r
+            for j in range(empty + 1, i):
+                last += increment
+                final[j] = last / 100
+            empty = None
+
+        # Set the stop and track it as the last
+        last = value
+        final[i] = last / 100
+
+    # If there is a hole at the end, fill in the hole,
+    # equally spacing the stops from the last to 100%.
+    # If the last is greater than 100%, then all will
+    # be equal to the last.
+    if empty is not None:
+        r = (count - 1) - empty
+        if highest > 100:
+            increment = 0
+        else:
+            increment = (100 - last) / r
+        for j in range(empty + 1, count):
+            last += increment
+            final[j] = last / 100
+
+    return final
+
+
+def postdivide(color):
+    """Premultiply the given transparent color."""
+
+    if color.alpha >= 1.0:
+        return
+
+    channels = color.coords()
+    gamut = color._space.RANGE
+    alpha = color.alpha
+    coords = []
+    for i, value in enumerate(channels):
+        a = gamut[i][0]
+
+        # Leave angle channels untouched
+        if isinstance(a, Angle):
+            coords.append(value)
+            continue
+        coords.append(value / alpha if alpha != 0 else value)
+    color._space._coords = coords
+
+
+def premultiply(color):
+    """Premultiply the given transparent color."""
+
+    if color.alpha >= 1.0:
+        return
+
+    channels = color.coords()
+    gamut = color._space.RANGE
+    alpha = color.alpha
+    coords = []
+    for i, value in enumerate(channels):
+        a = gamut[i][0]
+
+        # Leave angle channels untouched
+        if isinstance(a, Angle):
+            coords.append(value)
+            continue
+        coords.append(value * alpha)
+    color._space._coords = coords
+
+
+def adjust_hues(color1, color2, hue):
+    """Adjust hues."""
+
+    hue = hue.lower()
+    if hue == "specified":
+        return
+
+    name = color1._space.hue_name()
+    c1 = color1.get(name)
+    c2 = color2.get(name)
+
+    c1 = c1 % 360
+    c2 = c2 % 360
+
+    if util.is_nan(c1) or util.is_nan(c2):
+        color1.set(name, c1)
+        color2.set(name, c2)
+        return
+
+    if hue == "shorter":
+        if c2 - c1 > 180:
+            c1 += 360
+        elif c2 - c1 < -180:
+            c2 += 360
+
+    elif hue == "longer":
+        if 0 < (c2 - c1) < 180:
+            c1 += 360
+        elif -180 < (c2 - c1) < 0:
+            c2 += 360
+
+    elif hue == "increasing":
+        if c2 < c1:
+            c2 += 360
+
+    elif hue == "decreasing":
+        if c1 < c2:
+            c1 += 360
+
+    else:
+        raise ValueError("Unknown hue adjuster '{}'".format(hue))
+
+    color1.set(name, c1)
+    color2.set(name, c2)
+
+
+def color_steps(interpolator, steps=2, max_steps=1000, max_delta_e=0):
+    """Color steps."""
+
+    if max_delta_e <= 0:
+        actual_steps = steps
+    else:
+        actual_steps = 0
+        deltas = interpolator.get_delta()
+        if not isinstance(deltas, Sequence):
+            deltas = [deltas]
+        actual_steps = sum([d / max_delta_e for d in deltas])
+        actual_steps = max(steps, math.ceil(actual_steps) + 1)
+
+    if max_steps is not None:
+        actual_steps = min(actual_steps, max_steps)
+
+    ret = []
+    if actual_steps == 1:
+        ret = [{"p": 0.5, "color": interpolator(0.5)}]
+    else:
+        step = 1 / (actual_steps - 1)
+        for i in range(actual_steps):
+            p = i * step
+            ret.append({'p': p, 'color': interpolator(p)})
+
+    # Iterate over all the stops, inserting additional stops in between whenever
+    # any two adjacent colors have a delta greater than what was requested.
+    # We inject between every stop to ensure the midpoint does not shift.
+    if max_delta_e > 0:
+        # Initial check to see if we need to insert more stops
+        m_delta = 0
+        for i, entry in enumerate(ret):
+            if i == 0:
+                continue
+            m_delta = max(m_delta, entry['color'].delta_e(ret[i - 1]['color']))
+
+        while m_delta > max_delta_e:
+            # Inject stops while measuring again to see if it was sufficient
+            m_delta = 0
+            i = 1
+            while i < len(ret) and len(ret) < max_steps:
+                prev = ret[i - 1]
+                cur = ret[i]
+                p = (cur['p'] + prev['p']) / 2
+                color = interpolator(p)
+                m_delta = max(m_delta, color.delta_e(prev['color']), color.delta_e(cur['color']))
+                ret.insert(i, {'p': p, 'color': color})
+                i += 2
+
+    return [i['color'] for i in ret]
+
+
+def color_piecewise_lerp(pw, space, out_space, progress, hue, premultiplied):
+    """Piecewise Interpolation."""
+
+    # Ensure we have something we can interpolate with
+    count = len(pw)
+    if count == 1:
+        pw = [pw[0], pw[0]]
+        count += 1
+
+    # Calculate stops
+    stops = {}
+    for i, x in enumerate(pw, 0):
+        if not isinstance(x, Piecewise):
+            pw[i] = Piecewise(x)
+        elif x.stop is not None:
+            stops[i] = x.stop
+    stops = calc_stops(stops, count)
+
+    # Construct piecewise interpolation object
+    color_map = []
+    current = pw[0].color
+    for i in range(1, count):
+        p = pw[i]
+        color = current._handle_color_input(p.color)
+
+        color_map.append(
+            current.interpolate(
+                color,
+                space=space,
+                out_space=out_space,
+                progress=p.progress if p.progress is not None else progress,
+                hue=p.hue if p.hue is not None else hue,
+                premultiplied=p.premultiplied if p.premultiplied is not None else premultiplied
+            )
+        )
+        current = color
+
+    return InterpolatePiecewise(stops, color_map)
+
+
+def color_lerp(color1, color2, space, out_space, progress, hue, premultiplied):
+    """Color interpolation."""
+
+    # Convert to the color space and ensure the color fits inside
+    color1 = color1.convert(space, fit=True)
+    color2 = color1._handle_color_input(color2).convert(space, fit=True)
+
+    # Adjust hues if we have two valid hues
+    if isinstance(color1._space, Cylindrical):
+        adjust_hues(color1, color2, hue)
+
+    if premultiplied:
+        premultiply(color1)
+        premultiply(color2)
+
+    channels1 = color1.coords()
+    channels2 = color2.coords()
+
+    # Include alpha
+    channels1.append(color1.alpha)
+    channels2.append(color2.alpha)
+
+    return InterpolateSingle(
+        names=color1._space.CHANNEL_NAMES,
+        channels1=channels1,
+        channels2=channels2,
+        create=type(color1),
+        progress=progress,
+        space=space,
+        outspace=out_space,
+        premultiplied=premultiplied
+    )
+
+
+class Interpolate:
+    """Interpolate between colors."""
+
+    def mask(self, channel, *, invert=False, in_place=False):
+        """Mask color channels."""
+
+        this = self if in_place else self.clone()
+        masks = set([channel] if isinstance(channel, str) else channel)
+        for name in self._space.CHANNEL_NAMES:
+            if (not invert and name in masks) or (invert and name not in masks):
+                this.set(name, util.NaN)
+        return this
+
+    def steps(self, color, *, steps=2, max_steps=1000, max_delta_e=0, **interpolate_args):
+        """
+        Discrete steps.
+
+        This is built upon the interpolate function and will return a list of
+        colors. The list will contain at least `steps` colors, or the number of
+        steps derived from the `max_delta_e` parameter, whichever is greater.
+
+        Number of colors can be capped with `max_steps`.
+
+        Default delta E method used is delta E 76.
+        """
+
+        return self.interpolate(color, **interpolate_args).steps(steps, max_steps, max_delta_e)
+
+    def mix(self, color, percent=util.DEF_MIX, *, in_place=False, **interpolate_args):
+        """
+        Mix colors using interpolation.
+
+        This uses the interpolate method to find the center point between the two colors.
+        The basic mixing logic is outlined in the CSS level 5 draft.
+        """
+
+        if not self._is_color(color) and not isinstance(color, (str, Piecewise)):
+            raise TypeError("Unexpected type '{}'".format(type(color)))
+        color = self.interpolate(color, **interpolate_args)(percent)
+        return self.mutate(color) if in_place else color
+
+    def interpolate(
+        self, color, *, space="lab", out_space=None, stop=0, progress=None, hue=util.DEF_HUE_ADJ, premultiplied=False
+    ):
+        """
+        Return an interpolation function.
+
+        This returns an interpolation function that accepts a value (which should
+        be in the range [0..1]) and returns a color based on that value.
+
+        While we use NaNs to mask off channels when doing the interpolation, we do not allow
+        arbitrary specification of NaNs by the user; they must specify channels via `adjust`
+        if they wish to target specific channels for mixing. Null hues become NaNs before
+        mixing occurs.
+        """
+
+        space = space.lower()
+        out_space = self.space() if out_space is None else out_space.lower()
+
+        # A piecewise object was provided, so treat it as such,
+        # or we've changed the stop of the base color, so run it through piecewise.
+        if (
+            isinstance(color, Piecewise) or
+            (stop != 0 and (isinstance(color, str) or self._is_color(color)))
+        ):
+            color = [color]
+
+        if not isinstance(color, str) and isinstance(color, Sequence):
+            # We have a sequence, so use piecewise interpolation
+            return color_piecewise_lerp(
+                [Piecewise(self, stop=stop)] + list(color),
+                space,
+                out_space,
+                progress,
+                hue,
+                premultiplied
+            )
+        else:
+            # Not a sequence, so interpolate directly between the two colors
+            return color_lerp(
+                self,
+                color,
+                space,
+                out_space,
+                progress,
+                hue,
+                premultiplied
+            )
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/color/match.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/match.py
new file mode 100644
index 00000000..590d4d1c
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/color/match.py
@@ -0,0 +1,51 @@
+"""Match input colors."""
+
+
+class ColorMatch:
+    """Color match object."""
+
+    def __init__(self, color, start, end):
+        """Initialize."""
+
+        self.color = color
+        self.start = start
+        self.end = end
+
+    def __str__(self):  # pragma: no cover
+        """String."""
+
+        return "ColorMatch(color={!r}, start={}, end={})".format(self.color, self.start, self.end)
+
+    __repr__ = __str__
+
+
+class Match:
+    """Match support."""
+
+    @classmethod
+    def _match(cls, string, start=0, fullmatch=False, filters=None):
+        """
+        Match a color in a buffer and return a color object.
+
+        This must return the color space, not the Color object.
+        """
+
+        filters = set(filters) if filters is not None else set()
+
+        for space, space_class in cls.CS_MAP.items():
+            if filters and space not in filters:
+                continue
+            value, match_end = space_class.match(string, start, fullmatch)
+            if value is not None:
+                color = space_class(*value)
+                return ColorMatch(color, start, match_end)
+        return None
+
+    @classmethod
+    def match(cls, string, start=0, fullmatch=False, *, filters=None):
+        """Match color."""
+
+        obj = cls._match(string, start, fullmatch, filters=filters)
+        if obj is not None:
+            obj.color = cls(obj.color.space(), obj.color.coords(), obj.color.alpha)
+        return obj
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/__init__.py
new file mode 100644
index 00000000..bebf1568
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/__init__.py
@@ -0,0 +1,270 @@
+"""Color base."""
+from abc import ABCMeta
+from .. import util
+from . import _parse
+
+# Technically this form can handle any number of channels as long as any
+# extra are thrown away. We only support 6 currently. If we ever support
+# colors with more channels, we can bump this.
+RE_DEFAULT_MATCH = r"""(?xi)
+color\(\s*
+(?:({{color_space}})\s+)?
+((?:{percent}|{float})(?:{space}(?:{percent}|{float})){{{{,{{channels:d}}}}}}(?:{slash}(?:{percent}|{float}))?)
+\s*\)
+""".format(
+    **_parse.COLOR_PARTS
+)
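+# For example, the HSL space defined elsewhere in this package serializes as
+# "--hsl", so its generated pattern accepts strings such as
+# ``color(--hsl 120 50% 50% / 0.5)``.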
+
+WHITES = {
+    "A": [1.09850, 1.00000, 0.35585],
+    "B": [0.99072, 1.00000, 0.85223],
+    "C": [0.98074, 1.00000, 1.18232],
+    "D50": [0.96422, 1.00000, 0.82521],
+    "D55": [0.95682, 1.00000, 0.92149],
+    "D65": [0.95047, 1.00000, 1.08883],
+    "D75": [0.94972, 1.00000, 1.22638],
+    "E": [1.00000, 1.00000, 1.00000],
+    "F2": [0.99186, 1.00000, 0.67393],
+    "F7": [0.95041, 1.00000, 1.08747],
+    "F11": [1.00962, 1.00000, 0.64350]
+}
+
+
+class Angle(float):
+    """Angle type."""
+
+
+class Percent(float):
+    """Percent type."""
+
+
+class OptionalPercent(float):
+    """Optional percent type."""
+
+
+class GamutBound(tuple):
+    """Bounded gamut value."""
+
+
+class GamutUnbound(tuple):
+    """Unbounded gamut value."""
+
+
+class Cylindrical:
+    """Cylindrical space."""
+
+    def hue_name(self):
+        """Hue channel name."""
+
+        return "hue"
+
+
+class Space(
+    metaclass=ABCMeta
+):
+    """Base color space object."""
+
+    # Color space name
+    SPACE = ""
+    # Serialized name
+    SERIALIZE = None
+    # Number of channels
+    NUM_COLOR_CHANNELS = 3
+    # Channel names
+    CHANNEL_NAMES = ("alpha",)
+    # For matching the default form of `color(space coords+ / alpha)`.
+    # Classes should define this if they want to use the default match.
+    DEFAULT_MATCH = ""
+    # Match pattern variable for classes to override so we can also
+    # maintain the default and other alternatives.
+    MATCH = ""
+    # Should this color also be checked in a different color space? Only when set to a string (specifying a color space)
+    # will the default gamut checking also check the specified space as well as the current.
+    #
+    # Gamut checking:
+    #   The specified color space will be checked first followed by the original. Assuming the parent color space fits,
+    #   the original should fit as well, but there are some cases when a parent color space that is slightly out of
+    #   gamut, when evaluated with a threshold, may appear to be in gamut enough, but when checking the original color
+    #   space, the values can be greatly out of specification (looking at you HSL).
+    GAMUT_CHECK = None
+    # White point
+    WHITE = "D50"
+
+    def __init__(self, color, alpha=None):
+        """Initialize."""
+
+        self._alpha = util.NaN
+        self._coords = [util.NaN] * self.NUM_COLOR_CHANNELS
+
+        if isinstance(color, Space):
+            for index, channel in enumerate(color.coords()):
+                self.set(self.CHANNEL_NAMES[index], channel)
+            self.alpha = color.alpha
+        elif isinstance(color, (list, tuple)):
+            if len(color) != self.NUM_COLOR_CHANNELS:  # pragma: no cover
+                # Only likely to happen with direct usage internally.
+                raise ValueError(
+                    "A list of channel values should be at a minimum of {}.".format(self.NUM_COLOR_CHANNELS)
+                )
+            for index in range(self.NUM_COLOR_CHANNELS):
+                self.set(self.CHANNEL_NAMES[index], color[index])
+            self.alpha = 1.0 if alpha is None else alpha
+        else:  # pragma: no cover
+            # Only likely to happen with direct usage internally.
+            raise TypeError("Unexpected type '{}' received".format(type(color)))
+
+    def __repr__(self):
+        """Representation."""
+
+        gamut = self.RANGE
+        values = []
+        for i, coord in enumerate(util.no_nan(self.coords())):
+            value = util.fmt_float(coord, util.DEF_PREC)
+            if isinstance(gamut[i][0], Percent):
+                value += '%'
+            values.append(value)
+
+        return 'color({} {} / {})'.format(
+            self._serialize()[0],
+            ' '.join(values),
+            util.fmt_float(util.no_nan(self.alpha), util.DEF_PREC)
+        )
+
+    __str__ = __repr__
+
+    def _handle_input(self, value):
+        """Handle numerical input."""
+
+        if not util.is_number(value):
+            raise TypeError("Value should be a number not type '{}'".format(type(value)))
+        return float(value) if not util.is_nan(value) else value
+
+    def coords(self):
+        """Coordinates."""
+
+        return self._coords[:]
+
+    @classmethod
+    def space(cls):
+        """Get the color space."""
+
+        return cls.SPACE
+
+    @classmethod
+    def _serialize(cls):
+        """Get the serialized name."""
+
+        return (cls.space(),) if cls.SERIALIZE is None else cls.SERIALIZE
+
+    @classmethod
+    def white(cls):
+        """Get the white color for this color space."""
+
+        return WHITES[cls.WHITE]
+
+    @property
+    def alpha(self):
+        """Alpha channel."""
+
+        return self._alpha
+
+    @alpha.setter
+    def alpha(self, value):
+        """Adjust alpha."""
+
+        self._alpha = util.clamp(self._handle_input(value), 0.0, 1.0)
+
+    def set(self, name, value):  # noqa: A003
+        """Set the given channel."""
+
+        if name not in self.CHANNEL_NAMES:
+            raise ValueError("'{}' is an invalid channel name".format(name))
+
+        setattr(self, name, value)
+        return self
+
+    def get(self, name):
+        """Get the given channel's value."""
+
+        if name not in self.CHANNEL_NAMES:
+            raise ValueError("'{}' is an invalid channel name".format(name))
+        return getattr(self, name)
+
+    def to_string(
+        self, parent, *, alpha=None, precision=None, fit=True, **kwargs
+    ):
+        """Convert to CSS 'color' string: `color(space coords+ / alpha)`."""
+
+        if precision is None:
+            precision = parent.PRECISION
+
+        a = util.no_nan(self.alpha)
+        alpha = alpha is not False and (alpha is True or a < 1.0)
+
+        method = None if not isinstance(fit, str) else fit
+        coords = util.no_nan(parent.fit(method=method).coords() if fit else self.coords())
+        gamut = self.RANGE
+        template = "color({} {} / {})" if alpha else "color({} {})"
+
+        values = []
+        for i, coord in enumerate(coords):
+            value = util.fmt_float(coord, precision)
+            if isinstance(gamut[i][0], Percent):
+                value += '%'
+            values.append(value)
+
+        if alpha:
+            return template.format(
+                self._serialize()[0], ' '.join(values), util.fmt_float(a, max(precision, util.DEF_PREC))
+            )
+        else:
+            return template.format(self._serialize()[0], ' '.join(values))
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """Process coordinates and adjust any channels to null/NaN if required."""
+
+        return coords, alpha
+
+    @classmethod
+    def match(cls, string, start=0, fullmatch=True):
+        """Match a color by string."""
+
+        m = cls.DEFAULT_MATCH.match(string, start)
+        if (
+            m is not None and
+            (
+                (m.group(1) and m.group(1).lower() in cls._serialize())
+            ) and (not fullmatch or m.end(0) == len(string))
+        ):
+
+            # Break channels up into a list
+            split = _parse.RE_SLASH_SPLIT.split(m.group(2).strip(), maxsplit=1)
+
+            # Get alpha channel
+            alpha = _parse.norm_alpha_channel(split[-1]) if len(split) > 1 else 1.0
+
+            # Parse color channels
+            channels = []
+            for i, c in enumerate(_parse.RE_CHAN_SPLIT.split(split[0]), 0):
+                if c and i < cls.NUM_COLOR_CHANNELS:
+                    is_percent = isinstance(cls.RANGE[i][0], Percent)
+                    is_optional_percent = isinstance(cls.RANGE[i][0], OptionalPercent)
+                    has_percent = c.endswith('%')
+                    if is_percent and not has_percent:
+                        # We have an invalid percentage channel
+                        return None, None
+                    elif (not is_percent and not is_optional_percent) and has_percent:
+                        # Percents are not allowed for this channel.
+                        return None, None
+                    channels.append(_parse.norm_color_channel(c, not is_percent))
+
+            # Missing channels are filled with zeros
+            if len(channels) < cls.NUM_COLOR_CHANNELS:
+                diff = cls.NUM_COLOR_CHANNELS - len(channels)
+                channels.extend([0.0] * diff)
+
+            # Apply null adjustments (null hues) if applicable
+            return cls.null_adjust(channels, alpha), m.end(0)
+
+        return None, None
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/_parse.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/_parse.py
new file mode 100644
index 00000000..9e0f8556
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/_parse.py
@@ -0,0 +1,101 @@
+"""Parse utilities."""
+import re
+import math
+from .. import util
+
+RGB_CHANNEL_SCALE = 1.0 / 255.0
+HUE_SCALE = 1.0 / 360.0
+SCALE_PERCENT = 1 / 100.0
+
+CONVERT_TURN = 360
+CONVERT_GRAD = 90 / 100
+
+RE_CHAN_SPLIT = re.compile(r'(?:\s*[,/]\s*|\s+)')
+RE_COMMA_SPlIT = re.compile(r'(?:\s*,\s*)')
+RE_SLASH_SPLIT = re.compile(r'(?:\s*/\s*)')
+
+COLOR_PARTS = {
+    "percent": r"[+\-]?(?:(?:[0-9]*\.[0-9]+)|[0-9]+)(?:e[-+]?[0-9]*)?%",
+    "float": r"[+\-]?(?:(?:[0-9]*\.[0-9]+)|[0-9]+)(?:e[-+]?[0-9]*)?",
+    "angle": r"[+\-]?(?:(?:[0-9]*\.[0-9]+)|[0-9]+)(?:e[-+]?[0-9]*)?(deg|rad|turn|grad)?",
+    "space": r"\s+",
+    "comma": r"\s*,\s*",
+    "slash": r"\s*/\s*",
+    "sep": r"(?:\s*,\s*|\s+)",
+    "asep": r"(?:\s*[,/]\s*|\s+)",
+    "hex": r"[a-f0-9]"
+}
+
+
+def norm_float(string):
+    """Normalize a float value."""
+
+    if string.lower().endswith(('e-', 'e+', 'e')):
+        string += '0'
+    return float(string)
+
+
+def norm_hex_channel(string):
+    """Normalize the hex string to a form we can handle."""
+
+    if string.startswith('#'):
+        return int(string[1:], 16) * RGB_CHANNEL_SCALE
+    else:  # pragma: no cover
+        # Should only occur internally if we are doing something wrong.
+        raise ValueError("Unexpected value '{}'".format(string))
+
+
+def norm_percent_channel(value, scale=False):
+    """Normalize percent channel."""
+
+    if value.endswith('%'):
+        value = norm_float(value[:-1])
+        return value / 100.0 if scale else value
+    else:  # pragma: no cover
+        # Should only occur internally if we are doing something wrong.
+        raise ValueError("Unexpected value '{}'".format(value))
+
+
+def norm_color_channel(value, scale=True):
+    """Normalize percent channel."""
+
+    if value.endswith('%'):
+        value = norm_float(value[:-1])
+        return value / 100.0 if scale else value
+    else:
+        return norm_float(value)
+
+
+def norm_rgb_channel(value):
+    """Normalize RGB channel."""
+
+    if value.endswith("%"):
+        return norm_percent_channel(value, True)
+    else:
+        return norm_float(value) * RGB_CHANNEL_SCALE
+
+
+def norm_alpha_channel(value):
+    """Normalize alpha channel."""
+
+    if value.endswith("%"):
+        value = norm_percent_channel(value, True)
+    else:
+        value = norm_float(value)
+    return util.clamp(value, 0.0, 1.0)
+
+
+def norm_angle_channel(angle):
+    """Normalize angle units."""
+
+    if angle.endswith('turn'):
+        value = norm_float(angle[:-4]) * CONVERT_TURN
+    elif angle.endswith('grad'):
+        value = norm_float(angle[:-4]) * CONVERT_GRAD
+    elif angle.endswith('rad'):
+        value = math.degrees(norm_float(angle[:-3]))
+    elif angle.endswith('deg'):
+        value = norm_float(angle[:-3])
+    else:
+        value = norm_float(angle)
+    return value
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/a98_rgb.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/a98_rgb.py
new file mode 100644
index 00000000..e7adefff
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/a98_rgb.py
@@ -0,0 +1,69 @@
+"""A98 RGB color class."""
+from ..spaces import RE_DEFAULT_MATCH
+from .srgb.base import SRGB
+from .xyz import XYZ
+from .. import util
+import re
+
+
+def lin_a98rgb_to_xyz(rgb):
+    """
+    Convert an array of linear-light a98-rgb values to CIE XYZ using D65.
+
+    (so no chromatic adaptation needed afterwards)
+    http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html
+    which has greater numerical precision than section 4.3.5.3 of
+    https://www.adobe.com/digitalimag/pdfs/AdobeRGB1998.pdf
+    """
+
+    m = [
+        [0.5767308871981476, 0.18555395071121408, 0.18818516209063846],
+        [0.2973768637115448, 0.6273490714522, 0.07527406483625539],
+        [0.027034260337413137, 0.0706872193185578, 0.9911085203440293]
+    ]
+
+    return util.dot(m, rgb)
+
+
+def xyz_to_lin_a98rgb(xyz):
+    """Convert XYZ to linear-light a98-rgb."""
+
+    m = [
+        [2.04136897926008, -0.5649463871751959, -0.34469438437784844],
+        [-0.9692660305051867, 1.8760108454466937, 0.04155601753034983],
+        [0.013447387216170269, -0.11838974235412557, 1.0154095719504166]
+    ]
+
+    return util.dot(m, xyz)
+
+
+def lin_a98rgb(rgb):
+    """Convert an array of a98-rgb values in the range 0.0 - 1.0 to linear light (un-corrected) form."""
+
+    return [util.npow(val, 563 / 256) for val in rgb]
+
+
+def gam_a98rgb(rgb):
+    """Convert an array of linear-light a98-rgb  in the range 0.0-1.0 to gamma corrected form."""
+
+    return [util.npow(val, 256 / 563) for val in rgb]
+
+
+class A98RGB(SRGB):
+    """A98 RGB class."""
+
+    SPACE = "a98-rgb"
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space=SPACE, channels=3))
+    WHITE = "D65"
+
+    @classmethod
+    def _to_xyz(cls, parent, rgb):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lin_a98rgb_to_xyz(lin_a98rgb(rgb)))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return gam_a98rgb(xyz_to_lin_a98rgb(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz)))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/display_p3.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/display_p3.py
new file mode 100644
index 00000000..d0f4d519
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/display_p3.py
@@ -0,0 +1,67 @@
+"""Display-p3 color class."""
+from ..spaces import RE_DEFAULT_MATCH
+from .srgb.base import SRGB, lin_srgb, gam_srgb
+from .xyz import XYZ
+from .. import util
+import re
+
+
+def lin_p3_to_xyz(rgb):
+    """
+    Convert an array of linear-light image-p3 values to CIE XYZ using  D65 (no chromatic adaptation).
+
+    http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html
+    """
+
+    m = [
+        [4.8663264999999994e-01, 2.6566316250000005e-01, 1.9817418749999988e-01],
+        [2.2900359999999997e-01, 6.9172672500000010e-01, 7.9269674999999956e-02],
+        [-3.9725792100320233e-17, 4.5112612500000052e-02, 1.0437173874999994e+00]
+    ]
+
+    # 0 was computed as -3.972075516933488e-17
+    return util.dot(m, rgb)
+
+
+def xyz_to_lin_p3(xyz):
+    """Convert XYZ to linear-light P3."""
+
+    m = [
+        [2.493180755328967, -0.9312655254971399, -0.40265972375888187],
+        [-0.8295031158210786, 1.7626941211197922, 0.02362508874173957],
+        [0.035853625780071716, -0.07618895478265224, 0.9570926215180221]
+    ]
+
+    return util.dot(m, xyz)
+
+
+def lin_p3(rgb):
+    """Convert an array of image-p3 RGB values in the range 0.0 - 1.0 to linear light (un-corrected) form."""
+
+    return lin_srgb(rgb)  # same as sRGB
+
+
+def gam_p3(rgb):
+    """Convert an array of linear-light image-p3 RGB  in the range 0.0-1.0 to gamma corrected form."""
+
+    return gam_srgb(rgb)  # same as sRGB
+
+
+class DisplayP3(SRGB):
+    """Display-p3 class."""
+
+    SPACE = "display-p3"
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space=SPACE, channels=3))
+    WHITE = "D65"
+
+    @classmethod
+    def _to_xyz(cls, parent, rgb):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lin_p3_to_xyz(lin_p3(rgb)))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return gam_p3(xyz_to_lin_p3(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz)))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/__init__.py
new file mode 100644
index 00000000..f9612ca1
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/__init__.py
@@ -0,0 +1 @@
+"""HSL color class."""
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/base.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/base.py
new file mode 100644
index 00000000..93bab7e4
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/base.py
@@ -0,0 +1,138 @@
+"""HSL class."""
+from ...spaces import Space, RE_DEFAULT_MATCH, Angle, Percent, GamutBound, Cylindrical
+from ..srgb.base import SRGB
+from ... import util
+import re
+
+
+def srgb_to_hsl(rgb):
+    """SRGB to HSL."""
+
+    r, g, b = rgb
+    mx = max(rgb)
+    mn = min(rgb)
+    h = util.NaN
+    s = 0.0
+    l = (mn + mx) / 2
+    c = mx - mn
+
+    if c != 0.0:
+        if mx == r:
+            h = (g - b) / c
+        elif mx == g:
+            h = (b - r) / c + 2.0
+        else:
+            h = (r - g) / c + 4.0
+        s = 0 if l == 0 or l == 1 else (mx - l) / min(l, 1 - l)
+        h *= 60.0
+        if s == 0:
+            h = util.NaN
+
+    return util.constrain_hue(h), s * 100, l * 100
+
+
+def hsl_to_srgb(hsl):
+    """
+    HSL to RGB.
+
+    https://en.wikipedia.org/wiki/HSL_and_HSV#HSL_to_RGB_alternative
+    """
+
+    h, s, l = hsl
+    h = util.no_nan(h)
+    h = h % 360
+    s /= 100.0
+    l /= 100.0
+
+    def f(n):
+        """Calculate the channels."""
+        k = (n + h / 30) % 12
+        a = s * min(l, 1 - l)
+        return l - a * max(-1, min(k - 3, 9 - k, 1))
+
+    return f(0), f(8), f(4)
+
+
+class HSL(Cylindrical, Space):
+    """HSL class."""
+
+    SPACE = "hsl"
+    SERIALIZE = ("--hsl",)
+    CHANNEL_NAMES = ("hue", "saturation", "lightness", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+    GAMUT_CHECK = "srgb"
+
+    RANGE = (
+        GamutBound([Angle(0.0), Angle(360.0)]),
+        GamutBound([Percent(0.0), Percent(100.0)]),
+        GamutBound([Percent(0.0), Percent(100.0)])
+    )
+
+    @property
+    def hue(self):
+        """Hue channel."""
+
+        return self._coords[0]
+
+    @hue.setter
+    def hue(self, value):
+        """Shift the hue."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def saturation(self):
+        """Saturation channel."""
+
+        return self._coords[1]
+
+    @saturation.setter
+    def saturation(self, value):
+        """Saturate or unsaturate the color by the given factor."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def lightness(self):
+        """Lightness channel."""
+
+        return self._coords[2]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Set lightness channel."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] == 0:
+            coords[0] = util.NaN
+        return coords, alpha
+
+    @classmethod
+    def _to_srgb(cls, parent, hsl):
+        """To sRGB."""
+
+        return hsl_to_srgb(hsl)
+
+    @classmethod
+    def _from_srgb(cls, parent, rgb):
+        """From sRGB."""
+
+        return srgb_to_hsl(rgb)
+
+    @classmethod
+    def _to_xyz(cls, parent, hsl):
+        """To XYZ."""
+
+        return SRGB._to_xyz(parent, cls._to_srgb(parent, hsl))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_srgb(parent, SRGB._from_xyz(parent, xyz))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/css.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/css.py
new file mode 100644
index 00000000..41e67726
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsl/css.py
@@ -0,0 +1,94 @@
+"""HSL class."""
+import re
+from . import base
+from ...spaces import _parse
+from ... import util
+
+
+class HSL(base.HSL):
+    """HSL class."""
+
+    DEF_VALUE = "hsl(0 0% 0% / 1)"
+    START = re.compile(r'(?i)\bhsla?\(')
+    MATCH = re.compile(
+        r"""(?xi)
+        \bhsla?\(\s*
+        (?:
+            # Space separated format
+            {angle}{space}{percent}{space}{percent}(?:{slash}(?:{percent}|{float}))? |
+            # comma separated format
+            {angle}{comma}{percent}{comma}{percent}(?:{comma}(?:{percent}|{float}))?
+        )
+        \s*\)
+        """.format(**_parse.COLOR_PARTS)
+    )
+
+    def to_string(
+        self, parent, *, alpha=None, precision=None, fit=True, **kwargs
+    ):
+        """Convert to CSS."""
+
+        options = kwargs
+        if precision is None:
+            precision = parent.PRECISION
+
+        if options.get("color"):
+            return super().to_string(parent, alpha=alpha, precision=precision, fit=fit, **kwargs)
+
+        a = util.no_nan(self.alpha)
+        alpha = alpha is not False and (alpha is True or a < 1.0)
+        method = None if not isinstance(fit, str) else fit
+        coords = util.no_nan(parent.fit(method=method).coords() if fit else self.coords())
+
+        if alpha:
+            template = "hsla({}, {}%, {}%, {})" if options.get("comma") else "hsl({} {}% {}% / {})"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision),
+                util.fmt_float(a, max(util.DEF_PREC, precision))
+            )
+        else:
+            template = "hsl({}, {}%, {}%)" if options.get("comma") else "hsl({} {}% {}%)"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision)
+            )
+
+    @classmethod
+    def translate_channel(cls, channel, value):
+        """Translate channel."""
+
+        if channel == 0:
+            return _parse.norm_angle_channel(value)
+        elif channel in (1, 2):
+            return _parse.norm_percent_channel(value)
+        elif channel == -1:
+            return _parse.norm_alpha_channel(value)
+
+    @classmethod
+    def split_channels(cls, color):
+        """Split channels."""
+
+        start = 5 if color[:4].lower() == 'hsla' else 4
+        channels = []
+        alpha = 1.0
+        for i, c in enumerate(_parse.RE_CHAN_SPLIT.split(color[start:-1].strip()), 0):
+            if i <= 2:
+                channels.append(cls.translate_channel(i, c))
+            elif i == 3:
+                alpha = cls.translate_channel(-1, c)
+        return cls.null_adjust(channels, alpha)
+
+    @classmethod
+    def match(cls, string, start=0, fullmatch=True):
+        """Match a CSS color string."""
+
+        channels, end = super().match(string, start, fullmatch)
+        if channels is not None:
+            return channels, end
+        m = cls.MATCH.match(string, start)
+        if m is not None and (not fullmatch or m.end(0) == len(string)):
+            return cls.split_channels(string[m.start(0):m.end(0)]), m.end(0)
+        return None, None
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsv.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsv.py
new file mode 100644
index 00000000..486b22d6
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hsv.py
@@ -0,0 +1,146 @@
+"""HSV class."""
+from ..spaces import Space, RE_DEFAULT_MATCH, Angle, Percent, GamutBound, Cylindrical
+from .srgb.base import SRGB
+from .hsl.base import HSL
+from .. import util
+import re
+
+
+def hsv_to_hsl(hsv):
+    """
+    HSV to HSL.
+
+    https://en.wikipedia.org/wiki/HSL_and_HSV#Interconversion
+    """
+
+    h, s, v = hsv
+    s /= 100.0
+    v /= 100.0
+    l = v * (1.0 - s / 2.0)
+    s = 0.0 if (l == 0.0 or l == 1.0) else ((v - l) / min(l, 1.0 - l)) * 100
+
+    if s == 0:
+        h = util.NaN
+
+    return [
+        util.constrain_hue(h),
+        s,
+        l * 100
+    ]
+
+
+def hsl_to_hsv(hsl):
+    """
+    HSL to HSV.
+
+    https://en.wikipedia.org/wiki/HSL_and_HSV#Interconversion
+    """
+
+    h, s, l = hsl
+    s /= 100.0
+    l /= 100.0
+
+    v = l + s * min(l, 1.0 - l)
+    s = 0.0 if (v == 0.0) else 2 * (1.0 - l / v)
+
+    if s == 0:
+        h = util.NaN
+
+    return [util.constrain_hue(h), s * 100.0, v * 100.0]
+
+
+class HSV(Cylindrical, Space):
+    """HSL class."""
+
+    SPACE = "hsv"
+    SERIALIZE = ("--hsv",)
+    CHANNEL_NAMES = ("hue", "saturation", "value", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    GAMUT_CHECK = "srgb"
+    WHITE = "D65"
+
+    RANGE = (
+        GamutBound([Angle(0.0), Angle(360.0)]),
+        GamutBound([Percent(0.0), Percent(100.0)]),
+        GamutBound([Percent(0.0), Percent(100.0)])
+    )
+
+    @property
+    def hue(self):
+        """Hue channel."""
+
+        return self._coords[0]
+
+    @hue.setter
+    def hue(self, value):
+        """Shift the hue."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def saturation(self):
+        """Saturation channel."""
+
+        return self._coords[1]
+
+    @saturation.setter
+    def saturation(self, value):
+        """Saturate or unsaturate the color by the given factor."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def value(self):
+        """Value channel."""
+
+        return self._coords[2]
+
+    @value.setter
+    def value(self, value):
+        """Set value channel."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] == 0:
+            coords[0] = util.NaN
+        return coords, alpha
+
+    @classmethod
+    def _to_xyz(cls, parent, hsv):
+        """To XYZ."""
+
+        return SRGB._to_xyz(parent, cls._to_srgb(parent, hsv))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_srgb(parent, SRGB._from_xyz(parent, xyz))
+
+    @classmethod
+    def _to_hsl(cls, parent, hsv):
+        """To HSL."""
+
+        return hsv_to_hsl(hsv)
+
+    @classmethod
+    def _from_hsl(cls, parent, hsl):
+        """From HSL."""
+
+        return hsl_to_hsv(hsl)
+
+    @classmethod
+    def _to_srgb(cls, parent, hsv):
+        """To sRGB."""
+
+        return HSL._to_srgb(parent, cls._to_hsl(parent, hsv))
+
+    @classmethod
+    def _from_srgb(cls, parent, rgb):
+        """From sRGB."""
+
+        return cls._from_hsl(parent, HSL._from_srgb(parent, rgb))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/__init__.py
new file mode 100644
index 00000000..e6d67521
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/__init__.py
@@ -0,0 +1 @@
+"""HWB color class."""
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/base.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/base.py
new file mode 100644
index 00000000..6ccdedbc
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/base.py
@@ -0,0 +1,145 @@
+"""HWB class."""
+from ...spaces import Space, RE_DEFAULT_MATCH, Angle, Percent, GamutBound, Cylindrical
+from ..srgb.base import SRGB
+from ..hsv import HSV
+from ... import util
+import re
+
+
+def hwb_to_hsv(hwb):
+    """HWB to HSV."""
+
+    h, w, b = hwb
+    w /= 100.0
+    b /= 100.0
+
+    wb = w + b
+    if (wb >= 1):
+        gray = w / wb
+        return [util.NaN, 0.0, gray * 100.0]
+
+    v = 1 - b
+    s = 0 if v == 0 else 1 - w / v
+    return [h, s * 100, v * 100]
+
+
+def hsv_to_hwb(hsv):
+    """HSV to HWB."""
+
+    h, s, v = hsv
+    s /= 100
+    v /= 100
+    w = v * (1 - s)
+    b = 1 - v
+    if w + b >= 1:
+        h = util.NaN
+    return [h, w * 100, b * 100]
+
+
+class HWB(Cylindrical, Space):
+    """HWB class."""
+
+    SPACE = "hwb"
+    SERIALIZE = ("--hwb",)
+    CHANNEL_NAMES = ("hue", "whiteness", "blackness", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    GAMUT_CHECK = "srgb"
+    WHITE = "D65"
+
+    RANGE = (
+        GamutBound([Angle(0.0), Angle(360.0)]),
+        GamutBound([Percent(0.0), Percent(100.0)]),
+        GamutBound([Percent(0.0), Percent(100.0)])
+    )
+
+    @property
+    def hue(self):
+        """Hue channel."""
+
+        return self._coords[0]
+
+    @hue.setter
+    def hue(self, value):
+        """Shift the hue."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def whiteness(self):
+        """Whiteness channel."""
+
+        return self._coords[1]
+
+    @whiteness.setter
+    def whiteness(self, value):
+        """Set whiteness channel."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def blackness(self):
+        """Blackness channel."""
+
+        return self._coords[2]
+
+    @blackness.setter
+    def blackness(self, value):
+        """Set blackness channel."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] + coords[2] >= 100:
+            coords[0] = util.NaN
+        return coords, alpha
+
+    @classmethod
+    def _to_xyz(cls, parent, hwb):
+        """SRGB to XYZ."""
+
+        return SRGB._to_xyz(parent, cls._to_srgb(parent, hwb))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """XYZ to SRGB."""
+
+        return cls._from_srgb(parent, SRGB._from_xyz(parent, xyz))
+
+    @classmethod
+    def _to_srgb(cls, parent, hwb):
+        """To sRGB."""
+
+        return HSV._to_srgb(parent, cls._to_hsv(parent, hwb))
+
+    @classmethod
+    def _from_srgb(cls, parent, srgb):
+        """From sRGB."""
+
+        return cls._from_hsv(parent, HSV._from_srgb(parent, srgb))
+
+    @classmethod
+    def _to_hsl(cls, parent, hwb):
+        """To HSL."""
+
+        return HSV._to_hsl(parent, hwb_to_hsv(hwb))
+
+    @classmethod
+    def _from_hsl(cls, parent, hsl):
+        """From HSL."""
+
+        return hsv_to_hwb(HSV._from_hsl(parent, hsl))
+
+    @classmethod
+    def _to_hsv(cls, parent, hwb):
+        """To HSV."""
+
+        return hwb_to_hsv(hwb)
+
+    @classmethod
+    def _from_hsv(cls, parent, hsv):
+        """From HSV."""
+
+        return hsv_to_hwb(hsv)
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/css.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/css.py
new file mode 100644
index 00000000..de5c6b25
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/hwb/css.py
@@ -0,0 +1,94 @@
+"""HWB class."""
+import re
+from . import base
+from ...spaces import _parse
+from ... import util
+
+
+class HWB(base.HWB):
+    """HWB class."""
+
+    DEF_VALUE = "hwb(0 0% 0% / 1)"
+    START = re.compile(r'(?i)\bhwb\(')
+    MATCH = re.compile(
+        r"""(?xi)
+        \bhwb\(\s*
+        (?:
+            # Space separated format
+            {angle}{space}{percent}{space}{percent}(?:{slash}(?:{percent}|{float}))? |
+            # comma separated format
+            {angle}{comma}{percent}{comma}{percent}(?:{comma}(?:{percent}|{float}))?
+        )
+        \s*\)
+        """.format(**_parse.COLOR_PARTS)
+    )
+
+    def to_string(
+        self, parent, *, alpha=None, precision=None, fit=True, **kwargs
+    ):
+        """Convert to CSS."""
+
+        if precision is None:
+            precision = parent.PRECISION
+
+        options = kwargs
+        if options.get("color"):
+            return super().to_string(parent, alpha=alpha, precision=precision, fit=fit, **kwargs)
+
+        a = util.no_nan(self.alpha)
+        alpha = alpha is not False and (alpha is True or a < 1.0)
+        method = None if not isinstance(fit, str) else fit
+        coords = util.no_nan(parent.fit(method=method).coords() if fit else self.coords())
+
+        if alpha:
+            template = "hwb({}, {}%, {}%, {})" if options.get("comma") else "hwb({} {}% {}% / {})"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision),
+                util.fmt_float(a, max(util.DEF_PREC, precision))
+            )
+        else:
+            template = "hwb({}, {}%, {}%)" if options.get("comma") else "hwb({} {}% {}%)"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision)
+            )
+
+    @classmethod
+    def translate_channel(cls, channel, value):
+        """Translate channel string."""
+
+        if channel == 0:
+            return _parse.norm_angle_channel(value)
+        elif channel in (1, 2):
+            return _parse.norm_percent_channel(value)
+        elif channel == -1:
+            return _parse.norm_alpha_channel(value)
+
+    @classmethod
+    def split_channels(cls, color):
+        """Split channels."""
+
+        start = 4
+        channels = []
+        alpha = 1.0
+        for i, c in enumerate(_parse.RE_CHAN_SPLIT.split(color[start:-1].strip()), 0):
+            if i <= 2:
+                channels.append(cls.translate_channel(i, c))
+            elif i == 3:
+                alpha = cls.translate_channel(-1, c)
+        return cls.null_adjust(channels, alpha)
+
+    @classmethod
+    def match(cls, string, start=0, fullmatch=True):
+        """Match a CSS color string."""
+
+        channels, end = super().match(string, start, fullmatch)
+        if channels is not None:
+            return channels, end
+        m = cls.MATCH.match(string, start)
+        if m is not None and (not fullmatch or m.end(0) == len(string)):
+            return cls.split_channels(string[m.start(0):m.end(0)]), m.end(0)
+        return None, None
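The `to_string` method above switches between the CSS space-separated syntax and the legacy comma-separated syntax purely by template choice. A minimal standalone sketch of that selection; `fmt` below is a hypothetical stand-in for `util.fmt_float`.

```
def fmt(v):
    """Hypothetical stand-in for util.fmt_float."""
    return '{:g}'.format(v)


def serialize_hwb(h, w, b, a=None, comma=False):
    """Mirror the comma/space template switch in to_string above."""
    if a is not None:
        template = "hwb({}, {}%, {}%, {})" if comma else "hwb({} {}% {}% / {})"
        return template.format(fmt(h), fmt(w), fmt(b), fmt(a))
    template = "hwb({}, {}%, {}%)" if comma else "hwb({} {}% {}%)"
    return template.format(fmt(h), fmt(w), fmt(b))


print(serialize_hwb(120, 30, 40))             # hwb(120 30% 40%)
print(serialize_hwb(120, 30, 40, 0.5, True))  # hwb(120, 30%, 40%, 0.5)
```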
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/ictcp.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/ictcp.py
new file mode 100644
index 00000000..8f8c1eda
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/ictcp.py
@@ -0,0 +1,144 @@
+"""
+ICtCp class.
+
+https://professional.dolby.com/siteassets/pdfs/ictcp_dolbywhitepaper_v071.pdf
+"""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, OptionalPercent
+from .xyz import XYZ
+from .. import util
+import re
+
+# All PQ Values are equivalent to defaults as stated in link below:
+# https://en.wikipedia.org/wiki/High-dynamic-range_video#Perceptual_quantizer
+#
+# ```
+# M1 = 2610 / 16384
+# M1INV = 16384 / 2610
+# M2 = 2523 / 32
+# M2INV = 32 / 2523
+# C1 = 3424 / 4096
+# C2 = 2413 / 128
+# C3 = 2392 / 128
+# ```
+
+# XYZ transform matrices
+xyz_to_lms_m = [
+    [0.359132, 0.697604, -0.03578],
+    [-0.192188, 1.1003800000000001, 0.07554],
+    [0.006956000000000001, 0.074916, 0.8433400000000001]
+]
+
+lms_to_xyz_mi = [
+    [2.0705082034204145, -1.3267039449989098, 0.2066805790352646],
+    [0.3650251372337387, 0.6804585253538307, -0.04546355870112315],
+    [-0.04950397021841151, -0.0495039702184115, 1.1880952852418762]
+]
+
+# LMS to ICtCp matrices
+lms_p_to_ictcp_m = [
+    [0.5, 0.5, 0.0],
+    [1.61376953125, -3.323486328125, 1.709716796875],
+    [4.378173828125, -4.24560546875, -0.132568359375]
+]
+
+ictcp_to_lms_p_mi = [
+    [0.9999999999999998, 0.008609037037932761, 0.11102962500302598],
+    [0.9999999999999998, -0.008609037037932752, -0.11102962500302593],
+    [0.9999999999999998, 0.5600313357106791, -0.3206271749873188]
+]
+
+
+def ictcp_to_xyz_d65(ictcp):
+    """From ICtCp to XYZ."""
+
+    # Convert to LMS prime
+    pqlms = util.dot(ictcp_to_lms_p_mi, ictcp)
+
+    # Decode PQ LMS to LMS
+    lms = util.pq_st2084_eotf(pqlms)
+
+    # Convert back to absolute XYZ D65
+    absxyz = util.dot(lms_to_xyz_mi, lms)
+
+    # Convert back to normal XYZ D65
+    return util.absxyzd65_to_xyz_d65(absxyz)
+
+
+def xyz_d65_to_ictcp(xyzd65):
+    """From XYZ to ICtCp."""
+
+    # Convert from XYZ D65 to absolute XYZ D65
+    absxyz = util.xyz_d65_to_absxyzd65(xyzd65)
+
+    # Convert to LMS
+    lms = util.dot(xyz_to_lms_m, absxyz)
+
+    # PQ encode the LMS
+    pqlms = util.pq_st2084_inverse_eotf(lms)
+
+    # Calculate ICtCp
+    return util.dot(lms_p_to_ictcp_m, pqlms)
+
+
+class ICtCp(Space):
+    """ICtCp class."""
+
+    SPACE = "ictcp"
+    SERIALIZE = ("--ictcp",)
+    CHANNEL_NAMES = ("i", "ct", "cp", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([OptionalPercent(0), OptionalPercent(1)]),
+        GamutUnbound([-0.5, 0.5]),
+        GamutUnbound([-0.5, 0.5])
+    )
+
+    @property
+    def i(self):
+        """`I` channel."""
+
+        return self._coords[0]
+
+    @i.setter
+    def i(self, value):
+        """Set `I` channel."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def ct(self):
+        """`Ct` axis."""
+
+        return self._coords[1]
+
+    @ct.setter
+    def ct(self, value):
+        """`Ct` axis."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def cp(self):
+        """`Cp` axis."""
+
+        return self._coords[2]
+
+    @cp.setter
+    def cp(self, value):
+        """Set `Cp` axis."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def _to_xyz(cls, parent, ictcp):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, ictcp_to_xyz_d65(ictcp))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return xyz_d65_to_ictcp(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz))
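The `util.pq_st2084_eotf`/`util.pq_st2084_inverse_eotf` helpers referenced above are not shown here; the standalone sketch below illustrates the SMPTE ST 2084 (PQ) transfer pair using the default constants listed in the comment, assuming non-negative values normalized so 1.0 corresponds to 10000 cd/m². The library's own helpers remain authoritative.

```
# Standalone PQ (SMPTE ST 2084) sketch using the default constants above.
# Assumes non-negative inputs; an editorial illustration, not the util code.
M1 = 2610 / 16384
M2 = 2523 / 32
C1 = 3424 / 4096
C2 = 2413 / 128
C3 = 2392 / 128


def pq_inverse_eotf(values):
    """Linear light -> PQ signal."""
    return [((C1 + C2 * v ** M1) / (1 + C3 * v ** M1)) ** M2 for v in values]


def pq_eotf(values):
    """PQ signal -> linear light."""
    return [
        (max(v ** (1 / M2) - C1, 0) / (C2 - C3 * v ** (1 / M2))) ** (1 / M1)
        for v in values
    ]


signal = pq_inverse_eotf([0.1, 0.5, 1.0])
print([round(v, 6) for v in pq_eotf(signal)])  # round trips back to ~[0.1, 0.5, 1.0]
```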
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/jzazbz.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/jzazbz.py
new file mode 100644
index 00000000..2b8a2dff
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/jzazbz.py
@@ -0,0 +1,164 @@
+"""
+Jzazbz class.
+
+https://www.osapublishing.org/oe/fulltext.cfm?uri=oe-25-13-15131&id=368272
+"""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, OptionalPercent
+from .xyz import XYZ
+from .. import util
+import re
+
+B = 1.15
+G = 0.66
+D = -0.56
+D0 = 1.6295499532821566E-11
+
+# All PQ Values are equivalent to defaults as stated in link below except `M2` (and `IM2`):
+# https://en.wikipedia.org/wiki/High-dynamic-range_video#Perceptual_quantizer
+#
+# ```
+# M1 = 2610 / (2 ** 14)
+# IM1 = (2 ** 14) / 2610
+# C1 = 3424 / (2 ** 12)
+# C2 = 2413 / (2 ** 7)
+# C3 = 2392 / (2 ** 7)
+# M2 = 1.7 * 2523 / (2 ** 5)
+# IM2 = (2 ** 5) / (1.7 * 2523)
+# ```
+M2 = 1.7 * 2523 / (2 ** 5)
+
+
+# XYZ transform matrices
+xyz_to_lms_m = [
+    [0.41478972, 0.579999, 0.014648],
+    [-0.20151, 1.120649, 0.0531008],
+    [-0.0166008, 0.2648, 0.6684799]
+]
+
+lms_to_xyz_mi = [
+    [1.9242264357876069, -1.0047923125953657, 0.037651404030617994],
+    [0.35031676209499907, 0.7264811939316552, -0.06538442294808501],
+    [-0.09098281098284754, -0.3127282905230739, 1.5227665613052603]
+]
+
+# LMS to Izazbz matrices
+lms_p_to_izazbz_m = [
+    [0.5, 0.5, 0],
+    [3.524, -4.066708, 0.542708],
+    [0.199076, 1.096799, -1.295875]
+]
+
+izazbz_to_lms_p_mi = [
+    [1.0, 0.1386050432715393, 0.05804731615611882],
+    [1.0, -0.13860504327153927, -0.05804731615611891],
+    [1.0, -0.09601924202631895, -0.811891896056039]
+]
+
+
+def jzazbz_to_xyz_d65(jzazbz):
+    """From Jzazbz to XYZ."""
+
+    jz, az, bz = jzazbz
+
+    # Calculate Iz
+    iz = (jz + D0) / (1 + D - D * (jz + D0))
+
+    # Convert to LMS prime
+    pqlms = util.dot(izazbz_to_lms_p_mi, [iz, az, bz])
+
+    # Decode PQ LMS to LMS
+    lms = util.pq_st2084_eotf(pqlms, m2=M2)
+
+    # Convert back to absolute XYZ D65
+    xm, ym, za = util.dot(lms_to_xyz_mi, lms)
+    xa = (xm + ((B - 1) * za)) / B
+    ya = (ym + ((G - 1) * xa)) / G
+
+    # Convert back to normal XYZ D65
+    return util.absxyzd65_to_xyz_d65([xa, ya, za])
+
+
+def xyz_d65_to_jzazbz(xyzd65):
+    """From XYZ to Jzazbz."""
+
+    # Convert from XYZ D65 to absolute XYZ D65
+    xa, ya, za = util.xyz_d65_to_absxyzd65(xyzd65)
+    xm = (B * xa) - ((B - 1) * za)
+    ym = (G * ya) - ((G - 1) * xa)
+
+    # Convert to LMS
+    lms = util.dot(xyz_to_lms_m, [xm, ym, za])
+
+    # PQ encode the LMS
+    pqlms = util.pq_st2084_inverse_eotf(lms, m2=M2)
+
+    # Calculate Izazbz
+    iz, az, bz = util.dot(lms_p_to_izazbz_m, pqlms)
+
+    # Calculate Jz
+    jz = ((1 + D) * iz) / (1 + (D * iz)) - D0
+    return jz, az, bz
+
+
+class Jzazbz(Space):
+    """Jzazbz class."""
+
+    SPACE = "jzazbz"
+    SERIALIZE = ("--jzazbz",)
+    CHANNEL_NAMES = ("jz", "az", "bz", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([OptionalPercent(0), OptionalPercent(1)]),
+        GamutUnbound([-0.5, 0.5]),
+        GamutUnbound([-0.5, 0.5])
+    )
+
+    @property
+    def jz(self):
+        """Jz channel."""
+
+        return self._coords[0]
+
+    @jz.setter
+    def jz(self, value):
+        """Set jz channel."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def az(self):
+        """Az axis."""
+
+        return self._coords[1]
+
+    @az.setter
+    def az(self, value):
+        """Az axis."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def bz(self):
+        """Bz axis."""
+
+        return self._coords[2]
+
+    @bz.setter
+    def bz(self, value):
+        """Set bz axis."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def _to_xyz(cls, parent, jzazbz):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, jzazbz_to_xyz_d65(jzazbz))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return xyz_d65_to_jzazbz(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz))
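Beyond the matrix and PQ steps, the only Jzazbz-specific nonlinearity is the Iz ↔ Jz mapping shown above. A standalone round-trip check of just that step, reusing the `D` and `D0` constants:

```
# Standalone round-trip check of the Iz <-> Jz step above.
D = -0.56
D0 = 1.6295499532821566E-11


def iz_to_jz(iz):
    return ((1 + D) * iz) / (1 + (D * iz)) - D0


def jz_to_iz(jz):
    return (jz + D0) / (1 + D - D * (jz + D0))


for iz in (0.0, 0.1, 0.5, 1.0):
    assert abs(jz_to_iz(iz_to_jz(iz)) - iz) < 1e-12
```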
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/jzczhz.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/jzczhz.py
new file mode 100644
index 00000000..c64df365
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/jzczhz.py
@@ -0,0 +1,134 @@
+"""
+JzCzhz class.
+
+https://www.osapublishing.org/oe/fulltext.cfm?uri=oe-25-13-15131&id=368272
+"""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, Cylindrical, Angle, OptionalPercent
+from .jzazbz import Jzazbz
+from .. import util
+import re
+import math
+
+ACHROMATIC_THRESHOLD = 0.0002
+
+
+def jzazbz_to_jzczhz(jzazbz):
+    """Jzazbz to JzCzhz."""
+
+    jz, az, bz = jzazbz
+
+    cz = math.sqrt(az ** 2 + bz ** 2)
+    hz = math.degrees(math.atan2(bz, az))
+
+    # Achromatic colors will often get extremely close, but not quite hit zero.
+    # Essentially, we want to discard noise through rounding and such.
+    if cz < ACHROMATIC_THRESHOLD:
+        hz = util.NaN
+
+    return [jz, cz, util.constrain_hue(hz)]
+
+
+def jzczhz_to_jzazbz(jzczhz):
+    """JzCzhz to Jzazbz."""
+
+    jz, cz, hz = jzczhz
+    hz = util.no_nan(hz)
+
+    # If, for whatever reason (mainly direct user input),
+    # chroma is less than zero, clamp it to zero.
+    if cz < 0.0:
+        cz = 0.0
+
+    return (
+        jz,
+        cz * math.cos(math.radians(hz)),
+        cz * math.sin(math.radians(hz))
+    )
+
+
+class JzCzhz(Cylindrical, Space):
+    """
+    JzCzhz class.
+
+    https://www.osapublishing.org/oe/fulltext.cfm?uri=oe-25-13-15131&id=368272
+    """
+
+    SPACE = "jzczhz"
+    SERIALIZE = ("--jzczhz",)
+    CHANNEL_NAMES = ("jz", "chroma", "hue", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([OptionalPercent(0), OptionalPercent(1)]),
+        GamutUnbound([0.0, 1.0]),
+        GamutUnbound([Angle(0.0), Angle(360.0)]),
+    )
+
+    @property
+    def jz(self):
+        """Jz."""
+
+        return self._coords[0]
+
+    @jz.setter
+    def jz(self, value):
+        """Set jz."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def chroma(self):
+        """Chroma."""
+
+        return self._coords[1]
+
+    @chroma.setter
+    def chroma(self, value):
+        """Set chroma."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def hue(self):
+        """Hue."""
+
+        return self._coords[2]
+
+    @hue.setter
+    def hue(self, value):
+        """Set hue."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] < ACHROMATIC_THRESHOLD:
+            coords[2] = util.NaN
+        return coords, alpha
+
+    @classmethod
+    def _to_jzazbz(cls, parent, jzczhz):
+        """To Jzazbz."""
+
+        return jzczhz_to_jzazbz(jzczhz)
+
+    @classmethod
+    def _from_jzazbz(cls, parent, jzazbz):
+        """From Jzazbz."""
+
+        return jzazbz_to_jzczhz(jzazbz)
+
+    @classmethod
+    def _to_xyz(cls, parent, jzczhz):
+        """To XYZ."""
+
+        return Jzazbz._to_xyz(parent, cls._to_jzazbz(parent, jzczhz))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_jzazbz(parent, Jzazbz._from_xyz(parent, xyz))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/__init__.py
new file mode 100644
index 00000000..4cfdcaab
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/__init__.py
@@ -0,0 +1 @@
+"""Lab color class."""
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/base.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/base.py
new file mode 100644
index 00000000..32e48ea2
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/base.py
@@ -0,0 +1,129 @@
+"""Lab class."""
+from ...spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, Percent
+from ..xyz import XYZ
+from ... import util
+import re
+
+EPSILON = 216 / 24389  # `6^3 / 29^3`
+EPSILON3 = 6 / 29  # Cube root of EPSILON
+KAPPA = 24389 / 27
+KE = 8  # KAPPA * EPSILON = 8
+
+
+def lab_to_xyz(lab, white):
+    """
+    Convert Lab to D50-adapted XYZ.
+
+    http://www.brucelindbloom.com/Eqn_Lab_to_XYZ.html
+
+    While the derivation is different than the specification, the results are the same as Appendix D:
+    https://www.cdvplus.cz/file/3-publikace-cie15-2004/
+    """
+
+    l, a, b = lab
+
+    # compute `f`, starting with the luminance-related term
+    fy = (l + 16) / 116
+    fx = a / 500 + fy
+    fz = fy - b / 200
+
+    # compute `xyz`
+    xyz = [
+        fx ** 3 if fx > EPSILON3 else (116 * fx - 16) / KAPPA,
+        fy ** 3 if l > KE else l / KAPPA,
+        fz ** 3 if fz > EPSILON3 else (116 * fz - 16) / KAPPA
+    ]
+
+    # Compute XYZ by scaling `xyz` by reference `white`
+    return util.multiply(xyz, white)
+
+
+def xyz_to_lab(xyz, white):
+    """
+    Assuming XYZ is relative to D50, convert to CIE Lab from CIE standard.
+
+    http://www.brucelindbloom.com/Eqn_XYZ_to_Lab.html
+
+    While the derivation is different than the specification, the results are the same:
+    https://www.cdvplus.cz/file/3-publikace-cie15-2004/
+    """
+
+    # compute `xyz`, which is XYZ scaled relative to reference white
+    xyz = util.divide(xyz, white)
+    # Compute `fx`, `fy`, and `fz`
+    fx, fy, fz = [util.cbrt(i) if i > EPSILON else (KAPPA * i + 16) / 116 for i in xyz]
+
+    return (
+        (116.0 * fy) - 16.0,
+        500.0 * (fx - fy),
+        200.0 * (fy - fz)
+    )
+
+
+class LabBase(Space):
+    """Lab class."""
+
+    CHANNEL_NAMES = ("lightness", "a", "b", "alpha")
+
+    RANGE = (
+        GamutUnbound([Percent(0), Percent(100.0)]),  # Technically we could/should clamp the zero side.
+        GamutUnbound([-160, 160]),  # No limit, but we could impose one +/-160?
+        GamutUnbound([-160, 160])  # No limit, but we could impose one +/-160?
+    )
+
+    @property
+    def lightness(self):
+        """L channel."""
+
+        return self._coords[0]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Get true luminance."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def a(self):
+        """A channel."""
+
+        return self._coords[1]
+
+    @a.setter
+    def a(self, value):
+        """A axis."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def b(self):
+        """B channel."""
+
+        return self._coords[2]
+
+    @b.setter
+    def b(self, value):
+        """B axis."""
+
+        self._coords[2] = self._handle_input(value)
+
+
+class Lab(LabBase):
+    """Lab class."""
+
+    SPACE = "lab"
+    SERIALIZE = ("--lab",)
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D50"
+
+    @classmethod
+    def _to_xyz(cls, parent, lab):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lab_to_xyz(lab, cls.white()))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return xyz_to_lab(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz), cls.white())
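A standalone round trip of the two conversion functions above, with plain list operations standing in for `util.multiply`/`util.divide`/`util.cbrt` and an assumed, approximate D50 white point of (0.96422, 1.0, 0.82521); the exact values come from the library's `WHITES` table, which is not shown here.

```
# Standalone round-trip sketch of lab_to_xyz / xyz_to_lab above.
# D50 below is an approximation; the library's WHITES table is authoritative.
EPSILON = 216 / 24389
EPSILON3 = 6 / 29
KAPPA = 24389 / 27
KE = 8
D50 = [0.96422, 1.0, 0.82521]


def lab_to_xyz(lab, white=D50):
    l, a, b = lab
    fy = (l + 16) / 116
    fx = a / 500 + fy
    fz = fy - b / 200
    xyz = [
        fx ** 3 if fx > EPSILON3 else (116 * fx - 16) / KAPPA,
        fy ** 3 if l > KE else l / KAPPA,
        fz ** 3 if fz > EPSILON3 else (116 * fz - 16) / KAPPA
    ]
    return [c * w for c, w in zip(xyz, white)]


def xyz_to_lab(xyz, white=D50):
    scaled = [c / w for c, w in zip(xyz, white)]
    # i ** (1 / 3) stands in for util.cbrt; fine here since all values are positive.
    fx, fy, fz = [i ** (1 / 3) if i > EPSILON else (KAPPA * i + 16) / 116 for i in scaled]
    return [(116 * fy) - 16, 500 * (fx - fy), 200 * (fy - fz)]


lab = [50.0, 20.0, -30.0]
assert all(abs(o - r) < 1e-9 for o, r in zip(lab, xyz_to_lab(lab_to_xyz(lab))))
```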
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/css.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/css.py
new file mode 100644
index 00000000..7d8d6225
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab/css.py
@@ -0,0 +1,96 @@
+"""Lab class."""
+import re
+from . import base
+from ...spaces import _parse
+from ... import util
+
+
+class Lab(base.Lab):
+    """Lab class."""
+
+    DEF_VALUE = "lab(0% 0 0 / 1)"
+    START = re.compile(r'(?i)\blab\(')
+    MATCH = re.compile(
+        r"""(?xi)
+        (?:
+            \blab\(\s*
+            (?:
+                # Space separated format
+                {percent}{space}{float}{space}{float}(?:{slash}(?:{percent}|{float}))? |
+                # comma separated format
+                {percent}{comma}{float}{comma}{float}(?:{comma}(?:{percent}|{float}))?
+            )
+            \s*\)
+        )
+        """.format(**_parse.COLOR_PARTS)
+    )
+
+    def to_string(
+        self, parent, *, alpha=None, precision=None, fit=True, **kwargs
+    ):
+        """Convert to CSS."""
+
+        if precision is None:
+            precision = parent.PRECISION
+
+        options = kwargs
+        if options.get("color"):
+            return super().to_string(parent, alpha=alpha, precision=precision, fit=fit, **kwargs)
+
+        a = util.no_nan(self.alpha)
+        alpha = alpha is not False and (alpha is True or a < 1.0)
+        method = None if not isinstance(fit, str) else fit
+        coords = util.no_nan(parent.fit(method=method).coords() if fit else self.coords())
+
+        if alpha:
+            template = "lab({}%, {}, {}, {})" if options.get("comma") else "lab({}% {} {} / {})"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision),
+                util.fmt_float(a, max(util.DEF_PREC, precision))
+            )
+        else:
+            template = "lab({}%, {}, {})" if options.get("comma") else "lab({}% {} {})"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision)
+            )
+
+    @classmethod
+    def translate_channel(cls, channel, value):
+        """Translate channel string."""
+
+        if channel == 0:
+            return _parse.norm_percent_channel(value)
+        elif channel in (1, 2):
+            return _parse.norm_float(value)
+        elif channel == -1:
+            return _parse.norm_alpha_channel(value)
+
+    @classmethod
+    def split_channels(cls, color):
+        """Split channels."""
+
+        start = 4
+        channels = []
+        alpha = 1.0
+        for i, c in enumerate(_parse.RE_CHAN_SPLIT.split(color[start:-1].strip()), 0):
+            if i <= 2:
+                channels.append(cls.translate_channel(i, c))
+            else:
+                alpha = cls.translate_channel(-1, c)
+        return cls.null_adjust(channels, alpha)
+
+    @classmethod
+    def match(cls, string, start=0, fullmatch=True):
+        """Match a CSS color string."""
+
+        channels, end = super().match(string, start, fullmatch)
+        if channels is not None:
+            return channels, end
+        m = cls.MATCH.match(string, start)
+        if m is not None and (not fullmatch or m.end(0) == len(string)):
+            return cls.split_channels(string[m.start(0):m.end(0)]), m.end(0)
+        return None, None
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab_d65.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab_d65.py
new file mode 100644
index 00000000..de510ab7
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lab_d65.py
@@ -0,0 +1,26 @@
+"""Lab D65 class."""
+from ..spaces import RE_DEFAULT_MATCH
+from .xyz import XYZ
+from .lab.base import LabBase, lab_to_xyz, xyz_to_lab
+import re
+
+
+class LabD65(LabBase):
+    """Lab D65 class."""
+
+    SPACE = "lab-d65"
+    SERIALIZE = ("--lab-d65",)
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    @classmethod
+    def _to_xyz(cls, parent, lab):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lab_to_xyz(lab, cls.white()))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return xyz_to_lab(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz), cls.white())
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/__init__.py
new file mode 100644
index 00000000..6711049a
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/__init__.py
@@ -0,0 +1 @@
+"""Lch color class."""
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/base.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/base.py
new file mode 100644
index 00000000..b43b75ff
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/base.py
@@ -0,0 +1,136 @@
+"""Lch class."""
+from ...spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, Cylindrical, Angle, Percent
+from ..lab.base import Lab
+from ... import util
+import re
+import math
+
+ACHROMATIC_THRESHOLD = 0.00000000001
+
+
+def lab_to_lch(lab):
+    """Lab to Lch."""
+
+    l, a, b = lab
+
+    c = math.sqrt(a ** 2 + b ** 2)
+    h = math.degrees(math.atan2(b, a))
+
+    # Achromatic colors will often get extremely close, but not quite hit zero.
+    # Essentially, we want to discard noise through rounding and such.
+    if c < ACHROMATIC_THRESHOLD:
+        h = util.NaN
+
+    # Return the polar form.
+    return [l, c, util.constrain_hue(h)]
+
+
+def lch_to_lab(lch):
+    """Lch to Lab."""
+
+    l, c, h = lch
+    h = util.no_nan(h)
+
+    # If, for whatever reason (mainly direct user input),
+    # chroma is less than zero, clamp it to zero.
+    if c < 0.0:
+        c = 0.0
+
+    return (
+        l,
+        c * math.cos(math.radians(h)),
+        c * math.sin(math.radians(h))
+    )
+
+
+class LchBase(Cylindrical, Space):
+    """Lch class."""
+
+    CHANNEL_NAMES = ("lightness", "chroma", "hue", "alpha")
+
+    RANGE = (
+        # I think chroma, specifically, should be clamped.
+        # Some libraries don't, to prevent rounding issues. We should only get
+        # negative chroma via direct user input, but when translating to
+        # Lab, this will be corrected.
+        GamutUnbound([Percent(0.0), Percent(100.0)]),
+        GamutUnbound([0.0, 100.0]),
+        GamutUnbound([Angle(0.0), Angle(360.0)]),
+    )
+
+    @property
+    def lightness(self):
+        """Lightness."""
+
+        return self._coords[0]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Get true luminance."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def chroma(self):
+        """Chroma."""
+
+        return self._coords[1]
+
+    @chroma.setter
+    def chroma(self, value):
+        """chroma."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def hue(self):
+        """Hue."""
+
+        return self._coords[2]
+
+    @hue.setter
+    def hue(self, value):
+        """Shift the hue."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] < ACHROMATIC_THRESHOLD:
+            coords[2] = util.NaN
+        return coords, alpha
+
+
+class Lch(LchBase):
+    """Lch class."""
+
+    SPACE = "lch"
+    SERIALIZE = ("--lch",)
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D50"
+
+    @classmethod
+    def _to_lab(cls, parent, lch):
+        """To Lab."""
+
+        return lch_to_lab(lch)
+
+    @classmethod
+    def _from_lab(cls, parent, lab):
+        """To Lab."""
+
+        return lab_to_lch(lab)
+
+    @classmethod
+    def _to_xyz(cls, parent, lch):
+        """To XYZ."""
+
+        return Lab._to_xyz(parent, cls._to_lab(parent, lch))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_lab(parent, Lab._from_xyz(parent, xyz))
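The Lab ↔ Lch step above is ordinary rectangular/polar trigonometry; a standalone sketch, keeping hue as a plain float instead of the library's NaN convention for achromatic colors:

```
import math


def lab_to_lch(l, a, b):
    """Rectangular -> polar, hue normalized to [0, 360)."""
    return l, math.sqrt(a ** 2 + b ** 2), math.degrees(math.atan2(b, a)) % 360


def lch_to_lab(l, c, h):
    """Polar -> rectangular."""
    return l, c * math.cos(math.radians(h)), c * math.sin(math.radians(h))


l, c, h = lab_to_lch(50.0, 20.0, -30.0)
print(round(c, 4), round(h, 4))  # approximately 36.0555 303.6901
assert all(abs(x - y) < 1e-9 for x, y in zip((50.0, 20.0, -30.0), lch_to_lab(l, c, h)))
```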
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/css.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/css.py
new file mode 100644
index 00000000..26d441ae
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch/css.py
@@ -0,0 +1,96 @@
+"""Lch class."""
+import re
+from . import base
+from ...spaces import _parse
+from ... import util
+
+
+class Lch(base.Lch):
+    """Lch class."""
+
+    DEF_VALUE = "lch(0% 0 0 / 1)"
+    START = re.compile(r'(?i)\blch\(')
+    MATCH = re.compile(
+        r"""(?xi)
+        \blch\(\s*
+        (?:
+            # Space separated format
+            {percent}{space}{float}{space}{angle}(?:{slash}(?:{percent}|{float}))? |
+            # comma separated format
+            {percent}{comma}{float}{comma}{angle}(?:{comma}(?:{percent}|{float}))?
+        )
+        \s*\)
+        """.format(**_parse.COLOR_PARTS)
+    )
+
+    def to_string(
+        self, parent, *, alpha=None, precision=None, fit=True, **kwargs
+    ):
+        """Convert to CSS."""
+
+        if precision is None:
+            precision = parent.PRECISION
+
+        options = kwargs
+        if options.get("color"):
+            return super().to_string(parent, alpha=alpha, precision=precision, fit=fit, **kwargs)
+
+        a = util.no_nan(self.alpha)
+        alpha = alpha is not False and (alpha is True or a < 1.0)
+        method = None if not isinstance(fit, str) else fit
+        coords = util.no_nan(parent.fit(method=method).coords() if fit else self.coords())
+
+        if alpha:
+            template = "lch({}%, {}, {}, {})" if options.get("comma") else "lch({}% {} {} / {})"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision),
+                util.fmt_float(a, max(util.DEF_PREC, precision))
+            )
+        else:
+            template = "lch({}%, {}, {})" if options.get("comma") else "lch({}% {} {})"
+            return template.format(
+                util.fmt_float(coords[0], precision),
+                util.fmt_float(coords[1], precision),
+                util.fmt_float(coords[2], precision)
+            )
+
+    @classmethod
+    def translate_channel(cls, channel, value):
+        """Translate channel string."""
+
+        if channel == 0:
+            return _parse.norm_percent_channel(value)
+        elif channel == 1:
+            return _parse.norm_float(value)
+        elif channel == 2:
+            return _parse.norm_angle_channel(value)
+        elif channel == -1:
+            return _parse.norm_alpha_channel(value)
+
+    @classmethod
+    def split_channels(cls, color):
+        """Split channels."""
+
+        start = 4
+        channels = []
+        alpha = 1.0
+        for i, c in enumerate(_parse.RE_CHAN_SPLIT.split(color[start:-1].strip()), 0):
+            if i <= 2:
+                channels.append(cls.translate_channel(i, c))
+            elif i == 3:
+                alpha = cls.translate_channel(-1, c)
+        return cls.null_adjust(channels, alpha)
+
+    @classmethod
+    def match(cls, string, start=0, fullmatch=True):
+        """Match a CSS color string."""
+
+        channels, end = super().match(string, start, fullmatch)
+        if channels is not None:
+            return channels, end
+        m = cls.MATCH.match(string, start)
+        if m is not None and (not fullmatch or m.end(0) == len(string)):
+            return cls.split_channels(string[m.start(0):m.end(0)]), m.end(0)
+        return None, None
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch_d65.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch_d65.py
new file mode 100644
index 00000000..f2a26c34
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lch_d65.py
@@ -0,0 +1,38 @@
+"""Lch D65 class."""
+from ..spaces import RE_DEFAULT_MATCH
+from .lab_d65 import LabD65
+from .lch.base import LchBase, lch_to_lab, lab_to_lch
+import re
+
+
+class LchD65(LchBase):
+    """Lch D65 class."""
+
+    SPACE = "lch-d65"
+    SERIALIZE = ("--lch-d65",)
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    @classmethod
+    def _to_lab_d65(cls, parent, lchd65):
+        """To Lab."""
+
+        return lch_to_lab(lchd65)
+
+    @classmethod
+    def _from_lab_d65(cls, parent, labd65):
+        """To Lab."""
+
+        return lab_to_lch(labd65)
+
+    @classmethod
+    def _to_xyz(cls, parent, lch):
+        """To XYZ."""
+
+        return LabD65._to_xyz(parent, cls._to_lab_d65(parent, lch))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_lab_d65(parent, LabD65._from_xyz(parent, xyz))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lchuv.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lchuv.py
new file mode 100644
index 00000000..544dec62
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/lchuv.py
@@ -0,0 +1,126 @@
+"""LCH class."""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, Cylindrical, Angle, Percent
+from .luv import Luv
+from .. import util
+import re
+import math
+
+ACHROMATIC_THRESHOLD = 0.000000000002
+
+
+def luv_to_lchuv(luv):
+    """Luv to Lch(uv)."""
+
+    l, u, v = luv
+
+    c = math.sqrt(u ** 2 + v ** 2)
+    h = math.degrees(math.atan2(v, u))
+
+    # Achromatic colors will often get extremely close, but not quite hit zero.
+    # Essentially, we want to discard noise through rounding and such.
+    if c < ACHROMATIC_THRESHOLD:
+        h = util.NaN
+
+    return [l, c, util.constrain_hue(h)]
+
+
+def lchuv_to_luv(lchuv):
+    """Lch(uv) to Luv."""
+
+    l, c, h = lchuv
+    h = util.no_nan(h)
+
+    # If, for whatever reason (mainly direct user input),
+    # chroma is less than zero, clamp it to zero.
+    if c < 0.0:
+        c = 0.0
+
+    return (
+        l,
+        c * math.cos(math.radians(h)),
+        c * math.sin(math.radians(h))
+    )
+
+
+class Lchuv(Cylindrical, Space):
+    """Lch(uv) class."""
+
+    SPACE = "lchuv"
+    SERIALIZE = ("--lchuv",)
+    CHANNEL_NAMES = ("lightness", "chroma", "hue", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([Percent(0), Percent(100.0)]),
+        GamutUnbound([0.0, 176.0]),
+        GamutUnbound([Angle(0.0), Angle(360.0)]),
+    )
+
+    @property
+    def lightness(self):
+        """Lightness."""
+
+        return self._coords[0]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Get true luminance."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def chroma(self):
+        """Chroma."""
+
+        return self._coords[1]
+
+    @chroma.setter
+    def chroma(self, value):
+        """chroma."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def hue(self):
+        """Hue."""
+
+        return self._coords[2]
+
+    @hue.setter
+    def hue(self, value):
+        """Shift the hue."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] < ACHROMATIC_THRESHOLD:
+            coords[2] = util.NaN
+        return coords, alpha
+
+    @classmethod
+    def _to_luv(cls, parent, lchuv):
+        """To Luv."""
+
+        return lchuv_to_luv(lchuv)
+
+    @classmethod
+    def _from_luv(cls, parent, luv):
+        """To Luv."""
+
+        return luv_to_lchuv(luv)
+
+    @classmethod
+    def _to_xyz(cls, parent, lchuv):
+        """To XYZ."""
+
+        return Luv._to_xyz(parent, cls._to_luv(parent, lchuv))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_luv(parent, Luv._from_xyz(parent, xyz))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/luv.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/luv.py
new file mode 100644
index 00000000..ec8baddf
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/luv.py
@@ -0,0 +1,126 @@
+"""
+Luv class.
+
+https://en.wikipedia.org/wiki/CIELUV
+"""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, Percent, WHITES
+from .xyz import XYZ
+from .. import util
+import re
+
+
+def xyz_to_uv(xyz):
+    """XYZ to UV."""
+
+    x, y, z = xyz
+    denom = (x + 15 * y + 3 * z)
+    if denom != 0:
+        u = (4 * x) / (x + 15 * y + 3 * z)
+        v = (9 * y) / (x + 15 * y + 3 * z)
+    else:
+        u = v = 0
+
+    return u, v
+
+
+def xyz_to_luv(xyz, white):
+    """XYZ to Luv."""
+
+    u, v = xyz_to_uv(xyz)
+    un, vn = xyz_to_uv(WHITES[white])
+
+    y = xyz[1] / WHITES[white][1]
+    l = 116 * util.nth_root(y, 3) - 16 if y > ((6 / 29) ** 3) else ((29 / 3) ** 3) * y
+
+    return [
+        l,
+        13 * l * (u - un),
+        13 * l * (v - vn),
+    ]
+
+
+def luv_to_xyz(luv, white):
+    """Luv to XYZ."""
+
+    l, u, v = luv
+    un, vn = xyz_to_uv(WHITES[white])
+
+    if l != 0:
+        up = (u / (13 * l)) + un
+        vp = (v / (13 * l)) + vn
+    else:
+        up = vp = 0
+
+    y = WHITES[white][1] * ((l + 16) / 116) ** 3 if l > 8 else WHITES[white][1] * l * ((3 / 29) ** 3)
+
+    if vp != 0:
+        x = y * ((9 * up) / (4 * vp))
+        z = y * ((12 - 3 * up - 20 * vp) / (4 * vp))
+    else:
+        x = z = 0
+
+    return [x, y, z]
+
+
+class Luv(Space):
+    """Oklab class."""
+
+    SPACE = "luv"
+    SERIALIZE = ("--luv",)
+    CHANNEL_NAMES = ("lightness", "u", "v", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([Percent(0), Percent(100.0)]),
+        GamutUnbound([-175.0, 175.0]),
+        GamutUnbound([-175.0, 175.0])
+    )
+
+    @property
+    def lightness(self):
+        """L channel."""
+
+        return self._coords[0]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Get true luminance."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def u(self):
+        """U channel."""
+
+        return self._coords[1]
+
+    @u.setter
+    def u(self, value):
+        """U axis."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def v(self):
+        """V channel."""
+
+        return self._coords[2]
+
+    @v.setter
+    def v(self, value):
+        """V axis."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def _to_xyz(cls, parent, luv):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, luv_to_xyz(luv, cls.WHITE))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return xyz_to_luv(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz), cls.WHITE)
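The `xyz_to_uv` helper above computes CIE 1976 u′v′ chromaticity. As a quick standalone check, an assumed D65 white point of roughly (0.95047, 1.0, 1.08883) lands near the familiar u′ ≈ 0.1978, v′ ≈ 0.4683; the library's `WHITES` table holds the exact values used by `xyz_to_luv`/`luv_to_xyz`.

```
def xyz_to_uv(xyz):
    """Same math as the helper above, inlined for a standalone check."""
    x, y, z = xyz
    denom = x + 15 * y + 3 * z
    return ((4 * x) / denom, (9 * y) / denom) if denom else (0, 0)


print([round(c, 4) for c in xyz_to_uv([0.95047, 1.0, 1.08883])])  # [0.1978, 0.4683]
```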
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/oklab.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/oklab.py
new file mode 100644
index 00000000..de7ed14e
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/oklab.py
@@ -0,0 +1,109 @@
+"""
+Oklab class.
+
+https://bottosson.github.io/posts/oklab/
+"""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, OptionalPercent
+from .xyz import XYZ
+from .. import util
+import re
+
+m1 = [
+    [0.8189330101, 0.0329845436, 0.0482003018],
+    [0.3618667424, 0.9293118715, 0.2643662691],
+    [-0.1288597137, 0.0361456387, 0.633851707]
+]
+
+m2 = [
+    [0.2104542553, 1.9779984951, 0.0259040371],
+    [0.793617785, -2.428592205, 0.7827717662],
+    [-0.0040720468, 0.4505937099, -0.808675766]
+]
+
+m1i = [
+    [1.2270138511035211, -0.04058017842328059, -0.07638128450570689],
+    [-0.5577999806518223, 1.11225686961683, -0.42148197841801266],
+    [0.2812561489664678, -0.0716766786656012, 1.5861632204407947]
+]
+
+m2i = [
+    [0.9999999984505199, 1.0000000088817607, 1.0000000546724108],
+    [0.3963377921737679, -0.10556134232365635, -0.08948418209496575],
+    [0.2158037580607588, -0.06385417477170591, -1.2914855378640917]
+]
+
+
+def xyz_d65_to_oklab(xyzd65):
+    """XYZ D65 to Oklab."""
+
+    return util.dot([util.cbrt(x) for x in util.dot(xyzd65, m1)], m2)
+
+
+def oklab_to_xyz_d65(oklab):
+    """From XYZ to LMS."""
+
+    return util.dot([x ** 3 for x in util.dot(oklab, m2i)], m1i)
+
+
+class Oklab(Space):
+    """Oklab class."""
+
+    SPACE = "oklab"
+    SERIALIZE = ("--oklab",)
+    CHANNEL_NAMES = ("lightness", "a", "b", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([OptionalPercent(0), OptionalPercent(1)]),
+        GamutUnbound([-0.5, 0.5]),
+        GamutUnbound([-0.5, 0.5])
+    )
+
+    @property
+    def lightness(self):
+        """L channel."""
+
+        return self._coords[0]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Get true luminance."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def a(self):
+        """A channel."""
+
+        return self._coords[1]
+
+    @a.setter
+    def a(self, value):
+        """A axis."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def b(self):
+        """B channel."""
+
+        return self._coords[2]
+
+    @b.setter
+    def b(self, value):
+        """B axis."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def _to_xyz(cls, parent, oklab):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, oklab_to_xyz_d65(oklab))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return xyz_d65_to_oklab(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz))
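The matrices above appear to be stored transposed relative to the commonly published Oklab matrices so that `util.dot(vector, matrix)` multiplies on the right. As a standalone sanity check, with an assumed D65 white of roughly (0.95047, 1.0, 1.08883), white should map to approximately Oklab (1, 0, 0):

```
# Standalone sanity check: D65 white should come out near Oklab (1, 0, 0).
# Matrices copied from m1/m2 above; the white point here is approximate.
m1 = [
    [0.8189330101, 0.0329845436, 0.0482003018],
    [0.3618667424, 0.9293118715, 0.2643662691],
    [-0.1288597137, 0.0361456387, 0.633851707]
]
m2 = [
    [0.2104542553, 1.9779984951, 0.0259040371],
    [0.793617785, -2.428592205, 0.7827717662],
    [-0.0040720468, 0.4505937099, -0.808675766]
]


def vdot(vec, mat):
    """Vector times matrix, matching how util.dot is applied above."""
    return [sum(v * mat[i][j] for i, v in enumerate(vec)) for j in range(len(mat[0]))]


lms = [x ** (1 / 3) for x in vdot([0.95047, 1.0, 1.08883], m1)]
print([round(c, 3) for c in vdot(lms, m2)])  # approximately [1.0, 0.0, 0.0]
```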
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/oklch.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/oklch.py
new file mode 100644
index 00000000..3281bbd2
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/oklch.py
@@ -0,0 +1,126 @@
+"""LCH class."""
+from ..spaces import Space, RE_DEFAULT_MATCH, GamutUnbound, Cylindrical, Angle, OptionalPercent
+from .oklab import Oklab
+from .. import util
+import re
+import math
+
+ACHROMATIC_THRESHOLD = 0.0002
+
+
+def oklab_to_oklch(oklab):
+    """Oklab to Oklch."""
+
+    l, a, b = oklab
+
+    c = math.sqrt(a ** 2 + b ** 2)
+    h = math.degrees(math.atan2(b, a))
+
+    # Achromatic colors will often get extremely close, but not quite hit zero.
+    # Essentially, we want to discard noise through rounding and such.
+    if c < ACHROMATIC_THRESHOLD:
+        h = util.NaN
+
+    return [l, c, util.constrain_hue(h)]
+
+
+def oklch_to_oklab(oklch):
+    """Oklch to Oklab."""
+
+    l, c, h = oklch
+    h = util.no_nan(h)
+
+    # If, for whatever reason (mainly direct user input),
+    # chroma is less than zero, clamp it to zero.
+    if c < 0.0:
+        c = 0.0
+
+    return (
+        l,
+        c * math.cos(math.radians(h)),
+        c * math.sin(math.radians(h))
+    )
+
+
+class Oklch(Cylindrical, Space):
+    """Oklch class."""
+
+    SPACE = "oklch"
+    SERIALIZE = ("--oklch",)
+    CHANNEL_NAMES = ("lightness", "chroma", "hue", "alpha")
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space='|'.join(SERIALIZE), channels=3))
+    WHITE = "D65"
+
+    RANGE = (
+        GamutUnbound([OptionalPercent(0), OptionalPercent(1)]),
+        GamutUnbound([0.0, 1.0]),
+        GamutUnbound([Angle(0.0), Angle(360.0)]),
+    )
+
+    @property
+    def lightness(self):
+        """Lightness."""
+
+        return self._coords[0]
+
+    @lightness.setter
+    def lightness(self, value):
+        """Get true luminance."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def chroma(self):
+        """Chroma."""
+
+        return self._coords[1]
+
+    @chroma.setter
+    def chroma(self, value):
+        """chroma."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def hue(self):
+        """Hue."""
+
+        return self._coords[2]
+
+    @hue.setter
+    def hue(self, value):
+        """Shift the hue."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def null_adjust(cls, coords, alpha):
+        """On color update."""
+
+        if coords[1] < ACHROMATIC_THRESHOLD:
+            coords[2] = util.NaN
+        return coords, alpha
+
+    @classmethod
+    def _to_oklab(cls, parent, oklch):
+        """To Lab."""
+
+        return oklch_to_oklab(oklch)
+
+    @classmethod
+    def _from_oklab(cls, parent, oklab):
+        """To Lab."""
+
+        return oklab_to_oklch(oklab)
+
+    @classmethod
+    def _to_xyz(cls, parent, oklch):
+        """To XYZ."""
+
+        return Oklab._to_xyz(parent, cls._to_oklab(parent, oklch))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return cls._from_oklab(parent, Oklab._from_xyz(parent, xyz))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/prophoto_rgb.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/prophoto_rgb.py
new file mode 100644
index 00000000..633ff162
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/prophoto_rgb.py
@@ -0,0 +1,96 @@
+"""Pro Photo RGB color class."""
+from ..spaces import RE_DEFAULT_MATCH
+from .srgb.base import SRGB
+from .xyz import XYZ
+from .. import util
+import re
+
+ET = 1 / 512
+ET2 = 16 / 512
+
+
+def lin_prophoto_to_xyz(rgb):
+    """
+    Convert an array of linear-light prophoto-rgb values to CIE XYZ using D50.
+
+    (so no chromatic adaptation needed afterwards)
+    http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html
+    """
+
+    m = [
+        [7.9767494443060449e-01, 1.3519170147409817e-01, 3.1353354095297416e-02],
+        [2.8804023786231026e-01, 7.1187409723579020e-01, 8.5664901899719714e-05],
+        [0.0000000000000000e+00, 0.0000000000000000e+00, 8.2521000000000000e-01]
+    ]
+
+    return util.dot(m, rgb)
+
+
+def xyz_to_lin_prophoto(xyz):
+    """Convert XYZ to linear-light prophoto-rgb."""
+
+    m = [
+        [1.3459433009386652, -0.25560750931676696, -0.05111176587088495],
+        [-0.544598869458717, 1.508167317720767, 0.020535141586646915],
+        [0.0, 0.0, 1.2118127506937628]
+    ]
+
+    return util.dot(m, xyz)
+
+
+def lin_prophoto(rgb):
+    """
+    Convert an array of prophoto-rgb values in the range 0.0 - 1.0 to linear light (un-corrected) form.
+
+    Transfer curve is gamma 1.8 with a small linear portion.
+
+    https://en.wikipedia.org/wiki/ProPhoto_RGB_color_space
+    """
+
+    result = []
+    for i in rgb:
+        # Mirror linear nature of algorithm on the negative axis
+        if abs(i) < ET2:
+            result.append(i / 16.0)
+        else:
+            result.append(util.npow(i, 1.8))
+    return result
+
+
+def gam_prophoto(rgb):
+    """
+    Convert an array of linear-light prophoto-rgb values in the range 0.0-1.0 to gamma corrected form.
+
+    Transfer curve is gamma 1.8 with a small linear portion.
+
+    https://en.wikipedia.org/wiki/ProPhoto_RGB_color_space
+    """
+
+    result = []
+    for i in rgb:
+        # Mirror linear nature of algorithm on the negative axis
+        if abs(i) < ET:
+            result.append(16.0 * i)
+        else:
+            result.append(util.nth_root(i, 1.8))
+    return result
+
+
+class ProPhotoRGB(SRGB):
+    """Pro Photo RGB class."""
+
+    SPACE = "prophoto-rgb"
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space=SPACE, channels=3))
+    WHITE = "D50"
+
+    @classmethod
+    def _to_xyz(cls, parent, rgb):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lin_prophoto_to_xyz(lin_prophoto(rgb)))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return gam_prophoto(xyz_to_lin_prophoto(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz)))
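The ProPhoto transfer curve above is a 1.8 gamma with a linear toe (threshold 1/512 on the linear side, 16/512 on the encoded side). A standalone round trip of that curve, assuming non-negative values (the originals mirror the negative axis):

```
# Standalone round-trip sketch of lin_prophoto / gam_prophoto above,
# restricted to non-negative input for brevity.
ET = 1 / 512
ET2 = 16 / 512


def prophoto_decode(v):
    """Gamma-encoded -> linear, cf. lin_prophoto."""
    return v / 16.0 if v < ET2 else v ** 1.8


def prophoto_encode(v):
    """Linear -> gamma-encoded, cf. gam_prophoto."""
    return 16.0 * v if v < ET else v ** (1 / 1.8)


for v in (0.0005, 0.01, 0.5, 1.0):
    assert abs(prophoto_decode(prophoto_encode(v)) - v) < 1e-12
```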
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/rec2020.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/rec2020.py
new file mode 100644
index 00000000..663ca0b3
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/rec2020.py
@@ -0,0 +1,96 @@
+"""Rec 2020 color class."""
+from ..spaces import RE_DEFAULT_MATCH
+from .srgb.base import SRGB
+from .xyz import XYZ
+from .. import util
+import re
+import math
+
+ALPHA = 1.09929682680944
+BETA = 0.018053968510807
+BETA45 = 0.018053968510807 * 4.5
+
+
+def lin_2020(rgb):
+    """
+    Convert an array of rec-2020 RGB values in the range 0.0 - 1.0 to linear light (un-corrected) form.
+
+    https://en.wikipedia.org/wiki/Rec._2020#Transfer_characteristics
+    """
+
+    result = []
+    for i in rgb:
+        # Mirror linear nature of algorithm on the negative axis
+        abs_i = abs(i)
+        if abs_i < BETA45:
+            result.append(i / 4.5)
+        else:
+            result.append(math.copysign(util.nth_root((abs_i + ALPHA - 1) / ALPHA, 0.45), i))
+    return result
+
+
+def gam_2020(rgb):
+    """
+    Convert an array of linear-light rec-2020 RGB values in the range 0.0-1.0 to gamma corrected form.
+
+    https://en.wikipedia.org/wiki/Rec._2020#Transfer_characteristics
+    """
+
+    result = []
+    for i in rgb:
+        # Mirror linear nature of algorithm on the negative axis
+        abs_i = abs(i)
+        if abs_i < BETA:
+            result.append(4.5 * i)
+        else:
+            result.append(math.copysign(ALPHA * (abs_i ** 0.45) - (ALPHA - 1), i))
+    return result
+
+
+def lin_2020_to_xyz(rgb):
+    """
+    Convert an array of linear-light rec-2020 values to CIE XYZ using D65.
+
+    (no chromatic adaptation)
+    http://www.brucelindbloom.com/index.html?Eqn_RGB_XYZ_Matrix.html
+    """
+
+    m = [
+        [6.3701019141110093e-01, 1.4461502739696933e-01, 1.6884478119192992e-01],
+        [2.6272171736164052e-01, 6.7798927550226207e-01, 5.9289007136097520e-02],
+        [4.9945154055471928e-17, 2.8072328847646915e-02, 1.0607576711523534e+00]
+    ]
+
+    return util.dot(m, rgb)
+
+
+def xyz_to_lin_2020(xyz):
+    """Convert XYZ to linear-light rec-2020."""
+
+    m = [
+        [1.7165106697619734, -0.35564166998671587, -0.25334554182190716],
+        [-0.6666930011826241, 1.6165022083469103, 0.015768750389995],
+        [0.017643638767459002, -0.04277978166904461, 0.9423050727200183]
+    ]
+
+    return util.dot(m, xyz)
+
+
+class Rec2020(SRGB):
+    """Rec 2020 class."""
+
+    SPACE = "rec2020"
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space=SPACE, channels=3))
+    WHITE = "D65"
+
+    @classmethod
+    def _to_xyz(cls, parent, rgb):
+        """To XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lin_2020_to_xyz(lin_2020(rgb)))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """From XYZ."""
+
+        return gam_2020(xyz_to_lin_2020(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz)))
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/__init__.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/__init__.py
new file mode 100644
index 00000000..f978475c
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/__init__.py
@@ -0,0 +1 @@
+"""SRGB color class."""
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/base.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/base.py
new file mode 100644
index 00000000..ff0ffcd1
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/base.py
@@ -0,0 +1,136 @@
+"""SRGB color class."""
+from ...spaces import RE_DEFAULT_MATCH, Space, GamutBound, OptionalPercent
+from ..xyz import XYZ
+from ... import util
+import re
+import math
+
+
+def lin_srgb_to_xyz(rgb):
+    """
+    Convert an array of linear-light sRGB values to CIE XYZ using sRGB's own white.
+
+    D65 (no chromatic adaptation)
+    """
+
+    m = [
+        [0.41245643908969226, 0.357576077643909, 0.18043748326639897],
+        [0.21267285140562256, 0.715152155287818, 0.07217499330655959],
+        [0.019333895582329303, 0.11919202588130297, 0.950304078536368]
+    ]
+
+    return util.dot(m, rgb)
+
+
+def xyz_to_lin_srgb(xyz):
+    """Convert XYZ to linear-light sRGB."""
+
+    m = [
+        [3.2404541621141045, -1.5371385127977162, -0.49853140955601605],
+        [-0.969266030505187, 1.8760108454466944, 0.04155601753034984],
+        [0.05564343095911475, -0.20402591351675384, 1.057225188223179]
+    ]
+
+    return util.dot(m, xyz)
+
+
+def lin_srgb(rgb):
+    """
+    Convert an array of sRGB values in the range 0.0 - 1.0 to linear light (un-corrected) form.
+
+    https://en.wikipedia.org/wiki/SRGB
+    """
+
+    result = []
+    for i in rgb:
+        # Mirror linear nature of algorithm on the negative axis
+        abs_i = abs(i)
+        if abs_i < 0.04045:
+            result.append(i / 12.92)
+        else:
+            result.append(math.copysign(((abs_i + 0.055) / 1.055) ** 2.4, i))
+    return result
+
+
+def gam_srgb(rgb):
+    """
+    Convert an array of linear-light sRGB values in the range 0.0-1.0 to gamma corrected form.
+
+    https://en.wikipedia.org/wiki/SRGB
+    """
+
+    result = []
+    for i in rgb:
+        # Mirror linear nature of algorithm on the negative axis
+        abs_i = abs(i)
+        if abs_i > 0.0031308:
+            result.append(math.copysign(1.055 * (util.nth_root(abs_i, 2.4)) - 0.055, i))
+        else:
+            result.append(12.92 * i)
+    return result
+
+
+class SRGB(Space):
+    """SRGB class."""
+
+    SPACE = "srgb"
+    # In addition to the current gamut, check HSL as it is much more sensitive to small
+    # gamut changes. This is mainly for a better user experience. Colors will still be
+    # mapped/clipped in the current space, unless specified otherwise.
+    DEFAULT_MATCH = re.compile(RE_DEFAULT_MATCH.format(color_space=SPACE, channels=3))
+    CHANNEL_NAMES = ("red", "green", "blue", "alpha")
+    WHITE = "D65"
+
+    RANGE = (
+        GamutBound([OptionalPercent(0.0), OptionalPercent(1.0)]),
+        GamutBound([OptionalPercent(0.0), OptionalPercent(1.0)]),
+        GamutBound([OptionalPercent(0.0), OptionalPercent(1.0)])
+    )
+
+    @property
+    def red(self):
+        """Adjust red."""
+
+        return self._coords[0]
+
+    @red.setter
+    def red(self, value):
+        """Adjust red."""
+
+        self._coords[0] = self._handle_input(value)
+
+    @property
+    def green(self):
+        """Adjust green."""
+
+        return self._coords[1]
+
+    @green.setter
+    def green(self, value):
+        """Adjust green."""
+
+        self._coords[1] = self._handle_input(value)
+
+    @property
+    def blue(self):
+        """Adjust blue."""
+
+        return self._coords[2]
+
+    @blue.setter
+    def blue(self, value):
+        """Adjust blue."""
+
+        self._coords[2] = self._handle_input(value)
+
+    @classmethod
+    def _to_xyz(cls, parent, rgb):
+        """SRGB to XYZ."""
+
+        return parent.chromatic_adaptation(cls.WHITE, XYZ.WHITE, lin_srgb_to_xyz(lin_srgb(rgb)))
+
+    @classmethod
+    def _from_xyz(cls, parent, xyz):
+        """XYZ to SRGB."""
+
+        return gam_srgb(xyz_to_lin_srgb(parent.chromatic_adaptation(XYZ.WHITE, cls.WHITE, xyz)))
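Same idea for sRGB: a 2.4-power segment with a linear toe below 0.04045 (encoded) / 0.0031308 (linear). A standalone round trip of the transfer pair above, again assuming non-negative channel values:

```
# Standalone round-trip sketch of lin_srgb / gam_srgb above, restricted to
# non-negative input for brevity.
def srgb_decode(v):
    """Gamma-encoded -> linear, cf. lin_srgb."""
    return v / 12.92 if v < 0.04045 else ((v + 0.055) / 1.055) ** 2.4


def srgb_encode(v):
    """Linear -> gamma-encoded, cf. gam_srgb."""
    return 12.92 * v if v <= 0.0031308 else 1.055 * v ** (1 / 2.4) - 0.055


for v in (0.001, 0.18, 0.5, 1.0):
    assert abs(srgb_encode(srgb_decode(v)) - v) < 1e-12
```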
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/color_names.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/color_names.py
new file mode 100644
index 00000000..522a9b2d
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/color_names.py
@@ -0,0 +1,174 @@
+"""
+CSS colors.
+
+A simple name to hex and hex to name map of CSS3 colors.
+
+http://www.w3.org/TR/SVG/types.html#ColorKeywords
+"""
+name2hex_map = {
+    'aliceblue': '#f0f8ff',
+    'antiquewhite': '#faebd7',
+    'aqua': '#00ffff',
+    'aquamarine': '#7fffd4',
+    'azure': '#f0ffff',
+    'beige': '#f5f5dc',
+    'bisque': '#ffe4c4',
+    'black': '#000000',
+    'blanchedalmond': '#ffebcd',
+    'blue': '#0000ff',
+    'blueviolet': '#8a2be2',
+    'brown': '#a52a2a',
+    'burlywood': '#deb887',
+    'cadetblue': '#5f9ea0',
+    'chartreuse': '#7fff00',
+    'chocolate': '#d2691e',
+    'coral': '#ff7f50',
+    'cornflowerblue': '#6495ed',
+    'cornsilk': '#fff8dc',
+    'crimson': '#dc143c',
+    'cyan': '#00ffff',
+    'darkblue': '#00008b',
+    'darkcyan': '#008b8b',
+    'darkgoldenrod': '#b8860b',
+    'darkgray': '#a9a9a9',
+    'darkgrey': '#a9a9a9',
+    'darkgreen': '#006400',
+    'darkkhaki': '#bdb76b',
+    'darkmagenta': '#8b008b',
+    'darkolivegreen': '#556b2f',
+    'darkorange': '#ff8c00',
+    'darkorchid': '#9932cc',
+    'darkred': '#8b0000',
+    'darksalmon': '#e9967a',
+    'darkseagreen': '#8fbc8f',
+    'darkslateblue': '#483d8b',
+    'darkslategray': '#2f4f4f',
+    'darkslategrey': '#2f4f4f',
+    'darkturquoise': '#00ced1',
+    'darkviolet': '#9400d3',
+    'deeppink': '#ff1493',
+    'deepskyblue': '#00bfff',
+    'dimgray': '#696969',
+    'dimgrey': '#696969',
+    'dodgerblue': '#1e90ff',
+    'firebrick': '#b22222',
+    'floralwhite': '#fffaf0',
+    'forestgreen': '#228b22',
+    'fuchsia': '#ff00ff',
+    'gainsboro': '#dcdcdc',
+    'ghostwhite': '#f8f8ff',
+    'gold': '#ffd700',
+    'goldenrod': '#daa520',
+    'gray': '#808080',
+    'grey': '#808080',
+    'green': '#008000',
+    'greenyellow': '#adff2f',
+    'honeydew': '#f0fff0',
+    'hotpink': '#ff69b4',
+    'indianred': '#cd5c5c',
+    'indigo': '#4b0082',
+    'ivory': '#fffff0',
+    'khaki': '#f0e68c',
+    'lavender': '#e6e6fa',
+    'lavenderblush': '#fff0f5',
+    'lawngreen': '#7cfc00',
+    'lemonchiffon': '#fffacd',
+    'lightblue': '#add8e6',
+    'lightcoral': '#f08080',
+    'lightcyan': '#e0ffff',
+    'lightgoldenrodyellow': '#fafad2',
+    'lightgray': '#d3d3d3',
+    'lightgrey': '#d3d3d3',
+    'lightgreen': '#90ee90',
+    'lightpink': '#ffb6c1',
+    'lightsalmon': '#ffa07a',
+    'lightseagreen': '#20b2aa',
+    'lightskyblue': '#87cefa',
+    'lightslategray': '#778899',
+    'lightslategrey': '#778899',
+    'lightsteelblue': '#b0c4de',
+    'lightyellow': '#ffffe0',
+    'lime': '#00ff00',
+    'limegreen': '#32cd32',
+    'linen': '#faf0e6',
+    'magenta': '#ff00ff',
+    'maroon': '#800000',
+    'mediumaquamarine': '#66cdaa',
+    'mediumblue': '#0000cd',
+    'mediumorchid': '#ba55d3',
+    'mediumpurple': '#9370d8',
+    'mediumseagreen': '#3cb371',
+    'mediumslateblue': '#7b68ee',
+    'mediumspringgreen': '#00fa9a',
+    'mediumturquoise': '#48d1cc',
+    'mediumvioletred': '#c71585',
+    'midnightblue': '#191970',
+    'mintcream': '#f5fffa',
+    'mistyrose': '#ffe4e1',
+    'moccasin': '#ffe4b5',
+    'navajowhite': '#ffdead',
+    'navy': '#000080',
+    'oldlace': '#fdf5e6',
+    'olive': '#808000',
+    'olivedrab': '#6b8e23',
+    'orange': '#ffa500',
+    'orangered': '#ff4500',
+    'orchid': '#da70d6',
+    'palegoldenrod': '#eee8aa',
+    'palegreen': '#98fb98',
+    'paleturquoise': '#afeeee',
+    'palevioletred': '#d87093',
+    'papayawhip': '#ffefd5',
+    'peachpuff': '#ffdab9',
+    'peru': '#cd853f',
+    'pink': '#ffc0cb',
+    'plum': '#dda0dd',
+    'powderblue': '#b0e0e6',
+    'purple': '#800080',
+    'rebeccapurple': '#663399',
+    'red': '#ff0000',
+    'rosybrown': '#bc8f8f',
+    'royalblue': '#4169e1',
+    'saddlebrown': '#8b4513',
+    'salmon': '#fa8072',
+    'sandybrown': '#f4a460',
+    'seagreen': '#2e8b57',
+    'seashell': '#fff5ee',
+    'sienna': '#a0522d',
+    'silver': '#c0c0c0',
+    'skyblue': '#87ceeb',
+    'slateblue': '#6a5acd',
+    'slategray': '#708090',
+    'slategrey': '#708090',
+    'snow': '#fffafa',
+    'springgreen': '#00ff7f',
+    'steelblue': '#4682b4',
+    'tan': '#d2b48c',
+    'teal': '#008080',
+    'thistle': '#d8bfd8',
+    'tomato': '#ff6347',
+    'turquoise': '#40e0d0',
+    'violet': '#ee82ee',
+    'wheat': '#f5deb3',
+    'white': '#ffffff',
+    'whitesmoke': '#f5f5f5',
+    'yellow': '#ffff00',
+    'yellowgreen': '#9acd32',
+
+    # Transparent
+    'transparent': '#00000000'
+}
+
+hex2name_map = dict([(v, k) for k, v in name2hex_map.items()])
+
+
+def hex2name(value):
+    """Convert CSS hex to webcolor name."""
+
+    return hex2name_map.get(value.lower(), None)
+
+
+def name2hex(name):
+    """Convert CSS hex to webcolor name."""
+
+    return name2hex_map.get(name.lower(), None)
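A quick usage sketch for the two lookup helpers above (the direct import is an assumption; in this tree the module lives at SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/color_names.py):

    from color_names import name2hex, hex2name  # import path is an assumption

    assert name2hex('rebeccapurple') == '#663399'
    assert hex2name('#663399') == 'rebeccapurple'
    # Unknown names/values return None instead of raising.
    assert name2hex('not-a-color') is None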
diff --git a/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/css.py b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/css.py
new file mode 100644
index 00000000..c3aa69a5
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/coloraide/spaces/srgb/css.py
@@ -0,0 +1,205 @@
+"""SRGB color class."""
+import re
+from . import color_names
+from . import base
+from .. import _parse
+from ... import util
+
+RE_COMPRESS = re.compile(r'(?i)^#({hex})\1({hex})\2({hex})\3(?:({hex})\4)?$'.format(**_parse.COLOR_PARTS))
+
+
+class SRGB(base.SRGB):
+    """SRGB class."""
+
+    DEF_VALUE = "rgb(0 0 0 / 1)"
+    START = re.compile(r'(?i)\brgba?\(')
+    MATCH = re.compile(
+        r"""(?xi)
+        (?:
+            # RGB syntax
+            \brgba?\(\s*
+            (?:
+                # Space separated format
+                (?:
+                    # Float form
+                    (?:{float}{space}){{2}}{float} |
+                    # Percent form
+                    (?:{percent}{space}){{2}}{percent}
+                )({slash}(?:{percent}|{float}))? |
+                # Comma separated format
+                (?:
+                    # Float form
+                    (?:{float}{comma}){{2}}{float} |
+                    # Percent form
+                    (?:{percent}{comma}){{2}}{percent}
+                )({comma}(?:{percent}|{float}))?
+            )
+            \s*\) |
+            # Hex syntax
+            \#(?:{hex}{{6}}(?:{hex}{{2}})?|{hex}{{3}}(?:{hex})?)\b |
+            # Names
+            \b(?
+    """
+
+    size = len(matrix)
+    indices = list(range(size))
+    m = copy.deepcopy(matrix)
+
+    # Ensure we have a square matrix
+    for r in m:
+        if len(r) != size:  # pragma: no cover
+            raise ValueError('Matrix must be an n x n matrix')
+
+    # Create an identity matrix of the same size as our provided matrix
+    im = diag([1] * size)
+
+    # Iterating through each row, we will scale each row by its "focus diagonal".
+    # Then using the scaled row, we will adjust the other rows.
+    # ```
+    # [[fd, 0,  0 ]
+    #  [0,  fd, 0 ]
+    #  [0,  0,  fd]]
+    # ```
+    for fd in indices:
+        # We will divide each value in the row by the "focus diagonal" value.
+        # If we have a zero for the given `fd` value, we cannot invert.
+        denom = m[fd][fd]
+        if denom == 0:  # pragma: no cover
+            raise ValueError('Matrix is not invertible')
+
+        # We are converting the matrix to the identity and vice versa,
+        # so scale the diagonal such that it will now equal 1.
+        # Additionally, the same operations will be applied to the identity matrix,
+        # turning it into `m ** -1` (what we are looking for).
+        fd_scalar = 1.0 / denom
+        for j in indices:
+            m[fd][j] *= fd_scalar
+            im[fd][j] *= fd_scalar
+
+        # Now, using the value found at index `fd` in the remaining rows (excluding `row[fd]`),
+        # where `cr` is the current row under evaluation, subtract `row[cr][fd] * row[fd]` from `row[cr]`.
+        for cr in indices[0:fd] + indices[fd + 1:]:
+            # The scalar for the current row
+            cr_scalar = m[cr][fd]
+
+            # Scale each item in the `row[fd]` and subtract it from the current row `row[cr]`
+            for j in indices:
+                m[cr][j] -= cr_scalar * m[fd][j]
+                im[cr][j] -= cr_scalar * im[fd][j]
+
+    # The identity matrix is now the inverse matrix and vice versa.
+    return im
+
+
+def cbrt(n):
+    """Calculate cube root."""
+
+    return nth_root(n, 3)
+
+
+def nth_root(n, p):
+    """Calculate nth root."""
+
+    if p == 0:  # pragma: no cover
+        return float('inf')
+
+    if n == 0:
+        # Can't do anything with zero
+        return 0
+
+    return math.copysign(abs(n) ** (p ** -1), n)
+
+
+def clamp(value, mn=None, mx=None):
+    """Clamp the value to the the given minimum and maximum."""
+
+    if mn is None and mx is None:
+        return value
+    elif mn is None:
+        return min(value, mx)
+    elif mx is None:
+        return max(value, mn)
+    else:
+        return max(min(value, mx), mn)
+
+
+def fmt_float(f, p=0):
+    """
+    Set float precision and trim precision zeros.
+
+    0: Round to whole integer
+    -1: Full precision
+    <positive number>: precision level
+    """
+
+    value = adjust_precision(f, p)
+    string = ('{{:{}f}}'.format('.53' if p == -1 else '.' + str(p))).format(value)
+    return string if value.is_integer() and p == 0 else string.rstrip('0').rstrip('.')
+
+
+def adjust_precision(f, p=0):
+    """Adjust precision."""
+
+    if p == -1:
+        return f
+
+    elif p == 0:
+        return round_half_up(f)
+
+    else:
+        whole = int(f)
+        digits = 0 if whole == 0 else int(math.log10(-whole if whole < 0 else whole)) + 1
+        return round_half_up(whole if digits >= p else f, p - digits)
+
+
+def round_half_up(n, scale=0):
+    """Round half up."""
+
+    mult = 10 ** scale
+    return math.floor(n * mult + 0.5) / mult
+
+
+def deprecated(message, stacklevel=2):  # pragma: no cover
+    """
+    Raise a `DeprecationWarning` when wrapped function/method is called.
+
+    Borrowed from https://stackoverflow.com/a/48632082/866026
+    """
+
+    def _decorator(func):
+        @wraps(func)
+        def _func(*args, **kwargs):
+            warnings.warn(
+                "'{}' is deprecated. {}".format(func.__name__, message),
+                category=DeprecationWarning,
+                stacklevel=stacklevel
+            )
+            return func(*args, **kwargs)
+        return _func
+    return _decorator
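The comments in `inv()` above describe a plain Gauss-Jordan elimination: scale each row so its "focus diagonal" becomes 1, eliminate that column from every other row, and mirror every operation onto an identity matrix. A minimal standalone sketch of the same idea on a 2x2 matrix (not the vendored function itself):

    import copy

    def gauss_jordan_inverse(matrix):
        """Invert a small square matrix by mirroring row operations onto an identity matrix."""
        size = len(matrix)
        m = copy.deepcopy(matrix)
        im = [[1.0 if i == j else 0.0 for j in range(size)] for i in range(size)]
        for fd in range(size):                      # "focus diagonal"
            scale = 1.0 / m[fd][fd]
            for j in range(size):
                m[fd][j] *= scale
                im[fd][j] *= scale
            for cr in range(size):                  # eliminate column fd from the other rows
                if cr == fd:
                    continue
                factor = m[cr][fd]
                for j in range(size):
                    m[cr][j] -= factor * m[fd][j]
                    im[cr][j] -= factor * im[fd][j]
        return im

    print(gauss_jordan_inverse([[4.0, 7.0], [2.0, 6.0]]))
    # -> [[0.6, -0.7], [-0.2, 0.4]] (up to floating point error)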
diff --git a/SublimeText/mdpopups/st3/mdpopups/colorbox.py b/SublimeText/mdpopups/st3/mdpopups/colorbox.py
new file mode 100644
index 00000000..03b0a556
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/colorbox.py
@@ -0,0 +1,250 @@
+"""
+Sublime tooltip color box.
+
+Licensed under MIT
+Copyright (c) 2015 - 2020 Isaac Muse 
+"""
+from .png import Writer
+from .coloraide import Color
+import base64
+import io
+
+CHECK_LIGHT = "#FFFFFF"
+CHECK_DARK = "#CCCCCC"
+
+LIGHT = 0
+DARK = 1
+
+TOP = 1
+RIGHT = 2
+BOTTOM = 4
+LEFT = 8
+
+X = 0
+Y = 1
+
+__all__ = ('color_box',)
+
+
+def to_list(rgb, alpha=False):
+    """
+    Break an RGB(A) hex color into a list of channels.
+
+    Take a color of the format `#RRGGBBAA` (alpha optional) and convert it to a
+    list with the format `[r, g, b]`, or `[r, g, b, a]` when `alpha` is `True`.
+    """
+    if alpha:
+        return [
+            int(rgb[1:3], 16),
+            int(rgb[3:5], 16),
+            int(rgb[5:7], 16),
+            int(rgb[7:9], 16) if len(rgb) > 7 else 255
+        ]
+    else:
+        return [
+            int(rgb[1:3], 16),
+            int(rgb[3:5], 16),
+            int(rgb[5:7], 16)
+        ]
+
+
+def checkered_color(color, background):
+    """Mix color with the checkered color."""
+
+    checkered = Color(color)
+    return checkered.compose(background).to_string(hex=True, alpha=False)
+
+
+def get_border_size(direction, border_map):
+    """Get size of border map."""
+
+    size = 0
+    if direction == X:
+        if border_map & LEFT:
+            size += 1
+        if border_map & RIGHT:
+            size += 1
+    elif direction == Y:
+        if border_map & TOP:
+            size += 1
+        if border_map & BOTTOM:
+            size += 1
+    return size
+
+
+def color_box_raw(
+    colors, border="#000000", border2=None, height=32, width=32,
+    border_size=1, check_size=4, max_colors=5, alpha=False, border_map=0xF
+):
+    """
+    Generate palette preview.
+
+    Create a color box with the specified RGBA color(s)
+    and RGB(A) border (alpha will be stripped out of border color).
+    `colors` is a list of colors; only up to `max_colors` of them are shown.
+    The border can use up to 2 colors (double border).
+
+    Height, width, and border thickness can all be defined.
+
+    If using a transparent color, you can define the size of the checkerboard pattern that shows through.
+    If using multiple colors, you can control the max number of colors to display. Colors are currently
+    laid out horizontally only.
+
+    Define the size of the swatch, the border width, and the size of the checkerboard squares.
+    """
+
+    assert height - (border_size * 2) >= 0, "Border size too big!"
+    assert width - (border_size * 2) >= 0, "Border size too big!"
+
+    # Gather preview colors
+    preview_colors = []
+    count = max_colors if len(colors) >= max_colors else len(colors)
+
+    border = to_list(border, False)
+    if border2 is not None:
+        border2 = to_list(border2, False)
+
+    border1_size = border2_size = int(border_size / 2)
+    border1_size += border_size % 2
+    if border2 is None:
+        border1_size += border2_size
+        border2_size = 0
+
+    if count:
+        for c in range(0, count):
+            if alpha:
+                preview_colors.append(
+                    (
+                        to_list(colors[c], True),
+                        to_list(colors[c], True)
+                    )
+                )
+            else:
+                preview_colors.append(
+                    (
+                        to_list(checkered_color(colors[c], CHECK_LIGHT)),
+                        to_list(checkered_color(colors[c], CHECK_DARK))
+                    )
+                )
+    else:
+        if alpha:
+            preview_colors.append(
+                (to_list('#00000000'), to_list('#00000000'))
+            )
+        else:
+            preview_colors.append(
+                (to_list(CHECK_LIGHT), to_list(CHECK_DARK))
+            )
+
+    color_height = height - (border_size * get_border_size(Y, border_map))
+    color_width = width - (border_size * get_border_size(X, border_map))
+
+    if count:
+        dividers = int(color_width / count)
+        if color_width % count:
+            dividers += 1
+    else:
+        dividers = 0
+
+    color_size_x = color_width
+
+    p = []
+
+    # Top Border
+    if border_map & TOP:
+        for x in range(0, border1_size):
+            row = list(border * width)
+            p.append(row)
+        for x in range(0, border2_size):
+            row = []
+            if border_map & LEFT and border_map & RIGHT:
+                row += list(border * border1_size)
+                row += list(border2 * border2_size)
+                row += list(border2 * color_width)
+                row += list(border2 * border2_size)
+                row += list(border * border1_size)
+            elif border_map & RIGHT:
+                row += list(border2 * color_width)
+                row += list(border2 * border2_size)
+                row += list(border * border1_size)
+            elif border_map & LEFT:
+                row += list(border * border1_size)
+                row += list(border2 * border2_size)
+                row += list(border2 * color_width)
+            else:
+                row += list(border2 * color_width)
+            p.append(row)
+
+    check_color_y = DARK
+    for y in range(0, color_height):
+        index = 0
+        if y % check_size == 0:
+            check_color_y = DARK if check_color_y == LIGHT else LIGHT
+
+        # Left border
+        row = []
+        if border_map & LEFT:
+            row += list(border * border1_size)
+            if border2:
+                row += list(border2 * border2_size)
+
+        check_color_x = check_color_y
+        for x in range(0, color_size_x):
+            if x != 0 and dividers != 0 and x % dividers == 0:
+                index += 1
+            if x % check_size == 0:
+                check_color_x = DARK if check_color_x == LIGHT else LIGHT
+            row += (preview_colors[index][1] if check_color_x == DARK else preview_colors[index][0])
+
+        if border_map & RIGHT:
+            # Right border
+            if border2:
+                row += list(border2 * border2_size)
+            row += list(border * border1_size)
+
+        p.append(row)
+
+    if border_map & BOTTOM:
+        # Bottom border
+        for x in range(0, border2_size):
+            row = []
+            if border_map & LEFT and border_map & RIGHT:
+                row += list(border * border1_size)
+                row += list(border2 * border2_size)
+                row += list(border2 * color_width)
+                row += list(border2 * border2_size)
+                row += list(border * border1_size)
+            elif border_map & LEFT:
+                row += list(border * border1_size)
+                row += list(border2 * border2_size)
+                row += list(border2 * color_width)
+            elif border_map & RIGHT:
+                row += list(border2 * color_width)
+                row += list(border2 * border2_size)
+                row += list(border * border1_size)
+            else:
+                row += list(border2 * color_width)
+            p.append(row)
+        for x in range(0, border1_size):
+            row = list(border * width)
+            p.append(row)
+
+    # Create bytes buffer for PNG
+    with io.BytesIO() as f:
+
+        # Write out PNG
+        img = Writer(width, height, alpha=alpha)
+        img.write(f, p)
+
+        # Read out the raw PNG bytes (base64 encoding happens in the caller)
+        f.seek(0)
+
+        return f.read()
+
+
+def color_box(*args, **kwargs):
+    """Generate palette preview and base64 encode it."""
+
+    return '<img src="data:image/png;base64,{}">'.format(
+        base64.b64encode(color_box_raw(*args, **kwargs)).decode('ascii')
+    )
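How the preview generator above might be called from a Sublime plugin; the import path and the popup hand-off are assumptions based on this vendored layout, not a documented contract:

    from mdpopups.colorbox import color_box  # assumed import path (Sublime plugin host)

    # Two swatches, 2px border, 8px checkerboard squares showing through alpha.
    html = color_box(['#ff000080', '#00ff00'], border='#000000',
                     height=32, width=32, border_size=2, check_size=8)
    # `html` is an <img> tag carrying a base64-encoded PNG data URI,
    # suitable for embedding in popup/phantom markup.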
diff --git a/SublimeText/mdpopups/st3/mdpopups/css/default.css b/SublimeText/mdpopups/st3/mdpopups/css/default.css
new file mode 100644
index 00000000..75a29bc1
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/css/default.css
@@ -0,0 +1,204 @@
+{%- if var.default_style %}
+html {
+  --mdpopups-fg: var(--foreground);
+  --mdpopups-link: var(--bluish);
+  --mdpopups-font-mono: "sf mono", Consolas, "Liberation Mono", Menlo, Courier, monospace;
+  --mdpopups-admon-fg: var(--foreground);
+  --mdpopups-admon-info-fg: var(--foreground);
+  --mdpopups-admon-error-fg: var(--foreground);
+  --mdpopups-admon-warning-fg: var(--foreground);
+  --mdpopups-admon-success-fg: var(--foreground);
+  --mdpopups-admon-bg: var(--background);
+  --mdpopups-admon-info-bg: var(--background);
+  --mdpopups-admon-error-bg: var(--background);
+  --mdpopups-admon-warning-bg: var(--background);
+  --mdpopups-admon-success-bg: var(--background);
+  --mdpopups-admon-accent: color(var(--background) blend(var(--foreground) 50%));
+  --mdpopups-admon-title-fg:  var(--foreground);
+  --mdpopups-admon-info-title-fg:  var(--foreground);
+  --mdpopups-admon-error-title-fg:  var(--foreground);
+  --mdpopups-admon-warning-title-fg:  var(--foreground);
+  --mdpopups-admon-success-title-fg:  var(--foreground);
+  --mdpopups-admon-info-accent: color(var(--background) blend(var(--bluish) 50%));
+  --mdpopups-admon-error-accent: color(var(--background) blend(var(--redish) 50%));
+  --mdpopups-admon-warning-accent: color(var(--background) blend(var(--orangish) 50%));
+  --mdpopups-admon-success-accent: color(var(--background) blend(var(--greenish) 50%));
+  --mdpopups-kbd-fg: var(--background);
+  --mdpopups-kbd-bg: var(--foreground);
+}
+html.light {
+{%- if var.is_phantom or var.is_sheet %}
+  --mdpopups-bg:  var(--background);
+{%- else %}
+  --mdpopups-bg: color(var(--background) blend(black 95%));
+{%- endif %}
+  --mdpopups-hr: color(var(--background) blend(black 85%));
+  --mdpopups-kbd-border: color(var(--foreground) blend(white 80%));
+  --mdpopups-hl-border: color(var(--background) blend(black 90%));
+  --mdpopups-hl-bg: color(var(--background) blend(black 98%));
+  background-color: var(--mdpopups-bg);
+}
+html.dark {
+{%- if var.is_phantom or var.is_sheet %}
+  --mdpopups-bg:  var(--background);
+{%- else %}
+  --mdpopups-bg: color(var(--background) blend(white 95%));
+{%- endif %}
+  --mdpopups-hr: color(var(--background) blend(white 85%));
+  --mdpopups-kbd-border: color(var(--foreground) blend(black 80%));
+  --mdpopups-hl-border: color(var(--background) blend(white 90%));
+  --mdpopups-hl-bg: color(var(--background) blend(white 98%));
+  background-color: var(--mdpopups-bg);
+}
+
+html, body { padding: 0; margin: 0; }
+div.mdpopups {
+  display: block;
+  margin: 0;
+  padding: 0;
+  font-size: 1rem;
+  line-height: 1.1rem;
+  color: var(--mdpopups-fg);
+  background-color: var(--mdpopups-bg);
+}
+.mdpopups a {
+  color: var(--mdpopups-link);
+}
+.mdpopups .highlight,
+.mdpopups code,
+.mdpopups var,
+.mdpopups tt {
+  font-family: var(--mdpopups-font-mono);
+}
+.mdpopups div { display: block; }
+/* Headers */
+.mdpopups h1 { font-size: 1.5rem; }
+.mdpopups h2 { font-size: 1.4rem; }
+.mdpopups h3 { font-size: 1.3rem; }
+.mdpopups h4 { font-size: 1.2rem; }
+.mdpopups h5 { font-size: 1.1rem; }
+.mdpopups h6 { font-size: 1rem; }
+.mdpopups h1, .mdpopups h2, .mdpopups h3, .mdpopups h4, .mdpopups h5, .mdpopups h6 {
+  margin-top: 0.2rem;
+  margin-bottom: 0.2rem;
+}
+/* Blockquote support. */
+.mdpopups blockquote { display: block; font-style: italic; }
+/* Horizontal rule support. */
+.mdpopups hr {
+  display: block;
+  border-color: var(--mdpopups-hr);
+  border-style: solid;
+  border-width: 0 0 1px 0;
+  margin-top: 1rem;
+  margin-bottom: 1rem;
+}
+/* Description list support */
+.mdpopups dl { display: block; }
+.mdpopups dt {
+  display: block;
+  font-style: italic;
+  font-weight: bold;
+  margin-bottom: 0.5rem;
+}
+.mdpopups dd {
+  display: block;
+  margin-left: 1.5rem;
+  margin-bottom: 0.5rem;
+}
+/* Preformatted block */
+.mdpopups pre { display: block; }
+/* Blocks */
+.mdpopups blockquote, .mdpopups ol, .mdpopups ul, .mdpopups dl, .mdpopups p {
+  padding: 0;
+  margin-top: 0;
+  margin-bottom: 0.5rem;
+  margin-left: 0;
+  margin-right: 0;
+}
+.mdpopups ul, .mdpopups ol { padding-left: 2rem; }
+/* kbd */
+.mdpopups kbd {
+  display: inline;
+  font-size: 0.9rem;
+  padding: 0.05rem 0.25rem;
+  border-radius: 0.25rem;
+  background-color: var(--mdpopups-kbd-bg);
+  color: var(--mdpopups-kbd-fg);
+  border: 1px solid var(--mdpopups-kbd-border);
+}
+/* Admonitions/Panels */
+.mdpopups .admonition {
+  display: block;
+  padding: 0 0.5rem 0.5rem 0.5rem;
+  border-radius: 0.25rem;
+  margin-bottom: 0.5rem;
+  background-color: var(--mdpopups-admon-bg);
+  color: var(--mdpopups-admon-fg);
+  border: 1px solid var(--mdpopups-admon-accent);
+}
+.mdpopups .admonition-title {
+  font-weight: 600;
+  font-size: 1.1rem;
+  margin: 0 -0.5rem 0.5rem -0.5rem;
+  padding: 0.1rem 0.5rem 0.1rem 0.5rem;
+  border-top-left-radius: 0.25rem;
+  border-top-right-radius: 0.25rem;
+  color:  var(--mdpopups-admon-title-fg);
+  background-color: var(--mdpopups-admon-accent);
+}
+/* Colorized Admonitions classes. */
+.mdpopups .admonition.panel-success .admonition-title {
+  color: var(--mdpopups-admon-success-title-fg);
+  background-color: var(--mdpopups-admon-success-accent);
+}
+.mdpopups .admonition.panel-success {
+  color:  var(--mdpopups-admon-success-fg);
+  background-color: var(--mdpopups-admon-success-bg);
+  border-color: var(--mdpopups-admon-success-accent);
+}
+.mdpopups .admonition.panel-warning .admonition-title {
+  color: var(--mdpopups-admon-warning-title-fg);
+  background-color: var(--mdpopups-admon-warning-accent);
+}
+.mdpopups .admonition.panel-warning {
+  color:  var(--mdpopups-admon-warning-fg);
+  background-color: var(--mdpopups-admon-warning-bg);
+  border-color: var(--mdpopups-admon-warning-accent);
+}
+.mdpopups .admonition.panel-error .admonition-title {
+  color: var(--mdpopups-admon-error-title-fg);
+  background-color: var(--mdpopups-admon-error-accent);
+}
+.mdpopups .admonition.panel-error {
+  color:  var(--mdpopups-admon-error-fg);
+  background-color: var(--mdpopups-admon-error-bg);
+  border-color: var(--mdpopups-admon-error-accent);
+}
+.mdpopups .admonition.panel-info .admonition-title {
+  color: var(--mdpopups-admon-info-title-fg);
+  background-color: var(--mdpopups-admon-info-accent);
+}
+.mdpopups .admonition.panel-info {
+  color:  var(--mdpopups-admon-info-fg);
+  background-color: var(--mdpopups-admon-info-bg);
+  border-color: var(--mdpopups-admon-info-accent);
+}
+.mdpopups .highlight {
+  border: 1px solid var(--mdpopups-hl-border);
+  border-radius: 0.25rem;
+  font-size: 0.9rem;
+  {%- if not var.use_pygments %}
+  background-color: var(--mdpopups-hl-bg);
+  {%- endif %}
+}
+.mdpopups div.highlight,
+.mdpopups pre.highlight {
+  padding: 0.5rem;
+  margin-bottom: 0.5rem;
+  font-size: 1rem;
+}
+.mdpopups code.highlight {
+  padding: 0.05rem 0.25rem;
+}
+{%- endif %}
diff --git a/SublimeText/mdpopups/st3/mdpopups/file_strip/__init__.py b/SublimeText/mdpopups/st3/mdpopups/file_strip/__init__.py
new file mode 100644
index 00000000..379abc39
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/file_strip/__init__.py
@@ -0,0 +1 @@
+"""File Strip."""
diff --git a/SublimeText/mdpopups/st3/mdpopups/file_strip/comments.py b/SublimeText/mdpopups/st3/mdpopups/file_strip/comments.py
new file mode 100644
index 00000000..c614c652
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/file_strip/comments.py
@@ -0,0 +1,130 @@
+"""
+File Strip.
+
+Licensed under MIT
+Copyright (c) 2012 - 2020 Isaac Muse 
+"""
+import re
+
+LINE_PRESERVE = re.compile(r"\r?\n", re.MULTILINE)
+CPP_PATTERN = re.compile(
+    r'''(?x)
+        (?P<comments>
+            /\*[^*]*\*+(?:[^/*][^*]*\*+)*/  # multi-line comments
+          | \s*//(?:[^\r\n])*               # single line comments
+        )
+      | (?P<code>
+            "(?:\\.|[^"\\])*"               # double quotes
+          | '(?:\\.|[^'\\])*'               # single quotes
+          | .[^/"']*                        # everything else
+        )
+    ''',
+    re.DOTALL
+)
+PY_PATTERN = re.compile(
+    r'''(?x)
+        (?P<comments>
+            \s*\#(?:[^\r\n])*               # single line comments
+        )
+      | (?P<code>
+            "{3}(?:\\.|[^\\])*"{3}          # triple double quotes
+          | '{3}(?:\\.|[^\\])*'{3}          # triple single quotes
+          | "(?:\\.|[^"\\])*"               # double quotes
+          | '(?:\\.|[^'])*'                 # single quotes
+          | .[^\#"']*                       # everything else
+        )
+    ''',
+    re.DOTALL
+)
+
+
+def _strip_regex(pattern, text, preserve_lines):
+    """Generic function that strips out comments passed on the given pattern."""
+
+    def remove_comments(group, preserve_lines=False):
+        """Remove comments."""
+
+        return ''.join([x[0] for x in LINE_PRESERVE.findall(group)]) if preserve_lines else ''
+
+    def evaluate(m, preserve_lines):
+        """Search for comments."""
+
+        g = m.groupdict()
+        return g["code"] if g["code"] is not None else remove_comments(g["comments"], preserve_lines)
+
+    return ''.join(map(lambda m: evaluate(m, preserve_lines), pattern.finditer(text)))
+
+
+@staticmethod
+def _cpp(text, preserve_lines=False):
+    """C/C++ style comment stripper."""
+
+    return _strip_regex(
+        CPP_PATTERN,
+        text,
+        preserve_lines
+    )
+
+
+@staticmethod
+def _python(text, preserve_lines=False):
+    """Python style comment stripper."""
+
+    return _strip_regex(
+        PY_PATTERN,
+        text,
+        preserve_lines
+    )
+
+
+class CommentException(Exception):
+    """Comment exception."""
+
+    def __init__(self, value):
+        """Setup exception."""
+
+        self.value = value
+
+    def __str__(self):
+        """Return exception value `repr` on string convert."""
+
+        return repr(self.value)
+
+
+class Comments(object):
+    """Comment strip class."""
+
+    styles = []
+
+    def __init__(self, style=None, preserve_lines=False):
+        """Initialize."""
+
+        self.preserve_lines = preserve_lines
+        self.call = self.__get_style(style)
+
+    @classmethod
+    def add_style(cls, style, fn):
+        """Add comment style."""
+
+        if style not in cls.__dict__:
+            setattr(cls, style, fn)
+            cls.styles.append(style)
+
+    def __get_style(self, style):
+        """Get the comment style."""
+
+        if style in self.styles:
+            return getattr(self, style)
+        else:
+            raise CommentException(style)
+
+    def strip(self, text):
+        """Strip comments."""
+
+        return self.call(text, self.preserve_lines)
+
+
+Comments.add_style("c", _cpp)
+Comments.add_style("json", _cpp)
+Comments.add_style("cpp", _cpp)
+Comments.add_style("python", _python)
diff --git a/SublimeText/mdpopups/st3/mdpopups/file_strip/json.py b/SublimeText/mdpopups/st3/mdpopups/file_strip/json.py
new file mode 100644
index 00000000..7df10fa1
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/file_strip/json.py
@@ -0,0 +1,70 @@
+"""
+File Strip.
+
+Licensed under MIT
+Copyright (c) 2012 - 2020 Isaac Muse 
+"""
+import re
+from .comments import Comments
+
+JSON_PATTERN = re.compile(
+    r'''(?x)
+        (
+            (?P<square_comma>
+                ,                        # trailing comma
+                (?P<square_ws>[\s\r\n]*) # white space
+                (?P<square_bracket>\])   # bracket
+            )
+          | (?P<curly_comma>
+                ,                        # trailing comma
+                (?P<curly_ws>[\s\r\n]*)  # white space
+                (?P<curly_bracket>\})    # bracket
+            )
+        )
+      | (?P<code>
+            "(?:\\.|[^"\\])*"            # double quoted string
+          | '(?:\\.|[^'\\])*'            # single quoted string
+          | .[^,"']*                     # everything else
+        )
+    ''',
+    re.DOTALL
+)
+
+
+def strip_dangling_commas(text, preserve_lines=False):
+    """Strip dangling commas."""
+
+    regex = JSON_PATTERN
+
+    def remove_comma(g, preserve_lines):
+        """Remove comma."""
+
+        if preserve_lines:
+            # ,] -> ] else ,} -> }
+            if g["square_comma"] is not None:
+                return g["square_ws"] + g["square_bracket"]
+            else:
+                return g["curly_ws"] + g["curly_bracket"]
+        else:
+            # ,] -> ] else ,} -> }
+            return g["square_bracket"] if g["square_comma"] else g["curly_bracket"]
+
+    def evaluate(m, preserve_lines):
+        """Search for dangling comma."""
+
+        g = m.groupdict()
+        return remove_comma(g, preserve_lines) if g["code"] is None else g["code"]
+
+    return ''.join(map(lambda m: evaluate(m, preserve_lines), regex.finditer(text)))
+
+
+def strip_comments(text, preserve_lines=False):
+    """Strip JavaScript like comments."""
+
+    return Comments('json', preserve_lines).strip(text)
+
+
+def sanitize_json(text, preserve_lines=False):
+    """Sanitize the JSON file by removing comments and dangling commas."""
+
+    return strip_dangling_commas(Comments('json', preserve_lines).strip(text), preserve_lines)
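Putting the two passes together: the sketch below runs a Sublime-style settings blob (a comment plus a trailing comma) through `sanitize_json` before handing it to the stdlib parser; the import path is an assumption:

    import json
    from mdpopups.file_strip.json import sanitize_json  # assumed import path

    raw = '{\n    // user settings\n    "ignored_packages": ["Vintage",],\n}'
    print(json.loads(sanitize_json(raw)))
    # -> {'ignored_packages': ['Vintage']}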
diff --git a/SublimeText/mdpopups/st3/mdpopups/frontmatter.py b/SublimeText/mdpopups/st3/mdpopups/frontmatter.py
new file mode 100644
index 00000000..c5a2a4d6
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/frontmatter.py
@@ -0,0 +1,88 @@
+"""Frontmatter stripping."""
+from . import yaml
+import re
+from collections import OrderedDict
+
+
+def yaml_load(stream, loader=yaml.Loader, object_pairs_hook=OrderedDict):
+    """
+    Custom YAML loader.
+
+    Make all YAML dictionaries load as ordered dictionary.
+    http://stackoverflow.com/a/21912744/3609487
+
+    Load all strings as Unicode.
+    http://stackoverflow.com/a/2967461/3609487
+    """
+
+    def construct_mapping(loader, node):
+        """Convert to ordered dict."""
+
+        loader.flatten_mapping(node)
+        return object_pairs_hook(loader.construct_pairs(node))
+
+    def construct_yaml_str(self, node):
+        """Override the default string handling function to always return Unicode objects."""
+
+        return self.construct_scalar(node)
+
+    class Loader(loader):
+        """Custom Loader."""
+
+    Loader.add_constructor(
+        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
+        construct_mapping
+    )
+
+    Loader.add_constructor(
+        'tag:yaml.org,2002:str',
+        construct_yaml_str
+    )
+
+    return yaml.load(stream, Loader)
+
+
+def yaml_dump(data, stream=None, dumper=yaml.Dumper):
+    """Special dumper wrapper to modify the YAML dumper."""
+
+    class Dumper(dumper):
+        """Custom dumper."""
+
+    # Handle Ordered Dict
+    Dumper.add_representer(
+        OrderedDict,
+        lambda self, data: self.represent_mapping('tag:yaml.org,2002:map', data.items())
+    )
+
+    return yaml.dump(data, stream, Dumper, width=None, indent=4, allow_unicode=True, default_flow_style=False)
+
+
+def dump_frontmatter(values):
+    """Turn Python dict values to frontmatter string."""
+
+    return '---\n{}\n...\n'.format(yaml_dump(values))
+
+
+def get_frontmatter(string):
+    """Get frontmatter from string."""
+
+    frontmatter = OrderedDict()
+
+    if string.startswith("---"):
+        m = re.search(r'^(-{3}\r?\n(?!\r?\n)(.*?)(?<=\n)(?:-{3}|\.{3})\r?\n)', string, re.DOTALL)
+        if m:
+            yaml_okay = True
+            try:
+                frontmatter = yaml_load(m.group(2))
+                if frontmatter is None:
+                    frontmatter = OrderedDict()
+                # If we didn't get a dictionary, we don't want this as it isn't frontmatter.
+                assert isinstance(frontmatter, (dict, OrderedDict)), TypeError
+            except Exception:
+                # We had a parsing error. This is not the YAML we are looking for.
+                yaml_okay = False
+                frontmatter = OrderedDict()
+            if yaml_okay:
+                string = string[m.end(1):]
+
+    return frontmatter, string
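A sketch of round-tripping frontmatter with the helpers above; the import path assumes the vendored package (and its bundled `yaml`) is importable, as it is inside Sublime's plugin host, and the key used is illustrative:

    from mdpopups.frontmatter import get_frontmatter, dump_frontmatter  # assumed path

    doc = '---\nallow_code_wrap: true\n...\n# Heading\n\nBody text.\n'
    fm, body = get_frontmatter(doc)
    print(dict(fm))   # -> {'allow_code_wrap': True}
    print(body)       # -> '# Heading\n\nBody text.\n'
    print(dump_frontmatter(fm), end='')
    # Serializes the mapping back into a '---' ... '...' fenced block.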
diff --git a/SublimeText/mdpopups/st3/mdpopups/imagetint.py b/SublimeText/mdpopups/st3/mdpopups/imagetint.py
new file mode 100644
index 00000000..21e4c68c
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/imagetint.py
@@ -0,0 +1,73 @@
+"""
+Image tinting.
+
+Licensed under MIT
+Copyright (c) 2015 - 2020 Isaac Muse 
+"""
+from .png import Reader, Writer
+from .coloraide import Color, util
+import base64
+import io
+
+
+def tint_raw(byte_string, color, opacity=255):
+    """Tint the image and return a byte string."""
+
+    # Read the byte string as a RGBA image.
+    width, height, pixels, meta = Reader(bytes=byte_string).asRGBA()
+
+    # Clamp opacity
+    if opacity < 0:
+        opacity = 0
+    elif opacity > 255:
+        opacity = 255
+
+    # Tint
+    p = []
+    y = 0
+    for row in pixels:
+        p.append([])
+        columns = int(len(row) / 4)
+        start = 0
+        for x in range(columns):
+            rgba = Color(color)
+            rgba.alpha = opacity / 255.0
+            rgba.overlay(background='#{:02X}{:02X}{:02X}FF'.format(*row[start:start + 3]))
+            rgba.fit(in_place=True)
+            p[y] += [
+                int(util.round_half_up(rgba.red * 255)),
+                int(util.round_half_up(rgba.green * 255)),
+                int(util.round_half_up(rgba.blue * 255)),
+                row[start + 3]
+            ]
+            start += 4
+        y += 1
+
+    # Create bytes buffer for PNG
+    with io.BytesIO() as f:
+
+        # Write out PNG
+        img = Writer(width, height, alpha=True)
+        img.write(f, p)
+
+        # Read out the raw PNG bytes (base64 encoding happens in the caller)
+        f.seek(0)
+
+        return f.read()
+
+
+def tint(byte_string, color, opacity=255, height=None, width=None):
+    """Base64 encode the tint."""
+
+    style = ''
+    if width:
+        style = 'style="width: {:d}px;"'.format(width)
+    if height is not None and not style:
+        style = 'style="height: {:d}px;"'.format(height)
+    elif height is not None:
+        style = style[:-1] + ' height: {:d}px;"'.format(height)
+
+    return '<img {} src="data:image/png;base64,{}">'.format(
+        style,
+        base64.b64encode(tint_raw(byte_string, color, opacity)).decode('ascii')
+    )
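A sketch of tinting a PNG icon with the helpers above; `icon.png`, the color, and the import path are all illustrative assumptions:

    from mdpopups.imagetint import tint, tint_raw  # assumed import path

    with open('icon.png', 'rb') as f:  # hypothetical source image
        data = f.read()

    png_bytes = tint_raw(data, '#4099ff', opacity=200)   # raw tinted PNG bytes
    html_img = tint(data, '#4099ff', opacity=200, width=16, height=16)
    # `html_img` is an <img> tag with a base64 data URI plus the width/height style.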
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/LICENSE b/SublimeText/mdpopups/st3/mdpopups/jinja2/LICENSE
new file mode 100644
index 00000000..31bf900e
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/LICENSE
@@ -0,0 +1,31 @@
+Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+
+    * The names of the contributors may not be used to endorse or
+      promote products derived from this software without specific
+      prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/__init__.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/__init__.py
new file mode 100644
index 00000000..2c7b5f24
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/__init__.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2
+    ~~~~~~
+
+    Jinja2 is a template engine written in pure Python.  It provides a
+    Django inspired non-XML syntax but supports inline expressions and
+    an optional sandboxed environment.
+
+    Nutshell
+    --------
+
+    Here a small example of a Jinja2 template::
+
+        {% extends 'base.html' %}
+        {% block title %}Memberlist{% endblock %}
+        {% block content %}
+          <ul>
+          {% for user in users %}
+            <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+          {% endfor %}
+          </ul>
+        {% endblock %}
+
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+__docformat__ = 'restructuredtext en'
+__version__ = '2.10.1'
+
+# high level interface
+from .environment import Environment, Template
+
+# loaders
+from .loaders import BaseLoader, FileSystemLoader, PackageLoader, \
+     DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \
+     ModuleLoader
+
+# bytecode caches
+from .bccache import BytecodeCache, FileSystemBytecodeCache, \
+     MemcachedBytecodeCache
+
+# undefined types
+from .runtime import Undefined, DebugUndefined, StrictUndefined, \
+     make_logging_undefined
+
+# exceptions
+from .exceptions import TemplateError, UndefinedError, \
+     TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \
+     TemplateAssertionError, TemplateRuntimeError
+
+# decorators and public utilities
+from .filters import environmentfilter, contextfilter, \
+     evalcontextfilter
+from .utils import Markup, escape, clear_caches, \
+     environmentfunction, evalcontextfunction, contextfunction, \
+     is_undefined, select_autoescape
+
+__all__ = [
+    'Environment', 'Template', 'BaseLoader', 'FileSystemLoader',
+    'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader',
+    'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache',
+    'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined',
+    'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound',
+    'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError',
+    'TemplateRuntimeError',
+    'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape',
+    'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined',
+    'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined',
+    'select_autoescape',
+]
+
+
+def _patch_async():
+    from .utils import have_async_gen
+    if have_async_gen:
+        from .asyncsupport import patch_all
+        patch_all()
+
+
+_patch_async()
+del _patch_async
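The vendored Jinja2 above is what renders templated assets like the `default.css` shown earlier. A minimal usage sketch, assuming the package (and its MarkupSafe dependency) resolves as `mdpopups.jinja2` inside the plugin host:

    from mdpopups.jinja2 import Environment  # assumed import path

    env = Environment()
    tmpl = env.from_string('{%- if dark %}html.dark { color: {{ fg }}; }{%- endif %}')
    print(tmpl.render(dark=True, fg='#ffffff'))
    # -> 'html.dark { color: #ffffff; }'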
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/_compat.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/_compat.py
new file mode 100644
index 00000000..61d85301
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/_compat.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2._compat
+    ~~~~~~~~~~~~~~
+
+    Some py2/py3 compatibility support based on a stripped down
+    version of six so we don't have to depend on a specific version
+    of it.
+
+    :copyright: Copyright 2013 by the Jinja team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+import sys
+
+PY2 = sys.version_info[0] == 2
+PYPY = hasattr(sys, 'pypy_translation_info')
+_identity = lambda x: x
+
+
+if not PY2:
+    unichr = chr
+    range_type = range
+    text_type = str
+    string_types = (str,)
+    integer_types = (int,)
+
+    iterkeys = lambda d: iter(d.keys())
+    itervalues = lambda d: iter(d.values())
+    iteritems = lambda d: iter(d.items())
+
+    import pickle
+    from io import BytesIO, StringIO
+    NativeStringIO = StringIO
+
+    def reraise(tp, value, tb=None):
+        if value.__traceback__ is not tb:
+            raise value.with_traceback(tb)
+        raise value
+
+    ifilter = filter
+    imap = map
+    izip = zip
+    intern = sys.intern
+
+    implements_iterator = _identity
+    implements_to_string = _identity
+    encode_filename = _identity
+
+else:
+    unichr = unichr
+    text_type = unicode
+    range_type = xrange
+    string_types = (str, unicode)
+    integer_types = (int, long)
+
+    iterkeys = lambda d: d.iterkeys()
+    itervalues = lambda d: d.itervalues()
+    iteritems = lambda d: d.iteritems()
+
+    import cPickle as pickle
+    from cStringIO import StringIO as BytesIO, StringIO
+    NativeStringIO = BytesIO
+
+    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
+
+    from itertools import imap, izip, ifilter
+    intern = intern
+
+    def implements_iterator(cls):
+        cls.next = cls.__next__
+        del cls.__next__
+        return cls
+
+    def implements_to_string(cls):
+        cls.__unicode__ = cls.__str__
+        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
+        return cls
+
+    def encode_filename(filename):
+        if isinstance(filename, unicode):
+            return filename.encode('utf-8')
+        return filename
+
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a
+    # dummy metaclass for one level of class instantiation that replaces
+    # itself with the actual metaclass.
+    class metaclass(type):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+    return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+try:
+    from urllib.parse import quote_from_bytes as url_quote
+except ImportError:
+    from urllib import quote as url_quote
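The `with_metaclass` trick above builds a throwaway metaclass whose only job is to call the real one when the class body is executed. A small sketch (standalone apart from the helper, whose import path is an assumption):

    from mdpopups.jinja2._compat import with_metaclass  # assumed import path

    class Meta(type):
        def __new__(mcs, name, bases, namespace):
            namespace.setdefault('tag', name.lower())  # inject a class attribute
            return type.__new__(mcs, name, bases, namespace)

    class Node(with_metaclass(Meta, object)):
        pass

    print(Node.tag)            # -> 'node'
    print(type(Node) is Meta)  # -> True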
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/_identifier.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/_identifier.py
new file mode 100644
index 00000000..2eac35d5
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/_identifier.py
@@ -0,0 +1,2 @@
+# generated by scripts/generate_identifier_pattern.py
+pattern = '·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯'
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/asyncfilters.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/asyncfilters.py
new file mode 100644
index 00000000..ac2eddea
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/asyncfilters.py
@@ -0,0 +1,146 @@
+from functools import wraps
+
+from .asyncsupport import auto_aiter
+from . import filters
+
+
+async def auto_to_seq(value):
+    seq = []
+    if hasattr(value, '__aiter__'):
+        async for item in value:
+            seq.append(item)
+    else:
+        for item in value:
+            seq.append(item)
+    return seq
+
+
+async def async_select_or_reject(args, kwargs, modfunc, lookup_attr):
+    seq, func = filters.prepare_select_or_reject(
+        args, kwargs, modfunc, lookup_attr)
+    if seq:
+        async for item in auto_aiter(seq):
+            if func(item):
+                yield item
+
+
+def dualfilter(normal_filter, async_filter):
+    wrap_evalctx = False
+    if getattr(normal_filter, 'environmentfilter', False):
+        is_async = lambda args: args[0].is_async
+        wrap_evalctx = False
+    else:
+        if not getattr(normal_filter, 'evalcontextfilter', False) and \
+           not getattr(normal_filter, 'contextfilter', False):
+            wrap_evalctx = True
+        is_async = lambda args: args[0].environment.is_async
+
+    @wraps(normal_filter)
+    def wrapper(*args, **kwargs):
+        b = is_async(args)
+        if wrap_evalctx:
+            args = args[1:]
+        if b:
+            return async_filter(*args, **kwargs)
+        return normal_filter(*args, **kwargs)
+
+    if wrap_evalctx:
+        wrapper.evalcontextfilter = True
+
+    wrapper.asyncfiltervariant = True
+
+    return wrapper
+
+
+def asyncfiltervariant(original):
+    def decorator(f):
+        return dualfilter(original, f)
+    return decorator
+
+
+@asyncfiltervariant(filters.do_first)
+async def do_first(environment, seq):
+    try:
+        return await auto_aiter(seq).__anext__()
+    except StopAsyncIteration:
+        return environment.undefined('No first item, sequence was empty.')
+
+
+@asyncfiltervariant(filters.do_groupby)
+async def do_groupby(environment, value, attribute):
+    expr = filters.make_attrgetter(environment, attribute)
+    return [filters._GroupTuple(key, await auto_to_seq(values))
+            for key, values in filters.groupby(sorted(
+                await auto_to_seq(value), key=expr), expr)]
+
+
+@asyncfiltervariant(filters.do_join)
+async def do_join(eval_ctx, value, d=u'', attribute=None):
+    return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute)
+
+
+@asyncfiltervariant(filters.do_list)
+async def do_list(value):
+    return await auto_to_seq(value)
+
+
+@asyncfiltervariant(filters.do_reject)
+async def do_reject(*args, **kwargs):
+    return async_select_or_reject(args, kwargs, lambda x: not x, False)
+
+
+@asyncfiltervariant(filters.do_rejectattr)
+async def do_rejectattr(*args, **kwargs):
+    return async_select_or_reject(args, kwargs, lambda x: not x, True)
+
+
+@asyncfiltervariant(filters.do_select)
+async def do_select(*args, **kwargs):
+    return async_select_or_reject(args, kwargs, lambda x: x, False)
+
+
+@asyncfiltervariant(filters.do_selectattr)
+async def do_selectattr(*args, **kwargs):
+    return async_select_or_reject(args, kwargs, lambda x: x, True)
+
+
+@asyncfiltervariant(filters.do_map)
+async def do_map(*args, **kwargs):
+    seq, func = filters.prepare_map(args, kwargs)
+    if seq:
+        async for item in auto_aiter(seq):
+            yield func(item)
+
+
+@asyncfiltervariant(filters.do_sum)
+async def do_sum(environment, iterable, attribute=None, start=0):
+    rv = start
+    if attribute is not None:
+        func = filters.make_attrgetter(environment, attribute)
+    else:
+        func = lambda x: x
+    async for item in auto_aiter(iterable):
+        rv += func(item)
+    return rv
+
+
+@asyncfiltervariant(filters.do_slice)
+async def do_slice(value, slices, fill_with=None):
+    return filters.do_slice(await auto_to_seq(value), slices, fill_with)
+
+
+ASYNC_FILTERS = {
+    'first':        do_first,
+    'groupby':      do_groupby,
+    'join':         do_join,
+    'list':         do_list,
+    # we intentionally do not support do_last because that would be
+    # ridiculous
+    'reject':       do_reject,
+    'rejectattr':   do_rejectattr,
+    'map':          do_map,
+    'select':       do_select,
+    'selectattr':   do_selectattr,
+    'sum':          do_sum,
+    'slice':        do_slice,
+}
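These variants only take effect when the environment is created with async support enabled; a sketch of that mode, with the import path assumed as before:

    import asyncio
    from mdpopups.jinja2 import Environment  # assumed import path

    env = Environment(enable_async=True)
    tmpl = env.from_string('{{ items | select("odd") | join(", ") }}')

    async def main():
        # `select` and `join` resolve to the async variants registered above.
        print(await tmpl.render_async(items=range(6)))  # -> 1, 3, 5

    asyncio.get_event_loop().run_until_complete(main())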
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/asyncsupport.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/asyncsupport.py
new file mode 100644
index 00000000..b294e004
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/asyncsupport.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2.asyncsupport
+    ~~~~~~~~~~~~~~~~~~~
+
+    Has all the code for async support which is implemented as a patch
+    for supported Python versions.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+import sys
+import asyncio
+import inspect
+from functools import update_wrapper
+
+from .utils import concat, internalcode, Markup
+from .environment import TemplateModule
+from .runtime import LoopContextBase, _last_iteration
+
+
+async def concat_async(async_gen):
+    rv = []
+    async def collect():
+        async for event in async_gen:
+            rv.append(event)
+    await collect()
+    return concat(rv)
+
+
+async def generate_async(self, *args, **kwargs):
+    vars = dict(*args, **kwargs)
+    try:
+        async for event in self.root_render_func(self.new_context(vars)):
+            yield event
+    except Exception:
+        exc_info = sys.exc_info()
+    else:
+        return
+    yield self.environment.handle_exception(exc_info, True)
+
+
+def wrap_generate_func(original_generate):
+    def _convert_generator(self, loop, args, kwargs):
+        async_gen = self.generate_async(*args, **kwargs)
+        try:
+            while 1:
+                yield loop.run_until_complete(async_gen.__anext__())
+        except StopAsyncIteration:
+            pass
+    def generate(self, *args, **kwargs):
+        if not self.environment.is_async:
+            return original_generate(self, *args, **kwargs)
+        return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
+    return update_wrapper(generate, original_generate)
+
+
+async def render_async(self, *args, **kwargs):
+    if not self.environment.is_async:
+        raise RuntimeError('The environment was not created with async mode '
+                           'enabled.')
+
+    vars = dict(*args, **kwargs)
+    ctx = self.new_context(vars)
+
+    try:
+        return await concat_async(self.root_render_func(ctx))
+    except Exception:
+        exc_info = sys.exc_info()
+    return self.environment.handle_exception(exc_info, True)
+
+
+def wrap_render_func(original_render):
+    def render(self, *args, **kwargs):
+        if not self.environment.is_async:
+            return original_render(self, *args, **kwargs)
+        loop = asyncio.get_event_loop()
+        return loop.run_until_complete(self.render_async(*args, **kwargs))
+    return update_wrapper(render, original_render)
+
+
+def wrap_block_reference_call(original_call):
+    @internalcode
+    async def async_call(self):
+        rv = await concat_async(self._stack[self._depth](self._context))
+        if self._context.eval_ctx.autoescape:
+            rv = Markup(rv)
+        return rv
+
+    @internalcode
+    def __call__(self):
+        if not self._context.environment.is_async:
+            return original_call(self)
+        return async_call(self)
+
+    return update_wrapper(__call__, original_call)
+
+
+def wrap_macro_invoke(original_invoke):
+    @internalcode
+    async def async_invoke(self, arguments, autoescape):
+        rv = await self._func(*arguments)
+        if autoescape:
+            rv = Markup(rv)
+        return rv
+
+    @internalcode
+    def _invoke(self, arguments, autoescape):
+        if not self._environment.is_async:
+            return original_invoke(self, arguments, autoescape)
+        return async_invoke(self, arguments, autoescape)
+    return update_wrapper(_invoke, original_invoke)
+
+
+@internalcode
+async def get_default_module_async(self):
+    if self._module is not None:
+        return self._module
+    self._module = rv = await self.make_module_async()
+    return rv
+
+
+def wrap_default_module(original_default_module):
+    @internalcode
+    def _get_default_module(self):
+        if self.environment.is_async:
+            raise RuntimeError('Template module attribute is unavailable '
+                               'in async mode')
+        return original_default_module(self)
+    return _get_default_module
+
+
+async def make_module_async(self, vars=None, shared=False, locals=None):
+    context = self.new_context(vars, shared, locals)
+    body_stream = []
+    async for item in self.root_render_func(context):
+        body_stream.append(item)
+    return TemplateModule(self, context, body_stream)
+
+
+def patch_template():
+    from . import Template
+    Template.generate = wrap_generate_func(Template.generate)
+    Template.generate_async = update_wrapper(
+        generate_async, Template.generate_async)
+    Template.render_async = update_wrapper(
+        render_async, Template.render_async)
+    Template.render = wrap_render_func(Template.render)
+    Template._get_default_module = wrap_default_module(
+        Template._get_default_module)
+    Template._get_default_module_async = get_default_module_async
+    Template.make_module_async = update_wrapper(
+        make_module_async, Template.make_module_async)
+
+
+def patch_runtime():
+    from .runtime import BlockReference, Macro
+    BlockReference.__call__ = wrap_block_reference_call(
+        BlockReference.__call__)
+    Macro._invoke = wrap_macro_invoke(Macro._invoke)
+
+
+def patch_filters():
+    from .filters import FILTERS
+    from .asyncfilters import ASYNC_FILTERS
+    FILTERS.update(ASYNC_FILTERS)
+
+
+def patch_all():
+    patch_template()
+    patch_runtime()
+    patch_filters()
+
+
+async def auto_await(value):
+    if inspect.isawaitable(value):
+        return await value
+    return value
+
+
+async def auto_aiter(iterable):
+    if hasattr(iterable, '__aiter__'):
+        async for item in iterable:
+            yield item
+        return
+    for item in iterable:
+        yield item
+
+
+class AsyncLoopContext(LoopContextBase):
+
+    def __init__(self, async_iterator, undefined, after, length, recurse=None,
+                 depth0=0):
+        LoopContextBase.__init__(self, undefined, recurse, depth0)
+        self._async_iterator = async_iterator
+        self._after = after
+        self._length = length
+
+    @property
+    def length(self):
+        if self._length is None:
+            raise TypeError('Loop length for some iterators cannot be '
+                            'lazily calculated in async mode')
+        return self._length
+
+    def __aiter__(self):
+        return AsyncLoopContextIterator(self)
+
+
+class AsyncLoopContextIterator(object):
+    __slots__ = ('context',)
+
+    def __init__(self, context):
+        self.context = context
+
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        ctx = self.context
+        ctx.index0 += 1
+        if ctx._after is _last_iteration:
+            raise StopAsyncIteration()
+        ctx._before = ctx._current
+        ctx._current = ctx._after
+        try:
+            ctx._after = await ctx._async_iterator.__anext__()
+        except StopAsyncIteration:
+            ctx._after = _last_iteration
+        return ctx._current, ctx
+
+
+async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0):
+    # Length is more complicated and less efficient in async mode.  The
+    # reason for this is that we cannot know if length will be used
+    # upfront but because length is a property we cannot lazily execute it
+    # later.  This means that we need to buffer it up and measure :(
+    #
+    # We however only do this for actual iterators, not for async
+    # iterators as blocking here does not seem like the best idea in the
+    # world.
+    try:
+        length = len(iterable)
+    except (TypeError, AttributeError):
+        if not hasattr(iterable, '__aiter__'):
+            iterable = tuple(iterable)
+            length = len(iterable)
+        else:
+            length = None
+    async_iterator = auto_aiter(iterable)
+    try:
+        after = await async_iterator.__anext__()
+    except StopAsyncIteration:
+        after = _last_iteration
+    return AsyncLoopContext(async_iterator, undefined, after, length, recurse,
+                            depth0)
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/bccache.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/bccache.py
new file mode 100644
index 00000000..114ba10a
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/bccache.py
@@ -0,0 +1,362 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2.bccache
+    ~~~~~~~~~~~~~~
+
+    This module implements the bytecode cache system Jinja is optionally
+    using.  This is useful if you have very complex template situations and
+    the compilation of all those templates slows down your application too
+    much.
+
+    Situations where this is useful are often forking web applications that
+    are initialized on the first request.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD.
+"""
+from os import path, listdir
+import os
+import sys
+import stat
+import errno
+import marshal
+import tempfile
+import fnmatch
+from hashlib import sha1
+from .utils import open_if_exists
+from ._compat import BytesIO, pickle, PY2, text_type
+
+
+# marshal works better on 3.x, one hack less required
+if not PY2:
+    marshal_dump = marshal.dump
+    marshal_load = marshal.load
+else:
+
+    def marshal_dump(code, f):
+        if isinstance(f, file):
+            marshal.dump(code, f)
+        else:
+            f.write(marshal.dumps(code))
+
+    def marshal_load(f):
+        if isinstance(f, file):
+            return marshal.load(f)
+        return marshal.loads(f.read())
+
+
+bc_version = 3
+
+# magic version used to only change with new jinja versions.  With 2.6
+# we change this to also take Python version changes into account.  The
+# reason for this is that Python tends to segfault if fed earlier bytecode
+# versions because someone thought it would be a good idea to reuse opcodes
+# or make Python incompatible with earlier versions.
+bc_magic = 'j2'.encode('ascii') + \
+    pickle.dumps(bc_version, 2) + \
+    pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1])
+
+
+class Bucket(object):
+    """Buckets are used to store the bytecode for one template.  It's created
+    and initialized by the bytecode cache and passed to the loading functions.
+
+    The buckets get an internal checksum from the cache assigned and use this
+    to automatically reject outdated cache material.  Individual bytecode
+    cache subclasses don't have to care about cache invalidation.
+    """
+
+    def __init__(self, environment, key, checksum):
+        self.environment = environment
+        self.key = key
+        self.checksum = checksum
+        self.reset()
+
+    def reset(self):
+        """Resets the bucket (unloads the bytecode)."""
+        self.code = None
+
+    def load_bytecode(self, f):
+        """Loads bytecode from a file or file like object."""
+        # make sure the magic header is correct
+        magic = f.read(len(bc_magic))
+        if magic != bc_magic:
+            self.reset()
+            return
+        # the source code of the file changed, we need to reload
+        checksum = pickle.load(f)
+        if self.checksum != checksum:
+            self.reset()
+            return
+        # if marshal_load fails then we need to reload
+        try:
+            self.code = marshal_load(f)
+        except (EOFError, ValueError, TypeError):
+            self.reset()
+            return
+
+    def write_bytecode(self, f):
+        """Dump the bytecode into the file or file like object passed."""
+        if self.code is None:
+            raise TypeError('can\'t write empty bucket')
+        f.write(bc_magic)
+        pickle.dump(self.checksum, f, 2)
+        marshal_dump(self.code, f)
+
+    def bytecode_from_string(self, string):
+        """Load bytecode from a string."""
+        self.load_bytecode(BytesIO(string))
+
+    def bytecode_to_string(self):
+        """Return the bytecode as string."""
+        out = BytesIO()
+        self.write_bytecode(out)
+        return out.getvalue()
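+
+    # A minimal sketch (illustrative, not upstream code) of how a bucket
+    # round-trips through the string helpers above; ``env`` stands in for
+    # any Environment instance:
+    #
+    #     bucket = Bucket(env, 'some-key', 'checksum')
+    #     bucket.code = compile('pass', '<template>', 'exec')
+    #     blob = bucket.bytecode_to_string()       # magic + checksum + marshal
+    #     fresh = Bucket(env, 'some-key', 'checksum')
+    #     fresh.bytecode_from_string(blob)         # fresh.code is restored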
+
+
+class BytecodeCache(object):
+    """To implement your own bytecode cache you have to subclass this class
+    and override :meth:`load_bytecode` and :meth:`dump_bytecode`.  Both of
+    these methods are passed a :class:`~jinja2.bccache.Bucket`.
+
+    A very basic bytecode cache that saves the bytecode on the file system::
+
+        from os import path
+
+        class MyCache(BytecodeCache):
+
+            def __init__(self, directory):
+                self.directory = directory
+
+            def load_bytecode(self, bucket):
+                filename = path.join(self.directory, bucket.key)
+                if path.exists(filename):
+                    with open(filename, 'rb') as f:
+                        bucket.load_bytecode(f)
+
+            def dump_bytecode(self, bucket):
+                filename = path.join(self.directory, bucket.key)
+                with open(filename, 'wb') as f:
+                    bucket.write_bytecode(f)
+
+    A more advanced version of a filesystem based bytecode cache is part of
+    Jinja2.
+    """
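+
+    # Usage sketch (illustrative; ``MyCache`` refers to the example class in
+    # the docstring above): a cache instance is normally handed to the
+    # environment, which consults it when templates are loaded and compiled:
+    #
+    #     env = Environment(loader=..., bytecode_cache=MyCache('/tmp/jinja'))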
+
+    def load_bytecode(self, bucket):
+        """Subclasses have to override this method to load bytecode into a
+        bucket.  If no code can be found in the cache for the bucket, the
+        method must not do anything.
+        """
+        raise NotImplementedError()
+
+    def dump_bytecode(self, bucket):
+        """Subclasses have to override this method to write the bytecode
+        from a bucket back to the cache.  If it is unable to do so it must not
+        fail silently but raise an exception.
+        """
+        raise NotImplementedError()
+
+    def clear(self):
+        """Clears the cache.  This method is not used by Jinja2 but should be
+        implemented to allow applications to clear the bytecode cache used
+        by a particular environment.
+        """
+
+    def get_cache_key(self, name, filename=None):
+        """Returns the unique hash key for this template name."""
+        hash = sha1(name.encode('utf-8'))
+        if filename is not None:
+            filename = '|' + filename
+            if isinstance(filename, text_type):
+                filename = filename.encode('utf-8')
+            hash.update(filename)
+        return hash.hexdigest()
+
+    def get_source_checksum(self, source):
+        """Returns a checksum for the source."""
+        return sha1(source.encode('utf-8')).hexdigest()
+
+    def get_bucket(self, environment, name, filename, source):
+        """Return a cache bucket for the given template.  All arguments are
+        mandatory but filename may be `None`.
+        """
+        key = self.get_cache_key(name, filename)
+        checksum = self.get_source_checksum(source)
+        bucket = Bucket(environment, key, checksum)
+        self.load_bytecode(bucket)
+        return bucket
+
+    def set_bucket(self, bucket):
+        """Put the bucket into the cache."""
+        self.dump_bytecode(bucket)
+
+
+class FileSystemBytecodeCache(BytecodeCache):
+    """A bytecode cache that stores bytecode on the filesystem.  It accepts
+    two arguments: The directory where the cache items are stored and a
+    pattern string that is used to build the filename.
+
+    If no directory is specified a default cache directory is selected.  On
+    Windows the user's temp directory is used, on UNIX systems a directory
+    is created for the user in the system temp directory.
+
+    The pattern can be used to have multiple separate caches operate on the
+    same directory.  The default pattern is ``'__jinja2_%s.cache'``.  ``%s``
+    is replaced with the cache key.
+
+    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
+
+    This bytecode cache supports clearing of the cache using the clear method.
+    """
+
+    def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
+        if directory is None:
+            directory = self._get_default_cache_dir()
+        self.directory = directory
+        self.pattern = pattern
+
+    def _get_default_cache_dir(self):
+        def _unsafe_dir():
+            raise RuntimeError('Cannot determine safe temp directory.  You '
+                               'need to explicitly provide one.')
+
+        tmpdir = tempfile.gettempdir()
+
+        # On Windows the temporary directory is already user specific unless
+        # explicitly configured otherwise, so we can just use it.
+        if os.name == 'nt':
+            return tmpdir
+        if not hasattr(os, 'getuid'):
+            _unsafe_dir()
+
+        dirname = '_jinja2-cache-%d' % os.getuid()
+        actual_dir = os.path.join(tmpdir, dirname)
+
+        try:
+            os.mkdir(actual_dir, stat.S_IRWXU)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+        try:
+            os.chmod(actual_dir, stat.S_IRWXU)
+            actual_dir_stat = os.lstat(actual_dir)
+            if actual_dir_stat.st_uid != os.getuid() \
+               or not stat.S_ISDIR(actual_dir_stat.st_mode) \
+               or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU:
+                _unsafe_dir()
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+        actual_dir_stat = os.lstat(actual_dir)
+        if actual_dir_stat.st_uid != os.getuid() \
+           or not stat.S_ISDIR(actual_dir_stat.st_mode) \
+           or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU:
+            _unsafe_dir()
+
+        return actual_dir
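+
+    # Illustrative note (typical values, not guaranteed by the code): on a
+    # Linux machine with uid 1000 the default resolves to something like
+    # ``/tmp/_jinja2-cache-1000`` created with mode 0700, while on Windows
+    # the user's temp directory is returned unchanged.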
+
+    def _get_cache_filename(self, bucket):
+        return path.join(self.directory, self.pattern % bucket.key)
+
+    def load_bytecode(self, bucket):
+        f = open_if_exists(self._get_cache_filename(bucket), 'rb')
+        if f is not None:
+            try:
+                bucket.load_bytecode(f)
+            finally:
+                f.close()
+
+    def dump_bytecode(self, bucket):
+        f = open(self._get_cache_filename(bucket), 'wb')
+        try:
+            bucket.write_bytecode(f)
+        finally:
+            f.close()
+
+    def clear(self):
+        # imported lazily here because google app-engine doesn't support
+        # write access on the file system and the function does not exist
+        # normally.
+        from os import remove
+        files = fnmatch.filter(listdir(self.directory), self.pattern % '*')
+        for filename in files:
+            try:
+                remove(path.join(self.directory, filename))
+            except OSError:
+                pass
+
+
+class MemcachedBytecodeCache(BytecodeCache):
+    """This class implements a bytecode cache that uses a memcache cache for
+    storing the information.  It does not enforce a specific memcache library
+    (tummy's memcache or cmemcache) but will accept any class that provides
+    the minimal interface required.
+
+    Libraries compatible with this class:
+
+    -   werkzeug.contrib.cache
+    -   python-memcached
+    -   cmemcache
+
+    (Unfortunately the django cache interface is not compatible because it
+    does not support storing binary data, only unicode.  You can however pass
+    the underlying cache client to the bytecode cache which is available
+    as `django.core.cache.cache._client`.)
+
+    The minimal interface for the client passed to the constructor is this:
+
+    .. class:: MinimalClientInterface
+
+        .. method:: set(key, value[, timeout])
+
+            Stores the bytecode in the cache.  `value` is a string and
+            `timeout` the timeout of the key.  If timeout is not provided
+            a default timeout or no timeout should be assumed.  If it is
+            provided, it is an integer giving the number of seconds the
+            cache item should exist.
+
+        .. method:: get(key)
+
+            Returns the value for the cache key.  If the item does not
+            exist in the cache the return value must be `None`.
+
+    The other arguments to the constructor are the prefix for all keys that
+    is added before the actual cache key and the timeout for the bytecode in
+    the cache system.  We recommend a high (or no) timeout.
+
+    This bytecode cache does not support clearing of used items in the cache.
+    The clear method is a no-operation function.
+
+    .. versionadded:: 2.7
+       Added support for ignoring memcache errors through the
+       `ignore_memcache_errors` parameter.
+    """
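+
+    # A minimal sketch of a client satisfying the interface described above
+    # (illustrative only; a real memcache client such as python-memcached
+    # would normally be used instead):
+    #
+    #     class DictClient(object):
+    #         def __init__(self):
+    #             self._data = {}
+    #
+    #         def set(self, key, value, timeout=None):
+    #             self._data[key] = value
+    #
+    #         def get(self, key):
+    #             return self._data.get(key)
+    #
+    #     cache = MemcachedBytecodeCache(DictClient(), prefix='jinja2/bytecode/')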
+
+    def __init__(self, client, prefix='jinja2/bytecode/', timeout=None,
+                 ignore_memcache_errors=True):
+        self.client = client
+        self.prefix = prefix
+        self.timeout = timeout
+        self.ignore_memcache_errors = ignore_memcache_errors
+
+    def load_bytecode(self, bucket):
+        try:
+            code = self.client.get(self.prefix + bucket.key)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
+            code = None
+        if code is not None:
+            bucket.bytecode_from_string(code)
+
+    def dump_bytecode(self, bucket):
+        args = (self.prefix + bucket.key, bucket.bytecode_to_string())
+        if self.timeout is not None:
+            args += (self.timeout,)
+        try:
+            self.client.set(*args)
+        except Exception:
+            if not self.ignore_memcache_errors:
+                raise
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/compiler.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/compiler.py
new file mode 100644
index 00000000..0f09feaa
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/compiler.py
@@ -0,0 +1,1721 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2.compiler
+    ~~~~~~~~~~~~~~~
+
+    Compiles nodes into python code.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+from itertools import chain
+from copy import deepcopy
+from keyword import iskeyword as is_python_keyword
+from functools import update_wrapper
+from . import nodes
+from .nodes import EvalContext
+from .visitor import NodeVisitor
+from .optimizer import Optimizer
+from .exceptions import TemplateAssertionError
+from .utils import Markup, concat, escape
+from ._compat import range_type, text_type, string_types, \
+     iteritems, NativeStringIO, imap, izip
+from .idtracking import Symbols, VAR_LOAD_PARAMETER, \
+     VAR_LOAD_RESOLVE, VAR_LOAD_ALIAS, VAR_LOAD_UNDEFINED
+
+
+operators = {
+    'eq':       '==',
+    'ne':       '!=',
+    'gt':       '>',
+    'gteq':     '>=',
+    'lt':       '<',
+    'lteq':     '<=',
+    'in':       'in',
+    'notin':    'not in'
+}
+
+# which method do we want to use for dict iteration in generated code?
+# on 2.x let's go with iteritems, on 3.x with items
+if hasattr(dict, 'iteritems'):
+    dict_item_iter = 'iteritems'
+else:
+    dict_item_iter = 'items'
+
+code_features = ['division']
+
+# does this python version support generator stops? (PEP 0479)
+try:
+    exec('from __future__ import generator_stop')
+    code_features.append('generator_stop')
+except SyntaxError:
+    pass
+
+# does this python version support yield from?
+try:
+    exec('def f(): yield from x()')
+except SyntaxError:
+    supports_yield_from = False
+else:
+    supports_yield_from = True
+
+
+def optimizeconst(f):
+    def new_func(self, node, frame, **kwargs):
+        # Only optimize if the frame is not volatile
+        if self.optimized and not frame.eval_ctx.volatile:
+            new_node = self.optimizer.visit(node, frame.eval_ctx)
+            if new_node != node:
+                return self.visit(new_node, frame)
+        return f(self, node, frame, **kwargs)
+    return update_wrapper(new_func, f)
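+
+# Illustrative note (behaviour sketched, not asserted by this decorator alone):
+# with optimization enabled a constant subexpression such as the output of
+# ``{{ 1 + 1 }}`` can be folded by the Optimizer into the constant ``2`` and
+# emitted as a literal instead of runtime arithmetic.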
+
+
+def generate(node, environment, name, filename, stream=None,
+             defer_init=False, optimized=True):
+    """Generate the python source for a node tree."""
+    if not isinstance(node, nodes.Template):
+        raise TypeError('Can\'t compile non template nodes')
+    generator = environment.code_generator_class(environment, name, filename,
+                                                 stream, defer_init,
+                                                 optimized)
+    generator.visit(node)
+    if stream is None:
+        return generator.stream.getvalue()
+
+
+def has_safe_repr(value):
+    """Does the node have a safe representation?"""
+    if value is None or value is NotImplemented or value is Ellipsis:
+        return True
+    if type(value) in (bool, int, float, complex, range_type, Markup) + string_types:
+        return True
+    if type(value) in (tuple, list, set, frozenset):
+        for item in value:
+            if not has_safe_repr(item):
+                return False
+        return True
+    elif type(value) is dict:
+        for key, value in iteritems(value):
+            if not has_safe_repr(key):
+                return False
+            if not has_safe_repr(value):
+                return False
+        return True
+    return False
+
+
+def find_undeclared(nodes, names):
+    """Check if the names passed are accessed undeclared.  The return value
+    is the set of those names that are found to be accessed undeclared.
+    """
+    visitor = UndeclaredNameVisitor(names)
+    try:
+        for node in nodes:
+            visitor.visit(node)
+    except VisitorExit:
+        pass
+    return visitor.undeclared
+
+
+class MacroRef(object):
+
+    def __init__(self, node):
+        self.node = node
+        self.accesses_caller = False
+        self.accesses_kwargs = False
+        self.accesses_varargs = False
+
+
+class Frame(object):
+    """Holds compile time information for us."""
+
+    def __init__(self, eval_ctx, parent=None, level=None):
+        self.eval_ctx = eval_ctx
+        self.symbols = Symbols(parent and parent.symbols or None,
+                               level=level)
+
+        # a toplevel frame is the root + soft frames such as if conditions.
+        self.toplevel = False
+
+        # the root frame is basically just the outermost frame, so no if
+        # conditions.  This information is used to optimize inheritance
+        # situations.
+        self.rootlevel = False
+
+        # in some dynamic inheritance situations the compiler needs to add
+        # write tests around output statements.
+        self.require_output_check = parent and parent.require_output_check
+
+        # inside some tags we are using a buffer rather than yield statements.
+        # this for example affects {% filter %} or {% macro %}.  If a frame
+        # is buffered this variable points to the name of the list used as
+        # buffer.
+        self.buffer = None
+
+        # the name of the block we're in, otherwise None.
+        self.block = parent and parent.block or None
+
+        # the parent of this frame
+        self.parent = parent
+
+        if parent is not None:
+            self.buffer = parent.buffer
+
+    def copy(self):
+        """Create a copy of the current one."""
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.symbols = self.symbols.copy()
+        return rv
+
+    def inner(self, isolated=False):
+        """Return an inner frame."""
+        if isolated:
+            return Frame(self.eval_ctx, level=self.symbols.level + 1)
+        return Frame(self.eval_ctx, self)
+
+    def soft(self):
+        """Return a soft frame.  A soft frame may not be modified as
+        a standalone thing as it shares its resources with the frame it
+        was created from, but it is no longer a rootlevel frame.
+
+        This is only used to implement if-statements.
+        """
+        rv = self.copy()
+        rv.rootlevel = False
+        return rv
+
+    __copy__ = copy
+
+
+class VisitorExit(RuntimeError):
+    """Exception used by the `UndeclaredNameVisitor` to signal a stop."""
+
+
+class DependencyFinderVisitor(NodeVisitor):
+    """A visitor that collects filter and test calls."""
+
+    def __init__(self):
+        self.filters = set()
+        self.tests = set()
+
+    def visit_Filter(self, node):
+        self.generic_visit(node)
+        self.filters.add(node.name)
+
+    def visit_Test(self, node):
+        self.generic_visit(node)
+        self.tests.add(node.name)
+
+    def visit_Block(self, node):
+        """Stop visiting at blocks."""
+
+
+class UndeclaredNameVisitor(NodeVisitor):
+    """A visitor that checks if a name is accessed without being
+    declared.  This is different from the frame visitor as it will
+    not stop at closure frames.
+    """
+
+    def __init__(self, names):
+        self.names = set(names)
+        self.undeclared = set()
+
+    def visit_Name(self, node):
+        if node.ctx == 'load' and node.name in self.names:
+            self.undeclared.add(node.name)
+            if self.undeclared == self.names:
+                raise VisitorExit()
+        else:
+            self.names.discard(node.name)
+
+    def visit_Block(self, node):
+        """Stop visiting at blocks."""
+
+
+class CompilerExit(Exception):
+    """Raised if the compiler encountered a situation where it just
+    doesn't make sense to further process the code.  Any block that
+    raises such an exception is not further processed.
+    """
+
+
+class CodeGenerator(NodeVisitor):
+
+    def __init__(self, environment, name, filename, stream=None,
+                 defer_init=False, optimized=True):
+        if stream is None:
+            stream = NativeStringIO()
+        self.environment = environment
+        self.name = name
+        self.filename = filename
+        self.stream = stream
+        self.created_block_context = False
+        self.defer_init = defer_init
+        self.optimized = optimized
+        if optimized:
+            self.optimizer = Optimizer(environment)
+
+        # aliases for imports
+        self.import_aliases = {}
+
+        # a registry for all blocks.  Because blocks are moved out
+        # into the global python scope they are registered here
+        self.blocks = {}
+
+        # the number of extends statements so far
+        self.extends_so_far = 0
+
+        # some templates have a rootlevel extends.  In this case we
+        # can safely assume that we're a child template and do some
+        # more optimizations.
+        self.has_known_extends = False
+
+        # the current line number
+        self.code_lineno = 1
+
+        # registry of all filters and tests (global, not block local)
+        self.tests = {}
+        self.filters = {}
+
+        # the debug information
+        self.debug_info = []
+        self._write_debug_info = None
+
+        # the number of new lines before the next write()
+        self._new_lines = 0
+
+        # the line number of the last written statement
+        self._last_line = 0
+
+        # true if nothing was written so far.
+        self._first_write = True
+
+        # used by the `temporary_identifier` method to get a new
+        # unique, temporary identifier
+        self._last_identifier = 0
+
+        # the current indentation
+        self._indentation = 0
+
+        # Tracks toplevel assignments
+        self._assign_stack = []
+
+        # Tracks parameter definition blocks
+        self._param_def_block = []
+
+        # Tracks the current context.
+        self._context_reference_stack = ['context']
+
+    # -- Various compilation helpers
+
+    def fail(self, msg, lineno):
+        """Fail with a :exc:`TemplateAssertionError`."""
+        raise TemplateAssertionError(msg, lineno, self.name, self.filename)
+
+    def temporary_identifier(self):
+        """Get a new unique identifier."""
+        self._last_identifier += 1
+        return 't_%d' % self._last_identifier
+
+    def buffer(self, frame):
+        """Enable buffering for the frame from that point onwards."""
+        frame.buffer = self.temporary_identifier()
+        self.writeline('%s = []' % frame.buffer)
+
+    def return_buffer_contents(self, frame, force_unescaped=False):
+        """Return the buffer contents of the frame."""
+        if not force_unescaped:
+            if frame.eval_ctx.volatile:
+                self.writeline('if context.eval_ctx.autoescape:')
+                self.indent()
+                self.writeline('return Markup(concat(%s))' % frame.buffer)
+                self.outdent()
+                self.writeline('else:')
+                self.indent()
+                self.writeline('return concat(%s)' % frame.buffer)
+                self.outdent()
+                return
+            elif frame.eval_ctx.autoescape:
+                self.writeline('return Markup(concat(%s))' % frame.buffer)
+                return
+        self.writeline('return concat(%s)' % frame.buffer)
+
+    def indent(self):
+        """Indent by one."""
+        self._indentation += 1
+
+    def outdent(self, step=1):
+        """Outdent by step."""
+        self._indentation -= step
+
+    def start_write(self, frame, node=None):
+        """Yield or write into the frame buffer."""
+        if frame.buffer is None:
+            self.writeline('yield ', node)
+        else:
+            self.writeline('%s.append(' % frame.buffer, node)
+
+    def end_write(self, frame):
+        """End the writing process started by `start_write`."""
+        if frame.buffer is not None:
+            self.write(')')
+
+    def simple_write(self, s, frame, node=None):
+        """Simple shortcut for start_write + write + end_write."""
+        self.start_write(frame, node)
+        self.write(s)
+        self.end_write(frame)
+
+    def blockvisit(self, nodes, frame):
+        """Visit a list of nodes as block in a frame.  If the current frame
+        has no buffer, a dummy ``if 0: yield None`` is written automatically.
+        """
+        try:
+            self.writeline('pass')
+            for node in nodes:
+                self.visit(node, frame)
+        except CompilerExit:
+            pass
+
+    def write(self, x):
+        """Write a string into the output stream."""
+        if self._new_lines:
+            if not self._first_write:
+                self.stream.write('\n' * self._new_lines)
+                self.code_lineno += self._new_lines
+                if self._write_debug_info is not None:
+                    self.debug_info.append((self._write_debug_info,
+                                            self.code_lineno))
+                    self._write_debug_info = None
+            self._first_write = False
+            self.stream.write('    ' * self._indentation)
+            self._new_lines = 0
+        self.stream.write(x)
+
+    def writeline(self, x, node=None, extra=0):
+        """Combination of newline and write."""
+        self.newline(node, extra)
+        self.write(x)
+
+    def newline(self, node=None, extra=0):
+        """Add one or more newlines before the next write."""
+        self._new_lines = max(self._new_lines, 1 + extra)
+        if node is not None and node.lineno != self._last_line:
+            self._write_debug_info = node.lineno
+            self._last_line = node.lineno
+
+    def signature(self, node, frame, extra_kwargs=None):
+        """Writes a function call to the stream for the current node.
+        A leading comma is added automatically.  The extra keyword
+        arguments may not include python keywords, otherwise a syntax
+        error could occur.  The extra keyword arguments should be given
+        as a python dict.
+        """
+        # if any of the given keyword arguments is a python keyword
+        # we have to make sure that no invalid call is created.
+        kwarg_workaround = False
+        for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()):
+            if is_python_keyword(kwarg):
+                kwarg_workaround = True
+                break
+
+        for arg in node.args:
+            self.write(', ')
+            self.visit(arg, frame)
+
+        if not kwarg_workaround:
+            for kwarg in node.kwargs:
+                self.write(', ')
+                self.visit(kwarg, frame)
+            if extra_kwargs is not None:
+                for key, value in iteritems(extra_kwargs):
+                    self.write(', %s=%s' % (key, value))
+        if node.dyn_args:
+            self.write(', *')
+            self.visit(node.dyn_args, frame)
+
+        if kwarg_workaround:
+            if node.dyn_kwargs is not None:
+                self.write(', **dict({')
+            else:
+                self.write(', **{')
+            for kwarg in node.kwargs:
+                self.write('%r: ' % kwarg.key)
+                self.visit(kwarg.value, frame)
+                self.write(', ')
+            if extra_kwargs is not None:
+                for key, value in iteritems(extra_kwargs):
+                    self.write('%r: %s, ' % (key, value))
+            if node.dyn_kwargs is not None:
+                self.write('}, **')
+                self.visit(node.dyn_kwargs, frame)
+                self.write(')')
+            else:
+                self.write('}')
+
+        elif node.dyn_kwargs is not None:
+            self.write(', **')
+            self.visit(node.dyn_kwargs, frame)
+
+    def pull_dependencies(self, nodes):
+        """Pull all the dependencies."""
+        visitor = DependencyFinderVisitor()
+        for node in nodes:
+            visitor.visit(node)
+        for dependency in 'filters', 'tests':
+            mapping = getattr(self, dependency)
+            for name in getattr(visitor, dependency):
+                if name not in mapping:
+                    mapping[name] = self.temporary_identifier()
+                self.writeline('%s = environment.%s[%r]' %
+                               (mapping[name], dependency, name))
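+
+    # Example of the lines this emits (identifier numbers are illustrative):
+    # for a template using ``{{ name|upper }}`` and ``{% if name is defined %}``
+    # something like the following is written:
+    #
+    #     t_1 = environment.filters['upper']
+    #     t_2 = environment.tests['defined']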
+
+    def enter_frame(self, frame):
+        undefs = []
+        for target, (action, param) in iteritems(frame.symbols.loads):
+            if action == VAR_LOAD_PARAMETER:
+                pass
+            elif action == VAR_LOAD_RESOLVE:
+                self.writeline('%s = %s(%r)' %
+                               (target, self.get_resolve_func(), param))
+            elif action == VAR_LOAD_ALIAS:
+                self.writeline('%s = %s' % (target, param))
+            elif action == VAR_LOAD_UNDEFINED:
+                undefs.append(target)
+            else:
+                raise NotImplementedError('unknown load instruction')
+        if undefs:
+            self.writeline('%s = missing' % ' = '.join(undefs))
+
+    def leave_frame(self, frame, with_python_scope=False):
+        if not with_python_scope:
+            undefs = []
+            for target, _ in iteritems(frame.symbols.loads):
+                undefs.append(target)
+            if undefs:
+                self.writeline('%s = missing' % ' = '.join(undefs))
+
+    def func(self, name):
+        if self.environment.is_async:
+            return 'async def %s' % name
+        return 'def %s' % name
+
+    def macro_body(self, node, frame):
+        """Dump the function def of a macro or call block."""
+        frame = frame.inner()
+        frame.symbols.analyze_node(node)
+        macro_ref = MacroRef(node)
+
+        explicit_caller = None
+        skip_special_params = set()
+        args = []
+        for idx, arg in enumerate(node.args):
+            if arg.name == 'caller':
+                explicit_caller = idx
+            if arg.name in ('kwargs', 'varargs'):
+                skip_special_params.add(arg.name)
+            args.append(frame.symbols.ref(arg.name))
+
+        undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs'))
+
+        if 'caller' in undeclared:
+            # In older Jinja2 versions there was a bug that allowed caller
+            # to retain the special behavior even if it was mentioned in
+            # the argument list.  However thankfully this was only really
+            # working if it was the last argument.  So we are explicitly
+            # checking this now and error out if it is anywhere else in
+            # the argument list.
+            if explicit_caller is not None:
+                try:
+                    node.defaults[explicit_caller - len(node.args)]
+                except IndexError:
+                    self.fail('When defining macros or call blocks the '
+                              'special "caller" argument must be omitted '
+                              'or be given a default.', node.lineno)
+            else:
+                args.append(frame.symbols.declare_parameter('caller'))
+            macro_ref.accesses_caller = True
+        if 'kwargs' in undeclared and not 'kwargs' in skip_special_params:
+            args.append(frame.symbols.declare_parameter('kwargs'))
+            macro_ref.accesses_kwargs = True
+        if 'varargs' in undeclared and not 'varargs' in skip_special_params:
+            args.append(frame.symbols.declare_parameter('varargs'))
+            macro_ref.accesses_varargs = True
+
+        # macros are delayed, they never require output checks
+        frame.require_output_check = False
+        frame.symbols.analyze_node(node)
+        self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node)
+        self.indent()
+
+        self.buffer(frame)
+        self.enter_frame(frame)
+
+        self.push_parameter_definitions(frame)
+        for idx, arg in enumerate(node.args):
+            ref = frame.symbols.ref(arg.name)
+            self.writeline('if %s is missing:' % ref)
+            self.indent()
+            try:
+                default = node.defaults[idx - len(node.args)]
+            except IndexError:
+                self.writeline('%s = undefined(%r, name=%r)' % (
+                    ref,
+                    'parameter %r was not provided' % arg.name,
+                    arg.name))
+            else:
+                self.writeline('%s = ' % ref)
+                self.visit(default, frame)
+            self.mark_parameter_stored(ref)
+            self.outdent()
+        self.pop_parameter_definitions()
+
+        self.blockvisit(node.body, frame)
+        self.return_buffer_contents(frame, force_unescaped=True)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        return frame, macro_ref
+
+    def macro_def(self, macro_ref, frame):
+        """Dump the macro definition for the def created by macro_body."""
+        arg_tuple = ', '.join(repr(x.name) for x in macro_ref.node.args)
+        name = getattr(macro_ref.node, 'name', None)
+        if len(macro_ref.node.args) == 1:
+            arg_tuple += ','
+        self.write('Macro(environment, macro, %r, (%s), %r, %r, %r, '
+                   'context.eval_ctx.autoescape)' %
+                   (name, arg_tuple, macro_ref.accesses_kwargs,
+                    macro_ref.accesses_varargs, macro_ref.accesses_caller))
+
+    def position(self, node):
+        """Return a human readable position for the node."""
+        rv = 'line %d' % node.lineno
+        if self.name is not None:
+            rv += ' in ' + repr(self.name)
+        return rv
+
+    def dump_local_context(self, frame):
+        return '{%s}' % ', '.join(
+            '%r: %s' % (name, target) for name, target
+            in iteritems(frame.symbols.dump_stores()))
+
+    def write_commons(self):
+        """Writes a common preamble that is used by root and block functions.
+        Primarily this sets up common local helpers and enforces a generator
+        through a dead branch.
+        """
+        self.writeline('resolve = context.resolve_or_missing')
+        self.writeline('undefined = environment.undefined')
+        self.writeline('if 0: yield None')
+
+    def push_parameter_definitions(self, frame):
+        """Pushes all parameter targets from the given frame into a local
+        stack that permits tracking of yet to be assigned parameters.  In
+        particular this enables the optimization from `visit_Name` to skip
+        undefined expressions for parameters in macros as macros can reference
+        otherwise unbound parameters.
+        """
+        self._param_def_block.append(frame.symbols.dump_param_targets())
+
+    def pop_parameter_definitions(self):
+        """Pops the current parameter definitions set."""
+        self._param_def_block.pop()
+
+    def mark_parameter_stored(self, target):
+        """Marks a parameter in the current parameter definitions as stored.
+        This will skip the enforced undefined checks.
+        """
+        if self._param_def_block:
+            self._param_def_block[-1].discard(target)
+
+    def push_context_reference(self, target):
+        self._context_reference_stack.append(target)
+
+    def pop_context_reference(self):
+        self._context_reference_stack.pop()
+
+    def get_context_ref(self):
+        return self._context_reference_stack[-1]
+
+    def get_resolve_func(self):
+        target = self._context_reference_stack[-1]
+        if target == 'context':
+            return 'resolve'
+        return '%s.resolve' % target
+
+    def derive_context(self, frame):
+        return '%s.derived(%s)' % (
+            self.get_context_ref(),
+            self.dump_local_context(frame),
+        )
+
+    def parameter_is_undeclared(self, target):
+        """Checks if a given target is an undeclared parameter."""
+        if not self._param_def_block:
+            return False
+        return target in self._param_def_block[-1]
+
+    def push_assign_tracking(self):
+        """Pushes a new layer for assignment tracking."""
+        self._assign_stack.append(set())
+
+    def pop_assign_tracking(self, frame):
+        """Pops the topmost level for assignment tracking and updates the
+        context variables if necessary.
+        """
+        vars = self._assign_stack.pop()
+        if not frame.toplevel or not vars:
+            return
+        public_names = [x for x in vars if x[:1] != '_']
+        if len(vars) == 1:
+            name = next(iter(vars))
+            ref = frame.symbols.ref(name)
+            self.writeline('context.vars[%r] = %s' % (name, ref))
+        else:
+            self.writeline('context.vars.update({')
+            for idx, name in enumerate(vars):
+                if idx:
+                    self.write(', ')
+                ref = frame.symbols.ref(name)
+                self.write('%r: %s' % (name, ref))
+            self.write('})')
+        if public_names:
+            if len(public_names) == 1:
+                self.writeline('context.exported_vars.add(%r)' %
+                               public_names[0])
+            else:
+                self.writeline('context.exported_vars.update((%s))' %
+                               ', '.join(imap(repr, public_names)))
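+
+    # Example of the emitted code (the ``l_0_`` ref name is an assumption
+    # about the symbol naming, purely illustrative): a toplevel
+    # ``{% set title = 'Home' %}`` produces roughly
+    #
+    #     context.vars['title'] = l_0_title
+    #     context.exported_vars.add('title')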
+
+    # -- Statement Visitors
+
+    def visit_Template(self, node, frame=None):
+        assert frame is None, 'no root frame allowed'
+        eval_ctx = EvalContext(self.environment, self.name)
+
+        from .runtime import __all__ as exported
+        self.writeline('from __future__ import %s' % ', '.join(code_features))
+        self.writeline('from mdpopups.jinja2.runtime import ' + ', '.join(exported))
+
+        if self.environment.is_async:
+            self.writeline('from mdpopups.jinja2.asyncsupport import auto_await, '
+                           'auto_aiter, make_async_loop_context')
+
+        # if we want a deferred initialization we cannot move the
+        # environment into a local name
+        envenv = not self.defer_init and ', environment=environment' or ''
+
+        # do we have an extends tag at all?  If not, we can save some
+        # overhead by just not processing any inheritance code.
+        have_extends = node.find(nodes.Extends) is not None
+
+        # find all blocks
+        for block in node.find_all(nodes.Block):
+            if block.name in self.blocks:
+                self.fail('block %r defined twice' % block.name, block.lineno)
+            self.blocks[block.name] = block
+
+        # find all imports and import them
+        for import_ in node.find_all(nodes.ImportedName):
+            if import_.importname not in self.import_aliases:
+                imp = import_.importname
+                self.import_aliases[imp] = alias = self.temporary_identifier()
+                if '.' in imp:
+                    module, obj = imp.rsplit('.', 1)
+                    self.writeline('from %s import %s as %s' %
+                                   (module, obj, alias))
+                else:
+                    self.writeline('import %s as %s' % (imp, alias))
+
+        # add the load name
+        self.writeline('name = %r' % self.name)
+
+        # generate the root render function.
+        self.writeline('%s(context, missing=missing%s):' %
+                       (self.func('root'), envenv), extra=1)
+        self.indent()
+        self.write_commons()
+
+        # process the root
+        frame = Frame(eval_ctx)
+        if 'self' in find_undeclared(node.body, ('self',)):
+            ref = frame.symbols.declare_parameter('self')
+            self.writeline('%s = TemplateReference(context)' % ref)
+        frame.symbols.analyze_node(node)
+        frame.toplevel = frame.rootlevel = True
+        frame.require_output_check = have_extends and not self.has_known_extends
+        if have_extends:
+            self.writeline('parent_template = None')
+        self.enter_frame(frame)
+        self.pull_dependencies(node.body)
+        self.blockvisit(node.body, frame)
+        self.leave_frame(frame, with_python_scope=True)
+        self.outdent()
+
+        # make sure that the parent root is called.
+        if have_extends:
+            if not self.has_known_extends:
+                self.indent()
+                self.writeline('if parent_template is not None:')
+            self.indent()
+            if supports_yield_from and not self.environment.is_async:
+                self.writeline('yield from parent_template.'
+                               'root_render_func(context)')
+            else:
+                self.writeline('%sfor event in parent_template.'
+                               'root_render_func(context):' %
+                               (self.environment.is_async and 'async ' or ''))
+                self.indent()
+                self.writeline('yield event')
+                self.outdent()
+            self.outdent(1 + (not self.has_known_extends))
+
+        # at this point we now have the blocks collected and can visit them too.
+        for name, block in iteritems(self.blocks):
+            self.writeline('%s(context, missing=missing%s):' %
+                           (self.func('block_' + name), envenv),
+                           block, 1)
+            self.indent()
+            self.write_commons()
+            # It's important that we do not make this frame a child of the
+            # toplevel template.  This would cause a variety of
+            # interesting issues with identifier tracking.
+            block_frame = Frame(eval_ctx)
+            undeclared = find_undeclared(block.body, ('self', 'super'))
+            if 'self' in undeclared:
+                ref = block_frame.symbols.declare_parameter('self')
+                self.writeline('%s = TemplateReference(context)' % ref)
+            if 'super' in undeclared:
+                ref = block_frame.symbols.declare_parameter('super')
+                self.writeline('%s = context.super(%r, '
+                               'block_%s)' % (ref, name, name))
+            block_frame.symbols.analyze_node(block)
+            block_frame.block = name
+            self.enter_frame(block_frame)
+            self.pull_dependencies(block.body)
+            self.blockvisit(block.body, block_frame)
+            self.leave_frame(block_frame, with_python_scope=True)
+            self.outdent()
+
+        self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x)
+                                                   for x in self.blocks),
+                       extra=1)
+
+        # add a function that returns the debug info
+        self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x
+                                                    in self.debug_info))
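+
+    # Rough shape of the module produced by visit_Template (heavily abridged
+    # and illustrative; template name, blocks and debug pairs are made up):
+    #
+    #     from __future__ import division
+    #     from mdpopups.jinja2.runtime import ...
+    #     name = 'page.html'
+    #     def root(context, missing=missing, environment=environment):
+    #         resolve = context.resolve_or_missing
+    #         undefined = environment.undefined
+    #         if 0: yield None
+    #         ...
+    #     blocks = {'body': block_body}
+    #     debug_info = '1=9&2=10'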
+
+    def visit_Block(self, node, frame):
+        """Call a block and register it for the template."""
+        level = 0
+        if frame.toplevel:
+            # if we know that we are a child template, there is no need to
+            # check if we are one
+            if self.has_known_extends:
+                return
+            if self.extends_so_far > 0:
+                self.writeline('if parent_template is None:')
+                self.indent()
+                level += 1
+
+        if node.scoped:
+            context = self.derive_context(frame)
+        else:
+            context = self.get_context_ref()
+
+        if supports_yield_from and not self.environment.is_async and \
+           frame.buffer is None:
+            self.writeline('yield from context.blocks[%r][0](%s)' % (
+                           node.name, context), node)
+        else:
+            loop = self.environment.is_async and 'async for' or 'for'
+            self.writeline('%s event in context.blocks[%r][0](%s):' % (
+                           loop, node.name, context), node)
+            self.indent()
+            self.simple_write('event', frame)
+            self.outdent()
+
+        self.outdent(level)
+
+    def visit_Extends(self, node, frame):
+        """Calls the extender."""
+        if not frame.toplevel:
+            self.fail('cannot use extend from a non top-level scope',
+                      node.lineno)
+
+        # if the number of extends statements so far is zero, we don't
+        # have to add a check whether something extended
+        # the template before this one.
+        if self.extends_so_far > 0:
+
+            # if we have a known extends we just add a template runtime
+            # error into the generated code.  We could catch that at compile
+            # time too, but it is better not to confuse users by throwing the
+            # same error at different times just "because we can".
+            if not self.has_known_extends:
+                self.writeline('if parent_template is not None:')
+                self.indent()
+            self.writeline('raise TemplateRuntimeError(%r)' %
+                           'extended multiple times')
+
+            # if we have a known extends already we don't need that code here
+            # as we know that the template execution will end here.
+            if self.has_known_extends:
+                raise CompilerExit()
+            else:
+                self.outdent()
+
+        self.writeline('parent_template = environment.get_template(', node)
+        self.visit(node.template, frame)
+        self.write(', %r)' % self.name)
+        self.writeline('for name, parent_block in parent_template.'
+                       'blocks.%s():' % dict_item_iter)
+        self.indent()
+        self.writeline('context.blocks.setdefault(name, []).'
+                       'append(parent_block)')
+        self.outdent()
+
+        # if this extends statement was in the root level we can take
+        # advantage of that information and simplify the generated code
+        # in the top level from this point onwards
+        if frame.rootlevel:
+            self.has_known_extends = True
+
+        # and now we have one more
+        self.extends_so_far += 1
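+
+    # Rough shape of the code emitted for ``{% extends "base.html" %}`` in a
+    # template named ``child.html`` (illustrative values, items() on Python 3):
+    #
+    #     parent_template = environment.get_template('base.html', 'child.html')
+    #     for name, parent_block in parent_template.blocks.items():
+    #         context.blocks.setdefault(name, []).append(parent_block)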
+
+    def visit_Include(self, node, frame):
+        """Handles includes."""
+        if node.ignore_missing:
+            self.writeline('try:')
+            self.indent()
+
+        func_name = 'get_or_select_template'
+        if isinstance(node.template, nodes.Const):
+            if isinstance(node.template.value, string_types):
+                func_name = 'get_template'
+            elif isinstance(node.template.value, (tuple, list)):
+                func_name = 'select_template'
+        elif isinstance(node.template, (nodes.Tuple, nodes.List)):
+            func_name = 'select_template'
+
+        self.writeline('template = environment.%s(' % func_name, node)
+        self.visit(node.template, frame)
+        self.write(', %r)' % self.name)
+        if node.ignore_missing:
+            self.outdent()
+            self.writeline('except TemplateNotFound:')
+            self.indent()
+            self.writeline('pass')
+            self.outdent()
+            self.writeline('else:')
+            self.indent()
+
+        skip_event_yield = False
+        if node.with_context:
+            loop = self.environment.is_async and 'async for' or 'for'
+            self.writeline('%s event in template.root_render_func('
+                           'template.new_context(context.get_all(), True, '
+                           '%s)):' % (loop, self.dump_local_context(frame)))
+        elif self.environment.is_async:
+            self.writeline('for event in (await '
+                           'template._get_default_module_async())'
+                           '._body_stream:')
+        else:
+            if supports_yield_from:
+                self.writeline('yield from template._get_default_module()'
+                               '._body_stream')
+                skip_event_yield = True
+            else:
+                self.writeline('for event in template._get_default_module()'
+                               '._body_stream:')
+
+        if not skip_event_yield:
+            self.indent()
+            self.simple_write('event', frame)
+            self.outdent()
+
+        if node.ignore_missing:
+            self.outdent()
+
+    def visit_Import(self, node, frame):
+        """Visit regular imports."""
+        self.writeline('%s = ' % frame.symbols.ref(node.target), node)
+        if frame.toplevel:
+            self.write('context.vars[%r] = ' % node.target)
+        if self.environment.is_async:
+            self.write('await ')
+        self.write('environment.get_template(')
+        self.visit(node.template, frame)
+        self.write(', %r).' % self.name)
+        if node.with_context:
+            self.write('make_module%s(context.get_all(), True, %s)'
+                       % (self.environment.is_async and '_async' or '',
+                          self.dump_local_context(frame)))
+        elif self.environment.is_async:
+            self.write('_get_default_module_async()')
+        else:
+            self.write('_get_default_module()')
+        if frame.toplevel and not node.target.startswith('_'):
+            self.writeline('context.exported_vars.discard(%r)' % node.target)
+
+    def visit_FromImport(self, node, frame):
+        """Visit named imports."""
+        self.newline(node)
+        self.write('included_template = %senvironment.get_template('
+                   % (self.environment.is_async and 'await ' or ''))
+        self.visit(node.template, frame)
+        self.write(', %r).' % self.name)
+        if node.with_context:
+            self.write('make_module%s(context.get_all(), True, %s)'
+                       % (self.environment.is_async and '_async' or '',
+                          self.dump_local_context(frame)))
+        elif self.environment.is_async:
+            self.write('_get_default_module_async()')
+        else:
+            self.write('_get_default_module()')
+
+        var_names = []
+        discarded_names = []
+        for name in node.names:
+            if isinstance(name, tuple):
+                name, alias = name
+            else:
+                alias = name
+            self.writeline('%s = getattr(included_template, '
+                           '%r, missing)' % (frame.symbols.ref(alias), name))
+            self.writeline('if %s is missing:' % frame.symbols.ref(alias))
+            self.indent()
+            self.writeline('%s = undefined(%r %% '
+                           'included_template.__name__, '
+                           'name=%r)' %
+                           (frame.symbols.ref(alias),
+                            'the template %%r (imported on %s) does '
+                            'not export the requested name %s' % (
+                                self.position(node),
+                                repr(name)
+                           ), name))
+            self.outdent()
+            if frame.toplevel:
+                var_names.append(alias)
+                if not alias.startswith('_'):
+                    discarded_names.append(alias)
+
+        if var_names:
+            if len(var_names) == 1:
+                name = var_names[0]
+                self.writeline('context.vars[%r] = %s' %
+                               (name, frame.symbols.ref(name)))
+            else:
+                self.writeline('context.vars.update({%s})' % ', '.join(
+                    '%r: %s' % (name, frame.symbols.ref(name)) for name in var_names
+                ))
+        if discarded_names:
+            if len(discarded_names) == 1:
+                self.writeline('context.exported_vars.discard(%r)' %
+                               discarded_names[0])
+            else:
+                self.writeline('context.exported_vars.difference_'
+                               'update((%s))' % ', '.join(imap(repr, discarded_names)))
+
+    def visit_For(self, node, frame):
+        loop_frame = frame.inner()
+        test_frame = frame.inner()
+        else_frame = frame.inner()
+
+        # try to figure out if we have an extended loop.  An extended loop
+        # is necessary if the loop is in recursive mode or if the special loop
+        # variable is accessed in the body.
+        extended_loop = node.recursive or 'loop' in \
+                        find_undeclared(node.iter_child_nodes(
+                            only=('body',)), ('loop',))
+
+        loop_ref = None
+        if extended_loop:
+            loop_ref = loop_frame.symbols.declare_parameter('loop')
+
+        loop_frame.symbols.analyze_node(node, for_branch='body')
+        if node.else_:
+            else_frame.symbols.analyze_node(node, for_branch='else')
+
+        if node.test:
+            loop_filter_func = self.temporary_identifier()
+            test_frame.symbols.analyze_node(node, for_branch='test')
+            self.writeline('%s(fiter):' % self.func(loop_filter_func), node.test)
+            self.indent()
+            self.enter_frame(test_frame)
+            self.writeline(self.environment.is_async and 'async for ' or 'for ')
+            self.visit(node.target, loop_frame)
+            self.write(' in ')
+            self.write(self.environment.is_async and 'auto_aiter(fiter)' or 'fiter')
+            self.write(':')
+            self.indent()
+            self.writeline('if ', node.test)
+            self.visit(node.test, test_frame)
+            self.write(':')
+            self.indent()
+            self.writeline('yield ')
+            self.visit(node.target, loop_frame)
+            self.outdent(3)
+            self.leave_frame(test_frame, with_python_scope=True)
+
+        # if we don't have a recursive loop we have to find the shadowed
+        # variables at that point.  Because loops can be nested but the loop
+        # variable is a special one we have to enforce aliasing for it.
+        if node.recursive:
+            self.writeline('%s(reciter, loop_render_func, depth=0):' %
+                           self.func('loop'), node)
+            self.indent()
+            self.buffer(loop_frame)
+
+            # Use the same buffer for the else frame
+            else_frame.buffer = loop_frame.buffer
+
+        # make sure the loop variable is a special one and raise a template
+        # assertion error if a loop tries to write to loop
+        if extended_loop:
+            self.writeline('%s = missing' % loop_ref)
+
+        for name in node.find_all(nodes.Name):
+            if name.ctx == 'store' and name.name == 'loop':
+                self.fail('Can\'t assign to special loop variable '
+                          'in for-loop target', name.lineno)
+
+        if node.else_:
+            iteration_indicator = self.temporary_identifier()
+            self.writeline('%s = 1' % iteration_indicator)
+
+        self.writeline(self.environment.is_async and 'async for ' or 'for ', node)
+        self.visit(node.target, loop_frame)
+        if extended_loop:
+            if self.environment.is_async:
+                self.write(', %s in await make_async_loop_context(' % loop_ref)
+            else:
+                self.write(', %s in LoopContext(' % loop_ref)
+        else:
+            self.write(' in ')
+
+        if node.test:
+            self.write('%s(' % loop_filter_func)
+        if node.recursive:
+            self.write('reciter')
+        else:
+            if self.environment.is_async and not extended_loop:
+                self.write('auto_aiter(')
+            self.visit(node.iter, frame)
+            if self.environment.is_async and not extended_loop:
+                self.write(')')
+        if node.test:
+            self.write(')')
+
+        if node.recursive:
+            self.write(', undefined, loop_render_func, depth):')
+        else:
+            self.write(extended_loop and ', undefined):' or ':')
+
+        self.indent()
+        self.enter_frame(loop_frame)
+
+        self.blockvisit(node.body, loop_frame)
+        if node.else_:
+            self.writeline('%s = 0' % iteration_indicator)
+        self.outdent()
+        self.leave_frame(loop_frame, with_python_scope=node.recursive
+                         and not node.else_)
+
+        if node.else_:
+            self.writeline('if %s:' % iteration_indicator)
+            self.indent()
+            self.enter_frame(else_frame)
+            self.blockvisit(node.else_, else_frame)
+            self.leave_frame(else_frame)
+            self.outdent()
+
+        # if the node was recursive we have to return the buffer contents
+        # and start the iteration code
+        if node.recursive:
+            self.return_buffer_contents(loop_frame)
+            self.outdent()
+            self.start_write(frame, node)
+            if self.environment.is_async:
+                self.write('await ')
+            self.write('loop(')
+            if self.environment.is_async:
+                self.write('auto_aiter(')
+            self.visit(node.iter, frame)
+            if self.environment.is_async:
+                self.write(')')
+            self.write(', loop)')
+            self.end_write(frame)
+
+    def visit_If(self, node, frame):
+        if_frame = frame.soft()
+        self.writeline('if ', node)
+        self.visit(node.test, if_frame)
+        self.write(':')
+        self.indent()
+        self.blockvisit(node.body, if_frame)
+        self.outdent()
+        for elif_ in node.elif_:
+            self.writeline('elif ', elif_)
+            self.visit(elif_.test, if_frame)
+            self.write(':')
+            self.indent()
+            self.blockvisit(elif_.body, if_frame)
+            self.outdent()
+        if node.else_:
+            self.writeline('else:')
+            self.indent()
+            self.blockvisit(node.else_, if_frame)
+            self.outdent()
+
+    def visit_Macro(self, node, frame):
+        macro_frame, macro_ref = self.macro_body(node, frame)
+        self.newline()
+        if frame.toplevel:
+            if not node.name.startswith('_'):
+                self.write('context.exported_vars.add(%r)' % node.name)
+            ref = frame.symbols.ref(node.name)
+            self.writeline('context.vars[%r] = ' % node.name)
+        self.write('%s = ' % frame.symbols.ref(node.name))
+        self.macro_def(macro_ref, macro_frame)
+
+    def visit_CallBlock(self, node, frame):
+        call_frame, macro_ref = self.macro_body(node, frame)
+        self.writeline('caller = ')
+        self.macro_def(macro_ref, call_frame)
+        self.start_write(frame, node)
+        self.visit_Call(node.call, frame, forward_caller=True)
+        self.end_write(frame)
+
+    def visit_FilterBlock(self, node, frame):
+        filter_frame = frame.inner()
+        filter_frame.symbols.analyze_node(node)
+        self.enter_frame(filter_frame)
+        self.buffer(filter_frame)
+        self.blockvisit(node.body, filter_frame)
+        self.start_write(frame, node)
+        self.visit_Filter(node.filter, filter_frame)
+        self.end_write(frame)
+        self.leave_frame(filter_frame)
+
+    def visit_With(self, node, frame):
+        with_frame = frame.inner()
+        with_frame.symbols.analyze_node(node)
+        self.enter_frame(with_frame)
+        for idx, (target, expr) in enumerate(izip(node.targets, node.values)):
+            self.newline()
+            self.visit(target, with_frame)
+            self.write(' = ')
+            self.visit(expr, frame)
+        self.blockvisit(node.body, with_frame)
+        self.leave_frame(with_frame)
+
+    def visit_ExprStmt(self, node, frame):
+        self.newline(node)
+        self.visit(node.node, frame)
+
+    def visit_Output(self, node, frame):
+        # if we have a known extends statement, we don't output anything
+        # if we are in a require_output_check section
+        if self.has_known_extends and frame.require_output_check:
+            return
+
+        allow_constant_finalize = True
+        if self.environment.finalize:
+            func = self.environment.finalize
+            if getattr(func, 'contextfunction', False) or \
+               getattr(func, 'evalcontextfunction', False):
+                allow_constant_finalize = False
+            elif getattr(func, 'environmentfunction', False):
+                finalize = lambda x: text_type(
+                    self.environment.finalize(self.environment, x))
+            else:
+                finalize = lambda x: text_type(self.environment.finalize(x))
+        else:
+            finalize = text_type
+
+        # if we are inside a frame that requires output checking, we do so
+        outdent_later = False
+        if frame.require_output_check:
+            self.writeline('if parent_template is None:')
+            self.indent()
+            outdent_later = True
+
+        # try to evaluate as many chunks as possible into a static
+        # string at compile time.
+        body = []
+        for child in node.nodes:
+            try:
+                if not allow_constant_finalize:
+                    raise nodes.Impossible()
+                const = child.as_const(frame.eval_ctx)
+            except nodes.Impossible:
+                body.append(child)
+                continue
+            # the frame can't be volatile here, because otherwise the
+            # as_const() function would raise an Impossible exception
+            # at that point.
+            try:
+                if frame.eval_ctx.autoescape:
+                    if hasattr(const, '__html__'):
+                        const = const.__html__()
+                    else:
+                        const = escape(const)
+                const = finalize(const)
+            except Exception:
+                # if something goes wrong here we evaluate the node
+                # at runtime for easier debugging
+                body.append(child)
+                continue
+            if body and isinstance(body[-1], list):
+                body[-1].append(const)
+            else:
+                body.append([const])
+
+        # if we have fewer than 3 nodes or a buffer we yield or extend/append
+        if len(body) < 3 or frame.buffer is not None:
+            if frame.buffer is not None:
+                # for one item we append, for more we extend
+                if len(body) == 1:
+                    self.writeline('%s.append(' % frame.buffer)
+                else:
+                    self.writeline('%s.extend((' % frame.buffer)
+                self.indent()
+            for item in body:
+                if isinstance(item, list):
+                    val = repr(concat(item))
+                    if frame.buffer is None:
+                        self.writeline('yield ' + val)
+                    else:
+                        self.writeline(val + ',')
+                else:
+                    if frame.buffer is None:
+                        self.writeline('yield ', item)
+                    else:
+                        self.newline(item)
+                    close = 1
+                    if frame.eval_ctx.volatile:
+                        self.write('(escape if context.eval_ctx.autoescape'
+                                   ' else to_string)(')
+                    elif frame.eval_ctx.autoescape:
+                        self.write('escape(')
+                    else:
+                        self.write('to_string(')
+                    if self.environment.finalize is not None:
+                        self.write('environment.finalize(')
+                        if getattr(self.environment.finalize,
+                                   "contextfunction", False):
+                            self.write('context, ')
+                        close += 1
+                    self.visit(item, frame)
+                    self.write(')' * close)
+                    if frame.buffer is not None:
+                        self.write(',')
+            if frame.buffer is not None:
+                # close the open parentheses
+                self.outdent()
+                self.writeline(len(body) == 1 and ')' or '))')
+
+        # otherwise we create a format string as this is faster in that case
+        else:
+            format = []
+            arguments = []
+            for item in body:
+                if isinstance(item, list):
+                    format.append(concat(item).replace('%', '%%'))
+                else:
+                    format.append('%s')
+                    arguments.append(item)
+            self.writeline('yield ')
+            self.write(repr(concat(format)) + ' % (')
+            self.indent()
+            for argument in arguments:
+                self.newline(argument)
+                close = 0
+                if frame.eval_ctx.volatile:
+                    self.write('(escape if context.eval_ctx.autoescape else'
+                               ' to_string)(')
+                    close += 1
+                elif frame.eval_ctx.autoescape:
+                    self.write('escape(')
+                    close += 1
+                if self.environment.finalize is not None:
+                    self.write('environment.finalize(')
+                    if getattr(self.environment.finalize,
+                               'contextfunction', False):
+                        self.write('context, ')
+                    elif getattr(self.environment.finalize,
+                               'evalcontextfunction', False):
+                        self.write('context.eval_ctx, ')
+                    elif getattr(self.environment.finalize,
+                               'environmentfunction', False):
+                        self.write('environment, ')
+                    close += 1
+                self.visit(argument, frame)
+                self.write(')' * close + ', ')
+            self.outdent()
+            self.writeline(')')
+
+        if outdent_later:
+            self.outdent()
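+
+    # Illustrative note (not part of the vendored upstream source): as a rough
+    # sketch of the two emission strategies above, a short unbuffered body
+    # (fewer than three chunks) is written piecewise, roughly
+    #     yield u'Hello '
+    #     yield to_string(name)
+    # while longer runs of constant and dynamic chunks are folded into one
+    # format string, roughly
+    #     yield u'Hello %s, goodbye %s' % (name, other)
+    # with escape() or finalize() wrappers added around the arguments when
+    # the evaluation context requires them.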
+
+    def visit_Assign(self, node, frame):
+        self.push_assign_tracking()
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(' = ')
+        self.visit(node.node, frame)
+        self.pop_assign_tracking(frame)
+
+    def visit_AssignBlock(self, node, frame):
+        self.push_assign_tracking()
+        block_frame = frame.inner()
+        # This is a special case.  Since a set block always captures we
+        # will disable output checks.  This way one can use set blocks
+        # toplevel even in extended templates.
+        block_frame.require_output_check = False
+        block_frame.symbols.analyze_node(node)
+        self.enter_frame(block_frame)
+        self.buffer(block_frame)
+        self.blockvisit(node.body, block_frame)
+        self.newline(node)
+        self.visit(node.target, frame)
+        self.write(' = (Markup if context.eval_ctx.autoescape '
+                   'else identity)(')
+        if node.filter is not None:
+            self.visit_Filter(node.filter, block_frame)
+        else:
+            self.write('concat(%s)' % block_frame.buffer)
+        self.write(')')
+        self.pop_assign_tracking(frame)
+        self.leave_frame(block_frame)
+
+    # -- Expression Visitors
+
+    def visit_Name(self, node, frame):
+        if node.ctx == 'store' and frame.toplevel:
+            if self._assign_stack:
+                self._assign_stack[-1].add(node.name)
+        ref = frame.symbols.ref(node.name)
+
+        # If we are looking up a variable we might have to deal with the
+        # case where it's undefined.  We can skip that case if the load
+        # instruction indicates a parameter, which is always defined.
+        if node.ctx == 'load':
+            load = frame.symbols.find_load(ref)
+            if not (load is not None and load[0] == VAR_LOAD_PARAMETER and \
+                    not self.parameter_is_undeclared(ref)):
+                self.write('(undefined(name=%r) if %s is missing else %s)' %
+                           (node.name, ref, ref))
+                return
+
+        self.write(ref)
+
+    def visit_NSRef(self, node, frame):
+        # NSRefs can only be used to store values; since they use the normal
+        # `foo.bar` notation they will be parsed as a normal attribute access
+        # when used anywhere but in a `set` context
+        ref = frame.symbols.ref(node.name)
+        self.writeline('if not isinstance(%s, Namespace):' % ref)
+        self.indent()
+        self.writeline('raise TemplateRuntimeError(%r)' %
+                       'cannot assign attribute on non-namespace object')
+        self.outdent()
+        self.writeline('%s[%r]' % (ref, node.attr))
+
+    def visit_Const(self, node, frame):
+        val = node.as_const(frame.eval_ctx)
+        if isinstance(val, float):
+            self.write(str(val))
+        else:
+            self.write(repr(val))
+
+    def visit_TemplateData(self, node, frame):
+        try:
+            self.write(repr(node.as_const(frame.eval_ctx)))
+        except nodes.Impossible:
+            self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)'
+                       % node.data)
+
+    def visit_Tuple(self, node, frame):
+        self.write('(')
+        idx = -1
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(', ')
+            self.visit(item, frame)
+        self.write(idx == 0 and ',)' or ')')
+
+    def visit_List(self, node, frame):
+        self.write('[')
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(', ')
+            self.visit(item, frame)
+        self.write(']')
+
+    def visit_Dict(self, node, frame):
+        self.write('{')
+        for idx, item in enumerate(node.items):
+            if idx:
+                self.write(', ')
+            self.visit(item.key, frame)
+            self.write(': ')
+            self.visit(item.value, frame)
+        self.write('}')
+
+    def binop(operator, interceptable=True):
+        @optimizeconst
+        def visitor(self, node, frame):
+            if self.environment.sandboxed and \
+               operator in self.environment.intercepted_binops:
+                self.write('environment.call_binop(context, %r, ' % operator)
+                self.visit(node.left, frame)
+                self.write(', ')
+                self.visit(node.right, frame)
+            else:
+                self.write('(')
+                self.visit(node.left, frame)
+                self.write(' %s ' % operator)
+                self.visit(node.right, frame)
+            self.write(')')
+        return visitor
+
+    def uaop(operator, interceptable=True):
+        @optimizeconst
+        def visitor(self, node, frame):
+            if self.environment.sandboxed and \
+               operator in self.environment.intercepted_unops:
+                self.write('environment.call_unop(context, %r, ' % operator)
+                self.visit(node.node, frame)
+            else:
+                self.write('(' + operator)
+                self.visit(node.node, frame)
+            self.write(')')
+        return visitor
+
+    visit_Add = binop('+')
+    visit_Sub = binop('-')
+    visit_Mul = binop('*')
+    visit_Div = binop('/')
+    visit_FloorDiv = binop('//')
+    visit_Pow = binop('**')
+    visit_Mod = binop('%')
+    visit_And = binop('and', interceptable=False)
+    visit_Or = binop('or', interceptable=False)
+    visit_Pos = uaop('+')
+    visit_Neg = uaop('-')
+    visit_Not = uaop('not ', interceptable=False)
+    del binop, uaop
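+
+    # Illustrative note (not part of the vendored upstream source): the binop
+    # and uaop factories above stamp out one visitor per operator.  Roughly,
+    # `{{ a + b }}` compiles to the expression `(a + b)`, while a sandboxed
+    # environment that intercepts '+' instead emits
+    # `environment.call_binop(context, '+', a, b)` so the sandbox can inspect
+    # or reject the operation at runtime.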
+
+    @optimizeconst
+    def visit_Concat(self, node, frame):
+        if frame.eval_ctx.volatile:
+            func_name = '(context.eval_ctx.volatile and' \
+                        ' markup_join or unicode_join)'
+        elif frame.eval_ctx.autoescape:
+            func_name = 'markup_join'
+        else:
+            func_name = 'unicode_join'
+        self.write('%s((' % func_name)
+        for arg in node.nodes:
+            self.visit(arg, frame)
+            self.write(', ')
+        self.write('))')
+
+    @optimizeconst
+    def visit_Compare(self, node, frame):
+        self.visit(node.expr, frame)
+        for op in node.ops:
+            self.visit(op, frame)
+
+    def visit_Operand(self, node, frame):
+        self.write(' %s ' % operators[node.op])
+        self.visit(node.expr, frame)
+
+    @optimizeconst
+    def visit_Getattr(self, node, frame):
+        self.write('environment.getattr(')
+        self.visit(node.node, frame)
+        self.write(', %r)' % node.attr)
+
+    @optimizeconst
+    def visit_Getitem(self, node, frame):
+        # slices bypass the environment getitem method.
+        if isinstance(node.arg, nodes.Slice):
+            self.visit(node.node, frame)
+            self.write('[')
+            self.visit(node.arg, frame)
+            self.write(']')
+        else:
+            self.write('environment.getitem(')
+            self.visit(node.node, frame)
+            self.write(', ')
+            self.visit(node.arg, frame)
+            self.write(')')
+
+    def visit_Slice(self, node, frame):
+        if node.start is not None:
+            self.visit(node.start, frame)
+        self.write(':')
+        if node.stop is not None:
+            self.visit(node.stop, frame)
+        if node.step is not None:
+            self.write(':')
+            self.visit(node.step, frame)
+
+    @optimizeconst
+    def visit_Filter(self, node, frame):
+        if self.environment.is_async:
+            self.write('await auto_await(')
+        self.write(self.filters[node.name] + '(')
+        func = self.environment.filters.get(node.name)
+        if func is None:
+            self.fail('no filter named %r' % node.name, node.lineno)
+        if getattr(func, 'contextfilter', False):
+            self.write('context, ')
+        elif getattr(func, 'evalcontextfilter', False):
+            self.write('context.eval_ctx, ')
+        elif getattr(func, 'environmentfilter', False):
+            self.write('environment, ')
+
+        # if the filter node is None we are inside a filter block
+        # and want to write to the current buffer
+        if node.node is not None:
+            self.visit(node.node, frame)
+        elif frame.eval_ctx.volatile:
+            self.write('(context.eval_ctx.autoescape and'
+                       ' Markup(concat(%s)) or concat(%s))' %
+                       (frame.buffer, frame.buffer))
+        elif frame.eval_ctx.autoescape:
+            self.write('Markup(concat(%s))' % frame.buffer)
+        else:
+            self.write('concat(%s)' % frame.buffer)
+        self.signature(node, frame)
+        self.write(')')
+        if self.environment.is_async:
+            self.write(')')
+
+    @optimizeconst
+    def visit_Test(self, node, frame):
+        self.write(self.tests[node.name] + '(')
+        if node.name not in self.environment.tests:
+            self.fail('no test named %r' % node.name, node.lineno)
+        self.visit(node.node, frame)
+        self.signature(node, frame)
+        self.write(')')
+
+    @optimizeconst
+    def visit_CondExpr(self, node, frame):
+        def write_expr2():
+            if node.expr2 is not None:
+                return self.visit(node.expr2, frame)
+            self.write('undefined(%r)' % ('the inline if-'
+                       'expression on %s evaluated to false and '
+                       'no else section was defined.' % self.position(node)))
+
+        self.write('(')
+        self.visit(node.expr1, frame)
+        self.write(' if ')
+        self.visit(node.test, frame)
+        self.write(' else ')
+        write_expr2()
+        self.write(')')
+
+    @optimizeconst
+    def visit_Call(self, node, frame, forward_caller=False):
+        if self.environment.is_async:
+            self.write('await auto_await(')
+        if self.environment.sandboxed:
+            self.write('environment.call(context, ')
+        else:
+            self.write('context.call(')
+        self.visit(node.node, frame)
+        extra_kwargs = forward_caller and {'caller': 'caller'} or None
+        self.signature(node, frame, extra_kwargs)
+        self.write(')')
+        if self.environment.is_async:
+            self.write(')')
+
+    def visit_Keyword(self, node, frame):
+        self.write(node.key + '=')
+        self.visit(node.value, frame)
+
+    # -- Unused nodes for extensions
+
+    def visit_MarkSafe(self, node, frame):
+        self.write('Markup(')
+        self.visit(node.expr, frame)
+        self.write(')')
+
+    def visit_MarkSafeIfAutoescape(self, node, frame):
+        self.write('(context.eval_ctx.autoescape and Markup or identity)(')
+        self.visit(node.expr, frame)
+        self.write(')')
+
+    def visit_EnvironmentAttribute(self, node, frame):
+        self.write('environment.' + node.name)
+
+    def visit_ExtensionAttribute(self, node, frame):
+        self.write('environment.extensions[%r].%s' % (node.identifier, node.name))
+
+    def visit_ImportedName(self, node, frame):
+        self.write(self.import_aliases[node.importname])
+
+    def visit_InternalName(self, node, frame):
+        self.write(node.name)
+
+    def visit_ContextReference(self, node, frame):
+        self.write('context')
+
+    def visit_Continue(self, node, frame):
+        self.writeline('continue', node)
+
+    def visit_Break(self, node, frame):
+        self.writeline('break', node)
+
+    def visit_Scope(self, node, frame):
+        scope_frame = frame.inner()
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+
+    def visit_OverlayScope(self, node, frame):
+        ctx = self.temporary_identifier()
+        self.writeline('%s = %s' % (ctx, self.derive_context(frame)))
+        self.writeline('%s.vars = ' % ctx)
+        self.visit(node.context, frame)
+        self.push_context_reference(ctx)
+
+        scope_frame = frame.inner(isolated=True)
+        scope_frame.symbols.analyze_node(node)
+        self.enter_frame(scope_frame)
+        self.blockvisit(node.body, scope_frame)
+        self.leave_frame(scope_frame)
+        self.pop_context_reference()
+
+    def visit_EvalContextModifier(self, node, frame):
+        for keyword in node.options:
+            self.writeline('context.eval_ctx.%s = ' % keyword.key)
+            self.visit(keyword.value, frame)
+            try:
+                val = keyword.value.as_const(frame.eval_ctx)
+            except nodes.Impossible:
+                frame.eval_ctx.volatile = True
+            else:
+                setattr(frame.eval_ctx, keyword.key, val)
+
+    def visit_ScopedEvalContextModifier(self, node, frame):
+        old_ctx_name = self.temporary_identifier()
+        saved_ctx = frame.eval_ctx.save()
+        self.writeline('%s = context.eval_ctx.save()' % old_ctx_name)
+        self.visit_EvalContextModifier(node, frame)
+        for child in node.body:
+            self.visit(child, frame)
+        frame.eval_ctx.revert(saved_ctx)
+        self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name)
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/constants.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/constants.py
new file mode 100644
index 00000000..11efd1ed
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/constants.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja.constants
+    ~~~~~~~~~~~~~~~
+
+    Various constants.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+
+
+#: list of lorem ipsum words used by the lipsum() helper function
+LOREM_IPSUM_WORDS = u'''\
+a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at
+auctor augue bibendum blandit class commodo condimentum congue consectetuer
+consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus
+diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend
+elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames
+faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac
+hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum
+justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem
+luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie
+mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non
+nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque
+penatibus per pharetra phasellus placerat platea porta porttitor posuere
+potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus
+ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit
+sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor
+tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices
+ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus
+viverra volutpat vulputate'''
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/debug.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/debug.py
new file mode 100644
index 00000000..625ca4d2
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/debug.py
@@ -0,0 +1,372 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2.debug
+    ~~~~~~~~~~~~
+
+    Implements the debug interface for Jinja.  This module does some pretty
+    ugly stuff with the Python traceback system in order to achieve tracebacks
+    with correct line numbers, locals and contents.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+import sys
+import traceback
+from types import TracebackType, CodeType
+from .utils import missing, internal_code
+from .exceptions import TemplateSyntaxError
+from ._compat import iteritems, reraise, PY2
+
+# on pypy we can take advantage of transparent proxies
+try:
+    from __pypy__ import tproxy
+except ImportError:
+    tproxy = None
+
+
+# what does the raise helper look like?
+try:
+    exec("raise TypeError, 'foo'")
+except SyntaxError:
+    raise_helper = 'raise __jinja_exception__[1]'
+except TypeError:
+    raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
+
+
+class TracebackFrameProxy(object):
+    """Proxies a traceback frame."""
+
+    def __init__(self, tb):
+        self.tb = tb
+        self._tb_next = None
+
+    @property
+    def tb_next(self):
+        return self._tb_next
+
+    def set_next(self, next):
+        if tb_set_next is not None:
+            try:
+                tb_set_next(self.tb, next and next.tb or None)
+            except Exception:
+                # this function can fail due to all the hackery it does
+                # on various Python implementations.  We just catch the
+                # errors here and ignore them if necessary.
+                pass
+        self._tb_next = next
+
+    @property
+    def is_jinja_frame(self):
+        return '__jinja_template__' in self.tb.tb_frame.f_globals
+
+    def __getattr__(self, name):
+        return getattr(self.tb, name)
+
+
+def make_frame_proxy(frame):
+    proxy = TracebackFrameProxy(frame)
+    if tproxy is None:
+        return proxy
+    def operation_handler(operation, *args, **kwargs):
+        if operation in ('__getattribute__', '__getattr__'):
+            return getattr(proxy, args[0])
+        elif operation == '__setattr__':
+            proxy.__setattr__(*args, **kwargs)
+        else:
+            return getattr(proxy, operation)(*args, **kwargs)
+    return tproxy(TracebackType, operation_handler)
+
+
+class ProcessedTraceback(object):
+    """Holds a Jinja preprocessed traceback for printing or reraising."""
+
+    def __init__(self, exc_type, exc_value, frames):
+        assert frames, 'no frames for this traceback?'
+        self.exc_type = exc_type
+        self.exc_value = exc_value
+        self.frames = frames
+
+        # chain the frames (which are proxies) together again
+        prev_tb = None
+        for tb in self.frames:
+            if prev_tb is not None:
+                prev_tb.set_next(tb)
+            prev_tb = tb
+        prev_tb.set_next(None)
+
+    def render_as_text(self, limit=None):
+        """Return a string with the traceback."""
+        lines = traceback.format_exception(self.exc_type, self.exc_value,
+                                           self.frames[0], limit=limit)
+        return ''.join(lines).rstrip()
+
+    def render_as_html(self, full=False):
+        """Return a unicode string with the traceback as rendered HTML."""
+        from .debugrenderer import render_traceback
+        return u'%s\n\n<!--\n%s\n-->' % (
+            render_traceback(self, full=full),
+            self.render_as_text().decode('utf-8', 'replace')
+        )
+
+    @property
+    def is_template_syntax_error(self):
+        """`True` if this is a template syntax error."""
+        return isinstance(self.exc_value, TemplateSyntaxError)
+
+    @property
+    def exc_info(self):
+        """Exception info tuple with a proxy around the frame objects."""
+        return self.exc_type, self.exc_value, self.frames[0]
+
+    @property
+    def standard_exc_info(self):
+        """Standard python exc_info for re-raising"""
+        tb = self.frames[0]
+        # the frame will be an actual traceback (or transparent proxy) if
+        # we are on pypy or a python implementation with support for tproxy
+        if type(tb) is not TracebackType:
+            tb = tb.tb
+        return self.exc_type, self.exc_value, tb
+
+
+def make_traceback(exc_info, source_hint=None):
+    """Creates a processed traceback object from the exc_info."""
+    exc_type, exc_value, tb = exc_info
+    if isinstance(exc_value, TemplateSyntaxError):
+        exc_info = translate_syntax_error(exc_value, source_hint)
+        initial_skip = 0
+    else:
+        initial_skip = 1
+    return translate_exception(exc_info, initial_skip)
+
+
+def translate_syntax_error(error, source=None):
+    """Rewrites a syntax error to please traceback systems."""
+    error.source = source
+    error.translated = True
+    exc_info = (error.__class__, error, None)
+    filename = error.filename
+    if filename is None:
+        filename = '<unknown>'
+    return fake_exc_info(exc_info, filename, error.lineno)
+
+
+def translate_exception(exc_info, initial_skip=0):
+    """If passed an exc_info it will automatically rewrite the exceptions
+    all the way down to the correct line numbers and frames.
+    """
+    tb = exc_info[2]
+    frames = []
+
+    # skip some internal frames if wanted
+    for x in range(initial_skip):
+        if tb is not None:
+            tb = tb.tb_next
+    initial_tb = tb
+
+    while tb is not None:
+        # skip frames decorated with @internalcode.  These are internal
+        # calls we can't avoid and that are useless in template debugging
+        # output.
+        if tb.tb_frame.f_code in internal_code:
+            tb = tb.tb_next
+            continue
+
+        # save a reference to the next frame if we override the current
+        # one with a faked one.
+        next = tb.tb_next
+
+        # fake template exceptions
+        template = tb.tb_frame.f_globals.get('__jinja_template__')
+        if template is not None:
+            lineno = template.get_corresponding_lineno(tb.tb_lineno)
+            tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
+                               lineno)[2]
+
+        frames.append(make_frame_proxy(tb))
+        tb = next
+
+    # if we don't have any exceptions in the frames left, we have to
+    # reraise it unchanged.
+    # XXX: can we backup here?  when could this happen?
+    if not frames:
+        reraise(exc_info[0], exc_info[1], exc_info[2])
+
+    return ProcessedTraceback(exc_info[0], exc_info[1], frames)
+
+
+def get_jinja_locals(real_locals):
+    ctx = real_locals.get('context')
+    if ctx:
+        locals = ctx.get_all().copy()
+    else:
+        locals = {}
+
+    local_overrides = {}
+
+    for name, value in iteritems(real_locals):
+        if not name.startswith('l_') or value is missing:
+            continue
+        try:
+            _, depth, name = name.split('_', 2)
+            depth = int(depth)
+        except ValueError:
+            continue
+        cur_depth = local_overrides.get(name, (-1,))[0]
+        if cur_depth < depth:
+            local_overrides[name] = (depth, value)
+
+    for name, (_, value) in iteritems(local_overrides):
+        if value is missing:
+            locals.pop(name, None)
+        else:
+            locals[name] = value
+
+    return locals
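+
+# Illustrative note (not part of the vendored upstream source): compiled
+# templates keep their variables in Python locals named `l_<depth>_<name>`.
+# Roughly, a frame containing `l_0_item = missing` and `l_1_item = 42`
+# resolves to {'item': 42} here, because the deepest binding per name wins and
+# names whose winning value is `missing` are dropped again.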
+
+
+def fake_exc_info(exc_info, filename, lineno):
+    """Helper for `translate_exception`."""
+    exc_type, exc_value, tb = exc_info
+
+    # figure the real context out
+    if tb is not None:
+        locals = get_jinja_locals(tb.tb_frame.f_locals)
+
+        # if there is a local called __jinja_exception__, we get
+        # rid of it to not break the debug functionality.
+        locals.pop('__jinja_exception__', None)
+    else:
+        locals = {}
+
+    # assemble the fake globals we need
+    globals = {
+        '__name__':             filename,
+        '__file__':             filename,
+        '__jinja_exception__':  exc_info[:2],
+
+        # we don't want to keep the reference to the template around
+        # to not cause circular dependencies, but we mark it as Jinja
+        # frame for the ProcessedTraceback
+        '__jinja_template__':   None
+    }
+
+    # and fake the exception
+    code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
+
+    # if it's possible, change the name of the code.  This won't work
+    # on some python environments such as google appengine
+    try:
+        if tb is None:
+            location = 'template'
+        else:
+            function = tb.tb_frame.f_code.co_name
+            if function == 'root':
+                location = 'top-level template code'
+            elif function.startswith('block_'):
+                location = 'block "%s"' % function[6:]
+            else:
+                location = 'template'
+
+        if PY2:
+            code = CodeType(0, code.co_nlocals, code.co_stacksize,
+                            code.co_flags, code.co_code, code.co_consts,
+                            code.co_names, code.co_varnames, filename,
+                            location, code.co_firstlineno,
+                            code.co_lnotab, (), ())
+        else:
+            code = CodeType(0, code.co_kwonlyargcount,
+                            code.co_nlocals, code.co_stacksize,
+                            code.co_flags, code.co_code, code.co_consts,
+                            code.co_names, code.co_varnames, filename,
+                            location, code.co_firstlineno,
+                            code.co_lnotab, (), ())
+    except Exception as e:
+        pass
+
+    # execute the code and catch the new traceback
+    try:
+        exec(code, globals, locals)
+    except:
+        exc_info = sys.exc_info()
+        new_tb = exc_info[2].tb_next
+
+    # return without this frame
+    return exc_info[:2] + (new_tb,)
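+
+# Illustrative note (not part of the vendored upstream source): the newline
+# padding above is what moves the error to the template's line.  For lineno=3
+# the compiled source is effectively "\n\n" + raise_helper, so the re-raised
+# exception appears to originate from line 3 of `filename`, i.e. from the
+# template rather than from the generated Python code.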
+
+
+def _init_ugly_crap():
+    """This function implements a few ugly things so that we can patch the
+    traceback objects.  The function returned allows resetting `tb_next` on
+    any Python traceback object.  Do not attempt to use this on non-CPython
+    interpreters.
+    """
+    import ctypes
+    from types import TracebackType
+
+    if PY2:
+        # figure out size of _Py_ssize_t for Python 2:
+        if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
+            _Py_ssize_t = ctypes.c_int64
+        else:
+            _Py_ssize_t = ctypes.c_int
+    else:
+        # platform ssize_t on Python 3
+        _Py_ssize_t = ctypes.c_ssize_t
+
+    # regular python
+    class _PyObject(ctypes.Structure):
+        pass
+    _PyObject._fields_ = [
+        ('ob_refcnt', _Py_ssize_t),
+        ('ob_type', ctypes.POINTER(_PyObject))
+    ]
+
+    # python with trace
+    if hasattr(sys, 'getobjects'):
+        class _PyObject(ctypes.Structure):
+            pass
+        _PyObject._fields_ = [
+            ('_ob_next', ctypes.POINTER(_PyObject)),
+            ('_ob_prev', ctypes.POINTER(_PyObject)),
+            ('ob_refcnt', _Py_ssize_t),
+            ('ob_type', ctypes.POINTER(_PyObject))
+        ]
+
+    class _Traceback(_PyObject):
+        pass
+    _Traceback._fields_ = [
+        ('tb_next', ctypes.POINTER(_Traceback)),
+        ('tb_frame', ctypes.POINTER(_PyObject)),
+        ('tb_lasti', ctypes.c_int),
+        ('tb_lineno', ctypes.c_int)
+    ]
+
+    def tb_set_next(tb, next):
+        """Set the tb_next attribute of a traceback object."""
+        if not (isinstance(tb, TracebackType) and
+                (next is None or isinstance(next, TracebackType))):
+            raise TypeError('tb_set_next arguments must be traceback objects')
+        obj = _Traceback.from_address(id(tb))
+        if tb.tb_next is not None:
+            old = _Traceback.from_address(id(tb.tb_next))
+            old.ob_refcnt -= 1
+        if next is None:
+            obj.tb_next = ctypes.POINTER(_Traceback)()
+        else:
+            next = _Traceback.from_address(id(next))
+            next.ob_refcnt += 1
+            obj.tb_next = ctypes.pointer(next)
+
+    return tb_set_next
+
+
+# try to get a tb_set_next implementation if we don't have transparent
+# proxies.
+tb_set_next = None
+if tproxy is None:
+    try:
+        tb_set_next = _init_ugly_crap()
+    except:
+        pass
+    del _init_ugly_crap
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/defaults.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/defaults.py
new file mode 100644
index 00000000..d67351b8
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/defaults.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2.defaults
+    ~~~~~~~~~~~~~~~
+
+    Jinja default filters and tags.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+from ._compat import range_type
+from .utils import generate_lorem_ipsum, Cycler, Joiner, Namespace
+
+
+# defaults for the parser / lexer
+BLOCK_START_STRING = '{%'
+BLOCK_END_STRING = '%}'
+VARIABLE_START_STRING = '{{'
+VARIABLE_END_STRING = '}}'
+COMMENT_START_STRING = '{#'
+COMMENT_END_STRING = '#}'
+LINE_STATEMENT_PREFIX = None
+LINE_COMMENT_PREFIX = None
+TRIM_BLOCKS = False
+LSTRIP_BLOCKS = False
+NEWLINE_SEQUENCE = '\n'
+KEEP_TRAILING_NEWLINE = False
+
+
+# default filters, tests and namespace
+from .filters import FILTERS as DEFAULT_FILTERS
+from .tests import TESTS as DEFAULT_TESTS
+DEFAULT_NAMESPACE = {
+    'range':        range_type,
+    'dict':         dict,
+    'lipsum':       generate_lorem_ipsum,
+    'cycler':       Cycler,
+    'joiner':       Joiner,
+    'namespace':    Namespace
+}
+
+
+# default policies
+DEFAULT_POLICIES = {
+    'compiler.ascii_str':   True,
+    'urlize.rel':           'noopener',
+    'urlize.target':        None,
+    'truncate.leeway':      5,
+    'json.dumps_function':  None,
+    'json.dumps_kwargs':    {'sort_keys': True},
+    'ext.i18n.trimmed':     False,
+}
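+
+# Illustrative note (not part of the vendored upstream source): these policies
+# are meant to be tweaked per environment after construction, e.g. (assuming
+# an already created `env`):
+#
+#     env.policies['truncate.leeway'] = 0
+#     env.policies['json.dumps_kwargs'] = {'sort_keys': True, 'indent': 2}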
+
+
+# export all constants
+__all__ = tuple(x for x in locals().keys() if x.isupper())
diff --git a/SublimeText/mdpopups/st3/mdpopups/jinja2/environment.py b/SublimeText/mdpopups/st3/mdpopups/jinja2/environment.py
new file mode 100644
index 00000000..8c9d9b2d
--- /dev/null
+++ b/SublimeText/mdpopups/st3/mdpopups/jinja2/environment.py
@@ -0,0 +1,1276 @@
+# -*- coding: utf-8 -*-
+"""
+    jinja2.environment
+    ~~~~~~~~~~~~~~~~~~
+
+    Provides a class that holds runtime and parsing time options.
+
+    :copyright: (c) 2017 by the Jinja Team.
+    :license: BSD, see LICENSE for more details.
+"""
+import os
+import sys
+import weakref
+from functools import reduce, partial
+from . import nodes
+from .defaults import BLOCK_START_STRING, \
+     BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
+     COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
+     LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
+     DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \
+     DEFAULT_POLICIES, KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
+from .lexer import get_lexer, TokenStream
+from .parser import Parser
+from .nodes import EvalContext
+from .compiler import generate, CodeGenerator
+from .runtime import Undefined, new_context, Context
+from .exceptions import TemplateSyntaxError, TemplateNotFound, \
+     TemplatesNotFound, TemplateRuntimeError
+from .utils import import_string, LRUCache, Markup, missing, \
+     concat, consume, internalcode, have_async_gen
+from ._compat import imap, ifilter, string_types, iteritems, \
+     text_type, reraise, implements_iterator, implements_to_string, \
+     encode_filename, PY2, PYPY
+
+
+# for direct template usage we have up to ten living environments
+_spontaneous_environments = LRUCache(10)
+
+# the function to create jinja traceback objects.  This is dynamically
+# imported on the first exception in the exception handler.
+_make_traceback = None
+
+
+def get_spontaneous_environment(*args):
+    """Return a new spontaneous environment.  A spontaneous environment is an
+    unnamed and inaccessible (in theory) environment that is used for
+    templates generated from a string and not from the file system.
+    """
+    try:
+        env = _spontaneous_environments.get(args)
+    except TypeError:
+        return Environment(*args)
+    if env is not None:
+        return env
+    _spontaneous_environments[args] = env = Environment(*args)
+    env.shared = True
+    return env
+
+
+def create_cache(size):
+    """Return the cache class for the given size."""
+    if size == 0:
+        return None
+    if size < 0:
+        return {}
+    return LRUCache(size)
+
+
+def copy_cache(cache):
+    """Create an empty copy of the given cache."""
+    if cache is None:
+        return None
+    elif type(cache) is dict:
+        return {}
+    return LRUCache(cache.capacity)
+
+
+def load_extensions(environment, extensions):
+    """Load the extensions from the list and bind it to the environment.
+    Returns a dict of instantiated extensions.
+    """
+    result = {}
+    for extension in extensions:
+        if isinstance(extension, string_types):
+            extension = import_string(extension)
+        result[extension.identifier] = extension(environment)
+    return result
+
+
+def fail_for_missing_callable(string, name):
+    msg = string % name
+    if isinstance(name, Undefined):
+        try:
+            name._fail_with_undefined_error()
+        except Exception as e:
+            msg = '%s (%s; did you forget to quote the callable name?)' % (msg, e)
+    raise TemplateRuntimeError(msg)
+
+
+def _environment_sanity_check(environment):
+    """Perform a sanity check on the environment."""
+    assert issubclass(environment.undefined, Undefined), 'undefined must ' \
+        'be a subclass of Undefined because filters depend on it.'
+    assert environment.block_start_string != \
+        environment.variable_start_string != \
+        environment.comment_start_string, 'block, variable and comment ' \
+        'start strings must be different'
+    assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
+        'newline_sequence set to unknown line ending string.'
+    return environment
+
+
+class Environment(object):
+    r"""The core component of Jinja is the `Environment`.  It contains
+    important shared variables like configuration, filters, tests,
+    globals and others.  Instances of this class may be modified if
+    they are not shared and if no template was loaded so far.
+    Modifications on environments after the first template was loaded
+    will lead to surprising effects and undefined behavior.
+
+    Here are the possible initialization parameters:
+
+        `block_start_string`
+            The string marking the beginning of a block.  Defaults to ``'{%'``.
+
+        `block_end_string`
+            The string marking the end of a block.  Defaults to ``'%}'``.
+
+        `variable_start_string`
+            The string marking the beginning of a print statement.
+            Defaults to ``'{{'``.
+
+        `variable_end_string`
+            The string marking the end of a print statement.  Defaults to
+            ``'}}'``.
+
+        `comment_start_string`
+            The string marking the beginning of a comment.  Defaults to ``'{#'``.
+
+        `comment_end_string`
+            The string marking the end of a comment.  Defaults to ``'#}'``.
+
+        `line_statement_prefix`
+            If given and a string, this will be used as prefix for line based
+            statements.  See also :ref:`line-statements`.
+
+        `line_comment_prefix`
+            If given and a string, this will be used as prefix for line based
+            comments.  See also :ref:`line-statements`.
+
+            .. versionadded:: 2.2
+
+        `trim_blocks`
+            If this is set to ``True`` the first newline after a block is
+            removed (block, not variable tag!).  Defaults to `False`.
+
+        `lstrip_blocks`
+            If this is set to ``True`` leading spaces and tabs are stripped
+            from the start of a line to a block.  Defaults to `False`.
+
+        `newline_sequence`
+            The sequence that starts a newline.  Must be one of ``'\r'``,
+            ``'\n'`` or ``'\r\n'``.  The default is ``'\n'`` which is a
+            useful default for Linux and OS X systems as well as web
+            applications.
+
+        `keep_trailing_newline`
+            Preserve the trailing newline when rendering templates.
+            The default is ``False``, which causes a single newline,
+            if present, to be stripped from the end of the template.
+
+            .. versionadded:: 2.7
+
+        `extensions`
+            List of Jinja extensions to use.  This can either be import paths
+            as strings or extension classes.  For more information have a
+            look at :ref:`the extensions documentation <jinja-extensions>`.
+
+        `optimized`
+            should the optimizer be enabled?  Default is ``True``.
+
+        `undefined`
+            :class:`Undefined` or a subclass of it that is used to represent
+            undefined values in the template.
+
+        `finalize`
+            A callable that can be used to process the result of a variable
+            expression before it is output.  For example one can convert
+            ``None`` implicitly into an empty string here.
+
+        `autoescape`
+            If set to ``True`` the XML/HTML autoescaping feature is enabled by
+            default.  For more details about autoescaping see
+            :class:`~jinja2.utils.Markup`.  As of Jinja 2.4 this can also
+            be a callable that is passed the template name and has to
+            return ``True`` or ``False`` depending on whether autoescape should be
+            enabled by default.
+
+            .. versionchanged:: 2.4
+               `autoescape` can now be a function
+
+        `loader`
+            The template loader for this environment.
+
+        `cache_size`
+            The size of the cache.  Per default this is ``400`` which means
+            that if more than 400 templates are loaded the loader will clean
+            out the least recently used template.  If the cache size is set to
+            ``0`` templates are recompiled all the time, if the cache size is
+            ``-1`` the cache will not be cleaned.
+
+            .. versionchanged:: 2.8
+               The cache size was increased to 400 from a low 50.
+
+        `auto_reload`
+            Some loaders load templates from locations where the template
+            sources may change (i.e. file system or database).  If
+            ``auto_reload`` is set to ``True`` (default) every time a template is
+            requested the loader checks if the source changed and if yes, it
+            will reload the template.  For higher performance it's possible to
+            disable that.
+
+        `bytecode_cache`
+            If set to a bytecode cache object, this object will provide a
+            cache for the internal Jinja bytecode so that templates don't
+            have to be parsed if they were not changed.
+
+            See :ref:`bytecode-cache` for more information.
+
+        `enable_async`
+            If set to true this enables async template execution which allows
+            you to take advantage of newer Python features.  This requires
+            Python 3.6 or later.
+    """
+
+    #: if this environment is sandboxed.  Modifying this variable won't make
+    #: the environment sandboxed though.  For a real sandboxed environment
+    #: have a look at jinja2.sandbox.  This flag alone controls the code
+    #: generation by the compiler.
+    sandboxed = False
+
+    #: True if the environment is just an overlay
+    overlayed = False
+
+    #: the environment this environment is linked to if it is an overlay
+    linked_to = None
+
+    #: shared environments have this set to `True`.  A shared environment
+    #: must not be modified
+    shared = False
+
+    #: these are currently EXPERIMENTAL undocumented features.
+    exception_handler = None
+    exception_formatter = None
+
+    #: the class that is used for code generation.  See
+    #: :class:`~jinja2.compiler.CodeGenerator` for more information.
+    code_generator_class = CodeGenerator
+
+    #: the context class that is used for templates.  See
+    #: :class:`~jinja2.runtime.Context` for more information.
+    context_class = Context
+
+    def __init__(self,
+                 block_start_string=BLOCK_START_STRING,
+                 block_end_string=BLOCK_END_STRING,
+                 variable_start_string=VARIABLE_START_STRING,
+                 variable_end_string=VARIABLE_END_STRING,
+                 comment_start_string=COMMENT_START_STRING,
+                 comment_end_string=COMMENT_END_STRING,
+                 line_statement_prefix=LINE_STATEMENT_PREFIX,
+                 line_comment_prefix=LINE_COMMENT_PREFIX,
+                 trim_blocks=TRIM_BLOCKS,
+                 lstrip_blocks=LSTRIP_BLOCKS,
+                 newline_sequence=NEWLINE_SEQUENCE,
+                 keep_trailing_newline=KEEP_TRAILING_NEWLINE,
+                 extensions=(),
+                 optimized=True,
+                 undefined=Undefined,
+                 finalize=None,
+                 autoescape=False,
+                 loader=None,
+                 cache_size=400,
+                 auto_reload=True,
+                 bytecode_cache=None,
+                 enable_async=False):
+        # !!Important notice!!
+        #   The constructor accepts quite a few arguments that should be
+        #   passed by keyword rather than position.  However it's important to
+        #   not change the order of arguments because it's used at least
+        #   internally in those cases:
+        #       -   spontaneous environments (i18n extension and Template)
+        #       -   unittests
+        #   If parameter changes are required only add parameters at the end
+        #   and don't change the existing arguments (or their defaults!).
+
+        # lexer / parser information
+        self.block_start_string = block_start_string
+        self.block_end_string = block_end_string
+        self.variable_start_string = variable_start_string
+        self.variable_end_string = variable_end_string
+        self.comment_start_string = comment_start_string
+        self.comment_end_string = comment_end_string
+        self.line_statement_prefix = line_statement_prefix
+        self.line_comment_prefix = line_comment_prefix
+        self.trim_blocks = trim_blocks
+        self.lstrip_blocks = lstrip_blocks
+        self.newline_sequence = newline_sequence
+        self.keep_trailing_newline = keep_trailing_newline
+
+        # runtime information
+        self.undefined = undefined
+        self.optimized = optimized
+        self.finalize = finalize
+        self.autoescape = autoescape
+
+        # defaults
+        self.filters = DEFAULT_FILTERS.copy()
+        self.tests = DEFAULT_TESTS.copy()
+        self.globals = DEFAULT_NAMESPACE.copy()
+
+        # set the loader provided
+        self.loader = loader
+        self.cache = create_cache(cache_size)
+        self.bytecode_cache = bytecode_cache
+        self.auto_reload = auto_reload
+
+        # configurable policies
+        self.policies = DEFAULT_POLICIES.copy()
+
+        # load extensions
+        self.extensions = load_extensions(self, extensions)
+
+        self.enable_async = enable_async
+        self.is_async = self.enable_async and have_async_gen
+
+        _environment_sanity_check(self)
+
+    def add_extension(self, extension):
+        """Adds an extension after the environment was created.
+
+        .. versionadded:: 2.5
+        """
+        self.extensions.update(load_extensions(self, [extension]))
+
+    def extend(self, **attributes):
+        """Add the items to the instance of the environment if they do not exist
+        yet.  This is used by :ref:`extensions <writing-extensions>` to register
+        callbacks and configuration values without breaking inheritance.
+        """
+        for key, value in iteritems(attributes):
+            if not hasattr(self, key):
+                setattr(self, key, value)
+
+    def overlay(self, block_start_string=missing, block_end_string=missing,
+                variable_start_string=missing, variable_end_string=missing,
+                comment_start_string=missing, comment_end_string=missing,
+                line_statement_prefix=missing, line_comment_prefix=missing,
+                trim_blocks=missing, lstrip_blocks=missing,
+                extensions=missing, optimized=missing,
+                undefined=missing, finalize=missing, autoescape=missing,
+                loader=missing, cache_size=missing, auto_reload=missing,
+                bytecode_cache=missing):
+        """Create a new overlay environment that shares all the data with the
+        current environment except for cache and the overridden attributes.
+        Extensions cannot be removed for an overlayed environment.  An overlayed
+        environment automatically gets all the extensions of the environment it
+        is linked to plus optional extra extensions.
+
+        Creating overlays should happen after the initial environment was set
+        up completely.  Not all attributes are truly linked, some are just
+        copied over so modifications on the original environment may not shine
+        through.
+        """
+        args = dict(locals())
+        del args['self'], args['cache_size'], args['extensions']
+
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.overlayed = True
+        rv.linked_to = self
+
+        for key, value in iteritems(args):
+            if value is not missing:
+                setattr(rv, key, value)
+
+        if cache_size is not missing:
+            rv.cache = create_cache(cache_size)
+        else:
+            rv.cache = copy_cache(self.cache)
+
+        rv.extensions = {}
+        for key, value in iteritems(self.extensions):
+            rv.extensions[key] = value.bind(rv)
+        if extensions is not missing:
+            rv.extensions.update(load_extensions(rv, extensions))
+
+        return _environment_sanity_check(rv)
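+
+    # Illustrative note (not part of the vendored upstream source): a rough
+    # sketch of an overlay, assuming an existing `env`.  The overlay shares
+    # filters, tests, globals and loader with `env` but gets its own cache
+    # and the overridden settings:
+    #
+    #     text_env = env.overlay(autoescape=False, trim_blocks=False)
+    #     assert text_env.linked_to is env and text_env.overlayed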
+
+    lexer = property(get_lexer, doc="The lexer for this environment.")
+
+    def iter_extensions(self):
+        """Iterates over the extensions by priority."""
+        return iter(sorted(self.extensions.values(),
+                           key=lambda x: x.priority))
+
+    def getitem(self, obj, argument):
+        """Get an item or attribute of an object but prefer the item."""
+        try:
+            return obj[argument]
+        except (AttributeError, TypeError, LookupError):
+            if isinstance(argument, string_types):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        return getattr(obj, attr)
+                    except AttributeError:
+                        pass
+            return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj, attribute):
+        """Get an item or attribute of an object but prefer the attribute.
+        Unlike :meth:`getitem` the attribute *must* be a bytestring.
+        """
+        try:
+            return getattr(obj, attribute)
+        except AttributeError:
+            pass
+        try:
+            return obj[attribute]
+        except (TypeError, LookupError, AttributeError):
+            return self.undefined(obj=obj, name=attribute)
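+
+    # Illustrative note (not part of the vendored upstream source): the two
+    # helpers above implement the item-versus-attribute lookup order used for
+    # `foo['bar']` and `foo.bar`.  Roughly, with d = {'items': 1}:
+    #
+    #     env.getitem(d, 'items')   # -> 1, the dict entry wins
+    #     env.getattr(d, 'items')   # -> the bound dict.items method wins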
+
+    def call_filter(self, name, value, args=None, kwargs=None,
+                    context=None, eval_ctx=None):
+        """Invokes a filter on a value the same way the compiler does it.
+
+        Note that on Python 3 this might return a coroutine in case the
+        filter is running from an environment in async mode and the filter
+        supports async execution.  It's your responsibility to await this
+        if needed.
+
+        .. versionadded:: 2.7
+        """
+        func = self.filters.get(name)
+        if func is None:
+            fail_for_missing_callable('no filter named %r', name)
+        args = [value] + list(args or ())
+        if getattr(func, 'contextfilter', False):
+            if context is None:
+                raise TemplateRuntimeError('Attempted to invoke context '
+                                           'filter without context')
+            args.insert(0, context)
+        elif getattr(func, 'evalcontextfilter', False):
+            if eval_ctx is None:
+                if context is not None:
+                    eval_ctx = context.eval_ctx
+                else:
+                    eval_ctx = EvalContext(self)
+            args.insert(0, eval_ctx)
+        elif getattr(func, 'environmentfilter', False):
+            args.insert(0, self)
+        return func(*args, **(kwargs or {}))
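+
+    # Illustrative note (not part of the vendored upstream source): a minimal
+    # sketch of invoking a built-in filter programmatically, equivalent to
+    # `{{ "hello world"|title }}` inside a template:
+    #
+    #     env.call_filter('title', 'hello world')   # -> 'Hello World'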
+
+    def call_test(self, name, value, args=None, kwargs=None):
+        """Invokes a test on a value the same way the compiler does it.
+
+        .. versionadded:: 2.7
+        """
+        func = self.tests.get(name)
+        if func is None:
+            fail_for_missing_callable('no test named %r', name)
+        return func(value, *(args or ()), **(kwargs or {}))
+
+    @internalcode
+    def parse(self, source, name=None, filename=None):
+        """Parse the sourcecode and return the abstract syntax tree.  This
+        tree of nodes is used by the compiler to convert the template into
+        executable source- or bytecode.  This is useful for debugging or to
+        extract information from templates.
+
+        If you are :ref:`developing Jinja2 extensions <writing-extensions>`
+        this gives you a good overview of the node tree generated.
+        """
+        try:
+            return self._parse(source, name, filename)
+        except TemplateSyntaxError:
+            exc_info = sys.exc_info()
+        self.handle_exception(exc_info, source_hint=source)
+
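+    # Illustrative usage sketch (not part of upstream Jinja2): the AST
+    # returned by `parse` can be inspected with `jinja2.meta`, for example to
+    # discover which variables a template expects:
+    #
+    #     from jinja2 import Environment, meta
+    #     env = Environment()
+    #     ast = env.parse('Hello {{ name }}!')
+    #     meta.find_undeclared_variables(ast)   # -> {'name'}
+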
+    def _parse(self, source, name, filename):
+        """Internal parsing function used by `parse` and `compile`."""
+        return Parser(self, source, name, encode_filename(filename)).parse()
+
+    def lex(self, source, name=None, filename=None):
+        """Lex the given sourcecode and return a generator that yields
+        tokens as tuples in the form ``(lineno, token_type, value)``.
+        This can be useful for :ref:`extension development <writing-extensions>`
+        and debugging templates.
+
+        This does not perform preprocessing.  If you want the preprocessing
+        of the extensions to be applied you have to run the source through
+        the :meth:`preprocess` method first.
+        """
+        source = text_type(source)
+        try:
+            return self.lexer.tokeniter(source, name, filename)
+        except TemplateSyntaxError:
+            exc_info = sys.exc_info()
+        self.handle_exception(exc_info, source_hint=source)
+
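+    # Illustrative usage sketch (not part of upstream Jinja2): `lex` yields
+    # raw (lineno, token_type, value) tuples, which is handy for building
+    # simple template linters or syntax highlighters:
+    #
+    #     env = Environment()
+    #     for lineno, token_type, value in env.lex('Hi {{ name }}'):
+    #         print(lineno, token_type, repr(value))
+    #     # e.g. 1 data 'Hi ', then 1 variable_begin '{{', then 1 name 'name', ...
+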
+    def preprocess(self, source, name=None, filename=None):
+        """Preprocesses the source with all extensions.  This is automatically
+        called for all parsing and compiling methods but *not* for :meth:`lex`
+        because there you usually only want the actual source tokenized.
+        """
+        return reduce(lambda s, e: e.preprocess(s, name, filename),
+                      self.iter_extensions(), text_type(source))
+
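+    # Illustrative usage sketch (not part of upstream Jinja2): with no
+    # extensions installed `preprocess` simply returns the source as text, so
+    # pairing it with `lex` (for some template string `source`) roughly
+    # mirrors what `parse` and `compile` see, token-stream filters aside:
+    #
+    #     env.lex(env.preprocess(source))
+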
+    def _tokenize(self, source, name, filename=None, state=None):
+        """Called by the parser to do the preprocessing and filtering
+        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
+        """
+        source = self.preprocess(source, name, filename)
+        stream = self.lexer.tokenize(source, name, filename, state)
+        for ext in self.iter_extensions():
+            stream = ext.filter_stream(stream)
+            if not isinstance(stream, TokenStream):
+                stream = TokenStream(stream, name, filename)
+        return stream
+
+    def _generate(self, source, name, filename, defer_init=False):
+        """Internal hook that can be overridden to hook a different generate
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return generate(source, self, name, filename, defer_init=defer_init,
+                        optimized=self.optimized)
+
+    def _compile(self, source, filename):
+        """Internal hook that can be overridden to hook a different compile
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return compile(source, filename, 'exec')
+
+    @internalcode
+    def compile(self, source, name=None, filename=None, raw=False,
+                defer_init=False):
+        """Compile a node or template source code.  The `name` parameter is
+        the load name of the template after it was joined using
+        :meth:`join_path` if necessary, not the filename on the file system.
+        The `filename` parameter is the estimated filename of the template on
+        the file system.  If the template came from a database or memory this
+        can be omitted.
+
+        The return value of this method is a Python code object.  If the `raw`
+        parameter is `True` the return value will be a string of Python source
+        code equivalent to the bytecode that would otherwise be returned.  This
+        method is mainly used internally.
+
+        `defer_init` is used internally to aid the module code generator.  It
+        allows the generated code to be imported without the global environment
+        variable being set.
+
+        .. versionadded:: 2.4
+           `defer_init` parameter added.
+        """
+        source_hint = None
+        try:
+            if isinstance(source, string_types):
+                source_hint = source
+                source = self._parse(source, name, filename)
+            source = self._generate(source, name, filename,
+                                    defer_init=defer_init)
+            if raw:
+                return source
+            if filename is None:
+                filename = '