diff --git a/news/6118.vendor.rst b/news/6118.vendor.rst new file mode 100644 index 0000000000..1078d34286 --- /dev/null +++ b/news/6118.vendor.rst @@ -0,0 +1,8 @@ +Spring 2024 Vendoring includes: +* ``click-didyoumean==0.3.1`` +* ``pexpect==4.9.0`` +* ``pipdeptree==2.16.2`` +* ``python-dotenv==1.0.1`` +* ``ruamel.yaml==0.18.6`` +* ``shellingham==1.5.4`` +* ``tomlkit==0.12.4`` diff --git a/pipenv/vendor/dotenv/main.py b/pipenv/vendor/dotenv/main.py index f40c20ea20..7bc5428572 100644 --- a/pipenv/vendor/dotenv/main.py +++ b/pipenv/vendor/dotenv/main.py @@ -1,6 +1,7 @@ import io import logging import os +import pathlib import shutil import sys import tempfile @@ -131,17 +132,21 @@ def rewrite( path: StrPath, encoding: Optional[str], ) -> Iterator[Tuple[IO[str], IO[str]]]: - if not os.path.isfile(path): - with open(path, mode="w", encoding=encoding) as source: - source.write("") + pathlib.Path(path).touch() + with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest: + error = None try: with open(path, encoding=encoding) as source: yield (source, dest) - except BaseException: - os.unlink(dest.name) - raise - shutil.move(dest.name, path) + except BaseException as err: + error = err + + if error is None: + shutil.move(dest.name, path) + else: + os.unlink(dest.name) + raise error from None def set_key( @@ -280,7 +285,10 @@ def find_dotenv( def _is_interactive(): """ Decide whether this is running in a REPL or IPython notebook """ - main = __import__('__main__', None, None, fromlist=['__file__']) + try: + main = __import__('__main__', None, None, fromlist=['__file__']) + except ModuleNotFoundError: + return False return not hasattr(main, '__file__') if usecwd or _is_interactive() or getattr(sys, 'frozen', False): @@ -291,7 +299,9 @@ def _is_interactive(): frame = sys._getframe() current_file = __file__ - while frame.f_code.co_filename == current_file: + while frame.f_code.co_filename == current_file or not os.path.exists( 
frame.f_code.co_filename + ): assert frame.f_back is not None frame = frame.f_back frame_filename = frame.f_code.co_filename diff --git a/pipenv/vendor/dotenv/version.py b/pipenv/vendor/dotenv/version.py index 5becc17c04..5c4105cd39 100644 --- a/pipenv/vendor/dotenv/version.py +++ b/pipenv/vendor/dotenv/version.py @@ -1 +1 @@ -__version__ = "1.0.0" +__version__ = "1.0.1" diff --git a/pipenv/vendor/pexpect/__init__.py b/pipenv/vendor/pexpect/__init__.py index 7e30453787..86254ee720 100644 --- a/pipenv/vendor/pexpect/__init__.py +++ b/pipenv/vendor/pexpect/__init__.py @@ -1,6 +1,6 @@ '''Pexpect is a Python module for spawning child applications and controlling them automatically. Pexpect can be used for automating interactive applications -such as ssh, ftp, passwd, telnet, etc. It can be used to a automate setup +such as ssh, ftp, passwd, telnet, etc. It can be used to automate setup scripts for duplicating software package installations on different servers. It can be used for automated software testing. Pexpect is in the spirit of Don Libes' Expect, but Pexpect is pure Python. Other Expect-like modules for Python @@ -29,6 +29,12 @@ child.expect('Password:') child.sendline(mypassword) +Context manager can be used for the spawn() function:: + + with pexpect.spawn('scp foo user@example.com:.') as child: + child.expect('Password:') + child.sendline(mypassword) + This works even for commands that ask for passwords or other input outside of the normal stdio streams. For example, ssh reads input directly from the TTY device which bypasses stdin. 
@@ -75,7 +81,7 @@ from .pty_spawn import spawn, spawnu from .run import run, runu -__version__ = '4.8.0' +__version__ = '4.9.0' __revision__ = '' __all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'spawnu', 'run', 'runu', 'which', 'split_command_line', '__version__', '__revision__'] diff --git a/pipenv/vendor/pexpect/_async.py b/pipenv/vendor/pexpect/_async.py index 91a87d0e24..417159ddd4 100644 --- a/pipenv/vendor/pexpect/_async.py +++ b/pipenv/vendor/pexpect/_async.py @@ -1,103 +1,28 @@ -import asyncio -import errno -import signal - -from pipenv.vendor.pexpect import EOF - -@asyncio.coroutine -def expect_async(expecter, timeout=None): - # First process data that was previously read - if it maches, we don't need - # async stuff. - idx = expecter.existing_data() - if idx is not None: - return idx - if not expecter.spawn.async_pw_transport: - pw = PatternWaiter() - pw.set_expecter(expecter) - transport, pw = yield from asyncio.get_event_loop()\ - .connect_read_pipe(lambda: pw, expecter.spawn) - expecter.spawn.async_pw_transport = pw, transport - else: - pw, transport = expecter.spawn.async_pw_transport - pw.set_expecter(expecter) - transport.resume_reading() - try: - return (yield from asyncio.wait_for(pw.fut, timeout)) - except asyncio.TimeoutError as e: - transport.pause_reading() - return expecter.timeout(e) - -@asyncio.coroutine -def repl_run_command_async(repl, cmdlines, timeout=-1): - res = [] - repl.child.sendline(cmdlines[0]) - for line in cmdlines[1:]: - yield from repl._expect_prompt(timeout=timeout, async_=True) - res.append(repl.child.before) - repl.child.sendline(line) - - # Command was fully submitted, now wait for the next prompt - prompt_idx = yield from repl._expect_prompt(timeout=timeout, async_=True) - if prompt_idx == 1: - # We got the continuation prompt - command was incomplete - repl.child.kill(signal.SIGINT) - yield from repl._expect_prompt(timeout=1, async_=True) - raise ValueError("Continuation prompt found - input was incomplete:") 
- return u''.join(res + [repl.child.before]) - -class PatternWaiter(asyncio.Protocol): - transport = None - - def set_expecter(self, expecter): - self.expecter = expecter - self.fut = asyncio.Future() - - def found(self, result): - if not self.fut.done(): - self.fut.set_result(result) - self.transport.pause_reading() - - def error(self, exc): - if not self.fut.done(): - self.fut.set_exception(exc) - self.transport.pause_reading() - - def connection_made(self, transport): - self.transport = transport - - def data_received(self, data): - spawn = self.expecter.spawn - s = spawn._decoder.decode(data) - spawn._log(s, 'read') - - if self.fut.done(): - spawn._before.write(s) - spawn._buffer.write(s) - return - - try: - index = self.expecter.new_data(s) - if index is not None: - # Found a match - self.found(index) - except Exception as e: - self.expecter.errored() - self.error(e) - - def eof_received(self): - # N.B. If this gets called, async will close the pipe (the spawn object) - # for us - try: - self.expecter.spawn.flag_eof = True - index = self.expecter.eof() - except EOF as e: - self.error(e) - else: - self.found(index) - - def connection_lost(self, exc): - if isinstance(exc, OSError) and exc.errno == errno.EIO: - # We may get here without eof_received being called, e.g on Linux - self.eof_received() - elif exc is not None: - self.error(exc) +"""Facade that provides coroutines implementation pertinent to running Py version. + +Python 3.5 introduced the async def/await syntax keyword. +With later versions coroutines and methods to get the running asyncio loop are +being deprecated, not supported anymore. + +For Python versions later than 3.6, coroutines and objects that are defined via +``async def``/``await`` keywords are imported. + +Here the code is just imported, to provide the same interface to older code. 
+""" +# pylint: disable=unused-import +# flake8: noqa: F401 +from sys import version_info as py_version_info + +# this assumes async def/await are more stable +if py_version_info >= (3, 6): + from pipenv.vendor.pexpect._async_w_await import ( + PatternWaiter, + expect_async, + repl_run_command_async, + ) +else: + from pipenv.vendor.pexpect._async_pre_await import ( + PatternWaiter, + expect_async, + repl_run_command_async, + ) diff --git a/pipenv/vendor/pexpect/_async_pre_await.py b/pipenv/vendor/pexpect/_async_pre_await.py new file mode 100644 index 0000000000..5a3e926555 --- /dev/null +++ b/pipenv/vendor/pexpect/_async_pre_await.py @@ -0,0 +1,111 @@ +"""Implementation of coroutines without using ``async def``/``await`` keywords. + +``@asyncio.coroutine`` and ``yield from`` are used here instead. +""" +import asyncio +import errno +import signal + +from pipenv.vendor.pexpect import EOF + + +@asyncio.coroutine +def expect_async(expecter, timeout=None): + # First process data that was previously read - if it maches, we don't need + # async stuff. 
+ idx = expecter.existing_data() + if idx is not None: + return idx + if not expecter.spawn.async_pw_transport: + pw = PatternWaiter() + pw.set_expecter(expecter) + transport, pw = yield from asyncio.get_event_loop().connect_read_pipe( + lambda: pw, expecter.spawn + ) + expecter.spawn.async_pw_transport = pw, transport + else: + pw, transport = expecter.spawn.async_pw_transport + pw.set_expecter(expecter) + transport.resume_reading() + try: + return (yield from asyncio.wait_for(pw.fut, timeout)) + except asyncio.TimeoutError as e: + transport.pause_reading() + return expecter.timeout(e) + + +@asyncio.coroutine +def repl_run_command_async(repl, cmdlines, timeout=-1): + res = [] + repl.child.sendline(cmdlines[0]) + for line in cmdlines[1:]: + yield from repl._expect_prompt(timeout=timeout, async_=True) + res.append(repl.child.before) + repl.child.sendline(line) + + # Command was fully submitted, now wait for the next prompt + prompt_idx = yield from repl._expect_prompt(timeout=timeout, async_=True) + if prompt_idx == 1: + # We got the continuation prompt - command was incomplete + repl.child.kill(signal.SIGINT) + yield from repl._expect_prompt(timeout=1, async_=True) + raise ValueError("Continuation prompt found - input was incomplete:") + return "".join(res + [repl.child.before]) + + +class PatternWaiter(asyncio.Protocol): + transport = None + + def set_expecter(self, expecter): + self.expecter = expecter + self.fut = asyncio.Future() + + def found(self, result): + if not self.fut.done(): + self.fut.set_result(result) + self.transport.pause_reading() + + def error(self, exc): + if not self.fut.done(): + self.fut.set_exception(exc) + self.transport.pause_reading() + + def connection_made(self, transport): + self.transport = transport + + def data_received(self, data): + spawn = self.expecter.spawn + s = spawn._decoder.decode(data) + spawn._log(s, "read") + + if self.fut.done(): + spawn._before.write(s) + spawn._buffer.write(s) + return + + try: + index = 
self.expecter.new_data(s) + if index is not None: + # Found a match + self.found(index) + except Exception as e: + self.expecter.errored() + self.error(e) + + def eof_received(self): + # N.B. If this gets called, async will close the pipe (the spawn object) + # for us + try: + self.expecter.spawn.flag_eof = True + index = self.expecter.eof() + except EOF as e: + self.error(e) + else: + self.found(index) + + def connection_lost(self, exc): + if isinstance(exc, OSError) and exc.errno == errno.EIO: + # We may get here without eof_received being called, e.g on Linux + self.eof_received() + elif exc is not None: + self.error(exc) diff --git a/pipenv/vendor/pexpect/_async_w_await.py b/pipenv/vendor/pexpect/_async_w_await.py new file mode 100644 index 0000000000..0adbe70f55 --- /dev/null +++ b/pipenv/vendor/pexpect/_async_w_await.py @@ -0,0 +1,118 @@ +"""Implementation of coroutines using ``async def``/``await`` keywords. + +These keywords replaced ``@asyncio.coroutine`` and ``yield from`` from +Python 3.5 onwards. +""" +import asyncio +import errno +import signal +from sys import version_info as py_version_info + +from pipenv.vendor.pexpect import EOF + +if py_version_info >= (3, 7): + # get_running_loop, new in 3.7, is preferred to get_event_loop + _loop_getter = asyncio.get_running_loop +else: + # Deprecation warning since 3.10 + _loop_getter = asyncio.get_event_loop + + +async def expect_async(expecter, timeout=None): + # First process data that was previously read - if it maches, we don't need + # async stuff. 
+ idx = expecter.existing_data() + if idx is not None: + return idx + if not expecter.spawn.async_pw_transport: + pattern_waiter = PatternWaiter() + pattern_waiter.set_expecter(expecter) + transport, pattern_waiter = await _loop_getter().connect_read_pipe( + lambda: pattern_waiter, expecter.spawn + ) + expecter.spawn.async_pw_transport = pattern_waiter, transport + else: + pattern_waiter, transport = expecter.spawn.async_pw_transport + pattern_waiter.set_expecter(expecter) + transport.resume_reading() + try: + return await asyncio.wait_for(pattern_waiter.fut, timeout) + except asyncio.TimeoutError as exc: + transport.pause_reading() + return expecter.timeout(exc) + + +async def repl_run_command_async(repl, cmdlines, timeout=-1): + res = [] + repl.child.sendline(cmdlines[0]) + for line in cmdlines[1:]: + await repl._expect_prompt(timeout=timeout, async_=True) + res.append(repl.child.before) + repl.child.sendline(line) + + # Command was fully submitted, now wait for the next prompt + prompt_idx = await repl._expect_prompt(timeout=timeout, async_=True) + if prompt_idx == 1: + # We got the continuation prompt - command was incomplete + repl.child.kill(signal.SIGINT) + await repl._expect_prompt(timeout=1, async_=True) + raise ValueError("Continuation prompt found - input was incomplete:") + return "".join(res + [repl.child.before]) + + +class PatternWaiter(asyncio.Protocol): + transport = None + + def set_expecter(self, expecter): + self.expecter = expecter + self.fut = asyncio.Future() + + def found(self, result): + if not self.fut.done(): + self.fut.set_result(result) + self.transport.pause_reading() + + def error(self, exc): + if not self.fut.done(): + self.fut.set_exception(exc) + self.transport.pause_reading() + + def connection_made(self, transport): + self.transport = transport + + def data_received(self, data): + spawn = self.expecter.spawn + s = spawn._decoder.decode(data) + spawn._log(s, "read") + + if self.fut.done(): + spawn._before.write(s) + 
spawn._buffer.write(s) + return + + try: + index = self.expecter.new_data(s) + if index is not None: + # Found a match + self.found(index) + except Exception as exc: + self.expecter.errored() + self.error(exc) + + def eof_received(self): + # N.B. If this gets called, async will close the pipe (the spawn object) + # for us + try: + self.expecter.spawn.flag_eof = True + index = self.expecter.eof() + except EOF as exc: + self.error(exc) + else: + self.found(index) + + def connection_lost(self, exc): + if isinstance(exc, OSError) and exc.errno == errno.EIO: + # We may get here without eof_received being called, e.g on Linux + self.eof_received() + elif exc is not None: + self.error(exc) diff --git a/pipenv/vendor/pexpect/bashrc.sh b/pipenv/vendor/pexpect/bashrc.sh index c734ac90b8..d75d1a5b62 100644 --- a/pipenv/vendor/pexpect/bashrc.sh +++ b/pipenv/vendor/pexpect/bashrc.sh @@ -14,3 +14,5 @@ PS1="$" # Unset PROMPT_COMMAND, so that it can't change PS1 to something unexpected. unset PROMPT_COMMAND + +bind 'set enable-bracketed-paste off' diff --git a/pipenv/vendor/pexpect/fdpexpect.py b/pipenv/vendor/pexpect/fdpexpect.py index cddd50e100..140bdfeeda 100644 --- a/pipenv/vendor/pexpect/fdpexpect.py +++ b/pipenv/vendor/pexpect/fdpexpect.py @@ -1,7 +1,11 @@ -'''This is like pexpect, but it will work with any file descriptor that you +'''This is like :mod:`pexpect`, but it will work with any file descriptor that you pass it. You are responsible for opening and close the file descriptor. This allows you to use Pexpect with sockets and named pipes (FIFOs). +.. note:: + socket.fileno() does not give a readable file descriptor on windows. + Use :mod:`pexpect.socket_pexpect` for cross-platform socket support + PEXPECT LICENSE This license is approved by the OSI and FSF as GPL-compatible. 
diff --git a/pipenv/vendor/pexpect/popen_spawn.py b/pipenv/vendor/pexpect/popen_spawn.py index 4bb58cfe76..e6bdf07d61 100644 --- a/pipenv/vendor/pexpect/popen_spawn.py +++ b/pipenv/vendor/pexpect/popen_spawn.py @@ -57,7 +57,7 @@ def __init__(self, cmd, timeout=30, maxread=2000, searchwindowsize=None, self._read_queue = Queue() self._read_thread = threading.Thread(target=self._read_incoming) - self._read_thread.setDaemon(True) + self._read_thread.daemon = True self._read_thread.start() _read_reached_eof = False diff --git a/pipenv/vendor/pexpect/pxssh.py b/pipenv/vendor/pexpect/pxssh.py index de2e5d977c..6654475016 100644 --- a/pipenv/vendor/pexpect/pxssh.py +++ b/pipenv/vendor/pexpect/pxssh.py @@ -143,8 +143,8 @@ def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None, # used to set shell command-line prompt to UNIQUE_PROMPT. self.PROMPT_SET_SH = r"PS1='[PEXPECT]\$ '" self.PROMPT_SET_CSH = r"set prompt='[PEXPECT]\$ '" - self.SSH_OPTS = ("-o'RSAAuthentication=no'" - + " -o 'PubkeyAuthentication=no'") + self.PROMPT_SET_ZSH = "prompt restore;\nPS1='[PEXPECT]%(!.#.$) '" + self.SSH_OPTS = (" -o 'PubkeyAuthentication=no'") # Disabling host key checking, makes you vulnerable to MITM attacks. # + " -o 'StrictHostKeyChecking=no'" # + " -o 'UserKnownHostsFile /dev/null' ") @@ -152,7 +152,7 @@ def __init__ (self, timeout=30, maxread=2000, searchwindowsize=None, # displaying a GUI password dialog. I have not figured out how to # disable only SSH_ASKPASS without also disabling X11 forwarding. # Unsetting SSH_ASKPASS on the remote side doesn't disable it! Annoying! 
- #self.SSH_OPTS = "-x -o'RSAAuthentication=no' -o 'PubkeyAuthentication=no'" + #self.SSH_OPTS = "-x -o 'PubkeyAuthentication=no'" self.force_password = False self.debug_command_string = debug_command_string @@ -530,8 +530,11 @@ def set_unique_prompt(self): if i == 0: # csh-style self.sendline(self.PROMPT_SET_CSH) i = self.expect([TIMEOUT, self.PROMPT], timeout=10) - if i == 0: - return False + if i == 0: # zsh-style + self.sendline(self.PROMPT_SET_ZSH) + i = self.expect([TIMEOUT, self.PROMPT], timeout=10) + if i == 0: + return False return True # vi:ts=4:sw=4:expandtab:ft=python: diff --git a/pipenv/vendor/pexpect/replwrap.py b/pipenv/vendor/pexpect/replwrap.py index d705f508ef..d49fd7214e 100644 --- a/pipenv/vendor/pexpect/replwrap.py +++ b/pipenv/vendor/pexpect/replwrap.py @@ -108,23 +108,29 @@ def run_command(self, command, timeout=-1, async_=False): + command) return u''.join(res + [self.child.before]) -def python(command="python"): +def python(command=sys.executable): """Start a Python shell and return a :class:`REPLWrapper` object.""" return REPLWrapper(command, u">>> ", u"import sys; sys.ps1={0!r}; sys.ps2={1!r}") -def bash(command="bash"): - """Start a bash shell and return a :class:`REPLWrapper` object.""" - bashrc = os.path.join(os.path.dirname(__file__), 'bashrc.sh') - child = pexpect.spawn(command, ['--rcfile', bashrc], echo=False, - encoding='utf-8') +def _repl_sh(command, args, non_printable_insert): + child = pexpect.spawn(command, args, echo=False, encoding='utf-8') # If the user runs 'env', the value of PS1 will be in the output. To avoid # replwrap seeing that as the next prompt, we'll embed the marker characters # for invisible characters in the prompt; these show up when inspecting the # environment variable, but not when bash displays the prompt. 
- ps1 = PEXPECT_PROMPT[:5] + u'\\[\\]' + PEXPECT_PROMPT[5:] - ps2 = PEXPECT_CONTINUATION_PROMPT[:5] + u'\\[\\]' + PEXPECT_CONTINUATION_PROMPT[5:] + ps1 = PEXPECT_PROMPT[:5] + non_printable_insert + PEXPECT_PROMPT[5:] + ps2 = PEXPECT_CONTINUATION_PROMPT[:5] + non_printable_insert + PEXPECT_CONTINUATION_PROMPT[5:] prompt_change = u"PS1='{0}' PS2='{1}' PROMPT_COMMAND=''".format(ps1, ps2) return REPLWrapper(child, u'\\$', prompt_change, extra_init_cmd="export PAGER=cat") + +def bash(command="bash"): + """Start a bash shell and return a :class:`REPLWrapper` object.""" + bashrc = os.path.join(os.path.dirname(__file__), 'bashrc.sh') + return _repl_sh(command, ['--rcfile', bashrc], non_printable_insert='\\[\\]') + +def zsh(command="zsh", args=("--no-rcs", "-V", "+Z")): + """Start a zsh shell and return a :class:`REPLWrapper` object.""" + return _repl_sh(command, list(args), non_printable_insert='%(!..)') diff --git a/pipenv/vendor/pexpect/run.py b/pipenv/vendor/pexpect/run.py index e179cdc257..5be8c9d716 100644 --- a/pipenv/vendor/pexpect/run.py +++ b/pipenv/vendor/pexpect/run.py @@ -66,7 +66,7 @@ def print_ticks(d): The 'events' argument should be either a dictionary or a tuple list that contains patterns and responses. Whenever one of the patterns is seen in the command output, run() will send the associated response string. - So, run() in the above example can be also written as: + So, run() in the above example can be also written as:: run("mencoder dvd://1 -o video.avi -oac copy -ovc copy", events=[(TIMEOUT,print_ticks)], timeout=5) diff --git a/pipenv/vendor/pexpect/socket_pexpect.py b/pipenv/vendor/pexpect/socket_pexpect.py new file mode 100644 index 0000000000..cb11ac2258 --- /dev/null +++ b/pipenv/vendor/pexpect/socket_pexpect.py @@ -0,0 +1,145 @@ +"""This is like :mod:`pexpect`, but it will work with any socket that you +pass it. You are responsible for opening and closing the socket. 
+ +PEXPECT LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2012, Noah Spurrier + PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY + PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE + COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" + +import socket +from contextlib import contextmanager + +from .exceptions import TIMEOUT, EOF +from .spawnbase import SpawnBase + +__all__ = ["SocketSpawn"] + + +class SocketSpawn(SpawnBase): + """This is like :mod:`pexpect.fdpexpect` but uses the cross-platform python socket api, + rather than the unix-specific file descriptor api. Thus, it works with + remote connections on both unix and windows.""" + + def __init__( + self, + socket: socket.socket, + args=None, + timeout=30, + maxread=2000, + searchwindowsize=None, + logfile=None, + encoding=None, + codec_errors="strict", + use_poll=False, + ): + """This takes an open socket.""" + + self.args = None + self.command = None + SpawnBase.__init__( + self, + timeout, + maxread, + searchwindowsize, + logfile, + encoding=encoding, + codec_errors=codec_errors, + ) + self.socket = socket + self.child_fd = socket.fileno() + self.closed = False + self.name = "" % socket + self.use_poll = use_poll + + def close(self): + """Close the socket. 
+ + Calling this method a second time does nothing, but if the file + descriptor was closed elsewhere, :class:`OSError` will be raised. + """ + if self.child_fd == -1: + return + + self.flush() + self.socket.shutdown(socket.SHUT_RDWR) + self.socket.close() + self.child_fd = -1 + self.closed = True + + def isalive(self): + """ Alive if the fileno is valid """ + return self.socket.fileno() >= 0 + + def send(self, s) -> int: + """Write to socket, return number of bytes written""" + s = self._coerce_send_string(s) + self._log(s, "send") + + b = self._encoder.encode(s, final=False) + self.socket.sendall(b) + return len(b) + + def sendline(self, s) -> int: + """Write to socket with trailing newline, return number of bytes written""" + s = self._coerce_send_string(s) + return self.send(s + self.linesep) + + def write(self, s): + """Write to socket, return None""" + self.send(s) + + def writelines(self, sequence): + "Call self.write() for each item in sequence" + for s in sequence: + self.write(s) + + @contextmanager + def _timeout(self, timeout): + saved_timeout = self.socket.gettimeout() + try: + self.socket.settimeout(timeout) + yield + finally: + self.socket.settimeout(saved_timeout) + + def read_nonblocking(self, size=1, timeout=-1): + """ + Read from the file descriptor and return the result as a string. + + The read_nonblocking method of :class:`SpawnBase` assumes that a call + to os.read will not block (timeout parameter is ignored). This is not + the case for POSIX file-like objects such as sockets and serial ports. + + Use :func:`select.select`, timeout is implemented conditionally for + POSIX systems. + + :param int size: Read at most *size* bytes. + :param int timeout: Wait timeout seconds for file descriptor to be + ready to read. When -1 (default), use self.timeout. When 0, poll. 
+ :return: String containing the bytes read + """ + if timeout == -1: + timeout = self.timeout + try: + with self._timeout(timeout): + s = self.socket.recv(size) + if s == b'': + self.flag_eof = True + raise EOF("Socket closed") + return s + except socket.timeout: + raise TIMEOUT("Timeout exceeded.") diff --git a/pipenv/vendor/pexpect/spawnbase.py b/pipenv/vendor/pexpect/spawnbase.py index 59e905764c..abf8071ec1 100644 --- a/pipenv/vendor/pexpect/spawnbase.py +++ b/pipenv/vendor/pexpect/spawnbase.py @@ -141,6 +141,16 @@ def _coerce_expect_string(self, s): return s.encode('ascii') return s + # In bytes mode, regex patterns should also be of bytes type + def _coerce_expect_re(self, r): + p = r.pattern + if self.encoding is None and not isinstance(p, bytes): + return re.compile(p.encode('utf-8')) + # And vice-versa + elif self.encoding is not None and isinstance(p, bytes): + return re.compile(p.decode('utf-8')) + return r + def _coerce_send_string(self, s): if self.encoding is None and not isinstance(s, bytes): return s.encode('utf-8') @@ -153,7 +163,7 @@ def _set_buffer(self, value): self._buffer = self.buffer_type() self._buffer.write(value) - # This property is provided for backwards compatability (self.buffer used + # This property is provided for backwards compatibility (self.buffer used # to be a string/bytes object) buffer = property(_get_buffer, _set_buffer) @@ -235,6 +245,7 @@ def compile_pattern_list(self, patterns): elif p is TIMEOUT: compiled_pattern_list.append(TIMEOUT) elif isinstance(p, type(re.compile(''))): + p = self._coerce_expect_re(p) compiled_pattern_list.append(p) else: self._pattern_type_err(p) diff --git a/pipenv/vendor/pipdeptree/__main__.py b/pipenv/vendor/pipdeptree/__main__.py index 6669ee9733..1a549c00a1 100644 --- a/pipenv/vendor/pipdeptree/__main__.py +++ b/pipenv/vendor/pipdeptree/__main__.py @@ -1,4 +1,5 @@ """The main entry point used for CLI.""" + from __future__ import annotations import os @@ -43,7 +44,7 @@ def main(args: 
Sequence[str] | None = None) -> None | int: try: tree = tree.filter_nodes(show_only, exclude) except ValueError as e: - if options.warn in ("suppress", "fail"): + if options.warn in {"suppress", "fail"}: print(e, file=sys.stderr) # noqa: T201 return_code |= 1 if options.warn == "fail" else 0 return return_code diff --git a/pipenv/vendor/pipdeptree/_cli.py b/pipenv/vendor/pipdeptree/_cli.py index 9dd1d3754e..2cf43c103d 100644 --- a/pipenv/vendor/pipdeptree/_cli.py +++ b/pipenv/vendor/pipdeptree/_cli.py @@ -13,7 +13,7 @@ class Options(Namespace): freeze: bool python: str - all: bool # noqa: A003 + all: bool local_only: bool user_only: bool warn: Literal["silence", "suppress", "fail"] @@ -26,6 +26,7 @@ class Options(Namespace): output_format: str | None depth: float encoding: str + license: bool class _Formatter(ArgumentDefaultsHelpFormatter): @@ -49,16 +50,6 @@ def build_parser() -> ArgumentParser: "not show warnings at all and always return 0; fail will show warnings and return 1 if any are present" ), ) - parser.add_argument( - "-r", - "--reverse", - action="store_true", - default=False, - help=( - "render the dependency tree in the reverse fashion ie. the sub-dependencies are listed with the list of " - "packages that need them under them" - ), - ) select = parser.add_argument_group(title="select", description="choose what to render") select.add_argument("--python", default=sys.executable, help="Python interpreter to inspect") @@ -106,6 +97,21 @@ def build_parser() -> ArgumentParser: help="limit the depth of the tree (text render only)", metavar="D", ) + render.add_argument( + "-r", + "--reverse", + action="store_true", + default=False, + help=( + "render the dependency tree in the reverse fashion ie. 
the sub-dependencies are listed with the list of " + "packages that need them under them" + ), + ) + render.add_argument( + "--license", + action="store_true", + help="list the license(s) of a package (text render only)", + ) render_type = render.add_mutually_exclusive_group() render_type.add_argument( @@ -142,11 +148,13 @@ def get_options(args: Sequence[str] | None) -> Options: if parsed_args.exclude and (parsed_args.all or parsed_args.packages): return parser.error("cannot use --exclude with --packages or --all") + if parsed_args.license and parsed_args.freeze: + return parser.error("cannot use --license with --freeze") return cast(Options, parsed_args) __all__ = [ - "get_options", "Options", + "get_options", ] diff --git a/pipenv/vendor/pipdeptree/_discovery.py b/pipenv/vendor/pipdeptree/_discovery.py index 8763371e8d..379578fbc8 100644 --- a/pipenv/vendor/pipdeptree/_discovery.py +++ b/pipenv/vendor/pipdeptree/_discovery.py @@ -11,12 +11,12 @@ def get_installed_distributions( user_only: bool = False, # noqa: FBT001, FBT002 ) -> list[DistInfoDistribution]: try: - from pipenv.patched.pip._internal.metadata import pkg_resources + from pipenv.patched.pip._internal.metadata import pkg_resources # noqa: PLC0415, PLC2701 except ImportError: # For backward compatibility with python ver. 
2.7 and pip # version 20.3.4 (the latest pip version that works with python # version 2.7) - from pipenv.patched.pip._internal.utils import misc + from pipenv.patched.pip._internal.utils import misc # noqa: PLC0415, PLC2701 # pragma: no cover return misc.get_installed_distributions( # type: ignore[no-any-return,attr-defined] local_only=local_only, diff --git a/pipenv/vendor/pipdeptree/_models/__init__.py b/pipenv/vendor/pipdeptree/_models/__init__.py index a5128301fc..83afb9fc0a 100644 --- a/pipenv/vendor/pipdeptree/_models/__init__.py +++ b/pipenv/vendor/pipdeptree/_models/__init__.py @@ -4,8 +4,8 @@ from .package import DistPackage, ReqPackage __all__ = [ - "ReqPackage", "DistPackage", "PackageDAG", + "ReqPackage", "ReversedPackageDAG", ] diff --git a/pipenv/vendor/pipdeptree/_models/dag.py b/pipenv/vendor/pipdeptree/_models/dag.py index f3d22539c5..6d09e9670f 100644 --- a/pipenv/vendor/pipdeptree/_models/dag.py +++ b/pipenv/vendor/pipdeptree/_models/dag.py @@ -9,11 +9,12 @@ from pipenv.patched.pip._vendor.pkg_resources import DistInfoDistribution -from .package import DistPackage, ReqPackage +from .package import DistPackage, ReqPackage, pep503_normalize class PackageDAG(Mapping[DistPackage, List[ReqPackage]]): - """Representation of Package dependencies as directed acyclic graph using a dict as the underlying datastructure. + """ + Representation of Package dependencies as directed acyclic graph using a dict as the underlying datastructure. 
The nodes and their relationships (edges) are internally stored using a map as follows, @@ -42,7 +43,8 @@ def from_pkgs(cls, pkgs: list[DistInfoDistribution]) -> PackageDAG: for p in dist_pkgs: reqs = [] for r in p.requires(): - d = idx.get(r.key) + # Requirement key is not sufficiently normalized in pkg_resources - apply additional normalization + d = idx.get(pep503_normalize(r.key)) # pip's _vendor.packaging.requirements.Requirement uses the exact casing of a dependency's name found in # a project's build config, which is not ideal when rendering. # See https://github.com/tox-dev/pipdeptree/issues/242 @@ -54,7 +56,8 @@ def from_pkgs(cls, pkgs: list[DistInfoDistribution]) -> PackageDAG: return cls(m) def __init__(self, m: dict[DistPackage, list[ReqPackage]]) -> None: - """Initialize the PackageDAG object. + """ + Initialize the PackageDAG object. :param dict m: dict of node objects (refer class docstring) :returns: None @@ -65,7 +68,8 @@ def __init__(self, m: dict[DistPackage, list[ReqPackage]]) -> None: self._index: dict[str, DistPackage] = {p.key: p for p in list(self._obj)} def get_node_as_parent(self, node_key: str) -> DistPackage | None: - """Get the node from the keys of the dict representing the DAG. + """ + Get the node from the keys of the dict representing the DAG. This method is useful if the dict representing the DAG contains different kind of objects in keys and values. Use this method to look up a node obj as a parent (from the keys of the dict) given a node key. @@ -80,7 +84,8 @@ def get_node_as_parent(self, node_key: str) -> DistPackage | None: return None def get_children(self, node_key: str) -> list[ReqPackage]: - """Get child nodes for a node by its key. + """ + Get child nodes for a node by its key. 
:param node_key: key of the node to get children of :returns: child nodes @@ -90,7 +95,8 @@ def get_children(self, node_key: str) -> list[ReqPackage]: return self._obj[node] if node else [] def filter_nodes(self, include: list[str] | None, exclude: set[str] | None) -> PackageDAG: # noqa: C901, PLR0912 - """Filter nodes in a graph by given parameters. + """ + Filter nodes in a graph by given parameters. If a node is included, then all it's children are also included. @@ -165,7 +171,8 @@ def filter_nodes(self, include: list[str] | None, exclude: set[str] | None) -> P return self.__class__(m) def reverse(self) -> ReversedPackageDAG: - """Reverse the DAG, or turn it upside-down. + """ + Reverse the DAG, or turn it upside-down. In other words, the directions of edges of the nodes in the DAG will be reversed. @@ -192,7 +199,8 @@ def reverse(self) -> ReversedPackageDAG: return ReversedPackageDAG(dict(m)) # type: ignore[arg-type] def sort(self) -> PackageDAG: - """Return sorted tree in which the underlying _obj dict is an dict, sorted alphabetically by the keys. + """ + Return sorted tree in which the underlying _obj dict is an dict, sorted alphabetically by the keys. :returns: Instance of same class with dict @@ -211,7 +219,8 @@ def __len__(self) -> int: class ReversedPackageDAG(PackageDAG): - """Representation of Package dependencies in the reverse order. + """ + Representation of Package dependencies in the reverse order. Similar to it's super class `PackageDAG`, the underlying datastructure is a dict, but here the keys are expected to be of type `ReqPackage` and each item in the values of type `DistPackage`. @@ -221,7 +230,8 @@ class ReversedPackageDAG(PackageDAG): """ def reverse(self) -> PackageDAG: # type: ignore[override] - """Reverse the already reversed DAG to get the PackageDAG again. + """ + Reverse the already reversed DAG to get the PackageDAG again. 
:returns: reverse of the reversed DAG diff --git a/pipenv/vendor/pipdeptree/_models/package.py b/pipenv/vendor/pipdeptree/_models/package.py index 5d64cf0efe..74590e9db9 100644 --- a/pipenv/vendor/pipdeptree/_models/package.py +++ b/pipenv/vendor/pipdeptree/_models/package.py @@ -1,32 +1,56 @@ from __future__ import annotations +import re from abc import ABC, abstractmethod from importlib import import_module -from importlib.metadata import PackageNotFoundError, version +from importlib.metadata import PackageNotFoundError, metadata, version from inspect import ismodule from typing import TYPE_CHECKING -from pipenv.patched.pip._vendor.pkg_resources import Requirement +from pipenv.patched.pip._vendor.pkg_resources import Requirement # noqa: PLC2701 if TYPE_CHECKING: from pipenv.patched.pip._internal.metadata import BaseDistribution from pipenv.patched.pip._vendor.pkg_resources import DistInfoDistribution +def pep503_normalize(name: str) -> str: + return re.sub("[-_.]+", "-", name) + + class Package(ABC): """Abstract class for wrappers around objects that pip returns.""" + UNKNOWN_LICENSE_STR = "(Unknown license)" + def __init__(self, obj: DistInfoDistribution) -> None: self._obj: DistInfoDistribution = obj - - @property - def key(self) -> str: - return self._obj.key # type: ignore[no-any-return] + self.key = pep503_normalize(obj.key) @property def project_name(self) -> str: return self._obj.project_name # type: ignore[no-any-return] + def licenses(self) -> str: + try: + dist_metadata = metadata(self.key) + except PackageNotFoundError: + return self.UNKNOWN_LICENSE_STR + + license_strs: list[str] = [] + classifiers = dist_metadata.get_all("Classifier", []) + + for classifier in classifiers: + line = str(classifier) + if line.startswith("License"): + license_str = line.split(":: ")[-1] + license_strs.append(license_str) + + if len(license_strs) == 0: + return self.UNKNOWN_LICENSE_STR + + return f'({", ".join(license_strs)})' + @abstractmethod def render_as_root(self, 
*, frozen: bool) -> str: raise NotImplementedError @@ -36,13 +60,9 @@ def render_as_branch(self, *, frozen: bool) -> str: raise NotImplementedError @abstractmethod - def as_dict(self) -> dict[str, str | None]: + def as_dict(self) -> dict[str, str]: raise NotImplementedError - @property - def version_spec(self) -> None | str: - return None - def render( self, parent: DistPackage | ReqPackage | None = None, @@ -64,12 +84,12 @@ def as_frozen_repr(obj: DistInfoDistribution) -> str: # # This is a hacky backward compatible (with older versions of pip) fix. try: - from pipenv.patched.pip._internal.operations.freeze import FrozenRequirement + from pipenv.patched.pip._internal.operations.freeze import FrozenRequirement # noqa: PLC0415 # pragma: no cover except ImportError: - from pipenv.patched.pip import FrozenRequirement # type: ignore[attr-defined, no-redef] + from pipenv.patched.pip import FrozenRequirement # type: ignore[attr-defined, no-redef] # noqa: PLC0415 # pragma: no cover try: - from pipenv.patched.pip._internal import metadata + from pipenv.patched.pip._internal import metadata # noqa: PLC0415, PLC2701 # pragma: no cover except ImportError: our_dist: BaseDistribution = obj # type: ignore[assignment] else: @@ -89,7 +109,8 @@ def __lt__(self, rhs: Package) -> bool: class DistPackage(Package): - """Wrapper class for pkg_resources.Distribution instances. + """ + Wrapper class for pkg_resources.Distribution instances. :param obj: pkg_resources.Distribution to wrap over :param req: optional ReqPackage object to associate this DistPackage with. 
This is useful for displaying the tree in @@ -100,10 +121,20 @@ class DistPackage(Package): def __init__(self, obj: DistInfoDistribution, req: ReqPackage | None = None) -> None: super().__init__(obj) self.req = req + self._project_name = "" def requires(self) -> list[Requirement]: return self._obj.requires() # type: ignore[no-untyped-call,no-any-return] + @property + def project_name(self) -> str: + if not self._project_name: + try: + self._project_name = metadata(self.key)["name"] + except (PackageNotFoundError, KeyError): + self._project_name = self._obj.project_name + return self._project_name + @property def version(self) -> str: return self._obj.version # type: ignore[no-any-return] @@ -128,7 +159,8 @@ def as_requirement(self) -> ReqPackage: return ReqPackage(self._obj.as_requirement(), dist=self) # type: ignore[no-untyped-call] def as_parent_of(self, req: ReqPackage | None) -> DistPackage: - """Return a DistPackage instance associated to a requirement. + """ + Return a DistPackage instance associated to a requirement. This association is necessary for reversing the PackageDAG. If `req` is None, and the `req` attribute of the current instance is also None, then the same instance will be @@ -142,12 +174,13 @@ def as_parent_of(self, req: ReqPackage | None) -> DistPackage: return self return self.__class__(self._obj, req) - def as_dict(self) -> dict[str, str | None]: + def as_dict(self) -> dict[str, str]: return {"key": self.key, "package_name": self.project_name, "installed_version": self.version} class ReqPackage(Package): - """Wrapper class for Requirements instance. + """ + Wrapper class for Requirements instance. 
:param obj: The `Requirements` instance to wrap over :param dist: optional `pkg_resources.Distribution` instance for this requirement @@ -186,7 +219,7 @@ def installed_version(self) -> str: except PackageNotFoundError: pass # Avoid AssertionError with setuptools, see https://github.com/tox-dev/pipdeptree/issues/162 - if self.key in {"setuptools"}: + if self.key == "setuptools": return self.UNKNOWN_VERSION try: m = import_module(self.key) @@ -213,12 +246,12 @@ def is_conflicting(self) -> bool: req_obj = Requirement.parse(req_version_str) # type: ignore[no-untyped-call] return self.installed_version not in req_obj - def as_dict(self) -> dict[str, str | None]: + def as_dict(self) -> dict[str, str]: return { "key": self.key, "package_name": self.project_name, "installed_version": self.installed_version, - "required_version": self.version_spec, + "required_version": self.version_spec if self.version_spec is not None else "Any", } diff --git a/pipenv/vendor/pipdeptree/_non_host.py b/pipenv/vendor/pipdeptree/_non_host.py index 4a2e543dec..0f630afd31 100644 --- a/pipenv/vendor/pipdeptree/_non_host.py +++ b/pipenv/vendor/pipdeptree/_non_host.py @@ -5,7 +5,7 @@ from inspect import getsourcefile from pathlib import Path from shutil import copytree -from subprocess import call +from subprocess import call # noqa: S404 from tempfile import TemporaryDirectory from typing import TYPE_CHECKING diff --git a/pipenv/vendor/pipdeptree/_render/__init__.py b/pipenv/vendor/pipdeptree/_render/__init__.py index d60c8f4a4c..8d523d2744 100644 --- a/pipenv/vendor/pipdeptree/_render/__init__.py +++ b/pipenv/vendor/pipdeptree/_render/__init__.py @@ -30,6 +30,7 @@ def render(options: Options, tree: PackageDAG) -> None: encoding=options.encoding_type, list_all=options.all, frozen=options.freeze, + include_license=options.license, ) diff --git a/pipenv/vendor/pipdeptree/_render/graphviz.py b/pipenv/vendor/pipdeptree/_render/graphviz.py index 2c0b8ba9e6..bafff4c813 100644 --- 
a/pipenv/vendor/pipdeptree/_render/graphviz.py +++ b/pipenv/vendor/pipdeptree/_render/graphviz.py @@ -15,7 +15,8 @@ def dump_graphviz( # noqa: C901, PLR0912 output_format: str = "dot", is_reverse: bool = False, # noqa: FBT001, FBT002 ) -> str | bytes: - """Output dependency graph as one of the supported GraphViz output formats. + """ + Output dependency graph as one of the supported GraphViz output formats. :param dict tree: dependency graph :param string output_format: output format @@ -25,7 +26,7 @@ def dump_graphviz( # noqa: C901, PLR0912 """ try: - from graphviz import Digraph + from graphviz import Digraph # noqa: PLC0415 except ImportError as exc: print( # noqa: T201 "graphviz is not available, but necessary for the output option. Please install it.", @@ -34,9 +35,9 @@ def dump_graphviz( # noqa: C901, PLR0912 raise SystemExit(1) from exc try: - from graphviz import parameters + from graphviz import parameters # noqa: PLC0415 except ImportError: - from graphviz import backend + from graphviz import backend # noqa: PLC0415 # pragma: no cover valid_formats = backend.FORMATS print( # noqa: T201 @@ -94,7 +95,8 @@ def dump_graphviz( # noqa: C901, PLR0912 def print_graphviz(dump_output: str | bytes) -> None: - """Dump the data generated by GraphViz to stdout. + """ + Dump the data generated by GraphViz to stdout. :param dump_output: The output from dump_graphviz diff --git a/pipenv/vendor/pipdeptree/_render/json.py b/pipenv/vendor/pipdeptree/_render/json.py index c5d48601ac..d39a59aadf 100644 --- a/pipenv/vendor/pipdeptree/_render/json.py +++ b/pipenv/vendor/pipdeptree/_render/json.py @@ -8,7 +8,8 @@ def render_json(tree: PackageDAG) -> str: - """Convert the tree into a flat json representation. + """ + Convert the tree into a flat json representation. 
The json repr will be a list of hashes, each hash having 2 fields: - package diff --git a/pipenv/vendor/pipdeptree/_render/json_tree.py b/pipenv/vendor/pipdeptree/_render/json_tree.py index bd9a1393cb..a0146db798 100644 --- a/pipenv/vendor/pipdeptree/_render/json_tree.py +++ b/pipenv/vendor/pipdeptree/_render/json_tree.py @@ -4,12 +4,15 @@ from itertools import chain from typing import TYPE_CHECKING, Any +from pipenv.vendor.pipdeptree._models import ReqPackage + if TYPE_CHECKING: - from pipenv.vendor.pipdeptree._models import DistPackage, PackageDAG, ReqPackage + from pipenv.vendor.pipdeptree._models import DistPackage, PackageDAG def render_json_tree(tree: PackageDAG) -> str: - """Convert the tree into a nested json representation. + """ + Convert the tree into a nested json representation. The json repr will be a list of hashes, each hash having the following fields: @@ -37,7 +40,7 @@ def aux( d: dict[str, str | list[Any] | None] = node.as_dict() # type: ignore[assignment] if parent: - d["required_version"] = node.version_spec if node.version_spec else "Any" + d["required_version"] = node.version_spec if isinstance(node, ReqPackage) and node.version_spec else "Any" else: d["required_version"] = d["installed_version"] diff --git a/pipenv/vendor/pipdeptree/_render/mermaid.py b/pipenv/vendor/pipdeptree/_render/mermaid.py index 4b77f28c96..9b5b3fbfd7 100644 --- a/pipenv/vendor/pipdeptree/_render/mermaid.py +++ b/pipenv/vendor/pipdeptree/_render/mermaid.py @@ -35,7 +35,8 @@ def render_mermaid(tree: PackageDAG) -> str: # noqa: C901 - """Produce a Mermaid flowchart from the dependency graph. + """ + Produce a Mermaid flowchart from the dependency graph. 
:param tree: dependency graph diff --git a/pipenv/vendor/pipdeptree/_render/text.py b/pipenv/vendor/pipdeptree/_render/text.py index 1d4365b863..47822062d5 100644 --- a/pipenv/vendor/pipdeptree/_render/text.py +++ b/pipenv/vendor/pipdeptree/_render/text.py @@ -7,19 +7,21 @@ from pipenv.vendor.pipdeptree._models import DistPackage, PackageDAG, ReqPackage -def render_text( +def render_text( # noqa: PLR0913 tree: PackageDAG, *, max_depth: float, encoding: str, list_all: bool = True, frozen: bool = False, + include_license: bool = False, ) -> None: - """Print tree as text on console. + """ + Print tree as text on console. :param tree: the package tree :param list_all: whether to list all the pgks at the root level or only those that are the sub-dependencies - :param frozen: show the names of the pkgs in the output that's favourable to pip --freeze + :param frozen: show the names of the pkgs in the output that's favorable to pip --freeze :returns: None """ @@ -30,10 +32,10 @@ def render_text( if not list_all: nodes = [p for p in nodes if p.key not in branch_keys] - if encoding in ("utf-8", "utf-16", "utf-32"): - _render_text_with_unicode(tree, nodes, max_depth, frozen) + if encoding in {"utf-8", "utf-16", "utf-32"}: + _render_text_with_unicode(tree, nodes, max_depth, frozen, include_license) else: - _render_text_without_unicode(tree, nodes, max_depth, frozen) + _render_text_without_unicode(tree, nodes, max_depth, frozen, include_license) def _render_text_with_unicode( @@ -41,10 +43,13 @@ def _render_text_with_unicode( nodes: list[DistPackage], max_depth: float, frozen: bool, # noqa: FBT001 + include_license: bool, # noqa: FBT001 ) -> None: + assert not (frozen and include_license) + use_bullets = not frozen - def aux( # noqa: PLR0913 + def aux( # noqa: PLR0913, PLR0917 node: DistPackage | ReqPackage, parent: DistPackage | ReqPackage | None = None, indent: int = 0, @@ -82,6 +87,9 @@ def aux( # noqa: PLR0913 prefix += " " if use_bullets else "" next_prefix = prefix 
node_str = prefix + bullet + node_str + elif include_license: + node_str += " " + node.licenses() + result = [node_str] children = tree.get_children(node.key) @@ -113,7 +121,10 @@ def _render_text_without_unicode( nodes: list[DistPackage], max_depth: float, frozen: bool, # noqa: FBT001 + include_license: bool, # noqa: FBT001 ) -> None: + assert not (frozen and include_license) + use_bullets = not frozen def aux( @@ -128,6 +139,8 @@ def aux( if parent: prefix = " " * indent + ("- " if use_bullets else "") node_str = prefix + node_str + elif include_license: + node_str += " " + node.licenses() result = [node_str] children = [ aux(c, node, indent=indent + 2, cur_chain=[*cur_chain, c.project_name], depth=depth + 1) diff --git a/pipenv/vendor/pipdeptree/_validate.py b/pipenv/vendor/pipdeptree/_validate.py index ffce300acf..ba67ac9f60 100644 --- a/pipenv/vendor/pipdeptree/_validate.py +++ b/pipenv/vendor/pipdeptree/_validate.py @@ -31,7 +31,8 @@ def validate(args: Options, is_text_output: bool, tree: PackageDAG) -> int: # n def conflicting_deps(tree: PackageDAG) -> dict[DistPackage, list[ReqPackage]]: - """Return dependencies which are not present or conflict with the requirements of other packages. + """ + Return dependencies which are not present or conflict with the requirements of other packages. e.g. will warn if pkg1 requires pkg2==2.0 and pkg2==1.0 is installed @@ -62,7 +63,8 @@ def render_conflicts_text(conflicts: dict[DistPackage, list[ReqPackage]]) -> Non def cyclic_deps(tree: PackageDAG) -> list[list[Package]]: - """Return cyclic dependencies as list of lists. + """ + Return cyclic dependencies as list of lists. 
:param tree: package tree/dag :returns: list of lists, where each list represents a cycle diff --git a/pipenv/vendor/pipdeptree/version.py b/pipenv/vendor/pipdeptree/version.py index 3c2d0cfd1a..7b209035ff 100644 --- a/pipenv/vendor/pipdeptree/version.py +++ b/pipenv/vendor/pipdeptree/version.py @@ -12,5 +12,5 @@ __version_tuple__: VERSION_TUPLE version_tuple: VERSION_TUPLE -__version__ = version = '2.13.1' -__version_tuple__ = version_tuple = (2, 13, 1) +__version__ = version = '2.16.2' +__version_tuple__ = version_tuple = (2, 16, 2) diff --git a/pipenv/vendor/ruamel/yaml/__init__.py b/pipenv/vendor/ruamel/yaml/__init__.py index e7983ab843..0215384bd4 100644 --- a/pipenv/vendor/ruamel/yaml/__init__.py +++ b/pipenv/vendor/ruamel/yaml/__init__.py @@ -1,13 +1,14 @@ -# coding: utf-8 + +from __future__ import annotations if False: # MYPY from typing import Dict, Any # NOQA _package_data = dict( full_package_name='ruamel.yaml', - version_info=(0, 17, 39), - __version__='0.17.39', - version_timestamp='2023-10-19 17:37:02', + version_info=(0, 18, 6), + __version__='0.18.6', + version_timestamp='2024-02-07 07:43:33', author='Anthon van der Neut', author_email='a.van.der.neut@ruamel.eu', description='ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order', # NOQA @@ -39,8 +40,7 @@ fl8excl='_test/lib,branch_default', ), # universal=True, - python_requires='>=3', - rtfd='yaml', + python_requires='>=3.7', ) # type: Dict[Any, Any] diff --git a/pipenv/vendor/ruamel/yaml/anchor.py b/pipenv/vendor/ruamel/yaml/anchor.py index 1eb1480bdb..61119af24a 100644 --- a/pipenv/vendor/ruamel/yaml/anchor.py +++ b/pipenv/vendor/ruamel/yaml/anchor.py @@ -1,6 +1,8 @@ -# coding: utf-8 -from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA +from __future__ import annotations + +if False: # MYPY + from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA anchor_attrib = '_yaml_anchor' 
diff --git a/pipenv/vendor/ruamel/yaml/comments.py b/pipenv/vendor/ruamel/yaml/comments.py index cb0d38fe9c..f318b3e872 100644 --- a/pipenv/vendor/ruamel/yaml/comments.py +++ b/pipenv/vendor/ruamel/yaml/comments.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations """ stuff to deal with comments and formatting on dict/list/ordereddict/set @@ -18,12 +19,14 @@ from collections.abc import MutableSet, Sized, Set, Mapping -from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA # fmt: off __all__ = ['CommentedSeq', 'CommentedKeySeq', 'CommentedMap', 'CommentedOrderedMap', 'CommentedSet', 'comment_attrib', 'merge_attrib', + 'TaggedScalar', 'C_POST', 'C_PRE', 'C_SPLIT_ON_FIRST_BLANK', 'C_BLANK_LINE_PRESERVE_SPACE', ] # fmt: on @@ -220,6 +223,9 @@ def flow_style(self, default: Optional[Any] = None) -> Any: return default return self._flow_style + def __repr__(self) -> str: + return f'Format({self._flow_style})' + class LineCol: """ @@ -432,7 +438,11 @@ def tag(self) -> Any: def yaml_set_ctag(self, value: Tag) -> None: setattr(self, Tag.attrib, value) - def copy_attributes(self, t: Any, memo: Any = None) -> None: + def copy_attributes(self, t: Any, memo: Any = None) -> Any: + """ + copies the YAML related attributes, not e.g. 
.values + returns target + """ # fmt: off for a in [Comment.attrib, Format.attrib, LineCol.attrib, Anchor.attrib, Tag.attrib, merge_attrib]: @@ -441,6 +451,7 @@ def copy_attributes(self, t: Any, memo: Any = None) -> None: setattr(t, a, copy.deepcopy(getattr(self, a, memo))) else: setattr(t, a, getattr(self, a)) + return t # fmt: on def _yaml_add_eol_comment(self, comment: Any, key: Any) -> None: @@ -1141,6 +1152,9 @@ def count(self, s: str, start: Optional[int] = None, end: Optional[int] = None) def __getitem__(self, pos: int) -> Any: return self.value[pos] + def __repr__(self) -> str: + return f'TaggedScalar(value={self.value!r}, style={self.style!r}, tag={self.tag!r})' + def dump_comments(d: Any, name: str = "", sep: str = '.', out: Any = sys.stdout) -> None: """ diff --git a/pipenv/vendor/ruamel/yaml/compat.py b/pipenv/vendor/ruamel/yaml/compat.py index b929cb310c..f36f75ccd4 100644 --- a/pipenv/vendor/ruamel/yaml/compat.py +++ b/pipenv/vendor/ruamel/yaml/compat.py @@ -1,25 +1,30 @@ -# coding: utf-8 + +from __future__ import annotations # partially from package six by Benjamin Peterson import sys import os import io -import traceback from abc import abstractmethod import collections.abc +from pipenv.vendor.ruamel.yaml.docinfo import Version # NOQA # fmt: off -from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA -from typing import Optional # NOQA -try: - from typing import SupportsIndex as SupportsIndex # in order to reexport for mypy -except ImportError: - SupportsIndex = int # type: ignore +if False: # MYPY + from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA + from typing import Optional # NOQA + try: + from typing import SupportsIndex as SupportsIndex # in order to reexport for mypy + except ImportError: + SupportsIndex = int # type: ignore + + StreamType = Any + StreamTextType = StreamType + VersionType = Union[str , Tuple[int, int] , List[int] , Version , None] # fmt: on - 
_DEFAULT_YAML_VERSION = (1, 2) try: @@ -50,12 +55,6 @@ def insert(self, pos: int, key: Any, value: Any) -> None: StringIO = io.StringIO BytesIO = io.BytesIO -# StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO] -# StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore -StreamType = Any - -StreamTextType = StreamType # Union[Text, StreamType] -VersionType = Union[List[int], str, Tuple[int, int]] builtins_module = 'builtins' @@ -97,17 +96,17 @@ def dump(self) -> None: # used from yaml util when testing def dbg(val: Any = None) -> Any: - global _debug - if _debug is None: + debug = _debug + if debug is None: # set to true or false _debugx = os.environ.get('YAMLDEBUG') if _debugx is None: - _debug = 0 + debug = 0 else: - _debug = int(_debugx) + debug = int(_debugx) if val is None: - return _debug - return _debug & val + return debug + return debug & val class Nprint: @@ -117,6 +116,8 @@ def __init__(self, file_name: Any = None) -> None: self._file_name = file_name def __call__(self, *args: Any, **kw: Any) -> None: + import traceback + if not bool(_debug): return out = sys.stdout if self._file_name is None else open(self._file_name, 'a') diff --git a/pipenv/vendor/ruamel/yaml/composer.py b/pipenv/vendor/ruamel/yaml/composer.py index 7265afec4e..9acfb39f22 100644 --- a/pipenv/vendor/ruamel/yaml/composer.py +++ b/pipenv/vendor/ruamel/yaml/composer.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations import warnings @@ -17,7 +18,8 @@ ) from pipenv.vendor.ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode -from typing import Any, Dict, Optional, List # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List # NOQA __all__ = ['Composer', 'ComposerError'] diff --git a/pipenv/vendor/ruamel/yaml/configobjwalker.py b/pipenv/vendor/ruamel/yaml/configobjwalker.py index b83f5455d9..35a5ad2f1f 100644 --- a/pipenv/vendor/ruamel/yaml/configobjwalker.py +++ b/pipenv/vendor/ruamel/yaml/configobjwalker.py @@ -1,10 +1,12 @@ 
-# coding: utf-8 + +from __future__ import annotations import warnings from pipenv.vendor.ruamel.yaml.util import configobj_walker as new_configobj_walker -from typing import Any +if False: # MYPY + from typing import Any def configobj_walker(cfg: Any) -> Any: diff --git a/pipenv/vendor/ruamel/yaml/constructor.py b/pipenv/vendor/ruamel/yaml/constructor.py index eb4b7cf90e..c100e55e81 100644 --- a/pipenv/vendor/ruamel/yaml/constructor.py +++ b/pipenv/vendor/ruamel/yaml/constructor.py @@ -1,7 +1,8 @@ -# coding: utf-8 + +from __future__ import annotations import datetime -import base64 +from datetime import timedelta as TimeDelta import binascii import sys import types @@ -34,7 +35,8 @@ from pipenv.vendor.ruamel.yaml.timestamp import TimeStamp from pipenv.vendor.ruamel.yaml.util import timestamp_regexp, create_timestamp -from typing import Any, Dict, List, Set, Iterator, Union, Optional # NOQA +if False: # MYPY + from typing import Any, Dict, List, Set, Iterator, Union, Optional # NOQA __all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', @@ -511,6 +513,8 @@ def construct_yaml_float(self, node: Any) -> float: return sign * float(value_s) def construct_yaml_binary(self, node: Any) -> Any: + import base64 + try: value = self.construct_scalar(node).encode('ascii') except UnicodeEncodeError as exc: @@ -662,6 +666,8 @@ def construct_python_unicode(self, node: Any) -> Any: return self.construct_scalar(node) def construct_python_bytes(self, node: Any) -> Any: + import base64 + try: value = self.construct_scalar(node).encode('ascii') except UnicodeEncodeError as exc: @@ -1487,14 +1493,17 @@ def construct_yaml_object(self, node: Any, cls: Any) -> Any: state = SafeConstructor.construct_mapping(self, node, deep=True) data.__setstate__(state) elif is_dataclass(data): - mapping = SafeConstructor.construct_mapping(self, node) + mapping = SafeConstructor.construct_mapping(self, node, deep=True) init_var_defaults = {} for field in data.__dataclass_fields__.values(): # 
nprintf('field', field, field.default is MISSING, # isinstance(field.type, InitVar)) # in 3.7, InitVar is a singleton if ( - isinstance(field.type, InitVar) or field.type is InitVar + isinstance(field.type, InitVar) + or field.type is InitVar + # this following is for handling from __future__ import allocations + or (isinstance(field.type, str) and field.type.startswith('InitVar')) ) and field.default is not MISSING: init_var_defaults[field.name] = field.default for attr, value in mapping.items(): @@ -1588,6 +1597,10 @@ def construct_yaml_omap(self, node: Any) -> Iterator[CommentedOrderedMap]: def construct_yaml_set(self, node: Any) -> Iterator[CommentedSet]: data = CommentedSet() data._yaml_set_line_col(node.start_mark.line, node.start_mark.column) + if node.flow_style is True: + data.fa.set_flow_style() + elif node.flow_style is False: + data.fa.set_block_style() yield data self.construct_setting(node, data) @@ -1672,20 +1685,21 @@ def construct_yaml_timestamp( else: return create_timestamp(**values) # return SafeConstructor.construct_yaml_timestamp(self, node, values) - dd = create_timestamp(**values) # this has delta applied + # print('>>>>>>>> here', values) + dd = create_timestamp(**values) # this has tzinfo delta = None if values['tz_sign']: - tz_hour = int(values['tz_hour']) + hours = values['tz_hour'] + tz_hour = int(hours) minutes = values['tz_minute'] tz_minute = int(minutes) if minutes else 0 - delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) + # ToDo: double work, replace with extraction from dd.tzinfo + delta = TimeDelta(hours=tz_hour, minutes=tz_minute) if values['tz_sign'] == '-': delta = -delta - # should check for None and solve issue 366 should be tzinfo=delta) - # isinstance(datetime.datetime.now, datetime.date) is true) if isinstance(dd, datetime.datetime): data = TimeStamp( - dd.year, dd.month, dd.day, dd.hour, dd.minute, dd.second, dd.microsecond, + dd.year, dd.month, dd.day, dd.hour, dd.minute, dd.second, dd.microsecond, 
dd.tzinfo, # NOQA ) else: # ToDo: make this into a DateStamp? diff --git a/pipenv/vendor/ruamel/yaml/cyaml.py b/pipenv/vendor/ruamel/yaml/cyaml.py index 7f986795fb..38a4d09f56 100644 --- a/pipenv/vendor/ruamel/yaml/cyaml.py +++ b/pipenv/vendor/ruamel/yaml/cyaml.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations from _ruamel_yaml import CParser, CEmitter # type: ignore @@ -7,8 +8,9 @@ from pipenv.vendor.ruamel.yaml.resolver import Resolver, BaseResolver -from typing import Any, Union, Optional # NOQA -from pipenv.vendor.ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA +if False: # MYPY + from typing import Any, Union, Optional # NOQA + from pipenv.vendor.ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA __all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper'] diff --git a/pipenv/vendor/ruamel/yaml/docinfo.py b/pipenv/vendor/ruamel/yaml/docinfo.py new file mode 100644 index 0000000000..1c9254bc14 --- /dev/null +++ b/pipenv/vendor/ruamel/yaml/docinfo.py @@ -0,0 +1,130 @@ + +from __future__ import annotations + +""" +DocInfo + +Although it was possible to read tag directives before this, all handle/prefix +pairs for all documents in all streams were stored in one dictionary per +YAML instance, making it impossible to distinguish where such a pair came +from without sublassing the scanner. + +ToDo: +DocInfo can be used by a yaml dumper to dump a class +- if connected to the root of a data structure +- if provided to the dumper? 
+""" + +if False: # MYPY + from typing import Optional, Tuple, Any + +# from dataclasses import dataclass, field, MISSING # NOQA + + +# @dataclass(order=True, frozen=True) +class Version: + # major: int + # minor: int + def __init__(self, major: int, minor: int) -> None: + self._major = major + self._minor = minor + + @property + def major(self) -> int: + return self._major + + @property + def minor(self) -> int: + return self._minor + + def __eq__(self, v: Any) -> bool: + if not isinstance(v, Version): + return False + return self._major == v._major and self._minor == v._minor + + def __lt__(self, v: Version) -> bool: + if self._major < v._major: + return True + if self._major > v._major: + return False + return self._minor < v._minor + + def __le__(self, v: Version) -> bool: + if self._major < v._major: + return True + if self._major > v._major: + return False + return self._minor <= v._minor + + def __gt__(self, v: Version) -> bool: + if self._major > v._major: + return True + if self._major < v._major: + return False + return self._minor > v._minor + + def __ge__(self, v: Version) -> bool: + if self._major > v._major: + return True + if self._major < v._major: + return False + return self._minor >= v._minor + + +def version( + major: int | str | Tuple[int, int] | None, + minor: Optional[int] = None, +) -> Optional[Version]: + if major is None: + assert minor is None + return None + if isinstance(major, str): + assert minor is None + parts = major.split('.') + assert len(parts) == 2 + return Version(int(parts[0]), int(parts[1])) + elif isinstance(major, tuple): + assert minor is None + assert len(major) == 2 + major, minor = major + assert minor is not None + return Version(major, minor) + + +# @dataclass(frozen=True) +class Tag: + # handle: str + # prefix: str + def __init__(self, handle: str, prefix: str) -> None: + self._handle = handle + self._prefix = prefix + + @property + def handle(self) -> str: + return self._handle + + @property + def prefix(self) -> 
str: + return self._prefix + + +# @dataclass +class DocInfo: + """ + Store document information, can be used for analysis of a loaded YAML document + requested_version: if explicitly set before load + doc_version: from %YAML directive + tags: from %TAG directives in scanned order + """ + # requested_version: Optional[Version] = None + # doc_version: Optional[Version] = None + # tags: list[Tag] = field(default_factory=list) + def __init__( + self, + requested_version: Optional[Version] = None, + doc_version: Optional[Version] = None, + tags: Optional[list[Tag]] = None, + ): + self.requested_version = requested_version + self.doc_version = doc_version + self.tags = [] if tags is None else tags diff --git a/pipenv/vendor/ruamel/yaml/dumper.py b/pipenv/vendor/ruamel/yaml/dumper.py index 530b1aed4a..29a904eba1 100644 --- a/pipenv/vendor/ruamel/yaml/dumper.py +++ b/pipenv/vendor/ruamel/yaml/dumper.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.emitter import Emitter from pipenv.vendor.ruamel.yaml.serializer import Serializer @@ -10,8 +11,9 @@ ) from pipenv.vendor.ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver -from typing import Any, Dict, List, Union, Optional # NOQA -from pipenv.vendor.ruamel.yaml.compat import StreamType, VersionType # NOQA +if False: # MYPY + from typing import Any, Dict, List, Union, Optional # NOQA + from pipenv.vendor.ruamel.yaml.compat import StreamType, VersionType # NOQA __all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper'] diff --git a/pipenv/vendor/ruamel/yaml/emitter.py b/pipenv/vendor/ruamel/yaml/emitter.py index 13c45c2eaa..17266b3e1d 100644 --- a/pipenv/vendor/ruamel/yaml/emitter.py +++ b/pipenv/vendor/ruamel/yaml/emitter.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations # Emitter expects events obeying the following grammar: # stream ::= STREAM-START document* STREAM-END @@ -17,8 +18,9 @@ # fmt: on -from typing import 
Any, Dict, List, Union, Text, Tuple, Optional # NOQA -from pipenv.vendor.ruamel.yaml.compat import StreamType # NOQA +if False: # MYPY + from typing import Any, Dict, List, Union, Text, Tuple, Optional # NOQA + from pipenv.vendor.ruamel.yaml.compat import StreamType # NOQA __all__ = ['Emitter', 'EmitterError'] @@ -48,6 +50,9 @@ def __init__( self.allow_double_quoted = allow_double_quoted self.allow_block = allow_block + def __repr__(self) -> str: + return f'scalar={self.scalar!r}, empty={self.empty}, multiline={self.multiline}, allow_flow_plain={self.allow_flow_plain}, allow_block_plain={self.allow_block_plain}, allow_single_quoted={self.allow_single_quoted}, allow_double_quoted={self.allow_double_quoted}, allow_block={self.allow_block}' # NOQA + class Indents: # replacement for the list based stack of None/int @@ -60,6 +65,14 @@ def append(self, val: Any, seq: Any) -> None: def pop(self) -> Any: return self.values.pop()[0] + def seq_seq(self) -> bool: + try: + if self.values[-2][1] and self.values[-1][1]: + return True + except IndexError: + pass + return False + def last_seq(self) -> bool: # return the seq(uence) value for the element added before the last one # in increase_indent() @@ -416,7 +429,6 @@ def expect_node( # nprint('@', self.indention, self.no_newline, self.column) self.expect_scalar() elif isinstance(self.event, SequenceStartEvent): - # nprint('@', self.indention, self.no_newline, self.column) i2, n2 = self.indention, self.no_newline # NOQA if self.event.comment: if self.event.flow_style is False: @@ -442,6 +454,10 @@ def expect_node( self.expect_flow_sequence(force_flow_indent) else: self.expect_block_sequence() + if self.indents.seq_seq(): + # - - + self.indention = True + self.no_newline = False elif isinstance(self.event, MappingStartEvent): if self.event.flow_style is False and self.event.comment: self.write_post_comment(self.event) @@ -612,7 +628,8 @@ def expect_flow_mapping_key(self) -> None: self.expect_node(mapping=True) def 
expect_flow_mapping_simple_value(self) -> None: - self.write_indicator(self.prefixed_colon, False) + if getattr(self.event, 'style', '?') != '-': # suppress for flow style sets + self.write_indicator(self.prefixed_colon, False) self.states.append(self.expect_flow_mapping_key) self.expect_node(mapping=True) @@ -841,7 +858,7 @@ def choose_scalar_style(self) -> Any: self.analysis = self.analyze_scalar(self.event.value) if self.event.style == '"' or self.canonical: return '"' - if (not self.event.style or self.event.style == '?') and ( + if (not self.event.style or self.event.style == '?' or self.event.style == '-') and ( self.event.implicit[0] or not self.event.implicit[2] ): if not ( @@ -852,6 +869,8 @@ def choose_scalar_style(self) -> Any: or (not self.flow_level and self.analysis.allow_block_plain) ): return "" + if self.event.style == '-': + return "" self.analysis.allow_block = True if self.event.style and self.event.style in '|>': if ( @@ -889,7 +908,7 @@ def process_scalar(self) -> None: elif self.style == '>': try: cmx = self.event.comment[1][0] - except (IndexError, TypeError): + except (IndexError, TypeError) as e: # NOQA cmx = "" self.write_folded(self.analysis.scalar, cmx) if ( @@ -1712,6 +1731,8 @@ def write_comment(self, comment: Any, pre: bool = False) -> None: self.write_line_break() def write_pre_comment(self, event: Any) -> bool: + if event.comment is None: + return False comments = event.comment[1] if comments is None: return False @@ -1743,7 +1764,6 @@ def prepare_tag(self, ctag: Any) -> Any: if not ctag: raise EmitterError('tag must not be empty') tag = str(ctag) - # print('handling', repr(tag)) if tag == '!' 
or tag == '!!': return tag handle = ctag.handle diff --git a/pipenv/vendor/ruamel/yaml/error.py b/pipenv/vendor/ruamel/yaml/error.py index 34d3cb3306..b2acf8c1e1 100644 --- a/pipenv/vendor/ruamel/yaml/error.py +++ b/pipenv/vendor/ruamel/yaml/error.py @@ -1,9 +1,11 @@ -# coding: utf-8 + +from __future__ import annotations import warnings -import textwrap +# import textwrap -from typing import Any, Dict, Optional, List, Text # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Text # NOQA __all__ = [ @@ -133,7 +135,7 @@ def __init__( # warn is ignored def __str__(self) -> Any: - lines: List[str] = [] + lines: list[str] = [] if self.context is not None: lines.append(self.context) if self.context_mark is not None and ( @@ -148,11 +150,20 @@ def __str__(self) -> Any: lines.append(self.problem) if self.problem_mark is not None: lines.append(str(self.problem_mark)) - if self.note is not None and self.note: - note = textwrap.dedent(self.note) - lines.append(note) + # if self.note is not None and self.note: + # note = textwrap.dedent(self.note) + # lines.append(note) + self.check_append(lines, self.note) return '\n'.join(lines) + def check_append(self, lines: list[str], val: Optional[str]) -> None: + if val is None or not val: + return + import textwrap + + note = textwrap.dedent(val) + lines.append(note) + class YAMLStreamError(Exception): pass @@ -195,14 +206,24 @@ def __str__(self) -> Any: lines.append(self.problem) if self.problem_mark is not None: lines.append(str(self.problem_mark)) - if self.note is not None and self.note: - note = textwrap.dedent(self.note) - lines.append(note) - if self.warn is not None and self.warn: - warn = textwrap.dedent(self.warn) - lines.append(warn) + # if self.note is not None and self.note: + # note = textwrap.dedent(self.note) + # lines.append(note) + self.check_append(lines, self.note) + # if self.warn is not None and self.warn: + # warn = textwrap.dedent(self.warn) + # lines.append(warn) + self.check_append(lines, 
self.warn) return '\n'.join(lines) + def check_append(self, lines: list[str], val: Optional[str]) -> None: + if val is None or not val: + return + import textwrap + + note = textwrap.dedent(val) + lines.append(note) + class ReusedAnchorWarning(YAMLWarning): pass @@ -288,10 +309,20 @@ def __str__(self) -> Any: lines.append(self.problem) if self.problem_mark is not None: lines.append(str(self.problem_mark)) - if self.note is not None and self.note: - note = textwrap.dedent(self.note) - lines.append(note) - if self.warn is not None and self.warn: - warn = textwrap.dedent(self.warn) - lines.append(warn) + # if self.note is not None and self.note: + # note = textwrap.dedent(self.note) + # lines.append(note) + self.check_append(lines, self.note) + # if self.warn is not None and self.warn: + # warn = textwrap.dedent(self.warn) + # lines.append(warn) + self.check_append(lines, self.warn) return '\n'.join(lines) + + def check_append(self, lines: list[str], val: Optional[str]) -> None: + if val is None or not val: + return + import textwrap + + note = textwrap.dedent(val) + lines.append(note) diff --git a/pipenv/vendor/ruamel/yaml/events.py b/pipenv/vendor/ruamel/yaml/events.py index d4f9be22e6..2ab9cf5509 100644 --- a/pipenv/vendor/ruamel/yaml/events.py +++ b/pipenv/vendor/ruamel/yaml/events.py @@ -1,8 +1,10 @@ -# coding: utf-8 + +from __future__ import annotations # Abstract classes. 
-from typing import Any, Dict, Optional, List # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List # NOQA from pipenv.vendor.ruamel.yaml.tag import Tag SHOW_LINES = False diff --git a/pipenv/vendor/ruamel/yaml/loader.py b/pipenv/vendor/ruamel/yaml/loader.py index 75a9b83c2e..5b2c0483b2 100644 --- a/pipenv/vendor/ruamel/yaml/loader.py +++ b/pipenv/vendor/ruamel/yaml/loader.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.reader import Reader from pipenv.vendor.ruamel.yaml.scanner import Scanner, RoundTripScanner @@ -12,8 +13,9 @@ ) from pipenv.vendor.ruamel.yaml.resolver import VersionedResolver -from typing import Any, Dict, List, Union, Optional # NOQA -from pipenv.vendor.ruamel.yaml.compat import StreamTextType, VersionType # NOQA +if False: # MYPY + from typing import Any, Dict, List, Union, Optional # NOQA + from pipenv.vendor.ruamel.yaml.compat import StreamTextType, VersionType # NOQA __all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader'] diff --git a/pipenv/vendor/ruamel/yaml/main.py b/pipenv/vendor/ruamel/yaml/main.py index db6209f7f1..97ae08e2c9 100644 --- a/pipenv/vendor/ruamel/yaml/main.py +++ b/pipenv/vendor/ruamel/yaml/main.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations import sys import os @@ -32,11 +33,13 @@ ) from pipenv.vendor.ruamel.yaml.loader import Loader as UnsafeLoader # NOQA from pipenv.vendor.ruamel.yaml.comments import CommentedMap, CommentedSeq, C_PRE +from pipenv.vendor.ruamel.yaml.docinfo import DocInfo, version, Version -from typing import List, Set, Dict, Union, Any, Callable, Optional, Text, Type # NOQA -from types import TracebackType -from pipenv.vendor.ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA -from pathlib import Path # NOQA +if False: # MYPY + from typing import List, Set, Dict, Tuple, Union, Any, Callable, Optional, Text, Type # NOQA + from pipenv.vendor.ruamel.yaml.compat import 
StreamType, StreamTextType, VersionType # NOQA + from types import TracebackType + from pathlib import Path try: from _ruamel_yaml import CParser, CEmitter # type: ignore @@ -62,7 +65,9 @@ def __init__( """ typ: 'rt'/None -> RoundTripLoader/RoundTripDumper, (default) 'safe' -> SafeLoader/SafeDumper, - 'unsafe' -> normal/unsafe Loader/Dumper + 'unsafe' -> normal/unsafe Loader/Dumper (pending deprecation) + 'full' -> full Dumper only, including python built-ins that are + potentially unsafe to load 'base' -> baseloader pure: if True only use Python modules input/output: needed to work as context manager @@ -108,6 +113,11 @@ def __init__( self.Composer = ruamel.composer.Composer self.Constructor = ruamel.constructor.BaseConstructor elif 'unsafe' in self.typ: + warnings.warn( + "\nyou should no longer specify 'unsafe'.\nFor **dumping only** use yaml=YAML(typ='full')\n", # NOQA + PendingDeprecationWarning, + stacklevel=2, + ) self.Emitter = ( ruamel.emitter.Emitter if pure or CEmitter is None else CEmitter ) @@ -115,6 +125,14 @@ def __init__( self.Parser = ruamel.parser.Parser if pure or CParser is None else CParser self.Composer = ruamel.composer.Composer self.Constructor = ruamel.constructor.Constructor + elif 'full' in self.typ: + self.Emitter = ( + ruamel.emitter.Emitter if pure or CEmitter is None else CEmitter + ) + self.Representer = ruamel.representer.Representer + self.Parser = ruamel.parser.Parser if pure or CParser is None else CParser + # self.Composer = ruamel.composer.Composer + # self.Constructor = ruamel.constructor.Constructor elif 'rtsc' in self.typ: self.default_flow_style = False # no optimized rt-dumper yet @@ -163,7 +181,8 @@ def __init__( self.encoding = 'utf-8' self.explicit_start: Union[bool, None] = None self.explicit_end: Union[bool, None] = None - self.tags = None + self._tags = None + self.doc_infos: List[DocInfo] = [] self.default_style = None self.top_level_block_style_scalar_no_indent_error_1_1 = False # directives end indicator with 
single scalar document @@ -228,17 +247,25 @@ def composer(self) -> Any: def constructor(self) -> Any: attr = '_' + sys._getframe().f_code.co_name if not hasattr(self, attr): - cnst = self.Constructor(preserve_quotes=self.preserve_quotes, loader=self) + if self.Constructor is None: + if 'full' in self.typ: + raise YAMLError( + "\nyou can only use yaml=YAML(typ='full') for dumping\n", # NOQA + ) + cnst = self.Constructor(preserve_quotes=self.preserve_quotes, loader=self) # type: ignore # NOQA cnst.allow_duplicate_keys = self.allow_duplicate_keys setattr(self, attr, cnst) return getattr(self, attr) @property def resolver(self) -> Any: - attr = '_' + sys._getframe().f_code.co_name - if not hasattr(self, attr): - setattr(self, attr, self.Resolver(version=self.version, loader=self)) - return getattr(self, attr) + try: + rslvr = self._resolver # type: ignore + except AttributeError: + rslvr = None + if rslvr is None or rslvr._loader_version != self.version: + rslvr = self._resolver = self.Resolver(version=self.version, loader=self) + return rslvr @property def emitter(self) -> Any: @@ -315,20 +342,19 @@ def scan(self, stream: StreamTextType) -> Any: # pathlib.Path() instance with stream.open('rb') as fp: return self.scan(fp) + self.doc_infos.append(DocInfo(requested_version=version(self.version))) + self.tags = {} _, parser = self.get_constructor_parser(stream) try: while self.scanner.check_token(): yield self.scanner.get_token() finally: parser.dispose() - try: - self._reader.reset_reader() - except AttributeError: - pass - try: - self._scanner.reset_scanner() - except AttributeError: - pass + for comp in ('reader', 'scanner'): + try: + getattr(getattr(self, '_' + comp), f'reset_{comp}')() + except AttributeError: + pass def parse(self, stream: StreamTextType) -> Any: """ @@ -338,20 +364,19 @@ def parse(self, stream: StreamTextType) -> Any: # pathlib.Path() instance with stream.open('rb') as fp: return self.parse(fp) + 
self.doc_infos.append(DocInfo(requested_version=version(self.version))) + self.tags = {} _, parser = self.get_constructor_parser(stream) try: while parser.check_event(): yield parser.get_event() finally: parser.dispose() - try: - self._reader.reset_reader() - except AttributeError: - pass - try: - self._scanner.reset_scanner() - except AttributeError: - pass + for comp in ('reader', 'scanner'): + try: + getattr(getattr(self, '_' + comp), f'reset_{comp}')() + except AttributeError: + pass def compose(self, stream: Union[Path, StreamTextType]) -> Any: """ @@ -362,39 +387,37 @@ def compose(self, stream: Union[Path, StreamTextType]) -> Any: # pathlib.Path() instance with stream.open('rb') as fp: return self.compose(fp) + self.doc_infos.append(DocInfo(requested_version=version(self.version))) + self.tags = {} constructor, parser = self.get_constructor_parser(stream) try: return constructor.composer.get_single_node() finally: parser.dispose() - try: - self._reader.reset_reader() - except AttributeError: - pass - try: - self._scanner.reset_scanner() - except AttributeError: - pass + for comp in ('reader', 'scanner'): + try: + getattr(getattr(self, '_' + comp), f'reset_{comp}')() + except AttributeError: + pass def compose_all(self, stream: Union[Path, StreamTextType]) -> Any: """ Parse all YAML documents in a stream and produce corresponding representation trees. """ + self.doc_infos.append(DocInfo(requested_version=version(self.version))) + self.tags = {} constructor, parser = self.get_constructor_parser(stream) try: while constructor.composer.check_node(): yield constructor.composer.get_node() finally: parser.dispose() - try: - self._reader.reset_reader() - except AttributeError: - pass - try: - self._scanner.reset_scanner() - except AttributeError: - pass + for comp in ('reader', 'scanner'): + try: + getattr(getattr(self, '_' + comp), f'reset_{comp}')() + except AttributeError: + pass # separate output resolver? 
@@ -421,19 +444,18 @@ def load(self, stream: Union[Path, StreamTextType]) -> Any: # pathlib.Path() instance with stream.open('rb') as fp: return self.load(fp) + self.doc_infos.append(DocInfo(requested_version=version(self.version))) + self.tags = {} constructor, parser = self.get_constructor_parser(stream) try: return constructor.get_single_data() finally: parser.dispose() - try: - self._reader.reset_reader() - except AttributeError: - pass - try: - self._scanner.reset_scanner() - except AttributeError: - pass + for comp in ('reader', 'scanner'): + try: + getattr(getattr(self, '_' + comp), f'reset_{comp}')() + except AttributeError: + pass def load_all(self, stream: Union[Path, StreamTextType]) -> Any: # *, skip=None): if not hasattr(stream, 'read') and hasattr(stream, 'open'): @@ -446,25 +468,30 @@ def load_all(self, stream: Union[Path, StreamTextType]) -> Any: # *, skip=None) # skip = [] # elif isinstance(skip, int): # skip = [skip] + self.doc_infos.append(DocInfo(requested_version=version(self.version))) + self.tags = {} constructor, parser = self.get_constructor_parser(stream) try: while constructor.check_data(): yield constructor.get_data() + self.doc_infos.append(DocInfo(requested_version=version(self.version))) finally: parser.dispose() - try: - self._reader.reset_reader() - except AttributeError: - pass - try: - self._scanner.reset_scanner() - except AttributeError: - pass + for comp in ('reader', 'scanner'): + try: + getattr(getattr(self, '_' + comp), f'reset_{comp}')() + except AttributeError: + pass def get_constructor_parser(self, stream: StreamTextType) -> Any: """ the old cyaml needs special setup, and therefore the stream """ + if self.Constructor is None: + if 'full' in self.typ: + raise YAMLError( + "\nyou can only use yaml=YAML(typ='full') for dumping\n", # NOQA + ) if self.Parser is not CParser: if self.Reader is None: self.Reader = ruamel.reader.Reader @@ -705,6 +732,7 @@ def __init__( width=self.width, allow_unicode=self.allow_unicode, 
line_break=self.line_break, + encoding=self.encoding, explicit_start=self.explicit_start, explicit_end=self.explicit_end, version=self.version, @@ -798,23 +826,35 @@ def _indent(self, mapping: Any = None, sequence: Any = None, offset: Any = None) self.sequence_dash_offset = offset @property - def version(self) -> Optional[Any]: + def version(self) -> Optional[Tuple[int, int]]: return self._version @version.setter - def version(self, val: Optional[VersionType]) -> None: + def version(self, val: VersionType) -> None: if val is None: self._version = val return - if isinstance(val, str): + elif isinstance(val, str): sval = tuple(int(x) for x in val.split('.')) - else: + elif isinstance(val, (list, tuple)): sval = tuple(int(x) for x in val) + elif isinstance(val, Version): + sval = (val.major, val.minor) + else: + raise TypeError(f'unknown version type {type(val)}') assert len(sval) == 2, f'version can only have major.minor, got {val}' assert sval[0] == 1, f'version major part can only be 1, got {val}' assert sval[1] in [1, 2], f'version minor part can only be 2 or 1, got {val}' self._version = sval + @property + def tags(self) -> Any: + return self._tags + + @tags.setter + def tags(self, val: Any) -> None: + self._tags = val + @property def indent(self) -> Any: return self._indent @@ -982,15 +1022,26 @@ def warn_deprecation(fun: Any, method: Any, arg: str = '') -> None: ) -def error_deprecation(fun: Any, method: Any, arg: str = '') -> None: - warnings.warn( - f'\n{fun} has been removed, use\n\n yaml=YAML({arg})\n yaml.{method}(...)\n\ninstead', # NOQA - DeprecationWarning, - stacklevel=3, - ) - sys.exit(1) +def error_deprecation(fun: Any, method: Any, arg: str = '', comment: str = 'instead of') -> None: # NOQA + import inspect + + s = f'\n"{fun}()" has been removed, use\n\n yaml = YAML({arg})\n yaml.{method}(...)\n\n{comment}' # NOQA + try: + info = inspect.getframeinfo(inspect.stack()[2][0]) + context = '' if info.code_context is None else "".join(info.code_context) + 
s += f' file "{info.filename}", line {info.lineno}\n\n{context}' + except Exception as e: + _ = e + s += '\n' + if sys.version_info < (3, 10): + raise AttributeError(s) + else: + raise AttributeError(s, name=None) +_error_dep_arg = "typ='rt'" +_error_dep_comment = "and register any classes that you use, or check the tag attribute on the loaded data,\ninstead of" # NOQA + ######################################################################################## @@ -998,26 +1049,14 @@ def scan(stream: StreamTextType, Loader: Any = Loader) -> Any: """ Scan a YAML stream and produce scanning tokens. """ - warn_deprecation('scan', 'scan', arg="typ='unsafe', pure=True") - loader = Loader(stream) - try: - while loader.scanner.check_token(): - yield loader.scanner.get_token() - finally: - loader._parser.dispose() + error_deprecation('scan', 'scan', arg=_error_dep_arg, comment=_error_dep_comment) def parse(stream: StreamTextType, Loader: Any = Loader) -> Any: """ Parse a YAML stream and produce parsing events. """ - warn_deprecation('parse', 'parse', arg="typ='unsafe', pure=True") - loader = Loader(stream) - try: - while loader._parser.check_event(): - yield loader._parser.get_event() - finally: - loader._parser.dispose() + error_deprecation('parse', 'parse', arg=_error_dep_arg, comment=_error_dep_comment) def compose(stream: StreamTextType, Loader: Any = Loader) -> Any: @@ -1025,12 +1064,7 @@ def compose(stream: StreamTextType, Loader: Any = Loader) -> Any: Parse the first YAML document in a stream and produce the corresponding representation tree. 
""" - warn_deprecation('compose', 'compose', arg="typ='unsafe', pure=True") - loader = Loader(stream) - try: - return loader.get_single_node() - finally: - loader.dispose() + error_deprecation('compose', 'compose', arg=_error_dep_arg, comment=_error_dep_comment) def compose_all(stream: StreamTextType, Loader: Any = Loader) -> Any: @@ -1038,13 +1072,7 @@ def compose_all(stream: StreamTextType, Loader: Any = Loader) -> Any: Parse all YAML documents in a stream and produce corresponding representation trees. """ - warn_deprecation('compose', 'compose', arg="typ='unsafe', pure=True") - loader = Loader(stream) - try: - while loader.check_node(): - yield loader._composer.get_node() - finally: - loader._parser.dispose() + error_deprecation('compose', 'compose', arg=_error_dep_arg, comment=_error_dep_comment) def load( @@ -1054,23 +1082,7 @@ def load( Parse the first YAML document in a stream and produce the corresponding Python object. """ - warn_deprecation('load', 'load', arg="typ='unsafe', pure=True") - if Loader is None: - warnings.warn(UnsafeLoaderWarning.text, UnsafeLoaderWarning, stacklevel=2) - Loader = UnsafeLoader - loader = Loader(stream, version, preserve_quotes=preserve_quotes) # type: Any - try: - return loader._constructor.get_single_data() - finally: - loader._parser.dispose() - try: - loader._reader.reset_reader() - except AttributeError: - pass - try: - loader._scanner.reset_scanner() - except AttributeError: - pass + error_deprecation('load', 'load', arg=_error_dep_arg, comment=_error_dep_comment) def load_all( @@ -1081,24 +1093,7 @@ def load_all( Parse all YAML documents in a stream and produce corresponding Python objects. 
""" - warn_deprecation('load_all', 'load_all', arg="typ='unsafe', pure=True") - if Loader is None: - warnings.warn(UnsafeLoaderWarning.text, UnsafeLoaderWarning, stacklevel=2) - Loader = UnsafeLoader - loader = Loader(stream, version, preserve_quotes=preserve_quotes) # type: Any - try: - while loader._constructor.check_data(): - yield loader._constructor.get_data() - finally: - loader._parser.dispose() - try: - loader._reader.reset_reader() - except AttributeError: - pass - try: - loader._scanner.reset_scanner() - except AttributeError: - pass + error_deprecation('load_all', 'load_all', arg=_error_dep_arg, comment=_error_dep_comment) def safe_load(stream: StreamTextType, version: Optional[VersionType] = None) -> Any: @@ -1107,8 +1102,7 @@ def safe_load(stream: StreamTextType, version: Optional[VersionType] = None) -> and produce the corresponding Python object. Resolve only basic YAML tags. """ - warn_deprecation('safe_load', 'load', arg="typ='safe', pure=True") - return load(stream, SafeLoader, version) + error_deprecation('safe_load', 'load', arg="typ='safe', pure=True") def safe_load_all(stream: StreamTextType, version: Optional[VersionType] = None) -> Any: @@ -1117,8 +1111,7 @@ def safe_load_all(stream: StreamTextType, version: Optional[VersionType] = None) and produce corresponding Python objects. Resolve only basic YAML tags. """ - warn_deprecation('safe_load_all', 'load_all', arg="typ='safe', pure=True") - return load_all(stream, SafeLoader, version) + error_deprecation('safe_load_all', 'load_all', arg="typ='safe', pure=True") def round_trip_load( @@ -1131,8 +1124,7 @@ def round_trip_load( and produce the corresponding Python object. Resolve only basic YAML tags. """ - warn_deprecation('round_trip_load_all', 'load') - return load(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes) + error_deprecation('round_trip_load_all', 'load') def round_trip_load_all( @@ -1145,8 +1137,7 @@ def round_trip_load_all( and produce corresponding Python objects. 
Resolve only basic YAML tags. """ - warn_deprecation('round_trip_load_all', 'load_all') - return load_all(stream, RoundTripLoader, version, preserve_quotes=preserve_quotes) + error_deprecation('round_trip_load_all', 'load_all') def emit( @@ -1164,30 +1155,7 @@ def emit( Emit YAML parsing events into a stream. If stream is None, return the produced string instead. """ - warn_deprecation('emit', 'emit', arg="typ='safe', pure=True") - getvalue = None - if stream is None: - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper( - stream, - canonical=canonical, - indent=indent, - width=width, - allow_unicode=allow_unicode, - line_break=line_break, - ) - try: - for event in events: - dumper.emit(event) - finally: - try: - dumper._emitter.dispose() - except AttributeError: - raise - dumper.dispose() # cyaml - if getvalue is not None: - return getvalue() + error_deprecation('emit', 'emit', arg="typ='safe', pure=True") enc = None @@ -1213,40 +1181,7 @@ def serialize_all( Serialize a sequence of representation trees into a YAML stream. If stream is None, return the produced string instead. 
""" - warn_deprecation('serialize_all', 'serialize_all', arg="typ='safe', pure=True") - getvalue = None - if stream is None: - if encoding is None: - stream = StringIO() - else: - stream = BytesIO() - getvalue = stream.getvalue - dumper = Dumper( - stream, - canonical=canonical, - indent=indent, - width=width, - allow_unicode=allow_unicode, - line_break=line_break, - encoding=encoding, - version=version, - tags=tags, - explicit_start=explicit_start, - explicit_end=explicit_end, - ) - try: - dumper._serializer.open() - for node in nodes: - dumper.serialize(node) - dumper._serializer.close() - finally: - try: - dumper._emitter.dispose() - except AttributeError: - raise - dumper.dispose() # cyaml - if getvalue is not None: - return getvalue() + error_deprecation('serialize_all', 'serialize_all', arg="typ='safe', pure=True") def serialize( @@ -1256,8 +1191,7 @@ def serialize( Serialize a representation tree into a YAML stream. If stream is None, return the produced string instead. """ - warn_deprecation('serialize', 'serialize', arg="typ='safe', pure=True") - return serialize_all([node], stream, Dumper=Dumper, **kwds) + error_deprecation('serialize', 'serialize', arg="typ='safe', pure=True") def dump_all( @@ -1285,52 +1219,7 @@ def dump_all( Serialize a sequence of Python objects into a YAML stream. If stream is None, return the produced string instead. 
""" - warn_deprecation('dump_all', 'dump_all', arg="typ='unsafe', pure=True") - getvalue = None - if top_level_colon_align is True: - top_level_colon_align = max([len(str(x)) for x in documents[0]]) - if stream is None: - if encoding is None: - stream = StringIO() - else: - stream = BytesIO() - getvalue = stream.getvalue - dumper = Dumper( - stream, - default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, - indent=indent, - width=width, - allow_unicode=allow_unicode, - line_break=line_break, - encoding=encoding, - explicit_start=explicit_start, - explicit_end=explicit_end, - version=version, - tags=tags, - block_seq_indent=block_seq_indent, - top_level_colon_align=top_level_colon_align, - prefix_colon=prefix_colon, - ) - try: - dumper._serializer.open() - for data in documents: - try: - dumper._representer.represent(data) - except AttributeError: - # nprint(dir(dumper._representer)) - raise - dumper._serializer.close() - finally: - try: - dumper._emitter.dispose() - except AttributeError: - raise - dumper.dispose() # cyaml - if getvalue is not None: - return getvalue() - return None + error_deprecation('dump_all', 'dump_all', arg="typ='unsafe', pure=True") def dump( @@ -1359,25 +1248,7 @@ def dump( default_style ∈ None, '', '"', "'", '|', '>' """ - warn_deprecation('dump', 'dump', arg="typ='unsafe', pure=True") - return dump_all( - [data], - stream, - Dumper=Dumper, - default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, - indent=indent, - width=width, - allow_unicode=allow_unicode, - line_break=line_break, - encoding=encoding, - explicit_start=explicit_start, - explicit_end=explicit_end, - version=version, - tags=tags, - block_seq_indent=block_seq_indent, - ) + error_deprecation('dump', 'dump', arg="typ='unsafe', pure=True") def safe_dump(data: Any, stream: Optional[StreamType] = None, **kwds: Any) -> Any: @@ -1386,8 +1257,7 @@ def safe_dump(data: Any, stream: Optional[StreamType] = None, 
**kwds: Any) -> An Produce only basic YAML tags. If stream is None, return the produced string instead. """ - warn_deprecation('safe_dump', 'dump', arg="typ='safe', pure=True") - return dump_all([data], stream, Dumper=SafeDumper, **kwds) + error_deprecation('safe_dump', 'dump', arg="typ='safe', pure=True") def round_trip_dump( @@ -1411,27 +1281,7 @@ def round_trip_dump( prefix_colon: Any = None, ) -> Any: allow_unicode = True if allow_unicode is None else allow_unicode - warn_deprecation('round_trip_dump', 'dump') - return dump_all( - [data], - stream, - Dumper=Dumper, - default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, - indent=indent, - width=width, - allow_unicode=allow_unicode, - line_break=line_break, - encoding=encoding, - explicit_start=explicit_start, - explicit_end=explicit_end, - version=version, - tags=tags, - block_seq_indent=block_seq_indent, - top_level_colon_align=top_level_colon_align, - prefix_colon=prefix_colon, - ) + error_deprecation('round_trip_dump', 'dump') # Loader/Dumper are no longer composites, to get to the associated diff --git a/pipenv/vendor/ruamel/yaml/nodes.py b/pipenv/vendor/ruamel/yaml/nodes.py index b7eae2071e..07b1099098 100644 --- a/pipenv/vendor/ruamel/yaml/nodes.py +++ b/pipenv/vendor/ruamel/yaml/nodes.py @@ -1,8 +1,10 @@ -# coding: utf-8 + +from __future__ import annotations import sys -from typing import Dict, Any, Text, Optional # NOQA +if False: # MYPY + from typing import Dict, Any, Text, Optional # NOQA from pipenv.vendor.ruamel.yaml.tag import Tag @@ -78,6 +80,7 @@ class ScalarNode(Node): """ styles: ? -> set() ? 
key, no value + - -> suppressable null value in set " -> double quoted ' -> single quoted | -> literal style diff --git a/pipenv/vendor/ruamel/yaml/parser.py b/pipenv/vendor/ruamel/yaml/parser.py index 17fe801abf..90166d5f38 100644 --- a/pipenv/vendor/ruamel/yaml/parser.py +++ b/pipenv/vendor/ruamel/yaml/parser.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations # The following YAML grammar is LL(1) and is parsed by a recursive descent # parser. @@ -83,7 +84,8 @@ from pipenv.vendor.ruamel.yaml.compat import nprint, nprintf # NOQA from pipenv.vendor.ruamel.yaml.tag import Tag -from typing import Any, Dict, Optional, List, Optional # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Optional # NOQA __all__ = ['Parser', 'RoundTripParser', 'ParserError'] @@ -314,11 +316,14 @@ def process_directives(self) -> Any: else: value = yaml_version, None if self.loader is not None and hasattr(self.loader, 'tags'): + # ToDo: this is used to keep a single loaded file from losing its version + # info, but it affects following versions that have no explicit directive self.loader.version = yaml_version if self.loader.tags is None: self.loader.tags = {} for k in self.tag_handles: self.loader.tags[k] = self.tag_handles[k] + self.loader.doc_infos[-1].tags.append((k, self.tag_handles[k])) for key in self.DEFAULT_TAGS: if key not in self.tag_handles: self.tag_handles[key] = self.DEFAULT_TAGS[key] @@ -383,6 +388,10 @@ def parse_node(self, block: bool = False, indentless_sequence: bool = False) -> ) elif self.scanner.check_token(TagToken): token = self.scanner.get_token() + try: + self.move_token_comment(token) + except NotImplementedError: + pass start_mark = tag_mark = token.start_mark end_mark = token.end_mark # tag = token.value @@ -412,6 +421,9 @@ def parse_node(self, block: bool = False, indentless_sequence: bool = False) -> if pt.comment and pt.comment[0]: comment = [pt.comment[0], []] pt.comment[0] = None + elif pt.comment and pt.comment[0] 
is None and pt.comment[1]: + comment = [None, pt.comment[1]] + pt.comment[1] = None elif self.loader: if pt.comment: comment = pt.comment @@ -432,7 +444,6 @@ def parse_node(self, block: bool = False, indentless_sequence: bool = False) -> dimplicit = (False, True) else: dimplicit = (False, False) - # nprint('se', token.value, token.comment) event = ScalarEvent( anchor, tag, @@ -821,8 +832,8 @@ def distribute_comment(self, comment: Any, line: Any) -> Any: return None if not comment[0]: return None - if comment[0][0] != line + 1: - nprintf('>>>dcxxx', comment, line) + # if comment[0][0] != line + 1: + # nprintf('>>>dcxxx', comment, line) assert comment[0][0] == line + 1 # if comment[0] - line > 1: # return diff --git a/pipenv/vendor/ruamel/yaml/reader.py b/pipenv/vendor/ruamel/yaml/reader.py index 4c892862d2..4ac9d24912 100644 --- a/pipenv/vendor/ruamel/yaml/reader.py +++ b/pipenv/vendor/ruamel/yaml/reader.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations # This module contains abstractions for the input stream. You don't have to # looks further, there are no pretty code. 
@@ -24,7 +25,8 @@ from pipenv.vendor.ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError from pipenv.vendor.ruamel.yaml.util import RegExp -from typing import Any, Dict, Optional, List, Union, Text, Tuple, Optional # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Union, Text, Tuple, Optional # NOQA # from ruamel.compat import StreamTextType # NOQA __all__ = ['Reader', 'ReaderError'] diff --git a/pipenv/vendor/ruamel/yaml/representer.py b/pipenv/vendor/ruamel/yaml/representer.py index 731c7c3363..d63e5018bb 100644 --- a/pipenv/vendor/ruamel/yaml/representer.py +++ b/pipenv/vendor/ruamel/yaml/representer.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.error import * # NOQA from pipenv.vendor.ruamel.yaml.nodes import * # NOQA @@ -35,7 +36,8 @@ import copyreg import base64 -from typing import Dict, List, Any, Union, Text, Optional # NOQA +if False: # MYPY + from typing import Dict, List, Any, Union, Text, Optional # NOQA # fmt: off __all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', @@ -980,7 +982,8 @@ def represent_set(self, setting: Any) -> MappingNode: if item_comment: assert getattr(node_key, 'comment', None) is None node_key.comment = item_comment[:2] - node_key.style = node_value.style = '?' + node_key.style = '?' + node_value.style = '-' if flow_style else '?' 
if not (isinstance(node_key, ScalarNode) and not node_key.style): best_style = False if not (isinstance(node_value, ScalarNode) and not node_value.style): @@ -1023,13 +1026,16 @@ def represent_list(self, data: Any) -> SequenceNode: def represent_datetime(self, data: Any) -> ScalarNode: inter = 'T' if data._yaml['t'] else ' ' _yaml = data._yaml - if _yaml['delta']: + if False and _yaml['delta']: data += _yaml['delta'] value = data.isoformat(inter) else: - value = data.isoformat(inter) - if _yaml['tz']: + value = data.isoformat(inter).strip() + if False and _yaml['tz']: value += _yaml['tz'] + if data.tzinfo and str(data.tzinfo): + if value[-6] in '+-': + value = value[:-6] + str(data.tzinfo) return self.represent_scalar('tag:yaml.org,2002:timestamp', value) def represent_tagged_scalar(self, data: Any) -> ScalarNode: diff --git a/pipenv/vendor/ruamel/yaml/resolver.py b/pipenv/vendor/ruamel/yaml/resolver.py index 3b6cffc993..0b99798336 100644 --- a/pipenv/vendor/ruamel/yaml/resolver.py +++ b/pipenv/vendor/ruamel/yaml/resolver.py @@ -1,9 +1,11 @@ -# coding: utf-8 + +from __future__ import annotations import re -from typing import Any, Dict, List, Union, Text, Optional # NOQA -from pipenv.vendor.ruamel.yaml.compat import VersionType # NOQA +if False: # MYPY + from typing import Any, Dict, List, Union, Text, Optional # NOQA + from pipenv.vendor.ruamel.yaml.compat import VersionType # NOQA from pipenv.vendor.ruamel.yaml.tag import Tag from pipenv.vendor.ruamel.yaml.compat import _DEFAULT_YAML_VERSION # NOQA @@ -330,6 +332,7 @@ def get_loader_version(self, version: Optional[VersionType]) -> Any: if isinstance(version, list): return tuple(version) # assume string + assert isinstance(version, str) return tuple(map(int, version.split('.'))) @property diff --git a/pipenv/vendor/ruamel/yaml/scalarbool.py b/pipenv/vendor/ruamel/yaml/scalarbool.py index 23b9c8e6fe..b16c362044 100644 --- a/pipenv/vendor/ruamel/yaml/scalarbool.py +++ b/pipenv/vendor/ruamel/yaml/scalarbool.py @@ -1,4 
+1,5 @@ -# coding: utf-8 + +from __future__ import annotations """ You cannot subclass bool, and this is necessary for round-tripping anchored @@ -11,7 +12,8 @@ from pipenv.vendor.ruamel.yaml.anchor import Anchor -from typing import Text, Any, Dict, List # NOQA +if False: # MYPY + from typing import Text, Any, Dict, List # NOQA __all__ = ['ScalarBoolean'] diff --git a/pipenv/vendor/ruamel/yaml/scalarfloat.py b/pipenv/vendor/ruamel/yaml/scalarfloat.py index 06af137dbe..b87826947f 100644 --- a/pipenv/vendor/ruamel/yaml/scalarfloat.py +++ b/pipenv/vendor/ruamel/yaml/scalarfloat.py @@ -1,9 +1,11 @@ -# coding: utf-8 + +from __future__ import annotations import sys from pipenv.vendor.ruamel.yaml.anchor import Anchor -from typing import Text, Any, Dict, List # NOQA +if False: # MYPY + from typing import Text, Any, Dict, List # NOQA __all__ = ['ScalarFloat', 'ExponentialFloat', 'ExponentialCapsFloat'] diff --git a/pipenv/vendor/ruamel/yaml/scalarint.py b/pipenv/vendor/ruamel/yaml/scalarint.py index 4a969ad41e..bd5bcfffbf 100644 --- a/pipenv/vendor/ruamel/yaml/scalarint.py +++ b/pipenv/vendor/ruamel/yaml/scalarint.py @@ -1,8 +1,10 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.anchor import Anchor -from typing import Text, Any, Dict, List # NOQA +if False: # MYPY + from typing import Text, Any, Dict, List # NOQA __all__ = ['ScalarInt', 'BinaryInt', 'OctalInt', 'HexInt', 'HexCapsInt', 'DecimalInt'] diff --git a/pipenv/vendor/ruamel/yaml/scalarstring.py b/pipenv/vendor/ruamel/yaml/scalarstring.py index 9908bdbcbd..e05ebe13e2 100644 --- a/pipenv/vendor/ruamel/yaml/scalarstring.py +++ b/pipenv/vendor/ruamel/yaml/scalarstring.py @@ -1,9 +1,11 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.anchor import Anchor -from typing import Text, Any, Dict, List # NOQA -from pipenv.vendor.ruamel.yaml.compat import SupportsIndex +if False: # MYPY + from typing import Text, Any, Dict, List # NOQA + from 
pipenv.vendor.ruamel.yaml.compat import SupportsIndex __all__ = [ 'ScalarString', diff --git a/pipenv/vendor/ruamel/yaml/scanner.py b/pipenv/vendor/ruamel/yaml/scanner.py index 21d3df14f7..1072674758 100644 --- a/pipenv/vendor/ruamel/yaml/scanner.py +++ b/pipenv/vendor/ruamel/yaml/scanner.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations # Scanner produces tokens of the following types: # STREAM-START @@ -28,13 +29,13 @@ # Read comments in the Scanner code for more details. # -import inspect from pipenv.vendor.ruamel.yaml.error import MarkedYAMLError, CommentMark # NOQA from pipenv.vendor.ruamel.yaml.tokens import * # NOQA -from pipenv.vendor.ruamel.yaml.compat import check_anchorname_char, nprint, nprintf # NOQA +from pipenv.vendor.ruamel.yaml.docinfo import Version, Tag # NOQA +from pipenv.vendor.ruamel.yaml.compat import check_anchorname_char, _debug, nprint, nprintf # NOQA -from typing import Any, Dict, Optional, List, Union, Text # NOQA -from pipenv.vendor.ruamel.yaml.compat import VersionType # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Union, Text, Tuple # NOQA __all__ = ['Scanner', 'RoundTripScanner', 'ScannerError'] @@ -44,9 +45,9 @@ _SPACE_TAB = ' \t' -def xprintf(*args: Any, **kw: Any) -> Any: - return nprintf(*args, **kw) - pass +if _debug != 0: + def xprintf(*args: Any, **kw: Any) -> Any: + return nprintf(*args, **kw) class ScannerError(MarkedYAMLError): @@ -84,7 +85,6 @@ def __init__(self, loader: Any = None) -> None: self.loader._scanner = self self.reset_scanner() self.first_time = False - self.yaml_version: Any = None @property def flow_level(self) -> int: @@ -142,6 +142,8 @@ def reset_scanner(self) -> None: # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), # '[', or '{' tokens. 
self.possible_simple_keys: Dict[Any, Any] = {} + self.yaml_version: Any = None + self.tag_directives: List[Tuple[Any, Any]] = [] @property def reader(self) -> Any: @@ -911,6 +913,7 @@ def scan_yaml_directive_value(self, start_mark: Any) -> Any: self.reader.get_mark(), ) self.yaml_version = (major, minor) + self.loader.doc_infos[-1].doc_version = Version(major, minor) return self.yaml_version def scan_yaml_directive_number(self, start_mark: Any) -> Any: @@ -942,7 +945,9 @@ def scan_tag_directive_value(self, start_mark: Any) -> Any: while srp() == ' ': srf() prefix = self.scan_tag_directive_prefix(start_mark) - return (handle, prefix) + ret_val = (handle, prefix) + self.tag_directives.append(ret_val) + return ret_val def scan_tag_directive_handle(self, start_mark: Any) -> Any: # See the specification for details. @@ -1979,17 +1984,23 @@ def __init__(self, value: Any, line: Any, column: Any) -> None: self.line = line self.column = column self.used = ' ' - info = inspect.getframeinfo(inspect.stack()[3][0]) - self.function = info.function - self.fline = info.lineno - self.ufun = None - self.uline = None + if _debug != 0: + import inspect + + info = inspect.getframeinfo(inspect.stack()[3][0]) + self.function = info.function + self.fline = info.lineno + self.ufun = None + self.uline = None def set_used(self, v: Any = '+') -> None: self.used = v - info = inspect.getframeinfo(inspect.stack()[1][0]) - self.ufun = info.function # type: ignore - self.uline = info.lineno # type: ignore + if _debug != 0: + import inspect + + info = inspect.getframeinfo(inspect.stack()[1][0]) + self.ufun = info.function # type: ignore + self.uline = info.lineno # type: ignore def set_assigned(self) -> None: self.used = '|' @@ -2087,22 +2098,29 @@ def any_unprocessed(self) -> bool: def unprocessed(self, use: Any = False) -> Any: while len(self.unused) > 0: - first = self.unused.pop(0) if use else self.unused[0] - info = inspect.getframeinfo(inspect.stack()[1][0]) - xprintf('using', first, 
self.comments[first].value, info.function, info.lineno) + if _debug != 0: + import inspect + + first = self.unused.pop(0) if use else self.unused[0] + info = inspect.getframeinfo(inspect.stack()[1][0]) + xprintf('using', first, self.comments[first].value, info.function, info.lineno) yield first, self.comments[first] if use: self.comments[first].set_used() def assign_pre(self, token: Any) -> Any: token_line = token.start_mark.line - info = inspect.getframeinfo(inspect.stack()[1][0]) - xprintf('assign_pre', token_line, self.unused, info.function, info.lineno) + if _debug != 0: + import inspect + + info = inspect.getframeinfo(inspect.stack()[1][0]) + xprintf('assign_pre', token_line, self.unused, info.function, info.lineno) gobbled = False while self.unused and self.unused[0] < token_line: gobbled = True first = self.unused.pop(0) - xprintf('assign_pre < ', first) + if _debug != 0: + xprintf('assign_pre < ', first) self.comments[first].set_used() token.add_comment_pre(first) return gobbled @@ -2119,7 +2137,8 @@ def assign_eol(self, tokens: Any) -> Any: tokens[-idx], ValueToken, ): idx += 1 - xprintf('idx1', idx) + if _debug != 0: + xprintf('idx1', idx) if ( len(tokens) > idx and isinstance(tokens[-idx], ScalarToken) @@ -2133,13 +2152,15 @@ def assign_eol(self, tokens: Any) -> Any: try: eol_idx = self.unused.pop(0) self.comments[eol_idx].set_used() - xprintf('>>>>>a', idx, eol_idx, KEYCMNT) + if _debug != 0: + xprintf('>>>>>a', idx, eol_idx, KEYCMNT) tokens[-idx].add_comment_eol(eol_idx, KEYCMNT) except IndexError: raise NotImplementedError return except IndexError: - xprintf('IndexError1') + if _debug != 0: + xprintf('IndexError1') pass try: if isinstance(tokens[-idx], ScalarToken) and isinstance( @@ -2153,24 +2174,30 @@ def assign_eol(self, tokens: Any) -> Any: raise NotImplementedError return except IndexError: - xprintf('IndexError2') + if _debug != 0: + xprintf('IndexError2') pass for t in tokens: xprintf('tt-', t) - xprintf('not implemented EOL', 
type(tokens[-idx])) + if _debug != 0: + xprintf('not implemented EOL', type(tokens[-idx])) import sys sys.exit(0) def assign_post(self, token: Any) -> Any: token_line = token.start_mark.line - info = inspect.getframeinfo(inspect.stack()[1][0]) - xprintf('assign_post', token_line, self.unused, info.function, info.lineno) + if _debug != 0: + import inspect + + info = inspect.getframeinfo(inspect.stack()[1][0]) + xprintf('assign_post', token_line, self.unused, info.function, info.lineno) gobbled = False while self.unused and self.unused[0] < token_line: gobbled = True first = self.unused.pop(0) - xprintf('assign_post < ', first) + if _debug != 0: + xprintf('assign_post < ', first) self.comments[first].set_used() token.add_comment_post(first) return gobbled @@ -2218,11 +2245,12 @@ def need_more_tokens(self) -> bool: if self.tokens[0].start_mark.line == self.tokens[-1].start_mark.line: return True if True: - xprintf('-x--', len(self.tokens)) - for t in self.tokens: - xprintf(t) - # xprintf(self.comments.last()) - xprintf(self.comments.str_unprocessed()) # type: ignore + if _debug != 0: + xprintf('-x--', len(self.tokens)) + for t in self.tokens: + xprintf(t) + # xprintf(self.comments.last()) + xprintf(self.comments.str_unprocessed()) # type: ignore self.comments.assign_pre(self.tokens[0]) # type: ignore self.comments.assign_eol(self.tokens) # type: ignore return False diff --git a/pipenv/vendor/ruamel/yaml/serializer.py b/pipenv/vendor/ruamel/yaml/serializer.py index f1ba9ad3b7..24f2ffeff8 100644 --- a/pipenv/vendor/ruamel/yaml/serializer.py +++ b/pipenv/vendor/ruamel/yaml/serializer.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.error import YAMLError from pipenv.vendor.ruamel.yaml.compat import nprint, DBG_NODE, dbg, nprintf # NOQA @@ -18,8 +19,9 @@ ) from pipenv.vendor.ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode -from typing import Any, Dict, Union, Text, Optional # NOQA -from 
pipenv.vendor.ruamel.yaml.compat import VersionType # NOQA +if False: # MYPY + from typing import Any, Dict, Union, Text, Optional # NOQA + from pipenv.vendor.ruamel.yaml.compat import VersionType # NOQA __all__ = ['Serializer', 'SerializerError'] diff --git a/pipenv/vendor/ruamel/yaml/tag.py b/pipenv/vendor/ruamel/yaml/tag.py index 7ad23fec01..9a4cad95b9 100644 --- a/pipenv/vendor/ruamel/yaml/tag.py +++ b/pipenv/vendor/ruamel/yaml/tag.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations """ In round-trip mode the original tag needs to be preserved, but the tag @@ -10,7 +11,8 @@ only. """ -from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA tag_attrib = '_yaml_tag' diff --git a/pipenv/vendor/ruamel/yaml/timestamp.py b/pipenv/vendor/ruamel/yaml/timestamp.py index 753dfc1ab4..a9aad05d60 100644 --- a/pipenv/vendor/ruamel/yaml/timestamp.py +++ b/pipenv/vendor/ruamel/yaml/timestamp.py @@ -1,19 +1,21 @@ -# coding: utf-8 -import datetime +from __future__ import annotations + import copy +import datetime # ToDo: at least on PY3 you could probably attach the tzinfo correctly to the object # a more complete datetime might be used by safe loading as well # # add type information (iso8601, spaced) -from typing import Any, Dict, Optional, List # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List # NOQA class TimeStamp(datetime.datetime): def __init__(self, *args: Any, **kw: Any) -> None: - self._yaml: Dict[Any, Any] = dict(t=False, tz=None, delta=0) + self._yaml: Dict[str, Any] = dict(t=False, tz=None, delta=0) def __new__(cls, *args: Any, **kw: Any) -> Any: # datetime is immutable return datetime.datetime.__new__(cls, *args, **kw) diff --git a/pipenv/vendor/ruamel/yaml/tokens.py b/pipenv/vendor/ruamel/yaml/tokens.py index 8cbcf42341..be04352c6d 100644 --- a/pipenv/vendor/ruamel/yaml/tokens.py +++ 
b/pipenv/vendor/ruamel/yaml/tokens.py @@ -1,8 +1,10 @@ -# coding: utf-8 + +from __future__ import annotations from pipenv.vendor.ruamel.yaml.compat import nprintf # NOQA -from typing import Text, Any, Dict, Optional, List # NOQA +if False: # MYPY + from typing import Text, Any, Dict, Optional, List # NOQA from .error import StreamMark # NOQA SHOW_LINES = True diff --git a/pipenv/vendor/ruamel/yaml/util.py b/pipenv/vendor/ruamel/yaml/util.py index b621ce0758..17cb2d6f5a 100644 --- a/pipenv/vendor/ruamel/yaml/util.py +++ b/pipenv/vendor/ruamel/yaml/util.py @@ -1,4 +1,5 @@ -# coding: utf-8 + +from __future__ import annotations """ some helper functions that might be generally useful @@ -9,8 +10,9 @@ import re -from typing import Any, Dict, Optional, List, Text, Callable, Union # NOQA -from .compat import StreamTextType # NOQA +if False: # MYPY + from typing import Any, Dict, Optional, List, Text, Callable, Union # NOQA + from .compat import StreamTextType # NOQA class LazyEval: @@ -73,12 +75,12 @@ def create_timestamp( tz_hour: Any, tz_minute: Any, ) -> Union[datetime.datetime, datetime.date]: - # create a timestamp from match against timestamp_regexp + # create a timestamp from matching against timestamp_regexp MAX_FRAC = 999999 year = int(year) month = int(month) day = int(day) - if not hour: + if hour is None: return datetime.date(year, month, day) hour = int(hour) minute = int(minute) @@ -97,16 +99,20 @@ def create_timestamp( fraction = frac else: fraction = 0 + tzinfo = None delta = None if tz_sign: tz_hour = int(tz_hour) tz_minute = int(tz_minute) if tz_minute else 0 - delta = datetime.timedelta( - hours=tz_hour, minutes=tz_minute, seconds=1 if frac > MAX_FRAC else 0, + td = datetime.timedelta( + hours=tz_hour, minutes=tz_minute, ) if tz_sign == '-': - delta = -delta - elif frac > MAX_FRAC: + td = -td + tzinfo = datetime.timezone(td, name=tz) + elif tz == 'Z': + tzinfo = datetime.timezone(datetime.timedelta(hours=0), name=tz) + if frac > MAX_FRAC: delta = 
-datetime.timedelta(seconds=1) # should do something else instead (or hook this up to the preceding if statement # in reverse @@ -116,7 +122,7 @@ def create_timestamp( # datetime.timezone.utc) # the above is not good enough though, should provide tzinfo. In Python3 that is easily # doable drop that kind of support for Python2 as it has not native tzinfo - data = datetime.datetime(year, month, day, hour, minute, second, fraction) + data = datetime.datetime(year, month, day, hour, minute, second, fraction, tzinfo) if delta: data -= delta return data diff --git a/pipenv/vendor/shellingham/__init__.py b/pipenv/vendor/shellingham/__init__.py index 40867c2aac..15f7a90cbd 100644 --- a/pipenv/vendor/shellingham/__init__.py +++ b/pipenv/vendor/shellingham/__init__.py @@ -3,8 +3,7 @@ from ._core import ShellDetectionFailure - -__version__ = "1.5.3" +__version__ = "1.5.4" def detect_shell(pid=None, max_depth=10): diff --git a/pipenv/vendor/shellingham/posix/ps.py b/pipenv/vendor/shellingham/posix/ps.py index d2715cf68b..3bc39a74a5 100644 --- a/pipenv/vendor/shellingham/posix/ps.py +++ b/pipenv/vendor/shellingham/posix/ps.py @@ -31,7 +31,7 @@ def iter_process_parents(pid, max_depth=10): processes_mapping = {} for line in output.split("\n"): try: - pid, ppid, args = line.strip().split(None, 2) + _pid, ppid, args = line.strip().split(None, 2) # XXX: This is not right, but we are really out of options. # ps does not offer a sane way to decode the argument display, # and this is "Good Enough" for obtaining shell names. 
Hopefully @@ -40,7 +40,7 @@ def iter_process_parents(pid, max_depth=10): args = tuple(a.strip() for a in args.split(" ")) except ValueError: continue - processes_mapping[pid] = Process(args=args, pid=pid, ppid=ppid) + processes_mapping[_pid] = Process(args=args, pid=_pid, ppid=ppid) for _ in range(max_depth): try: diff --git a/pipenv/vendor/tomlkit/__init__.py b/pipenv/vendor/tomlkit/__init__.py index 51464f6c1c..1379fbcfc3 100644 --- a/pipenv/vendor/tomlkit/__init__.py +++ b/pipenv/vendor/tomlkit/__init__.py @@ -27,7 +27,7 @@ from pipenv.vendor.tomlkit.api import ws -__version__ = "0.12.3" +__version__ = "0.12.4" __all__ = [ "aot", "array", diff --git a/pipenv/vendor/tomlkit/_types.py b/pipenv/vendor/tomlkit/_types.py index cc1847b5e6..8eeb75ea9d 100644 --- a/pipenv/vendor/tomlkit/_types.py +++ b/pipenv/vendor/tomlkit/_types.py @@ -43,9 +43,27 @@ def _new(self: WT, value: Any) -> WT: class _CustomList(MutableSequence, list): """Adds MutableSequence mixin while pretending to be a builtin list""" + def __add__(self, other): + new_list = self.copy() + new_list.extend(other) + return new_list + + def __iadd__(self, other): + self.extend(other) + return self + class _CustomDict(MutableMapping, dict): """Adds MutableMapping mixin while pretending to be a builtin dict""" + def __or__(self, other): + new_dict = self.copy() + new_dict.update(other) + return new_dict + + def __ior__(self, other): + self.update(other) + return self + class _CustomInt(Integral, int): """Adds Integral mixin while pretending to be a builtin int""" @@ -54,7 +72,7 @@ class _CustomFloat(Real, float): def wrap_method( - original_method: Callable[Concatenate[WT, P], Any] + original_method: Callable[Concatenate[WT, P], Any], ) -> Callable[Concatenate[WT, P], Any]: def wrapper(self: WT, *args: P.args, **kwargs: P.kwargs) -> Any: result = original_method(self, *args, **kwargs) diff --git a/pipenv/vendor/tomlkit/container.py b/pipenv/vendor/tomlkit/container.py index 6b2e4b965d..8b52dfad6f 100644 --- 
a/pipenv/vendor/tomlkit/container.py +++ b/pipenv/vendor/tomlkit/container.py @@ -227,9 +227,9 @@ def append( if item.is_super_table(): # We need to merge both super tables if ( - self._table_keys[-1] != current_body_element[0] - or key.is_dotted() + key.is_dotted() or current_body_element[0].is_dotted() + or self._table_keys[-1] != current_body_element[0] ): if key.is_dotted() and not self._parsed: idx = self._get_last_index_before_table() diff --git a/pipenv/vendor/tomlkit/items.py b/pipenv/vendor/tomlkit/items.py index 0f277ea43b..f71f2c2a4e 100644 --- a/pipenv/vendor/tomlkit/items.py +++ b/pipenv/vendor/tomlkit/items.py @@ -1412,6 +1412,7 @@ def __delitem__(self, key: int | slice): if ( idx == 0 and len(self._value) > 0 + and self._value[idx].indent and "\n" not in self._value[idx].indent.s ): # Remove the indentation of the first item if not newline diff --git a/pipenv/vendor/vendor.txt b/pipenv/vendor/vendor.txt index 08d8b5df35..a672c385b3 100644 --- a/pipenv/vendor/vendor.txt +++ b/pipenv/vendor/vendor.txt @@ -1,14 +1,14 @@ -click-didyoumean==0.3.0 +click-didyoumean==0.3.1 click==8.1.7 colorama==0.4.6 dparse==0.6.3 -pexpect==4.8.0 -pipdeptree==2.13.1 +pexpect==4.9.0 +pipdeptree==2.16.2 plette==0.4.4 ptyprocess==0.7.0 -python-dotenv==1.0.0 +python-dotenv==1.0.1 pythonfinder==2.1.0 -ruamel.yaml==0.17.39 -shellingham==1.5.3 +ruamel.yaml==0.18.6 +shellingham==1.5.4 tomli==2.0.1 -tomlkit==0.12.3 +tomlkit==0.12.4 diff --git a/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch index b8d195743c..fe21bdac75 100644 --- a/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch +++ b/tasks/vendoring/patches/vendor/pipdeptree-update-pip-import.patch @@ -1,15 +1,15 @@ diff --git a/pipenv/vendor/pipdeptree/__main__.py b/pipenv/vendor/pipdeptree/__main__.py -index cb48791..77ebab7 100644 +index dcd738e..a751954 100644 --- a/pipenv/vendor/pipdeptree/__main__.py +++ 
b/pipenv/vendor/pipdeptree/__main__.py -@@ -1,9 +1,16 @@ - """The main entry point used for CLI.""" +@@ -2,9 +2,16 @@ + from __future__ import annotations - + +import os import sys from typing import Sequence - + +pardir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# for finding pipdeptree itself +sys.path.append(pardir)