Skip to content

Commit

Permalink
Add JSON output (#361)
Browse files Browse the repository at this point in the history
  • Loading branch information
JelleZijlstra authored Dec 22, 2021
1 parent 58dab61 commit 18f35e2
Show file tree
Hide file tree
Showing 5 changed files with 86 additions and 19 deletions.
4 changes: 3 additions & 1 deletion docs/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@

## Unreleased

- Add JSON output for integrating pyanalyze's output with other
tools (#360)
- Add check that suggests parameter and return types for untyped
functions, using the new `suggested_parameter_type` and
`suggested_return_type` codes (#358)
`suggested_return_type` codes (#358, #359)
- Extract constraints from multi-comparisons (`a < b < c`) (#354)
- Support positional-only arguments with the `__` prefix
outside of stubs (#353)
Expand Down
14 changes: 12 additions & 2 deletions pyanalyze/name_check_visitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -1026,11 +1026,17 @@ def _show_error_if_checking(
*,
replacement: Optional[node_visitor.Replacement] = None,
detail: Optional[str] = None,
extra_metadata: Optional[Dict[str, Any]] = None,
) -> None:
"""We usually should show errors only in the check_names state to avoid duplicate errors."""
if self._is_checking():
self.show_error(
node, msg, error_code=error_code, replacement=replacement, detail=detail
node,
msg,
error_code=error_code,
replacement=replacement,
detail=detail,
extra_metadata=extra_metadata,
)

def _set_name_in_scope(
Expand Down Expand Up @@ -1401,10 +1407,12 @@ def visit_FunctionDef(
):
prepared = prepare_type(return_value)
if should_suggest_type(prepared):
detail, metadata = display_suggested_type(prepared)
self._show_error_if_checking(
node,
error_code=ErrorCode.suggested_return_type,
detail=display_suggested_type(prepared),
detail=detail,
extra_metadata=metadata,
)

if evaled_function:
Expand Down Expand Up @@ -4534,7 +4542,9 @@ def _run_on_files(
all_failures.append(
{
"filename": node_visitor.UNUSED_OBJECT_FILENAME,
"absolute_filename": node_visitor.UNUSED_OBJECT_FILENAME,
"message": failure + "\n",
"description": failure,
}
)
if attribute_checker is not None:
Expand Down
58 changes: 48 additions & 10 deletions pyanalyze/node_visitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from enum import Enum
import qcore
import cProfile
import json
import logging
import os
import os.path
Expand All @@ -25,7 +26,7 @@
import builtins
from builtins import print as real_print
from types import ModuleType
from typing_extensions import TypedDict
from typing_extensions import NotRequired, TypedDict
from typing import (
Any,
Dict,
Expand All @@ -38,7 +39,6 @@
Tuple,
Type,
Union,
cast,
)

from . import analysis_lib
Expand Down Expand Up @@ -128,13 +128,16 @@ class FileNotFoundError(Exception):
pass


class Failure(TypedDict, total=False):
class Failure(TypedDict):
description: str
filename: str
code: Enum
lineno: int
context: str
message: str
absolute_filename: str
code: NotRequired[Enum]
lineno: NotRequired[int]
col_offset: NotRequired[int]
context: NotRequired[str]
message: NotRequired[str]
extra_metadata: NotRequired[Dict[str, Any]]


class BaseNodeVisitor(ast.NodeVisitor):
Expand Down Expand Up @@ -360,6 +363,7 @@ def main(cls) -> int:
else:
kwargs = dict(args.__dict__)
markdown_output = kwargs.pop("markdown_output", None)
json_output = kwargs.pop("json_output", None)

verbose = kwargs.pop("verbose", 0)
if verbose == 0 or verbose is None:
Expand Down Expand Up @@ -395,8 +399,16 @@ def main(cls) -> int:
failures = cls._run(**kwargs)
if markdown_output is not None and failures:
cls._write_markdown_report(markdown_output, failures)
if json_output is not None and failures:
cls._write_json_report(json_output, failures)
return 1 if failures else 0

@classmethod
def _write_json_report(cls, output_file: str, failures: List[Failure]) -> None:
    """Write all collected failures to *output_file* as a JSON array.

    Each Failure is first converted to a plain, JSON-serializable dict
    (Enum error codes become their names).
    """
    # Use a new name rather than rebinding the parameter: the converted
    # entries are plain dicts, not Failure TypedDicts.
    serializable = [_make_serializable(failure) for failure in failures]
    with open(output_file, "w") as f:
        json.dump(serializable, f)

@classmethod
def _write_markdown_report(cls, output_file: str, failures: List[Failure]) -> None:
by_file = collections.defaultdict(list)
Expand All @@ -409,7 +421,6 @@ def _write_markdown_report(cls, output_file: str, failures: List[Failure]) -> No

with open(output_file, "w") as f:
f.write("%d total failures in %d files\n\n" % (len(failures), len(by_file)))

for filename, file_failures in sorted(by_file.items()):
if filename != UNUSED_OBJECT_FILENAME:
filename = filename[len(prefix) :]
Expand Down Expand Up @@ -525,6 +536,7 @@ def show_error(
ignore_comment: str = IGNORE_COMMENT,
detail: Optional[str] = None,
save: bool = True,
extra_metadata: Optional[Dict[str, Any]] = None,
) -> Optional[Failure]:
"""Shows an error associated with this node.
Expand All @@ -540,6 +552,9 @@ def show_error(
file-level ignore comments.)
- ignore_comment: Comment that can be used to ignore this error. (By default, this
is "# static analysis: ignore".)
- detail: extra detail to append to the error on a separate line
- save: if False, do not add the failure to the all_failures list
- extra_metadata: if given, is added to JSON failures output
"""
if self.caught_errors is not None:
Expand All @@ -552,6 +567,8 @@ def show_error(
"obey_ignore": obey_ignore,
"ignore_comment": ignore_comment,
"detail": detail,
"save": save,
"extra_metadata": extra_metadata,
}
)
return None
Expand Down Expand Up @@ -582,8 +599,13 @@ def show_error(
else:
lineno = col_offset = None

# https://github.com/quora/pyanalyze/issues/112
error = cast(Failure, {"description": str(e), "filename": self.filename})
error: Failure = {
"description": str(e),
"filename": self.filename,
"absolute_filename": os.path.abspath(self.filename),
}
if extra_metadata is not None:
error["extra_metadata"] = extra_metadata
message = f"\n{e}"
if error_code is not None:
error["code"] = error_code
Expand All @@ -595,6 +617,8 @@ def show_error(
message += f"\nIn {self.filename} at line {lineno}\n"
else:
message += f"\n In {self.filename}"
if col_offset is not None:
error["col_offset"] = col_offset
lines = self._lines()

if obey_ignore and lineno is not None:
Expand Down Expand Up @@ -845,6 +869,13 @@ def _get_argument_parser(cls) -> argparse.ArgumentParser:
"Suitable for summarizing and tracking errors."
),
)
parser.add_argument(
"--json-output",
help=(
"Write errors to this file in JSON format. "
"Suitable for integrating with other tools."
),
)
parser.add_argument(
"--add-ignores",
help=(
Expand Down Expand Up @@ -1097,3 +1128,10 @@ def __exit__(self, typ: object, value: object, traceback: object) -> None:
self.filename = tempfile.mktemp()
self.prof.dump_stats(self.filename)
print("profiler output saved as {}".format(self.filename))


def _make_serializable(failure: Failure) -> Dict[str, Any]:
    """Return a shallow dict copy of *failure* that ``json.dump`` can handle.

    The only non-serializable field in a Failure is the optional "code",
    which holds an Enum member; it is replaced by the member's name.
    """
    serializable: Dict[str, Any] = dict(failure)
    if "code" in serializable:
        # Enum members are not JSON-serializable; emit the member name.
        serializable["code"] = serializable["code"].name
    return serializable
3 changes: 2 additions & 1 deletion pyanalyze/reexport.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from collections import defaultdict
from dataclasses import InitVar, dataclass, field
from enum import Enum
from typing import Dict, List, Optional, Set, Tuple
from typing import Any, Dict, List, Optional, Set, Tuple

from .node_visitor import Failure
from .config import Config
Expand All @@ -25,6 +25,7 @@ def show_error(
*,
detail: Optional[str] = None,
save: bool = True,
extra_metadata: Optional[Dict[str, Any]] = None,
) -> Optional[Failure]:
raise NotImplementedError

Expand Down
26 changes: 21 additions & 5 deletions pyanalyze/suggested_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@
import ast
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict, Iterator, List, Mapping, Sequence, Tuple, Union
from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union

from pyanalyze.safe import safe_isinstance
from pyanalyze.safe import safe_getattr, safe_isinstance

from .error_code import ErrorCode
from .node_visitor import Failure
Expand All @@ -28,6 +28,7 @@
MultiValuedValue,
VariableNameValue,
replace_known_sequence_value,
stringify_object,
unite_values,
)
from .reexport import ErrorContext
Expand Down Expand Up @@ -61,15 +62,17 @@ def check(self) -> Iterator[Failure]:
suggested = unite_values(*all_values)
if not should_suggest_type(suggested):
continue
detail, metadata = display_suggested_type(suggested)
failure = self.ctx.show_error(
param,
f"Suggested type for parameter {param.arg}",
ErrorCode.suggested_parameter_type,
detail=display_suggested_type(suggested),
detail=detail,
# Otherwise we record it twice in tests. We should ultimately
# refactor error tracking to make it less hacky for things that
# show errors outside of files.
save=False,
extra_metadata=metadata,
)
if failure is not None:
yield failure
Expand Down Expand Up @@ -102,13 +105,26 @@ def check(self) -> List[Failure]:
return failures


def display_suggested_type(value: Value) -> str:
def display_suggested_type(value: Value) -> Tuple[str, Optional[Dict[str, Any]]]:
value = prepare_type(value)
if isinstance(value, MultiValuedValue) and value.vals:
cae = CanAssignError("Union", [CanAssignError(str(val)) for val in value.vals])
else:
cae = CanAssignError(str(value))
return str(cae)
# If the type is simple enough, add extra_metadata for autotyping to apply.
if isinstance(value, TypedValue) and type(value) is TypedValue:
# For now, only for exactly TypedValue
suggested_type = stringify_object(value.typ)
imports = []
if isinstance(value.typ, str):
if "." in value.typ:
imports.append(value.typ)
elif safe_getattr(value.typ, "__module__", None) != "builtins":
imports.append(suggested_type.split(".")[0])
metadata = {"suggested_type": suggested_type, "imports": imports}
else:
metadata = None
return str(cae), metadata


def should_suggest_type(value: Value) -> bool:
Expand Down

0 comments on commit 18f35e2

Please sign in to comment.