41 changes: 37 additions & 4 deletions mypy/exportjson.py
@@ -1,4 +1,4 @@
"""Tool to convert mypy cache file to a JSON format (print to stdout).
"""Tool to convert binary mypy cache files (.ff) to JSON (.ff.json).
Member
Suggested change
"""Tool to convert binary mypy cache files (.ff) to JSON (.ff.json).
"""Tool to convert binary mypy cache files (.ff) to JSON (.json).

Member
Oh, I see you actually don't strip the original extension. I think it is better to do this, i.e. use .data.json or .meta.json for output files.

Collaborator (Author)
I did it this way so that there is no confusion between the "real" JSON cache files and the ones produced by the converter -- there's no guarantee that mypy will be able to read the output, since we don't guarantee that it stays in sync with the actual JSON format.


Usage:
python -m mypy.exportjson .mypy_cache/.../my_module.data.ff
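
For context on the naming question raised in the review thread above, here is a small illustration (not part of the PR) of the mapping the converter applies, assuming the behavior the author describes, i.e. ".json" is appended to the binary file name rather than substituted for ".ff":

for fnam in ["my_module.data.ff", "my_module.meta.ff"]:
    # Appending keeps the converted output visually distinct from mypy's
    # own JSON-format cache files (my_module.data.json / my_module.meta.json).
    print(fnam, "->", fnam + ".json")
# my_module.data.ff -> my_module.data.ff.json
# my_module.meta.ff -> my_module.meta.ff.json
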
@@ -21,6 +21,7 @@

from librt.internal import Buffer

from mypy.cache import CacheMeta
from mypy.nodes import (
    FUNCBASE_FLAGS,
    FUNCDEF_FLAGS,
@@ -552,6 +553,30 @@ def convert_unbound_type(self: UnboundType) -> Json:
    }


def convert_binary_cache_meta_to_json(data: bytes, data_file: str) -> Json:
    meta = CacheMeta.read(Buffer(data), data_file)
    assert meta is not None, f"Error reading meta cache file associated with {data_file}"
    return {
        "id": meta.id,
        "path": meta.path,
        "mtime": meta.mtime,
        "size": meta.size,
        "hash": meta.hash,
        "data_mtime": meta.data_mtime,
        "dependencies": meta.dependencies,
        "suppressed": meta.suppressed,
        "options": meta.options,
        "dep_prios": meta.dep_prios,
        "dep_lines": meta.dep_lines,
        "dep_hashes": [dep.hex() for dep in meta.dep_hashes],
        "interface_hash": meta.interface_hash.hex(),
        "error_lines": meta.error_lines,
        "version_id": meta.version_id,
        "ignore_all": meta.ignore_all,
        "plugin_data": meta.plugin_data,
    }


def main() -> None:
    parser = argparse.ArgumentParser(
        description="Convert binary cache files to JSON. "
@@ -563,11 +588,19 @@ def main() -> None:
    args = parser.parse_args()
    fnams: list[str] = args.path
    for fnam in fnams:
        if not fnam.endswith(".data.ff"):
            sys.exit(f"error: Expected .data.ff extension, but got {fnam}")
        if fnam.endswith(".data.ff"):
            is_data = True
        elif fnam.endswith(".meta.ff"):
            is_data = False
        else:
            sys.exit(f"error: Expected .data.ff or .meta.ff extension, but got {fnam}")
        with open(fnam, "rb") as f:
            data = f.read()
        json_data = convert_binary_cache_to_json(data)
        if is_data:
            json_data = convert_binary_cache_to_json(data)
        else:
            data_file = fnam.removesuffix(".meta.ff") + ".data.ff"
            json_data = convert_binary_cache_meta_to_json(data, data_file)
        new_fnam = fnam + ".json"
        with open(new_fnam, "w") as f:
            json.dump(json_data, f)
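As a usage note, a minimal sketch of calling the new meta converter programmatically, mirroring what main() does for a .meta.ff file; the file names below are hypothetical:

import json

from mypy.exportjson import convert_binary_cache_meta_to_json

meta_fnam = "example.meta.ff"  # hypothetical binary meta cache file
data_fnam = "example.data.ff"  # associated data file, used for error reporting
with open(meta_fnam, "rb") as f:
    json_data = convert_binary_cache_meta_to_json(f.read(), data_fnam)
with open(meta_fnam + ".json", "w") as f:
    json.dump(json_data, f)
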
24 changes: 20 additions & 4 deletions mypy/test/testexportjson.py
@@ -9,7 +9,7 @@

from mypy import build
from mypy.errors import CompileError
from mypy.exportjson import convert_binary_cache_to_json
from mypy.exportjson import convert_binary_cache_meta_to_json, convert_binary_cache_to_json
from mypy.modulefinder import BuildSource
from mypy.options import Options
from mypy.test.config import test_temp_dir
@@ -53,12 +53,28 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
                ):
                    continue
                fnam = os.path.join(cache_dir, f"{module}.data.ff")
                with open(fnam, "rb") as f:
                    json_data = convert_binary_cache_to_json(f.read(), implicit_names=False)
                is_meta = testcase.name.endswith("_meta")
                if not is_meta:
                    with open(fnam, "rb") as f:
                        json_data = convert_binary_cache_to_json(f.read(), implicit_names=False)
                else:
                    meta_fnam = os.path.join(cache_dir, f"{module}.meta.ff")
                    with open(meta_fnam, "rb") as f:
                        json_data = convert_binary_cache_meta_to_json(f.read(), fnam)
                for line in json.dumps(json_data, indent=4).splitlines():
                    if '"path": ' in line:
                        # We source file path is unpredictable, so filter it out
                        # The source file path is unpredictable, so filter it out
                        line = re.sub(r'"[^"]+\.pyi?"', "...", line)
                    if is_meta:
                        if '"version_id"' in line:
                            line = re.sub(r'"[0-9][^"]+"', "...", line)
                        if '"mtime"' in line or '"data_mtime"' in line:
                            line = re.sub(r": [0-9]+", ": ...", line)
                        if '"platform"' in line:
                            line = re.sub(': "[^"]+"', ": ...", line)
                    if '"hash"' not in line:
                        # Some hashes are unpredictable so filter them out
                        line = re.sub(r'"[a-f0-9]{40}"', '"<hash>"', line)
                    assert "ERROR" not in line, line
                    a.append(line)
except CompileError as e:
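To make the meta-output filtering above concrete, here is a standalone sketch (not part of the PR) of how the hash rule rewrites one line of the dumped JSON; the digest below is made up:

import re

line = '    "interface_hash": "0123456789abcdef0123456789abcdef01234567",'
if '"hash"' not in line:
    # The key here is "interface_hash", not "hash", so the rule applies and the
    # unpredictable 40-character hex digest becomes a placeholder; a plain
    # '"hash": ...' line (the deterministic source hash) is left untouched.
    line = re.sub(r'"[a-f0-9]{40}"', '"<hash>"', line)
print(line)  # ->     "interface_hash": "<hash>",
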
44 changes: 44 additions & 0 deletions test-data/unit/exportjson.test
@@ -278,3 +278,47 @@ x: X = 2
[builtins fixtures/tuple.pyi]
[out]
<not checked>

[case testExportMetaBasic_meta]
import typing
from typing_extensions import Final
[builtins fixtures/tuple.pyi]
[out]
{
"id": "main",
"path": ...,
"mtime": ...,
"size": 49,
"hash": "db2252f953c889e6b78dde8e30bd241a0c86b2d9",
"data_mtime": ...,
"dependencies": [
"typing",
"typing_extensions",
"builtins"
],
"suppressed": [],
"options": {
"other_options": "<hash>",
"platform": ...
},
"dep_prios": [
10,
5,
5
],
"dep_lines": [
1,
2,
1
],
"dep_hashes": [
"<hash>",
"<hash>",
"<hash>"
],
"interface_hash": "<hash>",
"error_lines": [],
"version_id": ...,
"ignore_all": false,
"plugin_data": null
}