Compress cached JSON files #15981

Open: wants to merge 5 commits into master
Changes from 2 commits
mypy/metastore.py (9 changes: 5 additions & 4 deletions)
@@ -14,6 +14,7 @@
 import os
 import time
 from abc import abstractmethod
+from gzip import compress, decompress
 from typing import TYPE_CHECKING, Any, Iterable

 if TYPE_CHECKING:
@@ -93,8 +94,8 @@ def read(self, name: str) -> str:
         if not self.cache_dir_prefix:
             raise FileNotFoundError()

-        with open(os.path.join(self.cache_dir_prefix, name)) as f:
-            return f.read()
+        with open(os.path.join(self.cache_dir_prefix, name), "rb") as f:
+            return decompress(f.read()).decode()

     def write(self, name: str, data: str, mtime: float | None = None) -> bool:
         assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!"
@@ -106,8 +107,8 @@ def write(self, name: str, data: str, mtime: float | None = None) -> bool:
         tmp_filename = path + "." + random_string()
         try:
             os.makedirs(os.path.dirname(path), exist_ok=True)
-            with open(tmp_filename, "w") as f:
-                f.write(data)
+            with open(tmp_filename, "wb") as f:
+                f.write(compress(data.encode(), 1, mtime=0))
             os.replace(tmp_filename, path)
             if mtime is not None:
                 os.utime(path, times=(mtime, mtime))
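
For context, a minimal sketch (not part of this diff) of the gzip round-trip the new `read()`/`write()` paths rely on; the JSON payload below is invented for illustration.

```python
from gzip import compress, decompress

payload = '{"hash": "abc123", "dependencies": []}'  # made-up cache payload

# write() side: compresslevel=1 favours speed over ratio, and mtime=0 keeps
# the gzip header free of a timestamp, so identical input gives identical bytes.
blob = compress(payload.encode(), 1, mtime=0)

# read() side: decompress and decode back to the original JSON text.
assert decompress(blob).decode() == payload

# Deterministic output is what lets tests compare cache files byte-for-byte
# (see the daemon.test change further down).
assert blob == compress(payload.encode(), 1, mtime=0)
```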
mypy/test/test_ref_info.py (5 changes: 3 additions & 2 deletions)
@@ -5,6 +5,7 @@
 import json
 import os
 import sys
+from gzip import decompress

 from mypy import build
 from mypy.modulefinder import BuildSource
@@ -33,8 +34,8 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
         major, minor = sys.version_info[:2]
         ref_path = os.path.join(options.cache_dir, f"{major}.{minor}", "__main__.refs.json")

-        with open(ref_path) as refs_file:
-            data = json.load(refs_file)
+        with open(ref_path, "rb") as refs_file:
+            data = json.loads(decompress(refs_file.read()))

         a = []
         for item in data:
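
For anyone inspecting the cache by hand, a hedged sketch of reading any cache file once it is gzip-compressed; the path used is an example, not something this PR adds.

```python
import json
from gzip import decompress


def load_cache_json(path: str) -> object:
    """Read a gzip-compressed mypy cache file and parse it as JSON."""
    with open(path, "rb") as f:
        return json.loads(decompress(f.read()))


if __name__ == "__main__":
    # Hypothetical cache file path, shown only to illustrate usage.
    print(load_cache_json(".mypy_cache/3.11/__main__.meta.json"))
```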
test-data/unit/daemon.test (2 changes: 1 addition & 1 deletion)
@@ -309,7 +309,7 @@ Found 1 error in 1 file (checked 2 source files)
 $ {python} -c "import sys; sys.stdout.write(open('log').read())"
 -- make sure the meta file didn't get updated. we use this as an imperfect proxy for
 -- whether the source file got rehashed, which we don't want it to have been.
-$ {python} -c "x = open('.mypy_cache/3.11/bar.meta.json').read(); y = open('asdf.json').read(); assert x == y"
+$ {python} -c "x = open('.mypy_cache/3.11/bar.meta.json', 'rb').read(); y = open('asdf.json', 'rb').read(); assert x == y"

 [case testDaemonSuggest]
 $ dmypy start --log-file log.txt -- --follow-imports=error --no-error-summary
Expand Down