95
95
from mypy .stubinfo import legacy_bundled_packages , non_bundled_packages , stub_distribution_name
96
96
from mypy .types import Type
97
97
from mypy .typestate import reset_global_state , type_state
98
+ from mypy .util import json_dumps , json_loads
98
99
from mypy .version import __version__
99
100
100
101
# Switch to True to produce debug output related to fine-grained incremental
@@ -858,7 +859,7 @@ def load_fine_grained_deps(self, id: str) -> dict[str, set[str]]:
858
859
t0 = time .time ()
859
860
if id in self .fg_deps_meta :
860
861
# TODO: Assert deps file wasn't changed.
861
- deps = json . loads (self .metastore .read (self .fg_deps_meta [id ]["path" ]))
862
+ deps = json_loads (self .metastore .read (self .fg_deps_meta [id ]["path" ]))
862
863
else :
863
864
deps = {}
864
865
val = {k : set (v ) for k , v in deps .items ()}
@@ -911,8 +912,8 @@ def stats_summary(self) -> Mapping[str, object]:
911
912
return self .stats
912
913
913
914
914
def deps_to_json(x: dict[str, set[str]]) -> bytes:
    """Serialize a fine-grained dependency map to compact JSON bytes.

    JSON has no set type, so each set of dependency triggers is converted
    to a list (in the set's iteration order) before encoding.
    """
    serializable = {trigger: list(targets) for trigger, targets in x.items()}
    return json_dumps(serializable)
916
917
917
918
918
919
# File for storing metadata about all the fine-grained dependency caches
@@ -980,7 +981,7 @@ def write_deps_cache(
980
981
981
982
meta = {"snapshot" : meta_snapshot , "deps_meta" : fg_deps_meta }
982
983
983
- if not metastore .write (DEPS_META_FILE , json . dumps (meta , separators = ( "," , ":" ) )):
984
+ if not metastore .write (DEPS_META_FILE , json_dumps (meta )):
984
985
manager .log (f"Error writing fine-grained deps meta JSON file { DEPS_META_FILE } " )
985
986
error = True
986
987
@@ -1048,7 +1049,7 @@ def generate_deps_for_cache(manager: BuildManager, graph: Graph) -> dict[str, di
1048
1049
1049
1050
def write_plugins_snapshot(manager: BuildManager) -> None:
    """Write snapshot of versions and hashes of currently active plugins."""
    snapshot = json_dumps(manager.plugins_snapshot)
    if manager.metastore.write(PLUGIN_SNAPSHOT_FILE, snapshot):
        # Snapshot stored successfully; nothing else to do.
        return
    # Report the failure as a blocking error attached to the cache directory.
    manager.errors.set_file(_cache_dir_prefix(manager.options), None, manager.options)
    manager.errors.report(0, 0, "Error writing plugins snapshot", blocker=True)
@@ -1079,8 +1080,8 @@ def read_quickstart_file(
1079
1080
# just ignore it.
1080
1081
raw_quickstart : dict [str , Any ] = {}
1081
1082
try :
1082
- with open (options .quickstart_file ) as f :
1083
- raw_quickstart = json . load ( f )
1083
+ with open (options .quickstart_file , "rb" ) as f :
1084
+ raw_quickstart = json_loads ( f . read () )
1084
1085
1085
1086
quickstart = {}
1086
1087
for file , (x , y , z ) in raw_quickstart .items ():
@@ -1148,10 +1149,10 @@ def _load_json_file(
1148
1149
manager .add_stats (metastore_read_time = time .time () - t0 )
1149
1150
# Only bother to compute the log message if we are logging it, since it could be big
1150
1151
if manager .verbosity () >= 2 :
1151
- manager .trace (log_success + data .rstrip ())
1152
+ manager .trace (log_success + data .rstrip (). decode () )
1152
1153
try :
1153
1154
t1 = time .time ()
1154
- result = json . loads (data )
1155
+ result = json_loads (data )
1155
1156
manager .add_stats (data_json_load_time = time .time () - t1 )
1156
1157
except json .JSONDecodeError :
1157
1158
manager .errors .set_file (file , None , manager .options )
@@ -1343,8 +1344,8 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> CacheMeta | No
1343
1344
# So that plugins can return data with tuples in it without
1344
1345
# things silently always invalidating modules, we round-trip
1345
1346
# the config data. This isn't beautiful.
1346
- plugin_data = json . loads (
1347
- json . dumps (manager .plugin .report_config_data (ReportConfigContext (id , path , is_check = True )))
1347
+ plugin_data = json_loads (
1348
+ json_dumps (manager .plugin .report_config_data (ReportConfigContext (id , path , is_check = True )))
1348
1349
)
1349
1350
if m .plugin_data != plugin_data :
1350
1351
manager .log (f"Metadata abandoned for { id } : plugin configuration differs" )
@@ -1478,18 +1479,15 @@ def validate_meta(
1478
1479
"ignore_all" : meta .ignore_all ,
1479
1480
"plugin_data" : meta .plugin_data ,
1480
1481
}
1481
- if manager .options .debug_cache :
1482
- meta_str = json .dumps (meta_dict , indent = 2 , sort_keys = True )
1483
- else :
1484
- meta_str = json .dumps (meta_dict , separators = ("," , ":" ))
1482
+ meta_bytes = json_dumps (meta_dict , manager .options .debug_cache )
1485
1483
meta_json , _ , _ = get_cache_names (id , path , manager .options )
1486
1484
manager .log (
1487
1485
"Updating mtime for {}: file {}, meta {}, mtime {}" .format (
1488
1486
id , path , meta_json , meta .mtime
1489
1487
)
1490
1488
)
1491
1489
t1 = time .time ()
1492
- manager .metastore .write (meta_json , meta_str ) # Ignore errors, just an optimization.
1490
+ manager .metastore .write (meta_json , meta_bytes ) # Ignore errors, just an optimization.
1493
1491
manager .add_stats (validate_update_time = time .time () - t1 , validate_munging_time = t1 - t0 )
1494
1492
return meta
1495
1493
@@ -1507,13 +1505,6 @@ def compute_hash(text: str) -> str:
1507
1505
return hash_digest (text .encode ("utf-8" ))
1508
1506
1509
1507
1510
- def json_dumps (obj : Any , debug_cache : bool ) -> str :
1511
- if debug_cache :
1512
- return json .dumps (obj , indent = 2 , sort_keys = True )
1513
- else :
1514
- return json .dumps (obj , sort_keys = True , separators = ("," , ":" ))
1515
-
1516
-
1517
1508
def write_cache (
1518
1509
id : str ,
1519
1510
path : str ,
@@ -1566,8 +1557,8 @@ def write_cache(
1566
1557
1567
1558
# Serialize data and analyze interface
1568
1559
data = tree .serialize ()
1569
- data_str = json_dumps (data , manager .options .debug_cache )
1570
- interface_hash = compute_hash ( data_str )
1560
+ data_bytes = json_dumps (data , manager .options .debug_cache )
1561
+ interface_hash = hash_digest ( data_bytes )
1571
1562
1572
1563
plugin_data = manager .plugin .report_config_data (ReportConfigContext (id , path , is_check = False ))
1573
1564
@@ -1591,7 +1582,7 @@ def write_cache(
1591
1582
manager .trace (f"Interface for { id } is unchanged" )
1592
1583
else :
1593
1584
manager .trace (f"Interface for { id } has changed" )
1594
- if not metastore .write (data_json , data_str ):
1585
+ if not metastore .write (data_json , data_bytes ):
1595
1586
# Most likely the error is the replace() call
1596
1587
# (see https://github.com/python/mypy/issues/3215).
1597
1588
manager .log (f"Error writing data JSON file { data_json } " )
@@ -3568,4 +3559,4 @@ def write_undocumented_ref_info(
3568
3559
assert not ref_info_file .startswith ("." )
3569
3560
3570
3561
deps_json = get_undocumented_ref_info_json (state .tree , type_map )
3571
- metastore .write (ref_info_file , json . dumps (deps_json , separators = ( "," , ":" ) ))
3562
+ metastore .write (ref_info_file , json_dumps (deps_json ))
0 commit comments