@@ -119,7 +119,7 @@ def is_source(self, file: MypyFile) -> bool:

 # A dict containing saved cache data from a previous run. This will
 # be updated in place with newly computed cache data. See dmypy.py.
-SavedCache = Dict[str, Tuple['CacheMeta', MypyFile]]
+SavedCache = Dict[str, Tuple['CacheMeta', MypyFile, Dict[Expression, Type]]]


 def build(sources: List[BuildSource],
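For orientation, here is a minimal, self-contained sketch of the widened SavedCache shape; the classes below are stand-ins for the real mypy types, not the actual definitions.

# Sketch only: stand-in classes, not the real mypy types.
from typing import Dict, Tuple

class CacheMeta: ...    # stand-in for mypy.build.CacheMeta
class MypyFile: ...     # stand-in for a module's parsed AST
class Expression: ...   # stand-in for an AST expression node
class Type: ...         # stand-in for an inferred type

# Old shape: module id -> (metadata, parsed tree).
OldSavedCache = Dict[str, Tuple[CacheMeta, MypyFile]]

# New shape: the per-module expression-to-type map is carried along as well,
# so a later run can reuse inferred types without re-reading json cache files.
SavedCache = Dict[str, Tuple[CacheMeta, MypyFile, Dict[Expression, Type]]]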
@@ -335,6 +335,7 @@ def default_lib_path(data_dir: str,
 CacheMeta = NamedTuple('CacheMeta',
                        [('id', str),
                         ('path', str),
+                        ('memory_only', bool),  # no corresponding json files (fine-grained only)
                         ('mtime', int),
                         ('size', int),
                         ('hash', str),
@@ -359,6 +360,7 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
     return CacheMeta(
         meta.get('id', sentinel),
         meta.get('path', sentinel),
+        meta.get('memory_only', False),
         int(meta['mtime']) if 'mtime' in meta else sentinel,
         meta.get('size', sentinel),
         meta.get('hash', sentinel),
@@ -510,7 +512,8 @@ class BuildManager:
       version_id: The current mypy version (based on commit id when possible)
       plugin: Active mypy plugin(s)
       errors: Used for reporting all errors
-      saved_cache: Dict with saved cache state for dmypy (read-write!)
+      saved_cache: Dict with saved cache state for dmypy and fine-grained incremental mode
+                   (read-write!)
       stats: Dict with various instrumentation numbers
     """

@@ -642,19 +645,20 @@ def parse_file(self, id: str, path: str, source: str, ignore_errors: bool) -> MypyFile:
         self.errors.set_file_ignored_lines(path, tree.ignored_lines, ignore_errors)
         return tree

-    def module_not_found(self, path: str, line: int, id: str) -> None:
+    def module_not_found(self, path: str, id: str, line: int, target: str) -> None:
         self.errors.set_file(path, id)
         stub_msg = "(Stub files are from https://github.com/python/typeshed)"
-        if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
-                (self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
+        if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(target)) or
+                (self.options.python_version[0] >= 3 and
+                 moduleinfo.is_py3_std_lib_module(target))):
             self.errors.report(
-                line, 0, "No library stub file for standard library module '{}'".format(id))
+                line, 0, "No library stub file for standard library module '{}'".format(target))
             self.errors.report(line, 0, stub_msg, severity='note', only_once=True)
-        elif moduleinfo.is_third_party_module(id):
-            self.errors.report(line, 0, "No library stub file for module '{}'".format(id))
+        elif moduleinfo.is_third_party_module(target):
+            self.errors.report(line, 0, "No library stub file for module '{}'".format(target))
             self.errors.report(line, 0, stub_msg, severity='note', only_once=True)
         else:
-            self.errors.report(line, 0, "Cannot find module named '{}'".format(id))
+            self.errors.report(line, 0, "Cannot find module named '{}'".format(target))
             self.errors.report(line, 0, '(Perhaps setting MYPYPATH '
                                'or using the "--ignore-missing-imports" flag would help)',
                                severity='note', only_once=True)
@@ -937,7 +941,7 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[CacheMeta]:
     """
     saved_cache = manager.saved_cache
     if id in saved_cache:
-        m, t = saved_cache[id]
+        m, t, types = saved_cache[id]
         manager.add_stats(reused_metas=1)
         manager.trace("Reusing saved metadata for %s" % id)
         # Note: it could still be skipped if the mtime/size/hash mismatches.
@@ -1036,6 +1040,12 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
         manager.log('Metadata abandoned for {}: errors were previously ignored'.format(id))
         return None

+    if meta.memory_only:
+        # Special case for fine-grained incremental mode when the JSON file is missing but
+        # we want to cache the module anyway.
+        manager.log('Memory-only metadata for {}'.format(id))
+        return meta
+
     assert path is not None, "Internal error: meta was provided without a path"
     # Check data_json; assume if its mtime matches it's good.
     # TODO: stat() errors
@@ -1441,6 +1451,10 @@ class State:
     # Whether to ignore all errors
     ignore_all = False

+    # Type checker used for checking this file. Use type_checker() for
+    # access and to construct this on demand.
+    _type_checker = None  # type: Optional[TypeChecker]
+
     def __init__(self,
                  id: Optional[str],
                  path: Optional[str],
@@ -1464,6 +1478,7 @@ def __init__(self,
         self.import_context = []
         self.id = id or '__main__'
         self.options = manager.options.clone_for_module(self.id)
+        self._type_checker = None
         if not path and source is None:
             assert id is not None
             file_id = id
@@ -1511,7 +1526,8 @@ def __init__(self,
                 if not self.options.ignore_missing_imports:
                     save_import_context = manager.errors.import_context()
                     manager.errors.set_import_context(caller_state.import_context)
-                    manager.module_not_found(caller_state.xpath, caller_line, id)
+                    manager.module_not_found(caller_state.xpath, caller_state.id,
+                                             caller_line, id)
                     manager.errors.set_import_context(save_import_context)
                 manager.missing_modules.add(id)
                 raise ModuleNotFound
@@ -1828,20 +1844,27 @@ def semantic_analysis_apply_patches(self) -> None:
             patch_func()

     def type_check_first_pass(self) -> None:
-        assert self.tree is not None, "Internal error: method must be called on parsed file only"
-        manager = self.manager
         if self.options.semantic_analysis_only:
             return
         with self.wrap_context():
-            self.type_checker = TypeChecker(manager.errors, manager.modules, self.options,
-                                            self.tree, self.xpath, manager.plugin)
-            self.type_checker.check_first_pass()
+            self.type_checker().check_first_pass()
+
+    def type_checker(self) -> TypeChecker:
+        if not self._type_checker:
+            assert self.tree is not None, "Internal error: must be called on parsed file only"
+            manager = self.manager
+            self._type_checker = TypeChecker(manager.errors, manager.modules, self.options,
+                                             self.tree, self.xpath, manager.plugin)
+        return self._type_checker
+
+    def type_map(self) -> Dict[Expression, Type]:
+        return self.type_checker().type_map

     def type_check_second_pass(self) -> bool:
         if self.options.semantic_analysis_only:
             return False
         with self.wrap_context():
-            return self.type_checker.check_second_pass()
+            return self.type_checker().check_second_pass()

     def finish_passes(self) -> None:
         assert self.tree is not None, "Internal error: method must be called on parsed file only"
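The type_checker() accessor above is plain lazy initialization: construct the checker on first use, cache it on the State, and let type_map() expose its results. A self-contained sketch of the same pattern, with a hypothetical Analyzer standing in for TypeChecker:

# Illustration of the lazy-construction pattern; Analyzer and its type_map
# are hypothetical stand-ins, not mypy's TypeChecker API.
from typing import Dict, Optional

class Analyzer:
    def __init__(self, source: str) -> None:
        self.type_map = {}  # type: Dict[str, str]

    def check_first_pass(self) -> None:
        self.type_map['x'] = 'int'  # pretend something was inferred

class ModuleState:
    def __init__(self, source: str) -> None:
        self.source = source
        self._analyzer = None  # type: Optional[Analyzer]

    def analyzer(self) -> Analyzer:
        # Build on first use and cache, so every pass (and any code that
        # later saves the results) sees the same instance and the same map.
        if self._analyzer is None:
            self._analyzer = Analyzer(self.source)
        return self._analyzer

    def type_map(self) -> Dict[str, str]:
        return self.analyzer().type_map

state = ModuleState('x = 1')
state.analyzer().check_first_pass()
assert state.type_map() == {'x': 'int'}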
@@ -1851,16 +1874,16 @@ def finish_passes(self) -> None:
         with self.wrap_context():
             # Some tests want to look at the set of all types.
             if manager.options.use_builtins_fixtures or manager.options.dump_deps:
-                manager.all_types.update(self.type_checker.type_map)
+                manager.all_types.update(self.type_map())

             if self.options.incremental:
-                self._patch_indirect_dependencies(self.type_checker.module_refs,
-                                                  self.type_checker.type_map)
+                self._patch_indirect_dependencies(self.type_checker().module_refs,
+                                                  self.type_map())

             if self.options.dump_inference_stats:
                 dump_type_stats(self.tree, self.xpath, inferred=True,
-                                typemap=self.type_checker.type_map)
-            manager.report_file(self.tree, self.type_checker.type_map, self.options)
+                                typemap=self.type_map())
+            manager.report_file(self.tree, self.type_map(), self.options)

     def _patch_indirect_dependencies(self,
                                      module_refs: Set[str],
@@ -1904,7 +1927,7 @@ def write_cache(self) -> None:
             self.meta = None
             self.mark_interface_stale(on_errors=True)
             return
-        dep_prios = [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
+        dep_prios = self.dependency_priorities()
         new_interface_hash, self.meta = write_cache(
             self.id, self.path, self.tree,
             list(self.dependencies), list(self.suppressed), list(self.child_modules),
@@ -1917,6 +1940,9 @@ def write_cache(self) -> None:
             self.mark_interface_stale()
             self.interface_hash = new_interface_hash

+    def dependency_priorities(self) -> List[int]:
+        return [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]
+

 def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
     set_orig = set(manager.saved_cache)
@@ -1963,7 +1989,7 @@ def preserve_cache(graph: Graph) -> SavedCache:
     for id, state in graph.items():
         assert state.id == id
         if state.meta is not None and state.tree is not None:
-            saved_cache[id] = (state.meta, state.tree)
+            saved_cache[id] = (state.meta, state.tree, state.type_map())
     return saved_cache
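Taken together, preserve_cache() closes the loop hinted at by the "read-write!" note in BuildManager's docstring: the cache preserved from one build seeds the next. A rough, hypothetical driver sketch (run_build and its wiring are illustrative only, not the actual dmypy code):

# Hypothetical wiring, for illustration only; not mypy or dmypy code.
from typing import Any, Dict, List, Tuple

SavedCache = Dict[str, Tuple[Any, Any, Dict[Any, Any]]]  # (meta, tree, type map)

def run_build(sources: List[str], saved_cache: SavedCache) -> SavedCache:
    # A real driver would seed BuildManager.saved_cache with the previous
    # run's cache, build, and then return preserve_cache(graph).
    return dict(saved_cache)

saved_cache = {}  # type: SavedCache
for _edit in range(3):  # e.g. successive fine-grained increments
    saved_cache = run_build(['prog.py'], saved_cache)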