3939from threading import Lock
4040from copy import copy
4141
42- if sys .version_info >= (3 ,):
43- import pickle
44- else :
45- import cPickle as pickle
46-
4742from coconut ._pyparsing import (
4843 USE_COMPUTATION_GRAPH ,
4944 USE_CACHE ,
109104 incremental_mode_cache_size ,
110105 incremental_cache_limit ,
111106 use_line_by_line_parser ,
107+ coconut_cache_dir ,
112108)
113109from coconut .util import (
110+ pickle ,
114111 pickleable_obj ,
115112 checksum ,
116113 clip ,
126123 create_method ,
127124 univ_open ,
128125 staledict ,
126+ ensure_dir ,
129127)
130128from coconut .exceptions import (
131129 CoconutException ,
161159 ComputationNode ,
162160 StartOfStrGrammar ,
163161 MatchAny ,
162+ CombineToNode ,
164163 sys_target ,
165164 getline ,
166165 addskip ,
210209 get_cache_items_for ,
211210 clear_packrat_cache ,
212211 add_packrat_cache_items ,
213- get_cache_path ,
212+ parse_elem_to_identifier ,
213+ identifier_to_parse_elem ,
214214 _lookup_loc ,
215215 _value_exc_loc_or_ret ,
216216)
@@ -447,10 +447,7 @@ def pickle_cache(original, cache_path, include_incremental=True, protocol=pickle
447447 # are the only ones that parseIncremental will reuse
448448 if 0 < loc < len (original ) - 1 :
449449 elem = lookup [0 ]
450- identifier = elem .parse_element_index
451- internal_assert (lambda : elem == all_parse_elements [identifier ](), "failed to look up parse element by identifier" , lambda : (elem , all_parse_elements [identifier ]()))
452- if validation_dict is not None :
453- validation_dict [identifier ] = elem .__class__ .__name__
450+ identifier = parse_elem_to_identifier (elem , validation_dict )
454451 pickleable_lookup = (identifier ,) + lookup [1 :]
455452 internal_assert (value [_value_exc_loc_or_ret ] is True or isinstance (value [_value_exc_loc_or_ret ], int ), "cache must be dehybridized before pickling" , value [_value_exc_loc_or_ret ])
456453 pickleable_cache_items .append ((pickleable_lookup , value ))
@@ -460,21 +457,15 @@ def pickle_cache(original, cache_path, include_incremental=True, protocol=pickle
460457 for wkref in MatchAny .all_match_anys :
461458 match_any = wkref ()
462459 if match_any is not None and match_any .adaptive_usage is not None :
463- identifier = match_any .parse_element_index
464- internal_assert (lambda : match_any == all_parse_elements [identifier ](), "failed to look up match_any by identifier" , lambda : (match_any , all_parse_elements [identifier ]()))
465- if validation_dict is not None :
466- validation_dict [identifier ] = match_any .__class__ .__name__
460+ identifier = parse_elem_to_identifier (match_any , validation_dict )
467461 match_any .expr_order .sort (key = lambda i : (- match_any .adaptive_usage [i ], i ))
468462 all_adaptive_items .append ((identifier , (match_any .adaptive_usage , match_any .expr_order )))
469463 logger .log ("Caching adaptive item:" , match_any , (match_any .adaptive_usage , match_any .expr_order ))
470464
471465 # computation graph cache
472466 computation_graph_cache_items = []
473467 for (call_site_name , grammar_elem ), cache in Compiler .computation_graph_caches .items ():
474- identifier = grammar_elem .parse_element_index
475- internal_assert (lambda : grammar_elem == all_parse_elements [identifier ](), "failed to look up grammar by identifier" , lambda : (grammar_elem , all_parse_elements [identifier ]()))
476- if validation_dict is not None :
477- validation_dict [identifier ] = grammar_elem .__class__ .__name__
468+ identifier = parse_elem_to_identifier (grammar_elem , validation_dict )
478469 computation_graph_cache_items .append (((call_site_name , identifier ), cache ))
479470
480471 logger .log ("Saving {num_inc} incremental, {num_adapt} adaptive, and {num_comp_graph} computation graph cache items to {cache_path!r}." .format (
@@ -492,8 +483,9 @@ def pickle_cache(original, cache_path, include_incremental=True, protocol=pickle
492483 "computation_graph_cache_items" : computation_graph_cache_items ,
493484 }
494485 try :
495- with univ_open (cache_path , "wb" ) as pickle_file :
496- pickle .dump (pickle_info_obj , pickle_file , protocol = protocol )
486+ with CombineToNode .enable_pickling (validation_dict ):
487+ with univ_open (cache_path , "wb" ) as pickle_file :
488+ pickle .dump (pickle_info_obj , pickle_file , protocol = protocol )
497489 except Exception :
498490 logger .log_exc ()
499491 return False
@@ -531,15 +523,25 @@ def unpickle_cache(cache_path):
531523 all_adaptive_items = pickle_info_obj ["all_adaptive_items" ]
532524 computation_graph_cache_items = pickle_info_obj ["computation_graph_cache_items" ]
533525
526+ # incremental cache
527+ new_cache_items = []
528+ for pickleable_lookup , value in pickleable_cache_items :
529+ maybe_elem = identifier_to_parse_elem (pickleable_lookup [0 ], validation_dict )
530+ if maybe_elem is not None :
531+ internal_assert (value [_value_exc_loc_or_ret ] is True or isinstance (value [_value_exc_loc_or_ret ], int ), "attempting to unpickle hybrid cache item" , value [_value_exc_loc_or_ret ])
532+ lookup = (maybe_elem ,) + pickleable_lookup [1 :]
533+ usefullness = value [- 1 ][0 ]
534+ internal_assert (usefullness , "loaded useless cache item" , (lookup , value ))
535+ stale_value = value [:- 1 ] + ([usefullness + 1 ],)
536+ new_cache_items .append ((lookup , stale_value ))
537+ add_packrat_cache_items (new_cache_items )
538+
534539 # adaptive cache
535540 for identifier , (adaptive_usage , expr_order ) in all_adaptive_items :
536- if identifier < len (all_parse_elements ):
537- maybe_elem = all_parse_elements [identifier ]()
538- if maybe_elem is not None :
539- if validation_dict is not None :
540- internal_assert (maybe_elem .__class__ .__name__ == validation_dict [identifier ], "adaptive cache pickle-unpickle inconsistency" , (maybe_elem , validation_dict [identifier ]))
541- maybe_elem .adaptive_usage = adaptive_usage
542- maybe_elem .expr_order = expr_order
541+ maybe_elem = identifier_to_parse_elem (identifier , validation_dict )
542+ if maybe_elem is not None :
543+ maybe_elem .adaptive_usage = adaptive_usage
544+ maybe_elem .expr_order = expr_order
543545
544546 max_cache_size = min (
545547 incremental_mode_cache_size or float ("inf" ),
@@ -548,38 +550,29 @@ def unpickle_cache(cache_path):
548550 if max_cache_size != float ("inf" ):
549551 pickleable_cache_items = pickleable_cache_items [- max_cache_size :]
550552
551- # incremental cache
552- new_cache_items = []
553- for pickleable_lookup , value in pickleable_cache_items :
554- identifier = pickleable_lookup [0 ]
555- if identifier < len (all_parse_elements ):
556- maybe_elem = all_parse_elements [identifier ]()
557- if maybe_elem is not None :
558- if validation_dict is not None :
559- internal_assert (maybe_elem .__class__ .__name__ == validation_dict [identifier ], "incremental cache pickle-unpickle inconsistency" , (maybe_elem , validation_dict [identifier ]))
560- internal_assert (value [_value_exc_loc_or_ret ] is True or isinstance (value [_value_exc_loc_or_ret ], int ), "attempting to unpickle hybrid cache item" , value [_value_exc_loc_or_ret ])
561- lookup = (maybe_elem ,) + pickleable_lookup [1 :]
562- usefullness = value [- 1 ][0 ]
563- internal_assert (usefullness , "loaded useless cache item" , (lookup , value ))
564- stale_value = value [:- 1 ] + ([usefullness + 1 ],)
565- new_cache_items .append ((lookup , stale_value ))
566- add_packrat_cache_items (new_cache_items )
567-
568553 # computation graph cache
569554 for (call_site_name , identifier ), cache in computation_graph_cache_items :
570- if identifier < len (all_parse_elements ):
571- maybe_elem = all_parse_elements [identifier ]()
572- if maybe_elem is not None :
573- if validation_dict is not None :
574- internal_assert (maybe_elem .__class__ .__name__ == validation_dict [identifier ], "computation graph cache pickle-unpickle inconsistency" , (maybe_elem , validation_dict [identifier ]))
575- Compiler .computation_graph_caches [(call_site_name , maybe_elem )].update (cache )
555+ maybe_elem = identifier_to_parse_elem (identifier , validation_dict )
556+ if maybe_elem is not None :
557+ Compiler .computation_graph_caches [(call_site_name , maybe_elem )].update (cache )
576558
577559 num_inc = len (pickleable_cache_items )
578560 num_adapt = len (all_adaptive_items )
579561 num_comp_graph = sum (len (cache ) for _ , cache in computation_graph_cache_items ) if computation_graph_cache_items else 0
580562 return num_inc , num_adapt , num_comp_graph
581563
582564
def get_cache_path(codepath):
    """Get the cache filename to use for the given codepath.

    The cache file lives in a dedicated cache directory (created on
    demand) alongside the source file, named after the source file
    with a ``.pkl`` suffix appended.
    """
    source_dir, source_fname = os.path.split(codepath)

    # place the cache folder next to the source file it caches
    cache_dir = os.path.join(source_dir, coconut_cache_dir)
    ensure_dir(cache_dir, logger=logger)

    return os.path.join(cache_dir, source_fname + ".pkl")
574+
575+
583576def load_cache_for (inputstring , codepath ):
584577 """Load cache_path (for the given inputstring and filename)."""
585578 if not SUPPORTS_INCREMENTAL :
0 commit comments