(* Read and tokenize the import-control file "<s>.imp" for the current theory.
   NOTE(review): the original text was a corrupted two-column diff paste (every
   line duplicated, with in-content line numbers and trailing "|" separators);
   this is the deduplicated reconstruction.  The ref-based variant of the
   lexicon binding is kept so that both `lexes` (as a ref) and `get_lexes`
   remain bound for any later code in the enclosing let — confirm against the
   repository history that this is the intended revision. *)

(* Slurp the whole .imp file into a string; `s` is the theory/file base name
   bound in the enclosing scope. *)
val is = TextIO.openIn (s ^ ".imp")
val inp = TextIO.inputAll is
val _ = TextIO.closeIn is

(* Turn the raw text into a symbol source for the Isabelle scanner;
   do_recover = false: scanning errors are fatal rather than skipped. *)
val orig_source = Source.of_string inp
val symb_source = Symbol.source {do_recover = false} orig_source

(* Keyword lexicon of the .imp mini-language (second component: empty lexicon
   of "major" keywords).  Held in a ref; `get_lexes` is the thunk shape that
   Token.source expects for its lexicon argument. *)
val lexes =
  Unsynchronized.ref
    (Scan.make_lexicon (map Symbol.explode
      ["import_segment", "ignore_thms", "import", "end", ">", "::",
       "const_maps", "const_moves", "thm_maps", "const_renames",
       "type_maps", "def_maps"]),
     Scan.empty_lexicon)
val get_lexes = fn () => ! lexes

(* Tokenize, then drop whitespace tokens so the parsers below see only
   significant tokens.  do_recover = NONE: no error recovery. *)
val token_source =
  Token.source {do_recover = NONE} get_lexes Position.start symb_source
val token_list =
  filter_out (Token.is_kind Token.Space) (Source.exhaust token_source)

(* Section parsers: each recognizes its leading keyword and delegates to the
   corresponding body parser bound elsewhere in this file. *)
val import_segmentP = Parse.$$$ "import_segment" |-- import_segment
val type_mapsP = Parse.$$$ "type_maps" |-- type_maps
val def_mapsP = Parse.$$$ "def_maps" |-- def_maps
val const_mapsP = Parse.$$$ "const_maps" |-- const_maps