@@ -23,23 +23,23 @@ use std::fs::File;
 use std::io::{BufRead, Read};
 use std::path::Path;
 
-use syntax::parse;
 use syntax::parse::lexer;
 use rustc::dep_graph::DepGraph;
 use rustc::session::{self, config};
 use rustc::middle::cstore::DummyCrateStore;
 
 use std::rc::Rc;
 use syntax::ast;
-use syntax::ast::Name;
 use syntax::codemap;
 use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
 use syntax::parse::lexer::TokenAndSpan;
 use syntax_pos::Pos;
 
+use syntax::symbol::{Symbol, keywords};
+
 fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
     fn id() -> token::Token {
-        Token::Ident(ast::Ident::with_empty_ctxt(Name(0)))
+        Token::Ident(ast::Ident::with_empty_ctxt(keywords::Invalid.name()))
     }
 
     let mut res = HashMap::new();
@@ -65,7 +65,7 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
6565 "SHL" => Token :: BinOp ( BinOpToken :: Shl ) ,
6666 "LBRACE" => Token :: OpenDelim ( DelimToken :: Brace ) ,
6767 "RARROW" => Token :: RArrow ,
68- "LIT_STR" => Token :: Literal ( Lit :: Str_ ( Name ( 0 ) ) , None ) ,
68+ "LIT_STR" => Token :: Literal ( Lit :: Str_ ( keywords :: Invalid . name ( ) ) , None ) ,
6969 "DOTDOT" => Token :: DotDot ,
7070 "MOD_SEP" => Token :: ModSep ,
7171 "DOTDOTDOT" => Token :: DotDotDot ,
@@ -75,21 +75,22 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
7575 "ANDAND" => Token :: AndAnd ,
7676 "AT" => Token :: At ,
7777 "LBRACKET" => Token :: OpenDelim ( DelimToken :: Bracket ) ,
78- "LIT_STR_RAW" => Token :: Literal ( Lit :: StrRaw ( Name ( 0 ) , 0 ) , None ) ,
78+ "LIT_STR_RAW" => Token :: Literal ( Lit :: StrRaw ( keywords :: Invalid . name ( ) , 0 ) , None ) ,
7979 "RPAREN" => Token :: CloseDelim ( DelimToken :: Paren ) ,
8080 "SLASH" => Token :: BinOp ( BinOpToken :: Slash ) ,
8181 "COMMA" => Token :: Comma ,
82- "LIFETIME" => Token :: Lifetime ( ast:: Ident :: with_empty_ctxt ( Name ( 0 ) ) ) ,
82+ "LIFETIME" => Token :: Lifetime (
83+ ast:: Ident :: with_empty_ctxt ( keywords:: Invalid . name ( ) ) ) ,
8384 "CARET" => Token :: BinOp ( BinOpToken :: Caret ) ,
8485 "TILDE" => Token :: Tilde ,
8586 "IDENT" => id ( ) ,
8687 "PLUS" => Token :: BinOp ( BinOpToken :: Plus ) ,
87- "LIT_CHAR" => Token :: Literal ( Lit :: Char ( Name ( 0 ) ) , None ) ,
88- "LIT_BYTE" => Token :: Literal ( Lit :: Byte ( Name ( 0 ) ) , None ) ,
88+ "LIT_CHAR" => Token :: Literal ( Lit :: Char ( keywords :: Invalid . name ( ) ) , None ) ,
89+ "LIT_BYTE" => Token :: Literal ( Lit :: Byte ( keywords :: Invalid . name ( ) ) , None ) ,
8990 "EQ" => Token :: Eq ,
9091 "RBRACKET" => Token :: CloseDelim ( DelimToken :: Bracket ) ,
9192 "COMMENT" => Token :: Comment ,
92- "DOC_COMMENT" => Token :: DocComment ( Name ( 0 ) ) ,
93+ "DOC_COMMENT" => Token :: DocComment ( keywords :: Invalid . name ( ) ) ,
9394 "DOT" => Token :: Dot ,
9495 "EQEQ" => Token :: EqEq ,
9596 "NE" => Token :: Ne ,
@@ -99,9 +100,9 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
99100 "BINOP" => Token :: BinOp ( BinOpToken :: Plus ) ,
100101 "POUND" => Token :: Pound ,
101102 "OROR" => Token :: OrOr ,
102- "LIT_INTEGER" => Token :: Literal ( Lit :: Integer ( Name ( 0 ) ) , None ) ,
103+ "LIT_INTEGER" => Token :: Literal ( Lit :: Integer ( keywords :: Invalid . name ( ) ) , None ) ,
103104 "BINOPEQ" => Token :: BinOpEq ( BinOpToken :: Plus ) ,
104- "LIT_FLOAT" => Token :: Literal ( Lit :: Float ( Name ( 0 ) ) , None ) ,
105+ "LIT_FLOAT" => Token :: Literal ( Lit :: Float ( keywords :: Invalid . name ( ) ) , None ) ,
105106 "WHITESPACE" => Token :: Whitespace ,
106107 "UNDERSCORE" => Token :: Underscore ,
107108 "MINUS" => Token :: BinOp ( BinOpToken :: Minus ) ,
@@ -111,10 +112,11 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
111112 "OR" => Token :: BinOp ( BinOpToken :: Or ) ,
112113 "GT" => Token :: Gt ,
113114 "LE" => Token :: Le ,
114- "LIT_BINARY" => Token :: Literal ( Lit :: ByteStr ( Name ( 0 ) ) , None ) ,
115- "LIT_BINARY_RAW" => Token :: Literal ( Lit :: ByteStrRaw ( Name ( 0 ) , 0 ) , None ) ,
115+ "LIT_BINARY" => Token :: Literal ( Lit :: ByteStr ( keywords:: Invalid . name ( ) ) , None ) ,
116+ "LIT_BINARY_RAW" => Token :: Literal (
117+ Lit :: ByteStrRaw ( keywords:: Invalid . name ( ) , 0 ) , None ) ,
116118 "QUESTION" => Token :: Question ,
117- "SHEBANG" => Token :: Shebang ( Name ( 0 ) ) ,
119+ "SHEBANG" => Token :: Shebang ( keywords :: Invalid . name ( ) ) ,
118120 _ => continue ,
119121 } ;
120122
@@ -158,7 +160,7 @@ fn fix(mut lit: &str) -> ast::Name {
     let leading_hashes = count(lit);
 
     // +1/-1 to adjust for single quotes
-    parse::token::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
+    Symbol::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1])
 }
 
 /// Assuming a char/byte literal, strip the 'b' prefix and the single quotes.
@@ -168,7 +170,7 @@ fn fixchar(mut lit: &str) -> ast::Name {
         lit = &lit[1..];
     }
 
-    parse::token::intern(&lit[1..lit.len() - 1])
+    Symbol::intern(&lit[1..lit.len() - 1])
 }
 
 fn count(lit: &str) -> usize {
@@ -196,7 +198,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
     let not_found = format!("didn't find token {:?} in the map", toknum);
     let proto_tok = tokens.get(toknum).expect(&not_found[..]);
 
-    let nm = parse::token::intern(content);
+    let nm = Symbol::intern(content);
 
     debug!("What we got: content (`{}`), proto: {:?}", content, proto_tok);
 
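For context, a minimal sketch of the API shift this patch applies, assuming the rustc-internal syntax crate of this era (it is not a standalone crate, so the snippet only builds inside the rust source tree); the helper functions placeholder_str_lit and intern_content are illustrative names, not part of the patch:

    use syntax::parse::token::{Lit, Token};
    use syntax::symbol::{keywords, Symbol};

    // Before this patch, prototype tokens carried a hard-coded dummy name, Name(0).
    // Afterwards, the reserved "invalid" keyword supplies the dummy interned name.
    fn placeholder_str_lit() -> Token {
        Token::Literal(Lit::Str_(keywords::Invalid.name()), None)
    }

    // Literal contents are now interned via Symbol::intern instead of the removed
    // parse::token::intern wrapper; ast::Name is an alias for Symbol in this crate.
    fn intern_content(content: &str) -> Symbol {
        Symbol::intern(content)
    }

The point of the change is that keywords::Invalid names the symbol reserved for invalid identifiers, so the placeholder tokens no longer depend on the interner assigning any particular raw index.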