Definitions of all tokens involved in the lexical analysis
Lookup-table entry formats: {reading => token_class}, or {reading => [token_class, *opt]} when extra options are given.
# File rdoc/parser/ruby.rb, line 274
##
# Defines a new token class named +token_n+ (a String or Symbol constant
# name) as a subclass of +super_token+, and registers it in the lookup
# tables.
#
# * When +reading+ is given, the new class (plus any +opts+) is stored in
#   TkReading2Token under that reading; a duplicate reading raises
#   TkReading2TokenDuplicateError.
# * The class is always stored in TkSymbol2Token under the symbol form of
#   its name.
# * Operator token classes (those <= TkOp) additionally gain a class-level
#   +op_name+ method returning the reading.
#
# Raises AlreadyDefinedToken if a constant of that name already exists.

def self.def_token(token_n, super_token = Token, reading = nil, *opts)
  token_n = token_n.id2name unless String === token_n

  fail AlreadyDefinedToken, token_n if const_defined?(token_n)

  token_c = Class.new super_token
  const_set token_n, token_c # token_c.inspect

  if reading
    if TkReading2Token[reading]
      # BUG FIX: the original wrote
      #   fail TkReading2TokenDuplicateError, token_n, reading
      # Kernel#fail treats a third argument as the BACKTRACE, so +reading+
      # was silently dropped from the error. Fold it into the message.
      fail TkReading2TokenDuplicateError,
           "token #{token_n} duplicates reading #{reading.inspect}"
    end

    TkReading2Token[reading] =
      if opts.empty?
        [token_c]
      else
        [token_c].concat(opts)
      end
  end

  TkSymbol2Token[token_n.intern] = token_c

  if token_c <= TkOp
    # The reading is baked into the generated method at definition time.
    token_c.class_eval %{
      def self.op_name; "#{reading}"; end
    }
  end
end
# File rdoc/parser/ruby.rb, line 118
##
# Builds a token instance for +token+, positioned at the lexer's previous
# line/char coordinates. +token+ may be:
#
# * a String -- resolved through TkReading2Token
# * a Symbol -- resolved through TkSymbol2Token
# * a token class -- instantiated directly; classes descending from TkId,
#   TkVal, TkOPASGN, or TkUnknownChar also receive +value+
#
# Raises TkReading2TokenNoKey when a String/Symbol lookup finds no entry.

def Token(token, value = nil)
  case token
  when String, Symbol
    table = String === token ? TkReading2Token : TkSymbol2Token
    entry = table[token]
    raise TkReading2TokenNoKey, token if entry.nil?
    # Recurse with the resolved token class so the instantiation branch
    # below handles construction uniformly.
    Token(entry[0], value)
  else
    takes_value = !(token.ancestors & [TkId, TkVal, TkOPASGN, TkUnknownChar]).empty?
    if takes_value
      token.new(@prev_line_no, @prev_char_no, value)
    else
      token.new(@prev_line_no, @prev_char_no)
    end
  end
end