require 'cgi'

##
# A TokenStream is a list of tokens gathered during the parse of some entity
# (say, a method).  Entities populate these streams by being registered with
# the lexer.  Any class can collect tokens by including TokenStream.  From the
# outside, you use such an object by calling its start_collecting_tokens
# method, followed by calls to add_token and pop_token.
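#
# A minimal usage sketch (the Collector class and the +tokens+ variable are
# illustrative, not part of RDoc):
#
#   class Collector
#     include RDoc::TokenStream
#   end
#
#   collector = Collector.new
#   collector.start_collecting_tokens
#   tokens.each { |token| collector.add_token token }
#   collector.pop_token    # drop the most recently added token
#   collector.tokens_to_s  # => concatenated text of the collected tokens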

module RDoc::TokenStream

  ##
  # Converts +token_stream+ to HTML, wrapping various tokens with
  # <tt><span></tt> elements.  The following token types are wrapped in spans
  # with the given class names:
  #
  # TkCONSTANT :: 'ruby-constant'
  # TkKW       :: 'ruby-keyword'
  # TkIVAR     :: 'ruby-ivar'
  # TkOp       :: 'ruby-operator'
  # TkId       :: 'ruby-identifier'
  # TkNode     :: 'ruby-node'
  # TkCOMMENT  :: 'ruby-comment'
  # TkREGEXP   :: 'ruby-regexp'
  # TkSTRING   :: 'ruby-string'
  # TkVal      :: 'ruby-value'
  #
  # Other token types are not wrapped in spans.
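  #
  # For example, a constant token whose text is <tt>Object</tt> is rendered
  # as:
  #
  #   <span class="ruby-constant">Object</span>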

  def self.to_html token_stream
    token_stream.map do |t|
      # skip nil placeholders in the stream
      next unless t

      # map the token's class to a CSS class name, if it has one
      style = case t
              when RDoc::RubyToken::TkCONSTANT then 'ruby-constant'
              when RDoc::RubyToken::TkKW       then 'ruby-keyword'
              when RDoc::RubyToken::TkIVAR     then 'ruby-ivar'
              when RDoc::RubyToken::TkOp       then 'ruby-operator'
              when RDoc::RubyToken::TkId       then 'ruby-identifier'
              when RDoc::RubyToken::TkNode     then 'ruby-node'
              when RDoc::RubyToken::TkCOMMENT  then 'ruby-comment'
              when RDoc::RubyToken::TkREGEXP   then 'ruby-regexp'
              when RDoc::RubyToken::TkSTRING   then 'ruby-string'
              when RDoc::RubyToken::TkVal      then 'ruby-value'
              end

      text = CGI.escapeHTML t.text

      if style then
        "<span class=\"#{style}\">#{text}</span>"
      else
        text
      end
    end.join
  end

  ##
  # Adds +tokens+ to the collected tokens.  Both individual tokens and arrays
  # of tokens may be given; arrays are flattened.
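  #
  # For example (the +tk+ variables stand in for lexer tokens):
  #
  #   add_tokens tk1, tk2
  #   add_tokens [tk3, tk4]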

  def add_tokens(*tokens)
    tokens.flatten.each { |token| @token_stream << token }
  end

  alias add_token add_tokens

  ##
  # Starts collecting tokens

  def collect_tokens
    @token_stream = []
  end

  alias start_collecting_tokens collect_tokens

  ##
  # Removes the last token from the collected tokens

  def pop_token
    @token_stream.pop
  end

  ##
  # Current token stream

  def token_stream
    @token_stream
  end

  ##
  # Returns a string representation of the token stream

  def tokens_to_s
    token_stream.map { |token| token.text }.join ''
  end

end