Searched refs:tokenize (Results 1 - 25 of 50) sorted by relevance

/macosx-10.10.1/ruby-106/ruby/test/psych/
test_scalar_scanner.rb
20 assert_equal time, @ss.tokenize(time_str)
31 assert_equal time_str, @ss.tokenize(time_str)
37 assert_equal x, @ss.tokenize(x)
40 assert_equal x, @ss.tokenize(x)
43 assert_equal x, @ss.tokenize(x)
48 assert_equal Date.strptime(x, '%Y-%m-%d'), @ss.tokenize(x)
53 assert_equal x, @ss.tokenize(x)
58 token = @ss.tokenize date
65 assert_equal(1 / 0.0, ss.tokenize('.inf'))
69 assert_equal(-1 / 0.0, ss.tokenize('
[all...]
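Taken together, these assertions show Psych::ScalarScanner#tokenize mapping a YAML scalar string onto a native Ruby value (Time, Date, Float, Integer, or the untouched String). A minimal sketch of that behavior, assuming the zero-argument constructor used in deprecated.rb further down (newer psych releases pass a Psych::ClassLoader to new instead):

    require 'psych'

    ss = Psych::ScalarScanner.new     # no-arg form as in deprecated.rb; newer psych wants ScalarScanner.new(Psych::ClassLoader.new)
    ss.tokenize('.inf')               # => Infinity, i.e. 1 / 0.0
    ss.tokenize('2014-10-16')         # => a Date instance
    ss.tokenize('12')                 # => 12 (Integer)
    ss.tokenize('hello')              # => "hello" (plain strings pass through unchanged)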
/macosx-10.10.1/libxslt-13/libxslt/tests/exslt/strings/
Makefile.am
7 tokenize.1.xml tokenize.1.xsl tokenize.1.out \
8 tokenize.2.xml tokenize.2.xsl tokenize.2.out \
9 tokenize.3.xml tokenize.3.xsl tokenize.3.out \
/macosx-10.10.1/ruby-106/ruby/test/rdoc/
test_rdoc_ruby_lex.rb
19 tokens = RDoc::RubyLex.tokenize "def x() end", nil
36 tokens = RDoc::RubyLex.tokenize '__END__', nil
47 tokens = RDoc::RubyLex.tokenize "?\\", nil
58 tokens = RDoc::RubyLex.tokenize <<-'RUBY', nil
84 tokens = RDoc::RubyLex.tokenize '{ class:"foo" }', nil
101 tokens = RDoc::RubyLex.tokenize <<-RUBY, nil
123 tokens = RDoc::RubyLex.tokenize <<-'RUBY', nil
145 tokens = RDoc::RubyLex.tokenize <<-'RUBY', nil
166 tokens = RDoc::RubyLex.tokenize <<-'RUBY', nil
185 tokens = RDoc::RubyLex.tokenize '����', ni
[all...]
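These tests drive RDoc::RubyLex.tokenize, which takes a Ruby source string plus an options argument (nil throughout the tests) and returns an array of RDoc::RubyToken objects. A hedged sketch of the same call, against the RDoc bundled with this Ruby:

    require 'rdoc'
    require 'rdoc/ruby_lex'

    tokens = RDoc::RubyLex.tokenize "def x() end", nil   # second argument: options; nil as in the tests
    tokens.map { |t| t.class.name }
    # => token class names such as "RDoc::RubyToken::TkDEF", "RDoc::RubyToken::TkSPACE", ...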
test_rdoc_markup_parser.rb
1099 assert_equal expected, @RMP.tokenize(str)
1117 assert_equal expected, @RMP.tokenize(str)
1135 assert_equal expected, @RMP.tokenize(str)
1152 assert_equal expected, @RMP.tokenize(str)
1166 assert_equal expected, @RMP.tokenize(str)
1184 assert_equal expected, @RMP.tokenize(str)
1202 assert_equal expected, @RMP.tokenize(str)
1218 assert_equal expected, @RMP.tokenize(str)
1234 assert_equal expected, @RMP.tokenize(str)
1250 assert_equal expected, @RMP.tokenize(st
[all...]
test_rdoc_tom_doc.rb
331 @td.tokenize "Public: Do some stuff\n"
342 @td.tokenize <<-TEXT
365 @td.tokenize <<-TEXT
384 @td.tokenize <<-TEXT
406 @td.tokenize <<-TEXT
429 @td.tokenize <<-TEXT
453 @td.tokenize <<-TEXT
471 @td.tokenize <<-TEXT
/macosx-10.10.1/WebInspectorUI-7600.1.17/UserInterface/External/CodeMirror/
xml.js
68 state.tokenize = parser;
88 state.tokenize = inBlock("meta", "?>");
92 state.tokenize = inTag;
116 state.tokenize = inText;
123 state.tokenize = inText;
126 var next = state.tokenize(stream, state);
129 state.tokenize = inAttribute(ch);
131 return state.tokenize(stream, state);
142 state.tokenize = inTag;
156 state.tokenize
[all...]
coffeescript.js
83 state.tokenize = longComment;
84 return state.tokenize(stream, state);
135 state.tokenize = tokenFactory(stream.current(), false, "string");
136 return state.tokenize(stream, state);
141 state.tokenize = tokenFactory(stream.current(), true, "string-2");
142 return state.tokenize(stream, state);
187 state.tokenize = tokenBase;
197 state.tokenize = tokenBase;
208 state.tokenize = tokenBase;
265 var style = state.tokenize(strea
[all...]
sql.js
59 state.tokenize = tokenLiteral(ch);
60 return state.tokenize(stream, state);
83 state.tokenize = tokenComment;
84 return state.tokenize(stream, state);
125 state.tokenize = tokenBase;
138 state.tokenize = tokenBase;
165 return {tokenize: tokenBase, context: null};
175 var style = state.tokenize(stream, state);
/macosx-10.10.1/ICU-531.30/icuSources/common/
rbtok.h
108 int32_t tokenize(int32_t maxTokens, RuleBasedTokenRange *outTokenRanges, unsigned long *outTokenFlags);
rbtok.cpp
26 int32_t RuleBasedTokenizer::tokenize(int32_t maxTokens, RuleBasedTokenRange *outTokenRanges, unsigned long *outTokenFlags) function in class:RuleBasedTokenizer
urbtok.cpp
119 return ((RuleBasedTokenizer *)bi)->tokenize(maxTokens, outTokens, outTokenFlags);
/macosx-10.10.1/llvmCore-3425.0.34/lib/MC/MCDisassembler/
EDInst.cpp
165 int EDInst::tokenize() { function in class:EDInst
175 return TokenizeResult.setResult(EDToken::tokenize(Tokens,
183 if (tokenize())
189 if (tokenize())
197 if (tokenize())
EDToken.h
116 /// tokenize - Tokenizes a string using the platform- and syntax-specific
126 static int tokenize(std::vector<EDToken*> &tokens,
EDInst.h
90 /// The result of the tokenize() function
156 /// tokenize - populates the Tokens member of the instruction, returning 0 on
158 int tokenize();
EDToken.cpp
88 int EDToken::tokenize(std::vector<EDToken*> &tokens, function in class:EDToken
/macosx-10.10.1/ruby-106/ruby/ext/ripper/lib/ripper/
lexer.rb
17 # p Ripper.tokenize("def m(a) nil end")
20 def Ripper.tokenize(src, filename = '-', lineno = 1) singleton method in class:Ripper
21 Lexer.new(src, filename, lineno).tokenize
47 def tokenize method in class:Ripper.Lexer
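The wrapper above spells out the public API: Ripper.tokenize(src, filename = '-', lineno = 1) builds a Ripper::Lexer and returns the source split into an array of token strings. Its documented behavior, also exercised by the scanner-event tests below:

    require 'ripper'

    Ripper.tokenize("def m(a) nil end")
    # => ["def", " ", "m", "(", "a", ")", " ", "nil", " ", "end"]
    # Joining the tokens reproduces the original source string.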
/macosx-10.10.1/ruby-106/ruby/lib/rdoc/
tom_doc.rb
86 parser.tokenize text
199 def tokenize text method
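RDoc::TomDoc#tokenize (the definition at line 199) runs TomDoc-formatted comment text through the markup parser's scanner, filling the inherited tokens list. A rough sketch mirroring the test calls above; the exact tuple layout is an assumption based on RDoc::Markup::Parser's [type, value, column, line] tokens:

    require 'rdoc'
    require 'rdoc/tom_doc'

    td = RDoc::TomDoc.new
    td.tokenize "Public: Do some stuff\n"
    td.tokens   # => e.g. [[:TEXT, "Public: Do some stuff", 0, 0], [:NEWLINE, "\n", 21, 0]]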
/macosx-10.10.1/ruby-106/ruby/test/ripper/
test_scanner_events.rb
28 Ripper.tokenize('')
30 Ripper.tokenize('a')
32 Ripper.tokenize('1')
34 Ripper.tokenize("1;def m(arg)end")
36 Ripper.tokenize("print(<<""EOS)\nheredoc\nEOS\n")
38 Ripper.tokenize("print( <<""EOS)\nheredoc\nEOS\n")
40 Ripper.tokenize("\#\n\n\#\n\nnil\n")
835 Ripper.tokenize("__END__\njunk junk junk")
/macosx-10.10.1/ntp-92/libopts/
Makefile.am
26 time.c tokenize.c usage.c \
/macosx-10.10.1/ntp-92/sntp/libopts/
Makefile.am
26 time.c tokenize.c usage.c \
/macosx-10.10.1/ruby-106/ruby/ext/psych/lib/psych/
deprecated.rb
38 ScalarScanner.new.tokenize(thing).class.name.downcase
scalar_scanner.rb
29 def tokenize string method in class:Psych.ScalarScanner
/macosx-10.10.1/ruby-106/ruby/lib/rdoc/markup/
parser.rb
60 parser.tokenize str
68 def self.tokenize str singleton method in class:RDoc
70 parser.tokenize str
449 def tokenize input method
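parser.rb pairs a convenience class method with the instance tokenizer: RDoc::Markup::Parser.tokenize(str) (line 68) instantiates a parser, tokenizes the string, and hands back the token stream that the test_rdoc_markup_parser.rb assertions above compare against. A small hedged example:

    require 'rdoc'
    require 'rdoc/markup'

    tokens = RDoc::Markup::Parser.tokenize "= Heading\n\nSome text\n"
    # Tokens are [type, value, column, line] tuples, e.g.
    #   [:HEADER, 1, 0, 0], [:TEXT, "Heading", 2, 0], [:NEWLINE, "\n", 9, 0], ...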
/macosx-10.10.1/ruby-106/ruby/ext/psych/lib/psych/visitors/
to_ruby.rb
48 return @ss.tokenize(o.value) unless o.tag
73 Float(@ss.tokenize(o.value))
98 @ss.tokenize o.value
/macosx-10.10.1/WebInspectorUI-7600.1.17/Tools/PrettyPrinting/
css.js
45 state.tokenize = tokenString(ch);
46 return state.tokenize(stream, state);
71 state.tokenize = tokenParenthesized;
91 if (ch == quote || !escaped && quote != ")") state.tokenize = null;
99 state.tokenize = tokenString(")");
101 state.tokenize = null;
310 return {tokenize: null,
316 if (!state.tokenize && stream.eatSpace()) return null;
317 var style = (state.tokenize || tokenBase)(stream, state);
599 state.tokenize
[all...]

Completed in 308 milliseconds
