Mirror of https://github.com/Shopify/liquid.git (synced 2025-10-01 00:00:26 -04:00)
Remove the Token class from the lexer in favour of less smart but faster arrays

commit bacacf2fd0 (parent 1b43bf5686)
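The performance claim in the commit message is easy to sanity-check in isolation. The sketch below is not part of the commit: the Token class is paraphrased from the removed code, and the loop count is arbitrary. It compares allocating and comparing a small wrapper class against plain two-element arrays:

    require 'benchmark'

    # Paraphrase of the Token class removed by this commit.
    class Token
      attr_accessor :type, :contents

      def initialize(*args)
        @type, @contents = args
      end

      def ==(other)
        return unless other.respond_to?(:type) && other.respond_to?(:contents)
        @type == other.type && @contents == other.contents
      end
    end

    N = 1_000_000
    Benchmark.bmbm do |bm|
      # Each iteration allocates two tokens and compares them,
      # mimicking what the lexer and its tests do constantly.
      bm.report('Token class') { N.times { Token.new(:id, 'hi') == Token.new(:id, 'hi') } }
      bm.report('plain array') { N.times { [:id, 'hi'] == [:id, 'hi'] } }
    end

Arrays tend to win here because Array#== is implemented in C and array literals skip the method dispatch of a custom constructor and comparator.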
@@ -1,30 +1,4 @@
 module Liquid
-  class Token
-    attr_accessor :type, :contents
-    def initialize(*args)
-      @type, @contents = args
-    end
-
-    def self.[](*args)
-      Token.new(*args)
-    end
-
-    def inspect
-      out = "<#{@type}"
-      out << ": \'#{@contents}\'" if contents
-      out << '>'
-    end
-
-    def to_s
-      self.inspect
-    end
-
-    def ==(other)
-      return unless other && other.respond_to?(:type) && other.respond_to?(:contents)
-      @type == other.type && @contents == other.contents
-    end
-  end
-
   class Lexer
     SPECIALS = {
       '|' => :pipe,
@@ -51,7 +25,7 @@ module Liquid
       loop do
         tok = next_token
         unless tok
-          @output << Token[:end_of_string]
+          @output << [:end_of_string]
           return @output
         end
         @output << tok
@@ -59,16 +33,16 @@ module Liquid
     end

     def next_token
-      consume_whitespace
+      @ss.skip(/\s*/)
       return if @ss.eos?

       case
-      when t = @ss.scan(COMPARISON_OPERATOR) then Token[:comparison, t]
-      when t = @ss.scan(SINGLE_STRING_LITERAL) then Token[:string, t]
-      when t = @ss.scan(DOUBLE_STRING_LITERAL) then Token[:string, t]
-      when t = @ss.scan(FLOAT_LITERAL) then Token[:float, t]
-      when t = @ss.scan(INTEGER_LITERAL) then Token[:integer, t]
-      when t = @ss.scan(IDENTIFIER) then Token[:id, t]
+      when t = @ss.scan(COMPARISON_OPERATOR) then [:comparison, t]
+      when t = @ss.scan(SINGLE_STRING_LITERAL) then [:string, t]
+      when t = @ss.scan(DOUBLE_STRING_LITERAL) then [:string, t]
+      when t = @ss.scan(FLOAT_LITERAL) then [:float, t]
+      when t = @ss.scan(INTEGER_LITERAL) then [:integer, t]
+      when t = @ss.scan(IDENTIFIER) then [:id, t]
       else
         lex_specials
       end
@@ -78,14 +52,10 @@ module Liquid
     def lex_specials
       c = @ss.getch
       if s = SPECIALS[c]
-        return Token[s,c]
+        return [s,c]
       end

       raise SyntaxError, "Unexpected character #{c}."
     end
-
-    def consume_whitespace
-      @ss.skip(/\s*/)
-    end
   end
 end
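After this change a token is just a two-element array, [type_symbol, contents_string], and the token stream ends with an [:end_of_string] sentinel. For example, taken from the updated tests further below:

    Lexer.new('hi 50').tokenize
    # => [[:id, 'hi'], [:integer, '50'], [:end_of_string]]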
@@ -14,11 +14,11 @@ module Liquid

     def consume(type = nil)
       token = @tokens[@p]
-      if type && token.type != type
+      if type && token[0] != type
         raise SyntaxError, "Expected #{type} but found #{@tokens[@p]}"
       end
       @p += 1
-      token.contents
+      token[1]
     end

     # Only consumes the token if it matches the type
@@ -26,35 +26,35 @@ module Liquid
     # or false otherwise.
     def consume?(type)
       token = @tokens[@p]
-      return false unless token && token.type == type
+      return false unless token && token[0] == type
       @p += 1
-      token.contents
+      token[1]
     end

     # Like consume? Except for an :id token of a certain name
     def id?(str)
       token = @tokens[@p]
-      return false unless token && token.type == :id
-      return false unless token.contents == str
+      return false unless token && token[0] == :id
+      return false unless token[1] == str
       @p += 1
-      token.contents
+      token[1]
     end

     def look(type, ahead = 0)
       tok = @tokens[@p + ahead]
       return false unless tok
-      tok.type == type
+      tok[0] == type
     end

     # === General Liquid parsing functions ===

     def expression
       token = @tokens[@p]
-      if token.type == :id
+      if token[0] == :id
         variable_signature
-      elsif [:string, :integer, :float].include? token.type
+      elsif [:string, :integer, :float].include? token[0]
         consume
-        token.contents
+        token[1]
       else
         raise SyntaxError, "#{token} is not a valid expression."
       end
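The parser helpers now index into the array instead of calling accessors: token[0] replaces token.type and token[1] replaces token.contents. A minimal illustration of that convention (hypothetical token stream, not code from the commit):

    tokens = [[:id, 'hi'], [:integer, '50'], [:end_of_string]]
    tok = tokens[0]
    tok[0]  # => :id    (formerly token.type)
    tok[1]  # => 'hi'   (formerly token.contents)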
@@ -11,7 +11,7 @@ results = profiler.run_profile
 puts 'Success'
 puts

-[RubyProf::FlatPrinter, RubyProf::GraphPrinter, RubyProf::GraphHtmlPrinter, RubyProf::CallTreePrinter].each do |klass|
+[RubyProf::FlatPrinter, RubyProf::GraphHtmlPrinter, RubyProf::CallTreePrinter, RubyProf::DotPrinter].each do |klass|
   filename = (ENV['TMP'] || '/tmp') + (klass.name.include?('Html') ? "/liquid.#{klass.name.downcase}.html" : "/callgrind.liquid.#{klass.name.downcase}.txt")
   filename.gsub!(/:+/, '_')
   File.open(filename, "w+") { |fp| klass.new(results).print(fp, :print_file => true) }
@@ -5,39 +5,39 @@ class LexerTest < Test::Unit::TestCase

   def test_strings
     tokens = Lexer.new(%! 'this is a test""' "wat 'lol'"!).tokenize
-    assert_equal [Token[:string,%!'this is a test""'!], Token[:string, %!"wat 'lol'"!], Token[:end_of_string]], tokens
+    assert_equal [[:string,%!'this is a test""'!], [:string, %!"wat 'lol'"!], [:end_of_string]], tokens
   end

   def test_integer
     tokens = Lexer.new('hi 50').tokenize
-    assert_equal [Token[:id,'hi'], Token[:integer, '50'], Token[:end_of_string]], tokens
+    assert_equal [[:id,'hi'], [:integer, '50'], [:end_of_string]], tokens
   end

   def test_float
     tokens = Lexer.new('hi 5.0').tokenize
-    assert_equal [Token[:id,'hi'], Token[:float, '5.0'], Token[:end_of_string]], tokens
+    assert_equal [[:id,'hi'], [:float, '5.0'], [:end_of_string]], tokens
   end

   def test_comparison
     tokens = Lexer.new('== <> contains').tokenize
-    assert_equal [Token[:comparison,'=='], Token[:comparison, '<>'], Token[:comparison, 'contains'], Token[:end_of_string]], tokens
+    assert_equal [[:comparison,'=='], [:comparison, '<>'], [:comparison, 'contains'], [:end_of_string]], tokens
   end

   def test_specials
     tokens = Lexer.new('| .:').tokenize
-    assert_equal [Token[:pipe, '|'], Token[:dot, '.'], Token[:colon, ':'], Token[:end_of_string]], tokens
+    assert_equal [[:pipe, '|'], [:dot, '.'], [:colon, ':'], [:end_of_string]], tokens
     tokens = Lexer.new('[,]').tokenize
-    assert_equal [Token[:open_square, '['], Token[:comma, ','], Token[:close_square, ']'], Token[:end_of_string]], tokens
+    assert_equal [[:open_square, '['], [:comma, ','], [:close_square, ']'], [:end_of_string]], tokens
   end

   def test_fancy_identifiers
     tokens = Lexer.new('hi! five?').tokenize
-    assert_equal [Token[:id,'hi!'], Token[:id, 'five?'], Token[:end_of_string]], tokens
+    assert_equal [[:id,'hi!'], [:id, 'five?'], [:end_of_string]], tokens
   end

   def test_whitespace
     tokens = Lexer.new("five|\n\t ==").tokenize
-    assert_equal [Token[:id,'five'], Token[:pipe, '|'], Token[:comparison, '=='], Token[:end_of_string]], tokens
+    assert_equal [[:id,'five'], [:pipe, '|'], [:comparison, '=='], [:end_of_string]], tokens
   end

   def test_unexpected_character
@@ -48,8 +48,8 @@ class LexerTest < Test::Unit::TestCase

   def test_next_token
     l = Lexer.new('hi 5.0')
-    assert_equal Token[:id, 'hi'], l.next_token
-    assert_equal Token[:float, '5.0'], l.next_token
+    assert_equal [:id, 'hi'], l.next_token
+    assert_equal [:float, '5.0'], l.next_token
     assert_nil l.next_token
   end
 end