Commit ba558164 authored by Rene Saarsoo's avatar Rene Saarsoo
Browse files

Added unit tests for Lexer.

In the process, found and fixed several string- and regex-parsing errors.
parent 1a7e6da7
Loading
Loading
Loading
Loading
+12 −11
Original line number Diff line number Diff line
@@ -23,8 +23,9 @@ module JsDuck
      end
    end

    def next
      @tokens.shift[:value]
    def next(full=false)
      tok = @tokens.shift
      full ? tok : tok[:value]
    end

    def empty?
@@ -36,16 +37,16 @@ module JsDuck
      @tokens = []
      while !@input.eos? do
        skip_white_and_comments
        if @input.check(/\w+/) then
          @tokens << {
            :type => :ident,
            :value => @input.scan(/\w+/)
          }
        elsif @input.check(/[0-9]+/) then
        if @input.check(/[0-9]+/) then
          @tokens << {
            :type => :number,
            :value => eval(@input.scan(/[0-9]+(\.[0-9]*)?/))
          }
        elsif @input.check(/\w+/) then
          @tokens << {
            :type => :ident,
            :value => @input.scan(/\w+/)
          }
        elsif @input.check(/\/\*\*/) then
          @tokens << {
            :type => :doc_comment,
@@ -54,18 +55,18 @@ module JsDuck
        elsif @input.check(/"/) then
          @tokens << {
            :type => :string,
            :value => eval(@input.scan(/"([^\\]|\\.)*"/))
            :value => eval(@input.scan(/"([^"\\]|\\.)*"/))
          }
        elsif @input.check(/'/) then
          @tokens << {
            :type => :string,
            :value => eval(@input.scan(/'([^\\]|\\.)*'/))
            :value => eval(@input.scan(/'([^'\\]|\\.)*'/))
          }
        elsif @input.check(/\//) then
          if regex? then
            @tokens << {
              :type => :regex,
              :value => @input.scan(/\/([^\\]|\\.)*\/[gim]*/)
              :value => @input.scan(/\/([^\/\\]|\\.)*\/[gim]*/)
            }
          else
            @tokens << {

tc_lexer.rb

0 → 100644
+57 −0
Original line number Diff line number Diff line
require "lexer"
require "test/unit"
 
# Unit tests for JsDuck::Lexer: covers identifiers, numbers, strings,
# regex-vs-division disambiguation, skipped comments, and doc-comments.
class TestLexer < Test::Unit::TestCase

  # Runs the lexer over +source+ and verifies that it produces exactly
  # the given [type, value] pairs, in order.
  def assert_tokens(source, expected_tokens)
    lexer = JsDuck::Lexer.new(source)
    expected_tokens.each do |(type, value)|
      assert_equal({:type => type, :value => value}, lexer.next(true))
    end
  end

  def test_simple
    assert_tokens("var foo = 8;",
                  [[:ident, "var"],
                   [:ident, "foo"],
                   [:operator, "="],
                   [:number, 8],
                   [:operator, ";"]])
  end

  def test_regex_vs_division
    # A slash after "=" starts a regex; a slash after an identifier divides.
    assert_tokens("x = /  /; y / 2",
                  [[:ident, "x"],
                   [:operator, "="],
                   [:regex, "/  /"],
                   [:operator, ";"],
                   [:ident, "y"],
                   [:operator, "/"],
                   [:number, 2]])
  end

  def test_strings
    dq  = '"'  # double-quote
    sq  = "'"  # single-quote
    esc = "\\" # backslash
    # A string containing the other kind of quote character.
    assert_tokens(dq + sq + dq + ' "blah"', [[:string, sq]])
    assert_tokens(sq + dq + sq + ' "blah"', [[:string, dq]])
    # A string containing a backslash-escaped quote of its own kind.
    assert_tokens(dq + esc + dq + dq + ' "blah"', [[:string, dq]])
    assert_tokens(sq + esc + sq + sq + ' "blah"', [[:string, sq]])
  end

  def test_comments
    # Line comments and block comments are skipped entirely.
    assert_tokens("a // foo\n b", [[:ident, "a"], [:ident, "b"]])
    assert_tokens("a /* foo */ b", [[:ident, "a"], [:ident, "b"]])
  end

  def test_doc_comment
    lexer = JsDuck::Lexer.new("/** foo */")
    assert_equal(:doc_comment, lexer.next(true)[:type])
  end
end