| author | Misko Hevery | 2010-10-15 11:58:55 -0700 |
|---|---|---|
| committer | Misko Hevery | 2010-10-15 11:58:55 -0700 |
| commit | d320e3d2c3b72896603a3df3abd26adc0bfa3c10 (patch) | |
| tree | 35c9b83a82b9ea0daad8d97275ef555060f1fe01 | |
| parent | de66a319b451cc0beb3e9da7657f6cc8f9733de0 (diff) | |
| download | angular.js-d320e3d2c3b72896603a3df3abd26adc0bfa3c10.tar.bz2 | |
Updated JSTD
| mode | file | changes |
|---|---|---|
| -rw-r--r-- | lib/jstestdriver/JsTestDriver.jar | bin (3168906 -> 3539462 bytes) |
| -rw-r--r-- | test/FormattersTest.js | 2 |
| -rw-r--r-- | test/ParserTest.js | 346 |
3 files changed, 174 insertions, 174 deletions
```diff
diff --git a/lib/jstestdriver/JsTestDriver.jar b/lib/jstestdriver/JsTestDriver.jar
index cb30a6dc..28ea11bc 100644
--- a/lib/jstestdriver/JsTestDriver.jar
+++ b/lib/jstestdriver/JsTestDriver.jar
Binary files differ
diff --git a/test/FormattersTest.js b/test/FormattersTest.js
index b520faf9..8e240195 100644
--- a/test/FormattersTest.js
+++ b/test/FormattersTest.js
@@ -19,7 +19,7 @@ TestCase("formatterTest", {
     assertEquals(true, angular.formatter['boolean'].parse("true"));
     assertEquals(false, angular.formatter['boolean'].parse(""));
     assertEquals(false, angular.formatter['boolean'].parse("false"));
-    assertEquals(null, angular.formatter['boolean'].parse(null));
+    assertEquals(false, angular.formatter['boolean'].parse(null));
   },
 
   testNumber: function() {
diff --git a/test/ParserTest.js b/test/ParserTest.js
index 916beb26..a8211404 100644
--- a/test/ParserTest.js
+++ b/test/ParserTest.js
@@ -1,177 +1,177 @@
-LexerTest = TestCase('LexerTest');
-
-LexerTest.prototype.testTokenizeAString = function(){
-  var tokens = lex("a.bc[22]+1.3|f:'a\\\'c':\"d\\\"e\"");
-  var i = 0;
-  assertEquals(tokens[i].index, 0);
-  assertEquals(tokens[i].text, 'a.bc');
-
-  i++;
-  assertEquals(tokens[i].index, 4);
-  assertEquals(tokens[i].text, '[');
-
-  i++;
-  assertEquals(tokens[i].index, 5);
-  assertEquals(tokens[i].text, 22);
-
-  i++;
-  assertEquals(tokens[i].index, 7);
-  assertEquals(tokens[i].text, ']');
-
-  i++;
-  assertEquals(tokens[i].index, 8);
-  assertEquals(tokens[i].text, '+');
-
-  i++;
-  assertEquals(tokens[i].index, 9);
-  assertEquals(tokens[i].text, 1.3);
-
-  i++;
-  assertEquals(tokens[i].index, 12);
-  assertEquals(tokens[i].text, '|');
-
-  i++;
-  assertEquals(tokens[i].index, 13);
-  assertEquals(tokens[i].text, 'f');
-
-  i++;
-  assertEquals(tokens[i].index, 14);
-  assertEquals(tokens[i].text, ':');
-
-  i++;
-  assertEquals(tokens[i].index, 15);
-  assertEquals(tokens[i].string, "a'c");
-
-  i++;
-  assertEquals(tokens[i].index, 21);
-  assertEquals(tokens[i].text, ':');
-
-  i++;
-  assertEquals(tokens[i].index, 22);
-  assertEquals(tokens[i].string, 'd"e');
-};
-
-LexerTest.prototype.testTokenizeUndefined = function(){
-  var tokens = lex("undefined");
-  var i = 0;
-  assertEquals(tokens[i].index, 0);
-  assertEquals(tokens[i].text, 'undefined');
-  assertEquals(undefined, tokens[i].fn());
-};
-
-
-
-LexerTest.prototype.testTokenizeRegExp = function(){
-  var tokens = lex("/r 1/");
-  var i = 0;
-  assertEquals(tokens[i].index, 0);
-  assertEquals(tokens[i].text, 'r 1');
-  assertEquals("r 1".match(tokens[i].fn())[0], 'r 1');
-};
-
-LexerTest.prototype.testQuotedString = function(){
-  var str = "['\\'', \"\\\"\"]";
-  var tokens = lex(str);
-
-  assertEquals(1, tokens[1].index);
-  assertEquals("'", tokens[1].string);
-
-  assertEquals(7, tokens[3].index);
-  assertEquals('"', tokens[3].string);
-
-};
-
-LexerTest.prototype.testQuotedStringEscape = function(){
-  var str = '"\\"\\n\\f\\r\\t\\v\\u00A0"';
-  var tokens = lex(str);
-
-  assertEquals('"\n\f\r\t\v\u00A0', tokens[0].string);
-};
-
-LexerTest.prototype.testTokenizeUnicode = function(){
-  var tokens = lex('"\\u00A0"');
-  assertEquals(1, tokens.length);
-  assertEquals('\u00a0', tokens[0].string);
-};
-
-LexerTest.prototype.testTokenizeRegExpWithOptions = function(){
-  var tokens = lex("/r/g");
-  var i = 0;
-  assertEquals(tokens[i].index, 0);
-  assertEquals(tokens[i].text, 'r');
-  assertEquals(tokens[i].flags, 'g');
-  assertEquals("rr".match(tokens[i].fn()).length, 2);
-};
-
-LexerTest.prototype.testTokenizeRegExpWithEscape = function(){
-  var tokens = lex("/\\/\\d/");
-  var i = 0;
-  assertEquals(tokens[i].index, 0);
-  assertEquals(tokens[i].text, '\\/\\d');
-  assertEquals("/1".match(tokens[i].fn())[0], '/1');
-};
-
-LexerTest.prototype.testIgnoreWhitespace = function(){
-  var tokens = lex("a \t \n \r b");
-  assertEquals(tokens[0].text, 'a');
-  assertEquals(tokens[1].text, 'b');
-};
-
-LexerTest.prototype.testRelation = function(){
-  var tokens = lex("! == != < > <= >=");
-  assertEquals(tokens[0].text, '!');
-  assertEquals(tokens[1].text, '==');
-  assertEquals(tokens[2].text, '!=');
-  assertEquals(tokens[3].text, '<');
-  assertEquals(tokens[4].text, '>');
-  assertEquals(tokens[5].text, '<=');
-  assertEquals(tokens[6].text, '>=');
-};
-
-LexerTest.prototype.testStatements = function(){
-  var tokens = lex("a;b;");
-  assertEquals(tokens[0].text, 'a');
-  assertEquals(tokens[1].text, ';');
-  assertEquals(tokens[2].text, 'b');
-  assertEquals(tokens[3].text, ';');
-};
-
-LexerTest.prototype.testNumber = function(){
-  var tokens = lex("0.5");
-  expect(tokens[0].text).toEqual(0.5);
-};
-
-LexerTest.prototype.testNegativeNumber = function(){
-  var value = createScope().$eval("-0.5");
-  expect(value).toEqual(-0.5);
-
-  value = createScope().$eval("{a:-0.5}");
-  expect(value).toEqual({a:-0.5});
-};
-
-LexerTest.prototype.testNumberExponent = function(){
-  var tokens = lex("0.5E-10");
-  expect(tokens[0].text).toEqual(0.5E-10);
-  expect(createScope().$eval("0.5E-10")).toEqual(0.5E-10);
-
-  tokens = lex("0.5E+10");
-  expect(tokens[0].text).toEqual(0.5E+10);
-};
-
-LexerTest.prototype.testNumberExponentInvalid = function(){
-  assertThrows('Lexer found invalid exponential value "0.5E-"', function(){
-    lex("0.5E-");
-  });
-  assertThrows('Lexer found invalid exponential value "0.5E-A"', function(){
-    lex("0.5E-A");
+desccribe('parser', function(){
+  describe('lexer', function(){
+    it('should TokenizeAString', function(){
+      var tokens = lex("a.bc[22]+1.3|f:'a\\\'c':\"d\\\"e\"");
+      var i = 0;
+      assertEquals(tokens[i].index, 0);
+      assertEquals(tokens[i].text, 'a.bc');
+
+      i++;
+      assertEquals(tokens[i].index, 4);
+      assertEquals(tokens[i].text, '[');
+
+      i++;
+      assertEquals(tokens[i].index, 5);
+      assertEquals(tokens[i].text, 22);
+
+      i++;
+      assertEquals(tokens[i].index, 7);
+      assertEquals(tokens[i].text, ']');
+
+      i++;
+      assertEquals(tokens[i].index, 8);
+      assertEquals(tokens[i].text, '+');
+
+      i++;
+      assertEquals(tokens[i].index, 9);
+      assertEquals(tokens[i].text, 1.3);
+
+      i++;
+      assertEquals(tokens[i].index, 12);
+      assertEquals(tokens[i].text, '|');
+
+      i++;
+      assertEquals(tokens[i].index, 13);
+      assertEquals(tokens[i].text, 'f');
+
+      i++;
+      assertEquals(tokens[i].index, 14);
+      assertEquals(tokens[i].text, ':');
+
+      i++;
+      assertEquals(tokens[i].index, 15);
+      assertEquals(tokens[i].string, "a'c");
+
+      i++;
+      assertEquals(tokens[i].index, 21);
+      assertEquals(tokens[i].text, ':');
+
+      i++;
+      assertEquals(tokens[i].index, 22);
+      assertEquals(tokens[i].string, 'd"e');
+    });
+
+    it('should TokenizeUndefined', function(){
+      var tokens = lex("undefined");
+      var i = 0;
+      assertEquals(tokens[i].index, 0);
+      assertEquals(tokens[i].text, 'undefined');
+      assertEquals(undefined, tokens[i].fn());
+    });
+
+
+
+    it('should TokenizeRegExp', function(){
+      var tokens = lex("/r 1/");
+      var i = 0;
+      assertEquals(tokens[i].index, 0);
+      assertEquals(tokens[i].text, 'r 1');
+      assertEquals("r 1".match(tokens[i].fn())[0], 'r 1');
+    });
+
+    it('should QuotedString', function(){
+      var str = "['\\'', \"\\\"\"]";
+      var tokens = lex(str);
+
+      assertEquals(1, tokens[1].index);
+      assertEquals("'", tokens[1].string);
+
+      assertEquals(7, tokens[3].index);
+      assertEquals('"', tokens[3].string);
+    });
+
+    it('should QuotedStringEscape', function(){
+      var str = '"\\"\\n\\f\\r\\t\\v\\u00A0"';
+      var tokens = lex(str);
+
+      assertEquals('"\n\f\r\t\v\u00A0', tokens[0].string);
+    });
+
+    it('should TokenizeUnicode', function(){
+      var tokens = lex('"\\u00A0"');
+      assertEquals(1, tokens.length);
+      assertEquals('\u00a0', tokens[0].string);
+    });
+
+    it('should TokenizeRegExpWithOptions', function(){
+      var tokens = lex("/r/g");
+      var i = 0;
+      assertEquals(tokens[i].index, 0);
+      assertEquals(tokens[i].text, 'r');
+      assertEquals(tokens[i].flags, 'g');
+      assertEquals("rr".match(tokens[i].fn()).length, 2);
+    });
+
+    it('should TokenizeRegExpWithEscape', function(){
+      var tokens = lex("/\\/\\d/");
+      var i = 0;
+      assertEquals(tokens[i].index, 0);
+      assertEquals(tokens[i].text, '\\/\\d');
+      assertEquals("/1".match(tokens[i].fn())[0], '/1');
+    });
+
+    it('should IgnoreWhitespace', function(){
+      var tokens = lex("a \t \n \r b");
+      assertEquals(tokens[0].text, 'a');
+      assertEquals(tokens[1].text, 'b');
+    });
+
+    it('should Relation', function(){
+      var tokens = lex("! == != < > <= >=");
+      assertEquals(tokens[0].text, '!');
+      assertEquals(tokens[1].text, '==');
+      assertEquals(tokens[2].text, '!=');
+      assertEquals(tokens[3].text, '<');
+      assertEquals(tokens[4].text, '>');
+      assertEquals(tokens[5].text, '<=');
+      assertEquals(tokens[6].text, '>=');
+    });
+
+    it('should Statements', function(){
+      var tokens = lex("a;b;");
+      assertEquals(tokens[0].text, 'a');
+      assertEquals(tokens[1].text, ';');
+      assertEquals(tokens[2].text, 'b');
+      assertEquals(tokens[3].text, ';');
+    });
+
+    it('should Number', function(){
+      var tokens = lex("0.5");
+      expect(tokens[0].text).toEqual(0.5);
+    });
+
+    it('should NegativeNumber', function(){
+      var value = createScope().$eval("-0.5");
+      expect(value).toEqual(-0.5);
+
+      value = createScope().$eval("{a:-0.5}");
+      expect(value).toEqual({a:-0.5});
+    });
+
+    it('should NumberExponent', function(){
+      var tokens = lex("0.5E-10");
+      expect(tokens[0].text).toEqual(0.5E-10);
+      expect(createScope().$eval("0.5E-10")).toEqual(0.5E-10);
+
+      tokens = lex("0.5E+10");
+      expect(tokens[0].text).toEqual(0.5E+10);
+    });
+
+    it('should NumberExponentInvalid', function(){
+      assertThrows('Lexer found invalid exponential value "0.5E-"', function(){
+        lex("0.5E-");
+      });
+      assertThrows('Lexer found invalid exponential value "0.5E-A"', function(){
+        lex("0.5E-A");
+      });
+    });
+
+    it('should NumberStartingWithDot', function(){
+      var tokens = lex(".5");
+      expect(tokens[0].text).toEqual(0.5);
+    });
   });
-};
-
-LexerTest.prototype.testNumberStartingWithDot = function(){
-  var tokens = lex(".5");
-  expect(tokens[0].text).toEqual(0.5);
-};
-
+});
 ParserTest = TestCase('ParserTest');
```
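The ParserTest.js portion of this diff is a mechanical migration: each JsTestDriver `LexerTest.prototype.test*` method is rewritten as a Jasmine-style `it('should ...')` block nested inside `describe('parser')` / `describe('lexer')`. Below is a minimal before/after sketch of that pattern, taken from the `Number` test in the diff; the `lex`, `expect`, `TestCase`, `describe`, and `it` globals are assumed to be supplied by the project's JsTestDriver/Jasmine-style test harness, not defined here.

```js
// Before (removed): JsTestDriver TestCase style
LexerTest = TestCase('LexerTest');
LexerTest.prototype.testNumber = function() {
  var tokens = lex("0.5");              // tokenize an Angular expression
  expect(tokens[0].text).toEqual(0.5);  // the literal is lexed as a number
};

// After (added): BDD-style describe/it blocks
describe('parser', function() {
  describe('lexer', function() {
    it('should Number', function() {
      var tokens = lex("0.5");
      expect(tokens[0].text).toEqual(0.5);
    });
  });
});
```

The test bodies are carried over essentially unchanged; only the registration syntax (and indentation) differs, which is consistent with the diffstat reporting equal insertions and deletions.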
