// Tokeniser.test.js
  1. var Tokeniser = require("./Tokeniser");
  2. var Token = require("../lib/Token");
  3. var StringSource = require("../lib/StringSource");
  4. exports.stringIsSingleIdentifier = stringIsTokenisedTo("blah", [
  5. new Token("identifier", "blah", stringSourceRange("blah", 0, 4)),
  6. new Token("end", null, stringSourceRange("blah", 4, 4))
  7. ]);
  8. exports.identifiersAreSeparatedByWhitespace = stringIsTokenisedTo("one two", [
  9. new Token("identifier", "one", stringSourceRange("one two", 0, 3)),
  10. new Token("identifier", "two", stringSourceRange("one two", 4, 7)),
  11. new Token("end", null, stringSourceRange("one two", 7, 7))
  12. ]);
  13. exports.canDetectKeywords = stringIsTokenisedTo("true", [
  14. new Token("keyword", "true", stringSourceRange("true", 0, 4)),
  15. new Token("end", null, stringSourceRange("true", 4, 4))
  16. ]);
  17. exports.emptyStringIsTokenisedToSingleEndToken = stringIsTokenisedTo("", [
  18. new Token("end", null, stringSourceRange("", 0, 0))
  19. ]);
  20. function stringIsTokenisedTo(input, expected) {
  21. return function(test) {
  22. test.deepEqual(expected, tokenise(input));
  23. test.done();
  24. };
  25. };
  26. function stringSourceRange(string, startIndex, endIndex) {
  27. return new StringSource(string).range(startIndex, endIndex);
  28. };
  29. function tokenise(input) {
  30. return new Tokeniser({keywords: ["true"]}).tokenise(input);
  31. };