summary refs log tree commit diff stats
path: root/test.lua
diff options
context:
space:
mode:
author SoniEx2 <endermoneymod@gmail.com> 2019-04-08 13:57:28 -0300
committer SoniEx2 <endermoneymod@gmail.com> 2019-04-08 13:58:06 -0300
commit f56e1bd7e7f9a8d0a55146edba4e7c2ee071487a (patch)
tree 7a41eb92ffadeffa0d779573399bb488be4b6ede /test.lua
parent fd48534de3427de16c3077f7d112d0bcfb030b73 (diff)
Add copyright notices, everything almost works
Numbers and long comments aren't working
Diffstat (limited to 'test.lua')
-rw-r--r-- test.lua 96
1 file changed, 92 insertions, 4 deletions
diff --git a/test.lua b/test.lua
index f9648eb..a8a830d 100644
--- a/test.lua
+++ b/test.lua
@@ -90,6 +90,7 @@ do -- lua tokens
     else
         assert(state[1] == luatokens.tokens.TK_STRING)
         assert(state[2] == "hello world")
+        assert(state[3] == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- lua tokens
@@ -110,6 +111,7 @@ do -- more lua tokens
     else
         assert(state[1] == luatokens.tokens.TK_STRING)
         assert(state[2] == "\7\8\12\10\13\9\11\92\34\39\65\65\10")
+        assert(state[3] == nil)
         assert(state.line == 2)
     end
 end -- lua tokens
@@ -129,6 +131,7 @@ do -- even more lua tokens
     else
         assert(state[1] == luatokens.tokens.TK_STRING)
         assert(state[2] == "A")
+        assert(state[3] == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- lua tokens
@@ -168,6 +171,7 @@ do -- even more lua tokens
         assert(table.remove(state, 1) == "\252\132\128\128\128\128")
         assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
         assert(table.remove(state, 1) == "\253\191\191\191\191\191")
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- lua tokens
@@ -188,6 +192,7 @@ do -- simple lua tokens
         assert(table.remove(state, 1) == "[")
         assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
         assert(table.remove(state, 1) == "")
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- lua tokens
@@ -207,6 +212,7 @@ do -- simple long string
     else
         assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
         assert(table.remove(state, 1) == "")
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- long string
@@ -226,6 +232,7 @@ do -- long string with depth 1
     else
         assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
         assert(table.remove(state, 1) == "")
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- long string
@@ -245,6 +252,7 @@ do -- long string with "nested" long string
     else
         assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
         assert(table.remove(state, 1) == "[[]]")
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- long string
@@ -252,7 +260,7 @@ end -- long string
 do -- long string edge cases
     local luatokens = require "luatokens"
     local tokens = luatokens.defs
-    local state, err, etoken, estate = parser.parse(tokens, "[==[]=]==][==[]]==]")
+    local state, err, etoken, estate = parser.parse(tokens, "[==[]=]==][==[]]==][=[] ]=]")
     local case = case()
     if not state then
         print(case, "---- IN  TOKENS ----")
@@ -266,6 +274,9 @@ do -- long string edge cases
         assert(table.remove(state, 1) == "]=")
         assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
         assert(table.remove(state, 1) == "]")
+        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
+        assert(table.remove(state, 1) == "] ")
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 1 or not state.line)
     end
 end -- long string
@@ -309,20 +320,97 @@ do -- keywords
         assert(table.remove(state, 1) == luatokens.tokens.TK_TRUE)
         assert(table.remove(state, 1) == luatokens.tokens.TK_UNTIL)
         assert(table.remove(state, 1) == luatokens.tokens.TK_WHILE)
+        assert(table.remove(state, 1) == nil)
         assert(state.line == 4)
     end
 end -- keywords
 
-do -- FUCK
+do -- "other tokens"
+    local luatokens = require "luatokens"
+    local tokens = luatokens.defs
+    local state, err, etoken, estate = parser.parse(tokens, [[
+     +     -     *     /     %     ^     #
+     &     ~     |     <<    >>    //
+     ==    ~=    <=    >=    <     >     =
+     (     )     {     }     [     ]     ::
+     ;     :     ,     .     ..    ...]])
+    local case = case()
+    if not state then
+        print(case, "---- IN  TOKENS ----")
+        print(case, err, etoken)
+        for i,v in pairs(estate) do
+            print(case, i, v)
+        end
+        print(case, "---- OUT TOKENS ----")
+    else
+        assert(table.remove(state, 1) == "+")
+        assert(table.remove(state, 1) == "-")
+        assert(table.remove(state, 1) == "*")
+        assert(table.remove(state, 1) == "/")
+        assert(table.remove(state, 1) == "%")
+        assert(table.remove(state, 1) == "^")
+        assert(table.remove(state, 1) == "#")
+        assert(table.remove(state, 1) == "&")
+        assert(table.remove(state, 1) == "~")
+        assert(table.remove(state, 1) == "|")
+        assert(table.remove(state, 1) == luatokens.tokens.TK_SHL)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_SHR)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_IDIV)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_EQ)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_NE)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_LE)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_GE)
+        assert(table.remove(state, 1) == "<")
+        assert(table.remove(state, 1) == ">")
+        assert(table.remove(state, 1) == "=")
+        assert(table.remove(state, 1) == "(")
+        assert(table.remove(state, 1) == ")")
+        assert(table.remove(state, 1) == "{")
+        assert(table.remove(state, 1) == "}")
+        assert(table.remove(state, 1) == "[")
+        assert(table.remove(state, 1) == "]")
+        assert(table.remove(state, 1) == luatokens.tokens.TK_DBCOLON)
+        assert(table.remove(state, 1) == ";")
+        assert(table.remove(state, 1) == ":")
+        assert(table.remove(state, 1) == ",")
+        assert(table.remove(state, 1) == ".")
+        assert(table.remove(state, 1) == luatokens.tokens.TK_CONCAT)
+        assert(table.remove(state, 1) == luatokens.tokens.TK_DOTS)
+        assert(table.remove(state, 1) == nil)
+        assert(state.line == 5)
+    end
+end -- "other tokens"
+
+do -- long comments
+    local luatokens = require "luatokens"
+    local tokens = luatokens.defs
+    local state, err, etoken, estate = parser.parse(tokens, [==[--[[
+    --]]]==])
+    local case = case()
+    if not state then
+        print(case, "---- IN  TOKENS ----")
+        print(case, err, etoken)
+        for i,v in pairs(estate) do
+            print(case, i, v)
+        end
+        print(case, "---- OUT TOKENS ----")
+    else
+        assert(table.remove(state, 1) == nil)
+        assert(state.line == 2)
+    end
+end -- long comments
+
+while false do -- FUCK
     local luatokens = require "luatokens"
-    local luatokens_file = io.open("./luatokens.lua", "r"):read((_VERSION == "Lua 5.1" or _VERSION == "Lua 5.2") and "*a" or "a")
+    local luatokens_file = io.open("./luatokens.lua", "r")
     local tokens = luatokens.defs
-    local state, err, etoken, estate = parser.parse(tokens, luatokens_file)
+    local state, err, etoken, estate = parser.parse(tokens, function() return luatokens_file:read(8192) end)
     local case = case()
     if not state then
         print(case, "---- IN  TOKENS ----")
         print(case, err, etoken)
         for i,v in pairs(estate) do
+            v = luatokens.reverse_keywords[v] or luatokens.reverse_tokens[v] or v
             print(case, i, v)
         end
         print(case, "---- OUT TOKENS ----")