From abf44b55e2b05a6ba835a4a397f7b2761922ca2b Mon Sep 17 00:00:00 2001
From: inikulin
Date: Tue, 30 May 2017 20:34:59 +0300
Subject: [PATCH] Add error for duplicate attribute

---
 tokenizer/test1.test |  5 ++++-
 tokenizer/test3.test | 16 ++++++++++++----
 tokenizer/test4.test |  7 ++++++-
 3 files changed, 22 insertions(+), 6 deletions(-)

diff --git a/tokenizer/test1.test b/tokenizer/test1.test
index cf22902b..09d15024 100644
--- a/tokenizer/test1.test
+++ b/tokenizer/test1.test
@@ -85,7 +85,10 @@
 
 {"description":"Repeated attr",
 "input":"",
-"output":[["StartTag", "h", {"a":"b"}]]},
+"output":[["StartTag", "h", {"a":"b"}]],
+"errors":[
+    { "code": "duplicate-attribute", "line": 1, "col": 11 }
+]},
 
 {"description":"Simple comment",
 "input":"",

diff --git a/tokenizer/test3.test b/tokenizer/test3.test
index 9f3e89ef..ba3c15b3 100644
--- a/tokenizer/test3.test
+++ b/tokenizer/test3.test
@@ -9083,7 +9083,10 @@
 
 {"description":"",
 "input":"",
-"output":[["StartTag", "a", {"a":""}]]},
+"output":[["StartTag", "a", {"a":""}]],
+"errors":[
+    { "code": "duplicate-attribute", "line": 1, "col": 7 }
+]},
 
 {"description":"",
 "input":"",
@@ -9107,7 +9110,10 @@
 
 {"description":"",
 "input":"",
-"output":[["StartTag", "a", {"a":""}]]},
+"output":[["StartTag", "a", {"a":""}]],
+"errors":[
+    { "code": "duplicate-attribute", "line": 1, "col": 7 }
+]},
 
 {"description":"",
 "input":"",
@@ -9631,7 +9637,8 @@
 "input":"",
 "output":[["StartTag", "a", {"a":""}]],
 "errors":[
-    { "code": "missing-whitespace-between-attributes", "line": 1, "col": 8 }
+    { "code": "missing-whitespace-between-attributes", "line": 1, "col": 8 },
+    { "code": "duplicate-attribute", "line": 1, "col": 9 }
 ]},
 
 {"description":"",
@@ -9666,7 +9673,8 @@
 "input":"",
 "output":[["StartTag", "a", {"a":""}]],
 "errors":[
-    { "code": "missing-whitespace-between-attributes", "line": 1, "col": 8 }
+    { "code": "missing-whitespace-between-attributes", "line": 1, "col": 8 },
+    { "code": "duplicate-attribute", "line": 1, "col": 9 }
 ]},
 
 {"description":"",

diff --git a/tokenizer/test4.test b/tokenizer/test4.test
index bbe6ce86..ff4c4ef2 100644
--- a/tokenizer/test4.test
+++ b/tokenizer/test4.test
@@ -300,7 +300,11 @@
 
 {"description":"Duplicate different-case attributes",
 "input":"",
-"output":[["StartTag", "x", { "x":"1" }]]},
+"output":[["StartTag", "x", { "x":"1" }]],
+"errors":[
+    { "code": "duplicate-attribute", "line": 1, "col": 9 },
+    { "code": "duplicate-attribute", "line": 1, "col": 13 }
+]},
 
 {"description":"Uppercase close tag attributes",
 "input":"",
@@ -313,6 +317,7 @@
 "input":"",
 "output":[["EndTag", "x"]],
 "errors":[
+    { "code": "duplicate-attribute", "line": 1, "col": 8 },
     { "code": "end-tag-with-attributes", "line": 1, "col": 8 }
 ]},
 
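
A minimal sketch (not part of the patch above) of how a test harness might consume the new "errors" arrays: it assumes a hypothetical tokenize callable that returns (tokens, errors), with each reported error shaped as a {"code", "line", "col"} dict, and assumes the usual top-level "tests" key of the tokenizer .test files; none of these names are defined by this patch.

import json

def run_tokenizer_tests(path, tokenize):
    # Load one tokenizer .test file; the top-level "tests" key is an
    # assumption based on the usual layout of these files.
    with open(path, encoding="utf-8") as f:
        tests = json.load(f).get("tests", [])
    for test in tests:
        tokens, errors = tokenize(test["input"])
        # Emitted tokens must match the expected "output" exactly.
        assert tokens == test["output"], test["description"]
        # Expected parse errors; a test without an "errors" key expects
        # no parse errors at all.
        expected = test.get("errors", [])
        got = [{"code": e["code"], "line": e["line"], "col": e["col"]}
               for e in errors]
        assert got == expected, test["description"]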