# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for lr1."""

import collections
import unittest

from compiler.front_end import lr1
from compiler.util import parser_types


def _make_items(text):
  """Makes a list of lr1.Items from the lines in text."""
  return frozenset([lr1.Item.parse(line.strip()) for line in text.splitlines()])


Token = collections.namedtuple("Token", ["symbol", "source_location"])


def _tokenize(text):
  """"Tokenizes" text by making each character into a token."""
  result = []
  for i in range(len(text)):
    result.append(Token(text[i], parser_types.make_location(
        (1, i + 1), (1, i + 2))))
  return result


def _parse_productions(text):
  """Parses text into a grammar by calling Production.parse on each line."""
  return [parser_types.Production.parse(line) for line in text.splitlines()]

# Example grammar 4.54 from Aho, Lam, Sethi, Ullman (ALSU) p263.
_alsu_grammar = lr1.Grammar("S", _parse_productions("""S -> C C
                                                        C -> c C
                                                        C -> d"""))

# Item sets corresponding to the above grammar, ALSU pp263-264.
_alsu_items = [
    _make_items("""S' -> . S, $
                   S -> . C C, $
                   C -> . c C, c
                   C -> . c C, d
                   C -> . d, c
                   C -> . d, d"""),
    _make_items("""S' -> S ., $"""),
    _make_items("""S -> C . C, $
                   C -> . c C, $
                   C -> . d, $"""),
    _make_items("""C -> c . C, c
                   C -> c . C, d
                   C -> . c C, c
                   C -> . c C, d
                   C -> . d, c
                   C -> . d, d"""),
    _make_items("""C -> d ., c
                   C -> d ., d"""),
    _make_items("""S -> C C ., $"""),
    _make_items("""C -> c . C, $
                   C -> . c C, $
                   C -> . d, $"""),
    _make_items("""C -> d ., $"""),
    _make_items("""C -> c C ., c
                   C -> c C ., d"""),
    _make_items("""C -> c C ., $"""),
]

# ACTION table corresponding to the above grammar, ALSU p266.
_alsu_action = {
    (0, "c"): lr1.Shift(3, _alsu_items[3]),
    (0, "d"): lr1.Shift(4, _alsu_items[4]),
    (1, lr1.END_OF_INPUT): lr1.Accept(),
    (2, "c"): lr1.Shift(6, _alsu_items[6]),
    (2, "d"): lr1.Shift(7, _alsu_items[7]),
    (3, "c"): lr1.Shift(3, _alsu_items[3]),
    (3, "d"): lr1.Shift(4, _alsu_items[4]),
    (4, "c"): lr1.Reduce(parser_types.Production("C", ("d",))),
    (4, "d"): lr1.Reduce(parser_types.Production("C", ("d",))),
    (5, lr1.END_OF_INPUT): lr1.Reduce(parser_types.Production("S", ("C", "C"))),
    (6, "c"): lr1.Shift(6, _alsu_items[6]),
    (6, "d"): lr1.Shift(7, _alsu_items[7]),
    (7, lr1.END_OF_INPUT): lr1.Reduce(parser_types.Production("C", ("d",))),
    (8, "c"): lr1.Reduce(parser_types.Production("C", ("c", "C"))),
    (8, "d"): lr1.Reduce(parser_types.Production("C", ("c", "C"))),
    (9, lr1.END_OF_INPUT): lr1.Reduce(parser_types.Production("C", ("c", "C"))),
}

# GOTO table corresponding to the above grammar, ALSU p266.
_alsu_goto = {(0, "S"): 1, (0, "C"): 2, (2, "C"): 5, (3, "C"): 8, (6, "C"): 9}


def _normalize_table(items, table):
  """Returns a canonical-form version of items and table, for comparisons."""
  item_to_original_index = {}
  for i in range(len(items)):
    item_to_original_index[items[i]] = i
  # Keep the start item set in position 0, and sort the remaining item sets
  # into a canonical order, so that tables built from differently-ordered
  # item sets can be compared directly.
  sorted_items = items[0:1] + sorted(items[1:], key=sorted)
  original_index_to_index = {}
  for i in range(len(sorted_items)):
    original_index_to_index[item_to_original_index[sorted_items[i]]] = i
  updated_table = {}
  for k in table:
    new_k = original_index_to_index[k[0]], k[1]
    new_value = table[k]
    if isinstance(new_value, int):
      new_value = original_index_to_index[new_value]
    elif isinstance(new_value, lr1.Shift):
      new_value = lr1.Shift(original_index_to_index[new_value.state],
                            new_value.items)
    updated_table[new_k] = new_value
  return sorted_items, updated_table


class Lr1Test(unittest.TestCase):
  """Tests for lr1."""

  def test_parse_lr1item(self):
    self.assertEqual(lr1.Item.parse("S' -> . S, $"),
S, $"), 131*99e0aae7SDavid Rees lr1.Item(parser_types.Production(lr1.START_PRIME, ("S",)), 132*99e0aae7SDavid Rees 0, lr1.END_OF_INPUT, "S")) 133*99e0aae7SDavid Rees 134*99e0aae7SDavid Rees def test_symbol_extraction(self): 135*99e0aae7SDavid Rees self.assertEqual(_alsu_grammar.terminals, set(["c", "d", lr1.END_OF_INPUT])) 136*99e0aae7SDavid Rees self.assertEqual(_alsu_grammar.nonterminals, set(["S", "C", 137*99e0aae7SDavid Rees lr1.START_PRIME])) 138*99e0aae7SDavid Rees self.assertEqual(_alsu_grammar.symbols, 139*99e0aae7SDavid Rees set(["c", "d", "S", "C", lr1.END_OF_INPUT, 140*99e0aae7SDavid Rees lr1.START_PRIME])) 141*99e0aae7SDavid Rees 142*99e0aae7SDavid Rees def test_items(self): 143*99e0aae7SDavid Rees self.assertEqual(set(_alsu_grammar._items()[0]), frozenset(_alsu_items)) 144*99e0aae7SDavid Rees 145*99e0aae7SDavid Rees def test_terminal_nonterminal_production_tables(self): 146*99e0aae7SDavid Rees parser = _alsu_grammar.parser() 147*99e0aae7SDavid Rees self.assertEqual(parser.terminals, _alsu_grammar.terminals) 148*99e0aae7SDavid Rees self.assertEqual(parser.nonterminals, _alsu_grammar.nonterminals) 149*99e0aae7SDavid Rees self.assertEqual(parser.productions, _alsu_grammar.productions) 150*99e0aae7SDavid Rees 151*99e0aae7SDavid Rees def test_action_table(self): 152*99e0aae7SDavid Rees parser = _alsu_grammar.parser() 153*99e0aae7SDavid Rees norm_items, norm_action = _normalize_table(parser.item_sets, parser.action) 154*99e0aae7SDavid Rees test_items, test_action = _normalize_table(_alsu_items, _alsu_action) 155*99e0aae7SDavid Rees self.assertEqual(norm_items, test_items) 156*99e0aae7SDavid Rees self.assertEqual(norm_action, test_action) 157*99e0aae7SDavid Rees 158*99e0aae7SDavid Rees def test_goto_table(self): 159*99e0aae7SDavid Rees parser = _alsu_grammar.parser() 160*99e0aae7SDavid Rees norm_items, norm_goto = _normalize_table(parser.item_sets, parser.goto) 161*99e0aae7SDavid Rees test_items, test_goto = _normalize_table(_alsu_items, _alsu_goto) 162*99e0aae7SDavid Rees self.assertEqual(norm_items, test_items) 163*99e0aae7SDavid Rees self.assertEqual(norm_goto, test_goto) 164*99e0aae7SDavid Rees 165*99e0aae7SDavid Rees def test_successful_parse(self): 166*99e0aae7SDavid Rees parser = _alsu_grammar.parser() 167*99e0aae7SDavid Rees loc = parser_types.parse_location 168*99e0aae7SDavid Rees s_to_c_c = parser_types.Production.parse("S -> C C") 169*99e0aae7SDavid Rees c_to_c_c = parser_types.Production.parse("C -> c C") 170*99e0aae7SDavid Rees c_to_d = parser_types.Production.parse("C -> d") 171*99e0aae7SDavid Rees self.assertEqual( 172*99e0aae7SDavid Rees lr1.Reduction("S", [lr1.Reduction("C", [ 173*99e0aae7SDavid Rees Token("c", loc("1:1-1:2")), lr1.Reduction( 174*99e0aae7SDavid Rees "C", [Token("c", loc("1:2-1:3")), 175*99e0aae7SDavid Rees lr1.Reduction("C", 176*99e0aae7SDavid Rees [Token("c", loc("1:3-1:4")), lr1.Reduction( 177*99e0aae7SDavid Rees "C", [Token("d", loc("1:4-1:5"))], 178*99e0aae7SDavid Rees c_to_d, loc("1:4-1:5"))], c_to_c_c, 179*99e0aae7SDavid Rees loc("1:3-1:5"))], c_to_c_c, loc("1:2-1:5")) 180*99e0aae7SDavid Rees ], c_to_c_c, loc("1:1-1:5")), lr1.Reduction( 181*99e0aae7SDavid Rees "C", [Token("c", loc("1:5-1:6")), 182*99e0aae7SDavid Rees lr1.Reduction("C", [Token("d", loc("1:6-1:7"))], c_to_d, 183*99e0aae7SDavid Rees loc("1:6-1:7"))], c_to_c_c, loc("1:5-1:7"))], 184*99e0aae7SDavid Rees s_to_c_c, loc("1:1-1:7")), 185*99e0aae7SDavid Rees parser.parse(_tokenize("cccdcd")).parse_tree) 186*99e0aae7SDavid Rees self.assertEqual( 187*99e0aae7SDavid Rees lr1.Reduction("S", 
            lr1.Reduction("C", [Token("d", loc("1:1-1:2"))], c_to_d, loc(
                "1:1-1:2")), lr1.Reduction("C", [Token("d", loc("1:2-1:3"))],
                                           c_to_d, loc("1:2-1:3"))
        ], s_to_c_c, loc("1:1-1:3")), parser.parse(_tokenize("dd")).parse_tree)

  def test_parse_with_no_source_information(self):
    parser = _alsu_grammar.parser()
    s_to_c_c = parser_types.Production.parse("S -> C C")
    c_to_d = parser_types.Production.parse("C -> d")
    self.assertEqual(
        lr1.Reduction("S", [
            lr1.Reduction("C", [Token("d", None)], c_to_d, None),
            lr1.Reduction("C", [Token("d", None)], c_to_d, None)
        ], s_to_c_c, None),
        parser.parse([Token("d", None), Token("d", None)]).parse_tree)

  def test_failed_parses(self):
    parser = _alsu_grammar.parser()
    self.assertEqual(None, parser.parse(_tokenize("d")).parse_tree)
    self.assertEqual(None, parser.parse(_tokenize("cccd")).parse_tree)
    self.assertEqual(None, parser.parse(_tokenize("")).parse_tree)
    self.assertEqual(None, parser.parse(_tokenize("cccdc")).parse_tree)

  def test_mark_error(self):
    parser = _alsu_grammar.parser()
    self.assertIsNone(parser.mark_error(_tokenize("cccdc"), None,
                                        "missing last d"))
    self.assertIsNone(parser.mark_error(_tokenize("d"), None, "missing last C"))
    # Marking an already-marked error with the same error code should succeed.
    self.assertIsNone(parser.mark_error(_tokenize("d"), None, "missing last C"))
    # Marking an already-marked error with a different error code should fail.
    self.assertRegex(
        parser.mark_error(_tokenize("d"), None, "different message"),
        r"^Attempted to overwrite existing error code 'missing last C' with "
        r"new error code 'different message' for state \d+, terminal \$$")
    self.assertEqual(
        "Input successfully parsed.",
        parser.mark_error(_tokenize("dd"), None, "good parse"))
    self.assertEqual(
        parser.mark_error(_tokenize("x"), None, "wrong location"),
        "error occurred on x token, not end of input.")
    self.assertEqual(
        parser.mark_error([], _tokenize("x")[0], "wrong location"),
        "error occurred on $ token, not x token.")
    self.assertIsNone(
        parser.mark_error([lr1.ANY_TOKEN], lr1.ANY_TOKEN, "default error"))
    # Marking an already-marked error with the same error code should succeed.
    self.assertIsNone(
        parser.mark_error([lr1.ANY_TOKEN], lr1.ANY_TOKEN, "default error"))
    # Marking an already-marked error with a different error code should fail.
    self.assertRegex(
        parser.mark_error([lr1.ANY_TOKEN], lr1.ANY_TOKEN, "default error 2"),
        r"^Attempted to overwrite existing default error code 'default error' "
        r"with new error code 'default error 2' for state \d+$")

    self.assertEqual(
        "missing last d", parser.parse(_tokenize("cccdc")).error.code)
    self.assertEqual("missing last d", parser.parse(_tokenize("dc")).error.code)
    self.assertEqual("missing last C", parser.parse(_tokenize("d")).error.code)
    self.assertEqual("default error", parser.parse(_tokenize("z")).error.code)
    self.assertEqual(
        "missing last C", parser.parse(_tokenize("ccccd")).error.code)
    self.assertEqual(None, parser.parse(_tokenize("ccc")).error.code)

  def test_grammar_with_empty_rhs(self):
    grammar = lr1.Grammar("S", _parse_productions("""S -> A B
                                                      A -> a A
                                                      A ->
                                                      B -> b"""))
    parser = grammar.parser()
    self.assertFalse(parser.conflicts)
    self.assertTrue(parser.parse(_tokenize("ab")).parse_tree)
    self.assertTrue(parser.parse(_tokenize("b")).parse_tree)
    self.assertTrue(parser.parse(_tokenize("aab")).parse_tree)

  def test_grammar_with_reduce_reduce_conflicts(self):
    grammar = lr1.Grammar("S", _parse_productions("""S -> A c
                                                      S -> B c
                                                      A -> a
                                                      B -> a"""))
    parser = grammar.parser()
    self.assertEqual(len(parser.conflicts), 1)
    # parser.conflicts is a set
    for conflict in parser.conflicts:
      for action in conflict.actions:
        self.assertTrue(isinstance(action, lr1.Reduce))

  def test_grammar_with_shift_reduce_conflicts(self):
    grammar = lr1.Grammar("S", _parse_productions("""S -> A B
                                                      A -> a
                                                      A ->
                                                      B -> a
                                                      B ->"""))
    parser = grammar.parser()
    self.assertEqual(len(parser.conflicts), 1)
    # parser.conflicts is a set
    for conflict in parser.conflicts:
      reduces = 0
      shifts = 0
      for action in conflict.actions:
        if isinstance(action, lr1.Reduce):
          reduces += 1
        elif isinstance(action, lr1.Shift):
          shifts += 1
      self.assertEqual(1, reduces)
      self.assertEqual(1, shifts)

  def test_item_str(self):
    self.assertEqual(
        "a -> b c ., d",
        str(lr1.make_item(parser_types.Production.parse("a -> b c"), 2, "d")))
    self.assertEqual(
        "a -> b . c, d",
c, d", 301*99e0aae7SDavid Rees str(lr1.make_item(parser_types.Production.parse("a -> b c"), 1, "d"))) 302*99e0aae7SDavid Rees self.assertEqual( 303*99e0aae7SDavid Rees "a -> . b c, d", 304*99e0aae7SDavid Rees str(lr1.make_item(parser_types.Production.parse("a -> b c"), 0, "d"))) 305*99e0aae7SDavid Rees self.assertEqual( 306*99e0aae7SDavid Rees "a -> ., d", 307*99e0aae7SDavid Rees str(lr1.make_item(parser_types.Production.parse("a ->"), 0, "d"))) 308*99e0aae7SDavid Rees 309*99e0aae7SDavid Rees def test_conflict_str(self): 310*99e0aae7SDavid Rees self.assertEqual("Conflict for 'A' in state 12: R vs S", 311*99e0aae7SDavid Rees str(lr1.Conflict(12, "'A'", ["R", "S"]))) 312*99e0aae7SDavid Rees self.assertEqual("Conflict for 'A' in state 12: R vs S vs T", 313*99e0aae7SDavid Rees str(lr1.Conflict(12, "'A'", ["R", "S", "T"]))) 314*99e0aae7SDavid Rees 315*99e0aae7SDavid Rees 316*99e0aae7SDavid Reesif __name__ == "__main__": 317*99e0aae7SDavid Rees unittest.main() 318