@@ -1,10 +1,13 @@
+import sys
 import os
 import unittest
 from support import simplejson, html5lib_test_files
 
 from html5lib.tokenizer import HTMLTokenizer
 from html5lib import constants
 
+import cStringIO
+
 class TokenizerTestParser(object):
     def __init__(self, contentModelFlag, lastStartTag=None):
         self.tokenizer = HTMLTokenizer
@@ -104,19 +107,29 @@ def runTokenizerTest(self, test): |
         output = concatenateCharacterTokens(test['output'])
         if 'lastStartTag' not in test:
             test['lastStartTag'] = None
+        outBuffer = cStringIO.StringIO()
+        stdout = sys.stdout
+        sys.stdout = outBuffer
         parser = TokenizerTestParser(test['contentModelFlag'],
                                      test['lastStartTag'])
         tokens = parser.parse(test['input'])
         tokens = concatenateCharacterTokens(tokens)
+        tokens = normalizeTokens(tokens)
         errorMsg = "\n".join(["\n\nContent Model Flag:",
                               test['contentModelFlag'] ,
-                              "\nInput:", str(test['input']),
-                              "\nExpected:", str(output),
-                              "\nreceived:", str(tokens)])
-        tokens = normalizeTokens(tokens)
+                              "\nInput:", test['input'],
+                              "\nExpected:", unicode(output),
+                              "\nreceived:", unicode(tokens)])
         ignoreErrorOrder = test.get('ignoreErrorOrder', False)
-        self.assertEquals(tokensMatch(tokens, output, ignoreErrorOrder), True,
-                          errorMsg)
+        sys.stdout = stdout
+        try:
+            self.assertEquals(tokensMatch(tokens, output, ignoreErrorOrder), True,
+                              errorMsg)
+        except AssertionError:
+            outBuffer.seek(0)
+            print outBuffer.read()
+            print errorMsg
+            raise
 
 def buildTestSuite():
     for filename in html5lib_test_files('tokenizer', '*.test'):
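
The heart of this patch is a capture-and-replay idiom: sys.stdout is swapped for an in-memory cStringIO buffer while the tokenizer runs, restored before the assertion, and the buffered output is printed only if the test fails. Below is a minimal, self-contained Python 2 sketch of that idiom; the CaptureExample class and its test body are hypothetical illustrations, not part of the html5lib suite.

import sys
import unittest
from cStringIO import StringIO

class CaptureExample(unittest.TestCase):
    # Hypothetical test showing the capture-and-replay idiom from the patch.
    def testCapture(self):
        outBuffer = StringIO()
        stdout = sys.stdout
        sys.stdout = outBuffer        # silence debug prints from the code under test
        try:
            print "noisy debug output"    # goes into outBuffer, not the console
            result = 2 + 2
        finally:
            sys.stdout = stdout       # always restore the real stdout
        try:
            self.assertEquals(result, 4)
        except AssertionError:
            outBuffer.seek(0)
            print outBuffer.read()    # replay the captured output only on failure
            raise

if __name__ == "__main__":
    unittest.main()

Note one difference: the patch restores sys.stdout unconditionally between parsing and the assertion, whereas the sketch wraps the redirected region in try/finally, which additionally guarantees the restore if the code under test raises.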
|