@@ -168,15 +168,15 @@ def sanitize_token(self, token):
         if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
                           tokenTypes["EmptyTag"]):
             if token["name"] in self.allowed_elements:
-                return self.allowed_token(token)
+                return self.allowed_token(token, token_type)
             else:
-                return self.unallowed_token(token)
+                return self.unallowed_token(token, token_type)
         elif token_type == tokenTypes["Comment"]:
             pass
         else:
             return token
 
-    def allowed_token(self, token):
+    def allowed_token(self, token, token_type):
         if "data" in token:
             attrs = dict([(name, val) for name, val in
                           token["data"][::-1]
@@ -206,7 +206,7 @@ def allowed_token(self, token):
         token["data"] = [[name, val] for name, val in list(attrs.items())]
         return token
 
-    def unallowed_token(self, token):
+    def unallowed_token(self, token, token_type):
         if token_type == tokenTypes["EndTag"]:
             token["data"] = "</%s>" % token["name"]
         elif token["data"]:
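Note on the change: `unallowed_token` compares `token_type` against `tokenTypes["EndTag"]`, but before this patch `token_type` existed only as a local variable inside `sanitize_token`, so the split-out helper would fail with a NameError at call time. The patch threads `token_type` through as an explicit parameter. A minimal, self-contained sketch of the failure mode and the fix, using assumed stand-in names and values rather than the real html5lib module:

    # Stand-in for html5lib's token-type constants (assumed values, for
    # illustration only).
    tokenTypes = {"StartTag": 3, "EndTag": 4, "EmptyTag": 5, "Comment": 6}

    class Sanitizer:
        allowed_elements = {"a", "p"}

        def sanitize_token(self, token):
            token_type = tokenTypes[token["type"]]
            if token_type in (tokenTypes["StartTag"], tokenTypes["EndTag"],
                              tokenTypes["EmptyTag"]):
                if token["name"] in self.allowed_elements:
                    return token
                # Patched call site: token_type is passed explicitly instead
                # of being looked up as a (nonexistent) global in the helper.
                return self.unallowed_token(token, token_type)
            return token

        def unallowed_token(self, token, token_type):
            # With token_type as a parameter this branch works; without it,
            # the name lookup raises NameError as soon as a disallowed tag
            # reaches the sanitizer.
            if token_type == tokenTypes["EndTag"]:
                token["data"] = "</%s>" % token["name"]
            return token

    # Example: a disallowed end tag is escaped into literal text.
    print(Sanitizer().sanitize_token(
        {"type": "EndTag", "name": "script", "data": []}))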