# escape_test.py — unit tests for tornado.escape (reconstructed from an
# HTML code listing; the original header and line-number gutter were removed).
  1. import unittest
  2. import tornado.escape
  3. from tornado.escape import (
  4. utf8,
  5. xhtml_escape,
  6. xhtml_unescape,
  7. url_escape,
  8. url_unescape,
  9. to_unicode,
  10. json_decode,
  11. json_encode,
  12. squeeze,
  13. recursive_unicode,
  14. )
  15. from tornado.util import unicode_type
  16. from typing import List, Tuple, Union, Dict, Any # noqa: F401
# Table-driven cases for EscapeTestCase.test_linkify below: each entry is
# (input text, kwargs passed to tornado.escape.linkify, expected HTML output).
linkify_tests = [
    # (input, linkify_kwargs, expected_output)
    (
        "hello http://world.com/!",
        {},
        u'hello <a href="http://world.com/">http://world.com/</a>!',
    ),
    (
        "hello http://world.com/with?param=true&stuff=yes",
        {},
        u'hello <a href="http://world.com/with?param=true&amp;stuff=yes">http://world.com/with?param=true&amp;stuff=yes</a>',  # noqa: E501
    ),
    # an opened paren followed by many chars killed Gruber's regex
    (
        "http://url.com/w(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        {},
        u'<a href="http://url.com/w">http://url.com/w</a>(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',  # noqa: E501
    ),
    # as did too many dots at the end
    (
        "http://url.com/withmany.......................................",
        {},
        u'<a href="http://url.com/withmany">http://url.com/withmany</a>.......................................',  # noqa: E501
    ),
    (
        "http://url.com/withmany((((((((((((((((((((((((((((((((((a)",
        {},
        u'<a href="http://url.com/withmany">http://url.com/withmany</a>((((((((((((((((((((((((((((((((((a)',  # noqa: E501
    ),
    # some examples from http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
    # plus a few extras (such as multiple parentheses).
    (
        "http://foo.com/blah_blah",
        {},
        u'<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>',
    ),
    (
        "http://foo.com/blah_blah/",
        {},
        u'<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>',
    ),
    (
        "(Something like http://foo.com/blah_blah)",
        {},
        u'(Something like <a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>)',
    ),
    (
        "http://foo.com/blah_blah_(wikipedia)",
        {},
        u'<a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>',
    ),
    (
        "http://foo.com/blah_(blah)_(wikipedia)_blah",
        {},
        u'<a href="http://foo.com/blah_(blah)_(wikipedia)_blah">http://foo.com/blah_(blah)_(wikipedia)_blah</a>',  # noqa: E501
    ),
    (
        "(Something like http://foo.com/blah_blah_(wikipedia))",
        {},
        u'(Something like <a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>)',  # noqa: E501
    ),
    (
        "http://foo.com/blah_blah.",
        {},
        u'<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>.',
    ),
    (
        "http://foo.com/blah_blah/.",
        {},
        u'<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>.',
    ),
    (
        "<http://foo.com/blah_blah>",
        {},
        u'&lt;<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>&gt;',
    ),
    (
        "<http://foo.com/blah_blah/>",
        {},
        u'&lt;<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>&gt;',
    ),
    (
        "http://foo.com/blah_blah,",
        {},
        u'<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>,',
    ),
    (
        "http://www.example.com/wpstyle/?p=364.",
        {},
        u'<a href="http://www.example.com/wpstyle/?p=364">http://www.example.com/wpstyle/?p=364</a>.',  # noqa: E501
    ),
    (
        "rdar://1234",
        {"permitted_protocols": ["http", "rdar"]},
        u'<a href="rdar://1234">rdar://1234</a>',
    ),
    (
        "rdar:/1234",
        {"permitted_protocols": ["rdar"]},
        u'<a href="rdar:/1234">rdar:/1234</a>',
    ),
    (
        "http://userid:password@example.com:8080",
        {},
        u'<a href="http://userid:password@example.com:8080">http://userid:password@example.com:8080</a>',  # noqa: E501
    ),
    (
        "http://userid@example.com",
        {},
        u'<a href="http://userid@example.com">http://userid@example.com</a>',
    ),
    (
        "http://userid@example.com:8080",
        {},
        u'<a href="http://userid@example.com:8080">http://userid@example.com:8080</a>',
    ),
    (
        "http://userid:password@example.com",
        {},
        u'<a href="http://userid:password@example.com">http://userid:password@example.com</a>',
    ),
    (
        "message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e",
        {"permitted_protocols": ["http", "message"]},
        u'<a href="message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e">'
        u"message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e</a>",
    ),
    (
        u"http://\u27a1.ws/\u4a39",
        {},
        u'<a href="http://\u27a1.ws/\u4a39">http://\u27a1.ws/\u4a39</a>',
    ),
    (
        "<tag>http://example.com</tag>",
        {},
        u'&lt;tag&gt;<a href="http://example.com">http://example.com</a>&lt;/tag&gt;',
    ),
    (
        "Just a www.example.com link.",
        {},
        u'Just a <a href="http://www.example.com">www.example.com</a> link.',
    ),
    (
        "Just a www.example.com link.",
        {"require_protocol": True},
        u"Just a www.example.com link.",
    ),
    (
        "A http://reallylong.com/link/that/exceedsthelenglimit.html",
        {"require_protocol": True, "shorten": True},
        u'A <a href="http://reallylong.com/link/that/exceedsthelenglimit.html"'
        u' title="http://reallylong.com/link/that/exceedsthelenglimit.html">http://reallylong.com/link...</a>',  # noqa: E501
    ),
    (
        "A http://reallylongdomainnamethatwillbetoolong.com/hi!",
        {"shorten": True},
        u'A <a href="http://reallylongdomainnamethatwillbetoolong.com/hi"'
        u' title="http://reallylongdomainnamethatwillbetoolong.com/hi">http://reallylongdomainnametha...</a>!',  # noqa: E501
    ),
    (
        "A file:///passwords.txt and http://web.com link",
        {},
        u'A file:///passwords.txt and <a href="http://web.com">http://web.com</a> link',
    ),
    (
        "A file:///passwords.txt and http://web.com link",
        {"permitted_protocols": ["file"]},
        u'A <a href="file:///passwords.txt">file:///passwords.txt</a> and http://web.com link',
    ),
    (
        "www.external-link.com",
        {"extra_params": 'rel="nofollow" class="external"'},
        u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>',  # noqa: E501
    ),
    (
        "www.external-link.com and www.internal-link.com/blogs extra",
        {
            "extra_params": lambda href: 'class="internal"'
            if href.startswith("http://www.internal-link.com")
            else 'rel="nofollow" class="external"'
        },
        u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>'  # noqa: E501
        u' and <a href="http://www.internal-link.com/blogs" class="internal">www.internal-link.com/blogs</a> extra',  # noqa: E501
    ),
    (
        "www.external-link.com",
        {"extra_params": lambda href: ' rel="nofollow" class="external" '},
        u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>',  # noqa: E501
    ),
]  # type: List[Tuple[Union[str, bytes], Dict[str, Any], str]]
  207. class EscapeTestCase(unittest.TestCase):
  208. def test_linkify(self):
  209. for text, kwargs, html in linkify_tests:
  210. linked = tornado.escape.linkify(text, **kwargs)
  211. self.assertEqual(linked, html)
  212. def test_xhtml_escape(self):
  213. tests = [
  214. ("<foo>", "&lt;foo&gt;"),
  215. (u"<foo>", u"&lt;foo&gt;"),
  216. (b"<foo>", b"&lt;foo&gt;"),
  217. ("<>&\"'", "&lt;&gt;&amp;&quot;&#39;"),
  218. ("&amp;", "&amp;amp;"),
  219. (u"<\u00e9>", u"&lt;\u00e9&gt;"),
  220. (b"<\xc3\xa9>", b"&lt;\xc3\xa9&gt;"),
  221. ] # type: List[Tuple[Union[str, bytes], Union[str, bytes]]]
  222. for unescaped, escaped in tests:
  223. self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped))
  224. self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped)))
  225. def test_xhtml_unescape_numeric(self):
  226. tests = [
  227. ("foo&#32;bar", "foo bar"),
  228. ("foo&#x20;bar", "foo bar"),
  229. ("foo&#X20;bar", "foo bar"),
  230. ("foo&#xabc;bar", u"foo\u0abcbar"),
  231. ("foo&#xyz;bar", "foo&#xyz;bar"), # invalid encoding
  232. ("foo&#;bar", "foo&#;bar"), # invalid encoding
  233. ("foo&#x;bar", "foo&#x;bar"), # invalid encoding
  234. ]
  235. for escaped, unescaped in tests:
  236. self.assertEqual(unescaped, xhtml_unescape(escaped))
  237. def test_url_escape_unicode(self):
  238. tests = [
  239. # byte strings are passed through as-is
  240. (u"\u00e9".encode("utf8"), "%C3%A9"),
  241. (u"\u00e9".encode("latin1"), "%E9"),
  242. # unicode strings become utf8
  243. (u"\u00e9", "%C3%A9"),
  244. ] # type: List[Tuple[Union[str, bytes], str]]
  245. for unescaped, escaped in tests:
  246. self.assertEqual(url_escape(unescaped), escaped)
  247. def test_url_unescape_unicode(self):
  248. tests = [
  249. ("%C3%A9", u"\u00e9", "utf8"),
  250. ("%C3%A9", u"\u00c3\u00a9", "latin1"),
  251. ("%C3%A9", utf8(u"\u00e9"), None),
  252. ]
  253. for escaped, unescaped, encoding in tests:
  254. # input strings to url_unescape should only contain ascii
  255. # characters, but make sure the function accepts both byte
  256. # and unicode strings.
  257. self.assertEqual(url_unescape(to_unicode(escaped), encoding), unescaped)
  258. self.assertEqual(url_unescape(utf8(escaped), encoding), unescaped)
  259. def test_url_escape_quote_plus(self):
  260. unescaped = "+ #%"
  261. plus_escaped = "%2B+%23%25"
  262. escaped = "%2B%20%23%25"
  263. self.assertEqual(url_escape(unescaped), plus_escaped)
  264. self.assertEqual(url_escape(unescaped, plus=False), escaped)
  265. self.assertEqual(url_unescape(plus_escaped), unescaped)
  266. self.assertEqual(url_unescape(escaped, plus=False), unescaped)
  267. self.assertEqual(url_unescape(plus_escaped, encoding=None), utf8(unescaped))
  268. self.assertEqual(
  269. url_unescape(escaped, encoding=None, plus=False), utf8(unescaped)
  270. )
  271. def test_escape_return_types(self):
  272. # On python2 the escape methods should generally return the same
  273. # type as their argument
  274. self.assertEqual(type(xhtml_escape("foo")), str)
  275. self.assertEqual(type(xhtml_escape(u"foo")), unicode_type)
  276. def test_json_decode(self):
  277. # json_decode accepts both bytes and unicode, but strings it returns
  278. # are always unicode.
  279. self.assertEqual(json_decode(b'"foo"'), u"foo")
  280. self.assertEqual(json_decode(u'"foo"'), u"foo")
  281. # Non-ascii bytes are interpreted as utf8
  282. self.assertEqual(json_decode(utf8(u'"\u00e9"')), u"\u00e9")
  283. def test_json_encode(self):
  284. # json deals with strings, not bytes. On python 2 byte strings will
  285. # convert automatically if they are utf8; on python 3 byte strings
  286. # are not allowed.
  287. self.assertEqual(json_decode(json_encode(u"\u00e9")), u"\u00e9")
  288. if bytes is str:
  289. self.assertEqual(json_decode(json_encode(utf8(u"\u00e9"))), u"\u00e9")
  290. self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
  291. def test_squeeze(self):
  292. self.assertEqual(
  293. squeeze(u"sequences of whitespace chars"),
  294. u"sequences of whitespace chars",
  295. )
  296. def test_recursive_unicode(self):
  297. tests = {
  298. "dict": {b"foo": b"bar"},
  299. "list": [b"foo", b"bar"],
  300. "tuple": (b"foo", b"bar"),
  301. "bytes": b"foo",
  302. }
  303. self.assertEqual(recursive_unicode(tests["dict"]), {u"foo": u"bar"})
  304. self.assertEqual(recursive_unicode(tests["list"]), [u"foo", u"bar"])
  305. self.assertEqual(recursive_unicode(tests["tuple"]), (u"foo", u"bar"))
  306. self.assertEqual(recursive_unicode(tests["bytes"]), u"foo")