#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Non-blocking HTTP client implementation using pycurl."""

import collections
import functools
import logging
import pycurl  # type: ignore
import threading
import time
from io import BytesIO

from tornado import httputil
from tornado import ioloop
from tornado.escape import utf8, native_str
from tornado.httpclient import (
    HTTPRequest,
    HTTPResponse,
    HTTPError,
    AsyncHTTPClient,
    main,
)
from tornado.log import app_log

from typing import Dict, Any, Callable, Union
import typing

if typing.TYPE_CHECKING:
    from typing import Deque, Tuple, Optional  # noqa: F401

# Module-level logger; also gates libcurl VERBOSE output (see _curl_create).
curl_log = logging.getLogger("tornado.curl_httpclient")
  39. class CurlAsyncHTTPClient(AsyncHTTPClient):
  40. def initialize( # type: ignore
  41. self, max_clients: int = 10, defaults: Dict[str, Any] = None
  42. ) -> None:
  43. super(CurlAsyncHTTPClient, self).initialize(defaults=defaults)
  44. self._multi = pycurl.CurlMulti()
  45. self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
  46. self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
  47. self._curls = [self._curl_create() for i in range(max_clients)]
  48. self._free_list = self._curls[:]
  49. self._requests = (
  50. collections.deque()
  51. ) # type: Deque[Tuple[HTTPRequest, Callable[[HTTPResponse], None], float]]
  52. self._fds = {} # type: Dict[int, int]
  53. self._timeout = None # type: Optional[object]
  54. # libcurl has bugs that sometimes cause it to not report all
  55. # relevant file descriptors and timeouts to TIMERFUNCTION/
  56. # SOCKETFUNCTION. Mitigate the effects of such bugs by
  57. # forcing a periodic scan of all active requests.
  58. self._force_timeout_callback = ioloop.PeriodicCallback(
  59. self._handle_force_timeout, 1000
  60. )
  61. self._force_timeout_callback.start()
  62. # Work around a bug in libcurl 7.29.0: Some fields in the curl
  63. # multi object are initialized lazily, and its destructor will
  64. # segfault if it is destroyed without having been used. Add
  65. # and remove a dummy handle to make sure everything is
  66. # initialized.
  67. dummy_curl_handle = pycurl.Curl()
  68. self._multi.add_handle(dummy_curl_handle)
  69. self._multi.remove_handle(dummy_curl_handle)
  70. def close(self) -> None:
  71. self._force_timeout_callback.stop()
  72. if self._timeout is not None:
  73. self.io_loop.remove_timeout(self._timeout)
  74. for curl in self._curls:
  75. curl.close()
  76. self._multi.close()
  77. super(CurlAsyncHTTPClient, self).close()
  78. # Set below properties to None to reduce the reference count of current
  79. # instance, because those properties hold some methods of current
  80. # instance that will case circular reference.
  81. self._force_timeout_callback = None # type: ignore
  82. self._multi = None
  83. def fetch_impl(
  84. self, request: HTTPRequest, callback: Callable[[HTTPResponse], None]
  85. ) -> None:
  86. self._requests.append((request, callback, self.io_loop.time()))
  87. self._process_queue()
  88. self._set_timeout(0)
  89. def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None:
  90. """Called by libcurl when it wants to change the file descriptors
  91. it cares about.
  92. """
  93. event_map = {
  94. pycurl.POLL_NONE: ioloop.IOLoop.NONE,
  95. pycurl.POLL_IN: ioloop.IOLoop.READ,
  96. pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
  97. pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE,
  98. }
  99. if event == pycurl.POLL_REMOVE:
  100. if fd in self._fds:
  101. self.io_loop.remove_handler(fd)
  102. del self._fds[fd]
  103. else:
  104. ioloop_event = event_map[event]
  105. # libcurl sometimes closes a socket and then opens a new
  106. # one using the same FD without giving us a POLL_NONE in
  107. # between. This is a problem with the epoll IOLoop,
  108. # because the kernel can tell when a socket is closed and
  109. # removes it from the epoll automatically, causing future
  110. # update_handler calls to fail. Since we can't tell when
  111. # this has happened, always use remove and re-add
  112. # instead of update.
  113. if fd in self._fds:
  114. self.io_loop.remove_handler(fd)
  115. self.io_loop.add_handler(fd, self._handle_events, ioloop_event)
  116. self._fds[fd] = ioloop_event
  117. def _set_timeout(self, msecs: int) -> None:
  118. """Called by libcurl to schedule a timeout."""
  119. if self._timeout is not None:
  120. self.io_loop.remove_timeout(self._timeout)
  121. self._timeout = self.io_loop.add_timeout(
  122. self.io_loop.time() + msecs / 1000.0, self._handle_timeout
  123. )
  124. def _handle_events(self, fd: int, events: int) -> None:
  125. """Called by IOLoop when there is activity on one of our
  126. file descriptors.
  127. """
  128. action = 0
  129. if events & ioloop.IOLoop.READ:
  130. action |= pycurl.CSELECT_IN
  131. if events & ioloop.IOLoop.WRITE:
  132. action |= pycurl.CSELECT_OUT
  133. while True:
  134. try:
  135. ret, num_handles = self._multi.socket_action(fd, action)
  136. except pycurl.error as e:
  137. ret = e.args[0]
  138. if ret != pycurl.E_CALL_MULTI_PERFORM:
  139. break
  140. self._finish_pending_requests()
    def _handle_timeout(self) -> None:
        """Called by IOLoop when the requested timeout has passed."""
        # The scheduled callback has fired; clear it so _set_timeout does
        # not try to cancel an already-expired handle.
        self._timeout = None
        while True:
            try:
                ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0)
            except pycurl.error as e:
                ret = e.args[0]
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        self._finish_pending_requests()

        # In theory, we shouldn't have to do this because curl will
        # call _set_timeout whenever the timeout changes.  However,
        # sometimes after _handle_timeout we will need to reschedule
        # immediately even though nothing has changed from curl's
        # perspective.  This is because when socket_action is
        # called with SOCKET_TIMEOUT, libcurl decides internally which
        # timeouts need to be processed by using a monotonic clock
        # (where available) while tornado uses python's time.time()
        # to decide when timeouts have occurred.  When those clocks
        # disagree on elapsed time (as they will whenever there is an
        # NTP adjustment), tornado might call _handle_timeout before
        # libcurl is ready.  After each timeout, resync the scheduled
        # timeout with libcurl's current state.
        new_timeout = self._multi.timeout()
        if new_timeout >= 0:
            self._set_timeout(new_timeout)
  168. def _handle_force_timeout(self) -> None:
  169. """Called by IOLoop periodically to ask libcurl to process any
  170. events it may have forgotten about.
  171. """
  172. while True:
  173. try:
  174. ret, num_handles = self._multi.socket_all()
  175. except pycurl.error as e:
  176. ret = e.args[0]
  177. if ret != pycurl.E_CALL_MULTI_PERFORM:
  178. break
  179. self._finish_pending_requests()
  180. def _finish_pending_requests(self) -> None:
  181. """Process any requests that were completed by the last
  182. call to multi.socket_action.
  183. """
  184. while True:
  185. num_q, ok_list, err_list = self._multi.info_read()
  186. for curl in ok_list:
  187. self._finish(curl)
  188. for curl, errnum, errmsg in err_list:
  189. self._finish(curl, errnum, errmsg)
  190. if num_q == 0:
  191. break
  192. self._process_queue()
  193. def _process_queue(self) -> None:
  194. while True:
  195. started = 0
  196. while self._free_list and self._requests:
  197. started += 1
  198. curl = self._free_list.pop()
  199. (request, callback, queue_start_time) = self._requests.popleft()
  200. curl.info = {
  201. "headers": httputil.HTTPHeaders(),
  202. "buffer": BytesIO(),
  203. "request": request,
  204. "callback": callback,
  205. "queue_start_time": queue_start_time,
  206. "curl_start_time": time.time(),
  207. "curl_start_ioloop_time": self.io_loop.current().time(),
  208. }
  209. try:
  210. self._curl_setup_request(
  211. curl, request, curl.info["buffer"], curl.info["headers"]
  212. )
  213. except Exception as e:
  214. # If there was an error in setup, pass it on
  215. # to the callback. Note that allowing the
  216. # error to escape here will appear to work
  217. # most of the time since we are still in the
  218. # caller's original stack frame, but when
  219. # _process_queue() is called from
  220. # _finish_pending_requests the exceptions have
  221. # nowhere to go.
  222. self._free_list.append(curl)
  223. callback(HTTPResponse(request=request, code=599, error=e))
  224. else:
  225. self._multi.add_handle(curl)
  226. if not started:
  227. break
  228. def _finish(
  229. self, curl: pycurl.Curl, curl_error: int = None, curl_message: str = None
  230. ) -> None:
  231. info = curl.info
  232. curl.info = None
  233. self._multi.remove_handle(curl)
  234. self._free_list.append(curl)
  235. buffer = info["buffer"]
  236. if curl_error:
  237. assert curl_message is not None
  238. error = CurlError(curl_error, curl_message) # type: Optional[CurlError]
  239. assert error is not None
  240. code = error.code
  241. effective_url = None
  242. buffer.close()
  243. buffer = None
  244. else:
  245. error = None
  246. code = curl.getinfo(pycurl.HTTP_CODE)
  247. effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
  248. buffer.seek(0)
  249. # the various curl timings are documented at
  250. # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
  251. time_info = dict(
  252. queue=info["curl_start_ioloop_time"] - info["queue_start_time"],
  253. namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
  254. connect=curl.getinfo(pycurl.CONNECT_TIME),
  255. appconnect=curl.getinfo(pycurl.APPCONNECT_TIME),
  256. pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
  257. starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
  258. total=curl.getinfo(pycurl.TOTAL_TIME),
  259. redirect=curl.getinfo(pycurl.REDIRECT_TIME),
  260. )
  261. try:
  262. info["callback"](
  263. HTTPResponse(
  264. request=info["request"],
  265. code=code,
  266. headers=info["headers"],
  267. buffer=buffer,
  268. effective_url=effective_url,
  269. error=error,
  270. reason=info["headers"].get("X-Http-Reason", None),
  271. request_time=self.io_loop.time() - info["curl_start_ioloop_time"],
  272. start_time=info["curl_start_time"],
  273. time_info=time_info,
  274. )
  275. )
  276. except Exception:
  277. self.handle_callback_exception(info["callback"])
  278. def handle_callback_exception(self, callback: Any) -> None:
  279. app_log.error("Exception in callback %r", callback, exc_info=True)
  280. def _curl_create(self) -> pycurl.Curl:
  281. curl = pycurl.Curl()
  282. if curl_log.isEnabledFor(logging.DEBUG):
  283. curl.setopt(pycurl.VERBOSE, 1)
  284. curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
  285. if hasattr(
  286. pycurl, "PROTOCOLS"
  287. ): # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
  288. curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
  289. curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
  290. return curl
    def _curl_setup_request(
        self,
        curl: pycurl.Curl,
        request: HTTPRequest,
        buffer: BytesIO,
        headers: httputil.HTTPHeaders,
    ) -> None:
        """Configure *curl* to execute *request*.

        Easy handles are reused across requests, so every option that a
        previous request may have set is either re-set or explicitly
        cleared here.

        :param curl: a free easy handle from the pool.
        :param request: the user's request.
        :param buffer: receives the response body (unless
            ``request.streaming_callback`` is set).
        :param headers: filled with response headers as they arrive.
        """
        curl.setopt(pycurl.URL, native_str(request.url))

        # libcurl's magic "Expect: 100-continue" behavior causes delays
        # with servers that don't support it (which include, among others,
        # Google's OpenID endpoint).  Additionally, this behavior has
        # a bug in conjunction with the curl_multi_socket_action API
        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
        # which increases the delays.  It's more trouble than it's worth,
        # so just turn off the feature (yes, setting Expect: to an empty
        # value is the official way to disable this)
        if "Expect" not in request.headers:
            request.headers["Expect"] = ""

        # libcurl adds Pragma: no-cache by default; disable that too
        if "Pragma" not in request.headers:
            request.headers["Pragma"] = ""

        curl.setopt(
            pycurl.HTTPHEADER,
            [
                "%s: %s" % (native_str(k), native_str(v))
                for k, v in request.headers.get_all()
            ],
        )

        curl.setopt(
            pycurl.HEADERFUNCTION,
            functools.partial(
                self._curl_header_callback, headers, request.header_callback
            ),
        )
        if request.streaming_callback:

            # Hand each body chunk to the user's callback on the IOLoop
            # instead of accumulating it in *buffer*.
            def write_function(b: Union[bytes, bytearray]) -> int:
                assert request.streaming_callback is not None
                self.io_loop.add_callback(request.streaming_callback, b)
                return len(b)

        else:
            write_function = buffer.write
        curl.setopt(pycurl.WRITEFUNCTION, write_function)
        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
        # NOTE(review): the asserts assume these timeouts were already
        # populated (presumably by upstream defaults) — confirm against
        # the base class before relying on it.
        assert request.connect_timeout is not None
        curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
        assert request.request_timeout is not None
        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
        if request.user_agent:
            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
        else:
            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
        if request.network_interface:
            curl.setopt(pycurl.INTERFACE, request.network_interface)
        if request.decompress_response:
            curl.setopt(pycurl.ENCODING, "gzip,deflate")
        else:
            curl.setopt(pycurl.ENCODING, "none")
        if request.proxy_host and request.proxy_port:
            curl.setopt(pycurl.PROXY, request.proxy_host)
            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
            if request.proxy_username:
                assert request.proxy_password is not None
                credentials = httputil.encode_username_password(
                    request.proxy_username, request.proxy_password
                )
                curl.setopt(pycurl.PROXYUSERPWD, credentials)

                if request.proxy_auth_mode is None or request.proxy_auth_mode == "basic":
                    curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
                elif request.proxy_auth_mode == "digest":
                    curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
                else:
                    raise ValueError(
                        "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode
                    )
        else:
            # Clear any proxy configuration left over from a previous
            # request on this reused handle.
            curl.setopt(pycurl.PROXY, "")
            curl.unsetopt(pycurl.PROXYUSERPWD)
        if request.validate_cert:
            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        else:
            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        if request.ca_certs is not None:
            curl.setopt(pycurl.CAINFO, request.ca_certs)
        else:
            # There is no way to restore pycurl.CAINFO to its default value
            # (Using unsetopt makes it reject all certificates).
            # I don't see any way to read the default value from python so it
            # can be restored later.  We'll have to just leave CAINFO untouched
            # if no ca_certs file was specified, and require that if any
            # request uses a custom ca_certs file, they all must.
            pass
        if request.allow_ipv6 is False:
            # Curl behaves reasonably when DNS resolution gives an ipv6 address
            # that we can't reach, so allow ipv6 unless the user asks to disable.
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        else:
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)

        # Set the request method through curl's irritating interface which makes
        # up names for almost every single method
        curl_options = {
            "GET": pycurl.HTTPGET,
            "POST": pycurl.POST,
            "PUT": pycurl.UPLOAD,
            "HEAD": pycurl.NOBODY,
        }
        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
        # Clear all method flags first (handles are reused), then set the
        # one that applies.
        for o in curl_options.values():
            curl.setopt(o, False)
        if request.method in curl_options:
            curl.unsetopt(pycurl.CUSTOMREQUEST)
            curl.setopt(curl_options[request.method], True)
        elif request.allow_nonstandard_methods or request.method in custom_methods:
            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
        else:
            raise KeyError("unknown method " + request.method)

        body_expected = request.method in ("POST", "PATCH", "PUT")
        body_present = request.body is not None
        if not request.allow_nonstandard_methods:
            # Some HTTP methods nearly always have bodies while others
            # almost never do.  Fail in this case unless the user has
            # opted out of sanity checks with allow_nonstandard_methods.
            if (body_expected and not body_present) or (
                body_present and not body_expected
            ):
                raise ValueError(
                    "Body must %sbe None for method %s (unless "
                    "allow_nonstandard_methods is true)"
                    % ("not " if body_expected else "", request.method)
                )

        if body_expected or body_present:
            if request.method == "GET":
                # Even with `allow_nonstandard_methods` we disallow
                # GET with a body (because libcurl doesn't allow it
                # unless we use CUSTOMREQUEST).  While the spec doesn't
                # forbid clients from sending a body, it arguably
                # disallows the server from doing anything with them.
                raise ValueError("Body must be None for GET request")
            request_buffer = BytesIO(utf8(request.body or ""))

            def ioctl(cmd: int) -> None:
                # Rewind the body so libcurl can resend it (e.g. after a
                # redirect or an auth challenge).
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)

            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            if request.method == "POST":
                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ""))
            else:
                curl.setopt(pycurl.UPLOAD, True)
                curl.setopt(pycurl.INFILESIZE, len(request.body or ""))

        if request.auth_username is not None:
            assert request.auth_password is not None
            if request.auth_mode is None or request.auth_mode == "basic":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.auth_mode == "digest":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported auth_mode %s" % request.auth_mode)

            userpwd = httputil.encode_username_password(
                request.auth_username, request.auth_password
            )
            curl.setopt(pycurl.USERPWD, userpwd)
            curl_log.debug(
                "%s %s (username: %r)",
                request.method,
                request.url,
                request.auth_username,
            )
        else:
            curl.unsetopt(pycurl.USERPWD)
            curl_log.debug("%s %s", request.method, request.url)

        if request.client_cert is not None:
            curl.setopt(pycurl.SSLCERT, request.client_cert)

        if request.client_key is not None:
            curl.setopt(pycurl.SSLKEY, request.client_key)

        if request.ssl_options is not None:
            raise ValueError("ssl_options not supported in curl_httpclient")

        if threading.active_count() > 1:
            # libcurl/pycurl is not thread-safe by default.  When multiple threads
            # are used, signals should be disabled.  This has the side effect
            # of disabling DNS timeouts in some environments (when libcurl is
            # not linked against ares), so we don't do it when there is only one
            # thread.  Applications that use many short-lived threads may need
            # to set NOSIGNAL manually in a prepare_curl_callback since
            # there may not be any other threads running at the time we call
            # threading.activeCount.
            curl.setopt(pycurl.NOSIGNAL, 1)
        if request.prepare_curl_callback is not None:
            request.prepare_curl_callback(curl)
  481. def _curl_header_callback(
  482. self,
  483. headers: httputil.HTTPHeaders,
  484. header_callback: Callable[[str], None],
  485. header_line_bytes: bytes,
  486. ) -> None:
  487. header_line = native_str(header_line_bytes.decode("latin1"))
  488. if header_callback is not None:
  489. self.io_loop.add_callback(header_callback, header_line)
  490. # header_line as returned by curl includes the end-of-line characters.
  491. # whitespace at the start should be preserved to allow multi-line headers
  492. header_line = header_line.rstrip()
  493. if header_line.startswith("HTTP/"):
  494. headers.clear()
  495. try:
  496. (__, __, reason) = httputil.parse_response_start_line(header_line)
  497. header_line = "X-Http-Reason: %s" % reason
  498. except httputil.HTTPInputError:
  499. return
  500. if not header_line:
  501. return
  502. headers.parse_line(header_line)
  503. def _curl_debug(self, debug_type: int, debug_msg: str) -> None:
  504. debug_types = ("I", "<", ">", "<", ">")
  505. if debug_type == 0:
  506. debug_msg = native_str(debug_msg)
  507. curl_log.debug("%s", debug_msg.strip())
  508. elif debug_type in (1, 2):
  509. debug_msg = native_str(debug_msg)
  510. for line in debug_msg.splitlines():
  511. curl_log.debug("%s %s", debug_types[debug_type], line)
  512. elif debug_type == 4:
  513. curl_log.debug("%s %r", debug_types[debug_type], debug_msg)
  514. class CurlError(HTTPError):
  515. def __init__(self, errno: int, message: str) -> None:
  516. HTTPError.__init__(self, 599, message)
  517. self.errno = errno
if __name__ == "__main__":
    # Run the shared httpclient command-line tool with the curl-based
    # implementation selected.
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
    main()