"Fossies" - the Fresh Open Source Software Archive

Member "salt-3002.2/salt/utils/http.py" (18 Nov 2020, 35150 Bytes) of package /linux/misc/salt-3002.2.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Python source code syntax highlighting (style: standard) with prefixed line numbers. Alternatively, you can view or download the uninterpreted source code file here. For more information about "http.py" see the Fossies "Dox" file reference documentation and the latest Fossies "Diffs" side-by-side code changes report: 3002.1_vs_3002.2.

    1 """
    2 Utils for making various web calls. Primarily designed for REST, SOAP, webhooks
    3 and the like, but also useful for basic HTTP testing.
    4 
    5 .. versionadded:: 2015.5.0
    6 """
    7 
    8 import cgi
    9 import gzip
   10 import io
   11 import logging
   12 import os
   13 import pprint
   14 import re
   15 import socket
   16 import ssl
   17 import zlib
   18 
   19 import salt.config
   20 import salt.ext.six.moves.http_client
   21 import salt.ext.six.moves.http_cookiejar
   22 import salt.ext.six.moves.urllib.request as urllib_request
   23 import salt.ext.tornado.httputil
   24 import salt.ext.tornado.simple_httpclient
   25 import salt.loader
   26 import salt.syspaths
   27 import salt.utils.args
   28 import salt.utils.data
   29 import salt.utils.files
   30 import salt.utils.json
   31 import salt.utils.msgpack
   32 import salt.utils.network
   33 import salt.utils.platform
   34 import salt.utils.stringutils
   35 import salt.utils.xmlutil as xml
   36 import salt.utils.yaml
   37 import salt.version
   38 from salt._compat import ElementTree as ET
   39 from salt.ext import six
   40 from salt.ext.six.moves import StringIO
   41 from salt.ext.six.moves.urllib.error import URLError
   42 from salt.ext.six.moves.urllib.parse import splitquery
   43 from salt.ext.six.moves.urllib.parse import urlencode as _urlencode
   44 from salt.ext.six.moves.urllib.parse import urlparse
   45 from salt.ext.tornado.httpclient import HTTPClient
   46 from salt.template import compile_template
   47 from salt.utils.decorators.jinja import jinja_filter
   48 
   49 try:
   50     from ssl import CertificateError, match_hostname
   51 
   52     HAS_MATCHHOSTNAME = True
   53 except ImportError:
   54     # pylint: disable=no-name-in-module
   55     try:
   56         from backports.ssl_match_hostname import CertificateError, match_hostname
   57 
   58         HAS_MATCHHOSTNAME = True
   59     except ImportError:
   60         try:
   61             from salt.ext.ssl_match_hostname import CertificateError, match_hostname
   62 
   63             HAS_MATCHHOSTNAME = True
   64         except ImportError:
   65             HAS_MATCHHOSTNAME = False
   66     # pylint: enable=no-name-in-module
   67 
   68 
   69 try:
   70     import salt.ext.tornado.curl_httpclient
   71 
   72     HAS_CURL_HTTPCLIENT = True
   73 except ImportError:
   74     HAS_CURL_HTTPCLIENT = False
   75 
   76 try:
   77     import requests
   78 
   79     HAS_REQUESTS = True
   80 except ImportError:
   81     HAS_REQUESTS = False
   82 
   83 try:
   84     import certifi
   85 
   86     HAS_CERTIFI = True
   87 except ImportError:
   88     HAS_CERTIFI = False
   89 
   90 log = logging.getLogger(__name__)
   91 USERAGENT = "Salt/{}".format(salt.version.__version__)
   92 
   93 
   94 def __decompressContent(coding, pgctnt):
   95     """
   96     Decompress returned HTTP content depending on the specified encoding.
   97     Currently supports identity/none, deflate, and gzip, which should
   98     cover 99%+ of the content on the internet.
   99     """
  100     if not pgctnt:
  101         return pgctnt
  102 
  103     log.trace(
  104         "Decompressing %s byte content with compression type: %s", len(pgctnt), coding
  105     )
  106 
  107     if coding == "deflate":
  108         pgctnt = zlib.decompress(pgctnt, -zlib.MAX_WBITS)
  109 
  110     elif coding == "gzip":
  111         buf = io.BytesIO(pgctnt)
  112         f = gzip.GzipFile(fileobj=buf)
  113         pgctnt = f.read()
  114 
  115     elif coding == "sdch":
  116         raise ValueError("SDCH compression is not currently supported")
  117     elif coding == "br":
  118         raise ValueError("Brotli compression is not currently supported")
  119     elif coding == "compress":
  120         raise ValueError("LZW compression is not currently supported")
  121 
  122     log.trace("Content size after decompression: %s", len(pgctnt))
  123     return pgctnt
  124 
  125 
@jinja_filter("http_query")
def query(
    url,
    method="GET",
    params=None,
    data=None,
    data_file=None,
    header_dict=None,
    header_list=None,
    header_file=None,
    username=None,
    password=None,
    auth=None,
    decode=False,
    decode_type="auto",
    status=False,
    headers=False,
    text=False,
    cookies=None,
    cookie_jar=None,
    cookie_format="lwp",
    persist_session=False,
    session_cookie_jar=None,
    data_render=False,
    data_renderer=None,
    header_render=False,
    header_renderer=None,
    template_dict=None,
    test=False,
    test_url=None,
    node="minion",
    port=80,
    opts=None,
    backend=None,
    ca_bundle=None,
    verify_ssl=None,
    cert=None,
    text_out=None,
    headers_out=None,
    decode_out=None,
    stream=False,
    streaming_callback=None,
    header_callback=None,
    handle=False,
    agent=USERAGENT,
    hide_fields=None,
    raise_error=True,
    formdata=False,
    formdata_fieldname=None,
    formdata_filename=None,
    decode_body=True,
    **kwargs
):
    """
    Query a resource, and decode the return data

    Performs one HTTP(S) request through one of three backends --
    ``requests``, ``urllib2``, or tornado (the default, taken from
    ``opts['backend']``) -- and returns a dict.  Keys present depend on
    the output flags:

    - ``body``: response body (decoded to ``str`` when ``decode_body``)
    - ``status``: HTTP status code, when ``status=True``
    - ``headers``: response headers, when ``headers=True``
    - ``text``: response text, when ``text=True``
    - ``dict``: deserialized body (json/xml/yaml), when ``decode=True``
    - ``handle``: the raw backend response, when ``handle``/``stream``
    - ``error``: a message when the request could not be performed

    ``data_file``/``header_file`` may be templates rendered via Salt's
    renderer system (controlled by ``data_render``/``header_render``,
    ``data_renderer``/``header_renderer`` and ``template_dict``).
    ``hide_fields`` keeps the named query/POST fields out of the logs.
    ``test=True`` with ``test_url`` substitutes the target URL.
    """
    ret = {}

    # No opts supplied: load the master or minion config from disk so we
    # can honor proxy/timeout/SSL settings; unknown node types get {}.
    if opts is None:
        if node == "master":
            opts = salt.config.master_config(
                os.path.join(salt.syspaths.CONFIG_DIR, "master")
            )
        elif node == "minion":
            opts = salt.config.minion_config(
                os.path.join(salt.syspaths.CONFIG_DIR, "minion")
            )
        else:
            opts = {}

    if not backend:
        backend = opts.get("backend", "tornado")

    # Only refresh DNS when the URL host is not a literal IPv4 address
    # (a literal address needs no resolution).
    match = re.match(
        r"https?://((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)($|/)",
        url,
    )
    if not match:
        salt.utils.network.refresh_dns()

    if backend == "requests":
        if HAS_REQUESTS is False:
            ret["error"] = (
                "http.query has been set to use requests, but the "
                "requests library does not seem to be installed"
            )
            log.error(ret["error"])
            return ret
        else:
            # Quiet requests' own logging; Salt does its own below.
            requests_log = logging.getLogger("requests")
            requests_log.setLevel(logging.WARNING)

    # Some libraries don't support separation of url and GET parameters
    # Don't need a try/except block, since Salt depends on tornado
    url_full = salt.ext.tornado.httputil.url_concat(url, params) if params else url

    if ca_bundle is None:
        ca_bundle = get_ca_bundle(opts)

    if verify_ssl is None:
        verify_ssl = opts.get("verify_ssl", True)

    if cert is None:
        cert = opts.get("cert", None)

    # A data_file template overrides any literal `data` argument.
    if data_file is not None:
        data = _render(data_file, data_render, data_renderer, template_dict, opts)

    # Make sure no secret fields show up in logs
    log_url = sanitize_url(url_full, hide_fields)

    log.debug("Requesting URL %s using %s method", log_url, method)
    log.debug("Using backend: %s", backend)

    if method == "POST" and log.isEnabledFor(logging.TRACE):
        # Make sure no secret fields show up in logs
        if isinstance(data, dict):
            log_data = data.copy()
            if isinstance(hide_fields, list):
                for item in data:
                    for field in hide_fields:
                        if item == field:
                            log_data[item] = "XXXXXXXXXX"
            log.trace("Request POST Data: %s", pprint.pformat(log_data))
        else:
            log.trace("Request POST Data: %s", pprint.pformat(data))

    # A rendered header template may produce either a dict of headers or
    # raw "Name: value" lines.
    if header_file is not None:
        header_tpl = _render(
            header_file, header_render, header_renderer, template_dict, opts
        )
        if isinstance(header_tpl, dict):
            header_dict = header_tpl
        else:
            header_list = header_tpl.splitlines()

    if header_dict is None:
        header_dict = {}

    if header_list is None:
        header_list = []

    if cookie_jar is None:
        cookie_jar = os.path.join(
            opts.get("cachedir", salt.syspaths.CACHE_DIR), "cookies.txt"
        )
    if session_cookie_jar is None:
        session_cookie_jar = os.path.join(
            opts.get("cachedir", salt.syspaths.CACHE_DIR), "cookies.session.p"
        )

    if persist_session is True and salt.utils.msgpack.HAS_MSGPACK:
        # TODO: This is hackish; it will overwrite the session cookie jar with
        # all cookies from this one connection, rather than behaving like a
        # proper cookie jar. Unfortunately, since session cookies do not
        # contain expirations, they can't be stored in a proper cookie jar.
        if os.path.isfile(session_cookie_jar):
            with salt.utils.files.fopen(session_cookie_jar, "rb") as fh_:
                session_cookies = salt.utils.msgpack.load(fh_)
            if isinstance(session_cookies, dict):
                header_dict.update(session_cookies)
        else:
            with salt.utils.files.fopen(session_cookie_jar, "wb") as fh_:
                salt.utils.msgpack.dump("", fh_)

    # Fold "Name: value" header lines into header_dict.
    # NOTE(review): splitting on every ":" and keeping only comps[1]
    # truncates values that themselves contain a colon (e.g.
    # "Host: example.com:8080" loses the port) — confirm upstream intent.
    for header in header_list:
        comps = header.split(":")
        if len(comps) < 2:
            continue
        header_dict[comps[0].strip()] = comps[1].strip()

    # Explicit `auth` wins; otherwise build basic-auth from user/pass.
    if not auth:
        if username and password:
            auth = (username, password)

    if agent == USERAGENT:
        agent = "{} http.query()".format(agent)
    header_dict["User-agent"] = agent

    # Per-backend session/cookie setup.
    if backend == "requests":
        sess = requests.Session()
        sess.auth = auth
        sess.headers.update(header_dict)
        log.trace("Request Headers: %s", sess.headers)
        sess_cookies = sess.cookies
        sess.verify = verify_ssl
    elif backend == "urllib2":
        sess_cookies = None
    else:
        # Tornado
        sess_cookies = None

    # When cookie handling was requested, back the jar with a file in the
    # requested format, creating it on first use.
    if cookies is not None:
        if cookie_format == "mozilla":
            sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(
                cookie_jar
            )
        else:
            sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(cookie_jar)
        if not os.path.isfile(cookie_jar):
            sess_cookies.save()
        sess_cookies.load()

    if test is True:
        if test_url is None:
            return {}
        else:
            url = test_url
            ret["test"] = True

    if backend == "requests":
        req_kwargs = {}
        if stream is True:
            if requests.__version__[0] == "0":
                # 'stream' was called 'prefetch' before 1.0, with flipped meaning
                req_kwargs["prefetch"] = False
            else:
                req_kwargs["stream"] = True

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, str):
                if os.path.exists(cert):
                    req_kwargs["cert"] = cert
            elif isinstance(cert, list):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs["cert"] = cert
            else:
                log.error(
                    "The client-side certificate path that"
                    " was passed is not valid: %s",
                    cert,
                )

        if formdata:
            if not formdata_fieldname:
                ret["error"] = "formdata_fieldname is required when formdata=True"
                log.error(ret["error"])
                return ret
            # multipart/form-data upload: `data` becomes the file payload.
            result = sess.request(
                method,
                url,
                params=params,
                files={formdata_fieldname: (formdata_filename, StringIO(data))},
                **req_kwargs
            )
        else:
            result = sess.request(method, url, params=params, data=data, **req_kwargs)
        result.raise_for_status()
        if stream is True:
            # fake a HTTP response header
            header_callback("HTTP/1.0 {} MESSAGE".format(result.status_code))
            # fake streaming the content
            streaming_callback(result.content)
            return {
                "handle": result,
            }

        if handle is True:
            return {
                "handle": result,
                "body": result.content,
            }

        log.debug(
            "Final URL location of Response: %s", sanitize_url(result.url, hide_fields)
        )

        result_status_code = result.status_code
        result_headers = result.headers
        result_text = result.content
        result_cookies = result.cookies
        body = result.content
        if not isinstance(body, str) and decode_body:
            body = body.decode(result.encoding or "utf-8")
        ret["body"] = body
    elif backend == "urllib2":
        request = urllib_request.Request(url_full, data)
        handlers = [
            urllib_request.HTTPHandler,
            urllib_request.HTTPCookieProcessor(sess_cookies),
        ]

        if url.startswith("https"):
            hostname = request.get_host()
            handlers[0] = urllib_request.HTTPSHandler(1)
            if not HAS_MATCHHOSTNAME:
                log.warning(
                    "match_hostname() not available, SSL hostname checking "
                    "not available. THIS CONNECTION MAY NOT BE SECURE!"
                )
            elif verify_ssl is False:
                log.warning(
                    "SSL certificate verification has been explicitly "
                    "disabled. THIS CONNECTION MAY NOT BE SECURE!"
                )
            else:
                # Manually verify the peer certificate's hostname before
                # issuing the real request.
                # NOTE(review): this rebinds the `port` parameter and uses
                # the deprecated ssl.wrap_socket — confirm against upstream.
                if ":" in hostname:
                    hostname, port = hostname.split(":")
                else:
                    port = 443
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect((hostname, int(port)))
                sockwrap = ssl.wrap_socket(
                    sock, ca_certs=ca_bundle, cert_reqs=ssl.CERT_REQUIRED
                )
                try:
                    match_hostname(sockwrap.getpeercert(), hostname)
                except CertificateError as exc:
                    ret[
                        "error"
                    ] = "The certificate was invalid. Error returned was: {}".format(
                        pprint.pformat(exc)
                    )
                    return ret

                # Client-side cert handling
                if cert is not None:
                    cert_chain = None
                    if isinstance(cert, str):
                        if os.path.exists(cert):
                            cert_chain = cert
                    elif isinstance(cert, list):
                        if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                            cert_chain = cert
                    else:
                        log.error(
                            "The client-side certificate path that was "
                            "passed is not valid: %s",
                            cert,
                        )
                        return
                    if hasattr(ssl, "SSLContext"):
                        # Python >= 2.7.9
                        # NOTE(review): load_cert_chain is an *instance*
                        # method that returns None; calling it on the class
                        # and binding the result looks broken — an
                        # SSLContext should be constructed first. Confirm
                        # against upstream before relying on this path.
                        context = ssl.SSLContext.load_cert_chain(*cert_chain)
                        handlers.append(
                            urllib_request.HTTPSHandler(context=context)
                        )  # pylint: disable=E1123
                    else:
                        # Python < 2.7.9
                        cert_kwargs = {
                            "host": request.get_host(),
                            "port": port,
                            "cert_file": cert_chain[0],
                        }
                        if len(cert_chain) > 1:
                            cert_kwargs["key_file"] = cert_chain[1]
                        handlers[0] = salt.ext.six.moves.http_client.HTTPSConnection(
                            **cert_kwargs
                        )

        opener = urllib_request.build_opener(*handlers)
        for header in header_dict:
            request.add_header(header, header_dict[header])
        # Force the request method regardless of whether data is present.
        request.get_method = lambda: method
        try:
            result = opener.open(request)
        except URLError as exc:
            return {"Error": str(exc)}
        if stream is True or handle is True:
            # NOTE(review): urllib response objects expose .read(), not
            # .content — this attribute access looks broken; confirm.
            return {
                "handle": result,
                "body": result.content,
            }

        result_status_code = result.code
        result_headers = dict(result.info())
        result_text = result.read()
        # Honor an explicit charset from the Content-Type header for
        # text/* responses before the generic utf-8 fallback below.
        if "Content-Type" in result_headers:
            res_content_type, res_params = cgi.parse_header(
                result_headers["Content-Type"]
            )
            if (
                res_content_type.startswith("text/")
                and "charset" in res_params
                and not isinstance(result_text, str)
            ):
                result_text = result_text.decode(res_params["charset"])
        if six.PY3 and isinstance(result_text, bytes) and decode_body:
            result_text = result_text.decode("utf-8")
        ret["body"] = result_text
    else:
        # Tornado
        req_kwargs = {}

        # Client-side cert handling
        if cert is not None:
            if isinstance(cert, str):
                if os.path.exists(cert):
                    req_kwargs["client_cert"] = cert
            elif isinstance(cert, list):
                if os.path.exists(cert[0]) and os.path.exists(cert[1]):
                    req_kwargs["client_cert"] = cert[0]
                    req_kwargs["client_key"] = cert[1]
            else:
                log.error(
                    "The client-side certificate path that "
                    "was passed is not valid: %s",
                    cert,
                )

        # Tornado expects an encoded body, not a dict.
        if isinstance(data, dict):
            data = _urlencode(data)

        if verify_ssl:
            req_kwargs["ca_certs"] = ca_bundle

        max_body = opts.get(
            "http_max_body", salt.config.DEFAULT_MINION_OPTS["http_max_body"]
        )
        connect_timeout = opts.get(
            "http_connect_timeout",
            salt.config.DEFAULT_MINION_OPTS["http_connect_timeout"],
        )
        timeout = opts.get(
            "http_request_timeout",
            salt.config.DEFAULT_MINION_OPTS["http_request_timeout"],
        )

        client_argspec = None

        proxy_host = opts.get("proxy_host", None)
        if proxy_host:
            # tornado requires a str for proxy_host, cannot be a unicode str in py2
            proxy_host = salt.utils.stringutils.to_str(proxy_host)
        proxy_port = opts.get("proxy_port", None)
        proxy_username = opts.get("proxy_username", None)
        if proxy_username:
            # tornado requires a str, cannot be unicode str in py2
            proxy_username = salt.utils.stringutils.to_str(proxy_username)
        proxy_password = opts.get("proxy_password", None)
        if proxy_password:
            # tornado requires a str, cannot be unicode str in py2
            proxy_password = salt.utils.stringutils.to_str(proxy_password)
        no_proxy = opts.get("no_proxy", [])

        # Since tornado doesnt support no_proxy, we'll always hand it empty proxies or valid ones
        # except we remove the valid ones if a url has a no_proxy hostname in it
        if urlparse(url_full).hostname in no_proxy:
            proxy_host = None
            proxy_port = None
            proxy_username = None
            proxy_password = None

        # We want to use curl_http if we have a proxy defined
        if proxy_host and proxy_port:
            if HAS_CURL_HTTPCLIENT is False:
                ret["error"] = (
                    "proxy_host and proxy_port has been set. This requires pycurl and tornado, "
                    "but the libraries does not seem to be installed"
                )
                log.error(ret["error"])
                return ret

            # NOTE(review): the configure() string references the plain
            # "tornado" package, not salt.ext.tornado — confirm this
            # resolves correctly in the vendored-tornado setup.
            salt.ext.tornado.httpclient.AsyncHTTPClient.configure(
                "tornado.curl_httpclient.CurlAsyncHTTPClient"
            )
            client_argspec = salt.utils.args.get_function_argspec(
                salt.ext.tornado.curl_httpclient.CurlAsyncHTTPClient.initialize
            )
        else:
            salt.ext.tornado.httpclient.AsyncHTTPClient.configure(None)
            client_argspec = salt.utils.args.get_function_argspec(
                salt.ext.tornado.simple_httpclient.SimpleAsyncHTTPClient.initialize
            )

        # Only the simple (non-curl) client supports max_body_size.
        supports_max_body_size = "max_body_size" in client_argspec.args

        req_kwargs.update(
            {
                "method": method,
                "headers": header_dict,
                "auth_username": username,
                "auth_password": password,
                "body": data,
                "validate_cert": verify_ssl,
                "allow_nonstandard_methods": True,
                "streaming_callback": streaming_callback,
                "header_callback": header_callback,
                "connect_timeout": connect_timeout,
                "request_timeout": timeout,
                "proxy_host": proxy_host,
                "proxy_port": proxy_port,
                "proxy_username": proxy_username,
                "proxy_password": proxy_password,
                "raise_error": raise_error,
                "decompress_response": False,
            }
        )

        # Unicode types will cause a TypeError when Tornado's curl HTTPClient
        # invokes setopt. Therefore, make sure all arguments we pass which
        # contain strings are str types.
        req_kwargs = salt.utils.data.decode(req_kwargs, to_str=True)

        try:
            download_client = (
                HTTPClient(max_body_size=max_body)
                if supports_max_body_size
                else HTTPClient()
            )
            result = download_client.fetch(url_full, **req_kwargs)
        except salt.ext.tornado.httpclient.HTTPError as exc:
            ret["status"] = exc.code
            ret["error"] = str(exc)
            return ret
        except (socket.herror, OSError, socket.timeout, socket.gaierror) as exc:
            if status is True:
                ret["status"] = 0
            ret["error"] = str(exc)
            log.debug("Cannot perform 'http.query': %s - %s", url_full, ret["error"])
            return ret

        if stream is True or handle is True:
            return {
                "handle": result,
                "body": result.body,
            }

        result_status_code = result.code
        result_headers = result.headers
        result_text = result.body
        # Honor an explicit charset for text/* responses, mirroring the
        # urllib2 branch above.
        if "Content-Type" in result_headers:
            res_content_type, res_params = cgi.parse_header(
                result_headers["Content-Type"]
            )
            if (
                res_content_type.startswith("text/")
                and "charset" in res_params
                and not isinstance(result_text, str)
            ):
                result_text = result_text.decode(res_params["charset"])
        if six.PY3 and isinstance(result_text, bytes) and decode_body:
            result_text = result_text.decode("utf-8")
        ret["body"] = result_text
        # Feed any Set-Cookie headers into the cookie jar created above
        # (sess_cookies is only a jar when `cookies` was requested).
        if "Set-Cookie" in result_headers and cookies is not None:
            result_cookies = parse_cookie_header(result_headers["Set-Cookie"])
            for item in result_cookies:
                sess_cookies.set_cookie(item)
        else:
            result_cookies = None

    # Some backends hand headers back as raw "Name: value" lines;
    # normalize them into a dict (preserving colons inside values here).
    if isinstance(result_headers, list):
        result_headers_dict = {}
        for header in result_headers:
            comps = header.split(":")
            result_headers_dict[comps[0].strip()] = ":".join(comps[1:]).strip()
        result_headers = result_headers_dict

    log.debug("Response Status Code: %s", result_status_code)
    log.trace("Response Headers: %s", result_headers)
    log.trace("Response Cookies: %s", sess_cookies)
    # log.trace("Content: %s", result_text)

    coding = result_headers.get("Content-Encoding", "identity")

    # Requests will always decompress the content, and working around that is annoying.
    if backend != "requests":
        result_text = __decompressContent(coding, result_text)

    try:
        log.trace("Response Text: %s", result_text)
    except UnicodeEncodeError as exc:
        log.trace(
            "Cannot Trace Log Response Text: %s. This may be due to "
            "incompatibilities between requests and logging.",
            exc,
        )

    if text_out is not None:
        with salt.utils.files.fopen(text_out, "w") as tof:
            tof.write(result_text)

    # NOTE(review): headers are only written when headers_out already
    # exists, and a dict is passed to write() — both look suspect;
    # confirm against upstream.
    if headers_out is not None and os.path.exists(headers_out):
        with salt.utils.files.fopen(headers_out, "w") as hof:
            hof.write(result_headers)

    if cookies is not None:
        sess_cookies.save()

    if persist_session is True and salt.utils.msgpack.HAS_MSGPACK:
        # TODO: See persist_session above
        if "set-cookie" in result_headers:
            with salt.utils.files.fopen(session_cookie_jar, "wb") as fh_:
                session_cookies = result_headers.get("set-cookie", None)
                if session_cookies is not None:
                    salt.utils.msgpack.dump({"Cookie": session_cookies}, fh_)
                else:
                    salt.utils.msgpack.dump("", fh_)

    if status is True:
        ret["status"] = result_status_code

    if headers is True:
        ret["headers"] = result_headers

    if decode is True:
        # "auto" infers the decoder from the Content-Type header,
        # defaulting to JSON when no header is present.
        if decode_type == "auto":
            content_type = result_headers.get("content-type", "application/json")
            if "xml" in content_type:
                decode_type = "xml"
            elif "json" in content_type:
                decode_type = "json"
            elif "yaml" in content_type:
                decode_type = "yaml"
            else:
                decode_type = "plain"

        valid_decodes = ("json", "xml", "yaml", "plain")
        if decode_type not in valid_decodes:
            ret[
                "error"
            ] = "Invalid decode_type specified. Valid decode types are: {}".format(
                pprint.pformat(valid_decodes)
            )
            log.error(ret["error"])
            return ret

        if decode_type == "json":
            ret["dict"] = salt.utils.json.loads(result_text)
        elif decode_type == "xml":
            ret["dict"] = []
            items = ET.fromstring(result_text)
            for item in items:
                ret["dict"].append(xml.to_dict(item))
        elif decode_type == "yaml":
            ret["dict"] = salt.utils.data.decode(salt.utils.yaml.safe_load(result_text))
        else:
            # "plain" falls through to the text output below.
            text = True

        if decode_out:
            with salt.utils.files.fopen(decode_out, "w") as dof:
                dof.write(result_text)

    if text is True:
        ret["text"] = result_text

    return ret
  764 
  765 
  766 def get_ca_bundle(opts=None):
  767     """
  768     Return the location of the ca bundle file. See the following article:
  769 
  770         http://tinyurl.com/k7rx42a
  771     """
  772     if hasattr(get_ca_bundle, "__return_value__"):
  773         return get_ca_bundle.__return_value__
  774 
  775     if opts is None:
  776         opts = {}
  777 
  778     opts_bundle = opts.get("ca_bundle", None)
  779     if opts_bundle is not None and os.path.exists(opts_bundle):
  780         return opts_bundle
  781 
  782     file_roots = opts.get("file_roots", {"base": [salt.syspaths.SRV_ROOT_DIR]})
  783 
  784     # Please do not change the order without good reason
  785 
  786     # Check Salt first
  787     for salt_root in file_roots.get("base", []):
  788         for path in ("cacert.pem", "ca-bundle.crt"):
  789             cert_path = os.path.join(salt_root, path)
  790             if os.path.exists(cert_path):
  791                 return cert_path
  792 
  793     locations = (
  794         # Debian has paths that often exist on other distros
  795         "/etc/ssl/certs/ca-certificates.crt",
  796         # RedHat is also very common
  797         "/etc/pki/tls/certs/ca-bundle.crt",
  798         "/etc/pki/tls/certs/ca-bundle.trust.crt",
  799         # RedHat's link for Debian compatibility
  800         "/etc/ssl/certs/ca-bundle.crt",
  801         # SUSE has an unusual path
  802         "/var/lib/ca-certificates/ca-bundle.pem",
  803         # OpenBSD has an unusual path
  804         "/etc/ssl/cert.pem",
  805     )
  806     for path in locations:
  807         if os.path.exists(path):
  808             return path
  809 
  810     if salt.utils.platform.is_windows() and HAS_CERTIFI:
  811         return certifi.where()
  812 
  813     return None
  814 
  815 
  816 def update_ca_bundle(
  817     target=None, source=None, opts=None, merge_files=None,
  818 ):
  819     """
  820     Attempt to update the CA bundle file from a URL
  821 
  822     If not specified, the local location on disk (``target``) will be
  823     auto-detected, if possible. If it is not found, then a new location on disk
  824     will be created and updated.
  825 
  826     The default ``source`` is:
  827 
  828         http://curl.haxx.se/ca/cacert.pem
  829 
  830     This is based on the information at:
  831 
  832         http://curl.haxx.se/docs/caextract.html
  833 
  834     A string or list of strings representing files to be appended to the end of
  835     the CA bundle file may also be passed through as ``merge_files``.
  836     """
  837     if opts is None:
  838         opts = {}
  839 
  840     if target is None:
  841         target = get_ca_bundle(opts)
  842 
  843     if target is None:
  844         log.error("Unable to detect location to write CA bundle to")
  845         return
  846 
  847     if source is None:
  848         source = opts.get("ca_bundle_url", "http://curl.haxx.se/ca/cacert.pem")
  849 
  850     log.debug("Attempting to download %s to %s", source, target)
  851     query(source, text=True, decode=False, headers=False, status=False, text_out=target)
  852 
  853     if merge_files is not None:
  854         if isinstance(merge_files, str):
  855             merge_files = [merge_files]
  856 
  857         if not isinstance(merge_files, list):
  858             log.error(
  859                 "A value was passed as merge_files which was not either "
  860                 "a string or a list"
  861             )
  862             return
  863 
  864         merge_content = ""
  865 
  866         for cert_file in merge_files:
  867             if os.path.exists(cert_file):
  868                 log.debug("Queueing up %s to be appended to %s", cert_file, target)
  869                 try:
  870                     with salt.utils.files.fopen(cert_file, "r") as fcf:
  871                         merge_content = "\n".join((merge_content, fcf.read()))
  872                 except OSError as exc:
  873                     log.error(
  874                         "Reading from %s caused the following error: %s", cert_file, exc
  875                     )
  876 
  877         if merge_content:
  878             log.debug("Appending merge_files to %s", target)
  879             try:
  880                 with salt.utils.files.fopen(target, "a") as tfp:
  881                     tfp.write("\n")
  882                     tfp.write(merge_content)
  883             except OSError as exc:
  884                 log.error("Writing to %s caused the following error: %s", target, exc)
  885 
  886 
  887 def _render(template, render, renderer, template_dict, opts):
  888     """
  889     Render a template
  890     """
  891     if render:
  892         if template_dict is None:
  893             template_dict = {}
  894         if not renderer:
  895             renderer = opts.get("renderer", "jinja|yaml")
  896         rend = salt.loader.render(opts, {})
  897         blacklist = opts.get("renderer_blacklist")
  898         whitelist = opts.get("renderer_whitelist")
  899         ret = compile_template(
  900             template, rend, renderer, blacklist, whitelist, **template_dict
  901         )
  902         if salt.utils.stringio.is_readable(ret):
  903             ret = ret.read()
  904         if str(ret).startswith("#!") and not str(ret).startswith("#!/"):
  905             ret = str(ret).split("\n", 1)[1]
  906         return ret
  907     with salt.utils.files.fopen(template, "r") as fh_:
  908         return fh_.read()
  909 
  910 
  911 def parse_cookie_header(header):
  912     """
  913     Parse the "Set-cookie" header, and return a list of cookies.
  914 
  915     This function is here because Tornado's HTTPClient doesn't handle cookies.
  916     """
  917     attribs = (
  918         "expires",
  919         "path",
  920         "domain",
  921         "version",
  922         "httponly",
  923         "secure",
  924         "comment",
  925         "max-age",
  926         "samesite",
  927     )
  928 
  929     # Split into cookie(s); handles headers with multiple cookies defined
  930     morsels = []
  931     for item in header.split(";"):
  932         item = item.strip()
  933         if "," in item and "expires" not in item:
  934             for part in item.split(","):
  935                 morsels.append(part)
  936         else:
  937             morsels.append(item)
  938 
  939     # Break down morsels into actual cookies
  940     cookies = []
  941     cookie = {}
  942     value_set = False
  943     for morsel in morsels:
  944         parts = morsel.split("=")
  945         parts[0] = parts[0].lower()
  946         if parts[0] in attribs:
  947             if parts[0] in cookie:
  948                 cookies.append(cookie)
  949                 cookie = {}
  950             if len(parts) > 1:
  951                 cookie[parts[0]] = "=".join(parts[1:])
  952             else:
  953                 cookie[parts[0]] = True
  954         else:
  955             if value_set is True:
  956                 # This is a new cookie; save the old one and clear for this one
  957                 cookies.append(cookie)
  958                 cookie = {}
  959                 value_set = False
  960             cookie[parts[0]] = "=".join(parts[1:])
  961             value_set = True
  962 
  963     if cookie:
  964         # Set the last cookie that was processed
  965         cookies.append(cookie)
  966 
  967     # These arguments are required by cookielib.Cookie()
  968     reqd = (
  969         "version",
  970         "port",
  971         "port_specified",
  972         "domain",
  973         "domain_specified",
  974         "domain_initial_dot",
  975         "path",
  976         "path_specified",
  977         "secure",
  978         "expires",
  979         "discard",
  980         "comment",
  981         "comment_url",
  982         "rest",
  983     )
  984 
  985     ret = []
  986     for cookie in cookies:
  987         name = None
  988         value = None
  989         for item in list(cookie):
  990             if item in attribs:
  991                 continue
  992             name = item
  993             value = cookie.pop(item)
  994 
  995         # cookielib.Cookie() requires an epoch
  996         if "expires" in cookie:
  997             cookie["expires"] = salt.ext.six.moves.http_cookiejar.http2time(
  998                 cookie["expires"]
  999             )
 1000 
 1001         # Fill in missing required fields
 1002         for req in reqd:
 1003             if req not in cookie:
 1004                 cookie[req] = ""
 1005         if cookie["version"] == "":
 1006             cookie["version"] = 0
 1007         if cookie["rest"] == "":
 1008             cookie["rest"] = {}
 1009         if cookie["expires"] == "":
 1010             cookie["expires"] = 0
 1011 
 1012         # Remove attribs that don't apply to Cookie objects
 1013         cookie.pop("httponly", None)
 1014         cookie.pop("samesite", None)
 1015         ret.append(
 1016             salt.ext.six.moves.http_cookiejar.Cookie(name=name, value=value, **cookie)
 1017         )
 1018 
 1019     return ret
 1020 
 1021 
 1022 def sanitize_url(url, hide_fields):
 1023     """
 1024     Make sure no secret fields show up in logs
 1025     """
 1026     if isinstance(hide_fields, list):
 1027         url_comps = splitquery(url)
 1028         log_url = url_comps[0]
 1029         if len(url_comps) > 1:
 1030             log_url += "?"
 1031         for pair in url_comps[1:]:
 1032             url_tmp = None
 1033             for field in hide_fields:
 1034                 comps_list = pair.split("&")
 1035                 if url_tmp:
 1036                     url_tmp = url_tmp.split("&")
 1037                     url_tmp = _sanitize_url_components(url_tmp, field)
 1038                 else:
 1039                     url_tmp = _sanitize_url_components(comps_list, field)
 1040             log_url += url_tmp
 1041         return log_url.rstrip("&")
 1042     else:
 1043         return str(url)
 1044 
 1045 
 1046 def _sanitize_url_components(comp_list, field):
 1047     """
 1048     Recursive function to sanitize each component of the url.
 1049     """
 1050     if not comp_list:
 1051         return ""
 1052     elif comp_list[0].startswith("{}=".format(field)):
 1053         ret = "{}=XXXXXXXXXX&".format(field)
 1054         comp_list.remove(comp_list[0])
 1055         return ret + _sanitize_url_components(comp_list, field)
 1056     else:
 1057         ret = "{}&".format(comp_list[0])
 1058         comp_list.remove(comp_list[0])
 1059         return ret + _sanitize_url_components(comp_list, field)