mirror of
https://github.com/munki/munki.git
synced 2026-05-04 11:29:16 -05:00
Merge branch 'NSURL'
This commit is contained in:
Executable
+78
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
#
|
||||
# Copyright 2014 Greg Neagle.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License');
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an 'AS IS' BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""
|
||||
keychain
|
||||
|
||||
Created by Greg Neagle on 2014-10-15.
|
||||
Incorporating work and ideas from Michael Lynn here:
|
||||
https://gist.github.com/pudquick/7704254
|
||||
|
||||
"""
|
||||
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from munkilib import munkicommon
|
||||
from munkilib import keychain
|
||||
|
||||
|
||||
def main():
    '''Gather Munki server/client certificate info and build keychains.

    Reads certificate/key paths from Munki's own configuration
    (via the keychain module), lets command-line options override
    individual paths, then adds the CA cert(s) to the System keychain
    and creates the Munki client keychain.

    Must be run as root; exits with EXIT_STATUS_ROOT_REQUIRED otherwise.
    '''
    usage = "usage: %prog [options]"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('--ca_cert_path', help='Path to a CA cert.')
    parser.add_option('--ca_dir_path', help='Path to a directory of CA certs.')
    parser.add_option('--client_cert_path', help='Path to a client cert.')
    parser.add_option('--client_key_path', help='Path to a client key.')
    parser.add_option('--site_url',
                      help='Site URL. '
                           '(Generally the root URL of the munki repo.)')
    options, arguments = parser.parse_args()

    # check to see if we're root
    if os.geteuid() != 0:
        sys.stderr.write('You must run this as root!\n')
        # sys.exit rather than the site-supplied exit() builtin, which
        # is not guaranteed to be present (e.g. under python -S)
        sys.exit(munkicommon.EXIT_STATUS_ROOT_REQUIRED)

    # this tool takes options only -- no positional arguments
    if arguments:
        sys.stderr.write('Extra arguments supplied!\n')
        parser.print_usage()
        sys.exit(-1)

    # starting point: whatever Munki's configuration provides
    server_cert_data = keychain.get_munki_server_cert_data()
    client_cert_data = keychain.get_munki_client_cert_data()

    # command-line options override what we find from Munki
    if options.ca_cert_path:
        server_cert_data['ca_cert_path'] = options.ca_cert_path
    if options.ca_dir_path:
        server_cert_data['ca_dir_path'] = options.ca_dir_path

    if options.client_cert_path:
        client_cert_data['client_cert_path'] = options.client_cert_path
    if options.client_key_path:
        client_cert_data['client_key_path'] = options.client_key_path
    if options.site_url:
        client_cert_data['site_url'] = options.site_url

    keychain.add_ca_certs_to_system_keychain(server_cert_data)
    keychain.make_client_keychain(client_cert_data)
|
||||
|
||||
|
||||
# Run main() only when executed as a script (not on import).
if __name__ == '__main__':
    main()
|
||||
@@ -74,7 +74,7 @@ def makeDMG(pkgpath):
|
||||
pkgname = os.path.basename(pkgpath)
|
||||
print 'Making disk image containing %s...' % pkgname
|
||||
diskimagename = os.path.splitext(pkgname)[0] + '.dmg'
|
||||
diskimagepath = os.path.join(munkicommon.tmpdir, diskimagename)
|
||||
diskimagepath = os.path.join(munkicommon.tmpdir(), diskimagename)
|
||||
cmd = ['/usr/bin/hdiutil', 'create', '-srcfolder', pkgpath, diskimagepath]
|
||||
proc = subprocess.Popen(cmd, shell=False, bufsize=-1,
|
||||
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||
|
||||
@@ -660,7 +660,7 @@ def doAdobeCS5Uninstall(adobeInstallInfo, payloads=None):
|
||||
munkicommon.display_error("No uninstall.xml in adobe_install_info")
|
||||
return -1
|
||||
payloadcount = adobeInstallInfo.get('payload_count', 0)
|
||||
path = os.path.join(munkicommon.tmpdir, "uninstall.xml")
|
||||
path = os.path.join(munkicommon.tmpdir(), "uninstall.xml")
|
||||
deploymentFile = writefile(uninstallxml, path)
|
||||
if not deploymentFile:
|
||||
return -1
|
||||
|
||||
@@ -770,13 +770,10 @@ class AppleUpdates(object):
|
||||
self.CacheAppleCatalog()
|
||||
except CatalogNotFoundError:
|
||||
return False
|
||||
except ReplicationError, err:
|
||||
except (ReplicationError, fetch.MunkiDownloadError), err:
|
||||
munkicommon.display_warning('Could not download Apple SUS catalog:')
|
||||
munkicommon.display_warning('\t%s', str(err))
|
||||
return False
|
||||
except fetch.MunkiDownloadError:
|
||||
munkicommon.display_warning('Could not download Apple SUS catalog.')
|
||||
return False
|
||||
|
||||
if not force_check and not self._IsForceCheckNeccessary(before_hash):
|
||||
munkicommon.display_info('Skipping Apple Software Update check '
|
||||
|
||||
+135
-339
@@ -34,9 +34,13 @@ import urlparse
|
||||
import xattr
|
||||
|
||||
#our libs
|
||||
import keychain
|
||||
import munkicommon
|
||||
from gurl import Gurl
|
||||
#import munkistatus
|
||||
|
||||
from Foundation import NSHTTPURLResponse
|
||||
|
||||
|
||||
# XATTR name storing the ETAG of the file when downloaded via http(s).
|
||||
XATTR_ETAG = 'com.googlecode.munki.etag'
|
||||
@@ -44,7 +48,7 @@ XATTR_ETAG = 'com.googlecode.munki.etag'
|
||||
XATTR_SHA = 'com.googlecode.munki.sha256'
|
||||
|
||||
|
||||
class CurlError(Exception):
|
||||
class GurlError(Exception):
|
||||
pass
|
||||
|
||||
class HTTPError(Exception):
|
||||
@@ -54,8 +58,8 @@ class MunkiDownloadError(Exception):
|
||||
"""Base exception for download errors"""
|
||||
pass
|
||||
|
||||
class CurlDownloadError(MunkiDownloadError):
|
||||
"""Curl failed to download the item"""
|
||||
class GurlDownloadError(MunkiDownloadError):
|
||||
"""Gurl failed to download the item"""
|
||||
pass
|
||||
|
||||
class FileCopyError(MunkiDownloadError):
|
||||
@@ -87,349 +91,144 @@ def writeCachedChecksum(file_path, fhash=None):
|
||||
return None
|
||||
|
||||
|
||||
WARNINGSLOGGED = {}
|
||||
def curl(url, destinationpath,
|
||||
cert_info=None, custom_headers=None, donotrecurse=False, etag=None,
|
||||
message=None, onlyifnewer=False, resume=False, follow_redirects=False):
|
||||
def header_dict_from_list(array):
    """Convert a list of 'Name: value' HTTP-header strings into a dict.

    Splits each entry at the first colon and strips surrounding
    whitespace from both halves.  Entries without a colon or with an
    empty value are skipped; duplicate names keep the last value seen.
    If array is None, return None.
    """
    if array is None:
        return array
    parts = (item.partition(':') for item in array)
    return {name.strip(): value.strip()
            for (name, sep, value) in parts
            if sep and value}
|
||||
|
||||
|
||||
def get_url(url, destinationpath,
|
||||
custom_headers=None, message=None, onlyifnewer=False,
|
||||
resume=False, follow_redirects=False):
|
||||
"""Gets an HTTP or HTTPS URL and stores it in
|
||||
destination path. Returns a dictionary of headers, which includes
|
||||
http_result_code and http_result_description.
|
||||
Will raise CurlError if curl returns an error.
|
||||
Will raise CurlError if Gurl returns an error.
|
||||
Will raise HTTPError if HTTP Result code is not 2xx or 304.
|
||||
If destinationpath already exists, you can set 'onlyifnewer' to true to
|
||||
indicate you only want to download the file only if it's newer on the
|
||||
server.
|
||||
If you have an ETag from the current destination path, you can pass that
|
||||
to download the file only if it is different.
|
||||
Finally, if you set resume to True, curl will attempt to resume an
|
||||
interrupted download. You'll get an error if the existing file is
|
||||
complete; if the file has changed since the first download attempt, you'll
|
||||
get a mess."""
|
||||
If you set resume to True, Gurl will attempt to resume an
|
||||
interrupted download."""
|
||||
|
||||
header = {}
|
||||
header['http_result_code'] = '000'
|
||||
header['http_result_description'] = ''
|
||||
|
||||
curldirectivepath = os.path.join(munkicommon.tmpdir, 'curl_temp')
|
||||
tempdownloadpath = destinationpath + '.download'
|
||||
if os.path.exists(tempdownloadpath) and not resume:
|
||||
if resume and not os.path.exists(destinationpath):
|
||||
os.remove(tempdownloadpath)
|
||||
|
||||
# we're writing all the curl options to a file and passing that to
|
||||
# curl so we avoid the problem of URLs showing up in a process listing
|
||||
cache_data = None
|
||||
if onlyifnewer and os.path.exists(destinationpath):
|
||||
# create a temporary Gurl object so we can extract the
|
||||
# stored caching data so we can download only if the
|
||||
# file has changed on the server
|
||||
gurl_obj = Gurl.alloc().initWithOptions_({'file': destinationpath})
|
||||
cache_data = gurl_obj.get_stored_headers()
|
||||
del gurl_obj
|
||||
|
||||
options = {'url': url,
|
||||
'file': tempdownloadpath,
|
||||
'follow_redirects': follow_redirects,
|
||||
'can_resume': resume,
|
||||
'additional_headers': header_dict_from_list(custom_headers),
|
||||
'download_only_if_changed': onlyifnewer,
|
||||
'cache_data': cache_data,
|
||||
'logging_function': munkicommon.display_debug2}
|
||||
munkicommon.display_debug2('Options: %s' % options)
|
||||
|
||||
connection = Gurl.alloc().initWithOptions_(options)
|
||||
stored_percent_complete = -1
|
||||
stored_bytes_received = 0
|
||||
connection.start()
|
||||
try:
|
||||
fileobj = open(curldirectivepath, mode='w')
|
||||
print >> fileobj, 'silent' # no progress meter
|
||||
print >> fileobj, 'show-error' # print error msg to stderr
|
||||
print >> fileobj, 'no-buffer' # don't buffer output
|
||||
print >> fileobj, 'fail' # throw error if download fails
|
||||
print >> fileobj, 'dump-header -' # dump headers to stdout
|
||||
print >> fileobj, 'speed-time = 30' # give up if too slow d/l
|
||||
print >> fileobj, 'output = "%s"' % tempdownloadpath
|
||||
print >> fileobj, 'ciphers = HIGH,!ADH' #use only secure >=128 bit SSL
|
||||
print >> fileobj, 'url = "%s"' % url
|
||||
|
||||
munkicommon.display_debug2('follow_redirects is %s', follow_redirects)
|
||||
if follow_redirects:
|
||||
print >> fileobj, 'location' # follow redirects
|
||||
|
||||
if cert_info:
|
||||
cacert = cert_info.get('cacert')
|
||||
capath = cert_info.get('capath')
|
||||
cert = cert_info.get('cert')
|
||||
key = cert_info.get('key')
|
||||
if cacert:
|
||||
if not os.path.isfile(cacert):
|
||||
raise CurlError(-1, 'No CA cert at %s' % cacert)
|
||||
print >> fileobj, 'cacert = "%s"' % cacert
|
||||
if capath:
|
||||
if not os.path.isdir(capath):
|
||||
raise CurlError(-2, 'No CA directory at %s' % capath)
|
||||
print >> fileobj, 'capath = "%s"' % capath
|
||||
if cert:
|
||||
if not os.path.isfile(cert):
|
||||
raise CurlError(-3, 'No client cert at %s' % cert)
|
||||
print >> fileobj, 'cert = "%s"' % cert
|
||||
if key:
|
||||
if not os.path.isfile(key):
|
||||
raise CurlError(-4, 'No client key at %s' % key)
|
||||
print >> fileobj, 'key = "%s"' % key
|
||||
|
||||
if os.path.exists(destinationpath):
|
||||
if etag:
|
||||
escaped_etag = etag.replace('"','\\"')
|
||||
print >> fileobj, ('header = "If-None-Match: %s"'
|
||||
% escaped_etag)
|
||||
elif onlyifnewer:
|
||||
print >> fileobj, 'time-cond = "%s"' % destinationpath
|
||||
else:
|
||||
os.remove(destinationpath)
|
||||
|
||||
if os.path.exists(tempdownloadpath):
|
||||
if resume and not os.path.exists(destinationpath):
|
||||
# let's try to resume this download
|
||||
print >> fileobj, 'continue-at -'
|
||||
# if an existing etag, only resume if etags still match.
|
||||
tempetag = getxattr(tempdownloadpath, XATTR_ETAG)
|
||||
if tempetag:
|
||||
# Note: If-Range is more efficient, but the response
|
||||
# confuses curl (Error: 33 if etag not match).
|
||||
escaped_etag = tempetag.replace('"','\\"')
|
||||
print >> fileobj, ('header = "If-Match: %s"'
|
||||
% escaped_etag)
|
||||
else:
|
||||
os.remove(tempdownloadpath)
|
||||
|
||||
# Add any additional headers specified in custom_headers
|
||||
# custom_headers must be an array of strings with valid HTTP
|
||||
# header format.
|
||||
if custom_headers:
|
||||
for custom_header in custom_headers:
|
||||
custom_header = custom_header.strip().encode('utf-8')
|
||||
if re.search(r'^[\w-]+:.+', custom_header):
|
||||
print >> fileobj, ('header = "%s"' % custom_header)
|
||||
else:
|
||||
munkicommon.display_warning(
|
||||
'Skipping invalid HTTP header: %s' % custom_header)
|
||||
|
||||
fileobj.close()
|
||||
except Exception, e:
|
||||
raise CurlError(-5, 'Error writing curl directive: %s' % str(e))
|
||||
|
||||
# In Mavericks we need to wrap our call to curl with a utility
|
||||
# that makes curl think it is connected to a tty-like
|
||||
# device so its output is unbuffered so we can get progress info
|
||||
cmd = []
|
||||
minor_os_version = munkicommon.getOsVersion(as_tuple=True)[1]
|
||||
if minor_os_version > 8:
|
||||
# Try to find our ptyexec tool
|
||||
# first look in the parent directory of this file's directory
|
||||
# (../)
|
||||
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
ptyexec_path = os.path.join(parent_dir, 'ptyexec')
|
||||
if not os.path.exists(ptyexec_path):
|
||||
# try absolute path in munki's normal install dir
|
||||
ptyexec_path = '/usr/local/munki/ptyexec'
|
||||
if os.path.exists(ptyexec_path):
|
||||
cmd = [ptyexec_path]
|
||||
|
||||
# Workaround for current issue in OS X 10.9's included curl
|
||||
# Allows for alternate curl binary path as Apple's included curl currently
|
||||
# broken for client-side certificate usage
|
||||
curl_path = munkicommon.pref('CurlPath') or '/usr/bin/curl'
|
||||
cmd.extend([curl_path,
|
||||
'-q', # don't read .curlrc file
|
||||
'--config', # use config file
|
||||
curldirectivepath])
|
||||
|
||||
proc = subprocess.Popen(cmd, shell=False, bufsize=1,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
|
||||
targetsize = 0
|
||||
downloadedpercent = -1
|
||||
donewithheaders = False
|
||||
maxheaders = 15
|
||||
|
||||
while True:
|
||||
if not donewithheaders:
|
||||
info = proc.stdout.readline().strip('\r\n')
|
||||
if info:
|
||||
munkicommon.display_debug2(info)
|
||||
if info.startswith('HTTP/'):
|
||||
header['http_result_code'] = info.split(None, 2)[1]
|
||||
header['http_result_description'] = info.split(None, 2)[2]
|
||||
elif ': ' in info:
|
||||
part = info.split(None, 1)
|
||||
fieldname = part[0].rstrip(':').lower()
|
||||
header[fieldname] = part[1]
|
||||
else:
|
||||
# we got an empty line; end of headers (or curl exited)
|
||||
if follow_redirects:
|
||||
if header.get('http_result_code') in ['301', '302', '303']:
|
||||
# redirect, so more headers are coming.
|
||||
# Throw away the headers we've received so far
|
||||
header = {}
|
||||
header['http_result_code'] = '000'
|
||||
header['http_result_description'] = ''
|
||||
else:
|
||||
donewithheaders = True
|
||||
try:
|
||||
# Prefer Content-Length header to determine download
|
||||
# size, otherwise fall back to a custom X-Download-Size
|
||||
# header.
|
||||
# This is primary for servers that use chunked transfer
|
||||
# encoding, when Content-Length is forbidden by
|
||||
# RFC2616 4.4. An example of such a server is
|
||||
# Google App Engine Blobstore.
|
||||
targetsize = (
|
||||
header.get('content-length') or
|
||||
header.get('x-download-size'))
|
||||
targetsize = int(targetsize)
|
||||
except (ValueError, TypeError):
|
||||
targetsize = 0
|
||||
if header.get('http_result_code') == '206':
|
||||
# partial content because we're resuming
|
||||
munkicommon.display_detail(
|
||||
'Resuming partial download for %s' %
|
||||
os.path.basename(destinationpath))
|
||||
contentrange = header.get('content-range')
|
||||
if contentrange.startswith('bytes'):
|
||||
try:
|
||||
targetsize = int(contentrange.split('/')[1])
|
||||
except (ValueError, TypeError):
|
||||
targetsize = 0
|
||||
|
||||
if message and header.get('http_result_code') != '304':
|
||||
if message:
|
||||
# log always, display if verbose is 1 or more
|
||||
# also display in MunkiStatus detail field
|
||||
munkicommon.display_status_minor(message)
|
||||
|
||||
elif targetsize and header.get('http_result_code').startswith('2'):
|
||||
# display progress if we get a 2xx result code
|
||||
if os.path.exists(tempdownloadpath):
|
||||
downloadedsize = os.path.getsize(tempdownloadpath)
|
||||
percent = int(float(downloadedsize)
|
||||
/float(targetsize)*100)
|
||||
if percent != downloadedpercent:
|
||||
# percent changed; update display
|
||||
downloadedpercent = percent
|
||||
munkicommon.display_percent_done(downloadedpercent, 100)
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
# Headers have finished, but not targetsize or HTTP2xx.
|
||||
# It's possible that Content-Length was not in the headers.
|
||||
# so just sleep and loop again. We can't show progress.
|
||||
time.sleep(0.1)
|
||||
|
||||
if (proc.poll() != None):
|
||||
# For small download files curl may exit before all headers
|
||||
# have been parsed, don't immediately exit.
|
||||
maxheaders -= 1
|
||||
if donewithheaders or maxheaders <= 0:
|
||||
while True:
|
||||
# if we did `while not connection.isDone()` we'd miss printing
|
||||
# messages and displaying percentages if we exit the loop first
|
||||
connection_done = connection.isDone()
|
||||
if message and connection.status and connection.status != 304:
|
||||
# log always, display if verbose is 1 or more
|
||||
# also display in MunkiStatus detail field
|
||||
munkicommon.display_status_minor(message)
|
||||
# now clear message so we don't display it again
|
||||
message = None
|
||||
if (str(connection.status).startswith('2')
|
||||
and connection.percentComplete != -1):
|
||||
if connection.percentComplete != stored_percent_complete:
|
||||
# display percent done if it has changed
|
||||
stored_percent_complete = connection.percentComplete
|
||||
munkicommon.display_percent_done(
|
||||
stored_percent_complete, 100)
|
||||
elif connection.bytesReceived != stored_bytes_received:
|
||||
# if we don't have percent done info, log bytes received
|
||||
stored_bytes_received = connection.bytesReceived
|
||||
munkicommon.display_detail(
|
||||
'Bytes received: %s', stored_bytes_received)
|
||||
if connection_done:
|
||||
break
|
||||
|
||||
retcode = proc.poll()
|
||||
if retcode:
|
||||
curlerr = ''
|
||||
try:
|
||||
curlerr = proc.stderr.read().rstrip('\n')
|
||||
curlerr = curlerr.split(None, 2)[2]
|
||||
except IndexError:
|
||||
pass
|
||||
if retcode == 22:
|
||||
# 22 means any 400 series return code. Note: header seems not to
|
||||
# be dumped to STDOUT for immediate failures. Hence
|
||||
# http_result_code is likely blank/000. Read it from stderr.
|
||||
if re.search(r'URL returned error: [0-9]+$', curlerr):
|
||||
header['http_result_code'] = curlerr[curlerr.rfind(' ')+1:]
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
# safely kill the connection then re-raise
|
||||
connection.cancel()
|
||||
raise
|
||||
except Exception, err: # too general, I know
|
||||
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
|
||||
connection.cancel()
|
||||
# Re-raise the error as a GurlError
|
||||
raise GurlError(-1, str(err))
|
||||
|
||||
if os.path.exists(tempdownloadpath):
|
||||
if not resume:
|
||||
os.remove(tempdownloadpath)
|
||||
elif retcode == 33 or header.get('http_result_code') == '412':
|
||||
# 33: server doesn't support range requests
|
||||
# 412: Etag didn't match (precondition failed), could not
|
||||
# resume partial download as file on server has changed.
|
||||
if retcode == 33 and not 'HTTPRange' in WARNINGSLOGGED:
|
||||
# use display_info instead of display_warning so these
|
||||
# don't get reported but are available in the log
|
||||
# and in command-line output
|
||||
munkicommon.display_info('WARNING: Web server refused '
|
||||
'partial/range request. Munki cannot run '
|
||||
'efficiently when this support is absent for '
|
||||
'pkg urls. URL: %s' % url)
|
||||
WARNINGSLOGGED['HTTPRange'] = 1
|
||||
os.remove(tempdownloadpath)
|
||||
# The partial failed immediately as not supported.
|
||||
# Try a full download again immediately.
|
||||
if not donotrecurse:
|
||||
return curl(url, destinationpath,
|
||||
cert_info=cert_info,
|
||||
custom_headers=custom_headers,
|
||||
donotrecurse=True,
|
||||
etag=etag,
|
||||
message=message,
|
||||
onlyifnewer=onlyifnewer,
|
||||
resume=resume,
|
||||
follow_redirects=follow_redirects)
|
||||
elif retcode == 22:
|
||||
# TODO: Made http(s) connection but 400 series error.
|
||||
# What should we do?
|
||||
# 403 could be ok, just that someone is currently offsite and
|
||||
# the server is refusing the service them while there.
|
||||
# 404 could be an interception proxy at a public wifi point.
|
||||
# The partial may still be ok later.
|
||||
# 416 could be dangerous - the targeted resource may now be
|
||||
# different / smaller. We need to delete the temp or retrying
|
||||
# will never work.
|
||||
if header.get('http_result_code') == 416:
|
||||
# Bad range request.
|
||||
os.remove(tempdownloadpath)
|
||||
elif header.get('http_result_code') == 503:
|
||||
# Web server temporarily unavailable.
|
||||
pass
|
||||
elif not header.get('http_result_code').startswith('4'):
|
||||
# 500 series, or no error code parsed.
|
||||
# Perhaps the webserver gets really confused by partial
|
||||
# requests. It is likely majorly misconfigured so we won't
|
||||
# try asking it anything challenging.
|
||||
os.remove(tempdownloadpath)
|
||||
elif header.get('etag'):
|
||||
xattr.setxattr(tempdownloadpath, XATTR_ETAG, header['etag'])
|
||||
# TODO: should we log this diagnostic here (we didn't previously)?
|
||||
# Currently for a pkg all that is logged on failure is:
|
||||
# "WARNING: Download of Firefox failed." with no detail. Logging at
|
||||
# the place where this exception is caught has to be done in many
|
||||
# places.
|
||||
munkicommon.display_detail('Download error: %s. Failed (%s) with: %s'
|
||||
% (url,retcode,curlerr))
|
||||
munkicommon.display_detail('Headers: %s', header)
|
||||
raise CurlError(retcode, curlerr)
|
||||
if connection.error != None:
|
||||
# Gurl returned an error
|
||||
munkicommon.display_detail(
|
||||
'Download error %s: %s', connection.error.code(),
|
||||
connection.error.localizedDescription())
|
||||
if connection.SSLerror:
|
||||
munkicommon.display_detail(
|
||||
'SSL error detail: %s', str(connection.SSLerror))
|
||||
keychain.debug_output()
|
||||
munkicommon.display_detail('Headers: %s', connection.headers)
|
||||
if os.path.exists(tempdownloadpath) and not resume:
|
||||
os.remove(tempdownloadpath)
|
||||
raise GurlError(connection.error.code(),
|
||||
connection.error.localizedDescription())
|
||||
|
||||
if connection.response != None:
|
||||
munkicommon.display_debug1('Status: %s', connection.status)
|
||||
munkicommon.display_debug1('Headers: %s', connection.headers)
|
||||
if connection.redirection != []:
|
||||
munkicommon.display_debug1('Redirection: %s', connection.redirection)
|
||||
|
||||
temp_download_exists = os.path.isfile(tempdownloadpath)
|
||||
connection.headers['http_result_code'] = str(connection.status)
|
||||
description = NSHTTPURLResponse.localizedStringForStatusCode_(
|
||||
connection.status)
|
||||
connection.headers['http_result_description'] = description
|
||||
|
||||
if str(connection.status).startswith('2') and temp_download_exists:
|
||||
os.rename(tempdownloadpath, destinationpath)
|
||||
return connection.headers
|
||||
elif connection.status == 304:
|
||||
# unchanged on server
|
||||
munkicommon.display_debug1('Item is unchanged on the server.')
|
||||
return connection.headers
|
||||
else:
|
||||
temp_download_exists = os.path.isfile(tempdownloadpath)
|
||||
http_result = header.get('http_result_code')
|
||||
if http_result.startswith('2') and temp_download_exists:
|
||||
downloadedsize = os.path.getsize(tempdownloadpath)
|
||||
if downloadedsize >= targetsize:
|
||||
if targetsize and not downloadedpercent == 100:
|
||||
# need to display a percent done of 100%
|
||||
munkicommon.display_percent_done(100, 100)
|
||||
os.rename(tempdownloadpath, destinationpath)
|
||||
if (resume and not header.get('etag')
|
||||
and not 'HTTPetag' in WARNINGSLOGGED):
|
||||
# use display_info instead of display_warning so these
|
||||
# don't get reported but are available in the log
|
||||
# and in command-line output
|
||||
munkicommon.display_info(
|
||||
'WARNING: '
|
||||
'Web server did not return an etag. Munki cannot '
|
||||
'safely resume downloads without etag support on the '
|
||||
'web server. URL: %s' % url)
|
||||
WARNINGSLOGGED['HTTPetag'] = 1
|
||||
return header
|
||||
else:
|
||||
# not enough bytes retreived
|
||||
if not resume and temp_download_exists:
|
||||
os.remove(tempdownloadpath)
|
||||
raise CurlError(-5, 'Expected %s bytes, got: %s' %
|
||||
(targetsize, downloadedsize))
|
||||
elif http_result == '304':
|
||||
return header
|
||||
else:
|
||||
# there was a download error of some sort; clean all relevant
|
||||
# downloads that may be in a bad state.
|
||||
for f in [tempdownloadpath, destinationpath]:
|
||||
try:
|
||||
os.unlink(f)
|
||||
except OSError:
|
||||
pass
|
||||
raise HTTPError(http_result,
|
||||
header.get('http_result_description',''))
|
||||
# there was an HTTP error of some sort; remove our temp download.
|
||||
if os.path.exists(tempdownloadpath):
|
||||
try:
|
||||
os.unlink(tempdownloadpath)
|
||||
except OSError:
|
||||
pass
|
||||
raise HTTPError(connection.status,
|
||||
connection.headers.get('http_result_description',''))
|
||||
|
||||
|
||||
def getResourceIfChangedAtomically(url,
|
||||
destinationpath,
|
||||
cert_info=None,
|
||||
custom_headers=None,
|
||||
expected_hash=None,
|
||||
message=None,
|
||||
@@ -475,7 +274,7 @@ def getResourceIfChangedAtomically(url,
|
||||
if url_parse.scheme in ['http', 'https']:
|
||||
changed = getHTTPfileIfChangedAtomically(
|
||||
url, destinationpath,
|
||||
cert_info=cert_info, custom_headers=custom_headers,
|
||||
custom_headers=custom_headers,
|
||||
message=message, resume=resume, follow_redirects=follow_redirects)
|
||||
elif url_parse.scheme == 'file':
|
||||
changed = getFileIfChangedAtomically(url_parse.path, destinationpath)
|
||||
@@ -554,7 +353,7 @@ def getFileIfChangedAtomically(path, destinationpath):
|
||||
|
||||
|
||||
def getHTTPfileIfChangedAtomically(url, destinationpath,
|
||||
cert_info=None, custom_headers=None,
|
||||
custom_headers=None,
|
||||
message=None, resume=False,
|
||||
follow_redirects=False):
|
||||
"""Gets file from HTTP URL, checking first to see if it has changed on the
|
||||
@@ -563,7 +362,7 @@ def getHTTPfileIfChangedAtomically(url, destinationpath,
|
||||
Returns True if a new download was required; False if the
|
||||
item is already in the local cache.
|
||||
|
||||
Raises CurlDownloadError if there is an error."""
|
||||
Raises GurlDownloadError if there is an error."""
|
||||
|
||||
etag = None
|
||||
getonlyifnewer = False
|
||||
@@ -575,29 +374,27 @@ def getHTTPfileIfChangedAtomically(url, destinationpath,
|
||||
getonlyifnewer = False
|
||||
|
||||
try:
|
||||
header = curl(url,
|
||||
header = get_url(url,
|
||||
destinationpath,
|
||||
cert_info=cert_info,
|
||||
custom_headers=custom_headers,
|
||||
etag=etag,
|
||||
message=message,
|
||||
onlyifnewer=getonlyifnewer,
|
||||
resume=resume,
|
||||
follow_redirects=follow_redirects)
|
||||
|
||||
except CurlError, err:
|
||||
except GurlError, err:
|
||||
err = 'Error %s: %s' % tuple(err)
|
||||
raise CurlDownloadError(err)
|
||||
raise GurlDownloadError(err)
|
||||
|
||||
except HTTPError, err:
|
||||
err = 'HTTP result %s: %s' % tuple(err)
|
||||
raise CurlDownloadError(err)
|
||||
raise GurlDownloadError(err)
|
||||
|
||||
err = None
|
||||
if header['http_result_code'] == '304':
|
||||
# not modified, return existing file
|
||||
munkicommon.display_debug1('%s already exists and is up-to-date.'
|
||||
% destinationpath)
|
||||
munkicommon.display_debug1('%s already exists and is up-to-date.',
|
||||
destinationpath)
|
||||
# file is in cache and is unchanged, so we return False
|
||||
return False
|
||||
else:
|
||||
@@ -691,4 +488,3 @@ def verifySoftwarePackageIntegrity(file_path, item_hash, always_hash=False):
|
||||
|
||||
return (False, chash)
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,392 @@
|
||||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
#
|
||||
# Copyright 2009-2014 Greg Neagle.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License');
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an 'AS IS' BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""
|
||||
gurl.py
|
||||
|
||||
Created by Greg Neagle on 2013-11-21.
|
||||
|
||||
curl replacement using NSURLConnection and friends
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import xattr
|
||||
|
||||
from Foundation import NSRunLoop, NSDate
|
||||
from Foundation import NSObject, NSURL, NSURLConnection
|
||||
from Foundation import NSMutableURLRequest
|
||||
from Foundation import NSURLRequestReloadIgnoringLocalCacheData
|
||||
from Foundation import NSURLResponseUnknownLength
|
||||
from Foundation import NSLog
|
||||
from Foundation import NSString, NSUTF8StringEncoding
|
||||
from Foundation import NSURLCredential, NSURLCredentialPersistenceNone
|
||||
|
||||
from Foundation import NSData, \
|
||||
NSPropertyListSerialization, \
|
||||
NSPropertyListMutableContainersAndLeaves, \
|
||||
NSPropertyListXMLFormat_v1_0
|
||||
|
||||
|
||||
# Human-readable descriptions for negative SSL/TLS error codes in the
# -9800 range, used to turn an opaque numeric SSL failure into a
# meaningful message.
# NOTE(review): these look like Apple SecureTransport errSSL* OSStatus
# values -- confirm against Security/SecureTransport.h.
ssl_error_codes = {
    -9800: u'SSL protocol error',
    -9801: u'Cipher Suite negotiation failure',
    -9802: u'Fatal alert',
    -9803: u'I/O would block (not fatal)',
    -9804: u'Attempt to restore an unknown session',
    -9805: u'Connection closed gracefully',
    -9806: u'Connection closed via error',
    -9807: u'Invalid certificate chain',
    -9808: u'Bad certificate format',
    -9809: u'Underlying cryptographic error',
    -9810: u'Internal error',
    -9811: u'Module attach failure',
    -9812: u'Valid cert chain, untrusted root',
    -9813: u'Cert chain not verified by root',
    -9814: u'Chain had an expired cert',
    -9815: u'Chain had a cert not yet valid',
    -9816: u'Server closed session with no notification',
    -9817: u'Insufficient buffer provided',
    -9818: u'Bad SSLCipherSuite',
    -9819: u'Unexpected message received',
    -9820: u'Bad MAC',
    -9821: u'Decryption failed',
    -9822: u'Record overflow',
    -9823: u'Decompression failure',
    -9824: u'Handshake failure',
    -9825: u'Misc. bad certificate',
    -9826: u'Bad unsupported cert format',
    -9827: u'Certificate revoked',
    -9828: u'Certificate expired',
    -9829: u'Unknown certificate',
    -9830: u'Illegal parameter',
    -9831: u'Unknown Cert Authority',
    -9832: u'Access denied',
    -9833: u'Decoding error',
    -9834: u'Decryption error',
    -9835: u'Export restriction',
    -9836: u'Bad protocol version',
    -9837: u'Insufficient security',
    -9838: u'Internal error',
    -9839: u'User canceled',
    -9840: u'No renegotiation allowed',
    -9841: u'Peer cert is valid, or was ignored if verification disabled',
    -9842: u'Server has requested a client cert',
    -9843: u'Peer host name mismatch',
    -9844: u'Peer dropped connection before responding',
    -9845: u'Decryption failure',
    -9846: u'Bad MAC',
    -9847: u'Record overflow',
    -9848: u'Configuration error',
    -9849: u'Unexpected (skipped) record in DTLS'}
|
||||
|
||||
|
||||
class Gurl(NSObject):
    '''A class for getting content from a URL
    using NSURLConnection and friends'''

    # name of the xattr used to stash download metadata (headers) on the
    # destination file, for resume/cache-validation on later runs
    GURL_XATTR = 'com.googlecode.munki.downloadData'

    def initWithOptions_(self, options):
        '''PyObjC-style designated initializer.

        options is a dictionary; recognized keys:
            url, file, follow_redirects, can_resume, additional_headers,
            username, password, download_only_if_changed, cache_data,
            connection_timeout, logging_function.
        Returns self, or None if the superclass init failed.'''
        self = super(Gurl, self).init()
        if not self:
            # ObjC convention: init can fail and return nil/None
            return

        # caller-supplied configuration
        self.follow_redirects = options.get('follow_redirects', False)
        self.destination_path = options.get('file')
        self.can_resume = options.get('can_resume', False)
        self.url = options.get('url')
        self.additional_headers = options.get('additional_headers', {})
        self.username = options.get('username')
        self.password = options.get('password')
        self.download_only_if_changed = options.get(
            'download_only_if_changed', False)
        self.cache_data = options.get('cache_data')
        self.connection_timeout = options.get('connection_timeout', 10)

        # logging callable; defaults to NSLog
        self.log = options.get('logging_function', NSLog)

        # transfer state, updated by the NSURLConnection delegate methods
        self.resume = False
        self.response = None
        self.headers = None
        self.status = None
        self.error = None
        self.SSLerror = None
        self.done = False
        self.redirection = []
        self.destination = None
        self.bytesReceived = 0
        self.expectedLength = -1
        self.percentComplete = 0
        self.connection = None
        return self
|
||||
|
||||
    def start(self):
        '''Start the download: build the NSMutableURLRequest (adding any
        extra headers, a Range header when resuming a partial file, and
        conditional-GET headers when download_only_if_changed is set),
        then kick off an asynchronous NSURLConnection.  The delegate
        callbacks drive the rest of the transfer.'''
        if not self.destination_path:
            self.log('No output file specified.')
            self.done = True
            return
        url = NSURL.URLWithString_(self.url)
        request = (
            NSMutableURLRequest.requestWithURL_cachePolicy_timeoutInterval_(
                url, NSURLRequestReloadIgnoringLocalCacheData,
                self.connection_timeout))
        if self.additional_headers:
            for header, value in self.additional_headers.items():
                request.setValue_forHTTPHeaderField_(value, header)
        # does the file already exist? See if we can resume a partial download
        if os.path.isfile(self.destination_path):
            stored_data = self.get_stored_headers()
            if (self.can_resume and 'expected-length' in stored_data
                    and ('last-modified' in stored_data
                         or 'etag' in stored_data)):
                # we have a partial file and we're allowed to resume
                self.resume = True
                local_filesize = os.path.getsize(self.destination_path)
                byte_range = 'bytes=%s-' % local_filesize
                request.setValue_forHTTPHeaderField_(byte_range, 'Range')
        if self.download_only_if_changed and not self.resume:
            # conditional GET: skip the download if the server copy is
            # unchanged since our stored last-modified/etag
            stored_data = self.cache_data or self.get_stored_headers()
            if 'last-modified' in stored_data:
                request.setValue_forHTTPHeaderField_(
                    stored_data['last-modified'], 'if-modified-since')
            if 'etag' in stored_data:
                request.setValue_forHTTPHeaderField_(
                    stored_data['etag'], 'if-none-match')
        self.connection = NSURLConnection.alloc().initWithRequest_delegate_(
            request, self)
|
||||
|
||||
def cancel(self):
|
||||
if self.connection:
|
||||
self.connection.cancel()
|
||||
self.done = True
|
||||
|
||||
def isDone(self):
|
||||
if self.done:
|
||||
return self.done
|
||||
# let the delegates do their thing
|
||||
NSRunLoop.currentRunLoop().runUntilDate_(
|
||||
NSDate.dateWithTimeIntervalSinceNow_(.1))
|
||||
return self.done
|
||||
|
||||
    def get_stored_headers(self):
        '''Returns any stored headers for self.destination_path.

        Headers are stored as a binary/XML plist inside the GURL_XATTR
        extended attribute on the file.  Returns an empty dict when the
        xattr is missing, unreadable, or not a valid plist.'''
        # try to read stored headers
        try:
            stored_plist_str = xattr.getxattr(
                self.destination_path, self.GURL_XATTR)
        except (KeyError, IOError):
            # no such xattr (KeyError) or file unreadable/missing (IOError)
            return {}
        # wrap in a buffer so PyObjC bridges it as NSData (Python 2 only)
        data = buffer(stored_plist_str)
        dataObject, plistFormat, error = (
            NSPropertyListSerialization.
            propertyListFromData_mutabilityOption_format_errorDescription_(
                data, NSPropertyListMutableContainersAndLeaves, None, None))
        if error:
            # xattr content was not a parseable plist
            return {}
        else:
            return dataObject
|
||||
|
||||
def store_headers(self, headers):
|
||||
'''Store dictionary data as an xattr for self.destination_path'''
|
||||
plistData, error = (
|
||||
NSPropertyListSerialization.
|
||||
dataFromPropertyList_format_errorDescription_(
|
||||
headers, NSPropertyListXMLFormat_v1_0, None))
|
||||
if error:
|
||||
string = ''
|
||||
else:
|
||||
string = str(plistData)
|
||||
try:
|
||||
xattr.setxattr(self.destination_path, self.GURL_XATTR, string)
|
||||
except IOError, err:
|
||||
self.log('Could not store metadata to %s: %s' %
|
||||
(self.destination_path, err))
|
||||
pass
|
||||
|
||||
def normalize_header_dict(self, a_dict):
|
||||
'''Since HTTP header names are not case-sensitive, we normalize a
|
||||
dictionary of HTTP headers by converting all the key names to
|
||||
lower case'''
|
||||
new_dict = {}
|
||||
for key, value in a_dict.items():
|
||||
new_dict[key.lower()] = value
|
||||
return new_dict
|
||||
|
||||
    def connection_didFailWithError_(self, connection, error):
        '''NSURLConnection delegate method: the transfer failed.
        Records the NSError, extracts a human-readable SSL error when
        applicable, and closes the destination file.'''
        self.error = error
        # If this was an SSL error, try to extract the SSL error code.
        if 'NSUnderlyingError' in error.userInfo():
            ssl_code = error.userInfo()['NSUnderlyingError'].userInfo().get(
                '_kCFNetworkCFStreamSSLErrorOriginalValue', None)
            if ssl_code:
                # pair the raw code with a description from the module-level
                # ssl_error_codes table
                self.SSLerror = (ssl_code, ssl_error_codes.get(
                    ssl_code, 'Unknown SSL error'))
        self.done = True
        if self.destination and self.destination_path:
            self.destination.close()
            # delete it? Might not want to...
|
||||
|
||||
    def connectionDidFinishLoading_(self, connection):
        '''NSURLConnection delegate method: the transfer finished
        successfully.  Closes the output file and, for 2xx responses,
        drops the expected-length entry from the stored headers since the
        download is no longer partial.'''
        self.done = True
        if self.destination and self.destination_path:
            self.destination.close()
            if str(self.status).startswith('2'):
                # remove the expected-size from the stored headers
                headers = self.get_stored_headers()
                if 'expected-length' in headers:
                    del headers['expected-length']
                    self.store_headers(headers)
|
||||
|
||||
def connection_didReceiveResponse_(self, connection, response):
|
||||
self.response = response
|
||||
self.bytesReceived = 0
|
||||
self.percentComplete = -1
|
||||
self.expectedLength = response.expectedContentLength()
|
||||
|
||||
download_data = {}
|
||||
if response.className() == u'NSHTTPURLResponse':
|
||||
# Headers and status code only available for HTTP/S transfers
|
||||
self.status = response.statusCode()
|
||||
self.headers = dict(response.allHeaderFields())
|
||||
normalized_headers = self.normalize_header_dict(self.headers)
|
||||
if 'last-modified' in normalized_headers:
|
||||
download_data['last-modified'] = normalized_headers[
|
||||
'last-modified']
|
||||
if 'etag' in normalized_headers:
|
||||
download_data['etag'] = normalized_headers['etag']
|
||||
download_data['expected-length'] = self.expectedLength
|
||||
|
||||
if not self.destination and self.destination_path:
|
||||
if self.status == 206 and self.resume:
|
||||
# 206 is Partial Content response
|
||||
stored_data = self.get_stored_headers()
|
||||
if (not stored_data
|
||||
or stored_data.get('etag') != download_data.get('etag')
|
||||
or stored_data.get('last-modified') != download_data.get(
|
||||
'last-modified')):
|
||||
# file on server is different than the one
|
||||
# we have a partial for
|
||||
self.log(
|
||||
'Can\'t resume download; file on server has changed.')
|
||||
connection.cancel()
|
||||
self.log('Removing %s' % self.destination_path)
|
||||
os.unlink(self.destination_path)
|
||||
# restart and attempt to download the entire file
|
||||
self.log(
|
||||
'Restarting download of %s' % self.destination_path)
|
||||
os.unlink(self.destination_path)
|
||||
self.start()
|
||||
return
|
||||
# try to resume
|
||||
self.log('Resuming download for %s' % self.destination_path)
|
||||
# add existing file size to bytesReceived so far
|
||||
local_filesize = os.path.getsize(self.destination_path)
|
||||
self.bytesReceived = local_filesize
|
||||
self.expectedLength += local_filesize
|
||||
# open file for append
|
||||
self.destination = open(self.destination_path, 'a')
|
||||
|
||||
elif str(self.status).startswith('2'):
|
||||
# not resuming, just open the file for writing
|
||||
self.destination = open(self.destination_path, 'w')
|
||||
# store some headers with the file for use if we need to resume
|
||||
# the downloadand for future checking if the file on the server
|
||||
# has changed
|
||||
self.store_headers(download_data)
|
||||
|
||||
def connection_willSendRequest_redirectResponse_(
|
||||
self, connection, request, response):
|
||||
if response == None:
|
||||
# This isn't a real redirect, this is without talking to a server.
|
||||
# Pass it back as-is
|
||||
return request
|
||||
# But if we're here, it appears to be a real redirect attempt
|
||||
# Annoyingly, we apparently can't get access to the headers from the
|
||||
# site that told us to redirect. All we know is that we were told
|
||||
# to redirect and where the new location is.
|
||||
newURL = request.URL().absoluteString()
|
||||
self.redirection.append([newURL, dict(response.allHeaderFields())])
|
||||
if self.follow_redirects:
|
||||
# Allow the redirect
|
||||
self.log('Allowing redirect to: %s' % newURL)
|
||||
return request
|
||||
else:
|
||||
# Deny the redirect
|
||||
self.log('Denying redirect to: %s' % newURL)
|
||||
return None
|
||||
|
||||
    def connection_canAuthenticateAgainstProtectionSpace_(
            self, connection, protectionSpace):
        '''NSURLConnection delegate method: tell the framework whether we
        want to handle this authentication ourselves.  Returns True only
        when we have a username/password and the method is one of the
        basic HTTP schemes we know; otherwise the OS handles it.'''
        # this is not called in 10.5.x.
        self.log('connection_canAuthenticateAgainstProtectionSpace_')
        if protectionSpace:
            host = protectionSpace.host()
            realm = protectionSpace.realm()
            authenticationMethod = protectionSpace.authenticationMethod()
            self.log('Protection space found. Host: %s Realm: %s AuthMethod: %s'
                     % (host, realm, authenticationMethod))
            if self.username and self.password and authenticationMethod in [
                    'NSURLAuthenticationMethodDefault',
                    'NSURLAuthenticationMethodHTTPBasic',
                    'NSURLAuthenticationMethodHTTPDigest']:
                # we know how to handle this
                self.log('Can handle this authentication request')
                return True
        # we don't know how to handle this; let the OS try
        self.log('Allowing OS to handle authentication request')
        return False
|
||||
|
||||
def connection_didReceiveAuthenticationChallenge_(
|
||||
self, connection, challenge):
|
||||
protectionSpace = challenge.protectionSpace()
|
||||
host = protectionSpace.host()
|
||||
realm = protectionSpace.realm()
|
||||
authenticationMethod = protectionSpace.authenticationMethod()
|
||||
self.log(
|
||||
'Authentication challenge for Host: %s Realm: %s AuthMethod: %s'
|
||||
% (host, realm, authenticationMethod))
|
||||
if challenge.previousFailureCount() > 0:
|
||||
# we have the wrong credentials. just fail
|
||||
self.log('Previous authentication attempt failed.')
|
||||
challenge.sender().cancelAuthenticationChallenge_(challenge)
|
||||
if self.username and self.password and authenticationMethod in [
|
||||
'NSURLAuthenticationMethodDefault',
|
||||
'NSURLAuthenticationMethodHTTPBasic',
|
||||
'NSURLAuthenticationMethodHTTPDigest']:
|
||||
self.log('Will attempt to authenticate.')
|
||||
self.log('Username: %s Password: %s'
|
||||
% (self.username, ('*' * len(self.password or ''))))
|
||||
credential = (
|
||||
NSURLCredential.credentialWithUser_password_persistence_(
|
||||
self.username, self.password, NSURLCredentialPersistenceNone))
|
||||
challenge.sender().useCredential_forAuthenticationChallenge_(
|
||||
credential, challenge)
|
||||
else:
|
||||
# fall back to system-provided default behavior
|
||||
self.log('Continuing without credential.')
|
||||
challenge.sender(
|
||||
).continueWithoutCredentialForAuthenticationChallenge_(
|
||||
challenge)
|
||||
|
||||
    def connection_didReceiveData_(self, connection, data):
        '''NSURLConnection delegate method: a chunk of data arrived.
        Writes it to the open destination file (or logs it when no file
        is open) and updates the progress counters.'''
        if self.destination:
            self.destination.write(str(data))
        else:
            # no destination file open; surface the payload via the logger
            self.log(str(data).decode('UTF-8'))
        self.bytesReceived += len(data)
        if self.expectedLength != NSURLResponseUnknownLength:
            # only compute a percentage when the server told us the size
            self.percentComplete = int(
                float(self.bytesReceived)/float(self.expectedLength) * 100.0)
|
||||
@@ -727,7 +727,7 @@ def installWithInfo(
|
||||
munkicommon.display_debug1("suppress_bundle_relocation: %s" %
|
||||
suppressBundleRelocation )
|
||||
if 'installer_choices_xml' in item:
|
||||
choicesXMLfile = os.path.join(munkicommon.tmpdir,
|
||||
choicesXMLfile = os.path.join(munkicommon.tmpdir(),
|
||||
"choices.xml")
|
||||
FoundationPlist.writePlist(item['installer_choices_xml'],
|
||||
choicesXMLfile)
|
||||
|
||||
@@ -0,0 +1,546 @@
|
||||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
#
|
||||
# Copyright 2014 Greg Neagle.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the 'License');
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an 'AS IS' BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""
|
||||
keychain
|
||||
|
||||
Created by Greg Neagle on 2014-06-09.
|
||||
Incorporating work and ideas from Michael Lynn here:
|
||||
https://gist.github.com/pudquick/7704254
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
import munkicommon
|
||||
|
||||
|
||||
DEFAULT_KEYCHAIN_NAME = 'munki.keychain'
|
||||
DEFAULT_KEYCHAIN_PASSWORD = 'munki'
|
||||
KEYCHAIN_DIRECTORY = os.path.join(
|
||||
munkicommon.pref('ManagedInstallDir'), 'Keychains')
|
||||
|
||||
|
||||
def read_file(pathname):
    '''Return the contents of pathname as a string'''
    try:
        # context manager guarantees the file is closed
        with open(pathname, mode='r') as fileobj:
            return fileobj.read()
    except (OSError, IOError) as err:
        munkicommon.display_error(
            'Could not read %s: %s', pathname, err)
        return ''
|
||||
|
||||
|
||||
def write_file(stringdata, pathname):
    '''Writes stringdata to pathname.
    Returns the pathname on success, empty string on failure.'''
    try:
        # context manager guarantees the file is closed/flushed
        with open(pathname, mode='w') as fileobject:
            fileobject.write(stringdata)
        return pathname
    except (OSError, IOError) as err:
        munkicommon.display_error('Couldn\'t write %s to %s: %s',
                                  stringdata, pathname, err)
        return ''
|
||||
|
||||
|
||||
def get_munki_server_cert_data():
    '''Attempt to get information we need from Munki's preferences or
    defaults. Returns a dictionary with keys ca_cert_path and
    ca_dir_path (either may be None).'''
    ManagedInstallDir = munkicommon.pref('ManagedInstallDir')
    cert_data = {}

    # get server CA cert if it exists so we can verify the Munki server
    cert_data['ca_cert_path'] = None
    cert_data['ca_dir_path'] = None
    if munkicommon.pref('SoftwareRepoCAPath'):
        CA_path = munkicommon.pref('SoftwareRepoCAPath')
        # a file means a single CA cert; a directory means a directory
        # of CA certs
        if os.path.isfile(CA_path):
            cert_data['ca_cert_path'] = CA_path
        elif os.path.isdir(CA_path):
            cert_data['ca_dir_path'] = CA_path
    if munkicommon.pref('SoftwareRepoCACertificate'):
        # SoftwareRepoCACertificate takes precedence over SoftwareRepoCAPath
        cert_data['ca_cert_path'] = munkicommon.pref(
            'SoftwareRepoCACertificate')
    # FIX: identity comparison with None (was '== None')
    if cert_data['ca_cert_path'] is None:
        # fall back to the conventional ca.pem location
        ca_cert_path = os.path.join(ManagedInstallDir, 'certs', 'ca.pem')
        if os.path.exists(ca_cert_path):
            cert_data['ca_cert_path'] = ca_cert_path
    return cert_data
|
||||
|
||||
|
||||
def get_munki_client_cert_data():
    '''Attempt to get information we need from Munki's preferences or
    defaults. Returns a dictionary with keys client_cert_path,
    client_key_path (either may be None) and site_url.'''
    ManagedInstallDir = munkicommon.pref('ManagedInstallDir')
    cert_data = {'client_cert_path': None,
                 'client_key_path': None}

    # get client cert if it exists
    if munkicommon.pref('UseClientCertificate'):
        cert_data['client_cert_path'] = (
            munkicommon.pref('ClientCertificatePath') or None)
        cert_data['client_key_path'] = munkicommon.pref('ClientKeyPath') or None
        if not cert_data['client_cert_path']:
            # no explicit path configured: look in the usual spots
            for name in ('cert.pem', 'client.pem', 'munki.pem'):
                candidate = os.path.join(ManagedInstallDir, 'certs', name)
                if os.path.exists(candidate):
                    cert_data['client_cert_path'] = candidate
                    break

    # normalize the repo URL to exactly one trailing slash
    cert_data['site_url'] = (
        munkicommon.pref('SoftwareRepoURL').rstrip('/') + '/')
    return cert_data
|
||||
|
||||
|
||||
def add_ca_certs_to_system_keychain(certdata=None):
    '''Adds any CA certs as trusted root certs to System.keychain'''

    if not certdata:
        certdata = get_munki_server_cert_data()

    ca_cert_path = certdata['ca_cert_path']
    ca_dir_path = certdata['ca_dir_path']
    SYSTEM_KEYCHAIN = "/Library/Keychains/System.keychain"
    if not os.path.exists(SYSTEM_KEYCHAIN):
        munkicommon.display_warning('%s not found.', SYSTEM_KEYCHAIN)
        return

    if not ca_cert_path and not ca_dir_path:
        # no CA certs, so nothing to do
        munkicommon.display_debug2(
            'No CA cert info provided, so nothing to add to System keychain.')
        return
    munkicommon.display_debug2('CA cert path: %s', ca_cert_path)
    munkicommon.display_debug2('CA dir path: %s', ca_dir_path)

    # collect the certs to add: the single CA cert plus any .pem files
    # found in the CA cert directory
    certs_to_add = []
    if ca_cert_path:
        certs_to_add.append(ca_cert_path)
    if ca_dir_path:
        certs_to_add.extend(
            os.path.join(ca_dir_path, item)
            for item in os.listdir(ca_dir_path)
            if item.endswith('.pem'))

    for cert in certs_to_add:
        munkicommon.display_debug1('Adding CA cert %s...', cert)
        try:
            output = security('add-trusted-cert', '-d',
                              '-k', SYSTEM_KEYCHAIN, cert)
            if output:
                munkicommon.display_debug2(output)
        except SecurityError as err:
            munkicommon.display_error(
                'Could not add CA cert %s into System keychain: %s', cert, err)

    munkicommon.display_info('System.keychain updated.')
|
||||
|
||||
|
||||
def make_client_keychain(certdata=None):
    '''Builds a client cert keychain from existing client certs.

    Creates (or recreates) our keychain, imports the client cert (and
    key, if separate), and sets an identity preference binding that
    identity to the repo site_url.  Must run as root.'''

    if not certdata:
        # just grab data from Munki's preferences/defaults
        certdata = get_munki_client_cert_data()

    client_cert_path = certdata['client_cert_path']
    client_key_path = certdata['client_key_path']
    site_url = certdata['site_url']
    if not client_cert_path:
        # no client, so nothing to do
        munkicommon.display_debug1(
            'No client cert info provided, '
            'so no client keychain will be created.')
        return
    else:
        munkicommon.display_debug1('Client cert path: %s', client_cert_path)
        munkicommon.display_debug1('Client key path: %s', client_key_path)

    # to do some of the following options correctly, we need to be root
    # and have root's home.
    # check to see if we're root
    if os.geteuid() != 0:
        munkicommon.display_error(
            'Can\'t make our client keychain unless we are root!')
        return
    # switch HOME if needed to root's home
    original_home = os.environ.get('HOME')
    if original_home:
        os.environ['HOME'] = os.path.expanduser('~root')

    keychain_pass = (
        munkicommon.pref('KeychainPassword') or DEFAULT_KEYCHAIN_PASSWORD)
    abs_keychain_path = get_keychain_path()
    if os.path.exists(abs_keychain_path):
        # rebuilding from scratch
        os.unlink(abs_keychain_path)
    if not os.path.exists(os.path.dirname(abs_keychain_path)):
        os.makedirs(os.path.dirname(abs_keychain_path))
    # create a new keychain
    munkicommon.display_debug1('Creating client keychain...')
    try:
        output = security('create-keychain',
                          '-p', keychain_pass, abs_keychain_path)
        if output:
            munkicommon.display_debug2(output)
    except SecurityError as err:
        munkicommon.display_error(
            'Could not create keychain %s: %s', abs_keychain_path, err)
        if original_home:
            # switch it back
            os.environ['HOME'] = original_home
        return

    # Ensure the keychain is in the search path and unlocked
    # (the return value of add_to_keychain_list is not needed here;
    # we unconditionally remove the keychain from the list at the end)
    add_to_keychain_list(abs_keychain_path)
    unlock_and_set_nonlocking(abs_keychain_path)

    # Add client cert (and optionally key)
    if client_key_path:
        # combine client cert and private key before we import
        cert_data = read_file(client_cert_path)
        key_data = read_file(client_key_path)
        # write the combined data
        combined_pem = os.path.join(munkicommon.tmpdir(), 'combined.pem')
        if write_file(cert_data + key_data, combined_pem):
            munkicommon.display_debug1('Importing client cert and key...')
            try:
                output = security(
                    'import', combined_pem, '-A', '-k', abs_keychain_path)
                if output:
                    munkicommon.display_debug2(output)
            except SecurityError as err:
                munkicommon.display_error(
                    'Could not import %s: %s', combined_pem, err)
            os.unlink(combined_pem)
        else:
            munkicommon.display_error(
                'Could not combine client cert and key for import!')
    else:
        munkicommon.display_debug2('Importing client cert and key...')
        try:
            output = security(
                'import', client_cert_path, '-A', '-k', abs_keychain_path)
            if output:
                munkicommon.display_debug2(output)
        except SecurityError as err:
            munkicommon.display_error(
                'Could not import %s: %s', client_cert_path, err)

    # set up identity preference linking the identity (cert and key)
    # to the site_url
    # First we need to find the existing identity in our keychain
    # FIX: reset output first -- previously a SecurityError from
    # find-identity was swallowed and the check below examined stale
    # output left over from the import step
    output = ''
    try:
        output = security('find-identity', abs_keychain_path)
        if output:
            munkicommon.display_debug2(output)
    except SecurityError:
        pass
    if not ' 1 identities found' in output:
        munkicommon.display_error('No identities found!')
    else:
        # We have a solitary match and can configure / verify
        # the identity preference
        id_hash = re.findall(r'\W+1\)\W+([0-9A-F]+)\W', output)[0]
        # First, check to see if we have an identity already
        create_identity = False
        try:
            output = security(
                'get-identity-preference', '-s', site_url, '-Z')
            if output:
                munkicommon.display_debug2(output)
            # No error, we found an identity
            # Check if it matches the one we want
            current_hash = re.match(
                r'SHA-1 hash:\W+([A-F0-9]+)\W', output).group(1)
            if id_hash != current_hash:
                # We only care if there's a different hash being used.
                # Remove the incorrect one.
                output = security(
                    'set-identity-preference', '-n', '-s', site_url)
                if output:
                    munkicommon.display_debug2(output)
                # Signal that we want to create a new identity preference
                create_identity = True
        except SecurityError:
            # error finding identity-preference
            create_identity = True
        if create_identity:
            # security will only create an identity preference in the
            # default keychain - which means a default has to be
            # defined/selected. For normal users, this is login.keychain -
            # but for root there's no login.keychain and no default keychain
            # configured. So we'll handle the case of no default keychain
            # (just set one) as well as pre-existing default keychain
            # (in which case we set it long enough to create the preference,
            # then set it back)
            munkicommon.display_debug1('Creating identity preference...')
            try:
                output = security('default-keychain')
                if output:
                    munkicommon.display_debug2(output)
                # One is defined, remember the path
                default_keychain = [
                    x.strip().strip('"')
                    for x in output.split('\n') if x.strip()][0]
            except SecurityError:
                # error raised if there is no default
                default_keychain = None
            # Temporarily assign the default keychain to ours
            try:
                output = security(
                    'default-keychain', '-s', abs_keychain_path)
                if output:
                    munkicommon.display_debug2(output)
            except SecurityError as err:
                munkicommon.display_error(
                    'Could not set default keychain to %s failed: %s'
                    % (abs_keychain_path, err))
                default_keychain = None
            # Create the identity preference
            try:
                output = security(
                    'set-identity-preference', '-s', site_url, '-Z',
                    id_hash, abs_keychain_path)
                if output:
                    munkicommon.display_debug2(output)
            except SecurityError as err:
                munkicommon.display_error(
                    'Setting identity preference failed: %s' % err)
            if default_keychain:
                # We originally had a different one, set it back
                output = security(
                    'default-keychain', '-s', default_keychain)
                if output:
                    munkicommon.display_debug2(output)
    # we're done, clean up.
    remove_from_keychain_list(abs_keychain_path)
    if original_home:
        # switch it back
        os.environ['HOME'] = original_home
    munkicommon.display_info(
        'Completed creation of client keychain at %s' % abs_keychain_path)
|
||||
|
||||
|
||||
def add_to_keychain_list(keychain_path):
    '''Ensure the keychain is in the search path. Returns True if we
    added the keychain to the list.'''
    output = security('list-keychains', '-d', 'user')
    # Split the output and strip it of whitespace and leading/trailing
    # quotes, the result are absolute paths to keychains
    # Preserve the order in case we need to append to them
    search_keychains = [x.strip().strip('"')
                        for x in output.split('\n') if x.strip()]
    if keychain_path in search_keychains:
        # already in the search path; nothing to add
        return False
    # Keychain is not in the search paths
    munkicommon.display_debug2('Adding client keychain to search path...')
    search_keychains.append(keychain_path)
    try:
        output = security(
            'list-keychains', '-d', 'user', '-s', *search_keychains)
        if output:
            munkicommon.display_debug2(output)
        return True
    except SecurityError as err:
        munkicommon.display_error(
            'Could not add keychain %s to keychain list: %s',
            keychain_path, err)
        return False
|
||||
|
||||
|
||||
def unlock_and_set_nonlocking(keychain_path):
    '''Unlocks the keychain and sets it to non-locking'''

    def _log_output(text):
        # echo any output from the security tool at debug level 2
        if text:
            munkicommon.display_debug2(text)

    keychain_pass = (
        munkicommon.pref('KeychainPassword') or DEFAULT_KEYCHAIN_PASSWORD)
    try:
        _log_output(security(
            'unlock-keychain', '-p', keychain_pass, keychain_path))
    except SecurityError as err:
        # some problem unlocking the keychain.
        munkicommon.display_error(
            'Could not unlock %s: %s.', keychain_path, err)
        # delete it so a fresh one can be built later
        try:
            os.unlink(keychain_path)
        except OSError as err:
            munkicommon.display_error(
                'Could not remove %s: %s.', keychain_path, err)
        return
    try:
        # no timeout/lock-on-sleep: keychain stays unlocked
        _log_output(security('set-keychain-settings', keychain_path))
    except SecurityError as err:
        munkicommon.display_error(
            'Could not set keychain settings for %s: %s',
            keychain_path, err)
|
||||
|
||||
|
||||
def remove_from_keychain_list(keychain_path):
    '''Remove keychain from the list of keychains'''
    output = security('list-keychains', '-d', 'user')
    # Split the output and strip it of whitespace and leading/trailing
    # quotes, the result are absolute paths to keychains
    # Preserve the order in case we need to append to them
    search_keychains = [x.strip().strip('"')
                        for x in output.split('\n') if x.strip()]
    if keychain_path not in search_keychains:
        # not in the search path; nothing to remove
        return
    munkicommon.display_debug1(
        'Removing %s from search path...', keychain_path)
    filtered_keychains = [keychain for keychain in search_keychains
                          if keychain != keychain_path]
    try:
        output = security(
            'list-keychains', '-d', 'user', '-s', *filtered_keychains)
        if output:
            munkicommon.display_debug2(output)
    except SecurityError as err:
        munkicommon.display_error(
            'Could not set new keychain list: %s', err)
|
||||
|
||||
|
||||
def client_certs_newer_than_keychain():
    '''Returns True if we have client certs that are newer than our
    client keychain, False otherwise'''
    certdata = get_munki_client_cert_data()
    client_cert_path = certdata['client_cert_path']
    client_key_path = certdata['client_key_path']
    keychain_path = get_keychain_path()
    if not client_cert_path or not os.path.exists(client_cert_path):
        # no cert on disk -> nothing can be newer
        return False
    if not os.path.exists(keychain_path):
        return False
    keychain_mod_time = os.stat(keychain_path).st_mtime
    if os.stat(client_cert_path).st_mtime > keychain_mod_time:
        return True
    # FIX: a separate key file is optional -- the original stat()ed
    # client_key_path unconditionally, which raised TypeError/OSError
    # when the key path was None or the file was missing
    if client_key_path and os.path.exists(client_key_path):
        if os.stat(client_key_path).st_mtime > keychain_mod_time:
            return True
    return False
|
||||
|
||||
|
||||
def debug_output():
    '''Debugging output for keychain.

    Dumps the user keychain search list, default keychain info, and --
    if our keychain file exists -- its keychain info, all at debug
    level 1.  A SecurityError from any call aborts the remaining dumps
    and is reported via display_error.'''
    try:
        munkicommon.display_debug1('***Keychain list***')
        munkicommon.display_debug1(security('list-keychains', '-d', 'user'))
        munkicommon.display_debug1('***Default keychain info***')
        munkicommon.display_debug1(security('default-keychain', '-d', 'user'))
        keychainfile = get_keychain_path()
        if os.path.exists(keychainfile):
            munkicommon.display_debug1('***Info for %s***' % keychainfile)
            munkicommon.display_debug1(
                security('show-keychain-info', keychainfile))
    except SecurityError, err:
        munkicommon.display_error(str(err))
|
||||
|
||||
|
||||
class SecurityError(Exception):
    '''An exception class to raise if there is an error running
    /usr/bin/security'''
|
||||
|
||||
|
||||
def security(verb_name, *args):
    '''Runs the security binary with args. Returns stdout.
    Raises SecurityError for a non-zero return code'''
    command = ['/usr/bin/security', verb_name]
    command.extend(args)
    proc = subprocess.Popen(
        command, shell=False, bufsize=-1,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode:
        raise SecurityError('%s: %s' % (proc.returncode, stderr))
    # some security verbs report on stderr even on success
    return stdout or stderr
|
||||
|
||||
|
||||
def get_keychain_path():
|
||||
'''Returns an absolute path for our keychain'''
|
||||
keychain_name = (
|
||||
munkicommon.pref('KeychainName') or DEFAULT_KEYCHAIN_NAME)
|
||||
# If we have an odd path that appears to be all directory and no
|
||||
# file name, revert to default filename
|
||||
if not os.path.basename(keychain_name):
|
||||
keychain_name = DEFAULT_KEYCHAIN_NAME
|
||||
# Check to make sure it's just a simple file name, no directory
|
||||
# information
|
||||
if os.path.dirname(keychain_name):
|
||||
# keychain name should be just the filename,
|
||||
# so we'll drop down to the base name
|
||||
keychain_name = os.path.basename(
|
||||
keychain_name).strip() or DEFAULT_KEYCHAIN_NAME
|
||||
# Correct the filename to include '.keychain' if not already present
|
||||
if not keychain_name.lower().endswith('.keychain'):
|
||||
keychain_name += '.keychain'
|
||||
keychain_path = os.path.realpath(
|
||||
os.path.join(KEYCHAIN_DIRECTORY, keychain_name))
|
||||
return keychain_path
|
||||
|
||||
|
||||
class MunkiKeychain(object):
|
||||
'''Wrapper class for handling the client keychain'''
|
||||
|
||||
keychain_path = None
|
||||
added_keychain = False
|
||||
|
||||
def __init__(self):
|
||||
'''Adds CA certs as trusted to System keychain.
|
||||
Unlocks the munki.keychain if it exists.
|
||||
Makes sure the munki.keychain is in the search list.
|
||||
Creates a new client keychain if needed.'''
|
||||
add_ca_certs_to_system_keychain()
|
||||
self.keychain_path = get_keychain_path()
|
||||
if client_certs_newer_than_keychain():
|
||||
# updated client certs; we should build a new keychain
|
||||
os.unlink(self.keychain_path)
|
||||
if os.path.exists(self.keychain_path):
|
||||
# ensure existing keychain is available for use
|
||||
self.added_keychain = add_to_keychain_list(self.keychain_path)
|
||||
unlock_and_set_nonlocking(self.keychain_path)
|
||||
if not os.path.exists(self.keychain_path):
|
||||
# try making a new keychain
|
||||
make_client_keychain()
|
||||
if os.path.exists(self.keychain_path):
|
||||
self.added_keychain = add_to_keychain_list(self.keychain_path)
|
||||
unlock_and_set_nonlocking(self.keychain_path)
|
||||
if not os.path.exists(self.keychain_path):
|
||||
# give up
|
||||
self.keychain_path = None
|
||||
self.added_keychain = False
|
||||
|
||||
def __del__(self):
|
||||
'''Remove our keychain from the keychain list if we added it'''
|
||||
if self.added_keychain:
|
||||
remove_from_keychain_list(self.keychain_path)
|
||||
@@ -42,7 +42,7 @@ class Job(object):
|
||||
'''launchd job object'''
|
||||
|
||||
def __init__(self, cmd, environment_vars=None):
|
||||
tmpdir = munkicommon.tmpdir
|
||||
tmpdir = munkicommon.tmpdir()
|
||||
LABELPREFIX = 'com.googlecode.munki.'
|
||||
# create a unique id for this job
|
||||
jobid = str(uuid.uuid1())
|
||||
|
||||
@@ -831,8 +831,8 @@ def DMGisWritable(dmgpath):
|
||||
if pliststr:
|
||||
try:
|
||||
plist = FoundationPlist.readPlistFromString(pliststr)
|
||||
format = plist.get('Format')
|
||||
if format in ['UDSB', 'UDSP', 'UDRW', 'RdWr']:
|
||||
dmg_format = plist.get('Format')
|
||||
if dmg_format in ['UDSB', 'UDSP', 'UDRW', 'RdWr']:
|
||||
return True
|
||||
except FoundationPlist.NSPropertyListSerializationException:
|
||||
pass
|
||||
@@ -1514,7 +1514,7 @@ def getFlatPackageInfo(pkgpath):
|
||||
# get the absolute path to the pkg because we need to do a chdir later
|
||||
abspkgpath = os.path.abspath(pkgpath)
|
||||
# make a tmp dir to expand the flat package into
|
||||
pkgtmp = tempfile.mkdtemp(dir=tmpdir)
|
||||
pkgtmp = tempfile.mkdtemp(dir=tmpdir())
|
||||
# record our current working dir
|
||||
cwd = os.getcwd()
|
||||
# change into our tmpdir so we can use xar to unarchive the flat package
|
||||
@@ -2483,16 +2483,24 @@ def getAvailableDiskSpace(volumepath='/'):
|
||||
return int(st.f_frsize * st.f_bavail / 1024) # f_bavail matches df(1) output
|
||||
|
||||
|
||||
def tmpdir():
|
||||
'''Returns a temporary directory for this session'''
|
||||
global _TMPDIR
|
||||
if not _TMPDIR:
|
||||
_TMPDIR = tempfile.mkdtemp(prefix='munki-', dir='/tmp')
|
||||
return _TMPDIR
|
||||
|
||||
|
||||
def cleanUpTmpDir():
|
||||
"""Cleans up our temporary directory."""
|
||||
global tmpdir
|
||||
if tmpdir:
|
||||
global _TMPDIR
|
||||
if _TMPDIR:
|
||||
try:
|
||||
shutil.rmtree(tmpdir)
|
||||
shutil.rmtree(_TMPDIR)
|
||||
except (OSError, IOError):
|
||||
display_warning(
|
||||
'Unable to clean up temporary dir %s: %s', tmpdir, str(e))
|
||||
tmpdir = None
|
||||
'Unable to clean up temporary dir %s: %s', _TMPDIR, str(e))
|
||||
_TMPDIR = None
|
||||
|
||||
|
||||
def listdir(path):
|
||||
@@ -2602,7 +2610,7 @@ def runEmbeddedScript(scriptname, pkginfo_item, suppress_error=False):
|
||||
return -1
|
||||
|
||||
# write the script to a temp file
|
||||
scriptpath = os.path.join(tmpdir, scriptname)
|
||||
scriptpath = os.path.join(tmpdir(), scriptname)
|
||||
if writefile(script_text, scriptpath):
|
||||
cmd = ['/bin/chmod', '-R', 'o+x', scriptpath]
|
||||
retcode = subprocess.call(cmd)
|
||||
@@ -2728,7 +2736,7 @@ def blockingApplicationsRunning(pkginfoitem):
|
||||
#debug = False
|
||||
verbose = 1
|
||||
munkistatusoutput = False
|
||||
tmpdir = tempfile.mkdtemp(prefix='munki-', dir='/tmp')
|
||||
_TMPDIR = None
|
||||
report = {}
|
||||
|
||||
def main():
|
||||
|
||||
@@ -33,6 +33,7 @@ from OpenSSL.crypto import load_certificate, FILETYPE_PEM
|
||||
|
||||
# our libs
|
||||
import fetch
|
||||
import keychain
|
||||
import munkicommon
|
||||
import munkistatus
|
||||
import appleupdates
|
||||
@@ -1643,7 +1644,7 @@ def updateAvailableLicenseSeats(installinfo):
|
||||
munkicommon.display_debug1('Got: %s', license_data)
|
||||
license_dict = FoundationPlist.readPlistFromString(
|
||||
license_data)
|
||||
except (fetch.MunkiDownloadError, fetch.CurlDownloadError), err:
|
||||
except (fetch.MunkiDownloadError, fetch.GurlDownloadError), err:
|
||||
# problem fetching from URL
|
||||
munkicommon.display_error('Error from %s: %s', url, err)
|
||||
except FoundationPlist.FoundationPlistException:
|
||||
@@ -1918,7 +1919,7 @@ def processInstall(manifestitem, cataloglist, installinfo):
|
||||
iteminfo['note'] = 'Integrity check failed'
|
||||
installinfo['managed_installs'].append(iteminfo)
|
||||
return False
|
||||
except fetch.CurlDownloadError, errmsg:
|
||||
except fetch.GurlDownloadError, errmsg:
|
||||
munkicommon.display_warning(
|
||||
'Download of %s failed: %s', manifestitem, errmsg)
|
||||
iteminfo['installed'] = False
|
||||
@@ -2286,10 +2287,7 @@ def processRemoval(manifestitem, cataloglist, installinfo):
|
||||
iteminfo = {}
|
||||
iteminfo['name'] = uninstall_item.get('name', '')
|
||||
iteminfo['display_name'] = uninstall_item.get('display_name', '')
|
||||
iteminfo['description'] = uninstall_item.get(
|
||||
'uninstall_description',
|
||||
'Will be removed.'
|
||||
)
|
||||
iteminfo['description'] = 'Will be removed.'
|
||||
|
||||
# we will ignore the unattended_uninstall key if the item needs a restart
|
||||
# or logout...
|
||||
@@ -2854,6 +2852,8 @@ def check(client_id='', localmanifestpath=None):
|
||||
munkicommon.getConditions()
|
||||
CONDITIONS = munkicommon.getConditions()
|
||||
|
||||
keychain_obj = keychain.MunkiKeychain()
|
||||
|
||||
ManagedInstallDir = munkicommon.pref('ManagedInstallDir')
|
||||
if munkicommon.munkistatusoutput:
|
||||
munkistatus.activate()
|
||||
@@ -3322,7 +3322,7 @@ def getDataFromURL(url):
|
||||
'''Returns data from url as string. We use the existing
|
||||
getResourceIfChangedAtomically function so any custom
|
||||
authentication/authorization headers are reused'''
|
||||
urldata = os.path.join(munkicommon.tmpdir, 'urldata')
|
||||
urldata = os.path.join(munkicommon.tmpdir(), 'urldata')
|
||||
if os.path.exists(urldata):
|
||||
try:
|
||||
os.unlink(urldata)
|
||||
@@ -3351,39 +3351,39 @@ def getResourceIfChangedAtomically(url,
|
||||
exists, and adds any additional headers'''
|
||||
|
||||
ManagedInstallDir = munkicommon.pref('ManagedInstallDir')
|
||||
# get server CA cert if it exists so we can verify the munki server
|
||||
ca_cert_path = None
|
||||
ca_dir_path = None
|
||||
if munkicommon.pref('SoftwareRepoCAPath'):
|
||||
CA_path = munkicommon.pref('SoftwareRepoCAPath')
|
||||
if os.path.isfile(CA_path):
|
||||
ca_cert_path = CA_path
|
||||
elif os.path.isdir(CA_path):
|
||||
ca_dir_path = CA_path
|
||||
if munkicommon.pref('SoftwareRepoCACertificate'):
|
||||
ca_cert_path = munkicommon.pref('SoftwareRepoCACertificate')
|
||||
if ca_cert_path == None:
|
||||
ca_cert_path = os.path.join(ManagedInstallDir, 'certs', 'ca.pem')
|
||||
if not os.path.exists(ca_cert_path):
|
||||
ca_cert_path = None
|
||||
## get server CA cert if it exists so we can verify the munki server
|
||||
#ca_cert_path = None
|
||||
#ca_dir_path = None
|
||||
#if munkicommon.pref('SoftwareRepoCAPath'):
|
||||
# CA_path = munkicommon.pref('SoftwareRepoCAPath')
|
||||
# if os.path.isfile(CA_path):
|
||||
# ca_cert_path = CA_path
|
||||
# elif os.path.isdir(CA_path):
|
||||
# ca_dir_path = CA_path
|
||||
#if munkicommon.pref('SoftwareRepoCACertificate'):
|
||||
# ca_cert_path = munkicommon.pref('SoftwareRepoCACertificate')
|
||||
#if ca_cert_path == None:
|
||||
# ca_cert_path = os.path.join(ManagedInstallDir, 'certs', 'ca.pem')
|
||||
# if not os.path.exists(ca_cert_path):
|
||||
# ca_cert_path = None
|
||||
|
||||
client_cert_path = None
|
||||
client_key_path = None
|
||||
# get client cert if it exists
|
||||
if munkicommon.pref('UseClientCertificate'):
|
||||
client_cert_path = munkicommon.pref('ClientCertificatePath') or None
|
||||
client_key_path = munkicommon.pref('ClientKeyPath') or None
|
||||
if not client_cert_path:
|
||||
for name in ['cert.pem', 'client.pem', 'munki.pem']:
|
||||
client_cert_path = os.path.join(ManagedInstallDir, 'certs',
|
||||
name)
|
||||
if os.path.exists(client_cert_path):
|
||||
break
|
||||
cert_info = {}
|
||||
cert_info['cacert'] = ca_cert_path
|
||||
cert_info['capath'] = ca_dir_path
|
||||
cert_info['cert'] = client_cert_path
|
||||
cert_info['key'] = client_key_path
|
||||
#client_cert_path = None
|
||||
#client_key_path = None
|
||||
## get client cert if it exists
|
||||
#if munkicommon.pref('UseClientCertificate'):
|
||||
# client_cert_path = munkicommon.pref('ClientCertificatePath') or None
|
||||
# client_key_path = munkicommon.pref('ClientKeyPath') or None
|
||||
# if not client_cert_path:
|
||||
# for name in ['cert.pem', 'client.pem', 'munki.pem']:
|
||||
# client_cert_path = os.path.join(ManagedInstallDir, 'certs',
|
||||
# name)
|
||||
# if os.path.exists(client_cert_path):
|
||||
# break
|
||||
#cert_info = {}
|
||||
#cert_info['cacert'] = ca_cert_path
|
||||
#cert_info['capath'] = ca_dir_path
|
||||
#cert_info['cert'] = client_cert_path
|
||||
#cert_info['key'] = client_key_path
|
||||
|
||||
# Add any additional headers specified in ManagedInstalls.plist.
|
||||
# AdditionalHttpHeaders must be an array of strings with valid HTTP
|
||||
@@ -3398,7 +3398,6 @@ def getResourceIfChangedAtomically(url,
|
||||
|
||||
return fetch.getResourceIfChangedAtomically(url,
|
||||
destinationpath,
|
||||
cert_info=cert_info,
|
||||
custom_headers=custom_headers,
|
||||
expected_hash=expected_hash,
|
||||
message=message,
|
||||
|
||||
Reference in New Issue
Block a user