New attempt to use NSURLConnection + keychain to replace curl

This commit is contained in:
Greg Neagle
2014-10-15 16:39:38 -07:00
parent 55ee712d04
commit e232c1b955
6 changed files with 1045 additions and 376 deletions
+300
View File
@@ -0,0 +1,300 @@
#!/usr/bin/python
# encoding: utf-8
#
# Copyright 2014 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
keychain
Created by Greg Neagle on 2014-10-15.
Incorporating work and ideas from Michael Lynn here:
https://gist.github.com/pudquick/7704254
"""
import optparse
import os
import re
import stat
import subprocess
import sys
from munkilib import keychain
from munkilib import munkicommon
DEFAULT_KEYCHAIN_PASSWORD = 'munki'
def read_file(pathname):
    '''Return the contents of pathname as a string.

    Logs an error via munkicommon and returns an empty string if the
    file cannot be opened or read.'''
    try:
        # use a context manager so the file handle is closed even if
        # read() raises; the original open/read/close leaked on error
        with open(pathname, mode='r') as fileobj:
            return fileobj.read()
    except (OSError, IOError) as err:
        munkicommon.display_error(
            'Could not read %s: %s' % (pathname, err))
        return ''
def write_file(stringdata, pathname):
    '''Writes stringdata to pathname.

    Returns the pathname on success, empty string on failure
    (after logging an error via munkicommon).'''
    try:
        # context manager guarantees the handle is flushed and closed
        # even if write() raises partway through
        with open(pathname, mode='w') as fileobject:
            fileobject.write(stringdata)
        return pathname
    except (OSError, IOError) as err:
        munkicommon.display_error("Couldn't write %s to %s: %s"
                                  % (stringdata, pathname, err))
        return ""
def make_keychain(ca_cert_path=None, ca_dir_path=None,
                  client_cert_path=None, client_key_path=None,
                  site_url=None):
    '''Builds a keychain for use by managedsoftwareupdate.

    Args:
        ca_cert_path: path to a single CA certificate file to trust.
        ca_dir_path: path to a directory of .pem CA certificates.
        client_cert_path: path to a client certificate to import.
        client_key_path: path to the client private key, if separate
            from the certificate file.
        site_url: URL the client identity should be preferred for;
            defaults to the SoftwareRepoURL preference.

    Destroys and recreates the keychain at keychain.get_keychain_path(),
    then imports the CA/client cert material and records an identity
    preference tying the client identity to site_url.
    NOTE(review): relies on the macOS `security` command via
    keychain.security(); presumably must run as root — confirm.'''
    our_keychain = None
    if (not ca_cert_path and not ca_dir_path and
            not client_cert_path and not client_key_path):
        # no existing CA/cert info, so nothing to do
        munkicommon.display_info(
            'No cert info provided, so no keychain will be created.')
        return
    else:
        munkicommon.display_info('CA cert path: %s', ca_cert_path)
        munkicommon.display_info('CA dir path: %s', ca_dir_path)
        munkicommon.display_info('Client cert path: %s', client_cert_path)
        munkicommon.display_info('Client key path: %s', client_key_path)
    # keychain password: admin-configurable, with a well-known default
    keychain_pass = (
        munkicommon.pref('KeychainPassword') or DEFAULT_KEYCHAIN_PASSWORD)
    abs_keychain_path = keychain.get_keychain_path()
    # start from scratch: remove any pre-existing keychain, make sure
    # the containing directory exists
    if os.path.exists(abs_keychain_path):
        os.unlink(abs_keychain_path)
    if not os.path.exists(os.path.dirname(abs_keychain_path)):
        os.makedirs(os.path.dirname(abs_keychain_path))
    # create a new keychain
    munkicommon.display_info('Creating keychain...')
    output = keychain.security(
        'create-keychain', '-p', keychain_pass, abs_keychain_path)
    # Ensure the keychain is in the search path and unlocked
    our_keychain = keychain.MunkiKeychain()
    # Add CA certs
    if ca_cert_path:
        munkicommon.display_info('Adding CA cert...')
        output = keychain.security(
            'add-trusted-cert', '-d', '-k', abs_keychain_path, ca_cert_path)
    if ca_dir_path:
        # import any pem files in the ca_dir_path directory
        for item in os.listdir(ca_dir_path):
            if item.endswith('.pem'):
                cert_path = os.path.join(ca_dir_path, item)
                munkicommon.display_info('Adding CA cert %s...' % cert_path)
                output = keychain.security('add-trusted-cert', '-d',
                                           '-k', abs_keychain_path, cert_path)
    # Add client cert (and optionally key)
    if client_cert_path:
        if client_key_path:
            # combine client cert and private key before we import,
            # since `security import` takes a single file
            cert_data = read_file(client_cert_path)
            key_data = read_file(client_key_path)
            # write the combined data
            combined_pem = os.path.join('/tmp', 'combined.pem')
            if write_file(cert_data + key_data, combined_pem):
                munkicommon.display_info('Importing client cert and key...')
                output = keychain.security(
                    'import', combined_pem, '-A', '-k', abs_keychain_path)
                # don't leave key material lying around in /tmp
                os.unlink(combined_pem)
        else:
            munkicommon.display_info('Importing client cert and key...')
            output = keychain.security(
                'import', client_cert_path, '-A', '-k', abs_keychain_path)
        # set up identity preference linking the identity (cert and key)
        # to the site_url
        if not site_url:
            site_url = munkicommon.pref('SoftwareRepoURL').rstrip('/')
        # Set up an identity if it doesn't exist already for our site
        # First we need to find the existing identity in our keychain
        output = keychain.security('find-identity', abs_keychain_path)
        if not ' 1 identities found' in output:
            # zero or multiple identities: we can't pick one safely
            munkicommon.display_error('No identities found!')
        else:
            # We have a solitary match and can configure / verify
            # the identity preference
            id_hash = re.findall(r'\W+1\)\W+([0-9A-F]+)\W', output)[0]
            # First, check to see if we have an identity already
            create_identity = False
            try:
                output = keychain.security(
                    'get-identity-preference', '-s', site_url, '-Z')
                # No error, we found an identity
                # Check if it matches the one we want
                current_hash = re.match(
                    r'SHA-1 hash:\W+([A-F0-9]+)\W', output).group(1)
                if id_hash != current_hash:
                    # We only care if there's a different hash being used.
                    # Remove the incorrect one.
                    output = keychain.security(
                        'set-identity-preference', '-n', '-s', site_url)
                    # Signal that we want to create a new identity preference
                    create_identity = True
            except keychain.SecurityError, err:
                # error finding identity-preference
                create_identity = True
                #elif id_hash not in output:
                #    # Non-zero error code and hash not detected in output
                #    # Signal that we want to create a new identity preference
                #    create_identity = True
            if create_identity:
                # This code was moved into a common block that both routes could
                # access as it's a little complicated.
                # security will only create an identity preference in the
                # default keychain - which means a default has to be
                # defined/selected. For normal users, this is login.keychain -
                # but for root there's no login.keychain and no default keychain
                # configured. So we'll handle the case of no default keychain
                # (just set one) as well as pre-existing default keychain
                # (in which case we set it long enough to create the preference,
                # then set it back)
                munkicommon.display_info('Creating identity preference...')
                try:
                    output = keychain.security('default-keychain')
                    # One is defined, remember the path
                    default_keychain = [
                        x.strip().strip('"')
                        for x in output.split('\n') if x.strip()][0]
                except keychain.SecurityError, err:
                    # error raised if there is no default
                    default_keychain = None
                # Temporarily assign the default keychain to ours
                try:
                    output = keychain.security(
                        'default-keychain', '-s', abs_keychain_path)
                except keychain.SecurityError, err:
                    munkicommon.display_error(
                        'Could not set default keychain to %s failed: %s'
                        % (abs_keychain_path, err))
                    default_keychain = None
                # Create the identity preference
                try:
                    output = keychain.security(
                        'set-identity-preference', '-s', site_url, '-Z',
                        id_hash, abs_keychain_path)
                except keychain.SecurityError, err:
                    munkicommon.display_error(
                        'Setting identity preference failed: %s' % err)
                if default_keychain:
                    # We originally had a different one, set it back
                    output = keychain.security(
                        'default-keychain', '-s', default_keychain)
    if our_keychain:
        # remove it from the keychain list
        our_keychain.remove_from_search_list()
    munkicommon.display_info(
        'Completed creation of keychain at %s' % abs_keychain_path)
def get_munki_cert_data():
    '''Finds existing cert/CA info from Munki preferences and the
    conventional locations under ManagedInstallDir.

    Returns:
        A dict with keys 'ca_cert_path', 'ca_dir_path',
        'client_cert_path' and 'client_key_path'; each value is a
        filesystem path or None if nothing was found.'''
    managed_install_dir = munkicommon.pref('ManagedInstallDir')
    cert_data = {
        'ca_cert_path': None,
        'ca_dir_path': None,
        'client_cert_path': None,
        'client_key_path': None,
    }
    # get server CA cert if it exists so we can verify the munki server
    ca_path = munkicommon.pref('SoftwareRepoCAPath')
    if ca_path:
        # SoftwareRepoCAPath may point at a single cert file or a
        # directory of certs
        if os.path.isfile(ca_path):
            cert_data['ca_cert_path'] = ca_path
        elif os.path.isdir(ca_path):
            cert_data['ca_dir_path'] = ca_path
    if munkicommon.pref('SoftwareRepoCACertificate'):
        # an explicit cert preference overrides SoftwareRepoCAPath
        cert_data['ca_cert_path'] = munkicommon.pref(
            'SoftwareRepoCACertificate')
    if cert_data['ca_cert_path'] is None:
        # fall back to the conventional on-disk location
        ca_cert_path = os.path.join(managed_install_dir, 'certs', 'ca.pem')
        if os.path.exists(ca_cert_path):
            cert_data['ca_cert_path'] = ca_cert_path
    # get client cert if it exists
    if munkicommon.pref('UseClientCertificate'):
        cert_data['client_cert_path'] = (
            munkicommon.pref('ClientCertificatePath') or None)
        cert_data['client_key_path'] = (
            munkicommon.pref('ClientKeyPath') or None)
        if not cert_data['client_cert_path']:
            # no explicit path: look in the conventional locations
            for name in ['cert.pem', 'client.pem', 'munki.pem']:
                client_cert_path = os.path.join(
                    managed_install_dir, 'certs', name)
                if os.path.exists(client_cert_path):
                    cert_data['client_cert_path'] = client_cert_path
                    break
    return cert_data
if __name__ == '__main__':
    '''Main routine'''
    # parse command-line options; each option overrides the
    # corresponding value discovered from Munki preferences
    usage = "usage: %prog [options]"
    p = optparse.OptionParser(usage=usage)
    p.add_option('--ca_cert_path', help='Path to a CA cert.')
    p.add_option('--ca_dir_path', help='Path to a directory of CA certs.')
    p.add_option('--client_cert_path', help='Path to a client cert.')
    p.add_option('--client_key_path', help='Path to a client key.')
    p.add_option('--site_url',
                 help='Site URL. (Generally the root URL of the munki repo.)')
    options, arguments = p.parse_args()
    # check to see if we're root
    if os.geteuid() != 0:
        print >> sys.stderr, 'You must run this as root!'
        exit(munkicommon.EXIT_STATUS_ROOT_REQUIRED)
    # a plain `sudo` keeps the invoking user's HOME, which would make
    # keychain operations act on the wrong user's keychains
    if os.environ['HOME'] != '/var/root':
        print >> sys.stderr, (
            'SORRY! '
            'You must actually login as root, or use `sudo su` to inherit '
            'root\'s environment before running this tool. '
            '`sudo` or `sudo -s` is not sufficient.')
        exit(munkicommon.EXIT_STATUS_ROOT_REQUIRED)
    # start from whatever cert/CA info Munki already knows about
    cert_data = get_munki_cert_data()
    # command-line options override what we find from Munki
    if options.ca_cert_path:
        cert_data['ca_cert_path'] = options.ca_cert_path
    if options.ca_dir_path:
        cert_data['ca_dir_path'] = options.ca_dir_path
    if options.client_cert_path:
        cert_data['client_cert_path'] = options.client_cert_path
    if options.client_key_path:
        cert_data['client_key_path'] = options.client_key_path
    site_url = None
    if options.site_url:
        site_url = options.site_url
    # build the keychain from the merged cert/CA info
    make_keychain(ca_cert_path=cert_data['ca_cert_path'],
                  ca_dir_path=cert_data['ca_dir_path'],
                  client_cert_path=cert_data['client_cert_path'],
                  client_key_path=cert_data['client_key_path'],
                  site_url=site_url)
+3 -4
View File
@@ -48,6 +48,7 @@ from LaunchServices import LSFindApplicationForInfo
import FoundationPlist
import fetch
import keychain
import launchd
import munkicommon
import munkistatus
@@ -306,6 +307,7 @@ class AppleUpdates(object):
"User-Agent: managedsoftwareupdate/%s Darwin/%s (%s) (%s)"
% (machine['munki_version'], darwin_version,
machine['arch'], machine['machine_model']))
keychain_obj = keychain.MunkiKeychain()
return fetch.getResourceIfChangedAtomically(
url,
destinationpath,
@@ -768,13 +770,10 @@ class AppleUpdates(object):
self.CacheAppleCatalog()
except CatalogNotFoundError:
return False
except ReplicationError, err:
except (ReplicationError, fetch.MunkiDownloadError), err:
munkicommon.display_warning('Could not download Apple SUS catalog:')
munkicommon.display_warning('\t%s', str(err))
return False
except fetch.MunkiDownloadError:
munkicommon.display_warning('Could not download Apple SUS catalog.')
return False
if not force_check and not self._IsForceCheckNeccessary(before_hash):
munkicommon.display_info('Skipping Apple Software Update check '
+133 -337
View File
@@ -34,9 +34,13 @@ import urlparse
import xattr
#our libs
import keychain
import munkicommon
from gurl import Gurl
#import munkistatus
from Foundation import NSHTTPURLResponse
# XATTR name storing the ETAG of the file when downloaded via http(s).
XATTR_ETAG = 'com.googlecode.munki.etag'
@@ -44,7 +48,7 @@ XATTR_ETAG = 'com.googlecode.munki.etag'
XATTR_SHA = 'com.googlecode.munki.sha256'
class CurlError(Exception):
class GurlError(Exception):
pass
class HTTPError(Exception):
@@ -54,8 +58,8 @@ class MunkiDownloadError(Exception):
"""Base exception for download errors"""
pass
class CurlDownloadError(MunkiDownloadError):
"""Curl failed to download the item"""
class GurlDownloadError(MunkiDownloadError):
"""Gurl failed to download the item"""
pass
class FileCopyError(MunkiDownloadError):
@@ -87,349 +91,144 @@ def writeCachedChecksum(file_path, fhash=None):
return None
WARNINGSLOGGED = {}
def curl(url, destinationpath,
cert_info=None, custom_headers=None, donotrecurse=False, etag=None,
message=None, onlyifnewer=False, resume=False, follow_redirects=False):
def header_dict_from_list(array):
    """Convert a list of 'Name: value' HTTP header strings into a dict.

    Entries that don't look like 'name: value' are silently skipped.
    If array is None, returns None."""
    if array is None:
        return None
    headers = {}
    for line in array:
        name, separator, value = line.partition(':')
        if separator and value:
            headers[name.strip()] = value.strip()
    return headers
def get_url(url, destinationpath,
custom_headers=None, message=None, onlyifnewer=False,
resume=False, follow_redirects=False):
"""Gets an HTTP or HTTPS URL and stores it in
destination path. Returns a dictionary of headers, which includes
http_result_code and http_result_description.
Will raise CurlError if curl returns an error.
Will raise CurlError if Gurl returns an error.
Will raise HTTPError if HTTP Result code is not 2xx or 304.
If destinationpath already exists, you can set 'onlyifnewer' to true to
indicate you only want to download the file only if it's newer on the
server.
If you have an ETag from the current destination path, you can pass that
to download the file only if it is different.
Finally, if you set resume to True, curl will attempt to resume an
interrupted download. You'll get an error if the existing file is
complete; if the file has changed since the first download attempt, you'll
get a mess."""
If you set resume to True, Gurl will attempt to resume an
interrupted download."""
header = {}
header['http_result_code'] = '000'
header['http_result_description'] = ''
curldirectivepath = os.path.join(munkicommon.tmpdir, 'curl_temp')
tempdownloadpath = destinationpath + '.download'
if os.path.exists(tempdownloadpath) and not resume:
if resume and not os.path.exists(destinationpath):
os.remove(tempdownloadpath)
# we're writing all the curl options to a file and passing that to
# curl so we avoid the problem of URLs showing up in a process listing
cache_data = None
if onlyifnewer and os.path.exists(destinationpath):
# create a temporary Gurl object so we can extract the
# stored caching data so we can download only if the
# file has changed on the server
gurl_obj = Gurl.alloc().initWithOptions_({'file': destinationpath})
cache_data = gurl_obj.get_stored_headers()
del gurl_obj
options = {'url': url,
'file': tempdownloadpath,
'follow_redirects': follow_redirects,
'can_resume': resume,
'additional_headers': header_dict_from_list(custom_headers),
'download_only_if_changed': onlyifnewer,
'cache_data': cache_data,
'logging_function': munkicommon.display_debug2}
munkicommon.display_debug2('Options: %s' % options)
connection = Gurl.alloc().initWithOptions_(options)
stored_percent_complete = -1
stored_bytes_received = 0
connection.start()
try:
fileobj = open(curldirectivepath, mode='w')
print >> fileobj, 'silent' # no progress meter
print >> fileobj, 'show-error' # print error msg to stderr
print >> fileobj, 'no-buffer' # don't buffer output
print >> fileobj, 'fail' # throw error if download fails
print >> fileobj, 'dump-header -' # dump headers to stdout
print >> fileobj, 'speed-time = 30' # give up if too slow d/l
print >> fileobj, 'output = "%s"' % tempdownloadpath
print >> fileobj, 'ciphers = HIGH,!ADH' #use only secure >=128 bit SSL
print >> fileobj, 'url = "%s"' % url
munkicommon.display_debug2('follow_redirects is %s', follow_redirects)
if follow_redirects:
print >> fileobj, 'location' # follow redirects
if cert_info:
cacert = cert_info.get('cacert')
capath = cert_info.get('capath')
cert = cert_info.get('cert')
key = cert_info.get('key')
if cacert:
if not os.path.isfile(cacert):
raise CurlError(-1, 'No CA cert at %s' % cacert)
print >> fileobj, 'cacert = "%s"' % cacert
if capath:
if not os.path.isdir(capath):
raise CurlError(-2, 'No CA directory at %s' % capath)
print >> fileobj, 'capath = "%s"' % capath
if cert:
if not os.path.isfile(cert):
raise CurlError(-3, 'No client cert at %s' % cert)
print >> fileobj, 'cert = "%s"' % cert
if key:
if not os.path.isfile(key):
raise CurlError(-4, 'No client key at %s' % key)
print >> fileobj, 'key = "%s"' % key
if os.path.exists(destinationpath):
if etag:
escaped_etag = etag.replace('"','\\"')
print >> fileobj, ('header = "If-None-Match: %s"'
% escaped_etag)
elif onlyifnewer:
print >> fileobj, 'time-cond = "%s"' % destinationpath
else:
os.remove(destinationpath)
if os.path.exists(tempdownloadpath):
if resume and not os.path.exists(destinationpath):
# let's try to resume this download
print >> fileobj, 'continue-at -'
# if an existing etag, only resume if etags still match.
tempetag = getxattr(tempdownloadpath, XATTR_ETAG)
if tempetag:
# Note: If-Range is more efficient, but the response
# confuses curl (Error: 33 if etag not match).
escaped_etag = tempetag.replace('"','\\"')
print >> fileobj, ('header = "If-Match: %s"'
% escaped_etag)
else:
os.remove(tempdownloadpath)
# Add any additional headers specified in custom_headers
# custom_headers must be an array of strings with valid HTTP
# header format.
if custom_headers:
for custom_header in custom_headers:
custom_header = custom_header.strip().encode('utf-8')
if re.search(r'^[\w-]+:.+', custom_header):
print >> fileobj, ('header = "%s"' % custom_header)
else:
munkicommon.display_warning(
'Skipping invalid HTTP header: %s' % custom_header)
fileobj.close()
except Exception, e:
raise CurlError(-5, 'Error writing curl directive: %s' % str(e))
# In Mavericks we need to wrap our call to curl with a utility
# that makes curl think it is connected to a tty-like
# device so its output is unbuffered so we can get progress info
cmd = []
minor_os_version = munkicommon.getOsVersion(as_tuple=True)[1]
if minor_os_version > 8:
# Try to find our ptyexec tool
# first look in the parent directory of this file's directory
# (../)
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
ptyexec_path = os.path.join(parent_dir, 'ptyexec')
if not os.path.exists(ptyexec_path):
# try absolute path in munki's normal install dir
ptyexec_path = '/usr/local/munki/ptyexec'
if os.path.exists(ptyexec_path):
cmd = [ptyexec_path]
# Workaround for current issue in OS X 10.9's included curl
# Allows for alternate curl binary path as Apple's included curl currently
# broken for client-side certificate usage
curl_path = munkicommon.pref('CurlPath') or '/usr/bin/curl'
cmd.extend([curl_path,
'-q', # don't read .curlrc file
'--config', # use config file
curldirectivepath])
proc = subprocess.Popen(cmd, shell=False, bufsize=1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
targetsize = 0
downloadedpercent = -1
donewithheaders = False
maxheaders = 15
while True:
if not donewithheaders:
info = proc.stdout.readline().strip('\r\n')
if info:
munkicommon.display_debug2(info)
if info.startswith('HTTP/'):
header['http_result_code'] = info.split(None, 2)[1]
header['http_result_description'] = info.split(None, 2)[2]
elif ': ' in info:
part = info.split(None, 1)
fieldname = part[0].rstrip(':').lower()
header[fieldname] = part[1]
else:
# we got an empty line; end of headers (or curl exited)
if follow_redirects:
if header.get('http_result_code') in ['301', '302', '303']:
# redirect, so more headers are coming.
# Throw away the headers we've received so far
header = {}
header['http_result_code'] = '000'
header['http_result_description'] = ''
else:
donewithheaders = True
try:
# Prefer Content-Length header to determine download
# size, otherwise fall back to a custom X-Download-Size
# header.
# This is primary for servers that use chunked transfer
# encoding, when Content-Length is forbidden by
# RFC2616 4.4. An example of such a server is
# Google App Engine Blobstore.
targetsize = (
header.get('content-length') or
header.get('x-download-size'))
targetsize = int(targetsize)
except (ValueError, TypeError):
targetsize = 0
if header.get('http_result_code') == '206':
# partial content because we're resuming
munkicommon.display_detail(
'Resuming partial download for %s' %
os.path.basename(destinationpath))
contentrange = header.get('content-range')
if contentrange.startswith('bytes'):
try:
targetsize = int(contentrange.split('/')[1])
except (ValueError, TypeError):
targetsize = 0
if message and header.get('http_result_code') != '304':
if message:
# log always, display if verbose is 1 or more
# also display in MunkiStatus detail field
munkicommon.display_status_minor(message)
elif targetsize and header.get('http_result_code').startswith('2'):
# display progress if we get a 2xx result code
if os.path.exists(tempdownloadpath):
downloadedsize = os.path.getsize(tempdownloadpath)
percent = int(float(downloadedsize)
/float(targetsize)*100)
if percent != downloadedpercent:
# percent changed; update display
downloadedpercent = percent
munkicommon.display_percent_done(downloadedpercent, 100)
time.sleep(0.1)
else:
# Headers have finished, but not targetsize or HTTP2xx.
# It's possible that Content-Length was not in the headers.
# so just sleep and loop again. We can't show progress.
time.sleep(0.1)
if (proc.poll() != None):
# For small download files curl may exit before all headers
# have been parsed, don't immediately exit.
maxheaders -= 1
if donewithheaders or maxheaders <= 0:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages and displaying percentages if we exit the loop first
connection_done = connection.isDone()
if message and connection.status and connection.status != 304:
# log always, display if verbose is 1 or more
# also display in MunkiStatus detail field
munkicommon.display_status_minor(message)
# now clear message so we don't display it again
message = None
if (str(connection.status).startswith('2')
and connection.percentComplete != -1):
if connection.percentComplete != stored_percent_complete:
# display percent done if it has changed
stored_percent_complete = connection.percentComplete
munkicommon.display_percent_done(
stored_percent_complete, 100)
elif connection.bytesReceived != stored_bytes_received:
# if we don't have percent done info, log bytes received
stored_bytes_received = connection.bytesReceived
munkicommon.display_detail(
'Bytes received: %s', stored_bytes_received)
if connection_done:
break
retcode = proc.poll()
if retcode:
curlerr = ''
try:
curlerr = proc.stderr.read().rstrip('\n')
curlerr = curlerr.split(None, 2)[2]
except IndexError:
pass
if retcode == 22:
# 22 means any 400 series return code. Note: header seems not to
# be dumped to STDOUT for immediate failures. Hence
# http_result_code is likely blank/000. Read it from stderr.
if re.search(r'URL returned error: [0-9]+$', curlerr):
header['http_result_code'] = curlerr[curlerr.rfind(' ')+1:]
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise GurlError(-1, str(err))
if os.path.exists(tempdownloadpath):
if not resume:
os.remove(tempdownloadpath)
elif retcode == 33 or header.get('http_result_code') == '412':
# 33: server doesn't support range requests
# 412: Etag didn't match (precondition failed), could not
# resume partial download as file on server has changed.
if retcode == 33 and not 'HTTPRange' in WARNINGSLOGGED:
# use display_info instead of display_warning so these
# don't get reported but are available in the log
# and in command-line output
munkicommon.display_info('WARNING: Web server refused '
'partial/range request. Munki cannot run '
'efficiently when this support is absent for '
'pkg urls. URL: %s' % url)
WARNINGSLOGGED['HTTPRange'] = 1
os.remove(tempdownloadpath)
# The partial failed immediately as not supported.
# Try a full download again immediately.
if not donotrecurse:
return curl(url, destinationpath,
cert_info=cert_info,
custom_headers=custom_headers,
donotrecurse=True,
etag=etag,
message=message,
onlyifnewer=onlyifnewer,
resume=resume,
follow_redirects=follow_redirects)
elif retcode == 22:
# TODO: Made http(s) connection but 400 series error.
# What should we do?
# 403 could be ok, just that someone is currently offsite and
# the server is refusing the service them while there.
# 404 could be an interception proxy at a public wifi point.
# The partial may still be ok later.
# 416 could be dangerous - the targeted resource may now be
# different / smaller. We need to delete the temp or retrying
# will never work.
if header.get('http_result_code') == 416:
# Bad range request.
os.remove(tempdownloadpath)
elif header.get('http_result_code') == 503:
# Web server temporarily unavailable.
pass
elif not header.get('http_result_code').startswith('4'):
# 500 series, or no error code parsed.
# Perhaps the webserver gets really confused by partial
# requests. It is likely majorly misconfigured so we won't
# try asking it anything challenging.
os.remove(tempdownloadpath)
elif header.get('etag'):
xattr.setxattr(tempdownloadpath, XATTR_ETAG, header['etag'])
# TODO: should we log this diagnostic here (we didn't previously)?
# Currently for a pkg all that is logged on failure is:
# "WARNING: Download of Firefox failed." with no detail. Logging at
# the place where this exception is caught has to be done in many
# places.
munkicommon.display_detail('Download error: %s. Failed (%s) with: %s'
% (url,retcode,curlerr))
munkicommon.display_detail('Headers: %s', header)
raise CurlError(retcode, curlerr)
if connection.error != None:
# Gurl returned an error
munkicommon.display_detail(
'Download error %s: %s', connection.error.code(),
connection.error.localizedDescription())
if connection.SSLerror:
munkicommon.display_detail(
'SSL error detail: %s' % str(connection.SSLerror))
keychain.debug_output()
munkicommon.display_detail('Headers: %s', connection.headers)
if os.path.exists(tempdownloadpath) and not resume:
os.remove(tempdownloadpath)
raise GurlError(connection.error.code(),
connection.error.localizedDescription())
if connection.response != None:
munkicommon.display_debug1('Status: %s', connection.status)
munkicommon.display_debug1('Headers: %s', connection.headers)
if connection.redirection != []:
munkicommon.display_debug1('Redirection: %s', connection.redirection)
temp_download_exists = os.path.isfile(tempdownloadpath)
connection.headers['http_result_code'] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(
connection.status)
connection.headers['http_result_description'] = description
if str(connection.status).startswith('2') and temp_download_exists:
os.rename(tempdownloadpath, destinationpath)
return connection.headers
elif connection.status == 304:
# unchanged on server
munkicommon.display_debug1('Item is unchanged on the server.')
return connection.headers
else:
temp_download_exists = os.path.isfile(tempdownloadpath)
http_result = header.get('http_result_code')
if http_result.startswith('2') and temp_download_exists:
downloadedsize = os.path.getsize(tempdownloadpath)
if downloadedsize >= targetsize:
if targetsize and not downloadedpercent == 100:
# need to display a percent done of 100%
munkicommon.display_percent_done(100, 100)
os.rename(tempdownloadpath, destinationpath)
if (resume and not header.get('etag')
and not 'HTTPetag' in WARNINGSLOGGED):
# use display_info instead of display_warning so these
# don't get reported but are available in the log
# and in command-line output
munkicommon.display_info(
'WARNING: '
'Web server did not return an etag. Munki cannot '
'safely resume downloads without etag support on the '
'web server. URL: %s' % url)
WARNINGSLOGGED['HTTPetag'] = 1
return header
else:
# not enough bytes retreived
if not resume and temp_download_exists:
os.remove(tempdownloadpath)
raise CurlError(-5, 'Expected %s bytes, got: %s' %
(targetsize, downloadedsize))
elif http_result == '304':
return header
else:
# there was a download error of some sort; clean all relevant
# downloads that may be in a bad state.
for f in [tempdownloadpath, destinationpath]:
try:
os.unlink(f)
except OSError:
pass
raise HTTPError(http_result,
header.get('http_result_description',''))
# there was an HTTP error of some sort; remove our temp download.
if os.path.exists(tempdownloadpath):
try:
os.unlink(tempdownloadpath)
except OSError:
pass
raise HTTPError(connection.status,
connection.headers.get('http_result_description',''))
def getResourceIfChangedAtomically(url,
destinationpath,
cert_info=None,
custom_headers=None,
expected_hash=None,
message=None,
@@ -475,7 +274,7 @@ def getResourceIfChangedAtomically(url,
if url_parse.scheme in ['http', 'https']:
changed = getHTTPfileIfChangedAtomically(
url, destinationpath,
cert_info=cert_info, custom_headers=custom_headers,
custom_headers=custom_headers,
message=message, resume=resume, follow_redirects=follow_redirects)
elif url_parse.scheme == 'file':
changed = getFileIfChangedAtomically(url_parse.path, destinationpath)
@@ -554,7 +353,7 @@ def getFileIfChangedAtomically(path, destinationpath):
def getHTTPfileIfChangedAtomically(url, destinationpath,
cert_info=None, custom_headers=None,
custom_headers=None,
message=None, resume=False,
follow_redirects=False):
"""Gets file from HTTP URL, checking first to see if it has changed on the
@@ -563,7 +362,7 @@ def getHTTPfileIfChangedAtomically(url, destinationpath,
Returns True if a new download was required; False if the
item is already in the local cache.
Raises CurlDownloadError if there is an error."""
Raises GurlDownloadError if there is an error."""
etag = None
getonlyifnewer = False
@@ -575,23 +374,21 @@ def getHTTPfileIfChangedAtomically(url, destinationpath,
getonlyifnewer = False
try:
header = curl(url,
header = get_url(url,
destinationpath,
cert_info=cert_info,
custom_headers=custom_headers,
etag=etag,
message=message,
onlyifnewer=getonlyifnewer,
resume=resume,
follow_redirects=follow_redirects)
except CurlError, err:
except GurlError, err:
err = 'Error %s: %s' % tuple(err)
raise CurlDownloadError(err)
raise GurlDownloadError(err)
except HTTPError, err:
err = 'HTTP result %s: %s' % tuple(err)
raise CurlDownloadError(err)
raise GurlDownloadError(err)
err = None
if header['http_result_code'] == '304':
@@ -691,4 +488,3 @@ def verifySoftwarePackageIntegrity(file_path, item_hash, always_hash=False):
return (False, chash)
+393
View File
@@ -0,0 +1,393 @@
#!/usr/bin/python
# encoding: utf-8
#
# Copyright 2009-2014 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
gurl.py
Created by Greg Neagle on 2013-11-21.
curl replacement using NSURLConnection and friends
"""
import os
import sys
import xattr
from Foundation import NSRunLoop, NSDate
from Foundation import NSObject, NSURL, NSURLConnection
from Foundation import NSMutableURLRequest
from Foundation import NSURLRequestReloadIgnoringLocalCacheData
from Foundation import NSURLResponseUnknownLength
from Foundation import NSLog
from Foundation import NSString, NSUTF8StringEncoding
from Foundation import NSURLCredential, NSURLCredentialPersistenceNone
from Foundation import NSData, \
NSPropertyListSerialization, \
NSPropertyListMutableContainersAndLeaves, \
NSPropertyListXMLFormat_v1_0
# Human-readable descriptions for SSL/TLS error codes. Gurl uses this to
# translate the code found in a failed connection's NSError (key
# '_kCFNetworkCFStreamSSLErrorOriginalValue') into a readable message.
ssl_error_codes = {
    -9800: u'SSL protocol error',
    -9801: u'Cipher Suite negotiation failure',
    -9802: u'Fatal alert',
    -9803: u'I/O would block (not fatal)',
    -9804: u'Attempt to restore an unknown session',
    -9805: u'Connection closed gracefully',
    -9806: u'Connection closed via error',
    -9807: u'Invalid certificate chain',
    -9808: u'Bad certificate format',
    -9809: u'Underlying cryptographic error',
    -9810: u'Internal error',
    -9811: u'Module attach failure',
    -9812: u'Valid cert chain, untrusted root',
    -9813: u'Cert chain not verified by root',
    -9814: u'Chain had an expired cert',
    -9815: u'Chain had a cert not yet valid',
    -9816: u'Server closed session with no notification',
    -9817: u'Insufficient buffer provided',
    -9818: u'Bad SSLCipherSuite',
    -9819: u'Unexpected message received',
    -9820: u'Bad MAC',
    -9821: u'Decryption failed',
    -9822: u'Record overflow',
    -9823: u'Decompression failure',
    -9824: u'Handshake failure',
    -9825: u'Misc. bad certificate',
    -9826: u'Bad unsupported cert format',
    -9827: u'Certificate revoked',
    -9828: u'Certificate expired',
    -9829: u'Unknown certificate',
    -9830: u'Illegal parameter',
    -9831: u'Unknown Cert Authority',
    -9832: u'Access denied',
    -9833: u'Decoding error',
    -9834: u'Decryption error',
    -9835: u'Export restriction',
    -9836: u'Bad protocol version',
    -9837: u'Insufficient security',
    -9838: u'Internal error',
    -9839: u'User canceled',
    -9840: u'No renegotiation allowed',
    -9841: u'Peer cert is valid, or was ignored if verification disabled',
    -9842: u'Server has requested a client cert',
    -9843: u'Peer host name mismatch',
    -9844: u'Peer dropped connection before responding',
    -9845: u'Decryption failure',
    -9846: u'Bad MAC',
    -9847: u'Record overflow',
    -9848: u'Configuration error',
    -9849: u'Unexpected (skipped) record in DTLS'}
class Gurl(NSObject):
'''A class for getting content from a URL
using NSURLConnection and friends'''
GURL_XATTR = 'com.googlecode.munki.downloadData'
def initWithOptions_(self, options):
self = super(Gurl, self).init()
if not self:
return
self.follow_redirects = options.get('follow_redirects', False)
self.destination_path = options.get('file')
self.can_resume = options.get('can_resume', False)
self.url = options.get('url')
self.additional_headers = options.get('additional_headers', {})
self.username = options.get('username')
self.password = options.get('password')
self.download_only_if_changed = options.get(
'download_only_if_changed', False)
self.cache_data = options.get('cache_data')
self.connection_timeout = options.get('connection_timeout', 10)
self.log = options.get('logging_function', NSLog)
self.resume = False
self.response = None
self.headers = None
self.status = None
self.error = None
self.SSLerror = None
self.done = False
self.redirection = []
self.destination = None
self.bytesReceived = 0
self.expectedLength = -1
self.percentComplete = 0
self.connection = None
return self
def start(self):
if not self.destination_path:
self.log('No output file specified.')
self.done = True
return
url = NSURL.URLWithString_(self.url)
request = (
NSMutableURLRequest.requestWithURL_cachePolicy_timeoutInterval_(
url, NSURLRequestReloadIgnoringLocalCacheData,
self.connection_timeout))
if self.additional_headers:
for header, value in self.additional_headers.items():
request.setValue_forHTTPHeaderField_(value, header)
# does the file already exist? See if we can resume a partial download
if os.path.isfile(self.destination_path):
stored_data = self.get_stored_headers()
if (self.can_resume and 'expected-length' in stored_data
and ('last-modified' in stored_data or 'etag' in stored_data)):
# we have a partial file and we're allowed to resume
self.resume = True
local_filesize = os.path.getsize(self.destination_path)
byte_range = 'bytes=%s-' % local_filesize
request.setValue_forHTTPHeaderField_(byte_range, 'Range')
if self.download_only_if_changed and not self.resume:
stored_data = self.cache_data or self.get_stored_headers()
if 'last-modified' in stored_data:
request.setValue_forHTTPHeaderField_(
stored_data['last-modified'], 'if-modified-since')
if 'etag' in stored_data:
request.setValue_forHTTPHeaderField_(
stored_data['etag'], 'if-none-match')
self.connection = NSURLConnection.alloc().initWithRequest_delegate_(
request, self)
def cancel(self):
if self.connection:
self.connection.cancel()
self.done = True
def isDone(self):
if self.done:
return self.done
# let the delegates do their thing
NSRunLoop.currentRunLoop().runUntilDate_(
NSDate.dateWithTimeIntervalSinceNow_(.1))
return self.done
def get_stored_headers(self):
'''Returns any stored headers for self.destination_path'''
# try to read stored headers
try:
stored_plist_str = xattr.getxattr(
self.destination_path, self.GURL_XATTR)
except (KeyError, IOError):
return {}
data = buffer(stored_plist_str)
dataObject, plistFormat, error = (
NSPropertyListSerialization.
propertyListFromData_mutabilityOption_format_errorDescription_(
data, NSPropertyListMutableContainersAndLeaves, None, None))
if error:
return {}
else:
return dataObject
def store_headers(self, headers):
'''Store dictionary data as an xattr for self.destination_path'''
plistData, error = (
NSPropertyListSerialization.
dataFromPropertyList_format_errorDescription_(
headers, NSPropertyListXMLFormat_v1_0, None))
if error:
string = ''
else:
string = str(plistData)
try:
xattr.setxattr(self.destination_path, self.GURL_XATTR, string)
except IOError, err:
self.log('Could not store metadata to %s: %s' %
(self.destination_path, err))
pass
def normalize_header_dict(self, a_dict):
'''Since HTTP header names are not case-sensitive, we normalize a
dictionary of HTTP headers by converting all the key names to
lower case'''
new_dict = {}
for key, value in a_dict.items():
new_dict[key.lower()] = value
return new_dict
def connection_didFailWithError_(self, connection, error):
self.error = error
# If this was an SSL error, try to extract the SSL error code.
if 'NSUnderlyingError' in error.userInfo():
ssl_code = error.userInfo()['NSUnderlyingError'].userInfo().get(
'_kCFNetworkCFStreamSSLErrorOriginalValue', None)
if ssl_code:
self.SSLerror = (ssl_code, ssl_error_codes.get(
ssl_code, 'Unknown SSL error'))
self.done = True
if self.destination and self.destination_path:
self.destination.close()
# delete it? Might not want to...
def connectionDidFinishLoading_(self, connection):
self.done = True
if self.destination and self.destination_path:
self.destination.close()
if str(self.status).startswith('2'):
# remove the expected-size from the stored headers
headers = self.get_stored_headers()
if 'expected-length' in headers:
del headers['expected-length']
self.store_headers(headers)
def connection_didReceiveResponse_(self, connection, response):
self.response = response
self.bytesReceived = 0
self.percentComplete = -1
self.expectedLength = response.expectedContentLength()
download_data = {}
if response.className() == u'NSHTTPURLResponse':
# Headers and status code only available for HTTP/S transfers
self.status = response.statusCode()
self.headers = dict(response.allHeaderFields())
normalized_headers = self.normalize_header_dict(self.headers)
if 'last-modified' in normalized_headers:
download_data['last-modified'] = normalized_headers[
'last-modified']
if 'etag' in normalized_headers:
download_data['etag'] = normalized_headers['etag']
download_data['expected-length'] = self.expectedLength
if not self.destination and self.destination_path:
if self.status == 206 and self.resume:
# 206 is Partial Content response
stored_data = self.get_stored_headers()
if (not stored_data
or stored_data.get('etag') != download_data.get('etag')
or stored_data.get('last-modified') != download_data.get(
'last-modified')):
# file on server is different than the one
# we have a partial for
self.log(
'Can\'t resume download; file on server has changed.')
connection.cancel()
self.log('Removing %s' % self.destination_path)
os.unlink(self.destination_path)
# restart and attempt to download the entire file
self.log(
'Restarting download of %s' % self.destination_path)
os.unlink(self.destination_path)
self.start()
return
# try to resume
self.log('Resuming download for %s' % self.destination_path)
# add existing file size to bytesReceived so far
local_filesize = os.path.getsize(self.destination_path)
self.bytesReceived = local_filesize
self.expectedLength += local_filesize
# open file for append
self.destination = open(self.destination_path, 'a')
elif str(self.status).startswith('2'):
# not resuming, just open the file for writing
self.destination = open(self.destination_path, 'w')
# store some headers with the file for use if we need to resume
# the downloadand for future checking if the file on the server
# has changed
self.store_headers(download_data)
def connection_willSendRequest_redirectResponse_(
self, connection, request, response):
if response == None:
# This isn't a real redirect, this is without talking to a server.
# Pass it back as-is
return request
# But if we're here, it appears to be a real redirect attempt
# Annoyingly, we apparently can't get access to the headers from the
# site that told us to redirect. All we know is that we were told
# to redirect and where the new location is.
newURL = request.URL().absoluteString()
self.redirection.append([newURL, dict(response.allHeaderFields())])
if self.follow_redirects:
# Allow the redirect
self.log('Allowing redirect to: %s' % newURL)
return request
else:
# Deny the redirect
self.log('Denying redirect to: %s' % newURL)
return None
def connection_canAuthenticateAgainstProtectionSpace_(
self, connection, protectionSpace):
# this is not called in 10.5.x.
self.log('connection_canAuthenticateAgainstProtectionSpace_')
if protectionSpace:
host = protectionSpace.host()
realm = protectionSpace.realm()
authenticationMethod = protectionSpace.authenticationMethod()
self.log('Protection space found. Host: %s Realm: %s AuthMethod: %s'
% (host, realm, authenticationMethod))
if self.username and self.password and authenticationMethod in [
'NSURLAuthenticationMethodDefault',
'NSURLAuthenticationMethodHTTPBasic',
'NSURLAuthenticationMethodHTTPDigest']:
# we know how to handle this
self.log('Can handle this authentication request')
return True
# we don't know how to handle this; let the OS try
self.log('Allowing OS to handle authentication request')
return False
def connection_didReceiveAuthenticationChallenge_(
self, connection, challenge):
protectionSpace = challenge.protectionSpace()
host = protectionSpace.host()
realm = protectionSpace.realm()
authenticationMethod = protectionSpace.authenticationMethod()
self.log(
'Authentication challenge for Host: %s Realm: %s AuthMethod: %s'
% (host, realm, authenticationMethod))
if challenge.previousFailureCount() > 0:
# we have the wrong credentials. just fail
self.log('Previous authentication attempt failed.')
challenge.sender().cancelAuthenticationChallenge_(challenge)
if self.username and self.password and authenticationMethod in [
'NSURLAuthenticationMethodDefault',
'NSURLAuthenticationMethodHTTPBasic',
'NSURLAuthenticationMethodHTTPDigest']:
self.log('Will attempt to authenticate.')
self.log('Username: %s Password: %s'
% (self.username, ('*' * len(self.password or ''))))
credential = (
NSURLCredential.credentialWithUser_password_persistence_(
self.username, self.password, NSURLCredentialPersistenceNone))
challenge.sender().useCredential_forAuthenticationChallenge_(
credential, challenge)
else:
# fall back to system-provided default behavior
self.log('Continuing without credential.')
challenge.sender(
).continueWithoutCredentialForAuthenticationChallenge_(
challenge)
def connection_didReceiveData_(self, connection, data):
if self.destination:
self.destination.write(str(data))
else:
self.log(NSString.alloc().initWithData_encoding_(
data, NSUTF8StringEncoding))
self.bytesReceived += len(data)
if self.expectedLength != NSURLResponseUnknownLength:
self.percentComplete = int(
float(self.bytesReceived)/float(self.expectedLength) * 100.0)
+179
View File
@@ -0,0 +1,179 @@
#!/usr/bin/python
# encoding: utf-8
#
# Copyright 2014 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
keychain
Created by Greg Neagle on 2014-06-09.
Incorporating work and ideas from Michael Lynn here:
https://gist.github.com/pudquick/7704254
"""
import os
import re
import stat
import subprocess
import sys
import munkicommon
# Default filename for our keychain; overridable via the KeychainName pref
DEFAULT_KEYCHAIN_NAME = 'munki.keychain'
# Default password used to unlock the keychain; overridable via the
# KeychainPassword pref
DEFAULT_KEYCHAIN_PASSWORD = 'munki'
# Keychains are kept in a 'Keychains' subdirectory of ManagedInstallDir.
# NOTE(review): evaluated at import time -- assumes munkicommon prefs are
# readable when this module loads.
KEYCHAIN_DIRECTORY = os.path.join(
    munkicommon.pref('ManagedInstallDir'), 'Keychains')
def debug_output():
'''Debugging output for keychain'''
try:
munkicommon.display_info('***Keychain list***')
munkicommon.display_info(security('list-keychains', '-d', 'user'))
munkicommon.display_info('***Default keychain info***')
munkicommon.display_info(security('default-keychain', '-d', 'user'))
keychainfile = get_keychain_path()
if os.path.exists(keychainfile):
munkicommon.display_info('***Info for %s***' % keychainfile)
munkicommon.display_info(
security('show-keychain-info', keychainfile))
except SecurityError, err:
munkicommon.display_info(str(err))
class SecurityError(Exception):
    '''Raised when running /usr/bin/security returns a non-zero
    exit code.'''
    pass
def security(verb_name, *args):
    '''Run /usr/bin/security with the given verb and arguments.

    Returns the command's stdout (falling back to stderr if stdout is
    empty). Raises SecurityError when the exit code is non-zero.'''
    command = ['/usr/bin/security', verb_name] + list(args)
    proc = subprocess.Popen(
        command, shell=False, bufsize=-1,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = proc.communicate()
    if proc.returncode != 0:
        raise SecurityError('%s: %s' % (proc.returncode, err))
    return output or err
def get_keychain_path():
    '''Return the absolute path to our keychain file.

    The filename comes from the KeychainName preference (defaulting to
    DEFAULT_KEYCHAIN_NAME); any directory component is stripped and a
    '.keychain' extension is ensured. The file lives in
    KEYCHAIN_DIRECTORY.'''
    name = munkicommon.pref('KeychainName') or DEFAULT_KEYCHAIN_NAME
    # A path that is all directory (no basename) reverts to the default
    if not os.path.basename(name):
        name = DEFAULT_KEYCHAIN_NAME
    # Only a simple filename is allowed: drop any directory component
    if os.path.dirname(name):
        name = os.path.basename(name).strip() or DEFAULT_KEYCHAIN_NAME
    # Make sure the name carries the '.keychain' extension
    if not name.lower().endswith('.keychain'):
        name += '.keychain'
    return os.path.realpath(os.path.join(KEYCHAIN_DIRECTORY, name))
class MunkiKeychain(object):
    '''Manages our keychain's lifecycle: on creation, unlocks the
    keychain and ensures it is in the user keychain search list; on
    deletion, removes it from the search list again if (and only if)
    this object was the one that added it.'''

    # absolute path to our keychain file, or None if missing/unusable
    keychain_path = None
    # True only when we added the keychain to the search list ourselves
    added_keychain = False

    def __init__(self):
        '''Unlocks the munki.keychain if it exists.
        Makes sure the munki.keychain is in the search list.'''
        self.keychain_path = get_keychain_path()
        keychain_pass = (
            munkicommon.pref('KeychainPassword') or DEFAULT_KEYCHAIN_PASSWORD)
        if os.path.exists(self.keychain_path):
            self.ensure_in_search_list()
            try:
                output = security(
                    'unlock-keychain', '-p', keychain_pass, self.keychain_path)
            except SecurityError, err:
                # some problem unlocking the keychain.
                munkicommon.display_error(
                    'Could not unlock %s: %s.' % (self.keychain_path, err))
                # mark the keychain unusable for callers
                self.keychain_path = None
                return
            try:
                # NOTE(review): 'set-keychain-settings' with no timeout
                # args appears intended to keep the keychain unlocked --
                # confirm against security(1)
                output = security('set-keychain-settings', self.keychain_path)
            except SecurityError, err:
                munkicommon.display_error(
                    'Could not set keychain settings for %s: %s'
                    % (self.keychain_path, err))
        if not os.path.exists(self.keychain_path):
            # no keychain file at all; mark unusable
            self.keychain_path = None

    def __del__(self):
        '''Remove our keychain from the keychain list if we added it'''
        if self.added_keychain:
            self.remove_from_search_list()

    def ensure_in_search_list(self):
        '''Ensure the keychain is in the search path.'''
        self.added_keychain = False
        output = security('list-keychains', '-d', 'user')
        # Split the output and strip it of whitespace and leading/trailing
        # quotes, the result are absolute paths to keychains
        # Preserve the order in case we need to append to them
        search_keychains = [x.strip().strip('"')
                            for x in output.split('\n') if x.strip()]
        if not self.keychain_path in search_keychains:
            # Keychain is not in the search paths
            munkicommon.display_debug1('Adding keychain to search path...')
            search_keychains.append(self.keychain_path)
            try:
                # NOTE(review): '-s' appears to replace the whole search
                # list, hence passing the full (ordered) list back
                output = security(
                    'list-keychains', '-d', 'user', '-s', *search_keychains)
                self.added_keychain = True
            except SecurityError, err:
                munkicommon.display_error(
                    'Could not add keychain %s to keychain list: %s'
                    % (self.keychain_path, err))
                self.added_keychain = False

    def remove_from_search_list(self):
        '''Remove our keychain from the list of keychains'''
        output = security('list-keychains', '-d', 'user')
        # Split the output and strip it of whitespace and leading/trailing
        # quotes, the result are absolute paths to keychains
        # Preserve the order in case we need to append to them
        search_keychains = [x.strip().strip('"')
                            for x in output.split('\n') if x.strip()]
        if self.keychain_path in search_keychains:
            # Keychain is in the search path
            munkicommon.display_debug1(
                'Removing %s from search path...' % self.keychain_path)
            filtered_keychains = [keychain for keychain in search_keychains
                                  if keychain != self.keychain_path]
            try:
                output = security(
                    'list-keychains', '-d', 'user', '-s', *filtered_keychains)
                self.added_keychain = False
            except SecurityError, err:
                munkicommon.display_error(
                    'Could not set new keychain list: %s' % err)
+37 -35
View File
@@ -33,6 +33,7 @@ from OpenSSL.crypto import load_certificate, FILETYPE_PEM
# our libs
import fetch
import keychain
import munkicommon
import munkistatus
import appleupdates
@@ -1643,7 +1644,7 @@ def updateAvailableLicenseSeats(installinfo):
munkicommon.display_debug1('Got: %s', license_data)
license_dict = FoundationPlist.readPlistFromString(
license_data)
except (fetch.MunkiDownloadError, fetch.CurlDownloadError), err:
except (fetch.MunkiDownloadError, fetch.GurlDownloadError), err:
# problem fetching from URL
munkicommon.display_error('Error from %s: %s', url, err)
except FoundationPlist.FoundationPlistException:
@@ -1918,7 +1919,7 @@ def processInstall(manifestitem, cataloglist, installinfo):
iteminfo['note'] = 'Integrity check failed'
installinfo['managed_installs'].append(iteminfo)
return False
except fetch.CurlDownloadError, errmsg:
except fetch.GurlDownloadError, errmsg:
munkicommon.display_warning(
'Download of %s failed: %s', manifestitem, errmsg)
iteminfo['installed'] = False
@@ -2851,6 +2852,8 @@ def check(client_id='', localmanifestpath=None):
munkicommon.getConditions()
CONDITIONS = munkicommon.getConditions()
keychain_obj = keychain.MunkiKeychain()
ManagedInstallDir = munkicommon.pref('ManagedInstallDir')
if munkicommon.munkistatusoutput:
munkistatus.activate()
@@ -3348,39 +3351,39 @@ def getResourceIfChangedAtomically(url,
exists, and adds any additional headers'''
ManagedInstallDir = munkicommon.pref('ManagedInstallDir')
# get server CA cert if it exists so we can verify the munki server
ca_cert_path = None
ca_dir_path = None
if munkicommon.pref('SoftwareRepoCAPath'):
CA_path = munkicommon.pref('SoftwareRepoCAPath')
if os.path.isfile(CA_path):
ca_cert_path = CA_path
elif os.path.isdir(CA_path):
ca_dir_path = CA_path
if munkicommon.pref('SoftwareRepoCACertificate'):
ca_cert_path = munkicommon.pref('SoftwareRepoCACertificate')
if ca_cert_path == None:
ca_cert_path = os.path.join(ManagedInstallDir, 'certs', 'ca.pem')
if not os.path.exists(ca_cert_path):
ca_cert_path = None
## get server CA cert if it exists so we can verify the munki server
#ca_cert_path = None
#ca_dir_path = None
#if munkicommon.pref('SoftwareRepoCAPath'):
# CA_path = munkicommon.pref('SoftwareRepoCAPath')
# if os.path.isfile(CA_path):
# ca_cert_path = CA_path
# elif os.path.isdir(CA_path):
# ca_dir_path = CA_path
#if munkicommon.pref('SoftwareRepoCACertificate'):
# ca_cert_path = munkicommon.pref('SoftwareRepoCACertificate')
#if ca_cert_path == None:
# ca_cert_path = os.path.join(ManagedInstallDir, 'certs', 'ca.pem')
# if not os.path.exists(ca_cert_path):
# ca_cert_path = None
client_cert_path = None
client_key_path = None
# get client cert if it exists
if munkicommon.pref('UseClientCertificate'):
client_cert_path = munkicommon.pref('ClientCertificatePath') or None
client_key_path = munkicommon.pref('ClientKeyPath') or None
if not client_cert_path:
for name in ['cert.pem', 'client.pem', 'munki.pem']:
client_cert_path = os.path.join(ManagedInstallDir, 'certs',
name)
if os.path.exists(client_cert_path):
break
cert_info = {}
cert_info['cacert'] = ca_cert_path
cert_info['capath'] = ca_dir_path
cert_info['cert'] = client_cert_path
cert_info['key'] = client_key_path
#client_cert_path = None
#client_key_path = None
## get client cert if it exists
#if munkicommon.pref('UseClientCertificate'):
# client_cert_path = munkicommon.pref('ClientCertificatePath') or None
# client_key_path = munkicommon.pref('ClientKeyPath') or None
# if not client_cert_path:
# for name in ['cert.pem', 'client.pem', 'munki.pem']:
# client_cert_path = os.path.join(ManagedInstallDir, 'certs',
# name)
# if os.path.exists(client_cert_path):
# break
#cert_info = {}
#cert_info['cacert'] = ca_cert_path
#cert_info['capath'] = ca_dir_path
#cert_info['cert'] = client_cert_path
#cert_info['key'] = client_key_path
# Add any additional headers specified in ManagedInstalls.plist.
# AdditionalHttpHeaders must be an array of strings with valid HTTP
@@ -3395,7 +3398,6 @@ def getResourceIfChangedAtomically(url,
return fetch.getResourceIfChangedAtomically(url,
destinationpath,
cert_info=cert_info,
custom_headers=custom_headers,
expected_hash=expected_hash,
message=message,