Initial stab at replacing curl with NSURLConnection and friends.

This commit is contained in:
Greg Neagle
2014-06-09 11:50:45 -07:00
parent ada656f717
commit 56c856e469
4 changed files with 526 additions and 336 deletions
+2 -1
View File
@@ -772,8 +772,9 @@ class AppleUpdates(object):
munkicommon.display_warning('Could not download Apple SUS catalog:')
munkicommon.display_warning('\t%s', str(err))
return False
except fetch.MunkiDownloadError:
except fetch.MunkiDownloadError, err:
munkicommon.display_warning('Could not download Apple SUS catalog.')
munkicommon.display_warning('\t%s', str(err))
return False
if not force_check and not self._IsForceCheckNeccessary(before_hash):
+129 -333
View File
@@ -35,8 +35,11 @@ import xattr
#our libs
import munkicommon
from gurl import Gurl
#import munkistatus
from Foundation import NSHTTPURLResponse
# XATTR name storing the ETAG of the file when downloaded via http(s).
XATTR_ETAG = 'com.googlecode.munki.etag'
@@ -44,7 +47,7 @@ XATTR_ETAG = 'com.googlecode.munki.etag'
XATTR_SHA = 'com.googlecode.munki.sha256'
class CurlError(Exception):
class GurlError(Exception):
pass
class HTTPError(Exception):
@@ -54,8 +57,8 @@ class MunkiDownloadError(Exception):
"""Base exception for download errors"""
pass
class CurlDownloadError(MunkiDownloadError):
"""Curl failed to download the item"""
class GurlDownloadError(MunkiDownloadError):
"""Gurl failed to download the item"""
pass
class FileCopyError(MunkiDownloadError):
@@ -87,344 +90,139 @@ def writeCachedChecksum(file_path, fhash=None):
return None
WARNINGSLOGGED = {}
def curl(url, destinationpath,
cert_info=None, custom_headers=None, donotrecurse=False, etag=None,
message=None, onlyifnewer=False, resume=False, follow_redirects=False):
def header_dict_from_list(array):
    """Convert a list of 'Name: value' HTTP header strings into a dict.

    If array is None, it is returned unchanged (None). Items that do
    not look like 'name: value' are silently skipped."""
    if array is None:
        return None
    result = {}
    for header_line in array:
        name, separator, value = header_line.partition(':')
        if not separator or not value:
            # no colon, or nothing after it -- not a usable header
            continue
        result[name.strip()] = value.strip()
    return result
def get_url(url, destinationpath,
custom_headers=None, message=None, onlyifnewer=False,
resume=False, follow_redirects=False):
"""Gets an HTTP or HTTPS URL and stores it in
destination path. Returns a dictionary of headers, which includes
http_result_code and http_result_description.
Will raise CurlError if curl returns an error.
Will raise GurlError if Gurl returns an error.
Will raise HTTPError if HTTP Result code is not 2xx or 304.
If destinationpath already exists, you can set 'onlyifnewer' to true to
indicate you only want to download the file only if it's newer on the
server.
If you have an ETag from the current destination path, you can pass that
to download the file only if it is different.
Finally, if you set resume to True, curl will attempt to resume an
interrupted download. You'll get an error if the existing file is
complete; if the file has changed since the first download attempt, you'll
get a mess."""
If you set resume to True, Gurl will attempt to resume an
interrupted download."""
header = {}
header['http_result_code'] = '000'
header['http_result_description'] = ''
curldirectivepath = os.path.join(munkicommon.tmpdir, 'curl_temp')
tempdownloadpath = destinationpath + '.download'
if os.path.exists(tempdownloadpath) and not resume:
if resume and not os.path.exists(destinationpath):
os.remove(tempdownloadpath)
# we're writing all the curl options to a file and passing that to
# curl so we avoid the problem of URLs showing up in a process listing
cache_data = None
if onlyifnewer and os.path.exists(destinationpath):
# create a temporary Gurl object so we can extract the
# stored caching data so we can download only if the
# file has changed on the server
gurl_obj = Gurl.alloc().initWithOptions_({'file': destinationpath})
cache_data = gurl_obj.get_stored_headers()
del gurl_obj
options = {'url': url,
'file': tempdownloadpath,
'follow_redirects': follow_redirects,
'can_resume': resume,
'additional_headers': header_dict_from_list(custom_headers),
'download_only_if_changed': onlyifnewer,
'cache_data': cache_data,
'logging_function': munkicommon.display_debug2}
munkicommon.display_debug2('Options: %s' % options)
connection = Gurl.alloc().initWithOptions_(options)
stored_percent_complete = -1
stored_bytes_received = 0
connection.start()
try:
fileobj = open(curldirectivepath, mode='w')
print >> fileobj, 'silent' # no progress meter
print >> fileobj, 'show-error' # print error msg to stderr
print >> fileobj, 'no-buffer' # don't buffer output
print >> fileobj, 'fail' # throw error if download fails
print >> fileobj, 'dump-header -' # dump headers to stdout
print >> fileobj, 'speed-time = 30' # give up if too slow d/l
print >> fileobj, 'output = "%s"' % tempdownloadpath
print >> fileobj, 'ciphers = HIGH,!ADH' #use only secure >=128 bit SSL
print >> fileobj, 'url = "%s"' % url
munkicommon.display_debug2('follow_redirects is %s', follow_redirects)
if follow_redirects:
print >> fileobj, 'location' # follow redirects
if cert_info:
cacert = cert_info.get('cacert')
capath = cert_info.get('capath')
cert = cert_info.get('cert')
key = cert_info.get('key')
if cacert:
if not os.path.isfile(cacert):
raise CurlError(-1, 'No CA cert at %s' % cacert)
print >> fileobj, 'cacert = "%s"' % cacert
if capath:
if not os.path.isdir(capath):
raise CurlError(-2, 'No CA directory at %s' % capath)
print >> fileobj, 'capath = "%s"' % capath
if cert:
if not os.path.isfile(cert):
raise CurlError(-3, 'No client cert at %s' % cert)
print >> fileobj, 'cert = "%s"' % cert
if key:
if not os.path.isfile(key):
raise CurlError(-4, 'No client key at %s' % key)
print >> fileobj, 'key = "%s"' % key
if os.path.exists(destinationpath):
if etag:
escaped_etag = etag.replace('"','\\"')
print >> fileobj, ('header = "If-None-Match: %s"'
% escaped_etag)
elif onlyifnewer:
print >> fileobj, 'time-cond = "%s"' % destinationpath
else:
os.remove(destinationpath)
if os.path.exists(tempdownloadpath):
if resume and not os.path.exists(destinationpath):
# let's try to resume this download
print >> fileobj, 'continue-at -'
# if an existing etag, only resume if etags still match.
tempetag = getxattr(tempdownloadpath, XATTR_ETAG)
if tempetag:
# Note: If-Range is more efficient, but the response
# confuses curl (Error: 33 if etag not match).
escaped_etag = tempetag.replace('"','\\"')
print >> fileobj, ('header = "If-Match: %s"'
% escaped_etag)
else:
os.remove(tempdownloadpath)
# Add any additional headers specified in custom_headers
# custom_headers must be an array of strings with valid HTTP
# header format.
if custom_headers:
for custom_header in custom_headers:
custom_header = custom_header.strip().encode('utf-8')
if re.search(r'^[\w-]+:.+', custom_header):
print >> fileobj, ('header = "%s"' % custom_header)
else:
munkicommon.display_warning(
'Skipping invalid HTTP header: %s' % custom_header)
fileobj.close()
except Exception, e:
raise CurlError(-5, 'Error writing curl directive: %s' % str(e))
# In Mavericks we need to wrap our call to curl with a utility
# that makes curl think it is connected to a tty-like
# device so its output is unbuffered so we can get progress info
cmd = []
minor_os_version = munkicommon.getOsVersion(as_tuple=True)[1]
if minor_os_version > 8:
# Try to find our ptyexec tool
# first look in the parent directory of this file's directory
# (../)
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
ptyexec_path = os.path.join(parent_dir, 'ptyexec')
if not os.path.exists(ptyexec_path):
# try absolute path in munki's normal install dir
ptyexec_path = '/usr/local/munki/ptyexec'
if os.path.exists(ptyexec_path):
cmd = [ptyexec_path]
# Workaround for current issue in OS X 10.9's included curl
# Allows for alternate curl binary path as Apple's included curl currently
# broken for client-side certificate usage
curl_path = munkicommon.pref('CurlPath') or '/usr/bin/curl'
cmd.extend([curl_path,
'-q', # don't read .curlrc file
'--config', # use config file
curldirectivepath])
proc = subprocess.Popen(cmd, shell=False, bufsize=1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
targetsize = 0
downloadedpercent = -1
donewithheaders = False
maxheaders = 15
while True:
if not donewithheaders:
info = proc.stdout.readline().strip('\r\n')
if info:
munkicommon.display_debug2(info)
if info.startswith('HTTP/'):
header['http_result_code'] = info.split(None, 2)[1]
header['http_result_description'] = info.split(None, 2)[2]
elif ': ' in info:
part = info.split(None, 1)
fieldname = part[0].rstrip(':').lower()
header[fieldname] = part[1]
else:
# we got an empty line; end of headers (or curl exited)
if follow_redirects:
if header.get('http_result_code') in ['301', '302', '303']:
# redirect, so more headers are coming.
# Throw away the headers we've received so far
header = {}
header['http_result_code'] = '000'
header['http_result_description'] = ''
else:
donewithheaders = True
try:
# Prefer Content-Length header to determine download
# size, otherwise fall back to a custom X-Download-Size
# header.
# This is primary for servers that use chunked transfer
# encoding, when Content-Length is forbidden by
# RFC2616 4.4. An example of such a server is
# Google App Engine Blobstore.
targetsize = (
header.get('content-length') or
header.get('x-download-size'))
targetsize = int(targetsize)
except (ValueError, TypeError):
targetsize = 0
if header.get('http_result_code') == '206':
# partial content because we're resuming
munkicommon.display_detail(
'Resuming partial download for %s' %
os.path.basename(destinationpath))
contentrange = header.get('content-range')
if contentrange.startswith('bytes'):
try:
targetsize = int(contentrange.split('/')[1])
except (ValueError, TypeError):
targetsize = 0
if message and header.get('http_result_code') != '304':
if message:
# log always, display if verbose is 1 or more
# also display in MunkiStatus detail field
munkicommon.display_status_minor(message)
elif targetsize and header.get('http_result_code').startswith('2'):
# display progress if we get a 2xx result code
if os.path.exists(tempdownloadpath):
downloadedsize = os.path.getsize(tempdownloadpath)
percent = int(float(downloadedsize)
/float(targetsize)*100)
if percent != downloadedpercent:
# percent changed; update display
downloadedpercent = percent
munkicommon.display_percent_done(downloadedpercent, 100)
time.sleep(0.1)
else:
# Headers have finished, but not targetsize or HTTP2xx.
# It's possible that Content-Length was not in the headers.
# so just sleep and loop again. We can't show progress.
time.sleep(0.1)
if (proc.poll() != None):
# For small download files curl may exit before all headers
# have been parsed, don't immediately exit.
maxheaders -= 1
if donewithheaders or maxheaders <= 0:
while True:
# if we did `while not connection.isDone()` we'd miss printing
# messages and displaying percentages if we exit the loop first
connection_done = connection.isDone()
if message and connection.status and connection.status != 304:
# log always, display if verbose is 1 or more
# also display in MunkiStatus detail field
munkicommon.display_status_minor(message)
# now clear message so we don't display it again
message = None
if (str(connection.status).startswith('2')
and connection.percentComplete != -1):
if connection.percentComplete != stored_percent_complete:
# display percent done if it has changed
stored_percent_complete = connection.percentComplete
munkicommon.display_percent_done(
stored_percent_complete, 100)
elif connection.bytesReceived != stored_bytes_received:
# if we don't have percent done info, log bytes received
stored_bytes_received = connection.bytesReceived
munkicommon.display_detail(
'Bytes received: %s', stored_bytes_received)
if connection_done:
break
retcode = proc.poll()
if retcode:
curlerr = ''
try:
curlerr = proc.stderr.read().rstrip('\n')
curlerr = curlerr.split(None, 2)[2]
except IndexError:
pass
if retcode == 22:
# 22 means any 400 series return code. Note: header seems not to
# be dumped to STDOUT for immediate failures. Hence
# http_result_code is likely blank/000. Read it from stderr.
if re.search(r'URL returned error: [0-9]+$', curlerr):
header['http_result_code'] = curlerr[curlerr.rfind(' ')+1:]
except (KeyboardInterrupt, SystemExit):
# safely kill the connection then re-raise
connection.cancel()
raise
except Exception, err: # too general, I know
# Let us out! ... Safely! Unexpectedly quit dialogs are annoying...
connection.cancel()
# Re-raise the error as a GurlError
raise GurlError(-1, str(err))
if os.path.exists(tempdownloadpath):
if not resume:
os.remove(tempdownloadpath)
elif retcode == 33 or header.get('http_result_code') == '412':
# 33: server doesn't support range requests
# 412: Etag didn't match (precondition failed), could not
# resume partial download as file on server has changed.
if retcode == 33 and not 'HTTPRange' in WARNINGSLOGGED:
# use display_info instead of display_warning so these
# don't get reported but are available in the log
# and in command-line output
munkicommon.display_info('WARNING: Web server refused '
'partial/range request. Munki cannot run '
'efficiently when this support is absent for '
'pkg urls. URL: %s' % url)
WARNINGSLOGGED['HTTPRange'] = 1
os.remove(tempdownloadpath)
# The partial failed immediately as not supported.
# Try a full download again immediately.
if not donotrecurse:
return curl(url, destinationpath,
cert_info=cert_info,
custom_headers=custom_headers,
donotrecurse=True,
etag=etag,
message=message,
onlyifnewer=onlyifnewer,
resume=resume,
follow_redirects=follow_redirects)
elif retcode == 22:
# TODO: Made http(s) connection but 400 series error.
# What should we do?
# 403 could be ok, just that someone is currently offsite and
# the server is refusing the service them while there.
# 404 could be an interception proxy at a public wifi point.
# The partial may still be ok later.
# 416 could be dangerous - the targeted resource may now be
# different / smaller. We need to delete the temp or retrying
# will never work.
if header.get('http_result_code') == 416:
# Bad range request.
os.remove(tempdownloadpath)
elif header.get('http_result_code') == 503:
# Web server temporarily unavailable.
pass
elif not header.get('http_result_code').startswith('4'):
# 500 series, or no error code parsed.
# Perhaps the webserver gets really confused by partial
# requests. It is likely majorly misconfigured so we won't
# try asking it anything challenging.
os.remove(tempdownloadpath)
elif header.get('etag'):
xattr.setxattr(tempdownloadpath, XATTR_ETAG, header['etag'])
# TODO: should we log this diagnostic here (we didn't previously)?
# Currently for a pkg all that is logged on failure is:
# "WARNING: Download of Firefox failed." with no detail. Logging at
# the place where this exception is caught has to be done in many
# places.
munkicommon.display_detail('Download error: %s. Failed (%s) with: %s'
% (url,retcode,curlerr))
munkicommon.display_detail('Headers: %s', header)
raise CurlError(retcode, curlerr)
if connection.error != None:
# Gurl returned an error
munkicommon.display_detail(
'Download error %s: %s', connection.error.code(),
connection.error.localizedDescription())
if connection.SSLerror:
munkicommon.display_detail(
'SSL error detail: %s' % connection.SSLerror)
munkicommon.display_detail('Headers: %s', connection.headers)
if os.path.exists(tempdownloadpath) and not resume:
os.remove(tempdownloadpath)
raise GurlError(connection.error.code(),
connection.error.localizedDescription())
if connection.response != None:
munkicommon.display_debug1('Status: %s', connection.status)
munkicommon.display_debug1('Headers: %s', connection.headers)
if connection.redirection != []:
munkicommon.display_debug1('Redirection: %s', connection.redirection)
temp_download_exists = os.path.isfile(tempdownloadpath)
connection.headers['http_result_code'] = str(connection.status)
description = NSHTTPURLResponse.localizedStringForStatusCode_(
connection.status)
connection.headers['http_result_description'] = description
if str(connection.status).startswith('2') and temp_download_exists:
os.rename(tempdownloadpath, destinationpath)
return connection.headers
elif connection.status == 304:
# unchanged on server
munkicommon.display_debug1('Item is unchanged on the server.')
return connection.headers
else:
temp_download_exists = os.path.isfile(tempdownloadpath)
http_result = header.get('http_result_code')
if http_result.startswith('2') and temp_download_exists:
downloadedsize = os.path.getsize(tempdownloadpath)
if downloadedsize >= targetsize:
if targetsize and not downloadedpercent == 100:
# need to display a percent done of 100%
munkicommon.display_percent_done(100, 100)
os.rename(tempdownloadpath, destinationpath)
if (resume and not header.get('etag')
and not 'HTTPetag' in WARNINGSLOGGED):
# use display_info instead of display_warning so these
# don't get reported but are available in the log
# and in command-line output
munkicommon.display_info(
'WARNING: '
'Web server did not return an etag. Munki cannot '
'safely resume downloads without etag support on the '
'web server. URL: %s' % url)
WARNINGSLOGGED['HTTPetag'] = 1
return header
else:
# not enough bytes retreived
if not resume and temp_download_exists:
os.remove(tempdownloadpath)
raise CurlError(-5, 'Expected %s bytes, got: %s' %
(targetsize, downloadedsize))
elif http_result == '304':
return header
else:
# there was a download error of some sort; clean all relevant
# downloads that may be in a bad state.
for f in [tempdownloadpath, destinationpath]:
try:
os.unlink(f)
except OSError:
pass
raise HTTPError(http_result,
header.get('http_result_description',''))
# there was an HTTP error of some sort; remove our temp download.
if os.path.exists(tempdownloadpath):
try:
os.unlink(tempdownloadpath)
except OSError:
pass
raise HTTPError(connection.status,
connection.headers.get('http_result_description',''))
def getResourceIfChangedAtomically(url,
@@ -563,7 +361,7 @@ def getHTTPfileIfChangedAtomically(url, destinationpath,
Returns True if a new download was required; False if the
item is already in the local cache.
Raises CurlDownloadError if there is an error."""
Raises GurlDownloadError if there is an error."""
etag = None
getonlyifnewer = False
@@ -575,23 +373,21 @@ def getHTTPfileIfChangedAtomically(url, destinationpath,
getonlyifnewer = False
try:
header = curl(url,
header = get_url(url,
destinationpath,
cert_info=cert_info,
custom_headers=custom_headers,
etag=etag,
message=message,
onlyifnewer=getonlyifnewer,
resume=resume,
follow_redirects=follow_redirects)
except CurlError, err:
except GurlError, err:
err = 'Error %s: %s' % tuple(err)
raise CurlDownloadError(err)
raise GurlDownloadError(err)
except HTTPError, err:
err = 'HTTP result %s: %s' % tuple(err)
raise CurlDownloadError(err)
raise GurlDownloadError(err)
err = None
if header['http_result_code'] == '304':
+393
View File
@@ -0,0 +1,393 @@
#!/usr/bin/python
# encoding: utf-8
#
# Copyright 2009-2014 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
gurl.py
Created by Greg Neagle on 2013-11-21.
curl replacement using NSURLConnection and friends
"""
import os
import sys
import xattr
from Foundation import NSRunLoop, NSDate
from Foundation import NSObject, NSURL, NSURLConnection
from Foundation import NSMutableURLRequest
from Foundation import NSURLRequestReloadIgnoringLocalCacheData
from Foundation import NSURLResponseUnknownLength
from Foundation import NSLog
from Foundation import NSString, NSUTF8StringEncoding
from Foundation import NSURLCredential, NSURLCredentialPersistenceNone
from Foundation import NSData, \
NSPropertyListSerialization, \
NSPropertyListMutableContainersAndLeaves, \
NSPropertyListXMLFormat_v1_0
ssl_error_codes = {
-9800: u'SSL protocol error',
-9801: u'Cipher Suite negotiation failure',
-9802: u'Fatal alert',
-9803: u'I/O would block (not fatal)',
-9804: u'Attempt to restore an unknown session',
-9805: u'Connection closed gracefully',
-9806: u'Connection closed via error',
-9807: u'Invalid certificate chain',
-9808: u'Bad certificate format',
-9809: u'Underlying cryptographic error',
-9810: u'Internal error',
-9811: u'Module attach failure',
-9812: u'Valid cert chain, untrusted root',
-9813: u'Cert chain not verified by root',
-9814: u'Chain had an expired cert',
-9815: u'Chain had a cert not yet valid',
-9816: u'Server closed session with no notification',
-9817: u'Insufficient buffer provided',
-9818: u'Bad SSLCipherSuite',
-9819: u'Unexpected message received',
-9820: u'Bad MAC',
-9821: u'Decryption failed',
-9822: u'Record overflow',
-9823: u'Decompression failure',
-9824: u'Handshake failure',
-9825: u'Misc. bad certificate',
-9826: u'Bad unsupported cert format',
-9827: u'Certificate revoked',
-9828: u'Certificate expired',
-9829: u'Unknown certificate',
-9830: u'Illegal parameter',
-9831: u'Unknown Cert Authority',
-9832: u'Access denied',
-9833: u'Decoding error',
-9834: u'Decryption error',
-9835: u'Export restriction',
-9836: u'Bad protocol version',
-9837: u'Insufficient security',
-9838: u'Internal error',
-9839: u'User canceled',
-9840: u'No renegotiation allowed',
-9841: u'Peer cert is valid, or was ignored if verification disabled',
-9842: u'Server has requested a client cert',
-9843: u'Peer host name mismatch',
-9844: u'Peer dropped connection before responding',
-9845: u'Decryption failure',
-9846: u'Bad MAC',
-9847: u'Record overflow',
-9848: u'Configuration error',
-9849: u'Unexpected (skipped) record in DTLS'}
class Gurl(NSObject):
'''A class for getting content from a URL
using NSURLConnection and friends'''
GURL_XATTR = 'com.googlecode.munki.downloadData'
def initWithOptions_(self, options):
self = super(Gurl, self).init()
if not self:
return
self.follow_redirects = options.get('follow_redirects', False)
self.destination_path = options.get('file')
self.can_resume = options.get('can_resume', False)
self.url = options.get('url')
self.additional_headers = options.get('additional_headers', {})
self.username = options.get('username')
self.password = options.get('password')
self.download_only_if_changed = options.get(
'download_only_if_changed', False)
self.cache_data = options.get('cache_data')
self.connection_timeout = options.get('connection_timeout', 10)
self.log = options.get('logging_function', NSLog)
self.resume = False
self.response = None
self.headers = None
self.status = None
self.error = None
self.SSLerror = None
self.done = False
self.redirection = []
self.destination = None
self.bytesReceived = 0
self.expectedLength = -1
self.percentComplete = 0
self.connection = None
return self
def start(self):
if not self.destination_path:
self.log('No output file specified.')
self.done = True
return
url = NSURL.URLWithString_(self.url)
request = (
NSMutableURLRequest.requestWithURL_cachePolicy_timeoutInterval_(
url, NSURLRequestReloadIgnoringLocalCacheData,
self.connection_timeout))
if self.additional_headers:
for header, value in self.additional_headers.items():
request.setValue_forHTTPHeaderField_(value, header)
# does the file already exist? See if we can resume a partial download
if os.path.isfile(self.destination_path):
stored_data = self.get_stored_headers()
if (self.can_resume and 'expected-length' in stored_data
and ('last-modified' in stored_data or 'etag' in stored_data)):
# we have a partial file and we're allowed to resume
self.resume = True
local_filesize = os.path.getsize(self.destination_path)
byte_range = 'bytes=%s-' % local_filesize
request.setValue_forHTTPHeaderField_(byte_range, 'Range')
if self.download_only_if_changed and not self.resume:
stored_data = self.cache_data or self.get_stored_headers()
if 'last-modified' in stored_data:
request.setValue_forHTTPHeaderField_(
stored_data['last-modified'], 'if-modified-since')
if 'etag' in stored_data:
request.setValue_forHTTPHeaderField_(
stored_data['etag'], 'if-none-match')
self.connection = NSURLConnection.alloc().initWithRequest_delegate_(
request, self)
def cancel(self):
if self.connection:
self.connection.cancel()
self.done = True
def isDone(self):
if self.done:
return self.done
# let the delegates do their thing
NSRunLoop.currentRunLoop().runUntilDate_(
NSDate.dateWithTimeIntervalSinceNow_(.1))
return self.done
def get_stored_headers(self):
'''Returns any stored headers for self.destination_path'''
# try to read stored headers
try:
stored_plist_str = xattr.getxattr(
self.destination_path, self.GURL_XATTR)
except (KeyError, IOError):
return {}
data = buffer(stored_plist_str)
dataObject, plistFormat, error = (
NSPropertyListSerialization.
propertyListFromData_mutabilityOption_format_errorDescription_(
data, NSPropertyListMutableContainersAndLeaves, None, None))
if error:
return {}
else:
return dataObject
def store_headers(self, headers):
'''Store dictionary data as an xattr for self.destination_path'''
plistData, error = (
NSPropertyListSerialization.
dataFromPropertyList_format_errorDescription_(
headers, NSPropertyListXMLFormat_v1_0, None))
if error:
string = ''
else:
string = str(plistData)
try:
xattr.setxattr(self.destination_path, self.GURL_XATTR, string)
except IOError, err:
self.log('Could not store metadata to %s: %s' %
(self.destination_path, err))
pass
def normalize_header_dict(self, a_dict):
'''Since HTTP header names are not case-sensitive, we normalize a
dictionary of HTTP headers by converting all the key names to
lower case'''
new_dict = {}
for key, value in a_dict.items():
new_dict[key.lower()] = value
return new_dict
def connection_didFailWithError_(self, connection, error):
self.error = error
# If this was an SSL error, try to extract the SSL error code.
if 'NSUnderlyingError' in error.userInfo():
ssl_code = error.userInfo()['NSUnderlyingError'].userInfo().get(
'_kCFNetworkCFStreamSSLErrorOriginalValue', None)
if ssl_code:
self.SSLerror = (ssl_code, ssl_error_codes.get(
ssl_code, 'Unknown SSL error'))
self.done = True
if self.destination and self.destination_path:
self.destination.close()
# delete it? Might not want to...
def connectionDidFinishLoading_(self, connection):
self.done = True
if self.destination and self.destination_path:
self.destination.close()
if str(self.status).startswith('2'):
# remove the expected-size from the stored headers
headers = self.get_stored_headers()
if 'expected-length' in headers:
del headers['expected-length']
self.store_headers(headers)
def connection_didReceiveResponse_(self, connection, response):
self.response = response
self.bytesReceived = 0
self.percentComplete = -1
self.expectedLength = response.expectedContentLength()
download_data = {}
if response.className() == u'NSHTTPURLResponse':
# Headers and status code only available for HTTP/S transfers
self.status = response.statusCode()
self.headers = dict(response.allHeaderFields())
normalized_headers = self.normalize_header_dict(self.headers)
if 'last-modified' in normalized_headers:
download_data['last-modified'] = normalized_headers[
'last-modified']
if 'etag' in normalized_headers:
download_data['etag'] = normalized_headers['etag']
download_data['expected-length'] = self.expectedLength
if not self.destination and self.destination_path:
if self.status == 206 and self.resume:
# 206 is Partial Content response
stored_data = self.get_stored_headers()
if (not stored_data
or stored_data.get('etag') != download_data.get('etag')
or stored_data.get('last-modified') != download_data.get(
'last-modified')):
# file on server is different than the one
# we have a partial for
self.log(
'Can\'t resume download; file on server has changed.')
connection.cancel()
self.log('Removing %s' % self.destination_path)
os.unlink(self.destination_path)
# restart and attempt to download the entire file
self.log(
'Restarting download of %s' % self.destination_path)
os.unlink(self.destination_path)
self.start()
return
# try to resume
self.log('Resuming download for %s' % self.destination_path)
# add existing file size to bytesReceived so far
local_filesize = os.path.getsize(self.destination_path)
self.bytesReceived = local_filesize
self.expectedLength += local_filesize
# open file for append
self.destination = open(self.destination_path, 'a')
elif str(self.status).startswith('2'):
# not resuming, just open the file for writing
self.destination = open(self.destination_path, 'w')
# store some headers with the file for use if we need to resume
# the downloadand for future checking if the file on the server
# has changed
self.store_headers(download_data)
def connection_willSendRequest_redirectResponse_(
self, connection, request, response):
if response == None:
# This isn't a real redirect, this is without talking to a server.
# Pass it back as-is
return request
# But if we're here, it appears to be a real redirect attempt
# Annoyingly, we apparently can't get access to the headers from the
# site that told us to redirect. All we know is that we were told
# to redirect and where the new location is.
newURL = request.URL().absoluteString()
self.redirection.append([newURL, dict(response.allHeaderFields())])
if self.follow_redirects:
# Allow the redirect
self.log('Allowing redirect to: %s' % newURL)
return request
else:
# Deny the redirect
self.log('Denying redirect to: %s' % newURL)
return None
def connection_canAuthenticateAgainstProtectionSpace_(
self, connection, protectionSpace):
# this is not called in 10.5.x.
self.log('connection_canAuthenticateAgainstProtectionSpace_')
if protectionSpace:
host = protectionSpace.host()
realm = protectionSpace.realm()
authenticationMethod = protectionSpace.authenticationMethod()
self.log('Protection space found. Host: %s Realm: %s AuthMethod: %s'
% (host, realm, authenticationMethod))
if self.username and self.password and authenticationMethod in [
'NSURLAuthenticationMethodDefault',
'NSURLAuthenticationMethodHTTPBasic',
'NSURLAuthenticationMethodHTTPDigest']:
# we know how to handle this
self.log('Can handle this authentication request')
return True
# we don't know how to handle this; let the OS try
self.log('Allowing OS to handle authentication request')
return False
def connection_didReceiveAuthenticationChallenge_(
self, connection, challenge):
protectionSpace = challenge.protectionSpace()
host = protectionSpace.host()
realm = protectionSpace.realm()
authenticationMethod = protectionSpace.authenticationMethod()
self.log(
'Authentication challenge for Host: %s Realm: %s AuthMethod: %s'
% (host, realm, authenticationMethod))
if challenge.previousFailureCount() > 0:
# we have the wrong credentials. just fail
self.log('Previous authentication attempt failed.')
challenge.sender().cancelAuthenticationChallenge_(challenge)
if self.username and self.password and authenticationMethod in [
'NSURLAuthenticationMethodDefault',
'NSURLAuthenticationMethodHTTPBasic',
'NSURLAuthenticationMethodHTTPDigest']:
self.log('Will attempt to authenticate.')
self.log('Username: %s Password: %s'
% (self.username, ('*' * len(self.password or ''))))
credential = (
NSURLCredential.credentialWithUser_password_persistence_(
self.username, self.password, NSURLCredentialPersistenceNone))
challenge.sender().useCredential_forAuthenticationChallenge_(
credential, challenge)
else:
# fall back to system-provided default behavior
self.log('Continuing without credential.')
challenge.sender(
).continueWithoutCredentialForAuthenticationChallenge_(
challenge)
def connection_didReceiveData_(self, connection, data):
if self.destination:
self.destination.write(str(data))
else:
self.log(NSString.alloc().initWithData_encoding_(
data, NSUTF8StringEncoding))
self.bytesReceived += len(data)
if self.expectedLength != NSURLResponseUnknownLength:
self.percentComplete = int(
float(self.bytesReceived)/float(self.expectedLength) * 100.0)
+2 -2
View File
@@ -1638,7 +1638,7 @@ def updateAvailableLicenseSeats(installinfo):
munkicommon.display_debug1('Got: %s', license_data)
license_dict = FoundationPlist.readPlistFromString(
license_data)
except (fetch.MunkiDownloadError, fetch.CurlDownloadError), err:
except (fetch.MunkiDownloadError, fetch.GurlDownloadError), err:
# problem fetching from URL
munkicommon.display_error('Error from %s: %s', url, err)
except FoundationPlist.FoundationPlistException:
@@ -1908,7 +1908,7 @@ def processInstall(manifestitem, cataloglist, installinfo):
iteminfo['note'] = 'Integrity check failed'
installinfo['managed_installs'].append(iteminfo)
return False
except fetch.CurlDownloadError, errmsg:
except fetch.GurlDownloadError, errmsg:
munkicommon.display_warning(
'Download of %s failed: %s', manifestitem, errmsg)
iteminfo['installed'] = False