Update urllib/urlparse imports for Python 2/3 compatibility; encode the HMAC key as bytes and clean up trailing whitespace.
M webutils/aws/secures3.py +15 -8
@@ 1,12 1,19 @@ 
 import hmac
 import time
 import base64
-import urllib
 import hashlib
 try:
     from urlparse import urlparse
 except ImportError:
     from urllib.parse import urlparse
+try:
+    from urllib import quote_plus
+except ImportError:
+    from urllib.parse import quote_plus
+try:
+    from urllib import urlencode
+except ImportError:
+    from urllib.parse import urlencode
 
 
 class SecureS3(object):

          
@@ 17,9 24,9 @@ class SecureS3(object):
     def gen_signature(self, string_to_sign):
         return base64.encodestring(
             hmac.new(
-                self.secret_key,
+                self.secret_key.encode('utf-8'),
                 string_to_sign,
-                hashlib.sha1
+                hashlib.sha1,
             ).digest()
         ).strip()
 

          
@@ 27,10 34,10 @@ class SecureS3(object):
         ''' Returns bucket name and file from an S3 URL
         '''
         amazon_host = 's3.amazonaws.com'
-        s = urlparse.urlparse(url)
+        s = urlparse(url)
         if not s.path or not s.path[1:]:
             raise ValueError('Invalid S3 file passed.')
-			
+
         if s.netloc == amazon_host:
             # s3.amazonaws.com/bucket/file...
             bucket = s.path[1:].split('/')[0]

          
@@ 57,7 64,7 @@ class SecureS3(object):
             expires: Seconds from NOW the link expires
             timestamp: Epoch timestamp. If present, "expires" will not be used.
         '''
-        filename = urllib.quote_plus(filename)
+        filename = quote_plus(filename)
         filename = filename.replace('%2F', '/')
         path = '/%s/%s' % (bucket, filename)
 

          
@@ 75,8 82,8 @@ class SecureS3(object):
         }
 
         return '%s://s3.amazonaws.com/%s/%s?%s' % (
-                        scheme, bucket, filename, urllib.urlencode(params))
-    
+                        scheme, bucket, filename, urlencode(params))
+
     def get_easy_auth_link(self, url, expires=600):
         ''' url should be the full URL to the secure file hosted on S3.
             examples:

          
M webutils/djtools/decorators.py +7 -2
@@ 1,4 1,9 @@ 
 import urlparse
+
+try:
+    from urlparse import urljoin
+except ImportError:
+    from urllib.parse import urljoin
 from django.conf import settings
 from django.http import HttpResponseRedirect
 

          
@@ 7,11 12,11 @@ def ssl_required(view_func):
     def _checkssl(request, *args, **kwargs):
         if not settings.DEBUG and not request.is_secure():
             if 'HTTP_X_FORWARDED_PROTO' not in request.META:
-                # This checks for X_FORWARDED_PROTO header. Usually 
+                # This checks for X_FORWARDED_PROTO header. Usually
                 # passed when SSL is being proxied upstream.
                 # This should avoid a redirect loop.
                 if hasattr(settings, 'SSL_DOMAIN'):
-                    url_str = urlparse.urljoin(
+                    url_str = urljoin(
                         settings.SSL_DOMAIN,
                         request.get_full_path()
                     )

          
M webutils/djtools/templatetags/djtools_image_tags.py +1 -2
@@ 1,5 1,4 @@ 
 import os
-import urlparse
 from django import template
 from django.core.files.storage import get_storage_class
 from django.core.files.base import ContentFile

          
@@ 29,7 28,7 @@ def thumbnail(file, size='104x104', forc
 
     image = Image.open(file)
     if image.size[0] < x and image.size[1] < y:
-        # New size is bigger than original's size! Don't 
+        # New size is bigger than original's size! Don't
         # create new image.
         miniature_url = file.url
     else:

          
M webutils/restlib/deprecated.py +6 -3
@@ 15,7 15,10 @@ 
 import urllib
 import urllib2
 import hashlib
-import urlparse
+try:
+    from urlparse import urlparse
+except ImportError:
+    from urllib.parse import urlparse
 from webutils.helpers import encode_dict
 
 

          
@@ 27,7 30,7 @@ class APIError(Exception):
 
 class APIClient(object):
     def __init__(self, url, key, debug=False):
-        url_scheme = urlparse.urlparse(url)
+        url_scheme = urlparse(url)
         self.url = url
         self.key = key
         self.scheme = url_scheme.scheme

          
@@ 71,4 74,4 @@ class APIClient(object):
             }
             return False, hash
 
-        return True, data
  No newline at end of file
+        return True, data

          
M webutils/restlib/helpers.py +7 -4
@@ 1,13 1,16 @@ 
-import urlparse
+try:
+    from urlparse import urlparse
+except ImportError:
+    from urllib.parse import urlparse
 from restlib import BaseClient
 from socket import _GLOBAL_DEFAULT_TIMEOUT as default_timeout
 
 
 def url_to_base_client(url, filters=[], timeout=default_timeout, debug=False):
-    ''' Takes a full URL and returns a dict with appropriate 
+    ''' Takes a full URL and returns a dict with appropriate
         data, including a "ready" BaseClient instance.
     '''
-    url_scheme = urlparse.urlparse(url)
+    url_scheme = urlparse(url)
     host = url_scheme.netloc
     port = url_scheme.port
     is_secure = (url_scheme.scheme == 'https')

          
@@ 25,4 28,4 @@ def url_to_base_client(url, filters=[], 
         if field == 'path' and res[field] == '':
             res[field] = '/'
     bc.url_data = res
-    return bc
  No newline at end of file
+    return bc