Python 3 upgrade finished

This commit is contained in:
Fergal Moran
2015-08-20 11:18:53 +01:00
parent 139e23d4c0
commit dfa82d4e76
132 changed files with 6399 additions and 6575 deletions

View File

@@ -11,11 +11,11 @@ from dss.storagesettings import AZURE_ACCOUNT_NAME, AZURE_ACCOUNT_KEY, AZURE_CON
 def upload_file_to_azure(in_file, file_name, container_name=settings.AZURE_CONTAINER):
     if os.path.isfile(in_file):
-        print "Uploading file for: %s" % in_file
+        print("Uploading file for: %s" % in_file)
         with open(in_file, 'rb') as iterator:
             return upload_stream_to_azure(iterator, file_name, container_name=container_name)
     else:
-        print "infile not found"
+        print("infile not found")
         return None
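
Nothing but the print statement changes in this hunk. Where a codebase has to run on both interpreters mid-migration, the same spelling can be made legal on Python 2.7 as well — a minimal sketch (the path is hypothetical):

    # Legal on Python 2.7 and Python 3 alike: print becomes a function.
    from __future__ import print_function

    print("Uploading file for: %s" % "/tmp/mix.mp3")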
@@ -42,20 +42,20 @@ def set_azure_details(blob_name, download_name, container_name=AZURE_CONTAINER):
             x_ms_blob_content_type='application/octet-stream',
             x_ms_blob_content_disposition='attachment;filename="{0}"'.format(download_name)
             )
-            print "Processed: %s" % download_name
+            print("Processed: %s" % download_name)
         else:
-            print "No blob found for: %s" % download_name
+            print("No blob found for: %s" % download_name)
     except WindowsAzureMissingResourceError:
-        print "No blob found for: %s" % download_name
-    except Exception, ex:
-        print "Error processing blob %s: %s" % (download_name, ex.message)
+        print("No blob found for: %s" % download_name)
+    except Exception as ex:
+        print("Error processing blob %s: %s" % (download_name, ex))
 
 
 def file_exists(url):
-    import httplib
-    from urlparse import urlparse
+    import http.client
+    from urllib.parse import urlparse
     p = urlparse(url)
-    c = httplib.HTTPConnection(p.netloc)
+    c = http.client.HTTPConnection(p.netloc)
     c.request("HEAD", p.path)
     r = c.getresponse()
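
The hunk window closes before file_exists returns anything. A complete Python 3 version, assuming a 2xx status on the HEAD request means the file exists, could read:

    import http.client
    from urllib.parse import urlparse

    def file_exists(url):
        # HEAD asks for headers only; no body is transferred.
        p = urlparse(url)
        conn_cls = http.client.HTTPSConnection if p.scheme == 'https' else http.client.HTTPConnection
        c = conn_cls(p.netloc)
        c.request("HEAD", p.path or "/")
        r = c.getresponse()
        c.close()
        return 200 <= r.status < 300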

View File

@@ -1,28 +1,28 @@
-from HTMLParser import HTMLParser
-
-class HTMLStripper(HTMLParser):
-    """
-    Class that cleans HTML, removing all tags and HTML entities.
-    """
-    def __init__(self):
-        self.reset()
-        self.fed = []
-
-    def handle_data(self, d):
-        self.fed.append(d)
-
-    def get_data(self):
-        return ''.join(self.fed)
-
-    def strip(self, d):
-        self.reset()
-        self.fed = []
-        self.feed(d)
-        return self.get_data().strip()
-
-def strip_tags(html):
-    s = HTMLStripper()
-    s.feed(html)
-    return s.get_data()
+from html.parser import HTMLParser
+
+class HTMLStripper(HTMLParser):
+    """
+    Class that cleans HTML, removing all tags and HTML entities.
+    """
+    def __init__(self):
+        # Python 3's HTMLParser must be initialised (it sets convert_charrefs,
+        # among other state); calling reset() alone breaks feed().
+        super().__init__()
+        self.fed = []
+
+    def handle_data(self, d):
+        self.fed.append(d)
+
+    def get_data(self):
+        return ''.join(self.fed)
+
+    def strip(self, d):
+        self.reset()
+        self.fed = []
+        self.feed(d)
+        return self.get_data().strip()
+
+def strip_tags(html):
+    s = HTMLStripper()
+    s.feed(html)
+    return s.get_data()
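
A quick sanity check of the migrated stripper, run in the same module:

    print(strip_tags("<p>Hello <b>world</b></p>"))   # -> Hello world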

View File

@@ -5,17 +5,17 @@
 import socket
 import json
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import getopt
 import sys
 
 def usage ():
-    print """usage:
+    print("""usage:
 -h host to get metadata from
 -m mount to get metadata from
 [-p port to get metadata from (default 8000)]
-[-u url to post metadata to as json]"""
+[-u url to post metadata to as json]""")
 
 try:
     optlist, cmdline = getopt.getopt(sys.argv[1:],'h:p:m:u:')
@@ -60,7 +60,7 @@ def get_data(host, port, mount):
     data = s.recv(1024).decode('utf-8', 'ignore')
     s.close()
     pdata = dict([d.split(':',1) for d in data.split('\r\n') if d.count("icy")])
-    if pdata.has_key("icy-br"):
+    if "icy-br" in pdata:
         return json.dumps(pdata)
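
Sockets hand back bytes under Python 3, so the decode is what makes the later '\r\n' splits legal (re-encoding to bytes afterwards would make them a TypeError). A self-contained sketch of this parsing step — the function name and request line are stand-ins:

    import json
    import socket

    def read_icy_headers(host, port):
        s = socket.create_connection((host, port))
        s.sendall(b"GET / HTTP/1.0\r\n\r\n")   # hypothetical request line
        raw = s.recv(1024)                      # bytes under Python 3
        s.close()
        text = raw.decode('utf-8', 'ignore')    # str: '\r\n' splits now work
        pdata = dict(line.split(':', 1)
                     for line in text.split('\r\n') if 'icy' in line)
        return json.dumps(pdata) if 'icy-br' in pdata else None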
@@ -69,12 +69,12 @@ def get_data(host, port, mount):
     jdata = get_data(host, port, mount)
     # skip empty crap
     if jdata:
-        print jdata
+        print(jdata)
         try:
             # this post is optional
-            req = urllib2.Request(posturl, data=jdata, headers={'Content-Type': 'application/json',
-                                                                'Referer': 'http://%s' % (host)})
-            r = urllib2.urlopen(req)
+            req = urllib.request.Request(posturl, data=jdata.encode('utf-8'),
+                                         headers={'Content-Type': 'application/json',
+                                                  'Referer': 'http://%s' % (host)})
+            r = urllib.request.urlopen(req)
         except NameError:
             pass
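
On Python 3 urlopen rejects a str body, hence the encode above. A self-contained sketch of the optional POST (post_metadata and its arguments are stand-ins):

    import json
    import urllib.request

    def post_metadata(posturl, pdata, host):
        body = json.dumps(pdata).encode('utf-8')   # bytes, as urlopen requires
        req = urllib.request.Request(
            posturl, data=body,
            headers={'Content-Type': 'application/json',
                     'Referer': 'http://%s' % host})
        with urllib.request.urlopen(req) as r:
            return r.status                        # 200 on success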

View File

@@ -1,44 +1,44 @@
-import logging
-import urllib2
-
-from bs4 import BeautifulSoup
-
-def _parseItem(soup, param):
-    try:
-        match = soup.find(text=param)
-        if match is not None:
-            return match.findNext('td').contents[0]
-    except Exception, ex:
-        logging.getLogger('core').exception("Error parsing ice stream details: " + ex.message)
-    return ""
-
-def get_server_details(server, port, mount):
-    server = "http://%s:%s/status.xsl?mount=/%s" % (server, port, mount)
-    print "Getting info for %s" % server
-    try:
-        response = urllib2.urlopen(server)
-        html = response.read()
-        if html:
-            soup = BeautifulSoup(html)
-            info = {
-                'stream_title': _parseItem(soup, "Stream Title:"),
-                'stream_description': _parseItem(soup, "Stream Description:"),
-                'content_type': _parseItem(soup, "Content Type:"),
-                'mount_started': _parseItem(soup, "Mount started:"),
-                'quality': _parseItem(soup, "Quality:"),
-                'current_listeners': _parseItem(soup, "Current Listeners:"),
-                'peak_listeners': _parseItem(soup, "Peak Listeners:"),
-                'stream_genre': _parseItem(soup, "Stream Genre:"),
-                'current_song': _parseItem(soup, "Current Song:")
-            }
-            return info
-        else:
-            print "Invalid content found"
-            return None
-    except urllib2.URLError:
-        return "Unknown stream %s" % server
-
-def get_now_playing(server, port, mount):
-    return get_server_details(server, port, mount)
+import logging
+import urllib.request, urllib.error, urllib.parse
+
+from bs4 import BeautifulSoup
+
+def _parseItem(soup, param):
+    try:
+        match = soup.find(text=param)
+        if match is not None:
+            return match.findNext('td').contents[0]
+    except Exception as ex:
+        # Exception.message no longer exists on Python 3; str(ex) does.
+        logging.getLogger('core').exception("Error parsing ice stream details: " + str(ex))
+    return ""
+
+def get_server_details(server, port, mount):
+    server = "http://%s:%s/status.xsl?mount=/%s" % (server, port, mount)
+    print("Getting info for %s" % server)
+    try:
+        response = urllib.request.urlopen(server)
+        html = response.read()
+        if html:
+            soup = BeautifulSoup(html)
+            info = {
+                'stream_title': _parseItem(soup, "Stream Title:"),
+                'stream_description': _parseItem(soup, "Stream Description:"),
+                'content_type': _parseItem(soup, "Content Type:"),
+                'mount_started': _parseItem(soup, "Mount started:"),
+                'quality': _parseItem(soup, "Quality:"),
+                'current_listeners': _parseItem(soup, "Current Listeners:"),
+                'peak_listeners': _parseItem(soup, "Peak Listeners:"),
+                'stream_genre': _parseItem(soup, "Stream Genre:"),
+                'current_song': _parseItem(soup, "Current Song:")
+            }
+            return info
+        else:
+            print("Invalid content found")
+            return None
+    except urllib.error.URLError:
+        return "Unknown stream %s" % server
+
+def get_now_playing(server, port, mount):
+    return get_server_details(server, port, mount)
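
Two Python 3 changes meet in _parseItem: the except syntax and the removal of Exception.message. A minimal self-contained illustration:

    import logging

    def risky():
        raise ValueError("boom")

    try:
        risky()
    except Exception as ex:   # 'except Exception, ex' is a SyntaxError on Python 3
        # ex.message is gone on Python 3; use str(ex) or %s formatting instead.
        logging.getLogger('core').exception("Error parsing ice stream details: %s", ex)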

View File

@@ -1,4 +1,4 @@
-import urlparse
+import urllib.parse
 import re
 
 from django.contrib.sites.models import Site
 from django.template.defaultfilters import slugify
@@ -8,19 +8,19 @@ __author__ = 'fergalm'
 def url_path_join(*parts):
     """Join and normalize url path parts with a slash."""
-    schemes, netlocs, paths, queries, fragments = zip(*(urlparse.urlsplit(part) for part in parts))
+    schemes, netlocs, paths, queries, fragments = list(zip(*(urllib.parse.urlsplit(part) for part in parts)))
     # Use the first value for everything but path. Join the path on '/'
     scheme = next((x for x in schemes if x), '')
     netloc = next((x for x in netlocs if x), '')
     path = '/'.join(x.strip('/') for x in paths if x)
     query = next((x for x in queries if x), '')
     fragment = next((x for x in fragments if x), '')
-    return urlparse.urlunsplit((scheme, netloc, path, query, fragment))
+    return urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))
 
 def urlclean(url):
     # remove double slashes
-    ret = urlparse.urljoin(url, urlparse.urlparse(url).path.replace('//', '/'))
+    ret = urllib.parse.urljoin(url, urllib.parse.urlparse(url).path.replace('//', '/'))
     return ret
@@ -94,7 +94,7 @@ def _slug_strip(value, separator='-'):
 def is_absolute(url):
-    return bool(urlparse.urlparse(url).scheme)
+    return bool(urllib.parse.urlparse(url).scheme)
 
 def wrap_full(url):
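
The helpers above only touch urllib.parse, so they can be exercised without the Django imports; the expected values follow from urlsplit/urlunsplit (example URLs are hypothetical):

    print(url_path_join('http://example.com/', '/mixes/', 'deep-house'))
    # -> http://example.com/mixes/deep-house
    print(is_absolute('http://example.com/x'))   # True
    print(is_absolute('/relative/path'))         # False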

View File

@@ -1,34 +1,34 @@
-import subprocess
-import traceback
-import uuid
-import os
-
-from dss import settings
-
-def generate_waveform(input_file, output_file):
-    try:
-        print "Starting decode : %s\n\tIn: %s\n\tOut: %s" % \
-              (settings.DSS_LAME_PATH, input_file, output_file)
-        convert_command = "%s %s -c 1 -t wav - | %s -w 1170 -h 140 -o %s /dev/stdin" % \
-                          (settings.DSS_LAME_PATH, input_file, settings.DSS_WAVE_PATH, output_file)
-        print "Convert command: %s" % convert_command
-        result = os.system(convert_command)
-        print result
-        if os.path.exists(output_file):
-            #crop the image as it looks nice with zoom
-            from PIL import Image
-            import glob
-            im = Image.open(output_file)
-            w, h = im.size
-            im.crop((0, 0, w, h / 2)).save(output_file)
-            return output_file
-        else:
-            print "Unable to find working file, did LAME succeed?"
-            return ""
-    except Exception, ex:
-        print "Error generating waveform %s" % (ex)
+import subprocess
+import traceback
+import uuid
+import os
+
+from dss import settings
+
+def generate_waveform(input_file, output_file):
+    try:
+        print("Starting decode : %s\n\tIn: %s\n\tOut: %s" %
+              (settings.DSS_LAME_PATH, input_file, output_file))
+        convert_command = "%s %s -c 1 -t wav - | %s -w 1170 -h 140 -o %s /dev/stdin" % \
+                          (settings.DSS_LAME_PATH, input_file, settings.DSS_WAVE_PATH, output_file)
+        print("Convert command: %s" % convert_command)
+        result = os.system(convert_command)
+        print(result)
+        if os.path.exists(output_file):
+            # crop the image as it looks nice with zoom
+            from PIL import Image
+            import glob
+            im = Image.open(output_file)
+            w, h = im.size
+            # h / 2 is float division on Python 3; crop wants integer pixels
+            im.crop((0, 0, w, h // 2)).save(output_file)
+            return output_file
+        else:
+            print("Unable to find working file, did LAME succeed?")
+            return ""
+    except Exception as ex:
+        print("Error generating waveform %s" % (ex))