From 261e5472021813c4ae6a184dbc0bc6b0baf9d8a3 Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Sun, 20 Sep 2015 17:25:00 +0100
Subject: [PATCH 1/8] Initial 10 second schedule

---
 dss/celeryconf.py | 10 ++++++++--
 spa/tasks.py      | 10 ++++++++++
 2 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/dss/celeryconf.py b/dss/celeryconf.py
index 8499c10..746d14e 100644
--- a/dss/celeryconf.py
+++ b/dss/celeryconf.py
@@ -1,9 +1,9 @@
-
-
 import os
 import logging
 
 from celery import Celery
+from celery.schedules import crontab
+from spa import tasks
 
 logger = logging.getLogger('dss')
@@ -17,3 +17,9 @@ app = Celery('dss')
 # pickle the object when using Windows.
 app.config_from_object('django.conf:settings')
 app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
+
+
+@app.on_after_configure.connect
+def setup_periodic_tasks(sender, **kwargs):
+    # Calls test('hello') every 10 seconds.
+    sender.add_periodic_task(10.0, tasks.play_pending_audio.s('hello'), name='add every 10')
diff --git a/spa/tasks.py b/spa/tasks.py
index f23ef2d..db53096 100755
--- a/spa/tasks.py
+++ b/spa/tasks.py
@@ -1,9 +1,11 @@
 from celery.task import task
 import os
 import logging
+import requests
 
 from core.realtime import activity
 from core.utils import cdn
+from spa.models import Mix
 from spa.signals import waveform_generated_signal
 
 try:
@@ -61,3 +63,11 @@ def update_geo_info_task(ip_address, profile_id):
 def notify_subscriber(session_id, uid):
     if session_id is not None:
         activity.post_activity('user:process', session_id, {'type': 'waveform', 'target': uid})
+
+
+@task
+def play_pending_audio():
+    m = Mix.objects.order_by('uid').first()
+    print("Playing: {}".format(m.title))
+    r = requests.post('http://localhost:8888/a/play', data={'audio_file:': m.get_stream_url()})
+    print(r.text)
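
Note on PATCH 1/8: the schedule is wired up through Celery's on_after_configure signal and add_periodic_task (the beat API introduced with Celery 4). The play_pending_audio task added to spa/tasks.py in this patch takes no arguments, so the .s('hello') signature passes an argument the task cannot accept, and the crontab import is unused. A minimal sketch of a matching registration, assuming the task stays argument-free; the schedule name is illustrative:

from celery import Celery
from spa import tasks

app = Celery('dss')


@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # Run spa.tasks.play_pending_audio every 10 seconds; the task takes no
    # arguments, so the signature is left empty.
    sender.add_periodic_task(
        10.0,
        tasks.play_pending_audio.s(),
        name='play-pending-audio-every-10s',  # illustrative name
    )
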
From d44671139154f5869612e9c6abee560e41f754ef Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Sun, 20 Sep 2015 19:30:16 +0100
Subject: [PATCH 2/8] Added celerybeat

---
 celerybeat-schedule               | Bin 0 -> 12747 bytes
 dss/__init__.py                   |   3 +-
 dss/{celeryconf.py => celery.py}  |  11 +++--
 dss/celerysettings.py             |  10 +++++
 dss/settings.py                   |   1 +
 spa/management/commands/backup.py |  71 ++++++++++++++++++++++++++----
 spa/tasks.py                      |   8 +++-
 7 files changed, 86 insertions(+), 18 deletions(-)
 create mode 100644 celerybeat-schedule
 rename dss/{celeryconf.py => celery.py} (85%)
 create mode 100644 dss/celerysettings.py

diff --git a/celerybeat-schedule b/celerybeat-schedule
new file mode 100644
index 0000000000000000000000000000000000000000..de739906d8e9c377aa89744f1655620aa00a4b45
Binary files /dev/null and b/celerybeat-schedule differ
diff --git a/dss/__init__.py b/dss/__init__.py
index 9f69479..7dec5b1 100755
--- a/dss/__init__.py
+++ b/dss/__init__.py
@@ -1,2 +1,3 @@
+from __future__ import absolute_import
 
-from .celeryconf import app as celery_app
+from .celery import app as celery_app
diff --git a/dss/celeryconf.py b/dss/celery.py
similarity index 85%
rename from dss/celeryconf.py
rename to dss/celery.py
index 746d14e..cc2aa5c 100644
--- a/dss/celeryconf.py
+++ b/dss/celery.py
@@ -1,16 +1,14 @@
+from __future__ import absolute_import
+
 import os
-import logging
 
 from celery import Celery
-from celery.schedules import crontab
-from spa import tasks
-
-logger = logging.getLogger('dss')
 
 # set the default Django settings module for the 'celery' program.
 os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dss.settings')
 
 from django.conf import settings
+
 app = Celery('dss')
 
 # Using a string here means the worker will not have to
@@ -18,8 +16,9 @@ app = Celery('dss')
 # pickle the object when using Windows.
 app.config_from_object('django.conf:settings')
 app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
-
+"""
 @app.on_after_configure.connect
 def setup_periodic_tasks(sender, **kwargs):
     # Calls test('hello') every 10 seconds.
     sender.add_periodic_task(10.0, tasks.play_pending_audio.s('hello'), name='add every 10')
+"""
diff --git a/dss/celerysettings.py b/dss/celerysettings.py
new file mode 100644
index 0000000..44d1c83
--- /dev/null
+++ b/dss/celerysettings.py
@@ -0,0 +1,10 @@
+from datetime import timedelta
+
+CELERYBEAT_SCHEDULE = {
+    'add-every-30-seconds': {
+        'task': 'spa.tasks.play_pending_audio',
+        'schedule': timedelta(seconds=10)
+    },
+}
+
+CELERY_TIMEZONE = 'UTC'
\ No newline at end of file
diff --git a/dss/settings.py b/dss/settings.py
index fba4276..5455376 100755
--- a/dss/settings.py
+++ b/dss/settings.py
@@ -12,6 +12,7 @@ from dss.storagesettings import *
 from dss.paymentsettings import *
 from dss.logsettings import *
 from dss.psa import *
+from dss.celerysettings import *
 
 DEVELOPMENT = DEBUG
 
diff --git a/spa/management/commands/backup.py b/spa/management/commands/backup.py
index 07af8b5..7567349 100644
--- a/spa/management/commands/backup.py
+++ b/spa/management/commands/backup.py
@@ -2,16 +2,55 @@ from gzip import GzipFile
 import subprocess
 from django.core.management.base import LabelCommand, CommandError
 from subprocess import Popen, PIPE, STDOUT
+from dropbox.rest import ErrorResponse
 import pexpect
 from dss import settings
 import tarfile
 import dropbox
 import os, time
+from dropbox.client import ChunkedUploader
+
+""" Monkey patch dropbox upload chunked """
+
+
+def __upload_chunked(self, chunk_size = 4 * 1024 * 1024):
+    """Uploads data from this ChunkedUploader's file_obj in chunks, until
+    an error occurs. Throws an exception when an error occurs, and can
+    be called again to resume the upload.
+
+    Parameters
+        chunk_size
+          The number of bytes to put in each chunk. (Default 4 MB.)
+    """
+
+    while self.offset < self.target_length:
+        next_chunk_size = min(chunk_size, self.target_length - self.offset)
+        if self.last_block == None:
+            self.last_block = self.file_obj.read(next_chunk_size)
+
+        try:
+            (self.offset, self.upload_id) = self.client.upload_chunk(
+                self.last_block, next_chunk_size, self.offset, self.upload_id)
+            self.last_block = None
+        except ErrorResponse as e:
+            # Handle the case where the server tells us our offset is wrong.
+            must_reraise = True
+            if e.status == 400:
+                reply = e.body
+                if "offset" in reply and reply['offset'] != 0 and reply['offset'] > self.offset:
+                    self.last_block = None
+                    self.offset = reply['offset']
+                    must_reraise = False
+            if must_reraise:
+                raise
+
+ChunkedUploader.upload_chunked = __upload_chunked
+
 
 def _backup_database():
     print("Creating database backup")
-    file_name = "{0}.sql".format(time.strftime("%Y%m%d-%H%M%S"))
+    file_name = "{}.sql".format(time.strftime("%Y%m%d-%H%M%S"))
     backup_file = os.path.join(settings.DSS_TEMP_PATH, file_name)
     print('Backing up {} database to {}'.format(settings.DATABASE_NAME, file_name))
@@ -25,13 +64,16 @@ def _backup_database():
     child.sendline(settings.DATABASE_PASSWORD)
     child.expect(pexpect.EOF)
 
-    _create_backup_bundle("{0}.tar.gz".format(file_name), 'database', backup_file)
+    zip_name = "{0}.tar.gz".format(file_name)
+    archive = _create_backup_bundle(zip_name, backup_file)
+    _upload_to_dropbox('database', archive, zip_name)
 
 
 def _backup_settings():
     print("Creating settings backup")
-    file_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S"))
-    _create_backup_bundle(file_name, 'settings', settings.PROJECT_ROOT)
+    zip_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S"))
+    tar_file = _create_backup_bundle(zip_name, settings.PROJECT_ROOT)
+    _upload_to_dropbox('settings', tar_file, "{}.tar.gz".format(zip_name))
 
 
 def _progress_filter(tarinfo):
     return tarinfo
 
 
-def _create_backup_bundle(remote_file, type, location):
+def _create_backup_bundle(remote_file, location):
     backup_file = "{0}/{1}".format(settings.DSS_TEMP_PATH, remote_file)
 
     tar = tarfile.open(backup_file, "w:gz")
     tar.add(location)
     tar.close()
-
-    _upload_to_dropbox(type, backup_file, remote_file)
+    return backup_file
 
 
 def _upload_to_dropbox(type, backup_file, remote_file):
@@ -57,7 +98,6 @@ def _upload_to_dropbox(type, backup_file, remote_file):
             response = client.put_file("{0}/{1}".format(type, remote_file), f, overwrite=True)
 
         os.remove(backup_file)
-        print(response)
     except Exception as ex:
         print(ex)
 
@@ -66,7 +106,20 @@ def _upload_to_dropbox(type, backup_file, remote_file):
 def _backup_media():
     print("Creating media backup")
     file_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S"))
-    _create_backup_bundle(file_name, 'media', settings.MEDIA_ROOT)
+    archive = _create_backup_bundle(file_name, settings.MEDIA_ROOT)
+
+    size = os.path.getsize(archive)
+    upload_file = open(archive, 'rb')
+
+    client = dropbox.client.DropboxClient(settings.DSS_DB_BACKUP_TOKEN)
+    uploader = client.get_chunked_uploader(upload_file, size)
+    while uploader.offset < size:
+        try:
+            upload = uploader.upload_chunked()
+        except Exception as e:
+            print("Error uploading: {0}".format(e))
+
+    uploader.finish('/media/{}'.format(file_name))
 
 
 class Command(LabelCommand):
diff --git a/spa/tasks.py b/spa/tasks.py
index db53096..5a7f739 100755
--- a/spa/tasks.py
+++ b/spa/tasks.py
@@ -1,6 +1,7 @@
 from celery.task import task
 import os
 import logging
+import json
 import requests
 
 from core.realtime import activity
@@ -67,7 +68,10 @@ def notify_subscriber(session_id, uid):
 
 @task
 def play_pending_audio():
-    m = Mix.objects.order_by('uid').first()
+    m = Mix.objects.order_by('?').first()
     print("Playing: {}".format(m.title))
-    r = requests.post('http://localhost:8888/a/play', data={'audio_file:': m.get_stream_url()})
+
+    data = {'audio_file': m.get_stream_url()}
+    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
+    r = requests.post('http://localhost:8888/a/play',
+                      data=json.dumps(data), headers=headers)
     print(r.text)
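
Note on PATCH 2/8: the CELERYBEAT_SCHEDULE entry fires every 10 seconds but keeps the key 'add-every-30-seconds', and play_pending_audio now POSTs JSON to the radio endpoint on localhost:8888. A minimal sketch of the same schedule entry with a key that matches the interval; only the key name is illustrative, the rest mirrors dss/celerysettings.py:

from datetime import timedelta

CELERYBEAT_SCHEDULE = {
    'play-pending-audio-every-10-seconds': {  # illustrative key
        'task': 'spa.tasks.play_pending_audio',
        'schedule': timedelta(seconds=10),
    },
}

CELERY_TIMEZONE = 'UTC'
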
From 8d9c2623c82ba3616174a1872487c79b8df62492 Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Fri, 25 Sep 2015 19:01:26 +0100
Subject: [PATCH 3/8] Added celery schedules

---
 celerybeat-schedule | Bin 12747 -> 12747 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/celerybeat-schedule b/celerybeat-schedule
index de739906d8e9c377aa89744f1655620aa00a4b45..cff0e3dd84c975aa41fe474aff70fddea5d04dc0 100644
Binary files a/celerybeat-schedule and b/celerybeat-schedule differ

From: Fergal Moran
Date: Fri, 25 Sep 2015 19:10:40 +0100
Subject: [PATCH 4/8] Some celery stuff

---
 .gitignore          |   1 +
 celerybeat-schedule | Bin 12747 -> 0 bytes
 dss/celeryconf.py   |  19 +++++++++++++++++++
 3 files changed, 20 insertions(+)
 delete mode 100644 celerybeat-schedule
 create mode 100644 dss/celeryconf.py

diff --git a/.gitignore b/.gitignore
index b68b21f..6eabc6c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,3 +33,4 @@ dsskeys
 reload
 reset
 __krud/
+celerybeat-schedule
diff --git a/celerybeat-schedule b/celerybeat-schedule
deleted file mode 100644
index cff0e3dd84c975aa41fe474aff70fddea5d04dc0..0000000000000000000000000000000000000000
Binary files a/celerybeat-schedule and /dev/null differ
diff --git a/dss/celeryconf.py b/dss/celeryconf.py
new file mode 100644
index 0000000..8499c10
--- /dev/null
+++ b/dss/celeryconf.py
@@ -0,0 +1,19 @@
+
+
+import os
+import logging
+
+from celery import Celery
+
+logger = logging.getLogger('dss')
+
+# set the default Django settings module for the 'celery' program.
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dss.settings')
+
+from django.conf import settings
+app = Celery('dss')
+
+# Using a string here means the worker will not have to
+# pickle the object when using Windows.
+app.config_from_object('django.conf:settings')
+app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

From 2b6ba4c11317fc2390f549ef0a5727ef9fabaf3d Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Sun, 8 Nov 2015 19:59:39 +0000
Subject: [PATCH 5/8] Settings

---
 .gitignore                        | 3 +--
 spa/management/commands/backup.py | 3 +++
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index b68b21f..4cfa1d2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,8 +11,7 @@ media/*
 build/*
 _working/*
 static/CACHE/*
-dss/localsettings.py
-dss/storagesettings.py
+dss/devsettings.py
 dss/celery_settings.py
 dss.conf
 dss/debugsettings.py
diff --git a/spa/management/commands/backup.py b/spa/management/commands/backup.py
index b07c28a..03e8b2b 100644
--- a/spa/management/commands/backup.py
+++ b/spa/management/commands/backup.py
@@ -3,6 +3,8 @@ import subprocess
 from django.core.management.base import LabelCommand, CommandError
 from subprocess import Popen, PIPE, STDOUT
 import pexpect
+from dropbox.client import ChunkedUploader
+
 from dss import settings
 import tarfile
 import dropbox
@@ -54,6 +56,7 @@ def _upload_to_dropbox(type, backup_file, remote_file):
     try:
         with open(backup_file, "rb") as f:
             client = dropbox.client.DropboxClient(settings.DSS_DB_BACKUP_TOKEN)
+            uploader = ChunkedUploader(client=client, file_obj=backup_file)
             response = client.put_file("{0}/{1}".format(type, remote_file), f, overwrite=True)
 
         os.remove(backup_file)
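
Note on PATCH 5/8: ChunkedUploader(client=client, file_obj=backup_file) hands the uploader the path string rather than an open file object, and the uploader is never driven or finished, so put_file still does all the work. A minimal sketch of the chunked flow using only the v1 SDK calls that appear later in the series; upload_large_file and its parameters are illustrative names:

import os

import dropbox


def upload_large_file(token, local_path, remote_path):
    client = dropbox.client.DropboxClient(token)
    size = os.path.getsize(local_path)
    with open(local_path, 'rb') as f:
        # get_chunked_uploader() expects an open file object and its length.
        uploader = client.get_chunked_uploader(f, size)
        while uploader.offset < size:
            uploader.upload_chunked()  # sends chunks until done or an error
        # finish() commits the uploaded chunks to a path in the Dropbox folder.
        uploader.finish(remote_path)
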
From 3981ce288cf51460d97a5400d494f094a159e6aa Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Sun, 8 Nov 2015 20:17:59 +0000
Subject: [PATCH 6/8] Altered backup

---
 spa/management/commands/backup.py | 87 ++++++++++++++++++++++++-------
 1 file changed, 68 insertions(+), 19 deletions(-)

diff --git a/spa/management/commands/backup.py b/spa/management/commands/backup.py
index 03e8b2b..f764b09 100644
--- a/spa/management/commands/backup.py
+++ b/spa/management/commands/backup.py
@@ -1,19 +1,55 @@
-from gzip import GzipFile
-import subprocess
-from django.core.management.base import LabelCommand, CommandError
-from subprocess import Popen, PIPE, STDOUT
+import os
+import tarfile
+import time
+
+import dropbox
 import pexpect
-from dropbox.client import ChunkedUploader
+from django.core.management.base import LabelCommand
+from dropbox.client import ChunkedUploader, DropboxClient
+from dropbox.rest import ErrorResponse
 
 from dss import settings
-import tarfile
-import dropbox
-import os, time
+
+""" Monkey patch dropbox upload chunked """
+
+
+def __upload_chunked(self, chunk_size = 4 * 1024 * 1024):
+    """Uploads data from this ChunkedUploader's file_obj in chunks, until
+    an error occurs. Throws an exception when an error occurs, and can
+    be called again to resume the upload.
+
+    Parameters
+        chunk_size
+          The number of bytes to put in each chunk. (Default 4 MB.)
+    """
+
+    while self.offset < self.target_length:
+        next_chunk_size = min(chunk_size, self.target_length - self.offset)
+        if self.last_block == None:
+            self.last_block = self.file_obj.read(next_chunk_size)
+
+        try:
+            (self.offset, self.upload_id) = self.client.upload_chunk(
+                self.last_block, next_chunk_size, self.offset, self.upload_id)
+            self.last_block = None
+        except ErrorResponse as e:
+            # Handle the case where the server tells us our offset is wrong.
+            must_reraise = True
+            if e.status == 400:
+                reply = e.body
+                if "offset" in reply and reply['offset'] != 0 and reply['offset'] > self.offset:
+                    self.last_block = None
+                    self.offset = reply['offset']
+                    must_reraise = False
+            if must_reraise:
+                raise
+
+ChunkedUploader.upload_chunked = __upload_chunked
 
 
 def _backup_database():
     print("Creating database backup")
-    file_name = "{0}.sql".format(time.strftime("%Y%m%d-%H%M%S"))
+    file_name = "{}.sql".format(time.strftime("%Y%m%d-%H%M%S"))
     backup_file = os.path.join(settings.DSS_TEMP_PATH, file_name)
     print('Backing up {} database to {}'.format(settings.DATABASE_NAME, file_name))
@@ -27,13 +63,16 @@ def _backup_database():
     child.sendline(settings.DATABASE_PASSWORD)
     child.expect(pexpect.EOF, timeout=120)
 
-    _create_backup_bundle("{0}.tar.gz".format(file_name), 'database', backup_file)
+    zip_name = "{0}.tar.gz".format(file_name)
+    archive = _create_backup_bundle(zip_name, backup_file)
+    _upload_to_dropbox('database', archive, zip_name)
 
 
 def _backup_settings():
     print("Creating settings backup")
-    file_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S"))
-    _create_backup_bundle(file_name, 'settings', settings.PROJECT_ROOT)
+    zip_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S"))
+    tar_file = _create_backup_bundle(zip_name, settings.PROJECT_ROOT)
+    _upload_to_dropbox('settings', tar_file, "{}.tar.gz".format(zip_name))
 
 
 def _progress_filter(tarinfo):
@@ -41,26 +80,23 @@ def _progress_filter(tarinfo):
     return tarinfo
 
 
-def _create_backup_bundle(remote_file, type, location):
+def _create_backup_bundle(remote_file, location):
     backup_file = "{0}/{1}".format(settings.DSS_TEMP_PATH, remote_file)
 
     tar = tarfile.open(backup_file, "w:gz")
     tar.add(location)
     tar.close()
-
-    _upload_to_dropbox(type, backup_file, remote_file)
+    return backup_file
 
 
 def _upload_to_dropbox(type, backup_file, remote_file):
     print("Uploading {0} to dropbox".format(backup_file))
     try:
         with open(backup_file, "rb") as f:
-            client = dropbox.client.DropboxClient(settings.DSS_DB_BACKUP_TOKEN)
-            uploader = ChunkedUploader(client=client, file_obj=backup_file)
+            client = dropbox.Dropbox(settings.DSS_DB_BACKUP_TOKEN)
             response = client.put_file("{0}/{1}".format(type, remote_file), f, overwrite=True)
 
         os.remove(backup_file)
-        print(response)
     except Exception as ex:
         print(ex)
 
@@ -69,7 +105,20 @@ def _upload_to_dropbox(type, backup_file, remote_file):
 def _backup_media():
     print("Creating media backup")
     file_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S"))
-    _create_backup_bundle(file_name, 'media', settings.MEDIA_ROOT)
+    archive = _create_backup_bundle(file_name, settings.MEDIA_ROOT)
+
+    size = os.path.getsize(archive)
+    upload_file = open(archive, 'rb')
+
+    client = dropbox.client.DropboxClient(settings.DSS_DB_BACKUP_TOKEN)
+    uploader = client.get_chunked_uploader(upload_file, size)
+    while uploader.offset < size:
+        try:
+            upload = uploader.upload_chunked()
+        except Exception as e:
+            print("Error uploading: {0}".format(e))
+
+    uploader.finish('/media/{}'.format(file_name))
 
 
 class Command(LabelCommand):
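
Note on PATCH 6/8: _upload_to_dropbox now builds a v2 dropbox.Dropbox client but still calls put_file, which belongs to the v1 DropboxClient (PATCH 7/8 reverts this), and the while uploader.offset < size loop in _backup_media can spin indefinitely if upload_chunked keeps raising. A minimal sketch of the same loop with a bounded retry; max_retries and upload_media_archive are illustrative, not part of the patch:

import os

from dropbox.rest import ErrorResponse


def upload_media_archive(client, archive, remote_path, max_retries=5):
    size = os.path.getsize(archive)
    retries = 0
    with open(archive, 'rb') as upload_file:
        uploader = client.get_chunked_uploader(upload_file, size)
        while uploader.offset < size:
            try:
                uploader.upload_chunked()
            except ErrorResponse as e:
                retries += 1
                if retries > max_retries:
                    raise
                print("Error uploading, retrying: {0}".format(e))
        uploader.finish(remote_path)
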
From 73b028e7f0201af5329443966b94241a6a75959d Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Sun, 8 Nov 2015 20:22:26 +0000
Subject: [PATCH 7/8] Fixed chunked backup

---
 dss/localsettings.py              | 55 +++++++++++++++++++++++++++++
 dss/storagesettings.py            |  6 ++++
 requirements.txt                  |  2 +-
 spa/management/commands/backup.py |  4 +--
 4 files changed, 64 insertions(+), 3 deletions(-)
 create mode 100644 dss/localsettings.py
 create mode 100644 dss/storagesettings.py

diff --git a/dss/localsettings.py b/dss/localsettings.py
new file mode 100644
index 0000000..4191807
--- /dev/null
+++ b/dss/localsettings.py
@@ -0,0 +1,55 @@
+import os
+from dss import devsettings
+
+DEBUG = True
+DSS_TEMP_PATH = os.environ.get('DSS_TEMP_PATH', '/tmp/')
+DSS_LAME_PATH = os.environ.get('DSS_LAME_PATH', '/usr/bin/sox')
+DSS_WAVE_PATH = os.environ.get('DSS_WAVE_PATH',
+                               '/home/fergalm/Dropbox/development/deepsouthsounds.com/dss.lib/wav2png/bin/Linux/wav2png')
+GEOIP_PATH = os.environ.get('GEOIP_PATH', '/home/fergalm/Dropbox/Private/deepsouthsounds.com/working/geolite')
+
+DATABASE_PASSWORD = os.environ.get('DATABASE_PASSWORD', 'deepsouthsounds')
+DATABASE_NAME = os.environ.get('DATABASE_NAME', 'deepsouthsounds')
+DATABASE_USER = os.environ.get('DATABASE_USER', 'deepsouthsounds')
+DATABASE_HOST = os.environ.get('DATABASE_HOST', 'localhost')
+
+STATIC_URL = '/assets/'
+MEDIA_ROOT = os.environ.get('MEDIA_ROOT', '/mnt/dev/deepsouthsounds.com/media')
+STATIC_ROOT = os.environ.get('STATIC_ROOT', '/home/fergalm/Dropbox/development/deepsouthsounds.com/cache/static')
+CACHE_ROOT = os.environ.get('CACHE_ROOT', '/mnt/dev/deepsouthsounds.com/cache')
+
+MEDIA_URL = os.environ.get('MEDIA_URL', 'http://localhost/DSSMedia/')  # '{0}media/'.format(CDN_URL)
+
+REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost')
+BROKER_URL = os.environ.get('BROKER_URL', 'amqp://guest:guest@localhost:5672//')
+CELERY_ACCEPT_CONTENT = ['pickle', 'msgpack', 'json']
+
+SECRET_KEY = os.environ.get('SECRET_KEY', devsettings.SECRET_KEY)
+LIVE_ENABLED = os.environ.get('LIVE_ENABLED', False)
+
+ICE_HOST = os.environ.get('ICE_HOST', 'localhost')
+ICE_MOUNT = os.environ.get('ICE_MOUNT =', 'dss')
+ICE_PORT = os.environ.get('ICE_PORT', 8000)
+
+RADIO_HOST = os.environ.get('RADIO_HOST', 'localhost')
+RADIO_PORT = os.environ.get('RADIO_PORT', 8888)
+
+MANDRILL_API_KEY = os.environ.get('MANDRILL_API_KEY', '')
+
+SOCIAL_AUTH_FACEBOOK_KEY = os.environ.get('SOCIAL_AUTH_FACEBOOK_KEY', devsettings.SOCIAL_AUTH_FACEBOOK_KEY)
+SOCIAL_AUTH_FACEBOOK_SECRET = os.environ.get('SOCIAL_AUTH_FACEBOOK_SECRET', devsettings.SOCIAL_AUTH_FACEBOOK_SECRET)
+
+SOCIAL_AUTH_TWITTER_KEY = os.environ.get('SOCIAL_AUTH_TWITTER_KEY', devsettings.SOCIAL_AUTH_TWITTER_KEY)
+SOCIAL_AUTH_TWITTER_SECRET = os.environ.get('SOCIAL_AUTH_TWITTER_SECRET', devsettings.SOCIAL_AUTH_TWITTER_SECRET)
+
+SOCIAL_AUTH_GOOGLE_OAUTH_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH_KEY', devsettings.SOCIAL_AUTH_GOOGLE_OAUTH_KEY)
+SOCIAL_AUTH_GOOGLE_OAUTH_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH_SECRET', devsettings.SOCIAL_AUTH_GOOGLE_OAUTH_SECRET)
+
+SOCIAL_AUTH_GOOGLE_PLUS_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_PLUS_KEY', devsettings.SOCIAL_AUTH_GOOGLE_PLUS_KEY)
+SOCIAL_AUTH_GOOGLE_PLUS_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_PLUS_SECRET', devsettings.SOCIAL_AUTH_GOOGLE_PLUS_SECRET)
+
+DSS_DB_BACKUP_KEY = os.environ.get('DSS_DB_BACKUP_KEY', devsettings.DSS_DB_BACKUP_KEY)
+DSS_DB_BACKUP_SECRET = os.environ.get('DSS_DB_BACKUP_SECRET', devsettings.DSS_DB_BACKUP_SECRET)
+DSS_DB_BACKUP_TOKEN = os.environ.get('DSS_DB_BACKUP_TOKEN', devsettings.DSS_DB_BACKUP_TOKEN)
+
+AZURE_ACCOUNT_KEY = os.environ.get('AZURE_ACCOUNT_KEY', devsettings.AZURE_ACCOUNT_KEY)
diff --git a/dss/storagesettings.py b/dss/storagesettings.py
new file mode 100644
index 0000000..2e30c6b
--- /dev/null
+++ b/dss/storagesettings.py
@@ -0,0 +1,6 @@
+from dss import localsettings
+
+AZURE_ACCOUNT_NAME = 'dsscdn'
+AZURE_CONTAINER = 'media'
+AZURE_ACCOUNT_KEY = localsettings.AZURE_ACCOUNT_KEY
+AZURE_ITEM_BASE_URL = 'https://dsscdn.blob.core.windows.net/'
diff --git a/requirements.txt b/requirements.txt
index ce09af2..2d69c2d 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ django-sendfile
 Werkzeug
 psycopg2
 gunicorn
-dropbox==2.2.0
+dropbox
 django-dirtyfields
 django-storages
 django-user-sessions
diff --git a/spa/management/commands/backup.py b/spa/management/commands/backup.py
index f764b09..3fd4017 100644
--- a/spa/management/commands/backup.py
+++ b/spa/management/commands/backup.py
@@ -5,7 +5,7 @@ import time
 import dropbox
 import pexpect
 from django.core.management.base import LabelCommand
-from dropbox.client import ChunkedUploader, DropboxClient
+from dropbox.client import ChunkedUploader
 from dropbox.rest import ErrorResponse
 
 from dss import settings
@@ -93,7 +93,7 @@ def _upload_to_dropbox(type, backup_file, remote_file):
     print("Uploading {0} to dropbox".format(backup_file))
     try:
         with open(backup_file, "rb") as f:
-            client = dropbox.Dropbox(settings.DSS_DB_BACKUP_TOKEN)
+            client = dropbox.client.DropboxClient(settings.DSS_DB_BACKUP_TOKEN)
             response = client.put_file("{0}/{1}".format(type, remote_file), f, overwrite=True)
 
         os.remove(backup_file)

From 02861ac79e97c28989889759f9cfd40befbc12d3 Mon Sep 17 00:00:00 2001
From: Fergal Moran
Date: Sun, 8 Nov 2015 20:25:41 +0000
Subject: [PATCH 8/8] Version bump

---
 dss/settings.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dss/settings.py b/dss/settings.py
index 2de0663..32a8ece 100755
--- a/dss/settings.py
+++ b/dss/settings.py
@@ -17,6 +17,7 @@ from dss.celerysettings import *
 
 DEVELOPMENT = DEBUG
 TEMPLATE_DEBUG = DEBUG
+VERSION = '2.13.01'
 
 ADMINS = (
     ('Fergal Moran', 'fergal.moran@gmail.com'),