diff --git a/.gitignore b/.gitignore index 1ec4988..d0582bc 100644 --- a/.gitignore +++ b/.gitignore @@ -11,8 +11,7 @@ media/* build/* _working/* static/CACHE/* -dss/localsettings.py.dev -dss/localsettings.py.docker +dss/devsettings.py dss/celery_settings.py dss.conf dss/debugsettings.py @@ -33,3 +33,4 @@ dsskeys reload reset __krud/ +celerybeat-schedule diff --git a/dss/__init__.py b/dss/__init__.py index 9f69479..7dec5b1 100755 --- a/dss/__init__.py +++ b/dss/__init__.py @@ -1,2 +1,3 @@ +from __future__ import absolute_import -from .celeryconf import app as celery_app +from .celery import app as celery_app diff --git a/dss/celery.py b/dss/celery.py new file mode 100644 index 0000000..cc2aa5c --- /dev/null +++ b/dss/celery.py @@ -0,0 +1,24 @@ +from __future__ import absolute_import + +import os + +from celery import Celery + +# set the default Django settings module for the 'celery' program. +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dss.settings') + +from django.conf import settings + +app = Celery('dss') + +# Using a string here means the worker will not have to +# pickle the object when using Windows. +app.config_from_object('django.conf:settings') +app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) + +""" +@app.on_after_configure.connect +def setup_periodic_tasks(sender, **kwargs): + # Calls test('hello') every 10 seconds. 
+ sender.add_periodic_task(10.0, tasks.play_pending_audio.s('hello'), name='add every 10') +""" diff --git a/dss/celerysettings.py b/dss/celerysettings.py new file mode 100644 index 0000000..44d1c83 --- /dev/null +++ b/dss/celerysettings.py @@ -0,0 +1,10 @@ +from datetime import timedelta + +CELERYBEAT_SCHEDULE = { + 'play-pending-audio-every-10-seconds': { + 'task': 'spa.tasks.play_pending_audio', + 'schedule': timedelta(seconds=10) + }, +} + +CELERY_TIMEZONE = 'UTC' \ No newline at end of file diff --git a/dss/localsettings.py b/dss/localsettings.py index 9c82de2..16909b8 100755 --- a/dss/localsettings.py +++ b/dss/localsettings.py @@ -1,4 +1,5 @@ import os +from dss import devsettings DEBUG = True DSS_TEMP_PATH = os.environ.get('DSS_TEMP_PATH', '/tmp/') @@ -13,17 +14,18 @@ DATABASE_USER = os.environ.get('DATABASE_USER', 'deepsouthsounds') DATABASE_HOST = os.environ.get('DATABASE_HOST', 'localhost') STATIC_URL = '/assets/' -MEDIA_ROOT = os.environ.get('MEDIA_ROOT', '/files/media') -STATIC_ROOT = os.environ.get('STATIC_ROOT', '/files/static') -CACHE_ROOT = os.environ.get('CACHE_ROOT', '/files/cache') -MEDIA_URL = os.environ.get('MEDIA_URL', 'http://deepsouthsounds.com/media/') # '{0}media/'.format(CDN_URL) +MEDIA_ROOT = os.environ.get('MEDIA_ROOT', '/mnt/dev/deepsouthsounds.com/media') +STATIC_ROOT = os.environ.get('STATIC_ROOT', '/home/fergalm/Dropbox/development/deepsouthsounds.com/cache/static') +CACHE_ROOT = os.environ.get('CACHE_ROOT', '/mnt/dev/deepsouthsounds.com/cache') + +MEDIA_URL = os.environ.get('MEDIA_URL', 'http://localhost/DSSMedia/') # '{0}media/'.format(CDN_URL) REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost') BROKER_URL = os.environ.get('BROKER_URL', 'amqp://guest:guest@localhost:5672//') CELERY_ACCEPT_CONTENT = ['pickle', 'msgpack', 'json'] -SECRET_KEY = os.environ.get('SECRET_KEY', 'AAA') +SECRET_KEY = os.environ.get('SECRET_KEY', devsettings.SECRET_KEY) LIVE_ENABLED = os.environ.get('LIVE_ENABLED', False) ICE_HOST = os.environ.get('ICE_HOST', 
'localhost') @@ -35,20 +37,20 @@ RADIO_PORT = os.environ.get('RADIO_PORT', 8888) MANDRILL_API_KEY = os.environ.get('MANDRILL_API_KEY', '') -SOCIAL_AUTH_FACEBOOK_KEY = os.environ.get('SOCIAL_AUTH_FACEBOOK_KEY', '') -SOCIAL_AUTH_FACEBOOK_SECRET = os.environ.get('SOCIAL_AUTH_FACEBOOK_SECRET', '') +SOCIAL_AUTH_FACEBOOK_KEY = os.environ.get('SOCIAL_AUTH_FACEBOOK_KEY', devsettings.SOCIAL_AUTH_FACEBOOK_KEY) +SOCIAL_AUTH_FACEBOOK_SECRET = os.environ.get('SOCIAL_AUTH_FACEBOOK_SECRET', devsettings.SOCIAL_AUTH_FACEBOOK_SECRET) -SOCIAL_AUTH_TWITTER_KEY = os.environ.get('SOCIAL_AUTH_TWITTER_KEY', '') -SOCIAL_AUTH_TWITTER_SECRET = os.environ.get('SOCIAL_AUTH_TWITTER_SECRET', '') +SOCIAL_AUTH_TWITTER_KEY = os.environ.get('SOCIAL_AUTH_TWITTER_KEY', devsettings.SOCIAL_AUTH_TWITTER_KEY) +SOCIAL_AUTH_TWITTER_SECRET = os.environ.get('SOCIAL_AUTH_TWITTER_SECRET', devsettings.SOCIAL_AUTH_TWITTER_SECRET) -SOCIAL_AUTH_GOOGLE_OAUTH_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH_KEY', '') -SOCIAL_AUTH_GOOGLE_OAUTH_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH_SECRET', '') +SOCIAL_AUTH_GOOGLE_OAUTH_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH_KEY', devsettings.SOCIAL_AUTH_GOOGLE_OAUTH_KEY) +SOCIAL_AUTH_GOOGLE_OAUTH_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_OAUTH_SECRET', devsettings.SOCIAL_AUTH_GOOGLE_OAUTH_SECRET) -SOCIAL_AUTH_GOOGLE_PLUS_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_PLUS_KEY', '') -SOCIAL_AUTH_GOOGLE_PLUS_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_PLUS_SECRET', '') +SOCIAL_AUTH_GOOGLE_PLUS_KEY = os.environ.get('SOCIAL_AUTH_GOOGLE_PLUS_KEY', devsettings.SOCIAL_AUTH_GOOGLE_PLUS_KEY) +SOCIAL_AUTH_GOOGLE_PLUS_SECRET = os.environ.get('SOCIAL_AUTH_GOOGLE_PLUS_SECRET', devsettings.SOCIAL_AUTH_GOOGLE_PLUS_SECRET) -DSS_DB_BACKUP_KEY = os.environ.get('DSS_DB_BACKUP_KEY', '') -DSS_DB_BACKUP_SECRET = os.environ.get('DSS_DB_BACKUP_SECRET', '') -DSS_DB_BACKUP_TOKEN = os.environ.get('DSS_DB_BACKUP_TOKEN', '') +DSS_DB_BACKUP_KEY = os.environ.get('DSS_DB_BACKUP_KEY', 
devsettings.DSS_DB_BACKUP_KEY) +DSS_DB_BACKUP_SECRET = os.environ.get('DSS_DB_BACKUP_SECRET', devsettings.DSS_DB_BACKUP_SECRET) +DSS_DB_BACKUP_TOKEN = os.environ.get('DSS_DB_BACKUP_TOKEN', devsettings.DSS_DB_BACKUP_TOKEN) -AZURE_ACCOUNT_KEY = os.environ.get('AZURE_ACCOUNT_KEY', '') +AZURE_ACCOUNT_KEY = os.environ.get('AZURE_ACCOUNT_KEY', devsettings.AZURE_ACCOUNT_KEY) diff --git a/dss/settings.py b/dss/settings.py index 099c0b5..539cfb8 100755 --- a/dss/settings.py +++ b/dss/settings.py @@ -13,10 +13,12 @@ from dss.storagesettings import * from dss.paymentsettings import * from dss.logsettings import * from dss.psa import * +from dss.celerysettings import * DEVELOPMENT = DEBUG TEMPLATE_DEBUG = DEBUG +VERSION = '2.13.01' ADMINS = ( ('Fergal Moran', 'fergal.moran@gmail.com'), diff --git a/requirements.txt b/requirements.txt index ce09af2..2d69c2d 100755 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ django-sendfile Werkzeug psycopg2 gunicorn -dropbox==2.2.0 +dropbox django-dirtyfields django-storages django-user-sessions diff --git a/spa/management/commands/backup.py b/spa/management/commands/backup.py index b07c28a..1fd999b 100644 --- a/spa/management/commands/backup.py +++ b/spa/management/commands/backup.py @@ -1,17 +1,93 @@ -from gzip import GzipFile -import subprocess -from django.core.management.base import LabelCommand, CommandError -from subprocess import Popen, PIPE, STDOUT -import pexpect -from dss import settings +import os import tarfile +import time + import dropbox -import os, time +import pexpect +from django.core.management.base import LabelCommand +from dropbox.client import ChunkedUploader +from dropbox.rest import ErrorResponse + +from dss import settings + +""" Monkey patch dropbox upload chunked """ + + +def __upload_chunked(self, chunk_size = 4 * 1024 * 1024): + """Uploads data from this ChunkedUploader's file_obj in chunks, until + an error occurs. 
Throws an exception when an error occurs, and can + be called again to resume the upload. + + Parameters + chunk_size + The number of bytes to put in each chunk. (Default 4 MB.) + """ + + while self.offset < self.target_length: + next_chunk_size = min(chunk_size, self.target_length - self.offset) + if self.last_block == None: + self.last_block = self.file_obj.read(next_chunk_size) + + try: + (self.offset, self.upload_id) = self.client.upload_chunk( + self.last_block, next_chunk_size, self.offset, self.upload_id) + self.last_block = None + except ErrorResponse as e: + # Handle the case where the server tells us our offset is wrong. + must_reraise = True + if e.status == 400: + reply = e.body + if "offset" in reply and reply['offset'] != 0 and reply['offset'] > self.offset: + self.last_block = None + self.offset = reply['offset'] + must_reraise = False + if must_reraise: + raise + +ChunkedUploader.upload_chunked = __upload_chunked + +from dropbox.client import ChunkedUploader + +""" Monkey patch dropbox upload chunked """ + + +def __upload_chunked(self, chunk_size = 4 * 1024 * 1024): + """Uploads data from this ChunkedUploader's file_obj in chunks, until + an error occurs. Throws an exception when an error occurs, and can + be called again to resume the upload. + + Parameters + chunk_size + The number of bytes to put in each chunk. (Default 4 MB.) + """ + + while self.offset < self.target_length: + next_chunk_size = min(chunk_size, self.target_length - self.offset) + if self.last_block == None: + self.last_block = self.file_obj.read(next_chunk_size) + + try: + (self.offset, self.upload_id) = self.client.upload_chunk( + self.last_block, next_chunk_size, self.offset, self.upload_id) + self.last_block = None + except ErrorResponse as e: + # Handle the case where the server tells us our offset is wrong. 
+ must_reraise = True + if e.status == 400: + reply = e.body + if "offset" in reply and reply['offset'] != 0 and reply['offset'] > self.offset: + self.last_block = None + self.offset = reply['offset'] + must_reraise = False + if must_reraise: + raise + +ChunkedUploader.upload_chunked = __upload_chunked def _backup_database(): print("Creating database backup") - file_name = "{0}.sql".format(time.strftime("%Y%m%d-%H%M%S")) + file_name = "{}.sql".format(time.strftime("%Y%m%d-%H%M%S")) backup_file = os.path.join(settings.DSS_TEMP_PATH, file_name) print('Backing up {} database to {}'.format(settings.DATABASE_NAME, file_name)) @@ -25,13 +101,16 @@ def _backup_database(): child.sendline(settings.DATABASE_PASSWORD) child.expect(pexpect.EOF, timeout=120) - _create_backup_bundle("{0}.tar.gz".format(file_name), 'database', backup_file) + zip_name = "{0}.tar.gz".format(file_name) + archive = _create_backup_bundle(zip_name, backup_file) + _upload_to_dropbox('database', archive, zip_name) def _backup_settings(): print("Creating settings backup") - file_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S")) - _create_backup_bundle(file_name, 'settings', settings.PROJECT_ROOT) + zip_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S")) + tar_file = _create_backup_bundle(zip_name, settings.PROJECT_ROOT) + _upload_to_dropbox('settings', tar_file, "{}.tar.gz".format(zip_name)) def _progress_filter(tarinfo): @@ -39,14 +118,13 @@ def _progress_filter(tarinfo): return tarinfo -def _create_backup_bundle(remote_file, type, location): +def _create_backup_bundle(remote_file, location): backup_file = "{0}/{1}".format(settings.DSS_TEMP_PATH, remote_file) tar = tarfile.open(backup_file, "w:gz") tar.add(location) tar.close() - - _upload_to_dropbox(type, backup_file, remote_file) + return backup_file def _upload_to_dropbox(type, backup_file, remote_file): @@ -57,7 +135,6 @@ def _upload_to_dropbox(type, backup_file, remote_file): response = client.put_file("{0}/{1}".format(type, 
remote_file), f, overwrite=True) os.remove(backup_file) - print(response) except Exception as ex: print(ex) @@ -66,7 +143,20 @@ def _upload_to_dropbox(type, backup_file, remote_file): def _backup_media(): print("Creating media backup") file_name = "{0}.tar.gz".format(time.strftime("%Y%m%d-%H%M%S")) - _create_backup_bundle(file_name, 'media', settings.MEDIA_ROOT) + archive = _create_backup_bundle(file_name, settings.MEDIA_ROOT) + + size = os.path.getsize(archive) + upload_file = open(archive, 'rb') + + client = dropbox.client.DropboxClient(settings.DSS_DB_BACKUP_TOKEN) + uploader = client.get_chunked_uploader(upload_file, size) + while uploader.offset < size: + try: + upload = uploader.upload_chunked() + except Exception as e: + print("Error uploading: {0}".format(e)) + + uploader.finish('/media/{}'.format(file_name)) class Command(LabelCommand): diff --git a/spa/tasks.py b/spa/tasks.py index 969e0f1..5a7f739 100755 --- a/spa/tasks.py +++ b/spa/tasks.py @@ -1,9 +1,12 @@ from celery.task import task import os import logging +import json +import requests from core.realtime import activity from core.utils import cdn +from spa.models import Mix from spa.signals import waveform_generated_signal try: @@ -60,7 +63,15 @@ def update_geo_info_task(ip_address, profile_id): @task def notify_subscriber(session_id, uid): if session_id is not None: - activity.post_activity( - channel='user:process', - message={'type': 'waveform', 'target': uid}, - session=session_id) + activity.post_activity('user:process', session_id, {'type': 'waveform', 'target': uid}) + + +@task +def play_pending_audio(): + m = Mix.objects.order_by('?').first() + print("Playing: {}".format(m.title)) + + data = {'audio_file': m.get_stream_url()} + headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} + r = requests.post('http://localhost:8888/a/play', data=json.dumps(data), headers=headers) + print(r.text)