diff --git a/configs/actions/acquire_iq_700MHz_P-SafetyNB_DL.yml b/configs/actions/acquire_iq_700MHz_P-SafetyNB_DL.yml index b019065a..83eebeb6 100644 --- a/configs/actions/acquire_iq_700MHz_P-SafetyNB_DL.yml +++ b/configs/actions/acquire_iq_700MHz_P-SafetyNB_DL.yml @@ -7,4 +7,4 @@ stepped_frequency_time_domain_iq: sample_rates: - 15.36e6 durations_ms: - - 1000 # 10000 \ No newline at end of file + - 10000 \ No newline at end of file diff --git a/configs/actions/acquire_iq_700MHz_P-SafetyNB_UL.yml b/configs/actions/acquire_iq_700MHz_P-SafetyNB_UL.yml index 8130bacc..8f08d87f 100644 --- a/configs/actions/acquire_iq_700MHz_P-SafetyNB_UL.yml +++ b/configs/actions/acquire_iq_700MHz_P-SafetyNB_UL.yml @@ -7,4 +7,4 @@ stepped_frequency_time_domain_iq: sample_rates: - 15.36e6 durations_ms: - - 1000 # 10000 \ No newline at end of file + - 10000 \ No newline at end of file diff --git a/configs/actions/survey_700MHz_band_iq.yml b/configs/actions/survey_700MHz_band_iq.yml index 1e7905c5..cbef5e9b 100644 --- a/configs/actions/survey_700MHz_band_iq.yml +++ b/configs/actions/survey_700MHz_band_iq.yml @@ -40,7 +40,7 @@ stepped_frequency_time_domain_iq: - 1000 - 1000 - 1000 - - 1000 # 10000 + - 10000 - 1000 - 1000 - - 1000 # 10000 + - 10000 diff --git a/docker-compose.yml b/docker-compose.yml index bd721eb2..e2b1adad 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -42,6 +42,7 @@ services: - MOCK_RADIO - MOCK_RADIO_RANDOM - CALLBACK_SSL_VERIFICATION + - MAX_TASK_RESULTS expose: - '8000' volumes: diff --git a/src/actions/acquire_single_freq_fft.py b/src/actions/acquire_single_freq_fft.py index 2fb83d63..16b9e92e 100644 --- a/src/actions/acquire_single_freq_fft.py +++ b/src/actions/acquire_single_freq_fft.py @@ -90,6 +90,8 @@ from .base import Action +from django.core.files.base import ContentFile + logger = logging.getLogger(__name__) GLOBAL_INFO = { @@ -301,9 +303,17 @@ def archive(self, task_result, m4s_data, sigmf_md): logger.debug("Storing acquisition in database") - 
Acquisition( - task_result=task_result, metadata=sigmf_md._metadata, data=m4s_data - ).save() + name = ( + task_result.schedule_entry.name + + "_" + + str(task_result.task_id) + + ".sigmf-data" + ) + + acquisition = Acquisition(task_result=task_result, metadata=sigmf_md._metadata) + acquisition.data.save(name, ContentFile(m4s_data)) + acquisition.save() + logger.debug("Saved new file at {}".format(acquisition.data.path)) @property def description(self): diff --git a/src/actions/acquire_stepped_freq_tdomain_iq.py b/src/actions/acquire_stepped_freq_tdomain_iq.py index 0bd02add..5f3a9536 100644 --- a/src/actions/acquire_stepped_freq_tdomain_iq.py +++ b/src/actions/acquire_stepped_freq_tdomain_iq.py @@ -55,6 +55,8 @@ from .base import Action +from django.core.files.base import ContentFile + logger = logging.getLogger(__name__) GLOBAL_INFO = { @@ -178,17 +180,28 @@ def configure_sdr(self, fc, gain, sample_rate, duration_ms): def set_sdr_sample_rate(self, sample_rate): self.sdr.radio.sample_rate = sample_rate - def archive(self, task_result, recording_id, m4s_data, sigmf_md): + def archive(self, task_result, recording_id, acq_data, sigmf_md): from tasks.models import Acquisition logger.debug("Storing acquisition in database") - Acquisition( + name = ( + task_result.schedule_entry.name + + "_" + + str(task_result.task_id) + + "_" + + str(recording_id) + + ".sigmf-data" + ) + + acquisition = Acquisition( task_result=task_result, recording_id=recording_id, metadata=sigmf_md._metadata, - data=m4s_data, - ).save() + ) + acquisition.data.save(name, ContentFile(acq_data)) + acquisition.save() + logger.debug("Saved new file at {}".format(acquisition.data.path)) @property def description(self): diff --git a/src/actions/tests/test_acquire_single_freq_fft.py b/src/actions/tests/test_acquire_single_freq_fft.py index 06dbbe09..4bbd9f7d 100644 --- a/src/actions/tests/test_acquire_single_freq_fft.py +++ b/src/actions/tests/test_acquire_single_freq_fft.py @@ -1,4 +1,5 @@ import json 
+import os from os import path from django.conf import settings @@ -18,7 +19,16 @@ def test_detector(user_client, test_scheduler): entry_name = simulate_acquisitions(user_client) tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1) - acquistion = Acquisition.objects.get(task_result=tr) - assert sigmf_validate(acquistion.metadata) + acquisition = Acquisition.objects.get(task_result=tr) + assert sigmf_validate(acquisition.metadata) # FIXME: update schema so that this passes # schema_validate(sigmf_metadata, schema) + + +def test_data_file_created(user_client, test_scheduler): + entry_name = simulate_acquisitions(user_client) + tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1) + acquisition = Acquisition.objects.get(task_result=tr) + assert acquisition.data + assert path.exists(acquisition.data.path) + os.remove(acquisition.data.path) diff --git a/src/actions/tests/test_stepped_freq_tdomain_iq.py b/src/actions/tests/test_stepped_freq_tdomain_iq.py new file mode 100644 index 00000000..58515ad7 --- /dev/null +++ b/src/actions/tests/test_stepped_freq_tdomain_iq.py @@ -0,0 +1,36 @@ +import json +import os +from os import path + +from django.conf import settings +from sigmf.validate import validate as sigmf_validate + +from tasks.models import Acquisition, TaskResult +from tasks.tests.utils import simulate_multirec_acquisition + +SCHEMA_DIR = path.join(settings.REPO_ROOT, "schemas") +SCHEMA_FNAME = "scos_transfer_spec_schema.json" +SCHEMA_PATH = path.join(SCHEMA_DIR, SCHEMA_FNAME) + +with open(SCHEMA_PATH, "r") as f: + schema = json.load(f) + + +def test_metadata(user_client, test_scheduler): + entry_name = simulate_multirec_acquisition(user_client) + tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1) + acquisitions = Acquisition.objects.filter(task_result=tr) + for acquisition in acquisitions: + assert sigmf_validate(acquisition.metadata) + # FIXME: update schema so that this passes + # 
schema_validate(sigmf_metadata, schema) + + +def test_data_file_created(user_client, test_scheduler): + entry_name = simulate_multirec_acquisition(user_client) + tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1) + acquisitions = Acquisition.objects.filter(task_result=tr) + for acquisition in acquisitions: + assert acquisition.data + assert path.exists(acquisition.data.path) + os.remove(acquisition.data.path) diff --git a/src/environment.yml b/src/environment.yml new file mode 100644 index 00000000..71174189 --- /dev/null +++ b/src/environment.yml @@ -0,0 +1,44 @@ +name: scos-sensor +channels: + - conda-forge +dependencies: + - python=3.6 + - black=18.9b0 + - Django=2.2.3 + - flake8=3.7.7 + - Pygments=2.4.2 + - Markdown=3.1.1 + - django-extensions=2.1.7 + - django-filter=2.1.0 + - djangorestframework=3.9.4 + - docker-compose=1.24.0 + - gunicorn=19.9.0 + - isort=4.3.20 + - jedi=0.13.3 + - jsonfield=2.0.2 + - jsonschema=3.0.1 + - mkdocs=1.0.4 + - numpy=1.16.4 + - pytest-cov=2.7.1 + - pytest-django=3.5.1 + - pytest-flake8=1.0.4 + - raven=6.10.0 + - requests-futures=0.9.9 + - requests-mock=1.6.0 + - ruamel.yaml=0.15.96 + - six=1.12.0 + - pip + - pip: + - coreapi==2.3.3 + - django-debug-toolbar==1.11 + - drf-yasg==1.15.0 + - -e git+https://github.com/NTIA/SigMF.git@multi-recording-archive#egg=SigMF + - environs==4.2.0 + - flake8-bugbear==19.3.0 + - pre-commit==1.16.1 + - psycopg2-binary==2.8.2 + - seed-isort-config==1.9.1 + - tox==3.12.1 + + + diff --git a/src/requirements.txt b/src/requirements.txt index a8d5b90c..17371b96 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -1,4 +1,4 @@ -Django==2.2.2 +Django==2.2.3 Pygments==2.4.2 Markdown==3.1.1 -e git+https://github.com/NTIA/SigMF.git@multi-recording-archive#egg=SigMF diff --git a/src/schedule/migrations/0002_auto_20190702_2113.py b/src/schedule/migrations/0002_auto_20190702_2113.py new file mode 100644 index 00000000..b2cbc729 --- /dev/null +++ 
b/src/schedule/migrations/0002_auto_20190702_2113.py @@ -0,0 +1,106 @@ +# Generated by Django 2.2.2 on 2019-07-02 21:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("schedule", "0001_initial")] + + operations = [ + migrations.AlterField( + model_name="scheduleentry", + name="action", + field=models.CharField( + choices=[ + ( + "acquire_iq_700MHz_ATT_DL", + "acquire_iq_700MHz_ATT_DL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_ATT_UL", + "acquire_iq_700MHz_ATT_UL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_FirstNet_DL", + "acquire_iq_700MHz_FirstNet_DL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_FirstNet_UL", + "acquire_iq_700MHz_FirstNet_UL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_P-SafetyNB_DL", + "acquire_iq_700MHz_P-SafetyNB_DL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_P-SafetyNB_UL", + "acquire_iq_700MHz_P-SafetyNB_UL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_T-Mobile_DL", + "acquire_iq_700MHz_T-Mobile_DL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_T-Mobile_UL", + "acquire_iq_700MHz_T-Mobile_UL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_Verizon_DL", + "acquire_iq_700MHz_Verizon_DL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_iq_700MHz_Verizon_UL", + "acquire_iq_700MHz_Verizon_UL - Capture time-domain IQ samples at 1 frequencies between", + ), + ( + "acquire_m4s_700MHz_ATT_DL", + "acquire_m4s_700MHz_ATT_DL - Apply m4s detector over 300 1024-pt FFTs at 739.00 MHz.", + ), + ( + "acquire_m4s_700MHz_ATT_UL", + "acquire_m4s_700MHz_ATT_UL - Apply m4s detector over 300 1024-pt FFTs at 709.00 MHz.", + ), 
+ ( + "acquire_m4s_700MHz_FirstNet_DL", + "acquire_m4s_700MHz_FirstNet_DL - Apply m4s detector over 300 1024-pt FFTs at 763.00 MHz.", + ), + ( + "acquire_m4s_700MHz_FirstNet_UL", + "acquire_m4s_700MHz_FirstNet_UL - Apply m4s detector over 300 1024-pt FFTs at 793.00 MHz.", + ), + ( + "acquire_m4s_700MHz_P-SafetyNB_DL", + "acquire_m4s_700MHz_P-SafetyNB_DL - Apply m4s detector over 300 512-pt FFTs at 772.00 MHz.", + ), + ( + "acquire_m4s_700MHz_P-SafetyNB_UL", + "acquire_m4s_700MHz_P-SafetyNB_UL - Apply m4s detector over 300 512-pt FFTs at 802.00 MHz.", + ), + ( + "acquire_m4s_700MHz_T-Mobile_DL", + "acquire_m4s_700MHz_T-Mobile_DL - Apply m4s detector over 300 512-pt FFTs at 731.50 MHz.", + ), + ( + "acquire_m4s_700MHz_T-Mobile_UL", + "acquire_m4s_700MHz_T-Mobile_UL - Apply m4s detector over 300 512-pt FFTs at 700.50 MHz.", + ), + ( + "acquire_m4s_700MHz_Verizon_DL", + "acquire_m4s_700MHz_Verizon_DL - Apply m4s detector over 300 1024-pt FFTs at 751.00 MHz.", + ), + ( + "acquire_m4s_700MHz_Verizon_UL", + "acquire_m4s_700MHz_Verizon_UL - Apply m4s detector over 300 1024-pt FFTs at 782.00 MHz.", + ), + ("logger", 'logger - Log the message "running test {name}/{tid}".'), + ( + "survey_700MHz_band_iq", + "survey_700MHz_band_iq - Capture time-domain IQ samples at 10 frequencies between", + ), + ], + help_text="[Required] The name of the action to be scheduled", + max_length=50, + ), + ) + ] diff --git a/src/sensor/settings.py b/src/sensor/settings.py index 661d5be7..8c947c11 100644 --- a/src/sensor/settings.py +++ b/src/sensor/settings.py @@ -59,6 +59,7 @@ CALIBRATION_FILE = path.join(CONFIG_DIR, "calibration.json") SENSOR_DEFINITION_FILE = path.join(CONFIG_DIR, "sensor_definition.json") ACTION_DEFINITIONS_DIR = path.join(CONFIG_DIR, "actions") +MEDIA_ROOT = path.join(REPO_ROOT, "files") # Cleanup any existing healtcheck files try: @@ -271,7 +272,7 @@ DATABASES["default"]["HOST"] = "localhost" # Ensure only the last MAX_TASK_RESULTS results are kept per schedule entry 
-MAX_TASK_RESULTS = 100 +MAX_TASK_RESULTS = env.int("MAX_TASK_RESULTS", default=100000) # Display at most MAX_TASK_QUEUE upcoming tasks in /tasks/upcoming MAX_TASK_QUEUE = 50 diff --git a/src/tasks/migrations/0002_auto_20190702_2113.py b/src/tasks/migrations/0002_auto_20190702_2113.py new file mode 100644 index 00000000..a2f27824 --- /dev/null +++ b/src/tasks/migrations/0002_auto_20190702_2113.py @@ -0,0 +1,16 @@ +# Generated by Django 2.2.2 on 2019-07-02 21:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [("tasks", "0001_initial")] + + operations = [ + migrations.AlterField( + model_name="acquisition", + name="data", + field=models.FileField(null=True, upload_to="blob/%Y/%m/%d/%H/%M/%S"), + ) + ] diff --git a/src/tasks/models/acquisition.py b/src/tasks/models/acquisition.py index be34dbcf..4b37c2d7 100644 --- a/src/tasks/models/acquisition.py +++ b/src/tasks/models/acquisition.py @@ -3,6 +3,8 @@ from .task_result import TaskResult +from django.db.models.signals import pre_delete + class Acquisition(models.Model): """The data and metadata associated with a task. 
@@ -26,7 +28,7 @@ class Acquisition(models.Model): default=0, help_text="The id of the recording relative to the task" ) metadata = JSONField(help_text="The sigmf meta data for the acquisition") - data = models.BinaryField(help_text="", null=True) + data = models.FileField(upload_to="blob/%Y/%m/%d/%H/%M/%S", null=True) class Meta: db_table = "acquisitions" @@ -39,3 +41,11 @@ def __str__(self): self.task_result.task_id, self.recording_id, ) + + +def clean_up_data(sender, **kwargs): + acq = kwargs["instance"] + acq.data.delete(save=False) + + +pre_delete.connect(clean_up_data, sender=Acquisition) diff --git a/src/tasks/tests/test_detail_view.py b/src/tasks/tests/test_detail_view.py index e9dbf0cc..e82cba14 100644 --- a/src/tasks/tests/test_detail_view.py +++ b/src/tasks/tests/test_detail_view.py @@ -8,6 +8,10 @@ update_result_detail, ) +from tasks.models import Acquisition, TaskResult + +import os + def test_user_can_create_nonprivate_acquisition(user_client, test_scheduler): entry_name = simulate_acquisitions(user_client) @@ -225,3 +229,21 @@ def test_admin_cant_modify_own_results(admin_client, test_scheduler): response = update_result_detail(admin_client, entry_name, 1, new_result_detail) validate_response(response, status.HTTP_405_METHOD_NOT_ALLOWED) + + +def test_deleted_result_deletes_data_file(user_client, test_scheduler): + """Deleting a task result also deletes its acquisition data file.""" + entry_name = simulate_acquisitions(user_client) + # schedule_entry = ScheduleEntry.objects.get(name=entry_name) + task_result = TaskResult.objects.get(schedule_entry__name=entry_name) + acquisition = Acquisition.objects.get(task_result__id=task_result.id) + data_file = acquisition.data.path + assert os.path.exists(data_file) + result_url = reverse_result_detail(entry_name, 1) + + first_response = user_client.delete(result_url, **HTTPS_KWARG) + second_response = user_client.delete(result_url, **HTTPS_KWARG) + + validate_response(first_response, status.HTTP_204_NO_CONTENT) + 
validate_response(second_response, status.HTTP_404_NOT_FOUND) + assert not os.path.exists(data_file) diff --git a/src/tasks/tests/test_list_view.py b/src/tasks/tests/test_list_view.py index 483b8787..e76f1729 100644 --- a/src/tasks/tests/test_list_view.py +++ b/src/tasks/tests/test_list_view.py @@ -10,6 +10,10 @@ simulate_acquisitions, ) +from tasks.models import Acquisition, TaskResult + +import os + def test_non_existent_entry(user_client): with pytest.raises(AssertionError): @@ -56,6 +60,19 @@ def test_delete_list(user_client): # If result does exist, expect 204 entry_name = create_task_results(1, user_client) + + url = reverse_result_list(entry_name) + response = user_client.delete(url, **HTTPS_KWARG) + validate_response(response, status.HTTP_204_NO_CONTENT) + + +def test_delete_list_data_files_deleted(user_client, test_scheduler): + entry_name = simulate_acquisitions(user_client) + task_result = TaskResult.objects.get(schedule_entry__name=entry_name) + acquisition = Acquisition.objects.get(task_result__id=task_result.id) + data_file = acquisition.data.path + assert os.path.exists(data_file) url = reverse_result_list(entry_name) response = user_client.delete(url, **HTTPS_KWARG) validate_response(response, status.HTTP_204_NO_CONTENT) + assert not os.path.exists(data_file) diff --git a/src/tasks/views.py b/src/tasks/views.py index 3693f9ac..171b41ff 100644 --- a/src/tasks/views.py +++ b/src/tasks/views.py @@ -225,7 +225,7 @@ def build_sigmf_archive(fileobj, schedule_entry_name, acquisitions): for acq in acquisitions: with tempfile.NamedTemporaryFile() as tmpdata: - tmpdata.write(acq.data) + tmpdata.write(acq.data.read()) tmpdata.seek(0) # move fd ptr to start of data for reading name = schedule_entry_name + "_" + str(acq.task_result.task_id) if multirecording: