Merge pull request #162 from NTIA/acquisition_data_storage
Acquisition data storage
dboulware authored Jul 17, 2019
2 parents def53c6 + 6e31c73 commit 0574cab
Showing 17 changed files with 303 additions and 17 deletions.
2 changes: 1 addition & 1 deletion configs/actions/acquire_iq_700MHz_P-SafetyNB_DL.yml
@@ -7,4 +7,4 @@ stepped_frequency_time_domain_iq:
sample_rates:
- 15.36e6
durations_ms:
- 1000 # 10000
- 10000
2 changes: 1 addition & 1 deletion configs/actions/acquire_iq_700MHz_P-SafetyNB_UL.yml
@@ -7,4 +7,4 @@ stepped_frequency_time_domain_iq:
sample_rates:
- 15.36e6
durations_ms:
- 1000 # 10000
- 10000
4 changes: 2 additions & 2 deletions configs/actions/survey_700MHz_band_iq.yml
@@ -40,7 +40,7 @@ stepped_frequency_time_domain_iq:
- 1000
- 1000
- 1000
- 1000 # 10000
- 10000
- 1000
- 1000
- 1000 # 10000
- 10000
1 change: 1 addition & 0 deletions docker-compose.yml
@@ -42,6 +42,7 @@ services:
- MOCK_RADIO
- MOCK_RADIO_RANDOM
- CALLBACK_SSL_VERIFICATION
- MAX_TASK_RESULTS
expose:
- '8000'
volumes:
16 changes: 13 additions & 3 deletions src/actions/acquire_single_freq_fft.py
@@ -90,6 +90,8 @@

from .base import Action

from django.core.files.base import ContentFile

logger = logging.getLogger(__name__)

GLOBAL_INFO = {
@@ -301,9 +303,17 @@ def archive(self, task_result, m4s_data, sigmf_md):

logger.debug("Storing acquisition in database")

Acquisition(
task_result=task_result, metadata=sigmf_md._metadata, data=m4s_data
).save()
name = (
task_result.schedule_entry.name
+ "_"
+ str(task_result.task_id)
+ ".sigmf-data"
)

acquisition = Acquisition(task_result=task_result, metadata=sigmf_md._metadata)
acquisition.data.save(name, ContentFile(m4s_data))
acquisition.save()
logger.debug("Saved new file at {}".format(acquisition.data.path))

@property
def description(self):
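The archive() change above stops packing the M4S results into a database BinaryField and instead writes them to a .sigmf-data file through Django's ContentFile/FileField API. As a rough, consumer-side illustration (not part of this commit), a stored file could be read back as sketched below; the float32 dtype is an assumption, and the acquisition's SigMF metadata remains the authoritative description of the sample format.

import numpy as np

def load_acquisition_data(data_path, dtype=np.float32):
    # Read the raw .sigmf-data blob written by archive() and view it as an array.
    # The dtype is assumed; check the datatype recorded in the SigMF metadata.
    with open(data_path, "rb") as f:
        return np.frombuffer(f.read(), dtype=dtype)

# e.g. samples = load_acquisition_data(acquisition.data.path)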
21 changes: 17 additions & 4 deletions src/actions/acquire_stepped_freq_tdomain_iq.py
@@ -55,6 +55,8 @@

from .base import Action

from django.core.files.base import ContentFile

logger = logging.getLogger(__name__)

GLOBAL_INFO = {
@@ -178,17 +180,28 @@ def configure_sdr(self, fc, gain, sample_rate, duration_ms):
def set_sdr_sample_rate(self, sample_rate):
self.sdr.radio.sample_rate = sample_rate

def archive(self, task_result, recording_id, m4s_data, sigmf_md):
def archive(self, task_result, recording_id, acq_data, sigmf_md):
from tasks.models import Acquisition

logger.debug("Storing acquisition in database")

Acquisition(
name = (
task_result.schedule_entry.name
+ "_"
+ str(task_result.task_id)
+ "_"
+ str(recording_id)
+ ".sigmf-data"
)

acquisition = Acquisition(
task_result=task_result,
recording_id=recording_id,
metadata=sigmf_md._metadata,
data=m4s_data,
).save()
)
acquisition.data.save(name, ContentFile(acq_data))
acquisition.save()
logger.debug("Saved new file at {}".format(acquisition.data.path))

@property
def description(self):
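Because the stepped-frequency action archives one recording per center frequency, a single task can now leave several data files on disk, each named <entry>_<task_id>_<recording_id>.sigmf-data. A minimal sketch of how those recordings might be enumerated (hypothetical helper, assuming a configured Django environment; model and field names follow the diff above):

from tasks.models import Acquisition, TaskResult

def recordings_for(entry_name, task_id):
    # Pair each archived recording with its SigMF metadata and on-disk data file.
    tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=task_id)
    for acq in Acquisition.objects.filter(task_result=tr).order_by("recording_id"):
        yield acq.recording_id, acq.metadata, acq.data.path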
14 changes: 12 additions & 2 deletions src/actions/tests/test_acquire_single_freq_fft.py
@@ -1,4 +1,5 @@
import json
import os
from os import path

from django.conf import settings
@@ -18,7 +19,16 @@
def test_detector(user_client, test_scheduler):
entry_name = simulate_acquisitions(user_client)
tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1)
acquistion = Acquisition.objects.get(task_result=tr)
assert sigmf_validate(acquistion.metadata)
acquisition = Acquisition.objects.get(task_result=tr)
assert sigmf_validate(acquisition.metadata)
# FIXME: update schema so that this passes
# schema_validate(sigmf_metadata, schema)


def test_data_file_created(user_client, test_scheduler):
entry_name = simulate_acquisitions(user_client)
tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1)
acquisition = Acquisition.objects.get(task_result=tr)
assert acquisition.data
assert path.exists(acquisition.data.path)
os.remove(acquisition.data.path)
36 changes: 36 additions & 0 deletions src/actions/tests/test_stepped_freq_tdomain_iq.py
@@ -0,0 +1,36 @@
import json
import os
from os import path

from django.conf import settings
from sigmf.validate import validate as sigmf_validate

from tasks.models import Acquisition, TaskResult
from tasks.tests.utils import simulate_multirec_acquisition

SCHEMA_DIR = path.join(settings.REPO_ROOT, "schemas")
SCHEMA_FNAME = "scos_transfer_spec_schema.json"
SCHEMA_PATH = path.join(SCHEMA_DIR, SCHEMA_FNAME)

with open(SCHEMA_PATH, "r") as f:
schema = json.load(f)


def test_metadata(user_client, test_scheduler):
entry_name = simulate_multirec_acquisition(user_client)
tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1)
acquisitions = Acquisition.objects.filter(task_result=tr)
for acquisition in acquisitions:
assert sigmf_validate(acquisition.metadata)
# FIXME: update schema so that this passes
# schema_validate(sigmf_metadata, schema)


def test_data_file_created(user_client, test_scheduler):
entry_name = simulate_multirec_acquisition(user_client)
tr = TaskResult.objects.get(schedule_entry__name=entry_name, task_id=1)
acquisitions = Acquisition.objects.filter(task_result=tr)
for acquisition in acquisitions:
assert acquisition.data
assert path.exists(acquisition.data.path)
os.remove(acquisition.data.path)
44 changes: 44 additions & 0 deletions src/environment.yml
@@ -0,0 +1,44 @@
name: scos-sensor
channels:
- conda-forge
dependencies:
- python=3.6
- black=18.9b0
- Django=2.2.3
- flake8=3.7.7
- Pygments=2.4.2
- Markdown=3.1.1
- django-extensions=2.1.7
- django-filter=2.1.0
- djangorestframework=3.9.4
- docker-compose=1.24.0
- gunicorn=19.9.0
- isort=4.3.20
- jedi=0.13.3
- jsonfield=2.0.2
- jsonschema=3.0.1
- mkdocs=1.0.4
- numpy=1.16.4
- pytest-cov=2.7.1
- pytest-django=3.5.1
- pytest-flake8=1.0.4
- raven=6.10.0
- requests-futures=0.9.9
- requests-mock=1.6.0
- ruamel.yaml=0.15.96
- six=1.12.0
- pip
- pip:
- coreapi==2.3.3
- django-debug-toolbar==1.11
- drf-yasg==1.15.0
- -e git+https://github.com/NTIA/SigMF.git@multi-recording-archive#egg=SigMF
- environs==4.2.0
- flake8-bugbear==19.3.0
- pre-commit==1.16.1
- psycopg2-binary==2.8.2
- seed-isort-config==1.9.1
- tox==3.12.1



2 changes: 1 addition & 1 deletion src/requirements.txt
@@ -1,4 +1,4 @@
Django==2.2.2
Django==2.2.3
Pygments==2.4.2
Markdown==3.1.1
-e git+https://github.com/NTIA/SigMF.git@multi-recording-archive#egg=SigMF
106 changes: 106 additions & 0 deletions src/schedule/migrations/0002_auto_20190702_2113.py
@@ -0,0 +1,106 @@
# Generated by Django 2.2.2 on 2019-07-02 21:13

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [("schedule", "0001_initial")]

operations = [
migrations.AlterField(
model_name="scheduleentry",
name="action",
field=models.CharField(
choices=[
(
"acquire_iq_700MHz_ATT_DL",
"acquire_iq_700MHz_ATT_DL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_ATT_UL",
"acquire_iq_700MHz_ATT_UL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_FirstNet_DL",
"acquire_iq_700MHz_FirstNet_DL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_FirstNet_UL",
"acquire_iq_700MHz_FirstNet_UL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_P-SafetyNB_DL",
"acquire_iq_700MHz_P-SafetyNB_DL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_P-SafetyNB_UL",
"acquire_iq_700MHz_P-SafetyNB_UL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_T-Mobile_DL",
"acquire_iq_700MHz_T-Mobile_DL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_T-Mobile_UL",
"acquire_iq_700MHz_T-Mobile_UL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_Verizon_DL",
"acquire_iq_700MHz_Verizon_DL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_iq_700MHz_Verizon_UL",
"acquire_iq_700MHz_Verizon_UL - Capture time-domain IQ samples at 1 frequencies between",
),
(
"acquire_m4s_700MHz_ATT_DL",
"acquire_m4s_700MHz_ATT_DL - Apply m4s detector over 300 1024-pt FFTs at 739.00 MHz.",
),
(
"acquire_m4s_700MHz_ATT_UL",
"acquire_m4s_700MHz_ATT_UL - Apply m4s detector over 300 1024-pt FFTs at 709.00 MHz.",
),
(
"acquire_m4s_700MHz_FirstNet_DL",
"acquire_m4s_700MHz_FirstNet_DL - Apply m4s detector over 300 1024-pt FFTs at 763.00 MHz.",
),
(
"acquire_m4s_700MHz_FirstNet_UL",
"acquire_m4s_700MHz_FirstNet_UL - Apply m4s detector over 300 1024-pt FFTs at 793.00 MHz.",
),
(
"acquire_m4s_700MHz_P-SafetyNB_DL",
"acquire_m4s_700MHz_P-SafetyNB_DL - Apply m4s detector over 300 512-pt FFTs at 772.00 MHz.",
),
(
"acquire_m4s_700MHz_P-SafetyNB_UL",
"acquire_m4s_700MHz_P-SafetyNB_UL - Apply m4s detector over 300 512-pt FFTs at 802.00 MHz.",
),
(
"acquire_m4s_700MHz_T-Mobile_DL",
"acquire_m4s_700MHz_T-Mobile_DL - Apply m4s detector over 300 512-pt FFTs at 731.50 MHz.",
),
(
"acquire_m4s_700MHz_T-Mobile_UL",
"acquire_m4s_700MHz_T-Mobile_UL - Apply m4s detector over 300 512-pt FFTs at 700.50 MHz.",
),
(
"acquire_m4s_700MHz_Verizon_DL",
"acquire_m4s_700MHz_Verizon_DL - Apply m4s detector over 300 1024-pt FFTs at 751.00 MHz.",
),
(
"acquire_m4s_700MHz_Verizon_UL",
"acquire_m4s_700MHz_Verizon_UL - Apply m4s detector over 300 1024-pt FFTs at 782.00 MHz.",
),
("logger", 'logger - Log the message "running test {name}/{tid}".'),
(
"survey_700MHz_band_iq",
"survey_700MHz_band_iq - Capture time-domain IQ samples at 10 frequencies between",
),
],
help_text="[Required] The name of the action to be scheduled",
max_length=50,
),
)
]
3 changes: 2 additions & 1 deletion src/sensor/settings.py
@@ -59,6 +59,7 @@
CALIBRATION_FILE = path.join(CONFIG_DIR, "calibration.json")
SENSOR_DEFINITION_FILE = path.join(CONFIG_DIR, "sensor_definition.json")
ACTION_DEFINITIONS_DIR = path.join(CONFIG_DIR, "actions")
MEDIA_ROOT = path.join(REPO_ROOT, "files")

# Cleanup any existing healtcheck files
try:
@@ -271,7 +272,7 @@
DATABASES["default"]["HOST"] = "localhost"

# Ensure only the last MAX_TASK_RESULTS results are kept per schedule entry
MAX_TASK_RESULTS = 100
MAX_TASK_RESULTS = env.int("MAX_TASK_RESULTS", default=100000)
# Display at most MAX_TASK_QUEUE upcoming tasks in /tasks/upcoming
MAX_TASK_QUEUE = 50

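MAX_TASK_RESULTS is now configurable at deploy time: docker-compose.yml passes the variable into the container and settings.py reads it with environs, falling back to 100000 when it is unset. A minimal sketch of that lookup in isolation:

from environs import Env

env = Env()
# Read MAX_TASK_RESULTS from the environment (as passed through docker-compose.yml),
# or fall back to the default used in settings.py.
MAX_TASK_RESULTS = env.int("MAX_TASK_RESULTS", default=100000)
print(MAX_TASK_RESULTS)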
16 changes: 16 additions & 0 deletions src/tasks/migrations/0002_auto_20190702_2113.py
@@ -0,0 +1,16 @@
# Generated by Django 2.2.2 on 2019-07-02 21:13

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [("tasks", "0001_initial")]

operations = [
migrations.AlterField(
model_name="acquisition",
name="data",
field=models.FileField(null=True, upload_to="blob/%Y/%m/%d/%H/%M/%S"),
)
]
12 changes: 11 additions & 1 deletion src/tasks/models/acquisition.py
@@ -3,6 +3,8 @@

from .task_result import TaskResult

from django.db.models.signals import pre_delete


class Acquisition(models.Model):
"""The data and metadata associated with a task.
@@ -26,7 +28,7 @@ class Acquisition(models.Model):
default=0, help_text="The id of the recording relative to the task"
)
metadata = JSONField(help_text="The sigmf meta data for the acquisition")
data = models.BinaryField(help_text="", null=True)
data = models.FileField(upload_to="blob/%Y/%m/%d/%H/%M/%S", null=True)

class Meta:
db_table = "acquisitions"
@@ -39,3 +41,11 @@ def __str__(self):
self.task_result.task_id,
self.recording_id,
)


def clean_up_data(sender, **kwargs):
acq = kwargs["instance"]
acq.data.delete(save=False)


pre_delete.connect(clean_up_data, sender=Acquisition)
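With data now a FileField, Django's default storage writes each blob under MEDIA_ROOT using the strftime-style upload_to pattern, and the pre_delete handler above removes the file from disk whenever its Acquisition row is deleted. The sketch below only approximates where a new file lands; the MEDIA_ROOT value is a stand-in for REPO_ROOT/files from settings.py, and Django may alter the name if it collides with an existing file.

import datetime
from os import path

MEDIA_ROOT = "/opt/scos-sensor/files"   # assumption: stands in for REPO_ROOT/files
UPLOAD_TO = "blob/%Y/%m/%d/%H/%M/%S"    # strftime pattern from the FileField

def expected_data_path(entry_name, task_id, recording_id=None, now=None):
    # Mirror the naming convention used by the archive() methods in this commit.
    now = now or datetime.datetime.now()
    name = "{}_{}".format(entry_name, task_id)
    if recording_id is not None:
        name += "_{}".format(recording_id)
    return path.join(MEDIA_ROOT, now.strftime(UPLOAD_TO), name + ".sigmf-data")

print(expected_data_path("acquire_m4s_700MHz_ATT_DL", 1))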