From c2824fe10dc644d1bffdc5ce62e5e8ac398a67b7 Mon Sep 17 00:00:00 2001
From: John Madden
Date: Wed, 21 Feb 2024 11:40:54 -0800
Subject: [PATCH 01/10] Removed chirpstack-api package

---
 backend/requirements.txt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/backend/requirements.txt b/backend/requirements.txt
index ffeb9a8a..06ef420f 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -1,7 +1,6 @@
 sqlalchemy
 alembic
 psycopg2-binary==2.9.5
-chirpstack-api==3.7.2
 protobuf
 tqdm
 Flask

From b67ef791c3ca95d9e72b86595c706c827d59a62b Mon Sep 17 00:00:00 2001
From: John Madden
Date: Wed, 21 Feb 2024 16:11:45 -0800
Subject: [PATCH 02/10] Added resource for TTN webhook

---
 backend/api/resources/sensor_data_ttn.py | 78 ++++++++++++++++++++++++
 1 file changed, 78 insertions(+)
 create mode 100644 backend/api/resources/sensor_data_ttn.py

diff --git a/backend/api/resources/sensor_data_ttn.py b/backend/api/resources/sensor_data_ttn.py
new file mode 100644
index 00000000..c9f71c29
--- /dev/null
+++ b/backend/api/resources/sensor_data_ttn.py
@@ -0,0 +1,78 @@
+"""Sensor endpoint for requests made with The Things Network webhooks
+
+The API endpoint handles uplink messages from The Things Network (TTN). The data
+payload is decoded and inserted into the database. See the following for more
+information on TTN integration:
+- https://www.thethingsindustries.com/docs/integrations/webhooks/creating-webhooks/
+- https://www.thethingsindustries.com/docs/the-things-stack/concepts/data-formats/
+
+TODO:
+- Integrate downlinks to the device to acknowledge that the data was successfully
+inserted into the db and can be cleared from local non-volatile storage. See
+https://www.thethingsindustries.com/docs/integrations/webhooks/scheduling-downlinks/
+
+Author: John Madden
+"""
+
+from flask import request, jsonify
+from flask_restful import Resource
+from ..database.schemas.power_data_schema import PowerDataSchema
+from ..database.schemas.get_cell_data_schema import TEROSDataSchema
+from ..database.schemas.p_input import PInput
+from ..database.models.power_data import PowerData
+from ..database.models.teros_data import TEROSData
+
+from datetime import datetime
+
+from soil_power_sensor_protobuf import decode_measurement
+
+power_schema = PowerDataSchema()
+teros_schema = TEROSDataSchema()
+
+class Measurement_Upink(Resource):
+    def post(self):
+        """Handlings uplink POST request from TTN"""
+
+        # get uplink json
+        uplink_json = request.json
+
+        # get payload
+        payload = uplink_json["uplink_message"]["frm_payload"]
+
+        # decode binary protobuf data
+        meas = decode_measurement(payload)
+
+        # power measurement
+        if meas["type"] == "power":
+            power_data = PowerData.add_protobuf_power_data(
+                meas["loggerId"],
+                meas["cellId"],
+                datetime.fromtimestamp(meas["ts"]),
+                meas["data"]["voltage"],
+                meas["data"]["current"],
+            )
+
+            data_json = power_schema.jsonify(power_data)
+
+        # teros12 measurement
+        elif meas["type"] == "teros12":
+            teros_data = TEROSData.add_protobuf_teros_data(
+                meas["cellId"],
+                datetime.fromtimestamp(meas["ts"]),
+                meas["data"]["vwcAdj"],
+                meas["data"]["vwcRaw"],
+                meas["data"]["temp"],
+                meas["data"]["ec"],
+                None,
+            )
+
+            data_json = teros_schema.jsonify(teros_data)
+
+        # raise error if any other data types are not stored
+        else:
+            raise NotImplementedError(f"Message type {meas["type"]} not implemented")
+
+        # TODO: Add downlink messages to device
+
+        # return json of measurement
+        return data_json
\ No newline at end of file

From 0096060fd2b56879158221314fe31a587f735e81 Mon Sep 17 00:00:00
2001 From: John Madden Date: Wed, 21 Feb 2024 19:40:40 -0800 Subject: [PATCH 03/10] Added direct HTTP uploads --- backend/api/__init__.py | 12 ++-- backend/api/database/models/teros_data.py | 2 +- backend/api/resources/sensor_data.py | 49 --------------- backend/api/resources/sensor_data_ttn.py | 63 +++++--------------- backend/api/resources/sensor_data_wifi.py | 70 ++++++++++++++++++++++ backend/api/resources/util.py | 72 +++++++++++++++++++++++ 6 files changed, 163 insertions(+), 105 deletions(-) delete mode 100644 backend/api/resources/sensor_data.py create mode 100644 backend/api/resources/sensor_data_wifi.py create mode 100644 backend/api/resources/util.py diff --git a/backend/api/__init__.py b/backend/api/__init__.py index 5b4eda0b..890d2472 100644 --- a/backend/api/__init__.py +++ b/backend/api/__init__.py @@ -46,9 +46,8 @@ def create_app(debug: bool = False) -> Flask: from .resources.cell_id import Cell_Id from .resources.power_data import Power_Data from .resources.teros_data import Teros_Data - from .resources.power_data_protobuf import Power_Data_Protobuf - from .resources.teros_data_protobuf import Teros_Data_Protobuf - from .resources.sensor_data import Sensor_Data + from .resources.sensor_data_ttn import Measurement_Upink + from .resources.sensor_data_wifi import Measurement_Direct from .resources.cell import Cell from .resources.session import Session_r from .resources.users_data import User_Data @@ -61,11 +60,8 @@ def create_app(debug: bool = False) -> Flask: api.add_resource(Cell_Id, "/cell/id") api.add_resource(Power_Data, "/power/", "/power/") api.add_resource(Teros_Data, "/teros/", "/teros/") - api.add_resource( - Power_Data_Protobuf, "/power-proto", "/power-proto/" - ) - api.add_resource(Teros_Data_Protobuf, "/teros-proto", "/teros-proto/") - api.add_resource(Sensor_Data, "/sensor", "/sensor/") + api.add_resource(Measurement_Direct, "/sensor/") + api.add_resource(Measurement_Upink, "/ttn/") api.add_resource(Session_r, "/session") api.add_resource(User_Data, "/user") app.register_blueprint(auth, url_prefix="/api") diff --git a/backend/api/database/models/teros_data.py b/backend/api/database/models/teros_data.py index 40b68015..1a987e3c 100644 --- a/backend/api/database/models/teros_data.py +++ b/backend/api/database/models/teros_data.py @@ -47,7 +47,7 @@ def add_teros_data(cell_name, ts, vwc, raw_vwc, temp, ec, water_pot): return teros_data @staticmethod - def add_protobuf_power_data(cell_id, ts, vwc, raw_vwc, temp, ec, water_pot): + def add_protobuf_teros_data(cell_id, ts, vwc, raw_vwc, temp, ec, water_pot): cur_cell = Cell.query.filter_by(id=cell_id).first() if cur_cell is None: return None diff --git a/backend/api/resources/sensor_data.py b/backend/api/resources/sensor_data.py deleted file mode 100644 index 789bf6ea..00000000 --- a/backend/api/resources/sensor_data.py +++ /dev/null @@ -1,49 +0,0 @@ -from flask import request, jsonify, make_response -from flask_restful import Resource -from ..database.schemas.data_schema import DataSchema -from ..database.schemas.get_sensor_data_schema import GetSensorDataSchema -from ..database.models.sensor import Sensor -from soil_power_sensor_protobuf import encode, decode - -from datetime import datetime - -data_schema = DataSchema() -get_sensor_data_schema = GetSensorDataSchema() - - -class Sensor_Data(Resource): - def post(self): - meas_sensor = decode(request.data) - - for measurement, data in meas_sensor["data"].items(): - res = Sensor.add_data( - meas_sensor["cellId"], - meas_sensor["type"], - measurement, - data, - 
meas_sensor["data_type"], - datetime.fromtimestamp(meas_sensor["ts"]), - ) - if res is None: - encoded_data = encode(success=False) - response = make_response(encoded_data) - response.headers["content-type"] = "text/octet-stream" - response.status_code = 500 - return response - encoded_data = encode(success=True) - response = make_response(encoded_data) - response.headers["content-type"] = "text/octet-stream" - response.status_code = 201 - return response - - def get(self): - v_args = get_sensor_data_schema.load(request.args) - sensor_obj = Sensor.get_sensor_data_obj( - cell_id=v_args["cellId"], - measurement=v_args["measurement"], - start_time=v_args["startTime"], - end_time=v_args["endTime"], - ) - if sensor_obj is None: - return {"msg": "sensor not found"}, 400 - return jsonify(sensor_obj) diff --git a/backend/api/resources/sensor_data_ttn.py b/backend/api/resources/sensor_data_ttn.py index c9f71c29..da820960 100644 --- a/backend/api/resources/sensor_data_ttn.py +++ b/backend/api/resources/sensor_data_ttn.py @@ -14,64 +14,33 @@ Author: John Madden """ -from flask import request, jsonify +from flask import request from flask_restful import Resource -from ..database.schemas.power_data_schema import PowerDataSchema -from ..database.schemas.get_cell_data_schema import TEROSDataSchema -from ..database.schemas.p_input import PInput -from ..database.models.power_data import PowerData -from ..database.models.teros_data import TEROSData -from datetime import datetime +from .util import process_measurement -from soil_power_sensor_protobuf import decode_measurement - -power_schema = PowerDataSchema() -teros_schema = TEROSDataSchema() class Measurement_Upink(Resource): def post(self): - """Handlings uplink POST request from TTN""" - + """Handlings uplink POST request from TTN + + Raises: + ValueError if the header Content-Type is not application/json + """ + + content_type = request.headers.get("Content-Type") + # get uplink json - uplink_json = request.json + if content_type == "application/json": + uplink_json = request.json + else: + raise ValueError("POST request must be application/json") # get payload payload = uplink_json["uplink_message"]["frm_payload"] - - # decode binary protobuf data - meas = decode_measurement(payload) - - # power measurement - if meas["type"] == "power": - power_data = PowerData.add_protobuf_power_data( - meas["loggerId"], - meas["cellId"], - datetime.fromtimestamp(meas["ts"]), - meas["data"]["voltage"], - meas["data"]["current"], - ) - - data_json = power_schema.jsonify(power_data) - - # teros12 measurement - elif meas["type"] == "teros12": - teros_data = TEROSData.add_protobuf_teros_data( - meas["cellId"], - datetime.fromtimestamp(meas["ts"]), - meas["data"]["vwcAdj"], - meas["data"]["vwcRaw"], - meas["data"]["temp"], - meas["data"]["ec"], - None, - ) - - data_json = teros_schema.jsonify(teros_data) - - # raise error if any other data types are not stored - else: - raise NotImplementedError(f"Message type {meas["type"]} not implemented") + data_json = process_measurement(payload) + # TODO: Add downlink messages to device # return json of measurement diff --git a/backend/api/resources/sensor_data_wifi.py b/backend/api/resources/sensor_data_wifi.py new file mode 100644 index 00000000..89e6022b --- /dev/null +++ b/backend/api/resources/sensor_data_wifi.py @@ -0,0 +1,70 @@ +"""Sensor endpoint for requests directly uploading data + +The measurement data is POSTed to this resource as protobuf encoded binary data +which is decoded into a dictionary containing measured 
values. The data is +inserted into the appropriate table in the database. A HTTP response is sent +back containing protobuf binary data with the response message indicating status +of data. + +Author: John Madden +""" + +from flask import request, Response +from flask_restful import Resource + +from soil_power_sensor_protobuf import encode_response + +from .util import process_measurement + + +class Measurement_Direct(Resource): + def post(self): + """Handles POST request containing binary data and return response + + The HTTP request is checked for appropriate Content-Type then the + measurement is decoded and inserted into the database. Both a HTTP and + binary response are returned. + + Returns: + Response object with a binary response message indicating a success + or failure of processing with Content-Type of + application/octet-stream. An HTTP status code of 201 indicates a + successful processing and 500 indicates a failure. + + Raises: + ValueError if the header Content-Type is not + application/octet-stream + """ + + content_type = request.headers.get("Content-Type") + + # check for correct content type and get json + if content_type == "application/json": + # get uplink json + data = request.data + else: + raise ValueError("POST request must be application/json") + + # decode and insret into db + data_json = process_measurement(data) + + + # format HTTP response + resp = Response() + + # encode response data + if data_json is not None: + resp.data = encode_response(True) + # created + resp.status_code = 201 + else: + resp.data = encode_response(False) + # internal server error + resp.status_code = 500 + + resp.content_type = "application/octet-stream" + # should be autopopulated with automatically_set_content_length + #resp.content_length = len(resp.data) + + # return json of measurement + return data_json \ No newline at end of file diff --git a/backend/api/resources/util.py b/backend/api/resources/util.py new file mode 100644 index 00000000..422a1246 --- /dev/null +++ b/backend/api/resources/util.py @@ -0,0 +1,72 @@ +"""Utility file for database operations + +author: John Madden +""" + +from datetime import datetime + +from soil_power_sensor_protobuf import decode_measurement + +from ..database.schemas.power_data_schema import PowerDataSchema +from ..database.schemas.get_cell_data_schema import TEROSDataSchema +from ..database.models.power_data import PowerData +from ..database.models.teros_data import TEROSData + + +def process_measurement(data : bytes): + """Process protobuf encoded measurement + + The byte string gets decoded through protobuf and inserted into the associated table. 
+ + Args + data: Encoded measurement message + + Returns: + JSON representation of the object inserted into the database + + Raises: + NotImplementedError when the processing of the message type is not + implemented + """ + + # decode binary protobuf data + meas = decode_measurement(data) + + # stores the json formatted repsonse + data_json = None + + # power measurement + if meas["type"] == "power": + power_data = PowerData.add_protobuf_power_data( + meas["loggerId"], + meas["cellId"], + datetime.fromtimestamp(meas["ts"]), + meas["data"]["voltage"], + meas["data"]["current"], + ) + + if power_data is not None: + power_schema = PowerDataSchema() + data_json = power_schema.jsonify(power_data) + + # teros12 measurement + elif meas["type"] == "teros12": + teros_data = TEROSData.add_protobuf_teros_data( + meas["cellId"], + datetime.fromtimestamp(meas["ts"]), + meas["data"]["vwcAdj"], + meas["data"]["vwcRaw"], + meas["data"]["temp"], + meas["data"]["ec"], + None, + ) + + if teros_data is not None: + teros_schema = TEROSDataSchema() + data_json = teros_schema.jsonify(teros_data) + + # raise error if any other data types are not stored + else: + raise NotImplementedError(f"Message type {meas["type"]} not implemented") + + return data_json \ No newline at end of file From f369e19999361fe50aa2d9bb43c82640aeee98fd Mon Sep 17 00:00:00 2001 From: John Madden Date: Wed, 21 Feb 2024 19:41:38 -0800 Subject: [PATCH 04/10] Removed old protobuf resources --- backend/api/resources/power_data_protobuf.py | 46 ------------------- backend/api/resources/teros_data_protobuf.py | 47 -------------------- 2 files changed, 93 deletions(-) delete mode 100644 backend/api/resources/power_data_protobuf.py delete mode 100644 backend/api/resources/teros_data_protobuf.py diff --git a/backend/api/resources/power_data_protobuf.py b/backend/api/resources/power_data_protobuf.py deleted file mode 100644 index d155c6fe..00000000 --- a/backend/api/resources/power_data_protobuf.py +++ /dev/null @@ -1,46 +0,0 @@ -from flask import request, jsonify, make_response -from flask_restful import Resource -from ..database.schemas.power_data_schema import PowerDataSchema -from ..database.schemas.get_cell_data_schema import GetCellDataSchema -from ..database.schemas.p_input import PInput -from ..database.models.power_data import PowerData -from soil_power_sensor_protobuf import encode, decode - -from datetime import datetime - -power_schema = PowerDataSchema() -get_cell_data = GetCellDataSchema() -p_in = PInput() - - -class Power_Data_Protobuf(Resource): - def post(self): - meas_power = decode(request.data) - - new_pwr_data = PowerData.add_protobuf_power_data( - meas_power["loggerId"], - meas_power["cellId"], - datetime.fromtimestamp(meas_power["ts"]), - meas_power["voltage"], - meas_power["current"], - ) - if new_pwr_data is None: - # missing cell_id or logger_id in table - encoded_data = encode(success=False) - response = make_response(encoded_data) - response.headers["content-type"] = "text/octet-stream" - response.status_code = 500 - return response - encoded_data = encode(success=True) - response = make_response(encoded_data) - response.headers["content-type"] = "text/octet-stream" - response.status_code = 201 - return response - - def get(self, cell_id=0): - v_args = get_cell_data.load(request.args) - return jsonify( - PowerData.get_power_data_obj( - cell_id, start_time=v_args["startTime"], end_time=v_args["endTime"] - ) - ) diff --git a/backend/api/resources/teros_data_protobuf.py b/backend/api/resources/teros_data_protobuf.py deleted 
file mode 100644 index 568860fd..00000000 --- a/backend/api/resources/teros_data_protobuf.py +++ /dev/null @@ -1,47 +0,0 @@ -from flask import request, jsonify, make_response -from flask_restful import Resource -from ..database.schemas.teros_data_schema import TEROSDataSchema -from ..database.schemas.get_cell_data_schema import GetCellDataSchema -from ..database.schemas.t_input import TInput -from ..database.models.teros_data import TEROSData -from soil_power_sensor_protobuf import encode, decode - -from datetime import datetime - -teros_schema = TEROSDataSchema() -get_cell_data = GetCellDataSchema() -t_in = TInput() - - -class Teros_Data_Protobuf(Resource): - def post(self): - meas_teros = decode(request.data) - - new_pwr_data = TEROSData.add_protobuf_power_data( - meas_teros["cellId"], - datetime.fromtimestamp(meas_teros["ts"]), - meas_teros["vwcAdj"], - meas_teros["vwcRaw"], - meas_teros["temp"], - meas_teros["ec"], - ) - if new_pwr_data is None: - # missing cell_id or logger_id in table - encoded_data = encode(success=False) - response = make_response(encoded_data) - response.headers["content-type"] = "text/octet-stream" - response.status_code = 500 - return response - encoded_data = encode(success=True) - response = make_response(encoded_data) - response.headers["content-type"] = "text/octet-stream" - response.status_code = 201 - return response - - def get(self, cell_id=0): - v_args = get_cell_data.load(request.args) - return jsonify( - TEROSData.get_teros_data_obj( - cell_id, start_time=v_args["startTime"], end_time=v_args["endTime"] - ) - ) From 95bd6614aec1a5c1bd1518057a22eb2ab0eabc77 Mon Sep 17 00:00:00 2001 From: John Madden Date: Thu, 22 Feb 2024 15:29:19 -0800 Subject: [PATCH 05/10] Updated dev endpoint --- backend/entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/entrypoint.sh b/backend/entrypoint.sh index 2b394802..3839faf8 100755 --- a/backend/entrypoint.sh +++ b/backend/entrypoint.sh @@ -4,7 +4,7 @@ while getopts 'dp' FLAG do case "$FLAG" in - d) flask --app backend.wsgi --debug run -p 8000;; + d) flask --app backend.wsgi --debug run -h 0.0.0.0 -p 8000;; p) gunicorn -b :8000 backend.wsgi:handler;; esac done From b47742ad3a8719bc0a117ea0a485cdb747f73e02 Mon Sep 17 00:00:00 2001 From: John Madden Date: Thu, 22 Feb 2024 17:29:45 -0800 Subject: [PATCH 06/10] Updated response handling for sensor resources --- backend/Dockerfile | 3 +- backend/api/resources/sensor_data_ttn.py | 16 +++--- backend/api/resources/sensor_data_wifi.py | 35 ++----------- backend/api/resources/util.py | 52 ++++++++----------- backend/migrate.sh | 62 +++++++++++++++++++++++ 5 files changed, 101 insertions(+), 67 deletions(-) create mode 100755 backend/migrate.sh diff --git a/backend/Dockerfile b/backend/Dockerfile index e6871867..427cc717 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -111,6 +111,7 @@ RUN pip install --no-cache /wheels/* # copy project COPY . 
$APP_HOME +COPY migrate.sh $HOME # chown all the files to the app user RUN chown -R app:app $APP_HOME @@ -118,4 +119,4 @@ RUN chown -R app:app $APP_HOME # change to the app user USER app WORKDIR /home/app/ -ENTRYPOINT ["./backend/entrypoint.sh", "-p"] +ENTRYPOINT ["./backend/entrypoint.sh", "-p"] \ No newline at end of file diff --git a/backend/api/resources/sensor_data_ttn.py b/backend/api/resources/sensor_data_ttn.py index da820960..794ca8df 100644 --- a/backend/api/resources/sensor_data_ttn.py +++ b/backend/api/resources/sensor_data_ttn.py @@ -14,6 +14,8 @@ Author: John Madden """ +import base64 + from flask import request from flask_restful import Resource @@ -24,10 +26,11 @@ class Measurement_Upink(Resource): def post(self): """Handlings uplink POST request from TTN - Raises: - ValueError if the header Content-Type is not application/json + Returns: + Response indicating success or failure. See util.process_measurement + for full description. """ - + content_type = request.headers.get("Content-Type") # get uplink json @@ -37,11 +40,12 @@ def post(self): raise ValueError("POST request must be application/json") # get payload - payload = uplink_json["uplink_message"]["frm_payload"] + payload_str = uplink_json["data"]["uplink_message"]["frm_payload"] + payload = base64.b64decode(payload_str) - data_json = process_measurement(payload) + resp = process_measurement(payload) # TODO: Add downlink messages to device # return json of measurement - return data_json \ No newline at end of file + return resp \ No newline at end of file diff --git a/backend/api/resources/sensor_data_wifi.py b/backend/api/resources/sensor_data_wifi.py index 89e6022b..e7cd7dd0 100644 --- a/backend/api/resources/sensor_data_wifi.py +++ b/backend/api/resources/sensor_data_wifi.py @@ -26,45 +26,20 @@ def post(self): binary response are returned. Returns: - Response object with a binary response message indicating a success - or failure of processing with Content-Type of - application/octet-stream. An HTTP status code of 201 indicates a - successful processing and 500 indicates a failure. - - Raises: - ValueError if the header Content-Type is not - application/octet-stream + Response indicating success or failure. See util.process_measurement + for full description. 
""" content_type = request.headers.get("Content-Type") # check for correct content type and get json - if content_type == "application/json": + if content_type == "application/octet-stream": # get uplink json data = request.data else: raise ValueError("POST request must be application/json") # decode and insret into db - data_json = process_measurement(data) - - - # format HTTP response - resp = Response() - - # encode response data - if data_json is not None: - resp.data = encode_response(True) - # created - resp.status_code = 201 - else: - resp.data = encode_response(False) - # internal server error - resp.status_code = 500 - - resp.content_type = "application/octet-stream" - # should be autopopulated with automatically_set_content_length - #resp.content_length = len(resp.data) + resp = process_measurement(data) - # return json of measurement - return data_json \ No newline at end of file + return resp \ No newline at end of file diff --git a/backend/api/resources/util.py b/backend/api/resources/util.py index 422a1246..13679e7a 100644 --- a/backend/api/resources/util.py +++ b/backend/api/resources/util.py @@ -1,14 +1,13 @@ -"""Utility file for database operations +"""Utility functions for interacting with the database author: John Madden """ from datetime import datetime -from soil_power_sensor_protobuf import decode_measurement +from flask import Response +from soil_power_sensor_protobuf import encode_response, decode_measurement -from ..database.schemas.power_data_schema import PowerDataSchema -from ..database.schemas.get_cell_data_schema import TEROSDataSchema from ..database.models.power_data import PowerData from ..database.models.teros_data import TEROSData @@ -16,42 +15,31 @@ def process_measurement(data : bytes): """Process protobuf encoded measurement - The byte string gets decoded through protobuf and inserted into the associated table. + The byte string gets decoded through protobuf and inserted into the associated table. Upon successful insertion, a 200 response is sent back. On failure when the server does not know how to handle a measurement type, a 501 response is sent. Both cases have serialized response messages sent back. Args data: Encoded measurement message Returns: - JSON representation of the object inserted into the database - - Raises: - NotImplementedError when the processing of the message type is not - implemented + Flask response with status code and protobuf encoded response. 
""" # decode binary protobuf data meas = decode_measurement(data) - - # stores the json formatted repsonse - data_json = None - + # power measurement if meas["type"] == "power": - power_data = PowerData.add_protobuf_power_data( + db_obj = PowerData.add_protobuf_power_data( meas["loggerId"], meas["cellId"], datetime.fromtimestamp(meas["ts"]), meas["data"]["voltage"], meas["data"]["current"], - ) + ) - if power_data is not None: - power_schema = PowerDataSchema() - data_json = power_schema.jsonify(power_data) - # teros12 measurement elif meas["type"] == "teros12": - teros_data = TEROSData.add_protobuf_teros_data( + db_obj = TEROSData.add_protobuf_teros_data( meas["cellId"], datetime.fromtimestamp(meas["ts"]), meas["data"]["vwcAdj"], @@ -59,14 +47,18 @@ def process_measurement(data : bytes): meas["data"]["temp"], meas["data"]["ec"], None, - ) - - if teros_data is not None: - teros_schema = TEROSDataSchema() - data_json = teros_schema.jsonify(teros_data) - - # raise error if any other data types are not stored + ) + + # format response + resp = Response() + resp.content_type = "application/octet-stream" + # indicate a success with 200 + if db_obj is not None: + resp.status_code = 200 + resp.data = encode_response(True) + # indicate an error with 501 else: - raise NotImplementedError(f"Message type {meas["type"]} not implemented") + resp.status_code = 501 + resp.data = encode_response(False) - return data_json \ No newline at end of file + return resp \ No newline at end of file diff --git a/backend/migrate.sh b/backend/migrate.sh new file mode 100755 index 00000000..c93f3013 --- /dev/null +++ b/backend/migrate.sh @@ -0,0 +1,62 @@ +#!/bin/sh + +# Date: 02/11/24 +# Author: Aaron Wu +# This aggergates all the necessary alembic migration commands + +# Note: migration commands +# flask --app backend.api db init -d ./backend/api/migrations +# flask --app backend.api db stamp head -d ./backend/api/migrations +# flask --app backend.api db migrate -d ./backend/api/database/migrations +# flask --app backend.api db revision -d ./backend/api/database/migrations --autogenerate -m "updated db" +# flask --app backend.api db upgrade head -d ./backend/api/database/migrations +# flask --app backend.api db downgrade -d ./backend/api/migrations +# flask --app backend.api db check -d ./backend/api/migrations + +USAGE="script usage: $(basename $0) [-u] [-m ] [-d ] [-c]" + +if (( $# == 0 )); then + echo $USAGE + exit 1 +fi + + +while getopts 'um:d:ch' FLAG +do + case "$FLAG" in + u) + # upgrades to lastest alembic version + flask --app api db upgrade head -d api/migrations + ;; + m) + # Migrate database + msg="$OPTARG" + # read -p "Enter migrate message: " msg; + flask --app api db migrate -d api/migrations -m "$msg"; + flask --app api db upgrade head -d api/migrations + ;; + d) + # Downgrade to another alembic version + ver="$OPTARG" + # read -p "Enter downgrade version: " ver; + flask --app api db downgrade $ver -d api/migrations + ;; + c) + # Checks if database needs migration + flask --app api db check -d api/migrations + ;; + h) + # Usage help + echo $USAGE >&2 + exit 1 + ;; + ?) 
+ # Invalid flag + echo $USAGE >&2 + exit 1 + ;; + esac +done + + + From ba84f5d35758e03908b52c33131416ec17f5765b Mon Sep 17 00:00:00 2001 From: John Madden Date: Thu, 22 Feb 2024 20:40:14 -0800 Subject: [PATCH 07/10] Removed resampling and updated units --- backend/api/database/getters.py | 136 ---------------------- backend/api/database/models/power_data.py | 81 +++---------- backend/api/database/models/teros_data.py | 46 ++------ 3 files changed, 24 insertions(+), 239 deletions(-) delete mode 100644 backend/api/database/getters.py diff --git a/backend/api/database/getters.py b/backend/api/database/getters.py deleted file mode 100644 index cd46ed46..00000000 --- a/backend/api/database/getters.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Helper functions for getting data - -Notes ------ -All functions must have a `sess` argument so that multiple functions can be -called while retaining object data from queried objects. -""" - -from sqlalchemy import select, func - -from .models import power_data as PowerData -from .models import teros_data as TEROSData - - -def get_power_data(sess, cell_id, resample="hour"): - """Gets the power data for a given cell. Can be directly passed to - bokeh.ColumnDataSource. - - Parmaters - --------- - sess : sqlalchemy.orm.Session - Session to use - cell_id : int - Valid Cell.id - - Returns - ------- - dict - Dictionary of lists with keys named after columns of the table - { - 'timestamp': [], - 'v': [], - 'i': [], - 'p': [] - } - """ - - data = { - 'timestamp': [], - 'v': [], - 'i': [], - 'p': [], - } - - resampled = ( - select( - func.date_trunc(resample, PowerData.ts).label("ts"), - func.avg(PowerData.voltage).label("voltage"), - func.avg(PowerData.current).label("current") - ) - .where(PowerData.cell_id == cell_id) - .group_by(func.date_trunc(resample, PowerData.ts)) - .subquery() - ) - - adj_units = ( - select( - resampled.c.ts.label("ts"), - (resampled.c.voltage * 10e-9).label("voltage"), - (resampled.c.current * 10e-6).label("current") - ) - .subquery() - ) - - stmt = ( - select( - adj_units.c.ts.label("ts"), - adj_units.c.voltage.label("voltage"), - adj_units.c.current.label("current"), - (adj_units.c.voltage * adj_units.c.current).label("power") - ) - .order_by(adj_units.c.ts) - ) - - for row in sess.execute(stmt): - data["timestamp"].append(row.ts) - data["v"].append(row.voltage) - data["i"].append(row.current) - data["p"].append(row.power) - - return data - - -def get_teros_data(sess, cell_id, resample='hour'): - """Gets the TEROS-12 sensor data for a given cell. Returned dictionary can - be passed directly to bokeh.ColumnDataSource. - - Parmaters - --------- - s : sqlalchemy.orm.Session - Session to use - cell_id : int - Valid Cell.id - resample : str - Resample time frame. Defaults to hour. Valid options are - [microseconds, milliseconds, second, minute, hour, day, week, month, - quarter, year, decade, century, millennium]. 
- - Returns - ------- - dict - Dictionary of lists with keys named after columns of the table - { - 'timestamp': [], - 'vwc': [], - 'temp': [], - 'ec': [] - } - """ - - data = { - 'timestamp': [], - 'vwc': [], - 'temp': [], - 'ec': [] - } - - stmt = ( - select( - func.date_trunc(resample, TEROSData.ts).label("ts"), - func.avg(TEROSData.vwc).label("vwc"), - func.avg(TEROSData.temp).label("temp"), - func.avg(TEROSData.ec).label("ec") - ) - .where(TEROSData.cell_id == cell_id) - .group_by(func.date_trunc(resample, TEROSData.ts)) - .order_by(func.date_trunc(resample, TEROSData.ts)) - ) - - for row in sess.execute(stmt): - data['timestamp'].append(row.ts) - data['vwc'].append(row.vwc) - data['temp'].append(row.temp) - data['ec'].append(row.ec) - - return data diff --git a/backend/api/database/models/power_data.py b/backend/api/database/models/power_data.py index d9da38c3..a1f9667a 100644 --- a/backend/api/database/models/power_data.py +++ b/backend/api/database/models/power_data.py @@ -65,55 +65,8 @@ def add_protobuf_power_data(logger_id, cell_id, ts, v, i): db.session.commit() return power_data - def get_power_data( - cell_id, - resample="hour", - startTime=datetime.now() - relativedelta(months=1), - endTime=datetime.now(), - ): - """gets power data aggregated by attributes""" - data = [] - - resampled = ( - db.select( - db.func.date_trunc(resample, PowerData.ts).label("ts"), - db.func.avg(PowerData.voltage).label("voltage"), - db.func.avg(PowerData.current).label("current"), - ) - .where(PowerData.cell_id == cell_id) - # .filter((PowerData.ts > startTime) & (PowerData.ts < endTime)) - .group_by(db.func.date_trunc(resample, PowerData.ts)) - .subquery() - ) - - # adj_units = db.select( - # resampled.c.ts.label("ts"), - # (resampled.c.voltage * 1e3).label("voltage"), - # (resampled.c.current * 1e6).label("current"), - # ).subquery() - - stmt = db.select( - resampled.c.ts.label("ts"), - (resampled.c.voltage * 1e3).label("voltage"), - (resampled.c.current * 1e6).label("current"), - (resampled.c.voltage * resampled.c.current * 1e6).label("power"), - ).order_by(resampled.c.ts) - - for row in db.session.execute(stmt): - data.append( - { - "ts": row.ts, - "v": row.voltage, - "i": row.current, - "p": row.power, - } - ) - - return data - def get_power_data_obj( cell_id, - resample="hour", start_time=datetime.now() - relativedelta(months=1), end_time=datetime.now(), ): @@ -124,34 +77,26 @@ def get_power_data_obj( "i": [], "p": [], } - - resampled = ( + + stmt = ( db.select( - db.func.date_trunc(resample, PowerData.ts).label("ts"), - db.func.avg(PowerData.voltage).label("voltage"), - db.func.avg(PowerData.current).label("current"), + PowerData.ts.label("ts"), + PowerData.voltage.label("voltage"), + PowerData.current.label("current"), ) - .where((PowerData.cell_id == cell_id)) - # .filter((PowerData.ts >= startTime, PowerData.ts <= endTime)) + .where(PowerData.cell_id == cell_id) .filter((PowerData.ts.between(start_time, end_time))) - .group_by(db.func.date_trunc(resample, PowerData.ts)) .subquery() ) - # adj_units = db.select( - # resampled.c.ts.label("ts"), - # resampled.c.voltage.label("voltage"), - # resampled.c.current.label("current"), - # ).subquery() - - stmt = db.select( - resampled.c.ts.label("ts"), - (resampled.c.voltage * 1e3).label("voltage"), - (resampled.c.current * 1e6).label("current"), - (resampled.c.voltage * resampled.c.current * 1e6).label("power"), - ).order_by(resampled.c.ts) + adj_units = db.select( + stmt.c.ts.label("ts"), + stmt.c.voltage.label("voltage"), + 
stmt.c.current.label("current"), + (stmt.c.voltage * stmt.c.current * 1e-3).label("power") + ).order_by(stmt.c.ts) - for row in db.session.execute(stmt): + for row in db.session.execute(adj_units): data["timestamp"].append(row.ts) data["v"].append(row.voltage) data["i"].append(row.current) diff --git a/backend/api/database/models/teros_data.py b/backend/api/database/models/teros_data.py index 1a987e3c..ec84b4bc 100644 --- a/backend/api/database/models/teros_data.py +++ b/backend/api/database/models/teros_data.py @@ -64,53 +64,29 @@ def add_protobuf_teros_data(cell_id, ts, vwc, raw_vwc, temp, ec, water_pot): db.session.commit() return teros_data - def get_teros_data(cell_id, resample="hour"): - """gets teros data aggregated by attributes""" - data = [] - - stmt = ( - db.select( - func.date_trunc(resample, TEROSData.ts).label("ts"), - func.avg(TEROSData.vwc).label("vwc"), - func.avg(TEROSData.temp).label("temp"), - func.avg(TEROSData.ec).label("ec"), - ) - .where(TEROSData.cell_id == cell_id) - .group_by(func.date_trunc(resample, TEROSData.ts)) - .order_by(func.date_trunc(resample, TEROSData.ts)) - ) - - for row in db.session.execute(stmt): - data.append( - { - "ts": row.ts, - "vwc": row.vwc, - "temp": row.temp, - "ec": row.ec, - } - ) - return data - def get_teros_data_obj( cell_id, - resample="hour", start_time=datetime.now() - relativedelta(months=1), end_time=datetime.now(), ): """gets teros data as a list of objects""" - data = {"timestamp": [], "vwc": [], "temp": [], "ec": []} + data = { + "timestamp": [], + "vwc": [], + "temp": [], + "ec": [] + } stmt = ( db.select( - func.date_trunc(resample, TEROSData.ts).label("ts"), - func.avg(TEROSData.vwc).label("vwc"), - func.avg(TEROSData.temp).label("temp"), - func.avg(TEROSData.ec).label("ec"), + TEROSData.ts.label("ts"), + TEROSData.vwc.label("vwc"), + TEROSData.temp.label("temp"), + TEROSData.ec.label("ec"), ) .where(TEROSData.cell_id == cell_id) .filter((TEROSData.ts.between(start_time, end_time))) - .group_by(func.date_trunc(resample, TEROSData.ts)) - .order_by(func.date_trunc(resample, TEROSData.ts)) + .order_by(TEROSData.ts) ) for row in db.session.execute(stmt): From 6af65a6fa1828e49f2f42c1241b1c048974b0d22 Mon Sep 17 00:00:00 2001 From: John Madden Date: Thu, 22 Feb 2024 20:49:34 -0800 Subject: [PATCH 08/10] Fixed linting errors --- backend/api/database/models/power_data.py | 29 +---------------------- backend/api/resources/sensor_data_wifi.py | 4 +--- backend/api/resources/util.py | 6 ++++- 3 files changed, 7 insertions(+), 32 deletions(-) diff --git a/backend/api/database/models/power_data.py b/backend/api/database/models/power_data.py index 31e3c54b..bf67613f 100644 --- a/backend/api/database/models/power_data.py +++ b/backend/api/database/models/power_data.py @@ -78,8 +78,7 @@ def get_power_data_obj( "i": [], "p": [], } -<<<<<<< HEAD - + stmt = ( db.select( PowerData.ts.label("ts"), @@ -90,32 +89,6 @@ def get_power_data_obj( .filter((PowerData.ts.between(start_time, end_time))) .subquery() ) -======= - if not stream: - resampled = ( - db.select( - db.func.date_trunc(resample, PowerData.ts).label("ts"), - db.func.avg(PowerData.voltage).label("voltage"), - db.func.avg(PowerData.current).label("current"), - ) - .where((PowerData.cell_id == cell_id)) - # .filter((PowerData.ts >= startTime, PowerData.ts <= endTime)) - .filter((PowerData.ts.between(start_time, end_time))) - .group_by(db.func.date_trunc(resample, PowerData.ts)) - .subquery() - ) - else: - resampled = ( - db.select( - PowerData.ts, - PowerData.voltage, - 
PowerData.current, - ) - .where((PowerData.cell_id == cell_id)) - .filter((PowerData.ts.between(start_time, end_time))) - .subquery() - ) ->>>>>>> main adj_units = db.select( stmt.c.ts.label("ts"), diff --git a/backend/api/resources/sensor_data_wifi.py b/backend/api/resources/sensor_data_wifi.py index e7cd7dd0..8218d8fe 100644 --- a/backend/api/resources/sensor_data_wifi.py +++ b/backend/api/resources/sensor_data_wifi.py @@ -9,11 +9,9 @@ Author: John Madden """ -from flask import request, Response +from flask import request from flask_restful import Resource -from soil_power_sensor_protobuf import encode_response - from .util import process_measurement diff --git a/backend/api/resources/util.py b/backend/api/resources/util.py index 13679e7a..b294d12a 100644 --- a/backend/api/resources/util.py +++ b/backend/api/resources/util.py @@ -15,7 +15,11 @@ def process_measurement(data : bytes): """Process protobuf encoded measurement - The byte string gets decoded through protobuf and inserted into the associated table. Upon successful insertion, a 200 response is sent back. On failure when the server does not know how to handle a measurement type, a 501 response is sent. Both cases have serialized response messages sent back. + The byte string gets decoded through protobuf and inserted into the + associated table. Upon successful insertion, a 200 response is sent back. On + failure when the server does not know how to handle a measurement type, a + 501 response is sent. Both cases have serialized response messages sent + back. Args data: Encoded measurement message From 7b47de1b64fc792b4fd8ed11dcc3810bf9e9e208 Mon Sep 17 00:00:00 2001 From: John Madden Date: Sat, 24 Feb 2024 13:12:51 -0800 Subject: [PATCH 09/10] Updated plot units --- backend/api/database/models/power_data.py | 7 ++++--- backend/api/database/models/teros_data.py | 3 ++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/backend/api/database/models/power_data.py b/backend/api/database/models/power_data.py index bf67613f..a5e2359a 100644 --- a/backend/api/database/models/power_data.py +++ b/backend/api/database/models/power_data.py @@ -90,11 +90,12 @@ def get_power_data_obj( .subquery() ) + # expected units are mV, uA, and uW adj_units = db.select( stmt.c.ts.label("ts"), - stmt.c.voltage.label("voltage"), - stmt.c.current.label("current"), - (stmt.c.voltage * stmt.c.current * 1e-3).label("power") + stmt.c.voltage.label("voltage") * 1e-3, + stmt.c.current.label("current") * 1e-6, + (stmt.c.voltage * stmt.c.current * 1e-6).label("power") ).order_by(stmt.c.ts) for row in db.session.execute(adj_units): diff --git a/backend/api/database/models/teros_data.py b/backend/api/database/models/teros_data.py index 4b670c59..1f3a2ef6 100644 --- a/backend/api/database/models/teros_data.py +++ b/backend/api/database/models/teros_data.py @@ -78,10 +78,11 @@ def get_teros_data_obj( "ec": [] } + # VWC stored in decimal, converted to percentage stmt = ( db.select( TEROSData.ts.label("ts"), - TEROSData.vwc.label("vwc"), + (TEROSData.vwc * 100).label("vwc"), TEROSData.temp.label("temp"), TEROSData.ec.label("ec"), ) From a2aa7156629eaa233fabb641c8da40332e83077b Mon Sep 17 00:00:00 2001 From: John Madden Date: Sat, 24 Feb 2024 13:18:58 -0800 Subject: [PATCH 10/10] Removd migrate.sh from backend docker --- backend/Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index 427cc717..96248032 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -111,7 +111,6 @@ RUN pip install --no-cache 
/wheels/* # copy project COPY . $APP_HOME -COPY migrate.sh $HOME # chown all the files to the app user RUN chown -R app:app $APP_HOME
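
The patches above replace the old /power-proto and /teros-proto resources with a single binary upload endpoint (Measurement_Direct, registered at /sensor/). A minimal client-side sketch of posting to it follows; the base URL, any additional URL prefix, and the measurement bytes are assumptions rather than part of the patches, with localhost:8000 taken from entrypoint.sh.

import requests

# Placeholder: a protobuf-encoded measurement message produced elsewhere,
# e.g. by the sensor firmware or the soil_power_sensor_protobuf tooling.
measurement_bytes = b"..."

# The endpoint only accepts application/octet-stream (see sensor_data_wifi.py).
resp = requests.post(
    "http://localhost:8000/sensor/",
    data=measurement_bytes,
    headers={"Content-Type": "application/octet-stream"},
)

# process_measurement() responds 200 with a protobuf-encoded body on a
# successful insert and 501 when the measurement type is not handled.
print(resp.status_code, resp.content)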
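
The TTN webhook resource (Measurement_Upink, registered at /ttn/) expects a JSON body and base64-decodes the frm_payload it finds under data.uplink_message (see sensor_data_ttn.py after PATCH 06). The sketch below only builds the fields the handler actually reads; a real TTN webhook delivery carries many more, and the base URL is again an assumption.

import base64

import requests

measurement_bytes = b"..."  # placeholder protobuf-encoded measurement

# Minimal body matching what the handler reads:
# uplink_json["data"]["uplink_message"]["frm_payload"]
uplink = {
    "data": {
        "uplink_message": {
            "frm_payload": base64.b64encode(measurement_bytes).decode("ascii"),
        }
    }
}

# json= sets the Content-Type: application/json header the handler requires
resp = requests.post("http://localhost:8000/ttn/", json=uplink)
print(resp.status_code)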
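
The same behaviour can be exercised without a running server through Flask's test client. A rough test sketch, assuming the package is importable as api (the name migrate.sh uses), that create_app() can be pointed at a test database, and that measurement_bytes is a valid encoded measurement; fixture wiring is left out.

from api import create_app


def test_direct_upload(measurement_bytes: bytes):
    app = create_app(debug=True)
    client = app.test_client()

    resp = client.post(
        "/sensor/",
        data=measurement_bytes,
        headers={"Content-Type": "application/octet-stream"},
    )

    # util.process_measurement() responds 200 on insert, 501 for unknown types,
    # always with an application/octet-stream protobuf body
    assert resp.status_code in (200, 501)
    assert resp.content_type == "application/octet-stream"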