Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 20 additions & 11 deletions TelemetrySite/client/src/DataUpload/DataUpload.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import {
Form,
} from "reactstrap";
import { useNavigate } from "react-router-dom";
import ErrorModal from "./ErrorModal";

/**
 * Upload a mf4 file to the backend server as
Expand All @@ -30,6 +31,8 @@ function DataUpload() {

const [progressBar, setProgressBar] = useState(null);

const [err, setErr] = useState(null);

let navigate = useNavigate();

/**
Expand All @@ -44,7 +47,7 @@ function DataUpload() {
navigate(url);
};

const DisplayRedirect = () => {
const DisplayRedirect = (err) => {
setBodyDisplay(
<Container className='button-container'>
<Col>
Expand Down Expand Up @@ -100,12 +103,13 @@ function DataUpload() {
try {
const response = await CheckData(); // Await the result
if (!response) {
console.error("CheckData failed or returned an invalid response.");
return false; // Stop execution if CheckData fails
let error_str = "CheckData failed or returned an invalid response.";
console.error(error_str);
return {val: false, error: error_str}; // Stop execution if CheckData fails
}
} catch (error) {
console.error("Error in CheckData:", error);
return false;
return {val: false, "error": error};
}
}

Expand All @@ -127,13 +131,13 @@ function DataUpload() {
"Error occurred on server side. Error message: " +
jsonResponse.error
);
return false;
return {val: false, "error": jsonResponse.error};
}
incrementRunOrderNumber();
return await response.json();
return {val: response.json()};
} catch (error) {
console.error("Network or server error:", error);
return false;
return {val: false, "error": error};
}
};

Expand Down Expand Up @@ -228,12 +232,12 @@ function DataUpload() {
* with the option of keeping the same event data
*/
response.then((responseValue) => {
if (responseValue != false) {
clearInterval(interval);
setProgressBar(null);
if (responseValue.val != false) {
responseValue = responseValue.val;
sessionStorage.setItem("DataSubmitted", true);

clearInterval(interval);
setProgressBar(null);

const parsedContextData = JSON.parse(contextData);

//save the needed event details to be displayed on the next page
Expand All @@ -249,6 +253,10 @@ function DataUpload() {

DisplayRedirect();
}
else {
setErr(responseValue.error);
DisplayRedirect(responseValue.error);
}
});
};

Expand Down Expand Up @@ -301,6 +309,7 @@ function DataUpload() {

return (
<Container fluid className='outer-container'>
<ErrorModal error={err} />
<Card className='upload-card'>
<CardBody fluid className='text-center'>
{bodyDisplay}
Expand Down
39 changes: 39 additions & 0 deletions TelemetrySite/client/src/DataUpload/ErrorModal.jsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import { useState, useEffect } from "react";
import {
  Button,
  Modal,
  ModalHeader,
  ModalBody,
  ModalFooter,
} from "reactstrap";

function ErrorModal(props) {
const [isErrorModalActive, setErrorModal] = useState(true);
const ModalErrorToggle = () => {
console.log("switching to", !isErrorModalActive);
setErrorModal(!isErrorModalActive);
console.log("after update. ", isErrorModalActive);
};
if (props.error == null) {
return ;
}
return (
<div>
<Modal isOpen={isErrorModalActive} toggle={ModalErrorToggle} >
<ModalHeader toggle={ModalErrorToggle}>
ERROR
</ModalHeader>
<ModalBody>
{props.error.message}
</ModalBody>
<ModalFooter>
<Button color="primary" onClick={ModalErrorToggle}>
Acknowledge
</Button>
</ModalFooter>
</Modal>
</div>
);
}

export default ErrorModal;
49 changes: 30 additions & 19 deletions TelemetrySite/server/data_upload_scripts/data_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
parsing_data_progress = [0]
uploading_data_progress = [0]

documents_loaded_id = [] # have ideas of making this thread safe, if needed

## Create a connection to the Mongo DB
#
# @return db connection object
Expand All @@ -26,31 +28,37 @@ def create_db_connection():
# @param data_path path to the mf4 file
# @param dbc_file path to the dbc file
# @param context_id context id for the data
def submit_data(mf4_file, dbc_file, context_data, runOrderNumber):
def submit_data(mf4_file, dbc_file, context_data, runOrderNumber) -> tuple | str :

db_connection = create_db_connection()
fs = gridfs.GridFS(db_connection)


dbc_decoded = cantools.database.load_file(dbc_file)

# get a dictionary of CAN id -> Board name
can_id_values = get_board_names(dbc_decoded)
# create an outline of how to read the data
config_values = createConfig(can_id_values, dbc_decoded)
# turn data from CAN messages -> list
data_values_json = parse_data(mf4_file, config_values)
try:
# get a dictionary of CAN id -> Board name
can_id_values = get_board_names(dbc_decoded)
# create an outline of how to read the data
config_values = createConfig(can_id_values, dbc_decoded)
# turn data from CAN messages -> list
data_values_json = parse_data(mf4_file, config_values)

context_data = json.loads(context_data)

create_db_connection()["files"]
if runOrderNumber == None:
runOrderNumber=0
context_data["event"]["runs"][0]["mf4File"]=fs.put(mf4_file, encoding="utf-8")
context_data["event"]["runs"][0]["dbcFile"]=fs.put(dbc_file, encoding="utf-8")
context_data["event"]["runs"][0]["orderNumber"] = runOrderNumber

return(upload_data_in_chunks(context_data, data_values_json))
context_data = json.loads(context_data)

create_db_connection()["files"]
if runOrderNumber == None:
runOrderNumber=0
context_data["event"]["runs"][0]["mf4File"]=fs.put(mf4_file, encoding="utf-8")
context_data["event"]["runs"][0]["dbcFile"]=fs.put(dbc_file, encoding="utf-8")
context_data["event"]["runs"][0]["orderNumber"] = runOrderNumber

return(upload_data_in_chunks(context_data, data_values_json))
except Exception as e:
print(e)
for id in documents_loaded_id: # delete all ids if an error occurs
fs.delete(id)
return -1, "Upload halted and all uploaded files from this session have been removed. Had error: " + str(e) # return as tuple to make sure that the error gets to the front end for display. Stack trace stays server side so we don't scare the EEs



Expand All @@ -66,6 +74,8 @@ def upload_data_in_chunks(new_run_data, data_values_json):
# divide the data into chunks that are less the 16 mb
# 150_000 ~< 15 mb but always < 16 mb
sliced_data = list(sliced(data_values_json, 150_000))

documents_loaded_id.clear()

collection_access_messages = create_db_connection()["messages"]
for data_index in range(0, len(sliced_data)):
Expand All @@ -87,7 +97,8 @@ def upload_data_in_chunks(new_run_data, data_values_json):
data.append(sliced_data[data_index+1].pop())
data_upload["event"]["runs"][0]["messages"] = data
# add data to its own document and get the reference id number
collection_access_messages.insert_one(data_upload)
res = collection_access_messages.insert_one(data_upload)
documents_loaded_id.append(res.inserted_id) # adds the inserted id to an array of documents

uploading_data_progress[0]=1
return 0
Expand Down Expand Up @@ -238,7 +249,7 @@ def parse_data(mdf_path, config_values):
# if something uses 7 bits, something else will use the last bit

raw_result = read_bits(
data_list[previous_bytes_used],
data_list[previous_bytes_used], # swapped data_list with data_array bc they are the same
previous_bits_used,
previous_bits_used + data_length_bits,
)
Expand Down
96 changes: 52 additions & 44 deletions TelemetrySite/server/data_upload_scripts/data_upload_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,55 +21,63 @@ def get(self):

## Take in a mf4, dbc, and json object for the run and submit to nrdb
def post(self):
# Check if the post request has all needed data needed
if "mf4File" not in request.files:
return jsonify({"error": "No mf4 file uploaded"}), 400
elif "dbcFile" not in request.files:
return jsonify({"error": "No dbc file uploaded"}), 400
elif "contextData" not in request.form:
return jsonify({"error": "No context data passed"}), 400
# save all needed data to local variables
mf4File = request.files["mf4File"]
dbcFile = request.files["dbcFile"]
context_data = request.form["contextData"]
runOrderNumber = request.form["runOrderNumber"]

# ensure the file actually contains a valid file name and files
if not mf4File or mf4File.name == "":
return jsonify({"error": "No mf4 file uploaded"}), 400
elif not dbcFile or dbcFile.name == "":
return jsonify({"error": "No dbc file uploaded"}), 400
try:
# Check if the post request has all needed data needed
if "mf4File" not in request.files:
return jsonify({"error": "No mf4 file uploaded"}), 400
elif "dbcFile" not in request.files:
return jsonify({"error": "No dbc file uploaded"}), 400
elif "contextData" not in request.form:
return jsonify({"error": "No context data passed"}), 400
# save all needed data to local variables
mf4File = request.files["mf4File"]
dbcFile = request.files["dbcFile"]
context_data = request.form["contextData"]
runOrderNumber = request.form["runOrderNumber"]

# ensure the file actually contains a valid file name and files
if not mf4File or mf4File.name == "":
return jsonify({"error": "No mf4 file uploaded"}), 400
elif not dbcFile or dbcFile.name == "":
return jsonify({"error": "No dbc file uploaded"}), 400

# ensure files are of correct file type before uploading data to NRDB
if self.file_type_check(mf4File.filename) and self.file_type_check(
dbcFile.filename
):
# Secure file names for best practice when saving external
# gets rid of any "/" or "." that can change where the file is saved
# ensure files are of correct file type before uploading data to NRDB
if self.file_type_check(mf4File.filename) and self.file_type_check(
dbcFile.filename
):
# Secure file names for best practice when saving external
# gets rid of any "/" or "." that can change where the file is saved

mf4FileName = secure_filename(mf4File.filename)
dbcFileName = secure_filename(dbcFile.filename)
mf4FileName = secure_filename(mf4File.filename)
dbcFileName = secure_filename(dbcFile.filename)

mf4_file = os.path.join(self.UPLOAD_FOLDER, mf4FileName)
dbc_file = os.path.join(self.UPLOAD_FOLDER, dbcFileName)
mf4_file = os.path.join(self.UPLOAD_FOLDER, mf4FileName)
dbc_file = os.path.join(self.UPLOAD_FOLDER, dbcFileName)

mf4File.save(mf4_file)
dbcFile.save(dbc_file)

document_id = submit_data(mf4_file, dbc_file, context_data, runOrderNumber)
mf4File.save(mf4_file)
dbcFile.save(dbc_file)
document_id = submit_data(mf4_file, dbc_file, context_data, runOrderNumber)

# remove the file from the server side
os.remove(mf4_file)
os.remove(dbc_file)

return jsonify({"id": document_id}), 201
else:
return (
jsonify(
{"error": "Wrong file type submitted. Must be of type mf4 and dbc"}
),
400,
)
# remove the file from the server side
os.remove(mf4_file)
os.remove(dbc_file)


if document_id is tuple : # returned as tuple to also contain specific error to front end
if document_id[0] == -1:
return jsonify({"error": document_id[1]}), 400

return jsonify({"id": document_id}), 201
else:
return (
jsonify(
{"error": "Wrong file type submitted. Must be of type mf4 and dbc"}
),
400,
)
except Exception as e:
return jsonify({"error": str(e)}),

## Ensure the uploaded file types match the expected files
#
Expand Down