Commit 60946e7c authored by Lennard

Various updates

- Fix multiple headers in datalog
- Fix data log files having wrong date again
- Add log for dir file size
- Update measure to hopefully measure better :grinning:
parent 47674680
@@ -10,7 +10,7 @@ variables:
workflow:
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_PIPELINE_SOURCE == "schedule" # only run this pipeline upon a schedule event
convert:
stage: convert
@@ -27,9 +27,6 @@ convert:
upload:
stage: upload
image: curlimages/curl:latest
script:
- echo "Compiling the code..."
- echo "Compile complete."
script:
- 'curl --header "JOB-TOKEN: ${CI_JOB_TOKEN}" --upload-file data.zip "${PACKAGE_REGISTRY_URL}/data.zip"'
"""Convert csv data into mat files to read into matlab.
Combines the files from one weak and converts it into a single '.mat' file.
Combines the files from one week and converts it into a single '.mat' file.
"""
from datetime import datetime, timedelta
@@ -41,14 +41,14 @@ for file in files:
Path(f"{Path(__file__).parent}/out").mkdir(parents=True, exist_ok=True)
# save each week as seperate '.mat' file
# save each week as seperate '.mat' file in 'out' folder
for week_start, arr in data.items():
scipy.io.savemat(
f"{Path(__file__).parent}/out/data.{week_start}.mat",
mdict={name: column for name, column in zip(header, np.split(arr, arr.shape[1], axis=1))},
)
# zip folder
# zip 'out' folder
shutil.make_archive("data", "zip", "out")
# Update CHANGELOG.md
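To sanity-check a converted week, the '.mat' file can be read straight back. A minimal sketch, assuming the keys match the data-log header names (Timestamp, dms1..dms4, temp1..temp4, n) and a hypothetical week stamp in the filename:

import scipy.io

mat = scipy.io.loadmat("out/data.2024-01-01.mat")  # hypothetical week stamp
dms1 = mat["dms1"]  # one (N, 1) column per header name
print(dms1.shape, dms1.mean())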
Source diff could not be displayed: it is stored in LFS.
@@ -25,6 +25,7 @@ class TimedRotatingFileHandlerWithHeader(logging.handlers.TimedRotatingFileHandl
self.first = True
super().__init__(filename, when=when, interval=interval, backupCount=backupCount, atTime=atTime)
self.namer = self._namer
print(datetime.datetime.fromtimestamp(self.rolloverAt).strftime('%Y-%m-%d %H:%M:%S'))
@staticmethod
def _namer(filename: str) -> str:
@@ -32,9 +33,8 @@ class TimedRotatingFileHandlerWithHeader(logging.handlers.TimedRotatingFileHandl
def emit(self, record):
try:
if self.shouldRollover(record) or self.first:
if self.first and self._header:
stream = self.stream
if self._header:
stream.write(self._header + self.terminator)
else:
stream = self.stream
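The gist of the header fix, as a self-contained sketch rather than the committed class: write the header exactly when a fresh file is opened, on first use and after each rollover, and never on ordinary emits. Assumes delay=False so the stream is already open:

import logging.handlers

class HeaderedTimedHandler(logging.handlers.TimedRotatingFileHandler):
    # hypothetical minimal variant, not the project's exact class
    def __init__(self, filename, header="", **kwargs):
        self._header = header
        super().__init__(filename, **kwargs)
        if self._header:
            self.stream.write(self._header + self.terminator)  # header for the very first file

    def doRollover(self):
        super().doRollover()  # closes the old file and opens the fresh one
        if self._header:
            self.stream.write(self._header + self.terminator)  # header for every rotated file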
@@ -87,7 +87,7 @@ def get_offset() -> np.ndarray:
sys.excepthook = handle_exception
def setup_loggers(config: Any) -> None:
def setup_loggers(config: Any, data_folder='data', info_folder='logs') -> None:
"""
Configure the two loggers. DataLogger for logging the data and InfoLogger for logging various information.
"""
@@ -98,7 +98,7 @@ def setup_loggers(config: Any) -> None:
fh.append(
TimedRotatingFileHandlerWithHeader(
header=f"Timestamp,{','.join([f'dms{i+1}' for i in range(4)])},{','.join([f'temp{i+1}' for i in range(4)])},n",
filename=f"{Path(__file__).parent}/data/data",
filename=f"{Path(__file__).parent}/{data_folder}/data",
when="h",
interval=23,
backupCount=config["DataLogger"]["backupCount"],
@@ -116,7 +116,7 @@ def setup_loggers(config: Any) -> None:
bf = logging.Formatter("{asctime}, {levelname}, [{name}.{funcName}:{lineno}]\t{message}", datefmt=r"%Y-%m-%d %H:%M:%S", style="{")
fh.append(
logging.handlers.RotatingFileHandler(
filename=f"{Path(__file__).parent}/logs/log",
filename=f"{Path(__file__).parent}/{info_folder}/log",
maxBytes=config["InfoLogger"]["maxBytes"],
backupCount=config["InfoLogger"]["backupCount"],
)
@@ -171,9 +171,9 @@ def main(config: Any) -> None:
recv1 = None
off1 = None
con2.write(2)
con2.write(1)
# offsets for writing data in correct column
off2 = 4 if int(convert(con2.readline())) == 2.0 else 0
off2 = 0 if int(convert(con2.readline())) == 1.0 else 4
for i in range(4):
recv2 = con2.readline()
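The identification step above amounts to a tiny handshake. A sketch with hypothetical names, assuming each board answers a poke with its ID on one line; note the flipped condition maps IDs 1 and 2 to the same offsets as before and only changes where an unexpected reply lands:

def column_offset(connection) -> int:
    connection.write(b"\x01")  # any byte prompts the board to answer
    board_id = int(float(connection.readline().decode().strip()))  # reply is e.g. "1" or "2"
    return 0 if board_id == 1 else 4  # board 1 fills dms columns 0..3, board 2 the temp columns 4..7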
@@ -185,7 +185,7 @@ def main(config: Any) -> None:
data = new_data
except (TypeError, ValueError):
# may occur if no data was read over serial, but why???
logger.exception(f"Didn't receive data from arduino, off1: {off1}, off2: {off2}, recv1: {recv1}, recv2: {recv2}")
logger.info(f"Didn't receive data from arduino, off1: {off1}, off2: {off2}, recv1: {recv1}, recv2: {recv2}", exc_info=True)
if time.time() - last_write > delta_time:
# write data
@@ -197,6 +197,9 @@ def main(config: Any) -> None:
fh[0].doRollover() # rollover the current data log file
Path(f"{Path(__file__).parent}/data/data").unlink(missing_ok=True) # delete old data file
logger.warning("Finished")
@@ -12,15 +12,18 @@ import os
import time
import threading
import traceback
from typing import Any
import serial
import serial.serialutil
import sys
import datetime
import yaml
from pathlib import Path
import numpy as np
from multiprocessing_logging import install_mp_handler
# we want to be able to log from multiple processes
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger()
install_mp_handler()
from main import logger, data_logger, fh, get_offset, setup_loggers
# separate logger that only stores events into a file
prof_logger = logging.getLogger("profiling")
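A file-only logger of that kind could look like this (a sketch, assuming prof_logger should not propagate to the root logger):

import logging

prof_logger = logging.getLogger("profiling")
prof_logger.propagate = False  # keep profiling events out of the other handlers
prof_logger.addHandler(logging.FileHandler("profiling.log"))
prof_logger.setLevel(logging.INFO)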
@@ -84,10 +87,10 @@ def read_value(connection: serial.Serial) -> bytes:
@log_profile("read")
def read(connection: serial.Serial):
for _ in range(4):
recv1 = read_value(connection)
float(convert(recv1))
def read(connection: serial.Serial, data: np.ndarray, off: int):
for i in range(4):
recv = read_value(connection)
data[i + off] += float(convert(recv))
@log_profile("write")
@@ -96,14 +99,14 @@ def write(connection: serial.Serial):
@log_profile("offset")
def offset(connection: serial.Serial):
def offset(connection: serial.Serial) -> int:
return 0 if int(convert(connection.readline())) == 1.0 else 4
@log_profile("write_data")
def write_data(n: int):
print(f"writing data, {n}")
time.sleep(10e-3)
def write_data(data: np.ndarray, n: int, factors: np.ndarray, offsets: np.ndarray):
data_logger.info(",".join([f"{(value/n) * factors[i] - offsets[i]:.5f}" for i, value in enumerate(data)]) + f",{n}")
logger.debug("Wrote data")
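The formatting line averages the accumulated sums and applies the per-channel calibration, value/n * factor - offset. A toy check with made-up numbers:

import numpy as np

data = np.array([30.0, 0, 0, 0, 75.0, 0, 0, 0])  # sums accumulated over one window
n = 3  # samples in that window
factors = np.array([2.0, 1, 1, 1, 1, 1, 1, 1])
offsets = np.array([5.0, 0, 0, 0, 0, 0, 0, 0])
row = ",".join(f"{(value / n) * factors[i] - offsets[i]:.5f}" for i, value in enumerate(data))
print(row + f",{n}")  # first field: (30 / 3) * 2.0 - 5.0 = 15.00000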
def convert(data) -> str:
@@ -111,42 +114,62 @@ def convert(data) -> str:
@log_profile("get_data")
def get_data(con1: serial.Serial, con2: serial.Serial):
def get_data(con1: serial.Serial, con2: serial.Serial) -> np.ndarray:
data = np.zeros((8,))
try:
for connection in [con1, con2]:
write(connection)
offset(connection)
read(connection)
off = offset(connection)
read(connection, data, off)
except (TypeError, ValueError):
# may occur if no data was read over serial
log_event(ph="I", ts=time_usec(), name="NoData", cat="NoData", **base_info)
print(f"Didn't receive data from arduino, {traceback.format_exc().replace(os.linesep, '')}")
logger.info(f"Didn't receive data from arduino", exc_info=True)
return data
@log_profile("loop")
def loop(con1: serial.Serial, con2: serial.Serial):
def loop(con1: serial.Serial, con2: serial.Serial, factors: np.ndarray, offsets: np.ndarray):
last_write = time.time()
delta_time = 30
n = 0
data = np.zeros((8,))
while time.time() - last_write < delta_time:
get_data(con1, con2)
data += get_data(con1, con2)
n += 1
write_data(n)
write_data(data, n, factors, offsets)
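So the loop now accumulates raw sums for delta_time seconds and hands one averaged row to write_data instead of logging every read. The pattern in miniature, names hypothetical:

import time
import numpy as np

def measure_window(sample, seconds=30):
    total, n, start = np.zeros(8), 0, time.time()
    while time.time() - start < seconds:
        total += sample()  # one raw 8-channel reading
        n += 1
    return total, n  # the caller divides by n when formatting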
@log_profile("main")
def main() -> None:
def main(config: Any) -> None:
print("Starting")
try:
Path(f"{Path(__file__).parent}/test_data").mkdir(parents=True, exist_ok=True)
Path(f"{Path(__file__).parent}/test_logs").mkdir(parents=True, exist_ok=True)
setup_loggers(config, "test_data", "test_logs")
delta_time = config["Data"]["delta_time"] # log averaged out data every n seconds
end_time = datetime.datetime.combine(datetime.date.today(), datetime.time(1, 0, 0, 0))
logger.warning("Starting")
factors: np.ndarray = np.hstack((np.array(config["Data"]["factors"]), np.ones((4,))))
offsets: np.ndarray = np.hstack((get_offset(), np.zeros((4,))))
logger.info(
f"Factors: {', '.join(f'{factor:.3f}' for factor in factors[:4])}, Offset: {', '.join(f'{offset:.3f}' for offset in offsets[:4])}"
)
with serial.Serial("/dev/ttyACM0", 9600, timeout=3) as con1, serial.Serial("/dev/ttyACM1", 9600, timeout=3) as con2:
for _ in range(100):
loop(con1, con2)
except serial.serialutil.SerialException:
print(traceback.format_exc())
print("Finished")
loop(con1, con2, factors, offsets)
fh[0].doRollover() # rollover the current data log file
logger.warning("Finished")
if __name__ == "__main__":
main()
main(yaml.safe_load(open(f"{Path(__file__).parent}/config.yml")))
convert_log_to_trace("profiling.log", "profiling_trace.json")
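One detail in main() above worth spelling out: the hstack calls pad the four configured dms factors and offsets with identity values so both arrays line up with all eight data columns. With made-up config values:

import numpy as np

factors = np.hstack((np.array([1.1, 0.9, 1.2, 1.0]), np.ones((4,))))
print(factors)  # [1.1 0.9 1.2 1.  1.  1.  1.  1. ] -> dms columns scaled, temp columns untouched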
@@ -8,6 +8,8 @@ while pidof -x "python3 main.py">/dev/null; do
done
sleep 10
du -hc --max-depth=1 .
scripts/./write.bash
sleep 10
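For reference, du -hc --max-depth=1 . prints one line per top-level directory plus a grand total, so the growth of the data and log folders shows up in the job output. Roughly, with made-up sizes:

1.2G    ./data
12M     ./logs
1.3G    .
1.3G    total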
@@ -8,6 +8,7 @@
#define DHT_Typ DHT11
DHT dhts[] = {DHT(DHT1_Pin, DHT_Typ), DHT(DHT2_Pin, DHT_Typ), DHT(DHT3_Pin, DHT_Typ), DHT(DHT4_Pin, DHT_Typ)};
float temp[4];
void setup() {
Serial.begin(9600);
@@ -21,16 +22,17 @@ void loop() {
if (Serial.available()) {
Serial.read();
delay(20);
delay(40);
Serial.println(2); // for identification
for (int i = 0; i < sizeof(dhts) / sizeof(*dhts); i++) {
dhts[i].readHumidity();
float temp = dhts[i].readTemperature();
temp[i] = dhts[i].readTemperature();
}
delay(20);
Serial.println(temp);
for (int i = 0; i < sizeof(dhts) / sizeof(*dhts); i++) {
delay(40);
Serial.println(temp[i]);
}
}
}
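With the temperatures buffered first and then printed one per line, the exchange the host sees per request should look like this (made-up readings):

host  -> any byte
board <- 2        (identification)
board <- 23.40
board <- 23.10
board <- 22.90
board <- 23.25

which matches the four readline() calls on the Python side.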