feat: add reading of zone info from CSV

This commit is contained in:
2025-11-12 12:02:58 +00:00
parent be2c89bcc2
commit e38d21598f
9 changed files with 150 additions and 108 deletions
+30 -21
View File
@@ -1,6 +1,7 @@
import logging
import time
import os
import csv
from pathlib import Path
from config import Config
@@ -14,14 +15,25 @@ if __name__ == "__main__":
# Create the output folder tree up front; exist_ok=True makes re-runs safe.
os.makedirs(Path(Config.ASC_TOP_FOLDER), exist_ok=True)
os.makedirs(Path(Config.CSV_TOP_FOLDER), exist_ok=True)
os.makedirs(Path(Config.COMBINED_FOLDER), exist_ok=True)
# NOTE(review): despite the *_count names these are lists of filenames, not
# counts — only their len() is compared later. Consider renaming, or using
# len(os.listdir(...)) directly.
dat_file_count = [f for f in os.listdir(Path(Config.DAT_TOP_FOLDER))]
asc_file_count = [f for f in os.listdir(Path(Config.ASC_TOP_FOLDER))]
# Diff old side (removed in this commit): the previous hard-coded location
# table, superseded by the CSV-driven loading introduced below.
locations = [
# Columns: location name, OS grid ref (loc id), easting (x), northing (y), output group
["BRICSC", "TM0816", 608500, 216500, 1],
["HEACSC", "TF6842", 568500, 342500, 1],
]
# Diff new side (added): populate `locations` from every file found in the
# zone folder, one entry per CSV data row.
locations = []
# Load zone definitions from the zone-folder CSV files.
for file in os.listdir(Path(Config.ZONE_FOLDER)):
# NOTE(review): no filtering to *.csv — any stray file in the folder
# (e.g. a README) will be fed to the csv parser. Consider a suffix check.
with open(Path(Config.ZONE_FOLDER,file), 'r') as csvfile:
# NOTE(review): the csv module docs recommend opening with newline=""
# to parse quoted embedded newlines correctly; no encoding is given
# either — confirm the files are in the platform default encoding.
reader = csv.reader(csvfile)
header = next(reader) # Skip header row; NOTE(review): raises StopIteration on an empty file
for row in reader:
# Extract the relevant fields: Ossheet (location ID), Easting, Northing, Zone
# NOTE(review): column positions are hard-coded — verify against the
# real zone CSV header that row[1]=Ossheet, row[2]=Easting,
# row[3]=Northing and row[6]=ZoneID.
zone_id = row[1] # Ossheet column
easting = int(row[2]) # Easting column
northing = int(row[3]) # Northing column
zone = int(row[6]) # ZoneID column
locations.append([zone_id, easting, northing, zone])
# testing locations, can be removed.
# TODO(review): delete these hard-coded test entries before release — they
# duplicate the CSV-loaded data and will produce extra output files.
locations.append(["TM0816", 608500, 216500, 1])
locations.append(["TF6842", 568500, 342500, 1])
# Build the processing pipeline stages from the shared Config.
batch = BatchNimrod(Config)
timeseries = GenerateTimeseries(Config)
@@ -29,24 +41,21 @@ if __name__ == "__main__":
# Convert DAT inputs to ASC, logging wall-clock timing.
start = time.time()
logging.info("Starting to process DAT to ASC")
# Diff old side (removed): only convert when the DAT and ASC folder
# listings differ in length, otherwise skip.
if len(dat_file_count) != len(asc_file_count):
batch.process_nimrod_files()
batch_checkpoint = time.time()
elapsed_time = batch_checkpoint - start
logging.info(f"DAT to ASC completed in {elapsed_time:.2f} seconds")
else:
logging.info("No need to process DAT files, skipping...")
batch_checkpoint = time.time()
time.sleep(1)
# Diff new side (added): always reprocess DAT -> ASC unconditionally.
# NOTE(review): this drops the skip-when-up-to-date shortcut and leaves
# dat_file_count/asc_file_count unused — confirm that reprocessing on
# every run is intended.
batch.process_nimrod_files()
batch_checkpoint = time.time()
elapsed_time = batch_checkpoint - start
logging.info(f"DAT to ASC completed in {elapsed_time:.2f} seconds")
# Generate a cropped rainfall timeseries for each configured location,
# logging per-location and cumulative timings.
for place in locations:
logging.info(f"{place[0]} started generating timeseries data.")
place_start = time.time()
timeseries.extract_cropped_rain_data(place)
# Diff old side (removed): timed each place relative to the shared
# post-ASC checkpoint, so later places reported ever-growing durations.
place_checkpoint = time.time()
since_asc_create = place_checkpoint - batch_checkpoint
elapsed_time = place_checkpoint - start
logging.info(f"{place[0]} completed in {since_asc_create:.2f} seconds")
logging.info(f"total time so far {elapsed_time:.2f} seconds")
# Diff new side (added): times each place against its own start instead,
# giving a true per-location duration.
place_end = time.time()
place_create_time = place_end - place_start
elapsed_time = place_end - start
logging.info(f"{place[0]} completed in {place_create_time:.2f} seconds")
logging.info(f"Total time so far {elapsed_time:.2f} seconds")
# Merge the per-location CSVs into their output groups.
# NOTE(review): `combiner` is not defined anywhere in this hunk —
# presumably constructed alongside batch/timeseries; verify in full file.
logging.info("combining CSVs into groups")
combiner.combine_csv_files()