Skip to content

Commit f5ebb08

Browse files
committed
get current UTC timestamps in the recommended way
as per https://docs.python.org/3/library/datetime.html#datetime.datetime.utcnow + refactor into common module (addresses #125 for timeseries products only)
1 parent 5041611 commit f5ebb08

6 files changed

Lines changed: 53 additions & 56 deletions

File tree

aodntools/timeseries_products/aggregated_timeseries.py

Lines changed: 9 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -5,15 +5,15 @@
55
import os
66
import shutil
77
import tempfile
8-
from datetime import datetime
98

109
import numpy as np
1110
import xarray as xr
1211
from netCDF4 import Dataset, num2date, stringtochar
1312
from pkg_resources import resource_filename
1413

1514
from aodntools import __version__
16-
from aodntools.timeseries_products.common import NoInputFilesError, check_file, in_water
15+
from aodntools.timeseries_products.common import (NoInputFilesError, check_file, in_water, current_utc_timestamp,
16+
TIMESTAMP_FORMAT, DATESTAMP_FORMAT)
1717

1818
TEMPLATE_JSON = resource_filename(__name__, 'aggregated_timeseries_template.json')
1919

@@ -305,13 +305,10 @@ def main_aggregator(files_to_agg, var_to_agg, site_code, input_dir='', output_di
305305
ds['source_file'].setncatts(source_file_attributes(download_url_prefix, opendap_url_prefix))
306306

307307
## set global attrs
308-
timeformat = '%Y-%m-%dT%H:%M:%SZ'
309-
file_timeformat = '%Y%m%d'
310-
311-
time_start = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(timeformat)
312-
time_end = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(timeformat)
313-
time_start_filename = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(file_timeformat)
314-
time_end_filename = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(file_timeformat)
308+
time_start = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(TIMESTAMP_FORMAT)
309+
time_end = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(TIMESTAMP_FORMAT)
310+
time_start_filename = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(DATESTAMP_FORMAT)
311+
time_end_filename = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(DATESTAMP_FORMAT)
315312

316313
add_attribute = {
317314
'title': ("Long Timeseries Velocity Aggregated product: " + var_to_agg + " at " +
@@ -325,8 +322,8 @@ def main_aggregator(files_to_agg, var_to_agg, site_code, input_dir='', output_di
325322
'geospatial_lat_max': np.max(ds['LATITUDE'][:]),
326323
'geospatial_lon_min': np.min(ds['LONGITUDE'][:]),
327324
'geospatial_lon_max': np.max(ds['LONGITUDE'][:]),
328-
'date_created': datetime.utcnow().strftime(timeformat),
329-
'history': datetime.utcnow().strftime(timeformat) + ': Aggregated file created.',
325+
'date_created': current_utc_timestamp(),
326+
'history': current_utc_timestamp() + ': Aggregated file created.',
330327
'keywords': ', '.join([var_to_agg, 'AGGREGATED']),
331328
'rejected_files': "\n".join(rejected_files),
332329
'generating_code_version': __version__}
@@ -348,7 +345,7 @@ def main_aggregator(files_to_agg, var_to_agg, site_code, input_dir='', output_di
348345
file_version = 1
349346
output_name = '_'.join(['IMOS', facility_code, data_code, time_start_filename, site_code, ('FV0'+str(file_version)),
350347
(var_to_agg + "-" + product_type),
351-
('END-'+ time_end_filename), 'C-' + datetime.utcnow().strftime(file_timeformat)]) + '.nc'
348+
('END-'+ time_end_filename), 'C-' + current_utc_timestamp(DATESTAMP_FORMAT)]) + '.nc'
352349
ncout_path = os.path.join(output_dir, output_name)
353350
shutil.move(temp_outfile, os.path.join(output_dir, ncout_path))
354351

aodntools/timeseries_products/common.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,12 @@
11
"""Code shared by all timeseries product generating code"""
2+
from datetime import datetime, timezone
3+
24
import numpy as np
35

6+
# Common date/time format strings
7+
TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
8+
DATESTAMP_FORMAT = '%Y%m%d'
9+
410

511
class NoInputFilesError(Exception):
612
"""Exception raised if there are no valid input files to aggregate"""
@@ -183,4 +189,8 @@ def in_water(nc):
183189
:param nc: xarray dataset
184190
:return: xarray dataset
185191
"""
186-
return nc.where(in_water_index(nc), drop=True)
192+
return nc.where(in_water_index(nc), drop=True)
193+
194+
195+
def current_utc_timestamp(format=TIMESTAMP_FORMAT):
196+
return datetime.now(timezone.utc).strftime(format)

aodntools/timeseries_products/gridded_timeseries.py

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,15 @@
33
import argparse
44
import os.path
55
import json
6-
from datetime import datetime
6+
from datetime import datetime, timezone
77

88
import xarray as xr
99
import pandas as pd
1010

1111
from pkg_resources import resource_filename
1212

1313
from aodntools import __version__
14+
from aodntools.timeseries_products.common import current_utc_timestamp, TIMESTAMP_FORMAT, DATESTAMP_FORMAT
1415
import aodntools.timeseries_products.aggregated_timeseries as TStools
1516

1617

@@ -122,14 +123,12 @@ def generate_netcdf_output_filename(nc, facility_code, data_code, VoI, site_code
122123
:return: name of the output file
123124
"""
124125

125-
file_timeformat = '%Y%m%d'
126-
127126
if '_' in VoI:
128127
VoI = VoI.replace('_', '-')
129-
t_start = pd.to_datetime(nc.TIME.min().values).strftime(file_timeformat)
130-
t_end = pd.to_datetime(nc.TIME.max().values).strftime(file_timeformat)
128+
t_start = pd.to_datetime(nc.TIME.min().values).strftime(DATESTAMP_FORMAT)
129+
t_end = pd.to_datetime(nc.TIME.max().values).strftime(DATESTAMP_FORMAT)
131130

132-
output_name = '_'.join(['IMOS', facility_code, data_code, t_start, site_code, ('FV0'+str(file_version)), (VoI+"-"+product_type), ('END-'+ t_end), 'C-' + datetime.utcnow().strftime(file_timeformat)]) + '.nc'
131+
output_name = '_'.join(['IMOS', facility_code, data_code, t_start, site_code, ('FV0'+str(file_version)), (VoI+"-"+product_type), ('END-'+ t_end), 'C-' + current_utc_timestamp(DATESTAMP_FORMAT)]) + '.nc'
133132

134133
return output_name
135134

@@ -250,10 +249,9 @@ def grid_variable(input_file, VoI, depth_bins=None, max_separation=50, depth_bin
250249
for attr in ('geospatial_lat_min', 'geospatial_lat_max', 'geospatial_lon_min', 'geospatial_lon_max', 'site_code',
251250
'included_values_flagged_as', 'contributor_name', 'contributor_role', 'contributor_email'):
252251
VoI_interpolated.attrs[attr] = input_global_attributes[attr]
253-
timeformat = '%Y-%m-%dT%H:%M:%SZ'
254-
date_start = pd.to_datetime(VoI_interpolated.TIME.values.min()).strftime(timeformat)
255-
date_end = pd.to_datetime(VoI_interpolated.TIME.values.max()).strftime(timeformat)
256-
date_created = datetime.utcnow().strftime(timeformat)
252+
date_start = pd.to_datetime(VoI_interpolated.TIME.values.min()).strftime(TIMESTAMP_FORMAT)
253+
date_end = pd.to_datetime(VoI_interpolated.TIME.values.max()).strftime(TIMESTAMP_FORMAT)
254+
date_created = current_utc_timestamp()
257255
VoI_interpolated.attrs.update(global_attribute_dictionary)
258256
VoI_interpolated.attrs.update({
259257
'source_file': input_file,

aodntools/timeseries_products/hourly_timeseries.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
import json
55
import os.path
66
from collections import OrderedDict
7-
from datetime import datetime
87

98
import numpy as np
109
import pandas as pd
@@ -14,7 +13,8 @@
1413

1514
from aodntools import __version__
1615
from aodntools.timeseries_products import aggregated_timeseries as utils
17-
from aodntools.timeseries_products.common import NoInputFilesError, check_file, get_qc_variable_names, in_water
16+
from aodntools.timeseries_products.common import (NoInputFilesError, check_file, get_qc_variable_names, in_water,
17+
current_utc_timestamp, TIMESTAMP_FORMAT, DATESTAMP_FORMAT)
1818

1919
TEMPLATE_JSON = resource_filename(__name__, 'hourly_timeseries_template.json')
2020
BINNING_METHOD_JSON = resource_filename(__name__, 'binning_method.json')
@@ -180,8 +180,8 @@ def set_globalattr(nc_aggregated, templatefile, site_code, add_attribute, parame
180180
'geospatial_lat_max': nc_aggregated.LATITUDE.values.max(),
181181
'geospatial_lon_min': nc_aggregated.LONGITUDE.values.min(),
182182
'geospatial_lon_max': nc_aggregated.LONGITUDE.values.max(),
183-
'date_created': datetime.utcnow().strftime(timeformat),
184-
'history': datetime.utcnow().strftime(timeformat) + ': Hourly aggregated file created.',
183+
'date_created': current_utc_timestamp(),
184+
'history': current_utc_timestamp() + ': Hourly aggregated file created.',
185185
'keywords': ', '.join(parameter_names + ['HOURLY', 'AGGREGATED'])}
186186
global_metadata.update(agg_attr)
187187
global_metadata.update(add_attribute)
@@ -259,14 +259,12 @@ def generate_netcdf_output_filename(nc, facility_code, data_code, site_code, pro
259259
:return: name of the output file
260260
"""
261261

262-
file_timeformat = '%Y%m%d'
263-
264-
t_start = pd.to_datetime(nc.TIME.min().values).strftime(file_timeformat)
265-
t_end = pd.to_datetime(nc.TIME.max().values).strftime(file_timeformat)
262+
t_start = pd.to_datetime(nc.TIME.min().values).strftime(DATESTAMP_FORMAT)
263+
t_end = pd.to_datetime(nc.TIME.max().values).strftime(DATESTAMP_FORMAT)
266264

267265
output_name = '_'.join(
268266
['IMOS', facility_code, data_code, t_start, site_code, ('FV0' + str(file_version)), product_type,
269-
('END-' + t_end), 'C-' + datetime.utcnow().strftime(file_timeformat)]) + '.nc'
267+
('END-' + t_end), 'C-' + current_utc_timestamp(DATESTAMP_FORMAT)]) + '.nc'
270268

271269
return output_name
272270

aodntools/timeseries_products/velocity_aggregated_timeseries.py

Lines changed: 9 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,15 @@
44
from netCDF4 import Dataset, num2date, stringtochar
55
import numpy as np
66
import json
7-
from datetime import datetime
87
import argparse
98
from pkg_resources import resource_filename
109
from aodntools import __version__
1110

1211
import xarray as xr
1312

1413
from aodntools.timeseries_products import aggregated_timeseries as utils
15-
from aodntools.timeseries_products.common import NoInputFilesError, check_velocity_file
14+
from aodntools.timeseries_products.common import (NoInputFilesError, check_velocity_file, current_utc_timestamp,
15+
TIMESTAMP_FORMAT, DATESTAMP_FORMAT)
1616

1717
TEMPLATE_JSON = resource_filename(__name__, 'velocity_aggregated_timeseries_template.json')
1818

@@ -188,13 +188,10 @@ def velocity_aggregated(files_to_agg, site_code, input_dir='', output_dir='./',
188188
ds['source_file'].setncatts(utils.source_file_attributes(download_url_prefix, opendap_url_prefix))
189189

190190
## set global attrs
191-
timeformat = '%Y-%m-%dT%H:%M:%SZ'
192-
file_timeformat = '%Y%m%d'
193-
194-
time_start = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(timeformat)
195-
time_end = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(timeformat)
196-
time_start_filename = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(file_timeformat)
197-
time_end_filename = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(file_timeformat)
191+
time_start = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(TIMESTAMP_FORMAT)
192+
time_end = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(TIMESTAMP_FORMAT)
193+
time_start_filename = num2date(np.min(TIME[:]), time_units, time_calendar).strftime(DATESTAMP_FORMAT)
194+
time_end_filename = num2date(np.max(TIME[:]), time_units, time_calendar).strftime(DATESTAMP_FORMAT)
198195

199196
add_attribute = {
200197
'title': ("Long Timeseries Velocity Aggregated product: " + ', '.join(varlist) + " at " +
@@ -208,8 +205,8 @@ def velocity_aggregated(files_to_agg, site_code, input_dir='', output_dir='./',
208205
'geospatial_lat_max': np.max(ds['LATITUDE']),
209206
'geospatial_lon_min': np.min(ds['LONGITUDE']),
210207
'geospatial_lon_max': np.max(ds['LONGITUDE']),
211-
'date_created': datetime.utcnow().strftime(timeformat),
212-
'history': datetime.utcnow().strftime(timeformat) + ': Aggregated file created.',
208+
'date_created': current_utc_timestamp(),
209+
'history': current_utc_timestamp() + ': Aggregated file created.',
213210
'keywords': ', '.join(varlist + ['AGGREGATED']),
214211
'rejected_files': "\n".join(bad_files.keys()),
215212
'generating_code_version': __version__
@@ -235,7 +232,7 @@ def velocity_aggregated(files_to_agg, site_code, input_dir='', output_dir='./',
235232
file_version = 1
236233
output_name = '_'.join(['IMOS', facility_code, data_code, time_start_filename, site_code, ('FV0'+str(file_version)),
237234
("velocity-"+product_type),
238-
('END-'+ time_end_filename), 'C-' + datetime.utcnow().strftime(file_timeformat)]) + '.nc'
235+
('END-'+ time_end_filename), 'C-' + current_utc_timestamp(DATESTAMP_FORMAT)]) + '.nc'
239236
ncout_path = os.path.join(output_dir, output_name)
240237
shutil.move(temp_outfile, ncout_path)
241238

aodntools/timeseries_products/velocity_hourly_timeseries.py

Lines changed: 9 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
import os
44
import shutil
55
import tempfile
6-
from datetime import datetime
76

87
import numpy as np
98
import pandas as pd
@@ -13,7 +12,8 @@
1312

1413
import aodntools.timeseries_products.aggregated_timeseries as utils
1514
from aodntools import __version__
16-
from aodntools.timeseries_products.common import NoInputFilesError, check_velocity_file
15+
from aodntools.timeseries_products.common import (NoInputFilesError, check_velocity_file, current_utc_timestamp,
16+
TIMESTAMP_FORMAT, DATESTAMP_FORMAT)
1717

1818
TEMPLATE_JSON = resource_filename(__name__, 'velocity_hourly_timeseries_template.json')
1919
QC_FLAG_MAX = 2
@@ -252,13 +252,10 @@ def velocity_hourly_aggregated(files_to_agg, site_code, input_dir='', output_dir
252252
ds['source_file'].setncatts(utils.source_file_attributes(download_url_prefix, opendap_url_prefix))
253253

254254
## set global attrs
255-
timeformat = '%Y-%m-%dT%H:%M:%SZ'
256-
file_timeformat = '%Y%m%d'
257-
258-
time_start = num2date(np.min(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(timeformat)
259-
time_end = num2date(np.max(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(timeformat)
260-
time_start_filename = num2date(np.min(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(file_timeformat)
261-
time_end_filename = num2date(np.max(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(file_timeformat)
255+
time_start = num2date(np.min(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(TIMESTAMP_FORMAT)
256+
time_end = num2date(np.max(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(TIMESTAMP_FORMAT)
257+
time_start_filename = num2date(np.min(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(DATESTAMP_FORMAT)
258+
time_end_filename = num2date(np.max(TIME[:]), TIME_UNITS, TIME_CALENDAR).strftime(DATESTAMP_FORMAT)
262259

263260

264261
add_attribute = {
@@ -273,8 +270,8 @@ def velocity_hourly_aggregated(files_to_agg, site_code, input_dir='', output_dir
273270
'geospatial_lat_max': np.float64(np.max(ds['LATITUDE'])),
274271
'geospatial_lon_min': np.float64(np.min(ds['LONGITUDE'])),
275272
'geospatial_lon_max': np.float64(np.max(ds['LONGITUDE'])),
276-
'date_created': datetime.utcnow().strftime(timeformat),
277-
'history': datetime.utcnow().strftime(timeformat) + ': Aggregated file created.',
273+
'date_created': current_utc_timestamp(),
274+
'history': current_utc_timestamp() + ': Aggregated file created.',
278275
'keywords': ', '.join(varlist + ['AGGREGATED']),
279276
'rejected_files': "\n".join(bad_files.keys()),
280277
'generating_code_version': __version__
@@ -306,7 +303,7 @@ def velocity_hourly_aggregated(files_to_agg, site_code, input_dir='', output_dir
306303
file_version = 2
307304
output_name = '_'.join(['IMOS', facility_code, data_code, time_start_filename, site_code, ('FV0'+str(file_version)),
308305
("velocity-"+product_type),
309-
('END-'+ time_end_filename), 'C-' + datetime.utcnow().strftime(file_timeformat)]) + '.nc'
306+
('END-'+ time_end_filename), 'C-' + current_utc_timestamp(DATESTAMP_FORMAT)]) + '.nc'
310307
ncout_path = os.path.join(output_dir, output_name)
311308
shutil.move(temp_outfile, ncout_path)
312309

0 commit comments

Comments (0)