11
2- import json
3- import os
4- import boto3
2+ import json
3+ import os
4+ import boto3
55import base64
66from pprint import pprint
77import psycopg2
1818from api .helpers import get_s3_file_url
1919from api .s3_helpers import save_tiff_to_s3
2020from api .s3_helpers import save_fz_to_fits
21- from api .db import get_files_within_date_range
2221
2322db_host = get_secret ('db-host' )
2423db_database = get_secret ('db-database' )
@@ -47,11 +46,11 @@ def default(event, context):
4746 return http_response (HTTPStatus .OK , "New photon ranch API" )
4847
4948
50- def upload (event , context ):
49+ def upload (event , context ):
5150 """Generates a presigned URL to upload files at AWS.
5251
5352 A request for a presigned post URL requires the name of the object.
54- This is sent in a single string under the key 'object_name' in the
53+ This is sent in a single string under the key 'object_name' in the
5554 json-string body of the request.
5655
5756 Args:
@@ -109,12 +108,12 @@ def upload(event, context):
109108 ...
110109 }
111110 This is the object that is queried to find the info image at
112- site 'tst', channel 2.
111+ site 'tst', channel 2.
113112 """
114113
115114 log .info (json .dumps (event , indent = 2 ))
116115 body = _get_body (event )
117-
116+
118117 # Retrieve and validate the s3_directory
119118 s3_directory = body .get ('s3_directory' , 'data' )
120119 filename = body .get ('object_name' )
@@ -145,7 +144,7 @@ def upload(event, context):
145144 metadata = body .get ('metadata' , None )
146145 if metadata is not None :
147146 metadata = json .dumps (json .loads (metadata ), cls = DecimalEncoder )
148-
147+
149148 # TODO: if applicable, add metadata to database
150149
151150 key = f"{ s3_directory } /{ body ['object_name' ]} "
@@ -158,9 +157,9 @@ def upload(event, context):
158157 return http_response (HTTPStatus .OK , url )
159158
160159
161- def download (event , context ):
160+ def download (event , context ):
162161 """Handles requests to download individual data files.
163-
162+
164163 Args:
165164 s3_directory (str):
166165 data | info-images | allsky | test,
@@ -170,7 +169,7 @@ def download(event, context):
170169 The full filename of the requested file. Appending this to the end
171170 of s3_directory should specify the full key for the object in s3.
172171 image_type (str):
173- tif | fits, used if the requester wants a tif file created
172+ tif | fits, used if the requester wants a tif file created
174173 from the underlying fits image. If so, the tif file is
175174 created on the fly. Default is 'fits'.
176175 stretch (str):
@@ -198,11 +197,11 @@ def download(event, context):
198197 "Bucket" : BUCKET_NAME ,
199198 "Key" : key ,
200199 }
201-
200+
202201 image_type = body .get ('image_type' , 'fits' ) # Assume FITS if not otherwise specified
203202
204203 # Routine if TIFF file is specified
205- if image_type in ['tif' , 'tiff' ]:
204+ if image_type in ['tif' , 'tiff' ]:
206205 stretch = body .get ('stretch' , 'arcsinh' )
207206 #s3_destination_key = f"downloads/tif/{body['object_name']}"
208207 s3_destination_key = save_tiff_to_s3 (BUCKET_NAME , key , stretch )
@@ -215,10 +214,10 @@ def download(event, context):
215214 s3_destination_key = save_fz_to_fits (BUCKET_NAME , key )
216215 url = get_s3_file_url (s3_destination_key )
217216 log .info (f"Presigned download url: { url } " )
218- return http_response (HTTPStatus .OK , str (url ))
217+ return http_response (HTTPStatus .OK , str (url ))
219218
220219 # If TIFF file not requested, just get the file as-is from s3
221- else :
220+ else :
222221 url = s3 .generate_presigned_url (
223222 ClientMethod = 'get_object' ,
224223 Params = params ,
@@ -228,70 +227,6 @@ def download(event, context):
228227 return http_response (HTTPStatus .OK , str (url ))
229228
230229
def download_zip(event, context):
    """Return a link to download a zip of multiple images in FITS format.

    First, get a list of files to be zipped based on the query parameters
    specified. Next, call a Lambda function (defined in the repository
    photonranch-downloads) that creates a zip from the list of specified
    files and uploads it back to s3, returning a presigned download URL.
    Finally, return the URL in the HTTP response to the requester.

    Args:
        event.body.start_timestamp_s (str|int):
            Unix timestamp (seconds, UTC) of starting time to query.
        event.body.end_timestamp_s (str|int):
            Unix timestamp (seconds, UTC) of ending time to query.
        event.body.fits_size (str):
            Size of the FITS file (eg. 'small', 'large', 'best').
        event.body.site (str): Sitecode to zip files from.

    Returns:
        200 status code with requested presigned URL at AWS.
        400 status code if the timestamp parameters are missing or invalid.
        404 status code if no images match the query.
    """

    body = _get_body(event)
    log.info("download_zip request body: %s", body)

    # Validate timestamps up front: a missing key would otherwise surface as
    # int(None) -> TypeError -> an opaque 500 to the client.
    try:
        start_timestamp_s = int(body.get('start_timestamp_s'))
        end_timestamp_s = int(body.get('end_timestamp_s'))
    except (TypeError, ValueError):
        msg = "Request requires integer values for 'start_timestamp_s' and 'end_timestamp_s'."
        return http_response(HTTPStatus.BAD_REQUEST, msg)

    fits_size = body.get('fits_size')  # small | large | best
    site = body.get('site')

    files = get_files_within_date_range(site, start_timestamp_s, end_timestamp_s, fits_size)

    # Return 404 if no images fit the query.
    if len(files) == 0:
        return http_response(HTTPStatus.NOT_FOUND, 'No images exist for the given query.')

    log.info("number of files: %s", len(files))
    log.info("first file: %s", files[0])

    payload = json.dumps({
        'filenames': files
    }).encode('utf-8')

    # Synchronously invoke the zip-building Lambda; LogType='Tail' returns the
    # last 4KB of its execution log (base64-encoded) for debugging.
    response = lambda_client.invoke(
        FunctionName='zip-downloads-dev-zip',
        InvocationType='RequestResponse',
        LogType='Tail',
        Payload=payload
    )
    lambda_response = response['Payload'].read()

    # The invoked function returns an API-Gateway-style envelope, so the URL
    # is double-encoded: outer invoke payload, then its JSON 'body' field.
    zip_url = json.loads(json.loads(lambda_response)['body'])
    log.info("zip url: %s", zip_url)

    log_response = base64.b64decode(response['LogResult']).decode('utf-8')
    for log_line in log_response.splitlines():
        log.info(log_line)

    return http_response(HTTPStatus.OK, zip_url)
294-
def get_recent_uploads(event, context):
    """Queries for a list of files recently uploaded to S3.

    Reads the 'site' query-string parameter and returns the matching items
    from the recent-uploads DynamoDB table, either for one site or for all
    sites combined.

    Args:
        event.queryStringParameters.site (str):
            Sitecode to query recent files from, or 'all' for every site.

    Returns:
        200 status code with list of recent files if successful.
        Otherwise, 404 status code if missing sitecode in request.
    """

    log.info("Query string params: %s", event.get('queryStringParameters'))
    # queryStringParameters is None (not {}) when no params are supplied, so
    # catch TypeError as well as the missing-key case. A bare except here
    # would also swallow SystemExit/KeyboardInterrupt and real bugs.
    try:
        site = event['queryStringParameters']['site']
    except (TypeError, KeyError):
        msg = "Please be sure to include the sitecode query parameter."
        return http_response(HTTPStatus.NOT_FOUND, msg)

    # Results from across all sites
    if site == 'all':
        # NOTE(review): scan returns at most one page (~1MB); assumes the
        # recent-uploads table stays small enough to fit -- confirm.
        response = recent_uploads_table.scan()

    # Results for specific site
    else:
        response = recent_uploads_table.query(
            KeyConditionExpression=Key('site').eq(site)
        )

    results = response['Items']
    return http_response(HTTPStatus.OK, results)
0 commit comments