Skip to content
This repository was archived by the owner on Apr 3, 2026. It is now read-only.

Commit 957a9ab

Browse files
committed
Manual exports for community mapswipe website
To allow the MapSwipe website builder to pull data in bulk
1 parent b71c044 commit 957a9ab

3 files changed

Lines changed: 66 additions & 0 deletions

File tree

api/nginx.conf

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,10 @@ server {
22
listen 80;
33
server_name api;
44

5+
gzip on;
6+
gzip_comp_level 2;
7+
gzip_types text/plain text/csv text/css application/json text/javascript;
8+
59
location / {
610
alias /usr/share/nginx/html/api/;
711
autoindex on;

mapswipe_workers/mapswipe_workers/generate_stats/generate_stats.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,71 @@
1+
import csv
12
import datetime as dt
3+
import hashlib
4+
import os
5+
import shutil
26
from typing import List, Optional
37

48
from mapswipe_workers import auth
59
from mapswipe_workers.definitions import DATA_PATH, logger
610
from mapswipe_workers.generate_stats import overall_stats, project_stats
711

812

13+
def generate_data_for_mapswipe_website():
    """Export bulk data files consumed by the community MapSwipe website.

    Produces, under ``{DATA_PATH}/api/website-data``:
      * ``project-history.zip`` — an archive of ``{DATA_PATH}/api/history/``
      * ``overall-endpoints.csv`` — a manifest of every file below
        ``{DATA_PATH}/api/`` together with its size in bytes
    and writes a ``<file>.md5`` checksum sidecar next to each tracked file
    (the two projects geojson exports, the zip, and the manifest).
    """
    website_data_dest = f"{DATA_PATH}/api/website-data"

    # TODO: Move to utils
    def _compute_md5(file_name):
        """Return the hex MD5 digest of *file_name*, read in 4 KiB chunks."""
        hash_md5 = hashlib.md5()
        with open(file_name, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    def _project_history_zip():
        """Zip the whole history directory; return the archive's path."""
        project_history_file = f"{website_data_dest}/project-history"
        zip_file_name = shutil.make_archive(
            project_history_file,
            "zip",
            f"{DATA_PATH}/api/history/",
        )
        logger.info("finished generate project-history zip")
        return zip_file_name

    def _manifest_file():
        """Write a CSV listing every endpoint file and its size; return its path."""
        endpoints_dir = f"{DATA_PATH}/api/"
        manifest_file = f"{website_data_dest}/overall-endpoints.csv"
        # newline="" is required by the csv module so the writer controls
        # row terminators itself (avoids blank rows / translated line
        # endings on platforms where "\n" is rewritten).
        with open(manifest_file, "w", newline="") as fp:
            csv_writer = csv.writer(fp)
            csv_writer.writerow(["endpoints", "size_bytes"])
            # NOTE(review): website_data_dest lives inside endpoints_dir, so
            # previously generated artifacts (zip, manifest, .md5 sidecars)
            # are listed in the manifest too — confirm this is intended.
            for path, _, files in os.walk(endpoints_dir):
                for name in files:
                    file_path = os.path.join(path, name)
                    csv_writer.writerow(
                        [
                            # Keep only the path from "/api/" onward; assumes
                            # DATA_PATH itself contains no "/api/" segment —
                            # TODO confirm against deployment layout.
                            "/api/" + file_path.split("/api/")[1],
                            os.path.getsize(file_path),
                        ]
                    )
        logger.info("finished generate endpoints manifest for existing stats")
        return manifest_file

    def _generate_file_hash(file_names):
        """Write an MD5 checksum sidecar (``<name>.md5``) for each file."""
        for file_name in file_names:
            md5_hash = _compute_md5(file_name)
            with open(f"{file_name}.md5", "w") as fp:
                fp.write(md5_hash)

    files_to_track_for_checksum = [
        f"{DATA_PATH}/api/projects/projects_centroid.geojson",
        f"{DATA_PATH}/api/projects/projects_geom.geojson",
    ]
    files_to_track_for_checksum.extend([_project_history_zip(), _manifest_file()])
    _generate_file_hash(files_to_track_for_checksum)
67+
68+
969
def get_recent_projects(hours: int = 3):
1070
"""Get ids for projects when results have been submitted within the last x hours."""
1171
pg_db = auth.postgresDB()
@@ -108,6 +168,7 @@ def generate_stats(project_id_list: Optional[List[str]] = None):
108168
overall_stats.get_overall_stats(projects_df, overall_stats_filename)
109169

110170
logger.info(f"finished generate stats for: {project_id_list}")
171+
generate_data_for_mapswipe_website()
111172

112173

113174
def generate_stats_all_projects():

mapswipe_workers/mapswipe_workers/utils/create_directories.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ def create_directories() -> None:
77
"""Create directories"""
88
dirs = (
99
"/api",
10+
"/api/website-data",
1011
"/api/agg_results",
1112
"/api/groups",
1213
"/api/history",

0 commit comments

Comments
 (0)