|
import json
import os
import time

from google.cloud import bigquery
from google.cloud import firestore
from pyspark.sql import SparkSession
| 6 | + |
class FirestoreBatch:
    """Deletes a Firestore collection in batches and repopulates it from BigQuery.

    Documents accumulate in `current_batch` (up to `batch_size`, the Firestore
    per-WriteBatch write limit) and are staged as WriteBatch objects in
    `batch_promises`; `commit_batches` performs the actual commits.
    `collection_name`, `collection_type`, and `date` are assigned by `export`
    before any batch method runs.
    """

    def __init__(self):
        self.firestore = firestore.Client()
        self.bigquery = bigquery.Client()
        # 500 is the maximum number of writes Firestore allows per WriteBatch.
        self.batch_size = 500
        # Flush once this many batches have been staged, to bound memory use.
        self.max_concurrent_batches = 200
        self.current_batch = []
        # Staged, not-yet-committed WriteBatch objects (see queue_batch).
        self.batch_promises = []
        self.spark = SparkSession.builder.appName("FirestoreBatchProcessor").getOrCreate()

    def queue_batch(self, operation):
        """Stage the docs in `current_batch` as one Firestore WriteBatch.

        operation: "delete" removes each doc by its reference; "set" writes
        each dict into a new auto-ID document of `collection_name`.
        Raises ValueError for any other operation.
        """
        batch = self.firestore.batch()

        for doc in self.current_batch:
            if operation == "delete":
                batch.delete(doc.reference)
            elif operation == "set":
                doc_ref = self.firestore.collection(self.collection_name).document()
                batch.set(doc_ref, doc)
            else:
                raise ValueError("Invalid operation")

        # BUG FIX: the original appended batch.commit() — in the Python client
        # commit() executes synchronously (it is not a promise), so every batch
        # was committed right here and commit_batches() iterated over already-
        # returned results. Stage the batch and defer the commit instead.
        self.batch_promises.append(batch)
        self.current_batch = []

    def commit_batches(self):
        """Commit all staged batches, re-raising on the first failure."""
        print(f"Committing {len(self.batch_promises)} batches to {self.collection_name}")
        for batch in self.batch_promises:
            try:
                batch.commit()
            except Exception as e:
                print(f"Error committing batch: {e}")
                raise
        self.batch_promises = []

    def final_flush(self, operation):
        """Stage any leftover docs and commit everything still pending."""
        if self.current_batch:
            self.queue_batch(operation)
        if self.batch_promises:
            self.commit_batches()

    def batch_delete(self):
        """Delete all matching documents from `collection_name`.

        "report" collections are filtered to `self.date`; "dict" collections
        are wiped entirely. Pages through the query and deletes in batches
        until it returns no documents.
        """
        print("Starting batch deletion...")
        # BUG FIX: the original measured time with sparkContext.startTime at
        # both ends (a constant — the JVM start timestamp), so the reported
        # duration was always 0.
        start_time = time.monotonic()
        self.current_batch = []
        self.batch_promises = []
        total_docs_deleted = 0

        collection_ref = self.firestore.collection(self.collection_name)
        if self.collection_type == "report":
            print(f"Deleting documents from {self.collection_name} for date {self.date}")
            query = collection_ref.where("date", "==", self.date)
        elif self.collection_type == "dict":
            print(f"Deleting documents from {self.collection_name}")
            query = collection_ref
        else:
            raise ValueError("Invalid collection type")

        while True:
            # Fetch at most one full round of batches per page; deleted docs
            # drop out of the query, so this loop terminates.
            docs = list(query.limit(self.batch_size * self.max_concurrent_batches).stream())
            if not docs:
                break

            for doc in docs:
                self.current_batch.append(doc)
                if len(self.current_batch) >= self.batch_size:
                    self.queue_batch("delete")
                    if len(self.batch_promises) >= self.max_concurrent_batches:
                        self.commit_batches()
                total_docs_deleted += 1

        self.final_flush("delete")
        duration = time.monotonic() - start_time
        print(f"Deletion complete. Total docs deleted: {total_docs_deleted}. Time: {duration} seconds")

    def stream_from_bigquery(self, query):
        """Run `query` via the Spark BigQuery connector and write every row
        into Firestore as a new document of `collection_name`.

        NOTE(review): df.collect() pulls the entire result set to the driver —
        fine for small report tables, revisit for large ones.
        """
        print("Starting BigQuery to Firestore transfer...")
        start_time = time.monotonic()  # see batch_delete: startTime was a constant
        total_rows_processed = 0

        df = self.spark.read.format("bigquery").option("query", query).load()

        for row in df.collect():
            self.current_batch.append(row.asDict())
            if len(self.current_batch) >= self.batch_size:
                self.queue_batch("set")
                if len(self.batch_promises) >= self.max_concurrent_batches:
                    self.commit_batches()
            total_rows_processed += 1

        self.final_flush("set")
        duration = time.monotonic() - start_time
        print(f"Transfer to {self.collection_name} complete. Total rows processed: {total_rows_processed}. Time: {duration} seconds")

    def export(self):
        """Wipe the target collection, then repopulate it from BigQuery."""
        export_config = json.loads('{"name": "technologies", "type": "dict", "environment": "dev"}')
        # BUG FIX: the original wrapped this SQL in json.loads(), which raises
        # JSONDecodeError — the query is a plain SQL string, not JSON.
        query = "SELECT * FROM report.tech_report_technologies"

        # BUG FIX: getattr() on a dict always returned the default "" —
        # use dict.get() so a configured "date" key is actually honoured.
        self.date = export_config.get("date", "")
        self.collection_name = export_config["name"]
        self.collection_type = export_config["type"]

        self.batch_delete()
        self.stream_from_bigquery(query)
| 111 | + |
if __name__ == "__main__":
    # Script entry point: build the processor and run the full export.
    FirestoreBatch().export()