1111from decimal import Decimal
1212
1313import dateutil
14- import ipdb
1514import pandas as pd
1615from action import Action
1716from google .cloud import bigquery
18- from ipdb import launch_ipdb_on_exception
1917from web3 import Web3
2018
2119from w3_utils import (BPoolLogCallParser , ERC20InfoReader ,
@@ -42,10 +40,12 @@ def query(client, sql: str) -> pd.DataFrame:
4240 )
4341 return result
4442
43+
def load_json(path):
    """Read the file at *path* and return its JSON-decoded contents."""
    with open(path, 'r') as f:
        return json.load(f)
4847
48+
4949def save_json (x , path , indent = True , ** kwargs ):
5050 with open (path , 'w' ) as f :
5151 if indent :
@@ -54,32 +54,41 @@ def save_json(x, path, indent=True, **kwargs):
5454 json .dump (x , f , ** kwargs )
5555 print ("Saved to" , path )
5656
57+
def load_pickle(path):
    """Unpickle and return the object stored at *path*.

    Prints a progress line first, matching the other save/load helpers
    in this module.
    """
    print("Unpickling from", path)
    with open(path, 'rb') as f:
        return pickle.load(f)
6162
63+
def save_pickle(x, path):
    """Pickle *x* to the file at *path*.

    Returns None (``pickle.dump`` has no useful return value; the
    original ``return`` of it was dropped as it always yielded None).
    """
    print("Pickling to", path)
    with open(path, 'wb') as f:
        pickle.dump(x, f)
6668
69+
def save_queries_pickle(pool_address: str, event_type: str, df: pd.DataFrame):
    """Persist a query-result DataFrame as <pool_address>/<event_type>.pickle.

    Assumes a directory named after the pool address already exists —
    TODO confirm (save_pickle does not create parent directories).
    """
    filename = f"{pool_address}/{event_type}.pickle"
    save_pickle(df, filename)
7073
74+
def query_and_save(client, pool_address: str, event_type: str, sql: str, writer) -> pd.DataFrame:
    """Run *sql* through the module-level ``query`` helper, hand the
    resulting DataFrame to *writer* (e.g. ``save_queries_pickle``),
    and return it.
    """
    df = query(client, sql)
    writer(pool_address, event_type, df)
    return df
7579
80+
7681def get_initial_token_distribution (new_results ) -> dict :
7782 receipt = w3 .eth .getTransactionReceipt (new_results .iloc [0 ]['transaction_hash' ])
7883 events = log_call_parser .parse_from_receipt (receipt , args .pool_address )
7984 bind_events = list (filter (lambda x : x ['type' ] == 'bind' , events ))
85+ unique_bind_events = []
86+ for event in bind_events :
87+ if event not in unique_bind_events :
88+ unique_bind_events .append (event )
8089 tokens = {}
8190 total_denorm_weight = Decimal ('0.0' )
82- for event in bind_events :
91+ for event in unique_bind_events :
8392 inputs = event ['inputs' ]
8493 token_address = inputs ['token' ]
8594 token_symbol = erc20_info_getter .get_token_symbol (token_address )
@@ -92,23 +101,27 @@ def get_initial_token_distribution(new_results) -> dict:
92101 'balance' : inputs ['balance' ],
93102 'bound' : True
94103 }
104+
95105 for (key , token ) in tokens .items ():
96106 denorm = Decimal (token ['denorm_weight' ])
97- token ['weight' ] = str (denorm / total_denorm_weight )
107+ token ['weight' ] = str (Decimal ( '100' ) * ( denorm / total_denorm_weight ) )
98108 return tokens
99109
110+
def get_initial_fees_generated(pool_tokens):
    """Return a zero-fee record for every token in the pool.

    *pool_tokens* is iterated for its token symbols (a dict of
    symbol -> token info works, as does a plain list of symbols).
    Fees are kept as strings ('0.0') for painless JSON serialization,
    consistent with the rest of this module.
    """
    return {token_symbol: '0.0' for token_symbol in pool_tokens}
105116
117+
def get_initial_pool_share(transfer_results, tx_hash):
    """Return the pool-share amount created in transaction *tx_hash*,
    converted from wei to ether units.

    Filters *transfer_results* (a DataFrame with 'transaction_hash',
    'src' and 'amt' columns) to the given transaction, then to transfers
    originating from the zero address — presumably mint events; verify
    against the token contract's transfer semantics.

    Raises IndexError if the transaction contains no such transfer.
    """
    initial_tx_transfers = transfer_results.loc[transfer_results['transaction_hash'] == tx_hash]
    minting = initial_tx_transfers.loc[initial_tx_transfers['src'] == '0x0000000000000000000000000000000000000000']
    wei_amount = int(minting.iloc[0]['amt'])
    return Web3.fromWei(wei_amount, 'ether')
111123
124+
112125def format_denorms (denorms : dict ) -> typing .List [typing .Dict ]:
113126 """
114127 format_denorms expects the input to be
@@ -124,6 +137,7 @@ def format_denorms(denorms: dict) -> typing.List[typing.Dict]:
124137 d .append (a )
125138 return d
126139
140+
127141def classify_pool_share_transfers (transfers : []) -> (str , str ):
128142 pool_share_burnt = list (filter (lambda x : x ['dst' ] == ZERO_ADDRESS , transfers ))
129143 if len (pool_share_burnt ) > 0 :
@@ -133,6 +147,7 @@ def classify_pool_share_transfers(transfers: []) -> (str, str):
133147 return 'pool_amount_out' , str (Web3 .fromWei (int (pool_share_minted [0 ]['amt' ]), 'ether' ))
134148 raise Exception ('not pool share mint or burn' , transfers )
135149
150+
136151def map_token_amounts (txs : [], address_key : str , amount_key : str ):
137152 def map_tx (x ):
138153 mapped = {}
@@ -143,6 +158,7 @@ def map_tx(x):
143158
144159 return list (map (map_tx , txs ))
145160
161+
146162def classify_actions (group ):
147163 action = {}
148164 transfers = list (filter (lambda x : x .action_type == 'transfer' , group ))
@@ -186,6 +202,7 @@ def classify_actions(group):
186202 action ['type' ] = 'exit_swap'
187203 return action
188204
205+
189206def turn_events_into_actions (events_list , fees : typing .Dict , denorms : pd .DataFrame ) -> typing .List [Action ]:
190207 actions = []
191208 grouped = events_list .groupby ("transaction_hash" )
@@ -203,11 +220,12 @@ def turn_events_into_actions(events_list, fees: typing.Dict, denorms: pd.DataFra
203220 # convert block_number and swap_fee to string to painlessly
204221 # convert to JSON later (numpy.int64 can't be JSON serialized)
205222 a = Action (timestamp = ts .to_pydatetime (), tx_hash = tx_hash , block_number = str (block_number ), swap_fee = str (fee ),
206- denorms = denorm , action_type = first_event_log ["type" ], action = events .to_dict (orient = "records" ))
223+ denorms = denorm , action_type = first_event_log ["type" ], action = events .to_dict (orient = "records" ))
207224 actions .append (a )
208225
209226 return actions
210227
228+
211229def stage1_load_sql_data (pool_address : str ):
212230 try :
213231 new_results = load_pickle (f"{ pool_address } /new.pickle" )
@@ -255,6 +273,7 @@ def stage1_load_sql_data(pool_address: str):
255273
256274 return new_results , join_results , swap_results , exit_results , transfer_results , fees_results , denorms_results
257275
276+
258277def stage2_produce_initial_state (new_results , fees_results , transfer_results ) -> typing .Dict :
259278 tokens = get_initial_token_distribution (new_results )
260279 generated_fees = get_initial_fees_generated (tokens )
@@ -274,6 +293,7 @@ def stage2_produce_initial_state(new_results, fees_results, transfer_results) ->
274293 }
275294 return initial_states
276295
296+
277297def stage3_merge_actions (pool_address , grouped_actions ):
278298 filename = f"{ pool_address } /txhash_contractcalls.json"
279299 try :
@@ -310,6 +330,7 @@ def stage3_merge_actions(pool_address, grouped_actions):
310330 actions_final .sort (key = lambda a : a ['timestamp' ])
311331 return actions_final
312332
333+
313334def stage4_add_prices_to_initialstate_and_actions (pool_address : str , fiat_symbol : str , initial_state : typing .Dict , actions : typing .List [typing .Dict ]):
314335 def parse_price_feeds (token_symbols : []) -> []:
315336 if len (price_feed_paths ) != len (token_symbols ):
@@ -368,16 +389,19 @@ def get_price_feeds_tokens(initial_state: typing.Dict):
368389
369390 def add_price_feeds_to_actions (actions : typing .List [typing .Dict ]) -> typing .List [typing .Dict ]:
370391 actions .extend (price_actions )
392+
371393 def equalize_date_types (action ):
372394 if not isinstance (action ['timestamp' ], datetime ):
373395 action ['timestamp' ] = dateutil .parser .isoparse (action ['timestamp' ])
374396 return action
397+
375398 actions = list (map (equalize_date_types , actions ))
376399 actions .sort (key = lambda x : x ['timestamp' ])
377400
378401 def convert_to_iso_str (action ):
379402 action ['timestamp' ] = action ['timestamp' ].isoformat ()
380403 return action
404+
381405 actions = list (map (convert_to_iso_str , actions ))
382406
383407 # Remove prices before pool creation. First action must be pool creation
@@ -445,22 +469,26 @@ def produce_actions():
445469 # grouped_actions = load_pickle("{}/grouped_actions.pickle".format(args.pool_address))
446470
447471 actions_final = stage3_merge_actions (args .pool_address , grouped_actions )
472+
448473 def prep_json_serialize (o ):
449474 if isinstance (o , datetime ):
450475 return o .isoformat ()
476+
451477 save_json (actions_final , f"{ args .pool_address } -actions.json" , default = prep_json_serialize )
452478
453479 # save_pickle(actions_final, f"{args.pool_address}/actions_final.pickle")
454480 # actions_final = load_pickle(f"{args.pool_address}/actions_final.pickle")
455481
456482 if args .fiat :
457- initial_state_w_prices , actions_w_prices = stage4_add_prices_to_initialstate_and_actions (args .pool_address , args .fiat , initial_state , actions_final )
483+ initial_state_w_prices , actions_w_prices = stage4_add_prices_to_initialstate_and_actions (args .pool_address , args .fiat , initial_state ,
484+ actions_final )
458485 save_json (initial_state_w_prices , f'{ args .pool_address } -initial_pool_states-prices.json' )
459486 save_json (actions_w_prices , f'{ args .pool_address } -actions-prices.json' )
460487 else :
461488 print ("Fiat base for token prices not given - skipping price data injection" )
462489
# Script entry point: run the full query -> state -> actions pipeline.
# (Former ipdb launch_ipdb_on_exception debug scaffolding removed.)
produce_actions()