Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 2 additions & 12 deletions airdrop_analysis/airdrop_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,25 +64,15 @@ def get_distribution_graph_json(self, param: GraphQueryParameters):

def get_graph_records_from_user_id(self, user_id: str):
    """Fetch every stored graph record for *user_id* via the query controller."""
    records = self.__controller.get_graph_records(user_id)
    return records

def __get_communities_from_partition(self, partition: dict) -> dict:
    """Invert a node->community mapping into community->list-of-nodes."""
    grouped: dict = {}
    for node_id, community_id in partition.items():
        # setdefault creates the bucket the first time a community id appears
        grouped.setdefault(community_id, []).append(node_id)
    return grouped

def get_communities(self, param: GraphQueryParameters) -> dict:
    """Return the Louvain communities for the distribution graph.

    Detection now lives in the graph builder; this is a thin delegator so
    callers of the analyzer keep a stable entry point.
    """
    return self.__builder.get_communities(param)

def get_graph_summary(self, param: GraphQueryParameters) -> dict:
    """Build the distribution graph and return its analysis summary.

    When ``param.partition`` is truthy, a Louvain partition is computed and
    the number of detected communities is added under the ``'communities'``
    key of the returned analysis dict.
    """
    graph = self.__builder.build_graph_from_distributor(param)
    analysis = self.__analyzer.analyze(graph)
    if param.partition:
        partition, _ = self.__nx_builder.get_louvain_partition(graph)
        communities = self.__builder.get_communities_from_partition(partition)
        analysis['communities'] = len(communities)
    return analysis
38 changes: 30 additions & 8 deletions airdrop_analysis/data_handler/graph_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from typing import List, Dict
import pandas as pd

from data_handler.networkx_builder import NetworkXBuilder
from data_handler.query_handlers.chain_query_controller import \
ChainQueryController
from data_handler.models.base_models.transaction_history \
Expand All @@ -24,6 +25,7 @@ def __init__(self, api_keys_path: str, dex_addresses_path: str):
self.__graph = Graph()
self.__current_query_params = None
self.__current_hirerarchy_stack = []
self.__nx_builder = NetworkXBuilder()

def __get_dex_addresses_from_csv(self,dex_addresses_path):
dex_info = pd.read_csv(dex_addresses_path)
Expand Down Expand Up @@ -236,7 +238,7 @@ def build_graph(self, params: GraphQueryParameters) -> Graph:

def build_graph_from_distributor(
self,
params: GraphQueryParameters,
params: GraphQueryParameters,
) -> Graph:
hist_p = self.__get_transactions_query_params(
params.center_addresses[0],
Expand All @@ -249,20 +251,40 @@ def build_graph_from_distributor(
g.add_node(center)
for dex_address in self.__dex_addresses:
g.delete_node(dex_address)
self.__save_graph(g,params)
result_dict = self.__get_result_dict(g,params)
self.__save_graph(result_dict,params)
return g

def build_graph_json(self, params: GraphQueryParameters):
    """Build the distributor graph and return it as a JSON-safe dict."""
    graph = self.build_graph_from_distributor(params)
    result_dict = self.__get_result_dict(graph, params)
    return self.__dict_to_json(result_dict)

def __save_graph(self, result_dict: dict, params: GraphQueryParameters):
    """Persist the serialized graph dict for the requesting user."""
    user = params.user_id
    return self.__controller.save_graph_record(user, result_dict)

def __save_graph(self, graph : Graph , params : GraphQueryParameters ,):
result_dict = graph.get_graph_dict()
def __get_result_dict(self, graph: Graph, params: GraphQueryParameters):
    """Serialize *graph* to a dict, optionally tagging nodes by Louvain community.

    Community detection runs only when ``params.partition`` is truthy;
    otherwise an empty mapping is passed through to ``get_graph_dict``.
    """
    if params.partition:
        partition, _ = self.__nx_builder.get_louvain_partition(graph)
        communities = self.get_communities_from_partition(partition)
    else:
        communities = {}
    result_dict = graph.get_graph_dict(communities)
    result_dict[ck.PARAMETERS] = self.__dict_to_json(params.to_dict())
    return result_dict

def __dict_to_json(self, data):
    """Round-trip *data* through JSON to guarantee a JSON-serializable value."""
    return json.loads(json.dumps(data))


def get_communities_from_partition(self, partition: dict) -> dict:
    """Group nodes by community.

    Args:
        partition: mapping of node id -> community id (e.g. a Louvain
            partition; community ids are typically ints, not strs, so the
            value is annotated as a plain dict).

    Returns:
        Mapping of community id -> list of node ids in that community.
    """
    communities: dict = {}
    for node_id, community_id in partition.items():
        # setdefault creates the bucket on first sight of a community id
        communities.setdefault(community_id, []).append(node_id)
    return communities

def get_communities(self, param: GraphQueryParameters) -> dict:
    """Build the distributor graph and return its Louvain communities."""
    built = self.build_graph_from_distributor(param)
    louvain_partition, _ = self.__nx_builder.get_louvain_partition(built)
    return self.get_communities_from_partition(louvain_partition)
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
class TransactionTime:
    """Accumulator for per-transaction query timing.

    ``average_time`` is deliberately a class-level (shared) list: callers
    append per-transaction average durations from every query and read the
    samples back via ``TransactionTime.average_time``.
    """

    # Shared across all instances by design (class-level accumulator).
    average_time: list = []

    def __init__(self, last_transaction_count: int):
        # Number of transactions fetched in the most recent query.
        self.last_transaction_count = last_transaction_count
11 changes: 9 additions & 2 deletions airdrop_analysis/data_handler/models/graph_models/graph.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from pydantic import BaseModel
from typing import Dict, List, Union

from data_handler.models.base_models.query_parameters import GraphQueryParameters
from data_handler.models.graph_models.node import Node
from data_handler.models.graph_models.edge import Edge
from utils.custom_keys import CustomKeys as ck
Expand Down Expand Up @@ -108,12 +109,18 @@ def get_most_productive_parent(self, hirerarchy: int) -> Node:
most_productive = node
return most_productive

def get_graph_dict(self):
def get_graph_dict(self, communities : dict = {}):
graph_dict = {}
graph_dict[ck.NODES] = []
graph_dict[ck.LINKS] = []
for node in self.__nodes.values():
node_dict = {"id" : node.id}
hierarchy = node.hierarchy
label = node.label
for key,community in communities.items():
if label in community:
hierarchy = key
break
node_dict = {"id" : node.id , "hierarchy" : hierarchy}
graph_dict[ck.NODES].append(node_dict)
for edge in self.__edges.values():
edge_dict = {"source" : edge.source.id , "target" : edge.destination.id}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,25 +1,24 @@
from peewee import CharField, DateTimeField, IntegerField, FloatField
import json
from datetime import datetime

from data_handler.models.table_models.base_model import BaseModel
from utils.custom_keys import CustomKeys as ck

class Graph_Record(BaseModel):
    """Peewee row pointing at a graph JSON file saved for a user.

    The graph itself lives on disk; only its file path is stored in the DB.
    """

    user_id = CharField()
    time_stamp = DateTimeField()
    # Filesystem path of the JSON file holding the serialized graph.
    graph_path = CharField()

    @staticmethod
    def create_from(user_id: str, graph_path: str, time_stamp: str):
        """Insert and return a new record for *user_id*."""
        return Graph_Record.create(
            user_id=user_id,
            time_stamp=time_stamp,
            graph_path=graph_path,
        )

    @staticmethod
    def to_dict(data):
        """Return a JSON-safe dict view of a record row."""
        return {
            "user_id": data.user_id,
            "time_stamp": str(data.time_stamp),
            "graph": data.graph_path,
        }
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,16 @@ def get_graph_records(self,user_id : str):
record_list = []
for graph in graph_records:
record_dict = Graph_Record.to_dict(graph)
record_dict[ck.GRAPH] = self.__read_graph_from_json(record_dict[ck.GRAPH])
record_string = json.dumps(record_dict)
record_json = json.loads(record_string)
record_list.append(record_json)
return record_list

def __read_graph_from_json(self, graph_path: str):
    """Load and return the graph dict stored at *graph_path*.

    Raises FileNotFoundError for a missing file and json.JSONDecodeError
    for corrupt content.
    """
    # json.load streams the file directly; utf-8 made explicit for portability.
    with open(graph_path, "r", encoding="utf-8") as file:
        return json.load(file)

def __query_wallet_token_transfers(
self,
params: TokenTransfersQueryParameters,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
import moralis
import os
from requests_html import HTMLSession
from datetime import datetime

from data_handler.models.base_models.transaction_time import TransactionTime
from data_handler.models.base_models.query_parameters \
import TransactionsQueryParameters , TokenTransfersQueryParameters
from data_handler.models.base_models.query_parameters \
Expand Down Expand Up @@ -37,12 +39,14 @@ def __query_wallet_transactions(
event: str,
):
address = params.address
transaction_time = TransactionTime(0)
transactions = []
try:
start_time = datetime.now()
while params.cursor is not None:
tnxs, cursor = self.__query_wallet_transactions_page(
params, query,
)
)
transactions.extend(tnxs)
params.cursor = cursor
cnt = len(transactions)
Expand All @@ -54,6 +58,11 @@ def __query_wallet_transactions(
else:
s += ' Done.' + ' ' * 25
print(s, end='\r')
end_time = datetime.now()
difference = (end_time - start_time).total_seconds()
transaction_time.last_transaction_count = cnt
if cnt != 0:
TransactionTime.average_time.append(difference / cnt)
except Exception as e:
if 'Reason: Internal Server Error' in str(e):
print(f'Internal Server Error querying {event}s for {address}')
Expand Down
14 changes: 12 additions & 2 deletions airdrop_analysis/data_handler/query_handlers/pw_query_handler.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from datetime import datetime
import json
from peewee import DoesNotExist

from data_handler.models.table_models.base_model import db
Expand All @@ -6,9 +8,12 @@
from data_handler.models.table_models.graph_record import Graph_Record
from data_handler.models.base_models.transaction_history \
import TransactionHistory
from utils.path_provider import PathProvider
from utils.custom_keys import CustomKeys as ck

class PWQueryHandler(object):
def __init__(self):
    # Path provider resolves on-disk locations (e.g. the graph JSON folder).
    self.__path_provider = PathProvider()
    # Idempotent: safe=True skips tables that already exist.
    db.create_tables([Address_Record, Token_Transfer, Graph_Record], safe=True)

def create_wallet_token_transfers(self, chain: str, transfers: list):
Expand Down Expand Up @@ -52,8 +57,13 @@ def update_address_record(self, address, data: dict[str, any]):
.where(Address_Record.address == address)
address_record.execute()
def create_graph_record(self, user_id: str, graph: dict):
    """Write *graph* to a timestamped JSON file and insert a DB record.

    Returns:
        The created Graph_Record row pointing at the written file.
    """
    time_now = datetime.now().strftime(
        ck.DATETIME_FORMAT_FOR_QUERIED_TRANSFERS)
    graph_path = self.__path_provider.get_graph_json_path(user_id, time_now)
    # Stream straight to disk instead of building the whole string in memory.
    with open(graph_path, "w", encoding="utf-8") as file:
        json.dump(graph, file)
    return Graph_Record.create_from(user_id, graph_path, time_now)

def get_graph_records(self, user_id: str):
    """Return all stored graph rows belonging to *user_id*."""
    matches_user = Graph_Record.user_id == user_id
    return Graph_Record.select().where(matches_user)

2 changes: 1 addition & 1 deletion airdrop_analysis/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ def json_distribution_graph(
graph = AirdropAnalyzer().get_distribution_graph_json(param)
return JSONResponse(content=graph, status_code=200)

@app.get("/graph_records/")
@app.get("/graph_records_for_fast_api/")
def get_graph_records(
user_id : str
) -> JSONResponse:
Expand Down
6 changes: 3 additions & 3 deletions airdrop_analysis/run_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@


def main():
    """Run the currently-enabled test suite."""
    test = ChainQueryControllerTest()
    test.run_tests()
    # Alternate suite, toggled off for now:
    # test = GraphBuilderTest()
    # test.run_tests()

if __name__ == '__main__':
    main()
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from utils.path_provider import PathProvider
from utils.custom_keys import CustomKeys as ck
from data_handler.models.base_models.query_parameters import *
from data_handler.models.base_models.transaction_time import TransactionTime


class ChainQueryControllerTest():
Expand Down Expand Up @@ -54,20 +55,25 @@ def __test_query_wallet_token_transfers_with_contracts(self):

def __test_query_wallet_token_transfers(self):
    """Query token transfers for a slice of claimers and print the mean
    per-transaction query time collected in TransactionTime.average_time."""
    contract_addresses = ['0x4ed4e862860bed51a9570b96d89af5e1b0efefed']
    addresses = self.__claimers[ck.WALLET_ADDRESS].to_list()[150:200]
    for address in addresses:
        params = TokenTransfersQueryParameters(
            address=address,
            chain='base',
            cached_first=True,
            from_date='2024-01-01T00:00:00Z',
            to_date='2024-07-15T00:00:00Z',
            contract_addresses=contract_addresses,
        )
        history, _ = self.__controller.get_wallet_token_transfer_history(
            params,
        )
    samples = TransactionTime.average_time
    # Guard: avoids ZeroDivisionError when no timing samples were recorded.
    if samples:
        print(sum(samples) / len(samples))
    # print(history.get_transaction_count())

def __test_query_total_token_transfer_count(self):
contract_addresses=['0x4ed4e862860bed51a9570b96d89af5e1b0efefed']
Expand Down
2 changes: 2 additions & 0 deletions airdrop_analysis/utils/custom_keys.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ class CustomKeys:
CLAIMERS_PATH = 'claimers_path'
TABLES_FILE_PATH = 'tables_file_path'
DEX_ADDRESSES_PATH = 'dex_addresses_path'
GRAPH_JSONS_FOLDER_PATH = 'graph_jsons_folder_path'

# API keys
MORALIS = 'moralis'
Expand All @@ -33,6 +34,7 @@ class CustomKeys:
ASC = 'ASC'
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
DATETIME_FORMAT_FOR_QUERIED_TRANSFERS = '%Y-%m-%dT%H:%M:%S.%fZ'
GRAPH = 'graph'

# Wallet stats
NFTS = 'nfts'
Expand Down
14 changes: 13 additions & 1 deletion airdrop_analysis/utils/path_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,4 +59,16 @@ def get_claimer_lists_json_path(self) -> str:

def get_claimer_lists(self) -> dict:
    """Load and return the claimer-lists JSON file as a dict."""
    # json.load reads the stream directly; utf-8 made explicit for portability.
    with open(self.get_claimer_lists_json_path(), 'r', encoding='utf-8') as file:
        return json.load(file)

def get_graph_json_path(self, user_id: str, time_stamp: str):
    """Return the path for a user's graph JSON file, creating the user folder.

    ':' characters in the timestamp are replaced with '-' so the filename is
    valid on Windows filesystems.
    """
    folder_path = self[ck.GRAPH_JSONS_FOLDER_PATH].replace('/', self.__sep)
    arranged_time_stamp = time_stamp.replace(":", "-")
    user_folder = f"{folder_path}{user_id}"
    # exist_ok avoids the race between an exists() check and makedirs().
    os.makedirs(user_folder, exist_ok=True)
    return self.__sep.join(
        [
            user_folder,
            arranged_time_stamp + ".json",
        ]
    )