Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion airdrop_analysis/airdrop_analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def __get_communities_from_partition(self, partition: dict) -> dict:
communities[communityID] = []
communities[communityID].append(nodeID)
return communities

def get_communities(self, param: GraphQueryParameters) -> dict:
graph = self.__builder.build_graph_from_distributor(param)
partition, _ = self.__nx_builder.get_louvain_partition(graph)
Expand Down
40 changes: 31 additions & 9 deletions airdrop_analysis/data_handler/graph_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,14 @@
from typing import List, Dict
import pandas as pd

from data_handler.networkx_builder import NetworkXBuilder
from data_handler.query_handlers.chain_query_controller import \
ChainQueryController
from data_handler.models.base_models.transaction_history \
import TransactionHistory
from data_handler.models.base_models.query_parameters import \
TokenTransfersQueryParameters, GraphQueryParameters
from data_handler.models.table_models.token_transfer import Token_Transfer
from data_handler.models.odbc_models.token_transfer import Token_Transfer
from data_handler.models.graph_models.node import Node
from data_handler.models.graph_models.edge import Edge
from data_handler.models.graph_models.graph import Graph
Expand All @@ -24,6 +25,7 @@ def __init__(self, api_keys_path: str, dex_addresses_path: str):
self.__graph = Graph()
self.__current_query_params = None
self.__current_hirerarchy_stack = []
self.__nx_builder = NetworkXBuilder()

def __get_dex_addresses_from_csv(self,dex_addresses_path):
dex_info = pd.read_csv(dex_addresses_path)
Expand All @@ -42,7 +44,7 @@ def __get_transactions_query_params(
from_date=params.from_date,
to_date=params.to_date,
order=params.edge_order,
limit=params.edge_limit if params.edge_limit > 0 else 300,
limit=params.edge_limit if params.edge_limit > 0 else 100,
)

def __get_parent_addresses(self, sender_addresses: List[str]) -> List[str]:
Expand Down Expand Up @@ -236,7 +238,7 @@ def build_graph(self, params: GraphQueryParameters) -> Graph:

def build_graph_from_distributor(
self,
params: GraphQueryParameters,
params: GraphQueryParameters,
) -> Graph:
hist_p = self.__get_transactions_query_params(
params.center_addresses[0],
Expand All @@ -249,20 +251,40 @@ def build_graph_from_distributor(
g.add_node(center)
for dex_address in self.__dex_addresses:
g.delete_node(dex_address)
self.__save_graph(g,params)
result_dict = self.__get_result_dict(g,params)
self.__save_graph(result_dict,params)
return g

def build_graph_json(self, params : GraphQueryParameters):
graph = self.build_graph_from_distributor(params)
return self.__save_graph(graph, params)
result_dict = self.__get_result_dict(graph,params)
return self.__dict_to_json(result_dict)

def __save_graph(self, result_dict : dict, params: GraphQueryParameters):
    """Persist a serialized graph dict for the requesting user.

    Delegates to the query controller's save_graph_record and returns its
    result (presumably a saved-record handle or path — confirm against
    ChainQueryController.save_graph_record).
    """
    return self.__controller.save_graph_record(params.user_id,result_dict)

def __save_graph(self, graph : Graph , params : GraphQueryParameters ,):
def __get_result_dict(self, graph: Graph , params: GraphQueryParameters):
    """Serialize *graph* into a plain dict, attaching the query parameters.

    When params.partition is set, a Louvain partition is computed and grouped
    into communities.

    NOTE(review): `communities` is computed but never placed into
    `result_dict` — looks like a dropped feature or a bug; verify intent.
    NOTE(review): this span appears to contain stale diff remnants — the
    `save_graph_record` call plus the first `return` make the final
    `return result_dict` unreachable; confirm against the merged file.
    """
    communities = {}
    if params.partition:
        partition, _ = self.__nx_builder.get_louvain_partition(graph)
        communities = self.get_communities_from_partition(partition)
    result_dict = graph.get_graph_dict()
    result_dict[ck.PARAMETERS] = self.__dict_to_json(params.to_dict())
    self.__controller.save_graph_record(params.user_id,result_dict)
    return self.__dict_to_json(result_dict)
    return result_dict

def __dict_to_json(self,data):
dict_string = json.dumps(data)
return json.loads(dict_string)


def get_communities_from_partition(self, partition: dict) -> dict:
    """Invert a node->community partition into community->node-list form.

    :param partition: mapping of node ID to its community ID.
    :return: dict mapping each community ID to the list of its node IDs,
        preserving the iteration order of *partition*.
    """
    communities: dict[str, list] = {}
    for node_id, community_id in partition.items():
        # setdefault replaces the manual membership-check-then-append idiom.
        communities.setdefault(community_id, []).append(node_id)
    return communities

def get_communities(self, param: GraphQueryParameters) -> dict:
    """Build the distributor-centered graph for *param* and return its
    Louvain communities as {community_id: [node_id, ...]}.

    Builds the graph, partitions it via the NetworkX builder, then groups
    the partition with get_communities_from_partition.
    """
    graph = self.build_graph_from_distributor(param)
    partition, _ = self.__nx_builder.get_louvain_partition(graph)
    return self.get_communities_from_partition(partition)
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from pydantic import BaseModel
from typing import Optional, List

from data_handler.models.table_models.token_transfer import Token_Transfer
from data_handler.models.odbc_models.token_transfer import Token_Transfer
from utils.custom_keys import CustomKeys as ck


Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
class TransactionTime():
    # Class-level accumulator shared by ALL instances: per-transaction query
    # durations (seconds) are appended here by the Moralis query handler.
    # NOTE(review): a mutable class attribute means this state is shared
    # across every instance and never reset — confirm that cross-instance
    # accumulation is intentional.
    average_time = []

    def __init__(self , last_transaction_count):
        # Number of transactions fetched by the most recent query.
        self.last_transaction_count = last_transaction_count
3 changes: 2 additions & 1 deletion airdrop_analysis/data_handler/models/graph_models/edge.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from pydantic import BaseModel
from typing import TYPE_CHECKING
from datetime import datetime

if TYPE_CHECKING:
from data_handler.models.graph_models.node import Node
Expand All @@ -10,7 +11,7 @@ class Edge(BaseModel):
destination: 'Node'
edge_type: str
edge_value: float
edge_timestamp: str
edge_timestamp: datetime

def __init__(
self,
Expand Down
5 changes: 5 additions & 0 deletions airdrop_analysis/data_handler/models/graph_models/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,11 @@ def get_graph_dict(self):
graph_dict[ck.LINKS].append(edge_dict)
return graph_dict

def get_neighbors(self, node: Node) -> List[Node]:
    """Return every node adjacent to *node*.

    Sources of incoming edges come first, followed by destinations of
    outgoing edges; duplicates are not removed.
    """
    neighbors = []
    for edge in node.incoming_edges:
        neighbors.append(edge.source)
    for edge in node.outgoing_edges:
        neighbors.append(edge.destination)
    return neighbors

def __contains__(self, node: Union[Node, str]) -> bool:
if isinstance(node, str):
return node in self.__nodes
Expand Down
72 changes: 72 additions & 0 deletions airdrop_analysis/data_handler/models/odbc_models/address_record.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
from utils.custom_keys import CustomKeys as ck
from data_handler.models.base_models.transaction_history import TransactionHistory


class Address_Record:
    """Aggregated token-transfer statistics for one wallet address.

    Plain data holder used by the ODBC persistence layer; the constructor
    arguments mirror the stored columns one-to-one.
    """

    def __init__(self, address, from_date, to_date, balance,
                 total_transaction_count, incoming_transaction_count,
                 outgoing_transaction_count, first_incoming_transaction_date,
                 first_incoming_transaction_source,
                 last_outgoing_transaction_date,
                 last_outgoing_transaction_destination, chain,
                 contract_address, last_cursor):
        self.address = address
        self.from_date = from_date
        self.to_date = to_date
        self.balance = balance
        self.total_transaction_count = total_transaction_count
        self.incoming_transaction_count = incoming_transaction_count
        self.outgoing_transaction_count = outgoing_transaction_count
        self.first_incoming_transaction_date = first_incoming_transaction_date
        self.first_incoming_transaction_source = \
            first_incoming_transaction_source
        self.last_outgoing_transaction_date = last_outgoing_transaction_date
        self.last_outgoing_transaction_destination = \
            last_outgoing_transaction_destination
        self.chain = chain
        self.contract_address = contract_address
        self.last_cursor = last_cursor

    @staticmethod
    def create_from_dict(data: dict):
        """Build a record from a dict keyed by CustomKeys constants.

        :raises KeyError: if any expected key is missing from *data*.
        """
        return Address_Record(
            address=data[ck.ADDRESS],
            from_date=data[ck.FROM_DATE],
            to_date=data[ck.TO_DATE],
            balance=data[ck.BALANCE],
            total_transaction_count=data[ck.TOTAL_TRANSACTION_COUNT],
            incoming_transaction_count=data[ck.INCOMING_TRANSACTION_COUNT],
            outgoing_transaction_count=data[ck.OUTGOING_TRANSACTION_COUNT],
            first_incoming_transaction_date=
                data[ck.FIRST_INCOMING_TRANSACTION_DATE],
            first_incoming_transaction_source=
                data[ck.FIRST_INCOMING_TRANSACTION_SOURCE],
            last_outgoing_transaction_date=
                data[ck.LAST_OUTGOING_TRANSACTION_DATE],
            last_outgoing_transaction_destination=
                data[ck.LAST_OUTGOING_TRANSACTION_DESTINATION],
            chain=data[ck.CHAIN],
            contract_address=data[ck.CONTRACT_ADDRESS],
            last_cursor=data[ck.LAST_CURSOR],
        )

    @staticmethod
    def create_from_history(history: TransactionHistory):
        """Derive an aggregate record from a TransactionHistory.

        First/last transaction fields fall back to '' when there are no
        received/sent transfers; balance is total received minus total sent.
        """
        ins = history.get_received_transactions()
        outs = history.get_sent_transactions()
        first_date, last_date = '', ''
        first_source, last_destination = '', ''
        if ins:
            first_date = ins[0].block_timestamp
            first_source = ins[0].from_address
        if outs:
            last_date = outs[-1].block_timestamp
            last_destination = outs[-1].to_address
        # Generator expressions avoid materializing throwaway lists.
        balance = sum(t.value for t in ins) - sum(t.value for t in outs)
        contract_address = ''
        if history.contract_addresses:
            contract_address = history.contract_addresses[0]
        return Address_Record(
            address=history.address,
            from_date=history.from_date,
            to_date=history.to_date,
            balance=balance,
            total_transaction_count=history.get_transaction_count(),
            incoming_transaction_count=len(ins),
            outgoing_transaction_count=len(outs),
            first_incoming_transaction_date=first_date,
            first_incoming_transaction_source=first_source,
            last_outgoing_transaction_date=last_date,
            last_outgoing_transaction_destination=last_destination,
            chain=history.chain,
            contract_address=contract_address,
            last_cursor=history.last_cursor,
        )
30 changes: 30 additions & 0 deletions airdrop_analysis/data_handler/models/odbc_models/graph_record.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
from utils.custom_keys import CustomKeys as ck

class Graph_Record:
    """Persistence record linking a user to a saved graph JSON file."""

    def __init__(self, user_id, time_stamp, graph_path):
        self.user_id = user_id
        self.time_stamp = time_stamp
        self.graph_path = graph_path

    @staticmethod
    def create_from_dict(data: dict):
        """Build a record from a dict keyed by CustomKeys constants."""
        return Graph_Record(
            data[ck.USER_ID],
            data[ck.TIME_STAMP],
            data[ck.GRAPH_PATH],
        )

    @staticmethod
    def create_from(user_id: str, graph_path: str, time_stamp: str):
        """Build a record from its individual fields."""
        return Graph_Record(user_id, time_stamp, graph_path)

    def to_dict(self):
        """Serialize the record; the timestamp is stringified for storage."""
        return {
            ck.USER_ID: self.user_id,
            ck.TIME_STAMP: str(self.time_stamp),
            ck.GRAPH_PATH: self.graph_path,
        }
27 changes: 27 additions & 0 deletions airdrop_analysis/data_handler/models/odbc_models/token_transfer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from utils.custom_keys import CustomKeys as ck

class Token_Transfer:
    """A single token transfer between two addresses on one chain."""

    def __init__(self, from_address, to_address, value, block_timestamp,
                 block_hash, transaction_hash, token_name, contract_address,
                 chain):
        self.from_address = from_address
        self.to_address = to_address
        self.value = value
        self.block_timestamp = block_timestamp
        self.block_hash = block_hash
        self.transaction_hash = transaction_hash
        self.token_name = token_name
        self.contract_address = contract_address
        self.chain = chain

    @staticmethod
    def create_from_dict(chain: str, response: dict):
        """Build a transfer from a raw API response dict.

        The raw value is divided by 10**18 (assumes an 18-decimal token —
        TODO confirm for non-standard tokens).
        """
        scaled_value = float(response[ck.VALUE]) / 10 ** 18
        return Token_Transfer(
            response[ck.FROM_ADDRESS],
            response[ck.TO_ADDRESS],
            scaled_value,
            response[ck.BLOCK_TIMESTAMP],
            response[ck.BLOCK_HASH],
            response[ck.TRANSACTION_HASH],
            response[ck.TOKEN_NAME],
            response[ck.ADDRESS],
            chain,
        )
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@
from datetime import datetime
import json

from data_handler.models.table_models.graph_record import Graph_Record
from data_handler.models.odbc_models.graph_record import Graph_Record
from data_handler.query_handlers.moralis_query_handler \
import MoralisQueryHandler
from data_handler.query_handlers.pw_query_handler import PWQueryHandler
from data_handler.query_handlers.odbc_query_handlers import ODBCQueryHandler
from data_handler.models.base_models.query_parameters import *
from data_handler.models.table_models.address_record import Address_Record
from data_handler.models.odbc_models.address_record import Address_Record
from data_handler.models.base_models.transaction_history \
import TransactionHistory
from utils.custom_keys import CustomKeys as ck
Expand All @@ -16,7 +16,7 @@
class ChainQueryController():
def __init__(self, api_keys_path: str):
self.__moralis_handler = MoralisQueryHandler(api_keys_path)
self.__database_handler = PWQueryHandler()
self.__database_handler = ODBCQueryHandler()

def get_address_record(self, address) -> Address_Record:
return self.__database_handler.get_address_record(address)
Expand All @@ -29,11 +29,16 @@ def get_graph_records(self,user_id : str):
record_list = []
for graph in graph_records:
record_dict = Graph_Record.to_dict(graph)
record_dict[ck.GRAPH] = self.__read_graph_from_json(record_dict[ck.GRAPH])
record_string = json.dumps(record_dict)
record_json = json.loads(record_string)
record_list.append(record_json)
return record_list

def __read_graph_from_json(self,graph_path : str):
with open(graph_path,"r") as file:
return json.loads(file.read())

def __query_wallet_token_transfers(
self,
params: TokenTransfersQueryParameters,
Expand All @@ -47,15 +52,11 @@ def __query_wallet_token_transfers(
)
return transfers, cursor

def compare_dates(self,date1_str: str,date2_str: str):
def compare_dates(self,date1_str: str,date2: datetime):
try:
date1 = datetime.strptime(date1_str,ck.DATETIME_FORMAT)
except ValueError:
date1 = datetime.strptime(date1_str,ck.DATETIME_FORMAT_FOR_QUERIED_TRANSFERS)
try:
date2 = datetime.strptime(date2_str,ck.DATETIME_FORMAT)
except ValueError:
date2 = datetime.strptime(date2_str,ck.DATETIME_FORMAT_FOR_QUERIED_TRANSFERS)
return (date1-date2).total_seconds()

def query_not_overlapped_transfers(
Expand Down Expand Up @@ -99,7 +100,7 @@ def get_wallet_token_transfer_history(
get_token_transfers_by_address(params.address))
transfers.sort(
key=lambda transfer:
datetime.strptime(transfer.block_timestamp,ck.DATETIME_FORMAT_FOR_QUERIED_TRANSFERS)
transfer.block_timestamp
)
if params.order == ck.DESC:
transfers.reverse()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
import moralis
import os
from requests_html import HTMLSession
from datetime import datetime

from data_handler.models.base_models.transaction_time import TransactionTime
from data_handler.models.base_models.query_parameters \
import TransactionsQueryParameters , TokenTransfersQueryParameters
from data_handler.models.base_models.query_parameters \
Expand Down Expand Up @@ -37,12 +39,14 @@ def __query_wallet_transactions(
event: str,
):
address = params.address
transaction_time = TransactionTime(0)
transactions = []
try:
start_time = datetime.now()
while params.cursor is not None:
tnxs, cursor = self.__query_wallet_transactions_page(
params, query,
)
)
transactions.extend(tnxs)
params.cursor = cursor
cnt = len(transactions)
Expand All @@ -54,8 +58,14 @@ def __query_wallet_transactions(
else:
s += ' Done.' + ' ' * 25
print(s, end='\r')
end_time = datetime.now()
difference = (end_time - start_time).total_seconds()
transaction_time.last_transaction_count = cnt
if cnt != 0:
TransactionTime.average_time.append(difference / cnt)
except Exception as e:
if 'Reason: Internal Server Error' in str(e):
print(e)
print(f'Internal Server Error querying {event}s for {address}')
else:
print(e)
Expand Down
Loading