
Commit

feat: user balances (#2)
* feat: add transactions exporter to docker

* chore: reduce unnecessary printing

* feat: user balances
BobTheBuidler authored Oct 13, 2021
1 parent ac80305 commit 8d526ed
Showing 4 changed files with 67 additions and 25 deletions.
6 changes: 1 addition & 5 deletions entrypoint.sh
@@ -5,12 +5,8 @@ NETWORK="mainnet" # default to Mainnet (Infura)
EXPLORER=${EXPLORER:-https://api.etherscan.io/api}

if [[ ! -z "$WEB3_PROVIDER" ]]; then
  NETWORK="mainnet-custom"

  if [[ ! $(brownie networks list | grep mainnet-custom) ]]; then
    brownie networks add Ethereum $NETWORK host=$WEB3_PROVIDER chainid=1 explorer=$EXPLORER
  else
    brownie networks modify $NETWORK host=$WEB3_PROVIDER chainid=1 explorer=$EXPLORER
    brownie networks modify mainnet host=$WEB3_PROVIDER chainid=1 explorer=$EXPLORER
  fi
fi

3 changes: 1 addition & 2 deletions scripts/transactions.py
@@ -70,7 +70,6 @@ def _get_price(event, vault):
        print(f'ValueError: {str(e)}')
        if str(e) in ["Failed to retrieve data from API: {'status': '0', 'message': 'NOTOK', 'result': 'Max rate limit reached'}","Failed to retrieve data from API: {'status': '0', 'message': 'NOTOK', 'result': 'Max rate limit reached, please use API Key for higher rate limit'}"]:
            # Try again
            print(str(e))
            print('trying again...')
            time.sleep(5)
        else:
@@ -106,7 +105,7 @@ def _vault_transfers(vault, start_block, end_block) -> pd.DataFrame:
        web3.codec,
    )
    events = decode_logs(get_logs_asap(vault.address, topics, from_block = start_block, to_block = end_block))
    return pd.DataFrame(Parallel(16,'threading')(delayed(_process_event)(event, vault, vault_symbol, vault_decimals) for event in tqdm(events)))
    return pd.DataFrame(Parallel(16,'threading')(delayed(_process_event)(event, vault, vault_symbol, vault_decimals) for event in events))

def main():
    while True:
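The `_vault_transfers` hunk keeps the joblib pattern of fanning each decoded log event out to a 16-worker thread pool and collecting the per-event dicts into a DataFrame, swapping `tqdm(events)` for a bare `events` iterator. A minimal, self-contained sketch of that pattern is below; the `process_event` helper and toy event payloads are illustrative stand-ins, not code from this repo.

import pandas as pd
from joblib import Parallel, delayed

def process_event(event):
    # Stand-in for _process_event: flatten one raw event into a row dict.
    return {'block': event['block'], 'value': event['value'] / 10 ** 18}

events = [{'block': 13_000_000 + i, 'value': i * 10 ** 18} for i in range(5)]

# Same call shape as Parallel(16,'threading')(...) in the diff: 16 threads,
# one delayed call per event, results gathered in order into a DataFrame.
rows = Parallel(16, 'threading')(delayed(process_event)(event) for event in events)
df = pd.DataFrame(rows)
print(df)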
65 changes: 47 additions & 18 deletions scripts/transactions_metrics.py
@@ -2,9 +2,9 @@
from datetime import datetime, timedelta
from decimal import Decimal
from itertools import count
from brownie.network.contract import Contract

import pandas as pd
import requests
from brownie import ZERO_ADDRESS, chain
from joblib import Parallel, delayed
from tqdm import tqdm
@@ -102,30 +102,51 @@ def _users(df,vault = None):
    return df['to'].unique()


def _user_balances(df: pd.DataFrame, block: int) -> pd.DataFrame:
    def _get_price(vault):
        try:
            return Decimal(get_price(vault,block))
        except TypeError:
            return Decimal(get_price(Contract(vault).token(),block))
    sum_in = df[df['to'] != ZERO_ADDRESS][['to','vault','value']].rename(columns={'to':'user','value':'in'}).groupby(['user','vault']).sum().reset_index().set_index(['user','vault'])
    sum_out = df[df['from'] != ZERO_ADDRESS][['from','vault','value']].rename(columns={'from':'user','value':'out'}).groupby(['user','vault']).sum().reset_index().set_index(['user','vault'])
    df = sum_in[sum_in['in'] > 0].join(sum_out).fillna(0).reset_index()
    df['balance'] = df['in'] - df['out']
    df['price'] = df['vault'].apply(_get_price)
    df['usd_bal'] = df['balance'] * df['price']
    return df

def _count_users_by_num_vaults_used(df: pd.DataFrame):
    data = {}
    for user in _users(df):
        ct = len(df[df['to'] == user]['vault'].unique())
        try:
            data[f'num wallets used {ct} vaults'] += 1
        except:
            data[f'num wallets used {ct} vaults'] = 1
    return data

def _process_vault(df,vault):
    print(f'vault: {vault}')
    users = _users(df,vault)
    data = {
        'lifetime_users': len(users),
    }
    return data
    df = df[['to','vault']].groupby(['to','vault']).size().reset_index()
    df = df[df[0] > 0].groupby(['to']).size().reset_index()
    return {f'num wallets used {num} vaults': val for num, val in df.groupby([0]).size().items()}

def _export_block(df,block):
    print(f'exporting block {block}')
    df = df[df['block'] <= block]
    user_balances = _user_balances(df,block)
    data = {'stats': _count_users_by_num_vaults_used(df)}
    data['stats']['total_users'] = len(_users(df))
    data['vaults'] = {vault: _process_vault(df,vault) for vault in _vaults(df)}
    data['vaults'] = {
        vault: {
            'lifetime_users': len(_users(df,vault=vault)),
            'user_balances': {
                row.user: {
                    'token_bal': row.balance,
                    'usd_bal': row.usd_bal
                } for row in user_balances[user_balances['vault'] == vault].itertuples() if row.balance > 0
            },
            'churned_users': sum(1 for row in user_balances[user_balances['vault'] == vault].itertuples() if row.usd_bal <= 10),
        } for vault in tqdm(_vaults(df))
    }
    sum_bals = user_balances[['user','usd_bal']].groupby('user').sum().reset_index()
    data['stats']['$1k+'] = sum(1 for row in sum_bals.itertuples() if row.usd_bal > 1000)
    data['stats']['$10k+'] = sum(1 for row in sum_bals.itertuples() if row.usd_bal > 10000)
    data['stats']['$100k+'] = sum(1 for row in sum_bals.itertuples() if row.usd_bal > 100000)
    data['stats']['$1m+'] = sum(1 for row in sum_bals.itertuples() if row.usd_bal > 1000000)
    data['stats']['$10m+'] = sum(1 for row in sum_bals.itertuples() if row.usd_bal > 10000000)
    data['stats']['$100m+'] = sum(1 for row in sum_bals.itertuples() if row.usd_bal > 100000000)
    print(sum_bals)
    return data


@@ -151,3 +172,11 @@ def main(block = None):
    print(vaults_df.drop(columns=['user_balances']))
    vaults_df.drop(columns=['user_balances']).to_csv('./reports/vault_stats.csv', index=False)

    # user balances
    users_df = pd.melt(
        vaults_df[['block','vault']].join(vaults_df['user_balances'].apply(pd.Series)),
        id_vars=['block','vault']
    ).dropna().rename(columns={'variable':'address'})
    users_df = users_df.drop(columns=['value']).join(users_df['value'].apply(pd.Series))
    print(users_df)
    users_df.to_csv('./reports/user_stats.csv', index=False)
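Taken together, the new `_user_balances` helper nets each wallet's vault-share inflows against its outflows per (user, vault) pair, then prices the remainder to get `usd_bal`. A toy, self-contained sketch of that aggregation follows; the addresses are hypothetical and the `get_price`/`Contract` lookup is replaced by a comment.

import pandas as pd

ZERO_ADDRESS = '0x0000000000000000000000000000000000000000'

# Two transfers: a deposit mint to alice, then alice sends half her shares to bob.
transfers = pd.DataFrame([
    {'from': ZERO_ADDRESS, 'to': '0xalice', 'vault': '0xvault', 'value': 100},
    {'from': '0xalice', 'to': '0xbob', 'vault': '0xvault', 'value': 50},
])

sum_in = (transfers[transfers['to'] != ZERO_ADDRESS][['to', 'vault', 'value']]
          .rename(columns={'to': 'user', 'value': 'in'})
          .groupby(['user', 'vault']).sum())
sum_out = (transfers[transfers['from'] != ZERO_ADDRESS][['from', 'vault', 'value']]
           .rename(columns={'from': 'user', 'value': 'out'})
           .groupby(['user', 'vault']).sum())

balances = sum_in.join(sum_out).fillna(0).reset_index()
balances['balance'] = balances['in'] - balances['out']
print(balances)  # alice nets 50, bob nets 50; usd_bal would be balance * share price

`_export_block` then sums `usd_bal` per user (`sum_bals`) and bucket-counts wallets into the $1k+ through $100m+ tiers, while per-vault entries record `lifetime_users`, non-zero `user_balances`, and `churned_users` (wallets at or below $10).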
18 changes: 18 additions & 0 deletions services/dashboard/docker-compose.yml
@@ -30,6 +30,24 @@ services:
      - yearn-exporter
    restart: on-failure

  transactions-exporter:
    build: .
    command: transactions
    environment:
      - WEB3_INFURA_PROJECT_ID
      - WEB3_PROVIDER
      - ETHERSCAN_TOKEN
      - EXPLORER
      - SLEEP_SECONDS
    volumes:
      - solidity_compilers:/root/.solcx
      - vyper_compilers:/root/.vvm
      - brownie:/root/.brownie
      - cache:/app/yearn-exporter/cache
    networks:
      - yearn-exporter
    restart: on-failure

  historical-exporter:
    build: .
    command: historical_exporter
