3 changes: 3 additions & 0 deletions .idea/.gitignore

15 changes: 15 additions & 0 deletions .idea/bitcoin-etl.iml

63 changes: 63 additions & 0 deletions .idea/inspectionProfiles/Project_Default.xml

6 changes: 6 additions & 0 deletions .idea/inspectionProfiles/profiles_settings.xml

8 changes: 8 additions & 0 deletions .idea/modules.xml

6 changes: 6 additions & 0 deletions .idea/vcs.xml

8 changes: 6 additions & 2 deletions bitcoinetl/streaming/streaming_utils.py
@@ -13,7 +13,7 @@ def get_item_exporter(output,topic_mapping,chain):
'transaction': output + '.transactions'
},
message_attributes=('item_id',))
else:
elif item_exporter_type == ItemExporterType.CONSOLE:
item_exporter = ConsoleItemExporter()

elif item_exporter_type == ItemExporterType.KAFKA:
@@ -22,6 +22,7 @@ def get_item_exporter(output,topic_mapping,chain):
item_exporter = KafkaItemExporter(output, item_type_to_topic_mapping={
'block': f"producer-{chain}-blocks-hot",
'transaction': f"producer-{chain}-transactions-hot",
'transaction_raw': f"producer-{chain}-transactions-raw-hot",
})
else:
item_exporter = KafkaItemExporter(output, item_type_to_topic_mapping=topic_mapping)
@@ -36,11 +37,14 @@ def determine_item_exporter_type(output):
return ItemExporterType.PUBSUB
if output is not None and output.startswith('kafka'):
return ItemExporterType.KAFKA
if output is not None and output.startswith('console'):
return ItemExporterType.CONSOLE
else:
return ItemExporterType.UNKNOWN


class ItemExporterType:
PUBSUB = 'pubsub'
KAFKA = 'kafka'
UNKNOWN = 'unknown'
UNKNOWN = 'unknown'
CONSOLE = 'console'
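
Net effect of this file: console output becomes an explicit `ItemExporterType.CONSOLE` case instead of the catch-all `else`, and the per-chain Kafka mapping gains a `transaction_raw` topic. A minimal standalone sketch of the prefix-based routing after the change (the Pub/Sub branch is elided, and the example output strings are illustrative, not mandated by the code above):

```python
# Standalone sketch of determine_item_exporter_type after this change.
class ItemExporterType:
    PUBSUB = 'pubsub'
    KAFKA = 'kafka'
    UNKNOWN = 'unknown'
    CONSOLE = 'console'


def determine_item_exporter_type(output):
    # Route on the prefix of the --output string; anything unrecognised is UNKNOWN.
    if output is not None and output.startswith('kafka'):
        return ItemExporterType.KAFKA
    if output is not None and output.startswith('console'):
        return ItemExporterType.CONSOLE
    return ItemExporterType.UNKNOWN


print(determine_item_exporter_type('kafka/localhost:9092'))  # kafka (illustrative string)
print(determine_item_exporter_type('console'))               # console
print(determine_item_exporter_type(None))                    # unknown
```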
63 changes: 32 additions & 31 deletions blockchainetl/jobs/exporters/bitcoin_flatten.py
@@ -3,6 +3,8 @@
from decimal import Decimal




def flatten_transformation(payload_dict):

TYPE_EXTERNAL = 1
@@ -11,46 +13,45 @@ def flatten_transformation(payload_dict):
TYPE_BLOCK_REWARD = 3

transformed_transactions = []
for output in payload_dict["outputs"]:
for input in payload_dict["inputs"]:
if not payload_dict["is_coinbase"]:
if output["value"] > 0:
token_outgoing_value = Decimal((1e-8 * input["value"]) * (1e-8 * output["value"]) / (1e-8 * payload_dict["output_value"]))
else:
token_outgoing_value = Decimal((1e-8 * input["value"]) / payload_dict["output_count"])
if input["value"] > 0:
token_incoming_value = Decimal((1e-8 * input["value"]) * (1e-8 * output["value"]) / (1e-8 * payload_dict["input_value"]))
else:
token_incoming_value = 0

token_outgoing_fee = token_outgoing_value - token_incoming_value
if not payload_dict["is_coinbase"]:
for output in payload_dict["outputs"]:
for input in payload_dict["inputs"]:
if output["value"] > 0:
token_outgoing_value = Decimal((input["value"]) * (output["value"]) / (payload_dict["output_value"]))
else:
token_outgoing_value = Decimal((input["value"]) / payload_dict["output_count"])
if input["value"] > 0:
token_incoming_value = Decimal((input["value"]) * (output["value"]) / (payload_dict["input_value"]))
else:
token_incoming_value = 0
token_outgoing_fee = token_outgoing_value - token_incoming_value

transformed_transactions.append({
"block": payload_dict["block_number"],
"transaction_id": payload_dict["hash"],
"transaction_ts": payload_dict["block_timestamp"],
"transaction_type": TYPE_EXTERNAL,
"sender_address": "|".join(input["addresses"]),
"receiver_address": "|".join(output["addresses"]),
"token_outgoing_value": str(float(token_outgoing_value)),
"token_address": default_token_address,
"token_incoming_value": str(float(token_incoming_value)),
"token_outgoing_fee": str(float(token_outgoing_fee))
})
else:
transformed_transactions.append({
"block": payload_dict["block_number"],
"transaction_id": payload_dict["hash"],
"transaction_ts": payload_dict["block_timestamp"],
"transaction_type": TYPE_EXTERNAL,
"sender_address": "|".join(input["addresses"]),
"receiver_address": "|".join(output["addresses"]),
"token_outgoing_value": str(float(token_outgoing_value)),
"token_address": default_token_address,
"token_incoming_value": str(float(token_incoming_value)),
"token_outgoing_fee": str(float(token_outgoing_fee))
})
else:
for output in payload_dict["outputs"]:
transformed_transactions.append({
"block": payload_dict["block_number"],
"transaction_id": payload_dict["hash"],
"transaction_ts": payload_dict["block_timestamp"],
"transaction_type": TYPE_BLOCK_REWARD,
"sender_address": f"{NULL_ADDRESS_MINT}_{datetime.datetime.fromtimestamp(payload_dict['block_timestamp']).month}",
"receiver_address": "|".join(output["addresses"]),
"token_outgoing_value": str(1e-8 * output["value"]),
"token_incoming_value": str(1e-8 * output["value"]),
"token_outgoing_value": str(output["value"]),
"token_incoming_value": str(output["value"]),
"token_address": default_token_address,
"token_outgoing_fee": str(0)
})


return transformed_transactions

return transformed_transactions
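
The restructure above hoists the `is_coinbase` check out of the nested loops and drops the `1e-8` scaling, so values stay in raw satoshis. The proportional split itself is unchanged: each (input, output) pair is credited `input_value * output_value / total_output_value` outgoing and `input_value * output_value / total_input_value` incoming, and the difference is that pair's share of the fee. A worked sketch on made-up satoshi amounts (not taken from the PR):

```python
from decimal import Decimal

# Made-up amounts: 100_000 sat in, 95_000 sat out, so the transaction fee is 5_000 sat.
inputs = [60_000, 40_000]
outputs = [70_000, 25_000]
total_in, total_out = sum(inputs), sum(outputs)

fees = []
for out_v in outputs:
    for in_v in inputs:
        outgoing = Decimal(in_v * out_v / total_out)  # this input's share sent toward this output
        incoming = Decimal(in_v * out_v / total_in)   # the same share net of the implied fee
        fees.append(outgoing - incoming)
        print(f"in={in_v} out={out_v} outgoing={outgoing:.2f} incoming={incoming:.2f}")

print(round(sum(fees)))  # 5000: the pairwise fees add up to the transaction fee (up to float rounding)
```

Because the outgoing shares sum to the total input and the incoming shares to the total output, the per-pair fees always add up to the whole transaction fee.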
2 changes: 2 additions & 0 deletions blockchainetl/jobs/exporters/kafka_exporter.py
@@ -25,6 +25,7 @@ def __init__(self, output, item_type_to_topic_mapping, converters=()):
"sasl.username": os.getenv("KAFKA_PRODUCER_KEY"),
"sasl.password": os.getenv("KAFKA_PRODUCER_PASSWORD"),
"queue.buffering.max.messages": 10000000,
"compression.type": "gzip"
}

self.producer = Producer(conf)
@@ -47,6 +48,7 @@ def export_items(self, items):
transformed_data = flatten_transformation(item)
for data in transformed_data:
self.export_item(data,item_type)
self.export_item(item,"transaction_raw")
else:
self.export_item(item,item_type)
else:
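
Two behavioural additions here: producer batches are now gzip-compressed, and flattened transactions are additionally re-exported unmodified under the `transaction_raw` item type, feeding the new `producer-<chain>-transactions-raw-hot` topic. A minimal sketch of the compression setting, assuming the exporter's `Producer` is `confluent_kafka.Producer` (the librdkafka-style config keys suggest it); broker and topic names below are placeholders:

```python
from confluent_kafka import Producer

conf = {
    "bootstrap.servers": "localhost:9092",       # placeholder broker
    "queue.buffering.max.messages": 10_000_000,
    "compression.type": "gzip",                  # compress message batches before sending
}
producer = Producer(conf)
producer.produce("producer-bitcoin-transactions-raw-hot",  # illustrative topic name
                 value=b'{"type": "transaction_raw"}')
producer.flush()
```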
2 changes: 1 addition & 1 deletion last_synced_block.txt
@@ -1 +1 @@
804002
806550