commit 69ad3711cde9e3d8167f9111586a6d63a6018ae0
parent ed75502f46285bec406388f7939064814f713423
Author: lash <dev@holbrook.no>
Date: Sat, 12 Mar 2022 12:49:38 +0000
Add embedded normalization to filter, cache tx
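
Cache transactions and cache filters now take an optional normalizer
object exposing address, hash and value methods. CacheTx.init and the
new CacheFilter.add_senders / add_recipients helpers run the matching
fields through the normalizer before storing them; the default
NoopNormalizer passes every value through unchanged.

A minimal usage sketch, assuming a caller-supplied normalizer (the
HexNormalizer below is illustrative only and not part of this commit):

    import os
    from chainqueue.cache import CacheTokenTx, CacheFilter

    class HexNormalizer:
        # illustrative normalizer: lower-case addresses and hashes,
        # coerce values to int
        def address(self, v):
            return v.lower()

        def hash(self, v):
            return v.lower()

        def value(self, v):
            return int(v)

    normalizer = HexNormalizer()

    tx = CacheTokenTx(normalizer=normalizer)
    tx.init(os.urandom(32).hex().upper(), 0,
            os.urandom(20).hex().upper(),
            os.urandom(20).hex().upper(), 1024)
    # tx.tx_hash, tx.sender and tx.recipient are stored lower-cased

    fltr = CacheFilter(normalizer=normalizer)
    fltr.add_senders(os.urandom(20).hex().upper())
    fltr.add_recipients([os.urandom(20).hex(), os.urandom(20).hex()])
    # fltr.senders and fltr.recipients hold the normalized addresses
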
Diffstat:
 chainqueue/cache.py | 62 ++++++++++++++++++++++++++++++++++++++++++++++++++++----------
 tests/test_cache.py | 95 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-----------
2 files changed, 134 insertions(+), 23 deletions(-)
diff --git a/chainqueue/cache.py b/chainqueue/cache.py
@@ -1,10 +1,29 @@
# standard imports
import enum
+import logging
+logg = logging.getLogger(__name__)
-class CacheTx:
+
+class NoopNormalizer:
def __init__(self):
+ self.address = self.noop
+ self.hash = self.noop
+ self.value = self.noop
+
+
+ def noop(self, v):
+ return v
+
+
+noop_normalizer = NoopNormalizer()
+
+
+class CacheTx:
+
+ def __init__(self, normalizer=noop_normalizer):
+ self.normalizer = normalizer
self.sender = None
self.recipient = None
self.nonce = None
@@ -23,11 +42,11 @@ class CacheTx:
def init(self, tx_hash, nonce, sender, recipient, value):
- self.tx_hash = tx_hash
- self.sender = sender
- self.recipient = recipient
+ self.tx_hash = self.normalizer.hash(tx_hash)
+ self.sender = self.normalizer.address(sender)
+ self.recipient = self.normalizer.address(recipient)
self.nonce = nonce
- self.value = value
+ self.value = self.normalizer.value(value)
def deserialize(self, signed_tx):
@@ -46,8 +65,8 @@ class CacheTx:
class CacheTokenTx(CacheTx):
- def __init__(self): #, nonce, sender, recipient, src_token, dst_token, src_value, dst_value):
- super(CacheTokenTx, self).__init__()
+ def __init__(self, normalizer=noop_normalizer):
+ super(CacheTokenTx, self).__init__(normalizer=normalizer)
self.v_src_token = None
self.v_src_value = None
self.v_dst_token = None
@@ -61,9 +80,10 @@ class CacheSort(enum.Enum):
class CacheFilter:
- def __init__(self, senders=None, recipients=None, nonce=None, before=None, after=None, sort=CacheSort.DATE, reverse=False):
- self.senders = senders
- self.recipients = recipients
+ def __init__(self, normalizer=noop_normalizer, nonce=None, before=None, after=None, sort=CacheSort.DATE, reverse=False):
+ self.normalizer = normalizer
+ self.senders = None
+ self.recipients = None
self.nonce = nonce
self.before = before
self.after = after
@@ -71,6 +91,28 @@ class CacheFilter:
self.reverse = reverse
+ def add_senders(self, senders):
+ if self.senders == None:
+ self.senders = []
+ if isinstance(senders, str):
+ senders = [senders]
+ for sender in senders:
+ if self.normalizer != None:
+ sender = self.normalizer.address(sender)
+ self.senders.append(sender)
+
+
+ def add_recipients(self, recipients):
+ if self.recipients == None:
+ self.recipients = []
+ if isinstance(recipients, str):
+ recipients = [recipients]
+ for recipient in recipients:
+ if self.normalizer != None:
+ recipient = self.normalizer.address(recipient)
+ self.recipients.append(recipient)
+
+
class Cache:
def put(self, chain_spec, cache_tx):
diff --git a/tests/test_cache.py b/tests/test_cache.py
@@ -3,15 +3,18 @@ import os
import logging
import unittest
import hashlib
+import math
# external imports
from hexathon import add_0x
+from chainlib.chain import ChainSpec
# local imports
from chainqueue import QueueEntry
from chainqueue.cache import (
CacheTokenTx,
Cache,
+ CacheFilter,
)
# test imports
@@ -51,40 +54,106 @@ class MockCacheTokenTx(CacheTokenTx):
z = h.digest()
tx_hash = z.hex()
- tx = CacheTokenTx()
- tx.init(tx_hash, nonce, sender, recipient, value)
- tx.set('src_token', token)
- tx.set('dst_token', token)
- tx.set('src_value', token_value)
- tx.set('dst_value', token_value)
- tx.confirm(42, 13, 1024000)
+ #tx = CacheTokenTx(normalizer=self.normalizer)
+ self.init(tx_hash, nonce, sender, recipient, value)
+ self.set('src_token', token)
+ self.set('dst_token', token)
+ self.set('src_value', token_value)
+ self.set('dst_value', token_value)
+ self.confirm(42, 13, 1024000)
- return tx
+ return self
class MockTokenCache(Cache):
def __init__(self):
self.db = {}
+ self.last_filter = None
def put(self, chain_spec, cache_tx):
self.db[cache_tx.tx_hash] = cache_tx
def get(self, chain_spec, tx_hash):
- pass
+ return self.db[tx_hash]
+
+
+ def by_nonce(self, cache_filter):
+ self.last_filter = cache_filter
+
+
+ def by_date(self, cache_filter=None):
+ self.last_filter = cache_filter
+
+
+ def count(self, cache_filter):
+ self.last_filter = cache_filter
+
+
+class MockNormalizer:
+
+ def address(self, v):
+ return 'address' + v
+
+
+ def value(self, v):
+ dv = int(math.log10(v) + 1)
+ return float(v / (10 ** dv))
+
+
+ def hash(self, v):
+ return 'ashbashhash' + v
class TestCache(TestShepBase):
def setUp(self):
super(TestCache, self).setUp()
- self.tx = MockCacheTokenTx()
+ self.chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+ self.cache = MockTokenCache()
+
+
+ def test_cache_instance(self):
+ normalizer = MockNormalizer()
+ a = b'foo'
+ tx = MockCacheTokenTx(normalizer=normalizer)
+ tx.deserialize(a)
+ self.assertTrue(isinstance(tx.value, float))
+ self.assertEqual(tx.sender[:4], 'addr')
+ self.assertEqual(tx.recipient[:4], 'addr')
+ self.assertEqual(tx.tx_hash[:11], 'ashbashhash')
- def test_basic_translator(self):
+
+ def test_cache_putget(self):
a = b'foo'
- tx = self.tx.deserialize(a)
- print(tx)
+ tx = MockCacheTokenTx()
+ tx.deserialize(a)
+ self.cache.put(self.chain_spec, tx)
+ tx_retrieved = self.cache.get(self.chain_spec, tx.tx_hash)
+ self.assertEqual(tx, tx_retrieved)
+
+
+ def test_cache_filter(self):
+ normalizer = MockNormalizer()
+ fltr = CacheFilter(normalizer=normalizer)
+
+ sender = os.urandom(20).hex()
+ fltr.add_senders(sender)
+
+ recipient_one = os.urandom(20).hex()
+ recipient_two = os.urandom(20).hex()
+ fltr.add_recipients([recipient_one, recipient_two])
+
+ self.assertEqual(fltr.senders[0][:4], 'addr')
+ self.assertEqual(fltr.recipients[1][:4], 'addr')
+
+
+ def test_cache_query(self):
+ a = os.urandom(20).hex()
+ fltr = CacheFilter(nonce=42)
+ self.cache.count(fltr)
+ self.assertEqual(self.cache.last_filter, fltr)
if __name__ == '__main__':