From 482403c8878522627bc4713ce984bcdc8c7c4132 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 28 Jun 2023 14:17:31 -0400
Subject: [PATCH 001/116] Expose `.accounting.load_account()`

---
 piker/accounting/__init__.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py
index 8f55217c5..707c66007 100644
--- a/piker/accounting/__init__.py
+++ b/piker/accounting/__init__.py
@@ -28,6 +28,7 @@
     open_trade_ledger,
 )
 from ._pos import (
+    load_account,
     load_pps_from_ledger,
     open_pps,
     Position,
@@ -61,6 +62,7 @@
     'dec_digits',
     'digits_to_dec',
     'iter_by_dt',
+    'load_account',
     'load_pps_from_ledger',
     'mk_allocator',
     'open_pps',

From c780164f699842b6395eaa5cf7cb603420e6b782 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 28 Jun 2023 14:17:56 -0400
Subject: [PATCH 002/116] Fix test to use new `load_account()` location

---
 tests/test_accounting.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/test_accounting.py b/tests/test_accounting.py
index f5a3bd8dd..dacffb98b 100644
--- a/tests/test_accounting.py
+++ b/tests/test_accounting.py
@@ -8,6 +8,7 @@
 from pathlib import Path

 from piker import config
+from piker.accounting import load_account


 def test_root_conf_networking_section(
@@ -23,7 +24,7 @@ def test_root_conf_networking_section(
 def test_account_file_default_empty(
     tmpconfdir: Path,
 ):
-    conf, path = config.load_account(
+    conf, path = load_account(
         'kraken',
         'paper',
     )

From 87d611595486c0958c23f0a310614a2956d44985 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 29 Jun 2023 12:21:53 -0400
Subject: [PATCH 003/116] Add src asset name ignore via
 `MktPair._fqme_without_src: bool`

---
 piker/accounting/_mktinfo.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index c1f14f9fb..6cab676b3 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -290,6 +290,10 @@ class MktPair(Struct, frozen=True):

     _atype: str = ''

+    # allow explicit disable of the src part of the market
+    # pair name -> useful for legacy markets like qqq.nasdaq.ib
+    _fqme_without_src: bool = False
+
     # NOTE: when cast to `str` return fqme
     def __str__(self) -> str:
         return self.fqme
@@ -486,7 +490,7 @@ def get_fqme(
         '''
         key: str = (
             self.pair(delim_char=delim_char)
-            if not without_src
+            if not (without_src or self._fqme_without_src)
             else str(self.dst)
         )

From 75f01e22d7dcd61e6eb2ea4809ea0064b98b517b Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 29 Jun 2023 13:56:01 -0400
Subject: [PATCH 004/116] Drop `Position.expiry`, delegate to `.mkt: MktPair`

No point having duplicate data when we already stash the `expiry` on
the mkt info type and can just read it (and cast it to a `datetime`
obj) from there.

Further, this fixes a regression caused by converting `._clears` to
a list: the new `._events: dict[str, Transaction]` table now prevents
double-entering transactions by checking it for an existing id. Also
add a sanity check that all events are popped (for now) after
serializing the clearing table for the toml account file.

In the longer run, ideally we don't keep the separate `._clears` and
`._events` sequences and instead choose a better data structure
(a sorted, unique set of mkt events), maybe a specially purposed
`polars.DataFrame` (which we kinda need eventually anyway)?
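As a purely illustrative sketch (NOT part of this patch; the column
names mirror our clears-table fields but the df schema and ops here
are just one possible shape), such a df could give us both the
dedup-on-id and datetime-sort invariants as single built-in ops:

    import polars as pl

    # one row per mkt event: drop double-entered `tid`s, keep
    # datetime order and derive the accumulative size as a column.
    events = pl.DataFrame({
        'tid': ['t0', 't1', 't1'],  # note the double-entered id
        'dt': [
            '2023-06-28T14:00:00',
            '2023-06-28T14:05:00',
            '2023-06-28T14:05:00',
        ],
        'size': [2.0, -1.0, -1.0],
        'price': [100.0, 101.0, 101.0],
    }).unique(
        subset='tid',
        keep='first',
    ).sort('dt').with_columns(
        pl.col('size').cumsum().alias('cumsize'),
    )

i.e. the hand-rolled dict/list bookkeeping becomes a couple df ops.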
--- piker/accounting/_pos.py | 49 +++++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 18 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 1288c6886..317a6d6a9 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -103,9 +103,13 @@ class Position(Struct): _clears: list[ dict[str, Any], # transaction history summaries ] = [] + _events: dict[str, dict] = {} first_clear_dt: datetime | None = None - expiry: datetime | None = None + @property + def expiry(self) -> datetime | None: + if exp := self.mkt.expiry: + return pendulum.parse(exp) def __repr__(self) -> str: return pformat(self.to_dict()) @@ -121,7 +125,7 @@ def to_pretoml(self) -> tuple[str, dict]: ''' asdict = self.to_dict() clears: list[dict] = asdict.pop('_clears') - expiry = asdict.pop('expiry') + events: dict[str, Transaction] = asdict.pop('_events') if self.split_ratio is None: asdict.pop('split_ratio') @@ -148,10 +152,8 @@ def to_pretoml(self) -> tuple[str, dict]: asdict['price_tick'] = mkt.price_tick asdict['size_tick'] = mkt.size_tick - if self.expiry is None: - asdict.pop('expiry', None) - elif expiry: - asdict['expiry'] = str(expiry) + if exp := self.expiry: + asdict['expiry'] = exp.isoformat('T') clears_table: tomlkit.Array = tomlkit.array() clears_table.multiline( @@ -165,8 +167,8 @@ def to_pretoml(self) -> tuple[str, dict]: inline_table = tomlkit.inline_table() # serialize datetime to parsable `str` - dtstr = inline_table['dt'] = entry['dt'].isoformat('T') - assert 'Datetime' not in dtstr + inline_table['dt'] = entry['dt'].isoformat('T') + # assert 'Datetime' not in inline_table['dt'] # insert optional clear fields in column order for k in ['ppu', 'accum_size']: @@ -177,11 +179,13 @@ def to_pretoml(self) -> tuple[str, dict]: for k in ['price', 'size', 'cost']: inline_table[k] = entry[k] - inline_table['tid'] = entry['tid'] + tid: str = entry['tid'] + events.pop(tid) + inline_table['tid'] = tid clears_table.append(inline_table) + assert not events asdict['clears'] = clears_table - return fqme, asdict def ensure_state(self) -> None: @@ -502,6 +506,11 @@ def add_clear( Inserts are always done in datetime sorted order. ''' + tid: str = t.tid + if tid in self._events: + log.warning(f'{t} is already added?!') + return {} + clear: dict[str, float | str | int] = { 'tid': t.tid, 'cost': t.cost, @@ -509,6 +518,7 @@ def add_clear( 'size': t.size, 'dt': t.dt } + self._events[tid] = t insort( self._clears, @@ -526,6 +536,8 @@ def add_clear( self.size = clear['accum_size'] = self.calc_size() self.ppu = clear['ppu'] = self.calc_ppu() + assert len(self._events) == len(self._clears) + return clear # TODO: once we have an `.events` table with diff @@ -580,7 +592,6 @@ def update_from_trans( size=0.0, ppu=0.0, bs_mktid=bs_mktid, - expiry=t.expiry, ) else: # NOTE: if for some reason a "less resolved" mkt pair @@ -683,7 +694,7 @@ def to_toml( # ONLY dict-serialize all active positions; those that are # closed we don't store in the ``pps.toml``. 
-        to_toml_dict = {}
+        to_toml_dict: dict[str, Any] = {}
 
         pos: Position
         for bs_mktid, pos in active.items():
@@ -919,10 +930,14 @@ def open_pps(
             for clears_table in toml_clears_list:
                 tid = clears_table.get('tid')
-                dtstr = clears_table['dt']
-                dt = pendulum.parse(dtstr)
-                clears_table['dt'] = dt
+                dt: tomlkit.items.DateTime | str = clears_table['dt']
 
+                # woa cool, `tomlkit` will actually load datetimes into
+                # native form B)
+                if isinstance(dt, str):
+                    dt = pendulum.parse(dt)
+
+                clears_table['dt'] = dt
                 trans.append(Transaction(
                     fqme=bs_mktid,
                     sym=mkt,
@@ -944,8 +959,7 @@
 
             split_ratio = entry.get('split_ratio')
 
-            expiry = entry.get('expiry')
-            if expiry:
+            if expiry := entry.get('expiry'):
                 expiry = pendulum.parse(expiry)
 
             pp = pp_objs[bs_mktid] = Position(
@@ -953,7 +967,6 @@
                 size=size,
                 ppu=ppu,
                 split_ratio=split_ratio,
-                expiry=expiry,
                 bs_mktid=bs_mktid,
             )

From 3ff9fb3e105e0c4a40693cf8f9e5323e4ccc1651 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 29 Jun 2023 14:01:36 -0400
Subject: [PATCH 005/116] clearing._messages: add todo to drop the
 `BrokerdPosition` msg

---
 piker/clearing/_messages.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py
index 219749716..4946b0c06 100644
--- a/piker/clearing/_messages.py
+++ b/piker/clearing/_messages.py
@@ -301,6 +301,8 @@ class BrokerdError(Struct):
     broker_details: dict = {}
 
 
+# TODO: yeah, so we REALLY need to completely deprecate
+# this and use the `.accounting.Position` msg-type instead..
 class BrokerdPosition(Struct):
     '''
     Position update event from brokerd.

From 9748b22d34186464e133ab7285ad8153bbefd7e2 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 29 Jun 2023 14:03:16 -0400
Subject: [PATCH 006/116] Always include the src asset (in parquet file names)
 for fiat pairs

---
 piker/data/history.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/piker/data/history.py b/piker/data/history.py
index 99fd425df..46c5f5a6f 100644
--- a/piker/data/history.py
+++ b/piker/data/history.py
@@ -366,7 +366,13 @@ async def start_backfill(
                     f'{next_start_dt} -> {last_start_dt}'
                 )
 
-        if mkt.dst.atype not in {'crypto', 'crypto_currency'}:
+        # always drop the src asset token for
+        # non-currency-pair like market types (for now)
+        if mkt.dst.atype not in {
+            'crypto',
+            'crypto_currency',
+            'fiat',  # a "forex pair"
+        }:
             # for now, our table key schema is not including
             # the dst[/src] source asset token.
             col_sym_key: str = mkt.get_fqme(

From c0929c042a0f3e14422fa67dff077a8c3413ab20 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 29 Jun 2023 14:03:57 -0400
Subject: [PATCH 007/116] ib: fix `Client.trades()` return type annot

---
 piker/brokers/ib/api.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py
index fd0d024d6..880e9f531 100644
--- a/piker/brokers/ib/api.py
+++ b/piker/brokers/ib/api.py
@@ -24,7 +24,10 @@
     contextmanager as cm,
 )
 from contextlib import AsyncExitStack
-from dataclasses import asdict, astuple
+from dataclasses import (
+    asdict,
+    astuple,
+)
 from datetime import datetime
 from functools import (
     partial,
@@ -402,13 +405,13 @@ def __init__(
 
     # NOTE: the ib.client here is "throttled" to 45 rps by default
 
-    async def trades(self) -> dict[str, Any]:
+    async def trades(self) -> list[dict]:
         '''
         Return list of trade-fills from current session in ``dict``.
 
''' - fills: list[Fill] = self.ib.fills() norm_fills: list[dict] = [] + fills: list[Fill] = self.ib.fills() for fill in fills: fill = fill._asdict() # namedtuple for key, val in fill.items(): From 10ebc855e490569b84843aaad45e6ebe1fc387b5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 29 Jun 2023 14:04:24 -0400 Subject: [PATCH 008/116] ib: fully handle `MktPair.src` and `.dst` in ledger loading In an effort to properly support fiat pairs (aka forex) as well as more generally insert a fully-qualified `MktPair` in for the `Transaction.sys`. Note that there's a bit of special handling for API `Contract`s-as-dict records vs. flex-report-from-xml equivalents. --- piker/brokers/ib/ledger.py | 168 ++++++++++++++++++++++++++----------- 1 file changed, 121 insertions(+), 47 deletions(-) diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py index 2d1c1003b..805cdaf62 100644 --- a/piker/brokers/ib/ledger.py +++ b/piker/brokers/ib/ledger.py @@ -29,6 +29,7 @@ import pendulum from piker.accounting import ( + Asset, dec_digits, digits_to_dec, Transaction, @@ -43,10 +44,12 @@ def norm_trade_records( ) -> dict[str, Transaction]: ''' - Normalize a flex report or API retrieved executions - ledger into our standard record format. + Normalize (xml) flex-report or (recent) API trade records into + our ledger format with parsing for `MktPair` and `Asset` + extraction to fill in the `Transaction.sys: MktPair` field. ''' + # select: list[transactions] = [] records: list[Transaction] = [] for tid, record in ledger.items(): @@ -64,26 +67,25 @@ def norm_trade_records( 'SLD': -1, }[record['side']] - exch = record['exchange'] - lexch = record.get('listingExchange') + symbol: str = record['symbol'] + exch: str = record.get('listingExchange') or record['exchange'] # NOTE: remove null values since `tomlkit` can't serialize # them to file. - dnc = record.pop('deltaNeutralContract', False) - if dnc is not None: + if dnc := record.pop('deltaNeutralContract', None): record['deltaNeutralContract'] = dnc - suffix = lexch or exch - symbol = record['symbol'] - # likely an opts contract record from a flex report.. # TODO: no idea how to parse ^ the strike part from flex.. # (00010000 any, or 00007500 tsla, ..) # we probably must do the contract lookup for this? - if ' ' in symbol or '--' in exch: + if ( + ' ' in symbol + or '--' in exch + ): underlying, _, tail = symbol.partition(' ') - suffix = exch = 'opt' - expiry = tail[:6] + exch: str = 'opt' + expiry: str = tail[:6] # otype = tail[6] # strike = tail[7:] @@ -108,45 +110,107 @@ def norm_trade_records( 'assetCategory' ) or record.get('secType', 'STK') - # TODO: XXX: WOA this is kinda hacky.. probably - # should figure out the correct future pair key more - # explicitly and consistently? - if asset_type == 'FUT': - # (flex) ledger entries don't have any simple 3-char key? - symbol = record['symbol'][:3] - asset_type: str = 'future' - - elif asset_type == 'STK': - asset_type: str = 'stock' - - # try to build out piker fqme from record. - expiry = ( - record.get('lastTradeDateOrContractMonth') - or record.get('expiry') + if (expiry := ( + record.get('lastTradeDateOrContractMonth') + or record.get('expiry') + ) + ): + expiry: str = str(expiry).strip(' ') + # NOTE: we directly use the (simple and usually short) + # date-string expiry token when packing the `MktPair` + # since we want the fqme to contain *that* token. 
+ # It might make sense later to instead parse and then + # render different output str format(s) for this same + # purpose depending on asset-type-market down the road. + # Eg. for derivs we use the short token only for fqme + # but use the isoformat('T') for transactions and + # account file position entries? + # dt_str: str = pendulum.parse(expiry).isoformat('T') + + # XXX: pretty much all legacy market assets have a fiat + # currency (denomination) determined by their venue. + currency: str = record['currency'] + src = Asset( + name=currency.lower(), + atype='fiat', + tx_tick=Decimal('0.01'), ) - if expiry: - expiry = str(expiry).strip(' ') - suffix = f'{exch}.{expiry}' - expiry = pendulum.parse(expiry) + match asset_type: + case 'FUT': + # (flex) ledger entries don't have any simple 3-char key? + # TODO: XXX: WOA this is kinda hacky.. probably + # should figure out the correct future pair key more + # explicitly and consistently? + symbol: str = symbol[:3] + dst = Asset( + name=symbol.lower(), + atype='future', + tx_tick=Decimal('1'), + ) + + case 'STK': + dst = Asset( + name=symbol.lower(), + atype='stock', + tx_tick=Decimal('1'), + ) + + case 'CASH': + if currency not in symbol: + # likely a dict-casted `Forex` contract which + # has .symbol as the dst and .currency as the + # src. + name: str = symbol.lower() + else: + # likely a flex-report record which puts + # EUR.USD as the symbol field and just USD in + # the currency field. + name: str = symbol.lower().replace(f'.{src.name}', '') + + dst = Asset( + name=name, + atype='fiat', + tx_tick=Decimal('0.01'), + ) + + case 'OPT': + dst = Asset( + name=symbol.lower(), + atype='option', + tx_tick=Decimal('1'), + ) + # try to build out piker fqme from record. # src: str = record['currency'] price_tick: Decimal = digits_to_dec(dec_digits(price)) - pair = MktPair.from_fqme( - fqme=f'{symbol}.{suffix}.ib', + # NOTE: can't serlialize `tomlkit.String` so cast to native + atype: str = str(dst.atype) + + pair = MktPair( bs_mktid=str(conid), - _atype=str(asset_type), # XXX: can't serlialize `tomlkit.String` + dst=dst, price_tick=price_tick, # NOTE: for "legacy" assets, volume is normally discreet, not # a float, but we keep a digit in case the suitz decide # to get crazy and change it; we'll be kinda ready # schema-wise.. - size_tick='1', + size_tick=Decimal('1'), + + src=src, # XXX: normally always a fiat + + _atype=atype, + + venue=exch, + expiry=expiry, + broker='ib', + + _fqme_without_src=(atype != 'fiat'), ) - fqme = pair.fqme + fqme: str = pair.fqme # NOTE: for flex records the normal fields for defining an fqme # sometimes won't be available so we rely on two approaches for @@ -158,22 +222,32 @@ def norm_trade_records( # should already have entries if the pps are still open, in # which case, we can pull the fqme from that table (see # `trades_dialogue()` above). 
+ trans = Transaction( + fqme=fqme, + sym=pair, + tid=tid, + size=size, + price=price, + cost=comms, + dt=dt, + expiry=expiry, + bs_mktid=str(conid), + ) insort( records, - Transaction( - fqme=fqme, - sym=pair, - tid=tid, - size=size, - price=price, - cost=comms, - dt=dt, - expiry=expiry, - bs_mktid=str(conid), - ), + trans, key=lambda t: t.dt ) + # if ( + # atype == 'fiat' + # or atype == 'option' + # ): + # select.append(trans) + + # if select: + # breakpoint() + return {r.tid: r for r in records} From 745c14431484485acbd40dcdde9c6ff4c350d417 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 29 Jun 2023 14:08:42 -0400 Subject: [PATCH 009/116] ib.feed: handle fiat (forex) pairs with `Asset` Also finally adds full `FeedInit` and `MktPair` support for this backend by handling: - all "currency" fields for each `Contract` by constructing and `Asset` and setting the `MktPair.src` with `.atype='fiat'`. - always render the `MktPair.src` name in the `.fqme` for fiat pairs (aka forex) but never for other instruments. --- piker/brokers/ib/feed.py | 57 +++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 33 deletions(-) diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 07e9b44f2..7673c2c53 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -140,32 +140,21 @@ async def open_history_client( # memory. # IB's internal symbology does not expect the "source asset" in - # the "symbol name", what we call the "market name". This is + # the "symbol name", what we call the "market pair name". This is # common in most legacy market brokers since it's presumed that # given a certain stock exchange, listed assets are traded - # "from" a particular source fiat, normally something like USD. - if ( - mkt.src - and mkt.src.atype == 'fiat' - ): - fqme_kwargs: dict[str, Any] = {} - - if mkt.dst.atype == 'forex': - - # XXX: for now we do need the src token kept in since - fqme_kwargs = { - 'without_src': False, # default is True - 'delim_char': '', # bc they would normally use a frickin `.` smh - } - - fqme: str = mkt.get_bs_fqme(**(fqme_kwargs)) + # "from" a particular source fiat, normally something like USD + # on the given venue-provider, eg. nasdaq, nyse, etc. + fqme_kwargs: dict[str, Any] = {} + if mkt.dst.atype != 'fiat': + fqme_kwargs = { + 'without_src': True, # default is True + 'delim_char': '', # bc they would normally use a frickin `.` smh + } - else: - fqme = mkt.bs_fqme + fqme: str = mkt.get_bs_fqme(**(fqme_kwargs)) async with open_data_client() as proxy: - - max_timeout: float = 2. mean: float = 0 count: int = 0 @@ -178,7 +167,8 @@ async def open_history_client( try: head_dt = await proxy.get_head_time(fqme=fqme) except RequestError: - head_dt = None + log.warning(f'Unable to get head time: {fqme} ?') + pass async def get_hist( timeframe: float, @@ -576,7 +566,7 @@ async def query(): 'OPT': 'option', 'FUT': 'future', 'CONTFUT': 'continuous_future', - 'CASH': 'forex', + 'CASH': 'fiat', 'IND': 'index', 'CFD': 'cfd', 'BOND': 'bond', @@ -837,7 +827,9 @@ async def get_mkt_info( # if con.secType == 'STK': size_tick = Decimal('1') else: - size_tick: Decimal = Decimal(str(details.minSize).rstrip('0')) + size_tick: Decimal = Decimal( + str(details.minSize).rstrip('0') + ) # |-> TODO: there is also the Contract.sizeIncrement, bt wtf is it? # NOTE: this is duplicate from the .broker.norm_trade_records() @@ -853,13 +845,11 @@ async def get_mkt_info( # we need to figure out how we're going to handle this (later?) 
     # but likely we want all backends to eventually handle
     # ``dst/src.venue.`` style !?
-    src: str | Asset = ''
-    if atype == 'forex':
-        src = Asset(
-            name=str(con.currency),
-            atype='fiat',
-            tx_tick=Decimal('0.01'),  # right?
-        )
+    src = Asset(
+        name=str(con.currency).lower(),
+        atype='fiat',
+        tx_tick=Decimal('0.01'),  # right?
+    )
 
     mkt = MktPair(
         dst=Asset(
@@ -879,6 +869,7 @@
 
         # TODO: options contract info as str?
         # contract_info=
+        _fqme_without_src=(atype != 'fiat'),
     )
 
     return mkt, details
@@ -920,7 +911,7 @@ async def stream_quotes(
 
     init_msg = FeedInit(mkt_info=mkt)
 
     if mkt.dst.atype in {
-        'forex',
+        'fiat',
         'index',
         'commodity',
    }:
@@ -947,7 +938,7 @@
             isnan(first_ticker.last)  # last quote price value is nan
             and mkt.dst.atype not in {
                 'commodity',
-                'forex',
+                'fiat',
                 'crypto',
             }
         ):

From 05af2b3e643a5ae3663f3d85d61819e3bf4a0258 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 3 Jul 2023 18:52:02 -0400
Subject: [PATCH 010/116] Rework `.accounting.Position` calcs to prep for
 `polars`

We're probably going to move to implementing all accounting using
`polars.DataFrame` and friends and thus this rejig preps for a much
more "stateless" implementation of our `Position` type and its
internal pos-accounting metrics: `ppu` and `cumsize`.

Summary:
- wrt `._pos.Position`:
  - rename `.size`/`.accum_size` to `.cumsize` to be more in line
    with `polars.DataFrame.cumsum()`.
  - make `Position.expiry` delegate to the underlying `.mkt: MktPair`
    handling (hopefully) all edge cases..
  - change over to a new `._events: dict[str, Transaction]` in prep
    for #510 (and friends) and enforce a new `Transaction.etype: str`
    which is by default `clear`.
  - add `.iter_by_type()` which iterates, filters and sorts the
    entries in `._events` from above.
  - add `Position.clearsdict()` which returns the dict-ified and
    datetime-sorted table which can more-or-less be stored in the
    toml account file.
  - add `.minimized_clears()`, a new (and close) version of the old
    method which always grabs at least one clear before
    a position-side-polarity-change.
  - mask-drop `.ensure_state()` since there are no more
    `.size`/`.ppu` state vars (per se) as we always re-calc the ppu
    and cumsize from the clears records on every read.
  - `.add_clear` no longer does bisect insorting since all sorting
    is done on position property *reads*.
  - move the PPU (price per unit) calculator to a new
    `.accounting.calc` as well as add in the `iter_by_dt()` clearing
    transaction sorted iterator.
    - also make some fixes to this to handle both lists of
      `Transaction` as well as `dict`s as before.
- start rename of `PpTable` -> `Account` and make a note about adding
  a `.balances` table.
- always `float()` the transaction size/price values since it seems
  that if they get processed as `tomlkit.Integer` there's some suuper
  weird double negative on read-then-write to the clears table?
  - something like `cumsize = -1` -> `cumsize = --1` !?!?
- make `load_pps_from_ledger()` work again but now includes some very
  very first draft `polars` df processing from a transaction ledger.
- use this from the `accounting.cli.disect` subcmd which is also in *super early draft* mode ;) - obviously as mentioned in the `Position` section, add the new `.calcs` module with a `.ppu()` calculator func B) --- piker/accounting/__init__.py | 21 +- piker/accounting/_allocate.py | 8 +- piker/accounting/_ledger.py | 64 +-- piker/accounting/_pos.py | 829 +++++++++++++++++----------------- piker/accounting/calc.py | 276 +++++++++++ piker/accounting/cli.py | 42 +- 6 files changed, 720 insertions(+), 520 deletions(-) create mode 100644 piker/accounting/calc.py diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index 707c66007..3246d9c85 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -21,8 +21,10 @@ ''' from ..log import get_logger -from ._ledger import ( +from .calc import ( iter_by_dt, +) +from ._ledger import ( Transaction, TransactionLedger, open_trade_ledger, @@ -100,20 +102,3 @@ def get_likely_pair( likely_dst = bs_mktid[:src_name_start] if likely_dst == dst: return bs_mktid - - -if __name__ == '__main__': - import sys - from pprint import pformat - - args = sys.argv - assert len(args) > 1, 'Specifiy account(s) from `brokers.toml`' - args = args[1:] - for acctid in args: - broker, name = acctid.split('.') - trans, updated_pps = load_pps_from_ledger(broker, name) - print( - f'Processing transactions into pps for {broker}:{acctid}\n' - f'{pformat(trans)}\n\n' - f'{pformat(updated_pps)}' - ) diff --git a/piker/accounting/_allocate.py b/piker/accounting/_allocate.py index b4345785e..deeec4989 100644 --- a/piker/accounting/_allocate.py +++ b/piker/accounting/_allocate.py @@ -118,9 +118,9 @@ def next_order_info( ld: int = mkt.size_tick_digits size_unit = self.size_unit - live_size = live_pp.size + live_size = live_pp.cumsize abs_live_size = abs(live_size) - abs_startup_size = abs(startup_pp.size) + abs_startup_size = abs(startup_pp.cumsize) u_per_slot, currency_per_slot = self.step_sizes() @@ -213,8 +213,6 @@ def next_order_info( slots_used = self.slots_used( Position( mkt=mkt, - size=order_size, - ppu=price, bs_mktid=mkt.bs_mktid, ) ) @@ -241,7 +239,7 @@ def slots_used( Calc and return the number of slots used by this ``Position``. ''' - abs_pp_size = abs(pp.size) + abs_pp_size = abs(pp.cumsize) if self.size_unit == 'currency': # live_currency_size = size or (abs_pp_size * pp.ppu) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 268a81fc8..b0061f0ae 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -25,15 +25,12 @@ from typing import ( Any, Callable, - Iterator, - Union, Generator ) from pendulum import ( datetime, DateTime, - from_timestamp, parse, ) import tomli_w # for fast ledger writing @@ -41,6 +38,9 @@ from .. import config from ..data.types import Struct from ..log import get_logger +from .calc import ( + iter_by_dt, +) from ._mktinfo import ( Symbol, # legacy MktPair, @@ -56,13 +56,14 @@ class Transaction(Struct, frozen=True): # once we have that as a required field, # we don't really need the fqme any more.. fqme: str - - tid: Union[str, int] # unique transaction id + tid: str | int # unique transaction id size: float price: float cost: float # commisions or other additional costs dt: datetime + etype: str = 'clear' + # TODO: we can drop this right since we # can instead expect the backend to provide this # via the `MktPair`? 
@@ -159,9 +160,9 @@ def iter_trans( # and instead call it for each entry incrementally: # normer = mod.norm_trade_record(txdict) - # TODO: use tx_sort here yah? + # datetime-sort and pack into txs for txdict in self.tx_sort(self.data.values()): - # for tid, txdict in self.data.items(): + # special field handling for datetimes # to ensure pendulum is used! tid: str = txdict['tid'] @@ -186,6 +187,7 @@ def iter_trans( # TODO: change to .sys! sym=mkt, expiry=parse(expiry) if expiry else None, + etype='clear', ) yield tid, tx @@ -208,62 +210,26 @@ def write_config( Render the self.data ledger dict to it's TOML file form. ''' - cpy = self.data.copy() towrite: dict[str, Any] = {} - for tid, trans in cpy.items(): + for tid, txdict in self.tx_sort(self.data.copy()): - # drop key for non-expiring assets - txdict = towrite[tid] = self.data[tid] + # write blank-str expiry for non-expiring assets if ( 'expiry' in txdict and txdict['expiry'] is None ): - txdict.pop('expiry') + txdict['expiry'] = '' # re-write old acro-key - fqme = txdict.get('fqsn') - if fqme: + if fqme := txdict.get('fqsn'): txdict['fqme'] = fqme + towrite[tid] = txdict + with self.file_path.open(mode='wb') as fp: tomli_w.dump(towrite, fp) -def iter_by_dt( - records: dict[str, dict[str, Any]] | list[dict], - - # NOTE: parsers are looked up in the insert order - # so if you know that the record stats show some field - # is more common then others, stick it at the top B) - parsers: dict[tuple[str], Callable] = { - 'dt': None, # parity case - 'datetime': parse, # datetime-str - 'time': from_timestamp, # float epoch - }, - key: Callable | None = None, - -) -> Iterator[tuple[str, dict]]: - ''' - Iterate entries of a ``records: dict`` table sorted by entry recorded - datetime presumably set at the ``'dt'`` field in each entry. - - ''' - def dyn_parse_to_dt(txdict: dict[str, Any]) -> DateTime: - k, v, parser = next( - (k, txdict[k], parsers[k]) for k in parsers if k in txdict - ) - return parser(v) if parser else v - - if isinstance(records, dict): - records = records.values() - - for entry in sorted( - records, - key=key or dyn_parse_to_dt, - ): - yield entry - - def load_ledger( brokername: str, acctid: str, diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 317a6d6a9..65eb67a82 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -22,10 +22,9 @@ ''' from __future__ import annotations -from bisect import insort +# from bisect import insort from contextlib import contextmanager as cm from decimal import Decimal -from math import copysign from pprint import pformat from pathlib import Path from typing import ( @@ -34,13 +33,16 @@ Generator ) +import polars as pl import pendulum -from pendulum import datetime, now +from pendulum import ( + datetime, + now, +) import tomlkit from ._ledger import ( Transaction, - iter_by_dt, open_trade_ledger, TransactionLedger, ) @@ -49,6 +51,10 @@ Asset, unpack_fqme, ) +from .calc import ( + ppu, + iter_by_dt, +) from .. import config from ..clearing._messages import ( BrokerdPosition, @@ -81,7 +87,7 @@ class Position(Struct): mkt: MktPair # can be +ve or -ve for long/short - size: float + # size: float # "price-per-unit price" above or below which pnl moves above and # below zero for the entirety of the current "trade state". The ppu @@ -89,7 +95,7 @@ class Position(Struct): # in one of a long/short "direction" (i.e. abs(.size_i) > 0 after # the next transaction given .size was > 0 before that tx, and vice # versa for -ve sized positions). 
- ppu: float + # ppu: float # TODO: break-even-price support! # bep: float @@ -103,75 +109,157 @@ class Position(Struct): _clears: list[ dict[str, Any], # transaction history summaries ] = [] - _events: dict[str, dict] = {} - first_clear_dt: datetime | None = None + + # _events: pl.DataFrame | None = None + _events: dict[str, Transaction | dict] = {} + + # first_clear_dt: datetime | None = None @property def expiry(self) -> datetime | None: - if exp := self.mkt.expiry: - return pendulum.parse(exp) + exp: str = self.mkt.expiry + match exp: + # empty str, 'perp' (contract) or simply a null + # signifies instrument with NO expiry. + case 'perp' | '' | None: + return None + + case str(): + return pendulum.parse(exp) + + case _: + raise ValueError( + f'Unhandled `MktPair.expiry`: `{exp}`' + ) + + # TODO: idea: "real LIFO" dynamic positioning. + # - when a trade takes place where the pnl for + # the (set of) trade(s) is below the breakeven price + # it may be that the trader took a +ve pnl on a short(er) + # term trade in the same account. + # - in this case we could recalc the be price to + # be reverted back to it's prior value before the nearest term + # trade was opened.? + # def bep() -> float: + # ... + + def clearsdict(self) -> dict[str, dict]: + clears: dict[str, dict] = ppu( + self.iter_by_type('clear'), + as_ledger=True + ) + return clears + + def iter_by_type( + self, + etype: str, + ) -> Iterator[dict | Transaction]: + ''' + Iterate the internally managed ``._events: dict`` table in + datetime-stamped order. + + ''' + # sort on the expected datetime field + for event in iter_by_dt( + self._events.values(), + key=lambda entry: + getattr(entry, 'dt', None) + or entry.get('dt'), + ): + match event: + case ( + { 'etype': _etype} | + Transaction(etype=str(_etype)) + ): + assert _etype == etype + yield event + + + def minimized_clears(self) -> dict[str, dict]: + ''' + Minimize the position's clears entries by removing + all transactions before the last net zero size except for when + a clear event causes a position "side" change (i.e. long to short + after a single fill) wherein we store the transaction prior to the + net-zero pass. + + This avoids unnecessary history irrelevant to the current + non-net-zero size state when serializing for offline storage. + + ''' + # scan for the last "net zero" position by iterating + # transactions until the next net-zero accum_size, rinse, + # repeat. + cumsize: float = 0 + clears_since_zero: list[dict] = [] - def __repr__(self) -> str: - return pformat(self.to_dict()) + for tid, cleardict in self.clearsdict().items(): + cumsize = float( + # self.mkt.quantize(cumsize + cleardict['tx'].size + self.mkt.quantize(cleardict['cumsize']) + ) + clears_since_zero.append(cleardict) + + # NOTE: always pop sign change since we just use it to + # determine which entry to clear "up to". + sign_change: bool = cleardict.pop('sign_change') + if cumsize == 0: + clears_since_zero = clears_since_zero[:-2] + # clears_since_zero.clear() + + elif sign_change: + clears_since_zero = clears_since_zero[:-1] + + return clears_since_zero def to_pretoml(self) -> tuple[str, dict]: ''' Prep this position's data contents for export as an entry in a TOML "account file" (such as `account.binance.paper.toml`) including re-structuring of - the ``._clears`` entries as an array of inline-subtables + the ``._events`` entries as an array of inline-subtables for better ``pps.toml`` compactness. 
''' - asdict = self.to_dict() - clears: list[dict] = asdict.pop('_clears') - events: dict[str, Transaction] = asdict.pop('_events') - - if self.split_ratio is None: - asdict.pop('split_ratio') - - # should be obvious from clears/event table - asdict.pop('first_clear_dt') - + mkt: MktPair = self.mkt + assert isinstance(mkt, MktPair) # TODO: we need to figure out how to have one top level # listing venue here even when the backend isn't providing # it via the trades ledger.. # drop symbol obj in serialized form - mkt: MktPair = asdict.pop('mkt') - assert isinstance(mkt, MktPair) - - fqme = mkt.fqme + fqme: str = mkt.fqme broker, mktep, venue, suffix = unpack_fqme(fqme) # an asset resolved mkt where we have ``Asset`` info about # each tradeable asset in the market. + asset_type: str = 'n/a' if mkt.resolved: dst: Asset = mkt.dst - asdict['asset_type'] = dst.atype - - asdict['price_tick'] = mkt.price_tick - asdict['size_tick'] = mkt.size_tick + asset_type = dst.atype + + asdict: dict[str, Any] = { + 'bs_mktid': self.bs_mktid, + 'expiry': self.expiry or '', + 'asset_type': asset_type, + 'price_tick': mkt.price_tick, + 'size_tick': mkt.size_tick, + } if exp := self.expiry: - asdict['expiry'] = exp.isoformat('T') + asdict['expiry'] = exp + clears_since_zero: list[dict] = self.minimized_clears() clears_table: tomlkit.Array = tomlkit.array() clears_table.multiline( multiline=True, indent='', ) - # reverse sort so latest clears are at top of section? - for entry in iter_by_dt(clears): - + for entry in clears_since_zero: inline_table = tomlkit.inline_table() - # serialize datetime to parsable `str` - inline_table['dt'] = entry['dt'].isoformat('T') - # assert 'Datetime' not in inline_table['dt'] - # insert optional clear fields in column order - for k in ['ppu', 'accum_size']: + for k in ['ppu', 'cumsize']: if val := entry.get(k): inline_table[k] = val @@ -179,61 +267,68 @@ def to_pretoml(self) -> tuple[str, dict]: for k in ['price', 'size', 'cost']: inline_table[k] = entry[k] + # serialize datetime to parsable `str` + inline_table['dt'] = entry['dt']#.isoformat('T') + # assert 'Datetime' not in inline_table['dt'] + tid: str = entry['tid'] - events.pop(tid) inline_table['tid'] = tid clears_table.append(inline_table) + # if val < 0: + # breakpoint() - assert not events + # assert not events asdict['clears'] = clears_table - return fqme, asdict - - def ensure_state(self) -> None: - ''' - Audit either the `.size` and `.ppu` local instance vars against - the clears table calculations and return the calc-ed values if - they differ and log warnings to console. - - ''' - clears: list[dict] = self._clears - self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt'] - last_clear: dict = clears[-1] - csize: float = self.calc_size() - accum: float = last_clear['accum_size'] - - if not self.expired(): - if ( - csize != accum - and csize != round(accum * (self.split_ratio or 1)) - ): - raise ValueError(f'Size mismatch: {csize}') - else: - assert csize == 0, 'Contract is expired but non-zero size?' 
- - if self.size != csize: - log.warning( - 'Position state mismatch:\n' - f'{self.size} => {csize}' - ) - self.size = csize - cppu: float = self.calc_ppu() - ppu: float = last_clear['ppu'] - if ( - cppu != ppu - and self.split_ratio is not None - - # handle any split info entered (for now) manually by user - and cppu != (ppu / self.split_ratio) - ): - raise ValueError(f'PPU mismatch: {cppu}') + return fqme, asdict - if self.ppu != cppu: - log.warning( - 'Position state mismatch:\n' - f'{self.ppu} => {cppu}' - ) - self.ppu = cppu + # def ensure_state(self) -> None: + # ''' + # Audit either the `.cumsize` and `.ppu` local instance vars against + # the clears table calculations and return the calc-ed values if + # they differ and log warnings to console. + + # ''' + # # clears: list[dict] = self._clears + + # # self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt'] + # last_clear: dict = clears[-1] + # csize: float = self.calc_size() + # accum: float = last_clear['accum_size'] + + # if not self.expired(): + # if ( + # csize != accum + # and csize != round(accum * (self.split_ratio or 1)) + # ): + # raise ValueError(f'Size mismatch: {csize}') + # else: + # assert csize == 0, 'Contract is expired but non-zero size?' + + # if self.cumsize != csize: + # log.warning( + # 'Position state mismatch:\n' + # f'{self.cumsize} => {csize}' + # ) + # self.cumsize = csize + + # cppu: float = self.calc_ppu() + # ppu: float = last_clear['ppu'] + # if ( + # cppu != ppu + # and self.split_ratio is not None + + # # handle any split info entered (for now) manually by user + # and cppu != (ppu / self.split_ratio) + # ): + # raise ValueError(f'PPU mismatch: {cppu}') + + # if self.ppu != cppu: + # log.warning( + # 'Position state mismatch:\n' + # f'{self.ppu} => {cppu}' + # ) + # self.ppu = cppu def update_from_msg( self, @@ -241,20 +336,23 @@ def update_from_msg( ) -> None: - # XXX: better place to do this? - mkt = self.mkt - size_tick_digits = mkt.size_tick_digits - price_tick_digits = mkt.price_tick_digits - - self.ppu = round( - # TODO: change this to ppu? - msg['avg_price'], - ndigits=price_tick_digits, - ) - self.size = round( - msg['size'], - ndigits=size_tick_digits, + mkt: MktPair = self.mkt + # we summarize the pos with a single summary transaction + # (for now) until we either pass THIS type as msg directly + # from emsd or come up with a better way? + t = Transaction( + fqme=mkt.bs_mktid, + sym=mkt, + bs_mktid=mkt.bs_mktid, + tid='unknown', + size=msg['size'], + price=msg['avg_price'], + cost=0, + + # TODO: also figure out how to avoid this! + dt=now(), ) + self.add_clear(t) @property def dsize(self) -> float: @@ -265,174 +363,74 @@ def dsize(self) -> float: ''' return self.ppu * self.size - # TODO: idea: "real LIFO" dynamic positioning. - # - when a trade takes place where the pnl for - # the (set of) trade(s) is below the breakeven price - # it may be that the trader took a +ve pnl on a short(er) - # term trade in the same account. - # - in this case we could recalc the be price to - # be reverted back to it's prior value before the nearest term - # trade was opened.? - # def lifo_price() -> float: - # ... - - def iter_clears(self) -> Iterator[tuple[str, dict]]: + def expired(self) -> bool: ''' - Iterate the internally managed ``._clears: dict`` table in - datetime-stamped order. + Predicate which checks if the contract/instrument is past its expiry. 
''' - # sort on the already existing datetime that should have - # been generated for the entry's table - return iter_by_dt( - self._clears, - key=lambda entry: entry['dt'] - ) + return bool(self.expiry) and self.expiry < now() - def calc_ppu( + def add_clear( self, - - # include transaction cost in breakeven price - # and presume the worst case of the same cost - # to exit this transaction (even though in reality - # it will be dynamic based on exit stratetgy). - cost_scalar: float = 2, - - ) -> float: + t: Transaction, + ) -> bool: ''' - Compute the "price-per-unit" price for the given non-zero sized - rolling position. - - The recurrence relation which computes this (exponential) mean - per new clear which **increases** the accumulative postiion size - is: - - ppu[-1] = ( - ppu[-2] * accum_size[-2] - + - ppu[-1] * size - ) / accum_size[-1] + Update clearing table by calculating the rolling ppu and + (accumulative) size in both the clears entry and local + attrs state. - where `cost_basis` for the current step is simply the price - * size of the most recent clearing transaction. + Inserts are always done in datetime sorted order. ''' - asize_h: list[float] = [] # historical accumulative size - ppu_h: list[float] = [] # historical price-per-unit - - entry: dict[str, Any] - for entry in self.iter_clears(): - clear_size = entry['size'] - clear_price: str | float = entry['price'] - is_clear: bool = not isinstance(clear_price, str) - - last_accum_size = asize_h[-1] if asize_h else 0 - accum_size = last_accum_size + clear_size - accum_sign = copysign(1, accum_size) - - sign_change: bool = False - - if accum_size == 0: - ppu_h.append(0) - asize_h.append(0) - continue - - # on transfers we normally write some non-valid - # price since withdrawal to another account/wallet - # has nothing to do with inter-asset-market prices. - # TODO: this should be better handled via a `type: 'tx'` - # field as per existing issue surrounding all this: - # https://github.com/pikers/piker/issues/510 - if isinstance(clear_price, str): - # TODO: we can't necessarily have this commit to - # the overall pos size since we also need to - # include other positions contributions to this - # balance or we might end up with a -ve balance for - # the position.. - continue - - # test if the pp somehow went "passed" a net zero size state - # resulting in a change of the "sign" of the size (+ve for - # long, -ve for short). - sign_change = ( - copysign(1, last_accum_size) + accum_sign == 0 - and last_accum_size != 0 - ) - - # since we passed the net-zero-size state the new size - # after sum should be the remaining size the new - # "direction" (aka, long vs. short) for this clear. - if sign_change: - clear_size = accum_size - abs_diff = abs(accum_size) - asize_h.append(0) - ppu_h.append(0) - - else: - # old size minus the new size gives us size diff with - # +ve -> increase in pp size - # -ve -> decrease in pp size - abs_diff = abs(accum_size) - abs(last_accum_size) - - # XXX: LIFO breakeven price update. only an increaze in size - # of the position contributes the breakeven price, - # a decrease does not (i.e. the position is being made - # smaller). 
- # abs_clear_size = abs(clear_size) - abs_new_size = abs(accum_size) - - if ( - abs_diff > 0 - and is_clear - ): - - cost_basis = ( - # cost basis for this clear - clear_price * abs(clear_size) - + - # transaction cost - accum_sign * cost_scalar * entry['cost'] - ) - - if asize_h: - size_last = abs(asize_h[-1]) - cb_last = ppu_h[-1] * size_last - ppu = (cost_basis + cb_last) / abs_new_size - - else: - ppu = cost_basis / abs_new_size - - ppu_h.append(ppu) - asize_h.append(accum_size) - - else: - # TODO: for PPU we should probably handle txs out - # (aka withdrawals) similarly by simply not having - # them contrib to the running PPU calc and only - # when the next entry clear comes in (which will - # then have a higher weighting on the PPU). + added: bool = False + tid: str = t.tid + if tid in self._events: + log.warning(f'{t} is already added?!') + return added + + # clear: dict[str, float | str | int] = { + # 'tid': t.tid, + # 'cost': t.cost, + # 'price': t.price, + # 'size': t.size, + # 'dt': t.dt + # } + self._events[tid] = t + return True + # insort( + # self._clears, + # clear, + # key=lambda entry: entry['dt'] + # ) - # on "exit" clears from a given direction, - # only the size changes not the price-per-unit - # need to be updated since the ppu remains constant - # and gets weighted by the new size. - asize_h.append(accum_size) - ppu_h.append(ppu_h[-1]) + # TODO: compute these incrementally instead + # of re-looping through each time resulting in O(n**2) + # behaviour..? - final_ppu = ppu_h[-1] if ppu_h else 0 + # NOTE: we compute these **after** adding the entry in order to + # make the recurrence relation math work inside + # ``.calc_size()``. + # self.size = clear['accum_size'] = self.calc_size() + # self.ppu = clear['ppu'] = self.calc_ppu() + # self.size: float = self.calc_size() + # self.ppu: float = self.calc_ppu() - # handle any split info entered (for now) manually by user - if self.split_ratio is not None: - final_ppu /= self.split_ratio + # assert len(self._events) == len(self._clears) + # return clear - return final_ppu + def calc_ppu(self) -> float: + return ppu(self.iter_by_type('clear')) - def expired(self) -> bool: - ''' - Predicate which checks if the contract/instrument is past its expiry. + # # return self.clearsdict() + # # ) + # return list(self.clearsdict())[-1][1]['ppu'] - ''' - return bool(self.expiry) and self.expiry < now() + @property + def ppu(self) -> float: + return round( + self.calc_ppu(), + ndigits=self.mkt.price_tick_digits, + ) def calc_size(self) -> float: ''' @@ -440,105 +438,47 @@ def calc_size(self) -> float: asset using the clears/trade event table; zero if expired. ''' - size: float = 0. - # time-expired pps (normally derivatives) are "closed" # and have a zero size. if self.expired(): return 0. - for entry in self._clears: - size += entry['size'] - # XXX: do we need it every step? - # no right since rounding is an LT? - # size = self.mkt.quantize( - # size + entry['size'], - # quantity_type='size', - # ) - - if self.split_ratio is not None: - size = round(size * self.split_ratio) - - return float( - self.mkt.quantize(size), - ) - - def minimize_clears( - self, - - ) -> dict[str, dict]: - ''' - Minimize the position's clears entries by removing - all transactions before the last net zero size to avoid - unnecessary history irrelevant to the current pp state. - - ''' - size: float = 0 - clears_since_zero: list[dict] = [] - - # TODO: we might just want to always do this when iterating - # a ledger? 
keep a state of the last net-zero and only do the - # full iterate when no state was stashed? - - # scan for the last "net zero" position by iterating - # transactions until the next net-zero size, rinse, repeat. - for clear in self._clears: - size = float( - self.mkt.quantize(size + clear['size']) - ) - clears_since_zero.append(clear) + clears: list[dict] = list(self.clearsdict().values()) + if clears: + return clears[-1]['cumsize'] + else: + return 0. - if size == 0: - clears_since_zero.clear() + # if self.split_ratio is not None: + # size = round(size * self.split_ratio) - self._clears = clears_since_zero - return self._clears + # return float( + # self.mkt.quantize(size), + # ) - def add_clear( - self, - t: Transaction, - ) -> dict: - ''' - Update clearing table by calculating the rolling ppu and - (accumulative) size in both the clears entry and local - attrs state. - - Inserts are always done in datetime sorted order. - - ''' - tid: str = t.tid - if tid in self._events: - log.warning(f'{t} is already added?!') - return {} + # TODO: ideally we don't implicitly recompute the + # full sequence from `.clearsdict()` every read.. + # the writer-updates-local-attr-state was actually kinda nice + # before, but sometimes led to hard to detect bugs when + # state was de-synced. + @property + def cumsize(self) -> float: - clear: dict[str, float | str | int] = { - 'tid': t.tid, - 'cost': t.cost, - 'price': t.price, - 'size': t.size, - 'dt': t.dt - } - self._events[tid] = t + if ( + self.expiry + and self.expiry < now() + ): + return 0 - insort( - self._clears, - clear, - key=lambda entry: entry['dt'] + return round( + self.calc_size(), + ndigits=self.mkt.size_tick_digits, ) - # TODO: compute these incrementally instead - # of re-looping through each time resulting in O(n**2) - # behaviour..? - - # NOTE: we compute these **after** adding the entry in order to - # make the recurrence relation math work inside - # ``.calc_size()``. - self.size = clear['accum_size'] = self.calc_size() - self.ppu = clear['ppu'] = self.calc_ppu() - - assert len(self._events) == len(self._clears) - - return clear + @property + def size(self) -> float: + log.warning('`Position.size` is deprecated, use `.cumsize`') + return self.cumsize # TODO: once we have an `.events` table with diff # mkt event types..? @@ -546,9 +486,7 @@ def add_clear( # ... -# TODO: maybe a better name is just `Account` and we include -# a table of asset balances as `.balances: dict[Asset, float]`? -class PpTable(Struct): +class Account(Struct): brokername: str acctid: str @@ -556,6 +494,9 @@ class PpTable(Struct): conf_path: Path conf: dict | None = {} + # TODO: track a table of asset balances as `.balances: + # dict[Asset, float]`? + def update_from_trans( self, trans: dict[str, Transaction], @@ -578,19 +519,17 @@ def update_from_trans( key=lambda t: t.dt, # reverse=True, ): - fqme = t.fqme - bs_mktid = t.bs_mktid + fqme: str = t.fqme + bs_mktid: str = t.bs_mktid # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. mkt: MktPair = t.sys - pos = pps.get(bs_mktid) - if not pos: + + if not (pos := pps.get(bs_mktid)): # if no existing pos, allocate fresh one. 
pos = pps[bs_mktid] = Position( mkt=mkt, - size=0.0, - ppu=0.0, bs_mktid=bs_mktid, ) else: @@ -602,33 +541,33 @@ def update_from_trans( if len(pos.mkt.fqme) < len(fqme): pos.mkt = mkt - clears: list[dict] = pos._clears - if clears: - first_clear_dt = pos.first_clear_dt - - # don't do updates for ledger records we already have - # included in the current pps state. - if ( - t.tid in clears - or ( - first_clear_dt - and t.dt < first_clear_dt - ) - ): - # NOTE: likely you'll see repeats of the same - # ``Transaction`` passed in here if/when you are restarting - # a ``brokerd.ib`` where the API will re-report trades from - # the current session, so we need to make sure we don't - # "double count" these in pp calculations. - continue + # clears: list[dict] = pos._clears + # if clears: + # # first_clear_dt = pos.first_clear_dt + + # # don't do updates for ledger records we already have + # # included in the current pps state. + # if ( + # t.tid in clears + # # or ( + # # first_clear_dt + # # and t.dt < first_clear_dt + # # ) + # ): + # # NOTE: likely you'll see repeats of the same + # # ``Transaction`` passed in here if/when you are restarting + # # a ``brokerd.ib`` where the API will re-report trades from + # # the current session, so we need to make sure we don't + # # "double count" these in pp calculations. + # continue # update clearing table pos.add_clear(t) updated[t.bs_mktid] = pos # re-calc ppu and accumulative sizing. - for bs_mktid, pos in updated.items(): - pos.ensure_state() + # for bs_mktid, pos in updated.items(): + # pos.ensure_state() # NOTE: deliver only the position entries that were # actually updated (modified the state) from the input @@ -657,29 +596,21 @@ def dump_active( pp_objs = self.pps for bs_mktid in list(pp_objs): - pp = pp_objs[bs_mktid] - pp.ensure_state() + pos = pp_objs[bs_mktid] + # pos.ensure_state() - if ( - # "net-zero" is a "closed" position - pp.size == 0 - - # time-expired pps (normally derivatives) are "closed" - or (pp.expiry and pp.expiry < now()) - ): - # for expired cases - pp.size = 0 - - # NOTE: we DO NOT pop the pp here since it can still be + # "net-zero" is a "closed" position + if pos.cumsize == 0: + # NOTE: we DO NOT pop the pos here since it can still be # used to check for duplicate clears that may come in as # new transaction from some backend API and need to be # ignored; the closed positions won't be written to the # ``pps.toml`` since ``pp_active_entries`` above is what's # written. - closed_pp_objs[bs_mktid] = pp + closed_pp_objs[bs_mktid] = pos else: - open_pp_objs[bs_mktid] = pp + open_pp_objs[bs_mktid] = pos return open_pp_objs, closed_pp_objs @@ -700,13 +631,14 @@ def to_toml( for bs_mktid, pos in active.items(): # NOTE: we only store the minimal amount of clears that make up this # position since the last net-zero state. - pos.minimize_clears() - pos.ensure_state() + # pos.minimize_clears() + # pos.ensure_state() # serialize to pre-toml form fqme, asdict = pos.to_pretoml() - assert 'Datetime' not in asdict['clears'][0]['dt'] + # clears: list[dict] = asdict['clears'] + # assert 'Datetime' not in [0]['dt'] log.info(f'Updating active pp: {fqme}') # XXX: ugh, it's cuz we push the section under @@ -769,12 +701,28 @@ def write_config(self) -> None: for entry in list(self.conf): del self.conf[entry] + # XXX WTF: if we use a tomlkit.Integer here we get this + # super weird --1 thing going on for cumsize!?1! + # NOTE: the fix was to always float() the size value loaded + # in open_pps() below! 
+ + # confclears = self.conf["tsla.nasdaq.ib"]['clears'] + # firstcum = confclears[0]['cumsize'] + # if firstcum: + # breakpoint() + config.write( config=self.conf, path=self.conf_path, fail_empty=False, ) + # breakpoint() + + +# TODO: move over all broker backend usage to new name.. +PpTable = Account + def load_account( brokername: str, @@ -928,8 +876,7 @@ def open_pps( toml_clears_list: list[dict[str, Any]] = entry['clears'] trans: list[Transaction] = [] for clears_table in toml_clears_list: - - tid = clears_table.get('tid') + tid = clears_table['tid'] dt: tomlkit.items.DateTime | str = clears_table['dt'] # woa cool, `tomlkit` will actually load datetimes into @@ -943,29 +890,33 @@ def open_pps( sym=mkt, bs_mktid=bs_mktid, tid=tid, - size=clears_table['size'], - price=clears_table['price'], + size=float(clears_table['size']), + price=float(clears_table['price']), cost=clears_table['cost'], dt=dt, )) - size = entry['size'] + # size = entry['size'] - # TODO: remove but, handle old field name for now - ppu = entry.get( - 'ppu', - entry.get('be_price', 0), - ) + # # TODO: remove but, handle old field name for now + # ppu = entry.get( + # 'ppu', + # entry.get('be_price', 0), + # ) split_ratio = entry.get('split_ratio') - if expiry := entry.get('expiry'): - expiry = pendulum.parse(expiry) + # if a string-ified expiry field is loaded we try to parse + # it, THO, they should normally be serialized as native + # TOML datetimes, since that's supported. + if ( + (expiry := entry.get('expiry')) + and isinstance(expiry, str) + ): + expiry: pendulum.DateTime = pendulum.parse(expiry) pp = pp_objs[bs_mktid] = Position( mkt, - size=size, - ppu=ppu, split_ratio=split_ratio, bs_mktid=bs_mktid, ) @@ -979,7 +930,7 @@ def open_pps( pp.add_clear(t) # audit entries loaded from toml - pp.ensure_state() + # pp.ensure_state() try: yield table @@ -994,10 +945,10 @@ def load_pps_from_ledger( acctname: str, # post normalization filter on ledger entries to be processed - filter_by_ids: list[str] | None = None, + filter_by_ids: dict[str, list[str]] | None = None, ) -> tuple[ - dict[str, Transaction], + pl.DataFrame, PpTable, ]: ''' @@ -1022,20 +973,52 @@ def load_pps_from_ledger( src_records: dict[str, Transaction] = mod.norm_trade_records( ledger ) - - if not filter_by_ids: - # records = src_records - records = ledger - - else: - records = {} - bs_mktids = set(map(str, filter_by_ids)) - - # for tid, recdict in ledger.items(): - for tid, r in src_records.items(): - if r.bs_mktid in bs_mktids: - records[tid] = r.to_dict() - - # updated = table.update_from_trans(records) - - return records, table + table.update_from_trans(src_records) + + fdf = df = pl.DataFrame( + list(rec.to_dict() for rec in src_records.values()), + # schema=[ + # ('tid', str), + # ('fqme', str), + # ('dt', str), + # ('size', pl.Float64), + # ('price', pl.Float64), + # ('cost', pl.Float64), + # ('expiry', str), + # ('bs_mktid', str), + # ], + ).sort('dt').select([ + pl.col('fqme'), + pl.col('dt').str.to_datetime(), + # pl.col('expiry').dt.datetime(), + pl.col('bs_mktid'), + pl.col('size'), + pl.col('price'), + ]) + # ppt = df.groupby('fqme').agg([ + # # TODO: ppu and bep !! + # pl.cumsum('size').alias('cumsum'), + # ]) + acts = df.partition_by('fqme', as_dict=True) + # ppt: dict[str, pl.DataFrame] = {} + # for fqme, ppt in act.items(): + # ppt.with_columuns + # # TODO: ppu and bep !! + # pl.cumsum('size').alias('cumsum'), + # ]) + + # filter out to the columns matching values filter passed + # as input. 
+ if filter_by_ids: + for col, vals in filter_by_ids.items(): + str_vals = set(map(str, vals)) + pred: pl.Expr = pl.col(col).eq(str_vals.pop()) + for val in str_vals: + pred |= pl.col(col).eq(val) + + fdf = df.filter(pred) + + bs_mktid: str = fdf[0]['bs_mktid'] + # pos: Position = table.pps[bs_mktid] + + return fdf, acts, table diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py new file mode 100644 index 000000000..034c810a6 --- /dev/null +++ b/piker/accounting/calc.py @@ -0,0 +1,276 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Calculation routines for balance and position tracking such that +you know when you're losing money (if possible) XD + +''' +from __future__ import annotations +from math import copysign +from typing import ( + Any, + Callable, + Iterator, + TYPE_CHECKING, +) + +from pendulum import ( + # datetime, + DateTime, + from_timestamp, + parse, +) + +if TYPE_CHECKING: + from ._ledger import ( + Transaction, + ) + + +def ppu( + clears: Iterator[Transaction], + + # include transaction cost in breakeven price + # and presume the worst case of the same cost + # to exit this transaction (even though in reality + # it will be dynamic based on exit stratetgy). + cost_scalar: float = 2, + + # return the ledger of clears as a (now dt sorted) dict with + # new position fields inserted alongside each entry. + as_ledger: bool = False, + +) -> float: + ''' + Compute the "price-per-unit" price for the given non-zero sized + rolling position. + + The recurrence relation which computes this (exponential) mean + per new clear which **increases** the accumulative postiion size + is: + + ppu[-1] = ( + ppu[-2] * accum_size[-2] + + + ppu[-1] * size + ) / accum_size[-1] + + where `cost_basis` for the current step is simply the price + * size of the most recent clearing transaction. + + ''' + asize_h: list[float] = [] # historical accumulative size + ppu_h: list[float] = [] # historical price-per-unit + ledger: dict[str, dict] = {} + + # entry: dict[str, Any] | Transaction + t: Transaction + for t in clears: + # tid: str = entry['tid'] + # clear_size = entry['size'] + clear_size: float = t.size + # clear_price: str | float = entry['price'] + clear_price: str | float = t.price + is_clear: bool = not isinstance(clear_price, str) + + last_accum_size = asize_h[-1] if asize_h else 0 + accum_size = last_accum_size + clear_size + accum_sign = copysign(1, accum_size) + + sign_change: bool = False + + if accum_size == 0: + ppu_h.append(0) + asize_h.append(0) + continue + + # on transfers we normally write some non-valid + # price since withdrawal to another account/wallet + # has nothing to do with inter-asset-market prices. 
+ # TODO: this should be better handled via a `type: 'tx'` + # field as per existing issue surrounding all this: + # https://github.com/pikers/piker/issues/510 + if isinstance(clear_price, str): + # TODO: we can't necessarily have this commit to + # the overall pos size since we also need to + # include other positions contributions to this + # balance or we might end up with a -ve balance for + # the position.. + continue + + # test if the pp somehow went "passed" a net zero size state + # resulting in a change of the "sign" of the size (+ve for + # long, -ve for short). + sign_change = ( + copysign(1, last_accum_size) + accum_sign == 0 + and last_accum_size != 0 + ) + + # since we passed the net-zero-size state the new size + # after sum should be the remaining size the new + # "direction" (aka, long vs. short) for this clear. + if sign_change: + clear_size = accum_size + abs_diff = abs(accum_size) + asize_h.append(0) + ppu_h.append(0) + + else: + # old size minus the new size gives us size diff with + # +ve -> increase in pp size + # -ve -> decrease in pp size + abs_diff = abs(accum_size) - abs(last_accum_size) + + # XXX: LIFO breakeven price update. only an increaze in size + # of the position contributes the breakeven price, + # a decrease does not (i.e. the position is being made + # smaller). + # abs_clear_size = abs(clear_size) + abs_new_size: float | int = abs(accum_size) + + if ( + abs_diff > 0 + and is_clear + ): + + cost_basis = ( + # cost basis for this clear + clear_price * abs(clear_size) + + + # transaction cost + # accum_sign * cost_scalar * entry['cost'] + accum_sign * cost_scalar * t.cost + ) + + if asize_h: + size_last = abs(asize_h[-1]) + cb_last = ppu_h[-1] * size_last + ppu = (cost_basis + cb_last) / abs_new_size + + else: + ppu = cost_basis / abs_new_size + + # ppu_h.append(ppu) + # asize_h.append(accum_size) + + else: + # TODO: for PPU we should probably handle txs out + # (aka withdrawals) similarly by simply not having + # them contrib to the running PPU calc and only + # when the next entry clear comes in (which will + # then have a higher weighting on the PPU). + + # on "exit" clears from a given direction, + # only the size changes not the price-per-unit + # need to be updated since the ppu remains constant + # and gets weighted by the new size. + ppu: float = ppu_h[-1] # set to previous value + # ppu_h.append(ppu_h[-1]) + # asize_h.append(accum_size) + + # extend with new rolling metric for this step + ppu_h.append(ppu) + asize_h.append(accum_size) + + # ledger[t.tid] = { + # 'tx': t, + ledger[t.tid] = t.to_dict() | { + 'ppu': ppu, + 'cumsize': accum_size, + 'sign_change': sign_change, + + # TODO: cumpnl, bep + } + + final_ppu = ppu_h[-1] if ppu_h else 0 + # TODO: once we have etypes in all ledger entries.. + # handle any split info entered (for now) manually by user + # if self.split_ratio is not None: + # final_ppu /= self.split_ratio + + if as_ledger: + return ledger + + else: + return final_ppu + + +def iter_by_dt( + records: ( + dict[str, dict[str, Any]] + | list[dict] + | list[Transaction] # XXX preferred! 
+ ), + + # NOTE: parsers are looked up in the insert order + # so if you know that the record stats show some field + # is more common then others, stick it at the top B) + parsers: dict[tuple[str], Callable] = { + 'dt': None, # parity case + 'datetime': parse, # datetime-str + 'time': from_timestamp, # float epoch + }, + key: Callable | None = None, + +) -> Iterator[tuple[str, dict]]: + ''' + Iterate entries of a transaction table sorted by entry recorded + datetime presumably set at the ``'dt'`` field in each entry. + + ''' + # isdict: bool = False + if isinstance(records, dict): + # isdict: bool = True + records = list(records.items()) + + def dyn_parse_to_dt( + tx: tuple[str, dict[str, Any]] | Transaction, + ) -> DateTime: + + # handle `.items()` inputs + if isinstance(tx, tuple): + tx = tx[1] + + # dict or tx object? + isdict: bool = isinstance(tx, dict) + + # get best parser for this record.. + for k in parsers: + if ( + isdict and k in tx + or getattr(tx, k, None) + ): + v = tx[k] if isdict else tx.dt + if v is None: + breakpoint() + + parser = parsers[k] + + # only call parser on the value if not None from the + # `parsers` table above, otherwise pass through the value + # and sort on it directly + return parser(v) if (parser is not None) else v + + else: + breakpoint() + + entry: tuple[str, dict] | Transaction + for entry in sorted( + records, + key=key or dyn_parse_to_dt, + ): + yield entry diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 290c1a5e5..a562e26e7 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -240,9 +240,13 @@ async def main(): def disect( # "fully_qualified_account_name" fqan: str, - bs_mktid: str, # for ib + fqme: str, # for ib pdb: bool = False, + bs_mktid: str = typer.Option( + None, + "-bid", + ), loglevel: str = typer.Option( 'error', "-l", @@ -255,36 +259,24 @@ def disect( brokername, account = pair # ledger: TransactionLedger - records: dict[str, dict] + # records: dict[str, dict] table: PpTable - records, table = load_pps_from_ledger( + df: pl.DataFrame # legder df + ppt: pl.DataFrame # piker position table + df, ppt, table = load_pps_from_ledger( brokername, account, - filter_by_ids={bs_mktid}, + filter_by_ids={'fqme': [fqme]}, ) - df = pl.DataFrame( - list(records.values()), - # schema=[ - # ('tid', str), - # ('fqme', str), - # ('dt', str), - # ('size', pl.Float64), - # ('price', pl.Float64), - # ('cost', pl.Float64), - # ('expiry', str), - # ('bs_mktid', str), - # ], - ).select([ - pl.col('fqme'), - pl.col('dt').str.to_datetime(), - # pl.col('expiry').dt.datetime(), - pl.col('size'), - pl.col('price'), - ]) - + # sers = [ + # pl.Series(e['fqme'], e['cumsum']) + # for e in ppt.to_dicts() + # ] + # ppt_by_id: pl.DataFrame = ppt.filter( + # pl.col('fqme') == fqme, + # ) assert not df.is_empty() breakpoint() - # tractor.pause_from_sync() # with open_trade_ledger( # brokername, # account, From 005023275e69be503130ac544134be8870e5971b Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 5 Jul 2023 10:46:57 -0400 Subject: [PATCH 011/116] Add a symbology cache subsys New mod is `.data._symcache` and it needs backend clients to declare `Client.get_assets()` and `.get_mkt_pairs()` to generate the cache files which now go in the config dir under `_cache/`. 
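[Editor's note: a minimal sketch (not from the patch itself) of the two
endpoints a backend `Client` is now expected to declare so the cache
generator can call them; the `_assets`/`_pairs` attrs are hypothetical
stand-ins for wherever a backend actually stashes its meta-data.]

    from piker.accounting import Asset
    from piker.data.types import Struct

    class Client:
        ...
        async def get_assets(self) -> dict[str, Asset]:
            # all money-system assets keyed by backend-native id,
            # normally filled from some prior (cached) API request
            return self._assets

        async def get_mkt_pairs(self) -> dict[str, Struct]:
            # backend-native pair structs keyed by backend market id
            return self._pairs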
--- piker/data/_symcache.py | 158 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 piker/data/_symcache.py diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py new file mode 100644 index 000000000..3ea054a51 --- /dev/null +++ b/piker/data/_symcache.py @@ -0,0 +1,158 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Mega-simple symbology cache via TOML files. + +Allow backend data providers and/or brokers to stash their +symbology sets (aka the meta data we normalize into our +`.accounting.MktPair` type) to the filesystem for faster lookup and +offline usage. + +''' +from __future__ import annotations +from contextlib import asynccontextmanager as acm +from pathlib import Path +from typing import Any + +import tomli_w # for fast symbol cache writing +try: + import tomllib +except ModuleNotFoundError: + import tomli as tomllib +from msgspec import field + +from ..log import get_logger +from .. import config +from ..brokers import open_cached_client +from .types import Struct +from ..accounting import ( + Asset, + # MktPair, +) + +log = get_logger('data.cache') + + +class AssetsInfo(Struct): + ''' + Asset meta-data cache which holds lookup tables for 3 sets of + market-symbology related struct-types required by the + `.accounting` and `.data` subsystems. + + ''' + provider: str + fp: Path + assets: dict[str, Asset] = field(default_factory=dict) + + # backend-system pairs loaded in provider (schema) specific + # structs. + pairs: dict[str, Struct] = field(default_factory=dict) + + # TODO: piker-normalized `.accounting.MktPair` table? + # loaded from the `.pairs` and a normalizer + # provided by the backend pkg. + # mkt_pairs: dict[str, MktPair] = field(default_factory=dict) + + def write_config(self) -> None: + cachedict: dict[str, Any] = { + 'assets': self.assets, + 'pairs': self.pairs, + } + try: + with self.fp.open(mode='wb') as fp: + tomli_w.dump(cachedict, fp) + except TypeError: + self.fp.unlink() + raise + + +_caches: dict[str, AssetsInfo] = {} + + +@acm +async def open_symbology_cache( + provider: str, + reload: bool = False, + +) -> AssetsInfo: + global _caches + + if not reload: + try: + yield _caches[provider] + except KeyError: + log.warning('No asset info cache exists yet for ' + f'`{provider}`') + + cachedir: Path = config.get_conf_dir() / '_cache' + if not cachedir.is_dir(): + log.info(f'Creating `nativedb` director: {cachedir}') + cachedir.mkdir() + + cachefile: Path = cachedir / f'{str(provider)}_symbology.toml' + + cache = AssetsInfo( + provider=provider, + fp=cachefile, + ) + + # if no cache exists or an explicit reload is requested, load + # the provider API and call appropriate endpoints to populate + # the mkt and asset tables. 
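+    # (editor's note, illustrative: given the naming scheme above
+    # a `binance` cache would land at something like
+    # `<conf_dir>/_cache/binance_symbology.toml`, where
+    # `<conf_dir>` is whatever `config.get_conf_dir()` resolves
+    # to.)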
+ if ( + reload + or not cachefile.is_file() + ): + async with open_cached_client(provider) as client: + + if get_assets := getattr(client, 'get_assets', None): + assets: dict[str, Asset] = await get_assets() + for bs_mktid, asset in assets.items(): + cache.assets[bs_mktid] = asset.to_dict() + else: + log.warning( + 'No symbology cache `Asset` support for `{provider}`..\n' + 'Implement `Client.get_assets()`!' + ) + + if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None): + for bs_mktid, pair in (await get_mkt_pairs()).items(): + cache.pairs[bs_mktid] = pair.to_dict() + else: + log.warning( + 'No symbology cache `Pair` support for `{provider}`..\n' + 'Implement `Client.get_mkt_pairs()`!' + ) + + # TODO: pack into `MktPair` normalized types as + # well? + + # only (re-)write if explicit reload or non-existing + cache.write_config() + else: + with cachefile.open('rb') as existing_fp: + data: dict[str, dict] = tomllib.load(existing_fp) + + for key, table in data.items(): + attr: dict[str, Any] = getattr(cache, key) + if attr != table: + log.info(f'OUT-OF-SYNC symbology cache: {key}') + + setattr(cache, key, table) + + yield cache + cache.write_config() From c8c28df62f671416d1d032be776324ed1dece915 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 6 Jul 2023 15:19:08 -0400 Subject: [PATCH 012/116] Much (much) better symbology cache refinements For starters rename the cache type to `SymbologyCache` and fill out its interface to include an (async) `.reload()` which can be used to populate the in-mem asset-table sets such that any tractor-runtime task can actually directly call it. Use a symcache file name schema of `_cache/.symcache.toml`. Dirtier deatz: - make `.open_symcache()` a `@cm` such that it can be used from sync code and will actually call `trio.run()` in the case where it needs to do a full (re)load; also don't write on exit only on reloads. - add `.get_symcache()` a simple non-ctx-mngr reader which again can mostly be called willy-nilly from sync code without the full runtime being up (but likely will only work if symcache files already exist for the backend). --- piker/data/_symcache.py | 164 ++++++++++++++++++++++++++++++---------- 1 file changed, 125 insertions(+), 39 deletions(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index 3ea054a51..4fb0fd644 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -24,9 +24,13 @@ ''' from __future__ import annotations -from contextlib import asynccontextmanager as acm +from contextlib import ( + # asynccontextmanager as acm, + contextmanager as cm, +) from pathlib import Path from typing import Any +from types import ModuleType import tomli_w # for fast symbol cache writing try: @@ -41,21 +45,24 @@ from .types import Struct from ..accounting import ( Asset, - # MktPair, + MktPair, ) log = get_logger('data.cache') -class AssetsInfo(Struct): +class SymbologyCache(Struct): ''' Asset meta-data cache which holds lookup tables for 3 sets of market-symbology related struct-types required by the `.accounting` and `.data` subsystems. ''' - provider: str + mod: ModuleType fp: Path + + # all asset-money-systems descriptions as minimally defined by + # in `.accounting.Asset` assets: dict[str, Asset] = field(default_factory=dict) # backend-system pairs loaded in provider (schema) specific @@ -65,13 +72,23 @@ class AssetsInfo(Struct): # TODO: piker-normalized `.accounting.MktPair` table? # loaded from the `.pairs` and a normalizer # provided by the backend pkg. 
- # mkt_pairs: dict[str, MktPair] = field(default_factory=dict) + mktmaps: dict[str, MktPair] = field(default_factory=dict) def write_config(self) -> None: - cachedict: dict[str, Any] = { + cachedict: dict[str, Any] = {} + for key, attr in { 'assets': self.assets, 'pairs': self.pairs, - } + # 'mktmaps': self.mktmaps, + }.items(): + if not attr: + log.warning( + f'Asset cache table for `{key}` is empty?' + ) + continue + + cachedict[key] = attr + try: with self.fp.open(mode='wb') as fp: tomli_w.dump(cachedict, fp) @@ -79,18 +96,58 @@ def write_config(self) -> None: self.fp.unlink() raise + async def load(self) -> None: + async with open_cached_client(self.mod.name) as client: -_caches: dict[str, AssetsInfo] = {} + if get_assets := getattr(client, 'get_assets', None): + assets: dict[str, Asset] = await get_assets() + for bs_mktid, asset in assets.items(): + self.assets[bs_mktid] = asset.to_dict() + else: + log.warning( + 'No symbology cache `Asset` support for `{provider}`..\n' + 'Implement `Client.get_assets()`!' + ) + if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None): + pairs: dict[str, Struct] = await get_mkt_pairs() + for bs_fqme, pair in pairs.items(): -@acm -async def open_symbology_cache( - provider: str, + entry = await self.mod.get_mkt_info(pair.bs_fqme) + if not entry: + continue + + mkt: MktPair + pair: Struct + mkt, _pair = entry + assert _pair is pair + self.pairs[pair.bs_fqme] = pair.to_dict() + self.mktmaps[mkt.fqme] = mkt + + else: + log.warning( + 'No symbology cache `Pair` support for `{provider}`..\n' + 'Implement `Client.get_mkt_pairs()`!' + ) + + return self + + +# actor-process-local in-mem-cache of symcaches (by backend). +_caches: dict[str, SymbologyCache] = {} + + +@cm +def open_symcache( + mod: ModuleType, reload: bool = False, -) -> AssetsInfo: - global _caches +) -> SymbologyCache: + provider: str = mod.name + + # actor-level cache-cache XD + global _caches if not reload: try: yield _caches[provider] @@ -103,10 +160,10 @@ async def open_symbology_cache( log.info(f'Creating `nativedb` director: {cachedir}') cachedir.mkdir() - cachefile: Path = cachedir / f'{str(provider)}_symbology.toml' + cachefile: Path = cachedir / f'{str(provider)}.symcache.toml' - cache = AssetsInfo( - provider=provider, + cache = SymbologyCache( + mod=mod, fp=cachefile, ) @@ -117,42 +174,71 @@ async def open_symbology_cache( reload or not cachefile.is_file() ): - async with open_cached_client(provider) as client: + log.info(f'GENERATING symbology cache for `{mod.name}`') - if get_assets := getattr(client, 'get_assets', None): - assets: dict[str, Asset] = await get_assets() - for bs_mktid, asset in assets.items(): - cache.assets[bs_mktid] = asset.to_dict() - else: - log.warning( - 'No symbology cache `Asset` support for `{provider}`..\n' - 'Implement `Client.get_assets()`!' - ) + import tractor + import trio - if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None): - for bs_mktid, pair in (await get_mkt_pairs()).items(): - cache.pairs[bs_mktid] = pair.to_dict() - else: - log.warning( - 'No symbology cache `Pair` support for `{provider}`..\n' - 'Implement `Client.get_mkt_pairs()`!' - ) + # spawn tractor runtime and generate cache + # if not existing. + async def sched_gen_symcache(): + + async with ( + # only for runtime + tractor.open_nursery(debug_mode=True), + ): + return await cache.load() - # TODO: pack into `MktPair` normalized types as - # well? 
+ cache: SymbologyCache = trio.run(sched_gen_symcache) # only (re-)write if explicit reload or non-existing cache.write_config() + else: + log.info( + f'Loading EXISTING `{mod.name}` symbology cache:\n' + f'> {cache.fp}' + ) + import time + now = time.time() with cachefile.open('rb') as existing_fp: data: dict[str, dict] = tomllib.load(existing_fp) + log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}') for key, table in data.items(): attr: dict[str, Any] = getattr(cache, key) - if attr != table: - log.info(f'OUT-OF-SYNC symbology cache: {key}') + assert not attr + # if attr != table: + # log.info(f'OUT-OF-SYNC symbology cache: {key}') setattr(cache, key, table) + # TODO: use a real profiling sys.. + # https://github.com/pikers/piker/issues/337 + log.info(f'SYMCACHE LOAD TIME: {time.time() - now}') + yield cache - cache.write_config() + + # TODO: write only when changes detected? but that should + # never happen right except on reload? + # cache.write_config() + + +def get_symcache( + provider: str, + force_reload: bool = False, + +) -> SymbologyCache: + ''' + Get any available symbology/assets cache from + sync code by manually running `trio` to do the work. + + ''' + from ..brokers import get_brokermod + + with open_symcache( + get_brokermod(provider), + reload=force_reload, + + ) as symcache: + return symcache From 309b91676d8472272d6a71f0bdde835d3dc045bc Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 13:31:43 -0400 Subject: [PATCH 013/116] Finally, support full `MktPair` + `Asset` msgs Previously we weren't necessarily serializing mkt pairs (for IPC msging) entirely bc the assets `.src/.dst` were being sent just by their str-names. This now properly supports fully serializing `Asset`s as `dict`-msgs such that use of `MktPair.to_dict()` can be transmitted over `tractor.MsgStream`s and deserialized entirely back to struct from on the receiver end. Deats: - implement `Asset.to_dict()` and `.from_msg()` - adjust `MktPair.to_dict()` and `.from_msg()` to use these methods. - drop all the hacky "if .src/.dst is str" handling. - add better `MktPair.from_fqme()` input handling for expiry and venue; ensure that either can be extracted from passed fqme *and* if so they are also popped from any duplicate passed in `**kwargs**`. --- piker/accounting/_mktinfo.py | 82 ++++++++++++++++++++++++------------ 1 file changed, 56 insertions(+), 26 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 6cab676b3..4532de6a6 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -130,8 +130,26 @@ class Asset(Struct, frozen=True): # should not be explicitly required in our generic API. info: dict | None = None - # TODO? - # _to_dict_skip = {'info'} + # `None` is not toml-compat so drop info + # if no extra data added.. + def to_dict(self) -> dict: + dct = super().to_dict() + if (info := dct.pop('info', None)): + dct['info'] = info + + assert dct['tx_tick'] + return dct + + @classmethod + def from_msg( + cls, + msg: dict[str, Any], + ) -> Asset: + return Asset( + tx_tick=Decimal(str(msg.pop('tx_tick'))), + info=msg.pop('info', None), + **msg, + ) def __str__(self) -> str: return self.name @@ -288,6 +306,8 @@ class MktPair(Struct, frozen=True): # strike price, call or put, swap type, exercise model, etc. contract_info: list[str] | None = None + # TODO: rename to sectype since all of these can + # be considered "securities"? 
_atype: str = '' # allow explicit disable of the src part of the market @@ -298,6 +318,18 @@ class MktPair(Struct, frozen=True): def __str__(self) -> str: return self.fqme + def to_dict(self) -> dict: + d = super().to_dict() + d['src'] = self.src.to_dict() + d['dst'] = self.dst.to_dict() + + if self.contract_info is None: + d.pop('contract_info') + + # d.pop('_fqme_without_src') + + return d + @classmethod def from_msg( cls, @@ -309,35 +341,20 @@ def from_msg( ''' dst_asset_msg = msg.pop('dst') - src_asset_msg = msg.pop('src') - - if isinstance(dst_asset_msg, str): - src: str = str(src_asset_msg) - assert isinstance(src, str) - return cls.from_fqme( - dst_asset_msg, - src=src, - **msg, - ) + dst = Asset.from_msg(dst_asset_msg) # .copy() - else: - # NOTE: we call `.copy()` here to ensure - # type casting! - dst = Asset(**dst_asset_msg).copy() - if not isinstance(src_asset_msg, str): - src = Asset(**src_asset_msg).copy() - else: - src = str(src_asset_msg) + src_asset_msg = msg.pop('src') + src = Asset.from_msg(src_asset_msg) # .copy() - return cls( - dst=dst, - src=src, - **msg, # XXX NOTE: ``msgspec`` can encode `Decimal` # but it doesn't decide to it by default since # we aren't spec-cing these msgs as structs, SO # we have to ensure we do a struct type case (which `.copy()` # does) to ensure we get the right type! + return cls( + dst=dst, + src=src, + **msg, ).copy() @property @@ -365,7 +382,20 @@ def from_fqme( ): _fqme = f'{fqme}.{broker}' - broker, mkt_ep_key, venue, suffix = unpack_fqme(_fqme) + broker, mkt_ep_key, venue, expiry = unpack_fqme(_fqme) + + kven: str = kwargs.pop('venue', venue) + if venue: + assert venue == kven + else: + venue = kven + + exp: str = kwargs.pop('expiry', expiry) + if expiry: + assert exp == expiry + else: + expiry = exp + dst: Asset = Asset.guess_from_mkt_ep_key( mkt_ep_key, atype=kwargs.get('_atype'), @@ -384,7 +414,7 @@ def from_fqme( venue=venue, # XXX NOTE: we presume this token # if the expiry for now! - expiry=suffix, + expiry=expiry, price_tick=price_tick, size_tick=size_tick, From 13f231b926dd22342a06a866dc827c02ea75b093 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 13:47:24 -0400 Subject: [PATCH 014/116] Decode cached mkts and assets back to structs B) As part of loading the cache we can now fill the asset sub-tables: `.mktmaps` and `.assets` with their deserialized struct instances! In theory this might be possible for the backend defined `Pair` structs as well but we need to figure out probably an endpoint to offer the conversion? Also, add a `SymbologyCache.search()` which allows sync code to scan the existing (known via cache) symbol set just like how async code can use the (much slower) `open_symbol_search()` ctx endpoint :boom: --- piker/data/_symcache.py | 97 +++++++++++++++++++++++++++++++++++------ 1 file changed, 84 insertions(+), 13 deletions(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index 4fb0fd644..799f2422f 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -29,9 +29,14 @@ contextmanager as cm, ) from pathlib import Path -from typing import Any +from pprint import pformat +from typing import ( + Any, + TYPE_CHECKING, +) from types import ModuleType +from fuzzywuzzy import process as fuzzy import tomli_w # for fast symbol cache writing try: import tomllib @@ -43,10 +48,12 @@ from .. 
import config from ..brokers import open_cached_client from .types import Struct -from ..accounting import ( - Asset, - MktPair, -) + +if TYPE_CHECKING: + from ..accounting import ( + Asset, + MktPair, + ) log = get_logger('data.cache') @@ -79,7 +86,6 @@ def write_config(self) -> None: for key, attr in { 'assets': self.assets, 'pairs': self.pairs, - # 'mktmaps': self.mktmaps, }.items(): if not attr: log.warning( @@ -89,6 +95,11 @@ def write_config(self) -> None: cachedict[key] = attr + # serialize mkts + mktmapsdict = cachedict['mktmaps'] = {} + for fqme, mkt in self.mktmaps.items(): + mktmapsdict[fqme] = mkt.to_dict() + try: with self.fp.open(mode='wb') as fp: tomli_w.dump(cachedict, fp) @@ -132,6 +143,24 @@ async def load(self) -> None: return self + def search( + self, + pattern: str, + + ) -> dict[str, Struct]: + + matches = fuzzy.extractBests( + pattern, + self.mktmaps, + score_cutoff=50, + ) + + # repack in dict[fqme, MktPair] form + return { + item[0].fqme: item[0] + for item in matches + } + # actor-process-local in-mem-cache of symcaches (by backend). _caches: dict[str, SymbologyCache] = {} @@ -200,18 +229,60 @@ async def sched_gen_symcache(): f'> {cache.fp}' ) import time + from ..accounting import ( + Asset, + MktPair, + ) + now = time.time() with cachefile.open('rb') as existing_fp: data: dict[str, dict] = tomllib.load(existing_fp) log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}') - for key, table in data.items(): - attr: dict[str, Any] = getattr(cache, key) - assert not attr - # if attr != table: - # log.info(f'OUT-OF-SYNC symbology cache: {key}') - - setattr(cache, key, table) + # load `dict` -> `Asset` + assettable = data.pop('assets') + for name, asdict in assettable.items(): + cache.assets[name] = Asset.from_msg(asdict) + + # load `dict` -> `MktPair` + dne: list[str] = [] + mkttable = data.pop('mktmaps') + for fqme, mktdict in mkttable.items(): + + # pull asset refs from (presumably) now previously + # loaded asset set above B) + src_k: str = mktdict.pop('src') + dst_k: str = mktdict.pop('dst') + src: Asset = cache.assets[src_k] + + dst: Asset + if not (dst := cache.assets.get(dst_k)): + dne.append(dst_k) + continue + + mkt = MktPair( + src=src, + dst=dst, + **mktdict, + ) + assert mkt.fqme == fqme + cache.mktmaps[fqme] = mkt + + log.warning( + f'These `MktPair.dst: Asset`s DNE says `{mod.name}` ?\n' + f'{pformat(dne)}' + ) + + # copy in backend specific pairs table directly without + # struct loading for now.. + pairtable = data.pop('pairs') + cache.pairs = pairtable + + # TODO: some kinda way to allow the backend + # to provide a struct-loader per entry? + # for key, pairtable in pairtable.items(): + # pair: Struct = cache.mod.load_pair(pairtable) + # cache.pairs[key] = pair # TODO: use a real profiling sys.. 
# https://github.com/pikers/piker/issues/337 From 3994fd83843f7b24489c3542d538b7802ce8c10f Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 14:19:26 -0400 Subject: [PATCH 015/116] Also handle `Decimal` interchange in `MktPair` msg-ification --- piker/accounting/_mktinfo.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 4532de6a6..534e9c06e 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -323,6 +323,9 @@ def to_dict(self) -> dict: d['src'] = self.src.to_dict() d['dst'] = self.dst.to_dict() + d['price_tick'] = str(self.price_tick) + d['size_tick'] = str(self.price_tick) + if self.contract_info is None: d.pop('contract_info') @@ -346,14 +349,20 @@ def from_msg( src_asset_msg = msg.pop('src') src = Asset.from_msg(src_asset_msg) # .copy() - # XXX NOTE: ``msgspec`` can encode `Decimal` - # but it doesn't decide to it by default since - # we aren't spec-cing these msgs as structs, SO - # we have to ensure we do a struct type case (which `.copy()` - # does) to ensure we get the right type! + # XXX NOTE: ``msgspec`` can encode `Decimal` but it doesn't + # decide to it by default since we aren't spec-cing these + # msgs as structs proper to get them to decode implictily + # (yet) as per, + # - https://github.com/pikers/piker/pull/354 + # - https://github.com/goodboy/tractor/pull/311 + # SO we have to ensure we do a struct type + # case (which `.copy()` does) to ensure we get the right + # type! return cls( dst=dst, src=src, + price_tick=Decimal(msg.pop('price_tick')), + size_tick=Decimal(msg.pop('size_tick')), **msg, ).copy() From ddc5f2b4410236f2b524effd2457f8d70b39f005 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 19:00:35 -0400 Subject: [PATCH 016/116] Use `MktPair.from_msg()` in symcache Since we now fully support interchange-as-dict-msg, use the msg codec API and drop manual `Asset` unpacking. Also, wrap `get_symcache()` in a `pdbp` crash handler block for now B) --- piker/data/_symcache.py | 60 +++++++++++++++++++++-------------------- 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index 799f2422f..ad3c7004b 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -239,6 +239,17 @@ async def sched_gen_symcache(): data: dict[str, dict] = tomllib.load(existing_fp) log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}') + # copy in backend specific pairs table directly without + # struct loading for now.. + pairtable = data.pop('pairs') + cache.pairs = pairtable + + # TODO: some kinda way to allow the backend + # to provide a struct-loader per entry? + # for key, pairtable in pairtable.items(): + # pair: Struct = cache.mod.load_pair(pairtable) + # cache.pairs[key] = pair + # load `dict` -> `Asset` assettable = data.pop('assets') for name, asdict in assettable.items(): @@ -249,23 +260,21 @@ async def sched_gen_symcache(): mkttable = data.pop('mktmaps') for fqme, mktdict in mkttable.items(): - # pull asset refs from (presumably) now previously - # loaded asset set above B) - src_k: str = mktdict.pop('src') - dst_k: str = mktdict.pop('dst') - src: Asset = cache.assets[src_k] + mkt = MktPair.from_msg(mktdict) + assert mkt.fqme == fqme + # sanity check asset refs from those (presumably) + # loaded asset set above. 
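+            # (editor's note, illustrative: the round-trip being
+            # relied on here is roughly,
+            #
+            #   assert MktPair.from_msg(mkt.to_dict()).fqme == mkt.fqme
+            #
+            # i.e. `.to_dict()` str-ifies the `Decimal` tick fields
+            # and `.from_msg()` re-casts them on load.)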
+ # src_k: str = pairtable.get('bs_src_asset, + src: Asset = cache.assets[mkt.src.name] + assert src == mkt.src dst: Asset - if not (dst := cache.assets.get(dst_k)): - dne.append(dst_k) + if not (dst := cache.assets.get(mkt.dst.name)): + dne.append(mkt.dst.name) continue + else: + assert dst.name == mkt.dst.name - mkt = MktPair( - src=src, - dst=dst, - **mktdict, - ) - assert mkt.fqme == fqme cache.mktmaps[fqme] = mkt log.warning( @@ -273,17 +282,6 @@ async def sched_gen_symcache(): f'{pformat(dne)}' ) - # copy in backend specific pairs table directly without - # struct loading for now.. - pairtable = data.pop('pairs') - cache.pairs = pairtable - - # TODO: some kinda way to allow the backend - # to provide a struct-loader per entry? - # for key, pairtable in pairtable.items(): - # pair: Struct = cache.mod.load_pair(pairtable) - # cache.pairs[key] = pair - # TODO: use a real profiling sys.. # https://github.com/pikers/piker/issues/337 log.info(f'SYMCACHE LOAD TIME: {time.time() - now}') @@ -307,9 +305,13 @@ def get_symcache( ''' from ..brokers import get_brokermod - with open_symcache( - get_brokermod(provider), - reload=force_reload, + try: + with open_symcache( + get_brokermod(provider), + reload=force_reload, - ) as symcache: - return symcache + ) as symcache: + return symcache + except BaseException: + import pdbp + pdbp.xpm() From 520414a096c95967de14ff1a7440e348b5412051 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 19:03:54 -0400 Subject: [PATCH 017/116] Oof, fix `.size` tick msg encode.. --- piker/accounting/_mktinfo.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 534e9c06e..2c180f251 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -324,7 +324,7 @@ def to_dict(self) -> dict: d['dst'] = self.dst.to_dict() d['price_tick'] = str(self.price_tick) - d['size_tick'] = str(self.price_tick) + d['size_tick'] = str(self.size_tick) if self.contract_info is None: d.pop('contract_info') @@ -416,8 +416,9 @@ def from_fqme( # which we expect to be filled in by some # backend client with access to that data-info. return cls( - # XXX: not resolved to ``Asset`` :( dst=dst, + # XXX: not resolved to ``Asset`` :( + #src=src, broker=broker, venue=venue, From 0e94e8937335df1c327e134c2419873dc217a475 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 19:40:51 -0400 Subject: [PATCH 018/116] Finally, just drop `Transaction.sym` Turns out we don't really need it directly for most "txn processing" AND if we do it's usually related to some `Account`-ing related calcs; which means we can instead just rely on the new `SymbologyCache` lookup to get it when needed. So, basically just get rid of it and rely instead on the `.fqme` to be the god-key to getting `MktPair` info (from the cache). Further, extend the `TransactionLedger` to contain much more info on the pertaining backend: - `.mod` mapping to the (pkg) py mod. - `.filepath` pointing to the actual ledger TOML file. - `_symcache` for doing any needed asset or mkt lookup as mentioned above. - rename `.iter_trans()` -> `.iter_txns()` and allow passing in a symcache or using the init provided one. - rename `.to_trans()` similarly. - delegate paper account txn processing to the `.clearing._paper_engine` mod's `norm_trade()` (and expect this similarly from other backends!) 
- use new `SymbologyCache.search()` to find the best but
  un-fully-qualified fqme for a given `txdict` being processed when
  writing a config (aka always try to expand to the most verbose `.fqme`
  possible).
- add a `rewrite: bool` control to `open_trade_ledger()`.
---
 piker/accounting/_ledger.py | 178 +++++++++++++++++++-----------------
 1 file changed, 95 insertions(+), 83 deletions(-)

diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py
index b0061f0ae..3f8f258c8 100644
--- a/piker/accounting/_ledger.py
+++ b/piker/accounting/_ledger.py
@@ -22,16 +22,19 @@
 from collections import UserDict
 from contextlib import contextmanager as cm
 from pathlib import Path
+from pprint import pformat
+from types import ModuleType
 from typing import (
     Any,
     Callable,
-    Generator
+    Generator,
+    TYPE_CHECKING,
 )

 from pendulum import (
     datetime,
-    DateTime,
-    parse,
+    # DateTime,
+    # parse,
 )
 import tomli_w  # for fast ledger writing
@@ -41,27 +44,37 @@
 from .calc import (
     iter_by_dt,
 )
-from ._mktinfo import (
-    Symbol,  # legacy
-    MktPair,
-    Asset,
-)
+
+if TYPE_CHECKING:
+    from ..data._symcache import (
+        SymbologyCache,
+    )

 log = get_logger(__name__)


 class Transaction(Struct, frozen=True):

-    # TODO: unify this with the `MktPair`,
-    # once we have that as a required field,
-    # we don't really need the fqme any more..
+    # NOTE: this is a unified acronym also used in our `MktPair`
+    # and can stand for any of a
+    # "fully qualified endpoint":
+    # - "market" in the case of financial trades
+    #   (btcusdt.spot.binance).
+    # - "merkle (tree)" aka a blockchain system's "wallet transfers"
+    #   (btc.blockchain)
+    # - "money" for traditional (digital databases)
+    #   *bank accounts* (usd.swift, eur.sepa)
     fqme: str
+
     tid: str | int  # unique transaction id
     size: float
     price: float
     cost: float  # commissions or other additional costs
     dt: datetime

+    # the "event type" in terms of "market events", see
+    # https://github.com/pikers/piker/issues/510 for where we're
+    # probably going with this.
     etype: str = 'clear'

     # TODO: we can drop this right since we
@@ -69,19 +82,6 @@
     # via the `MktPair`?
     expiry: datetime | None = None

-    # TODO: drop the Symbol type, construct using
-    # t.sys (the transaction system)
-
-    # the underlying "transaction system", normally one of a ``MktPair``
-    # (a description of a tradable double auction) or a ledger-recorded
-    # ("ledger" in any sense as long as you can record transfers) of any
-    # sort) ``Asset``.
-    sym: MktPair | Asset | Symbol | None = None
-
-    @property
-    def sys(self) -> Symbol:
-        return self.sym
-
     # (optional) key-id defined by the broker-service backend which
     # ensures the instrument-symbol market key for this record is unique
     # in the "their backend/system" sense; i.e. this uid for the market
@@ -90,14 +90,12 @@
     bs_mktid: str | int | None = None

     def to_dict(self) -> dict:
-        dct = super().to_dict()
-
-        # TODO: switch to sys!
- dct.pop('sym') + dct: dict[str, Any] = super().to_dict() # ensure we use a pendulum formatted # ISO style str here!@ dct['dt'] = str(self.dt) + return dct @@ -113,13 +111,35 @@ def __init__( self, ledger_dict: dict, file_path: Path, + account: str, + mod: ModuleType, # broker mod tx_sort: Callable, + symcache: SymbologyCache, ) -> None: - self.file_path = file_path - self.tx_sort = tx_sort + self.account: str = account + self.file_path: Path = file_path + self.mod: ModuleType = mod + self.tx_sort: Callable = tx_sort + + self._symcache: SymbologyCache = symcache + + # any added txns we keep in that form for meta-data + # gathering purposes + self._txns: dict[str, Transaction] = {} + super().__init__(ledger_dict) + def __repr__(self) -> str: + return ( + f'TransactionLedger: {len(self)}\n' + f'{pformat(list(self.data))}' + ) + + @property + def symcache(self) -> SymbologyCache: + return self._symcache + def update_from_t( self, t: Transaction, @@ -130,11 +150,11 @@ def update_from_t( ''' self.data[t.tid] = t.to_dict() + self._txns[t.tid] = t - def iter_trans( + def iter_txns( self, - mkt_by_fqme: dict[str, MktPair], - broker: str = 'paper', + symcache: SymbologyCache | None = None, ) -> Generator[ tuple[str, Transaction], @@ -146,73 +166,42 @@ def iter_trans( form via generator. ''' - if broker != 'paper': - raise NotImplementedError('Per broker support not dun yet!') + symcache = symcache or self._symcache - # TODO: lookup some standard normalizer - # func in the backend? - # from ..brokers import get_brokermod - # mod = get_brokermod(broker) - # trans_dict = mod.norm_trade_records(self.data) - - # NOTE: instead i propose the normalizer is - # a one shot routine (that can be lru cached) - # and instead call it for each entry incrementally: - # normer = mod.norm_trade_record(txdict) + if self.account == 'paper': + from piker.clearing import _paper_engine + norm_trade = _paper_engine.norm_trade + else: + norm_trade = self.mod.norm_trade # datetime-sort and pack into txs for txdict in self.tx_sort(self.data.values()): + txn = norm_trade(txdict) + yield txn.tid, txn - # special field handling for datetimes - # to ensure pendulum is used! - tid: str = txdict['tid'] - fqme: str = txdict.get('fqme') or txdict['fqsn'] - dt: DateTime = parse(txdict['dt']) - expiry: str | None = txdict.get('expiry') - - if not (mkt := mkt_by_fqme.get(fqme)): - # we can't build a trans if we don't have - # the ``.sys: MktPair`` info, so skip. - continue - - tx = Transaction( - fqme=fqme, - tid=txdict['tid'], - dt=dt, - price=txdict['price'], - size=txdict['size'], - cost=txdict.get('cost', 0), - bs_mktid=txdict['bs_mktid'], - - # TODO: change to .sys! - sym=mkt, - expiry=parse(expiry) if expiry else None, - etype='clear', - ) - yield tid, tx - - def to_trans( + def to_txns( self, - **kwargs, + symcache: SymbologyCache | None = None, ) -> dict[str, Transaction]: ''' - Return entire output from ``.iter_trans()`` in a ``dict``. + Return entire output from ``.iter_txns()`` in a ``dict``. ''' - return dict(self.iter_trans(**kwargs)) + return dict(self.iter_txns(symcache=symcache)) def write_config( self, ) -> None: ''' - Render the self.data ledger dict to it's TOML file form. + Render the self.data ledger dict to its TOML file form. + + ALWAYS order datetime sorted! 
''' towrite: dict[str, Any] = {} for tid, txdict in self.tx_sort(self.data.copy()): - # write blank-str expiry for non-expiring assets if ( 'expiry' in txdict @@ -221,9 +210,18 @@ def write_config( txdict['expiry'] = '' # re-write old acro-key - if fqme := txdict.get('fqsn'): - txdict['fqme'] = fqme + fqme: str = txdict.pop('fqsn', None) or txdict['fqme'] + + if fqme not in self._symcache.mktmaps: + best_fqme: str = list(self._symcache.search(fqme))[0] + log.warning( + f'Could not find FQME: {fqme} in qualified set?\n' + f'Qualifying and expanding {fqme} -> {best_fqme}' + ) + fqme = best_fqme + + txdict['fqme'] = fqme towrite[tid] = txdict with self.file_path.open(mode='wb') as fp: @@ -276,6 +274,7 @@ def open_trade_ledger( # default is to sort by detected datetime-ish field tx_sort: Callable = iter_by_dt, + rewrite: bool = False, ) -> Generator[TransactionLedger, None, None]: ''' @@ -287,18 +286,31 @@ def open_trade_ledger( name as defined in the user's ``brokers.toml`` config. ''' + from ..brokers import get_brokermod + mod: ModuleType = get_brokermod(broker) + ledger_dict, fpath = load_ledger(broker, account) cpy = ledger_dict.copy() + + from ..data._symcache import ( + get_symcache, + ) + symcache: SymbologyCache = get_symcache(broker) ledger = TransactionLedger( ledger_dict=cpy, file_path=fpath, + account=account, + mod=mod, + symcache=symcache, tx_sort=tx_sort, ) try: yield ledger finally: - if ledger.data != ledger_dict: - + if ( + ledger.data != ledger_dict + or rewrite + ): # TODO: show diff output? # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries log.info(f'Updating ledger for {fpath}:\n') From f5d4f58610c9526ed1e67d48b3eac6852b26c6fd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 7 Jul 2023 22:22:06 -0400 Subject: [PATCH 019/116] `Account` api update and refine Rename `open_pps()` -> `open_account()` for obvious reasons as well as expect a bit tighter integration with `SymbologyCache` and consequently `LedgerTransaction` in order to drop `Transaction.sym: MktPair` dependence when compiling / allocating new `Position`s from a ledger. Also we drop a bunch of prior attrs and do some cleaning, - `Position.first_clear_dt` we no longer sort during insert. - `._clears` now replaces by `._events` table. - drop the now masked `.ensure_state()` method (eventually moved to `.calc` submod for maybe-later-use). - drop `.sym=` from all remaining txns init calls. - clean out the `Position.add_clear()` method and only add the provided txn directly to the `._events` table. Improve some `Account` docs and interface: - fill out the main type descr. - add the backend broker modules as `Account.mod` allowing to drop `.brokername` as input and instead wrap as a `@property`. - make `.update_from_trans()` now a new `.update_from_ledger()` and expect either of a `TransactionLedger` (user-dict) or a dict of txns; in the latter case if we have not been also passed a symcache as input then runtime error since the symcache is necessary to allocate positions. - also, delegate to `TransactionLedger.iter_txns()` instead of a manual datetime sorted iter-loop. - drop all the clears datetime don't-insert-if-earlier-then-first logic. - rename `.to_toml()` -> `.prep_toml()`. - drop old `PpTable` alias. - rename `load_pps_from_ledger()` -> `load_account_from_ledger()` and make it only deliver the account instance and also move out all the `polars.DataFrame` related stuff (to `.calc`). And tweak some account clears table formatting, - store datetimes as TOML native equivs. 
- drop `be_price` fixing. - obvsly drop `.ensure_state()` call to pps. --- piker/accounting/_pos.py | 396 +++++++++++++++------------------------ 1 file changed, 149 insertions(+), 247 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 65eb67a82..fbb6997f6 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -22,18 +22,17 @@ ''' from __future__ import annotations -# from bisect import insort from contextlib import contextmanager as cm from decimal import Decimal from pprint import pformat from pathlib import Path +from types import ModuleType from typing import ( Any, Iterator, Generator ) -import polars as pl import pendulum from pendulum import ( datetime, @@ -43,7 +42,6 @@ from ._ledger import ( Transaction, - open_trade_ledger, TransactionLedger, ) from ._mktinfo import ( @@ -60,6 +58,7 @@ BrokerdPosition, ) from ..data.types import Struct +from ..data._symcache import SymbologyCache from ..log import get_logger log = get_logger(__name__) @@ -105,19 +104,12 @@ class Position(Struct): split_ratio: int | None = None - # ordered record of known constituent trade messages - _clears: list[ - dict[str, Any], # transaction history summaries - ] = [] - - # _events: pl.DataFrame | None = None + # TODO: use a `pl.DataFrame` intead? _events: dict[str, Transaction | dict] = {} - # first_clear_dt: datetime | None = None - @property def expiry(self) -> datetime | None: - exp: str = self.mkt.expiry + exp: str = self.mkt.expiry.lower() match exp: # empty str, 'perp' (contract) or simply a null # signifies instrument with NO expiry. @@ -188,7 +180,7 @@ def minimized_clears(self) -> dict[str, dict]: ''' # scan for the last "net zero" position by iterating - # transactions until the next net-zero accum_size, rinse, + # transactions until the next net-zero cumsize, rinse, # repeat. cumsize: float = 0 clears_since_zero: list[dict] = [] @@ -223,6 +215,7 @@ def to_pretoml(self) -> tuple[str, dict]: ''' mkt: MktPair = self.mkt assert isinstance(mkt, MktPair) + # TODO: we need to figure out how to have one top level # listing venue here even when the backend isn't providing # it via the trades ledger.. @@ -239,16 +232,19 @@ def to_pretoml(self) -> tuple[str, dict]: asdict: dict[str, Any] = { 'bs_mktid': self.bs_mktid, - 'expiry': self.expiry or '', + # 'expiry': self.expiry or '', 'asset_type': asset_type, 'price_tick': mkt.price_tick, 'size_tick': mkt.size_tick, } - if exp := self.expiry: asdict['expiry'] = exp clears_since_zero: list[dict] = self.minimized_clears() + + # setup a "multi-line array of inline tables" which we call + # the "clears table", contained by each position entry in + # an "account file". clears_table: tomlkit.Array = tomlkit.array() clears_table.multiline( multiline=True, @@ -267,69 +263,21 @@ def to_pretoml(self) -> tuple[str, dict]: for k in ['price', 'size', 'cost']: inline_table[k] = entry[k] - # serialize datetime to parsable `str` - inline_table['dt'] = entry['dt']#.isoformat('T') - # assert 'Datetime' not in inline_table['dt'] + # NOTE: we don't actually need to serialize datetime to parsable `str` + # since `tomlkit` supports a native `DateTime` but + # seems like we're not doing it entirely in clearing + # tables yet? 
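+            # (editor's note, illustrative: `tomlkit` will happily
+            # emit a native TOML datetime, e.g.
+            #
+            #   >>> tomlkit.dumps({'dt': pendulum.now()})
+            #   'dt = 2023-07-10T09:11:15.123456-04:00\n'
+            #
+            # so the `.isoformat('T')` cast commented below should
+            # only be needed when str-serialization is wanted.)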
+ inline_table['dt'] = entry['dt'] # .isoformat('T') tid: str = entry['tid'] inline_table['tid'] = tid clears_table.append(inline_table) - # if val < 0: - # breakpoint() # assert not events asdict['clears'] = clears_table return fqme, asdict - # def ensure_state(self) -> None: - # ''' - # Audit either the `.cumsize` and `.ppu` local instance vars against - # the clears table calculations and return the calc-ed values if - # they differ and log warnings to console. - - # ''' - # # clears: list[dict] = self._clears - - # # self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt'] - # last_clear: dict = clears[-1] - # csize: float = self.calc_size() - # accum: float = last_clear['accum_size'] - - # if not self.expired(): - # if ( - # csize != accum - # and csize != round(accum * (self.split_ratio or 1)) - # ): - # raise ValueError(f'Size mismatch: {csize}') - # else: - # assert csize == 0, 'Contract is expired but non-zero size?' - - # if self.cumsize != csize: - # log.warning( - # 'Position state mismatch:\n' - # f'{self.cumsize} => {csize}' - # ) - # self.cumsize = csize - - # cppu: float = self.calc_ppu() - # ppu: float = last_clear['ppu'] - # if ( - # cppu != ppu - # and self.split_ratio is not None - - # # handle any split info entered (for now) manually by user - # and cppu != (ppu / self.split_ratio) - # ): - # raise ValueError(f'PPU mismatch: {cppu}') - - # if self.ppu != cppu: - # log.warning( - # 'Position state mismatch:\n' - # f'{self.ppu} => {cppu}' - # ) - # self.ppu = cppu - def update_from_msg( self, msg: BrokerdPosition, @@ -337,12 +285,13 @@ def update_from_msg( ) -> None: mkt: MktPair = self.mkt - # we summarize the pos with a single summary transaction - # (for now) until we either pass THIS type as msg directly - # from emsd or come up with a better way? + + # NOTE WARNING XXX: we summarize the pos with a single + # summary transaction (for now) until we either pass THIS + # type as msg directly from emsd or come up with a better + # way? t = Transaction( - fqme=mkt.bs_mktid, - sym=mkt, + fqme=mkt.fqme, bs_mktid=mkt.bs_mktid, tid='unknown', size=msg['size'], @@ -357,15 +306,16 @@ def update_from_msg( @property def dsize(self) -> float: ''' - The "dollar" size of the pp, normally in trading (fiat) unit - terms. + The "dollar" size of the pp, normally in source asset + (fiat) units. ''' return self.ppu * self.size def expired(self) -> bool: ''' - Predicate which checks if the contract/instrument is past its expiry. + Predicate which checks if the contract/instrument is past + its expiry. ''' return bool(self.expiry) and self.expiry < now() @@ -388,36 +338,23 @@ def add_clear( log.warning(f'{t} is already added?!') return added - # clear: dict[str, float | str | int] = { - # 'tid': t.tid, - # 'cost': t.cost, - # 'price': t.price, - # 'size': t.size, - # 'dt': t.dt - # } - self._events[tid] = t - return True + # TODO: apparently this IS possible with a dict but not + # common and probably not that beneficial unless we're also + # going to do cum-calcs on each insert? + # https://stackoverflow.com/questions/38079171/python-insert-new-element-into-sorted-list-of-dictionaries + # from bisect import insort # insort( # self._clears, # clear, # key=lambda entry: entry['dt'] # ) + self._events[tid] = t + return True - # TODO: compute these incrementally instead - # of re-looping through each time resulting in O(n**2) - # behaviour..? - - # NOTE: we compute these **after** adding the entry in order to - # make the recurrence relation math work inside - # ``.calc_size()``. 
- # self.size = clear['accum_size'] = self.calc_size() - # self.ppu = clear['ppu'] = self.calc_ppu() - # self.size: float = self.calc_size() - # self.ppu: float = self.calc_ppu() - - # assert len(self._events) == len(self._clears) - # return clear - + # TODO: compute these incrementally instead + # of re-looping through each time resulting in O(n**2) + # behaviour..? Can we have some kinda clears len to cached + # output subsys? def calc_ppu(self) -> float: return ppu(self.iter_by_type('clear')) @@ -487,20 +424,50 @@ def size(self) -> float: class Account(Struct): + ''' + The real-time (double-entry accounting) state of + a given **asset ownership tracking system**, normally offered + or measured from some brokerage, CEX or (implied virtual) + summary crypto$ "wallets" aggregated and tracked over some set + of DEX-es. + + Both market-mapped and ledger-system-native (aka inter-account + "transfers") transactions are accounted and they pertain to + (implied) PnL relatve to any other accountable asset. + + More specifically in piker terms, an account tracks all of: - brokername: str + - the *balances* of all assets currently available for use either + in (future) market or (inter-account/wallet) transfer + transactions. + - a transaction *ledger* from a given brokerd backend whic + is a recording of all (know) such transactions from the past. + - a set of financial *positions* as measured from the current + ledger state. + + See the semantic origins from double-bookeeping: + https://en.wikipedia.org/wiki/Double-entry_bookkeeping + + ''' + mod: ModuleType acctid: str pps: dict[str, Position] + conf_path: Path conf: dict | None = {} # TODO: track a table of asset balances as `.balances: # dict[Asset, float]`? - def update_from_trans( + @property + def brokername(self) -> str: + return self.mod.name + + def update_from_ledger( self, - trans: dict[str, Transaction], + ledger: TransactionLedger, cost_scalar: float = 2, + symcache: SymbologyCache | None = None, ) -> dict[str, Position]: ''' @@ -509,24 +476,36 @@ def update_from_trans( accumulative size for each entry. ''' + if ( + not isinstance(ledger, TransactionLedger) + and symcache is None + ): + raise RuntimeError( + 'No ledger provided!\n' + 'We can not determine the `MktPair`s without a symcache..\n' + 'Please provide `symcache: SymbologyCache` when ' + 'processing NEW positions!' + ) + pps = self.pps updated: dict[str, Position] = {} # lifo update all pps from records, ensuring # we compute the PPU and size sorted in time! - for t in sorted( - trans.values(), - key=lambda t: t.dt, - # reverse=True, - ): - fqme: str = t.fqme - bs_mktid: str = t.bs_mktid + for tid, txn in ledger.iter_txns(): + # for t in sorted( + # trans.values(), + # key=lambda t: t.dt, + # ): + fqme: str = txn.fqme + bs_mktid: str = txn.bs_mktid # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. - mkt: MktPair = t.sys + mkt: MktPair = ledger._symcache.mktmaps[fqme] if not (pos := pps.get(bs_mktid)): + # if no existing pos, allocate fresh one. pos = pps[bs_mktid] = Position( mkt=mkt, @@ -541,33 +520,16 @@ def update_from_trans( if len(pos.mkt.fqme) < len(fqme): pos.mkt = mkt - # clears: list[dict] = pos._clears - # if clears: - # # first_clear_dt = pos.first_clear_dt - - # # don't do updates for ledger records we already have - # # included in the current pps state. 
- # if ( - # t.tid in clears - # # or ( - # # first_clear_dt - # # and t.dt < first_clear_dt - # # ) - # ): - # # NOTE: likely you'll see repeats of the same - # # ``Transaction`` passed in here if/when you are restarting - # # a ``brokerd.ib`` where the API will re-report trades from - # # the current session, so we need to make sure we don't - # # "double count" these in pp calculations. - # continue - - # update clearing table - pos.add_clear(t) - updated[t.bs_mktid] = pos - - # re-calc ppu and accumulative sizing. - # for bs_mktid, pos in updated.items(): - # pos.ensure_state() + # update clearing table! + # NOTE: likely you'll see repeats of the same + # ``Transaction`` passed in here if/when you are restarting + # a ``brokerd.ib`` where the API will re-report trades from + # the current session, so we need to make sure we don't + # "double count" these in pp calculations; + # `Position.add_clear()` stores txs in a `dict[tid, + # tx]` which should always ensure this is true B) + pos.add_clear(txn) + updated[txn.bs_mktid] = pos # NOTE: deliver only the position entries that were # actually updated (modified the state) from the input @@ -614,7 +576,7 @@ def dump_active( return open_pp_objs, closed_pp_objs - def to_toml( + def prep_toml( self, active: dict[str, Position] | None = None, @@ -629,12 +591,12 @@ def to_toml( pos: Position for bs_mktid, pos in active.items(): - # NOTE: we only store the minimal amount of clears that make up this - # position since the last net-zero state. - # pos.minimize_clears() # pos.ensure_state() # serialize to pre-toml form + # NOTE: we only store the minimal amount of clears that + # make up this position since the last net-zero state, + # see `Position.to_pretoml()` for details fqme, asdict = pos.to_pretoml() # clears: list[dict] = asdict['clears'] @@ -650,7 +612,8 @@ def to_toml( def write_config(self) -> None: ''' - Write the current position table to the user's ``pps.toml``. + Write the current account state to the user's account TOML file, normally + something like ``pps.toml``. ''' # TODO: show diff output? @@ -658,7 +621,7 @@ def write_config(self) -> None: # active, closed_pp_objs = table.dump_active() active, closed = self.dump_active() - pp_entries = self.to_toml(active=active) + pp_entries = self.prep_toml(active=active) if pp_entries: log.info( f'Updating positions in ``{self.conf_path}``:\n' @@ -705,24 +668,12 @@ def write_config(self) -> None: # super weird --1 thing going on for cumsize!?1! # NOTE: the fix was to always float() the size value loaded # in open_pps() below! - - # confclears = self.conf["tsla.nasdaq.ib"]['clears'] - # firstcum = confclears[0]['cumsize'] - # if firstcum: - # breakpoint() - config.write( config=self.conf, path=self.conf_path, fail_empty=False, ) - # breakpoint() - - -# TODO: move over all broker backend usage to new name.. -PpTable = Account - def load_account( brokername: str, @@ -784,12 +735,12 @@ def load_account( @cm -def open_pps( +def open_account( brokername: str, acctid: str, write_on_exit: bool = False, -) -> Generator[PpTable, None, None]: +) -> Generator[Account, None, None]: ''' Read out broker-specific position entries from incremental update file: ``pps.toml``. @@ -820,10 +771,12 @@ def open_pps( # engine proc if we decide to always spawn it?), # - do diffs against updates from the ledger writer # actor and the in-mem state here? 
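+    # (editor's note: the broker pkg module is resolved up-front
+    # below since the new `Account.mod: ModuleType` field replaces
+    # the old `.brokername: str`, cf. the `Account.brokername`
+    # property added earlier in this patch.)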
+ from ..brokers import get_brokermod + mod: ModuleType = get_brokermod(brokername) - pp_objs = {} - table = PpTable( - brokername, + pp_objs: dict[str, Position] = {} + table = Account( + mod, acctid, pp_objs, conf_path, @@ -831,12 +784,10 @@ def open_pps( ) # unmarshal/load ``pps.toml`` config entries into object form - # and update `PpTable` obj entries. + # and update `Account` obj entries. for fqme, entry in conf.items(): - # atype = entry.get('asset_type', '') - - # unique broker market id + # unique broker-backend-system market id bs_mktid = str( entry.get('bsuid') or entry.get('bs_mktid') @@ -860,7 +811,7 @@ def open_pps( fqme, price_tick=price_tick, size_tick=size_tick, - bs_mktid=bs_mktid + bs_mktid=bs_mktid, ) # TODO: RE: general "events" instead of just "clears": @@ -875,6 +826,7 @@ def open_pps( # for toml re-presentation) back into a master table. toml_clears_list: list[dict[str, Any]] = entry['clears'] trans: list[Transaction] = [] + for clears_table in toml_clears_list: tid = clears_table['tid'] dt: tomlkit.items.DateTime | str = clears_table['dt'] @@ -887,23 +839,18 @@ def open_pps( clears_table['dt'] = dt trans.append(Transaction( fqme=bs_mktid, - sym=mkt, + # sym=mkt, bs_mktid=bs_mktid, tid=tid, + # XXX: not sure why sometimes these are loaded as + # `tomlkit.Integer` and are eventually written with + # an extra `-` in front like `--1`? size=float(clears_table['size']), price=float(clears_table['price']), cost=clears_table['cost'], dt=dt, )) - # size = entry['size'] - - # # TODO: remove but, handle old field name for now - # ppu = entry.get( - # 'ppu', - # entry.get('be_price', 0), - # ) - split_ratio = entry.get('split_ratio') # if a string-ified expiry field is loaded we try to parse @@ -929,9 +876,6 @@ def open_pps( for t in trans: pp.add_clear(t) - # audit entries loaded from toml - # pp.ensure_state() - try: yield table finally: @@ -939,7 +883,21 @@ def open_pps( table.write_config() -def load_pps_from_ledger( +# TODO: drop the old name and THIS! +@cm +def open_pps( + *args, + **kwargs, +) -> Generator[Account, None, None]: + log.warning( + '`open_pps()` is now deprecated!\n' + 'Please use `with open_account() as cnt:`' + ) + with open_account(*args, **kwargs) as acnt: + yield acnt + + +def load_account_from_ledger( brokername: str, acctname: str, @@ -947,10 +905,9 @@ def load_pps_from_ledger( # post normalization filter on ledger entries to be processed filter_by_ids: dict[str, list[str]] | None = None, -) -> tuple[ - pl.DataFrame, - PpTable, -]: + ledger: TransactionLedger | None = None, + +) -> Account: ''' Open a ledger file by broker name and account and read in and process any trade records into our normalized ``Transaction`` form @@ -958,67 +915,12 @@ def load_pps_from_ledger( bs_mktid-mapped dict-sets of the transactions and pps. 
'''
- ledger: TransactionLedger
- table: PpTable
- with (
- open_trade_ledger(brokername, acctname) as ledger,
- open_pps(brokername, acctname) as table,
- ):
- if not ledger:
- # null case, no ledger file with content
- return {}
-
- from ..brokers import get_brokermod
- mod = get_brokermod(brokername)
- src_records: dict[str, Transaction] = mod.norm_trade_records(
- ledger
- )
- table.update_from_trans(src_records)
-
- fdf = df = pl.DataFrame(
- list(rec.to_dict() for rec in src_records.values()),
- # schema=[
- # ('tid', str),
- # ('fqme', str),
- # ('dt', str),
- # ('size', pl.Float64),
- # ('price', pl.Float64),
- # ('cost', pl.Float64),
- # ('expiry', str),
- # ('bs_mktid', str),
- # ],
- ).sort('dt').select([
- pl.col('fqme'),
- pl.col('dt').str.to_datetime(),
- # pl.col('expiry').dt.datetime(),
- pl.col('bs_mktid'),
- pl.col('size'),
- pl.col('price'),
- ])
- # ppt = df.groupby('fqme').agg([
- # # TODO: ppu and bep !!
- # pl.cumsum('size').alias('cumsum'),
- # ])
- acts = df.partition_by('fqme', as_dict=True)
- # ppt: dict[str, pl.DataFrame] = {}
- # for fqme, ppt in act.items():
- # ppt.with_columuns
- # # TODO: ppu and bep !!
- # pl.cumsum('size').alias('cumsum'),
- # ])
-
- # filter out to the columns matching values filter passed
- # as input.
- if filter_by_ids:
- for col, vals in filter_by_ids.items():
- str_vals = set(map(str, vals))
- pred: pl.Expr = pl.col(col).eq(str_vals.pop())
- for val in str_vals:
- pred |= pl.col(col).eq(val)
-
- fdf = df.filter(pred)
-
- bs_mktid: str = fdf[0]['bs_mktid']
- # pos: Position = table.pps[bs_mktid]
-
- return fdf, acts, table
+ acnt: Account
+ with open_pps(
+ brokername,
+ acctname,
+ ) as acnt:
+ if ledger is not None:
+ acnt.update_from_ledger(ledger)
+
+ return acnt

From 8f1983fd8ead69324b74ef92b3d0f00bb896c563 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Jul 2023 09:11:15 -0400
Subject: [PATCH 020/116] Move df loading into `calc.open_ledger_dfs()`

To isolate it from the ledger/account mods and because it is actually
for doing (eventual) position calcs / analysis, might as well put it in
this mod. Add in the old-masked `ensure_state()` method content in case
we want to use it later for testing. Also tighten up the parser loading
inside `dyn_parse_to_dt()`.
---
 piker/accounting/calc.py | 178 ++++++++++++++++++++++++++++++++++++---
 1 file changed, 166 insertions(+), 12 deletions(-)

diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py
index 034c810a6..629788502 100644
--- a/piker/accounting/calc.py
+++ b/piker/accounting/calc.py
@@ -20,6 +20,7 @@

 '''
 from __future__ import annotations
+from contextlib import contextmanager as cm
 from math import copysign
 from typing import (
 Any,
@@ -28,6 +29,7 @@
 TYPE_CHECKING,
 )

+import polars as pl
 from pendulum import (
 # datetime,
 DateTime,
 from_timestamp,
 parse,
@@ -40,7 +42,6 @@
 Transaction,
 )

-
 def ppu(
 clears: Iterator[Transaction],

@@ -219,7 +220,7 @@ def iter_by_dt(
 # NOTE: parsers are looked up in the insert order
 # so if you know that the record stats show some field
 # is more common then others, stick it at the top B)
- parsers: dict[tuple[str], Callable] = {
+ parsers: dict[str, Callable | None] = {
 'dt': None, # parity case
 'datetime': parse, # datetime-str
 'time': from_timestamp, # float epoch
@@ -232,10 +233,8 @@
 datetime presumably set at the ``'dt'`` field in each entry.
''' - # isdict: bool = False if isinstance(records, dict): - # isdict: bool = True - records = list(records.items()) + records: list[tuple[str, dict]] = list(records.items()) def dyn_parse_to_dt( tx: tuple[str, dict[str, Any]] | Transaction, @@ -255,17 +254,17 @@ def dyn_parse_to_dt( or getattr(tx, k, None) ): v = tx[k] if isdict else tx.dt - if v is None: - breakpoint() - - parser = parsers[k] + assert v is not None, f'No valid value for `{k}`!?' - # only call parser on the value if not None from the - # `parsers` table above, otherwise pass through the value - # and sort on it directly + # only call parser on the value if not None from + # the `parsers` table above (when NOT using + # `.get()`), otherwise pass through the value and + # sort on it directly + parser: Callable | None = parsers[k] return parser(v) if (parser is not None) else v else: + # XXX: should never get here.. breakpoint() entry: tuple[str, dict] | Transaction @@ -274,3 +273,158 @@ def dyn_parse_to_dt( key=key or dyn_parse_to_dt, ): yield entry + + +# TODO: probably just move this into the test suite or +# keep it here for use from as such? +# def ensure_state(self) -> None: +# ''' +# Audit either the `.cumsize` and `.ppu` local instance vars against +# the clears table calculations and return the calc-ed values if +# they differ and log warnings to console. + +# ''' +# # clears: list[dict] = self._clears + +# # self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt'] +# last_clear: dict = clears[-1] +# csize: float = self.calc_size() +# accum: float = last_clear['accum_size'] + +# if not self.expired(): +# if ( +# csize != accum +# and csize != round(accum * (self.split_ratio or 1)) +# ): +# raise ValueError(f'Size mismatch: {csize}') +# else: +# assert csize == 0, 'Contract is expired but non-zero size?' + +# if self.cumsize != csize: +# log.warning( +# 'Position state mismatch:\n' +# f'{self.cumsize} => {csize}' +# ) +# self.cumsize = csize + +# cppu: float = self.calc_ppu() +# ppu: float = last_clear['ppu'] +# if ( +# cppu != ppu +# and self.split_ratio is not None + +# # handle any split info entered (for now) manually by user +# and cppu != (ppu / self.split_ratio) +# ): +# raise ValueError(f'PPU mismatch: {cppu}') + +# if self.ppu != cppu: +# log.warning( +# 'Position state mismatch:\n' +# f'{self.ppu} => {cppu}' +# ) +# self.ppu = cppu + + +@cm +def open_ledger_dfs( + + brokername: str, + acctname: str, + +) -> dict[str, pl.DataFrame]: + ''' + Open a ledger of trade records (presumably from some broker + backend), normalize the records into `Transactions` via the + backend's declared endpoint, cast to a `polars.DataFrame` which + can update the ledger on exit. + + ''' + from ._ledger import ( + open_trade_ledger, + # Transaction, + TransactionLedger, + ) + + ledger: TransactionLedger + import time + now = time.time() + with ( + open_trade_ledger( + brokername, + acctname, + rewrite=True, + ) as ledger, + ): + if not ledger: + raise ValueError(f'No ledger for {acctname}@{brokername} exists?') + + print(f'LEDGER LOAD TIME: {time.time() - now}') + # if acctname == 'paper': + # txns: dict[str, Transaction] = ledger.to_trans() + # else: + + # process raw TOML ledger into txns using the + # appropriate backend normalizer. 
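+ # (NB: a 'paper' account's ledger is already stored in our
+ # normalized `Transaction` schema so it can be cast directly
+ # via `.to_txns()`; any other account's raw records have to
+ # go through the backend's declared `.norm_trade_records()`
+ # ep, hence the branch just below.)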
+ # cache: AssetsInfo = get_symcache( + # brokername, + # allow_reload=True, + # ) + + txns: dict[str, Transaction] + if acctname != 'paper': + txns = ledger.mod.norm_trade_records(ledger) + else: + txns = ledger.to_txns() + + ldf = pl.DataFrame( + list(txn.to_dict() for txn in txns.values()), + # schema=[ + # ('tid', str), + # ('fqme', str), + # ('dt', str), + # ('size', pl.Float64), + # ('price', pl.Float64), + # ('cost', pl.Float64), + # ('expiry', str), + # ('bs_mktid', str), + # ], + ).sort('dt').select([ + pl.col('fqme'), + pl.col('dt').str.to_datetime(), + # pl.col('expiry').dt.datetime(), + pl.col('bs_mktid'), + pl.col('size'), + pl.col('price'), + ]) + + # filter out to the columns matching values filter passed + # as input. + # if filter_by_ids: + # for col, vals in filter_by_ids.items(): + # str_vals = set(map(str, vals)) + # pred: pl.Expr = pl.col(col).eq(str_vals.pop()) + # for val in str_vals: + # pred |= pl.col(col).eq(val) + + # fdf = df.filter(pred) + + # bs_mktid: str = fdf[0]['bs_mktid'] + # pos: Position = acnt.pps[bs_mktid] + + # ppt = df.groupby('fqme').agg([ + # # TODO: ppu and bep !! + # pl.cumsum('size').alias('cumsum'), + # ]) + + dfs: dict[str, pl.DataFrame] = ldf.partition_by( + 'fqme', + as_dict=True, + ) + + # for fqme, ppt in act.items(): + # ppt.with_columns + # # TODO: ppu and bep !! + # pl.cumsum('size').alias('cumsum'), + # ]) + yield dfs From 3704e2ceacf603fbe9bf91160d46bd19eee819fe Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 09:13:59 -0400 Subject: [PATCH 021/116] Call `open_ledger_dfs()` for `disect` sub-cmd Drop all the old `polars` (groupby + agg related) mangling to get a df per fqme by delegating to the new routine and add in the `.cumsum()`ing (per frame) as a first start on computing pps using dfs instead of python dicts + loops as in `ppu()`. --- piker/accounting/cli.py | 54 ++++++++++++++++++++--------------------- 1 file changed, 26 insertions(+), 28 deletions(-) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index a562e26e7..30c147044 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -40,10 +40,8 @@ # open_trade_ledger, # TransactionLedger, ) -from ._pos import ( - PpTable, - load_pps_from_ledger, - # load_account, +from .calc import ( + open_ledger_dfs, ) @@ -241,8 +239,10 @@ def disect( # "fully_qualified_account_name" fqan: str, fqme: str, # for ib - pdb: bool = False, + # TODO: in tractor we should really have + # a debug_mode ctx for wrapping any kind of code no? 
+ pdb: bool = False,
 bs_mktid: str = typer.Option(
 None,
 "-bid",
 ),
 loglevel: str = typer.Option(
 'error',
 "-l",
 ),
):
+ from piker.log import get_console_log
+ get_console_log(loglevel)
+
 pair: tuple[str, str]
 if not (pair := unpack_fqan(fqan)):
 raise ValueError(f'{fqan} malformed!?')
 brokername, account = pair

- # ledger: TransactionLedger
- # records: dict[str, dict]
- table: PpTable
- df: pl.DataFrame # legder df
- ppt: pl.DataFrame # piker position table
- df, ppt, table = load_pps_from_ledger(
+ # ledger dfs groupby-partitioned by fqme
+ dfs: dict[str, pl.DataFrame]
+ with open_ledger_dfs(
 brokername,
 account,
- filter_by_ids={'fqme': [fqme]},
- )
- # sers = [
- # pl.Series(e['fqme'], e['cumsum'])
- # for e in ppt.to_dicts()
- # ]
- # ppt_by_id: pl.DataFrame = ppt.filter(
- # pl.col('fqme') == fqme,
- # )
- assert not df.is_empty()
- breakpoint()
- # with open_trade_ledger(
- # brokername,
- # account,
- # ) as ledger:
- # for tid, rec in ledger.items():
- # bs_mktid: str = rec['bs_mktid']
+ ) as dfs:
+
+ for key in dfs:
+ df = dfs[key]
+ dfs[key] = df.with_columns([
+ pl.cumsum('size').alias('cumsum'),
+ ])
+
+ ppt = dfs[fqme]
+ assert not df.is_empty()
+ assert not ppt.is_empty()
+
+ # TODO: we REALLY need a better console REPL for this
+ # kinda thing..
+ breakpoint()

From 749401e500c22e5caebac3d4881f0fc0470e0301 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Jul 2023 09:20:31 -0400
Subject: [PATCH 022/116] .accounting: expose new names at pkg top level

---
 piker/accounting/__init__.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py
index 3246d9c85..bac6e6492 100644
--- a/piker/accounting/__init__.py
+++ b/piker/accounting/__init__.py
@@ -30,11 +30,11 @@
 open_trade_ledger,
 )
 from ._pos import (
+ Account,
 load_account,
- load_pps_from_ledger,
+ load_account_from_ledger,
 open_pps,
 Position,
- PpTable,
 )
 from ._mktinfo import (
 Asset,
@@ -53,11 +53,11 @@
 log = get_logger(__name__)

 __all__ = [
+ 'Account',
 'Allocator',
 'Asset',
 'MktPair',
 'Position',
- 'PpTable',
 'Symbol',
 'Transaction',
 'TransactionLedger',
@@ -65,7 +65,7 @@
 'digits_to_dec',
 'iter_by_dt',
 'load_account',
- 'load_pps_from_ledger',
+ 'load_account_from_ledger',
 'mk_allocator',
 'open_pps',
 'open_trade_ledger',

From ff267890d124887dd4a0d70378561c5f51e6229b Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Jul 2023 09:21:12 -0400
Subject: [PATCH 023/116] Change cached-client hit msg to runtime level

---
 piker/brokers/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/piker/brokers/__init__.py b/piker/brokers/__init__.py
index 87a0446ad..8817842ed 100644
--- a/piker/brokers/__init__.py
+++ b/piker/brokers/__init__.py
@@ -106,6 +106,6 @@ async def open_cached_client(
 ) as (cache_hit, client):

 if cache_hit:
- log.info(f'Reusing existing {client}')
+ log.runtime(f'Reusing existing {client}')

 yield client

From 87185cf8bb630a9a527e78e80cbdab59eefb5b75 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Jul 2023 09:26:42 -0400
Subject: [PATCH 024/116] Drop `config` get/set/del apis..
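
For the record, the dropped helpers just did recursive dotted-key
traversal over a config `dict`; a usage sketch for porting any old
call sites (values purely illustrative):

    conf = {'binance': {'futes': {'api_key': 'XXX'}}}
    get_value(conf, 'binance.futes.api_key')  # -> 'XXX'
    set_value(conf, 'binance.futes.api_key', 'YYY')
    del_value(conf, 'binance.futes.api_key')

i.e. nothing you can't do with plain nested indexing like
`conf['binance']['futes']['api_key']`.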
---
 piker/config.py | 48 ------------------------------------------------
 1 file changed, 48 deletions(-)

diff --git a/piker/config.py b/piker/config.py
index 80f7b1d1a..3bbf99b4d 100644
--- a/piker/config.py
+++ b/piker/config.py
@@ -378,51 +378,3 @@ def load_accounts(

 accounts['paper'] = None

 return accounts
-
-
-# XXX: Recursive getting & setting
-
-def get_value(_dict, _section):
- subs = _section.split('.')
- if len(subs) > 1:
- return get_value(
- _dict[subs[0]],
- '.'.join(subs[1:]),
- )
-
- else:
- return _dict[_section]
-
-
-def set_value(_dict, _section, val):
- subs = _section.split('.')
- if len(subs) > 1:
- if subs[0] not in _dict:
- _dict[subs[0]] = {}
-
- return set_value(
- _dict[subs[0]],
- '.'.join(subs[1:]),
- val
- )
-
- else:
- _dict[_section] = val
-
-
-def del_value(_dict, _section):
- subs = _section.split('.')
- if len(subs) > 1:
- if subs[0] not in _dict:
- return
-
- return del_value(
- _dict[subs[0]],
- '.'.join(subs[1:])
- )
-
- else:
- if _section not in _dict:
- return
-
- del _dict[_section]

From 8f40e522efcb134a32a1fe42920c5e7d6c679d85 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Jul 2023 09:28:38 -0400
Subject: [PATCH 025/116] Add handy `DiffDump`ing for our `.types.Struct`

So you can do a `Struct1` - `Struct2` and we dump a little diff `list`
of tuples for analysis on the REPL B)

Probably can be broken out into its own micro-patch?
---
 piker/data/types.py | 57 ++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 56 insertions(+), 1 deletion(-)

diff --git a/piker/data/types.py b/piker/data/types.py
index 596195cca..c5591500e 100644
--- a/piker/data/types.py
+++ b/piker/data/types.py
@@ -21,7 +21,11 @@
 types.

 '''
-from pprint import pformat
+from collections import UserList
+from pprint import (
+ pformat,
+)
+from typing import Any

 from msgspec import (
 msgpack,
@@ -30,6 +34,33 @@
 )


+class DiffDump(UserList):
+ '''
+ Very simple list delegator that repr() dumps (presumed) tuple
+ elements of the form `tuple[str, Any, Any]` in a nice
+ multi-line readable form for analyzing `Struct` diffs.
+
+ '''
+ def __repr__(self) -> str:
+ if not len(self):
+ return super().__repr__()
+
+ # format by displaying item pair's ``repr()`` on multiple,
+ # indented lines such that they are more easily visually
+ # comparable when printed to console.
+ repstr: str = '[\n' + for k, left, right in self: + repstr += ( + f'({k},\n' + f'\t{repr(left)},\n' + f'\t{repr(right)},\n' + ')\n' + ) + repstr += ']\n' + return repstr + + class Struct( Struct, @@ -102,3 +133,27 @@ def typecast( fi.name, fi.type(getattr(self, fi.name)), ) + + def __sub__( + self, + other: Struct, + + ) -> DiffDump[tuple[str, Any, Any]]: + ''' + Compare fields/items key-wise and return a ``DiffDump`` + for easy visual REPL comparison B) + + ''' + diffs: DiffDump[tuple[str, Any, Any]] = DiffDump() + for fi in structs.fields(self): + attr_name: str = fi.name + ours: Any = getattr(self, attr_name) + theirs: Any = getattr(other, attr_name) + if ours != theirs: + diffs.append(( + attr_name, + ours, + theirs, + )) + + return diffs From c9681d0aa223498f75a6145b5696c3ec4c5a493d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 09:40:37 -0400 Subject: [PATCH 026/116] .nativedb: ignore an `expired/` subdir --- piker/storage/nativedb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/storage/nativedb.py b/piker/storage/nativedb.py index ff914245d..274bf0399 100644 --- a/piker/storage/nativedb.py +++ b/piker/storage/nativedb.py @@ -187,7 +187,7 @@ async def list_keys(self) -> list[str]: def index_files(self): for path in self._datadir.iterdir(): - if 'borked' in path.name: + if path.name in {'borked', 'expired',}: continue key: str = path.name.rstrip('.parquet') From 3c84ac326ab959389a1196ff6bf383bf546ada08 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 11:00:07 -0400 Subject: [PATCH 027/116] binance.venues: add pair-type specific asset keying Add `bs_src/dst_asset: str` properties which provide for unique keying into futures vs. spot venues by offering a `.venue: str` property which, for non-spot delivers normally an expiry suffix (eg. '.PERP') and for spot just delivers the bair chain-token key. This enables keying multiple venues with the same mkt pairs easily in a global flat key->pair table needed as part of supporting a symcache. 
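
As a quick sketch of the resulting keying (the values here are
illustrative only, but they follow the `.venue`/`.bs_src_asset`/
`.bs_dst_asset` props added below):

    # spot vs. a USDT-margined perp with the same `.symbol`:
    # SpotPair(symbol='BTCUSDT', ...).bs_src_asset  -> 'USDT'
    # SpotPair(symbol='BTCUSDT', ...).bs_dst_asset  -> 'BTC'
    # FutesPair(symbol='BTCUSDT', contractType='PERPETUAL',
    #           marginAsset='USDT', ...).venue       -> 'USDTM.PERP'
    # ...and that same pair's .bs_dst_asset          -> 'BTC.USDTM.PERP'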
--- piker/brokers/binance/venues.py | 51 ++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index fe822dd0d..08e1f050c 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -1,8 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) -# Guillermo Rodriguez (aka ze jefe) -# Tyler Goodlet -# (in stewardship for pikers) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -65,7 +62,7 @@ 'spot', # 'margin', 'usdtm_futes', - # 'coin_futes', + # 'coinm_futes', ] @@ -144,6 +141,13 @@ class SpotPair(Pair, frozen=True): def bs_fqme(self) -> str: return f'{self.symbol}.SPOT' + @property + def bs_src_asset(self) -> str: + return f'{self.quoteAsset}' + + @property + def bs_dst_asset(self) -> str: + return f'{self.baseAsset}' class FutesPair(Pair): @@ -176,32 +180,51 @@ def quoteAssetPrecision(self) -> int: return self.quotePrecision @property - def bs_fqme(self) -> str: + def venue(self) -> str: symbol: str = self.symbol ctype: str = self.contractType margin: str = self.marginAsset match ctype: case 'PERPETUAL': - return f'{symbol}.{margin}M.PERP' + return f'{margin}M.PERP' case 'CURRENT_QUARTER': - pair, _, expiry = symbol.partition('_') - return f'{pair}.{margin}M.{expiry}' + _, _, expiry = symbol.partition('_') + return f'{margin}M.{expiry}' case '': subtype: list[str] = self.underlyingSubType if not subtype: if self.status == 'PENDING_TRADING': - return f'{symbol}.{margin}M.PENDING' + return f'{margin}M.PENDING' - match subtype[0]: - case 'DEFI': - return f'{symbol}.{subtype}.PERP' + match subtype: + case ['DEFI']: + return f'{subtype[0]}.PERP' # XXX: yeah no clue then.. - return f'{symbol}.WTF.PWNED.BBQ' + return 'WTF.PWNED.BBQ' + + @property + def bs_fqme(self) -> str: + symbol: str = self.symbol + ctype: str = self.contractType + venue: str = self.venue + + match ctype: + case 'CURRENT_QUARTER': + symbol, _, expiry = symbol.partition('_') + return f'{symbol}.{venue}' + + @property + def bs_src_asset(self) -> str: + return f'{self.quoteAsset}' + + @property + def bs_dst_asset(self) -> str: + return f'{self.baseAsset}.{self.venue}' PAIRTYPES: dict[MarketType, Pair] = { From 19be8348e5cc59dc956133f6018508152984a179 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 11:17:29 -0400 Subject: [PATCH 028/116] binance.api: add venue qualified symcache support Meaning we add the `Client.get_assets()` and `.get_mkt_pairs()` methods. Also implement `.exch_info()` to take in a `expiry: str` to detect whether to look up a derivative venue instead of spot. In support of all this we now explicitly key all assets (via `._cache_pairs() during the populate of `._venue2assets` sub-tables) with their `.bs_dst_asset: str` value to ensure, for ex., a spot `BTCUSDT` has a distinct value from any futures contracts with the same `Pair.symbol: str` value! 
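
Concretely this means the per-venue asset (sub-)tables end up keyed
roughly like the following (key values illustrative):

    client._venue2assets['spot']['BTC']                    # spot dst `Asset`
    client._venue2assets['usdtm_futes']['BTC.USDTM.PERP']  # perp dst `Asset`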
Also, ensure we always create a `brokers.toml` (from template) if DNE and binance is the user's first used backend XD --- piker/brokers/binance/api.py | 126 +++++++++++++++++++++++++++++------ 1 file changed, 105 insertions(+), 21 deletions(-) diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 98457ace4..2ade69ed4 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -78,7 +78,7 @@ def get_config() -> dict: conf: dict path: Path - conf, path = config.load() + conf, path = config.load(touch_if_dne=True) section = conf.get('binance') @@ -396,7 +396,6 @@ async def _cache_pairs( ) -> None: # lookup internal mkt-specific pair table to update pair_table: dict[str, Pair] = self._venue2pairs[venue] - asset_table: dict[str, Asset] = self._venue2assets[venue] # make API request(s) resp = await self._api( @@ -408,6 +407,7 @@ async def _cache_pairs( venue=venue, allow_testnet=False, # XXX: never use testnet for symbol lookups ) + mkt_pairs = resp['symbols'] if not mkt_pairs: raise SymbolNotFound(f'No market pairs found!?:\n{resp}') @@ -432,21 +432,45 @@ async def _cache_pairs( # `._pairs: ChainMap` for search B0 pairs_view_subtable[pair.bs_fqme] = pair + # XXX WOW: TURNS OUT THIS ISN'T TRUE !? + # > (populate `Asset` table for spot mkts only since it + # > should be a superset of any other venues such as + # > futes or margin) if venue == 'spot': - if (name := pair.quoteAsset) not in asset_table: - asset_table[name] = Asset( - name=name, - atype='crypto_currency', - tx_tick=digits_to_dec(pair.quoteAssetPrecision), - ) - - if (name := pair.baseAsset) not in asset_table: - asset_table[name] = Asset( - name=name, - atype='crypto_currency', - tx_tick=digits_to_dec(pair.baseAssetPrecision), - ) + dst_sectype: str = 'crypto_currency' + + elif venue in {'usdtm_futes'}: + dst_sectype: str = 'future' + if pair.contractType == 'PERPETUAL': + dst_sectype: str = 'perpetual_future' + + spot_asset_table: dict[str, Asset] = self._venue2assets['spot'] + ven_asset_table: dict[str, Asset] = self._venue2assets[venue] + + if ( + (name := pair.quoteAsset) not in spot_asset_table + ): + spot_asset_table[pair.bs_src_asset] = Asset( + name=name, + atype='crypto_currency', + tx_tick=digits_to_dec(pair.quoteAssetPrecision), + ) + + if ( + (name := pair.baseAsset) not in ven_asset_table + ): + if venue != 'spot': + assert dst_sectype != 'crypto_currency' + ven_asset_table[pair.bs_dst_asset] = Asset( + name=name, + atype=dst_sectype, + tx_tick=digits_to_dec(pair.baseAssetPrecision), + ) + + # log.warning( + # f'Assets not YET found in spot set: `{pformat(dne)}`!?' + # ) # NOTE: make merged view of all market-type pairs but # use market specific `Pair.bs_fqme` for keys! # this allows searching for market pairs with different @@ -458,16 +482,29 @@ async def _cache_pairs( if venue == 'spot': return - assets: list[dict] = resp.get('assets', ()) - for entry in assets: - name: str = entry['asset'] - asset_table[name] = self._venue2assets['spot'].get(name) + # TODO: maybe use this assets response for non-spot venues? + # -> issue is we do the exch_info queries conc, so we can't + # guarantee order for inter-table lookups.. + # if venue ep delivers an explicit set of assets copy just + # ensure they are also already listed in the spot equivs. 
+ # assets: list[dict] = resp.get('assets', ())
+ # for entry in assets:
+ # name: str = entry['asset']
+ # spot_asset_table: dict[str, Asset] = self._venue2assets['spot']
+ # if name not in spot_asset_table:
+ # log.warning(
+ # f'COULDNT FIND ASSET {name}\n{entry}\n'
+ # f'ADDING AS FUTES ONLY!?'
+ # )
+ # asset_table: dict[str, Asset] = self._venue2assets[venue]
+ # asset_table[name] = spot_asset_table.get(name)

 async def exch_info(
 self,
 sym: str | None = None,
 venue: MarketType | None = None,
+ expiry: str | None = None,

 ) -> dict[str, Pair] | Pair:
 '''
@@ -485,7 +522,16 @@ async def exch_info(
 pair_table: dict[str, Pair] = self._venue2pairs[
 venue or self.mkt_mode
 ]
- if cached_pair := pair_table.get(sym):
+ if (
+ expiry
+ and 'perp' not in expiry.lower()
+ ):
+ sym: str = f'{sym}_{expiry}'
+
+ if (
+ sym
+ and (cached_pair := pair_table.get(sym))
+ ):
 return cached_pair

 venues: list[str] = ['spot', 'usdtm_futes']
@@ -500,7 +546,45 @@
 ven,
 )

- return pair_table[sym] if sym else self._pairs
+ if sym:
+ return pair_table[sym]
+ else:
+ return self._pairs

+ async def get_assets(
+ self,
+ venue: str | None = None,

+ ) -> dict[str, Asset]:
+ if (
+ venue
+ and venue != 'spot'
+ ):
+ venues = [venue]
+ else:
+ venues = ['usdtm_futes']

+ # (copy so we don't mutate the cached spot table in-place
+ # via the `|=` merges below)
+ ass_table: dict[str, Asset] = dict(self._venue2assets['spot'])

+ # merge in futes contracts with a sectype suffix
+ for venue in venues:
+ ass_table |= self._venue2assets[venue]

+ return ass_table


+ async def get_mkt_pairs(self) -> dict[str, Pair]:
+ '''
+ Flatten the multi-venue (chain) map of market pairs
+ to a fqme indexed table for data layer caching.

+ '''
+ flat: dict[str, Pair] = {}
+ for venmap in self._pairs.maps:
+ for bs_fqme, pair in venmap.items():
+ flat[pair.bs_fqme] = pair

+ return flat

 # TODO: unused except by `brokers.core.search_symbols()`?
 async def search_symbols(

From a2c6749112bfea16b6dd07e9a83fc89382594c16 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 10 Jul 2023 11:26:24 -0400
Subject: [PATCH 029/116] binance.feed: use `Client.get_assets()` for mkt pairs

Instead of constructing them (previously manually) in `.get_mkt_info()`
ep, just call `.get_assets()` and do key lookups for assets to hand
directly to the `.src/dst` of `MktPair`.

Refine fqme input parsing to match:
- adjust parsing logic to only use `unpack_fqme()` on the input fqme
  token.
- set `.mkt_mode: str` to the derivs venue when an expiry token is
  detected in the fqme.
- pass the parsed `expiry: str` to `Client.exch_info()` to ensure
  a deriv venue (table) is used for pair lookup.
- skip any "DEFI" venue or other unknown asset type cases (since
  binance doesn't seem to define some assets anywhere?).

Also, just use the `Client._pairs` unified table for search input since
the first call to `.exch_info()` won't necessarily contain the most
up-to-date state whereas `._pairs` always will.
---
 piker/brokers/binance/broker.py | 2 +-
 piker/brokers/binance/feed.py | 111 ++++++++++++++++++++++----------
 2 files changed, 77 insertions(+), 36 deletions(-)

diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py
index f063bee11..04042f446 100644
--- a/piker/brokers/binance/broker.py
+++ b/piker/brokers/binance/broker.py
@@ -400,7 +400,7 @@ async def open_trade_dialog(
 # and comparison with binance's own position calcs.
# - load pps and accounts using accounting apis, write # the ledger and account files - # - table: PpTable + # - table: Account # - ledger: TransactionLedger async with ( diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 66a0bff04..8c5965f9f 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -24,8 +24,11 @@ aclosing, ) from datetime import datetime -from functools import partial +from functools import ( + partial, +) import itertools +from pprint import pformat from typing import ( Any, AsyncGenerator, @@ -54,7 +57,6 @@ DerivTypes, MktPair, unpack_fqme, - digits_to_dec, ) from piker.data.types import Struct from piker.data.validate import FeedInit @@ -277,69 +279,107 @@ async def get_ohlc( async def get_mkt_info( fqme: str, -) -> tuple[MktPair, Pair]: +) -> tuple[MktPair, Pair] | None: # uppercase since kraken bs_mktid is always upper - if 'binance' not in fqme: + if 'binance' not in fqme.lower(): fqme += '.binance' - bs_fqme, _, broker = fqme.rpartition('.') + mkt_mode: str = '' broker, mkt_ep, venue, expiry = unpack_fqme(fqme) + venue: str = venue.lower() + + # XXX TODO: we should change the usdtm_futes name to just + # usdm_futes (dropping the tether part) since it turns out that + # there are indeed USD-tokens OTHER THEN tether being used as + # the margin assets.. it's going to require a wholesale + # (variable/key) rename as well as file name adjustments to any + # existing tsdb set.. + if 'usd' in venue: + mkt_mode: str = 'usdtm_futes' + + # NO IDEA what these contracts (some kinda DEX-ish futes?) are + # but we're masking them for now.. + elif ( + 'defi' in venue + + # TODO: handle coinm futes which have a margin asset that + # is some crypto token! + # https://binance-docs.github.io/apidocs/delivery/en/#exchange-information + or 'btc' in venue + ): + return None - # NOTE: see the `FutesPair.bs_fqme: str` implementation - # to understand the reverse market info lookup below. - mkt_mode = venue = venue.lower() or 'spot' - _atype: str = '' + else: + # NOTE: see the `FutesPair.bs_fqme: str` implementation + # to understand the reverse market info lookup below. + mkt_mode = venue or 'spot' + + sectype: str = '' if ( venue - and 'spot' not in venue.lower() + and 'spot' not in venue # XXX: catch all in case user doesn't know which # venue they want (usdtm vs. coinm) and we can choose # a default (via config?) once we support coin-m APIs. - or 'perp' in bs_fqme.lower() + or 'perp' in venue ): - mkt_mode: str = f'{venue.lower()}_futes' - if 'perp' in expiry: - _atype = 'perpetual_future' + if not mkt_mode: + mkt_mode: str = f'{venue}_futes' - else: - _atype = 'future' + sectype: str = 'future' + if 'perp' in expiry: + sectype = 'perpetual_future' async with open_cached_client( 'binance', ) as client: - # switch mode depending on input pattern parsing + assets: dict[str, Asset] = await client.get_assets() + pair_str: str = mkt_ep.upper() + + # switch venue-mode depending on input pattern parsing + # since we want to use a particular endpoint (set) for + # pair info lookup! client.mkt_mode = mkt_mode - pair_str: str = mkt_ep.upper() - pair: Pair = await client.exch_info(pair_str) + pair: Pair = await client.exch_info( + pair_str, + venue=mkt_mode, # explicit + expiry=expiry, + ) if 'futes' in mkt_mode: assert isinstance(pair, FutesPair) + dst: Asset | None = assets.get(pair.bs_dst_asset) + if ( + not dst + # TODO: a known asset DNE list? 
+ # and pair.baseAsset == 'DEFI' + ): + log.warning( + f'UNKNOWN {venue} asset {pair.baseAsset} from,\n' + f'{pformat(pair.to_dict())}' + ) + + # XXX UNKNOWN missing "asset", though no idea why? + # maybe it's only avail in the margin venue(s): /dapi/ ? + return None + mkt = MktPair( - dst=Asset( - name=pair.baseAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.baseAssetPrecision), - ), - src=Asset( - name=pair.quoteAsset, - atype='crypto', - tx_tick=digits_to_dec(pair.quoteAssetPrecision), - ), + dst=dst, + src=assets[pair.bs_src_asset], price_tick=pair.price_tick, size_tick=pair.size_tick, bs_mktid=pair.symbol, expiry=expiry, venue=venue, broker='binance', - _atype=_atype, + _atype=sectype, ) - both = mkt, pair - return both + return mkt, pair @acm @@ -472,10 +512,11 @@ async def open_symbol_search( ctx: tractor.Context, ) -> Client: + # NOTE: symbology tables are loaded as part of client + # startup in ``.api.get_client()`` and in this case + # are stored as `Client._pairs`. async with open_cached_client('binance') as client: - # load all symbols locally for fast search - fqpairs_cache = await client.exch_info() # TODO: maybe we should deliver the cache # so that client's can always do a local-lookup-first # style try and then update async as (new) match results @@ -488,7 +529,7 @@ async def open_symbol_search( async for pattern in stream: matches = fuzzy.extractBests( pattern, - fqpairs_cache, + client._pairs, score_cutoff=50, ) From 55c3d617fa27a4c58ffe4849c42e2fb0511cca47 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 12:07:15 -0400 Subject: [PATCH 030/116] brokers.core: open cached client before hitting `.get_mkt_info()` --- piker/brokers/core.py | 50 ++++++++++++++++++++++--------------------- 1 file changed, 26 insertions(+), 24 deletions(-) diff --git a/piker/brokers/core.py b/piker/brokers/core.py index a9a83e7cd..ab8b68728 100644 --- a/piker/brokers/core.py +++ b/piker/brokers/core.py @@ -95,15 +95,15 @@ async def option_chain( return await client.option_chains(contracts) -async def contracts( - brokermod: ModuleType, - symbol: str, -) -> Dict[str, Dict[str, Dict[str, Any]]]: - """Return option contracts (all expiries) for ``symbol``. - """ - async with brokermod.get_client() as client: - # return await client.get_all_contracts([symbol]) - return await client.get_all_contracts([symbol]) +# async def contracts( +# brokermod: ModuleType, +# symbol: str, +# ) -> Dict[str, Dict[str, Dict[str, Any]]]: +# """Return option contracts (all expiries) for ``symbol``. +# """ +# async with brokermod.get_client() as client: +# # return await client.get_all_contracts([symbol]) +# return await client.get_all_contracts([symbol]) async def bars( @@ -117,21 +117,6 @@ async def bars( return await client.bars(symbol, **kwargs) -async def mkt_info( - brokermod: ModuleType, - fqme: str, - **kwargs, - -) -> MktPair: - ''' - Return MktPair info from broker including src and dst assets. - - ''' - return await brokermod.get_mkt_info( - fqme.replace(brokermod.name, '') - ) - - async def search_w_brokerd(name: str, pattern: str) -> dict: async with open_cached_client(name) as client: @@ -178,3 +163,20 @@ async def search_backend( n.start_soon(search_backend, mod.name) return results + + +async def mkt_info( + brokermod: ModuleType, + fqme: str, + **kwargs, + +) -> MktPair: + ''' + Return MktPair info from broker including src and dst assets. 
+ + ''' + async with open_cached_client(brokermod.name) as client: + assert client + return await brokermod.get_mkt_info( + fqme.replace(brokermod.name, '') + ) From 4123c971397c824ab1e882554566ccea3dfc6422 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 12:08:11 -0400 Subject: [PATCH 031/116] Add symcache support to paper eng - add the `.norm_trade()` required ep (for symcache offline loading) - port to new `Account` apis (which now require a symcache input) --- piker/clearing/_paper_engine.py | 67 ++++++++++++++++++++++++--------- 1 file changed, 49 insertions(+), 18 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 2df4eb4e8..13f15cb7e 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -45,7 +45,7 @@ ) from ..accounting import ( Position, - PpTable, + Account, Transaction, TransactionLedger, open_trade_ledger, @@ -77,10 +77,8 @@ class PaperBoi(Struct): ''' broker: str - ems_trades_stream: tractor.MsgStream - - ppt: PpTable + acnt: Account ledger: TransactionLedger # map of paper "live" orders which be used @@ -263,9 +261,9 @@ async def fake_fill( # we don't actually have any unique backend symbol ourselves # other then this thing, our fqme address. bs_mktid: str = fqme + assert self._mkts[fqme].fqme == fqme t = Transaction( fqme=fqme, - sym=self._mkts[fqme], tid=oid, size=size, price=price, @@ -276,10 +274,13 @@ async def fake_fill( # update in-mem ledger and pos table self.ledger.update_from_t(t) - self.ppt.update_from_trans({oid: t}) + self.acnt.update_from_ledger( + {oid: t}, + symcache=self.ledger._symcache, + ) # transmit pp msg to ems - pp = self.ppt.pps[bs_mktid] + pp = self.acnt.pps[bs_mktid] pp_msg = BrokerdPosition( broker=self.broker, account='paper', @@ -296,7 +297,7 @@ async def fake_fill( # write all updates to filesys immediately # (adds latency but that works for simulation anyway) self.ledger.write_config() - self.ppt.write_config() + self.acnt.write_config() await self.ems_trades_stream.send(pp_msg) @@ -540,14 +541,14 @@ async def open_trade_dialog( # enable piker.clearing console log for *this* subactor get_console_log(loglevel) - ppt: PpTable + acnt: Account ledger: TransactionLedger with ( open_pps( broker, 'paper', write_on_exit=True, - ) as ppt, + ) as acnt, open_trade_ledger( broker, @@ -559,7 +560,7 @@ async def open_trade_dialog( # don't contain necessary market info per trade record entry.. # - if no fqme was passed in, we presume we're running in # "ledger-sync-only mode" and thus we load mkt info for - # each symbol found in the ledger to a ppt table manually. + # each symbol found in the ledger to a acnt table manually. # TODO: how to process ledger info from backends? # - should we be rolling our own actor-cached version of these @@ -575,7 +576,7 @@ async def open_trade_dialog( mkt_by_fqme: dict[str, MktPair] = {} if fqme: bs_fqme, _, broker = fqme.rpartition('.') - mkt, _ = await brokermod.get_mkt_info(bs_fqme) + mkt, pair = await brokermod.get_mkt_info(bs_fqme) mkt_by_fqme[mkt.fqme] = mkt # for each sym in the ledger load it's `MktPair` info @@ -586,7 +587,7 @@ async def open_trade_dialog( gmi and l_fqme not in mkt_by_fqme ): - mkt, pair = await brokermod.get_mkt_info( + mkt, pair = await gmi( l_fqme.rstrip(f'.{broker}'), ) mkt_by_fqme[l_fqme] = mkt @@ -603,12 +604,12 @@ async def open_trade_dialog( # update pos table from ledger history and provide a ``MktPair`` # lookup for internal position accounting calcs. 
- ppt.update_from_trans(ledger.to_trans(mkt_by_fqme=mkt_by_fqme)) + acnt.update_from_ledger(ledger) pp_msgs: list[BrokerdPosition] = [] pos: Position token: str # f'{symbol}.{self.broker}' - for token, pos in ppt.pps.items(): + for token, pos in acnt.pps.items(): pp_msgs.append(BrokerdPosition( broker=broker, account='paper', @@ -624,7 +625,7 @@ async def open_trade_dialog( # write new positions state in case ledger was # newer then that tracked in pps.toml - ppt.write_config() + acnt.write_config() # exit early since no fqme was passed, # normally this case is just to load @@ -645,7 +646,9 @@ async def open_trade_dialog( ): # sanity check all the mkt infos for fqme, flume in feed.flumes.items(): - assert mkt_by_fqme[fqme] == flume.mkt + mkt = mkt_by_fqme[fqme] + print(mkt - flume.mkt) + assert mkt == flume.mkt async with ( ctx.open_stream() as ems_stream, @@ -654,7 +657,7 @@ async def open_trade_dialog( client = PaperBoi( broker=broker, ems_trades_stream=ems_stream, - ppt=ppt, + acnt=acnt, ledger=ledger, _buys=_buys, @@ -723,3 +726,31 @@ async def open_paperboi( await ctx.cancel() if we_spawned: await portal.cancel_actor() + + +def norm_trade( + txdict: dict, + +) -> Transaction: + from pendulum import ( + DateTime, + parse, + ) + + # special field handling for datetimes + # to ensure pendulum is used! + dt: DateTime = parse(txdict['dt']) + expiry: str | None = txdict.get('expiry') + fqme: str = txdict.get('fqme') or txdict.pop('fqsn') + + return Transaction( + fqme=fqme, + tid=txdict['tid'], + dt=dt, + price=txdict['price'], + size=txdict['size'], + cost=txdict.get('cost', 0), + bs_mktid=txdict['bs_mktid'], + expiry=parse(expiry) if expiry else None, + etype='clear', + ) From 243821aab119316629475085fbd94e086565c784 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 16:54:00 -0400 Subject: [PATCH 032/116] Bleh! Ok make `open_symcache()` and `@acm`.. Turns in order to make things much cleaner from inside-the-runtime usage we do probably want to just make the manager async so that we can generate the cache on demand from async UI inits as well as daemon actors.. So change to that and instead make `get_symcache()` the helper that should ONLY be called from sync funcs / offline ledger processing utils! --- piker/data/_symcache.py | 81 +++++++++++++++++++++++------------------ 1 file changed, 45 insertions(+), 36 deletions(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index ad3c7004b..79f002236 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -25,8 +25,7 @@ ''' from __future__ import annotations from contextlib import ( - # asynccontextmanager as acm, - contextmanager as cm, + asynccontextmanager as acm, ) from pathlib import Path from pprint import pformat @@ -38,15 +37,20 @@ from fuzzywuzzy import process as fuzzy import tomli_w # for fast symbol cache writing +import tractor +import trio try: import tomllib except ModuleNotFoundError: import tomli as tomllib from msgspec import field -from ..log import get_logger -from .. 
import config -from ..brokers import open_cached_client +from piker.log import get_logger +from piker import config +from piker.brokers import ( + open_cached_client, + get_brokermod, +) from .types import Struct if TYPE_CHECKING: @@ -166,13 +170,19 @@ def search( _caches: dict[str, SymbologyCache] = {} -@cm -def open_symcache( - mod: ModuleType, +@acm +async def open_symcache( + mod_or_name: ModuleType | str, reload: bool = False, + only_from_memcache: bool = False, ) -> SymbologyCache: + if isinstance(mod_or_name, str): + mod = get_brokermod(mod_or_name) + else: + mod: ModuleType = mod_or_name + provider: str = mod.name # actor-level cache-cache XD @@ -181,8 +191,13 @@ def open_symcache( try: yield _caches[provider] except KeyError: - log.warning('No asset info cache exists yet for ' - f'`{provider}`') + msg: str = ( + f'No asset info cache exists yet for `{provider}`' + ) + if only_from_memcache: + raise RuntimeError(msg) + else: + log.warning(msg) cachedir: Path = config.get_conf_dir() / '_cache' if not cachedir.is_dir(): @@ -204,23 +219,9 @@ def open_symcache( or not cachefile.is_file() ): log.info(f'GENERATING symbology cache for `{mod.name}`') + await cache.load() - import tractor - import trio - - # spawn tractor runtime and generate cache - # if not existing. - async def sched_gen_symcache(): - - async with ( - # only for runtime - tractor.open_nursery(debug_mode=True), - ): - return await cache.load() - - cache: SymbologyCache = trio.run(sched_gen_symcache) - - # only (re-)write if explicit reload or non-existing + # NOTE: only (re-)write if explicit reload or non-existing cache.write_config() else: @@ -265,7 +266,6 @@ async def sched_gen_symcache(): # sanity check asset refs from those (presumably) # loaded asset set above. - # src_k: str = pairtable.get('bs_src_asset, src: Asset = cache.assets[mkt.src.name] assert src == mkt.src dst: Asset @@ -299,19 +299,28 @@ def get_symcache( ) -> SymbologyCache: ''' - Get any available symbology/assets cache from - sync code by manually running `trio` to do the work. + Get any available symbology/assets cache from sync code by + (maybe) manually running `trio` to do the work. ''' - from ..brokers import get_brokermod + # spawn tractor runtime and generate cache + # if not existing. 
+ async def sched_gen_symcache(): + async with ( + # only for runtime's debug mode + tractor.open_nursery(debug_mode=True), + + open_symcache( + get_brokermod(provider), + reload=force_reload, + ) as symcache, + ): + return symcache try: - with open_symcache( - get_brokermod(provider), - reload=force_reload, - - ) as symcache: - return symcache + cache: SymbologyCache = trio.run(sched_gen_symcache) except BaseException: import pdbp pdbp.xpm() + + return cache From 8330b36e58e1a3b4d6c6dec51bd3aeb643752f82 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 17:44:30 -0400 Subject: [PATCH 033/116] User/return explicit `symcache` var name in sync case --- piker/data/_symcache.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index 79f002236..8962af173 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -318,9 +318,10 @@ async def sched_gen_symcache(): return symcache try: - cache: SymbologyCache = trio.run(sched_gen_symcache) + symcache: SymbologyCache = trio.run(sched_gen_symcache) + assert symcache except BaseException: import pdbp pdbp.xpm() - return cache + return symcache From 14d5b3c96383561e0cc93196f9c0847550a4a5a6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 17:45:59 -0400 Subject: [PATCH 034/116] Be pedantic in `open_trade_ledger()` from sync code Require passing an explicit flag when entering from sync code with an extra super duper explicit runtime error to indicate how to use in the async case as well! Also, do rewrites of both the fqme (from best match in the symcache according to search - the worst case) or from the `bs_mktid` field if it exists (should only be true for paper engine accounts) AND the `bs_mktid` for paper accounts if it seems un-fully-qualified. --- piker/accounting/_ledger.py | 68 ++++++++++++++++++++++++++++++------- piker/accounting/calc.py | 6 +--- 2 files changed, 57 insertions(+), 17 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 3f8f258c8..acfe97f7b 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -209,17 +209,41 @@ def write_config( ): txdict['expiry'] = '' - # re-write old acro-key - + # (maybe) re-write old acro-key fqme: str = txdict.pop('fqsn', None) or txdict['fqme'] + bs_mktid: str | None = txdict.get('bs_mktid') - if fqme not in self._symcache.mktmaps: - best_fqme: str = list(self._symcache.search(fqme))[0] - log.warning( - f'Could not find FQME: {fqme} in qualified set?\n' - f'Qualifying and expanding {fqme} -> {best_fqme}' + if ( + fqme not in self._symcache.mktmaps + or ( + # also try to see if this is maybe a paper + # engine ledger in which case the bs_mktid + # should be the fqme as well! + self.account == 'paper' + and bs_mktid + and fqme != bs_mktid ) - fqme = best_fqme + ): + # always take any (paper) bs_mktid if defined and + # in the backend's cache key set. + if bs_mktid in self._symcache.mktmaps: + fqme: str = bs_mktid + else: + best_fqme: str = list(self._symcache.search(fqme))[0] + log.warning( + f'Could not find FQME: {fqme} in qualified set?\n' + f'Qualifying and expanding {fqme} -> {best_fqme}' + ) + fqme = best_fqme + + if ( + self.account == 'paper' + and bs_mktid + and bs_mktid != fqme + ): + # in paper account case always make sure both the + # fqme and bs_mktid are fully qualified.. 
+ txdict['bs_mktid'] = fqme txdict['fqme'] = fqme towrite[tid] = txdict @@ -272,6 +296,9 @@ def open_trade_ledger( broker: str, account: str, + allow_from_sync_code: bool = False, + symcache: SymbologyCache | None = None, + # default is to sort by detected datetime-ish field tx_sort: Callable = iter_by_dt, rewrite: bool = False, @@ -292,10 +319,27 @@ def open_trade_ledger( ledger_dict, fpath = load_ledger(broker, account) cpy = ledger_dict.copy() - from ..data._symcache import ( - get_symcache, - ) - symcache: SymbologyCache = get_symcache(broker) + # XXX NOTE: if not provided presume we are being called from + # sync code and need to maybe run `trio` to generate.. + if symcache is None: + + # XXX: be mega pendantic and ensure the caller knows what + # they're doing.. + if not allow_from_sync_code: + raise RuntimeError( + 'You MUST set `allow_from_sync_code=True` when ' + 'calling `open_trade_ledger()` from sync code! ' + 'If you are calling from async code you MUST ' + 'instead pass a `symcache: SymbologyCache`!' + ) + + from ..data._symcache import ( + get_symcache, + ) + symcache: SymbologyCache = get_symcache(broker) + + assert symcache + ledger = TransactionLedger( ledger_dict=cpy, file_path=fpath, diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index 629788502..90ad1cf74 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -31,7 +31,6 @@ import polars as pl from pendulum import ( - # datetime, DateTime, from_timestamp, parse, @@ -354,16 +353,13 @@ def open_ledger_dfs( brokername, acctname, rewrite=True, + allow_from_sync_code=True, ) as ledger, ): if not ledger: raise ValueError(f'No ledger for {acctname}@{brokername} exists?') print(f'LEDGER LOAD TIME: {time.time() - now}') - # if acctname == 'paper': - # txns: dict[str, Transaction] = ledger.to_trans() - # else: - # process raw TOML ledger into txns using the # appropriate backend normalizer. # cache: AssetsInfo = get_symcache( From ddcdbce1a21b87072adaacdbcd53e1b73044ea1c Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 17:51:20 -0400 Subject: [PATCH 035/116] Use `acnt` instead of `table` for ref name B) --- piker/accounting/_pos.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index fbb6997f6..53c321974 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -520,7 +520,7 @@ def update_from_ledger( if len(pos.mkt.fqme) < len(fqme): pos.mkt = mkt - # update clearing table! + # update clearing acnt! # NOTE: likely you'll see repeats of the same # ``Transaction`` passed in here if/when you are restarting # a ``brokerd.ib`` where the API will re-report trades from @@ -618,7 +618,7 @@ def write_config(self) -> None: ''' # TODO: show diff output? # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries - # active, closed_pp_objs = table.dump_active() + # active, closed_pp_objs = acnt.dump_active() active, closed = self.dump_active() pp_entries = self.prep_toml(active=active) @@ -734,6 +734,9 @@ def load_account( return conf, path +# TODO: make this async and offer a `get_account()` that +# can be used from sync code which does the same thing as +# open_trade_ledger()! 
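+# A hypothetical sketch of that pairing (NOT implemented here) which
+# mirrors the `trio.run()` bridge `get_symcache()` uses around
+# `open_symcache()`, presuming `open_account()` gets made async per
+# the TODO above:
+#
+# def get_account(broker: str, acctid: str) -> Account:
+#     async def _load() -> Account:
+#         async with open_account(broker, acctid) as acnt:
+#             return acnt
+#     return trio.run(_load)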
@cm def open_account( brokername: str, @@ -775,7 +778,7 @@ def open_account( mod: ModuleType = get_brokermod(brokername) pp_objs: dict[str, Position] = {} - table = Account( + acnt = Account( mod, acctid, pp_objs, @@ -877,10 +880,10 @@ def open_account( pp.add_clear(t) try: - yield table + yield acnt finally: if write_on_exit: - table.write_config() + acnt.write_config() # TODO: drop the old name and THIS! From 108e8c7082f1e9158a49db3eca5bcce28bb55aa1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 17:51:42 -0400 Subject: [PATCH 036/116] .accounting: expose `open_account()` at subsys pkg level --- piker/accounting/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index bac6e6492..167dbcc23 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -34,6 +34,7 @@ load_account, load_account_from_ledger, open_pps, + open_account, Position, ) from ._mktinfo import ( @@ -67,6 +68,7 @@ 'load_account', 'load_account_from_ledger', 'mk_allocator', + 'open_account', 'open_pps', 'open_trade_ledger', 'unpack_fqme', From 06c581bfab074ddf8a6b43c36929faaa92c98e46 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 17:52:23 -0400 Subject: [PATCH 037/116] Async enter/open the symcache in paper engine Since we don't want to be doing a `trio.run()` from async code (being already in the `tractor` runtime and all); for now just put a top level block wrapping async enter until we figure out to embed it (likely) inside `open_account()` and pass the ref to `open_trade_ledger()`. --- piker/clearing/_paper_engine.py | 262 ++++++++++++++++---------------- 1 file changed, 134 insertions(+), 128 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 13f15cb7e..861cd389a 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -49,9 +49,10 @@ Transaction, TransactionLedger, open_trade_ledger, - open_pps, + open_account, ) from ..data import iterticks +from ..data._symcache import open_symcache from ..accounting import unpack_fqme from ._util import ( log, # sub-sys logger @@ -541,141 +542,146 @@ async def open_trade_dialog( # enable piker.clearing console log for *this* subactor get_console_log(loglevel) - acnt: Account - ledger: TransactionLedger - with ( - open_pps( - broker, - 'paper', - write_on_exit=True, - ) as acnt, - - open_trade_ledger( - broker, - 'paper', - ) as ledger - ): - # NOTE: retreive market(pair) info from the backend broker - # since ledger entries (in their backend native format) often - # don't contain necessary market info per trade record entry.. - # - if no fqme was passed in, we presume we're running in - # "ledger-sync-only mode" and thus we load mkt info for - # each symbol found in the ledger to a acnt table manually. - - # TODO: how to process ledger info from backends? - # - should we be rolling our own actor-cached version of these - # client API refs or using portal IPC to send requests to the - # existing brokerd daemon? - # - alternatively we can possibly expect and use - # a `.broker.norm_trade_records()` ep? 
- brokermod = get_brokermod(broker) - gmi = getattr(brokermod, 'get_mkt_info', None) - - # update all transactions with mkt info before - # loading any pps - mkt_by_fqme: dict[str, MktPair] = {} - if fqme: - bs_fqme, _, broker = fqme.rpartition('.') - mkt, pair = await brokermod.get_mkt_info(bs_fqme) - mkt_by_fqme[mkt.fqme] = mkt - - # for each sym in the ledger load it's `MktPair` info - for tid, txdict in ledger.data.items(): - l_fqme: str = txdict.get('fqme') or txdict['fqsn'] - - if ( - gmi - and l_fqme not in mkt_by_fqme - ): - mkt, pair = await gmi( - l_fqme.rstrip(f'.{broker}'), - ) - mkt_by_fqme[l_fqme] = mkt - - # if an ``fqme: str`` input was provided we only - # need a ``MktPair`` for that one market, since we're - # running in real simulated-clearing mode, not just ledger - # syncing. - if ( - fqme is not None - and fqme in mkt_by_fqme - ): - break - - # update pos table from ledger history and provide a ``MktPair`` - # lookup for internal position accounting calcs. - acnt.update_from_ledger(ledger) - - pp_msgs: list[BrokerdPosition] = [] - pos: Position - token: str # f'{symbol}.{self.broker}' - for token, pos in acnt.pps.items(): - pp_msgs.append(BrokerdPosition( - broker=broker, - account='paper', - symbol=pos.mkt.fqme, - size=pos.size, - avg_price=pos.ppu, + async with open_symcache(get_brokermod(broker)) as symcache: + acnt: Account + ledger: TransactionLedger + with ( + + # TODO: probably do the symcache and ledger loading + # implicitly behind this? Deliver an account, and ledger + # pair or make the ledger an attr of the account? + open_account( + broker, + 'paper', + write_on_exit=True, + ) as acnt, + + open_trade_ledger( + broker, + 'paper', + symcache=symcache, + ) as ledger + ): + # NOTE: retreive market(pair) info from the backend broker + # since ledger entries (in their backend native format) often + # don't contain necessary market info per trade record entry.. + # - if no fqme was passed in, we presume we're running in + # "ledger-sync-only mode" and thus we load mkt info for + # each symbol found in the ledger to a acnt table manually. + + # TODO: how to process ledger info from backends? + # - should we be rolling our own actor-cached version of these + # client API refs or using portal IPC to send requests to the + # existing brokerd daemon? + # - alternatively we can possibly expect and use + # a `.broker.norm_trade_records()` ep? + brokermod = get_brokermod(broker) + gmi = getattr(brokermod, 'get_mkt_info', None) + + # update all transactions with mkt info before + # loading any pps + mkt_by_fqme: dict[str, MktPair] = {} + if fqme: + bs_fqme, _, broker = fqme.rpartition('.') + mkt, pair = await brokermod.get_mkt_info(bs_fqme) + mkt_by_fqme[mkt.fqme] = mkt + + # for each sym in the ledger load it's `MktPair` info + for tid, txdict in ledger.data.items(): + l_fqme: str = txdict.get('fqme') or txdict['fqsn'] + + if ( + gmi + and l_fqme not in mkt_by_fqme + ): + mkt, pair = await gmi( + l_fqme.rstrip(f'.{broker}'), + ) + mkt_by_fqme[l_fqme] = mkt + + # if an ``fqme: str`` input was provided we only + # need a ``MktPair`` for that one market, since we're + # running in real simulated-clearing mode, not just ledger + # syncing. + if ( + fqme is not None + and fqme in mkt_by_fqme + ): + break + + # update pos table from ledger history and provide a ``MktPair`` + # lookup for internal position accounting calcs. 
+ acnt.update_from_ledger(ledger) + + pp_msgs: list[BrokerdPosition] = [] + pos: Position + token: str # f'{symbol}.{self.broker}' + for token, pos in acnt.pps.items(): + pp_msgs.append(BrokerdPosition( + broker=broker, + account='paper', + symbol=pos.mkt.fqme, + size=pos.size, + avg_price=pos.ppu, + )) + + await ctx.started(( + pp_msgs, + ['paper'], )) - await ctx.started(( - pp_msgs, - ['paper'], - )) - - # write new positions state in case ledger was - # newer then that tracked in pps.toml - acnt.write_config() - - # exit early since no fqme was passed, - # normally this case is just to load - # positions "offline". - if fqme is None: - log.warning( - 'Paper engine only running in position delivery mode!\n' - 'NO SIMULATED CLEARING LOOP IS ACTIVE!' - ) - await trio.sleep_forever() - return - - async with ( - data.open_feed( - [fqme], - loglevel=loglevel, - ) as feed, - ): - # sanity check all the mkt infos - for fqme, flume in feed.flumes.items(): - mkt = mkt_by_fqme[fqme] - print(mkt - flume.mkt) - assert mkt == flume.mkt + # write new positions state in case ledger was + # newer then that tracked in pps.toml + acnt.write_config() + + # exit early since no fqme was passed, + # normally this case is just to load + # positions "offline". + if fqme is None: + log.warning( + 'Paper engine only running in position delivery mode!\n' + 'NO SIMULATED CLEARING LOOP IS ACTIVE!' + ) + await trio.sleep_forever() + return async with ( - ctx.open_stream() as ems_stream, - trio.open_nursery() as n, + data.open_feed( + [fqme], + loglevel=loglevel, + ) as feed, ): - client = PaperBoi( - broker=broker, - ems_trades_stream=ems_stream, - acnt=acnt, - ledger=ledger, - - _buys=_buys, - _sells=_sells, - _reqids=_reqids, + # sanity check all the mkt infos + for fqme, flume in feed.flumes.items(): + mkt = mkt_by_fqme[fqme] + assert mkt == flume.mkt + + async with ( + ctx.open_stream() as ems_stream, + trio.open_nursery() as n, + ): + client = PaperBoi( + broker=broker, + ems_trades_stream=ems_stream, + acnt=acnt, + ledger=ledger, + + _buys=_buys, + _sells=_sells, + _reqids=_reqids, + + _mkts=mkt_by_fqme, - _mkts=mkt_by_fqme, - - ) + ) - n.start_soon( - handle_order_requests, - client, - ems_stream, - ) + n.start_soon( + handle_order_requests, + client, + ems_stream, + ) - # paper engine simulator clearing task - await simulate_fills(feed.streams[broker], client) + # paper engine simulator clearing task + await simulate_fills(feed.streams[broker], client) @acm From 8b9494281da6da762d734860718e33e3c2fc9a7a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 17:57:32 -0400 Subject: [PATCH 038/116] Don't verify the history step period for now in `tsdb_backfill()` --- piker/data/history.py | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/piker/data/history.py b/piker/data/history.py index 46c5f5a6f..c2e89f7f8 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -140,9 +140,7 @@ async def shm_push_in_between( # memory... array = shm.array zeros = array[array['low'] == 0] - if ( - 0 < zeros.size < 1000 - ): + if 0 < zeros.size < 1000: tractor.breakpoint() @@ -637,20 +635,21 @@ async def tsdb_backfill( task_status.started() return - times: np.ndarray = array['time'] - - # sample period step size in seconds - step_size_s = ( - from_timestamp(times[-1]) - - from_timestamp(times[-2]) - ).seconds - - if step_size_s not in (1, 60): - log.error(f'Last 2 sample period is off!? 
-> {step_size_s}') - step_size_s = ( - from_timestamp(times[-2]) - - from_timestamp(times[-3]) - ).seconds + # NOTE: removed for now since it'll always break + # on the first 60s of the venue open.. + # times: np.ndarray = array['time'] + # # sample period step size in seconds + # step_size_s = ( + # from_timestamp(times[-1]) + # - from_timestamp(times[-2]) + # ).seconds + + # if step_size_s not in (1, 60): + # log.error(f'Last 2 sample period is off!? -> {step_size_s}') + # step_size_s = ( + # from_timestamp(times[-2]) + # - from_timestamp(times[-3]) + # ).seconds # NOTE: on the first history, most recent history # frame we PREPEND from the current shm ._last index From c30d8ac9bac4dd32c09b293ccd2c0624b9eecdc4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 10 Jul 2023 18:02:40 -0400 Subject: [PATCH 039/116] ib: port to new `.accounting` APIs Still kinda borked since i don't think there actually is a (per venue) "get-all-symbologies" endpoint.. so we're likely gonna have to figure out either how to hack it or provide a bypass in ledger processing? Deatz: - use new `Account` type name, rename endpoint vars to match and obviously use any new method name(s). - mask out split ratio handling for now. - async open the symcache prior to ledger processing (again, for now). - drop passing `Transaction.sym`. - fix parser set for dt-sorter since apparently 2022 and back had a `date` field instead? --- piker/brokers/ib/broker.py | 72 +++++++++++++++++++++++++------------- piker/brokers/ib/ledger.py | 1 - 2 files changed, 48 insertions(+), 25 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 9be0e13e6..e4ac0598d 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -61,7 +61,7 @@ TransactionLedger, iter_by_dt, open_pps, - PpTable, + Account, ) from piker.clearing._messages import ( Order, @@ -287,6 +287,9 @@ def push_tradesies( await client.ib.disconnectedEvent +# TODO: maybe we should allow the `trade_entries` input to be +# a list of the actual `Contract` types instead, though a couple +# other callers will need to be changed as well. async def update_ledger_from_api_trades( trade_entries: list[dict[str, Any]], client: Union[Client, MethodProxy], @@ -383,25 +386,33 @@ async def update_and_audit_msgs( # if ib reports a lesser pp it's not as bad since we can # presume we're at least not more in the shit then we # thought. - if diff and pikersize: - reverse_split_ratio = pikersize / ibsize - split_ratio = 1/reverse_split_ratio + if ( + diff + and ( + pikersize + or ibsize + ) + ): + # if 'mbt.cme' in msg.symbol: + # await tractor.pause() - if split_ratio >= reverse_split_ratio: - entry = f'split_ratio = {int(split_ratio)}' - else: - entry = f'split_ratio = 1/{int(reverse_split_ratio)}' + # reverse_split_ratio = pikersize / ibsize + # split_ratio = 1/reverse_split_ratio + # if split_ratio >= reverse_split_ratio: + # entry = f'split_ratio = {int(split_ratio)}' + # else: + # entry = f'split_ratio = 1/{int(reverse_split_ratio)}' msg.size = ibsize - logmsg: str = ( f'Pos mismatch in ib vs. 
the piker ledger!\n' f'IB:\n{ibfmtmsg}\n\n' f'PIKER:\n{pikerfmtmsg}\n\n' - 'If you are expecting a (reverse) split in this ' - 'instrument you should probably put the following' - 'in the `pps.toml` section:\n' - f'{entry}\n' + + # 'If you are expecting a (reverse) split in this ' + # 'instrument you should probably put the following' + # 'in the `pps.toml` section:\n' + # f'{entry}\n' # f'reverse_split_ratio: {reverse_split_ratio}\n' # f'split_ratio: {split_ratio}\n\n' ) @@ -416,8 +427,9 @@ async def update_and_audit_msgs( # TODO: make this a "propaganda" log level? log.warning( f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n' - f'ib: {ibppmsg.avg_price}\n' - f'piker: {msg.avg_price}' + f'ib: {pformat(ibppmsg)}\n' + '---------------------------\n' + f'piker: {msg.to_dict()}' ) else: @@ -537,6 +549,9 @@ async def open_trade_dialog( accounts_def = config.load_accounts(['ib']) + # TODO: do this as part of `open_account()`!? + from piker.data._symcache import open_symcache + global _client_cache # deliver positions to subscriber before anything else @@ -550,12 +565,13 @@ async def open_trade_dialog( proxies, aioclients, ), + open_symcache('ib', only_from_memcache=True) as symcache, ): # Open a trade ledgers stack for appending trade records over # multiple accounts. # TODO: we probably want to generalize this into a "ledgers" api.. ledgers: dict[str, dict] = {} - tables: dict[str, PpTable] = {} + tables: dict[str, Account] = {} order_msgs: list[Status] = [] conf = get_config() accounts_def_inv: bidict[str, str] = bidict(conf['accounts']).inverse @@ -582,8 +598,12 @@ async def open_trade_dialog( parsers={ 'dateTime': parse_flex_dt, 'datetime': pendulum.parse, + # for some some fucking 2022 and + # back options records...fuck me. + 'date': pendulum.parse, }, ), + symcache=symcache, ) ) @@ -616,7 +636,7 @@ async def open_trade_dialog( ) acctid: str = account.strip('ib.') ledger: dict = ledgers[acctid] - table: PpTable = tables[acctid] + table: Account = tables[acctid] # update position table with latest ledger from all # gathered transactions: ledger file + api records. @@ -624,9 +644,8 @@ async def open_trade_dialog( # update trades ledgers for all accounts from connected # api clients which report trades for **this session**. - api_trades = await proxy.trades() + api_trades: list[dict] = await proxy.trades() if api_trades: - api_trans_by_acct: dict[str, Transaction] api_to_ledger_entries: dict[str, dict] ( @@ -660,7 +679,10 @@ async def open_trade_dialog( trans.update(api_trans) # update account (and thus pps) from all gathered transactions - table.update_from_trans(trans) + table.update_from_ledger( + trans, + symcache=ledger.symcache, + ) # process pp value reported from ib's system. 
we only # use these to cross-check sizing since average pricing @@ -772,7 +794,7 @@ async def emit_pp_update( cids2pps: dict, ledgers: dict[str, dict[str, Any]], - tables: dict[str, PpTable], + acnts: dict[str, Account], ) -> None: @@ -794,9 +816,11 @@ async def emit_pp_update( tx: Transaction = list(trans.values())[0] acctid = fq_acctid.strip('ib.') - table = tables[acctid] - table.update_from_trans(trans) - active, closed = table.dump_active() + acnt = acnts[acctid] + + acnt.update_from_ledger(trans) + + active, closed = acnt.dump_active() # NOTE: update ledger with all new trades for fq_acctid, trades_by_id in api_to_ledger_entries.items(): diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py index 805cdaf62..e12bab132 100644 --- a/piker/brokers/ib/ledger.py +++ b/piker/brokers/ib/ledger.py @@ -224,7 +224,6 @@ def norm_trade_records( # `trades_dialogue()` above). trans = Transaction( fqme=fqme, - sym=pair, tid=tid, size=size, price=price, From 7f4884a6d9ba52e3e47044df588015514843933d Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 12 Jul 2023 12:33:30 -0400 Subject: [PATCH 040/116] data.types.Struct.to_dict(): discard non-member struct by default --- piker/data/types.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/piker/data/types.py b/piker/data/types.py index c5591500e..afa8f7c8d 100644 --- a/piker/data/types.py +++ b/piker/data/types.py @@ -72,15 +72,31 @@ class Struct( A "human friendlier" (aka repl buddy) struct subtype. ''' - def to_dict(self) -> dict: + def to_dict( + self, + include_non_members: bool = False, + ) -> dict: ''' Like it sounds.. direct delegation to: https://jcristharif.com/msgspec/api.html#msgspec.structs.asdict - TODO: probably just drop this method since it's now a built-int method? + BUT, by default we pop all non-member (aka not defined as + struct fields) fields by default. ''' - return structs.asdict(self) + asdict: dict = structs.asdict(self) + if include_non_members: + return asdict + + # only return a dict of the struct members + # which were provided as input, NOT anything + # added as `@properties`! + sin_props: dict = {} + for fi in structs.fields(self): + key: str = fi.name + sin_props[key] = asdict[key] + + return sin_props def pformat(self) -> str: return f'Struct({pformat(self.to_dict())})' From da206f5242988e579c205dafde15c0358adcd104 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 13 Jul 2023 17:58:50 -0400 Subject: [PATCH 041/116] Store "namespace path" for each backend's pair struct Since some backends have multiple venues keyed by the same symbol-pair-name, AND often the market/symbol info for those different market-venues is entirely different (cough binance), we will have to (sometimes) save the struct namespace-path as str for lookup when deserializing a symcache to object form. NOTE: this change is reliant on the following `tractor` dev commit which improves support for constructing a path from object-instance: https://github.com/goodboy/tractor/pull/362/commits/bee2c36072939d7e537ec1fce9f297d37721db63 Add a backend(-wide) default struct path stored as a (TOML top level) field `pair_ns_path: str` in the serialized `dict`-table as well as allow for a per pair-`Struct` value optionally defined on each type def; the global is only used if none was defined per struct via a `ns_path: str`. Further deats: - don't write non-struct-member fields to dict for TOML file cache. - always keep object forms, well as objects (in tables).. 
XD
- factor cache loading from `dict` (and thus from TOML or presumably
  any other interchange form) into a `@classmethod` constructor method B)
- allow choosing the subtable for `.search()` by name.
---
 piker/data/_symcache.py | 220 ++++++++++++++++++++++++++++------------
 1 file changed, 156 insertions(+), 64 deletions(-)

diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py
index 8962af173..ee67ce6a0 100644
--- a/piker/data/_symcache.py
+++ b/piker/data/_symcache.py
@@ -79,6 +79,9 @@ class SymbologyCache(Struct):
     # backend-system pairs loaded in provider (schema) specific
     # structs.
     pairs: dict[str, Struct] = field(default_factory=dict)
+    # serialized namespace path to the backend's pair-info-`Struct`
+    # defn B)
+    pair_ns_path: tractor.msg.NamespacePath | None = None
 
     # TODO: piker-normalized `.accounting.MktPair` table?
     # loaded from the `.pairs` and a normalizer
@@ -86,23 +89,28 @@ class SymbologyCache(Struct):
     mktmaps: dict[str, MktPair] = field(default_factory=dict)
 
     def write_config(self) -> None:
-        cachedict: dict[str, Any] = {}
-        for key, attr in {
+
+        # put the backend's pair-struct type ref at the top
+        # of file if possible.
+        cachedict: dict[str, Any] = {
+            'pair_ns_path': str(self.pair_ns_path) or '',
+        }
+
+        # serialize all tables as dicts for TOML.
+        for key, table in {
             'assets': self.assets,
             'pairs': self.pairs,
+            'mktmaps': self.mktmaps,
         }.items():
-            if not attr:
+            if not table:
                 log.warning(
                     f'Asset cache table for `{key}` is empty?'
                 )
                 continue
 
-            cachedict[key] = attr
-
-        # serialize mkts
-        mktmapsdict = cachedict['mktmaps'] = {}
-        for fqme, mkt in self.mktmaps.items():
-            mktmapsdict[fqme] = mkt.to_dict()
+            dct = cachedict[key] = {}
+            for key, struct in table.items():
+                dct[key] = struct.to_dict(include_non_members=False)
 
         try:
             with self.fp.open(mode='wb') as fp:
@@ -112,12 +120,27 @@ def write_config(self) -> None:
             raise
 
     async def load(self) -> None:
+        '''
+        Explicitly load the "symbology set" for this provider by using
+        2 required `Client` methods:
+
+        - `.get_assets()`: returning a table of `Asset`s
+        - `.get_mkt_pairs()`: returning a table of pair-`Struct`
+          types, custom defined by the particular backend.
+
+        AND, the required `.get_mkt_info()` module-level endpoint which
+        maps `fqme: str` -> `MktPair`s.
+
+        These tables are then used to fill out the `.assets`, `.pairs` and
+        `.mktmaps` tables on this cache instance, respectively.
+
+        '''
        async with open_cached_client(self.mod.name) as client:
 
             if get_assets := getattr(client, 'get_assets', None):
                 assets: dict[str, Asset] = await get_assets()
                 for bs_mktid, asset in assets.items():
-                    self.assets[bs_mktid] = asset.to_dict()
+                    self.assets[bs_mktid] = asset
             else:
                 log.warning(
                     'No symbology cache `Asset` support for `{provider}`..\n'
                 )
 
             if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):
+
                 pairs: dict[str, Struct] = await get_mkt_pairs()
                 for bs_fqme, pair in pairs.items():
+                    # NOTE: every backend defined pair should
+                    # declare its ns path for roundtrip
+                    # serialization lookup. 
+ if not getattr(pair, 'ns_path', None): + raise TypeError( + f'Pair-struct for {self.mod.name} MUST define a ' + '`.ns_path: str`!\n' + f'{pair}' + ) + entry = await self.mod.get_mkt_info(pair.bs_fqme) if not entry: continue @@ -135,10 +169,30 @@ async def load(self) -> None: mkt: MktPair pair: Struct mkt, _pair = entry - assert _pair is pair - self.pairs[pair.bs_fqme] = pair.to_dict() + assert _pair is pair, ( + f'`{self.mod.name}` backend probably has a ' + 'keying-symmetry problem between the pair-`Struct` ' + 'returned from `Client.get_mkt_pairs()`and the ' + 'module level endpoint: `.get_mkt_info()`\n\n' + "Here's the struct diff:\n" + f'{_pair - pair}' + ) + # NOTE XXX: this means backends MUST implement + # a `Struct.bs_mktid: str` field to provide + # a native-keyed map to their own symbol + # set(s). + self.pairs[pair.bs_mktid] = pair + + # NOTE: `MktPair`s are keyed here using piker's + # internal FQME schema so that search, + # accounting and feed init can be accomplished + # a sane, uniform, normalized basis. self.mktmaps[mkt.fqme] = mkt + self.pair_ns_path: str = tractor.msg.NamespacePath.from_ref( + pair, + ) + else: log.warning( 'No symbology cache `Pair` support for `{provider}`..\n' @@ -147,15 +201,94 @@ async def load(self) -> None: return self + @classmethod + def from_dict( + cls: type, + data: dict, + **kwargs, + ) -> SymbologyCache: + + # normal init inputs + cache = cls(**kwargs) + + # XXX WARNING: this may break if backend namespacing + # changes (eg. `Pair` class def is moved to another + # module) in which case you can manually update the + # `pair_ns_path` in the symcache file and try again. + # TODO: probably a verbose error about this? + Pair: type = tractor.msg.NamespacePath( + str(data['pair_ns_path']) + ).load_ref() + + pairtable = data.pop('pairs') + for key, pairtable in pairtable.items(): + + # allow each serialized pair-dict-table to declare its + # specific struct type's path in cases where a backend + # supports multiples (normally with different + # schemas..) and we are storing them in a flat `.pairs` + # table. + ThisPair = Pair + if this_pair_type := pairtable.get('ns_path'): + ThisPair: type = tractor.msg.NamespacePath( + str(this_pair_type) + ).load_ref() + + pair: Struct = ThisPair(**pairtable) + cache.pairs[key] = pair + + from ..accounting import ( + Asset, + MktPair, + ) + + # load `dict` -> `Asset` + assettable = data.pop('assets') + for name, asdict in assettable.items(): + cache.assets[name] = Asset.from_msg(asdict) + + # load `dict` -> `MktPair` + dne: list[str] = [] + mkttable = data.pop('mktmaps') + for fqme, mktdict in mkttable.items(): + + mkt = MktPair.from_msg(mktdict) + assert mkt.fqme == fqme + + # sanity check asset refs from those (presumably) + # loaded asset set above. + src: Asset = cache.assets[mkt.src.name] + assert src == mkt.src + dst: Asset + if not (dst := cache.assets.get(mkt.dst.name)): + dne.append(mkt.dst.name) + continue + else: + assert dst.name == mkt.dst.name + + cache.mktmaps[fqme] = mkt + + log.warning( + f'These `MktPair.dst: Asset`s DNE says `{cache.mod.name}`?\n' + f'{pformat(dne)}' + ) + return cache + def search( self, pattern: str, + table: str = 'mktmaps' ) -> dict[str, Struct]: + ''' + (Fuzzy) search this cache's `.mktmaps` table, which is + keyed by FQMEs, for `pattern: str` and return the best + matches in a `dict` including the `MktPair` values. 
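+        A (hypothetical) usage sketch, assuming a binance symcache
+        like the one implied by the paper ledger fixtures:
+
+        >>> cache.search('btcusdt')
+        {'btcusdt.spot.binance': MktPair(...), ...}
+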
+ ''' matches = fuzzy.extractBests( pattern, - self.mktmaps, + getattr(self, table), score_cutoff=50, ) @@ -206,11 +339,6 @@ async def open_symcache( cachefile: Path = cachedir / f'{str(provider)}.symcache.toml' - cache = SymbologyCache( - mod=mod, - fp=cachefile, - ) - # if no cache exists or an explicit reload is requested, load # the provider API and call appropriate endpoints to populate # the mkt and asset tables. @@ -218,6 +346,11 @@ async def open_symcache( reload or not cachefile.is_file() ): + cache = SymbologyCache( + mod=mod, + fp=cachefile, + ) + log.info(f'GENERATING symbology cache for `{mod.name}`') await cache.load() @@ -227,59 +360,18 @@ async def open_symcache( else: log.info( f'Loading EXISTING `{mod.name}` symbology cache:\n' - f'> {cache.fp}' + f'> {cachefile}' ) import time - from ..accounting import ( - Asset, - MktPair, - ) - now = time.time() with cachefile.open('rb') as existing_fp: data: dict[str, dict] = tomllib.load(existing_fp) log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}') - # copy in backend specific pairs table directly without - # struct loading for now.. - pairtable = data.pop('pairs') - cache.pairs = pairtable - - # TODO: some kinda way to allow the backend - # to provide a struct-loader per entry? - # for key, pairtable in pairtable.items(): - # pair: Struct = cache.mod.load_pair(pairtable) - # cache.pairs[key] = pair - - # load `dict` -> `Asset` - assettable = data.pop('assets') - for name, asdict in assettable.items(): - cache.assets[name] = Asset.from_msg(asdict) - - # load `dict` -> `MktPair` - dne: list[str] = [] - mkttable = data.pop('mktmaps') - for fqme, mktdict in mkttable.items(): - - mkt = MktPair.from_msg(mktdict) - assert mkt.fqme == fqme - - # sanity check asset refs from those (presumably) - # loaded asset set above. - src: Asset = cache.assets[mkt.src.name] - assert src == mkt.src - dst: Asset - if not (dst := cache.assets.get(mkt.dst.name)): - dne.append(mkt.dst.name) - continue - else: - assert dst.name == mkt.dst.name - - cache.mktmaps[fqme] = mkt - - log.warning( - f'These `MktPair.dst: Asset`s DNE says `{mod.name}` ?\n' - f'{pformat(dne)}' + cache = SymbologyCache.from_dict( + data, + mod=mod, + fp=cachefile, ) # TODO: use a real profiling sys.. From 494b3faa9b0ab18cfe126803ae847da0a0a16ebd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Jul 2023 14:11:49 -0400 Subject: [PATCH 042/116] Formalize transaction normalizer func signature Since each broker backend generally needs to define a specific field-name-schema to determine the exact instantiation arguments to `Transaction`, we generally need each backend to define an endpoint function to conduct this transaction from an input `dict[str, Any]` received either directly from provided ledger APIs or from previously stored `.accounting._ledger` saved trades ledger TOML files. To accomplish this we now require backends to declare a new routine: ```python def norm_trade( tid: str, # the uuid for the transaction txdict: dict, # the input record-dict # a table of mkt-symbols to backend # struct objects which define the (meta-data) for the backend specific # venue's symbology pairs: dict[str, Struct], ) -> Transaction: ... 
``` which implements that record conversion (at least for trades) and can thus be used in `TransactionLedger.iter_txns()` which requires "some code" to implement the loading from a serialization format (aka the input `dict` record) to our local `Transaction` struct, normally also using a `Pair`-struct table defined (and maybe previously cached) by the specific backend such our (normalization layer's) `MktPair`'s fields can be set. For the case of our `.clearing._paper_engine` we def the routine to simply extract the exact same fields from the TOML ledger records that we previously had written (to it) and define it in that module. Also, we always pass `pairs=SymbologyCache.pairs: dict[str, Struct]` on norm trade calls such that offline ledger and accounting processing clients can use a previously cached symbology set without having to necessarily start the async-actor runtime to query the actual backend API if the data has already been saved locally on the system B) Other related: - always passthrough kwargs in overridden `.to_dict()` method. - only do fqme related trade record field name rewrites/names when operating on a paper ledger; normally a backend's records don't contain these. - fix `pendulum.DateTime` type annots. - just deliver `Transaction`s from `.iter_txns()` --- piker/accounting/_ledger.py | 132 ++++++++++++++++++++++-------------- 1 file changed, 80 insertions(+), 52 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index acfe97f7b..b6f0ee744 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -32,9 +32,7 @@ ) from pendulum import ( - datetime, - # DateTime, - # parse, + DateTime, ) import tomli_w # for fast ledger writing @@ -70,7 +68,7 @@ class Transaction(Struct, frozen=True): size: float price: float cost: float # commisions or other additional costs - dt: datetime + dt: DateTime # the "event type" in terms of "market events" see # https://github.com/pikers/piker/issues/510 for where we're @@ -80,7 +78,7 @@ class Transaction(Struct, frozen=True): # TODO: we can drop this right since we # can instead expect the backend to provide this # via the `MktPair`? - expiry: datetime | None = None + expiry: DateTime | None = None # (optional) key-id defined by the broker-service backend which # ensures the instrument-symbol market key for this record is unique @@ -89,8 +87,11 @@ class Transaction(Struct, frozen=True): # service. bs_mktid: str | int | None = None - def to_dict(self) -> dict: - dct: dict[str, Any] = super().to_dict() + def to_dict( + self, + **kwargs, + ) -> dict: + dct: dict[str, Any] = super().to_dict(**kwargs) # ensure we use a pendulum formatted # ISO style str here!@ @@ -107,6 +108,8 @@ class TransactionLedger(UserDict): outside. ''' + # NOTE: see `open_trade_ledger()` for defaults, this should + # never be constructed manually! def __init__( self, ledger_dict: dict, @@ -138,6 +141,10 @@ def __repr__(self) -> str: @property def symcache(self) -> SymbologyCache: + ''' + Read-only ref to backend's ``SymbologyCache``. 
+ + ''' return self._symcache def update_from_t( @@ -157,7 +164,7 @@ def iter_txns( symcache: SymbologyCache | None = None, ) -> Generator[ - tuple[str, Transaction], + Transaction, None, None, ]: @@ -175,9 +182,13 @@ def iter_txns( norm_trade = self.mod.norm_trade # datetime-sort and pack into txs - for txdict in self.tx_sort(self.data.values()): - txn = norm_trade(txdict) - yield txn.tid, txn + for tid, txdict in self.tx_sort(self.data.items()): + txn: Transaction = norm_trade( + tid, + txdict, + pairs=symcache.pairs, + ) + yield txn def to_txns( self, @@ -188,18 +199,19 @@ def to_txns( Return entire output from ``.iter_txns()`` in a ``dict``. ''' - return dict(self.iter_txns(symcache=symcache)) + return { + t.tid: t for t in self.iter_txns(symcache=symcache) + } - def write_config( - self, - - ) -> None: + def write_config(self) -> None: ''' Render the self.data ledger dict to its TOML file form. ALWAYS order datetime sorted! ''' + is_paper: bool = self.account == 'paper' + towrite: dict[str, Any] = {} for tid, txdict in self.tx_sort(self.data.copy()): # write blank-str expiry for non-expiring assets @@ -210,42 +222,44 @@ def write_config( txdict['expiry'] = '' # (maybe) re-write old acro-key - fqme: str = txdict.pop('fqsn', None) or txdict['fqme'] - bs_mktid: str | None = txdict.get('bs_mktid') - - if ( - fqme not in self._symcache.mktmaps - or ( - # also try to see if this is maybe a paper - # engine ledger in which case the bs_mktid - # should be the fqme as well! - self.account == 'paper' - and bs_mktid - and fqme != bs_mktid - ) - ): - # always take any (paper) bs_mktid if defined and - # in the backend's cache key set. - if bs_mktid in self._symcache.mktmaps: - fqme: str = bs_mktid - else: - best_fqme: str = list(self._symcache.search(fqme))[0] - log.warning( - f'Could not find FQME: {fqme} in qualified set?\n' - f'Qualifying and expanding {fqme} -> {best_fqme}' + if is_paper: + fqme: str = txdict.pop('fqsn', None) or txdict['fqme'] + bs_mktid: str | None = txdict.get('bs_mktid') + + if ( + fqme not in self._symcache.mktmaps + or ( + # also try to see if this is maybe a paper + # engine ledger in which case the bs_mktid + # should be the fqme as well! + bs_mktid + and fqme != bs_mktid ) - fqme = best_fqme - - if ( - self.account == 'paper' - and bs_mktid - and bs_mktid != fqme - ): - # in paper account case always make sure both the - # fqme and bs_mktid are fully qualified.. - txdict['bs_mktid'] = fqme + ): + # always take any (paper) bs_mktid if defined and + # in the backend's cache key set. + if bs_mktid in self._symcache.mktmaps: + fqme: str = bs_mktid + else: + best_fqme: str = list(self._symcache.search(fqme))[0] + log.warning( + f'Could not find FQME: {fqme} in qualified set?\n' + f'Qualifying and expanding {fqme} -> {best_fqme}' + ) + fqme = best_fqme + + if ( + bs_mktid + and bs_mktid != fqme + ): + # in paper account case always make sure both the + # fqme and bs_mktid are fully qualified.. + txdict['bs_mktid'] = fqme + + # in paper ledgers always write the latest + # symbology key field: an FQME. 
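+                    # eg. (illustrative keys only) a legacy record,
+                    #   {'fqsn': 'btcusdt.spot.binance', ...}
+                    # is re-written on exit as,
+                    #   {'fqme': 'btcusdt.spot.binance', ...}
+                    # since any old `fqsn` entry is popped above in
+                    # favor of the `fqme` field written here.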
+ txdict['fqme'] = fqme - txdict['fqme'] = fqme towrite[tid] = txdict with self.file_path.open(mode='wb') as fp: @@ -256,6 +270,9 @@ def load_ledger( brokername: str, acctid: str, + # for testing or manual load from file + dirpath: Path | None = None, + ) -> tuple[dict, Path]: ''' Load a ledger (TOML) file from user's config directory: @@ -270,7 +287,11 @@ def load_ledger( except ModuleNotFoundError: import tomli as tomllib - ldir: Path = config._config_dir / 'accounting' / 'ledgers' + ldir: Path = ( + dirpath + or + config._config_dir / 'accounting' / 'ledgers' + ) if not ldir.is_dir(): ldir.mkdir() @@ -303,6 +324,9 @@ def open_trade_ledger( tx_sort: Callable = iter_by_dt, rewrite: bool = False, + # for testing or manual load from file + _fp: Path | None = None, + ) -> Generator[TransactionLedger, None, None]: ''' Indempotently create and read in a trade log file from the @@ -316,7 +340,11 @@ def open_trade_ledger( from ..brokers import get_brokermod mod: ModuleType = get_brokermod(broker) - ledger_dict, fpath = load_ledger(broker, account) + ledger_dict, fpath = load_ledger( + broker, + account, + dirpath=_fp, + ) cpy = ledger_dict.copy() # XXX NOTE: if not provided presume we are being called from From 803f4a63542d8ebd1adbf5afa592401792900f61 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Jul 2023 17:54:13 -0400 Subject: [PATCH 043/116] Add first account cumsize test; known to fail Bo --- tests/_inputs/account.binance.paper.toml | 0 tests/_inputs/trades_binance_paper.toml | 1358 ++++++++++++++++++++++ tests/test_accounting.py | 60 +- 3 files changed, 1417 insertions(+), 1 deletion(-) create mode 100644 tests/_inputs/account.binance.paper.toml create mode 100644 tests/_inputs/trades_binance_paper.toml diff --git a/tests/_inputs/account.binance.paper.toml b/tests/_inputs/account.binance.paper.toml new file mode 100644 index 000000000..e69de29bb diff --git a/tests/_inputs/trades_binance_paper.toml b/tests/_inputs/trades_binance_paper.toml new file mode 100644 index 000000000..cac81ceb7 --- /dev/null +++ b/tests/_inputs/trades_binance_paper.toml @@ -0,0 +1,1358 @@ +[da16f46a-f964-470a-9813-120a86ac26da] +tid = "da16f46a-f964-470a-9813-120a86ac26da" +size = 0.04 +price = 28374.85 +cost = 0 +dt = "2023-03-29T21:14:52.646556+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[067395c6-85c2-434c-93a2-c31489a09aec] +tid = "067395c6-85c2-434c-93a2-c31489a09aec" +size = 0.04 +price = 28372.65 +cost = 0 +dt = "2023-03-29T21:16:01.511379+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[63a189b6-bf51-478f-b0b8-6c886a086a74] +tid = "63a189b6-bf51-478f-b0b8-6c886a086a74" +size = 0.04 +price = 28369.51 +cost = 0 +dt = "2023-03-29T21:45:00.458763+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[162b7db2-96b0-47fb-9089-e8c03b12722b] +tid = "162b7db2-96b0-47fb-9089-e8c03b12722b" +size = 0.04 +price = 28354.82 +cost = 0 +dt = "2023-03-29T22:46:28.238625+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[599c6fbb-6d14-4001-9692-2bee1f231df1] +tid = "599c6fbb-6d14-4001-9692-2bee1f231df1" +size = 0.04 +price = 28033.54 +cost = 0 +dt = "2023-04-04T15:39:31.976073+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[8a271269-af69-4459-a171-fa3b7f477f4f] +tid = "8a271269-af69-4459-a171-fa3b7f477f4f" +size = -0.04 +price = 28179.99 +cost = 0 +dt = "2023-04-04T17:52:02.527852+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + 
+[f1050a08-1a4d-4892-b6b7-8d510b385718] +tid = "f1050a08-1a4d-4892-b6b7-8d510b385718" +size = -0.06 +price = 28175.87 +cost = 0 +dt = "2023-04-04T17:56:05.186869+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[0647fe16-24e3-40cd-8250-4cfc887923b6] +tid = "0647fe16-24e3-40cd-8250-4cfc887923b6" +size = -0.06 +price = 28176.34 +cost = 0 +dt = "2023-04-04T17:56:08.396636+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[f11501e5-3cab-40e3-9b4c-152fa80df5bb] +tid = "f11501e5-3cab-40e3-9b4c-152fa80df5bb" +size = 0.04 +price = 28184.0 +cost = 0 +dt = "2023-04-04T17:56:27.584848+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[9f695411-58f6-43da-9938-53091045f022] +tid = "9f695411-58f6-43da-9938-53091045f022" +size = -0.04 +price = 28223.13 +cost = 0 +dt = "2023-04-04T18:02:12.212835+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[3f3b78c3-1119-44ef-ba6f-4716d06c14ad] +tid = "3f3b78c3-1119-44ef-ba6f-4716d06c14ad" +size = 0.04 +price = 28223.2 +cost = 0 +dt = "2023-04-04T18:04:20.074750+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[34929c46-5638-4ccd-b6fd-e92101d3cb3d] +tid = "34929c46-5638-4ccd-b6fd-e92101d3cb3d" +size = -0.04 +price = 28125.43 +cost = 0 +dt = "2023-04-04T18:30:05.102752+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[8ba9d494-160a-4aa3-b5cb-b9fd042a0cfc] +tid = "8ba9d494-160a-4aa3-b5cb-b9fd042a0cfc" +size = 0.04 +price = 28128.5 +cost = 0 +dt = "2023-04-04T18:30:10.218444+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[e396489a-97b8-425e-a54a-b0280f2a5536] +tid = "e396489a-97b8-425e-a54a-b0280f2a5536" +size = -0.04 +price = 28141.53 +cost = 0 +dt = "2023-04-04T18:30:37.234646+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[90f2ddad-8499-4120-ad18-ce5f57ab932c] +tid = "90f2ddad-8499-4120-ad18-ce5f57ab932c" +size = 0.04 +price = 28071.37 +cost = 0 +dt = "2023-04-05T15:46:59.901329+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[f6d3256c-cb46-4327-84fb-7e110e79d26e] +tid = "f6d3256c-cb46-4327-84fb-7e110e79d26e" +size = 0.04 +price = 28081.6 +cost = 0 +dt = "2023-04-05T15:47:44.639526+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[14cb023c-26cd-4cc7-9130-d12e62ee37bd] +tid = "14cb023c-26cd-4cc7-9130-d12e62ee37bd" +size = 0.04 +price = 28082.76 +cost = 0 +dt = "2023-04-05T15:48:00.508908+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[42bc2ffd-e35c-4065-8324-712c3ae46cfa] +tid = "42bc2ffd-e35c-4065-8324-712c3ae46cfa" +size = 0.04 +price = 28083.48 +cost = 0 +dt = "2023-04-05T15:48:02.809801+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b3d9edda-bf38-4abf-88d2-a29074043d52] +tid = "b3d9edda-bf38-4abf-88d2-a29074043d52" +size = 0.06 +price = 27961.14 +cost = 0 +dt = "2023-04-08T23:22:15.773099+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[394f7457-02d4-452f-89a3-03568bc88b0e] +tid = "394f7457-02d4-452f-89a3-03568bc88b0e" +size = -0.04 +price = 28049.99 +cost = 0 +dt = "2023-04-09T05:12:41.854395+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[f7a13de5-ce7f-473d-bb18-53de25f2b856] +tid = "f7a13de5-ce7f-473d-bb18-53de25f2b856" +size = 0.04 +price = 30171.94 +cost = 0 +dt = "2023-04-11T01:54:58.572656+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + 
+[315ce8c7-dd6f-4757-ad36-290403de9670] +tid = "315ce8c7-dd6f-4757-ad36-290403de9670" +size = 0.04 +price = 30184.59 +cost = 0 +dt = "2023-04-11T01:55:02.615316+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[da085e7d-410b-460c-92ff-9b4638cdd284] +tid = "da085e7d-410b-460c-92ff-9b4638cdd284" +size = 0.05 +price = 30133.42 +cost = 0 +dt = "2023-04-11T01:56:52.587397+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[dfa8b778-3dfb-43af-be84-ec889456d729] +tid = "dfa8b778-3dfb-43af-be84-ec889456d729" +size = 0.06 +price = 30171.41 +cost = 0 +dt = "2023-04-11T01:57:07.416095+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[5687f9f1-c08f-4b42-8d1e-326718776510] +tid = "5687f9f1-c08f-4b42-8d1e-326718776510" +size = 0.07 +price = 30148.46 +cost = 0 +dt = "2023-04-11T01:57:15.266236+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[35b8618f-2574-43e2-a33d-c702914729a3] +tid = "35b8618f-2574-43e2-a33d-c702914729a3" +size = 0.08 +price = 30177.95 +cost = 0 +dt = "2023-04-11T01:57:48.189842+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[3cbeb052-a9b9-45d4-acba-144b6e578973] +tid = "3cbeb052-a9b9-45d4-acba-144b6e578973" +size = 0.08 +price = 30203.06 +cost = 0 +dt = "2023-04-11T01:58:30.402790+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[4ce70b6f-0024-4fb3-9ca5-e83f1b6b0cee] +tid = "4ce70b6f-0024-4fb3-9ca5-e83f1b6b0cee" +size = -0.11 +price = 30164.65 +cost = 0 +dt = "2023-04-11T02:02:26.019689+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[88b8c744-4638-4d79-9053-93c0a288745a] +tid = "88b8c744-4638-4d79-9053-93c0a288745a" +size = -0.11 +price = 30168.04 +cost = 0 +dt = "2023-04-11T02:02:29.355363+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[0bc425f9-295d-4802-bc07-719ee7037940] +tid = "0bc425f9-295d-4802-bc07-719ee7037940" +size = -0.11 +price = 30174.1 +cost = 0 +dt = "2023-04-11T02:02:37.043544+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[943ee11a-1bd2-4214-87aa-c5b56ba2bda9] +tid = "943ee11a-1bd2-4214-87aa-c5b56ba2bda9" +size = -0.11 +price = 30168.0 +cost = 0 +dt = "2023-04-11T02:02:47.892358+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[c8ebfbde-04ac-4d01-b8c6-a839c56f4ba0] +tid = "c8ebfbde-04ac-4d01-b8c6-a839c56f4ba0" +size = 0.03 +price = 30306.56 +cost = 0 +dt = "2023-04-11T02:11:00.453998+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[2de0a5c2-f549-430b-818b-3914007f7ec1] +tid = "2de0a5c2-f549-430b-818b-3914007f7ec1" +size = 0.03 +price = 30283.54 +cost = 0 +dt = "2023-04-11T02:11:42.311480+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[bb38a7b6-f8da-42c6-b164-57c4009412b1] +tid = "bb38a7b6-f8da-42c6-b164-57c4009412b1" +size = 0.03 +price = 30253.75 +cost = 0 +dt = "2023-04-11T02:12:42.932552+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b1e2cbae-0dc9-4673-b3a7-319c261b8a81] +tid = "b1e2cbae-0dc9-4673-b3a7-319c261b8a81" +size = 0.03 +price = 30240.19 +cost = 0 +dt = "2023-04-11T02:12:47.395634+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b096e756-3248-4cc6-99cd-40485336d195] +tid = "b096e756-3248-4cc6-99cd-40485336d195" +size = 0.52 +price = 1905.72 +cost = 0 +dt = "2023-04-12T20:35:18.281216+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + 
+[4344de3d-0cc2-4f1b-8ac4-2bd8cc4bf125] +tid = "4344de3d-0cc2-4f1b-8ac4-2bd8cc4bf125" +size = 0.52 +price = 1905.48 +cost = 0 +dt = "2023-04-12T20:36:00.443722+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[0140e3a8-e18d-434a-b6f4-9b3efb70196b] +tid = "0140e3a8-e18d-434a-b6f4-9b3efb70196b" +size = 0.53 +price = 1900.5 +cost = 0 +dt = "2023-04-13T02:16:37.227338+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[f9a19a35-b2cc-4932-88da-91083b6aca6d] +tid = "f9a19a35-b2cc-4932-88da-91083b6aca6d" +size = -0.53 +price = 2007.36 +cost = 0 +dt = "2023-04-13T17:32:46.535299+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[2d863698-7859-414c-87c2-42d1ded745f4] +tid = "2d863698-7859-414c-87c2-42d1ded745f4" +size = -0.53 +price = 2014.0 +cost = 0 +dt = "2023-04-13T17:46:04.990016+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[931efb4a-caa5-4ea3-b54d-ce90154be855] +tid = "931efb4a-caa5-4ea3-b54d-ce90154be855" +size = -0.05 +price = 30746.78 +cost = 0 +dt = "2023-04-14T02:28:53.163397+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[6e51dfc3-3cd1-4226-b70a-1e98f795382d] +tid = "6e51dfc3-3cd1-4226-b70a-1e98f795382d" +size = -0.05 +price = 30746.78 +cost = 0 +dt = "2023-04-14T02:28:54.017720+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[8d14a805-fa96-4d93-83c2-7f94d4dffb05] +tid = "8d14a805-fa96-4d93-83c2-7f94d4dffb05" +size = -0.05 +price = 30746.78 +cost = 0 +dt = "2023-04-14T02:28:55.241175+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[be7a7034-d81c-47f5-b135-2b3bb3295d56] +tid = "be7a7034-d81c-47f5-b135-2b3bb3295d56" +size = -0.05 +price = 30746.78 +cost = 0 +dt = "2023-04-14T02:28:55.718444+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[01f95d36-65c5-446e-8699-4b898ce2720b] +tid = "01f95d36-65c5-446e-8699-4b898ce2720b" +size = 0.48 +price = 2101.72 +cost = 0 +dt = "2023-04-14T03:08:02.545597+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[8f58d467-57e0-4600-8778-288adea396db] +tid = "8f58d467-57e0-4600-8778-288adea396db" +size = 0.47 +price = 2115.66 +cost = 0 +dt = "2023-04-14T06:21:36.461680+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[291efdb8-2693-4e08-a18a-b146fbf5b50a] +tid = "291efdb8-2693-4e08-a18a-b146fbf5b50a" +size = -0.49 +price = 2115.99 +cost = 0 +dt = "2023-04-14T06:23:13.773674+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[c14497b4-416a-4856-a0e8-378ae1df262e] +tid = "c14497b4-416a-4856-a0e8-378ae1df262e" +size = -0.03 +price = 30789.24 +cost = 0 +dt = "2023-04-14T10:36:42.537868+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[9ca0a022-62db-4a31-b0b2-42f80e7ae3b1] +tid = "9ca0a022-62db-4a31-b0b2-42f80e7ae3b1" +size = -0.03 +price = 30789.24 +cost = 0 +dt = "2023-04-14T10:36:43.120034+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b1357083-fb1f-4edf-84ab-64121fda61d2] +tid = "b1357083-fb1f-4edf-84ab-64121fda61d2" +size = -0.03 +price = 30789.24 +cost = 0 +dt = "2023-04-14T10:36:44.133592+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[6afc03f4-9aa5-4578-b381-a4ba132fbd54] +tid = "6afc03f4-9aa5-4578-b381-a4ba132fbd54" +size = -0.03 +price = 30789.24 +cost = 0 +dt = "2023-04-14T10:36:44.684864+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + 
+[fef31da2-3986-4924-8ef1-c7b544dddcfa] +tid = "fef31da2-3986-4924-8ef1-c7b544dddcfa" +size = -0.03 +price = 30789.24 +cost = 0 +dt = "2023-04-14T10:36:45.458349+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[03aa594f-e3e1-485a-9db1-7732bda7af17] +tid = "03aa594f-e3e1-485a-9db1-7732bda7af17" +size = 0.03 +price = 30228.86 +cost = 0 +dt = "2023-04-14T17:11:02.406640+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b690b879-4b06-4e8f-ac7b-c60317de92d1] +tid = "b690b879-4b06-4e8f-ac7b-c60317de92d1" +size = 0.03 +price = 30500.95 +cost = 0 +dt = "2023-04-16T19:23:36.271889+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[23fa4b76-cd54-4ec9-8e2e-5aabb3391699] +tid = "23fa4b76-cd54-4ec9-8e2e-5aabb3391699" +size = -0.03 +price = 30500.27 +cost = 0 +dt = "2023-04-16T19:39:09.810500+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[f8870880-e269-4ccb-b398-9e124b525b63] +tid = "f8870880-e269-4ccb-b398-9e124b525b63" +size = 10988.0 +price = 0.09101 +cost = 0 +dt = "2023-04-16T21:53:42.143462+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[88f46b6a-bd55-4629-99c6-5394d17c4988] +tid = "88f46b6a-bd55-4629-99c6-5394d17c4988" +size = 10975.0 +price = 0.0911 +cost = 0 +dt = "2023-04-16T21:53:59.399611+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[4888fb24-9519-47f3-b286-7c7befc047aa] +tid = "4888fb24-9519-47f3-b286-7c7befc047aa" +size = 10997.0 +price = 0.09092 +cost = 0 +dt = "2023-04-16T21:57:31.874822+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[62f897d8-3c1a-4747-9081-42a6c5f555d6] +tid = "62f897d8-3c1a-4747-9081-42a6c5f555d6" +size = 0.03303 +price = 30278.85 +cost = 0 +dt = "2023-04-16T23:43:13.080764+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[367eb5b6-aa1f-4146-a374-8295e026c066] +tid = "367eb5b6-aa1f-4146-a374-8295e026c066" +size = 0.03303 +price = 30278.85 +cost = 0 +dt = "2023-04-16T23:43:22.070691+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[196a4635-d15e-41af-87e7-1919ee5eff2a] +tid = "196a4635-d15e-41af-87e7-1919ee5eff2a" +size = 0.03303 +price = 30276.37 +cost = 0 +dt = "2023-04-16T23:44:02.957427+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[f91497d5-c1c3-41a8-ab29-88c61b2397f8] +tid = "f91497d5-c1c3-41a8-ab29-88c61b2397f8" +size = -0.03303 +price = 30284.58 +cost = 0 +dt = "2023-04-16T23:47:21.460961+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[9cc32645-56ef-44b2-b668-1904fe427012] +tid = "9cc32645-56ef-44b2-b668-1904fe427012" +size = -0.03303 +price = 30284.58 +cost = 0 +dt = "2023-04-16T23:47:22.236516+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[19d22aab-a867-447c-b076-a33f3d6f5c20] +tid = "19d22aab-a867-447c-b076-a33f3d6f5c20" +size = 11074.0 +price = 0.09023 +cost = 0 +dt = "2023-04-16T23:49:02.920133+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[f3fc9aa5-dfad-40d3-a047-a9e4b33ae02d] +tid = "f3fc9aa5-dfad-40d3-a047-a9e4b33ae02d" +size = 0.03399 +price = 29414.08 +cost = 0 +dt = "2023-04-17T14:01:12.830647+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[28f5bd07-dd16-4432-8112-5cca53e5d4ef] +tid = "28f5bd07-dd16-4432-8112-5cca53e5d4ef" +size = -0.03351 +price = 29416.59 +cost = 0 +dt = "2023-04-17T14:01:32.258945+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme 
= "btcusdt.spot.binance" + +[9b742c45-13dd-47a1-be56-15286013e91a] +tid = "9b742c45-13dd-47a1-be56-15286013e91a" +size = -0.03403 +price = 29393.09 +cost = 0 +dt = "2023-04-17T14:03:18.843163+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[cbc47a9c-2f75-497c-8d2b-d04f37a0996d] +tid = "cbc47a9c-2f75-497c-8d2b-d04f37a0996d" +size = 0.00052 +price = 29468.3 +cost = 0 +dt = "2023-04-17T16:57:48.705291+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[7838e4bc-d47c-4e55-ae38-213190f48831] +tid = "7838e4bc-d47c-4e55-ae38-213190f48831" +size = 0.03391 +price = 29491.86 +cost = 0 +dt = "2023-04-17T17:36:21.179290+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[96a35aac-e62e-402f-bbda-730015f20e40] +tid = "96a35aac-e62e-402f-bbda-730015f20e40" +size = -0.03391 +price = 29491.85 +cost = 0 +dt = "2023-04-17T17:36:35.450594+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[13fd7381-a3d1-48fb-9e6d-46c35e183b29] +tid = "13fd7381-a3d1-48fb-9e6d-46c35e183b29" +size = 0.03388 +price = 29473.94 +cost = 0 +dt = "2023-04-17T17:47:20.546197+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[33cbd9ab-57b4-49d0-a073-f2d3e60e4b1b] +tid = "33cbd9ab-57b4-49d0-a073-f2d3e60e4b1b" +size = 0.03394 +price = 29464.89 +cost = 0 +dt = "2023-04-17T18:02:35.629646+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[60fbb741-5d42-4014-b401-baf625c41291] +tid = "60fbb741-5d42-4014-b401-baf625c41291" +size = -0.03393 +price = 29471.26 +cost = 0 +dt = "2023-04-17T18:03:18.073573+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[e8bcf4af-cad5-44e4-a334-a4115fafd1ae] +tid = "e8bcf4af-cad5-44e4-a334-a4115fafd1ae" +size = -0.03389 +price = 29490.2 +cost = 0 +dt = "2023-04-17T18:10:22.962523+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[ef9013c2-4528-4737-a1fc-68ccbe476ef5] +tid = "ef9013c2-4528-4737-a1fc-68ccbe476ef5" +size = 0.03378 +price = 29498.73 +cost = 0 +dt = "2023-04-17T18:21:23.274963+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[9f517173-e1f2-4da6-b04b-976ebdbcca66] +tid = "9f517173-e1f2-4da6-b04b-976ebdbcca66" +size = 0.03389 +price = 29498.73 +cost = 0 +dt = "2023-04-17T18:21:30.536279+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[51a83ca9-989b-42b0-9d05-7dfc5f290123] +tid = "51a83ca9-989b-42b0-9d05-7dfc5f290123" +size = -0.0339 +price = 29498.72 +cost = 0 +dt = "2023-04-17T18:21:34.442889+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[6a8dcf05-ec15-4b44-9ea2-36adeb2027c9] +tid = "6a8dcf05-ec15-4b44-9ea2-36adeb2027c9" +size = -0.03377 +price = 29498.72 +cost = 0 +dt = "2023-04-17T18:21:37.542549+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[afc93855-f789-4f70-a590-5f942aa58f59] +tid = "afc93855-f789-4f70-a590-5f942aa58f59" +size = 0.03389 +price = 29502.21 +cost = 0 +dt = "2023-04-17T18:21:44.781152+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[d22f8a1d-d042-44ee-bf1d-b49bba80bca3] +tid = "d22f8a1d-d042-44ee-bf1d-b49bba80bca3" +size = 0.03388 +price = 29502.21 +cost = 0 +dt = "2023-04-17T18:21:45.308326+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[9c9bea6f-6ff0-4f6b-9931-ccca407ef4d1] +tid = "9c9bea6f-6ff0-4f6b-9931-ccca407ef4d1" +size = -0.06776 +price = 29482.8 +cost = 0 +dt = "2023-04-17T18:24:38.219727+00:00" 
+bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[dcf7f751-3ad0-45ae-ab89-f00da370c58c] +tid = "dcf7f751-3ad0-45ae-ab89-f00da370c58c" +size = 0.2035 +price = 29482.81 +cost = 0 +dt = "2023-04-17T18:24:48.920473+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[51747be8-52ef-4715-9430-2685e3ea649b] +tid = "51747be8-52ef-4715-9430-2685e3ea649b" +size = -11012.0 +price = 0.09189 +cost = 0 +dt = "2023-04-17T20:20:36.964602+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[1cbc3e34-70e4-44ff-9046-b80791af771e] +tid = "1cbc3e34-70e4-44ff-9046-b80791af771e" +size = -11012.0 +price = 0.09188 +cost = 0 +dt = "2023-04-17T20:21:04.670850+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[bbb5b48b-574b-425b-973e-5c0e120aba65] +tid = "bbb5b48b-574b-425b-973e-5c0e120aba65" +size = -0.03392 +price = 29446.0 +cost = 0 +dt = "2023-04-17T20:29:38.301074+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[14128cb8-c808-4146-9876-f0ec7c1d6063] +tid = "14128cb8-c808-4146-9876-f0ec7c1d6063" +size = 10683.0 +price = 0.09353 +cost = 0 +dt = "2023-04-18T13:58:26.651674+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[5b4e7150-511f-4fdf-9424-cbb37cc98725] +tid = "5b4e7150-511f-4fdf-9424-cbb37cc98725" +size = 0.4825 +price = 2072.18 +cost = 0 +dt = "2023-04-18T18:09:37.059497+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[7352ff1f-d410-494f-9e8e-b4d26851b78e] +tid = "7352ff1f-d410-494f-9e8e-b4d26851b78e" +size = 0.4785 +price = 2089.95 +cost = 0 +dt = "2023-04-18T22:27:53.598061+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[7b4a9235-14e3-427f-9d83-d3d0c543b0a2] +tid = "7b4a9235-14e3-427f-9d83-d3d0c543b0a2" +size = 0.4791 +price = 2087.32 +cost = 0 +dt = "2023-04-19T05:08:18.480609+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[60f39579-6cd0-45a5-9672-941e24a0c70f] +tid = "60f39579-6cd0-45a5-9672-941e24a0c70f" +size = 0.5044 +price = 1981.1 +cost = 0 +dt = "2023-04-19T11:57:32.481811+00:00" +bs_mktid = "ethusdt.spot.binance" +fqme = "ethusdt.spot.binance" + +[ed9a0b53-881f-4374-be7a-dc8a91244d91] +tid = "ed9a0b53-881f-4374-be7a-dc8a91244d91" +size = 11047.0 +price = 0.09052 +cost = 0 +dt = "2023-04-19T16:54:37.057912+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[460f4925-fa21-499f-ac3b-f9e404139740] +tid = "460f4925-fa21-499f-ac3b-f9e404139740" +size = 11275.0 +price = 0.0887 +cost = 0 +dt = "2023-04-19T18:27:46.280795+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[aca0f432-fa18-4162-8f1e-e3d35b51d6c2] +tid = "aca0f432-fa18-4162-8f1e-e3d35b51d6c2" +size = 0.03534 +price = 28250.01 +cost = 0 +dt = "2023-04-21T01:29:55.826173+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[0f6e41f2-e465-477a-ab18-bf204f3da59f] +tid = "0f6e41f2-e465-477a-ab18-bf204f3da59f" +size = 12127.0 +price = 0.08243 +cost = 0 +dt = "2023-04-21T17:18:36.014107+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[e047e65e-645c-49d3-9172-ced13210f7c1] +tid = "e047e65e-645c-49d3-9172-ced13210f7c1" +size = 121227.0 +price = 0.08247 +cost = 0 +dt = "2023-04-21T17:18:46.985975+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[a78b8bd2-38fa-4396-8974-7a50acb74a64] +tid = "a78b8bd2-38fa-4396-8974-7a50acb74a64" +size = 121183.0 +price = 0.08248 +cost = 0 +dt = 
"2023-04-21T17:18:57.689595+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[f9bf8d41-58e0-4682-a76a-65854e60de7e] +tid = "f9bf8d41-58e0-4682-a76a-65854e60de7e" +size = 126984.0 +price = 0.07803 +cost = 0 +dt = "2023-04-21T20:15:14.565627+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[2de50a4b-2530-46fa-a500-3c20633d3696] +tid = "2de50a4b-2530-46fa-a500-3c20633d3696" +size = 126630.0 +price = 0.0785 +cost = 0 +dt = "2023-04-21T23:19:52.333093+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[b90abfe1-ec43-4a9c-8051-daedb4726fc9] +tid = "b90abfe1-ec43-4a9c-8051-daedb4726fc9" +size = 126614.0 +price = 0.07851 +cost = 0 +dt = "2023-04-21T23:19:54.462741+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[b8c53b69-d952-4535-9ddf-8f2837f0f72e] +tid = "b8c53b69-d952-4535-9ddf-8f2837f0f72e" +size = 127000.0 +price = 0.07851 +cost = 0 +dt = "2023-04-21T23:19:57.180902+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[610348ca-ec3c-46f7-89b9-9d5aa551d4d4] +tid = "610348ca-ec3c-46f7-89b9-9d5aa551d4d4" +size = 127065.0 +price = 0.07851 +cost = 0 +dt = "2023-04-21T23:19:59.112348+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[e6fa2a64-055c-4285-a298-762d84660480] +tid = "e6fa2a64-055c-4285-a298-762d84660480" +size = -157308.0 +price = 0.08027 +cost = 0 +dt = "2023-04-22T17:06:37.652613+00:00" +bs_mktid = "dogeusdt.spot.binance" +fqme = "dogeusdt.spot.binance" + +[49b65271-bd84-45e1-a1af-06b698b36cc7] +tid = "49b65271-bd84-45e1-a1af-06b698b36cc7" +size = 0.35262 +price = 28337.54 +cost = 0 +dt = "2023-04-25T22:07:28.093084+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[7bff12ad-e9e9-4dde-b50e-ad03cf08cc6f] +tid = "7bff12ad-e9e9-4dde-b50e-ad03cf08cc6f" +size = 0.34518 +price = 28339.62 +cost = 0 +dt = "2023-04-25T22:07:32.859565+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[fb5c8b78-b2c1-406b-923b-c0efea30ef53] +tid = "fb5c8b78-b2c1-406b-923b-c0efea30ef53" +size = -0.15045 +price = 29478.78 +cost = 0 +dt = "2023-04-27T23:29:04.872409+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[1475ed80-2745-489a-a6fa-42a42e04f195] +tid = "1475ed80-2745-489a-a6fa-42a42e04f195" +size = -0.15045 +price = 29478.78 +cost = 0 +dt = "2023-04-27T23:29:05.489196+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[0c973666-5ae8-432c-9bdf-329126977912] +tid = "0c973666-5ae8-432c-9bdf-329126977912" +size = -0.15045 +price = 29478.78 +cost = 0 +dt = "2023-04-27T23:29:06.611542+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b50f1982-b22e-470c-b282-d4da5ef1eb22] +tid = "b50f1982-b22e-470c-b282-d4da5ef1eb22" +size = -0.15045 +price = 29478.78 +cost = 0 +dt = "2023-04-27T23:29:16.148062+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[0d3c6fb6-bbb4-473f-a122-791d8edc16c7] +tid = "0d3c6fb6-bbb4-473f-a122-791d8edc16c7" +size = -0.05015 +price = 28603.61 +cost = 0 +dt = "2023-05-02T18:31:40.693023+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[ad1481e9-82cc-47ea-b535-af20382cb16b] +tid = "ad1481e9-82cc-47ea-b535-af20382cb16b" +size = -0.05015 +price = 28603.61 +cost = 0 +dt = "2023-05-02T18:31:44.812848+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[cbfae60b-ab1a-44ad-8f15-f01346e883e6] +tid = "cbfae60b-ab1a-44ad-8f15-f01346e883e6" 
+size = -0.05015 +price = 28603.61 +cost = 0 +dt = "2023-05-02T18:31:51.127774+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[66fe7ae9-3486-431e-b15f-d9db16a4867e] +tid = "66fe7ae9-3486-431e-b15f-d9db16a4867e" +size = -0.05015 +price = 28748.06 +cost = 0 +dt = "2023-05-02T19:34:36.341881+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[b890dcb0-f126-4a96-9baf-15506b8e001d] +tid = "b890dcb0-f126-4a96-9baf-15506b8e001d" +size = 0.03712 +price = 26874.4 +cost = 0 +dt = "2023-05-15T01:23:33.891746+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[bfab2bc0-1ed9-44d5-be5b-21db23e20cf1] +tid = "bfab2bc0-1ed9-44d5-be5b-21db23e20cf1" +size = 0.36831 +price = 27143.83 +cost = 0 +dt = "2023-05-17T17:54:09.287428+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[097816ff-eadd-4e8d-a002-9f94def56670] +tid = "097816ff-eadd-4e8d-a002-9f94def56670" +size = -0.09666 +price = 26968.92 +cost = 0 +dt = "2023-05-20T21:03:20.255178+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[1760b767-c08b-4726-81df-85be82629400] +tid = "1760b767-c08b-4726-81df-85be82629400" +size = -0.09666 +price = 26967.18 +cost = 0 +dt = "2023-05-20T21:03:25.320027+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[749dd12b-1039-470d-8025-9780424a4149] +tid = "749dd12b-1039-470d-8025-9780424a4149" +size = -0.06444 +price = 27163.75 +cost = 0 +dt = "2023-05-21T02:01:37.085808+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[3d1d28e2-d929-4a53-9d25-4c77ce37ec87] +tid = "3d1d28e2-d929-4a53-9d25-4c77ce37ec87" +size = -0.06444 +price = 27191.03 +cost = 0 +dt = "2023-05-21T02:07:11.383516+00:00" +bs_mktid = "btcusdt.spot.binance" +fqme = "btcusdt.spot.binance" + +[a2a81c3c-2e85-471f-ba30-3ab81ce59dac] +fqme = "btcusdt.spot.binance" +tid = "a2a81c3c-2e85-471f-ba30-3ab81ce59dac" +size = 0.3689 +price = 27103.27 +cost = 0 +dt = "2023-06-01T01:05:47.320495+00:00" +bs_mktid = "btcusdt.spot.binance" + +[8cfdcc1f-b5e9-49a8-8aaa-2aec7b12962b] +fqme = "btcusdt.usdtm.perp.binance" +tid = "8cfdcc1f-b5e9-49a8-8aaa-2aec7b12962b" +size = 0.01 +price = 30398.3 +cost = 0 +dt = "2023-06-26T16:08:41.048560+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[10e737a8-79d6-4f30-a153-fed0d2df8854] +fqme = "btcusdt.usdtm.perp.binance" +tid = "10e737a8-79d6-4f30-a153-fed0d2df8854" +size = 0.01 +price = 30387.6 +cost = 0 +dt = "2023-06-26T16:09:39.252859+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[51a19796-e838-475a-8afb-df80b7de13ba] +fqme = "btcusdt.usdtm.perp.binance" +tid = "51a19796-e838-475a-8afb-df80b7de13ba" +size = 0.01 +price = 30176.9 +cost = 0 +dt = "2023-06-26T19:35:38.105972+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[6fdd9378-5de4-423e-9de1-ad478813be45] +fqme = "btcusdt.usdtm.perp.binance" +tid = "6fdd9378-5de4-423e-9de1-ad478813be45" +size = 0.01 +price = 30190.2 +cost = 0 +dt = "2023-06-26T19:44:01.091792+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[3e867581-8387-472b-b51f-bd84ff902ee4] +fqme = "btcusdt.usdtm.perp.binance" +tid = "3e867581-8387-472b-b51f-bd84ff902ee4" +size = -0.04 +price = 30214.8 +cost = 0 +dt = "2023-06-26T19:44:50.363897+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[6ab96a1d-84ba-4869-affb-e002a98e3f1b] +fqme = "btcusdt.usdtm.perp.binance" +tid = "6ab96a1d-84ba-4869-affb-e002a98e3f1b" +size = 0.033 +price = 30174.3 +cost = 0 +dt = "2023-06-26T20:43:28.156740+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + 
+[1dcb3d09-ea2f-4bd0-997d-1481d9a32f0f] +fqme = "btcusdt.usdtm.perp.binance" +tid = "1dcb3d09-ea2f-4bd0-997d-1481d9a32f0f" +size = 0.033 +price = 30221.7 +cost = 0 +dt = "2023-06-26T23:43:43.779369+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[9cf964bc-e405-4183-848a-e67a1e6a7f03] +fqme = "btcusdt.usdtm.perp.binance" +tid = "9cf964bc-e405-4183-848a-e67a1e6a7f03" +size = 0.033 +price = 30226.3 +cost = 0 +dt = "2023-06-27T00:06:19.118681+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[badad1df-264e-496c-896d-9495a9cfa20f] +fqme = "btcusdt.usdtm.perp.binance" +tid = "badad1df-264e-496c-896d-9495a9cfa20f" +size = 0.033 +price = 30634.4 +cost = 0 +dt = "2023-06-27T13:04:16.259238+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[0ad35909-2cd9-4d40-b6ee-60c8649a6906] +fqme = "btcusdt.usdtm.perp.binance" +tid = "0ad35909-2cd9-4d40-b6ee-60c8649a6906" +size = 0.033 +price = 30684.7 +cost = 0 +dt = "2023-06-27T18:03:59.570289+00:00" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[70ae2b8d-3e1e-4f5f-9f80-e4d6ed57eef0] +fqme = "btcusdt.usdtm.perp.binance" +tid = "70ae2b8d-3e1e-4f5f-9f80-e4d6ed57eef0" +size = 0.033 +price = 30307.5 +cost = 0 +dt = "2023-07-10T17:27:55.549956+00:00" +etype = "clear" +expiry = "" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[5f629852-21b6-4a2a-93f5-6fa3c72aa9d9] +fqme = "btcusdt.usdtm.perp.binance" +tid = "5f629852-21b6-4a2a-93f5-6fa3c72aa9d9" +size = -0.033 +price = 30702.1 +cost = 0 +dt = "2023-07-12T11:24:13.718896+00:00" +etype = "clear" +expiry = "" +bs_mktid = "btcusdt.usdtm.perp.binance" + +[364da263-d8c4-4ac9-a534-c4fd000ae6c6] +fqme = "xrpusdt.spot.binance" +tid = "364da263-d8c4-4ac9-a534-c4fd000ae6c6" +size = 1229.0 +price = 0.8133 +cost = 0 +dt = "2023-07-13T22:46:45.549324+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[a98f8f05-6b90-4f46-bee2-c053e1046e86] +fqme = "xrpusdt.spot.binance" +tid = "a98f8f05-6b90-4f46-bee2-c053e1046e86" +size = -1229.0 +price = 0.8124 +cost = 0 +dt = "2023-07-13T22:47:20.693038+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[b4672728-df3a-49d1-a9f5-7e23bf431bf9] +fqme = "xrpusdt.spot.binance" +tid = "b4672728-df3a-49d1-a9f5-7e23bf431bf9" +size = -1229.0 +price = 0.8122 +cost = 0 +dt = "2023-07-13T22:47:31.410600+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[5cee625d-c2ad-461c-b1b4-373483a4e9a0] +fqme = "xrpusdt.spot.binance" +tid = "5cee625d-c2ad-461c-b1b4-373483a4e9a0" +size = -1229.0 +price = 0.8121 +cost = 0 +dt = "2023-07-13T22:47:34.330811+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[0be50de2-2098-48fb-bea2-48fb8a33e660] +fqme = "xrpusdt.spot.binance" +tid = "0be50de2-2098-48fb-bea2-48fb8a33e660" +size = -1229.0 +price = 0.812 +cost = 0 +dt = "2023-07-13T22:47:35.276521+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[a5679bfd-05ee-4654-aef8-6b7161efe742] +fqme = "xrpusdt.spot.binance" +tid = "a5679bfd-05ee-4654-aef8-6b7161efe742" +size = -1229.0 +price = 0.8121 +cost = 0 +dt = "2023-07-13T22:47:35.903756+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[f620ca65-f878-46ba-b900-aaabae9fd3ce] +fqme = "xrpusdt.spot.binance" +tid = "f620ca65-f878-46ba-b900-aaabae9fd3ce" +size = -1229.0 +price = 0.8121 +cost = 0 +dt = "2023-07-13T22:47:36.578845+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[38f7e712-06fa-4551-b235-875d440d369f] +fqme = "xrpusdt.spot.binance" +tid = "38f7e712-06fa-4551-b235-875d440d369f" +size = -1229.0 
+price = 0.8122 +cost = 0 +dt = "2023-07-13T22:47:37.144106+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[465fd8b0-f303-4bb9-9af3-1162599c9b53] +fqme = "xrpusdt.spot.binance" +tid = "465fd8b0-f303-4bb9-9af3-1162599c9b53" +size = -1229.0 +price = 0.8135 +cost = 0 +dt = "2023-07-13T22:48:10.259883+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[0fe0529a-e8f5-412e-9759-ffd75e9c9ba4] +fqme = "xrpusdt.spot.binance" +tid = "0fe0529a-e8f5-412e-9759-ffd75e9c9ba4" +size = 1434.0 +price = 0.784 +cost = 0 +dt = "2023-07-14T12:40:42.517435+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[d4483864-e99c-436a-a60d-9be443ce5bcc] +fqme = "xrpusdt.spot.binance" +tid = "d4483864-e99c-436a-a60d-9be443ce5bcc" +size = 1434.0 +price = 0.783 +cost = 0 +dt = "2023-07-14T12:41:55.922400+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[741ced15-a467-4e00-8dab-3b3046aff47c] +fqme = "xrpusdt.spot.binance" +tid = "741ced15-a467-4e00-8dab-3b3046aff47c" +size = 1434.0 +price = 0.7837 +cost = 0 +dt = "2023-07-14T12:42:32.102345+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[6441275d-0acd-4637-ace9-0d9e0ac1df13] +fqme = "xrpusdt.spot.binance" +tid = "6441275d-0acd-4637-ace9-0d9e0ac1df13" +size = 1434.0 +price = 0.784 +cost = 0 +dt = "2023-07-14T12:44:38.961470+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[1c4dc51d-7235-4024-aef4-153b5e42c828] +fqme = "xrpusdt.spot.binance" +tid = "1c4dc51d-7235-4024-aef4-153b5e42c828" +size = 1434.0 +price = 0.7854 +cost = 0 +dt = "2023-07-14T12:45:06.612540+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[d5045d2b-14e4-4054-a032-2bbece37f751] +fqme = "xrpusdt.spot.binance" +tid = "d5045d2b-14e4-4054-a032-2bbece37f751" +size = 1434.0 +price = 0.7852 +cost = 0 +dt = "2023-07-14T12:45:12.194136+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" + +[a8a05b59-83f9-4941-89bb-96f7fc94887b] +fqme = "xrpusdt.spot.binance" +tid = "a8a05b59-83f9-4941-89bb-96f7fc94887b" +size = -1.0 +price = 0.7824 +cost = 0 +dt = "2023-07-14T12:46:21.073564+00:00" +etype = "clear" +expiry = "" +bs_mktid = "xrpusdt.spot.binance" diff --git a/tests/test_accounting.py b/tests/test_accounting.py index dacffb98b..e7773a7a9 100644 --- a/tests/test_accounting.py +++ b/tests/test_accounting.py @@ -8,7 +8,15 @@ from pathlib import Path from piker import config -from piker.accounting import load_account +from piker.accounting import ( + Account, + calc, + Position, + TransactionLedger, + open_trade_ledger, + load_account, + load_account_from_ledger, +) def test_root_conf_networking_section( @@ -34,3 +42,53 @@ def test_account_file_default_empty( assert not conf assert path.parent.is_dir() assert path.parent.name == 'accounting' + + +def test_paper_ledger_position_calcs(): + broker: str = 'binance' + acnt_name: str = 'paper' + + accounts_path: Path = config.repodir() / 'tests' / '_inputs' + + ldr: TransactionLedger + with ( + open_trade_ledger( + broker, + acnt_name, + allow_from_sync_code=True, + + _fp=accounts_path, + ) as ldr, + + # open `polars` acnt dfs Bo + calc.open_ledger_dfs( + broker, + acnt_name, + ledger=ldr, + + _fp=accounts_path, + + ) as (dfs, ledger), + + ): + acnt: Account = load_account_from_ledger( + broker, + acnt_name, + ledger=ldr, + _fp=accounts_path, + ) + + # do manual checks on expected pos calcs based on input + # ledger B) + + # xrpusdt should have a net-zero size + xrp: str = 
'xrpusdt.spot.binance' + pos: Position = acnt.pps[xrp] + + # XXX: turns out our old dict-style-processing + # gets this wrong i think due to dt-sorting.. + # lcum: float = pos.cumsize + + df = dfs[xrp] + assert df['cumsize'][-1] == 0 + assert pos.cumsize == 0 From b9fec091ca413ea22f9022dc5953cf3ac47153b0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Jul 2023 20:17:24 -0400 Subject: [PATCH 044/116] Allow accounting (file) dir override via kwarg For testing (and probably hacking) it's handy to be able to point somewhere other than the default user-config dir for a ledger or account file to test offline processing apis from `.accounting` subsystems. For now it's a private optional named-arg: `_fp: Path` and it's obviously passed down into the `load_account()` config getter. Note that in the non-paper account case `Account.update_from_ledger()` will use the ledger's `.symcache` and `.iter_txns()` method to acquire actual txn-structs to compute positions. --- piker/accounting/_pos.py | 72 ++++++++++++++++++++++++---------------- 1 file changed, 44 insertions(+), 28 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 53c321974..061a5b445 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -51,7 +51,7 @@ ) from .calc import ( ppu, - iter_by_dt, + # iter_by_dt, ) from .. import config from ..clearing._messages import ( @@ -145,6 +145,7 @@ def clearsdict(self) -> dict[str, dict]: def iter_by_type( self, etype: str, + ) -> Iterator[dict | Transaction]: ''' Iterate the internally managed ``._events: dict`` table in datetime-sorted order. ''' # sort on the expected datetime field # for event in iter_by_dt( + for event in sorted( self._events.values(), - key=lambda entry: - getattr(entry, 'dt', None) - or entry.get('dt'), + key=lambda entry: entry.dt ): + # if event.etype == etype: match event: case ( - { 'etype': _etype} | + {'etype': _etype} | Transaction(etype=str(_etype)) ): assert _etype == etype @@ -465,7 +466,7 @@ def brokername(self) -> str: def update_from_ledger( self, - ledger: TransactionLedger, + ledger: TransactionLedger | dict[str, Transaction], cost_scalar: float = 2, symcache: SymbologyCache | None = None, @@ -478,31 +479,34 @@ ''' if ( not isinstance(ledger, TransactionLedger) - and symcache is None ): - raise RuntimeError( - 'No ledger provided!\n' - 'We can not determine the `MktPair`s without a symcache..\n' - 'Please provide `symcache: SymbologyCache` when ' - 'processing NEW positions!' + if symcache is None: + raise RuntimeError( + 'No ledger provided!\n' + 'We can not determine the `MktPair`s without a symcache..\n' + 'Please provide `symcache: SymbologyCache` when ' + 'processing NEW positions!' + ) + itertxns = sorted( + ledger.values(), + key=lambda t: t.dt, ) + else: + itertxns = ledger.iter_txns() + symcache = ledger.symcache pps = self.pps updated: dict[str, Position] = {} # lifo update all pps from records, ensuring # we compute the PPU and size sorted in time! - for tid, txn in ledger.iter_txns(): - # for t in sorted( - # trans.values(), - # key=lambda t: t.dt, - # ): + for txn in itertxns: fqme: str = txn.fqme bs_mktid: str = txn.bs_mktid # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. - mkt: MktPair = ledger._symcache.mktmaps[fqme] + mkt: MktPair = symcache.mktmaps[fqme] if not (pos := pps.get(bs_mktid)): @@ -522,12 +526,13 @@ # update clearing acnt!
# NOTE: likely you'll see repeats of the same - # ``Transaction`` passed in here if/when you are restarting - # a ``brokerd.ib`` where the API will re-report trades from - # the current session, so we need to make sure we don't - # "double count" these in pp calculations; - # `Position.add_clear()` stores txs in a `dict[tid, - # tx]` which should always ensure this is true B) + # ``Transaction`` passed in here if/when you are + # restarting a ``brokerd.ib`` where the API will + # re-report trades from the current session, so we need + # to make sure we don't "double count" these in pp + # calculations; `Position.add_clear()` stores txs in + # a `._events: dict[tid, tx]` which should always + # ensure this is true! pos.add_clear(txn) updated[txn.bs_mktid] = pos @@ -679,6 +684,8 @@ def load_account( brokername: str, acctid: str, + dirpath: Path | None = None, + ) -> tuple[dict, Path]: ''' Load a accounting (with positions) file from @@ -692,7 +699,7 @@ legacy_fn: str = f'pps.{brokername}.{acctid}.toml' fn: str = f'account.{brokername}.{acctid}.toml' - dirpath: Path = config._config_dir / 'accounting' + dirpath: Path = dirpath or (config._config_dir / 'accounting') if not dirpath.is_dir(): dirpath.mkdir() @@ -743,6 +750,9 @@ def open_account( acctid: str, write_on_exit: bool = False, + # for testing or manual load from file + _fp: Path | None = None, + ) -> Generator[Account, None, None]: ''' Read out broker-specific position entries from @@ -751,7 +761,11 @@ conf: dict conf_path: Path - conf, conf_path = load_account(brokername, acctid) + conf, conf_path = load_account( + brokername, + acctid, + dirpath=_fp, + ) if brokername in conf: log.warning( @@ -909,6 +923,7 @@ def load_account_from_ledger( filter_by_ids: dict[str, list[str]] | None = None, ledger: TransactionLedger | None = None, + **kwargs, ) -> Account: ''' @@ -919,9 +934,10 @@ acnt: Account - with open_pps( + with open_account( brokername, acctname, + **kwargs, ) as acnt: if ledger is not None: acnt.update_from_ledger(ledger) From a5821ae9b1da3f855984df8b9e9abbb1658b129e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 14 Jul 2023 20:29:05 -0400 Subject: [PATCH 045/116] Passthrough all **kwargs to `Struct.to_dict()` Since for symcache-ing we don't want to write non-member fields, we need to allow passing the appropriate flag; i hate frickin inheritance XD --- piker/accounting/_mktinfo.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 2c180f251..ed0beb94e 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -132,8 +132,11 @@ class Asset(Struct, frozen=True): # `None` is not toml-compat so drop info # if no extra data added..
- def to_dict(self) -> dict: - dct = super().to_dict() + def to_dict( + self, + **kwargs, + ) -> dict: + dct = super().to_dict(**kwargs) if (info := dct.pop('info', None)): dct['info'] = info @@ -145,7 +148,7 @@ def from_msg( cls, msg: dict[str, Any], ) -> Asset: - return Asset( + return cls( tx_tick=Decimal(str(msg.pop('tx_tick'))), info=msg.pop('info', None), **msg, @@ -318,10 +321,13 @@ class MktPair(Struct, frozen=True): def __str__(self) -> str: return self.fqme - def to_dict(self) -> dict: - d = super().to_dict() - d['src'] = self.src.to_dict() - d['dst'] = self.dst.to_dict() + def to_dict( + self, + **kwargs, + ) -> dict: + d = super().to_dict(**kwargs) + d['src'] = self.src.to_dict(**kwargs) + d['dst'] = self.dst.to_dict(**kwargs) d['price_tick'] = str(self.price_tick) d['size_tick'] = str(self.size_tick) From 3d20490ee55247663a02a129d379eb91860274e4 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 15 Jul 2023 15:43:09 -0400 Subject: [PATCH 046/116] Move cum-calcs to `open_ledger_dfs()`, always parse `str`->`Datetime` Previously the cum-size calc(s) were in the `disect` CLI but they're better stuffed into the backing df converter. Also, ensure that whenever a `dt` field is type-detected as a `str` we parse it to `DateTime`. --- piker/accounting/calc.py | 197 ++++++++++++++++++++++----------------- piker/accounting/cli.py | 17 ++-- 2 files changed, 117 insertions(+), 97 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index 90ad1cf74..db7771401 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -20,6 +20,7 @@ ''' from __future__ import annotations +from collections.abc import ValuesView from contextlib import contextmanager as cm from math import copysign from typing import ( @@ -39,6 +40,7 @@ if TYPE_CHECKING: from ._ledger import ( Transaction, + TransactionLedger, ) def ppu( @@ -72,18 +74,23 @@ def ppu( where `cost_basis` for the current step is simply the price * size of the most recent clearing transaction. + ----- + TODO: get the BEP computed and working similarly! + ----- + the equivalent "break even price" or bep at each new clear + event step conversely only changes on a "position exiting + clear" which **decreases** the cumulative dst asset size: + + bep[-1] = ppu[-1] - (cum_pnl[-1] / cumsize[-1]) + ''' asize_h: list[float] = [] # historical accumulative size ppu_h: list[float] = [] # historical price-per-unit ledger: dict[str, dict] = {} - # entry: dict[str, Any] | Transaction t: Transaction for t in clears: - # tid: str = entry['tid'] - # clear_size = entry['size'] clear_size: float = t.size - # clear_price: str | float = entry['price'] clear_price: str | float = t.price is_clear: bool = not isinstance(clear_price, str) @@ -152,7 +159,6 @@ def ppu( clear_price * abs(clear_size) + # transaction cost - # accum_sign * cost_scalar * entry['cost'] accum_sign * cost_scalar * t.cost ) @@ -187,13 +193,13 @@ def ppu( asize_h.append(accum_size) # ledger[t.tid] = { - # 'tx': t, + # 'txn': t, ledger[t.tid] = t.to_dict() | { 'ppu': ppu, 'cumsize': accum_size, 'sign_change': sign_change, - # TODO: cumpnl, bep + # TODO: cum_pnl, bep } final_ppu = ppu_h[-1] if ppu_h else 0 @@ -212,6 +218,7 @@ def ppu( def iter_by_dt( records: ( dict[str, dict[str, Any]] + | ValuesView[dict] # eg. `Position._events.values()` | list[dict] | list[Transaction] # XXX preferred!
), @@ -220,7 +227,7 @@ def iter_by_dt( # so if you know that the record stats show some field # is more common then others, stick it at the top B) parsers: dict[str, Callable | None] = { - 'dt': None, # parity case + 'dt': parse, # parity case 'datetime': parse, # datetime-str 'time': from_timestamp, # float epoch }, @@ -259,8 +266,13 @@ def dyn_parse_to_dt( # the `parsers` table above (when NOT using # `.get()`), otherwise pass through the value and # sort on it directly - parser: Callable | None = parsers[k] - return parser(v) if (parser is not None) else v + if ( + not isinstance(v, DateTime) + and (parser := parsers.get(k)) + ): + return parser(v) + else: + return v else: # XXX: should never get here.. @@ -271,6 +283,7 @@ def dyn_parse_to_dt( records, key=key or dyn_parse_to_dt, ): + # NOTE the type sig above; either pairs or txns B) yield entry @@ -331,7 +344,14 @@ def open_ledger_dfs( brokername: str, acctname: str, -) -> dict[str, pl.DataFrame]: + ledger: TransactionLedger | None = None, + + **kwargs, + +) -> tuple[ + dict[str, pl.DataFrame], + TransactionLedger, +]: ''' Open a ledger of trade records (presumably from some broker backend), normalize the records into `Transactions` via the @@ -341,86 +361,89 @@ def open_ledger_dfs( ''' from ._ledger import ( open_trade_ledger, - # Transaction, - TransactionLedger, ) - ledger: TransactionLedger - import time - now = time.time() - with ( - open_trade_ledger( - brokername, - acctname, - rewrite=True, - allow_from_sync_code=True, - ) as ledger, - ): - if not ledger: - raise ValueError(f'No ledger for {acctname}@{brokername} exists?') - - print(f'LEDGER LOAD TIME: {time.time() - now}') - # process raw TOML ledger into txns using the - # appropriate backend normalizer. - # cache: AssetsInfo = get_symcache( - # brokername, - # allow_reload=True, - # ) - - txns: dict[str, Transaction] - if acctname != 'paper': - txns = ledger.mod.norm_trade_records(ledger) - else: - txns = ledger.to_txns() - - ldf = pl.DataFrame( - list(txn.to_dict() for txn in txns.values()), - # schema=[ - # ('tid', str), - # ('fqme', str), - # ('dt', str), - # ('size', pl.Float64), - # ('price', pl.Float64), - # ('cost', pl.Float64), - # ('expiry', str), - # ('bs_mktid', str), - # ], - ).sort('dt').select([ - pl.col('fqme'), - pl.col('dt').str.to_datetime(), - # pl.col('expiry').dt.datetime(), - pl.col('bs_mktid'), - pl.col('size'), - pl.col('price'), - ]) - - # filter out to the columns matching values filter passed - # as input. - # if filter_by_ids: - # for col, vals in filter_by_ids.items(): - # str_vals = set(map(str, vals)) - # pred: pl.Expr = pl.col(col).eq(str_vals.pop()) - # for val in str_vals: - # pred |= pl.col(col).eq(val) + if not ledger: + import time + from tractor._debug import open_crash_handler + now = time.time() - # fdf = df.filter(pred) + with ( + open_crash_handler(), - # bs_mktid: str = fdf[0]['bs_mktid'] - # pos: Position = acnt.pps[bs_mktid] + open_trade_ledger( + brokername, + acctname, + rewrite=True, + allow_from_sync_code=True, - # ppt = df.groupby('fqme').agg([ - # # TODO: ppu and bep !! - # pl.cumsum('size').alias('cumsum'), - # ]) + # proxied through from caller + **kwargs, - dfs: dict[str, pl.DataFrame] = ldf.partition_by( - 'fqme', - as_dict=True, - ) + ) as ledger, + ): + if not ledger: + raise ValueError(f'No ledger for {acctname}@{brokername} exists?') + + print(f'LEDGER LOAD TIME: {time.time() - now}') + + # process raw TOML ledger into txns using the + # appropriate backend normalizer. 
+ # cache: AssetsInfo = get_symcache( + # brokername, + # allow_reload=True, + # ) + + txns: dict[str, Transaction] = ledger.to_txns() + ldf = pl.DataFrame( + list(txn.to_dict() for txn in txns.values()), + # schema=[ + # ('tid', str), + # ('fqme', str), + # ('dt', str), + # ('size', pl.Float64), + # ('price', pl.Float64), + # ('cost', pl.Float64), + # ('expiry', str), + # ('bs_mktid', str), + # ], + # ).sort('dt').select([ + ).sort('dt').with_columns([ + # pl.col('fqme'), + pl.col('dt').str.to_datetime(), + # pl.col('expiry').dt.datetime(), + # pl.col('bs_mktid'), + # pl.col('size'), + # pl.col('price'), + ]) + + # filter out to the columns matching values filter passed + # as input. + # if filter_by_ids: + # for col, vals in filter_by_ids.items(): + # str_vals = set(map(str, vals)) + # pred: pl.Expr = pl.col(col).eq(str_vals.pop()) + # for val in str_vals: + # pred |= pl.col(col).eq(val) + + # fdf = df.filter(pred) + + # bs_mktid: str = fdf[0]['bs_mktid'] + # pos: Position = acnt.pps[bs_mktid] + + # TODO: not sure if this is even possible but.. + # ppt = df.groupby('fqme').agg([ + # # TODO: ppu and bep !! + # pl.cumsum('size').alias('cumsum'), + # ]) + dfs: dict[str, pl.DataFrame] = ldf.partition_by( + 'fqme', + as_dict=True, + ) + for key in dfs: + df = dfs[key] + dfs[key] = df.with_columns([ + pl.cumsum('size').alias('cumsize'), + ]) - # for fqme, ppt in act.items(): - # ppt.with_columns - # # TODO: ppu and bep !! - # pl.cumsum('size').alias('cumsum'), - # ]) - yield dfs + yield dfs, ledger diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 30c147044..9dc36b4de 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -37,8 +37,8 @@ from ..brokers._daemon import broker_init from ._ledger import ( load_ledger, + TransactionLedger, # open_trade_ledger, - # TransactionLedger, ) from .calc import ( open_ledger_dfs, @@ -263,20 +263,17 @@ def disect( # ledger dfs groupby-partitioned by fqme dfs: dict[str, pl.DataFrame] + # actual ledger ref filled in with all txns + ldgr: TransactionLedger + with open_ledger_dfs( brokername, account, - ) as dfs: - - for key in dfs: - df = dfs[key] - dfs[key] = df.with_columns([ - pl.cumsum('size').alias('cumsum'), - ]) + ) as (dfs, ldgr): - ppt = dfs[fqme] + # look up specific frame for fqme-selected asset + df = dfs[fqme] assert not df.is_empty() - assert not ppt.is_empty() # TODO: we REALLY need a better console REPL for this # kinda thing.. 
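Aside: a minimal standalone sketch of the cum-size flow this patch lands in `open_ledger_dfs()` — toy txn values taken from the test-input ledger above, but mirroring the exact `sort('dt')` / `str.to_datetime()` / `partition_by()` / `pl.cumsum()` calls used in the diff (illustration only, not piker API):

import polars as pl

# toy ledger rows; in piker these come from `ledger.to_txns()`
ldf = pl.DataFrame({
    'fqme': ['xrpusdt.spot.binance'] * 2,
    'dt': [
        '2023-07-13T22:46:45.549324+00:00',
        '2023-07-13T22:47:20.693038+00:00',
    ],
    'size': [1229.0, -1229.0],
    'price': [0.8133, 0.8124],
}).sort('dt').with_columns([
    # always parse a `str` dt field -> `Datetime` column
    pl.col('dt').str.to_datetime(),
])

# partition into per-fqme frames, each with a running position size
dfs: dict[str, pl.DataFrame] = ldf.partition_by('fqme', as_dict=True)
for key in dfs:
    dfs[key] = dfs[key].with_columns([
        pl.cumsum('size').alias('cumsize'),
    ])

# the buy/sell round-trip nets out to a flat (zero-size) position
assert dfs['xrpusdt.spot.binance']['cumsize'][-1] == 0
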
From d794afcb5c86d5886db8d0b540830bc5eddc2dee Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 15 Jul 2023 17:35:41 -0400 Subject: [PATCH 047/116] Adjust `.clearing._paper_engine.norm_trade()` to new sig Always expect a `tid: str` and `pairs: dict[str, Struct]` for aiding with txn struct packing B) --- piker/clearing/_paper_engine.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 861cd389a..03b69c0e6 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -281,7 +281,8 @@ async def fake_fill( ) # transmit pp msg to ems - pp = self.acnt.pps[bs_mktid] + pp: Position = self.acnt.pps[bs_mktid] + pp_msg = BrokerdPosition( broker=self.broker, account='paper', @@ -327,6 +328,7 @@ async def simulate_fills( # this stream may eventually contain multiple symbols async for quotes in quote_stream: for sym, quote in quotes.items(): + # print(sym) for tick in iterticks( quote, # dark order price filter(s) @@ -617,6 +619,7 @@ async def open_trade_dialog( pos: Position token: str # f'{symbol}.{self.broker}' for token, pos in acnt.pps.items(): + pp_msgs.append(BrokerdPosition( broker=broker, account='paper', @@ -735,7 +738,9 @@ async def open_paperboi( def norm_trade( + tid: str, txdict: dict, + pairs: dict[str, Struct], ) -> Transaction: from pendulum import ( From a5821ae9b1da3f855984df8b9e9abbb1658b129e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 15 Jul 2023 17:37:56 -0400 Subject: [PATCH 048/116] binance: spec `.ns_path: str` on pair structs Provides for fully isolated symbology caching in a flat TOML table without special case handling B) Also explicitly define `.bs_mktid: str` which is now used by the symcache to key-index the backend specific pair set and thus provides for round-trip marshalling without special knowledge of any backend schema. --- piker/brokers/binance/__init__.py | 7 ++++++- piker/brokers/binance/broker.py | 4 +++- piker/brokers/binance/venues.py | 15 +++++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/piker/brokers/binance/__init__.py b/piker/brokers/binance/__init__.py index fb5844685..da63a67ca 100644 --- a/piker/brokers/binance/__init__.py +++ b/piker/brokers/binance/__init__.py @@ -33,11 +33,16 @@ from .broker import ( open_trade_dialog, ) - +from .venues import ( + SpotPair, + FutesPair, +) __all__ = [ 'get_client', 'get_mkt_info', + 'SpotPair', + 'FutesPair', 'open_trade_dialog', 'open_history_client', 'open_symbol_search', diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 04042f446..3bc665d6e 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -36,7 +36,6 @@ from piker.accounting import ( Asset, - # MktPair, ) from piker.brokers._util import ( get_logger, @@ -232,6 +231,9 @@ async def open_trade_dialog( account_name: str = 'usdtm' use_testnet: bool = False + # TODO: if/when we add .accounting support we need to + # do a open_symcache() call.. though maybe we can hide + # this in a new async version of open_account()?
async with open_cached_client('binance') as client: subconf: dict = client.conf[venue_name] use_testnet = subconf.get('use_testnet', False) diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index 08e1f050c..3510d6f64 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -84,6 +84,7 @@ def get_api_eps(venue: MarketType) -> tuple[str, str]: class Pair(Struct, frozen=True, kw_only=True): + symbol: str status: str orderTypes: list[str] @@ -117,6 +118,10 @@ def size_tick(self) -> Decimal: def bs_fqme(self) -> str: return self.symbol + @property + def bs_mktid(self) -> str: + return f'{self.symbol}.{self.venue}' + class SpotPair(Pair, frozen=True): @@ -137,6 +142,13 @@ class SpotPair(Pair, frozen=True): allowedSelfTradePreventionModes: list[str] permissions: list[str] + # NOTE: see `.data._symcache.SymbologyCache.load()` for why + ns_path: str = 'piker.brokers.binance:SpotPair' + + @property + def venue(self) -> str: + return 'SPOT' + @property def bs_fqme(self) -> str: return f'{self.symbol}.SPOT' @@ -173,6 +185,9 @@ class FutesPair(Pair): underlyingSubType: list[str] # ['PoW'], underlyingType: str # 'COIN' + # NOTE: see `.data._symcache.SymbologyCache.load()` for why + ns_path: str = 'piker.brokers.binance:FutesPair' + # NOTE: for compat with spot pairs and `MktPair.src: Asset` # processing.. @property From 4c5507301e34e7849d479504f21a6b74287fc826 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 16 Jul 2023 18:20:15 -0400 Subject: [PATCH 049/116] kraken: be symcache compatible! This was more involved than expected but on the bright side, is going to help drive a more general `Account` update/processing/loading API providing for all the high-level txn update methods needed for any backend to generically update the participant's account *state* via an input ledger/txn set B) Key changes to enable `SymbologyCache` compat: - adjust `Client` pairs / assets lookup tables to include a duplicate keying of all assets and "asset pairs" using the (chitty) default key set that kraken ships which is NOT the `.altname` nor `.wsname` keys; the "default ReST response keys" i guess? - `._AssetPairs` and `._Assets` are *these ^* rest-key sets delivered verbatim from the endpoint responses, - `._pairs` and `._assets` the equivalent value-sets keyed by piker style FQME-looking keys (now provided via the new `.kraken.symbols.Pair.bs_fqme: str` and the delivered `'altname'` field (for assets) respectively). - re-implement `.get_assets()` and `.get_mkt_pairs()` to appropriately delegate to internal methods and these new (multi-keyed) tables to deliver the cacheable set of symbology info. - adjust `.feed.get_mkt_info()` to handle parsing of both fqme-style and wtv(-the-shit-stupid) kraken key set a caller passes via a key-matches-first-table-style-scan after pre-processing the input `fqme: str`; also do the `Asset` lookups from the new `Pair.bs_dst/src_asset: str` fields which should always map correctly to an internal asset entry delivered by `Client.get_assets()`. Dirty impl deatz: - add new `.kraken.symbols` and move the newly refined `Pair` there. - add `.kraken.ledger` and move in the factored out ledger processing routines.
- also move out what was the `has_pp()` and large chunk of nested-ish looking acnt-position verification logic blocks into a new `verify_balances()` B) --- piker/brokers/kraken/__init__.py | 14 +- piker/brokers/kraken/api.py | 294 +++++++++++++++--------------- piker/brokers/kraken/broker.py | 296 ++++++++++--------------------- piker/brokers/kraken/feed.py | 36 ++-- piker/brokers/kraken/ledger.py | 252 ++++++++++++++++++++++++++ piker/brokers/kraken/symbols.py | 114 ++++++++++++ 6 files changed, 640 insertions(+), 366 deletions(-) create mode 100644 piker/brokers/kraken/ledger.py create mode 100644 piker/brokers/kraken/symbols.py diff --git a/piker/brokers/kraken/__init__.py b/piker/brokers/kraken/__init__.py index 8ec19bcfa..783406a48 100644 --- a/piker/brokers/kraken/__init__.py +++ b/piker/brokers/kraken/__init__.py @@ -25,17 +25,27 @@ wrapping around ``ib_insync``. ''' +from .symbols import Pair # for symcache +# required by `.brokers` from .api import ( get_client, ) from .feed import ( + # required by `.accounting`, `.data` get_mkt_info, - open_history_client, + + # required by `.data` open_symbol_search, stream_quotes, + open_history_client, ) from .broker import ( + # required by `.clearing` open_trade_dialog, +) +from .ledger import ( + # required by `.accounting` + norm_trade, norm_trade_records, ) __all__ = [ 'get_client', 'get_mkt_info', + 'Pair', 'open_trade_dialog', 'open_history_client', 'open_symbol_search', 'stream_quotes', 'norm_trade_records', + 'norm_trade', ] diff --git a/piker/brokers/kraken/api.py b/piker/brokers/kraken/api.py index a82714cf7..b9fb65408 100644 --- a/piker/brokers/kraken/api.py +++ b/piker/brokers/kraken/api.py @@ -15,12 +15,11 @@ # along with this program. If not, see . ''' -Kraken web API wrapping.
+Core (web) API client ''' from contextlib import asynccontextmanager as acm from datetime import datetime -from decimal import Decimal import itertools from typing import ( Any, @@ -28,7 +27,6 @@ ) import time -from bidict import bidict import pendulum import asks from fuzzywuzzy import process as fuzzy @@ -40,11 +38,11 @@ import trio from piker import config -from piker.data.types import Struct from piker.data import def_iohlcv_fields from piker.accounting._mktinfo import ( Asset, digits_to_dec, + dec_digits, ) from piker.brokers._util import ( resproc, @@ -54,6 +52,7 @@ ) from piker.accounting import Transaction from piker.log import get_logger +from .symbols import Pair log = get_logger('piker.brokers.kraken') @@ -105,68 +104,22 @@ class InvalidKey(ValueError): ''' -# https://www.kraken.com/features/api#get-tradable-pairs -class Pair(Struct): - altname: str # alternate pair name - wsname: str # WebSocket pair name (if available) - aclass_base: str # asset class of base component - base: str # asset id of base component - aclass_quote: str # asset class of quote component - quote: str # asset id of quote component - lot: str # volume lot size - - cost_decimals: int - costmin: float - pair_decimals: int # scaling decimal places for pair - lot_decimals: int # scaling decimal places for volume - - # amount to multiply lot volume by to get currency volume - lot_multiplier: float - - # array of leverage amounts available when buying - leverage_buy: list[int] - # array of leverage amounts available when selling - leverage_sell: list[int] - - # fee schedule array in [volume, percent fee] tuples - fees: list[tuple[int, float]] - - # maker fee schedule array in [volume, percent fee] tuples (if on - # maker/taker) - fees_maker: list[tuple[int, float]] - - fee_volume_currency: str # volume discount currency - margin_call: str # margin call level - margin_stop: str # stop-out/liquidation margin level - ordermin: float # minimum order volume for pair - tick_size: float # min price step size - status: str - - short_position_limit: float = 0 - long_position_limit: float = float('inf') - - @property - def price_tick(self) -> Decimal: - return digits_to_dec(self.pair_decimals) - - @property - def size_tick(self) -> Decimal: - return digits_to_dec(self.lot_decimals) - - @property - def bs_fqme(self) -> str: - return f'{self.symbol}.SPOT' - - class Client: # symbol mapping from all names to the altname - _ntable: dict[str, str] = {} + _altnames: dict[str, str] = {} - # 2-way map of symbol names to their "alt names" ffs XD - _altnames: bidict[str, str] = bidict() + # key-ed by kraken's own bs_mktids (like fricking "XXMRZEUR") + # with said keys used directly from EP responses so that ledger + # parsing can be easily accomplished from both trade-event-msgs + # and offline toml files + _Assets: dict[str, Asset] = {} + _AssetPairs: dict[str, Pair] = {} + # key-ed by `Pair.bs_fqme: str`, and thus used for search + # allowing for lookup using piker's own FQME symbology sys. _pairs: dict[str, Pair] = {} + _assets: dict[str, Asset] = {} def __init__( self, @@ -186,15 +139,14 @@ def __init__( self._secret = secret self.conf: dict[str, str] = config - self.assets: dict[str, Asset] = {} @property def pairs(self) -> dict[str, Pair]: + if self._pairs is None: raise RuntimeError( - "Make sure to run `cache_symbols()` on startup!" + "Client didn't run `.get_mkt_pairs()` on startup?!" 
) - # retreive and cache all symbols return self._pairs @@ -254,17 +206,29 @@ async def get_balances( 'Balance', {}, ) - by_bsmktid = resp['result'] - - # TODO: we need to pull out the "asset" decimals - # data and return a `decimal.Decimal` instead here! - # using the underlying Asset - return { - self._altnames[sym].lower(): float(bal) - for sym, bal in by_bsmktid.items() - } + by_bsmktid: dict[str, dict] = resp['result'] + + balances: dict = {} + for respname, bal in by_bsmktid.items(): + asset: Asset = self._Assets[respname] + + # TODO: which KEY should we use? it's used to index + # the `Account.pps: dict` .. + key: str = asset.name.lower() + # TODO: should we just return a `Decimal` here + # or is the rounded version ok? + balances[key] = round( + float(bal), + ndigits=dec_digits(asset.tx_tick) + ) + + return balances - async def get_assets(self) -> dict[str, Asset]: + async def get_assets( + self, + reload: bool = False, + + ) -> dict[str, Asset]: ''' Load and cache all asset infos and pack into our native ``Asset`` struct. ''' - resp = await self._public('Assets', {}) - assets = resp['result'] - - for bs_mktid, info in assets.items(): - altname = self._altnames[bs_mktid] = info['altname'] - aclass: str = info['aclass'] - - self.assets[bs_mktid] = Asset( - name=altname.lower(), - atype=f'crypto_{aclass}', - tx_tick=digits_to_dec(info['decimals']), - info=info, - ) - - return self.assets + if ( + not self._assets + or reload + ): + resp = await self._public('Assets', {}) + assets: dict[str, dict] = resp['result'] + + for bs_mktid, info in assets.items(): + + altname: str = info['altname'] + aclass: str = info['aclass'] + asset = Asset( + name=altname, + atype=f'crypto_{aclass}', + tx_tick=digits_to_dec(info['decimals']), + info=info, + ) + # NOTE: yes we keep 2 sets since kraken insists on + # keeping 3 frickin sets bc apparently they have + # no sane data engineers who all like different + # keys for their fricking symbology sets.. + self._Assets[bs_mktid] = asset + self._assets[altname.lower()] = asset + self._assets[altname] = asset + + # we return the "most native" set merged with our preferred + # naming (which i guess is the "altname" one) since that's + # what the symcache loader will be storing, and we need the + # keys that are easiest to match against in any trade + # records. + return self._Assets | self._assets async def get_trades( self, @@ -377,23 +357,26 @@ async def get_xfers( # 'amount': '0.00300726', 'fee': '0.00001000', 'time': # 1658347714, 'status': 'Success'}]} + if xfers: + import tractor + await tractor.pp() + trans: dict[str, Transaction] = {} for entry in xfers: - # look up the normalized name and asset info - asset_key = entry['asset'] - asset = self.assets[asset_key] - asset_key = self._altnames[asset_key].lower() + asset_key: str = entry['asset'] + asset: Asset = self._Assets[asset_key] + asset_key: str = asset.name.lower() + # asset_key: str = self._altnames[asset_key].lower() # XXX: this is in the asset units (likely) so it isn't # quite the same as a commisions cost necessarily..) + # TODO: also round this based on `Pair` cost precision info? cost = float(entry['fee']) - - fqme = asset_key + '.kraken' + # fqme: str = asset_key + '.kraken' tx = Transaction( - fqme=fqme, - sym=asset, + fqme=asset_key, # this must map to an entry in .assets!
tid=entry['txid'], dt=pendulum.from_timestamp(entry['time']), bs_mktid=f'{asset_key}{src_asset}', @@ -408,6 +391,11 @@ async def get_xfers( # XXX: see note above cost=cost, + + # not a trade but a withdrawal or deposit on the + # asset (chain) system. + etype='transfer', + ) trans[tx.tid] = tx @@ -458,7 +446,7 @@ async def submit_cancel( # txid is a transaction id given by kraken return await self.endpoint('CancelOrder', {"txid": reqid}) - async def pair_info( + async def asset_pairs( self, pair_patt: str | None = None, @@ -470,64 +458,69 @@ https://docs.kraken.com/rest/#tag/Market-Data/operation/getTradableAssetPairs ''' - # get all pairs by default, or filter - # to whatever pattern is provided as input. - pairs: dict[str, str] | None = None - if pair_patt is not None: - pairs = {'pair': pair_patt} + if not self._AssetPairs: + # get all pairs by default, or filter + # to whatever pattern is provided as input. + req_pairs: dict[str, str] | None = None + if pair_patt is not None: + req_pairs = {'pair': pair_patt} + + resp = await self._public( + 'AssetPairs', + req_pairs, + ) + err = resp['error'] + if err: + raise SymbolNotFound(pair_patt) - resp = await self._public( - 'AssetPairs', - pairs, - ) - err = resp['error'] - if err: - raise SymbolNotFound(pair_patt) + # NOTE: we key pairs by our custom defined `.bs_fqme` + # field since we want to offer search over this key + # set, callers should fill out lookup tables for + # kraken's bs_mktid keys to map to these keys! + for key, data in resp['result'].items(): + pair = Pair(respname=key, **data) - pairs: dict[str, Pair] = { + # always cache so we can possibly do faster lookup + self._AssetPairs[key] = pair - key: Pair(**data) - for key, data in resp['result'].items() - } - # always cache so we can possibly do faster lookup - self._pairs.update(pairs) + bs_fqme: str = pair.bs_fqme + self._pairs[bs_fqme] = pair - if pair_patt is not None: - return next(iter(pairs.items()))[1] + # register the piker pair under all monikers, a giant flat + # surjection of all possible (and stupid) kraken names to + # the FQME style piker key. + self._altnames[pair.altname] = bs_fqme + self._altnames[pair.wsname] = bs_fqme - return pairs + if pair_patt is not None: + return next(iter(self._pairs.items()))[1] - async def cache_symbols(self) -> dict: - ''' - Load all market pair info build and cache it for downstream use. + return self._AssetPairs - A ``._ntable: dict[str, str]`` is available for mapping the - websocket pair name-keys and their http endpoint API (smh) - equivalents to the "alternative name" which is generally the one - we actually want to use XD + async def get_mkt_pairs( + self, + reload: bool = False, + ) -> dict: ''' + Load all market pair info build and cache it for downstream + use. - if not self._pairs: - pairs = await self.pair_info() - assert self._pairs == pairs - - # table of all ws and rest keys to their alt-name values. - ntable: dict[str, str] = {} + An ``._altnames: dict[str, str]`` is available for looking + up the piker-native FQME style `Pair.bs_fqme: str` for any + input of the three (yes, it's that idiotic) available + key-sets that kraken frickin offers depending on the API + including the .altname, .wsname and the weird ass default + set they return in rest responses..
- pair: Pair = pairs[rest_key] - altname = pair.altname - wsname = pair.wsname - ntable[altname] = ntable[rest_key] = ntable[wsname] = altname - - # register the pair under all monikers, a giant flat - # surjection of all possible names to each info obj. - self._pairs[altname] = self._pairs[wsname] = pair + ''' + if ( + not self._pairs + or reload + ): + await self.asset_pairs() - self._ntable.update(ntable) - - return self._pairs + return self._AssetPairs async def search_symbols( self, @@ -543,8 +536,8 @@ async def search_symbols( ''' if not len(self._pairs): - await self.cache_symbols() - assert self._pairs, '`Client.cache_symbols()` was never called!?' + await self.get_mkt_pairs() + assert self._pairs, '`Client.get_mkt_pairs()` was never called!?' matches = fuzzy.extractBests( pattern, @@ -632,9 +625,9 @@ async def bars( raise BrokerError(errmsg) @classmethod - def normalize_symbol( + def to_bs_fqme( cls, - ticker: str + pair_str: str ) -> tuple[str, Pair]: ''' Normalize symbol names to to a 3x3 pair from the global @@ -643,7 +636,7 @@ def normalize_symbol( ''' try: - return cls._ntable[ticker] + return cls._altnames[pair_str.upper()] except KeyError as ke: raise SymbolNotFound(f'kraken has no {ke.args[0]}') @@ -655,6 +648,9 @@ async def get_client() -> Client: if conf: client = Client( conf, + + # TODO: don't break these up and just do internal + # conf lookups instead.. name=conf['key_descr'], api_key=conf['api_key'], secret=conf['secret'] @@ -666,6 +662,6 @@ async def get_client() -> Client: # batch requests. async with trio.open_nursery() as nurse: nurse.start_soon(client.get_assets) - await client.cache_symbols() + await client.get_mkt_pairs() yield client diff --git a/piker/brokers/kraken/broker.py b/piker/brokers/kraken/broker.py index 7cb596725..74bd75622 100644 --- a/piker/brokers/kraken/broker.py +++ b/piker/brokers/kraken/broker.py @@ -24,7 +24,6 @@ ) from functools import partial from itertools import count -import math from pprint import pformat import time from typing import ( @@ -35,21 +34,16 @@ ) from bidict import bidict -import pendulum import trio import tractor from piker.accounting import ( Position, - PpTable, + Account, Transaction, TransactionLedger, open_trade_ledger, - open_pps, - get_likely_pair, -) -from piker.accounting._mktinfo import ( - MktPair, + open_account, ) from piker.clearing import( OrderDialogs, @@ -65,18 +59,24 @@ BrokerdPosition, BrokerdStatus, ) +from piker.brokers import ( + open_cached_client, +) +from piker.data import open_symcache from .api import ( log, Client, BrokerError, - get_client, ) from .feed import ( - get_mkt_info, open_autorecon_ws, NoBsWs, stream_messages, ) +from .ledger import ( + norm_trade_records, + verify_balances, +) MsgUnion = Union[ BrokerdCancel, @@ -371,7 +371,8 @@ async def subscribe( def trades2pps( - table: PpTable, + acnt: Account, + ledger: TransactionLedger, acctid: str, new_trans: dict[str, Transaction] = {}, @@ -379,13 +380,14 @@ def trades2pps( ) -> list[BrokerdPosition]: if new_trans: - updated = table.update_from_trans( + updated = acnt.update_from_ledger( new_trans, + symcache=ledger.symcache, ) log.info(f'Updated pps:\n{pformat(updated)}') - pp_entries, closed_pp_objs = table.dump_active() - pp_objs: dict[Union[str, int], Position] = table.pps + pp_entries, closed_pp_objs = acnt.dump_active() + pp_objs: dict[Union[str, int], Position] = acnt.pps pps: dict[int, Position] position_msgs: list[dict] = [] @@ -399,7 +401,7 @@ def trades2pps( # backend suffix prefixed but when # reading accounts from ledgers 
we # don't need it and/or it's prefixed - # in the section table.. we should + # in the section acnt.. we should # just strip this from the message # right since `.broker` is already # included? @@ -416,7 +418,7 @@ # as little as possible. we need to either do # these writes in another actor, or try out `trio`'s # async file IO api? - table.write_config() + acnt.write_config() return position_msgs @@ -427,7 +429,12 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: - async with get_client() as client: + async with ( + # TODO: maybe bind these together and deliver + # a tuple from `.open_cached_client()`? + open_cached_client('kraken') as client, + open_symcache('kraken') as symcache, + ): # make ems flip to paper mode when no creds setup in # `brokers.toml` B0 if not client._api_key: @@ -457,8 +464,8 @@ # - delete the *ABSOLUTE LAST* entry from account's corresponding # trade ledgers file (NOTE this MUST be the last record # delivered from the api ledger), - # - open you ``pps.toml`` and find that same tid and delete it - # from the pp's clears table, + # - open you ``account.kraken.spot.toml`` and find that + # same tid and delete it from the pos's clears table, # - set this flag to `True` # # You should see an update come in after the order mode # update things correctly. simulate_pp_update: bool = False - table: PpTable + acnt: Account ledger: TransactionLedger with ( - open_pps( + open_account( 'kraken', acctid, write_on_exit=True, - ) as table, + ) as acnt, open_trade_ledger( 'kraken', acctid, + symcache=symcache, ) as ledger, ): - # transaction-ify the ledger entries - ledger_trans = await norm_trade_records(ledger) + # TODO: loading ledger entries should all be done + # within a newly implemented `async with open_account() + # as acnt` where `Account.ledger: TransactionLedger` + # can be used to explicitily update and write the + # offline TOML files! + # ------ - ------ + # MOL the init sequence is: + # - get `Account` (with presumed pre-loaded ledger done + # behind the scenes as part of ctx enter). + # - pull new trades from API, update the ledger with + # normalized to `Transaction` entries of those + # records, presumably (and implicitly) update the + # acnt state including expiries, positions, + # transfers..), and finally of course existing + # per-asset balances. + # - validate all pos and balances ensuring there are + # no seemingly noticeable discrepancies? + + # LOAD and transaction-ify the EXISTING LEDGER + ledger_trans: dict[str, Transaction] = await norm_trade_records( + ledger, + client, + ) - if not table.pps: - # NOTE: we can't use this since it first needs - # broker: str input support! - # table.update_from_trans(ledger.to_trans()) - table.update_from_trans(ledger_trans) - table.write_config() + if not acnt.pps: + acnt.update_from_ledger( + ledger_trans, + symcache=ledger.symcache, + ) + acnt.write_config() # TODO: eventually probably only load # as far back as it seems is not deliverd in the # most recent 50 trades and assume that by ordering we - # already have those records in the ledger. - tids2trades = await client.get_trades() + # already have those records in the ledger?
+ tids2trades: dict[str, dict] = await client.get_trades() ledger.update(tids2trades) if tids2trades: ledger.write_config() - api_trans = await norm_trade_records(tids2trades) + api_trans: dict[str, Transaction] = await norm_trade_records( + tids2trades, + client, + ) # retrieve kraken reported balances # and do diff with ledger to determine # what amount of trades-transactions need # to be reloaded. - balances = await client.get_balances() - - for dst, size in balances.items(): - - # we don't care about tracking positions - # in the user's source fiat currency. - if ( - dst == src_fiat - or not any( - dst in bs_mktid for bs_mktid in table.pps - ) - ): - log.warning( - f'Skipping balance `{dst}`:{size} for position calcs!' - ) - continue - - def has_pp( - dst: str, - size: float, - - ) -> Position | None: - - src2dst: dict[str, str] = {} - - for bs_mktid in table.pps: - likely_pair = get_likely_pair( - src_fiat, - dst, - bs_mktid, - ) - if likely_pair: - src2dst[src_fiat] = dst - - for src, dst in src2dst.items(): - pair = f'{dst}{src_fiat}' - pp = table.pps.get(pair) - if ( - pp - and math.isclose(pp.size, size) - ): - return pp - - elif ( - size == 0 - and pp.size - ): - log.warning( - f'`kraken` account says you have a ZERO ' - f'balance for {bs_mktid}:{pair}\n' - f'but piker seems to think `{pp.size}`\n' - 'This is likely a discrepancy in piker ' - 'accounting if the above number is' - "large,' though it's likely to due lack" - "f tracking xfers fees.." - ) - return pp - - return None # signal no entry - - pos = has_pp(dst, size) - if not pos: - - # we have a balance for which there is no pp - # entry? so we have to likely update from the - # ledger. - updated = table.update_from_trans(ledger_trans) - log.info(f'Updated pps from ledger:\n{pformat(updated)}') - pos = has_pp(dst, size) - - if ( - not pos - and not simulate_pp_update - ): - # try reloading from API - table.update_from_trans(api_trans) - pos = has_pp(dst, size) - if not pos: - - # get transfers to make sense of abs balances. - # NOTE: we do this after ledger and API - # loading since we might not have an entry - # in the ``pps.toml`` for the necessary pair - # yet and thus this likely pair grabber will - # likely fail. - for bs_mktid in table.pps: - likely_pair = get_likely_pair( - src_fiat, - dst, - bs_mktid, - ) - if likely_pair: - break - else: - raise ValueError( - 'Could not find a position pair in ' - 'ledger for likely widthdrawal ' - f'candidate: {dst}' - ) - - if likely_pair: - # this was likely pp that had a withdrawal - # from the dst asset out of the account. - - xfer_trans = await client.get_xfers( - dst, - # TODO: not all src assets are - # 3 chars long... - src_asset=likely_pair[3:], - ) - if xfer_trans: - updated = table.update_from_trans( - xfer_trans, - cost_scalar=1, - ) - log.info( - f'Updated {dst} from transfers:\n' - f'{pformat(updated)}' - ) - - if has_pp(dst, size): - raise ValueError( - 'Could not reproduce balance:\n' - f'dst: {dst}, {size}\n' - ) + balances: dict[str, float] = await client.get_balances() + + verify_balances( + acnt, + src_fiat, + balances, + client, + ledger, + ledger_trans, + api_trans, + ) - # only for simulate-testing a "new fill" since + # XXX NOTE: only for simulate-testing a "new fill" since # otherwise we have to actually conduct a live clear. 
if simulate_pp_update: tid = list(tids2trades)[0] @@ -643,25 +561,27 @@ def has_pp( reqids2txids[0] = last_trade_dict['ordertxid'] ppmsgs: list[BrokerdPosition] = trades2pps( - table, + acnt, + ledger, acctid, ) + # sync with EMS delivering pps and accounts await ctx.started((ppmsgs, [acc_name])) # TODO: ideally this blocks the this task # as little as possible. we need to either do # these writes in another actor, or try out `trio`'s # async file IO api? - table.write_config() + acnt.write_config() # Get websocket token for authenticated data stream # Assert that a token was actually received. resp = await client.endpoint('GetWebSocketsToken', {}) - err = resp.get('error') - if err: + if err := resp.get('error'): raise BrokerError(err) - token = resp['result']['token'] + # resp token for ws init + token: str = resp['result']['token'] ws: NoBsWs async with ( @@ -690,13 +610,14 @@ def has_pp( # enter relay loop await handle_order_updates( + client, ws, stream, ems_stream, apiflows, ids, reqids2txids, - table, + acnt, api_trans, acctid, acc_name, @@ -705,13 +626,14 @@ def has_pp( async def handle_order_updates( + client: Client, # only for pairs table needed in ledger proc ws: NoBsWs, ws_stream: AsyncIterator, ems_stream: tractor.MsgStream, apiflows: OrderDialogs, ids: bidict[str, int], reqids2txids: bidict[int, str], - table: PpTable, + acnt: Account, # transaction records which will be updated # on new trade clearing events (aka order "fills") @@ -733,7 +655,7 @@ async def handle_order_updates( # TODO: turns out you get the fill events from the # `openOrders` before you get this, so it might be better - # to do all fill/status/pp updates in that sub and just use + # to do all fill/status/pos updates in that sub and just use # this one for ledger syncs? # For eg. we could take the "last 50 trades" and do a diff @@ -818,9 +740,12 @@ async def handle_order_updates( ) await ems_stream.send(status_msg) - new_trans = await norm_trade_records(trades) + new_trans = await norm_trade_records( + trades, + client, + ) ppmsgs = trades2pps( - table, + acnt, acctid, new_trans, ) @@ -1183,36 +1108,3 @@ async def handle_order_updates( }) case _: log.warning(f'Unhandled trades update msg: {msg}') - - -async def norm_trade_records( - ledger: dict[str, Any], - -) -> dict[str, Transaction]: - - records: dict[str, Transaction] = {} - - for tid, record in ledger.items(): - - size = float(record.get('vol')) * { - 'buy': 1, - 'sell': -1, - }[record['type']] - - # we normalize to kraken's `altname` always.. 
- bs_mktid: str = Client.normalize_symbol(record['pair']) - fqme = f'{bs_mktid.lower()}.kraken' - mkt: MktPair = (await get_mkt_info(fqme))[0] - - records[tid] = Transaction( - fqme=fqme, - sym=mkt, - tid=tid, - size=size, - price=float(record['price']), - cost=float(record['fee']), - dt=pendulum.from_timestamp(float(record['time'])), - bs_mktid=bs_mktid, - ) - - return records diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index d0b14f33e..1d10ad8cc 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -282,11 +282,13 @@ async def get_mkt_info( ''' venue: str = 'spot' expiry: str = '' - if '.kraken' in fqme: - broker, pair, venue, expiry = unpack_fqme(fqme) - venue: str = venue or 'spot' + if '.kraken' not in fqme: + fqme += '.kraken' - if venue != 'spot': + broker, pair, venue, expiry = unpack_fqme(fqme) + venue: str = venue or 'spot' + + if venue.lower() != 'spot': raise SymbolNotFound( 'kraken only supports spot markets right now!\n' f'{fqme}\n' @@ -295,14 +297,20 @@ async def get_mkt_info( async with open_cached_client('kraken') as client: # uppercase since kraken bs_mktid is always upper - bs_fqme, _, broker = fqme.partition('.') - pair_str: str = bs_fqme.upper() - bs_mktid: str = Client.normalize_symbol(pair_str) - pair: Pair = await client.pair_info(pair_str) + # bs_fqme, _, broker = fqme.partition('.') + # pair_str: str = bs_fqme.upper() + pair_str: str = f'{pair}.{venue}' + + pair: Pair | None = client._pairs.get(pair_str.upper()) + if not pair: + bs_fqme: str = Client.to_bs_fqme(pair_str) + pair: Pair = client._pairs[bs_fqme] + + if not (assets := client._assets): + assets: dict[str, Asset] = await client.get_assets() - assets = client.assets - dst_asset: Asset = assets[pair.base] - src_asset: Asset = assets[pair.quote] + dst_asset: Asset = assets[pair.bs_dst_asset] + src_asset: Asset = assets[pair.bs_src_asset] mkt = MktPair( dst=dst_asset, @@ -310,7 +318,7 @@ async def get_mkt_info( price_tick=pair.price_tick, size_tick=pair.size_tick, - bs_mktid=bs_mktid, + bs_mktid=pair.bs_mktid, expiry=expiry, venue=venue or 'spot', @@ -488,7 +496,7 @@ async def open_symbol_search( async with open_cached_client('kraken') as client: # load all symbols locally for fast search - cache = await client.cache_symbols() + cache = await client.get_mkt_pairs() await ctx.started(cache) async with ctx.open_stream() as stream: @@ -497,7 +505,7 @@ async def open_symbol_search( matches = fuzzy.extractBests( pattern, - cache, + client._pairs, score_cutoff=50, ) # repack in dict form diff --git a/piker/brokers/kraken/ledger.py b/piker/brokers/kraken/ledger.py new file mode 100644 index 000000000..2dac90d94 --- /dev/null +++ b/piker/brokers/kraken/ledger.py @@ -0,0 +1,252 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Trade transaction accounting and normalization. 
+
+'''
+import math
+from pprint import pformat
+from typing import (
+    Any,
+)
+
+import pendulum
+
+from piker.accounting import (
+    Transaction,
+    Position,
+    Account,
+    get_likely_pair,
+    TransactionLedger,
+    # MktPair,
+)
+from piker.data import (
+    # SymbologyCache,
+    Struct,
+)
+from .api import (
+    log,
+    Client,
+    Pair,
+)
+# from .feed import get_mkt_info
+
+
+def norm_trade(
+    tid: str,
+    record: dict[str, Any],
+
+    # this is the dict that was returned from
+    # `Client.get_mkt_pairs()` and when running offline ledger
+    # processing from `.accounting`, this will be the table loaded
+    # into `SymbologyCache.pairs`.
+    pairs: dict[str, Struct],
+
+) -> Transaction:
+
+    size: float = float(record.get('vol')) * {
+        'buy': 1,
+        'sell': -1,
+    }[record['type']]
+
+    rest_pair_key: str = record['pair']
+    pair: Pair = pairs[rest_pair_key]
+
+    fqme: str = pair.bs_fqme.lower() + '.kraken'
+
+    return Transaction(
+        fqme=fqme,
+        tid=tid,
+        size=size,
+        price=float(record['price']),
+        cost=float(record['fee']),
+        dt=pendulum.from_timestamp(float(record['time'])),
+        bs_mktid=pair.bs_mktid,
+    )
+
+
+async def norm_trade_records(
+    ledger: dict[str, Any],
+    client: Client,
+
+) -> dict[str, Transaction]:
+    '''
+    Loop through an input ``dict`` of trade records
+    and convert them to ``Transactions``.
+
+    '''
+    records: dict[str, Transaction] = {}
+    for tid, record in ledger.items():
+
+        # manual_fqme: str = f'{bs_mktid.lower()}.kraken'
+        # mkt: MktPair = (await get_mkt_info(manual_fqme))[0]
+        # fqme: str = mkt.fqme
+        # assert fqme == manual_fqme
+
+        records[tid] = norm_trade(
+            tid,
+            record,
+            pairs=client._AssetPairs,
+        )
+
+    return records
+
+
+def has_pp(
+    acnt: Account,
+    src_fiat: str,
+    dst: str,
+    size: float,
+
+) -> Position | None:
+
+    src2dst: dict[str, str] = {}
+    for bs_mktid in acnt.pps:
+        likely_pair = get_likely_pair(
+            src_fiat,
+            dst,
+            bs_mktid,
+        )
+        if likely_pair:
+            src2dst[src_fiat] = dst
+
+    for src, dst in src2dst.items():
+        pair: str = f'{dst}{src_fiat}'
+        pos: Position = acnt.pps.get(pair)
+        if (
+            pos
+            and math.isclose(pos.size, size)
+        ):
+            return pos
+
+        elif (
+            size == 0
+            and pos.size
+        ):
+            log.warning(
+                f'`kraken` account says you have a ZERO '
+                f'balance for {bs_mktid}:{pair}\n'
+                f'but piker seems to think `{pos.size}`\n'
+                'This is likely a discrepancy in piker '
+                'accounting if the above number is '
+                'large, though it is likely due to a lack '
+                'of tracking xfer fees..'
+            )
+            return pos
+
+    return None  # indicate no entry found
+
+
+# TODO: factor most of this "account updating from txns" into
+# the `Account` impl so as to provide for hiding the mostly
+# cross-provider updates from txn sets
+async def verify_balances(
+    acnt: Account,
+    src_fiat: str,
+    balances: dict[str, float],
+    client: Client,
+    ledger: TransactionLedger,
+    ledger_trans: dict[str, Transaction],  # from toml
+    api_trans: dict[str, Transaction],  # from API
+
+    simulate_pp_update: bool = False,
+
+) -> None:
+    for dst, size in balances.items():
+
+        # we don't care about tracking positions
+        # in the user's source fiat currency.
+        if (
+            dst == src_fiat
+            or not any(
+                dst in bs_mktid for bs_mktid in acnt.pps
+            )
+        ):
+            log.warning(
+                f'Skipping balance `{dst}`:{size} for position calcs!'
+            )
+            continue
+
+        # we have a balance for which there is no pos entry
+        # - we likely have to update from the ledger?
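+        # NOTE: the update attempts below go in (up to) 3 stages:
+        #  1. replay the local toml ledger's txns,
+        #  2. replay this session's API-reported txns,
+        #  3. pull in transfer (xfer) history for the likely pair,
+        # and if none of those reproduce the reported balance we
+        # raise at the end.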
+        if not has_pp(acnt, src_fiat, dst, size):
+            updated = acnt.update_from_ledger(
+                ledger_trans,
+                symcache=ledger.symcache,
+            )
+            log.info(f'Updated pps from ledger:\n{pformat(updated)}')
+
+        # FIRST try reloading from API records
+        if (
+            not has_pp(acnt, src_fiat, dst, size)
+            and not simulate_pp_update
+        ):
+            acnt.update_from_ledger(
+                api_trans,
+                symcache=ledger.symcache,
+            )
+
+            # get transfers to make sense of abs
+            # balances.
+            # NOTE: we do this after ledger and API
+            # loading since we might not have an
+            # entry in the
+            # ``account.kraken.spot.toml`` for the
+            # necessary pair yet and thus this
+            # likely pair grabber will likely fail.
+            if not has_pp(acnt, src_fiat, dst, size):
+                for bs_mktid in acnt.pps:
+                    likely_pair: str | None = get_likely_pair(
+                        src_fiat,
+                        dst,
+                        bs_mktid,
+                    )
+                    if likely_pair:
+                        break
+                else:
+                    raise ValueError(
+                        'Could not find a position pair in '
+                        'ledger for likely withdrawal '
+                        f'candidate: {dst}'
+                    )
+
+                # this was likely a pos that had a withdrawal
+                # from the dst asset out of the account.
+                if likely_pair:
+                    xfer_trans = await client.get_xfers(
+                        dst,
+
+                        # TODO: not all src assets are
+                        # 3 chars long...
+                        src_asset=likely_pair[3:],
+                    )
+                    if xfer_trans:
+                        updated = acnt.update_from_ledger(
+                            xfer_trans,
+                            cost_scalar=1,
+                            symcache=ledger.symcache,
+                        )
+                        log.info(
+                            f'Updated {dst} from transfers:\n'
+                            f'{pformat(updated)}'
+                        )
+
+            if not has_pp(acnt, src_fiat, dst, size):
+                raise ValueError(
+                    'Could not reproduce balance:\n'
+                    f'dst: {dst}, {size}\n'
+                )
diff --git a/piker/brokers/kraken/symbols.py b/piker/brokers/kraken/symbols.py
new file mode 100644
index 000000000..43efcac27
--- /dev/null
+++ b/piker/brokers/kraken/symbols.py
@@ -0,0 +1,114 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Symbology defs and deats!
+
+'''
+from decimal import Decimal
+
+from piker.accounting._mktinfo import (
+    digits_to_dec,
+)
+from piker.data.types import Struct
+
+
+# https://www.kraken.com/features/api#get-tradable-pairs
+class Pair(Struct):
+    respname: str  # idiotic bs_mktid equiv i guess?
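+    # (presumably the top-level key from the REST `AssetPairs`
+    # response; see `.bs_mktid` below which just returns it)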
+ altname: str # alternate pair name + wsname: str # WebSocket pair name (if available) + aclass_base: str # asset class of base component + base: str # asset id of base component + aclass_quote: str # asset class of quote component + quote: str # asset id of quote component + lot: str # volume lot size + + cost_decimals: int + costmin: float + pair_decimals: int # scaling decimal places for pair + lot_decimals: int # scaling decimal places for volume + + # amount to multiply lot volume by to get currency volume + lot_multiplier: float + + # array of leverage amounts available when buying + leverage_buy: list[int] + # array of leverage amounts available when selling + leverage_sell: list[int] + + # fee schedule array in [volume, percent fee] tuples + fees: list[tuple[int, float]] + + # maker fee schedule array in [volume, percent fee] tuples (if on + # maker/taker) + fees_maker: list[tuple[int, float]] + + fee_volume_currency: str # volume discount currency + margin_call: str # margin call level + margin_stop: str # stop-out/liquidation margin level + ordermin: float # minimum order volume for pair + tick_size: float # min price step size + status: str + + short_position_limit: float = 0 + long_position_limit: float = float('inf') + + # TODO: should we make this a literal NamespacePath ref? + ns_path: str = 'piker.brokers.kraken:Pair' + + @property + def bs_mktid(self) -> str: + ''' + Kraken seems to index it's market symbol sets in + transaction ledgers using the key returned from rest + queries.. so use that since apparently they can't + make up their minds on a better key set XD + + ''' + return self.respname + + @property + def price_tick(self) -> Decimal: + return digits_to_dec(self.pair_decimals) + + @property + def size_tick(self) -> Decimal: + return digits_to_dec(self.lot_decimals) + + @property + def bs_dst_asset(self) -> str: + dst, _ = self.wsname.split('/') + return dst + + @property + def bs_src_asset(self) -> str: + _, src = self.wsname.split('/') + return src + + @property + def bs_fqme(self) -> str: + ''' + Basically the `.altname` but with special '.' handling and + `.SPOT` suffix appending (for future multi-venue support). + + ''' + dst, src = self.wsname.split('/') + # XXX: omg for stupid shite like ETH2.S/ETH.. + dst = dst.replace('.', '-') + return f'{dst}{src}.SPOT' + + From 430309b5dc506a6c4325c063e3973f07b3e8e3a0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 16 Jul 2023 21:22:15 -0400 Subject: [PATCH 050/116] .accounting: type `Transaction.etype` as a `Literal` Start working out the set of possible "txn types" we want to define in a simple set. Relates to #510 --- piker/accounting/__init__.py | 6 +++--- piker/accounting/_ledger.py | 20 ++++++++++++++++---- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index 167dbcc23..d6e1c3b61 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -88,7 +88,7 @@ def get_likely_pair( ''' try: - src_name_start = bs_mktid.rindex(src) + src_name_start: str = bs_mktid.rindex(src) except ( ValueError, # substr not found ): @@ -99,8 +99,8 @@ def get_likely_pair( # log.warning( # f'No src fiat {src} found in {bs_mktid}?' 
# ) - return + return None - likely_dst = bs_mktid[:src_name_start] + likely_dst: str = bs_mktid[:src_name_start] if likely_dst == dst: return bs_mktid diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index b6f0ee744..f4cb3f38c 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -28,6 +28,7 @@ Any, Callable, Generator, + Literal, TYPE_CHECKING, ) @@ -51,6 +52,18 @@ log = get_logger(__name__) +TxnType = Literal[ + 'clear', + 'transfer', + + # TODO: see https://github.com/pikers/piker/issues/510 + # 'split', + # 'rename', + # 'resize', + # 'removal', +] + + class Transaction(Struct, frozen=True): # NOTE: this is a unified acronym also used in our `MktPair` @@ -70,10 +83,9 @@ class Transaction(Struct, frozen=True): cost: float # commisions or other additional costs dt: DateTime - # the "event type" in terms of "market events" see - # https://github.com/pikers/piker/issues/510 for where we're - # probably going with this. - etype: str = 'clear' + # the "event type" in terms of "market events" see above and + # https://github.com/pikers/piker/issues/510 + etype: TxnType = 'clear' # TODO: we can drop this right since we # can instead expect the backend to provide this From e8025d09857832f978f3a1acdaeeac9f9ce73867 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sun, 16 Jul 2023 21:32:36 -0400 Subject: [PATCH 051/116] .data.types.Struct: by default include non-members from `.to_dict()`.. --- piker/data/types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/piker/data/types.py b/piker/data/types.py index afa8f7c8d..5780a46a4 100644 --- a/piker/data/types.py +++ b/piker/data/types.py @@ -74,7 +74,7 @@ class Struct( ''' def to_dict( self, - include_non_members: bool = False, + include_non_members: bool = True, ) -> dict: ''' Like it sounds.. direct delegation to: From 2dab0e2e56dc7cb4ff9d496140c390bed8ccede9 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 01:20:52 -0400 Subject: [PATCH 052/116] Expose `.data._symcache` stuff at subpkg toplevel The list is `open_symcache()`, `get_symcache()`, `SymbologyCache`, and `Stuct` which seems more or less fine to make part of the public namespace. Also, make `._timeseries.t_unit` an instance of literal to make `ruff` happy? 
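With these now public, consuming code can be written against the
subpkg namespace directly, eg. (a hedged usage sketch; the
`show_cached_mkts()` helper, its `brokermod` input and the printed
fields are illustrative assumptions, not part of this change):

    from piker.data import (
        open_symcache,
        SymbologyCache,
    )

    async def show_cached_mkts(brokermod) -> None:
        # load (or reuse) the backend's symbology cache
        async with open_symcache(brokermod) as cache:
            cache: SymbologyCache
            for fqme, mkt in cache.mktmaps.items():
                print(fqme, mkt.bs_mktid)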
--- piker/data/__init__.py | 12 +++++++++++- piker/data/_timeseries.py | 2 +- piker/data/flows.py | 3 --- 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/piker/data/__init__.py b/piker/data/__init__.py index 6c621248a..95c47e197 100644 --- a/piker/data/__init__.py +++ b/piker/data/__init__.py @@ -39,9 +39,15 @@ open_feed, ) from .flows import Flume +from ._symcache import ( + SymbologyCache, + open_symcache, + get_symcache, +) +from .types import Struct -__all__ = [ +__all__: list[str] = [ 'Flume', 'Feed', 'open_feed', @@ -53,4 +59,8 @@ 'get_shm_token', 'def_iohlcv_fields', 'def_ohlcv_fields', + 'open_symcache', + 'get_symcache', + 'SymbologyCache', + 'Struct', ] diff --git a/piker/data/_timeseries.py b/piker/data/_timeseries.py index f43e0c73a..c812e08af 100644 --- a/piker/data/_timeseries.py +++ b/piker/data/_timeseries.py @@ -225,7 +225,7 @@ def detect_null_time_gap( return None -t_unit: Literal[ +t_unit: Literal = Literal[ 'days', 'hours', 'minutes', diff --git a/piker/data/flows.py b/piker/data/flows.py index 7776a602e..652e1e717 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -31,7 +31,6 @@ import numpy as np from ..accounting import MktPair -from ._util import log from .types import Struct from ._sharedmem import ( attach_shm_array, @@ -233,5 +232,3 @@ def has_vlm(self) -> bool: np.all(np.isin(vlm, -1)) or np.all(np.isnan(vlm)) ) - - From 8fb667686fcd0dd1240077ed034bb7087a2b2ce2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 01:24:45 -0400 Subject: [PATCH 053/116] Open symcaches as part of per-backend search spawning --- piker/ui/_app.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/piker/ui/_app.py b/piker/ui/_app.py index 644e7567d..585ccb18b 100644 --- a/piker/ui/_app.py +++ b/piker/ui/_app.py @@ -28,6 +28,7 @@ from . import _event from ._exec import run_qtractor from ..data.feed import install_brokerd_search +from ..data._symcache import open_symcache from ..accounting import unpack_fqme from . import _search from ._chart import GodWidget @@ -56,7 +57,10 @@ async def load_provider_search( portal, brokermod, ), + open_symcache(brokermod) as symcache, ): + assert symcache + # keep search engine stream up until cancelled await trio.sleep_forever() @@ -99,6 +103,8 @@ async def _async_main( sbar = godwidget.window.status_bar starting_done = sbar.open_status('starting ze sexy chartz') + # NOTE: by default we load all "builtin" backends for search + # and that includes loading their symcaches if possible B) needed_brokermods: dict[str, ModuleType] = {} for fqme in syms: brokername, *_ = unpack_fqme(fqme) From 71d0097dc7cc4abb9447d523e43b83f9b47aae73 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 13:50:13 -0400 Subject: [PATCH 054/116] Switch to `Position.cumsize` in tracker and order mode mods --- piker/ui/_position.py | 58 ++++++++++++++++++++++-------------------- piker/ui/order_mode.py | 9 +++---- 2 files changed, 33 insertions(+), 34 deletions(-) diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 90976a1e8..0f38978fe 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -85,23 +85,25 @@ async def update_pnl_from_feed( ) -> None: ''' - Real-time display the current pp's PnL in the appropriate label. + Real-time display the current pp's PnL in the appropriate + label. - ``ValueError`` if this task is spawned where there is a net-zero pp. + ``ValueError`` if this task is spawned where there is + a net-zero pp. 
''' global _pnl_tasks - pp: PositionTracker = order_mode.current_pp - live: Position = pp.live_pp + pt: PositionTracker = order_mode.current_pp + live: Position = pt.live_pp key: str = live.mkt.fqme - log.info(f'Starting pnl display for {pp.alloc.account}') + log.info(f'Starting pnl display for {pt.alloc.account}') - if live.size < 0: + if live.cumsize < 0: types = ('ask', 'last', 'last', 'dark_trade') - elif live.size > 0: + elif live.cumsize > 0: types = ('bid', 'last', 'last', 'dark_trade') else: @@ -133,10 +135,10 @@ async def update_pnl_from_feed( for tick in iterticks(quote, types): # print(f'{1/period} Hz') - size = order_mode.current_pp.live_pp.size + size = order_mode.current_pp.live_pp.cumsize if size == 0: # terminate this update task since we're - # no longer in a pp + # no longer in a pt order_mode.pane.pnl_label.format(pnl=0) return @@ -266,7 +268,7 @@ def apply_setting( # load the new account's allocator alloc = tracker.alloc - # WRITE any settings to current pp's allocator + # WRITE any settings to current pos-tracker's allocator if key == 'size_unit': # implicit re-write of value if input # is the "text name" of the units. @@ -285,10 +287,10 @@ def apply_setting( log.error('limit must be > 0') return False - pp = mode.current_pp.live_pp + lpos = mode.current_pp.live_pp if alloc.size_unit == 'currency': - dsize = pp.dsize + dsize = lpos.dsize if dsize > value: log.error( f'limit must > then current pp: {dsize}' @@ -303,7 +305,7 @@ def apply_setting( alloc.currency_limit = value else: - size = pp.size + size = lpos.cumsize if size > value: log.error( f'limit must > then current pp: {size}' @@ -342,7 +344,7 @@ def update_status_ui( alloc = tracker.alloc slots = alloc.slots used = alloc.slots_used(tracker.live_pp) - size = tracker.live_pp.size + size = tracker.live_pp.cumsize dsize = tracker.live_pp.dsize # READ out settings and update the status UI / settings widgets @@ -397,12 +399,12 @@ def update_account_icons( form = self.form accounts = form.fields['account'] - for account_name, pp in pps.items(): + for account_name, pos in pps.items(): icon_name = None - if pp.size > 0: + if pos.cumsize > 0: icon_name = 'long_pp' - elif pp.size < 0: + elif pos.cumsize < 0: icon_name = 'short_pp' accounts.set_icon(account_name, icon_name) @@ -422,7 +424,7 @@ def display_pnl( ''' mode = self.order_mode mkt: MktPair = mode.chart.linked.mkt - size = tracker.live_pp.size + size = tracker.live_pp.cumsize fqme: str = mkt.fqme flume: Feed = mode.feed.flumes[fqme] pnl_value = 0 @@ -860,15 +862,15 @@ def update_from_pp( ''' # live pp updates - pp = position or self.live_pp + lpos = position or self.live_pp if set_as_startup: - startup_pp = pp + startup_pp = lpos else: startup_pp = self.startup_pp alloc = self.alloc # update allocator settings - asset_type = pp.mkt.type_key + asset_type = lpos.mkt.type_key # specific configs by asset class / type if asset_type in _derivs: @@ -886,13 +888,13 @@ def update_from_pp( # if the current position is already greater then the limit # settings, increase the limit to the current position if alloc.size_unit == 'currency': - startup_size = self.startup_pp.size * startup_pp.ppu + startup_size = self.startup_pp.cumsize * startup_pp.ppu if startup_size > alloc.currency_limit: alloc.currency_limit = round(startup_size, ndigits=2) else: - startup_size = abs(startup_pp.size) + startup_size = abs(startup_pp.cumsize) if startup_size > alloc.units_limit: alloc.units_limit = startup_size @@ -902,16 +904,16 @@ def update_from_pp( self.nav.update_ui( self.alloc.account, - 
pp.ppu, - pp.size, - round(alloc.slots_used(pp), ndigits=1), # slots used + lpos.ppu, + lpos.cumsize, + round(alloc.slots_used(lpos), ndigits=1), # slots used ) - if self.live_pp.size: + if self.live_pp.cumsize: # print("SHOWING NAV") self.nav.show() - # if pp.size == 0: + # if lpos.cumsize == 0: else: # print("HIDING NAV") self.nav.hide() diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 9debfc582..6bf9f34a0 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -770,7 +770,6 @@ async def open_order_mode( accounts_def: bidict[str, str | None] = config.load_accounts( providers=[mkt.broker], ) - # await tractor.pause() # XXX: ``brokerd`` delivers a set of account names that it # allows use of but the user also can define the accounts they'd @@ -797,8 +796,6 @@ async def open_order_mode( # net-zero pp startup_pp = Position( mkt=mkt, - size=0, - ppu=0, # XXX: BLEH, do we care about this on the client side? bs_mktid=mkt.key, @@ -822,7 +819,7 @@ async def open_order_mode( pp_tracker.nav.hide() trackers[account_name] = pp_tracker - assert pp_tracker.startup_pp.size == pp_tracker.live_pp.size + assert pp_tracker.startup_pp.cumsize == pp_tracker.live_pp.cumsize # TODO: do we even really need the "startup pp" or can we # just take the max and pass that into the some state / the @@ -830,7 +827,7 @@ async def open_order_mode( pp_tracker.update_from_pp() # on existing position, show pp tracking graphics - if pp_tracker.startup_pp.size != 0: + if pp_tracker.startup_pp.cumsize != 0: pp_tracker.nav.show() pp_tracker.nav.hide_info() @@ -1038,7 +1035,7 @@ async def process_trade_msg( # status/pane UI mode.pane.update_status_ui(tracker) - if tracker.live_pp.size: + if tracker.live_pp.cumsize: # display pnl mode.pane.display_pnl(tracker) From 82fd785646786494c11d634c29a3d5588a11af40 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 14:58:15 -0400 Subject: [PATCH 055/116] Adjust default `[binance]` config to use paper and disable testnets --- config/brokers.toml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/config/brokers.toml b/config/brokers.toml index 675af9409..42df5a3eb 100644 --- a/config/brokers.toml +++ b/config/brokers.toml @@ -2,13 +2,15 @@ # ---- CEXY ---- ################ [binance] +accounts.paper = 'paper' + accounts.usdtm = 'futes' -futes.use_testnet = true +futes.use_testnet = false futes.api_key = '' futes.api_secret = '' accounts.spot = 'spot' -spot.use_testnet = true +spot.use_testnet = false spot.api_key = '' spot.api_secret = '' From 912f1bc635e6e89ebd202566ad7afdd97de97855 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 16:20:11 -0400 Subject: [PATCH 056/116] .kraken: start new `.symbols` submod and move symcache and search stuff there --- piker/brokers/kraken/__init__.py | 24 ++++--- piker/brokers/kraken/feed.py | 102 +----------------------------- piker/brokers/kraken/symbols.py | 103 ++++++++++++++++++++++++++++++- 3 files changed, 117 insertions(+), 112 deletions(-) diff --git a/piker/brokers/kraken/__init__.py b/piker/brokers/kraken/__init__.py index 783406a48..1f5bc8762 100644 --- a/piker/brokers/kraken/__init__.py +++ b/piker/brokers/kraken/__init__.py @@ -19,23 +19,26 @@ Sub-modules within break into the core functionalities: -- ``broker.py`` part for orders / trading endpoints -- ``feed.py`` for real-time data feed endpoints -- ``api.py`` for the core API machinery which is ``trio``-ized - wrapping around ``ib_insync``. 
+- .api: for the core API machinery which generally + a ``asks``/``trio-websocket`` implemented ``Client``. +- .broker: part for orders / trading endpoints. +- .feed: for real-time and historical data query endpoints. +- .ledger: for transaction processing as it pertains to accounting. +- .symbols: for market (name) search and symbology meta-defs. ''' -from .symbols import Pair # for symcache +from .symbols import ( + Pair, # for symcache + open_symbol_search, + # required by `.accounting`, `.data` + get_mkt_info, +) # required by `.brokers` from .api import ( get_client, ) from .feed import ( - # required by `.accounting`, `.data` - get_mkt_info, - # required by `.data` - open_symbol_search, stream_quotes, open_history_client, ) @@ -66,6 +69,7 @@ # tractor RPC enable arg __enable_modules__: list[str] = [ 'api', - 'feed', 'broker', + 'feed', + 'symbols', ] diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 1d10ad8cc..63072fd02 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -30,24 +30,16 @@ ) import time -from fuzzywuzzy import process as fuzzy import numpy as np import pendulum from trio_typing import TaskStatus -import tractor import trio from piker.accounting._mktinfo import ( - Asset, MktPair, - unpack_fqme, ) from piker.brokers import ( open_cached_client, - SymbolNotFound, -) -from piker._cacheables import ( - async_lifo_cache, ) from piker.brokers._util import ( BrokerError, @@ -59,9 +51,8 @@ from piker.data._web_bs import open_autorecon_ws, NoBsWs from .api import ( log, - Client, - Pair, ) +from .symbols import get_mkt_info class OHLC(Struct, frozen=True): @@ -267,70 +258,6 @@ async def get_ohlc( yield get_ohlc, {'erlangs': 1, 'rate': 1} -@async_lifo_cache() -async def get_mkt_info( - fqme: str, - -) -> tuple[MktPair, Pair]: - ''' - Query for and return a `MktPair` and backend-native `Pair` (or - wtv else) info. - - If more then one fqme is provided return a ``dict`` of native - key-strs to `MktPair`s. 
- - ''' - venue: str = 'spot' - expiry: str = '' - if '.kraken' not in fqme: - fqme += '.kraken' - - broker, pair, venue, expiry = unpack_fqme(fqme) - venue: str = venue or 'spot' - - if venue.lower() != 'spot': - raise SymbolNotFound( - 'kraken only supports spot markets right now!\n' - f'{fqme}\n' - ) - - async with open_cached_client('kraken') as client: - - # uppercase since kraken bs_mktid is always upper - # bs_fqme, _, broker = fqme.partition('.') - # pair_str: str = bs_fqme.upper() - pair_str: str = f'{pair}.{venue}' - - pair: Pair | None = client._pairs.get(pair_str.upper()) - if not pair: - bs_fqme: str = Client.to_bs_fqme(pair_str) - pair: Pair = client._pairs[bs_fqme] - - if not (assets := client._assets): - assets: dict[str, Asset] = await client.get_assets() - - dst_asset: Asset = assets[pair.bs_dst_asset] - src_asset: Asset = assets[pair.bs_src_asset] - - mkt = MktPair( - dst=dst_asset, - src=src_asset, - - price_tick=pair.price_tick, - size_tick=pair.size_tick, - bs_mktid=pair.bs_mktid, - - expiry=expiry, - venue=venue or 'spot', - - # TODO: futes - # _atype=_atype, - - broker='kraken', - ) - return mkt, pair - - async def stream_quotes( send_chan: trio.abc.SendChannel, @@ -486,30 +413,3 @@ async def subscribe(ws: NoBsWs): log.warning(f'Unknown WSS message: {typ}, {quote}') await send_chan.send({topic: quote}) - - -@tractor.context -async def open_symbol_search( - ctx: tractor.Context, - -) -> Client: - async with open_cached_client('kraken') as client: - - # load all symbols locally for fast search - cache = await client.get_mkt_pairs() - await ctx.started(cache) - - async with ctx.open_stream() as stream: - - async for pattern in stream: - - matches = fuzzy.extractBests( - pattern, - client._pairs, - score_cutoff=50, - ) - # repack in dict form - await stream.send({ - pair[0].altname: pair[0] - for pair in matches - }) diff --git a/piker/brokers/kraken/symbols.py b/piker/brokers/kraken/symbols.py index 43efcac27..ea2c68f44 100644 --- a/piker/brokers/kraken/symbols.py +++ b/piker/brokers/kraken/symbols.py @@ -15,15 +15,30 @@ # along with this program. If not, see . ''' -Symbology defs and deats! +Symbology defs and search. ''' from decimal import Decimal +import tractor +from fuzzywuzzy import process as fuzzy + +from piker._cacheables import ( + async_lifo_cache, +) from piker.accounting._mktinfo import ( digits_to_dec, ) +from piker.brokers import ( + open_cached_client, + SymbolNotFound, +) from piker.data.types import Struct +from piker.accounting._mktinfo import ( + Asset, + MktPair, + unpack_fqme, +) # https://www.kraken.com/features/api#get-tradable-pairs @@ -112,3 +127,89 @@ def bs_fqme(self) -> str: return f'{dst}{src}.SPOT' +@tractor.context +async def open_symbol_search(ctx: tractor.Context) -> None: + async with open_cached_client('kraken') as client: + + # load all symbols locally for fast search + cache = await client.get_mkt_pairs() + await ctx.started(cache) + + async with ctx.open_stream() as stream: + + async for pattern in stream: + + matches = fuzzy.extractBests( + pattern, + client._pairs, + score_cutoff=50, + ) + # repack in dict form + await stream.send({ + pair[0].altname: pair[0] + for pair in matches + }) + + +@async_lifo_cache() +async def get_mkt_info( + fqme: str, + +) -> tuple[MktPair, Pair]: + ''' + Query for and return a `MktPair` and backend-native `Pair` (or + wtv else) info. + + If more then one fqme is provided return a ``dict`` of native + key-strs to `MktPair`s. 
+
+    '''
+    venue: str = 'spot'
+    expiry: str = ''
+    if '.kraken' not in fqme:
+        fqme += '.kraken'
+
+    broker, pair, venue, expiry = unpack_fqme(fqme)
+    venue: str = venue or 'spot'
+
+    if venue.lower() != 'spot':
+        raise SymbolNotFound(
+            'kraken only supports spot markets right now!\n'
+            f'{fqme}\n'
+        )
+
+    async with open_cached_client('kraken') as client:
+
+        # uppercase since kraken bs_mktid is always upper
+        # bs_fqme, _, broker = fqme.partition('.')
+        # pair_str: str = bs_fqme.upper()
+        pair_str: str = f'{pair}.{venue}'
+
+        pair: Pair | None = client._pairs.get(pair_str.upper())
+        if not pair:
+            bs_fqme: str = client.to_bs_fqme(pair_str)
+            pair: Pair = client._pairs[bs_fqme]
+
+        if not (assets := client._assets):
+            assets: dict[str, Asset] = await client.get_assets()
+
+        dst_asset: Asset = assets[pair.bs_dst_asset]
+        src_asset: Asset = assets[pair.bs_src_asset]
+
+        mkt = MktPair(
+            dst=dst_asset,
+            src=src_asset,
+
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=pair.bs_mktid,
+
+            expiry=expiry,
+            venue=venue or 'spot',
+
+            # TODO: futes
+            # _atype=_atype,
+
+            broker='kraken',
+        )
+        return mkt, pair


From dfa13afe22546e93382f113b7655e11528f38f2c Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 17 Jul 2023 17:12:40 -0400
Subject: [PATCH 057/116] Allow backends to "bypass" symcache loading

Some backends like `ib` don't have an obvious (nor practical) way to
easily download the entire symbology set available from all its mkt
venues. For such backends loading might require a non-std approach
(like using the contract search from some input mkt-key set) and
can't be expected to necessarily be supported out of the box. As
such, allow annotating a broker sub-pkg module with
a `_no_symcache: bool = True` attr which will make `open_symcache()`
yield early with an empty `SymbologyCache` instance for use by the
caller to fill in the mkt and assets tables in whatever ad-hoc way
desired.
---
 piker/data/_symcache.py | 35 +++++++++++++++++++++++++++--------
 1 file changed, 27 insertions(+), 8 deletions(-)

diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py
index ee67ce6a0..1745d2933 100644
--- a/piker/data/_symcache.py
+++ b/piker/data/_symcache.py
@@ -303,11 +303,25 @@ def search(
 _caches: dict[str, SymbologyCache] = {}


+def mk_cachefile(
+    provider: str,
+) -> Path:
+    cachedir: Path = config.get_conf_dir() / '_cache'
+    if not cachedir.is_dir():
+        log.info(f'Creating `nativedb` directory: {cachedir}')
+        cachedir.mkdir()
+
+    cachefile: Path = cachedir / f'{str(provider)}.symcache.toml'
+    cachefile.touch()
+    return cachefile
+
+
 @acm
 async def open_symcache(
     mod_or_name: ModuleType | str,
+
     reload: bool = False,
-    only_from_memcache: bool = False,
+    only_from_memcache: bool = False,  # no API req

 ) -> SymbologyCache:
@@ -317,6 +331,18 @@ async def open_symcache(
         mod: ModuleType = mod_or_name
         provider: str = mod.name

+    cachefile: Path = mk_cachefile(provider)
+
+    # NOTE: certain backends might not support a symbology cache
+    # (easily) and thus we allow for an empty instance to be loaded
+    # and manually filled in at the whim of the caller presuming
+    # the backend pkg-module is annotated appropriately.
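+    # eg. a backend can opt out at module level via (a sketch;
+    # `<backend>` being any broker sub-pkg dir):
+    #
+    #     # piker/brokers/<backend>/__init__.py
+    #     _no_symcache: bool = True
+    #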
+ if getattr(mod, '_no_symcache', False): + yield SymbologyCache( + mod=mod, + fp=cachefile, + ) + return # actor-level cache-cache XD global _caches @@ -332,13 +358,6 @@ async def open_symcache( else: log.warning(msg) - cachedir: Path = config.get_conf_dir() / '_cache' - if not cachedir.is_dir(): - log.info(f'Creating `nativedb` director: {cachedir}') - cachedir.mkdir() - - cachefile: Path = cachedir / f'{str(provider)}.symcache.toml' - # if no cache exists or an explicit reload is requested, load # the provider API and call appropriate endpoints to populate # the mkt and asset tables. From e4731eff1076a85d3717420cbd8514a8eb6dceca Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 17:27:22 -0400 Subject: [PATCH 058/116] Fix `Position.expiry == None` bug --- piker/accounting/_pos.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 061a5b445..90b179f84 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -109,8 +109,17 @@ class Position(Struct): @property def expiry(self) -> datetime | None: - exp: str = self.mkt.expiry.lower() - match exp: + ''' + Security expiry if it has a limited lifetime. + + For non-derivative markets this is normally `None`. + + ''' + exp: str | None = self.mkt.expiry + if exp is None: + return None + + match exp.lower(): # empty str, 'perp' (contract) or simply a null # signifies instrument with NO expiry. case 'perp' | '' | None: From a05a82486d3484f6b4d66e3a2869b7e35999d1b1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 17:31:12 -0400 Subject: [PATCH 059/116] Log a warning on no symcache support in a backend --- piker/ui/_app.py | 5 ++++- piker/ui/_position.py | 6 ++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/piker/ui/_app.py b/piker/ui/_app.py index 585ccb18b..199ba656e 100644 --- a/piker/ui/_app.py +++ b/piker/ui/_app.py @@ -59,7 +59,10 @@ async def load_provider_search( ), open_symcache(brokermod) as symcache, ): - assert symcache + if not symcache.mktmaps: + log.warning( + f'BACKEND DOES NOT (yet) support symcaching: `{brokermod.name}`' + ) # keep search engine stream up until cancelled await trio.sleep_forever() diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 0f38978fe..11aca3e9d 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship for piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -15,7 +15,9 @@ # along with this program. If not, see . ''' -Position (pos) info and display to track ur PnLz B) +UI components to display real-time and historical +`.accounting.Position` cross-asset PnL(s) as well as manage market +agnostic asset ownership state. ''' from __future__ import annotations From 9e87b6515b6597c349aa80dd3f0cdc6ebb3914e3 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 17:31:34 -0400 Subject: [PATCH 060/116] ib: be symcache compat by using bypass attr Since there's no easy way to support it yet, we bypass symbology caching in for now and instead allow the `ib.ledger` routines to fill in `MktPair` and `Asset` entries ad-hoc for the purposes of txn ledger processing. 
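The rough shape of that ad-hoc fill is sketched below;
`mkts_from_ledger` is a hypothetical stand-in for whatever `MktPair`
table the ledger routines produce:

    from piker.data import open_symcache

    async def fill_adhoc(mkts_from_ledger: dict) -> None:
        # an empty cache instance is yielded when the backend
        # sets `_no_symcache = True` (as `ib` now does)
        async with open_symcache('ib', only_from_memcache=True) as symcache:
            for fqme, mkt in mkts_from_ledger.items():
                symcache.mktmaps[fqme] = mkt
                symcache.assets[mkt.src.name] = mkt.src
                symcache.assets[mkt.dst.name] = mkt.dst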
--- piker/brokers/ib/__init__.py | 16 +- piker/brokers/ib/broker.py | 39 +++- piker/brokers/ib/ledger.py | 395 +++++++++++++++++++---------------- 3 files changed, 263 insertions(+), 187 deletions(-) diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py index d42002a16..c98057a7a 100644 --- a/piker/brokers/ib/__init__.py +++ b/piker/brokers/ib/__init__.py @@ -30,18 +30,27 @@ ) from .feed import ( open_history_client, - open_symbol_search, stream_quotes, + get_mkt_info, + open_symbol_search, ) from .broker import ( open_trade_dialog, ) from .ledger import ( + norm_trade, norm_trade_records, ) +# TODO: +# from .symbols import ( +# get_mkt_info, +# open_symbol_search, +# ) __all__ = [ 'get_client', + 'get_mkt_info', + 'norm_trade', 'norm_trade_records', 'open_trade_dialog', 'open_history_client', @@ -75,3 +84,8 @@ # know if ``brokerd`` should be spawned with # ``tractor``'s aio mode. _infect_asyncio: bool = True + +# XXX NOTE: for now we disable symcache with this backend since +# there is no clearly simple nor practical way to download "all +# symbology info" for all supported venues.. +_no_symcache: bool = True diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index e4ac0598d..842fdbc3e 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -60,9 +60,13 @@ open_trade_ledger, TransactionLedger, iter_by_dt, - open_pps, + open_account, Account, ) +from piker.data._symcache import ( + open_symcache, + SymbologyCache, +) from piker.clearing._messages import ( Order, Status, @@ -295,6 +299,10 @@ async def update_ledger_from_api_trades( client: Union[Client, MethodProxy], accounts_def_inv: bidict[str, str], + # provided for ad-hoc insertions "as transactions are + # processed" + symcache: SymbologyCache | None = None, + ) -> tuple[ dict[str, Transaction], dict[str, dict], @@ -325,7 +333,7 @@ async def update_ledger_from_api_trades( # pack in the ``Contract.secType`` entry['asset_type'] = condict['secType'] - entries = api_trades_to_ledger_entries( + entries: dict[str, dict] = api_trades_to_ledger_entries( accounts_def_inv, trade_entries, ) @@ -334,7 +342,10 @@ async def update_ledger_from_api_trades( for acctid, trades_by_id in entries.items(): # normalize to transaction form - trans_by_acct[acctid] = norm_trade_records(trades_by_id) + trans_by_acct[acctid] = norm_trade_records( + trades_by_id, + symcache=symcache, + ) return trans_by_acct, entries @@ -547,11 +558,11 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: + # from piker.brokers import ( + # get_brokermod, + # ) accounts_def = config.load_accounts(['ib']) - # TODO: do this as part of `open_account()`!? - from piker.data._symcache import open_symcache - global _client_cache # deliver positions to subscriber before anything else @@ -565,12 +576,14 @@ async def open_trade_dialog( proxies, aioclients, ), + + # TODO: do this as part of `open_account()`!? open_symcache('ib', only_from_memcache=True) as symcache, ): # Open a trade ledgers stack for appending trade records over # multiple accounts. # TODO: we probably want to generalize this into a "ledgers" api.. - ledgers: dict[str, dict] = {} + ledgers: dict[str, TransactionLedger] = {} tables: dict[str, Account] = {} order_msgs: list[Status] = [] conf = get_config() @@ -617,7 +630,7 @@ async def open_trade_dialog( # positions reported by ib's sys that may not yet be in # piker's ``pps.toml`` state-file. 
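         # (`open_account()` is the renamed `open_pps()` ctx-mngr; it
         #  yields an `Account` which, presumably per the flag below,
         #  is written back to the account file on exit)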
tables[acctid] = lstack.enter_context( - open_pps( + open_account( 'ib', acctid, write_on_exit=True, @@ -640,7 +653,10 @@ async def open_trade_dialog( # update position table with latest ledger from all # gathered transactions: ledger file + api records. - trans: dict[str, Transaction] = norm_trade_records(ledger) + trans: dict[str, Transaction] = norm_trade_records( + ledger, + symcache=symcache, + ) # update trades ledgers for all accounts from connected # api clients which report trades for **this session**. @@ -655,6 +671,7 @@ async def open_trade_dialog( api_trades, proxy, accounts_def_inv, + symcache=symcache, ) # if new api_trades are detected from the API, prepare @@ -797,7 +814,11 @@ async def emit_pp_update( acnts: dict[str, Account], ) -> None: + ''' + Extract trade record from an API event, convert it into a `Transaction`, + update the backing ledger and finally emit a position update to the EMS. + ''' accounts_def_inv: bidict[str, str] = accounts_def.inverse accnum: str = trade_entry['execution']['acctNumber'] fq_acctid: str = accounts_def_inv[accnum] diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py index e12bab132..aaeda1531 100644 --- a/piker/brokers/ib/ledger.py +++ b/piker/brokers/ib/ledger.py @@ -28,6 +28,10 @@ from bidict import bidict import pendulum +from piker.data import ( + Struct, + SymbologyCache, +) from piker.accounting import ( Asset, dec_digits, @@ -39,8 +43,211 @@ from ._util import log +def norm_trade( + tid: str, + record: dict[str, Any], + + # this is the dict that was returned from + # `Client.get_mkt_pairs()` and when running offline ledger + # processing from `.accounting`, this will be the table loaded + # into `SymbologyCache.pairs`. + pairs: dict[str, Struct], + symcache: SymbologyCache | None = None, + +) -> Transaction | None: + + conid = record.get('conId') or record['conid'] + comms = record.get('commission') + if comms is None: + comms = -1*record['ibCommission'] + + price = record.get('price') or record['tradePrice'] + + # the api doesn't do the -/+ on the quantity for you but flex + # records do.. are you fucking serious ib...!? + size = record.get('quantity') or record['shares'] * { + 'BOT': 1, + 'SLD': -1, + }[record['side']] + + symbol: str = record['symbol'] + exch: str = record.get('listingExchange') or record['exchange'] + + # NOTE: remove null values since `tomlkit` can't serialize + # them to file. + if dnc := record.pop('deltaNeutralContract', None): + record['deltaNeutralContract'] = dnc + + # likely an opts contract record from a flex report.. + # TODO: no idea how to parse ^ the strike part from flex.. + # (00010000 any, or 00007500 tsla, ..) + # we probably must do the contract lookup for this? + if ( + ' ' in symbol + or '--' in exch + ): + underlying, _, tail = symbol.partition(' ') + exch: str = 'opt' + expiry: str = tail[:6] + # otype = tail[6] + # strike = tail[7:] + + print(f'skipping opts contract {symbol}') + return None + + # timestamping is way different in API records + dtstr = record.get('datetime') + date = record.get('date') + flex_dtstr = record.get('dateTime') + + if dtstr or date: + dt = pendulum.parse(dtstr or date) + + elif flex_dtstr: + # probably a flex record with a wonky non-std timestamp.. + dt = parse_flex_dt(record['dateTime']) + + # special handling of symbol extraction from + # flex records using some ad-hoc schema parsing. 
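+    # (flex reports use eg. `assetCategory`/`dateTime` where the
+    # API records use `secType`/`datetime`; hence the `record.get()`
+    # fallback chains above and below)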
+    asset_type: str = record.get(
+        'assetCategory'
+    ) or record.get('secType', 'STK')
+
+    if (expiry := (
+        record.get('lastTradeDateOrContractMonth')
+        or record.get('expiry')
+        )
+    ):
+        expiry: str = str(expiry).strip(' ')
+        # NOTE: we directly use the (simple and usually short)
+        # date-string expiry token when packing the `MktPair`
+        # since we want the fqme to contain *that* token.
+        # It might make sense later to instead parse and then
+        # render different output str format(s) for this same
+        # purpose depending on asset-type-market down the road.
+        # Eg. for derivs we use the short token only for fqme
+        # but use the isoformat('T') for transactions and
+        # account file position entries?
+        # dt_str: str = pendulum.parse(expiry).isoformat('T')
+
+    # XXX: pretty much all legacy market assets have a fiat
+    # currency (denomination) determined by their venue.
+    currency: str = record['currency']
+    src = Asset(
+        name=currency.lower(),
+        atype='fiat',
+        tx_tick=Decimal('0.01'),
+    )
+
+    match asset_type:
+        case 'FUT':
+            # (flex) ledger entries don't have any simple 3-char key?
+            # TODO: XXX: WOA this is kinda hacky.. probably
+            # should figure out the correct future pair key more
+            # explicitly and consistently?
+            symbol: str = symbol[:3]
+            dst = Asset(
+                name=symbol.lower(),
+                atype='future',
+                tx_tick=Decimal('1'),
+            )
+
+        case 'STK':
+            dst = Asset(
+                name=symbol.lower(),
+                atype='stock',
+                tx_tick=Decimal('1'),
+            )
+
+        case 'CASH':
+            if currency not in symbol:
+                # likely a dict-casted `Forex` contract which
+                # has .symbol as the dst and .currency as the
+                # src.
+                name: str = symbol.lower()
+            else:
+                # likely a flex-report record which puts
+                # EUR.USD as the symbol field and just USD in
+                # the currency field.
+                name: str = symbol.lower().replace(f'.{src.name}', '')
+
+            dst = Asset(
+                name=name,
+                atype='fiat',
+                tx_tick=Decimal('0.01'),
+            )
+
+        case 'OPT':
+            dst = Asset(
+                name=symbol.lower(),
+                atype='option',
+                tx_tick=Decimal('1'),
+            )
+
+    # try to build out piker fqme from record.
+    # src: str = record['currency']
+    price_tick: Decimal = digits_to_dec(dec_digits(price))
+
+    # NOTE: can't serialize `tomlkit.String` so cast to native
+    atype: str = str(dst.atype)
+
+    mkt = MktPair(
+        bs_mktid=str(conid),
+        dst=dst,
+
+        price_tick=price_tick,
+        # NOTE: for "legacy" assets, volume is normally discrete, not
+        # a float, but we keep a digit in case the suitz decide
+        # to get crazy and change it; we'll be kinda ready
+        # schema-wise..
+        size_tick=Decimal('1'),
+
+        src=src,  # XXX: normally always a fiat
+
+        _atype=atype,
+
+        venue=exch,
+        expiry=expiry,
+        broker='ib',
+
+        _fqme_without_src=(atype != 'fiat'),
+    )
+
+    fqme: str = mkt.fqme
+
+    # XXX: if passed in, we fill out the symcache ad-hoc in order
+    # to make downstream accounting work..
+    if symcache:
+        symcache.mktmaps[fqme] = mkt
+        symcache.assets[src.name] = src
+        symcache.assets[dst.name] = dst
+
+    # NOTE: for flex records the normal fields for defining an fqme
+    # sometimes won't be available so we rely on two approaches for
+    # the "reverse lookup" of piker style fqme keys:
+    # - when dealing with API trade records received from
+    # `IB.trades()` we do a contract lookup at the time of processing
+    # - when dealing with flex records, it is assumed the record
+    # is at least a day old and thus the TWS position reporting system
+    # should already have entries if the pps are still open, in
+    # which case, we can pull the fqme from that table (see
+    # `trades_dialogue()` above).
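+    # (note the signed `size`, fee based `cost` and parsed `dt`
+    # from above all land on the txn; `bs_mktid` stays the ib
+    # `conId` cast to `str`)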
+ return Transaction( + fqme=fqme, + tid=tid, + size=size, + price=price, + cost=comms, + dt=dt, + expiry=expiry, + bs_mktid=str(conid), + ) + + + def norm_trade_records( ledger: dict[str, Any], + symcache: SymbologyCache | None = None, ) -> dict[str, Transaction]: ''' @@ -53,188 +260,22 @@ def norm_trade_records( records: list[Transaction] = [] for tid, record in ledger.items(): - conid = record.get('conId') or record['conid'] - comms = record.get('commission') - if comms is None: - comms = -1*record['ibCommission'] - - price = record.get('price') or record['tradePrice'] - - # the api doesn't do the -/+ on the quantity for you but flex - # records do.. are you fucking serious ib...!? - size = record.get('quantity') or record['shares'] * { - 'BOT': 1, - 'SLD': -1, - }[record['side']] - - symbol: str = record['symbol'] - exch: str = record.get('listingExchange') or record['exchange'] - - # NOTE: remove null values since `tomlkit` can't serialize - # them to file. - if dnc := record.pop('deltaNeutralContract', None): - record['deltaNeutralContract'] = dnc - - # likely an opts contract record from a flex report.. - # TODO: no idea how to parse ^ the strike part from flex.. - # (00010000 any, or 00007500 tsla, ..) - # we probably must do the contract lookup for this? - if ( - ' ' in symbol - or '--' in exch - ): - underlying, _, tail = symbol.partition(' ') - exch: str = 'opt' - expiry: str = tail[:6] - # otype = tail[6] - # strike = tail[7:] - - print(f'skipping opts contract {symbol}') - continue - - # timestamping is way different in API records - dtstr = record.get('datetime') - date = record.get('date') - flex_dtstr = record.get('dateTime') - - if dtstr or date: - dt = pendulum.parse(dtstr or date) - elif flex_dtstr: - # probably a flex record with a wonky non-std timestamp.. - dt = parse_flex_dt(record['dateTime']) + txn = norm_trade( + tid, + record, - # special handling of symbol extraction from - # flex records using some ad-hoc schema parsing. - asset_type: str = record.get( - 'assetCategory' - ) or record.get('secType', 'STK') - - if (expiry := ( - record.get('lastTradeDateOrContractMonth') - or record.get('expiry') - ) - ): - expiry: str = str(expiry).strip(' ') - # NOTE: we directly use the (simple and usually short) - # date-string expiry token when packing the `MktPair` - # since we want the fqme to contain *that* token. - # It might make sense later to instead parse and then - # render different output str format(s) for this same - # purpose depending on asset-type-market down the road. - # Eg. for derivs we use the short token only for fqme - # but use the isoformat('T') for transactions and - # account file position entries? - # dt_str: str = pendulum.parse(expiry).isoformat('T') - - # XXX: pretty much all legacy market assets have a fiat - # currency (denomination) determined by their venue. - currency: str = record['currency'] - src = Asset( - name=currency.lower(), - atype='fiat', - tx_tick=Decimal('0.01'), + # NOTE: currently no symcache support + pairs={}, + symcache=symcache, ) - match asset_type: - case 'FUT': - # (flex) ledger entries don't have any simple 3-char key? - # TODO: XXX: WOA this is kinda hacky.. probably - # should figure out the correct future pair key more - # explicitly and consistently? 
- symbol: str = symbol[:3] - dst = Asset( - name=symbol.lower(), - atype='future', - tx_tick=Decimal('1'), - ) - - case 'STK': - dst = Asset( - name=symbol.lower(), - atype='stock', - tx_tick=Decimal('1'), - ) - - case 'CASH': - if currency not in symbol: - # likely a dict-casted `Forex` contract which - # has .symbol as the dst and .currency as the - # src. - name: str = symbol.lower() - else: - # likely a flex-report record which puts - # EUR.USD as the symbol field and just USD in - # the currency field. - name: str = symbol.lower().replace(f'.{src.name}', '') - - dst = Asset( - name=name, - atype='fiat', - tx_tick=Decimal('0.01'), - ) - - case 'OPT': - dst = Asset( - name=symbol.lower(), - atype='option', - tx_tick=Decimal('1'), - ) - - # try to build out piker fqme from record. - # src: str = record['currency'] - price_tick: Decimal = digits_to_dec(dec_digits(price)) - - # NOTE: can't serlialize `tomlkit.String` so cast to native - atype: str = str(dst.atype) - - pair = MktPair( - bs_mktid=str(conid), - dst=dst, - - price_tick=price_tick, - # NOTE: for "legacy" assets, volume is normally discreet, not - # a float, but we keep a digit in case the suitz decide - # to get crazy and change it; we'll be kinda ready - # schema-wise.. - size_tick=Decimal('1'), - - src=src, # XXX: normally always a fiat - - _atype=atype, - - venue=exch, - expiry=expiry, - broker='ib', - - _fqme_without_src=(atype != 'fiat'), - ) + if txn is None: + continue - fqme: str = pair.fqme - - # NOTE: for flex records the normal fields for defining an fqme - # sometimes won't be available so we rely on two approaches for - # the "reverse lookup" of piker style fqme keys: - # - when dealing with API trade records received from - # `IB.trades()` we do a contract lookup at he time of processing - # - when dealing with flex records, it is assumed the record - # is at least a day old and thus the TWS position reporting system - # should already have entries if the pps are still open, in - # which case, we can pull the fqme from that table (see - # `trades_dialogue()` above). - trans = Transaction( - fqme=fqme, - tid=tid, - size=size, - price=price, - cost=comms, - dt=dt, - expiry=expiry, - bs_mktid=str(conid), - ) insort( records, - trans, + txn, key=lambda t: t.dt ) @@ -258,14 +299,14 @@ def api_trades_to_ledger_entries( # instead of pre-casting to dicts? trade_entries: list[dict], -) -> dict: +) -> dict[str, dict]: ''' Convert API execution objects entry objects into ``dict`` form, pretty much straight up without modification except add a `pydatetime` field from the parsed timestamp. ''' - trades_by_account = {} + trades_by_account: dict[str, dict] = {} for t in trade_entries: # NOTE: example of schema we pull from the API client. # { From fe7827794851354994212a5ec6c573abc297c6e8 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 17 Jul 2023 18:30:11 -0400 Subject: [PATCH 061/116] ib: add new `.symbols` sub-mod Move in the obvious things XD - all the specially defined venue tables from `.api`. - some parser funcs: `con2fqme()` and `parse_patt2fqme()`. - the `get_mkt_info()` and `open_symbol_search()` broker eps. - the `_asset_type_map` table which converts to `.accounting.Asset` compat keys for each contract/security. 
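As a quick orientation, the relocated converter can be exercised
roughly like so (a hedged sketch assuming the new
`piker.brokers.ib.symbols` import path from this patch; the example
contract values are illustrative only):

    import ib_insync as ibis
    from piker.brokers.ib.symbols import con2fqme

    con = ibis.Contract(
        secType='CASH',
        localSymbol='EUR.USD',
        exchange='IDEALPRO',
    )
    fqme_key, calc_price = con2fqme(con)
    # -> ('eurusd.idealpro', True): forex feeds report no real
    # volume so a price must be calculated for display.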
--- piker/brokers/ib/__init__.py | 14 +- piker/brokers/ib/api.py | 301 ++----------------- piker/brokers/ib/feed.py | 268 +---------------- piker/brokers/ib/symbols.py | 561 +++++++++++++++++++++++++++++++++++ 4 files changed, 600 insertions(+), 544 deletions(-) create mode 100644 piker/brokers/ib/symbols.py diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py index c98057a7a..e0ad96c88 100644 --- a/piker/brokers/ib/__init__.py +++ b/piker/brokers/ib/__init__.py @@ -31,8 +31,6 @@ from .feed import ( open_history_client, stream_quotes, - get_mkt_info, - open_symbol_search, ) from .broker import ( open_trade_dialog, @@ -41,11 +39,11 @@ norm_trade, norm_trade_records, ) -# TODO: -# from .symbols import ( -# get_mkt_info, -# open_symbol_search, -# ) +from .symbols import ( + get_mkt_info, + open_symbol_search, + _search_conf, +) __all__ = [ 'get_client', @@ -56,6 +54,7 @@ 'open_history_client', 'open_symbol_search', 'stream_quotes', + '_search_conf', ] _brokerd_mods: list[str] = [ @@ -65,6 +64,7 @@ _datad_mods: list[str] = [ 'feed', + 'symbols', ] diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 880e9f531..69e5cc3ef 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -31,7 +31,6 @@ from datetime import datetime from functools import ( partial, - # lru_cache, ) import itertools from math import isnan @@ -47,7 +46,6 @@ import time from types import SimpleNamespace - from bidict import bidict import trio import tractor @@ -67,7 +65,6 @@ ) from ib_insync.contract import ( ContractDetails, - Option, ) from ib_insync.order import Order from ib_insync.ticker import Ticker @@ -88,6 +85,13 @@ # non-relative for backends so that non-builting backends # can be easily modelled after this style B) from piker import config +from .symbols import ( + con2fqme, + parse_patt2fqme, + _adhoc_symbol_map, + _exch_skip_list, + _futes_venues, +) from ._util import ( log, # only for the ib_sync internal logging @@ -133,15 +137,6 @@ _show_wap_in_history: bool = False -# optional search config the backend can register for -# it's symbol search handling (in this case we avoid -# accepting patterns before the kb has settled more then -# a quarter second). -_search_conf = { - 'pause_period': 6 / 16, -} - - # overrides to sidestep pretty questionable design decisions in # ``ib_insync``: class NonShittyWrapper(Wrapper): @@ -200,120 +195,6 @@ def __init__(self): # self.errorEvent += self._onError self.client.apiEnd += self.disconnectedEvent - -_futes_venues = ( - 'GLOBEX', - 'NYMEX', - 'CME', - 'CMECRYPTO', - 'COMEX', - # 'CMDTY', # special name case.. - 'CBOT', # (treasury) yield futures -) - -_adhoc_cmdty_set = { - # metals - # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 - 'xauusd.cmdty', # london gold spot ^ - 'xagusd.cmdty', # silver spot -} - -# NOTE: if you aren't seeing one of these symbol's futues contracts -# show up, it's likely the `.` part is wrong! 
-_adhoc_futes_set = { - - # equities - 'nq.cme', - 'mnq.cme', # micro - - 'es.cme', - 'mes.cme', # micro - - # cypto$ - 'brr.cme', - 'mbt.cme', # micro - 'ethusdrr.cme', - - # agriculture - 'he.comex', # lean hogs - 'le.comex', # live cattle (geezers) - 'gf.comex', # feeder cattle (younguns) - - # raw - 'lb.comex', # random len lumber - - 'gc.comex', - 'mgc.comex', # micro - - # oil & gas - 'cl.nymex', - - 'ni.comex', # silver futes - 'qi.comex', # mini-silver futes - - # treasury yields - # etfs by duration: - # SHY -> IEI -> IEF -> TLT - 'zt.cbot', # 2y - 'z3n.cbot', # 3y - 'zf.cbot', # 5y - 'zn.cbot', # 10y - 'zb.cbot', # 30y - - # (micros of above) - '2yy.cbot', - '5yy.cbot', - '10y.cbot', - '30y.cbot', -} - - -# taken from list here: -# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php -_adhoc_fiat_set = set(( - 'USD, AED, AUD, CAD,' - 'CHF, CNH, CZK, DKK,' - 'EUR, GBP, HKD, HUF,' - 'ILS, JPY, MXN, NOK,' - 'NZD, PLN, RUB, SAR,' - 'SEK, SGD, TRY, ZAR' - ).split(' ,') -) - - -# map of symbols to contract ids -_adhoc_symbol_map = { - # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 - - # NOTE: some cmdtys/metals don't have trade data like gold/usd: - # https://groups.io/g/twsapi/message/44174 - 'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), -} -for qsn in _adhoc_futes_set: - sym, venue = qsn.split('.') - assert venue.upper() in _futes_venues, f'{venue}' - _adhoc_symbol_map[sym.upper()] = ( - {'exchange': venue}, - {}, - ) - - -# exchanges we don't support at the moment due to not knowing -# how to do symbol-contract lookup correctly likely due -# to not having the data feeds subscribed. -_exch_skip_list = { - - 'ASX', # aussie stocks - 'MEXI', # mexican stocks - - # no idea - 'VALUE', - 'FUNDSERV', - 'SWB2', - 'PSE', - 'PHLX', -} - _enters = 0 @@ -397,14 +278,13 @@ def __init__( # as needed throughout this backend (eg. vnc sockaddr). self.conf = config + # NOTE: the ib.client here is "throttled" to 45 rps by default self.ib = ib - self.ib.RaiseRequestErrors = True + self.ib.RaiseRequestErrors: bool = True # contract cache self._cons: dict[str, Contract] = {} - # NOTE: the ib.client here is "throttled" to 45 rps by default - async def trades(self) -> list[dict]: ''' Return list of trade-fills from current session in ``dict``. @@ -544,14 +424,14 @@ async def con_deats( ) -> dict[str, ContractDetails]: - futs = [] + futs: list[asyncio.Future] = [] for con in contracts: if con.primaryExchange not in _exch_skip_list: futs.append(self.ib.reqContractDetailsAsync(con)) # batch request all details try: - results = await asyncio.gather(*futs) + results: list[ContractDetails] = await asyncio.gather(*futs) except RequestError as err: msg = err.message if ( @@ -561,7 +441,7 @@ async def con_deats( return {} # one set per future result - details = {} + details: dict[str, ContractDetails] = {} for details_set in results: # XXX: if there is more then one entry in the details list @@ -576,26 +456,28 @@ async def con_deats( return details - async def search_stocks( + async def search_contracts( self, pattern: str, upto: int = 3, # how many contracts to search "up to" ) -> dict[str, ContractDetails]: ''' - Search for stocks matching provided ``str`` pattern. + Search for ``Contract``s matching provided ``str`` pattern. - Return a dictionary of ``upto`` entries worth of contract details. + Return a dictionary of ``upto`` entries worth of ``ContractDetails``. 
''' - descriptions = await self.ib.reqMatchingSymbolsAsync(pattern) - - if descriptions is None: + descrs: list[ContractDetails] = ( + await self.ib.reqMatchingSymbolsAsync(pattern) + ) + if descrs is None: return {} - # limit - descrs = descriptions[:upto] - return await self.con_deats([d.contract for d in descrs]) + return await self.con_deats( + # limit to first ``upto`` entries + [d.contract for d in descrs[:upto]] + ) async def search_symbols( self, @@ -609,7 +491,7 @@ async def search_symbols( # TODO add search though our adhoc-locally defined symbol set # for futes/cmdtys/ try: - results = await self.search_stocks( + results = await self.search_contracts( pattern, upto=upto, ) @@ -712,8 +594,8 @@ async def get_fute( return con - # TODO: make this work with our `MethodProxy`.. - # @lru_cache(maxsize=None) + # TODO: is this a better approach? + # @async_lifo_cache() async def get_con( self, conid: int, @@ -727,61 +609,6 @@ async def get_con( self._cons[conid] = con return con - def parse_patt2fqme( - self, - pattern: str, - - ) -> tuple[str, str, str, str]: - - # TODO: we can't use this currently because - # ``wrapper.starTicker()`` currently cashes ticker instances - # which means getting a singel quote will potentially look up - # a quote for a ticker that it already streaming and thus run - # into state clobbering (eg. list: Ticker.ticks). It probably - # makes sense to try this once we get the pub-sub working on - # individual symbols... - - # XXX UPDATE: we can probably do the tick/trades scraping - # inside our eventkit handler instead to bypass this entirely? - - currency = '' - - # fqme parsing stage - # ------------------ - if '.ib' in pattern: - from piker.accounting import unpack_fqme - _, symbol, venue, expiry = unpack_fqme(pattern) - - else: - symbol = pattern - expiry = '' - - # another hack for forex pairs lul. - if ( - '.idealpro' in symbol - # or '/' in symbol - ): - exch = 'IDEALPRO' - symbol = symbol.removesuffix('.idealpro') - if '/' in symbol: - symbol, currency = symbol.split('/') - - else: - # TODO: yes, a cache.. - # try: - # # give the cache a go - # return self._contracts[symbol] - # except KeyError: - # log.debug(f'Looking up contract for {symbol}') - expiry: str = '' - if symbol.count('.') > 1: - symbol, _, expiry = symbol.rpartition('.') - - # use heuristics to figure out contract "type" - symbol, exch = symbol.upper().rsplit('.', maxsplit=1) - - return symbol, currency, exch, expiry - async def find_contracts( self, pattern: Optional[str] = None, @@ -792,7 +619,7 @@ async def find_contracts( ) -> Contract: if pattern is not None: - symbol, currency, exch, expiry = self.parse_patt2fqme( + symbol, currency, exch, expiry = parse_patt2fqme( pattern, ) sectype = '' @@ -1145,80 +972,6 @@ def positions( return self.ib.positions(account=account) -def con2fqme( - con: Contract, - _cache: dict[int, (str, bool)] = {} - -) -> tuple[str, bool]: - ''' - Convert contracts to fqme-style strings to be used both in symbol-search - matching and as feed tokens passed to the front end data deed layer. - - Previously seen contracts are cached by id. 
- - ''' - # should be real volume for this contract by default - calc_price = False - if con.conId: - try: - return _cache[con.conId] - except KeyError: - pass - - suffix = con.primaryExchange or con.exchange - symbol = con.symbol - expiry = con.lastTradeDateOrContractMonth or '' - - match con: - case Option(): - # TODO: option symbol parsing and sane display: - symbol = con.localSymbol.replace(' ', '') - - case ( - Commodity() - # search API endpoint returns std con box.. - | Contract(secType='CMDTY') - ): - # commodities and forex don't have an exchange name and - # no real volume so we have to calculate the price - suffix = con.secType - - # no real volume on this tract - calc_price = True - - case Forex() | Contract(secType='CASH'): - dst, src = con.localSymbol.split('.') - symbol = ''.join([dst, src]) - suffix = con.exchange or 'idealpro' - - # no real volume on forex feeds.. - calc_price = True - - if not suffix: - entry = _adhoc_symbol_map.get( - con.symbol or con.localSymbol - ) - if entry: - meta, kwargs = entry - cid = meta.get('conId') - if cid: - assert con.conId == meta['conId'] - suffix = meta['exchange'] - - # append a `.` to the returned symbol - # key for derivatives that normally is the expiry - # date key. - if expiry: - suffix += f'.{expiry}' - - fqme_key = symbol.lower() - if suffix: - fqme_key = '.'.join((fqme_key, suffix)).lower() - - _cache[con.conId] = fqme_key, calc_price - return fqme_key, calc_price - - # per-actor API ep caching _client_cache: dict[tuple[str, int], Client] = {} _scan_ignore: set[tuple[str, int]] = set() diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 7673c2c53..8a6ac949a 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -21,9 +21,7 @@ import asyncio from contextlib import ( asynccontextmanager as acm, - nullcontext, ) -from decimal import Decimal from dataclasses import asdict from datetime import datetime from functools import partial @@ -32,11 +30,9 @@ from typing import ( Any, Callable, - Awaitable, ) from async_generator import aclosing -from fuzzywuzzy import process as fuzzy import ib_insync as ibis import numpy as np import pendulum @@ -44,6 +40,10 @@ import trio from trio_typing import TaskStatus +from piker.accounting import ( + MktPair, +) +from piker.data.validate import FeedInit from .._util import ( NoData, DataUnavailable, @@ -63,14 +63,7 @@ RequestError, ) from ._util import data_reset_hack -from piker._cacheables import ( - async_lifo_cache, -) -from piker.accounting import ( - Asset, - MktPair, -) -from piker.data.validate import FeedInit +from .symbols import get_mkt_info # XXX NOTE: See available types table docs: @@ -559,28 +552,6 @@ async def query(): return result, data_cs is not None -# re-mapping to piker asset type names -# https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113 -_asset_type_map = { - 'STK': 'stock', - 'OPT': 'option', - 'FUT': 'future', - 'CONTFUT': 'continuous_future', - 'CASH': 'fiat', - 'IND': 'index', - 'CFD': 'cfd', - 'BOND': 'bond', - 'CMDTY': 'commodity', - 'FOP': 'futures_option', - 'FUND': 'mutual_fund', - 'WAR': 'warrant', - 'IOPT': 'warran', - 'BAG': 'bag', - 'CRYPTO': 'crypto', # bc it's diff then fiat? 
- # 'NEWS': 'news', -} - - _quote_streams: dict[str, trio.abc.ReceiveStream] = {} @@ -784,97 +755,6 @@ def normalize( return data -@async_lifo_cache() -async def get_mkt_info( - fqme: str, - - proxy: MethodProxy | None = None, - -) -> tuple[MktPair, ibis.ContractDetails]: - - # XXX: we don't need to split off any fqme broker part? - # bs_fqme, _, broker = fqme.partition('.') - - proxy: MethodProxy - if proxy is not None: - client_ctx = nullcontext(proxy) - else: - client_ctx = open_data_client - - async with client_ctx as proxy: - try: - ( - con, # Contract - details, # ContractDetails - ) = await proxy.get_sym_details(symbol=fqme) - except ConnectionError: - log.exception(f'Proxy is ded {proxy._aio_ns}') - raise - - # TODO: more consistent field translation - atype = _asset_type_map[con.secType] - - if atype == 'commodity': - venue: str = 'cmdty' - else: - venue = con.primaryExchange or con.exchange - - price_tick: Decimal = Decimal(str(details.minTick)) - - if atype == 'stock': - # XXX: GRRRR they don't support fractional share sizes for - # stocks from the API?! - # if con.secType == 'STK': - size_tick = Decimal('1') - else: - size_tick: Decimal = Decimal( - str(details.minSize).rstrip('0') - ) - # |-> TODO: there is also the Contract.sizeIncrement, bt wtf is it? - - # NOTE: this is duplicate from the .broker.norm_trade_records() - # routine, we should factor all this parsing somewhere.. - expiry_str = str(con.lastTradeDateOrContractMonth) - # if expiry: - # expiry_str: str = str(pendulum.parse( - # str(expiry).strip(' ') - # )) - - # TODO: currently we can't pass the fiat src asset because - # then we'll get a `MNQUSD` request for history data.. - # we need to figure out how we're going to handle this (later?) - # but likely we want all backends to eventually handle - # ``dst/src.venue.`` style !? - src = Asset( - name=str(con.currency).lower(), - atype='fiat', - tx_tick=Decimal('0.01'), # right? - ) - - mkt = MktPair( - dst=Asset( - name=con.symbol.lower(), - atype=atype, - tx_tick=size_tick, - ), - src=src, - - price_tick=price_tick, - size_tick=size_tick, - - bs_mktid=str(con.conId), - venue=str(venue), - expiry=expiry_str, - broker='ib', - - # TODO: options contract info as str? - # contract_info= - _fqme_without_src=(atype != 'fiat'), - ) - - return mkt, details - - async def stream_quotes( send_chan: trio.abc.SendChannel, @@ -1045,141 +925,3 @@ async def reset_on_feed(): # ugh, clear ticks since we've consumed them ticker.ticks = [] # last = time.time() - - -@tractor.context -async def open_symbol_search( - ctx: tractor.Context, - -) -> None: - - # TODO: load user defined symbol set locally for fast search? - await ctx.started({}) - - async with ( - open_client_proxies() as (proxies, _), - open_data_client() as data_proxy, - ): - async with ctx.open_stream() as stream: - - # select a non-history client for symbol search to lighten - # the load in the main data node. - proxy = data_proxy - for name, proxy in proxies.items(): - if proxy is data_proxy: - continue - break - - ib_client = proxy._aio_ns.ib - log.info(f'Using {ib_client} for symbol search') - - last = time.time() - async for pattern in stream: - log.info(f'received {pattern}') - now = time.time() - - # this causes tractor hang... 
- # assert 0 - - assert pattern, 'IB can not accept blank search pattern' - - # throttle search requests to no faster then 1Hz - diff = now - last - if diff < 1.0: - log.debug('throttle sleeping') - await trio.sleep(diff) - try: - pattern = stream.receive_nowait() - except trio.WouldBlock: - pass - - if ( - not pattern - or pattern.isspace() - - # XXX: not sure if this is a bad assumption but it - # seems to make search snappier? - or len(pattern) < 1 - ): - log.warning('empty pattern received, skipping..') - - # TODO: *BUG* if nothing is returned here the client - # side will cache a null set result and not showing - # anything to the use on re-searches when this query - # timed out. We probably need a special "timeout" msg - # or something... - - # XXX: this unblocks the far end search task which may - # hold up a multi-search nursery block - await stream.send({}) - - continue - - log.info(f'searching for {pattern}') - - last = time.time() - - # async batch search using api stocks endpoint and module - # defined adhoc symbol set. - stock_results = [] - - async def stash_results(target: Awaitable[list]): - try: - results = await target - except tractor.trionics.Lagged: - print("IB SYM-SEARCH OVERRUN?!?") - return - - stock_results.extend(results) - - for i in range(10): - with trio.move_on_after(3) as cs: - async with trio.open_nursery() as sn: - sn.start_soon( - stash_results, - proxy.search_symbols( - pattern=pattern, - upto=5, - ), - ) - - # trigger async request - await trio.sleep(0) - - if cs.cancelled_caught: - log.warning( - f'Search timeout? {proxy._aio_ns.ib.client}' - ) - continue - else: - break - - # # match against our ad-hoc set immediately - # adhoc_matches = fuzzy.extractBests( - # pattern, - # list(_adhoc_futes_set), - # score_cutoff=90, - # ) - # log.info(f'fuzzy matched adhocs: {adhoc_matches}') - # adhoc_match_results = {} - # if adhoc_matches: - # # TODO: do we need to pull contract details? - # adhoc_match_results = {i[0]: {} for i in - # adhoc_matches} - - log.debug(f'fuzzy matching stocks {stock_results}') - stock_matches = fuzzy.extractBests( - pattern, - stock_results, - score_cutoff=50, - ) - - # matches = adhoc_match_results | { - matches = { - item[0]: {} for item in stock_matches - } - # TODO: we used to deliver contract details - # {item[2]: item[0] for item in stock_matches} - - log.debug(f"sending matches: {matches.keys()}") - await stream.send(matches) diff --git a/piker/brokers/ib/symbols.py b/piker/brokers/ib/symbols.py new file mode 100644 index 000000000..9a7491799 --- /dev/null +++ b/piker/brokers/ib/symbols.py @@ -0,0 +1,561 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Symbology search and normalization. 
+ +''' +from __future__ import annotations +from contextlib import ( + nullcontext, +) +from decimal import Decimal +import time +from typing import ( + Awaitable, + TYPE_CHECKING, +) + +from fuzzywuzzy import process as fuzzy +import ib_insync as ibis +import tractor +import trio + +from piker.accounting import ( + Asset, + MktPair, +) +from piker._cacheables import ( + async_lifo_cache, +) + +from ._util import ( + log, +) + +if TYPE_CHECKING: + from .api import ( + MethodProxy, + ) + +_futes_venues = ( + 'GLOBEX', + 'NYMEX', + 'CME', + 'CMECRYPTO', + 'COMEX', + # 'CMDTY', # special name case.. + 'CBOT', # (treasury) yield futures +) + +_adhoc_cmdty_set = { + # metals + # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 + 'xauusd.cmdty', # london gold spot ^ + 'xagusd.cmdty', # silver spot +} + +# NOTE: if you aren't seeing one of these symbol's futues contracts +# show up, it's likely the `.` part is wrong! +_adhoc_futes_set = { + + # equities + 'nq.cme', + 'mnq.cme', # micro + + 'es.cme', + 'mes.cme', # micro + + # cypto$ + 'brr.cme', + 'mbt.cme', # micro + 'ethusdrr.cme', + + # agriculture + 'he.comex', # lean hogs + 'le.comex', # live cattle (geezers) + 'gf.comex', # feeder cattle (younguns) + + # raw + 'lb.comex', # random len lumber + + 'gc.comex', + 'mgc.comex', # micro + + # oil & gas + 'cl.nymex', + + 'ni.comex', # silver futes + 'qi.comex', # mini-silver futes + + # treasury yields + # etfs by duration: + # SHY -> IEI -> IEF -> TLT + 'zt.cbot', # 2y + 'z3n.cbot', # 3y + 'zf.cbot', # 5y + 'zn.cbot', # 10y + 'zb.cbot', # 30y + + # (micros of above) + '2yy.cbot', + '5yy.cbot', + '10y.cbot', + '30y.cbot', +} + + +# taken from list here: +# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php +_adhoc_fiat_set = set(( + 'USD, AED, AUD, CAD,' + 'CHF, CNH, CZK, DKK,' + 'EUR, GBP, HKD, HUF,' + 'ILS, JPY, MXN, NOK,' + 'NZD, PLN, RUB, SAR,' + 'SEK, SGD, TRY, ZAR' + ).split(' ,') +) + + +# map of symbols to contract ids +_adhoc_symbol_map = { + # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 + + # NOTE: some cmdtys/metals don't have trade data like gold/usd: + # https://groups.io/g/twsapi/message/44174 + 'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), +} +for qsn in _adhoc_futes_set: + sym, venue = qsn.split('.') + assert venue.upper() in _futes_venues, f'{venue}' + _adhoc_symbol_map[sym.upper()] = ( + {'exchange': venue}, + {}, + ) + + +# exchanges we don't support at the moment due to not knowing +# how to do symbol-contract lookup correctly likely due +# to not having the data feeds subscribed. +_exch_skip_list = { + + 'ASX', # aussie stocks + 'MEXI', # mexican stocks + + # no idea + 'VALUE', + 'FUNDSERV', + 'SWB2', + 'PSE', + 'PHLX', +} + +# optional search config the backend can register for +# it's symbol search handling (in this case we avoid +# accepting patterns before the kb has settled more then +# a quarter second). +_search_conf = { + 'pause_period': 6 / 16, +} + + +@tractor.context +async def open_symbol_search(ctx: tractor.Context) -> None: + ''' + Symbology search brokerd-endpoint. + + ''' + from .api import open_client_proxies + from .feed import open_data_client + + # TODO: load user defined symbol set locally for fast search? 
+ await ctx.started({}) + + async with ( + open_client_proxies() as (proxies, _), + open_data_client() as data_proxy, + ): + async with ctx.open_stream() as stream: + + # select a non-history client for symbol search to lighten + # the load in the main data node. + proxy = data_proxy + for name, proxy in proxies.items(): + if proxy is data_proxy: + continue + break + + ib_client = proxy._aio_ns.ib + log.info(f'Using {ib_client} for symbol search') + + last = time.time() + async for pattern in stream: + log.info(f'received {pattern}') + now = time.time() + + # this causes tractor hang... + # assert 0 + + assert pattern, 'IB can not accept blank search pattern' + + # throttle search requests to no faster then 1Hz + diff = now - last + if diff < 1.0: + log.debug('throttle sleeping') + await trio.sleep(diff) + try: + pattern = stream.receive_nowait() + except trio.WouldBlock: + pass + + if ( + not pattern + or pattern.isspace() + + # XXX: not sure if this is a bad assumption but it + # seems to make search snappier? + or len(pattern) < 1 + ): + log.warning('empty pattern received, skipping..') + + # TODO: *BUG* if nothing is returned here the client + # side will cache a null set result and not showing + # anything to the use on re-searches when this query + # timed out. We probably need a special "timeout" msg + # or something... + + # XXX: this unblocks the far end search task which may + # hold up a multi-search nursery block + await stream.send({}) + + continue + + log.info(f'searching for {pattern}') + + last = time.time() + + # async batch search using api stocks endpoint and module + # defined adhoc symbol set. + stock_results = [] + + async def stash_results(target: Awaitable[list]): + try: + results = await target + except tractor.trionics.Lagged: + print("IB SYM-SEARCH OVERRUN?!?") + return + + stock_results.extend(results) + + for i in range(10): + with trio.move_on_after(3) as cs: + async with trio.open_nursery() as sn: + sn.start_soon( + stash_results, + proxy.search_symbols( + pattern=pattern, + upto=5, + ), + ) + + # trigger async request + await trio.sleep(0) + + if cs.cancelled_caught: + log.warning( + f'Search timeout? {proxy._aio_ns.ib.client}' + ) + continue + else: + break + + # # match against our ad-hoc set immediately + # adhoc_matches = fuzzy.extractBests( + # pattern, + # list(_adhoc_futes_set), + # score_cutoff=90, + # ) + # log.info(f'fuzzy matched adhocs: {adhoc_matches}') + # adhoc_match_results = {} + # if adhoc_matches: + # # TODO: do we need to pull contract details? + # adhoc_match_results = {i[0]: {} for i in + # adhoc_matches} + + log.debug(f'fuzzy matching stocks {stock_results}') + stock_matches = fuzzy.extractBests( + pattern, + stock_results, + score_cutoff=50, + ) + + # matches = adhoc_match_results | { + matches = { + item[0]: {} for item in stock_matches + } + # TODO: we used to deliver contract details + # {item[2]: item[0] for item in stock_matches} + + log.debug(f"sending matches: {matches.keys()}") + await stream.send(matches) + + +# re-mapping to piker asset type names +# https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113 +_asset_type_map = { + 'STK': 'stock', + 'OPT': 'option', + 'FUT': 'future', + 'CONTFUT': 'continuous_future', + 'CASH': 'fiat', + 'IND': 'index', + 'CFD': 'cfd', + 'BOND': 'bond', + 'CMDTY': 'commodity', + 'FOP': 'futures_option', + 'FUND': 'mutual_fund', + 'WAR': 'warrant', + 'IOPT': 'warran', + 'BAG': 'bag', + 'CRYPTO': 'crypto', # bc it's diff then fiat? 
+ # 'NEWS': 'news', +} + + +def parse_patt2fqme( + # client: Client, + pattern: str, + +) -> tuple[str, str, str, str]: + + # TODO: we can't use this currently because + # ``wrapper.starTicker()`` currently cashes ticker instances + # which means getting a singel quote will potentially look up + # a quote for a ticker that it already streaming and thus run + # into state clobbering (eg. list: Ticker.ticks). It probably + # makes sense to try this once we get the pub-sub working on + # individual symbols... + + # XXX UPDATE: we can probably do the tick/trades scraping + # inside our eventkit handler instead to bypass this entirely? + + currency = '' + + # fqme parsing stage + # ------------------ + if '.ib' in pattern: + from piker.accounting import unpack_fqme + _, symbol, venue, expiry = unpack_fqme(pattern) + + else: + symbol = pattern + expiry = '' + + # another hack for forex pairs lul. + if ( + '.idealpro' in symbol + # or '/' in symbol + ): + exch = 'IDEALPRO' + symbol = symbol.removesuffix('.idealpro') + if '/' in symbol: + symbol, currency = symbol.split('/') + + else: + # TODO: yes, a cache.. + # try: + # # give the cache a go + # return client._contracts[symbol] + # except KeyError: + # log.debug(f'Looking up contract for {symbol}') + expiry: str = '' + if symbol.count('.') > 1: + symbol, _, expiry = symbol.rpartition('.') + + # use heuristics to figure out contract "type" + symbol, exch = symbol.upper().rsplit('.', maxsplit=1) + + return symbol, currency, exch, expiry + + +def con2fqme( + con: ibis.Contract, + _cache: dict[int, (str, bool)] = {} + +) -> tuple[str, bool]: + ''' + Convert contracts to fqme-style strings to be used both in + symbol-search matching and as feed tokens passed to the front + end data deed layer. + + Previously seen contracts are cached by id. + + ''' + # should be real volume for this contract by default + calc_price = False + if con.conId: + try: + return _cache[con.conId] + except KeyError: + pass + + suffix = con.primaryExchange or con.exchange + symbol = con.symbol + expiry = con.lastTradeDateOrContractMonth or '' + + match con: + case ibis.Option(): + # TODO: option symbol parsing and sane display: + symbol = con.localSymbol.replace(' ', '') + + case ( + ibis.Commodity() + # search API endpoint returns std con box.. + | ibis.Contract(secType='CMDTY') + ): + # commodities and forex don't have an exchange name and + # no real volume so we have to calculate the price + suffix = con.secType + + # no real volume on this tract + calc_price = True + + case ibis.Forex() | ibis.Contract(secType='CASH'): + dst, src = con.localSymbol.split('.') + symbol = ''.join([dst, src]) + suffix = con.exchange or 'idealpro' + + # no real volume on forex feeds.. + calc_price = True + + if not suffix: + entry = _adhoc_symbol_map.get( + con.symbol or con.localSymbol + ) + if entry: + meta, kwargs = entry + cid = meta.get('conId') + if cid: + assert con.conId == meta['conId'] + suffix = meta['exchange'] + + # append a `.` to the returned symbol + # key for derivatives that normally is the expiry + # date key. + if expiry: + suffix += f'.{expiry}' + + fqme_key = symbol.lower() + if suffix: + fqme_key = '.'.join((fqme_key, suffix)).lower() + + _cache[con.conId] = fqme_key, calc_price + return fqme_key, calc_price + + +@async_lifo_cache() +async def get_mkt_info( + fqme: str, + + proxy: MethodProxy | None = None, + +) -> tuple[MktPair, ibis.ContractDetails]: + + # XXX: we don't need to split off any fqme broker part? 
+ # bs_fqme, _, broker = fqme.partition('.') + + proxy: MethodProxy + if proxy is not None: + client_ctx = nullcontext(proxy) + else: + from .feed import ( + open_data_client, + ) + client_ctx = open_data_client + + async with client_ctx as proxy: + try: + ( + con, # Contract + details, # ContractDetails + ) = await proxy.get_sym_details(symbol=fqme) + except ConnectionError: + log.exception(f'Proxy is ded {proxy._aio_ns}') + raise + + # TODO: more consistent field translation + atype = _asset_type_map[con.secType] + + if atype == 'commodity': + venue: str = 'cmdty' + else: + venue = con.primaryExchange or con.exchange + + price_tick: Decimal = Decimal(str(details.minTick)) + + if atype == 'stock': + # XXX: GRRRR they don't support fractional share sizes for + # stocks from the API?! + # if con.secType == 'STK': + size_tick = Decimal('1') + else: + size_tick: Decimal = Decimal( + str(details.minSize).rstrip('0') + ) + # |-> TODO: there is also the Contract.sizeIncrement, bt wtf is it? + + # NOTE: this is duplicate from the .broker.norm_trade_records() + # routine, we should factor all this parsing somewhere.. + expiry_str = str(con.lastTradeDateOrContractMonth) + # if expiry: + # expiry_str: str = str(pendulum.parse( + # str(expiry).strip(' ') + # )) + + # TODO: currently we can't pass the fiat src asset because + # then we'll get a `MNQUSD` request for history data.. + # we need to figure out how we're going to handle this (later?) + # but likely we want all backends to eventually handle + # ``dst/src.venue.`` style !? + src = Asset( + name=str(con.currency).lower(), + atype='fiat', + tx_tick=Decimal('0.01'), # right? + ) + + mkt = MktPair( + dst=Asset( + name=con.symbol.lower(), + atype=atype, + tx_tick=size_tick, + ), + src=src, + + price_tick=price_tick, + size_tick=size_tick, + + bs_mktid=str(con.conId), + venue=str(venue), + expiry=expiry_str, + broker='ib', + + # TODO: options contract info as str? + # contract_info= + _fqme_without_src=(atype != 'fiat'), + ) + + return mkt, details From 8a10cbf6abc6d5d4d4414f22f53342d7c6d6f414 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 18 Jul 2023 21:00:19 -0400 Subject: [PATCH 062/116] Change `Position.clearsdict()` -> `.clearsitems()` Since apparently rendering to dict from a sorted generator func clearly doesn't preserve the order when using a `dict`-comprehension.. Further, there's really no reason to strictly return a `dict`. Adjust `.calc.ppu()` to make the return value instead a `list[tuple[str, dict]]`; this results in the current df cumsum values matching the original impl and the existing `binance.paper` unit tests now passing XD Other details that fix a variety of nonsense.. - adjust all `.clearsitems()` consumers to the new list output. - use `str(pendulum.now())` in `Position.from_msg()` since adding multiples with an `unknown` str will obviously discard them, facepalm. - fix `.calc.ppu()` to NOT short circuit when `accum_size` is 0; it's been causing all sorts of incorrect size outputs in the clearing table.. lel, this is what fixed the unit test! 
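For quick reference, a runnable repro sketch of both failure modes,
using made-up txn summaries (NOT the real `Transaction` type):

    from datetime import datetime

    summaries = [
        ('unknown', {'size': 1.0, 'dt': datetime(2023, 7, 1)}),
        ('unknown', {'size': 2.0, 'dt': datetime(2023, 7, 2)}),
    ]

    # 1) non-unique tids: re-keying into a `dict` silently clobbers
    # all but the last entry..
    asdict = {tid: summary for tid, summary in summaries}
    assert len(asdict) == 1  # first txn discarded!

    # 2) the new `list[tuple[str, dict]]` return type keeps every
    # entry AND makes chronological order explicit:
    aslist: list[tuple[str, dict]] = sorted(
        summaries,
        key=lambda item: item[1]['dt'],
    )
    assert len(aslist) == 2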
--- piker/accounting/_ledger.py | 15 +++++++++++---- piker/accounting/_pos.py | 24 ++++++++++++++---------- piker/accounting/calc.py | 37 ++++++++++++++++--------------------- 3 files changed, 41 insertions(+), 35 deletions(-) diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index f4cb3f38c..fd7a50ffa 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -211,9 +211,16 @@ def to_txns( Return entire output from ``.iter_txns()`` in a ``dict``. ''' - return { - t.tid: t for t in self.iter_txns(symcache=symcache) - } + txns: dict[str, Transaction] = {} + for t in self.iter_txns(symcache=symcache): + + if not t: + log.warning(f'{self.mod.name}:{self.account} TXN is -> {t}') + continue + + txns[t.tid] = t + + return txns def write_config(self) -> None: ''' @@ -386,7 +393,7 @@ def open_trade_ledger( account=account, mod=mod, symcache=symcache, - tx_sort=tx_sort, + tx_sort=getattr(mod, 'tx_sort', tx_sort), ) try: yield ledger diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 90b179f84..1e1590832 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -144,12 +144,11 @@ def expiry(self) -> datetime | None: # def bep() -> float: # ... - def clearsdict(self) -> dict[str, dict]: - clears: dict[str, dict] = ppu( + def clearsitems(self) -> list[(str, dict)]: + return ppu( self.iter_by_type('clear'), as_ledger=True ) - return clears def iter_by_type( self, @@ -195,7 +194,7 @@ def minimized_clears(self) -> dict[str, dict]: cumsize: float = 0 clears_since_zero: list[dict] = [] - for tid, cleardict in self.clearsdict().items(): + for tid, cleardict in self.clearsitems(): cumsize = float( # self.mkt.quantize(cumsize + cleardict['tx'].size self.mkt.quantize(cleardict['cumsize']) @@ -295,6 +294,8 @@ def update_from_msg( ) -> None: mkt: MktPair = self.mkt + now_dt: pendulum.DateTime = now() + now_str: str = str(now_dt) # NOTE WARNING XXX: we summarize the pos with a single # summary transaction (for now) until we either pass THIS @@ -303,13 +304,16 @@ def update_from_msg( t = Transaction( fqme=mkt.fqme, bs_mktid=mkt.bs_mktid, - tid='unknown', size=msg['size'], price=msg['avg_price'], cost=0, + # NOTE: special provisions required! + # - tid needs to be unique or this txn will be ignored!! + tid=now_str, + # TODO: also figure out how to avoid this! - dt=now(), + dt=now_dt, ) self.add_clear(t) @@ -342,11 +346,11 @@ def add_clear( Inserts are always done in datetime sorted order. ''' - added: bool = False + # added: bool = False tid: str = t.tid if tid in self._events: log.warning(f'{t} is already added?!') - return added + # return added # TODO: apparently this IS possible with a dict but not # common and probably not that beneficial unless we're also @@ -390,9 +394,9 @@ def calc_size(self) -> float: if self.expired(): return 0. - clears: list[dict] = list(self.clearsdict().values()) + clears: list[(str, dict)] = self.clearsitems() if clears: - return clears[-1]['cumsize'] + return clears[-1][1]['cumsize'] else: return 0. diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index db7771401..ee349092e 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -43,6 +43,7 @@ TransactionLedger, ) + def ppu( clears: Iterator[Transaction], @@ -56,7 +57,7 @@ def ppu( # new position fields inserted alongside each entry. as_ledger: bool = False, -) -> float: +) -> float | list[(str, dict)]: ''' Compute the "price-per-unit" price for the given non-zero sized rolling position. 
@@ -86,7 +87,8 @@ def ppu( ''' asize_h: list[float] = [] # historical accumulative size ppu_h: list[float] = [] # historical price-per-unit - ledger: dict[str, dict] = {} + # ledger: dict[str, dict] = {} + ledger: list[dict] = [] t: Transaction for t in clears: @@ -95,16 +97,10 @@ def ppu( is_clear: bool = not isinstance(clear_price, str) last_accum_size = asize_h[-1] if asize_h else 0 - accum_size = last_accum_size + clear_size + accum_size: float = last_accum_size + clear_size accum_sign = copysign(1, accum_size) - sign_change: bool = False - if accum_size == 0: - ppu_h.append(0) - asize_h.append(0) - continue - # on transfers we normally write some non-valid # price since withdrawal to another account/wallet # has nothing to do with inter-asset-market prices. @@ -170,9 +166,6 @@ def ppu( else: ppu = cost_basis / abs_new_size - # ppu_h.append(ppu) - # asize_h.append(accum_size) - else: # TODO: for PPU we should probably handle txs out # (aka withdrawals) similarly by simply not having @@ -185,8 +178,6 @@ def ppu( # need to be updated since the ppu remains constant # and gets weighted by the new size. ppu: float = ppu_h[-1] # set to previous value - # ppu_h.append(ppu_h[-1]) - # asize_h.append(accum_size) # extend with new rolling metric for this step ppu_h.append(ppu) @@ -194,13 +185,17 @@ def ppu( # ledger[t.tid] = { # 'txn': t, - ledger[t.tid] = t.to_dict() | { - 'ppu': ppu, - 'cumsize': accum_size, - 'sign_change': sign_change, - - # TODO: cum_pnl, bep - } + # ledger[t.tid] = t.to_dict() | { + ledger.append(( + t.tid, + t.to_dict() | { + 'ppu': ppu, + 'cumsize': accum_size, + 'sign_change': sign_change, + + # TODO: cum_pnl, bep + } + )) final_ppu = ppu_h[-1] if ppu_h else 0 # TODO: once we have etypes in all ledger entries.. From 5eb310cac9da1ad2c04021424b23c1a9752a1076 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 19 Jul 2023 16:46:36 -0400 Subject: [PATCH 063/116] ib: more fixes to try and get positioning correct.. Define and bind in the `tx_sort()` routine to be used by `open_trade_ledger()` when datetime sorting trade records. Further deats: - always use the IB reported position size (since apparently our ledger based accounting is getting rekt on occasion..). - better ib pos msg formatting when there's mismatches with the piker equivalent. - never emit zero-size pos msgs (in terms of strict ib pos sizing) since when there's piker ledger sizing errors we'll send the wrong thing to the ems and its clients.. 
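The `tx_sort()` binding idea in a nutshell; this is a simplified
stand-in for the real `piker.accounting.iter_by_dt()` (only the
`parsers` table pattern matters here, and every record is assumed to
carry at least one of the configured datetime fields):

    from functools import partial
    from typing import Any, Callable

    import pendulum

    def iter_by_dt(
        records: dict[str, dict[str, Any]],
        parsers: dict[str, Callable],
    ) -> list[tuple[str, dict[str, Any]]]:
        # sort entries by whichever datetime field each record
        # happens to carry.
        def key(item: tuple[str, dict]) -> Any:
            for field, parse in parsers.items():
                if (val := item[1].get(field)) is not None:
                    return parse(val)

        return sorted(records.items(), key=key)

    # the backend pre-binds its quirky field -> parser table once,
    tx_sort: Callable = partial(
        iter_by_dt,
        parsers={
            'datetime': pendulum.parse,
            'date': pendulum.parse,  # older records only have a date
        },
    )
    # ..then `open_trade_ledger()` just calls `tx_sort(records)`.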
--- piker/brokers/ib/__init__.py | 2 + piker/brokers/ib/broker.py | 114 +++++++++++++++++++---------------- piker/brokers/ib/ledger.py | 15 +++++ 3 files changed, 79 insertions(+), 52 deletions(-) diff --git a/piker/brokers/ib/__init__.py b/piker/brokers/ib/__init__.py index e0ad96c88..e792eb252 100644 --- a/piker/brokers/ib/__init__.py +++ b/piker/brokers/ib/__init__.py @@ -38,6 +38,7 @@ from .ledger import ( norm_trade, norm_trade_records, + tx_sort, ) from .symbols import ( get_mkt_info, @@ -55,6 +56,7 @@ 'open_symbol_search', 'stream_quotes', '_search_conf', + 'tx_sort', ] _brokerd_mods: list[str] = [ diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 842fdbc3e..56c3d82a0 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -49,7 +49,6 @@ CommissionReport, ) from ib_insync.objects import Position as IbPosition -import pendulum from piker import config from piker.accounting import ( @@ -59,7 +58,6 @@ Transaction, open_trade_ledger, TransactionLedger, - iter_by_dt, open_account, Account, ) @@ -87,14 +85,13 @@ Client, MethodProxy, ) -from ._flex_reports import parse_flex_dt from .ledger import ( norm_trade_records, api_trades_to_ledger_entries, + tx_sort, ) - def pack_position( pos: IbPosition @@ -368,8 +365,6 @@ async def update_and_audit_msgs( # breakeven pp calcs. ibppmsg = cids2pps.get((acctid, bs_mktid)) if ibppmsg: - - symbol: str = ibppmsg.symbol msg = BrokerdPosition( broker='ib', @@ -379,10 +374,18 @@ async def update_and_audit_msgs( # need it and/or it's prefixed in the section # table.. account=ibppmsg.account, + # XXX: the `.ib` is stripped..? - symbol=symbol, - currency=ibppmsg.currency, - size=p.size, + symbol=ibppmsg.symbol, + + # remove.. + # currency=ibppmsg.currency, + + # NOTE: always take their size since it's usually the + # true gospel.. + # size=p.size, + size=ibppmsg.size, + avg_price=p.ppu, ) msgs.append(msg) @@ -404,8 +407,6 @@ async def update_and_audit_msgs( or ibsize ) ): - # if 'mbt.cme' in msg.symbol: - # await tractor.pause() # reverse_split_ratio = pikersize / ibsize # split_ratio = 1/reverse_split_ratio @@ -434,17 +435,19 @@ async def update_and_audit_msgs( # await tractor.pause() log.error(logmsg) + # TODO: make this a "propaganda" log level? if ibppmsg.avg_price != msg.avg_price: - # TODO: make this a "propaganda" log level? log.warning( f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n' - f'ib: {pformat(ibppmsg)}\n' + f'ib: {ibfmtmsg}\n' '---------------------------\n' - f'piker: {msg.to_dict()}' + f'piker: {pformat(msg.to_dict())}' ) else: - # make brand new message + # XXX: though it shouldn't be possible (means an error + # in our accounting subsys) create a new message for + # a supposed "missing position" that IB never reported. msg = BrokerdPosition( broker='ib', @@ -455,9 +458,13 @@ async def update_and_audit_msgs( # table.. we should just strip this from the message # right since `.broker` is already included? account=f'ib.{acctid}', + # XXX: the `.ib` is stripped..? symbol=p.mkt.fqme, + + # TODO: we should remove from msg schema.. 
# currency=ibppmsg.currency, + size=p.size, avg_price=p.ppu, ) @@ -467,11 +474,8 @@ async def update_and_audit_msgs( 'Maybe they LIQUIDATED YOU or are missing ledger entries?\n' ) log.error(logmsg) - - # if validate: - # raise ValueError(logmsg) - - msgs.append(msg) + if validate: + raise ValueError(logmsg) return msgs @@ -558,13 +562,8 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: - # from piker.brokers import ( - # get_brokermod, - # ) accounts_def = config.load_accounts(['ib']) - global _client_cache - # deliver positions to subscriber before anything else all_positions = [] accounts = set() @@ -606,16 +605,7 @@ async def open_trade_dialog( open_trade_ledger( 'ib', acctid, - tx_sort=partial( - iter_by_dt, - parsers={ - 'dateTime': parse_flex_dt, - 'datetime': pendulum.parse, - # for some some fucking 2022 and - # back options records...fuck me. - 'date': pendulum.parse, - }, - ), + tx_sort=tx_sort, symcache=symcache, ) ) @@ -680,7 +670,6 @@ async def open_trade_dialog( api_to_ledger_entries and (trade_entries := api_to_ledger_entries.get(acctid)) ): - # TODO: fix this `tractor` BUG! # https://github.com/goodboy/tractor/issues/354 # await tractor.pp() @@ -724,8 +713,8 @@ async def open_trade_dialog( continue bs_mktid, msg = pack_position(pos) - acctid = msg.account = accounts_def.inverse[msg.account] - acctid = acctid.strip('ib.') + msg.account = accounts_def.inverse[msg.account] + acctid = msg.account.strip('ib.') cids2pps[(acctid, bs_mktid)] = msg assert msg.account in accounts, ( @@ -744,7 +733,7 @@ async def open_trade_dialog( cids2pps, validate=False, ) - all_positions.extend(msg for msg in msgs) + all_positions.extend(msg for msg in msgs if msg.size != 0) if not all_positions and cids2pps: raise RuntimeError( @@ -782,7 +771,7 @@ async def open_trade_dialog( # allocate event relay tasks for each client connection n.start_soon( deliver_trade_events, - n, + # n, trade_event_stream, ems_stream, accounts_def, @@ -838,26 +827,33 @@ async def emit_pp_update( acctid = fq_acctid.strip('ib.') acnt = acnts[acctid] + ledger: dict = ledgers[acctid] - acnt.update_from_ledger(trans) + acnt.update_from_ledger( + trans, + symcache=ledger.symcache + ) active, closed = acnt.dump_active() # NOTE: update ledger with all new trades for fq_acctid, trades_by_id in api_to_ledger_entries.items(): - acctid = fq_acctid.strip('ib.') - ledger = ledgers[acctid] + acctid: str = fq_acctid.strip('ib.') + ledger: dict = ledgers[acctid] + # NOTE: don't override flex/previous entries with new API + # ones, just update with new fields! for tid, tdict in trades_by_id.items(): - # NOTE: don't override flex/previous entries with new API - # ones, just update with new fields! ledger.setdefault(tid, {}).update(tdict) # generate pp msgs and cross check with ib's positions data, relay # re-formatted pps as msgs to the ems. for pos in filter( bool, - [active.get(tx.bs_mktid), closed.get(tx.bs_mktid)] + [ + active.get(tx.bs_mktid), + closed.get(tx.bs_mktid) + ] ): msgs = await update_and_audit_msgs( acctid, @@ -869,7 +865,7 @@ async def emit_pp_update( ) if msgs: msg = msgs[0] - log.info('Emitting pp msg: {msg}') + log.info(f'Emitting pp msg: {msg}') break await ems_stream.send(msg) @@ -889,11 +885,19 @@ async def emit_pp_update( # https://github.com/erdewit/ib_insync/issues/363 'inactive': 'pending', } +_action_map = { + 'BOT': 'buy', + 'SLD': 'sell', +} +# TODO: maybe just make this a flat func without an interal loop +# and call it *from* the `trade_event_stream` loop? 
Might look
+# a lot nicer doing that from open_trade_dialog() instead of
+# starting a separate task?
 async def deliver_trade_events(
-    nurse: trio.Nursery,
+    # nurse: trio.Nursery,
     trade_event_stream: trio.MemoryReceiveChannel,
     ems_stream: tractor.MsgStream,
     accounts_def: dict[str, str],  # eg. `'ib.main'` -> `'DU999999'`
@@ -908,7 +912,6 @@ async def deliver_trade_events(
     Format and relay all trade events for a given client to emsd.

     '''
-    action_map = {'BOT': 'buy', 'SLD': 'sell'}
     ids2fills: dict[str, dict] = {}

     # TODO: for some reason we can receive a ``None`` here when the
     # at the eventkit code above but we should probably handle it...
     async for event_name, item in trade_event_stream:
         log.info(f'ib sending {event_name}:\n{pformat(item)}')
-
         match event_name:
             # NOTE: we remap statuses to the ems set via the
             # ``_statuses: dict`` above.
@@ -999,7 +1001,7 @@ async def deliver_trade_events(
                 # `.submit_limit()`
                 reqid=execu.orderId,
                 time_ns=time.time_ns(),  # cuz why not
-                action=action_map[execu.side],
+                action=_action_map[execu.side],
                 size=execu.shares,
                 price=execu.price,
                 # broker_details=execdict,
@@ -1163,8 +1165,16 @@ async def deliver_trade_events(

             case 'position':
-                cid, msg = pack_position(item)
+                pos: IbPosition = item
+                bs_mktid, msg = pack_position(pos)
                 log.info(f'New IB position msg: {msg}')
+
+                # always update with latest ib pos msg info since
+                # we generally audit against it for sanity and
+                # testing AND we require it to be updated to avoid
+                # error msgs emitted from `update_and_audit_msgs()`
+                cids2pps[(msg.account, bs_mktid)] = msg
+
                 # cuck ib and it's shitty fifo sys for pps!
                 continue

diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py
index aaeda1531..cc79122ca 100644
--- a/piker/brokers/ib/ledger.py
+++ b/piker/brokers/ib/ledger.py
@@ -20,9 +20,11 @@
 '''
 from bisect import insort
 from decimal import Decimal
+from functools import partial
 from pprint import pformat
 from typing import (
     Any,
+    Callable,
 )

 from bidict import bidict
@@ -38,11 +40,24 @@
     digits_to_dec,
     Transaction,
     MktPair,
+    iter_by_dt,
 )
 from ._flex_reports import parse_flex_dt
 from ._util import log


+tx_sort: Callable = partial(
+    iter_by_dt,
+    parsers={
+        'dateTime': parse_flex_dt,
+        'datetime': pendulum.parse,
+        # for some fucking 2022 and
+        # back options records...fuck me.
+        'date': pendulum.parse,
+    }
+)
+
+
 def norm_trade(
     tid: str,
     record: dict[str, Any],

From 5e7916a0df974ccb6941eb536dd333712a1b1811 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 20 Jul 2023 15:23:01 -0400
Subject: [PATCH 064/116] Start `piker.toolz` subpkg for all our tooling B)

Since there's a growing list of top level mods which are more or less
utils/tools for working with the runtime, begin to move them into a new
subpkg starting with a new `.toolz.debug`.

Start with,
- a new `open_crash_handler()` for doing breakpoints around blocks that
  might error.
- move in what was `piker._profile` into `.toolz.profile` and adjust all
  importing appropriately.
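Usage is just a `with` block wrap; any exception raised inside drops
you into a `pdbp` post-mortem REPL at the raise point and then
re-raises:

    from piker.toolz import open_crash_handler

    with open_crash_handler():
        # boom -> `pdbp.xpm()` post-mortem, after which the original
        # `ZeroDivisionError` propagates as usual.
        1 / 0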
--- piker/data/_formatters.py | 8 ++--- piker/data/_timeseries.py | 2 +- piker/data/flows.py | 4 --- piker/fsp/_engine.py | 2 +- piker/storage/__init__.py | 2 +- piker/storage/marketstore/__init__.py | 1 - piker/storage/nativedb.py | 1 - piker/toolz/__init__.py | 38 +++++++++++++++++++++++ piker/toolz/debug.py | 40 +++++++++++++++++++++++++ piker/{_profile.py => toolz/profile.py} | 0 piker/ui/_curve.py | 7 +++-- piker/ui/_dataviz.py | 2 +- piker/ui/_display.py | 4 +-- piker/ui/_fsp.py | 2 +- piker/ui/_interaction.py | 7 +++-- piker/ui/_ohlc.py | 7 +++-- piker/ui/_render.py | 2 +- piker/ui/view_mode.py | 2 +- 18 files changed, 106 insertions(+), 25 deletions(-) create mode 100644 piker/toolz/__init__.py create mode 100644 piker/toolz/debug.py rename piker/{_profile.py => toolz/profile.py} (100%) diff --git a/piker/data/_formatters.py b/piker/data/_formatters.py index 4fbe3151c..29d2ac9c3 100644 --- a/piker/data/_formatters.py +++ b/piker/data/_formatters.py @@ -1,5 +1,5 @@ # piker: trading gear for hackers -# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0) +# Copyright (C) Tyler Goodlet (in stewardship for pikers) # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by @@ -13,10 +13,10 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -""" +''' Pre-(path)-graphics formatted x/y nd/1d rendering subsystem. -""" +''' from __future__ import annotations from typing import ( Optional, @@ -39,7 +39,7 @@ from ._dataviz import ( Viz, ) - from .._profile import Profiler + from piker.toolz import Profiler class IncrementalFormatter(msgspec.Struct): diff --git a/piker/data/_timeseries.py b/piker/data/_timeseries.py index c812e08af..2d73c263d 100644 --- a/piker/data/_timeseries.py +++ b/piker/data/_timeseries.py @@ -32,7 +32,7 @@ import polars as pl from ._sharedmem import ShmArray -from .._profile import ( +from ..toolz.profile import ( Profiler, pg_profile_enabled, ms_slower_then, diff --git a/piker/data/flows.py b/piker/data/flows.py index 652e1e717..86e5370ff 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -37,10 +37,6 @@ ShmArray, _Token, ) -# from .._profile import ( -# Profiler, -# pg_profile_enabled, -# ) if TYPE_CHECKING: # from pyqtgraph import PlotItem diff --git a/piker/fsp/_engine.py b/piker/fsp/_engine.py index 9a6ebddb8..b4cccdaea 100644 --- a/piker/fsp/_engine.py +++ b/piker/fsp/_engine.py @@ -51,7 +51,7 @@ _load_builtins, _Token, ) -from .._profile import Profiler +from ..toolz import Profiler log = get_logger(__name__) diff --git a/piker/storage/__init__.py b/piker/storage/__init__.py index 465d3e284..c813c48ae 100644 --- a/piker/storage/__init__.py +++ b/piker/storage/__init__.py @@ -246,7 +246,7 @@ async def open_tsdb_client( # * the original data feed arch blurb: # - https://github.com/pikers/piker/issues/98 # - from .._profile import Profiler + from ..toolz import Profiler profiler = Profiler( disabled=True, # not pg_profile_enabled(), delayed=False, diff --git a/piker/storage/marketstore/__init__.py b/piker/storage/marketstore/__init__.py index 2f0a79703..7466c06a1 100644 --- a/piker/storage/marketstore/__init__.py +++ b/piker/storage/marketstore/__init__.py @@ -59,7 +59,6 @@ Params, ) from piker.log import get_logger -# from .._profile import Profiler log = get_logger(__name__) diff --git a/piker/storage/nativedb.py b/piker/storage/nativedb.py index 274bf0399..1f7da9f79 100644 --- 
a/piker/storage/nativedb.py +++ b/piker/storage/nativedb.py @@ -67,7 +67,6 @@ from piker.data import def_iohlcv_fields from piker.data import ShmArray from piker.log import get_logger -# from .._profile import Profiler log = get_logger('storage.nativedb') diff --git a/piker/toolz/__init__.py b/piker/toolz/__init__.py new file mode 100644 index 000000000..4b8a9338e --- /dev/null +++ b/piker/toolz/__init__.py @@ -0,0 +1,38 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Toolz for debug, profile and trace of the distributed runtime :surfer: + +''' +from .debug import ( + open_crash_handler, +) +from .profile import ( + Profiler, + pg_profile_enabled, + ms_slower_then, + timeit, +) + + +__all__: list[str] = [ + 'open_crash_handler', + 'pg_profile_enabled', + 'ms_slower_then', + 'Profiler', + 'timeit', +] diff --git a/piker/toolz/debug.py b/piker/toolz/debug.py new file mode 100644 index 000000000..3b8a3e3ed --- /dev/null +++ b/piker/toolz/debug.py @@ -0,0 +1,40 @@ +# piker: trading gear for hackers +# Copyright (C) Tyler Goodlet (in stewardship for pikers) + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. + +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +''' +Debugger wrappers for `pdbp` as used by `tractor`. + +''' +from contextlib import contextmanager as cm + +import pdbp + + +# TODO: better naming and what additionals? +# - optional runtime plugging? +# - detection for sync vs. async code? +# - specialized REPL entry when in distributed mode? +@cm +def open_crash_handler(): + ''' + Super basic crash handler using `pdbp` debugger. 
+ + ''' + try: + yield + except BaseException: + pdbp.xpm() + raise diff --git a/piker/_profile.py b/piker/toolz/profile.py similarity index 100% rename from piker/_profile.py rename to piker/toolz/profile.py diff --git a/piker/ui/_curve.py b/piker/ui/_curve.py index 5442d3471..c8e4c3733 100644 --- a/piker/ui/_curve.py +++ b/piker/ui/_curve.py @@ -34,10 +34,13 @@ QPainter, QPainterPath, ) -from .._profile import pg_profile_enabled, ms_slower_then from ._style import hcolor from ..log import get_logger -from .._profile import Profiler +from ..toolz.profile import ( + Profiler, + pg_profile_enabled, + ms_slower_then, +) log = get_logger(__name__) diff --git a/piker/ui/_dataviz.py b/piker/ui/_dataviz.py index 9da45f448..c011bff04 100644 --- a/piker/ui/_dataviz.py +++ b/piker/ui/_dataviz.py @@ -62,7 +62,7 @@ ) from ._render import Renderer from ..log import get_logger -from .._profile import ( +from ..toolz.profile import ( Profiler, pg_profile_enabled, ms_slower_then, diff --git a/piker/ui/_display.py b/piker/ui/_display.py index 610b38f3d..27e701453 100644 --- a/piker/ui/_display.py +++ b/piker/ui/_display.py @@ -79,12 +79,12 @@ open_order_mode, OrderMode, ) -from .._profile import ( +from ..toolz import ( pg_profile_enabled, ms_slower_then, + Profiler, ) from ..log import get_logger -from .._profile import Profiler if TYPE_CHECKING: from ._interaction import ChartView diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py index 5202ea975..a4deb0348 100644 --- a/piker/ui/_fsp.py +++ b/piker/ui/_fsp.py @@ -66,7 +66,7 @@ flow_rates, ) from ..log import get_logger -from .._profile import Profiler +from ..toolz import Profiler log = get_logger(__name__) diff --git a/piker/ui/_interaction.py b/piker/ui/_interaction.py index 1219e94ec..6cc6bee4b 100644 --- a/piker/ui/_interaction.py +++ b/piker/ui/_interaction.py @@ -39,8 +39,11 @@ import trio from ..log import get_logger -from .._profile import Profiler -from .._profile import pg_profile_enabled, ms_slower_then +from ..toolz import ( + Profiler, + pg_profile_enabled, + ms_slower_then, +) from .view_mode import overlay_viewlists # from ._style import _min_points_to_show from ._editors import SelectRect diff --git a/piker/ui/_ohlc.py b/piker/ui/_ohlc.py index 33d7bbdaa..2bbec2533 100644 --- a/piker/ui/_ohlc.py +++ b/piker/ui/_ohlc.py @@ -31,9 +31,12 @@ from PyQt5.QtGui import QPainterPath from ._curve import FlowGraphic -from .._profile import pg_profile_enabled, ms_slower_then +from ..toolz import ( + Profiler, + pg_profile_enabled, + ms_slower_then, +) from ..log import get_logger -from .._profile import Profiler log = get_logger(__name__) diff --git a/piker/ui/_render.py b/piker/ui/_render.py index 2a442e987..bd3d17574 100644 --- a/piker/ui/_render.py +++ b/piker/ui/_render.py @@ -39,7 +39,7 @@ xy_downsample, ) from ..log import get_logger -from .._profile import ( +from ..toolz import ( Profiler, ) diff --git a/piker/ui/view_mode.py b/piker/ui/view_mode.py index 82dfbf623..191b62b95 100644 --- a/piker/ui/view_mode.py +++ b/piker/ui/view_mode.py @@ -33,7 +33,7 @@ from ..data.types import Struct from ..data._timeseries import slice_from_time from ..log import get_logger -from .._profile import Profiler +from ..toolz import Profiler if TYPE_CHECKING: from ._chart import ChartPlotWidget From e88913e1f3d5cb0ec1754eb47f49769d3f881ffd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Jul 2023 15:27:22 -0400 Subject: [PATCH 065/116] .data._pathops: drop profiler imports, fix some naming to appease `ruff` --- piker/data/_pathops.py | 26 
++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/piker/data/_pathops.py b/piker/data/_pathops.py index a17f289a4..f2a61c42e 100644 --- a/piker/data/_pathops.py +++ b/piker/data/_pathops.py @@ -17,11 +17,6 @@ Super fast ``QPainterPath`` generation related operator routines. """ -from math import ( - ceil, - floor, -) - import numpy as np from numpy.lib import recfunctions as rfn from numba import ( @@ -35,11 +30,6 @@ # TODO: for ``numba`` typing.. # from ._source import numba_ohlc_dtype from ._m4 import ds_m4 -from .._profile import ( - Profiler, - pg_profile_enabled, - ms_slower_then, -) def xy_downsample( @@ -135,7 +125,7 @@ def path_arrays_from_ohlc( half_w: float = bar_w/2 # TODO: report bug for assert @ - # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991 + # ../piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991 for i, q in enumerate(data[start:], start): open = q['open'] @@ -237,20 +227,20 @@ def trace_hl( for i in range(hl.size): row = hl[i] - l, h = row['low'], row['high'] + lo, hi = row['low'], row['high'] - up_diff = h - last_l - down_diff = last_h - l + up_diff = hi - last_l + down_diff = last_h - lo if up_diff > down_diff: - out[2*i + 1] = h + out[2*i + 1] = hi out[2*i] = last_l else: - out[2*i + 1] = l + out[2*i + 1] = lo out[2*i] = last_h - last_l = l - last_h = h + last_l = lo + last_h = hi x[2*i] = int(i) - margin x[2*i + 1] = int(i) + margin From 759ebe71e9843ac40dccb432ec8278a337d028fd Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 20 Jul 2023 15:27:46 -0400 Subject: [PATCH 066/116] Allow disabling symcache load via kwarg as well --- piker/data/_symcache.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index 1745d2933..3bf30d480 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -322,6 +322,7 @@ async def open_symcache( reload: bool = False, only_from_memcache: bool = False, # no API req + _no_symcache: bool = False, # no backend support ) -> SymbologyCache: @@ -337,11 +338,15 @@ async def open_symcache( # (easily) and thus we allow for an empty instance to be loaded # and manually filled in at the whim of the caller presuming # the backend pkg-module is annotated appropriately. - if getattr(mod, '_no_symcache', False): + if ( + getattr(mod, '_no_symcache', False) + or _no_symcache + ): yield SymbologyCache( mod=mod, fp=cachefile, ) + # don't do nuttin return # actor-level cache-cache XD From 897c20bd4a013191ede3e6753ca7cda9fcd4c0fb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 21 Jul 2023 23:48:53 -0400 Subject: [PATCH 067/116] Moar `.accounting` tweaks - start flipping over internals to `Position.cumsize` - allow passing in a `_mktmap_table` to `Account.update_from_ledger()` for cases where the caller wants to per-call-dyamically insert the `MktPair` via a one-off table (cough IB). - use `polars.from_dicts()` in `.calc.open_ledger_dfs()`. and wrap the whole func in a new `toolz.open_crash_handler()`. --- piker/accounting/_pos.py | 13 +++++++-- piker/accounting/calc.py | 61 ++++++++++++++++++++++------------------ piker/accounting/cli.py | 13 ++++++--- 3 files changed, 54 insertions(+), 33 deletions(-) diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 1e1590832..4b4b72d60 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -324,7 +324,7 @@ def dsize(self) -> float: (fiat) units. 
''' - return self.ppu * self.size + return self.ppu * self.cumsize def expired(self) -> bool: ''' @@ -483,6 +483,8 @@ def update_from_ledger( cost_scalar: float = 2, symcache: SymbologyCache | None = None, + _mktmap_table: dict[str, MktPair] | None = None, + ) -> dict[str, Position]: ''' Update the internal `.pps[str, Position]` table from input @@ -519,7 +521,14 @@ def update_from_ledger( # template the mkt-info presuming a legacy market ticks # if no info exists in the transactions.. - mkt: MktPair = symcache.mktmaps[fqme] + try: + mkt: MktPair = symcache.mktmaps[fqme] + except KeyError: + # XXX: caller is allowed to provide a fallback + # mktmap table for the case where a new position is + # being added and the preloaded symcache didn't + # have this entry prior (eg. with frickin IB..) + mkt = _mktmap_table[fqme] if not (pos := pps.get(bs_mktid)): diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index ee349092e..d86ad98c0 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -361,8 +361,8 @@ def open_ledger_dfs( if not ledger: import time from tractor._debug import open_crash_handler - now = time.time() + now = time.time() with ( open_crash_handler(), @@ -390,26 +390,29 @@ def open_ledger_dfs( # ) txns: dict[str, Transaction] = ledger.to_txns() - ldf = pl.DataFrame( + # ldf = pl.DataFrame( + # list(txn.to_dict() for txn in txns.values()), + ldf = pl.from_dicts( list(txn.to_dict() for txn in txns.values()), - # schema=[ - # ('tid', str), - # ('fqme', str), - # ('dt', str), - # ('size', pl.Float64), - # ('price', pl.Float64), - # ('cost', pl.Float64), - # ('expiry', str), - # ('bs_mktid', str), - # ], - # ).sort('dt').select([ - ).sort('dt').with_columns([ - # pl.col('fqme'), + + # only for ordering the cols + schema=[ + ('fqme', str), + ('tid', str), + ('bs_mktid', str), + ('expiry', str), + ('etype', str), + ('dt', str), + ('size', pl.Float64), + ('price', pl.Float64), + ('cost', pl.Float64), + ], + ).sort( # chronological order + 'dt' + ).with_columns([ pl.col('dt').str.to_datetime(), - # pl.col('expiry').dt.datetime(), - # pl.col('bs_mktid'), - # pl.col('size'), - # pl.col('price'), + # pl.col('expiry').str.to_datetime(), + # pl.col('expiry').dt.date(), ]) # filter out to the columns matching values filter passed @@ -423,20 +426,24 @@ def open_ledger_dfs( # fdf = df.filter(pred) - # bs_mktid: str = fdf[0]['bs_mktid'] - # pos: Position = acnt.pps[bs_mktid] - - # TODO: not sure if this is even possible but.. - # ppt = df.groupby('fqme').agg([ - # # TODO: ppu and bep !! - # pl.cumsum('size').alias('cumsum'), - # ]) + # break up into a frame per mkt / fqme dfs: dict[str, pl.DataFrame] = ldf.partition_by( 'fqme', as_dict=True, ) + + # TODO: not sure if this is even possible but.. + # - it'd be more ideal to use `ppt = df.groupby('fqme').agg([` + # - ppu and bep calcs! for key in dfs: df = dfs[key] + + # TODO: pass back the current `Position` object loaded from + # the account as well? Would provide incentive to do all + # this ledger loading inside a new async open_account(). 
+ # bs_mktid: str = df[0]['bs_mktid'] + # pos: Position = acnt.pps[bs_mktid] + dfs[key] = df.with_columns([ pl.cumsum('size').alias('cumsize'), ]) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 9dc36b4de..753e6513d 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -253,6 +253,7 @@ def disect( ), ): from piker.log import get_console_log + from piker.toolz import open_crash_handler get_console_log(loglevel) pair: tuple[str, str] @@ -266,10 +267,14 @@ def disect( # actual ledger ref filled in with all txns ldgr: TransactionLedger - with open_ledger_dfs( - brokername, - account, - ) as (dfs, ldgr): + pl.Config.set_tbl_cols(16) + with ( + open_crash_handler(), + open_ledger_dfs( + brokername, + account, + ) as (dfs, ldgr), + ): # look up specific frame for fqme-selected asset df = dfs[fqme] From 50b221f788b1a0ef196483b8cf14dce5035321eb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 25 Jul 2023 16:22:03 -0400 Subject: [PATCH 068/116] ib: rework client-internal contract caching Add new `Client` attr tables to better stash `Contract` lookup results normally mapped from some in put FQME; - `._contracts: dict[str, Contract]` for any input pattern (fqme). - `._cons: dict[str, Contract] = {}` for the `.conId: int` inputs. - `_cons2mkts: bidict[Contract, MktPair]` for mapping back and forth between ib and piker internal pair types. Further, - type out as many ib_insync internal types as possible mostly for contract related objects. - change `Client.trades()` -> `.get_fills()` and return directly the result from `IB.fill()`. --- piker/brokers/ib/api.py | 126 ++++++++++++++++++++-------------------- 1 file changed, 64 insertions(+), 62 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index 69e5cc3ef..a85d989fa 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -34,16 +34,15 @@ ) import itertools from math import isnan +import asyncio +from pprint import pformat +import inspect +import time from typing import ( Any, Callable, - Optional, Union, ) -import asyncio -from pprint import pformat -import inspect -import time from types import SimpleNamespace from bidict import bidict @@ -56,26 +55,20 @@ client as ib_client, IB, Contract, + ContractDetails, Crypto, Commodity, Forex, Future, ContFuture, Stock, -) -from ib_insync.contract import ( - ContractDetails, -) -from ib_insync.order import Order -from ib_insync.ticker import Ticker -from ib_insync.objects import ( + Order, + Ticker, BarDataList, Position, Fill, - Execution, - CommissionReport, -) -from ib_insync.wrapper import ( + # Execution, + # CommissionReport, Wrapper, RequestError, ) @@ -85,6 +78,7 @@ # non-relative for backends so that non-builting backends # can be easily modelled after this style B) from piker import config +from piker.accounting import MktPair from .symbols import ( con2fqme, parse_patt2fqme, @@ -264,7 +258,13 @@ class Client: Note: this client requires running inside an ``asyncio`` loop. ''' + # keyed by fqmes _contracts: dict[str, Contract] = {} + # keyed by conId + _cons: dict[str, Contract] = {} + + # for going between ib and piker types + _cons2mkts: bidict[Contract, MktPair] = bidict({}) def __init__( self, @@ -282,26 +282,16 @@ def __init__( self.ib = ib self.ib.RaiseRequestErrors: bool = True - # contract cache - self._cons: dict[str, Contract] = {} - - async def trades(self) -> list[dict]: + async def get_fills(self) -> list[Fill]: ''' - Return list of trade-fills from current session in ``dict``. 
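The new `_cons2mkts: bidict` table gives O(1) lookups in both
directions; a small sketch using plain hashable stand-ins (a made-up
`conId` int and fqme str) rather than real `Contract`/`MktPair`
instances:

    from bidict import bidict, ValueDuplicationError

    cons2mkts: bidict[int, str] = bidict()
    cons2mkts[265598] = 'aapl.nasdaq.ib'

    assert cons2mkts[265598] == 'aapl.nasdaq.ib'
    assert cons2mkts.inverse['aapl.nasdaq.ib'] == 265598

    # a bidict enforces one-to-oneness: re-mapping the same value
    # from a second key raises instead of silently aliasing.
    try:
        cons2mkts[1] = 'aapl.nasdaq.ib'
    except ValueDuplicationError:
        pass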
+ Return list of rents `Fills` from trading session. + + In theory this can be configured for dumping clears from multiple + days but can't member where to set that.. ''' - norm_fills: list[dict] = [] fills: list[Fill] = self.ib.fills() - for fill in fills: - fill = fill._asdict() # namedtuple - for key, val in fill.items(): - match val: - case Contract() | Execution() | CommissionReport(): - fill[key] = asdict(val) - - norm_fills.append(fill) - - return norm_fills + return fills async def orders(self) -> list[Order]: return await self.ib.reqAllOpenOrdersAsync( @@ -347,7 +337,7 @@ async def bars( _enters += 1 - contract = (await self.find_contracts(fqme))[0] + contract: Contract = (await self.find_contracts(fqme))[0] bars_kwargs.update(getattr(contract, 'bars_kwargs', {})) bars = await self.ib.reqHistoricalDataAsync( @@ -575,7 +565,8 @@ async def get_fute( ) -> Contract: ''' - Get an unqualifed contract for the current "continous" future. + Get an unqualifed contract for the current "continous" + future. ''' # it's the "front" contract returned here @@ -606,13 +597,13 @@ async def get_con( con: Contract = await self.ib.qualifyContractsAsync( Contract(conId=conid) ) - self._cons[conid] = con + self._cons[str(conid)] = con[0] return con async def find_contracts( self, - pattern: Optional[str] = None, - contract: Optional[Contract] = None, + pattern: str | None = None, + contract: Contract | None = None, qualify: bool = True, err_on_qualify: bool = True, @@ -622,21 +613,23 @@ async def find_contracts( symbol, currency, exch, expiry = parse_patt2fqme( pattern, ) - sectype = '' + sectype: str = '' + exch: str = exch.upper() else: assert contract - symbol = contract.symbol - sectype = contract.secType - exch = contract.exchange or contract.primaryExchange - expiry = contract.lastTradeDateOrContractMonth - currency = contract.currency + symbol: str = contract.symbol + sectype: str = contract.secType + exch: str = contract.exchange or contract.primaryExchange + expiry: str = contract.lastTradeDateOrContractMonth + currency: str = contract.currency # contract searching stage # ------------------------ - # futes - if exch in _futes_venues: + # futes, ensure exch/venue is uppercase for matching + # our adhoc set. + if exch.upper() in _futes_venues: if expiry: # get the "front" contract con = await self.get_fute( @@ -704,10 +697,12 @@ async def find_contracts( ) exch = 'SMART' if not exch else exch - contracts = [con] + contracts: list[Contract] = [con] if qualify: try: - contracts = await self.ib.qualifyContractsAsync(con) + contracts: list[Contract] = ( + await self.ib.qualifyContractsAsync(con) + ) except RequestError as err: msg = err.message if ( @@ -725,14 +720,21 @@ async def find_contracts( # pack all contracts into cache for tract in contracts: - exch: str = tract.primaryExchange or tract.exchange or exch - pattern = f'{symbol}.{exch}' - expiry = tract.lastTradeDateOrContractMonth + exch: str = ( + tract.primaryExchange + or tract.exchange + or exch + ) + pattern: str = f'{symbol}.{exch}' + expiry: str = tract.lastTradeDateOrContractMonth # add an entry with expiry suffix if available if expiry: pattern += f'.{expiry}' - self._contracts[pattern.lower()] = tract + # directly cache the input pattern to the output + # contract match as well as by the IB-internal conId. 
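The pattern-caching convention used just below can be distilled into a
tiny helper; `mk_cache_keys()` is hypothetical, shown only to make the
keying scheme explicit (bare `<sym>.<venue>` plus an expiry-suffixed
variant for derivatives):

    def mk_cache_keys(
        symbol: str,
        exch: str,
        expiry: str = '',
    ) -> list[str]:
        base: str = f'{symbol}.{exch}'
        keys: list[str] = [base]
        if expiry:
            keys.append(f'{base}.{expiry}')
        return keys

    assert mk_cache_keys('mnq', 'CME', '20230915') == [
        'mnq.CME',
        'mnq.CME.20230915',
    ]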
+ self._contracts[pattern] = tract + self._cons[str(tract.conId)] = tract return contracts @@ -755,21 +757,21 @@ async def get_head_time( async def get_sym_details( self, - symbol: str, + fqme: str, ) -> tuple[ Contract, ContractDetails, ]: ''' - Get summary (meta) data for a given symbol str including - ``Contract`` and its details and a (first snapshot of the) - ``Ticker``. + Return matching contracts for a given ``fqme: str`` including + ``Contract`` and matching ``ContractDetails``. ''' - contract = (await self.find_contracts(symbol))[0] - details_fute = self.ib.reqContractDetailsAsync(contract) - details = (await details_fute)[0] + contract: Contract = (await self.find_contracts(fqme))[0] + details: ContractDetails = ( + await self.ib.reqContractDetailsAsync(contract) + )[0] return contract, details async def get_quote( @@ -842,7 +844,7 @@ def submit_limit( ''' try: - contract = self._contracts[symbol] + con: Contract = self._contracts[symbol] except KeyError: # require that the symbol has been previously cached by # a data feed request - ensure we aren't making orders @@ -851,7 +853,7 @@ def submit_limit( try: trade = self.ib.placeOrder( - contract, + con, Order( orderId=reqid or 0, # stupid api devs.. action=action.upper(), # BUY/SELL @@ -908,7 +910,7 @@ def push_err( reqId: int, errorCode: int, errorString: str, - contract: Contract, + con: Contract, ) -> None: @@ -933,7 +935,7 @@ def push_err( 'reqid': reqId, 'reason': reason, 'error_code': errorCode, - 'contract': contract, + 'contract': con, } )) except trio.BrokenResourceError: From b33be86b2fffa43d3171b38eab0cf37ede09d90a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 25 Jul 2023 16:43:08 -0400 Subject: [PATCH 069/116] ib: fill out contract tables in `.get_mkt_info()` Since getting a global symcache result from the API is basically impossible, we ad-hoc fill out the needed client tables on demand per client code queries to the mkt info EP. Also, use `unpack_fqme()` in fqme (search) pattern parser instead of hacky `str.partition()`. --- piker/brokers/ib/symbols.py | 51 ++++++++++++++++++++++++------------- 1 file changed, 33 insertions(+), 18 deletions(-) diff --git a/piker/brokers/ib/symbols.py b/piker/brokers/ib/symbols.py index 9a7491799..c64924146 100644 --- a/piker/brokers/ib/symbols.py +++ b/piker/brokers/ib/symbols.py @@ -37,6 +37,7 @@ from piker.accounting import ( Asset, MktPair, + unpack_fqme, ) from piker._cacheables import ( async_lifo_cache, @@ -49,6 +50,7 @@ if TYPE_CHECKING: from .api import ( MethodProxy, + Client, ) _futes_venues = ( @@ -358,24 +360,23 @@ def parse_patt2fqme( # fqme parsing stage # ------------------ if '.ib' in pattern: - from piker.accounting import unpack_fqme _, symbol, venue, expiry = unpack_fqme(pattern) else: symbol = pattern expiry = '' - # another hack for forex pairs lul. - if ( - '.idealpro' in symbol - # or '/' in symbol - ): - exch = 'IDEALPRO' - symbol = symbol.removesuffix('.idealpro') - if '/' in symbol: - symbol, currency = symbol.split('/') - - else: + # # another hack for forex pairs lul. + # if ( + # '.idealpro' in symbol + # # or '/' in symbol + # ): + # exch: str = 'IDEALPRO' + # symbol = symbol.removesuffix('.idealpro') + # if '/' in symbol: + # symbol, currency = symbol.split('/') + + # else: # TODO: yes, a cache.. 
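Regarding the "TODO: yes, a cache.." note here: since the parse is a
pure function of the input pattern, a plain `functools.lru_cache`
wrapper would do. A sketch with a deliberately simplified parsing rule
(the real `parse_patt2fqme()` has more heuristics):

    from functools import lru_cache

    @lru_cache(maxsize=256)
    def parse_cached(pattern: str) -> tuple[str, str, str]:
        symbol, _, expiry = pattern.rpartition('.')
        symbol, venue = symbol.upper().rsplit('.', maxsplit=1)
        return symbol, venue, expiry

    assert parse_cached('mnq.cme.20230915') == ('MNQ', 'CME', '20230915')
    assert parse_cached.cache_info().hits == 0
    parse_cached('mnq.cme.20230915')  # second call is served from cache
    assert parse_cached.cache_info().hits == 1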
# try: # # give the cache a go @@ -387,9 +388,9 @@ def parse_patt2fqme( symbol, _, expiry = symbol.rpartition('.') # use heuristics to figure out contract "type" - symbol, exch = symbol.upper().rsplit('.', maxsplit=1) + symbol, venue = symbol.upper().rsplit('.', maxsplit=1) - return symbol, currency, exch, expiry + return symbol, currency, venue, expiry def con2fqme( @@ -406,9 +407,12 @@ def con2fqme( ''' # should be real volume for this contract by default - calc_price = False + calc_price: bool = False if con.conId: try: + # TODO: LOL so apparently IB just changes the contract + # ID (int) on a whim.. so we probably need to use an + # FQME style key after all... return _cache[con.conId] except KeyError: pass @@ -475,8 +479,9 @@ async def get_mkt_info( ) -> tuple[MktPair, ibis.ContractDetails]: - # XXX: we don't need to split off any fqme broker part? - # bs_fqme, _, broker = fqme.partition('.') + if '.ib' not in fqme: + fqme += '.ib' + broker, pair, venue, expiry = unpack_fqme(fqme) proxy: MethodProxy if proxy is not None: @@ -492,7 +497,7 @@ async def get_mkt_info( ( con, # Contract details, # ContractDetails - ) = await proxy.get_sym_details(symbol=fqme) + ) = await proxy.get_sym_details(fqme=fqme) except ConnectionError: log.exception(f'Proxy is ded {proxy._aio_ns}') raise @@ -558,4 +563,14 @@ async def get_mkt_info( _fqme_without_src=(atype != 'fiat'), ) + # if possible register the bs_mktid to the just-built + # mkt so that it can be retreived by order mode tasks later. + # TODO NOTE: this is going to be problematic if/when we split + # out the datatd vs. brokerd actors since the mktmap lookup + # table will now be inaccessible.. + if proxy is not None: + client: Client = proxy._aio_ns + client._contracts[mkt.bs_fqme] = con + client._cons2mkts[con] = mkt + return mkt, details From e344bdbf1b3c9505628394bd39955419854fcd07 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 25 Jul 2023 18:03:32 -0400 Subject: [PATCH 070/116] ib: rework trade handling, take ib position sizes as gospel Instead of casting to `dict`s and rewriting event names in the `push_tradesies()` handler, be transparent with event names (also defining and piker-equivalent mapping them in a redefined `_statuses` table) and types passing them directly to the `deliver_trade_events()` task and generally make event handler blocks much easier to grok with type annotations. To deal with the causality dilemma of *when to emit a pos msg* due to needing all of `execDetailsEvent, commissionReportEvent, positionEvent` but having no guarantee on received order, we implement a small task `clears: dict[Contract, tuple[Position, Fill]]` tracker table and (as before) only emit a position event once the "cost" can be accessed for the fill. We now ALWAYS relay any `Position` update from IB directly to ensure (at least) the cumsize is correct (since it appears we still have ongoing issues with computing this correctly via `.accounting.Position` updates..). Further related adjustments: - load (fiat) balances and startup positions into a new `IbAcnt` struct. - change `update_and_audit_pos_msg()` to blindly forward ib position event updates for the **the size** since it should always be considered the true gospel for accounting! - drop ib-has-no-position handling since it should never occur.. - move `update_ledger_from_api_trades()` to the `.ledger` submod and do processing of ib_insync `Fill` related objects instead of dict-casted versions instead doing the casting in `api_trades_to_ledger_entries()`. 
- `norm_trade()`: add `symcache.mktmaps[bs_mktid] = mkt` in since it turns out API (and sometimes FLEX) records don't contain the listing exchange/venue thus making it impossible to map an asset pair in the "position sense" (i.e. over multiple venues: qqq.nasdaq, qqq.arca, qqq.directedge) to an fqme when doing offline ledger processing; instead use frickin IB's internal int-id so there's no discrepancy. - also much better handle futures mkt trade flex records such that parsed `MktPair.fqme` is consistent. --- piker/brokers/ib/broker.py | 1035 +++++++++++++++++++----------------- piker/brokers/ib/ledger.py | 204 +++++-- 2 files changed, 692 insertions(+), 547 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index 56c3d82a0..e9c6c83f6 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -26,18 +26,16 @@ import time from typing import ( Any, - Optional, AsyncIterator, - Union, ) from bidict import bidict import trio from trio_typing import TaskStatus import tractor +from tractor.to_asyncio import LinkedTaskChannel from ib_insync.contract import ( Contract, - Option, ) from ib_insync.order import ( Trade, @@ -47,23 +45,24 @@ Fill, Execution, CommissionReport, + Position as IbPosition, ) -from ib_insync.objects import Position as IbPosition from piker import config from piker.accounting import ( - # dec_digits, - # digits_to_dec, Position, Transaction, open_trade_ledger, TransactionLedger, open_account, Account, + Asset, + MktPair, ) -from piker.data._symcache import ( +from piker.data import ( open_symcache, SymbologyCache, + Struct, ) from piker.clearing._messages import ( Order, @@ -79,28 +78,32 @@ from ._util import log from .api import ( _accounts2clients, - con2fqme, get_config, open_client_proxies, Client, MethodProxy, ) +from .symbols import ( + con2fqme, + # get_mkt_info, +) from .ledger import ( norm_trade_records, - api_trades_to_ledger_entries, tx_sort, + update_ledger_from_api_trades, ) def pack_position( - pos: IbPosition + pos: IbPosition, + accounts_def: bidict[str, str], ) -> tuple[ str, dict[str, Any] ]: - con = pos.contract + con: Contract = pos.contract fqme, calc_price = con2fqme(con) # TODO: options contracts into a sane format.. @@ -108,7 +111,7 @@ def pack_position( str(con.conId), BrokerdPosition( broker='ib', - account=pos.account, + account=accounts_def.inverse[pos.account], symbol=fqme, currency=con.currency, size=float(pos.position), @@ -128,9 +131,8 @@ async def handle_order_requests( async for request_msg in ems_order_stream: log.info(f'Received order request {request_msg}') - action = request_msg['action'] - account = request_msg['account'] - + action: str = request_msg['action'] + account: str = request_msg['account'] acct_number = accounts_def.get(account) if not acct_number: log.error( @@ -213,8 +215,12 @@ async def recv_trade_updates( to_trio: trio.abc.SendChannel, ) -> None: - """Stream a ticker using the std L1 api. - """ + ''' + Receive and relay order control and positioning related events + from `ib_insync`, pack as tuples and push over mem-chan to our + trio relay task for processing and relay to EMS. + + ''' client.inline_errors(to_trio) # sync with trio task @@ -223,37 +229,40 @@ async def recv_trade_updates( def push_tradesies( eventkit_obj, obj, - fill: Optional[Fill] = None, - report: Optional[CommissionReport] = None, + fill: Fill | None = None, + report: CommissionReport | None = None, ): ''' Push events to trio task. 
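A toy version of the reworked `pack_position()` above, showing the
account-number -> config-alias translation through the inverted
accounts table (all literal values below are made up, and the real
helper derives the symbol via `con2fqme()`):

    def pack_position_toy(
        pos: dict,
        accounts_def_inverse: dict[str, str],
    ) -> tuple[str, dict]:
        con: dict = pos['contract']
        return str(con['conId']), {
            'broker': 'ib',
            'account': accounts_def_inverse[pos['account']],
            'symbol': con['symbol'].lower(),
            'size': float(pos['position']),
            'avg_price': pos['avgCost'],
        }

    bs_mktid, msg = pack_position_toy(
        {
            'contract': {'conId': 265598, 'symbol': 'AAPL'},
            'account': 'DU1234567',
            'position': 10,
            'avgCost': 180.0,
        },
        {'DU1234567': 'ib.paper'},
    )
    assert (bs_mktid, msg['account']) == ('265598', 'ib.paper')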
''' - match eventkit_obj.name(): + emit: tuple | object + event_name: str = eventkit_obj.name() + match event_name: case 'orderStatusEvent': - item = ('status', obj) + emit: Trade = obj case 'commissionReportEvent': assert report - item = ('cost', report) + emit: CommissionReport = report case 'execDetailsEvent': # execution details event - item = ('fill', (obj, fill)) + emit: tuple[Trade, Fill] = (obj, fill) case 'positionEvent': - item = ('position', obj) + emit: Position = obj case _: log.error(f'Error unknown event {obj}') return - log.info(f'eventkit event ->\n{pformat(item)}') + log.info(f'eventkit event ->\n{pformat(emit)}') try: - to_trio.send_nowait(item) + # emit event name + relevant ibis internal objects + to_trio.send_nowait((event_name, emit)) except trio.BrokenResourceError: log.exception(f'Disconnected from {eventkit_obj} updates') eventkit_obj.disconnect(push_tradesies) @@ -270,6 +279,8 @@ def push_tradesies( 'commissionReportEvent', # XXX: not sure yet if we need these + # -> prolly not since the named tuple type doesn't offer + # much more then a few more pnl fields.. # 'updatePortfolioEvent', # XXX: these all seem to be weird ib_insync internal @@ -288,196 +299,114 @@ def push_tradesies( await client.ib.disconnectedEvent -# TODO: maybe we should allow the `trade_entries` input to be -# a list of the actual `Contract` types instead, though a couple -# other callers will need to be changed as well. -async def update_ledger_from_api_trades( - trade_entries: list[dict[str, Any]], - client: Union[Client, MethodProxy], - accounts_def_inv: bidict[str, str], - - # provided for ad-hoc insertions "as transactions are - # processed" - symcache: SymbologyCache | None = None, - -) -> tuple[ - dict[str, Transaction], - dict[str, dict], -]: - # XXX; ERRGGG.. - # pack in the "primary/listing exchange" value from a - # contract lookup since it seems this isn't available by - # default from the `.fills()` method endpoint... - for entry in trade_entries: - condict = entry['contract'] - # print( - # f"{condict['symbol']}: GETTING CONTRACT INFO!\n" - # ) - conid = condict['conId'] - pexch = condict['primaryExchange'] - - if not pexch: - cons = await client.get_con(conid=conid) - if cons: - con = cons[0] - pexch = con.primaryExchange or con.exchange - else: - # for futes it seems like the primary is always empty? - pexch = condict['exchange'] - - entry['listingExchange'] = pexch - - # pack in the ``Contract.secType`` - entry['asset_type'] = condict['secType'] - - entries: dict[str, dict] = api_trades_to_ledger_entries( - accounts_def_inv, - trade_entries, - ) - # normalize recent session's trades to the `Transaction` type - trans_by_acct: dict[str, dict[str, Transaction]] = {} - - for acctid, trades_by_id in entries.items(): - # normalize to transaction form - trans_by_acct[acctid] = norm_trade_records( - trades_by_id, - symcache=symcache, - ) - - return trans_by_acct, entries - - -async def update_and_audit_msgs( +async def update_and_audit_pos_msg( acctid: str, # no `ib.` prefix is required! - pps: list[Position], - cids2pps: dict[tuple[str, int], BrokerdPosition], + pikerpos: Position, + ibpos: IbPosition, + cons2mkts: dict[Contract, MktPair], validate: bool = True, -) -> list[BrokerdPosition]: - - msgs: list[BrokerdPosition] = [] - p: Position - for p in pps: - bs_mktid = p.bs_mktid - - # retreive equivalent ib reported position message - # for comparison/audit versus the piker equivalent - # breakeven pp calcs. 
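The relay pattern in `push_tradesies()` boils down to fire-and-forget
sends into a trio memory channel, detaching the callback if the trio
side has gone away; a self-contained sketch of just that mechanic:

    import trio

    async def main():
        send, recv = trio.open_memory_channel(2**6)

        def push(event: tuple):
            try:
                send.send_nowait(event)
            except trio.BrokenResourceError:
                print('trio side gone, disconnecting handler')

        push(('positionEvent', {'size': 1.0}))
        async with recv:
            assert await recv.receive() == ('positionEvent', {'size': 1.0})

    trio.run(main)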
- ibppmsg = cids2pps.get((acctid, bs_mktid)) - if ibppmsg: - msg = BrokerdPosition( - broker='ib', - - # XXX: ok so this is annoying, we're relaying - # an account name with the backend suffix prefixed - # but when reading accounts from ledgers we don't - # need it and/or it's prefixed in the section - # table.. - account=ibppmsg.account, - - # XXX: the `.ib` is stripped..? - symbol=ibppmsg.symbol, - - # remove.. - # currency=ibppmsg.currency, - - # NOTE: always take their size since it's usually the - # true gospel.. - # size=p.size, - size=ibppmsg.size, - - avg_price=p.ppu, - ) - msgs.append(msg) - - ibfmtmsg = pformat(ibppmsg.to_dict()) - pikerfmtmsg = pformat(msg.to_dict()) - - ibsize = ibppmsg.size - pikersize = msg.size - diff = pikersize - ibsize - - # if ib reports a lesser pp it's not as bad since we can - # presume we're at least not more in the shit then we - # thought. - if ( - diff - and ( - pikersize - or ibsize - ) - ): +) -> BrokerdPosition: + + # NOTE: lookup the ideal `MktPair` value, since multi-venue + # trade records can result in multiple MktpPairs (eg. qqq.arca.ib and + # qqq.nasdaq.ib can map to the same bs_mktid but have obviously + # different .fqme: str values..), instead allow caller to + # provide a table with the desired output mkt-map values; + # eventually this should probably come from a deterministically + # generated symcache.. + # TODO: figure out how make this not be so frickin CRAP by + # either allowing bs_mktid to be the position key or possibly + # be extra pendantic with the `Client._mkts` table? + con: Contract = ibpos.contract + mkt: MktPair = cons2mkts.get(con, pikerpos.mkt) + bs_fqme: str = mkt.bs_fqme + + msg = BrokerdPosition( + broker='ib', + + # TODO: probably forget about this once we drop this msg + # entirely from our set.. + # XXX: ok so this is annoying, we're relaying + # an account name with the backend suffix prefixed + # but when reading accounts from ledgers we don't + # need it and/or it's prefixed in the section + # table.. we should just strip this from the message + # right since `.broker` is already included? + account=f'ib.{acctid}', + # account=account_def.inverse[ibpos.account], + + # XXX: the `.ib` is stripped..? + symbol=bs_fqme, + + # remove.. + # currency=ibpos.currency, + + # NOTE: always take their size since it's usually the + # true gospel.. this SHOULD be the same always as ours + # tho.. + # size=pikerpos.size, + size=ibpos.position, + + avg_price=pikerpos.ppu, + ) - # reverse_split_ratio = pikersize / ibsize - # split_ratio = 1/reverse_split_ratio - # if split_ratio >= reverse_split_ratio: - # entry = f'split_ratio = {int(split_ratio)}' - # else: - # entry = f'split_ratio = 1/{int(reverse_split_ratio)}' - - msg.size = ibsize - logmsg: str = ( - f'Pos mismatch in ib vs. the piker ledger!\n' - f'IB:\n{ibfmtmsg}\n\n' - f'PIKER:\n{pikerfmtmsg}\n\n' - - # 'If you are expecting a (reverse) split in this ' - # 'instrument you should probably put the following' - # 'in the `pps.toml` section:\n' - # f'{entry}\n' - # f'reverse_split_ratio: {reverse_split_ratio}\n' - # f'split_ratio: {split_ratio}\n\n' - ) + ibfmtmsg: str = pformat(ibpos._asdict()) + pikerfmtmsg: str = pformat(msg.to_dict()) + + ibsize: float = ibpos.position + pikersize: float = msg.size + diff: float = pikersize - ibsize + + # NOTE: compare equivalent ib reported position message for + # comparison/audit versus the piker equivalent breakeven pp + # calcs. 
if ib reports a lesser pp it's not as bad since we can + # presume we're at least not more in the shit then we thought. + if ( + diff + and ( + pikersize + or ibsize + ) + ): - if validate: - raise ValueError(logmsg) - else: - # await tractor.pause() - log.error(logmsg) - - # TODO: make this a "propaganda" log level? - if ibppmsg.avg_price != msg.avg_price: - log.warning( - f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n' - f'ib: {ibfmtmsg}\n' - '---------------------------\n' - f'piker: {pformat(msg.to_dict())}' - ) + # reverse_split_ratio = pikersize / ibsize + # split_ratio = 1/reverse_split_ratio + # if split_ratio >= reverse_split_ratio: + # entry = f'split_ratio = {int(split_ratio)}' + # else: + # entry = f'split_ratio = 1/{int(reverse_split_ratio)}' + + msg.size = ibsize + logmsg: str = ( + f'Pos mismatch in ib vs. the piker ledger!\n' + f'IB:\n{ibfmtmsg}\n\n' + f'PIKER:\n{pikerfmtmsg}\n\n' + + # 'If you are expecting a (reverse) split in this ' + # 'instrument you should probably put the following' + # 'in the `pps.toml` section:\n' + # f'{entry}\n' + # f'reverse_split_ratio: {reverse_split_ratio}\n' + # f'split_ratio: {split_ratio}\n\n' + ) + if validate: + raise ValueError(logmsg) else: - # XXX: though it shouldn't be possible (means an error - # in our accounting subsys) create a new message for - # a supposed "missing position" that IB never reported. - msg = BrokerdPosition( - broker='ib', - - # XXX: ok so this is annoying, we're relaying - # an account name with the backend suffix prefixed - # but when reading accounts from ledgers we don't - # need it and/or it's prefixed in the section - # table.. we should just strip this from the message - # right since `.broker` is already included? - account=f'ib.{acctid}', - - # XXX: the `.ib` is stripped..? - symbol=p.mkt.fqme, - - # TODO: we should remove from msg schema.. - # currency=ibppmsg.currency, - - size=p.size, - avg_price=p.ppu, - ) - if p.size: - logmsg: str = ( - f'UNEXPECTED POSITION says IB => {msg.symbol}\n' - 'Maybe they LIQUIDATED YOU or are missing ledger entries?\n' - ) - log.error(logmsg) - if validate: - raise ValueError(logmsg) + # await tractor.pause() + log.error(logmsg) + + # TODO: make this a "propaganda" log level? + if ibpos.avgCost != msg.avg_price: + log.warning( + f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n' + f'ib: {ibfmtmsg}\n' + '---------------------------\n' + f'piker: {pformat(msg.to_dict())}' + ) - return msgs + return msg async def aggr_open_orders( @@ -538,24 +467,54 @@ async def aggr_open_orders( return order_msgs -# proxy wrapper for starting trade event stream async def open_trade_event_stream( client: Client, task_status: TaskStatus[ - trio.abc.ReceiveChannel + LinkedTaskChannel ] = trio.TASK_STATUS_IGNORED, ): - # each api client has a unique event stream + ''' + Proxy wrapper for starting trade event stream from ib_insync + which spawns an asyncio task that registers an internal closure + (`push_tradies()`) which in turn relays trading events through + a `tractor.to_asyncio.LinkedTaskChannel` which the parent + (caller task) can use to process said events in trio-land. + + NOTE: each api client has a unique event stream. 
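The audit rule above, boiled down to a standalone function: ib's
reported size wins, and any drift vs. the ledger-computed size is
either a hard error or a loud warning depending on `validate` (a
sketch, not the full msg-building logic):

    def audit_size(
        piker_size: float,
        ib_size: float,
        validate: bool = True,
    ) -> float:
        diff: float = piker_size - ib_size
        if diff and (piker_size or ib_size):
            logmsg: str = (
                f'Pos mismatch in ib vs. the piker ledger! '
                f'ib={ib_size} piker={piker_size}'
            )
            if validate:
                raise ValueError(logmsg)
            print('WARNING:', logmsg)
        return ib_size

    assert audit_size(0.0, 0.0) == 0.0
    assert audit_size(2.0, 3.0, validate=False) == 3.0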
+ + ''' + trade_event_stream: LinkedTaskChannel + async with tractor.to_asyncio.open_channel_from( recv_trade_updates, client=client, - ) as (_, trade_event_stream): - - # assert ibclient is client.ib + ) as ( + _, # first pushed val + trade_event_stream, + ): task_status.started(trade_event_stream) + # block forever to keep session trio-asyncio session + # up until cancelled or error on either side. await trio.sleep_forever() +class IbAcnt(Struct): + ''' + Wrapper around the useful info for doing accounting (mostly for + position tracking). + + ''' + key: str + balances: dict[ + str, # fiat or crypto name + float # current balance + ] + # TODO: do we need the asset instances? + # (float, Asset), + # ] + positions: dict[str, IbPosition] + + @tractor.context async def open_trade_dialog( ctx: tractor.Context, @@ -565,11 +524,11 @@ async def open_trade_dialog( accounts_def = config.load_accounts(['ib']) # deliver positions to subscriber before anything else - all_positions = [] - accounts = set() - acctids = set() - cids2pps: dict[str, BrokerdPosition] = {} + all_positions: list[BrokerdPosition] = [] + accounts: set[str] = set() + acctids: set[str] = set() + symcache: SymbologyCache async with ( open_client_proxies() as ( proxies, @@ -592,10 +551,12 @@ async def open_trade_dialog( ExitStack() as lstack, ): # load ledgers and pps for all detected client-proxies + account: str + proxy: MethodProxy for account, proxy in proxies.items(): assert account in accounts_def accounts.add(account) - acctid = account.strip('ib.') + acctid: str = account.strip('ib.') acctids.add(acctid) # open ledger and pptable wrapper for each @@ -628,7 +589,47 @@ async def open_trade_dialog( ) for account, proxy in proxies.items(): - client = aioclients[account] + client: Client = aioclients[account] + + # process pp value reported from ib's system. we only + # use these to cross-check sizing since average pricing + # on their end uses the so called (bs) "FIFO" style + # which more or less results in a price that's not + # useful for traders who want to not lose money.. xb + # -> collect all ib-pp reported positions so that we can be + # sure know which positions to update from the ledger if + # any are missing from the ``pps.toml`` + # await tractor.pp() + ib_positions: dict[str, IbPosition] = {} + pos: IbPosition # named tuple subtype + for pos in client.positions(): + bs_mktid: str = str(pos.contract.conId) + ib_positions[bs_mktid] = pos + + bs_mktid, msg = pack_position(pos, accounts_def) + acctid: str = msg.account.strip('ib.') + + assert msg.account in accounts, ( + f'Position for unknown account: {msg.account}') + + balances: dict[str, tuple[float, Asset]] = {} + for av in client.ib.accountValues(): + match av.tag: + case 'CashBalance': + balances[av.currency] = float(av.value) + + # TODO: if we want supposed forex pnls? + # case 'UnrealizedPnL': + # ... + + ibacnt = IbAcnt( + key=acctid, + balances=balances, + positions=ib_positions, + ) + # print( + # f'Current balances for {ibacnt.key}: {ibacnt.balances}' + # ) # order_msgs is filled in by this helper await aggr_open_orders( @@ -639,7 +640,7 @@ async def open_trade_dialog( ) acctid: str = account.strip('ib.') ledger: dict = ledgers[acctid] - table: Account = tables[acctid] + acnt: Account = tables[acctid] # update position table with latest ledger from all # gathered transactions: ledger file + api records. 
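A minimal sketch of the balance-collection loop that fills the new
`IbAcnt` wrapper, using a made-up dataclass and fake
`accountValues()`-style rows (the real type is a msgspec-based
`Struct`, not a dataclass):

    from dataclasses import dataclass, field

    @dataclass
    class ToyIbAcnt:
        key: str
        balances: dict[str, float] = field(default_factory=dict)
        positions: dict[str, dict] = field(default_factory=dict)

    # fake (tag, currency, value) rows shaped like ib account values
    rows = [
        ('CashBalance', 'USD', '10000.0'),
        ('UnrealizedPnL', 'USD', '42.0'),  # ignored, like above
    ]
    acnt = ToyIbAcnt(key='du123')
    for tag, currency, value in rows:
        match tag:
            case 'CashBalance':
                acnt.balances[currency] = float(value)

    assert acnt.balances == {'USD': 10000.0}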
@@ -650,21 +651,21 @@ async def open_trade_dialog( # update trades ledgers for all accounts from connected # api clients which report trades for **this session**. - api_trades: list[dict] = await proxy.trades() - if api_trades: + api_fills: list[Fill] = await proxy.get_fills() + if api_fills: api_trans_by_acct: dict[str, Transaction] api_to_ledger_entries: dict[str, dict] ( api_trans_by_acct, api_to_ledger_entries, ) = await update_ledger_from_api_trades( - api_trades, + api_fills, proxy, accounts_def_inv, symcache=symcache, ) - # if new api_trades are detected from the API, prepare + # if new api_fills are detected from the API, prepare # them for the ledger file and update the pptable. if ( api_to_ledger_entries @@ -674,7 +675,7 @@ async def open_trade_dialog( # https://github.com/goodboy/tractor/issues/354 # await tractor.pp() - # write ledger with all new api_trades + # write ledger with all new api_fills # **AFTER** we've updated the `pps.toml` # from the original ledger state! (i.e. this # is currently done on exit) @@ -685,61 +686,49 @@ async def open_trade_dialog( trans.update(api_trans) # update account (and thus pps) from all gathered transactions - table.update_from_ledger( + acnt.update_from_ledger( trans, symcache=ledger.symcache, ) - # process pp value reported from ib's system. we only - # use these to cross-check sizing since average pricing - # on their end uses the so called (bs) "FIFO" style - # which more or less results in a price that's not - # useful for traders who want to not lose money.. xb - # -> collect all ib-pp reported positions so that we can be - # sure know which positions to update from the ledger if - # any are missing from the ``pps.toml`` - # await tractor.pp() - - pos: IbPosition # named tuple subtype - for pos in client.positions(): - - # NOTE XXX: we skip options for now since we don't - # yet support the symbology nor the live feeds. - if isinstance(pos.contract, Option): - log.warning( - f'Option contracts not supported for now:\n' - f'{pos._asdict()}' - ) - continue - - bs_mktid, msg = pack_position(pos) - msg.account = accounts_def.inverse[msg.account] - acctid = msg.account.strip('ib.') - cids2pps[(acctid, bs_mktid)] = msg - - assert msg.account in accounts, ( - f'Position for unknown account: {msg.account}') - # iterate all (newly) updated pps tables for every # client-account and build out position msgs to deliver to # EMS. - for acctid, table in tables.items(): - active_pps, closed_pps = table.dump_active() + for acctid, acnt in tables.items(): + active_pps, closed_pps = acnt.dump_active() for pps in [active_pps, closed_pps]: - msgs = await update_and_audit_msgs( - acctid, - pps.values(), - cids2pps, - validate=False, - ) - all_positions.extend(msg for msg in msgs if msg.size != 0) - - if not all_positions and cids2pps: - raise RuntimeError( - 'Positions reported by ib but not found in `pps.toml`!?\n' - f'{pformat(cids2pps)}' - ) + piker_pps: list[Position] = list(pps.values()) + for pikerpos in piker_pps: + # TODO: map from both the contract ID + # (bs_mktid) AND the piker-ified FQME ?? + # - they might change the fqme when bbby get's + # downlisted to pink XD + # - the bs_mktid can randomly change like in + # gnln.nasdaq.. 
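The lookup-and-guard flow in this loop, in miniature: only emit a pp
msg when ib actually reported the position; a piker pos with non-zero
`cumsize` but no ib counterpart implies a busted ledger (toy data
below, real keys are str-ified `conId`s):

    ib_positions: dict[str, float] = {'12345': 3.0}  # bs_mktid -> size

    def audit_pp(bs_mktid: str, piker_cumsize: float) -> float | None:
        ibsize = ib_positions.get(bs_mktid)
        if ibsize is not None:
            return ibsize
        if piker_cumsize:
            raise RuntimeError(
                f'ib has no position for {bs_mktid} but the ledger does!?'
            )
        return None

    assert audit_pp('12345', 3.0) == 3.0
    assert audit_pp('99999', 0.0) is None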
+ ibpos: IbPosition | None = ibacnt.positions.get( + pikerpos.bs_mktid + ) + if ibpos: + bs_mktid: str = str(ibpos.contract.conId) + msg = await update_and_audit_pos_msg( + acctid, + pikerpos, + ibpos, + cons2mkts=client._cons2mkts, + validate=False, + ) + if msg and msg.size != 0: + all_positions.append(msg) + elif ( + not ibpos + and pikerpos.cumsize + ): + logmsg: str = ( + f'UNEXPECTED POSITION says IB => {msg.symbol}\n' + 'Maybe they LIQUIDATED YOU or your ledger is wrong?\n' + ) + log.error(logmsg) await ctx.started(( all_positions, @@ -755,7 +744,7 @@ async def open_trade_dialog( await ems_stream.send(msg) for client in set(aioclients.values()): - trade_event_stream = await n.start( + trade_event_stream: LinkedTaskChannel = await n.start( open_trade_event_stream, client, ) @@ -771,21 +760,19 @@ async def open_trade_dialog( # allocate event relay tasks for each client connection n.start_soon( deliver_trade_events, - # n, + trade_event_stream, ems_stream, accounts_def, - cids2pps, proxies, - ledgers, tables, ) # write account and ledger files immediately! # TODO: make this thread-async! - for acctid, table in tables.items(): - table.write_config() + for acctid, acnt in tables.items(): + acnt.write_config() ledgers[acctid].write_config() # block until cancelled @@ -794,103 +781,196 @@ async def open_trade_dialog( async def emit_pp_update( ems_stream: tractor.MsgStream, - trade_entry: dict, + accounts_def: bidict[str, str], proxies: dict, - cids2pps: dict, ledgers: dict[str, dict[str, Any]], acnts: dict[str, Account], + ibpos: IbPosition, # required! + + # NEED it before we actually update the trade ledger + fill: Fill | None = None, + ) -> None: ''' - Extract trade record from an API event, convert it into a `Transaction`, - update the backing ledger and finally emit a position update to the EMS. + Emit a position update to the EMS either directly from + a `IbPosition` update (received from the API) or ideally from + a `piker.accounting.Position` update (once it's entirely bug + free xD) by extracting the trade record from the (optionally + provided) `Fill` event, convert it into a `Transaction`, update + the backing ledger and emit a msg for the account's `Position` + entry. 
''' accounts_def_inv: bidict[str, str] = accounts_def.inverse - accnum: str = trade_entry['execution']['acctNumber'] + accnum: str = ibpos.account fq_acctid: str = accounts_def_inv[accnum] proxy: MethodProxy = proxies[fq_acctid] + client: Client = proxy._aio_ns + + # XXX FILL CASE: + # compute and relay incrementally updated piker pos + # after doing accounting calcs + if fill: + ( + records_by_acct, + api_to_ledger_entries, + ) = await update_ledger_from_api_trades( + [fill], + proxy, + accounts_def_inv, + ) + trans: dict[str, Transaction] = records_by_acct[fq_acctid] + tx: Transaction = list(trans.values())[0] - # compute and relay incrementally updated piker pp - ( - records_by_acct, - api_to_ledger_entries, - ) = await update_ledger_from_api_trades( - [trade_entry], - proxy, - accounts_def_inv, - ) - trans: dict[str, Transaction] = records_by_acct[fq_acctid] - tx: Transaction = list(trans.values())[0] + acctid: str = fq_acctid.strip('ib.') + acnt: Account = acnts[acctid] + ledger: TransactionLedger = ledgers[acctid] - acctid = fq_acctid.strip('ib.') - acnt = acnts[acctid] - ledger: dict = ledgers[acctid] + # write to disk/storage + ledger.write_config() - acnt.update_from_ledger( - trans, - symcache=ledger.symcache - ) + # con: Contract = fill.contract - active, closed = acnt.dump_active() + acnt.update_from_ledger( + trans, - # NOTE: update ledger with all new trades - for fq_acctid, trades_by_id in api_to_ledger_entries.items(): - acctid: str = fq_acctid.strip('ib.') - ledger: dict = ledgers[acctid] - - # NOTE: don't override flex/previous entries with new API - # ones, just update with new fields! - for tid, tdict in trades_by_id.items(): - ledger.setdefault(tid, {}).update(tdict) - - # generate pp msgs and cross check with ib's positions data, relay - # re-formatted pps as msgs to the ems. - for pos in filter( - bool, - [ - active.get(tx.bs_mktid), - closed.get(tx.bs_mktid) - ] - ): - msgs = await update_and_audit_msgs( - acctid, - [pos], - cids2pps, + # XXX: currently this is likely empty since we have no + # support! + symcache=ledger.symcache, - # ib pp event might not have arrived yet - validate=False, + # TODO: remove this hack by attempting to symcache an + # incrementally updated table? + _mktmap_table=client._contracts ) - if msgs: - msg = msgs[0] - log.info(f'Emitting pp msg: {msg}') - break - - await ems_stream.send(msg) - + # re-compute all positions that have changed state. + # TODO: likely we should change the API to return the + # position updates from `.update_from_ledger()`? + active, closed = acnt.dump_active() + + # NOTE: update ledger with all new trades + for fq_acctid, trades_by_id in api_to_ledger_entries.items(): + acctid: str = fq_acctid.strip('ib.') + ledger: dict = ledgers[acctid] + + # NOTE: don't override flex/previous entries with new API + # ones, just update with new fields! + for tid, tdict in trades_by_id.items(): + # ledger.setdefault(tid, {}).update(tdict) + ledger[tid].update(tdict) + + # generate pp msgs and cross check with ib's positions data, relay + # re-formatted pps as msgs to the ems. 
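The ledger merge rule above in isolation: existing (eg. flex report)
entries are never clobbered wholesale, only updated field-wise from
API txn dicts. The hunk switches to a strict `ledger[tid].update()`;
the `setdefault` variant sketched here additionally tolerates
brand-new tids:

    ledger: dict[str, dict] = {
        'tid1': {'price': 100.0, 'source': 'flex'},
    }
    api_entries: dict[str, dict] = {
        'tid1': {'price': 100.0, 'cost': 1.5},
        'tid2': {'price': 101.0},
    }
    for tid, tdict in api_entries.items():
        ledger.setdefault(tid, {}).update(tdict)

    assert ledger['tid1'] == {'price': 100.0, 'source': 'flex', 'cost': 1.5}
    assert ledger['tid2'] == {'price': 101.0}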
+ msg: dict | None = None + for pos in filter( + bool, + [ + active.get(tx.bs_mktid), + closed.get(tx.bs_mktid) + ] + ): + msg = await update_and_audit_pos_msg( + acctid, + pos, + ibpos, + cons2mkts=client._cons2mkts, + + # ib pp event might not have arrived yet + validate=False, + ) + if msg: + log.info(f'Emitting pp msg: {msg}') + break + + # XXX NO FILL CASE: + # if just handed an `IbPosition`, pack it and relay for now + # since we always want the size to be up to date even if + # the ppu is wrong.. + else: + bs_mktid, msg = pack_position(ibpos, accounts_def) + + if msg: + await ems_stream.send(msg) + else: + await tractor.pause() + + +# NOTE: See `OrderStatus` def for key schema; +# https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313 +# => we remap statuses to the ems set via the below table: +# short list: +# - PendingSubmit +# - PendingCancel +# - PreSubmitted (simulated orders) +# - ApiCancelled (cancelled by client before submission +# to routing) +# - Cancelled +# - Filled +# - Inactive (reject or cancelled but not by trader) + +# XXX: here's some other sucky cases from the api +# - short-sale but securities haven't been located, in this +# case we should probably keep the order in some kind of +# weird state or cancel it outright? + +# status='PendingSubmit', message=''), +# status='Cancelled', message='Error 404, +# reqId 1550: Order held while securities are located.'), +# status='PreSubmitted', message='')], _statuses: dict[str, str] = { - 'cancelled': 'canceled', - 'submitted': 'open', + 'Filled': 'filled', + 'Cancelled': 'canceled', + 'Submitted': 'open', - # XXX: just pass these through? it duplicates actual fill events other - # then the case where you the `.remaining == 0` case which is our - # 'closed'` case. - # 'filled': 'pending', - # 'pendingsubmit': 'pending', + 'PendingSubmit': 'pending', + 'PendingCancel': 'pending', + 'PreSubmitted': 'pending', + 'ApiPending': 'pending', + 'ApiCancelled': 'pending', # TODO: see a current ``ib_insync`` issue around this: # https://github.com/erdewit/ib_insync/issues/363 - 'inactive': 'pending', + 'Inactive': 'pending', } + _action_map = { 'BOT': 'buy', 'SLD': 'sell', } +# TODO: try out cancelling inactive orders after delay: +# https://github.com/erdewit/ib_insync/issues/363 (was originally +# inside `deliver_trade_events` status handler block. +# acctid = accounts_def.inverse[trade.order.account] +# double check there is no error when +# cancelling.. gawwwd +# if ib_status_key == 'cancelled': +# last_log = trade.log[-1] +# if ( +# last_log.message +# and 'Error' not in last_log.message +# ): +# ib_status_key = trade.log[-2].status +# +# elif ib_status_key == 'inactive': +# +# async def sched_cancel(): +# log.warning( +# 'OH GAWD an inactive order.scheduling a cancel\n' +# f'{pformat(item)}' +# ) +# proxy = proxies[acctid] +# await proxy.submit_cancel(reqid=trade.order.orderId) +# await trio.sleep(1) +# nurse.start_soon(sched_cancel) +# +# nurse.start_soon(sched_cancel) + + # TODO: maybe just make this a flat func without an interal loop # and call it *from* the `trade_event_stream` loop? Might look # a lot nicer doing that from open_trade_dialog() instead of @@ -901,7 +981,6 @@ async def deliver_trade_events( trade_event_stream: trio.MemoryReceiveChannel, ems_stream: tractor.MsgStream, accounts_def: dict[str, str], # eg. 
`'ib.main'` -> `'DU999999'` - cids2pps: dict[tuple[str, str], BrokerdPosition], proxies: dict[str, MethodProxy], ledgers, @@ -912,39 +991,23 @@ async def deliver_trade_events( Format and relay all trade events for a given client to emsd. ''' - ids2fills: dict[str, dict] = {} + # task local msg dialog tracking + clears: dict[ + Contract, + list[ + IbPosition | None, # filled by positionEvent + Fill | None, # filled by order status and exec details + ] + ] = {} + execid2con: dict[str, Contract] = {} # TODO: for some reason we can receive a ``None`` here when the # ib-gw goes down? Not sure exactly how that's happening looking # at the eventkit code above but we should probably handle it... async for event_name, item in trade_event_stream: - log.info(f'ib sending {event_name}:\n{pformat(item)}') + log.info(f'Relaying `{event_name}`:\n{pformat(item)}') match event_name: - # NOTE: we remap statuses to the ems set via the - # ``_statuses: dict`` above. - - # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313 - # short list: - # - PendingSubmit - # - PendingCancel - # - PreSubmitted (simulated orders) - # - ApiCancelled (cancelled by client before submission - # to routing) - # - Cancelled - # - Filled - # - Inactive (reject or cancelled but not by trader) - - # XXX: here's some other sucky cases from the api - # - short-sale but securities haven't been located, in this - # case we should probably keep the order in some kind of - # weird state or cancel it outright? - - # status='PendingSubmit', message=''), - # status='Cancelled', message='Error 404, - # reqId 1550: Order held while securities are located.'), - # status='PreSubmitted', message='')], - - case 'status': + case 'orderStatusEvent': # XXX: begin normalization of nonsense ib_insync internal # object-state tracking representations... @@ -952,59 +1015,29 @@ async def deliver_trade_events( # unwrap needed data from ib_insync internal types trade: Trade = item status: OrderStatus = trade.orderStatus - ib_status_key = status.status.lower() - - # TODO: try out cancelling inactive orders after delay: - # https://github.com/erdewit/ib_insync/issues/363 - # acctid = accounts_def.inverse[trade.order.account] - - # double check there is no error when - # cancelling.. gawwwd - # if ib_status_key == 'cancelled': - # last_log = trade.log[-1] - # if ( - # last_log.message - # and 'Error' not in last_log.message - # ): - # ib_status_key = trade.log[-2].status - - # elif ib_status_key == 'inactive': - - # async def sched_cancel(): - # log.warning( - # 'OH GAWD an inactive order.scheduling a cancel\n' - # f'{pformat(item)}' - # ) - # proxy = proxies[acctid] - # await proxy.submit_cancel(reqid=trade.order.orderId) - # await trio.sleep(1) - # nurse.start_soon(sched_cancel) - - # nurse.start_soon(sched_cancel) - - status_key = ( - _statuses.get(ib_status_key.lower()) - or ib_status_key.lower() - ) - - remaining = status.remaining + status_str: str = _statuses[status.status] + remaining: float = status.remaining if ( - status_key == 'filled' + status_str == 'filled' ): fill: Fill = trade.fills[-1] execu: Execution = fill.execution - # execdict = asdict(execu) - # execdict.pop('acctNumber') fill_msg = BrokerdFill( + time_ns=time.time_ns(), # cuz why not + # NOTE: should match the value returned from # `.submit_limit()` reqid=execu.orderId, - time_ns=time.time_ns(), # cuz why not + action=_action_map[execu.side], size=execu.shares, price=execu.price, + + # DO we care? 
should this be in another + # msg like the position msg? # broker_details=execdict, + # XXX: required by order mode currently broker_time=execu.time, ) @@ -1013,7 +1046,7 @@ async def deliver_trade_events( if remaining == 0: # emit a closed status on filled statuses where # all units were cleared. - status_key = 'closed' + status_str = 'closed' # skip duplicate filled updates - we get the deats # from the execution details event @@ -1023,7 +1056,7 @@ async def deliver_trade_events( account=accounts_def.inverse[trade.order.account], # everyone doin camel case.. - status=status_key, # force lower case + status=status_str, filled=status.filled, reason=status.whyHeld, @@ -1037,107 +1070,134 @@ async def deliver_trade_events( await ems_stream.send(msg) continue - case 'fill': - # for wtv reason this is a separate event type - # from IB, not sure why it's needed other then for extra - # complexity and over-engineering :eyeroll:. - # we may just end up dropping these events (or - # translating them to ``Status`` msgs) if we can - # show the equivalent status events are no more latent. - - # unpack ib_insync types - # pep-0526 style: - # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations + # XXX: for wtv reason this is a separate event type + # from IB, not sure why it's needed other then for extra + # complexity and over-engineering :eyeroll:. + # we may just end up dropping these events (or + # translating them to ``Status`` msgs) if we can + # show the equivalent status events are no more latent. + case 'execDetailsEvent': + # unpack attrs pep-0526 style. trade: Trade + con: Contract = trade.contract fill: Fill trade, fill = item execu: Execution = fill.execution - execid = execu.execId + execid: str = execu.execId + report: CommissionReport = fill.commissionReport + + # always fill in id to con map so when commissions + # arrive we can maybe fire the pos update.. + execid2con[execid] = con # TODO: # - normalize out commissions details? # - this is the same as the unpacking loop above in # ``trades_to_ledger_entries()`` no? - trade_entry = ids2fills.setdefault(execid, {}) - cost_already_rx = bool(trade_entry) - - # if the costs report was already received this - # should be not empty right? - comms = fill.commissionReport.commission - if cost_already_rx: - assert comms - - trade_entry.update( - { - 'contract': asdict(fill.contract), - 'execution': asdict(fill.execution), - # 'commissionReport': asdict(fill.commissionReport), - # supposedly server fill time? - 'broker_time': execu.time, - 'name': 'ib', - } - ) # 2 cases: # - fill comes first or - # - comms report comes first - comms = fill.commissionReport.commission - if comms: - # UGHHH since the commision report object might be - # filled in **after** we already serialized to dict.. - # def need something better for all this. - trade_entry.update( - {'commissionReport': asdict(fill.commissionReport)} - ) - - if comms or cost_already_rx: - # only send a pp update once we have a cost report + # - commission report comes first + clear: tuple = clears.setdefault( + con, + [None, fill], + ) + pos, _fill = clear + + # NOTE: we have to handle the case where a pos msg + # has already been set (bc we already relayed rxed + # one before both the exec-deats AND the + # comms-report?) but the comms-report hasn't yet + # arrived, so we fill in the fill (XD) and wait for + # the cost to show up before relaying the pos msg + # to the EMS.. 
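The `clears` buffering trick above as a runnable toy: exec details,
commission reports and position events can arrive in any order, so
buffer per-contract and only emit once both sides (and the fill's
cost) are present (string con keys stand in for real `Contract`s):

    clears: dict[str, list] = {}  # con key -> [pos | None, fill | None]

    def maybe_emit(con: str) -> bool:
        pos, fill = clears[con]
        if pos and fill and fill.get('cost') is not None:
            print(f'emit pp update: {con} {pos} {fill}')
            clears.pop(con)
            return True
        return False

    def on_position(con: str, pos: dict):
        clears.setdefault(con, [None, None])[0] = pos
        maybe_emit(con)

    def on_fill(con: str, fill: dict):
        clears.setdefault(con, [None, None])[1] = fill
        maybe_emit(con)

    on_fill('mnq.cme', {'price': 15900.0, 'cost': 0.6})
    on_position('mnq.cme', {'size': 1.0})  # -> emits here
    assert 'mnq.cme' not in clears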
+ if _fill is None: + clear[1] = fill + + cost: float = report.commission + if ( + pos + and fill + and cost + ): await emit_pp_update( ems_stream, - trade_entry, accounts_def, proxies, - cids2pps, - ledgers, tables, + + ibpos=pos, + fill=fill, ) + clears.pop(con) - case 'cost': + case 'commissionReportEvent': cr: CommissionReport = item - execid = cr.execId - - trade_entry = ids2fills.setdefault(execid, {}) - fill_already_rx = bool(trade_entry) + execid: str = cr.execId # only fire a pp msg update if, # - we haven't already # - the fill event has already arrived # but it didn't yet have a commision report # which we fill in now. + + # placehold i guess until someone who know wtf + # contract this is from can fill it in... + con: Contract | None = execid2con.setdefault(execid, None) if ( - fill_already_rx - and 'commissionReport' not in trade_entry + con + and (clear := clears.get(con)) ): - # no fill msg has arrived yet so just fill out the - # cost report for now and when the fill arrives a pp - # msg can be emitted. - trade_entry.update( - {'commissionReport': asdict(cr)} - ) + pos, fill = clear + if ( + pos + and fill + ): + assert fill.commissionReport == cr + await emit_pp_update( + ems_stream, + accounts_def, + proxies, + ledgers, + tables, + + ibpos=pos, + fill=fill, + ) + clears.pop(con) + # TODO: should we clean this? + # execid2con.pop(execid) + + # always update with latest ib pos msg info since + # we generally audit against it for sanity and + # testing AND we require it to be updated to avoid + # error msgs emitted from `update_and_audit_pos_msg()` + case 'positionEvent': + pos: IbPosition = item + con: Contract = pos.contract - await emit_pp_update( - ems_stream, - trade_entry, - accounts_def, - proxies, - cids2pps, + bs_mktid, ppmsg = pack_position(pos, accounts_def) + log.info(f'New IB position msg: {ppmsg}') - ledgers, - tables, - ) + _, fill = clears.setdefault( + con, + [pos, None], + ) + # only send a pos update once we've actually rxed + # the msg from IB since generally speaking we use + # their 'cumsize' as gospel. + await emit_pp_update( + ems_stream, + accounts_def, + proxies, + ledgers, + tables, + + ibpos=pos, + ) case 'error': err: dict = item @@ -1163,21 +1223,6 @@ async def deliver_trade_events( # broker_details={'name': 'ib'}, # )) - case 'position': - - pos: IbPosition = item - bs_mktid, msg = pack_position(pos) - log.info(f'New IB position msg: {msg}') - - # always update with latest ib pos msg info since - # we generally audit against it for sanity and - # testing AND we require it to be updated to avoid - # error msgs emitted from `update_and_audit_msgs()` - cids2pps[(msg.account, bs_mktid)] = msg - - # cuck ib and it's shitty fifo sys for pps! - continue - case 'event': # it's either a general system status event or an external diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py index cc79122ca..9143cce6f 100644 --- a/piker/brokers/ib/ledger.py +++ b/piker/brokers/ib/ledger.py @@ -18,17 +18,26 @@ Trade transaction accounting and normalization. 
''' +from __future__ import annotations from bisect import insort +from dataclasses import asdict from decimal import Decimal from functools import partial from pprint import pformat from typing import ( Any, Callable, + TYPE_CHECKING, ) from bidict import bidict import pendulum +from ib_insync.objects import ( + Contract, + Fill, + Execution, + CommissionReport, +) from piker.data import ( Struct, @@ -45,6 +54,12 @@ from ._flex_reports import parse_flex_dt from ._util import log +if TYPE_CHECKING: + from .api import ( + Client, + MethodProxy, + ) + tx_sort: Callable = partial( iter_by_dt, @@ -71,7 +86,8 @@ def norm_trade( ) -> Transaction | None: - conid = record.get('conId') or record['conid'] + conid: int = str(record.get('conId') or record['conid']) + bs_mktid: str = str(conid) comms = record.get('commission') if comms is None: comms = -1*record['ibCommission'] @@ -86,7 +102,11 @@ def norm_trade( }[record['side']] symbol: str = record['symbol'] - exch: str = record.get('listingExchange') or record['exchange'] + exch: str = ( + record.get('listingExchange') + or record.get('primaryExchange') + or record['exchange'] + ) # NOTE: remove null values since `tomlkit` can't serialize # them to file. @@ -156,11 +176,31 @@ def norm_trade( match asset_type: case 'FUT': - # (flex) ledger entries don't have any simple 3-char key? - # TODO: XXX: WOA this is kinda hacky.. probably - # should figure out the correct future pair key more - # explicitly and consistently? - symbol: str = symbol[:3] + # XXX (flex) ledger entries don't necessarily have any + # simple 3-char key.. sometimes the .symbol is some + # weird internal key that we probably don't want in the + # .fqme => we should probably just wrap `Contract` to + # this like we do other crypto$ backends XD + + # NOTE: at least older FLEX records should have + # this field.. no idea about API entries.. + local_symbol: str | None = record.get('localSymbol') + underlying_key: str = record.get('underlyingSymbol') + descr: str | None = record.get('description') + + if ( + not ( + local_symbol + and symbol in local_symbol + ) + and ( + descr + and symbol not in descr + ) + ): + con_key, exp_str = descr.split(' ') + symbol: str = underlying_key or con_key + dst = Asset( name=symbol.lower(), atype='future', @@ -206,8 +246,9 @@ def norm_trade( # NOTE: can't serlialize `tomlkit.String` so cast to native atype: str = str(dst.atype) + # if not (mkt := symcache.mktmaps.get(bs_mktid)): mkt = MktPair( - bs_mktid=str(conid), + bs_mktid=bs_mktid, dst=dst, price_tick=price_tick, @@ -232,7 +273,21 @@ def norm_trade( # XXX: if passed in, we fill out the symcache ad-hoc in order # to make downstream accounting work.. - if symcache: + if symcache is not None: + orig_mkt: MktPair | None = symcache.mktmaps.get(bs_mktid) + if ( + orig_mkt + and orig_mkt.fqme != mkt.fqme + ): + log.warning( + # print( + f'Contracts with common `conId`: {bs_mktid} mismatch..\n' + f'{orig_mkt.fqme} -> {mkt.fqme}\n' + # 'with DIFF:\n' + # f'{mkt - orig_mkt}' + ) + + symcache.mktmaps[bs_mktid] = mkt symcache.mktmaps[fqme] = mkt symcache.assets[src.name] = src symcache.assets[dst.name] = dst @@ -271,9 +326,7 @@ def norm_trade_records( extraction to fill in the `Transaction.sys: MktPair` field. 
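A deliberately simplified cut of the FUT symbol normalization above:
when ib's `.symbol` is some internal key, recover the underlying from
`underlyingSymbol` or the '<KEY> <EXPIRY>' shaped `description`
(record values below are made up):

    def fute_symbol(record: dict) -> str:
        symbol: str = record['symbol']
        local: str = record.get('localSymbol') or ''
        descr: str = record.get('description') or ''
        if (
            not (local and symbol in local)
            and (descr and symbol not in descr)
        ):
            con_key, _expiry = descr.split(' ')
            return record.get('underlyingSymbol') or con_key
        return symbol

    assert fute_symbol({
        'symbol': 'QMNQ',  # fake internal key
        'localSymbol': 'MNQU3',
        'description': 'MNQ SEP23',
        'underlyingSymbol': 'MNQ',
    }) == 'MNQ'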
''' - # select: list[transactions] = [] records: list[Transaction] = [] - for tid, record in ledger.items(): txn = norm_trade( @@ -294,64 +347,54 @@ def norm_trade_records( key=lambda t: t.dt ) - # if ( - # atype == 'fiat' - # or atype == 'option' - # ): - # select.append(trans) - - # if select: - # breakpoint() - return {r.tid: r for r in records} def api_trades_to_ledger_entries( accounts: bidict[str, str], - - # TODO: maybe we should just be passing through the - # ``ib_insync.order.Trade`` instance directly here - # instead of pre-casting to dicts? - trade_entries: list[dict], + fills: list[Fill], ) -> dict[str, dict]: ''' - Convert API execution objects entry objects into ``dict`` form, - pretty much straight up without modification except add - a `pydatetime` field from the parsed timestamp. + Convert API execution objects entry objects into + flattened-``dict`` form, pretty much straight up without + modification except add a `pydatetime` field from the parsed + timestamp so that on write ''' trades_by_account: dict[str, dict] = {} - for t in trade_entries: - # NOTE: example of schema we pull from the API client. - # { - # 'commissionReport': CommissionReport(... - # 'contract': {... - # 'execution': Execution(... - # 'time': 1654801166.0 - # } - - # flatten all sub-dicts and values into one top level entry. - entry = {} - for section, val in t.items(): - match section: + for fill in fills: + + # NOTE: for the schema, see the defn for `Fill` which is + # a `NamedTuple` subtype + fdict: dict = fill._asdict() + + # flatten all (sub-)objects and convert to dicts. + # with values packed into one top level entry. + val: CommissionReport | Execution | Contract + txn_dict: dict[str, Any] = {} + for attr_name, val in fdict.items(): + match attr_name: + # value is a `@dataclass` subtype case 'contract' | 'execution' | 'commissionReport': - # sub-dict cases - entry.update(val) + txn_dict.update(asdict(val)) case 'time': # ib has wack ns timestamps, or is that us? continue + # TODO: we can remove this case right since there's + # only 4 fields on a `Fill`? case _: - entry[section] = val + txn_dict[attr_name] = val + + tid = str(txn_dict['execId']) + dt = pendulum.from_timestamp(txn_dict['time']) + txn_dict['datetime'] = str(dt) + acctid = accounts[txn_dict['acctNumber']] - tid = str(entry['execId']) - dt = pendulum.from_timestamp(entry['time']) - # TODO: why isn't this showing seconds in the str? - entry['pydatetime'] = dt - entry['datetime'] = str(dt) - acctid = accounts[entry['acctNumber']] + # NOTE: only inserted (then later popped) for sorting below! + txn_dict['pydatetime'] = dt if not tid: # this is likely some kind of internal adjustment @@ -362,13 +405,18 @@ def api_trades_to_ledger_entries( # the user from the accounts window in TWS where they can # manually set the avg price and size: # https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST - log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}') + log.warning( + 'Skipping ID-less ledger txn_dict:\n' + f'{pformat(txn_dict)}' + ) continue trades_by_account.setdefault( acctid, {} - )[tid] = entry + )[tid] = txn_dict + # TODO: maybe we should just bisect.insort() into a list of + # tuples and then return a dict of that? 
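The flattening step above in miniature, using toy stand-ins for
ib_insync's `Fill` namedtuple and its dataclass members (not the real
types):

    from dataclasses import dataclass, asdict
    from typing import NamedTuple

    @dataclass
    class ToyExecution:
        execId: str
        acctNumber: str

    @dataclass
    class ToyContract:
        conId: int

    class ToyFill(NamedTuple):
        contract: ToyContract
        execution: ToyExecution
        time: float

    fill = ToyFill(ToyContract(1), ToyExecution('e1', 'DU1'), 1690000000.0)
    txn_dict: dict = {}
    for attr_name, val in fill._asdict().items():
        match attr_name:
            case 'contract' | 'execution':
                txn_dict.update(asdict(val))
            case 'time':
                continue  # wacky ns timestamps dropped, like above
            case _:
                txn_dict[attr_name] = val

    assert txn_dict == {'conId': 1, 'execId': 'e1', 'acctNumber': 'DU1'}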
# sort entries in output by python based datetime for acctid in trades_by_account: trades_by_account[acctid] = dict(sorted( @@ -377,3 +425,55 @@ def api_trades_to_ledger_entries( )) return trades_by_account + + +async def update_ledger_from_api_trades( + fills: list[Fill], + client: Client | MethodProxy, + accounts_def_inv: bidict[str, str], + + # NOTE: provided for ad-hoc insertions "as transactions are + # processed" -> see `norm_trade()` signature requirements. + symcache: SymbologyCache | None = None, + +) -> tuple[ + dict[str, Transaction], + dict[str, dict], +]: + # XXX; ERRGGG.. + # pack in the "primary/listing exchange" value from a + # contract lookup since it seems this isn't available by + # default from the `.fills()` method endpoint... + fill: Fill + for fill in fills: + con: Contract = fill.contract + conid: str = con.conId + pexch: str | None = con.primaryExchange + + if not pexch: + cons = await client.get_con(conid=conid) + if cons: + con = cons[0] + pexch = con.primaryExchange or con.exchange + else: + # for futes it seems like the primary is always empty? + pexch: str = con.exchange + + # pack in the ``Contract.secType`` + # entry['asset_type'] = condict['secType'] + + entries: dict[str, dict] = api_trades_to_ledger_entries( + accounts_def_inv, + fills, + ) + # normalize recent session's trades to the `Transaction` type + trans_by_acct: dict[str, dict[str, Transaction]] = {} + + for acctid, trades_by_id in entries.items(): + # normalize to transaction form + trans_by_acct[acctid] = norm_trade_records( + trades_by_id, + symcache=symcache, + ) + + return trans_by_acct, entries From 1d35747fbf3a91ca33ce0ffa05e6a706b05648eb Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:10:26 -0400 Subject: [PATCH 071/116] Always clear `Position._events` in `.from_msg()`.. Not sure why i ever thought it would work otherwise but, obviously if you're replicating a `Position` from a **summary** (IPC) msg we need to wipe any prior clearing events from the events history.. 
The main use for this loading mechanism is precisely if you
don't have local access to the txn ledger and need to
represent a position from a summary :facepalm:

Also, never bother with ledger file fqme "rewriting" if the
backend has no symcache support (yet) since obviously there's
then no symbol set to search for a better key xD
---
 piker/accounting/_ledger.py  | 18 +++++++++++++-----
 piker/accounting/_mktinfo.py | 2 +-
 piker/accounting/_pos.py | 13 +++++++++++++
 3 files changed, 27 insertions(+), 6 deletions(-)

diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py
index fd7a50ffa..5f1865c97 100644
--- a/piker/accounting/_ledger.py
+++ b/piker/accounting/_ledger.py
@@ -199,6 +199,7 @@ def iter_txns(
                 tid,
                 txdict,
                 pairs=symcache.pairs,
+                symcache=symcache,
             )
             yield txn
@@ -231,6 +232,7 @@ def write_config(self) -> None:
 
         '''
         is_paper: bool = self.account == 'paper'
+        symcache: SymbologyCache = self._symcache
         towrite: dict[str, Any] = {}
         for tid, txdict in self.tx_sort(self.data.copy()):
             # write blank-str expiry for non-expiring assets
@@ -241,12 +243,18 @@ def write_config(self) -> None:
                 txdict['expiry'] = ''
 
             # (maybe) re-write old acro-key
-            if is_paper:
+            if (
+                is_paper
+                # if symcache is empty/not supported (yet), don't
+                # bother xD
+                and symcache.mktmaps
+            ):
                 fqme: str = txdict.pop('fqsn', None) or txdict['fqme']
                 bs_mktid: str | None = txdict.get('bs_mktid')
 
                 if (
+
-                    fqme not in self._symcache.mktmaps
+                    fqme not in symcache.mktmaps
                     or (
                         # also try to see if this is maybe a paper
                         # engine ledger in which case the bs_mktid
@@ -257,10 +265,10 @@ def write_config(self) -> None:
                 ):
                     # always take any (paper) bs_mktid if defined and
                     # in the backend's cache key set.
-                    if bs_mktid in self._symcache.mktmaps:
+                    if bs_mktid in symcache.mktmaps:
                         fqme: str = bs_mktid
                     else:
-                        best_fqme: str = list(self._symcache.search(fqme))[0]
+                        best_fqme: str = list(symcache.search(fqme))[0]
                         log.warning(
                             f'Could not find FQME: {fqme} in qualified set?\n'
                             f'Qualifying and expanding {fqme} -> {best_fqme}'
@@ -371,7 +379,7 @@ def open_trade_ledger(
 
     if symcache is None:
         # XXX: be mega pedantic and ensure the caller knows what
-        # they're doing..
+        # they're doing!
         if not allow_from_sync_code:
             raise RuntimeError(
                 'You MUST set `allow_from_sync_code=True` when '
diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py
index ed0beb94e..4197c1bb5 100644
--- a/piker/accounting/_mktinfo.py
+++ b/piker/accounting/_mktinfo.py
@@ -605,7 +605,7 @@ def type_key(self) -> str:
         if isinstance(self.dst, Asset):
             return str(self.dst.atype)
 
-        return 'unknown'
+        return 'UNKNOWN'
 
     @property
     def price_tick_digits(self) -> int:
diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index 4b4b72d60..40a654bde 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -292,11 +292,21 @@ def update_from_msg(
         msg: BrokerdPosition,
 
     ) -> None:
+        '''
+        Hard-set the current position from a remotely-received
+        (normally via IPC) msg by applying the msg as the one (and
+        only) txn in the `._events` table thus forcing the current
+        asset allocation blindly.
 
+        '''
         mkt: MktPair = self.mkt
         now_dt: pendulum.DateTime = now()
         now_str: str = str(now_dt)
 
+        # XXX: wipe all prior txn history since if we wanted it we
+        # wouldn't be using this method to compute our state!
+        self._events.clear()
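+        # (i.e. the incoming summary msg becomes the position's
+        # entire clearing history via one synthetic txn below)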
--- piker/clearing/_paper_engine.py | 90 ++++++++++++++++++++++----------- piker/clearing/_util.py | 3 -- 2 files changed, 61 insertions(+), 32 deletions(-) diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 03b69c0e6..67c789a43 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -30,6 +30,7 @@ from typing import ( Callable, ) +from types import ModuleType import uuid from bidict import bidict @@ -37,23 +38,24 @@ import trio import tractor -from ..brokers import get_brokermod -from .. import data -from ..data.types import Struct -from ..accounting._mktinfo import ( - MktPair, -) -from ..accounting import ( +from piker.brokers import get_brokermod +from piker.accounting import ( Position, Account, Transaction, TransactionLedger, open_trade_ledger, open_account, + MktPair, + unpack_fqme, +) +from piker.data import ( + open_feed, + iterticks, + Struct, + open_symcache, + SymbologyCache, ) -from ..data import iterticks -from ..data._symcache import open_symcache -from ..accounting import unpack_fqme from ._util import ( log, # sub-sys logger get_console_log, @@ -262,7 +264,6 @@ async def fake_fill( # we don't actually have any unique backend symbol ourselves # other then this thing, our fqme address. bs_mktid: str = fqme - assert self._mkts[fqme].fqme == fqme t = Transaction( fqme=fqme, tid=oid, @@ -278,6 +279,11 @@ async def fake_fill( self.acnt.update_from_ledger( {oid: t}, symcache=self.ledger._symcache, + + # XXX when a backend has no symcache support yet we can + # simply pass in the gmi() retreived table created + # during init :o + _mktmap_table=self._mkts, ) # transmit pp msg to ems @@ -544,7 +550,9 @@ async def open_trade_dialog( # enable piker.clearing console log for *this* subactor get_console_log(loglevel) + symcache: SymbologyCache async with open_symcache(get_brokermod(broker)) as symcache: + acnt: Account ledger: TransactionLedger with ( @@ -564,38 +572,53 @@ async def open_trade_dialog( symcache=symcache, ) as ledger ): - # NOTE: retreive market(pair) info from the backend broker - # since ledger entries (in their backend native format) often - # don't contain necessary market info per trade record entry.. - # - if no fqme was passed in, we presume we're running in - # "ledger-sync-only mode" and thus we load mkt info for - # each symbol found in the ledger to a acnt table manually. + # NOTE: WE MUST retreive market(pair) info from each + # backend broker since ledger entries (in their + # provider-native format) often don't contain necessary + # market info per trade record entry.. + # FURTHER, if no fqme was passed in, we presume we're + # running in "ledger-sync-only mode" and thus we load + # mkt info for each symbol found in the ledger to + # an acnt table manually. # TODO: how to process ledger info from backends? # - should we be rolling our own actor-cached version of these - # client API refs or using portal IPC to send requests to the - # existing brokerd daemon? + # client API refs or using portal IPC to send requests to the + # existing brokerd daemon? # - alternatively we can possibly expect and use - # a `.broker.norm_trade_records()` ep? - brokermod = get_brokermod(broker) - gmi = getattr(brokermod, 'get_mkt_info', None) + # a `.broker.ledger.norm_trade()` ep? 
+ brokermod: ModuleType = get_brokermod(broker) + gmi: Callable = getattr(brokermod, 'get_mkt_info', None) # update all transactions with mkt info before # loading any pps mkt_by_fqme: dict[str, MktPair] = {} - if fqme: + if ( + fqme + and fqme not in symcache.mktmaps + ): + log.warning( + f'Symcache for {broker} has no `{fqme}` entry?\n' + 'Manually requesting mkt map data via `.get_mkt_info()`..' + ) + bs_fqme, _, broker = fqme.rpartition('.') - mkt, pair = await brokermod.get_mkt_info(bs_fqme) + mkt, pair = await gmi(bs_fqme) mkt_by_fqme[mkt.fqme] = mkt - # for each sym in the ledger load it's `MktPair` info + # for each sym in the ledger load its `MktPair` info for tid, txdict in ledger.data.items(): l_fqme: str = txdict.get('fqme') or txdict['fqsn'] if ( gmi + and l_fqme not in symcache.mktmaps and l_fqme not in mkt_by_fqme ): + log.warning( + f'Symcache for {broker} has no `{l_fqme}` entry?\n' + 'Manually requesting mkt map data via `.get_mkt_info()`..' + ) mkt, pair = await gmi( l_fqme.rstrip(f'.{broker}'), ) @@ -613,7 +636,15 @@ async def open_trade_dialog( # update pos table from ledger history and provide a ``MktPair`` # lookup for internal position accounting calcs. - acnt.update_from_ledger(ledger) + acnt.update_from_ledger( + ledger, + + # NOTE: if the symcache fails on fqme lookup + # (either sycache not yet supported or not filled + # in) use manually constructed table from calling + # the `.get_mkt_info()` provider EP above. + _mktmap_table=mkt_by_fqme, + ) pp_msgs: list[BrokerdPosition] = [] pos: Position @@ -649,15 +680,15 @@ async def open_trade_dialog( return async with ( - data.open_feed( + open_feed( [fqme], loglevel=loglevel, ) as feed, ): # sanity check all the mkt infos for fqme, flume in feed.flumes.items(): - mkt = mkt_by_fqme[fqme] - assert mkt == flume.mkt + mkt = symcache.mktmaps.get(fqme) or mkt_by_fqme[fqme] + assert mkt == flume.mkt async with ( ctx.open_stream() as ems_stream, @@ -741,6 +772,7 @@ def norm_trade( tid: str, txdict: dict, pairs: dict[str, Struct], + symcache: SymbologyCache | None = None, ) -> Transaction: from pendulum import ( diff --git a/piker/clearing/_util.py b/piker/clearing/_util.py index d3c0fb8ef..3ba7f55f0 100644 --- a/piker/clearing/_util.py +++ b/piker/clearing/_util.py @@ -36,9 +36,6 @@ ) -# TODO: use this in other backends like kraken which currently has -# a less formalized version more or less: -# `apiflows[reqid].maps.append(status_msg.to_dict())` class OrderDialogs(Struct): ''' Order control dialog (and thus transaction) tracking via From d0f72bf269cdfc68831c883684b526c73904c6a6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:27:26 -0400 Subject: [PATCH 074/116] Wrap symcache loading into `.from_scratch()` Since we need it both when explicitly reloading **and** whenever either the file or data in the file doesn't exist. --- piker/data/_symcache.py | 49 +++++++++++++++++++++++++++++++---------- 1 file changed, 37 insertions(+), 12 deletions(-) diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index 3bf30d480..e6c94fa66 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -274,6 +274,28 @@ def from_dict( ) return cache + @staticmethod + async def from_scratch( + mod: ModuleType, + fp: Path, + **kwargs, + + ) -> SymbologyCache: + ''' + Generate (a) new symcache (contents) entirely from scratch + including all (TOML) serialized data and file. 
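+            # the (optional) mkt-info EP; resolves to `None` when
+            # the backend hasn't (yet) implemented it.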
+ + ''' + log.info(f'GENERATING symbology cache for `{mod.name}`') + cache = SymbologyCache( + mod=mod, + fp=fp, + **kwargs, + ) + await cache.load() + cache.write_config() + return cache + def search( self, pattern: str, @@ -370,17 +392,11 @@ async def open_symcache( reload or not cachefile.is_file() ): - cache = SymbologyCache( + cache = await SymbologyCache.from_scratch( mod=mod, fp=cachefile, ) - log.info(f'GENERATING symbology cache for `{mod.name}`') - await cache.load() - - # NOTE: only (re-)write if explicit reload or non-existing - cache.write_config() - else: log.info( f'Loading EXISTING `{mod.name}` symbology cache:\n' @@ -392,11 +408,20 @@ async def open_symcache( data: dict[str, dict] = tomllib.load(existing_fp) log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}') - cache = SymbologyCache.from_dict( - data, - mod=mod, - fp=cachefile, - ) + # if there's an empty file for some reason we need + # to do a full reload as well! + if not data: + cache = await SymbologyCache.from_scratch( + mod=mod, + fp=cachefile, + ) + else: + cache = SymbologyCache.from_dict( + data, + mod=mod, + fp=cachefile, + ) + # TODO: use a real profiling sys.. # https://github.com/pikers/piker/issues/337 From 9fbb75ce7f587ea1ac504a5056756a2e55526b49 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:38:25 -0400 Subject: [PATCH 075/116] Remove piker.trionics; already factored into `tractor` --- piker/toolz/__init__.py | 3 ++ piker/trionics.py | 80 ----------------------------------------- 2 files changed, 3 insertions(+), 80 deletions(-) delete mode 100644 piker/trionics.py diff --git a/piker/toolz/__init__.py b/piker/toolz/__init__.py index 4b8a9338e..3069504e6 100644 --- a/piker/toolz/__init__.py +++ b/piker/toolz/__init__.py @@ -28,6 +28,9 @@ timeit, ) +# TODO: other mods to include? +# - DROP .trionics, already moved into tractor +# - move in `piker.calc` __all__: list[str] = [ 'open_crash_handler', diff --git a/piker/trionics.py b/piker/trionics.py deleted file mode 100644 index 10f6a33d6..000000000 --- a/piker/trionics.py +++ /dev/null @@ -1,80 +0,0 @@ -# piker: trading gear for hackers -# Copyright (C) Tyler Goodlet (in stewardship of piker0) - -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. - -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -''' -sugarz for trio/tractor conc peeps. - -''' -from typing import AsyncContextManager -from typing import TypeVar -from contextlib import asynccontextmanager as acm - -import trio - - -# A regular invariant generic type -T = TypeVar("T") - - -async def _enter_and_sleep( - - mngr: AsyncContextManager[T], - to_yield: dict[int, T], - all_entered: trio.Event, - # task_status: TaskStatus[T] = trio.TASK_STATUS_IGNORED, - -) -> T: - '''Open the async context manager deliver it's value - to this task's spawner and sleep until cancelled. 
- - ''' - async with mngr as value: - to_yield[id(mngr)] = value - - if all(to_yield.values()): - all_entered.set() - - # sleep until cancelled - await trio.sleep_forever() - - -@acm -async def async_enter_all( - - *mngrs: list[AsyncContextManager[T]], - -) -> tuple[T]: - - to_yield = {}.fromkeys(id(mngr) for mngr in mngrs) - - all_entered = trio.Event() - - async with trio.open_nursery() as n: - for mngr in mngrs: - n.start_soon( - _enter_and_sleep, - mngr, - to_yield, - all_entered, - ) - - # deliver control once all managers have started up - await all_entered.wait() - yield tuple(to_yield.values()) - - # tear down all sleeper tasks thus triggering individual - # mngr ``__aexit__()``s. - n.cancel_scope.cancel() From 58cf7ce10ec9291585e4c4b11c8b55baa3409345 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:39:08 -0400 Subject: [PATCH 076/116] Add `norm_trade()` ep to validator warnings --- piker/data/validate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/piker/data/validate.py b/piker/data/validate.py index 321b42966..f54992cf6 100644 --- a/piker/data/validate.py +++ b/piker/data/validate.py @@ -81,8 +81,8 @@ class FeedInit(Struct, frozen=True): # live order control and trading 'brokerd': [ 'trades_dialogue', - # TODO: ledger normalizer helper? - # norm_trades(records: dict[str, Any]) -> TransactionLedger) + 'open_trade_dialog', # live order ctl + 'norm_trade', # ledger normalizer for txns ], } From 4436342d33428c507505d95dba5b21fb9ce5d949 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:40:09 -0400 Subject: [PATCH 077/116] Change ui stuff to use new `Position.cumsize` attr name --- piker/ui/_chart.py | 2 +- piker/ui/_interaction.py | 2 +- piker/ui/_ohlc.py | 14 ++++++++++---- piker/ui/_position.py | 4 ++-- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py index 1b410164b..28e258f3a 100644 --- a/piker/ui/_chart.py +++ b/piker/ui/_chart.py @@ -282,7 +282,7 @@ async def load_symbols( # TODO: probably stick this in some kinda `LooknFeel` API? 
         for tracker in self.rt_linked.mode.trackers.values():
             pp_nav = tracker.nav
-            if tracker.live_pp.size:
+            if tracker.live_pp.cumsize:
                 pp_nav.show()
                 pp_nav.hide_info()
             else:
diff --git a/piker/ui/_interaction.py b/piker/ui/_interaction.py
index 6cc6bee4b..63a5be8ff 100644
--- a/piker/ui/_interaction.py
+++ b/piker/ui/_interaction.py
@@ -261,7 +261,7 @@ async def handle_viewmode_kb_inputs(
                 # show the pp size label only if there is
                 # a non-zero pos existing
                 tracker = order_mode.current_pp
-                if tracker.live_pp.size:
+                if tracker.live_pp.cumsize:
                     tracker.nav.show()
 
                 # TODO: show pp config mini-params in status bar widget
diff --git a/piker/ui/_ohlc.py b/piker/ui/_ohlc.py
index 2bbec2533..c43926a11 100644
--- a/piker/ui/_ohlc.py
+++ b/piker/ui/_ohlc.py
@@ -227,7 +227,13 @@ def draw_last_datum(
         # last_row = ohlc[-1:]
 
         # individual values
-        last_row = o, h, l, last, i = ohlc[-1]
+        last_row = (
+            o,
+            h,
+            lo,
+            last,
+            i,
+        ) = ohlc[-1]
 
         # times = src_data['time']
         # if times[-1] - times[-2]:
@@ -254,17 +260,17 @@ def draw_last_datum(
             # writer is responsible for changing open on "first" volume of bar
             larm.setLine(larm.x1(), o, larm.x2(), o)
 
-            if l != h:  # noqa
+            if lo != h:  # noqa
 
                 if body is None:
                     body = self._last_bar_lines[0] = QLineF(
-                        i + bg, l,
+                        i + bg, lo,
                         i + step_size - bg, h,
                     )
                 else:
                     # update body
                     body.setLine(
-                        body.x1(), l,
+                        body.x1(), lo,
                         body.x2(), h,
                     )
diff --git a/piker/ui/_position.py b/piker/ui/_position.py
index 11aca3e9d..a664561ec 100644
--- a/piker/ui/_position.py
+++ b/piker/ui/_position.py
@@ -279,7 +279,7 @@ def apply_setting(
 
         elif key != 'account':  # numeric fields entry
             try:
-                value = puterize(value)
+                value: float = puterize(value)
             except ValueError as err:
                 log.error(err.args[0])
                 return False
@@ -292,7 +292,7 @@ def apply_setting(
                 lpos = mode.current_pp.live_pp
                 if alloc.size_unit == 'currency':
-                    dsize = lpos.dsize
+                    dsize: float = lpos.dsize
                     if dsize > value:
                         log.error(
                             f'limit must be > current pp: {dsize}'

From c00cf41541398bf56f8cacdbb4632f0456ac7624 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 26 Jul 2023 12:40:58 -0400
Subject: [PATCH 078/116] kraken: `norm_trade()` now must accept an optional
 symcache

---
 piker/brokers/kraken/ledger.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/piker/brokers/kraken/ledger.py b/piker/brokers/kraken/ledger.py
index 2dac90d94..6515568f3 100644
--- a/piker/brokers/kraken/ledger.py
+++ b/piker/brokers/kraken/ledger.py
@@ -35,7 +35,7 @@
     # MktPair,
 )
 from piker.data import (
-    # SymbologyCache,
+    SymbologyCache,
     Struct,
 )
 from .api import (
@@ -55,6 +55,7 @@ def norm_trade(
     # processing from `.accounting`, this will be the table loaded
     # into `SymbologyCache.pairs`.
     pairs: dict[str, Struct],
+    symcache: SymbologyCache | None = None,
 
 ) -> Transaction:
 

From 618c461bfbbca169d533fdfb8dab38429678a117 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 26 Jul 2023 12:41:15 -0400
Subject: [PATCH 079/116] binance: always upper case venue and expiry tokens

Since we need `.get_mkt_info()` to remain symmetric across calls with
different fqme inputs, and binance generally uses upper case for its
symbology keys, we always upper the FQME related tokens for both
symcaching and general search purposes.

Also don't set `_atype` on mkt pairs since it should be fully handled
via the dst asset loading in `Client._cache_pairs()`.
---
- or 'perp' in venue + or 'perp' in venue_lower ): if not mkt_mode: - mkt_mode: str = f'{venue}_futes' - - sectype: str = 'future' - if 'perp' in expiry: - sectype = 'perpetual_future' + mkt_mode: str = f'{venue_lower}_futes' async with open_cached_client( 'binance', @@ -377,7 +380,10 @@ async def get_mkt_info( expiry=expiry, venue=venue, broker='binance', - _atype=sectype, + + # NOTE: sectype is always taken from dst, see + # `MktPair.type_key` and `Client._cache_pairs()` + # _atype=sectype, ) return mkt, pair From bd0af7a4c0efc5b0daaa2e26c71f62cc713f172a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:44:41 -0400 Subject: [PATCH 080/116] kucoin: facepalm, use correct pair fields for price/size ticks --- piker/brokers/kucoin.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index b2953467b..a85b18f4e 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -74,6 +74,8 @@ log = get_logger(__name__) +_no_symcache: bool = True + class KucoinMktPair(Struct, frozen=True): ''' @@ -86,14 +88,14 @@ class KucoinMktPair(Struct, frozen=True): @property def price_tick(self) -> Decimal: - return Decimal(str(self.baseIncrement)) + return Decimal(str(self.quoteIncrement)) baseMaxSize: float baseMinSize: float @property def size_tick(self) -> Decimal: - return Decimal(str(self.baseMinSize)) + return Decimal(str(self.quoteMinSize)) enableTrading: bool feeCurrency: str @@ -207,6 +209,7 @@ def get_config() -> BrokerConfig | None: class Client: + def __init__(self) -> None: self._config: BrokerConfig | None = get_config() self._pairs: dict[str, KucoinMktPair] = {} From 64329d44e789226fcaf2befce782ac5db2b647b6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:48:19 -0400 Subject: [PATCH 081/116] Flip `tractor.breakpoint()`s to new `.pause()` --- piker/brokers/binance/broker.py | 2 +- piker/brokers/binance/feed.py | 2 +- piker/data/_sampling.py | 2 +- piker/data/history.py | 7 +++---- piker/storage/cli.py | 4 ++-- piker/storage/marketstore/__init__.py | 4 ++-- piker/ui/_display.py | 2 +- piker/ui/_interaction.py | 2 +- 8 files changed, 12 insertions(+), 13 deletions(-) diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py index 3bc665d6e..6dcf99b5e 100644 --- a/piker/brokers/binance/broker.py +++ b/piker/brokers/binance/broker.py @@ -323,7 +323,7 @@ async def open_trade_dialog( if balance > 0: balances[spot_asset] = (balance, last_update_t) - # await tractor.breakpoint() + # await tractor.pause() # @position response: # {'positions': [{'entryPrice': '0.0', diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 37c6a8bcf..6d6ae28dc 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -265,7 +265,7 @@ async def get_ohlc( ): inow = round(time.time()) if (inow - times[-1]) > 60: - await tractor.breakpoint() + await tractor.pause() start_dt = from_timestamp(times[0]) end_dt = from_timestamp(times[-1]) diff --git a/piker/data/_sampling.py b/piker/data/_sampling.py index 1db800153..3ddb39c1e 100644 --- a/piker/data/_sampling.py +++ b/piker/data/_sampling.py @@ -717,7 +717,7 @@ async def sample_and_broadcast( if cs.cancelled_caught: lags += 1 if lags > 10: - await tractor.breakpoint() + await tractor.pause() except ( trio.BrokenResourceError, diff --git a/piker/data/history.py b/piker/data/history.py index c2e89f7f8..3319293aa 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -141,8 +141,7 @@ 
async def shm_push_in_between( array = shm.array zeros = array[array['low'] == 0] if 0 < zeros.size < 1000: - tractor.breakpoint() - + await tractor.pause() async def start_backfill( @@ -255,7 +254,7 @@ async def start_backfill( # f"{mkt.fqme}: skipping duplicate frame @ {next_start_dt}" # ) # starts[start_dt] += 1 - # await tractor.breakpoint() + # await tractor.pause() # continue # elif starts[next_start_dt] > 6: @@ -484,7 +483,7 @@ async def back_load_from_tsdb( if storemod.name == 'nativedb': return - await tractor.breakpoint() + await tractor.pause() assert shm._first.value == 0 array = shm.array diff --git a/piker/storage/cli.py b/piker/storage/cli.py index 73cf737ee..e4daffa90 100644 --- a/piker/storage/cli.py +++ b/piker/storage/cli.py @@ -177,7 +177,7 @@ async def main(): # TODO: something better with tab completion.. # is there something more minimal but nearly as # functional as ipython? - await tractor.breakpoint() + await tractor.pause() trio.run(main) @@ -288,7 +288,7 @@ async def main(): # TODO: maybe only optionally enter this depending # on some CLI flags and/or gap detection? - await tractor.breakpoint() + await tractor.pause() # write to parquet file? if write_parquet: diff --git a/piker/storage/marketstore/__init__.py b/piker/storage/marketstore/__init__.py index 7466c06a1..de42de441 100644 --- a/piker/storage/marketstore/__init__.py +++ b/piker/storage/marketstore/__init__.py @@ -204,7 +204,7 @@ async def read_ohlcv( # break # except purerpc.grpclib.exceptions.UnknownError as err: # if 'snappy' in err.args: - # await tractor.breakpoint() + # await tractor.pause() # # indicate there is no history for this timeframe # log.exception( @@ -232,7 +232,7 @@ async def read_ohlcv( 'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG ' f'WIPING HISTORY FOR {ts}s' ) - await tractor.breakpoint() + await tractor.pause() # await self.delete_ts(fqme, timeframe) # try reading again.. diff --git a/piker/ui/_display.py b/piker/ui/_display.py index 27e701453..47704e3f0 100644 --- a/piker/ui/_display.py +++ b/piker/ui/_display.py @@ -462,7 +462,7 @@ async def graphics_update_loop( await trio.sleep(0) if ds.hist_vars['i_last'] < ds.hist_vars['i_last_append']: - await tractor.breakpoint() + await tractor.pause() # main real-time quotes update loop stream: tractor.MsgStream diff --git a/piker/ui/_interaction.py b/piker/ui/_interaction.py index 63a5be8ff..cf4687356 100644 --- a/piker/ui/_interaction.py +++ b/piker/ui/_interaction.py @@ -161,7 +161,7 @@ async def handle_viewmode_kb_inputs( vlm_chart = chart.linked.subplots['volume'] # noqa vlm_viz = vlm_chart.main_viz # noqa dvlm_pi = vlm_chart._vizs['dolla_vlm'].plot # noqa - await tractor.breakpoint() + await tractor.pause() view.interact_graphics_cycle() # SEARCH MODE # From 7802febd208fa207d2ab1f05e0bf498ec56010f2 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 12:56:06 -0400 Subject: [PATCH 082/116] Backfill history gaps with pre-gap close --- piker/data/history.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/piker/data/history.py b/piker/data/history.py index 3319293aa..0b4f3bf9d 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -140,8 +140,18 @@ async def shm_push_in_between( # memory... array = shm.array zeros = array[array['low'] == 0] - if 0 < zeros.size < 1000: - await tractor.pause() + + # always backfill gaps with the earliest (price) datum's + # value to avoid the y-ranger including zeros and completely + # stretching the y-axis.. 
+ if 0 < zeros.size: + zeros[[ + 'open', + 'high', + 'low', + 'close', + ]] = shm._array[zeros['index'][0] - 1]['close'] + # await tractor.pause() async def start_backfill( From f1289ccce2124320f9059eea2c4c40115e5ded8e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 14:55:17 -0400 Subject: [PATCH 083/116] ib: Oof, right need to create ledger entries too.. --- piker/brokers/ib/broker.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index e9c6c83f6..b32f4eaca 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -857,10 +857,9 @@ async def emit_pp_update( ledger: dict = ledgers[acctid] # NOTE: don't override flex/previous entries with new API - # ones, just update with new fields! + # ones, just update with new fields or create new entry. for tid, tdict in trades_by_id.items(): - # ledger.setdefault(tid, {}).update(tdict) - ledger[tid].update(tdict) + ledger.setdefault(tid, {}).update(tdict) # generate pp msgs and cross check with ib's positions data, relay # re-formatted pps as msgs to the ems. From 2c6ae5d994e6caaea9724b1d28208994c19f4373 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 15:37:59 -0400 Subject: [PATCH 084/116] Drop the `gap_dt_unit: str` column We don't need it in `detect_time_gaps()` since doing straight up datetime diffs in `polars` already has a humanized `str` representation but with higher precision like '2d 1h 24m 1s' B) --- piker/data/_timeseries.py | 35 ++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/piker/data/_timeseries.py b/piker/data/_timeseries.py index 2d73c263d..7055854ba 100644 --- a/piker/data/_timeseries.py +++ b/piker/data/_timeseries.py @@ -18,7 +18,8 @@ Financial time series processing utilities usually pertaining to OHLCV style sampled data. -Routines are generally implemented in either ``numpy`` or ``polars`` B) +Routines are generally implemented in either ``numpy`` or +``polars`` B) ''' from __future__ import annotations @@ -269,9 +270,14 @@ def detect_time_gaps( # gap_dt_unit: t_unit = 'minutes', # gap_thresh: int = 1, - # legacy stock mkts + # NOTE: legacy stock mkts have venue operating hours + # and thus gaps normally no more then 1-2 days at + # a time. + # XXX -> must be valid ``polars.Expr.dt.`` + # TODO: allow passing in a frame of operating hours + # durations/ranges for faster legit gap checks. gap_dt_unit: t_unit = 'days', - gap_thresh: int = 2, + gap_thresh: int = 1, ) -> pl.DataFrame: ''' @@ -281,18 +287,17 @@ def detect_time_gaps( actual missing data segments. 
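+        # overwrite the zeroed OHLC fields with the close from
+        # the last (real) bar written just before the gap.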
''' - dt_gap_col: str = f'{gap_dt_unit}_diff' - return with_dts( - df - ).filter( - pl.col('s_diff').abs() > expect_period - ).with_columns( - getattr( - pl.col('dt_diff').dt, - gap_dt_unit, # NOTE: must be valid ``Expr.dt.`` - )().alias(dt_gap_col) - ).filter( - pl.col(dt_gap_col).abs() > gap_thresh + return ( + with_dts(df) + .filter( + pl.col('s_diff').abs() > expect_period + ) + .filter( + getattr( + pl.col('dt_diff').dt, + gap_dt_unit, + )().abs() > gap_thresh + ) ) From 08e8990fe38fe0af168306fd0d0c245e25263f94 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 15:41:04 -0400 Subject: [PATCH 085/116] Do single `ShmArray.array` read on zero-time filtering --- piker/data/_timeseries.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/piker/data/_timeseries.py b/piker/data/_timeseries.py index 7055854ba..cc84f763e 100644 --- a/piker/data/_timeseries.py +++ b/piker/data/_timeseries.py @@ -209,8 +209,13 @@ def detect_null_time_gap( NOTE: for now presumes only ONE gap XD ''' - zero_pred: np.ndarray = shm.array['time'] == 0 - zero_t: np.ndarray = shm.array[zero_pred] + # ensure we read buffer state only once so that ShmArray rt + # circular-buffer updates don't cause a indexing/size mismatch. + array: np.ndarray = shm.array + + zero_pred: np.ndarray = array['time'] == 0 + zero_t: np.ndarray = array[zero_pred] + if zero_t.size: istart, iend = zero_t['index'][[0, -1]] start, end = shm._array['time'][ From d94ab9d5b2bf25234ad964cb187b00b86cc0e58a Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 15:43:48 -0400 Subject: [PATCH 086/116] order_mode: Only send cancels for dialogs that still exist --- piker/ui/order_mode.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index 6bf9f34a0..ddc2d6f04 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -641,13 +641,13 @@ def cancel_orders( group_key=True ) for oid in oids: - dialog: Dialog = self.dialogs[oid] - self.client.cancel_nowait(uuid=oid) - cancel_status_close = self.multistatus.open_status( - f'cancelling order {oid}', - group_key=key, - ) - dialog.last_status_close = cancel_status_close + if dialog := self.dialogs.get(oid): + self.client.cancel_nowait(uuid=oid) + cancel_status_close = self.multistatus.open_status( + f'cancelling order {oid}', + group_key=key, + ) + dialog.last_status_close = cancel_status_close def cancel_all_orders(self) -> None: ''' From 385561276be6c10848196fa8daaefea2c354d5a1 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 26 Jul 2023 15:45:55 -0400 Subject: [PATCH 087/116] Add gap detection into the `store ldshm` cmd --- piker/storage/cli.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/piker/storage/cli.py b/piker/storage/cli.py index e4daffa90..abde1b3ca 100644 --- a/piker/storage/cli.py +++ b/piker/storage/cli.py @@ -150,6 +150,7 @@ async def main(): open_piker_runtime( 'tsdb_polars_anal', # enable_modules=['piker.service._ahab'] + debug_mode=True, ), open_storage_client() as (mod, client), ): @@ -168,10 +169,10 @@ async def main(): src_df = await client.as_df(fqme, period) from piker.data import _timeseries as tsmod - df = tsmod.with_dts(src_df) + df: pl.DataFrame = tsmod.with_dts(src_df) gaps: pl.DataFrame = tsmod.detect_time_gaps(df) - if gaps: + if not gaps.is_empty(): print(f'Gaps found:\n{gaps}') # TODO: something better with tab completion.. 
@@ -216,7 +217,13 @@ def iter_dfs_from_shms(fqme: str) -> Generator[ # lookup array buffer size based on file suffix # being either .rt or .hist - size: int = sizes[shmfile.name.rsplit('.')[-1]] + key: str = shmfile.name.rsplit('.')[-1] + + # skip FSP buffers for now.. + if key not in sizes: + continue + + size: int = sizes[key] # attach to any shm buffer, load array into polars df, # write to local parquet file. @@ -271,24 +278,31 @@ async def main(): open_piker_runtime( 'polars_boi', enable_modules=['piker.data._sharedmem'], + debug_mode=True, ), ): - df: pl.DataFrame | None = None - for shmfile, shm, df in iter_dfs_from_shms(fqme): + for shmfile, shm, src_df in iter_dfs_from_shms(fqme): # compute ohlc properties for naming times: np.ndarray = shm.array['time'] secs: float = times[-1] - times[-2] if secs < 1.: - breakpoint() raise ValueError( f'Something is wrong with time period for {shm}:\n{times}' ) + from piker.data import _timeseries as tsmod + df: pl.DataFrame = tsmod.with_dts(src_df) + gaps: pl.DataFrame = tsmod.detect_time_gaps(df) + # TODO: maybe only optionally enter this depending # on some CLI flags and/or gap detection? - await tractor.pause() + if ( + not gaps.is_empty() + or secs > 2 + ): + await tractor.pause() # write to parquet file? if write_parquet: From b1edaf063961a390609c6db5960dc20212e3bef5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Sat, 29 Jul 2023 21:02:59 -0400 Subject: [PATCH 088/116] First draft position accounting with `polars` Took a little while to get right using declarative style but it's finally workin and seems (mostly correct B) Computes the ppu (price per unit) using the PnL since last net-zero-cumsize (aka the pnl from open to close) and uses it to calc the pnl-per-exit trade (using the ppu). Next up, bep (break even price both) per position and maybe since ledger start or an arbitrary ref point? --- piker/accounting/calc.py | 112 +++++++++++++++++++++++++++++++++++---- 1 file changed, 101 insertions(+), 11 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index 941fddb40..a787f049c 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -127,8 +127,8 @@ def ppu( # after sum should be the remaining size the new # "direction" (aka, long vs. short) for this clear. if sign_change: - clear_size = accum_size - abs_diff = abs(accum_size) + clear_size: float = accum_size + abs_diff: float = abs(accum_size) asize_h.append(0) ppu_h.append(0) @@ -149,7 +149,6 @@ def ppu( abs_diff > 0 and is_clear ): - cost_basis = ( # cost basis for this clear clear_price * abs(clear_size) @@ -159,12 +158,12 @@ def ppu( ) if asize_h: - size_last = abs(asize_h[-1]) - cb_last = ppu_h[-1] * size_last - ppu = (cost_basis + cb_last) / abs_new_size + size_last: float = abs(asize_h[-1]) + cb_last: float = ppu_h[-1] * size_last + ppu: float = (cost_basis + cb_last) / abs_new_size else: - ppu = cost_basis / abs_new_size + ppu: float = cost_basis / abs_new_size else: # TODO: for PPU we should probably handle txs out @@ -177,7 +176,7 @@ def ppu( # only the size changes not the price-per-unit # need to be updated since the ppu remains constant # and gets weighted by the new size. - ppu: float = ppu_h[-1] # set to previous value + ppu: float = ppu_h[-1] if ppu_h else 0 # set to previous value # extend with new rolling metric for this step ppu_h.append(ppu) @@ -436,7 +435,7 @@ def open_ledger_dfs( # - it'd be more ideal to use `ppt = df.groupby('fqme').agg([` # - ppu and bep calcs! 
for key in dfs: - df = dfs[key] + df = dfs[key].lazy() # TODO: pass back the current `Position` object loaded from # the account as well? Would provide incentive to do all @@ -444,8 +443,99 @@ def open_ledger_dfs( # bs_mktid: str = df[0]['bs_mktid'] # pos: Position = acnt.pps[bs_mktid] - dfs[key] = df.with_columns([ + df = dfs[key] = df.with_columns([ + pl.cumsum('size').alias('cumsize'), - ]) + + # amount of source asset "sent" (via buy txns in + # the market) to acquire the dst asset, PER txn. + # when this value is -ve (i.e. a sell operation) then + # the amount sent is actually "returned". + (pl.col('price') * pl.col('size')).alias('dst_bot'), + + ]).with_columns([ + + # rolling balance in src asset units + (pl.cumsum('dst_bot') * -1).alias('src_balance'), + + # "position operation type" in terms of increasing the + # amount in the dst asset (entering) or decreasing the + # amount in the dst asset (exiting). + pl.when( + pl.col('size').sign() == pl.col('cumsize').sign() + + ).then( + pl.lit('enter') # see above, but is just price * size per txn + + ).otherwise( + pl.when(pl.col('cumsize') == 0) + .then(pl.lit('exit_to_zero')) + .otherwise(pl.lit('exit')) + ).alias('descr'), + + ]).with_columns([ + + pl.when(pl.col('cumsize') == pl.lit(0)) + .then(pl.col('src_balance')) + .otherwise(pl.lit(None)) + .forward_fill() + .fill_null(0) + .alias('pnl_since_nz'), + + ]).with_columns([ + + pl.when(pl.col('cumsize') == 0) + .then(pl.col('pnl_since_nz')) + .otherwise(0) + .cumsum() + .alias('cum_pnl_since_nz') + + ]).with_columns([ + + pl.when( + pl.col('descr') == pl.lit('enter') + ).then( + ( + pl.col('pnl_since_nz') + - + # -ve on buys (and no prior profits) + pl.col('src_balance') + )# * pl.col('cumsize').sign() + / + pl.col('cumsize') + ).otherwise( + pl.lit(None) + ).forward_fill().alias('ppu_per_pos'), + + ]).with_columns([ + pl.when(pl.col('descr') != pl.lit('enter')) + .then( + (pl.col('price') - pl.col('ppu_per_pos')) * pl.col('size') * -1 + ) + .otherwise(0) + .alias('pnl_per_exit') + + ]).with_columns([ + + # ( + # # weight las ppu by the previous (txn row's) + # # cumsize since sells may have happpened. + # ((pl.col('last_ppu') + # * pl.col('last_cumsize')) + # - pl.col('net_pnl')) + # + pl.col('i_dst_bot') + # ) / + # pl.col('cumsize') + + # choose fields to emit for accounting puposes + ]).select([ + pl.exclude([ + 'tid', + 'dt', + 'expiry', + 'bs_mktid', + 'etype', + ]), + ]).collect() yield dfs, ledger From b37a447595aa9298aadf38712ea21a978661ee49 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 31 Jul 2023 16:01:31 -0400 Subject: [PATCH 089/116] Implement PPU and BEP and inject the ledger frames Since it appears impossible to compute the recurrence relations for PPU (at least sanely) without using embedded `polars.List` elements, this instead just implements price-per-unit and break-even-price calcs doing a plain-ol-for-loop imperative approach with logic branching. 
I burned wayy too much time trying to implement this in some kinda `polars` DF native way without luck, so hopefuly someone smarter can come in and make it work at some point xD Resolves a related bullet in #515 --- piker/accounting/calc.py | 157 ++++++++++++++++++++++++++------------- 1 file changed, 106 insertions(+), 51 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index a787f049c..79e478501 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -435,15 +435,19 @@ def open_ledger_dfs( # - it'd be more ideal to use `ppt = df.groupby('fqme').agg([` # - ppu and bep calcs! for key in dfs: - df = dfs[key].lazy() + # covert to lazy form (since apparently we might need it + # eventually ...) + df = dfs[key] + + ldf = df.lazy() # TODO: pass back the current `Position` object loaded from # the account as well? Would provide incentive to do all # this ledger loading inside a new async open_account(). # bs_mktid: str = df[0]['bs_mktid'] # pos: Position = acnt.pps[bs_mktid] - df = dfs[key] = df.with_columns([ + df = dfs[key] = ldf.with_columns([ pl.cumsum('size').alias('cumsize'), @@ -473,59 +477,23 @@ def open_ledger_dfs( .otherwise(pl.lit('exit')) ).alias('descr'), - ]).with_columns([ - - pl.when(pl.col('cumsize') == pl.lit(0)) - .then(pl.col('src_balance')) - .otherwise(pl.lit(None)) - .forward_fill() - .fill_null(0) - .alias('pnl_since_nz'), - - ]).with_columns([ - - pl.when(pl.col('cumsize') == 0) - .then(pl.col('pnl_since_nz')) - .otherwise(0) - .cumsum() - .alias('cum_pnl_since_nz') + (pl.col('cumsize').sign() == pl.col('size').sign()) + .alias('is_enter'), ]).with_columns([ - pl.when( - pl.col('descr') == pl.lit('enter') - ).then( - ( - pl.col('pnl_since_nz') - - - # -ve on buys (and no prior profits) - pl.col('src_balance') - )# * pl.col('cumsize').sign() - / - pl.col('cumsize') - ).otherwise( - pl.lit(None) - ).forward_fill().alias('ppu_per_pos'), + pl.lit(0, dtype=pl.Float64).alias('pos_ppu'), + pl.lit(0, dtype=pl.Float64).alias('per_exit_pnl'), + pl.lit(0, dtype=pl.Float64).alias('cum_pos_pnl'), + pl.lit(0, dtype=pl.Float64).alias('pos_bep'), + pl.lit(0, dtype=pl.Float64).alias('cum_ledger_pnl'), + pl.lit(None, dtype=pl.Float64).alias('ledger_bep'), - ]).with_columns([ - pl.when(pl.col('descr') != pl.lit('enter')) - .then( - (pl.col('price') - pl.col('ppu_per_pos')) * pl.col('size') * -1 - ) - .otherwise(0) - .alias('pnl_per_exit') - - ]).with_columns([ - - # ( - # # weight las ppu by the previous (txn row's) - # # cumsize since sells may have happpened. - # ((pl.col('last_ppu') - # * pl.col('last_cumsize')) - # - pl.col('net_pnl')) - # + pl.col('i_dst_bot') - # ) / - # pl.col('cumsize') + # TODO: instead of the iterative loop below i guess we + # could try using embedded lists to track which txns + # are part of which ppu / bep calcs? Not sure this will + # look any better nor be any more performant though xD + # pl.lit([[0]], dtype=pl.List).alias('list'), # choose fields to emit for accounting puposes ]).select([ @@ -538,4 +506,91 @@ def open_ledger_dfs( ]), ]).collect() + # compute recurrence relations for ppu and bep + last_ppu: float = 0 + last_cumsize: float = 0 + last_ledger_pnl: float = 0 + last_pos_pnl: float = 0 + + # imperatively compute the PPU (price per unit) and BEP + # (break even price) iteratively over the ledger, oriented + # to each position state. 
+ for i, row in enumerate(df.iter_rows(named=True)): + cumsize: float = row['cumsize'] + is_enter: bool = row['is_enter'] + + if not is_enter: + + pnl = df[i, 'per_exit_pnl'] = ( + (last_ppu - row['price']) + * + row['size'] + ) + + last_ledger_pnl = df[i, 'cum_ledger_pnl'] = last_ledger_pnl + pnl + + # reset per-position cum PnL + if last_cumsize != 0: + last_pos_pnl = df[i, 'cum_pos_pnl'] = last_pos_pnl + pnl + else: + last_pos_pnl: float = 0 + + if cumsize == 0: + ppu: float = 0 + last_ppu: float = 0 + else: + ppu: float = last_ppu + + if abs(cumsize) > 0: + # compute the "break even price" that + # when the remaining cumsize is liquidated at + # this price the net-pnl on the current position + # will result in ZERO pnl from open to close B) + ledger_bep: float = ( + ( + (ppu * cumsize) + - + last_ledger_pnl + ) / cumsize + ) + df[i, 'ledger_bep'] = ledger_bep + + pos_bep: float = ( + ( + (ppu * cumsize) + - + last_pos_pnl + ) / cumsize + ) + df[i, 'pos_bep'] = pos_bep + + elif is_enter: + + ppu: float = ( + ( + (last_ppu * last_cumsize) + + + (row['price'] * row['size']) + ) + / + cumsize + ) + + last_ppu: float = ppu + + # TODO: case where we "enter more" dst asset units + # (increase position state) -> the bep needs to be + # recomputed based on new ppu.. + pos_bep: float = ( + ( + (ppu * cumsize) + - + last_pos_pnl + ) / cumsize + ) + df[i, 'ledger_bep'] = df[i, 'pos_bep'] = pos_bep + + df[i, 'pos_ppu'] = ppu + last_cumsize: float = cumsize + yield dfs, ledger From a088ebf5e28d75c6a5d1fdb6796ade58c6f58dcc Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 31 Jul 2023 17:18:28 -0400 Subject: [PATCH 090/116] Use inf row/col repr for debugging atm --- piker/accounting/cli.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py index 4106cb7e6..79f186c83 100644 --- a/piker/accounting/cli.py +++ b/piker/accounting/cli.py @@ -268,7 +268,8 @@ def disect( # actual ledger instance ldgr: TransactionLedger - pl.Config.set_tbl_cols(16) + pl.Config.set_tbl_cols(-1) + pl.Config.set_tbl_rows(-1) with ( open_crash_handler(), open_ledger_dfs( From 100be54641970e1f3a6b8d38cd87fb79978fb971 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 31 Jul 2023 17:21:11 -0400 Subject: [PATCH 091/116] data.history: add TODO for non-zero epochs and some typing --- piker/data/history.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/piker/data/history.py b/piker/data/history.py index 0b4f3bf9d..997a902cb 100644 --- a/piker/data/history.py +++ b/piker/data/history.py @@ -644,6 +644,10 @@ async def tsdb_backfill( task_status.started() return + # TODO: fill in non-zero epoch time values ALWAYS! + # hist_shm._array['time'] = np.arange( + # start= + # NOTE: removed for now since it'll always break # on the first 60s of the venue open.. # times: np.ndarray = array['time'] @@ -755,7 +759,6 @@ async def tsdb_backfill( ) ) - # if len(hist_shm.array) < 2: # TODO: there's an edge case here to solve where if the last # frame before market close (at least on ib) was pushed and @@ -774,7 +777,7 @@ async def tsdb_backfill( finally: return - # IF we need to continue backloading incrementall from the + # IF we need to continue backloading incrementally from the # tsdb client.. 
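+            # the recurrence relations being stepped per row by
+            # the branch logic below, roughly:
+            #   enters:  ppu = (last_ppu*last_cumsize
+            #                   + price*size) / cumsize
+            #   exits:   pnl = (last_ppu - price) * size
+            #   open:    bep = (ppu*cumsize - cum_pnl) / cumsize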
tn.start_soon( back_load_from_tsdb, @@ -836,10 +839,9 @@ async def manage_history( # from tractor._state import _runtime_vars # port = _runtime_vars['_root_mailbox'][1] - uid = tractor.current_actor().uid + uid: tuple = tractor.current_actor().uid name, uuid = uid - service = name.rstrip(f'.{mod.name}') - + service: str = name.rstrip(f'.{mod.name}') fqme: str = mkt.get_fqme(delim_char='') # (maybe) allocate shm array for this broker/symbol which will @@ -878,8 +880,8 @@ async def manage_history( # (for now) set the rt (hft) shm array with space to prepend # only a few days worth of 1s history. - days = 2 - start_index = days*_secs_in_day + days: int = 2 + start_index: int = days*_secs_in_day rt_shm._first.value = start_index rt_shm._last.value = start_index rt_zero_index = rt_shm.index - 1 @@ -892,7 +894,6 @@ async def manage_history( open_history_client = getattr( mod, 'open_history_client', - None, ) assert open_history_client From 5d24b5defbbc5a4e4dd5cc034e93b7c9309485d7 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 31 Jul 2023 17:32:49 -0400 Subject: [PATCH 092/116] Swap branch order for enter/exit Also fix bug since we always need to reset cum_pos_pnl after a `exit_to_zero` case. --- piker/accounting/calc.py | 90 ++++++++++++++++++++++++++-------------- 1 file changed, 58 insertions(+), 32 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index 79e478501..c60a0db99 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -511,6 +511,7 @@ def open_ledger_dfs( last_cumsize: float = 0 last_ledger_pnl: float = 0 last_pos_pnl: float = 0 + last_is_enter: bool = False # imperatively compute the PPU (price per unit) and BEP # (break even price) iteratively over the ledger, oriented @@ -519,7 +520,59 @@ def open_ledger_dfs( cumsize: float = row['cumsize'] is_enter: bool = row['is_enter'] - if not is_enter: + # ALWAYS reset per-position cum PnL + if last_cumsize == 0: + last_pos_pnl: float = 0 + + # a "position size INCREASING" transaction which "makes + # larger", in src asset unit terms, the trade's + # side-size of the destination asset: + # - "buying" (more) units of the dst asset + # - "selling" (more short) units of the dst asset + if is_enter: + + ppu: float = ( + ( + (last_ppu * last_cumsize) + + + (row['price'] * row['size']) + ) / + cumsize + ) + + pos_bep: float = ppu + # When we "enter more" dst asset units (increase + # position state) AFTER having exitted some units + # the bep needs to be RECOMPUTED based on new ppu + # such that liquidation of the cumsize at the bep + # price results in a zero-pnl for the existing + # position (since the last one). + if ( + not last_is_enter + and last_cumsize != 0 + ): + + pos_bep: float = ( + ( + (ppu * cumsize) + - + last_pos_pnl + ) + / + cumsize + ) + + df[i, 'ledger_bep'] = df[i, 'pos_bep'] = pos_bep + + # a "position size DECREASING" transaction which "makes + # smaller" the trade's side-size of the destination + # asset: + # - selling previously bought units of the dst asset + # (aka 'closing' a long position). + # - buying previously borrowed and sold (short) units + # of the dst asset (aka 'covering'/'closing' a short + # position). 
+ else: pnl = df[i, 'per_exit_pnl'] = ( (last_ppu - row['price']) @@ -529,11 +582,7 @@ def open_ledger_dfs( last_ledger_pnl = df[i, 'cum_ledger_pnl'] = last_ledger_pnl + pnl - # reset per-position cum PnL - if last_cumsize != 0: - last_pos_pnl = df[i, 'cum_pos_pnl'] = last_pos_pnl + pnl - else: - last_pos_pnl: float = 0 + last_pos_pnl = df[i, 'cum_pos_pnl'] = last_pos_pnl + pnl if cumsize == 0: ppu: float = 0 @@ -541,6 +590,7 @@ def open_ledger_dfs( else: ppu: float = last_ppu + if abs(cumsize) > 0: # compute the "break even price" that # when the remaining cumsize is liquidated at @@ -564,33 +614,9 @@ def open_ledger_dfs( ) df[i, 'pos_bep'] = pos_bep - elif is_enter: - - ppu: float = ( - ( - (last_ppu * last_cumsize) - + - (row['price'] * row['size']) - ) - / - cumsize - ) - - last_ppu: float = ppu - - # TODO: case where we "enter more" dst asset units - # (increase position state) -> the bep needs to be - # recomputed based on new ppu.. - pos_bep: float = ( - ( - (ppu * cumsize) - - - last_pos_pnl - ) / cumsize - ) - df[i, 'ledger_bep'] = df[i, 'pos_bep'] = pos_bep - df[i, 'pos_ppu'] = ppu + last_ppu: float = ppu last_cumsize: float = cumsize + last_is_enter: bool = is_enter yield dfs, ledger From 85ae180f8f8866ccef0e2512d690f6146a41c5d0 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 31 Jul 2023 17:48:03 -0400 Subject: [PATCH 093/116] Factor df conversion into lone routine: `ledger_to_dfs()` --- piker/accounting/calc.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index c60a0db99..aedc18fcd 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -353,13 +353,11 @@ def open_ledger_dfs( can update the ledger on exit. ''' - from ._ledger import ( - open_trade_ledger, - ) if not ledger: import time from tractor._debug import open_crash_handler + from ._ledger import open_trade_ledger now = time.time() with ( @@ -388,7 +386,16 @@ def open_ledger_dfs( # allow_reload=True, # ) + yield ledger_to_dfs(ledger), ledger + + +def ledger_to_dfs( + ledger: TransactionLedger, + +) -> dict[str, pl.DataFrame]: + txns: dict[str, Transaction] = ledger.to_txns() + # ldf = pl.DataFrame( # list(txn.to_dict() for txn in txns.values()), ldf = pl.from_dicts( @@ -615,8 +622,10 @@ def open_ledger_dfs( df[i, 'pos_bep'] = pos_bep df[i, 'pos_ppu'] = ppu + + # keep backrefs to suffice reccurence relation last_ppu: float = ppu last_cumsize: float = cumsize last_is_enter: bool = is_enter - yield dfs, ledger + return dfs From 29bab02c648745447b74e135c830fbe0c6ae35af Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 1 Aug 2023 09:12:52 -0400 Subject: [PATCH 094/116] Pass sync code flag in flex report processor --- piker/brokers/ib/_flex_reports.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/piker/brokers/ib/_flex_reports.py b/piker/brokers/ib/_flex_reports.py index 2f34d037a..e8c22ecb1 100644 --- a/piker/brokers/ib/_flex_reports.py +++ b/piker/brokers/ib/_flex_reports.py @@ -159,7 +159,11 @@ def load_flex_trades( for acctid in trades_by_account: trades_by_id = trades_by_account[acctid] - with open_trade_ledger('ib', acctid) as ledger_dict: + with open_trade_ledger( + 'ib', + acctid, + allow_from_sync_code=True, + ) as ledger_dict: tid_delta = set(trades_by_id) - set(ledger_dict) log.info( 'New trades detected\n' From b6a705852dda804e6f2dd1f4f84c9c677a5eddd6 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 1 Aug 2023 15:42:30 -0400 Subject: [PATCH 095/116] Handle txn 
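+                # NOTE: the sign convention works out since `size`
+                # is -ve when selling down a long (and +ve when
+                # buying back a short) so `(ppu - price) * size`
+                # is +ve pnl only when the exit improves on the ppu.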
costs in BEP, factor enter/exit blocks and df row assignments B) --- piker/accounting/calc.py | 181 +++++++++++++++++++++++---------------- 1 file changed, 107 insertions(+), 74 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index aedc18fcd..eefe821d7 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -386,12 +386,18 @@ def open_ledger_dfs( # allow_reload=True, # ) - yield ledger_to_dfs(ledger), ledger + yield ledger_to_dfs(ledger), ledger def ledger_to_dfs( ledger: TransactionLedger, + # include transaction cost in breakeven price + # and presume the worst case of the same cost + # to exit this transaction (even though in reality + # it will be dynamic based on exit stratetgy). + cost_scalar: float = 1, + ) -> dict[str, pl.DataFrame]: txns: dict[str, Transaction] = ledger.to_txns() @@ -462,12 +468,16 @@ def ledger_to_dfs( # the market) to acquire the dst asset, PER txn. # when this value is -ve (i.e. a sell operation) then # the amount sent is actually "returned". - (pl.col('price') * pl.col('size')).alias('dst_bot'), + ( + (pl.col('price') * pl.col('size')) + + + pl.col('cost') + ).alias('dst_bot'), ]).with_columns([ # rolling balance in src asset units - (pl.cumsum('dst_bot') * -1).alias('src_balance'), + (pl.col('dst_bot').cumsum() * -1).alias('src_balance'), # "position operation type" in terms of increasing the # amount in the dst asset (entering) or decreasing the @@ -506,7 +516,7 @@ def ledger_to_dfs( ]).select([ pl.exclude([ 'tid', - 'dt', + # 'dt', 'expiry', 'bs_mktid', 'etype', @@ -518,114 +528,137 @@ def ledger_to_dfs( last_cumsize: float = 0 last_ledger_pnl: float = 0 last_pos_pnl: float = 0 - last_is_enter: bool = False + # last_is_enter: bool = False # TODO: drop right? # imperatively compute the PPU (price per unit) and BEP # (break even price) iteratively over the ledger, oriented - # to each position state. + # around each position state: a state of split balances in + # > 1 asset. for i, row in enumerate(df.iter_rows(named=True)): + cumsize: float = row['cumsize'] is_enter: bool = row['is_enter'] + price: float = row['price'] + size: float = row['size'] # ALWAYS reset per-position cum PnL if last_cumsize == 0: last_pos_pnl: float = 0 - # a "position size INCREASING" transaction which "makes - # larger", in src asset unit terms, the trade's - # side-size of the destination asset: + # the profit is ALWAYS decreased, aka made a "loss" + # by the constant fee charged by the txn provider! + # TODO: support exit txn virtual cost which we + # resolve on exit txns incrementally? 
+ pnl: float = -1 * row['cost'] + + # a "position size INCREASING" or ENTER transaction + # which "makes larger", in src asset unit terms, the + # trade's side-size of the destination asset: # - "buying" (more) units of the dst asset # - "selling" (more short) units of the dst asset if is_enter: + # a cumulative mean of the price-per-unit acquired + # in the destination asset: + # https://en.wikipedia.org/wiki/Moving_average#Cumulative_average + # You could also think of this measure more + # generally as an exponential mean with `alpha + # = 1/N` where `N` is the current number of txns + # included in the "position" defining set: + # https://en.wikipedia.org/wiki/Exponential_smoothing ppu: float = ( ( (last_ppu * last_cumsize) + - (row['price'] * row['size']) + (price * size) ) / cumsize ) - pos_bep: float = ppu - # When we "enter more" dst asset units (increase - # position state) AFTER having exitted some units - # the bep needs to be RECOMPUTED based on new ppu - # such that liquidation of the cumsize at the bep - # price results in a zero-pnl for the existing - # position (since the last one). - if ( - not last_is_enter - and last_cumsize != 0 - ): - - pos_bep: float = ( - ( - (ppu * cumsize) - - - last_pos_pnl - ) - / - cumsize - ) - - df[i, 'ledger_bep'] = df[i, 'pos_bep'] = pos_bep - - # a "position size DECREASING" transaction which "makes - # smaller" the trade's side-size of the destination - # asset: + # a "position size DECREASING" or EXIT transaction + # which "makes smaller" the trade's side-size of the + # destination asset: # - selling previously bought units of the dst asset # (aka 'closing' a long position). # - buying previously borrowed and sold (short) units # of the dst asset (aka 'covering'/'closing' a short # position). else: + # only changes on position size increasing txns + ppu: float = last_ppu - pnl = df[i, 'per_exit_pnl'] = ( - (last_ppu - row['price']) - * - row['size'] - ) + # include the per-txn profit or loss given we are + # "closing" the position with this txn. + pnl += (last_ppu - price) * size - last_ledger_pnl = df[i, 'cum_ledger_pnl'] = last_ledger_pnl + pnl + # cumulative PnLs per txn + last_ledger_pnl = ( + last_ledger_pnl + pnl + ) + last_pos_pnl = df[i, 'cum_pos_pnl'] = ( + last_pos_pnl + pnl + ) - last_pos_pnl = df[i, 'cum_pos_pnl'] = last_pos_pnl + pnl + if cumsize == 0: + last_ppu = ppu = 0 - if cumsize == 0: - ppu: float = 0 - last_ppu: float = 0 - else: - ppu: float = last_ppu - - - if abs(cumsize) > 0: - # compute the "break even price" that - # when the remaining cumsize is liquidated at - # this price the net-pnl on the current position - # will result in ZERO pnl from open to close B) - ledger_bep: float = ( - ( - (ppu * cumsize) - - - last_ledger_pnl - ) / cumsize - ) - df[i, 'ledger_bep'] = ledger_bep - - pos_bep: float = ( - ( - (ppu * cumsize) - - - last_pos_pnl - ) / cumsize - ) - df[i, 'pos_bep'] = pos_bep + # compute the "break even price" that + # when the remaining cumsize is liquidated at + # this price the net-pnl on the current position + # will result in ZERO pnl from open to close B) + if ( + abs(cumsize) > 0 # non-exit-to-zero position txn + ): + + ledger_bep = pos_bep = ppu + + # TODO: now that this + # recalc-bep-on-enters-based-on-new-ppu was + # factored out of the `is_enter` block above we can + # drop this if condition right? 
+ # + # When we "enter more" dst asset units (aka + # increase position state) AFTER having exited some + # units (aka decreasing the pos size some) the bep + # needs to be RECOMPUTED based on new ppu such that + # liquidation of the cumsize at the bep price + # results in a zero-pnl for the existing position + # (since the last one). + # if ( + # not last_is_enter + # and last_cumsize != 0 + # ): + + ledger_bep: float = ( + ( + (ppu * cumsize) + - + (last_ledger_pnl * copysign(1, cumsize)) + ) / cumsize + ) + + # for position lifetime BEP we never can have + # a valid value once the position is "closed" + # / full exitted Bo + pos_bep: float = ( + ( + (ppu * cumsize) + - + (last_pos_pnl * copysign(1, cumsize)) + ) / cumsize + ) + # inject DF row with all values df[i, 'pos_ppu'] = ppu + df[i, 'per_exit_pnl'] = pnl + df[i, 'cum_pos_pnl'] = last_pos_pnl + df[i, 'pos_bep'] = pos_bep + df[i, 'cum_ledger_pnl'] = last_ledger_pnl + df[i, 'ledger_bep'] = ledger_bep # keep backrefs to suffice reccurence relation last_ppu: float = ppu last_cumsize: float = cumsize - last_is_enter: bool = is_enter + # last_is_enter: bool = is_enter # TODO: drop right? return dfs From 1e3a4ca36d046a63a2dbdcc9113f98a5242db391 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Tue, 1 Aug 2023 15:49:56 -0400 Subject: [PATCH 096/116] Drop commented, now deprecated edge case notes :surfer: --- piker/accounting/calc.py | 37 ++++++++++++------------------------- 1 file changed, 12 insertions(+), 25 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index eefe821d7..ec26910d4 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -602,33 +602,14 @@ def ledger_to_dfs( if cumsize == 0: last_ppu = ppu = 0 - # compute the "break even price" that - # when the remaining cumsize is liquidated at - # this price the net-pnl on the current position - # will result in ZERO pnl from open to close B) + # compute the BEP: "break even price", a value that + # determines at what price the remaining cumsize can be + # liquidated such that the net-PnL on the current + # position will result in ZERO gain or loss from open + # to close including all txn costs B) if ( abs(cumsize) > 0 # non-exit-to-zero position txn ): - - ledger_bep = pos_bep = ppu - - # TODO: now that this - # recalc-bep-on-enters-based-on-new-ppu was - # factored out of the `is_enter` block above we can - # drop this if condition right? - # - # When we "enter more" dst asset units (aka - # increase position state) AFTER having exited some - # units (aka decreasing the pos size some) the bep - # needs to be RECOMPUTED based on new ppu such that - # liquidation of the cumsize at the bep price - # results in a zero-pnl for the existing position - # (since the last one). - # if ( - # not last_is_enter - # and last_cumsize != 0 - # ): - ledger_bep: float = ( ( (ppu * cumsize) @@ -637,6 +618,13 @@ def ledger_to_dfs( ) / cumsize ) + # NOTE: when we "enter more" dst asset units (aka + # increase position state) AFTER having exited some + # units (aka decreasing the pos size some) the bep + # needs to be RECOMPUTED based on new ppu such that + # liquidation of the cumsize at the bep price + # results in a zero-pnl for the existing position + # (since the last one). 
# for position lifetime BEP we never can have # a valid value once the position is "closed" # / full exitted Bo @@ -659,6 +647,5 @@ def ledger_to_dfs( # keep backrefs to suffice reccurence relation last_ppu: float = ppu last_cumsize: float = cumsize - # last_is_enter: bool = is_enter # TODO: drop right? return dfs From 7ecf2bd89a4e806432adcc76e0e76b909fdb9a1e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 2 Aug 2023 17:25:23 -0400 Subject: [PATCH 097/116] Guess exit transaction costs for BEP prediction In order to attempt giving the user a realistic prediction for a BEP per txn we need to model what the (worst case) anticipated exit txn costs will be during the equivalent, paired entries. For now we use a simple "symmetric cost prediction" model where we assume the exit costs will be simply the same as the enter txn costs and thus on every entry we apply 2x the enter txn cost; on exit txns we then unroll these predictions by keeping a cumulative sum of the cost-per-unit and reversing the charges based on applying that mean to the current exit txn's size. Once unrolled we apply the actual exit txn cost received from the broker-provider. --- piker/accounting/calc.py | 93 ++++++++++++++++++++++++++++++---------- 1 file changed, 71 insertions(+), 22 deletions(-) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index ec26910d4..6b6bd7dc9 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -392,12 +392,6 @@ def open_ledger_dfs( def ledger_to_dfs( ledger: TransactionLedger, - # include transaction cost in breakeven price - # and presume the worst case of the same cost - # to exit this transaction (even though in reality - # it will be dynamic based on exit stratetgy). - cost_scalar: float = 1, - ) -> dict[str, pl.DataFrame]: txns: dict[str, Transaction] = ledger.to_txns() @@ -471,7 +465,7 @@ def ledger_to_dfs( ( (pl.col('price') * pl.col('size')) + - pl.col('cost') + (pl.col('cost')) # * pl.col('size').sign()) ).alias('dst_bot'), ]).with_columns([ @@ -499,8 +493,10 @@ def ledger_to_dfs( ]).with_columns([ + pl.lit(0, dtype=pl.Utf8).alias('virt_cost'), + pl.lit(0, dtype=pl.Float64).alias('applied_cost'), pl.lit(0, dtype=pl.Float64).alias('pos_ppu'), - pl.lit(0, dtype=pl.Float64).alias('per_exit_pnl'), + pl.lit(0, dtype=pl.Float64).alias('per_txn_pnl'), pl.lit(0, dtype=pl.Float64).alias('cum_pos_pnl'), pl.lit(0, dtype=pl.Float64).alias('pos_bep'), pl.lit(0, dtype=pl.Float64).alias('cum_ledger_pnl'), @@ -510,7 +506,7 @@ def ledger_to_dfs( # could try using embedded lists to track which txns # are part of which ppu / bep calcs? Not sure this will # look any better nor be any more performant though xD - # pl.lit([[0]], dtype=pl.List).alias('list'), + # pl.lit([[0]], dtype=pl.List(pl.Float64)).alias('list'), # choose fields to emit for accounting puposes ]).select([ @@ -528,7 +524,7 @@ def ledger_to_dfs( last_cumsize: float = 0 last_ledger_pnl: float = 0 last_pos_pnl: float = 0 - # last_is_enter: bool = False # TODO: drop right? + virt_costs: list[float, float] = [0., 0.] # imperatively compute the PPU (price per unit) and BEP # (break even price) iteratively over the ledger, oriented @@ -541,16 +537,17 @@ def ledger_to_dfs( price: float = row['price'] size: float = row['size'] + # the profit is ALWAYS decreased, aka made a "loss" + # by the constant fee charged by the txn provider! + # see below in final PnL calculation and row element + # set. 
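+            # (per the "symmetric cost prediction" model in the
+            # msg above: enters get charged 2x their fee up-front
+            # and exits unwind that guess further below.)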
+ txn_cost: float = row['cost'] + pnl: float = 0 + # ALWAYS reset per-position cum PnL if last_cumsize == 0: last_pos_pnl: float = 0 - # the profit is ALWAYS decreased, aka made a "loss" - # by the constant fee charged by the txn provider! - # TODO: support exit txn virtual cost which we - # resolve on exit txns incrementally? - pnl: float = -1 * row['cost'] - # a "position size INCREASING" or ENTER transaction # which "makes larger", in src asset unit terms, the # trade's side-size of the destination asset: @@ -558,6 +555,29 @@ def ledger_to_dfs( # - "selling" (more short) units of the dst asset if is_enter: + # Naively include transaction cost in breakeven + # price and presume the worst case of the + # exact-same-cost-to-exit this transaction's worth + # of size even though in reality it will be dynamic + # based on exit strategy, price, liquidity, etc.. + virt_cost: float = txn_cost + + # cpu: float = cost / size + # cummean of the cost-per-unit used for modelling + # a projected future exit cost which we immediately + # include in the costs incorporated to BEP on enters + last_cum_costs_size, last_cpu = virt_costs + cum_costs_size: float = last_cum_costs_size + abs(size) + cumcpu = ( + (last_cpu * last_cum_costs_size) + + + txn_cost + ) / cum_costs_size + virt_costs = [cum_costs_size, cumcpu] + + txn_cost = txn_cost + virt_cost + df[i, 'virt_cost'] = f'{-virt_cost} FROM {cumcpu}@{cum_costs_size}' + # a cumulative mean of the price-per-unit acquired # in the destination asset: # https://en.wikipedia.org/wiki/Moving_average#Cumulative_average @@ -587,16 +607,44 @@ def ledger_to_dfs( # only changes on position size increasing txns ppu: float = last_ppu - # include the per-txn profit or loss given we are - # "closing" the position with this txn. - pnl += (last_ppu - price) * size + # UNWIND IMPLIED COSTS FROM ENTRIES + # => Reverse the virtual/modelled (2x predicted) txn + # cost that was included in the least-recently + # entered txn that is still part of the current CSi + # set. + # => we look up the cost-per-unit cumsum and apply + # if over the current txn size (by multiplication) + # and then reverse that previusly applied cost on + # the txn_cost for this record. + # + # NOTE: current "model" is just to previously assumed 2x + # the txn cost for a matching enter-txn's + # cost-per-unit; we then immediately reverse this + # prediction and apply the real cost received here. + last_cum_costs_size, last_cpu = virt_costs + prev_virt_cost: float = last_cpu * abs(size) + txn_cost: float = txn_cost - prev_virt_cost # +ve thus a "reversal" + cum_costs_size: float = last_cum_costs_size - abs(size) + virt_costs = [cum_costs_size, last_cpu] + + df[i, 'virt_cost'] = ( + f'{-prev_virt_cost} FROM {last_cpu}@{cum_costs_size}' + ) + + # the per-txn profit or loss (PnL) given we are + # (partially) "closing"/"exiting" the position via + # this txn. 
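+                # (again `size` is signed -ve on sells: a long
+                # exited above its ppu, or a short covered below
+                # its ppu, books +ve pnl here.)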
+ pnl: float = (last_ppu - price) * size + + # always subtract txn cost from total txn pnl + txn_pnl: float = pnl - txn_cost # cumulative PnLs per txn last_ledger_pnl = ( - last_ledger_pnl + pnl + last_ledger_pnl + txn_pnl ) last_pos_pnl = df[i, 'cum_pos_pnl'] = ( - last_pos_pnl + pnl + last_pos_pnl + txn_pnl ) if cumsize == 0: @@ -638,7 +686,8 @@ def ledger_to_dfs( # inject DF row with all values df[i, 'pos_ppu'] = ppu - df[i, 'per_exit_pnl'] = pnl + df[i, 'per_txn_pnl'] = txn_pnl + df[i, 'applied_cost'] = -txn_cost df[i, 'cum_pos_pnl'] = last_pos_pnl df[i, 'pos_bep'] = pos_bep df[i, 'cum_ledger_pnl'] = last_ledger_pnl From fff610fa8d2206e55cc02992afec18fe3f2c6262 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 2 Aug 2023 17:33:02 -0400 Subject: [PATCH 098/116] Fix `PositionTracker.pane` attr resolve bug.. --- piker/ui/_position.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/piker/ui/_position.py b/piker/ui/_position.py index a664561ec..8b07d80a1 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -850,7 +850,10 @@ def pane(self) -> FieldsForm: Return handle to pp side pane form. ''' - return self.chart.linked.godwidget.pp_pane + chart: ChartPlotWidget = next( + iter(self.nav.charts.values()) + ) + return chart.linked.godwidget.pp_pane def update_from_pp( self, From 94ebe1e87ea120a20943e7721ac9617801d0fcba Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 2 Aug 2023 20:41:56 -0400 Subject: [PATCH 099/116] Add some new hotkey maps for chart zoom and pane hiding --- piker/ui/_chart.py | 4 +- piker/ui/_forms.py | 1 - piker/ui/_fsp.py | 1 + piker/ui/_interaction.py | 157 ++++++++++++++++++++++++++++++++------- 4 files changed, 134 insertions(+), 29 deletions(-) diff --git a/piker/ui/_chart.py b/piker/ui/_chart.py index 28e258f3a..e00ad70ba 100644 --- a/piker/ui/_chart.py +++ b/piker/ui/_chart.py @@ -406,6 +406,7 @@ def set_sidepane( ) self._sidepane = sidepane + @property def sidepane(self) -> FieldsForm | SearchWidget: return self._sidepane @@ -495,7 +496,7 @@ def set_split_sizes( Set the proportion of space allocated for linked subcharts. 
''' - ln = len(self.subplots) or 1 + ln: int = len(self.subplots) or 1 # proportion allocated to consumer subcharts if not prop: @@ -925,6 +926,7 @@ def __init__( self.useOpenGL(use_open_gl) self.name = name self.data_key = data_key or name + self.qframe: ChartnPane | None = None # scene-local placeholder for book graphics # sizing to avoid overlap with data contents diff --git a/piker/ui/_forms.py b/piker/ui/_forms.py index a86cf9030..d3f8da731 100644 --- a/piker/ui/_forms.py +++ b/piker/ui/_forms.py @@ -21,7 +21,6 @@ from __future__ import annotations from contextlib import asynccontextmanager from functools import partial -from math import floor from typing import ( Any, Callable, diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py index a4deb0348..bcbd95d42 100644 --- a/piker/ui/_fsp.py +++ b/piker/ui/_fsp.py @@ -283,6 +283,7 @@ async def run_fsp_ui( name, array_key=array_key, ) + assert chart.qframe chart.linked.focus() diff --git a/piker/ui/_interaction.py b/piker/ui/_interaction.py index cf4687356..e1df40666 100644 --- a/piker/ui/_interaction.py +++ b/piker/ui/_interaction.py @@ -32,8 +32,18 @@ import pyqtgraph as pg # from pyqtgraph.GraphicsScene import mouseEvents from PyQt5.QtWidgets import QGraphicsSceneMouseEvent as gs_mouse -from PyQt5.QtCore import Qt, QEvent -from pyqtgraph import ViewBox, Point, QtCore +from PyQt5.QtGui import ( + QWheelEvent, +) +from PyQt5.QtCore import ( + Qt, + QEvent, +) +from pyqtgraph import ( + ViewBox, + Point, + QtCore, +) from pyqtgraph import functions as fn import numpy as np import trio @@ -50,8 +60,16 @@ from . import _event if TYPE_CHECKING: - from ._chart import ChartPlotWidget + # from ._search import ( + # SearchWidget, + # ) + from ._chart import ( + ChartnPane, + ChartPlotWidget, + GodWidget, + ) from ._dataviz import Viz + from .order_mode import OrderMode log = get_logger(__name__) @@ -83,7 +101,8 @@ async def handle_viewmode_kb_inputs( ) -> None: - order_mode = view.order_mode + order_mode: OrderMode = view.order_mode + godw: GodWidget = order_mode.godw # noqa # track edge triggered keys # (https://en.wikipedia.org/wiki/Interrupt#Triggering_methods) @@ -147,14 +166,14 @@ async def handle_viewmode_kb_inputs( if mods == Qt.ControlModifier: ctrl = True - # UI REPL-shell + # UI REPL-shell, with ctrl-p (for "pause") if ( - ctrl and key in { - Qt.Key_U, + ctrl + and key in { + Qt.Key_P, } ): import tractor - god = order_mode.godw # noqa feed = order_mode.feed # noqa chart = order_mode.chart # noqa viz = chart.main_viz # noqa @@ -167,9 +186,10 @@ async def handle_viewmode_kb_inputs( # SEARCH MODE # # ctlr-/ for "lookup", "search" -> open search tree if ( - ctrl and key in { + ctrl + and key in { Qt.Key_L, - Qt.Key_Space, + # Qt.Key_Space, } ): godw = view._chart.linked.godwidget @@ -177,19 +197,53 @@ async def handle_viewmode_kb_inputs( godw.search.focus() # esc and ctrl-c - if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C): + if ( + key == Qt.Key_Escape + or ( + ctrl + and key == Qt.Key_C + ) + ): # ctrl-c as cancel # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9 view.select_box.clear() view.linked.focus() # cancel order or clear graphics - if key == Qt.Key_C or key == Qt.Key_Delete: + if ( + key == Qt.Key_C + or key == Qt.Key_Delete + ): order_mode.cancel_orders_under_cursor() # View modes - if key == Qt.Key_R: + if ( + ctrl + and ( + key == Qt.Key_Equal + or key == Qt.Key_I + ) + ): + view.wheelEvent( + ev=None, + axis=None, + delta=view.def_delta, + ) + elif ( + ctrl + and ( + key == Qt.Key_Minus + or key == Qt.Key_O 
+ ) + ): + view.wheelEvent( + ev=None, + axis=None, + delta=-view.def_delta, + ) + + elif key == Qt.Key_R: # NOTE: seems that if we don't yield a Qt render # cycle then the m4 downsampled curves will show here @@ -235,15 +289,47 @@ async def handle_viewmode_kb_inputs( # Toggle position config pane if ( - ctrl and key in { - Qt.Key_P, + ctrl + and key in { + Qt.Key_Space, } ): - pp_pane = order_mode.current_pp.pane - if pp_pane.isHidden(): - pp_pane.show() + # searchw: SearchWidget = godw.search + # pp_pane = order_mode.current_pp.pane + qframes: list[ChartnPane] = [] + + for linked in ( + godw.rt_linked, + godw.hist_linked, + ): + for chartw in ( + [linked.chart] + + + list(linked.subplots.values()) + ): + qframes.append( + chartw.qframe + ) + + # NOTE: place priority on FIRST hiding all + # panes before showing them. + # TODO: make this more "fancy"? + # - maybe look at majority of hidden states and then + # flip based on that? + # - move these loops into the chart APIs? + # - store the UX-state for a given feed/symbol and + # apply when opening a new one (eg. if panes were + # hidden then also hide them on newly loaded mkt + # feeds). + if not any( + qf.sidepane.isHidden() for qf in qframes + ): + for qf in qframes: + qf.sidepane.hide() + else: - pp_pane.hide() + for qf in qframes: + qf.sidepane.show() # ORDER MODE # ---------- @@ -378,6 +464,8 @@ class ChartView(ViewBox): ''' mode_name: str = 'view' + def_delta: float = 616 * 6 + def_scale_factor: float = 1.016 ** (def_delta * -1 / 20) def __init__( self, @@ -502,8 +590,9 @@ def chart(self, chart: ChartPlotWidget) -> None: # type: ignore # noqa def wheelEvent( self, - ev, - axis=None, + ev: QWheelEvent | None = None, + axis: int | None = None, + delta: float | None = None, ): ''' Override "center-point" location for scrolling. @@ -514,6 +603,12 @@ def wheelEvent( TODO: PR a method into ``pyqtgraph`` to make this configurable ''' + # NOTE: certain operations are only avail when this handler is + # actually called on events. + if ev is None: + assert delta + assert axis is None + linked = self.linked if ( not linked @@ -524,7 +619,7 @@ def wheelEvent( mask = [False, False] mask[axis] = self.state['mouseEnabled'][axis] else: - mask = self.state['mouseEnabled'][:] + mask: list[bool] = self.state['mouseEnabled'][:] chart = self.linked.chart @@ -545,8 +640,15 @@ def wheelEvent( # return # actual scaling factor - s = 1.016 ** (ev.delta() * -1 / 20) # self.state['wheelScaleFactor']) - s = [(None if m is False else s) for m in mask] + delta: float = ev.delta() if ev else delta + scale_factor: float = 1.016 ** (delta * -1 / 20) + + # NOTE: if elem is False -> None meaning "do not scale that + # axis". + scales: list[float | bool] = [ + (None if m is False else scale_factor) + for m in mask + ] if ( # zoom happened on axis @@ -569,7 +671,7 @@ def wheelEvent( ).map(ev.pos()) ) # scale_y = 1.3 ** (center.y() * -1 / 20) - self.scaleBy(s, center) + self.scaleBy(scales, center) # zoom in view-box area else: @@ -584,7 +686,7 @@ def wheelEvent( # NOTE: scroll "around" the right most datum-element in view # gives the feeling of staying "pinned" in place. 
-            self.scaleBy(s, focal)
+            self.scaleBy(scales, focal)
 
         # XXX: the order of the next 2 lines i'm pretty sure
         # matters, we want the resize to trigger before the graphics
@@ -604,7 +706,8 @@ def wheelEvent(
             self.interact_graphics_cycle()
             self.interact_graphics_cycle()
 
-        ev.accept()
+        if ev:
+            ev.accept()
 
     def mouseDragEvent(
         self,

From a51a61090d3bdfe6e6a5c1439dc132d623c942d5 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Wed, 2 Aug 2023 20:42:18 -0400
Subject: [PATCH 100/116] Drop `virt_cost: str` from df output

---
 piker/accounting/calc.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py
index 6b6bd7dc9..8062cf753 100644
--- a/piker/accounting/calc.py
+++ b/piker/accounting/calc.py
@@ -493,7 +493,7 @@ def ledger_to_dfs(
 
     ]).with_columns([
 
-        pl.lit(0, dtype=pl.Utf8).alias('virt_cost'),
+        # pl.lit(0, dtype=pl.Utf8).alias('virt_cost'),
         pl.lit(0, dtype=pl.Float64).alias('applied_cost'),
         pl.lit(0, dtype=pl.Float64).alias('pos_ppu'),
         pl.lit(0, dtype=pl.Float64).alias('per_txn_pnl'),
@@ -516,6 +516,7 @@ def ledger_to_dfs(
             'expiry',
             'bs_mktid',
             'etype',
+            # 'is_enter',
         ]),
 
     ]).collect()
@@ -576,7 +577,7 @@ def ledger_to_dfs(
                 virt_costs = [cum_costs_size, cumcpu]
 
                 txn_cost = txn_cost + virt_cost
-                df[i, 'virt_cost'] = f'{-virt_cost} FROM {cumcpu}@{cum_costs_size}'
+                # df[i, 'virt_cost'] = f'{-virt_cost} FROM {cumcpu}@{cum_costs_size}'
 
                 # a cumulative mean of the price-per-unit acquired
                 # in the destination asset:
@@ -627,9 +628,9 @@ def ledger_to_dfs(
                 cum_costs_size: float = last_cum_costs_size - abs(size)
                 virt_costs = [cum_costs_size, last_cpu]
 
-                df[i, 'virt_cost'] = (
-                    f'{-prev_virt_cost} FROM {last_cpu}@{cum_costs_size}'
-                )
+                # df[i, 'virt_cost'] = (
+                #     f'{-prev_virt_cost} FROM {last_cpu}@{cum_costs_size}'
+                # )
 
                 # the per-txn profit or loss (PnL) given we are
                 # (partially) "closing"/"exiting" the position via

From ae444d1bc7b8223c0d4622a4d4e566e8bf0564d9 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 3 Aug 2023 13:56:23 -0400
Subject: [PATCH 101/116] Add note about `xonsh.main.main()` attempted usage

---
 piker/accounting/cli.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/piker/accounting/cli.py b/piker/accounting/cli.py
index 79f186c83..6a62f294d 100644
--- a/piker/accounting/cli.py
+++ b/piker/accounting/cli.py
@@ -299,6 +299,13 @@ def disect(
     # :pray:
     assert not df.is_empty()
 
+    # muck around in pdbp REPL
+    breakpoint()
+
     # TODO: we REALLY need a better console REPL for this
     # kinda thing..
-    breakpoint()
+    # - `xonsh` is an obvious option (and it looks amazing) but
+    #   we need to figure out how to embed it better than just:
+    #     from xonsh.main import main
+    #     main(argv=[])
+    #   which will not actually inject the `df` to globals?
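NB: a minimal stdlib-only sketch of the REPL TODO above (the banner
text and `local` mapping here are illustrative, not part of the
patch) which *does* inject `df` into the prompt's namespace via
`code.interact()`:

    import code

    # drop into a vanilla python REPL with `df` (plus whatever
    # else is passed via `local`) in scope:
    code.interact(
        banner='ledger df loaded B)',
        local={'df': df},
    )

Less featureful than `xonsh` obviously, but it sidesteps the
globals injection issue entirely.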
From e9dfd28aace1bea5076b066f07dcfe10e93c76f5 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Thu, 3 Aug 2023 16:56:33 -0400 Subject: [PATCH 102/116] ib: add back `src/dst` parsing for fiat pairs --- piker/brokers/ib/api.py | 12 +++++---- piker/brokers/ib/feed.py | 53 ++++++++++++++++++++++++++++++++-------- 2 files changed, 50 insertions(+), 15 deletions(-) diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py index a85d989fa..7ab827c3f 100644 --- a/piker/brokers/ib/api.py +++ b/piker/brokers/ib/api.py @@ -650,12 +650,14 @@ async def find_contracts( exch in {'IDEALPRO'} or sectype == 'CASH' ): - # if '/' in symbol: - # currency = '' - # symbol, currency = symbol.split('/') + pair: str = symbol + if '/' in symbol: + src, dst = symbol.split('/') + pair: str = ''.join([src, dst]) + con = Forex( - pair=''.join((symbol, currency)), - currency=currency, + pair=pair, + currency='', ) con.bars_kwargs = {'whatToShow': 'MIDPOINT'} diff --git a/piker/brokers/ib/feed.py b/piker/brokers/ib/feed.py index 8a6ac949a..72f519647 100644 --- a/piker/brokers/ib/feed.py +++ b/piker/brokers/ib/feed.py @@ -30,6 +30,7 @@ from typing import ( Any, Callable, + TYPE_CHECKING, ) from async_generator import aclosing @@ -65,6 +66,9 @@ from ._util import data_reset_hack from .symbols import get_mkt_info +if TYPE_CHECKING: + from trio._core._run import Task + # XXX NOTE: See available types table docs: # https://interactivebrokers.github.io/tws-api/tick_types.html @@ -308,7 +312,7 @@ async def wait_on_data_reset( return False -_data_resetter_task: trio.Task | None = None +_data_resetter_task: Task | None = None _failed_resets: int = 0 @@ -334,7 +338,15 @@ async def get_bars( task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, -) -> (dict, np.ndarray): +) -> tuple[ + tuple[ # result tuple + ibis.objects.BarDataList, + np.ndarray, + datetime, + datetime, + ] | None, + bool, # timed out hint +]: ''' Retrieve historical data from a ``trio``-side task using a ``MethoProxy``. @@ -420,7 +432,12 @@ async def query(): if data_cs: data_cs.cancel() - result = (bars, bars_array, first_dt, last_dt) + result = ( + bars, # ib native + bars_array, # numpy + first_dt, + last_dt, + ) # signal data reset loop parent task result_ready.set() @@ -428,7 +445,7 @@ async def query(): return result except RequestError as err: - msg = err.message + msg: str = err.message if 'No market data permissions for' in msg: # TODO: signalling for no permissions searches @@ -466,21 +483,29 @@ async def query(): nodatas_count += 1 continue - elif 'API historical data query cancelled' in err.message: + elif ( + 'API historical data query cancelled' + in + err.message + ): log.warning( 'Query cancelled by IB (:eyeroll:):\n' f'{err.message}' ) continue + elif ( 'Trading TWS session is connected from a different IP' - in err.message + in + err.message ): log.warning("ignoring ip address warning") continue # XXX: more or less same as above timeout case - elif _pacing in msg: + elif ( + _pacing in msg + ): log.warning( 'History throttle rate reached!\n' 'Resetting farms with `ctrl-alt-f` hack\n' @@ -532,9 +557,10 @@ async def query(): # don't double invoke the reset hack if another # requester task already has it covered. 
                continue
+
             else:
                 _data_resetter_task = trio.lowlevel.current_task()
-                unset_resetter = True
+                unset_resetter: bool = True
 
                 # spawn new data reset task
                 data_cs, reset_done = await nurse.start(
@@ -547,9 +573,16 @@ async def query():
             # sync wait on reset to complete
             await reset_done.wait()
 
-    _data_resetter_task = None if unset_resetter else _data_resetter_task
+    _data_resetter_task = (
+        None
+        if unset_resetter
+        else _data_resetter_task
+    )
     assert result
-    return result, data_cs is not None
+    return (
+        result,
+        data_cs is not None,
+    )
 
 
 _quote_streams: dict[str, trio.abc.ReceiveStream] = {}

From 60751acf85528ed008edea9b2c3689d2db43a6a3 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 3 Aug 2023 16:57:02 -0400
Subject: [PATCH 103/116] Officially drop `Position.size`

---
 piker/accounting/README.rst | 16 ++++++++++++++++
 piker/accounting/_pos.py    |  8 +++-----
 2 files changed, 19 insertions(+), 5 deletions(-)
 create mode 100644 piker/accounting/README.rst

diff --git a/piker/accounting/README.rst b/piker/accounting/README.rst
new file mode 100644
index 000000000..5214d7a25
--- /dev/null
+++ b/piker/accounting/README.rst
@@ -0,0 +1,16 @@
+.accounting
+-----------
+A subsystem for transaction processing, storage and historical
+measurement.
+
+
+.pnl
+----
+BEP, the break even price: the price at which liquidating
+a remaining position results in a zero PnL since the position was
+"opened" in the destination asset.
+
+PPU: price-per-unit: the "average cost" (in cumulative mean terms)
+of the "entry" transactions which "make a position larger"; taking
+a profit relative to this price means that you will "make more
+profit than made prior" since the position was opened.
diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py
index 40a654bde..87c36d243 100644
--- a/piker/accounting/_pos.py
+++ b/piker/accounting/_pos.py
@@ -38,6 +38,7 @@
     datetime,
     now,
 )
+import polars as pl
 import tomlkit
 
 from ._ledger import (
@@ -143,6 +144,8 @@ def expiry(self) -> datetime | None:
     # trade was opened.?
     # def bep() -> float:
     #     ...
+    def clears_df(self) -> pl.DataFrame:
+        ...
 
     def clearsitems(self) -> list[(str, dict)]:
         return ppu(
@@ -436,11 +439,6 @@ def cumsize(self) -> float:
             ndigits=self.mkt.size_tick_digits,
         )
 
-    @property
-    def size(self) -> float:
-        log.warning('`Position.size` is deprecated, use `.cumsize`')
-        return self.cumsize
-
     # TODO: once we have an `.events` table with diff
     # mkt event types..?
     # def suggest_split(self) -> float:

From e4ea7d61938ef8f32bd11b8dc53874968edd7588 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 3 Aug 2023 17:27:26 -0400
Subject: [PATCH 104/116] Lul, fix `open_ledger_dfs()` to `yield` when ledger passed in..

---
 piker/accounting/calc.py | 44 ++++++++++++++++------------------------
 1 file changed, 17 insertions(+), 27 deletions(-)

diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py
index 8062cf753..96889c71a 100644
--- a/piker/accounting/calc.py
+++ b/piker/accounting/calc.py
@@ -353,38 +353,28 @@ def open_ledger_dfs(
     can update the ledger on exit.
    '''
+    from tractor._debug import open_crash_handler
+    with open_crash_handler():
+        if not ledger:
+            import time
+            from ._ledger import open_trade_ledger
 
-    if not ledger:
-        import time
-        from tractor._debug import open_crash_handler
-        from ._ledger import open_trade_ledger
+            now = time.time()
 
-        now = time.time()
-        with (
-            open_crash_handler(),
+            with open_trade_ledger(
+                brokername,
+                acctname,
+                rewrite=True,
+                allow_from_sync_code=True,
 
-            open_trade_ledger(
-                brokername,
-                acctname,
-                rewrite=True,
-                allow_from_sync_code=True,
+                # proxied through from caller
+                **kwargs,
 
-                # proxied through from caller
-                **kwargs,
+            ) as ledger:
+                if not ledger:
+                    raise ValueError(f'No ledger for {acctname}@{brokername} exists?')
 
-            ) as ledger,
-        ):
-            if not ledger:
-                raise ValueError(f'No ledger for {acctname}@{brokername} exists?')
-
-            print(f'LEDGER LOAD TIME: {time.time() - now}')
-
-            # process raw TOML ledger into txns using the
-            # appropriate backend normalizer.
-            # cache: AssetsInfo = get_symcache(
-            #     brokername,
-            #     allow_reload=True,
-            # )
+                print(f'LEDGER LOAD TIME: {time.time() - now}')
 
     yield ledger_to_dfs(ledger), ledger

From 5d86d336f2073ac07f64ad9b7c1a7eea0fac4442 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 3 Aug 2023 17:28:08 -0400
Subject: [PATCH 105/116] Parametrize account names for offline ledger tests

---
 tests/test_accounting.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/tests/test_accounting.py b/tests/test_accounting.py
index e7773a7a9..952a92296 100644
--- a/tests/test_accounting.py
+++ b/tests/test_accounting.py
@@ -7,6 +7,7 @@
 '''
 from pathlib import Path
 
+import pytest
 from piker import config
 from piker.accounting import (
     Account,
@@ -44,9 +45,18 @@ def test_account_file_default_empty(
     assert path.parent.name == 'accounting'
 
 
-def test_paper_ledger_position_calcs():
-    broker: str = 'binance'
-    acnt_name: str = 'paper'
+@pytest.mark.parametrize(
+    'fq_acnt',
+    [
+        ('binance', 'paper'),
+    ],
+)
+def test_paper_ledger_position_calcs(
+    fq_acnt: tuple[str, str],
+):
+    broker: str
+    acnt_name: str
+    broker, acnt_name = fq_acnt
 
     accounts_path: Path = config.repodir() / 'tests' / '_inputs'

From 5ed8544fd18382588738474be791e5d4cb8d0819 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Sat, 5 Aug 2023 15:57:10 -0400
Subject: [PATCH 106/116] Bleh, move `.data.types` back up to top level pkg

Since it's depended on by `.data` stuff as well as pretty much
everything else, it makes more sense to expose it as a top level
module (and maybe eventually as a subpkg as we add to it).
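Call sites then only need to flip the import prefix, eg. (sketch
using `Struct`, the type most modules pull in):

    # before
    from piker.data.types import Struct

    # after
    from piker.types import Struct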
--- piker/accounting/__init__.py | 3 +++ piker/accounting/_allocate.py | 2 +- piker/accounting/_ledger.py | 4 ++-- piker/accounting/_mktinfo.py | 2 +- piker/accounting/_pos.py | 4 ++-- piker/accounting/calc.py | 22 +++++++++++++-------- piker/brokers/binance/api.py | 2 +- piker/brokers/binance/feed.py | 2 +- piker/brokers/binance/venues.py | 2 +- piker/brokers/ib/broker.py | 2 +- piker/brokers/ib/ledger.py | 2 +- piker/brokers/kraken/feed.py | 2 +- piker/brokers/kraken/ledger.py | 2 +- piker/brokers/kraken/symbols.py | 2 +- piker/brokers/kucoin.py | 2 +- piker/clearing/__init__.py | 15 ++++++++++++++ piker/clearing/_client.py | 2 +- piker/clearing/_ems.py | 2 +- piker/clearing/_messages.py | 2 +- piker/clearing/_paper_engine.py | 14 +++++++------ piker/clearing/_util.py | 2 +- piker/data/__init__.py | 5 +++-- piker/data/_sharedmem.py | 2 +- piker/data/_symcache.py | 2 +- piker/data/_web_bs.py | 2 +- piker/data/feed.py | 22 ++++++++++----------- piker/data/flows.py | 6 +++--- piker/data/validate.py | 5 +++-- piker/fsp/__init__.py | 18 ++++++++++++++++- piker/{data => }/types.py | 0 piker/ui/_display.py | 26 +++++++++++------------- piker/ui/_editors.py | 2 +- piker/ui/_event.py | 2 +- piker/ui/_fsp.py | 35 ++++++++++++++++----------------- piker/ui/_position.py | 28 +++++++++++--------------- piker/ui/order_mode.py | 27 +++++++++++-------------- piker/ui/view_mode.py | 2 +- 37 files changed, 153 insertions(+), 123 deletions(-) rename piker/{data => }/types.py (100%) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index d6e1c3b61..e7ff16f1a 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -50,6 +50,8 @@ mk_allocator, Allocator, ) +from .models import CostModel + log = get_logger(__name__) @@ -57,6 +59,7 @@ 'Account', 'Allocator', 'Asset', + 'CostModel', 'MktPair', 'Position', 'Symbol', diff --git a/piker/accounting/_allocate.py b/piker/accounting/_allocate.py index deeec4989..ab3fa9656 100644 --- a/piker/accounting/_allocate.py +++ b/piker/accounting/_allocate.py @@ -25,7 +25,7 @@ from ._pos import Position from . import MktPair -from ..data.types import Struct +from piker.types import Struct _size_units = bidict({ diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py index 5f1865c97..8eac518a0 100644 --- a/piker/accounting/_ledger.py +++ b/piker/accounting/_ledger.py @@ -37,8 +37,8 @@ ) import tomli_w # for fast ledger writing -from .. import config -from ..data.types import Struct +from piker.types import Struct +from piker import config from ..log import get_logger from .calc import ( iter_by_dt, diff --git a/piker/accounting/_mktinfo.py b/piker/accounting/_mktinfo.py index 4197c1bb5..34adafa24 100644 --- a/piker/accounting/_mktinfo.py +++ b/piker/accounting/_mktinfo.py @@ -36,7 +36,7 @@ Literal, ) -from ..data.types import Struct +from piker.types import Struct # TODO: make these literals.. 
diff --git a/piker/accounting/_pos.py b/piker/accounting/_pos.py index 87c36d243..1a2378da4 100644 --- a/piker/accounting/_pos.py +++ b/piker/accounting/_pos.py @@ -58,8 +58,8 @@ from ..clearing._messages import ( BrokerdPosition, ) -from ..data.types import Struct -from ..data._symcache import SymbologyCache +from piker.types import Struct +from piker.data._symcache import SymbologyCache from ..log import get_logger log = get_logger(__name__) diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py index 96889c71a..6ab89d2be 100644 --- a/piker/accounting/calc.py +++ b/piker/accounting/calc.py @@ -422,7 +422,12 @@ def ledger_to_dfs( # fdf = df.filter(pred) - # break up into a frame per mkt / fqme + # TODO: originally i had tried just using a plain ol' groupby + # + agg here but the issue was re-inserting to the src frame. + # however, learning more about `polars` seems like maybe we can + # use `.over()`? + # https://pola-rs.github.io/polars/py-polars/html/reference/expressions/api/polars.Expr.over.html#polars.Expr.over + # => CURRENTLY we break up into a frame per mkt / fqme dfs: dict[str, pl.DataFrame] = ldf.partition_by( 'bs_mktid', as_dict=True, @@ -435,14 +440,9 @@ def ledger_to_dfs( # covert to lazy form (since apparently we might need it # eventually ...) - df = dfs[key] + df: pl.DataFrame = dfs[key] - ldf = df.lazy() - # TODO: pass back the current `Position` object loaded from - # the account as well? Would provide incentive to do all - # this ledger loading inside a new async open_account(). - # bs_mktid: str = df[0]['bs_mktid'] - # pos: Position = acnt.pps[bs_mktid] + ldf: pl.LazyFrame = df.lazy() df = dfs[key] = ldf.with_columns([ @@ -688,4 +688,10 @@ def ledger_to_dfs( last_ppu: float = ppu last_cumsize: float = cumsize + # TODO?: pass back the current `Position` object loaded from + # the account as well? Would provide incentive to do all + # this ledger loading inside a new async open_account(). 
+ # bs_mktid: str = df[0]['bs_mktid'] + # pos: Position = acnt.pps[bs_mktid] + return dfs diff --git a/piker/brokers/binance/api.py b/piker/brokers/binance/api.py index 2ade69ed4..25cd31a83 100644 --- a/piker/brokers/binance/api.py +++ b/piker/brokers/binance/api.py @@ -53,7 +53,7 @@ Asset, digits_to_dec, ) -from piker.data.types import Struct +from piker.types import Struct from piker.data import def_iohlcv_fields from piker.brokers import ( resproc, diff --git a/piker/brokers/binance/feed.py b/piker/brokers/binance/feed.py index 6d6ae28dc..713f76c66 100644 --- a/piker/brokers/binance/feed.py +++ b/piker/brokers/binance/feed.py @@ -58,7 +58,7 @@ MktPair, unpack_fqme, ) -from piker.data.types import Struct +from piker.types import Struct from piker.data.validate import FeedInit from piker.data._web_bs import ( open_autorecon_ws, diff --git a/piker/brokers/binance/venues.py b/piker/brokers/binance/venues.py index 3510d6f64..dc3312be1 100644 --- a/piker/brokers/binance/venues.py +++ b/piker/brokers/binance/venues.py @@ -26,7 +26,7 @@ from msgspec import field -from piker.data.types import Struct +from piker.types import Struct # API endpoint paths by venue / sub-API diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py index b32f4eaca..262cbc1d0 100644 --- a/piker/brokers/ib/broker.py +++ b/piker/brokers/ib/broker.py @@ -49,6 +49,7 @@ ) from piker import config +from piker.types import Struct from piker.accounting import ( Position, Transaction, @@ -62,7 +63,6 @@ from piker.data import ( open_symcache, SymbologyCache, - Struct, ) from piker.clearing._messages import ( Order, diff --git a/piker/brokers/ib/ledger.py b/piker/brokers/ib/ledger.py index 9143cce6f..7273d8c92 100644 --- a/piker/brokers/ib/ledger.py +++ b/piker/brokers/ib/ledger.py @@ -39,8 +39,8 @@ CommissionReport, ) +from piker.types import Struct from piker.data import ( - Struct, SymbologyCache, ) from piker.accounting import ( diff --git a/piker/brokers/kraken/feed.py b/piker/brokers/kraken/feed.py index 63072fd02..298acdca1 100644 --- a/piker/brokers/kraken/feed.py +++ b/piker/brokers/kraken/feed.py @@ -46,7 +46,7 @@ DataThrottle, DataUnavailable, ) -from piker.data.types import Struct +from piker.types import Struct from piker.data.validate import FeedInit from piker.data._web_bs import open_autorecon_ws, NoBsWs from .api import ( diff --git a/piker/brokers/kraken/ledger.py b/piker/brokers/kraken/ledger.py index 6515568f3..6dcd327ec 100644 --- a/piker/brokers/kraken/ledger.py +++ b/piker/brokers/kraken/ledger.py @@ -34,9 +34,9 @@ TransactionLedger, # MktPair, ) +from piker.types import Struct from piker.data import ( SymbologyCache, - Struct, ) from .api import ( log, diff --git a/piker/brokers/kraken/symbols.py b/piker/brokers/kraken/symbols.py index ea2c68f44..d71828b2c 100644 --- a/piker/brokers/kraken/symbols.py +++ b/piker/brokers/kraken/symbols.py @@ -33,7 +33,7 @@ open_cached_client, SymbolNotFound, ) -from piker.data.types import Struct +from piker.types import Struct from piker.accounting._mktinfo import ( Asset, MktPair, diff --git a/piker/brokers/kucoin.py b/piker/brokers/kucoin.py index a85b18f4e..26aab4df6 100755 --- a/piker/brokers/kucoin.py +++ b/piker/brokers/kucoin.py @@ -64,7 +64,7 @@ ) from piker.log import get_logger from piker.data.validate import FeedInit -from piker.data.types import Struct +from piker.types import Struct from piker.data import def_iohlcv_fields from piker.data._web_bs import ( open_autorecon_ws, diff --git a/piker/clearing/__init__.py b/piker/clearing/__init__.py index 
19d6390f2..1c25a6fd9 100644 --- a/piker/clearing/__init__.py +++ b/piker/clearing/__init__.py @@ -27,13 +27,28 @@ open_brokerd_dialog, ) from ._util import OrderDialogs +from ._messages import( + Order, + Status, + Cancel, + + # TODO: deprecate these and replace end-2-end with + # client-side-dialog set above B) + # https://github.com/pikers/piker/issues/514 + BrokerdPosition +) __all__ = [ + 'FeeModel', 'open_ems', 'OrderClient', 'open_brokerd_dialog', 'OrderDialogs', + 'Order', + 'Status', + 'Cancel', + 'BrokerdPosition' ] diff --git a/piker/clearing/_client.py b/piker/clearing/_client.py index 9977f95d7..9bb2aa749 100644 --- a/piker/clearing/_client.py +++ b/piker/clearing/_client.py @@ -30,7 +30,7 @@ from ._util import ( log, # sub-sys logger ) -from ..data.types import Struct +from piker.types import Struct from ..service import maybe_open_emsd from ._messages import ( Order, diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index b43d8fd16..9e30351e5 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -51,9 +51,9 @@ unpack_fqme, dec_digits, ) +from piker.types import Struct from ..ui._notify import notify_from_ems_status_msg from ..data import iterticks -from ..data.types import Struct from ._messages import ( Order, Status, diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index 4946b0c06..6e44969ec 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -28,7 +28,7 @@ from msgspec import field -from ..data.types import Struct +from piker.types import Struct # TODO: a composite for tracking msg flow on 2-legged diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 67c789a43..6af254266 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -40,21 +40,22 @@ from piker.brokers import get_brokermod from piker.accounting import ( - Position, Account, + CostModel, + MktPair, + Position, Transaction, TransactionLedger, - open_trade_ledger, open_account, - MktPair, + open_trade_ledger, unpack_fqme, ) from piker.data import ( - open_feed, - iterticks, Struct, - open_symcache, SymbologyCache, + iterticks, + open_feed, + open_symcache, ) from ._util import ( log, # sub-sys logger @@ -83,6 +84,7 @@ class PaperBoi(Struct): ems_trades_stream: tractor.MsgStream acnt: Account ledger: TransactionLedger + fees: CostModel # map of paper "live" orders which be used # to simulate fills based on paper engine settings diff --git a/piker/clearing/_util.py b/piker/clearing/_util.py index 3ba7f55f0..962861e8f 100644 --- a/piker/clearing/_util.py +++ b/piker/clearing/_util.py @@ -25,7 +25,7 @@ get_logger, get_console_log, ) -from piker.data.types import Struct +from piker.types import Struct subsys: str = 'piker.clearing' log = get_logger(subsys) diff --git a/piker/data/__init__.py b/piker/data/__init__.py index 95c47e197..9b12697e9 100644 --- a/piker/data/__init__.py +++ b/piker/data/__init__.py @@ -44,7 +44,7 @@ open_symcache, get_symcache, ) -from .types import Struct +from ._sampling import open_sample_stream __all__: list[str] = [ @@ -60,7 +60,8 @@ 'def_iohlcv_fields', 'def_ohlcv_fields', 'open_symcache', + 'open_sample_stream', 'get_symcache', 'SymbologyCache', - 'Struct', + 'types', ] diff --git a/piker/data/_sharedmem.py b/piker/data/_sharedmem.py index 78f66f634..0a7976008 100644 --- a/piker/data/_sharedmem.py +++ b/piker/data/_sharedmem.py @@ -34,7 +34,7 @@ from ._util import log from ._source import def_iohlcv_fields -from .types import Struct +from piker.types import 
Struct def cuckoff_mantracker(): diff --git a/piker/data/_symcache.py b/piker/data/_symcache.py index e6c94fa66..abb0145db 100644 --- a/piker/data/_symcache.py +++ b/piker/data/_symcache.py @@ -47,11 +47,11 @@ from piker.log import get_logger from piker import config +from piker.types import Struct from piker.brokers import ( open_cached_client, get_brokermod, ) -from .types import Struct if TYPE_CHECKING: from ..accounting import ( diff --git a/piker/data/_web_bs.py b/piker/data/_web_bs.py index 9c2753b11..e60f871b1 100644 --- a/piker/data/_web_bs.py +++ b/piker/data/_web_bs.py @@ -50,8 +50,8 @@ ConnectionTimeout, ) +from piker.types import Struct from ._util import log -from .types import Struct class NoBsWs: diff --git a/piker/data/feed.py b/piker/data/feed.py index fcd193da0..7691538cb 100644 --- a/piker/data/feed.py +++ b/piker/data/feed.py @@ -50,15 +50,21 @@ gather_contexts, ) -from ..brokers import get_brokermod -from ..calc import humanize +from piker.accounting import ( + MktPair, + unpack_fqme, +) +from piker.types import Struct +from piker.brokers import get_brokermod +from piker.service import ( + maybe_spawn_brokerd, +) +from piker.ui import _search +from piker.calc import humanize from ._util import ( log, get_console_log, ) -from ..service import ( - maybe_spawn_brokerd, -) from .flows import Flume from .validate import ( FeedInit, @@ -68,12 +74,6 @@ manage_history, ) from .ingest import get_ingestormod -from .types import Struct -from ..accounting import ( - MktPair, - unpack_fqme, -) -from ..ui import _search from ._sampling import ( sample_and_broadcast, uniform_rate_send, diff --git a/piker/data/flows.py b/piker/data/flows.py index 86e5370ff..38345cea0 100644 --- a/piker/data/flows.py +++ b/piker/data/flows.py @@ -30,8 +30,7 @@ import pendulum import numpy as np -from ..accounting import MktPair -from .types import Struct +from piker.types import Struct from ._sharedmem import ( attach_shm_array, ShmArray, @@ -39,7 +38,7 @@ ) if TYPE_CHECKING: - # from pyqtgraph import PlotItem + from ..accounting import MktPair from .feed import Feed @@ -189,6 +188,7 @@ def from_msg( ''' mkt_msg = msg.pop('mkt') + from ..accounting import MktPair # cycle otherwise.. mkt = MktPair.from_msg(mkt_msg) return cls(mkt=mkt, **msg) diff --git a/piker/data/validate.py b/piker/data/validate.py index f54992cf6..cefa0f1fe 100644 --- a/piker/data/validate.py +++ b/piker/data/validate.py @@ -18,6 +18,7 @@ data-provider-backend-agnostic schema definitions. 
''' +from __future__ import annotations from decimal import Decimal from pprint import pformat from types import ModuleType @@ -28,8 +29,8 @@ from msgspec import field -from .types import Struct -from ..accounting import ( +from piker.types import Struct +from piker.accounting import ( Asset, MktPair, ) diff --git a/piker/fsp/__init__.py b/piker/fsp/__init__.py index 5e88ed696..e463ac263 100644 --- a/piker/fsp/__init__.py +++ b/piker/fsp/__init__.py @@ -22,9 +22,25 @@ import numpy as np +from ._api import ( + maybe_mk_fsp_shm, + Fsp, +) from ._engine import cascade +from ._volume import ( + dolla_vlm, + flow_rates, + tina_vwap, +) -__all__ = ['cascade'] +__all__: list[str] = [ + 'cascade', + 'maybe_mk_fsp_shm', + 'Fsp', + 'dolla_vlm', + 'flow_rates', + 'tina_vwap', +] async def latency( diff --git a/piker/data/types.py b/piker/types.py similarity index 100% rename from piker/data/types.py rename to piker/types.py diff --git a/piker/ui/_display.py b/piker/ui/_display.py index 47704e3f0..5f9e2cb93 100644 --- a/piker/ui/_display.py +++ b/piker/ui/_display.py @@ -36,25 +36,27 @@ from msgspec import field # from .. import brokers -from ..accounting import ( +from piker.accounting import ( MktPair, ) -from ..data import ( +from piker.types import Struct +from piker.data import ( open_feed, Feed, Flume, + open_sample_stream, + ShmArray, ) -from ..data.ticktools import ( +from piker.data.ticktools import ( _tick_groups, _auction_ticks, ) -from ..data.types import Struct -from ..data._sharedmem import ( - ShmArray, -) -from ..data._sampling import ( - open_sample_stream, +from piker.toolz import ( + pg_profile_enabled, + ms_slower_then, + Profiler, ) +from piker.log import get_logger # from ..data._source import tf_in_1s from ._axes import YAxisLabel from ._chart import ( @@ -79,12 +81,6 @@ open_order_mode, OrderMode, ) -from ..toolz import ( - pg_profile_enabled, - ms_slower_then, - Profiler, -) -from ..log import get_logger if TYPE_CHECKING: from ._interaction import ChartView diff --git a/piker/ui/_editors.py b/piker/ui/_editors.py index df8813147..4e489f0f5 100644 --- a/piker/ui/_editors.py +++ b/piker/ui/_editors.py @@ -42,10 +42,10 @@ from PyQt5.QtCore import QPointF import numpy as np +from piker.types import Struct from ._style import hcolor, _font from ._lines import LevelLine from ..log import get_logger -from ..data.types import Struct if TYPE_CHECKING: from ._chart import GodWidget diff --git a/piker/ui/_event.py b/piker/ui/_event.py index 3edfb2ff5..b83dd5780 100644 --- a/piker/ui/_event.py +++ b/piker/ui/_event.py @@ -30,7 +30,7 @@ QGraphicsSceneMouseEvent as gs_mouse, ) -from ..data.types import Struct +from piker.types import Struct MOUSE_EVENTS = { diff --git a/piker/ui/_fsp.py b/piker/ui/_fsp.py index bcbd95d42..23cec162e 100644 --- a/piker/ui/_fsp.py +++ b/piker/ui/_fsp.py @@ -36,16 +36,27 @@ import trio from trio_typing import TaskStatus -from piker.data.types import Struct -from ._axes import PriceAxis -from ..calc import humanize -from ..data._sharedmem import ( +from piker.accounting import MktPair +from piker.fsp import ( + cascade, + maybe_mk_fsp_shm, + Fsp, + dolla_vlm, + flow_rates, +) +from piker.data import ( + Flume, ShmArray, +) +from piker.data._sharedmem import ( _Token, try_read, ) -from ..data.feed import Flume -from ..accounting import MktPair +from piker.log import get_logger +from piker.toolz import Profiler +from piker.types import Struct +from ._axes import PriceAxis +from ..calc import humanize from ._chart import ( ChartPlotWidget, LinkedSplits, @@ -55,18 
+66,6 @@ mk_form, open_form_input_handling, ) -from ..fsp._api import ( - maybe_mk_fsp_shm, - Fsp, -) -from ..fsp import cascade -from ..fsp._volume import ( - # tina_vwap, - dolla_vlm, - flow_rates, -) -from ..log import get_logger -from ..toolz import Profiler log = get_logger(__name__) diff --git a/piker/ui/_position.py b/piker/ui/_position.py index 8b07d80a1..1e4895f9c 100644 --- a/piker/ui/_position.py +++ b/piker/ui/_position.py @@ -30,40 +30,34 @@ TYPE_CHECKING, ) - # from PyQt5.QtWidgets import QStyle # from PyQt5.QtGui import ( # QIcon, QPixmap, QColor # ) from pyqtgraph import functions as fn -from ._annotate import LevelMarker -from ._anchors import ( - pp_tight_and_right, # wanna keep it straight in the long run - gpath_pin, -) -from ..calc import ( +from piker.calc import ( humanize, pnl, puterize, ) -from ..accounting import ( +from piker.accounting import ( Allocator, - MktPair, -) -from ..accounting import ( Position, + MktPair, ) -from ..accounting._mktinfo import ( - _derivs, -) - -from ..data import ( +from piker.accounting._mktinfo import _derivs +from piker.types import Struct +from piker.data import ( iterticks, Feed, Flume, ) -from ..data.types import Struct +from ._annotate import LevelMarker +from ._anchors import ( + pp_tight_and_right, # wanna keep it straight in the long run + gpath_pin, +) from ._label import Label from ._lines import LevelLine, order_line from ._style import _font diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py index ddc2d6f04..14d8b8210 100644 --- a/piker/ui/order_mode.py +++ b/piker/ui/order_mode.py @@ -36,25 +36,29 @@ import trio from PyQt5.QtCore import Qt -from .. import config -from ..accounting import ( +from piker import config +from piker.accounting import ( Allocator, Position, mk_allocator, MktPair, Symbol, ) -from ..clearing._client import ( +from piker.clearing import ( open_ems, OrderClient, ) -from ._style import _font -from ..data.feed import ( +from piker.clearing._messages import ( + Order, + Status, + BrokerdPosition, +) +from piker.data import ( Feed, Flume, ) -from ..data.types import Struct -from ..log import get_logger +from piker.types import Struct +from piker.log import get_logger from ._editors import LineEditor, ArrowEditor from ._lines import order_line, LevelLine from ._position import ( @@ -63,14 +67,7 @@ ) from ._forms import FieldsForm from ._window import MultiStatus -from ..clearing._messages import ( - # Cancel, - Order, - Status, - # BrokerdOrder, - # BrokerdStatus, - BrokerdPosition, -) +from ._style import _font from ._forms import open_form_input_handling from ._notify import notify_from_ems_status_msg diff --git a/piker/ui/view_mode.py b/piker/ui/view_mode.py index 191b62b95..a9a093d09 100644 --- a/piker/ui/view_mode.py +++ b/piker/ui/view_mode.py @@ -30,7 +30,7 @@ import pendulum import pyqtgraph as pg -from ..data.types import Struct +from piker.types import Struct from ..data._timeseries import slice_from_time from ..log import get_logger from ..toolz import Profiler From eba6a779660b1ddd7b284646b3e462350441fe80 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 7 Aug 2023 09:55:45 -0400 Subject: [PATCH 107/116] Add paper-engine cost simulation support If a backend declares a top level `get_cost()` (provisional name) we call it in the paper engine to try and simulate costs according to the provider's own schedule. For now only `binance` has support (via the ep def) but ideally we can fill these in incrementally as users start forward testing on multiple cexes. 
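As a sketch, the (equally provisional) ep contract and how the
engine applies it per simulated fill; the bps numbers below are
placeholders, see the `binance` impl in the diff for real ones:

    def get_cost(
        price: float,
        size: float,
        is_taker: bool = False,
    ) -> float:
        # flat bps fee on the order's cost basis
        cb: float = price * size
        return cb * (0.0010 if is_taker else 0.0008)

    # paper engine side, on each fill (condensed from the diff):
    # cost: float = fees(price, size) if fees else 0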
--- piker/accounting/_ledger.py | 9 ++++--
 piker/brokers/binance/__init__.py | 2 ++
 piker/brokers/binance/broker.py | 31 +++++++++++++++++-
 piker/clearing/_paper_engine.py | 54 +++++++++++++++++++++++++------
 4 files changed, 83 insertions(+), 13 deletions(-)

diff --git a/piker/accounting/_ledger.py b/piker/accounting/_ledger.py
index 8eac518a0..82a77107f 100644
--- a/piker/accounting/_ledger.py
+++ b/piker/accounting/_ledger.py
@@ -21,6 +21,7 @@
 from __future__ import annotations
 from collections import UserDict
 from contextlib import contextmanager as cm
+from functools import partial
 from pathlib import Path
 from pprint import pformat
 from types import ModuleType
@@ -189,9 +190,13 @@ def iter_txns(
 
         if self.account == 'paper':
             from piker.clearing import _paper_engine
-            norm_trade = _paper_engine.norm_trade
+            norm_trade: Callable = partial(
+                _paper_engine.norm_trade,
+                brokermod=self.mod,
+            )
+
         else:
-            norm_trade = self.mod.norm_trade
+            norm_trade: Callable = self.mod.norm_trade
 
         # datetime-sort and pack into txs
         for tid, txdict in self.tx_sort(self.data.items()):
diff --git a/piker/brokers/binance/__init__.py b/piker/brokers/binance/__init__.py
index da63a67ca..830b1acf9 100644
--- a/piker/brokers/binance/__init__.py
+++ b/piker/brokers/binance/__init__.py
@@ -32,6 +32,7 @@
 )
 from .broker import (
     open_trade_dialog,
+    get_cost,
 )
 from .venues import (
     SpotPair,
@@ -41,6 +42,7 @@
 __all__ = [
     'get_client',
     'get_mkt_info',
+    'get_cost',
     'SpotPair',
     'FutesPair',
     'open_trade_dialog',
diff --git a/piker/brokers/binance/broker.py b/piker/brokers/binance/broker.py
index 6dcf99b5e..ff6a2ff56 100644
--- a/piker/brokers/binance/broker.py
+++ b/piker/brokers/binance/broker.py
@@ -48,7 +48,9 @@
     open_cached_client,
     BrokerError,
 )
-from piker.clearing import OrderDialogs
+from piker.clearing import (
+    OrderDialogs,
+)
 from piker.clearing._messages import (
     BrokerdOrder,
     BrokerdOrderAck,
@@ -70,6 +72,33 @@
 log = get_logger('piker.brokers.binance')
 
 
+# Fee schedule template, mostly for paper engine fees modelling.
+# https://www.binance.com/en/support/faq/what-are-market-makers-and-takers-360007720071
+def get_cost(
+    price: float,
+    size: float,
+    is_taker: bool = False,
+
+) -> float:
+
+    # https://www.binance.com/en/fee/trading
+    cb: float = price * size
+    match is_taker:
+        case True:
+            return cb * 0.001000
+
+        case False if cb < 1e6:
+            return cb * 0.001000
+
+        case False if 1e6 <= cb < 5e6:
+            return cb * 0.000900
+
+        # NOTE: there's more but are you really going
+        # to have a cb bigger than this per trade? 
+ case False if cb >= 5e6: + return cb * 0.000800 + + async def handle_order_requests( ems_order_stream: tractor.MsgStream, client: Client, diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index 6af254266..f1c1a3d91 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -41,7 +41,7 @@ from piker.brokers import get_brokermod from piker.accounting import ( Account, - CostModel, + # CostModel, MktPair, Position, Transaction, @@ -51,12 +51,13 @@ unpack_fqme, ) from piker.data import ( - Struct, + Feed, SymbologyCache, iterticks, open_feed, open_symcache, ) +from piker.types import Struct from ._util import ( log, # sub-sys logger get_console_log, @@ -84,7 +85,7 @@ class PaperBoi(Struct): ems_trades_stream: tractor.MsgStream acnt: Account ledger: TransactionLedger - fees: CostModel + fees: Callable # map of paper "live" orders which be used # to simulate fills based on paper engine settings @@ -266,12 +267,17 @@ async def fake_fill( # we don't actually have any unique backend symbol ourselves # other then this thing, our fqme address. bs_mktid: str = fqme + if fees := self.fees: + cost: float = fees(price, size) + else: + cost: float = 0 + t = Transaction( fqme=fqme, tid=oid, size=size, price=price, - cost=0, # TODO: cost model + cost=cost, dt=pendulum.from_timestamp(fill_time_s), bs_mktid=bs_mktid, ) @@ -296,7 +302,7 @@ async def fake_fill( account='paper', symbol=fqme, - size=pp.size, + size=pp.cumsize, avg_price=pp.ppu, # TODO: we need to look up the asset currency from @@ -657,7 +663,7 @@ async def open_trade_dialog( broker=broker, account='paper', symbol=pos.mkt.fqme, - size=pos.size, + size=pos.cumsize, avg_price=pos.ppu, )) @@ -681,6 +687,7 @@ async def open_trade_dialog( await trio.sleep_forever() return + feed: Feed async with ( open_feed( [fqme], @@ -689,9 +696,15 @@ async def open_trade_dialog( ): # sanity check all the mkt infos for fqme, flume in feed.flumes.items(): - mkt = symcache.mktmaps.get(fqme) or mkt_by_fqme[fqme] + mkt: MktPair = symcache.mktmaps.get(fqme) or mkt_by_fqme[fqme] assert mkt == flume.mkt + get_cost: Callable = getattr( + brokermod, + 'get_cost', + None, + ) + async with ( ctx.open_stream() as ems_stream, trio.open_nursery() as n, @@ -701,6 +714,7 @@ async def open_trade_dialog( ems_trades_stream=ems_stream, acnt=acnt, ledger=ledger, + fees=get_cost, _buys=_buys, _sells=_sells, @@ -776,6 +790,9 @@ def norm_trade( pairs: dict[str, Struct], symcache: SymbologyCache | None = None, + # fees: CostModel | None = None, + brokermod: ModuleType | None = None, + ) -> Transaction: from pendulum import ( DateTime, @@ -788,13 +805,30 @@ def norm_trade( expiry: str | None = txdict.get('expiry') fqme: str = txdict.get('fqme') or txdict.pop('fqsn') + price: float = txdict['price'] + size: float = txdict['size'] + cost: float = txdict.get('cost', 0) + if ( + brokermod + and (get_cost := getattr( + brokermod, + 'get_cost', + False, + )) + ): + cost = get_cost( + price, + size, + is_taker=True, + ) + return Transaction( fqme=fqme, tid=txdict['tid'], dt=dt, - price=txdict['price'], - size=txdict['size'], - cost=txdict.get('cost', 0), + price=price, + size=size, + cost=cost, bs_mktid=txdict['bs_mktid'], expiry=parse(expiry) if expiry else None, etype='clear', From 85a38d057b20a9bc06406cc7dfc4009dda6c74bc Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 7 Aug 2023 10:13:31 -0400 Subject: [PATCH 108/116] Factor cumsize sign to var --- piker/accounting/calc.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) 
diff --git a/piker/accounting/calc.py b/piker/accounting/calc.py
index 6ab89d2be..ef0179e4e 100644
--- a/piker/accounting/calc.py
+++ b/piker/accounting/calc.py
@@ -649,11 +649,12 @@ def ledger_to_dfs(
                 if (
                     abs(cumsize) > 0  # non-exit-to-zero position txn
                 ):
+                    cumsize_sign: float = copysign(1, cumsize)
                     ledger_bep: float = (
                         (
                             (ppu * cumsize)
                             -
-                            (last_ledger_pnl * copysign(1, cumsize))
+                            (last_ledger_pnl * cumsize_sign)
                         ) / cumsize
                     )
@@ -671,7 +672,7 @@
                         (
                             (ppu * cumsize)
                             -
-                            (last_pos_pnl * copysign(1, cumsize))
+                            (last_pos_pnl * cumsize_sign)
                         ) / cumsize
                     )

From ff2bbd5aca831bd70275ad63de1660d35ca74e18 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Mon, 7 Aug 2023 18:19:35 -0400
Subject: [PATCH 109/116] ib: handle order errors via `reqid` lookup

Finally this is a reason to use our new `OrderDialogs` abstraction; on
order submission errors IB doesn't really pass back anything other than
the `orderId` and the reason, so we have to conduct our own lookup for
a message to relay to the EMS.. So, for every EMS msg we send, add it
to the dialog tracker and then use the `flows: OrderDialogs` for lookup
in the case where we need to relay said error. Also, include sending
a `canceled` status such that the order won't get stuck as a stale
entry in the `emsd`'s own dialog table. For now we just filter
unrelated errors out of the stream since there's always going to be
some tied to live/history data queries..
--- piker/brokers/ib/api.py | 8 ++--
 piker/brokers/ib/broker.py | 89 +++++++++++++++++++++++++++-----------
 2 files changed, 66 insertions(+), 31 deletions(-)

diff --git a/piker/brokers/ib/api.py b/piker/brokers/ib/api.py
index 7ab827c3f..58fd6a1ec 100644
--- a/piker/brokers/ib/api.py
+++ b/piker/brokers/ib/api.py
@@ -916,22 +916,20 @@ def push_err(
 
     ) -> None:
 
-        reason = errorString
+        reason: str = errorString
 
         if reqId == -1:
             # it's a general event?
-            key = 'event'
+            key: str = 'event'
             log.info(errorString)
 
         else:
-            key = 'error'
+            key: str = 'error'
             log.error(errorString)
 
         try:
             to_trio.send_nowait((
                 key,
-
-                # error "object"
                 {
                     'type': key,
                     'reqid': reqId,
diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index 262cbc1d0..f1070a438 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -20,7 +20,6 @@
 """
 from __future__ import annotations
 from contextlib import ExitStack
-from dataclasses import asdict
 from functools import partial
 from pprint import pformat
 import time
@@ -64,6 +63,7 @@
     open_symcache,
     SymbologyCache,
 )
+from piker.clearing import OrderDialogs
 from piker.clearing._messages import (
     Order,
     Status,
@@ -124,6 +124,7 @@ async def handle_order_requests(
 
     ems_order_stream: tractor.MsgStream,
     accounts_def: dict[str, str],
+    flows: OrderDialogs,
 
 ) -> None:
 
@@ -169,8 +170,9 @@
             # there is no existing order so ask the client to create
             # a new one (which it seems to do by allocating an int
             # counter - collision prone..)
- reqid = order.reqid + reqid: int | None = order.reqid if reqid is not None: + log.error(f'TYPE .reqid: {reqid} -> {type(reqid)}') reqid = int(reqid) # call our client api to submit the order @@ -191,15 +193,15 @@ async def handle_order_requests( )) # deliver ack that order has been submitted to broker routing - await ems_order_stream.send( - BrokerdOrderAck( - # ems order request id - oid=order.oid, - # broker specific request id - reqid=reqid, - account=account, - ) + ack = BrokerdOrderAck( + # ems order request id + oid=order.oid, + # broker specific request id + reqid=reqid, + account=account, ) + await ems_order_stream.send(ack) + flows.add_msg(reqid, ack.to_dict()) elif action == 'cancel': msg = BrokerdCancel(**request_msg) @@ -521,6 +523,8 @@ async def open_trade_dialog( ) -> AsyncIterator[dict[str, Any]]: + # task local msg dialog tracking + flows = OrderDialogs() accounts_def = config.load_accounts(['ib']) # deliver positions to subscriber before anything else @@ -755,6 +759,7 @@ async def open_trade_dialog( handle_order_requests, ems_stream, accounts_def, + flows, ) # allocate event relay tasks for each client connection @@ -767,6 +772,7 @@ async def open_trade_dialog( proxies, ledgers, tables, + flows, ) # write account and ledger files immediately! @@ -985,6 +991,8 @@ async def deliver_trade_events( ledgers, tables, + flows: OrderDialogs, + ) -> None: ''' Format and relay all trade events for a given client to emsd. @@ -1013,6 +1021,7 @@ async def deliver_trade_events( # unwrap needed data from ib_insync internal types trade: Trade = item + reqid: str = str(trade.order.orderId) status: OrderStatus = trade.orderStatus status_str: str = _statuses[status.status] remaining: float = status.remaining @@ -1027,7 +1036,7 @@ async def deliver_trade_events( # NOTE: should match the value returned from # `.submit_limit()` - reqid=execu.orderId, + reqid=reqid, action=_action_map[execu.side], size=execu.shares, @@ -1040,7 +1049,9 @@ async def deliver_trade_events( # XXX: required by order mode currently broker_time=execu.time, ) + await ems_stream.send(fill_msg) + flows.add_msg(reqid, fill_msg.to_dict()) if remaining == 0: # emit a closed status on filled statuses where @@ -1050,7 +1061,7 @@ async def deliver_trade_events( # skip duplicate filled updates - we get the deats # from the execution details event msg = BrokerdStatus( - reqid=trade.order.orderId, + reqid=reqid, time_ns=time.time_ns(), # cuz why not account=accounts_def.inverse[trade.order.account], @@ -1067,8 +1078,7 @@ async def deliver_trade_events( broker_details={'name': 'ib'}, ) await ems_stream.send(msg) - continue - + flows.add_msg(reqid, msg.to_dict()) # XXX: for wtv reason this is a separate event type # from IB, not sure why it's needed other then for extra @@ -1199,12 +1209,26 @@ async def deliver_trade_events( ) case 'error': + # NOTE: see impl deats in + # `Client.inline_errors()::push_err()` err: dict = item - # f$#$% gawd dammit insync.. - con = err['contract'] - if isinstance(con, Contract): - err['contract'] = asdict(con) + code: int = err['error_code'] + if code in { + 200, # uhh + + # hist pacing / connectivity + 162, + 165, + + # 'No market data during competing live session' + 1669, + }: + continue + + reqid: str = err['reqid'] + acnt: str = flows.get(reqid)['account'] + reason: str = err['reason'] if err['reqid'] == -1: log.error(f'TWS external order error:\n{pformat(err)}') @@ -1213,14 +1237,27 @@ async def deliver_trade_events( # so we need some further filtering logic here.. 
# for most cases the 'status' block above should take # care of this. - # await ems_stream.send(BrokerdStatus( - # status='error', - # reqid=err['reqid'], - # reason=err['reason'], - # time_ns=time.time_ns(), - # account=accounts_def.inverse[trade.order.account], - # broker_details={'name': 'ib'}, - # )) + await ems_stream.send( + BrokerdStatus( + status='error', + reqid=reqid, + reason=reason, + time_ns=time.time_ns(), + account=acnt, + broker_details={'name': 'ib'}, + ) + ) + + canceled = BrokerdStatus( + reqid=reqid, + time_ns=time.time_ns(), # cuz why not + status='canceled', + reason=reason, + account=acnt, + broker_details={'name': 'ib'}, + ) + await ems_stream.send(canceled) + flows.add_msg(reqid, canceled.to_dict()) case 'event': From 562d027ee6adc1d8f2b09ca6a2d8d23076eee142 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Wed, 9 Aug 2023 21:43:38 -0400 Subject: [PATCH 110/116] Relay brokerd errors to client side, correctly.. Turns out we were expecting/processing `Status(resp='error')` msgs not `BrokerdError` (i guess bc latter was only really being used in initial `brokerd` msg responses and not for relay of actual provider clearing engine failures?) and the case block match / logic wasn't really correct. So this changes a few things: - always do reverse `oid` lookups from `reqid`s if possible in error msg handling case. - add a new `Error` client-dialog msg (derived from `Status`) which we now relay when `brokerd` sends a `BrokerdError` and no prior `Status` can be found (when it is we still fill in appropriate fields from the backend-error and just send back the last status msg like before). - try hard to look up the original `Order.symbol: str` for client broadcasting trying first using any `Status.req` and failing over to embedded `.brokerd_msg` field lookups. - drop the `Status.name = 'error'` from literal def. --- piker/clearing/_ems.py | 124 ++++++++++++++++++++---------------- piker/clearing/_messages.py | 49 ++++++-------- 2 files changed, 89 insertions(+), 84 deletions(-) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 9e30351e5..2cb3f9d3c 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -27,7 +27,7 @@ from decimal import Decimal from math import isnan from pprint import pformat -import time +from time import time_ns from types import ModuleType from typing import ( AsyncIterator, @@ -57,6 +57,7 @@ from ._messages import ( Order, Status, + Error, BrokerdCancel, BrokerdOrder, # BrokerdOrderAck, @@ -255,7 +256,7 @@ async def clear_dark_triggers( action=action, oid=oid, account=account, - time_ns=time.time_ns(), + time_ns=time_ns(), symbol=bfqme, price=submit_price, size=size, @@ -268,7 +269,7 @@ async def clear_dark_triggers( # fallthrough logic status = Status( oid=oid, # ems dialog id - time_ns=time.time_ns(), + time_ns=time_ns(), resp=resp, req=cmd, brokerd_msg=brokerd_msg, @@ -826,8 +827,8 @@ async def translate_and_relay_brokerd_events( # keep pps per account up to date locally in ``emsd`` mem # sym, broker = pos_msg.symbol, pos_msg.broker + # NOTE: translate to a FQME! relay.positions.setdefault( - # NOTE: translate to a FQSN! 
                (broker, pos_msg.account), {}
            )[pos_msg.symbol] = pos_msg
@@ -883,7 +884,7 @@
                     BrokerdCancel(
                         oid=oid,
                         reqid=reqid,
-                        time_ns=time.time_ns(),
+                        time_ns=time_ns(),
                         account=status_msg.req.account,
                     )
                 )
@@ -898,38 +899,63 @@
                 continue
 
             # BrokerdError
+            # TODO: figure out how this will interact with EMS clients
+            # for ex. on an error do we react with a dark orders
+            # management response, like cancelling all dark orders?
+            # This looks like a supervision policy for pending orders on
+            # some unexpected failure - something we need to think more
+            # about. In most default situations, with composed orders
+            # (ex. brackets), most brokers seem to use a oca policy.
             case {
                 'name': 'error',
                 'oid': oid,  # ems order-dialog id
                 'reqid': reqid,  # brokerd generated order-request id
             }:
-                status_msg = book._active.get(oid)
+                if (
+                    not oid
+                ):
+                    oid: str = book._ems2brokerd_ids.inverse[reqid]
+
                 msg = BrokerdError(**brokerd_msg)
-                log.error(fmsg)  # XXX make one when it's blank?
-
-                # TODO: figure out how this will interact with EMS clients
-                # for ex. on an error do we react with a dark orders
-                # management response, like cancelling all dark orders?
-                # This looks like a supervision policy for pending orders on
-                # some unexpected failure - something we need to think more
-                # about. In most default situations, with composed orders
-                # (ex. brackets), most brokers seem to use a oca policy.
-
-                # only relay to client side if we have an active
-                # ongoing dialog
-                if status_msg:
+
+                # NOTE: retrieve the last client-side response
+                # OR create an error when we have no last msg/dialog
+                # on record
+                status_msg: Status
+                if not (status_msg := book._active.get(oid)):
+                    status_msg = Error(
+                        time_ns=time_ns(),
+                        oid=oid,
+                        reqid=reqid,
+                        brokerd_msg=msg,
+                    )
+                else:
+                    # only modify last status if we have an active
+                    # ongoing dialog..
                     status_msg.resp = 'error'
                     status_msg.brokerd_msg = msg
-                    book._active[oid] = status_msg
 
-                    await router.client_broadcast(
-                        status_msg.req.symbol,
-                        status_msg,
-                    )
+                book._active[oid] = status_msg
+
+                log.error(
+                    'Translating brokerd error to status:\n'
+                    f'{fmsg}'
+                    f'{status_msg.to_dict()}'
+                )
+                if req := status_msg.req:
+                    fqme: str = req.symbol
                 else:
-                    log.error(f'Error for unknown order flow:\n{msg}')
-                    continue
+                    bdmsg: Struct = status_msg.brokerd_msg
+                    fqme: str = (
+                        bdmsg.symbol  # might be None
+                        or
+                        bdmsg.broker_details['flow']['symbol']
+                    )
+
+                await router.client_broadcast(
+                    fqme,
+                    status_msg,
+                )
 
             # BrokerdStatus
             case {
@@ -1070,7 +1096,7 @@
                     status_msg.req = order
                     assert status_msg.src  # source tag?
 
-                    oid = str(status_msg.reqid)
+                    oid: str = str(status_msg.reqid)
 
                     # attempt to avoid collisions
                     status_msg.reqid = oid
@@ -1087,38 +1113,28 @@
                         status_msg,
                     )
 
-                # don't fall through
-                continue
-
-            # brokerd error
-            case {
-                'name': 'status',
-                'status': 'error',
-            }:
-                log.error(f'Broker error:\n{fmsg}')
-                # XXX: we presume the brokerd cancels its own order
-                continue
-
             # TOO FAST ``BrokerdStatus`` that arrives
             # before the ``BrokerdAck``.
+            # NOTE XXX: sometimes there is a race with the backend (like
+            # `ib` where the pending status will be relayed *before*
+            # the ack msg, in which case we just ignore the faster
+            # pending msg and wait for our expected ack to arrive
+            # later (i.e. the first block below should enter).
case { - # XXX: sometimes there is a race with the backend (like - # `ib` where the pending stauts will be related before - # the ack, in which case we just ignore the faster - # pending msg and wait for our expected ack to arrive - # later (i.e. the first block below should enter). 'name': 'status', 'status': status, 'reqid': reqid, }: - oid = book._ems2brokerd_ids.inverse.get(reqid) - msg = f'Unhandled broker status for dialog {reqid}:\n' - if oid: - status_msg = book._active.get(oid) - # status msg may not have been set yet or popped? + msg = ( + f'Unhandled broker status for dialog {reqid}:\n' + f'{pformat(brokerd_msg)}' + ) + if ( + oid := book._ems2brokerd_ids.inverse.get(reqid) + ): # NOTE: have seen a key error here on kraken # clearable limits.. - if status_msg: + if status_msg := book._active.get(oid): msg += ( f'last status msg: {pformat(status_msg)}\n\n' f'this msg:{fmsg}\n' @@ -1214,7 +1230,7 @@ async def process_client_order_cmds( BrokerdCancel( oid=oid, reqid=reqid, - time_ns=time.time_ns(), + time_ns=time_ns(), account=order.account, ) ) @@ -1289,7 +1305,7 @@ async def process_client_order_cmds( msg = BrokerdOrder( oid=oid, # no ib support for oids... - time_ns=time.time_ns(), + time_ns=time_ns(), # if this is None, creates a new order # otherwise will modify any existing one @@ -1307,7 +1323,7 @@ async def process_client_order_cmds( oid=oid, reqid=reqid, resp='pending', - time_ns=time.time_ns(), + time_ns=time_ns(), brokerd_msg=msg, req=req, ) @@ -1424,7 +1440,7 @@ async def process_client_order_cmds( status = Status( resp=resp, oid=oid, - time_ns=time.time_ns(), + time_ns=time_ns(), req=req, src='dark', ) diff --git a/piker/clearing/_messages.py b/piker/clearing/_messages.py index 6e44969ec..51a3860c1 100644 --- a/piker/clearing/_messages.py +++ b/piker/clearing/_messages.py @@ -18,10 +18,7 @@ Clearing sub-system message and protocols. """ -# from collections import ( -# ChainMap, -# deque, -# ) +from __future__ import annotations from typing import ( Literal, ) @@ -31,28 +28,6 @@ from piker.types import Struct -# TODO: a composite for tracking msg flow on 2-legged -# dialogs. -# class Dialog(ChainMap): -# ''' -# Msg collection abstraction to easily track the state changes of -# a msg flow in one high level, query-able and immutable construct. - -# The main use case is to query data from a (long-running) -# msg-transaction-sequence - - -# ''' -# def update( -# self, -# msg, -# ) -> None: -# self.maps.insert(0, msg.to_dict()) - -# def flatten(self) -> dict: -# return dict(self) - - # TODO: ``msgspec`` stuff worth paying attention to: # - schema evolution: # https://jcristharif.com/msgspec/usage.html#schema-evolution @@ -163,6 +138,18 @@ class Status(Struct): brokerd_msg: dict = {} +class Error(Status): + resp: str = 'error' + + # TODO: allow re-wrapping from existing (last) status? + @classmethod + def from_status( + cls, + msg: Status, + ) -> Error: + ... + + # --------------- # emsd -> brokerd # --------------- @@ -226,6 +213,7 @@ class BrokerdOrderAck(Struct): # emsd id originally sent in matching request msg oid: str + # TODO: do we need this? account: str = '' name: str = 'ack' @@ -238,13 +226,14 @@ class BrokerdStatus(Struct): 'open', 'canceled', 'pending', - 'error', + # 'error', # NOTE: use `BrokerdError` 'closed', ] + name: str = 'status' + oid: str = '' # TODO: do we need this? 
     account: str | None = None,
-    name: str = 'status'
 
     filled: float = 0.0
     reason: str = ''
     remaining: float = 0.0
@@ -287,15 +276,15 @@ class BrokerdError(Struct):
     This is still a TODO thing since we're not sure how to employ it yet.
 
     '''
-    oid: str
     reason: str
 
     # TODO: drop this right?
     symbol: str | None = None
 
+    oid: str | None = None
     # if no brokerd order request was actually submitted (eg. we errored
     # at the ``pikerd`` layer) then there will be ``reqid`` allocated.
-    reqid: int | str | None = None
+    reqid: str | None = None
 
     name: str = 'error'
     broker_details: dict = {}

From f66a1f8b23fca19e63ef1b10dbecbee3fceafc9a Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 10 Aug 2023 10:31:00 -0400
Subject: [PATCH 111/116] ib: relay submission errors, allow adhoc mkt overrides

This is a tricky edge case we weren't handling prior; an example is
submitting a limit order with a price tick precision which mismatches
that supported (probably bc IB reported the wrong one..) and IB
responds immediately with an error event (via a special code..) but
doesn't include any `Trade` object(s) nor details beyond the `reqid`.
So, we have to do a little reverse EMS order lookup on our own and
ideally indicate to the requester which order failed and *why*.

To enable this we,
- create a `flows: OrderDialogs` instance and pass it to most
  order/event relay tasks, particularly ensuring we update it ASAP in
  `handle_order_requests()` such that any successful submit has an
  `Ack` recorded in the flow.
- on such errors lookup the `.symbol` / `Order` from the `flow` and
  respond back to the EMS with as many details as possible about the
  prior msg history.
- always explicitly relay `error` events which don't fall into the
  sensible filtered set and wrap in
  a `BrokerdError.broker_details['flow']: dict` snapshot for the EMS.
- in `symbols.get_mkt_info()` support adhoc lookup for `MktPair` inputs
  and when defined we re-construct with those inputs; in this case we
  do this for a first mkt: `'vtgn.nasdaq'`..
--- piker/brokers/ib/broker.py | 79 ++++++++++++++++++++-----------------
 piker/brokers/ib/symbols.py | 27 ++++++++++---
 2 files changed, 65 insertions(+), 41 deletions(-)

diff --git a/piker/brokers/ib/broker.py b/piker/brokers/ib/broker.py
index f1070a438..e3403397a 100644
--- a/piker/brokers/ib/broker.py
+++ b/piker/brokers/ib/broker.py
@@ -20,6 +20,7 @@
 """
 from __future__ import annotations
 from contextlib import ExitStack
+from collections import ChainMap
 from functools import partial
 from pprint import pformat
 import time
@@ -135,18 +136,19 @@ async def handle_order_requests(
 
         action: str = request_msg['action']
         account: str = request_msg['account']
         acct_number = accounts_def.get(account)
+        oid: str = request_msg['oid']
+
         if not acct_number:
             log.error(
                 f'An IB account number for name {account} is not found?\n'
                 'Make sure you have all TWS and GW instances running.'
             )
-            await ems_order_stream.send(
-                BrokerdError(
-                    oid=request_msg['oid'],
-                    symbol=request_msg['symbol'],
-                    reason=f'No account found: `{account}` ?',
-                )
+            err_msg = BrokerdError(
+                oid=oid,
+                symbol=request_msg['symbol'],
+                reason=f'No account found: `{account}` ?',
             )
+            await ems_order_stream.send(err_msg)
             continue
 
         client = _accounts2clients.get(account)
@@ -155,11 +157,12 @@
                 f'An IB client for account name {account} is not found.\n'
                 'Make sure you have all TWS and GW instances running.'
             )
-            await ems_order_stream.send(BrokerdError(
-                oid=request_msg['oid'],
+            err_msg = BrokerdError(
+                oid=oid,
                 symbol=request_msg['symbol'],
                 reason=f'No api client loaded for account: `{account}` ?',
-            ))
+            )
+            await ems_order_stream.send(err_msg)
             continue
 
         if action in {'buy', 'sell'}:
@@ -185,23 +188,26 @@
                     account=acct_number,
                     reqid=reqid,
                 )
+                str_reqid: str = str(reqid)
                 if reqid is None:
-                    await ems_order_stream.send(BrokerdError(
-                        oid=request_msg['oid'],
+                    err_msg = BrokerdError(
+                        oid=oid,
                         symbol=request_msg['symbol'],
                         reason='Order already active?',
-                    ))
+                    )
+                    await ems_order_stream.send(err_msg)
 
                 # deliver ack that order has been submitted to broker routing
                 ack = BrokerdOrderAck(
                     # ems order request id
                     oid=order.oid,
                     # broker specific request id
-                    reqid=reqid,
+                    reqid=str_reqid,
                     account=account,
                 )
                 await ems_order_stream.send(ack)
-                flows.add_msg(reqid, ack.to_dict())
+
+                flows.add_msg(str_reqid, order.to_dict())
+                flows.add_msg(str_reqid, ack.to_dict())
 
         elif action == 'cancel':
             msg = BrokerdCancel(**request_msg)
@@ -441,7 +447,7 @@ async def aggr_open_orders(
 
         deats = await proxy.con_deats(contracts=[con])
         fqme = list(deats)[0]
-        reqid = order.orderId
+        reqid: str = str(order.orderId)
 
         # TODO: maybe embed a ``BrokerdOrder`` instead
         # since then we can directly load it on the client
@@ -449,7 +455,7 @@
         msg = Status(
             time_ns=time.time_ns(),
             resp='open',
-            oid=str(reqid),
+            oid=reqid,
             reqid=reqid,
 
             # embedded order info
@@ -1213,6 +1219,8 @@
             # `Client.inline_errors()::push_err()`
             err: dict = item
 
+            # never relay errors for non-broker related issues
+            # https://interactivebrokers.github.io/tws-api/message_codes.html
             code: int = err['error_code']
             if code in {
                 200,  # uhh
 
                 # hist pacing / connectivity
                 162,
                 165,
 
+                # WARNING codes:
+                # https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes
+                # Attribute 'Outside Regular Trading Hours' is
+                # " 'ignored based on the order type and
+                # destination. PlaceOrder is now ' 'being
+                # processed.',
+                2109,
+
+                # XXX: lol this isn't even documented..
                 # 'No market data during competing live session'
                 1669,
             }:
                 continue
 
-            reqid: str = err['reqid']
-            acnt: str = flows.get(reqid)['account']
+            reqid: str = str(err['reqid'])
             reason: str = err['reason']
 
             if err['reqid'] == -1:
                 log.error(f'TWS external order error:\n{pformat(err)}')
 
+            flow: ChainMap = flows.get(reqid)
+
             # TODO: we don't want to relay data feed / lookup errors
             # so we need some further filtering logic here..
             # for most cases the 'status' block above should take
             # care of this.
-            await ems_stream.send(
-                BrokerdStatus(
-                    status='error',
-                    reqid=reqid,
-                    reason=reason,
-                    time_ns=time.time_ns(),
-                    account=acnt,
-                    broker_details={'name': 'ib'},
-                )
-            )
-
-            canceled = BrokerdStatus(
+            err_msg = BrokerdError(
                 reqid=reqid,
-                time_ns=time.time_ns(),  # cuz why not
-                status='canceled',
                 reason=reason,
-                account=acnt,
-                broker_details={'name': 'ib'},
+                broker_details={
+                    'name': 'ib',
+                    'flow': dict(flow),
+                },
             )
-            await ems_stream.send(canceled)
-            flows.add_msg(reqid, canceled.to_dict())
+            flows.add_msg(reqid, err_msg.to_dict())
+            await ems_stream.send(err_msg)
 
         case 'event':

diff --git a/piker/brokers/ib/symbols.py b/piker/brokers/ib/symbols.py
index c64924146..31cf74a04 100644
--- a/piker/brokers/ib/symbols.py
+++ b/piker/brokers/ib/symbols.py
@@ -132,6 +132,12 @@
 ).split(' ,')
 )
 
+# manually discovered tick discrepancies,
+# only god knows how or why they'd cuck these up.. 
+_adhoc_mkt_infos: dict[int | str, dict] = {
+    'vtgn.nasdaq': {'price_tick': Decimal('0.01')},
+}
+
 # map of symbols to contract ids
 _adhoc_symbol_map = {
 
@@ -511,6 +517,7 @@ async def get_mkt_info(
 
     venue = con.primaryExchange or con.exchange
     price_tick: Decimal = Decimal(str(details.minTick))
+    # price_tick: Decimal = Decimal('0.01')
 
     if atype == 'stock':
         # XXX: GRRRR they don't support fractional share sizes for
@@ -541,14 +548,15 @@
         atype='fiat',
         tx_tick=Decimal('0.01'),  # right?
     )
+    dst = Asset(
+        name=con.symbol.lower(),
+        atype=atype,
+        tx_tick=size_tick,
+    )
 
     mkt = MktPair(
-        dst=Asset(
-            name=con.symbol.lower(),
-            atype=atype,
-            tx_tick=size_tick,
-        ),
         src=src,
+        dst=dst,
 
         price_tick=price_tick,
         size_tick=size_tick,
@@ -563,6 +571,15 @@
         _fqme_without_src=(atype != 'fiat'),
     )
 
+    # just.. wow.
+    if entry := _adhoc_mkt_infos.get(mkt.bs_fqme):
+        log.warning(f'Frickin {mkt.fqme} has an adhoc {entry}..')
+        new = mkt.to_dict()
+        new['price_tick'] = entry['price_tick']
+        new['src'] = src
+        new['dst'] = dst
+        mkt = MktPair(**new)
+
     # if possible register the bs_mktid to the just-built
     # mkt so that it can be retreived by order mode tasks later.
    # TODO NOTE: this is going to be problematic if/when we split

From 78178c2fb7d024205e4928e449b55cc9e24000ba Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 10 Aug 2023 12:41:53 -0400
Subject: [PATCH 112/116] Add example mtr prober from `mtrpacket`

Started rejigging the example code from this upstream example to use
more modern `asyncio` APIs:
https://github.com/matt-kimball/mtr-packet-python/blob/master/examples/trace-concurrent.py

Relates to #330
--- scripts/mtr.py | 196 +++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 196 insertions(+)
 create mode 100644 scripts/mtr.py

diff --git a/scripts/mtr.py b/scripts/mtr.py
new file mode 100644
index 000000000..be926d1eb
--- /dev/null
+++ b/scripts/mtr.py
@@ -0,0 +1,196 @@
+import asyncio
+import curses
+import itertools
+import sys
+
+import mtrpacket
+
+#
+# ProbeRecord keeps a record of round-trip times of probes and responder
+# IP addresses, for a particular time-to-live (TTL) value.
+#
+# There may be multiple IP addresses for one particular TTL value,
+# because some networks have multiple equally weighted routes.
+#
+class ProbeRecord:
+    def __init__(self, ttl):
+        self.ttl = ttl
+        self.success = False
+        self.ip_addrs = []
+        self.probe_times = []
+
+    # Format the information about this line for display
+    def print(self, screen):
+        line = '{:>2}. '.format(self.ttl)
+
+        if self.ip_addrs:
+            line += '{:42}'.format(self.ip_addrs[0])
+        else:
+            line += '{:42}'.format(' ???')
+
+        for time in self.probe_times:
+            if time is None:
+                line += ' *'
+            else:
+                line += ' {:>7.3f}ms'.format(time)
+
+        # Use curses to display the line
+        screen.addstr(line + '\n')
+
+        # List IP addresses beyond the first
+        for addr in self.ip_addrs[1:]:
+            screen.addstr(' ' + addr + '\n')
+
+
+# When we've got a result for one of our probes, we'll regenerate
+# the screen output, and allow curses to refresh it. 
+def redraw(screen, hostname, all_records): + screen.erase() + + screen.addstr('Tracing to "{}"\n\n'.format(hostname)) + + for record in all_records: + record.print(screen) + + # If one of our probes has arrived at the destination IP, + # we don't need to display further hops + if record.success: + break + + # screen.addstr('\n(press SPACEBAR to exit)\n') + + screen.refresh() + + +# Perform multiple probes with a specific time to live (TTL) value +async def probe_ttl( + mtr, + hostname, + ttl, + record, + redraw_callback, +): + for _ in itertools.count(): + result = await mtr.probe(hostname, ttl=ttl, timeout=6) + + if result.success: + record.success = True + + # Record the time of the latest probe + record.probe_times.append(result.time_ms) + + addr = result.responder + # If the address of the responder isn't already in the list + # of addresses responding at this TTL, add it + if addr and addr not in record.ip_addrs: + record.ip_addrs.append(addr) + + # Redraw the display, which will include this probe + redraw_callback() + + # Wait a small amount of time before sending the next probe + # to get an independent sample of network conditions + await asyncio.sleep(0.1) + + +# Launch all the probes for the trace. +# We'll use a separate coroutine (probe_ttl) for each ttl value, +# and those coroutines will run concurrently. +async def launch_probes(screen, hostname): + all_records = [] + + # When one of the probes has a result to display, we'll use + # this callback to display it + def redraw_hops(): + redraw(screen, hostname, all_records) + + async with mtrpacket.MtrPacket() as mtr: + probe_tasks = [] + + try: + for ttl in range(1, 32): + # We need a new ProbeRecord for each ttl value + record = ProbeRecord(ttl) + all_records.append(record) + + # Start a new asyncio task for this probe + probe_coro = probe_ttl( + mtr, + hostname, + ttl, + record, + redraw_hops, + ) + probe_tasks.append(asyncio.ensure_future(probe_coro)) + + # Give each probe a slight delay to avoid flooding + # the network interface, which might perturb the + # results + await asyncio.sleep(0.05) + + await asyncio.gather(*probe_tasks) + finally: + # We may have been cancelled, so we should cancel + # the probe tasks we started to clean up + for task in probe_tasks: + task.cancel() + + +# Wait until a SPACE character to be read on stdin. +# Afterward, cancel the probe task so we can exit +# async def wait_for_spacebar(probe_task): +# exit_event = asyncio.Event() + +# def read_callback(): +# # Read a single character +# # If we tried to read more, we may block other tasks +# key = sys.stdin.read(1) +# if key == ' ': +# exit_event.set() + +# loop = asyncio.get_event_loop() +# loop.add_reader(sys.stdin, read_callback) +# await exit_event.wait() +# loop.remove_reader(sys.stdin) + +# # After spacebar is pressed, stop sending probes +# probe_task.cancel() + + +# The main asynchronous routine, running within the asyncio event loop +async def main_task(hostname): + screen = curses.initscr() + try: + probe_task = asyncio.ensure_future( + launch_probes(screen, hostname) + ) + # spacebar_task = asyncio.ensure_future(wait_for_spacebar(probe_task)) + + try: + # await probe_task + await asyncio.gather(probe_task) + except asyncio.CancelledError: + # It is normal for probe_task to be cancelled by + # the spacebar task + pass + finally: + # We need to clean up by cancelling if gather has returned + # early, perhaps due to an exception raised in one of + # our tasks. 
+            probe_task.cancel()
+            # spacebar_task.cancel()
+    finally:
+        curses.endwin()
+
+
+# Get the hostname to trace to on the commandline
+if __name__ == '__main__':
+
+    host: str = '192.168.1.68'
+    if not len(sys.argv) > 1:
+        print('Usage: python3 trace-concurrent.py <hostname>')
+        sys.exit(1)
+    else:
+        host: str = sys.argv[1]
+
+    out = asyncio.run(main_task(host))

From 077d9bf1d296b8bfca46a377fb5a0d2f44bb85a9 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 10 Aug 2023 12:41:53 -0400
Subject: [PATCH 113/116] Better commenting around order-mode error block

--- piker/ui/order_mode.py | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/piker/ui/order_mode.py b/piker/ui/order_mode.py
index 14d8b8210..a64bb7180 100644
--- a/piker/ui/order_mode.py
+++ b/piker/ui/order_mode.py
@@ -1088,9 +1088,16 @@ async def process_trade_msg(
             mode.on_submit(oid)
 
         case Status(resp='error'):
-            # delete level line from view
+
+            # do all the things for a cancel:
+            # - drop order-msg dialog from client table
+            # - delete level line from view
             mode.on_cancel(oid)
-            broker_msg = msg.brokerd_msg
+
+            # TODO: parse into broker-side msg, or should we
+            # expect it to just be **that** msg verbatim (since
+            # we'd presumably have only 1 `Error` msg-struct)
+            broker_msg: dict = msg.brokerd_msg
             log.error(
                 f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
            )
@@ -1111,8 +1118,12 @@
 
         case Status(
             resp='triggered',
-            # req=Order(exec_mode='live', action='alert') as req, # TODO
-            req={'exec_mode': 'live', 'action': 'alert'} as req,
+            # TODO: do the struct-msg version, blah blah..
+            # req=Order(exec_mode='live', action='alert') as req,
+            req={
+                'exec_mode': 'live',
+                'action': 'alert',
+            } as req,
         ):
             # should only be one "fill" for an alert
             # add a triangle and remove the level line

From c5ed6e6ac490f436fca46f0fdebf99937bde51a5 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Fri, 11 Aug 2023 13:34:23 -0400
Subject: [PATCH 114/116] Facepalm: remove now unused `CostModel` idea.. 
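The paper engine ended up just consuming the backend's `get_cost()` ep
as a plain `Callable` (see the cost simulation patch earlier in this
series) so a dedicated type adds nothing. A minimal sketch of the
pattern that replaced it, assuming `brokermod` is the backend module:

    from typing import Callable

    get_cost: Callable | None = getattr(brokermod, 'get_cost', None)
    cost: float = get_cost(price, size) if get_cost else 0.0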
--- piker/accounting/__init__.py | 2 -- piker/clearing/_paper_engine.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/piker/accounting/__init__.py b/piker/accounting/__init__.py index e7ff16f1a..e27dc4bfc 100644 --- a/piker/accounting/__init__.py +++ b/piker/accounting/__init__.py @@ -50,7 +50,6 @@ mk_allocator, Allocator, ) -from .models import CostModel log = get_logger(__name__) @@ -59,7 +58,6 @@ 'Account', 'Allocator', 'Asset', - 'CostModel', 'MktPair', 'Position', 'Symbol', diff --git a/piker/clearing/_paper_engine.py b/piker/clearing/_paper_engine.py index f1c1a3d91..6fe5fe67f 100644 --- a/piker/clearing/_paper_engine.py +++ b/piker/clearing/_paper_engine.py @@ -41,7 +41,6 @@ from piker.brokers import get_brokermod from piker.accounting import ( Account, - # CostModel, MktPair, Position, Transaction, @@ -790,7 +789,6 @@ def norm_trade( pairs: dict[str, Struct], symcache: SymbologyCache | None = None, - # fees: CostModel | None = None, brokermod: ModuleType | None = None, ) -> Transaction: From 4aa04e1c8e85596e3b67b5a2f846a59e1346d71e Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Fri, 11 Aug 2023 14:52:10 -0400 Subject: [PATCH 115/116] Add note about broadcast when no `.symbol` found --- piker/clearing/_ems.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/piker/clearing/_ems.py b/piker/clearing/_ems.py index 2cb3f9d3c..501e0dd83 100644 --- a/piker/clearing/_ems.py +++ b/piker/clearing/_ems.py @@ -949,7 +949,10 @@ async def translate_and_relay_brokerd_events( fqme: str = ( bdmsg.symbol # might be None or - bdmsg.broker_details['flow']['symbol'] + bdmsg.broker_details['flow'] + # NOTE: what happens in empty case in the + # broadcast below? it's a problem? + .get('symbol', '') ) await router.client_broadcast( From 6e8d07852cd806f3aba01600ac2ca74d9fb2ec39 Mon Sep 17 00:00:00 2001 From: Tyler Goodlet Date: Mon, 14 Aug 2023 11:36:34 -0400 Subject: [PATCH 116/116] Pkg with `poetry`, `poetry2nix` and a `flake.nix` --- flake.nix | 85 +++ poetry.lock | 1521 ++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 52 ++ 3 files changed, 1658 insertions(+) create mode 100644 flake.nix create mode 100644 poetry.lock create mode 100644 pyproject.toml diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000..26bdcd020 --- /dev/null +++ b/flake.nix @@ -0,0 +1,85 @@ +# NOTE: to convert to a poetry2nix env like this here are the +# steps: +# - install poetry in your system nix config +# - convert the repo to use poetry using `poetry init`: +# https://python-poetry.org/docs/basic-usage/#initialising-a-pre-existing-project +# - then manually ensuring all deps are converted over: +{ + description = "piker: trading gear for hackers (pkged with poetry2nix)"; + + inputs.flake-utils.url = "github:numtide/flake-utils"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + inputs.poetry2nix = { + url = "github:nix-community/poetry2nix"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + + outputs = { + self, + nixpkgs, + flake-utils, + poetry2nix, + }: + flake-utils.lib.eachDefaultSystem (system: + let + # use PWD as sources + projectDir = ./.; + pyproject = ./pyproject.toml; + poetrylock = ./poetry.lock; + + # TODO: port to 3.11 and support both versions? + python = "python3.10"; + + # see https://github.com/nix-community/poetry2nix/tree/master#api + # for more functions and examples. 
+ # inherit + # (poetry2nix.legacyPackages.${system}) + # mkPoetryApplication; + # pkgs = nixpkgs.legacyPackages.${system}; + pkgs = nixpkgs.legacyPackages.x86_64-linux; + + in + { + # let + # devEnv = poetry2nix.mkPoetryEnv { + # projectDir = ./.; + # }; + + packages = { + piker = poetry2nix.mkPoetryEditablePackage { + # env = poetry2nix.mkPoetryEnv { + + # NOTE: taken from surrounding inputs + # projectDir = projectDir; + editablePackageSources = { piker = ./piker; }; + + # override msgspec to include setuptools as input + # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md#modulenotfounderror-no-module-named-packagenamed + overrides = poetry2nix.defaultPoetryOverrides.extend + (self: super: { + msgspec = super.msgspec.overridePythonAttrs + ( + old: { + buildInputs = (old.buildInputs or [ ]) ++ [ super.setuptools ]; + } + ); + } + ); + + }; + }; + + + # boot xonsh inside the poetry virtualenv when + # define the custom entry point via an expected + # output-attr that `nix-develop` scans for: + # https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-develop.html#flake-output-attributes + devShells.default = pkgs.mkShell { + # packages = [ poetry2nix.packages.${system}.poetry ]; + packages = [ poetry2nix.packages.x86_64-linux.poetry ]; + shellHook = "poetry run xonsh"; + # shellHook = "poetry shell"; + }; + } + ); +} diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 000000000..93e620164 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1521 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "anyio" +version = "3.7.1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, +] + +[package.dependencies] +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] + +[[package]] +name = "asks" +version = "3.0.0" +description = "asks - async http" +optional = false +python-versions = ">= 3.6.2" +files = [ + {file = "asks-3.0.0.tar.gz", hash = "sha256:e4d61dc8362594c81cfe0198eee9f3a208eef1c0023355d711566a6dcf65e986"}, +] + +[package.dependencies] +anyio = ">=3.0,<4.0" +async_generator = "*" +h11 = "*" + +[[package]] +name = "async-generator" +version = "1.10" +description = "Async generators and context managers for Python 3.5+" +optional = false +python-versions = ">=3.5" +files = [ + {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, + {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, +] + +[[package]] +name = "asyncvnc" +version = "1.1.0" +description = "" +optional = false +python-versions = ">= 3.7" +files = [] +develop = false + +[package.dependencies] +cryptography = "*" +keysymdef = "*" +numpy = "*" + 
+[package.source] +type = "git" +url = "https://github.com/pikers/asyncvnc.git" +reference = "main" +resolved_reference = "825447564e3af6b0d4a0996793f1ca7fb360c48f" + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "bidict" +version = "0.22.1" +description = "The bidirectional mapping library for Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "bidict-0.22.1-py3-none-any.whl", hash = "sha256:6ef212238eb884b664f28da76f33f1d28b260f665fc737b413b287d5487d1e7b"}, + {file = "bidict-0.22.1.tar.gz", hash = "sha256:1e0f7f74e4860e6d0943a05d4134c63a2fad86f3d4732fb265bd79e4e856d81d"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton"] +lint = ["pre-commit"] +test = ["hypothesis", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-xdist", "sortedcollections", "sortedcontainers", "sphinx"] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = 
"sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "click" +version = "8.1.6" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.7.0" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"}, + {file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "cryptography" +version = "41.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, + {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, + {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, + {file = "cryptography-41.0.3.tar.gz", hash = 
"sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cython" +version = "3.0.0" +description = "The Cython compiler for writing C extensions in the Python language." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Cython-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c7d728e1a49ad01d41181e3a9ea80b8d14e825f4679e4dd837cbf7bca7998a5"}, + {file = "Cython-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:626a4a6ef4b7ced87c348ea805488e4bd39dad9d0b39659aa9e1040b62bbfedf"}, + {file = "Cython-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c900d1ca9f622b969ac7d8fc44bdae140a4a6c7d8819413b51f3ccd0586a09"}, + {file = "Cython-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a65bc50dc1bc2faeafd9425defbdef6a468974f5c4192497ff7f14adccfdcd32"}, + {file = "Cython-3.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b71b399b10b038b056ad12dce1e317a8aa7a96e99de7e4fa2fa5d1c9415cfb9"}, + {file = "Cython-3.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f42f304c097cc53e9eb5f1a1d150380353d5018a3191f1b77f0de353c762181e"}, + {file = "Cython-3.0.0-cp310-cp310-win32.whl", hash = "sha256:3e234e2549e808d9259fdb23ebcfd145be30c638c65118326ec33a8d29248dc2"}, + {file = "Cython-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:829c8333195100448a23863cf64a07e1334fae6a275aefe871458937911531b6"}, + {file = "Cython-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06db81b1a01858fcc406616f8528e686ffb6cf7c3d78fb83767832bfecea8ad8"}, + {file = "Cython-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c93634845238645ce7abf63a56b1c5b6248189005c7caff898fd4a0dac1c5e1e"}, + {file = "Cython-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa606675c6bd23478b1d174e2a84e3c5a2c660968f97dc455afe0fae198f9d3d"}, + {file = "Cython-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3355e6f690184f984eeb108b0f5bbc4bcf8b9444f8168933acf79603abf7baf"}, + {file = "Cython-3.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93a34e1ca8afa4b7075b02ed14a7e4969256297029fb1bfd4cbe48f7290dbcff"}, + {file = "Cython-3.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb1165ca9e78823f9ad1efa5b3d83156f868eabd679a615d140a3021bb92cd65"}, + {file = "Cython-3.0.0-cp311-cp311-win32.whl", hash = "sha256:2fadde1da055944f5e1e17625055f54ddd11f451889110278ef30e07bd5e1695"}, + {file = "Cython-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:254ed1f03a6c237fa64f0c6e44862058de65bfa2e6a3b48ca3c205492e0653aa"}, + {file = "Cython-3.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4e212237b7531759befb92699c452cd65074a78051ae4ee36ff8b237395ecf3d"}, + {file = "Cython-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9f29307463eba53747b31f71394ed087e3e3e264dcc433e62de1d51f5c0c966c"}, + {file = "Cython-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53328a8af0806bebbdb48a4191883b11ee9d9dfb084d84f58fa5a8ab58baefc9"}, + {file = "Cython-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5962e70b15e863e72bed6910e8c6ffef77d36cc98e2b31c474378f3b9e49b0e3"}, + {file = "Cython-3.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9e69139f4e60ab14c50767a568612ea64d6907e9c8e0289590a170eb495e005f"}, + {file = "Cython-3.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c40bdbcb2286f0aeeb5df9ce53d45da2d2a9b36a16b331cd0809d212d22a8fc7"}, + {file = "Cython-3.0.0-cp312-cp312-win32.whl", hash = "sha256:8abb8915eb2e57fa53d918afe641c05d1bcc6ed1913682ec1f28de71f4e3f398"}, + {file = "Cython-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:30a4bd2481e59bd7ab2539f835b78edc19fc455811e476916f56026b93afd28b"}, + {file = "Cython-3.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0e1e4b7e4bfbf22fecfa5b852f0e499c442d4853b7ebd33ae37cdec9826ed5d8"}, + {file = "Cython-3.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b00df42cdd1a285a64491ba23de08ab14169d3257c840428d40eb7e8e9979af"}, + {file = "Cython-3.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:650d03ddddc08b051b4659778733f0f173ca7d327415755c05d265a6c1ba02fb"}, + {file = "Cython-3.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4965f2ebade17166f21a508d66dd60d2a0b3a3b90abe3f72003baa17ae020dd6"}, + {file = "Cython-3.0.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4123c8d03167803df31da6b39de167cb9c04ac0aa4e35d4e5aa9d08ad511b84d"}, + {file = "Cython-3.0.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:296c53b6c0030cf82987eef163444e8d7631cc139d995f9d58679d9fd1ddbf31"}, + {file = "Cython-3.0.0-cp36-cp36m-win32.whl", hash = "sha256:0d2c1e172f1c81bafcca703093608e10dc16e3e2d24c5644c17606c7fdb1792c"}, + {file = "Cython-3.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:bc816d8eb3686d6f8d165f4156bac18c1147e1035dc28a76742d0b7fb5b7c032"}, + {file = "Cython-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8d86651347bbdbac1aca1824696c5e4c0a3b162946c422edcca2be12a03744d1"}, + {file = "Cython-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84176bd04ce9f3cc8799b47ec6d1959fa1ea5e71424507df7bbf0b0915bbedef"}, + {file = "Cython-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35abcf07b8277ec95bbe49a07b5c8760a2d941942ccfe759a94c8d2fe5602e9f"}, + {file = "Cython-3.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a44d6b9a29b2bff38bb648577b2fcf6a68cf8b1783eee89c2eb749f69494b98d"}, + {file = "Cython-3.0.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4dc6bbe7cf079db37f1ebb9b0f10d0d7f29e293bb8688e92d50b5ea7a91d82f3"}, + {file = "Cython-3.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e28763e75e380b8be62b02266a7995a781997c97c119efbdccb8fb954bcd7574"}, + {file = "Cython-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:edae615cb4af51d5173e76ba9aea212424d025c57012e9cdf2f131f774c5ba71"}, + {file = "Cython-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:20c604e974832aaf8b7a1f5455ee7274b34df62a35ee095cd7d2ed7e818e6c53"}, + {file = "Cython-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:c85fd2b1cbd9400d60ebe074795bb9a9188752f1612be3b35b0831a24879b91f"}, + {file = "Cython-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:090256c687106932339f87f888b95f0d69c617bc9b18801555545b695d29d8ab"}, + {file = "Cython-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec2a67a0a7d9d4399758c0657ca03e5912e37218859cfbf046242cc532bfb3b"}, + {file = "Cython-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1cdd01ce45333bc264a218c6e183700d6b998f029233f586a53c9b13455c2d2"}, + {file = "Cython-3.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecee663d2d50ca939fc5db81f2f8a219c2417b4651ad84254c50a03a9cb1aadd"}, + {file = "Cython-3.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30f10e79393b411af7677c270ea69807acb9fc30205c8ff25561f4deef780ec1"}, + {file = "Cython-3.0.0-cp38-cp38-win32.whl", hash = "sha256:609777d3a7a0a23b225e84d967af4ad2485c8bdfcacef8037cf197e87d431ca0"}, + {file = "Cython-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f4a6dfd42ae0a45797f50fc4f6add702abf46ab3e7cd61811a6c6a97a40e1a2"}, + {file = "Cython-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2d8158277c8942c0b20ff4c074fe6a51c5b89e6ac60cef606818de8c92773596"}, + {file = "Cython-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54e34f99b2a8c1e11478541b2822e6408c132b98b6b8f5ed89411e5e906631ea"}, + {file = "Cython-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877d1c8745df59dd2061a0636c602729e9533ba13f13aa73a498f68662e1cbde"}, + {file = "Cython-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204690be60f0ff32eb70b04f28ef0d1e50ffd7b3f77ba06a7dc2389ee3b848e0"}, + {file = "Cython-3.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06fcb4628ccce2ba5abc8630adbeaf4016f63a359b4c6c3827b2d80e0673981c"}, + {file = "Cython-3.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:090e24cfa31c926d0b13d8bb2ef48175acdd061ae1413343c94a2b12a4a4fa6f"}, + {file = "Cython-3.0.0-cp39-cp39-win32.whl", hash = "sha256:4cd00f2158dc00f7f93a92444d0f663eda124c9c29bbbd658964f4e89c357fe8"}, + {file = "Cython-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:5b4cc896d49ce2bae8d6a030f9a4c64965b59c38acfbf4617685e17f7fcf1731"}, + {file = "Cython-3.0.0-py2.py3-none-any.whl", hash = "sha256:ff1aef1a03cfe293237c7a86ae9625b0411b2df30c53d1a7f29a8d381f38a1df"}, + {file = "Cython-3.0.0.tar.gz", hash = "sha256:350b18f9673e63101dbbfcf774ee2f57c20ac4636d255741d76ca79016b1bd82"}, +] + +[[package]] +name = "eventkit" +version = "1.0.0" +description = "Event-driven data pipelines" +optional = false +python-versions = "*" +files = [ + {file = "eventkit-1.0.0-py3-none-any.whl", hash = "sha256:c3c1ae6e15cda9970c3996b0aaeda48431fc6b8674c01e7a7ff77a13629cc021"}, + {file = "eventkit-1.0.0.tar.gz", hash = "sha256:c9c4bb8a9685e4131e845882512a630d6a57acee148f38af286562a76873e4a9"}, +] + +[package.dependencies] +numpy = "*" + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + 
+[[package]] +name = "fuzzywuzzy" +version = "0.18.0" +description = "Fuzzy string matching in python" +optional = false +python-versions = "*" +files = [ + {file = "fuzzywuzzy-0.18.0-py2.py3-none-any.whl", hash = "sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993"}, + {file = "fuzzywuzzy-0.18.0.tar.gz", hash = "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8"}, +] + +[package.dependencies] +python-levenshtein = {version = ">=0.12", optional = true, markers = "extra == \"speedup\""} + +[package.extras] +speedup = ["python-levenshtein (>=0.12)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "ib-insync" +version = "0.9.86" +description = "Python sync/async framework for Interactive Brokers API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ib_insync-0.9.86-py3-none-any.whl", hash = "sha256:a61fbe56ff405d93d211dad8238d7300de76dd6399eafc04c320470edec9a4a4"}, + {file = "ib_insync-0.9.86.tar.gz", hash = "sha256:73af602ca2463f260999970c5bd937b1c4325e383686eff301743a4de08d381e"}, +] + +[package.dependencies] +eventkit = "*" +nest-asyncio = "*" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "keysymdef" +version = "1.2.0" +description = "X11 keysym data for Python" +optional = false +python-versions = "*" +files = [ + {file = "keysymdef-1.2.0-py2.py3-none-any.whl", hash = "sha256:19a5c2263a861f3ff884a1f58e2b4f7efa319ffc9d11f9ba8e20129babc31a9e"}, +] + +[[package]] +name = "levenshtein" +version = "0.21.1" +description = "Python extension for computing string edit distances and similarities." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "Levenshtein-0.21.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:59e5054c9dea821840af4623a4059c8f0ae56548a5eae8b9c7aaa0b3f1e33340"}, + {file = "Levenshtein-0.21.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:11694c6f7119d68cc199ff3b1407560c0efb0cc49f288169f28b2e032ee03cda"}, + {file = "Levenshtein-0.21.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5f7ce639bea0f5e95a1f71963624b85521a39928a2a1bb0e66f6180facf5969"}, + {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39e8a1866325b6d54de4e7d1bffffaf4b4c8cbf0988f47f0f2e929edfbeb870d"}, + {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed73d619e203aad54e2e6119a2b58b7568a36bd50a547817d13618ea0acf4412"}, + {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50fbe01be99554f644657c32a9e3085369d23e8ccc540d855c683947d3b48b67"}, + {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675ba3afaa9e8ec393eb1eeee651697036e8391be54e6c28eae4bfdff4d5e64e"}, + {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c89a5ac319a80c131ca8d499ae0f7a91d4dd1dc3b2e9d8b095e991597b79c8f9"}, + {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f9e3a5f4386c8f1811153f309a0ba3dc47d17e81a6dd29aa22d3e10212a2fd73"}, + {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ea042ba262ea2a95d93c4d2d5879df956cf6c85ce22c037e3f0d4491182f10c5"}, + {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:622bc670b906c4bf219755625e9fa704ff07c561a90f1aa35f3f2d8ecd3ec088"}, + {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:f0e51ff6d5665884b0e39b4ae0ef4e2d2d0174147147db7a870ddc4123882212"}, + {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cc8eb12c48598b20b4b99128bc2bd62974dfb65204ceb37807480788b1e66e64"}, + {file = "Levenshtein-0.21.1-cp310-cp310-win32.whl", hash = "sha256:04d338c9153ddf70a32f324cf9f902fe94a6da82122b8037ccde969d4cc0a94b"}, + {file = "Levenshtein-0.21.1-cp310-cp310-win_amd64.whl", hash = "sha256:5a10fc3be2bfb05b03b868d462941e4099b680b7f358a90b8c6d7d5946e9e97c"}, + {file = "Levenshtein-0.21.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:938581ba87b306675bc41e21c2b2822a9eb83fb1a0e4a4903b7398d7845b22e3"}, + {file = "Levenshtein-0.21.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06da6c47aa459c725ee90dab467cd2f66956c5f9a43ddb51a0fe2496960f1d3e"}, + {file = "Levenshtein-0.21.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eea308d98c64dbea48ac351011c4adf66acd936c4de2bf9955826ba8435197e2"}, + {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51974fcb8a94284325cb88b474b76227532a25b035938a46167bebd1646718e"}, + {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87edb05fc6e4eb14008433f02e89815a756fe4ecc32d7180bb757f26e4161e06"}, + {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aee4f570652ad77961e5ab871d11fd42752e7d2117b08324a0c8801a7ee0a7c5"}, + {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:43a06b0b492e0d936deff751ad4757786ba7cb5eee510d53b6dfe92c924ff733"}, + {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:952e72f173a65f271dfee102b5571004b6594d4f199864ddead77115a2c147fd"}, + {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3f855669e1399597f7a2670310cf20fc04a35c6c446dd70320398e9aa481b3d"}, + {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ca992783feaf1d6e25403340157fb584cf71371b094a575134393bba10b974fa"}, + {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:20361f42f6e7efa5853f69a41a272e9ecb90da284bec4312e42b58fa42b9a752"}, + {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9bcb3abbe97975cc6a97baf24a3b6e0491472ecedbc0247a41eb2c8d73ecde5d"}, + {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72b0b84adc52f4cf970a1bb276e76e115b30d693d6dbcd25fca0bcee85ca7cc7"}, + {file = "Levenshtein-0.21.1-cp311-cp311-win32.whl", hash = "sha256:4217ae380f42f825862eb8e2f9beca627fe9ab613f36e206842c003bb1affafc"}, + {file = "Levenshtein-0.21.1-cp311-cp311-win_amd64.whl", hash = "sha256:12bb3540e021c73c5d8796ecf8148afd441c4471731924a112bc31bc25abeabf"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a0fa251b3b4c561d2f650d9a61fb8980815492bb088a0a521236995a1872e171"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4bf11b89d8d7a7707ae5cac1ef86ac4ff78491482df037289470db8f0378043"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91dca7085aa358da71fa50682fc8ff7e21365c99ef17dc1962a7bbf488003528"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f187f0929a35b6ddabc1324161e8c73ddbd4a7747249f10ec9ceaa793e904f"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d997da10fdf1a82e208fd1b05aba40705ca3f053919c84d2e952141d33e3ab3"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed8f99e4e4ba8a43bb4fe0255606724f22069405fa1e3be679a2d90f74770e5"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5acb7e84ccd619dcff6e04928fa8d8cc24f55bb2c9cdfe96620ed85b0a82a7c7"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62dca15301bdba4ec7fcf53c39dd8d9c198194990cf035def3f47b7cb9c3213e"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:832951ad7b5ee0df8152f239a9fc602322da055264459dcf4d50d3ed68e68045"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:e8ab4d5acdd3ac17161539d9f2ea764497dc269dcd8dc722ae4a394c7b64ae7f"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3c13450450d537ec7ede3781be72d72db37cb131943148c8ada58b34e143fc6f"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-win32.whl", hash = "sha256:267ad98befffeed90e73b8c644a297027adb81f61044843aeade7b4a44ccc7d7"}, + {file = "Levenshtein-0.21.1-cp36-cp36m-win_amd64.whl", hash = "sha256:d66d8f3ebde14840a310a557c8f69eed3e153f2477747365355d058208eea515"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:78d0fb5faef0413864c1b593e5261a840eaa47842b0fa4af7be4c09d90b24a14"}, + {file = 
"Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dda976c1dae2a0b41a109facc48d1d242c7acb30ab4c04d8421496da6e153aa"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc54aeb02f38a36f16bca6b0f9d07462686d92716424d9a4a3fdd11f3624528"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:463fd7558f25c477c7e4a59af35c661e133473f62bb02ed2c07c9c95e1c2dc66"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f00495a80c5850466f0a57ea874761f78079702e28b63a1b6573ad254f828e44"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31aa08e8ddac402edd530aaf708ab085fea7299c499404989eabfde143377911"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9e96217a7c6a7d43071c830b1353a3ee669757ae477673f0fd3e3a97def6d410"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d83b8c0ce41e410af143bd3abef94e480d143fdb83e60a01bab9069bf565dada"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:309f134f3d42fa7df7efbbd7975f2331de8c36da3ebdb3fad59abae84268abba"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:267bc6725506571fd3c03afcc871fa5cbf3d2cb6e4bd11043790fa60cbb0f8a4"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4a6cd85ac5f7800e8127b3194fa02c59be735b6bdfe55b8516d094652235e038"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-win32.whl", hash = "sha256:13e87517ce788d71deaa73e37332a67c4085c13e58ea3a0218092d555d1872ce"}, + {file = "Levenshtein-0.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:918f2e0f590cacb30edb88e7eccbf71b340d5f080c9e69009f1f00dc24810a67"}, + {file = "Levenshtein-0.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d17c2ee8aa380c012b3ba015b87502934662c51b7609ef17366c76863e9551d6"}, + {file = "Levenshtein-0.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee847d3e49870e914074fd31c069a1aaba6f71bee650d41de48e7e4b11671bf0"}, + {file = "Levenshtein-0.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8d01425bd54c482ccbbc6d953633450a2bdbb7d12450d9eeba6073a6d0f06a3c"}, + {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff4f236d1b6c556a77975812a4d51071181721f3a29c08b42e5c4aa11730957"}, + {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35a603d952e9f286fe8053332862c8cff426f5d8a85ee962c3a0f597f4c463c4"}, + {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9546ded45fb3cf8773ade9c91de164c6cb2cb4927516289abd422a262e81906c"}, + {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79259b10f105f78853210d8769cf77ca55dac8c368dca33b4c10ffa8965e2543"}, + {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41e0e539638a27b5e90a5d46679375f93a1cb65cf06efe7c413cf76f71d3d467"}, + {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ccd0b89300a25decdb34d7c4efe2a971438015f552eeb416b8da12918cb3edc0"}, + {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:ef365ec78938597623d4fb96c8b0db423ab484fcfc00fae44c34b738b1eb1924"}, + {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e701b9dfb121faf71b0c5757485fc49e1b511b7b8a80034aa1f580488f8f872e"}, + {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e4c2fe1f49f1d8476fe44e243569d775c5454dca70a13be568430d2d2d760ea2"}, + {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:40a5e38d0c3e488d1dca5dc9c2691c000764813d4006c243f2ebd39e0b331e95"}, + {file = "Levenshtein-0.21.1-cp38-cp38-win32.whl", hash = "sha256:6c08879d0cf761cd750e976fda67bcc23cf1e485eaa030942e6628b876f4c6d8"}, + {file = "Levenshtein-0.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:248348e94dee05c787b44f16533a366ec5bf8ba949c604ad0db69d0c872f3539"}, + {file = "Levenshtein-0.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3824e9f75ec9f373fc8b4df23eae668918953487f5ff06db282ddcb3f9c802d2"}, + {file = "Levenshtein-0.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e2ed817fa682243ef2e8a2728fcd0f9352d4e5edd104db44862d0bb55c75a7e"}, + {file = "Levenshtein-0.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94a6ffd7257d12c64de34bc9f801a211e2daa624ec276305f8c67963a9896efa"}, + {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6833f8cefb96b8ccac457ad421866a74f4de973e7001699fcbbbe9ccb59a5c66"}, + {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8126d2b51621483823c6e31d16bc1f5a964ae976aab4f241bbe74ed19d93770"}, + {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58eaab403b77e62e096cbcbaf61728c8736f9f7a3e36a58fb663461e5d70144f"}, + {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e6d66fe0110fd8e6efb1939d686099170c27b3ca838eab0c215f0781f05f06"}, + {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5a1f28b34a15dd2d67bcc324f6661df8cfe66d6ec7ee7a64e921af8ae4c39b7"}, + {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c37609f4e460e570810ec5176c5cdf91c494a9979638f7fef5fd345597245d17"}, + {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:656c70814280c4002af89112f1457b6ad24c42dfba58dcb2047a249ae8ccdd04"}, + {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:863d507cba67de2fa66d5501ed1bc5029363d2b393662ac7d740dd0330c66aba"}, + {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9437c2342937decf3cf5ac79d0b9497734897c0a09dc813378c97f2916b7aa76"}, + {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a1cd48db3d03adb88bf71b45de77b9720f96d3b9d5ab7a32304352baec482689"}, + {file = "Levenshtein-0.21.1-cp39-cp39-win32.whl", hash = "sha256:023dffdde576639e48cab3cc835bfaf9c441df7a8e2829bf20104868db6e4f72"}, + {file = "Levenshtein-0.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:dcc712696d4332962ecab6e4df40d5126d7379c6612e6058ee2e9d3f924387e3"}, + {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9a8d60084e1c9e87ae247c601e331708de09ed23219b5e39af7c8e9115ab8152"}, + {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa6762f8ef1e7dfba101babe43de6edc541cbe64d33d816314ac67cd76c3979"}, + {file = 
"Levenshtein-0.21.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eec8a1eaaeadc217c15bc77d01bb29e146acdae73a0b2e9df1ad162263c9752e"}, + {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da0e2dbddb98da890fb779823df991ad50f184b3d986b8c68784eecbb087f01"}, + {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:edac6490f84e8a0456cb40f6729d4199311ce50ca0ea4958572e1b7ea99f546c"}, + {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b33e2cbaca6f7d01092a28711605568dbc08a3bb7b796d8986bf5d0d651a0b09"}, + {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69a430ab564d286f309c19f7abed34fce9c144f39f984c609ee690dd175cc421"}, + {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f30474b2906301594c8fb64cb7492c6978290c466a717c4b5316887a18b77af5"}, + {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9817dca597abde9fc9571d56a7eca8bd667e9dfc0867b190f1e8b43ce4fde761"}, + {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7d7e00e8cb45981386df9d3f99073ba7de59bdb739069766b32906421bb1026b"}, + {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9a072cb0f6e90092c4323cd7731eb539a79ac360045dbe3cc49a123ba381fc5"}, + {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d880a87aca186342bc2fe16b064c3ed434d2a0c170c419f23b4e00261a5340a"}, + {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f282711a220d1bdf245da508e1fefdf7680d1f7482a094e37465674a7e6985ae"}, + {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdba9f8a7a98b0c4c0bc004b811fb31a96521cd264aeb5375898478e7703de4d"}, + {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b2410469cc8fd0f42aa00e63063c42f8aff501996cd5424a5c904739bdaaf4fe"}, + {file = "Levenshtein-0.21.1.tar.gz", hash = "sha256:2e4fc4522f9bf73c6ab4cedec834783999b247312ec9e3d1435a5424ad5bc908"}, +] + +[package.dependencies] +rapidfuzz = ">=2.3.0,<4.0.0" + +[[package]] +name = "llvmlite" +version = "0.40.1" +description = "lightweight wrapper around basic LLVM functionality" +optional = false +python-versions = ">=3.8" +files = [ + {file = "llvmlite-0.40.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:84ce9b1c7a59936382ffde7871978cddcda14098e5a76d961e204523e5c372fb"}, + {file = "llvmlite-0.40.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3673c53cb21c65d2ff3704962b5958e967c6fc0bd0cff772998face199e8d87b"}, + {file = "llvmlite-0.40.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bba2747cf5b4954e945c287fe310b3fcc484e2a9d1b0c273e99eb17d103bb0e6"}, + {file = "llvmlite-0.40.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd5e82cc990e5a3e343a3bf855c26fdfe3bfae55225f00efd01c05bbda79918"}, + {file = "llvmlite-0.40.1-cp310-cp310-win32.whl", hash = "sha256:09f83ea7a54509c285f905d968184bba00fc31ebf12f2b6b1494d677bb7dde9b"}, + {file = "llvmlite-0.40.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b37297f3cbd68d14a97223a30620589d98ad1890e5040c9e5fc181063f4ed49"}, + {file = 
"llvmlite-0.40.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a66a5bd580951751b4268f4c3bddcef92682814d6bc72f3cd3bb67f335dd7097"}, + {file = "llvmlite-0.40.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:467b43836b388eaedc5a106d76761e388dbc4674b2f2237bc477c6895b15a634"}, + {file = "llvmlite-0.40.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c23edd196bd797dc3a7860799054ea3488d2824ecabc03f9135110c2e39fcbc"}, + {file = "llvmlite-0.40.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a36d9f244b6680cb90bbca66b146dabb2972f4180c64415c96f7c8a2d8b60a36"}, + {file = "llvmlite-0.40.1-cp311-cp311-win_amd64.whl", hash = "sha256:5b3076dc4e9c107d16dc15ecb7f2faf94f7736cd2d5e9f4dc06287fd672452c1"}, + {file = "llvmlite-0.40.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a7525db121f2e699809b539b5308228854ccab6693ecb01b52c44a2f5647e20"}, + {file = "llvmlite-0.40.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:84747289775d0874e506f907a4513db889471607db19b04de97d144047fec885"}, + {file = "llvmlite-0.40.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e35766e42acef0fe7d1c43169a8ffc327a47808fae6a067b049fe0e9bbf84dd5"}, + {file = "llvmlite-0.40.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cda71de10a1f48416309e408ea83dab5bf36058f83e13b86a2961defed265568"}, + {file = "llvmlite-0.40.1-cp38-cp38-win32.whl", hash = "sha256:96707ebad8b051bbb4fc40c65ef93b7eeee16643bd4d579a14d11578e4b7a647"}, + {file = "llvmlite-0.40.1-cp38-cp38-win_amd64.whl", hash = "sha256:e44f854dc11559795bcdeaf12303759e56213d42dabbf91a5897aa2d8b033810"}, + {file = "llvmlite-0.40.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f643d15aacd0b0b0dc8b74b693822ba3f9a53fa63bc6a178c2dba7cc88f42144"}, + {file = "llvmlite-0.40.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39a0b4d0088c01a469a5860d2e2d7a9b4e6a93c0f07eb26e71a9a872a8cadf8d"}, + {file = "llvmlite-0.40.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9329b930d699699846623054121ed105fd0823ed2180906d3b3235d361645490"}, + {file = "llvmlite-0.40.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2dbbb8424037ca287983b115a29adf37d806baf7e1bf4a67bd2cffb74e085ed"}, + {file = "llvmlite-0.40.1-cp39-cp39-win32.whl", hash = "sha256:e74e7bec3235a1e1c9ad97d897a620c5007d0ed80c32c84c1d787e7daa17e4ec"}, + {file = "llvmlite-0.40.1-cp39-cp39-win_amd64.whl", hash = "sha256:ff8f31111bb99d135ff296757dc81ab36c2dee54ed4bd429158a96da9807c316"}, + {file = "llvmlite-0.40.1.tar.gz", hash = "sha256:5cdb0d45df602099d833d50bd9e81353a5e036242d3c003c5b294fc61d1986b4"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "msgspec" +version = "0.18.0" +description = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML." +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgspec-0.18.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ec4f95f03e9fcaef942f5b76856ad1b6bace5cc4db4555939ff25262faa5ad63"}, + {file = "msgspec-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0d28fcd20d07b565f42289bc7791493cf3b602ad41002db3fe5642802bbf137a"}, + {file = "msgspec-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7bc1386317796508772e95e3747dbea7e4178a24ebba04f33408dd84b6aea44"}, + {file = "msgspec-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d814fe2b4641085ed4a2fc917b5f407afe550c3d0c00ab190fc1f6fae1c75dd7"}, + {file = "msgspec-0.18.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:92c0db3f81bfda2be43ced32b043e68fa95daa5c7403f0ced26e49815efe681e"}, + {file = "msgspec-0.18.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89070b557ae3f057c9357dc7f2f5fe11000808254e3f079663246ae4b43b2b89"}, + {file = "msgspec-0.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:7e0d735205bf9abd7755434233b7ff48db66965ca4d50a59a96421c4425b2507"}, + {file = "msgspec-0.18.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08c422741e0e1e13404f7497c2b3419999fd1398c095e841f191d78f569361fd"}, + {file = "msgspec-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:33950a523536baceed2f252cda32780eb3646a4656ca08c4bea6497d4988e341"}, + {file = "msgspec-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3db21b8e7c71f011c90ba6ed0514cf4a95076ae48e7e85d5fd912f6c8d609990"}, + {file = "msgspec-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b59fff158e5a576d68afe3aed040717d25d7edd2c0653c46733dfa0fbfa1c6"}, + {file = "msgspec-0.18.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ee81a859b16698d2f43fe6bc56b7141f6dd936a6a80f52ec80da45fafa3d56ea"}, + {file = "msgspec-0.18.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:722032d40b721bfb0771c8aeba11373bed84c5ed8721cc81360207d67ecfb9ca"}, + {file = 
"msgspec-0.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:2e986f68cbcba50a2198052692f530113507fb566f282f40cfdaafee7ae6a307"}, + {file = "msgspec-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3c3d6147f1368c8ccf0869313c23ffdf874abb7e0033002689edf5bfc048f75d"}, + {file = "msgspec-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b44ad06f78a4c05860f80bb533893582727777a8cc760573f41e49cfc5cee60c"}, + {file = "msgspec-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f754b92340188e6e89c51f3fdfab7de0177bcd08919481072c192782ad9ecee5"}, + {file = "msgspec-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf10ad5928aecaaeb6b2be82b8aef78df786734020bfc1f281ee78485daa2af7"}, + {file = "msgspec-0.18.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:047b00a7e148c02e64cbb65c59512f93fb8e96b71fc0358f12062e0359bef878"}, + {file = "msgspec-0.18.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:719c8b687a0402d2cd1579753e50d903ab53ef0402bbab91bca96d0e3c2b78d5"}, + {file = "msgspec-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:cbd16fae31bb5d2ce06d317e5f2736d58690cad310147c2104ff0a98fa63895c"}, + {file = "msgspec-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e409af3aa63df802fbe9f99fee1bfdb895f2b243c96e1ef9a40793f73625b549"}, + {file = "msgspec-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:005f708354371c2a7c3c598f4a67d23f73315e3789dfefd2a274f4a11097866d"}, + {file = "msgspec-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c103a8eb8c01a3594cf3c66fe6d6a4d11e17d52e07bffadff599d87bae4a476"}, + {file = "msgspec-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5f9d154ff486426733726de321d3993f4d8aa7bbea3812a8716dedc6b867592"}, + {file = "msgspec-0.18.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f78c176e072e5d805be618d62e56cb2d2ca68cb93c0d6bbfeb03418247e529f"}, + {file = "msgspec-0.18.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef84977a7f8ced0c369a65ffbcd618c341fe4ba0b30bd1348ce8b6e5dc4096b3"}, + {file = "msgspec-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:f907fcc782e5fa6f6bb329004993baa00f068b4e964a971a1421e316b6870012"}, + {file = "msgspec-0.18.0.tar.gz", hash = "sha256:edcdc1bf397f1b06a3323ac61daaa5de9c9c6e8a2349024bdf0a267d0b4d24b5"}, +] + +[package.extras] +dev = ["attrs", "coverage", "furo", "gcovr", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli-w"] +doc = ["furo", "ipython", "sphinx", "sphinx-copybutton", "sphinx-design"] +test = ["attrs", "msgpack", "mypy", "pyright", "pytest", "pyyaml", "tomli", "tomli-w"] +toml = ["tomli", "tomli-w"] +yaml = ["pyyaml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nest-asyncio" +version = "1.5.7" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, + {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, +] + +[[package]] +name = "numba" +version = "0.57.1" +description = "compiling Python code using LLVM" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numba-0.57.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db8268eb5093cae2288942a8cbd69c9352f6fe6e0bfa0a9a27679436f92e4248"}, + {file = "numba-0.57.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:643cb09a9ba9e1bd8b060e910aeca455e9442361e80fce97690795ff9840e681"}, + {file = "numba-0.57.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:53e9fab973d9e82c9f8449f75994a898daaaf821d84f06fbb0b9de2293dd9306"}, + {file = "numba-0.57.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c0602e4f896e6a6d844517c3ab434bc978e7698a22a733cc8124465898c28fa8"}, + {file = "numba-0.57.1-cp310-cp310-win32.whl", hash = "sha256:3d6483c27520d16cf5d122868b79cad79e48056ecb721b52d70c126bed65431e"}, + {file = "numba-0.57.1-cp310-cp310-win_amd64.whl", hash = "sha256:a32ee263649aa3c3587b833d6311305379529570e6c20deb0c6f4fb5bc7020db"}, + {file = "numba-0.57.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c078f84b5529a7fdb8413bb33d5100f11ec7b44aa705857d9eb4e54a54ff505"}, + {file = "numba-0.57.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e447c4634d1cc99ab50d4faa68f680f1d88b06a2a05acf134aa6fcc0342adeca"}, + {file = "numba-0.57.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4838edef2df5f056cb8974670f3d66562e751040c448eb0b67c7e2fec1726649"}, + {file = "numba-0.57.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9b17fbe4a69dcd9a7cd49916b6463cd9a82af5f84911feeb40793b8bce00dfa7"}, + {file = "numba-0.57.1-cp311-cp311-win_amd64.whl", hash = "sha256:93df62304ada9b351818ba19b1cfbddaf72cd89348e81474326ca0b23bf0bae1"}, + {file = "numba-0.57.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8e00ca63c5d0ad2beeb78d77f087b3a88c45ea9b97e7622ab2ec411a868420ee"}, + {file = "numba-0.57.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ff66d5b022af6c7d81ddbefa87768e78ed4f834ab2da6ca2fd0d60a9e69b94f5"}, + {file = "numba-0.57.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:60ec56386076e9eed106a87c96626d5686fbb16293b9834f0849cf78c9491779"}, + {file = "numba-0.57.1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6c057ccedca95df23802b6ccad86bb318be624af45b5a38bb8412882be57a681"}, + {file = "numba-0.57.1-cp38-cp38-win32.whl", hash = "sha256:5a82bf37444039c732485c072fda21a361790ed990f88db57fd6941cd5e5d307"}, + {file = "numba-0.57.1-cp38-cp38-win_amd64.whl", hash = "sha256:9bcc36478773ce838f38afd9a4dfafc328d4ffb1915381353d657da7f6473282"}, + {file = "numba-0.57.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ae50c8c90c2ce8057f9618b589223e13faa8cbc037d8f15b4aad95a2c33a0582"}, + {file = "numba-0.57.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9a1b2b69448e510d672ff9a6b18d2db9355241d93c6a77677baa14bec67dc2a0"}, + {file = "numba-0.57.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3cf78d74ad9d289fbc1e5b1c9f2680fca7a788311eb620581893ab347ec37a7e"}, + {file = "numba-0.57.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f47dd214adc5dcd040fe9ad2adbd2192133c9075d2189ce1b3d5f9d72863ef05"}, + {file = "numba-0.57.1-cp39-cp39-win32.whl", hash = "sha256:a3eac19529956185677acb7f01864919761bfffbb9ae04bbbe5e84bbc06cfc2b"}, + {file = "numba-0.57.1-cp39-cp39-win_amd64.whl", hash = "sha256:9587ba1bf5f3035575e45562ada17737535c6d612df751e811d702693a72d95e"}, + {file = "numba-0.57.1.tar.gz", hash = "sha256:33c0500170d213e66d90558ad6aca57d3e03e97bb11da82e6d87ab793648cb17"}, +] + +[package.dependencies] +llvmlite = "==0.40.*" +numpy = ">=1.21,<1.25" + +[[package]] +name = "numpy" +version = "1.24.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6e73a1f4f5b74a42abb55bc2b3d869f1b38cbc8776da5f8b66bf110284f7a437"}, + {file = "numpy-1.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9387c7d6d50e8f8c31e7bfc034241e9c6f4b3eb5db8d118d6487047b922f82af"}, + {file = "numpy-1.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ad6a024a32ee61d18f5b402cd02e9c0e22c0fb9dc23751991b3a16d209d972e"}, + {file = "numpy-1.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73cf2c5b5a07450f20a0c8e04d9955491970177dce8df8d6903bf253e53268e0"}, + {file = "numpy-1.24.0-cp310-cp310-win32.whl", hash = "sha256:cec79ff3984b2d1d103183fc4a3361f5b55bbb66cb395cbf5a920a4bb1fd588d"}, + {file = "numpy-1.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:4f5e78b8b710cd7cd1a8145994cfffc6ddd5911669a437777d8cedfce6c83a98"}, + {file = "numpy-1.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4445f472b246cad6514cc09fbb5ecb7aab09ca2acc3c16f29f8dca6c468af501"}, + {file = "numpy-1.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ec3e5e8172a0a6a4f3c2e7423d4a8434c41349141b04744b11a90e017a95bad5"}, + {file = "numpy-1.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9168790149f917ad8e3cf5047b353fefef753bd50b07c547da0bdf30bc15d91"}, + {file = "numpy-1.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada6c1e9608ceadaf7020e1deea508b73ace85560a16f51bef26aecb93626a72"}, + {file = "numpy-1.24.0-cp311-cp311-win32.whl", hash = "sha256:f3c4a9a9f92734a4728ddbd331e0124eabbc968a0359a506e8e74a9b0d2d419b"}, + {file = "numpy-1.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:90075ef2c6ac6397d0035bcd8b298b26e481a7035f7a3f382c047eb9c3414db0"}, + {file = "numpy-1.24.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0885d9a7666cafe5f9876c57bfee34226e2b2847bfb94c9505e18d81011e5401"}, + {file = "numpy-1.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e63d2157f9fc98cc178870db83b0e0c85acdadd598b134b00ebec9e0db57a01f"}, + {file = "numpy-1.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8960f72997e56781eb1c2ea256a70124f92a543b384f89e5fb3503a308b1d3"}, + {file = "numpy-1.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2f8e0df2ecc1928ef7256f18e309c9d6229b08b5be859163f5caa59c93d53646"}, + {file = "numpy-1.24.0-cp38-cp38-win32.whl", hash = "sha256:fe44e925c68fb5e8db1334bf30ac1a1b6b963b932a19cf41d2e899cf02f36aab"}, + {file = "numpy-1.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:d7f223554aba7280e6057727333ed357b71b7da7422d02ff5e91b857888c25d1"}, + {file = "numpy-1.24.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ab11f6a7602cf8ea4c093e091938207de3068c5693a0520168ecf4395750f7ea"}, + {file = "numpy-1.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12bba5561d8118981f2f1ff069ecae200c05d7b6c78a5cdac0911f74bc71cbd1"}, + {file = "numpy-1.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9af91f794d2d3007d91d749ebc955302889261db514eb24caef30e03e8ec1e41"}, + {file = "numpy-1.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b1ddfac6a82d4f3c8e99436c90b9c2c68c0bb14658d1684cdd00f05fab241f5"}, + {file = "numpy-1.24.0-cp39-cp39-win32.whl", hash = "sha256:ac4fe68f1a5a18136acebd4eff91aab8bed00d1ef2fdb34b5d9192297ffbbdfc"}, + {file = "numpy-1.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:667b5b1f6a352419e340f6475ef9930348ae5cb7fca15f2cc3afcb530823715e"}, + {file = "numpy-1.24.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4d01f7832fa319a36fd75ba10ea4027c9338ede875792f7bf617f4b45056fc3a"}, + {file = "numpy-1.24.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb0490f0a880700a6cc4d000384baf19c1f4df59fff158d9482d4dbbca2b239"}, + {file = "numpy-1.24.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0104d8adaa3a4cc60c2777cab5196593bf8a7f416eda133be1f3803dd0838886"}, + {file = "numpy-1.24.0.tar.gz", hash = "sha256:c4ab7c9711fe6b235e86487ca74c1b092a6dd59a3cb45b63241ea0a148501853"}, +] + +[[package]] +name = "outcome" +version = "1.2.0" +description = "Capture the outcome of Python function calls." +optional = false +python-versions = ">=3.7" +files = [ + {file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"}, + {file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"}, +] + +[package.dependencies] +attrs = ">=19.2.0" + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pdbp" +version = "1.4.6" +description = "pdbp (Pdb+): A drop-in replacement for pdb and pdbpp." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pdbp-1.4.6-py3-none-any.whl", hash = "sha256:2a0d5d85d39393aa9b6abf8e19b24431953c11c927b1a799161b6968cee25bb8"}, + {file = "pdbp-1.4.6.tar.gz", hash = "sha256:966e8c3ca2a7b53fba355e9d6b8c8ed39c072078c356b95965a5c8e4bb7ab9da"}, +] + +[package.dependencies] +colorama = {version = ">=0.4.5", markers = "platform_system == \"Windows\""} +pygments = {version = ">=2.16.1", markers = "python_version >= \"3.7\""} +tabcompleter = ">=1.2.1" + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = 
"pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "polars" +version = "0.18.14" +description = "Blazingly fast DataFrame library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "polars-0.18.14-cp38-abi3-macosx_10_7_x86_64.whl", hash = "sha256:47dcf649a9cf8d3152d086a772cfb6202a81b1ac1d98e62dc83d1504bb81013a"}, + {file = "polars-0.18.14-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:0d2c5c0b26d5abfb87ac931ef23515646ae8074ca5a5273a408c86ea7887511e"}, + {file = "polars-0.18.14-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b50bfab638a7da85f242c4653d2fc9a5439256d571f5fc0a007241521b111473"}, + {file = "polars-0.18.14-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8da787972ec3216edaa21f9fcf80c7db021a8e3b54e9a134bdead5a0c426c9c7"}, + {file = "polars-0.18.14-cp38-abi3-win_amd64.whl", hash = "sha256:18c1b44fd7bb9339e5f9fa158ad89b1c5132ef145b4d69667ada45dd26a6e9a8"}, + {file = "polars-0.18.14.tar.gz", hash = "sha256:6fd6a453813bf724ed7b62142658c93970dca04fe6a469d03c4c91e75efbae0d"}, +] + +[package.extras] +adbc = ["adbc_driver_sqlite"] +all = ["polars[adbc,cloudpickle,connectorx,deltalake,fsspec,matplotlib,numpy,pandas,pyarrow,pydantic,sqlalchemy,timezone,xlsx2csv,xlsxwriter]"] +cloudpickle = ["cloudpickle"] +connectorx = ["connectorx"] +deltalake = ["deltalake (>=0.10.0)"] +fsspec = ["fsspec"] +matplotlib = ["matplotlib"] +numpy = ["numpy (>=1.16.0)"] +pandas = ["pandas", "pyarrow (>=7.0.0)"] +pyarrow = ["pyarrow (>=7.0.0)"] +pydantic = ["pydantic"] +sqlalchemy = ["pandas", "sqlalchemy"] +timezone = ["backports.zoneinfo", "tzdata"] +xlsx2csv = ["xlsx2csv (>=0.8.0)"] +xlsxwriter = ["xlsxwriter"] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyqt5" +version = "5.15.9" +description = "Python bindings for the Qt cross platform application toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyQt5-5.15.9-cp37-abi3-macosx_10_13_x86_64.whl", hash = "sha256:883ba5c8a348be78c8be6a3d3ba014c798e679503bce00d76c666c2dc6afe828"}, + {file = "PyQt5-5.15.9-cp37-abi3-manylinux_2_17_x86_64.whl", hash = "sha256:dd5ce10e79fbf1df29507d2daf99270f2057cdd25e4de6fbf2052b46c652e3a5"}, + {file = "PyQt5-5.15.9-cp37-abi3-win32.whl", hash = "sha256:e45c5cc15d4fd26ab5cb0e5cdba60691a3e9086411f8e3662db07a5a4222a696"}, + {file = "PyQt5-5.15.9-cp37-abi3-win_amd64.whl", hash = "sha256:e030d795df4cbbfcf4f38b18e2e119bcc9e177ef658a5094b87bb16cac0ce4c5"}, + {file = "PyQt5-5.15.9.tar.gz", hash = "sha256:dc41e8401a90dc3e2b692b411bd5492ab559ae27a27424eed4bd3915564ec4c0"}, +] + +[package.dependencies] +PyQt5-Qt5 = ">=5.15.2" +PyQt5-sip = ">=12.11,<13" + +[[package]] +name = "pyqt5-qt5" +version = "5.15.2" +description = "The subset of a Qt installation needed by PyQt5." +optional = false +python-versions = "*" +files = [ + {file = "PyQt5_Qt5-5.15.2-py3-none-macosx_10_13_intel.whl", hash = "sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-win32.whl", hash = "sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-win_amd64.whl", hash = "sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962"}, +] + +[[package]] +name = "pyqt5-sip" +version = "12.12.2" +description = "The sip module support for PyQt5" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyQt5_sip-12.12.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1cc49c8498c34649325d53bcd243c854391f828d9bab4f2f3afd3ee3451cab72"}, + {file = "PyQt5_sip-12.12.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f6e7a697d0ddf754798988fae7b2a0da04f6a59fb13ae863e5d1da4b280c4f"}, + {file = "PyQt5_sip-12.12.2-cp310-cp310-win32.whl", hash = "sha256:7e572c8104e75db2c69609d195daf44c7b965dcb1c5b48e30fc376868909be56"}, + {file = "PyQt5_sip-12.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a65697aa0fdb66e20d7b1ef8adfacc1caf1e61655530920172bf3a2fb1148cd"}, + {file = "PyQt5_sip-12.12.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:761e018dbbc46daccdb01f8f0dcc0d055c76834d839f0343cbec4b0ecbbde512"}, + {file = "PyQt5_sip-12.12.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d2b127ba5155bff452944b8a96ba06d7ec2161f48a2f9cc190425bfca94ab6b"}, + {file = "PyQt5_sip-12.12.2-cp311-cp311-win32.whl", hash = "sha256:26e75bc4ffd8e6b19ae96fe93dc135eb5aea03e4570724d4b3c40dbf36f3a2e6"}, + {file = "PyQt5_sip-12.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:d9548f353f17407d00f67d08c737de9f5c067352c3bdac8571492c614c2893eb"}, + {file = "PyQt5_sip-12.12.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7e640b7636d86271ba8969b260e1655068b44750f20801ebc80f49a1aa737bf9"}, + {file = 
"PyQt5_sip-12.12.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e46d957fbeecaa1437f2dd715407b1e59e0918cc29382c7ea79784c5f3cbe0d2"}, + {file = "PyQt5_sip-12.12.2-cp37-cp37m-win32.whl", hash = "sha256:cb4523097f1ccabb95b3197a58278a40fc944b33791d3406bfa397e12303b6c6"}, + {file = "PyQt5_sip-12.12.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ed04bd0065d870912c1b0a4b34b8a78698c76d77f15474c3e841b0b6dd2f429f"}, + {file = "PyQt5_sip-12.12.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:71795c177010e52109812b03ec919020461ec42a7d9d241a45fe6d708529b5a6"}, + {file = "PyQt5_sip-12.12.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de06b6bd8241a189f729b8c093ce5dcf5928489eb7748bda28e28324e57544b0"}, + {file = "PyQt5_sip-12.12.2-cp38-cp38-win32.whl", hash = "sha256:7050ad8f94370eb7e4caa022b7e6d8b2de615e0714b557ca2098c82c0132074a"}, + {file = "PyQt5_sip-12.12.2-cp38-cp38-win_amd64.whl", hash = "sha256:67eed70427d3291e5c52c349fb4619c57c9a8810ab8d78a142c00edcbfd20d3b"}, + {file = "PyQt5_sip-12.12.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf74db9a1542f66793ccc00e403c8c2c36c67c0cff0fb01d23fe71cc1c56c788"}, + {file = "PyQt5_sip-12.12.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:23e983119f760dc6c1a1e6cb21fd4c268d14c4ee497de6da9ce2b9d46f9779b2"}, + {file = "PyQt5_sip-12.12.2-cp39-cp39-win32.whl", hash = "sha256:a88ce85176639723f04cf5ce59157ecf3a9faca5d5dd1fe82d5ef46a3bd1d102"}, + {file = "PyQt5_sip-12.12.2-cp39-cp39-win_amd64.whl", hash = "sha256:7f13e71f5171f30d8b4176c081f0203a43e1704746b4cdaa837477945177b2a0"}, + {file = "PyQt5_sip-12.12.2.tar.gz", hash = "sha256:10d9bfa9f59f0fd1cad81be187479316ffc95684f573efea94512cb4257d2b17"}, +] + +[[package]] +name = "pyqtgraph" +version = "0.12.3" +description = "Scientific Graphics and GUI Library for Python" +optional = false +python-versions = ">=3.7" +files = [] +develop = false + +[package.dependencies] +numpy = ">=1.17.0" + +[package.source] +type = "git" +url = "https://github.com/pikers/pyqtgraph.git" +reference = "HEAD" +resolved_reference = "373f9561ea8ec4fef9b4e8bdcdd4bbf372dd6512" + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." +optional = false +python-versions = "*" +files = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-levenshtein" +version = "0.21.1" +description = "Python extension for computing string edit distances and similarities." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "python-Levenshtein-0.21.1.tar.gz", hash = "sha256:01ea6828c03738a475ee18ea8b86a674eb45ce80e9cce88376d132cf3ab26060"}, + {file = "python_Levenshtein-0.21.1-py3-none-any.whl", hash = "sha256:5f49ebb4772a274aac4aeb190fc23ad537ebe778dec15a8f17975f746478c691"}, +] + +[package.dependencies] +Levenshtein = "0.21.1" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "qdarkstyle" +version = "3.1" +description = "The most complete dark/light style sheet for C++/Python and Qt applications" +optional = false +python-versions = "*" +files = [ + {file = "QDarkStyle-3.1-py2.py3-none-any.whl", hash = "sha256:679a38fcd040de9fac8b8cae483310302fdb12c8d912845249c41dc54974a9b2"}, + {file = "QDarkStyle-3.1.tar.gz", hash = "sha256:600584d625343e0ddd128de08393d3c35637786a49827f174d29aa7caa8279c1"}, +] + +[package.dependencies] +qtpy = ">=1.9" + +[package.extras] +develop = ["qtsass", "watchdog"] +docs = ["sphinx", "sphinx-rtd-theme"] +example = ["pyqt5", "pyside2"] + +[[package]] +name = "qtpy" +version = "2.3.1" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +optional = false +python-versions = ">=3.7" +files = [ + {file = "QtPy-2.3.1-py3-none-any.whl", hash = "sha256:5193d20e0b16e4d9d3bc2c642d04d9f4e2c892590bd1b9c92bfe38a95d5a2e12"}, + {file = "QtPy-2.3.1.tar.gz", hash = "sha256:a8c74982d6d172ce124d80cafd39653df78989683f760f2281ba91a6e7b9de8b"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] + +[[package]] +name = "rapidfuzz" +version = "3.2.0" +description = "rapid fuzzy string matching" +optional = false +python-versions = ">=3.7" +files = [ + {file = "rapidfuzz-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f5787f1cc456207dee1902804209e1a90df67e88517213aeeb1b248822413b4c"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e8d91137b0b5a6ef06c3979b6302265129dee1741486b6baa241ac63a632bea7"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c130e73e0079f403b7c3dbf6f85816a3773971c3e639f7289f8b4337b8fd70fe"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e18059188bfe3cdbc3462aeec2fa3302b08717e04ca34e2cc6e02fb3c0280d8"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37bb6bd6a79d5524f121ff2a7d7df4491519b3f43565dccd4596bd75aa73ab7c"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca0d6aee42effaf2e8883d2181196dd0957b1af5731b0763f10f994c32c823db"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49fc2cbbf05bfa1af3fe4c0e0c8e5c8ac118d6b6ddfb0081cff48ad53734f7ac"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd4fdee46f6ba7d254dba8e7e8f33012c964fc891a06b036b0fd20cab0db301"}, + {file = 
"rapidfuzz-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ab2863732eafd1cc58f249f145c20ad13d4c902d3ef3a369b00438c05e5bfb55"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a9658c545de62ac948027092ba7f4e8507ebc5c9aef964eca654409c58f207f0"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5f3e36cfadaf29f081ad4ca476e320b639d610e930e0557f395780c9b2bdb135"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:239ffc04328e14f5e4097102bd934352a43d5912acf34fb7d3e3fe306de92787"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b56ce39ba0a77501d491bc20a2266989ae0264452758b004950ee5f4c10c641f"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-win32.whl", hash = "sha256:dbebd639579ab113644699fe0c536ae00aba15b224e40a79987684333d1104a5"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:88e99229c4df99a7e5810d4d361033b44e29d8eb4faaddcfb8e4bdcb604cf40a"}, + {file = "rapidfuzz-3.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:8e39c4e2e85828aa6c39cc7f30e2917d991b40190a2a3af1fa02396a3362a54e"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f2e618389427c5e8304357a78f83df22558e61f11bc21aeb95dd544c274d330"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a2a6babfe4d3ce2eadd0079ee7861cb5f1584845c5a3394edead85457e7d7464"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f223deb06895c9c136b40cd8fd7e96ee745c3bb9ed502d7367f6ad9ab6fdd40e"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de6962b45f761355fa4b37de635e4df467d57530732a40d82e748a5bc911731"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76953516cb3b75fb1234c5a90e0b86be4525f055a9e276237adb1ffe40dca536"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1e04861dddbb477500449dc67fb037656a049b6f78c4c434c6000e64aa42bb4"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff6e725eec9c769f9d22126c80a6ada90275c0d693eca2b35d5933178bda5a2"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21ce33242e579ba255c8a8b438782164acaa55bf188d9410298c40cbaa07d5"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:986a7aad18768b920bb710e15ed7629d1da0af31589348c0a51d152820efc05d"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6e98f0a6fac14b7b9893147deceae12131f6ff169ae1c973635ef97617949c8f"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5dd5c4b9f5cd8a8271a90d1bab643028e7172808c68ed5d8dde661a3e51098e3"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e336b0a81c5a8e689edf6928136d19e791733a66509026d9acbaa148238186e0"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fa44afb731535a803c4c15ee846257fef050768af96d1d6c0eadb30285d0f7b"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-win32.whl", hash = "sha256:d04ad155dbecc0c143912f691d38d4790e290c2ce5411b146c0e00d4f4afd26f"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:b9e79e27344af95a71a3bb6cd3562581da5d0780ff847a13ad69ee622d940d3c"}, + {file = "rapidfuzz-3.2.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:dc53747e73f34e8f3a3c1b0bc5b437b90a2c69d873e97781aa7c06543201409a"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:613c1043332eeba0c0910de71af221ac10d820b4fa9615b0083c733b90a757f9"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0907f87beca70e44f78e318eede2416ddba19ec43d28af9248617e8a1741ef3"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcfd184e0b5c58497cc3d961f49ac07ae1656d161c6c4d06230d267ae4e11f00"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a7d53a2f1ccfb169be26fa3824b1b185420592c75853f16c6b7115315ea6784"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2eac585803c4e8132ed5f4a150621db05c418304982c88cf706abdded65e1632"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc859f654b350def5df2ebc6d09f822b04399823e3dad1c3f2e8776c825fcde7"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8a165f64c528edc0bbbd09c76d64efd4dbe4240fd1961710b69586ef40486e79"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:56a392b655597ecf40535b56bfb7c0856c10c0abc0cbc369fd25a1665420710b"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5863b176da42b1bb450a28375ef1502f81fbecd210a5aae295d7f2221284ad41"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:8f8590c39a3f745b314f2697b140c8f8600fe7ecfb2101e9e4ec6e7716c66827"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:da00990adf1fbc0904f22409b3451473fa465a0ef49f3075703c206080aa31b2"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:2504205552bf568ac478f17dd612d0e31c4a82c645c66209a442df7e572b5adc"}, + {file = "rapidfuzz-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:af3ac648232c109e36c8b941106d726969972644aa3ef55218c5988aa1daea03"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:04d22f6058ce5d620ec4ecd771e44cfa77d571137d6c6547df57bdfc44ee2a98"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac7ddcd372ed202d1b59b117506da695b291f135435cfbf3e71490aa8e687173"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd3fca0224b84350f73eab1fb5728c58fd25ee4f20e512607c7d83f9bc836d3f"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bdb1f92c4666c7e1d3c21268b931cf3f06f32af98dfdeb37641159b15fa31dd"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:871052405c465a45b53a3dc854a8be62079f42cdbb052651ff0b65e2452131e6"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb9bb1af5680741cf974f510fb3894907a1b308e819aff3d9ea10b5326e8a5f6"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84ce2e010677835fa5ba591419e4404f11a1446f33eec3724a2bff557ae5144a"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c13107e0fdca5ccae70659f45646d57453338a9dfc6b152fb7372e4bf73466a0"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:538027685a1a8f1699e329f6443951267f169bfa149298734ea679db8f0e7171"}, + {file = 
"rapidfuzz-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3557736672115d082979a8a12f884ed5b24268f4471fee85cfb2ec7212b68607"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6bc5e3da74644cf75663f5b438e0ae79b67d1f96d082cda771b0ecfed0528f40"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d2d0fc98d9d7bba44f929d201c2c2c35eb69ea2ffef43d939b297dafef934625"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bf85a3bf34f27383691e8af0fd148b2a3a89f1444d4640d04ef58030f596ee0"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-win32.whl", hash = "sha256:cf5ea3f1d65a0bee707245a0096c3a6f769b3ad6f1b9afc7176dfb73eb0ac98f"}, + {file = "rapidfuzz-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:54906095444ea8b0a4013f3799b3f2c380205d7f60b9c55774e7d2264fa8d9c6"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6d44218823533e0d47770feef86c73c90a6f7e8d4923eafabf56a1fa3444eda0"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:87c3d4077e61c66d5dd11198a317f83db8e8cf034239baa16e4384037b611652"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0e1142350566349c41173685988d942ebc89578f25ee27750d261e7d79e1ce"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de44a378751fdfb19ddf6af412b3395db4b21ab61f40139f815c82f1a1611b50"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0983b30c7b289f540b11cdb550e301b3f2e8f0ef9df866aa24a16f6cd96041"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adfffb79288437006be412d74e28cddd7c5e6cc9f84a34aa9c356b13dc1ad2c9"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a284386652efb3b7d41ed5dd101ab4ce5936f585c52a47fa9838fc0342235700"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c546c83d6bc9006b86f56921b92c3e16d8ddeb4e1663653e755a5d8a3ac258da"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:53b3575fa398a5021192c1592dce98965560ad00690be3ade056eab99288562c"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:366ade5d0067dc6281e2a6c9e5c91bbfe023b09cef86894de8fe480b4696e3bf"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f946dec03cc2c77bc091d186c007d1e957d1f16a4d68a181f5fa75aea40bdf87"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:045e5cccb0e792005d5465de0ea4621b9b67778580e558f266984704e68b0087"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd80288b9538c87209893f0934563c20b6a43acf30693794bcc111b294447ee9"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-win32.whl", hash = "sha256:a359436754ed5dd10d88706f076caa7f8e5c1469bf5ebba1897dc87aa9ff953e"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:75df3d9b895910ee810b2c96c8626cc2b5b63bb237762db36ff79fb466eccc43"}, + {file = "rapidfuzz-3.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:893833a903875a50acdbcb7ed33b5426ba47412bd18b3eb80d56d982b641dc59"}, + {file = "rapidfuzz-3.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3002c3660180747243cccb40c95ade1960e6665b340f211a114f5994b345ab53"}, + {file = "rapidfuzz-3.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa50de7e0f95e1400b2bf38cfeb6e40cf87c862537871c2f7b2050b5db0a9dfc"}, + {file = "rapidfuzz-3.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54842a578a2a8e5258812a9032ffb55e6f1185490fd160cae64e57b4dc342297"}, + {file = "rapidfuzz-3.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:108861623838cd574b0faa3309ce8525c2086159de7f9e23ac263a987c070ebd"}, + {file = "rapidfuzz-3.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d39128415f0b52be08c15eeee5f79288189933a4d6fa5dc5fff11e20614b7989"}, + {file = "rapidfuzz-3.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3af2b75635f33ffab84e295773c84a176d4cba75311d836ad79b6795e9da11ac"}, + {file = "rapidfuzz-3.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68c678f7f3ca3d83d1e1dd7fb7db3232037d9eef12a47f1d5fe248a76ca47571"}, + {file = "rapidfuzz-3.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25d2bd257034e910df0951cdeff337dbd086d7d90af3ed9f6721e7bba9fc388a"}, + {file = "rapidfuzz-3.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c7f20e68cad26fc140c6f2ac9e8f2632a0cd66e407ba3ea4ace63c669fd4719"}, + {file = "rapidfuzz-3.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f09fd9dc73180deb9ca1c4fbd9cc27378f0ab6ee74e97318c38c5080708702b6"}, + {file = "rapidfuzz-3.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af7914fc7683f921492f32314cfbe915a5376cc08a982e09084cbd9b866c9fd4"}, + {file = "rapidfuzz-3.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08a242c4b909abbcfa44504dc5041d5eeca4cd088ae51afd6a52b4dc61684fa2"}, + {file = "rapidfuzz-3.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b07afaca28398b93d727a2565491c455896898b66daee4664acde4af94e557"}, + {file = "rapidfuzz-3.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24e4c4a031c50e4eeb4787263319a0ac5bed20f4a263d28eac060150e3ba0018"}, + {file = "rapidfuzz-3.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d19c2853a464c7b98cc408654412fd875b030f78023ccbefc4ba9eec754e07e7"}, + {file = "rapidfuzz-3.2.0.tar.gz", hash = "sha256:448d031d9960fea7826d42bd4284156fc68d3b55a6946eb34ca5c6acf960577b"}, +] + +[package.extras] +full = ["numpy"] + +[[package]] +name = "rich" +version = "13.5.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" +optional = false +python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] + +[[package]] +name = "tabcompleter" +version = "1.2.1" +description = "tabcompleter --- Autocompletion in the Python console." +optional = false +python-versions = ">=3.6" +files = [ + {file = "tabcompleter-1.2.1-py3-none-any.whl", hash = "sha256:5cd3b01410d9bf6bebe3f03645309de35e8eb17cde0e9326952fd6d84b8b135b"}, + {file = "tabcompleter-1.2.1.tar.gz", hash = "sha256:2906f771adf08674a5ace718c006c8b2b1d1caa520ba9f17fc3ceafc6a868b72"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomli-w" +version = "1.0.0" +description = "A lil' TOML writer" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, + {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, +] + +[[package]] +name = "tomlkit" +version = "0.11.8" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [] +develop = false + +[package.source] +type = "git" +url = "https://github.com/pikers/tomlkit.git" +reference = "piker_pin" +resolved_reference = "8e0239a766e96739da700cd87cc00b48dbe7451f" + +[[package]] +name = "tractor" +version = "0.1.0a6.dev0" +description = "structured concurrrent `trio`-\"actors\"" +optional = false +python-versions = ">=3.10" +files = [] +develop = true + +[package.dependencies] +async_generator = "*" +colorlog = "*" +exceptiongroup = "*" +msgspec = "*" +pdbp = "*" +pyreadline3 = {version = "*", markers = "platform_system == \"Windows\""} +tricycle = "*" +trio = ">=0.22" +trio_typing = "*" +wrapt = "*" + +[package.source] +type = "directory" +url = "../tractor" + +[[package]] +name = "tricycle" +version = "0.3.0" +description = "Experimental extensions for Trio, the friendly async I/O library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tricycle-0.3.0-py3-none-any.whl", hash = "sha256:8f9a9bb66d458da68992e64cf2f1cf7a9748d3f98127647fb35422854c22e474"}, + {file = "tricycle-0.3.0.tar.gz", hash = "sha256:57d854a61361e3b45b615b069bcf105fb0d6979d534ed7338e26b51d3ed28473"}, +] + +[package.dependencies] +trio = ">=0.15.0" +trio-typing = ">=0.5.0" + +[[package]] +name = "trio" +version = "0.22.2" +description = "A friendly Python library for async concurrency and I/O" +optional 
= false +python-versions = ">=3.7" +files = [ + {file = "trio-0.22.2-py3-none-any.whl", hash = "sha256:f43da357620e5872b3d940a2e3589aa251fd3f881b65a608d742e00809b1ec38"}, + {file = "trio-0.22.2.tar.gz", hash = "sha256:3887cf18c8bcc894433420305468388dac76932e9668afa1c49aa3806b6accb3"}, +] + +[package.dependencies] +attrs = ">=20.1.0" +cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""} +exceptiongroup = {version = ">=1.0.0rc9", markers = "python_version < \"3.11\""} +idna = "*" +outcome = "*" +sniffio = "*" +sortedcontainers = "*" + +[[package]] +name = "trio-typing" +version = "0.8.0" +description = "Static type checking support for Trio and related projects" +optional = false +python-versions = "*" +files = [ + {file = "trio-typing-0.8.0.tar.gz", hash = "sha256:f14264a27b45cec5dabd25b686255ac7d841984200c54b2057bc98a099318a97"}, + {file = "trio_typing-0.8.0-py3-none-any.whl", hash = "sha256:3734fa3f61cab047fdc97aa0030c72e910455cf85f18ca18c3ff336781b144bb"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.2" +trio = ">=0.16.0" +typing-extensions = ">=3.7.4" + +[package.extras] +mypy = ["mypy (>=0.920)"] + +[[package]] +name = "trio-util" +version = "0.7.0" +description = "Utility library for the Trio async/await framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "trio_util-0.7.0-py3-none-any.whl", hash = "sha256:a02b3daaf4996d7363f3fdfdc64157ef13d3d3725dff0dcaa0b47f4a6b5af4af"}, + {file = "trio_util-0.7.0.tar.gz", hash = "sha256:8dc5014dd00e6a24d0f048f8b723f2804c61ddc08ace9555db10fb7e1fee704a"}, +] + +[package.dependencies] +async-generator = "*" +trio = ">=0.11.0" + +[[package]] +name = "trio-websocket" +version = "0.10.3" +description = "WebSocket library for Trio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "trio-websocket-0.10.3.tar.gz", hash = "sha256:1a748604ad906a7dcab9a43c6eb5681e37de4793ba0847ef0bc9486933ed027b"}, + {file = "trio_websocket-0.10.3-py3-none-any.whl", hash = "sha256:a9937d48e8132ebf833019efde2a52ca82d223a30a7ea3e8d60a7d28f75a4e3a"}, +] + +[package.dependencies] +exceptiongroup = "*" +trio = ">=0.11" +wsproto = ">=0.14" + +[[package]] +name = "typer" +version = "0.9.0" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "wrapt" +version = "1.15.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = 
"wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = 
"wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = 
"wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[[package]] +name = "xonsh" +version = "0.14.0" +description = "Python-powered, cross-platform, Unix-gazing shell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "xonsh-0.14.0-py310-none-any.whl", hash = "sha256:72506c6dc494103df6d04467e127abddb1c6cbe05cc5903b6a4cbfbad217ff5d"}, + {file = "xonsh-0.14.0-py311-none-any.whl", hash = 
"sha256:678a65671bd0a62cdc43e932d6aecc8b1622aa18942e0afb388b8b8ae02f67a5"}, + {file = "xonsh-0.14.0-py38-none-any.whl", hash = "sha256:25976edc5695fb5806b8b23f384ff48e618e07f4596ec0806007f63122917a83"}, + {file = "xonsh-0.14.0-py39-none-any.whl", hash = "sha256:751b615726d2322d43c8166ad4bc5cbe65d03a3728f1837aa02380fa9fadb189"}, + {file = "xonsh-0.14.0.tar.gz", hash = "sha256:45a8aaabb17ce0d6d4eca9b709ecfd7ce1c8fb92162decd29a45bf88a60e9bf1"}, +] + +[package.extras] +bestshell = ["prompt-toolkit (>=3.0.29)", "pygments (>=2.2)"] +dev = ["pre-commit", "re-ver", "tomli", "xonsh[doc,test]"] +doc = ["doctr", "furo", "livereload", "matplotlib", "myst-parser", "numpydoc", "psutil", "pyzmq", "runthis-sphinxext", "sphinx (>=3.1,<5)", "tornado", "xonsh[bestshell]"] +full = ["distro", "gnureadline", "setproctitle", "ujson", "xonsh[ptk,pygments]"] +linux = ["distro"] +mac = ["gnureadline"] +proctitle = ["setproctitle"] +ptk = ["prompt-toolkit (>=3.0.29)", "pyperclip"] +pygments = ["pygments (>=2.2)"] +test = ["coverage (>=5.3.1)", "prompt-toolkit (>=3.0.29)", "pygments (>=2.2)", "pyte (>=0.8.0)", "pytest (>=7)", "pytest-cov", "pytest-mock", "pytest-rerunfailures", "pytest-subprocess", "pytest-timeout", "restructuredtext-lint", "virtualenv (>=20.16.2)", "xonsh[bestshell]"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "163fcb92fc2eefcb9b0f3ec23312a1f2b3f3de0956fad29e0194b827fc6a749c" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..7fd0f2a80 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,52 @@ +[tool.poetry] +name = "piker" +version = "0.1.0.alpha0.dev0" +description = "trading gear for hackers" +authors = ["Tyler Goodlet "] +license = "AGPLv3" +readme = "README.md" + + +[tool.poetry.dependencies] +python = "^3.10" +tomli = "^2.0.1" +tomli-w = "^1.0.0" +colorlog = "^6.7.0" +attrs = "^23.1.0" +pygments = "^2.16.1" +colorama = "^0.4.6" +msgspec = "^0.18.0" +typer = "^0.9.0" +rich = "^13.5.2" +trio = "^0.22.2" +trio-websocket = "^0.10.3" +trio-util = "^0.7.0" +async-generator = "^1.10" +asks = "^3.0.0" +ib-insync = "^0.9.86" +pendulum = "^2.1.2" +bidict = "^0.22.1" +cython = "^3.0.0" +numpy = "1.24" +numba = "^0.57.1" +polars = "^0.18.13" +pyqt5 = "^5.15.9" +qdarkstyle = ">=3.0.2" +fuzzywuzzy = {extras = ["speedup"], version = "^0.18.0"} +xonsh = "^0.14.0" + +# pinned from git +tractor = { path = '../tractor/', develop = true } +# tractor = { git = 'https://github.com/goodboy/tractor.git', branch = 'piker_pin' } +pyqtgraph = { git = 'https://github.com/pikers/pyqtgraph.git' } +asyncvnc = { git = 'https://github.com/pikers/asyncvnc.git', branch = 'main' } +tomlkit = { git = 'https://github.com/pikers/tomlkit.git', branch = 'piker_pin' } + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + + +[tool.poetry.scripts] +poetry = "poetry.console.application:main"