Compare commits

3 Commits

SHA1        Message                      Date
9cc7018698  🤖 Bumped version to 0.1.44  2024-09-02 12:05:22 +02:00
            This commit was automatically generated by a script: https://gitfub.space/Jmaa/python-omni
            All checks were successful: Build Python Container / Package-Container (push) in 1m22s;
            Package Python / Package (push) in 25s; Test Python / Test (push) in 30s.
9349e4e6d4  Hiding client                2024-09-02 12:04:03 +02:00
7d82003db8  Fixed dumb bug               2024-09-02 11:55:05 +02:00
3 changed files with 12 additions and 10 deletions

View File

@@ -1 +1 @@
-__version__ = '0.1.43'
+__version__ = '0.1.44'
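Per the commit message, this bump was generated by a script rather than edited by hand. The script itself is not shown on this page; a minimal sketch of what a patch-level bump might look like follows, with a hypothetical _version.py path (the diff view does not capture the file's real name):

    import re
    from pathlib import Path

    # Hypothetical path; the real filename is not shown in this diff view.
    VERSION_FILE = Path('_version.py')

    def bump_patch(text: str) -> str:
        # Rewrite __version__ = 'X.Y.Z' to __version__ = 'X.Y.(Z+1)'.
        match = re.search(r"__version__ = '(\d+)\.(\d+)\.(\d+)'", text)
        assert match, 'no __version__ line found'
        major, minor, patch = map(int, match.groups())
        replacement = f"__version__ = '{major}.{minor}.{patch + 1}'"
        return text[: match.start()] + replacement + text[match.end() :]

    VERSION_FILE.write_text(bump_patch(VERSION_FILE.read_text()))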

View File

@@ -12,9 +12,11 @@ from .. import secrets
 
 logger = logging.getLogger(__name__)
 
-# TODO: Move these into secrets!
-client = kucoin.client.Client(
-    secrets.KUCOIN_KEY,
-    secrets.KUCOIN_SECRET,
-    secrets.KUCOIN_PASS,
+def get_client():
+    assert secrets.KUCOIN_KEY, 'Missing secret: KUCOIN_KEY'
+    assert secrets.KUCOIN_SECRET, 'Missing secret: KUCOIN_SECRET'
+    assert secrets.KUCOIN_PASS, 'Missing secret: KUCOIN_PASS'
+    return kucoin.client.Client(
+        secrets.KUCOIN_KEY,
+        secrets.KUCOIN_SECRET,
+        secrets.KUCOIN_PASS,

@@ -40,5 +42,5 @@ class KucoinDepositAddresses(Scraper):
     deduplicate_ignore_columns = ['account.update_time']
 
     def scrape(self) -> Iterator[Mapping[str, object]]:
-        addresses = client.get_deposit_address('MPC')
+        addresses = get_client().get_deposit_address('MPC')
         yield addresses_to_data_points(addresses)
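For context on the "Hiding client" commit: the old module built the Client at import time, so merely importing the scraper package failed whenever the KUCOIN_* secrets were unset, even if the Kucoin scraper never ran. Wrapping construction in get_client() defers both the secret checks and the construction until scrape() actually needs the client. A standalone sketch of the pattern, where os.environ stands in for the repo's secrets module and the returned dict stands in for kucoin.client.Client:

    import os

    def get_client() -> dict:
        # Deferred construction: secrets are read and validated only when a
        # caller needs the client, not as a side effect of import.
        key = os.environ.get('KUCOIN_KEY')
        secret = os.environ.get('KUCOIN_SECRET')
        passphrase = os.environ.get('KUCOIN_PASS')
        assert key, 'Missing secret: KUCOIN_KEY'
        assert secret, 'Missing secret: KUCOIN_SECRET'
        assert passphrase, 'Missing secret: KUCOIN_PASS'
        return {'key': key, 'secret': secret, 'passphrase': passphrase}

Each call constructs a fresh client; if construction were expensive, decorating get_client with functools.cache would memoize it without reintroducing the import-time side effect.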

View File

@@ -124,7 +124,7 @@ def main(
         OUTPUT_PATH / f'{scraper.dataset_name}.csv',
         result_rows,
         deduplicate_mode=scraper.deduplicate_mode,
-        deduplicate_ignore_columns=scraper.deduplicate_ignore_columns(),
+        deduplicate_ignore_columns=scraper.deduplicate_ignore_columns,
     )
     logger.info('Scraper done: %s', scraper.dataset_name)
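The "Fixed dumb bug" hunk: deduplicate_ignore_columns is a plain list attribute on the scraper classes (see the Kucoin hunk above, where it is assigned ['account.update_time']), so the trailing parentheses made main() call the list itself. A minimal reproduction, with an illustrative class:

    class Scraper:
        deduplicate_ignore_columns = ['account.update_time']

    scraper = Scraper()
    scraper.deduplicate_ignore_columns    # the list itself, which is what main() needs
    scraper.deduplicate_ignore_columns()  # TypeError: 'list' object is not callable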