This repository has been archived by the owner on Dec 16, 2022. It is now read-only.

Commit 22eb4a1
Merge pull request #8 from forkdelta/v2
V2
freeatnet authored Jan 23, 2020
2 parents a73bdea + 6815c3c commit 22eb4a1
Showing 1,838 changed files with 169,928 additions and 54,278 deletions.
2 changes: 1 addition & 1 deletion scripts/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.6-alpine
+FROM python:3.8-alpine3.11
 
 WORKDIR /usr/src/scripts
 
6 changes: 3 additions & 3 deletions scripts/build_json.py
@@ -2,13 +2,13 @@
 import json
 import yaml
 
-from helpers import read_entry
+from entry_io import read_entry
 
-INDEX_KEYS = ["id", "address", "name", "symbol", "website_slug"]
+INDEX_KEYS = ["id", "address", "name", "symbol", "slug", "status", "rank"]
 
 
 def abridged_entry(entry):
-    return {k: entry[k] for k in INDEX_KEYS}
+    return {k: entry.get(k) for k in INDEX_KEYS}
 
 
 if __name__ == "__main__":
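For context, a minimal sketch (not part of the diff) of what the reworked abridged_entry returns. The sample entry is hypothetical; switching from entry[k] to entry.get(k) matters because the new index keys such as status and rank may be absent from some entries, and .get() yields None instead of raising KeyError:

INDEX_KEYS = ["id", "address", "name", "symbol", "slug", "status", "rank"]


def abridged_entry(entry):
    # Missing keys (e.g. "rank" on a delisted token) come back as None.
    return {k: entry.get(k) for k in INDEX_KEYS}


sample = {
    "id": 1027,  # hypothetical CoinMarketCap asset id
    "address": "0x0000000000000000000000000000000000000001",
    "name": "Example Token",
    "symbol": "EXT",
    "slug": "example-token",
    "status": "delisted",
    # no "rank" key on this entry
}
print(abridged_entry(sample))
# {'id': 1027, ..., 'status': 'delisted', 'rank': None}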
34 changes: 34 additions & 0 deletions scripts/convert_v1_v2.py
@@ -0,0 +1,34 @@
+from glob import glob
+from itertools import groupby
+import re
+
+from entry_io import (read_entry, write_token_entry)
+
+OVERRIDES = {"Technical Documentation": "technical_doc"}
+
+
+def update_key_format(k):
+    human_k = re.sub(r' (\d+)$', '', k)
+    return OVERRIDES.get(human_k, human_k.lower().replace(' ', '_'))
+
+
+def convert_links(links):
+    link_tuples = [(update_key_format(k), v) for (k, v) in links.items()]
+    return {
+        group_name: [value for (_, value) in name_value_tuples]
+        for (group_name, name_value_tuples
+             ) in groupby(sorted(link_tuples), key=lambda e: e[0])
+    }
+
+
+entry_files = sorted(glob("tokens/0x*.yaml"))
+for entry in (read_entry(fn) for fn in entry_files):
+    address = entry.pop("address")
+    entry.update({
+        "markets": [],
+        "links": convert_links(entry['links']),
+    })
+    if "tags" in entry:
+        del entry["tags"]
+    del entry["website_slug"]
+    write_token_entry(address, entry)
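The trickiest part of the conversion is convert_links, which folds v1's numbered link labels into v2's list-valued keys. A small self-contained illustration (the input dict is invented; the helper functions are copied from the new script above):

import re
from itertools import groupby

OVERRIDES = {"Technical Documentation": "technical_doc"}


def update_key_format(k):
    human_k = re.sub(r' (\d+)$', '', k)  # "Website 2" -> "Website"
    return OVERRIDES.get(human_k, human_k.lower().replace(' ', '_'))


def convert_links(links):
    link_tuples = [(update_key_format(k), v) for (k, v) in links.items()]
    return {
        group_name: [value for (_, value) in name_value_tuples]
        for (group_name, name_value_tuples
             ) in groupby(sorted(link_tuples), key=lambda e: e[0])
    }


v1_links = {
    "Website": "https://example.com",
    "Website 2": "https://example.org",
    "Technical Documentation": "https://example.com/whitepaper.pdf",
}
print(convert_links(v1_links))
# {'technical_doc': ['https://example.com/whitepaper.pdf'],
#  'website': ['https://example.com', 'https://example.org']}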
31 changes: 31 additions & 0 deletions scripts/entry_io.py
@@ -0,0 +1,31 @@
+import yaml
+
+YAML_WIDTH = 100
+YAML_INDENT = 2
+
+
+def read_entry(fn):
+    with open(fn) as infile:
+        return yaml.safe_load(infile)
+
+
+def write_token_entry(address, listing):
+    with open("tokens/{}.yaml".format(address), "w") as outfile:
+        outfile.write(
+            yaml.dump(
+                dict(
+                    address=address,
+                    **{k: v
+                       for (k, v) in listing.items() if v}),
+                explicit_start=True,
+                width=YAML_WIDTH,
+                indent=YAML_INDENT,
+                default_flow_style=False,
+                allow_unicode=True))
+
+
+def update_token_entry(address, partial_update):
+    old_listing = read_entry("tokens/{}.yaml".format(address))
+    old_listing.update(partial_update)
+    del old_listing["address"]
+    write_token_entry(address, old_listing)
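A quick usage sketch of the new entry_io helpers (not part of the commit; it assumes entry_io.py is importable from the working directory, that a tokens/ folder can be created there, and the address below is a made-up placeholder):

import os

from entry_io import read_entry, write_token_entry, update_token_entry

os.makedirs("tokens", exist_ok=True)
address = "0x0000000000000000000000000000000000000001"  # hypothetical token

# Falsy values ([], None, "") are dropped before the YAML document is written.
write_token_entry(address, {"name": "Example Token", "symbol": "EXT", "markets": []})

# update_token_entry re-reads the file, merges the partial update, and rewrites it.
update_token_entry(address, {"rank": 42})

print(read_entry("tokens/{}.yaml".format(address)))
# {'address': '0x0000...0001', 'name': 'Example Token', 'rank': 42, 'symbol': 'EXT'}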
91 changes: 58 additions & 33 deletions scripts/generate.py
@@ -3,13 +3,13 @@
 import logging
 import requests
 
-from helpers import (DEFAULT_HEADERS, process_listing, read_entry,
-                     write_token_entry)
+from helpers import (DEFAULT_HEADERS, process_listing)
+from entry_io import (read_entry, write_token_entry, update_token_entry)
 
 CMC_LISTINGS_API_URL = "https://api.coinmarketcap.com/v2/listings/"
 
 
-def get_listings():
+def get_api_listings():
     """
     Returns a list of CoinMarketCap-listed currencies via /v2/listings/ API endpoint.
@@ -20,12 +20,12 @@ def get_listings():
     return r.json()["data"]
 
 
-def map_existing_entries(files, exclude_deprecated=True):
+def map_entries_to_sets(files, key, exclude_deprecated=True):
     """
-    Returns a hash keyed by CoinMarketCap asset ID with sets of Ethereum addresses
-    known to be associated with that asset ID.
+    Returns a dict keyed by CoinMarketCap asset ID with sets of values for
+    the given key known to be associated with that asset ID.
     """
-    entries = ((entry["id"], entry["address"])
+    entries = ((entry["id"], entry[key])
                for entry in (read_entry(fn) for fn in files)
                if not (exclude_deprecated and entry.get("_DEPRECATED", False)))
 
@@ -35,46 +35,71 @@ def map_existing_entries(files, exclude_deprecated=True):
     }
 
 
-def deprecate_token_entry(address):
-    old_listing = read_entry("tokens/{}.yaml".format(address))
-    old_listing.update({"_DEPRECATED": True})
-    del old_listing["address"]
-    write_token_entry(address, old_listing)
+def map_entries_to_discrete(files, key, exclude_deprecated=True):
+    return {
+        entry["id"]: entry[key]
+        for entry in (read_entry(fn) for fn in files)
+        if not (exclude_deprecated and entry.get("_DEPRECATED", False))
+    }
 
 
-def main(listings):
+def main():
     from time import sleep
 
-    id_to_address = map_existing_entries(sorted(glob("tokens/0x*.yaml")))
+    existing_files = sorted(glob("tokens/0x*.yaml"))
+    id_to_addresses = map_entries_to_sets(existing_files, "address")
+    slugs = map_entries_to_discrete(existing_files, "slug")
+
+    api_listings = get_api_listings()
+    api_slugs = {e["id"]: e["website_slug"] for e in api_listings}
+
+    slugs.update(api_slugs)
 
-    for listing in listings:
+    for (asset_id, asset_website_slug) in slugs.items():
         try:
-            result = process_listing(listing)
+            result = process_listing(asset_website_slug)
         except:
             logging.exception(
                 "Final error when trying to process listing for '%s'",
-                listing["website_slug"])
+                asset_website_slug)
             continue
 
-        (updated_listing, current_addresses) = result
-
-        existing_addresses = id_to_address.get(listing["id"], set())
-        for address in existing_addresses - current_addresses:
-            logging.warning("'%s' has deprecated %s", listing["website_slug"],
-                            address)
-            deprecate_token_entry(address)
+        (listing, current_addresses) = result
+
+        if listing:
+            assert listing["id"] == asset_id
+
+            if asset_website_slug != listing["slug"]:
+                logging.warning("'%s' redirected to slug '%s' when queried",
+                                asset_website_slug, listing['slug'])
+
+        existing_addresses = id_to_addresses.get(asset_id, set())
+        # Deal with delisted assets and deprecated addresses
+        if existing_addresses and listing is None:
+            # listing is None when the page failed to fetch (404ed)
+            logging.warning("'%s' has been delisted", asset_website_slug)
+            for address in existing_addresses:
+                update_token_entry(address, {
+                    "status": "delisted",
+                    "markets": [],
+                    "rank": None
+                })
+        else:
+            for address in existing_addresses - current_addresses:
+                logging.warning("'%s' has deprecated %s", asset_website_slug,
+                                address)
+                update_token_entry(
+                    address, {
+                        "_DEPRECATED": True,
+                        "status": "deprecated",
+                        "markets": [],
+                        "rank": None
+                    })
 
         for address in current_addresses:
-            write_token_entry(address, updated_listing)
-
-    listings_ids = [e["id"] for e in listings]
-    ids_removed_from_listings = id_to_address.keys() - listings_ids
-    for removed_id in ids_removed_from_listings:
-        for removed_asset_address in id_to_address[removed_id]:
-            deprecate_token_entry(removed_asset_address)
+            write_token_entry(address, listing)
 
 
 if __name__ == "__main__":
     logging.getLogger().setLevel(logging.DEBUG)
 
-    main(get_listings())
+    main()
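To make the two new lookup shapes concrete, an illustrative sketch (not part of the commit) using invented in-memory entries in place of the YAML files that read_entry would load; the real functions take file names, so these are simplified stand-ins with shortened placeholder addresses:

entries = [
    {"id": 1027, "address": "0x01", "slug": "example-one"},  # two contracts,
    {"id": 1027, "address": "0x02", "slug": "example-one"},  # one CMC asset id
    {"id": 2222, "address": "0x03", "slug": "example-two", "_DEPRECATED": True},
]


def map_entries_to_sets(entries, key, exclude_deprecated=True):
    # id -> set of values, mirroring how generate.py groups addresses per asset
    result = {}
    for e in entries:
        if exclude_deprecated and e.get("_DEPRECATED", False):
            continue
        result.setdefault(e["id"], set()).add(e[key])
    return result


def map_entries_to_discrete(entries, key, exclude_deprecated=True):
    # id -> single value, e.g. the slug used to query CoinMarketCap
    return {
        e["id"]: e[key]
        for e in entries
        if not (exclude_deprecated and e.get("_DEPRECATED", False))
    }


print(map_entries_to_sets(entries, "address"))   # {1027: {'0x01', '0x02'}}
print(map_entries_to_discrete(entries, "slug"))  # {1027: 'example-one'}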