Introduce V2 of current and historical functions
This commit is contained in:
parent
27cd98ee52
commit
ff8e182336
5
.gitignore
vendored
Normal file
5
.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
build
|
||||
package
|
||||
venv
|
||||
out
|
||||
utils
|
49
Makefile
49
Makefile
@ -1,4 +1,17 @@
|
||||
requirements:
|
||||
rm -rf package
|
||||
pip install --platform manylinux2014_aarch64 --target=package --implementation cp --python-version 3.12 --only-binary=:all: -r requirements.txt
|
||||
|
||||
requirements-arm64:
|
||||
rm -rf package
|
||||
pip install --target=package --implementation cp --python-version 3.12 -r requirements.txt
|
||||
|
||||
clean-pycache:
|
||||
rm -rvf wow_token/__pycache__
|
||||
rm -rvf wow_token/db/__pycache__
|
||||
|
||||
token-current:
|
||||
rm -f build/wow-token-current.zip
|
||||
zip build/wow-token-current.zip wow-token-current.py
|
||||
|
||||
token-current-upload: token-current
|
||||
@ -6,7 +19,18 @@ token-current-upload: token-current
|
||||
aws s3 cp build/wow-token-current.zip s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
aws s3 cp build/wow-token-current.zip.sha256 s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
|
||||
token-current-v2: clean-pycache
|
||||
rm -f build/wow-token-current-v2.zip
|
||||
zip -r build/wow-token-current-v2.zip wow_token
|
||||
zip -g build/wow-token-current-v2.zip wow-token-current-v2.py
|
||||
|
||||
token-current-v2-upload: token-current-v2
|
||||
openssl dgst -sha256 -binary build/wow-token-current-v2.zip | openssl enc -base64 > build/wow-token-current-v2.zip.sha256
|
||||
aws s3 cp build/wow-token-current-v2.zip s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
aws s3 cp build/wow-token-current-v2.zip.sha256 s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
|
||||
token-historical:
|
||||
rm -f build/wow-token-historical.zip
|
||||
zip build/wow-token-historical.zip wow-token-historical.py
|
||||
|
||||
token-historical-upload: token-historical
|
||||
@ -14,8 +38,19 @@ token-historical-upload: token-historical
|
||||
aws s3 cp build/wow-token-historical.zip s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
aws s3 cp build/wow-token-historical.zip.sha256 s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
|
||||
token-updater:
|
||||
cd venv/lib/python3.9/site-packages && zip -qr ../../../../build/wow-token-updater.zip .
|
||||
token-historical-v2: clean-pycache
|
||||
rm -f build/wow-token-historical-v2.zip
|
||||
zip -r build/wow-token-historical-v2.zip wow_token
|
||||
zip -g build/wow-token-historical-v2.zip wow-token-historical-v2.py
|
||||
|
||||
token-historical-v2-upload: token-historical-v2
|
||||
openssl dgst -sha256 -binary build/wow-token-historical-v2.zip | openssl enc -base64 > build/wow-token-historical-v2.zip.sha256
|
||||
aws s3 cp build/wow-token-historical-v2.zip s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
aws s3 cp build/wow-token-historical-v2.zip.sha256 s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
|
||||
token-updater: requirements
|
||||
rm -f build/wow-token-updater.zip
|
||||
cd package && zip -qr ../build/wow-token-updater.zip .
|
||||
zip -g build/wow-token-updater.zip wow-token-updater.py
|
||||
|
||||
token-updater-upload: token-updater
|
||||
@ -23,8 +58,16 @@ token-updater-upload: token-updater
|
||||
aws s3 cp build/wow-token-updater.zip s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
aws s3 cp build/wow-token-updater.zip.sha256 s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
|
||||
token-compactor: requirements
|
||||
rm -f build/wow-token-compactor.zip
|
||||
zip build/wow-token-compactor.zip wow-token-compactor.py
|
||||
|
||||
upload: token-current-upload token-updater-upload token-historical-upload
|
||||
token-compactor-upload: token-compactor
|
||||
openssl dgst -sha256 -binary build/wow-token-compactor.zip | openssl enc -base64 > build/wow-token-compactor.zip.sha256
|
||||
aws s3 cp build/wow-token-compactor.zip s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
aws s3 cp build/wow-token-compactor.zip.sha256 s3://emily-infrastructure-artifacts/wowtoken-backend/ --region us-west-1
|
||||
|
||||
upload: token-current-v2-upload token-updater-upload token-historical-upload token-compactor-upload
|
||||
|
||||
clean:
|
||||
rm -v build/*
|
||||
|
61
wow-token-current-v2.py
Normal file
61
wow-token-current-v2.py
Normal file
@ -0,0 +1,61 @@
|
||||
import json
|
||||
|
||||
from wow_token.db.current import Current
|
||||
from wow_token.flavor import Flavor
|
||||
|
||||
# Current is a global so it's initialized with the Lambda and stays initialized through the lifecycle
|
||||
# of that Lambda runner
|
||||
|
||||
CURRENT_DB = Current()
|
||||
|
||||
# The URI for the Current function should look like /v2/current/{flavor}
|
||||
def flavor_from_path(uri: str) -> Flavor:
    """Map the trailing segment of *uri* to a Flavor.

    Accepts both bare and '.json'-suffixed segments; raises
    NotImplementedError for anything else (surfaced as a 404 upstream).
    """
    leaf = uri.split('/')[-1]
    if leaf in ('classic', 'classic.json'):
        return Flavor.CLASSIC
    if leaf in ('retail', 'retail.json'):
        return Flavor.RETAIL
    raise NotImplementedError
|
||||
|
||||
|
||||
def path_handler(uri: str):
    """Resolve *uri* to the current-price payload for its flavor."""
    return CURRENT_DB.get_current_all(flavor_from_path(uri))
|
||||
|
||||
|
||||
def lambda_handler(event, context):
    """Lambda@Edge entry point: serve current token prices as JSON.

    Returns a CloudFront response dict; unsupported paths yield a 404.
    """
    def _response(status, description, body):
        # Both branches share the same response shape; only the status
        # line and body differ.
        return {
            'status': status,
            'statusDescription': description,
            'headers': {
                'content-type': [{
                    'key': 'Content-Type',
                    'value': 'application/json'
                }]
            },
            'body': json.dumps(body)
        }

    uri = event['Records'][0]['cf']['request']['uri']
    try:
        return _response('200', 'OK', path_handler(uri))
    except NotImplementedError:
        return _response('404', 'Not Found', {'error': 'Not Found'})
|
||||
|
||||
|
||||
def main():
    """CLI smoke test: dump the current retail prices to stdout."""
    print(json.dumps(CURRENT_DB.get_current_all(Flavor.RETAIL)))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -1,80 +0,0 @@
|
||||
import boto3
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
|
||||
dynamo_region_map = {
|
||||
'us-west-1': 'us-west-1',
|
||||
'us-west-2': 'us-west-2',
|
||||
'us-east-1': 'us-east-1',
|
||||
'us-east-2': 'us-east-2',
|
||||
'ap-south-1': 'ap-south-1',
|
||||
'ap-northeast-3': 'ap-northeast-1',
|
||||
'ap-northeast-2': 'ap-northeast-1',
|
||||
'ap-southeast-1': 'ap-southeast-1',
|
||||
'ap-southeast-2': 'ap-southeast-2',
|
||||
'ap-northeast-1': 'ap-northeast-1',
|
||||
'ca-central-1': 'us-east-1',
|
||||
'eu-central-1': 'eu-north-1',
|
||||
'eu-west-1': 'eu-west-1',
|
||||
'eu-west-2': 'eu-west-1',
|
||||
'eu-west-3': 'eu-west-3',
|
||||
'eu-north-1': 'eu-north-1',
|
||||
'sa-east-1': 'sa-east-1',
|
||||
'eu-south-1': 'eu-north-1'
|
||||
} # This is a rough first pass at an intelligent region selector based on what is replicated
|
||||
local_region = ''
|
||||
if os.environ['AWS_REGION'] in dynamo_region_map:
|
||||
local_region = dynamo_region_map[os.environ['AWS_REGION']]
|
||||
else:
|
||||
local_region = 'eu-central-1'
|
||||
|
||||
dynamodb_client = boto3.resource('dynamodb', region_name=local_region)
|
||||
retail_table = dynamodb_client.Table('wow-token-price')
|
||||
classic_table = dynamodb_client.Table('wow-token-classic-price')
|
||||
|
||||
regions = ['us', 'eu', 'tw', 'kr']
|
||||
regional_data = {
|
||||
'us': {'current_time': 0, 'price': 0},
|
||||
'eu': {'current_time': 0, 'price': 0},
|
||||
'tw': {'current_time': 0, 'price': 0},
|
||||
'kr': {'current_time': 0, 'price': 0}
|
||||
}
|
||||
|
||||
|
||||
def token_data(version: str) -> dict:
    # Build the response payload for one game version. 'retail' selects the
    # retail table; any other value falls back to the classic table.
    if version == 'retail':
        table = retail_table
    else:
        table = classic_table

    # One item per region, so a full scan is cheap here.
    items = table.scan()['Items']
    data = {
        'current_time': datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat(timespec="seconds"),
        'price_data': {},
        'update_times': {},
    }
    for item in items:
        # Prices are stored in copper; divide by 10 000 to get whole gold.
        data['price_data'][item['region']] = int(int(item['price']) / 10000)
        data['update_times'][item['region']] = (
            datetime.datetime
            .utcfromtimestamp(int(item['current_time']))
            .replace(tzinfo=datetime.timezone.utc).isoformat()
        )
    return data
|
||||
|
||||
|
||||
def lambda_handler(event, context):
    # Lambda@Edge origin handler: serves current token prices as JSON.
    uri = event['Records'][0]['cf']['request']['uri']
    print(f"URI:\t${uri}")
    split_uri = uri.split('/')
    # Version is inferred from the third-from-last path segment; anything
    # that is not 'classic' is treated as retail.
    if split_uri[-3] == 'classic':
        version = 'classic'
    else:
        version = 'retail'
    data = token_data(version)
    response = {'status': '200', 'statusDescription': 'OK', 'headers': {}}
    response['headers']['content-type'] = [{'key': 'Content-Type', 'value': 'application/json'}]
    response['body'] = json.dumps(data)
    print('AWS Region:' + os.environ['AWS_REGION'] + '\tdynamodb_connect_region: ' + local_region)
    return response
|
74
wow-token-historical-v2.py
Normal file
74
wow-token-historical-v2.py
Normal file
@ -0,0 +1,74 @@
|
||||
import json
|
||||
from typing import Tuple, List
|
||||
|
||||
from wow_token.db.compacted import Compacted
|
||||
from wow_token.db.recent import Recent
|
||||
from wow_token.path_handler.math_path_handler import MathPathHandler
|
||||
from wow_token.path_handler.relative_error import InvalidRelativePathError
|
||||
from wow_token.path_handler.relative_path_handler import RelativePathHandler
|
||||
|
||||
COMPACTED_DB = Compacted()
|
||||
RECENT_DB = Recent()
|
||||
|
||||
|
||||
def handle_not_implemented_error():
    """Build the skeleton CloudFront 404 response for unsupported paths."""
    response = {}
    response['status'] = '404'
    response['statusDescription'] = 'Not Found'
    response['headers'] = {}
    return response
|
||||
|
||||
def find_function(path) -> str:
    """Return the path segment immediately after the 'v2' segment.

    Raises IndexError when no 'v2' segment exists (or nothing follows it),
    matching the original scan-past-the-end behaviour.
    """
    segments = path.split('/')
    for position, segment in enumerate(segments):
        if segment == 'v2':
            return segments[position + 1]
    raise IndexError('list index out of range')
|
||||
|
||||
|
||||
def path_handler(path):
    """Dispatch a /v2/{function}/... URI to the matching handler class.

    Raises NotImplementedError for unknown (or not-yet-built) functions.
    """
    function = find_function(path)
    # Hottest path first: /v2/relative/{flavor}/{region}/{range}
    if function == 'relative':
        handler = RelativePathHandler(COMPACTED_DB, RECENT_DB)
        return handler.path_handler(path)
    # /v2/math/{math_function}/{flavor}/{region}/{range}
    if function == 'math':
        handler = MathPathHandler(COMPACTED_DB, RECENT_DB)
        return handler.path_handler(path)
    # 'absolute' is reserved but not implemented; everything else 404s.
    raise NotImplementedError
|
||||
|
||||
|
||||
def lambda_handler(event, context):
    """Lambda@Edge entry point for the historical-data API.

    Returns a CloudFront response dict: 200 with a JSON body on success,
    404 for unknown or malformed paths.
    """
    uri = event['Records'][0]['cf']['request']['uri']
    try:
        data = path_handler(uri)
        return {
            'status': '200',
            'statusDescription': 'OK',
            'headers': {
                'content-type': [{
                    'key': 'Content-Type',
                    'value': 'application/json'
                }]
            },
            'body': json.dumps(data)
        }
    except (NotImplementedError, InvalidRelativePathError):
        # Bug fix: this previously returned json.dumps(...) — a plain JSON
        # string — but CloudFront expects a response *object*, as the
        # success branch (and the current-v2 handler) already return.
        return handle_not_implemented_error()
|
||||
|
||||
|
||||
def main():
    """Local smoke test against the math handler."""
    print(path_handler('/v2/math/avg/retail/us/2m.json'))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@ -1,345 +0,0 @@
|
||||
import sys
|
||||
from typing import List, Dict
|
||||
|
||||
import boto3
|
||||
from boto3.dynamodb.conditions import Key
|
||||
from collections import deque
|
||||
import datetime
|
||||
import calendar
|
||||
import json
|
||||
import os
|
||||
import statistics
|
||||
|
||||
dynamo_region_map = {
|
||||
'us-west-1': 'us-west-1',
|
||||
'us-west-2': 'us-west-2',
|
||||
'us-east-1': 'us-east-1',
|
||||
'us-east-2': 'us-east-2',
|
||||
'ap-south-1': 'eu-north-1',
|
||||
'ap-northeast-3': 'ap-northeast-1',
|
||||
'ap-northeast-2': 'ap-northeast-1',
|
||||
'ap-southeast-1': 'ap-southeast-1',
|
||||
'ap-southeast-2': 'ap-southeast-2',
|
||||
'ap-northeast-1': 'ap-northeast-1',
|
||||
'ca-central-1': 'us-east-1',
|
||||
'eu-central-1': 'eu-north-1',
|
||||
'eu-west-1': 'eu-west-1',
|
||||
'eu-west-2': 'eu-west-1',
|
||||
'eu-west-3': 'eu-west-3',
|
||||
'eu-north-1': 'eu-north-1',
|
||||
'sa-east-1': 'sa-east-1',
|
||||
'eu-south-1': 'eu-north-1'
|
||||
} # This is a rough first pass at an intelligent region selector based on what is replicated
|
||||
local_region = ''
|
||||
if os.environ['AWS_REGION'] in dynamo_region_map:
|
||||
local_dynamo_region = dynamo_region_map[os.environ['AWS_REGION']]
|
||||
else:
|
||||
local_dynamo_region = 'eu-central-1'
|
||||
local_timestream_region = 'eu-central-1'
|
||||
|
||||
timestream_client = boto3.client('timestream-query', region_name='us-east-1')
|
||||
dynamodb_client = boto3.resource('dynamodb', region_name=local_dynamo_region)
|
||||
|
||||
tables = {
|
||||
'retail': {
|
||||
'recent': 'wow-token-price-recent',
|
||||
'current': 'wow-token-price',
|
||||
'compacted': 'wow-token-compacted',
|
||||
'timestream': 'wow-token-price-history'
|
||||
},
|
||||
'classic': {
|
||||
'recent': 'wow-token-classic-price-recent',
|
||||
'current': 'wow-token-classic-price',
|
||||
'compacted': 'wow-token-compacted',
|
||||
'timestream': 'wow-token-classic-price-history'
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def historical_data(time, region, version):
    """Route a range request to the recent table (hour ranges) or the
    pre-compacted table (everything else).

    '30d' is rewritten to '720h': at least one caller still uses the
    long-deprecated 30d URL, and this keeps local caching consistent.
    """
    normalized = '720h' if time == '30d' else time
    if normalized[-1] == 'h':
        return dynamo_data(normalized, region, version)
    return dynamo_compacted(normalized, region, version)
|
||||
|
||||
|
||||
def _get_dynamo_compacted(time: str, region: str, version: str) -> List[Dict[str, int|str]]:
    # Fetch one pre-compacted series from DynamoDB and unpack it into a
    # list of {'time': iso8601, 'value': gold} points sorted by timestamp.
    table = dynamodb_client.Table(tables[version]['compacted'])
    # Partition-key format, e.g. 'us-retail-1y'.
    pk = f'{region}-{version}-{time}'
    response = table.query(
        KeyConditionExpression=(
            Key('region-flavor-timestamp').eq(pk)
        )
    )
    # A single item holds the whole series as a {unix_ts: price} map.
    # NOTE(review): raises IndexError when the key has no item — confirm
    # callers only pass validated ranges.
    response_data = sorted(response['Items'][0]['data'].items())
    data = []
    for item in response_data:
        data.append({
            'time': datetime.datetime.fromtimestamp(
                int(item[0]),
                tz=datetime.UTC).isoformat(),
            'value': int(item[1])
        })
    return data
|
||||
|
||||
|
||||
def dynamo_compacted(time: str, region: str, version: str) -> List[Dict[str, int]]:
    """Public wrapper over the compacted-table fetch."""
    return _get_dynamo_compacted(time, region, version)
|
||||
|
||||
|
||||
def dynamo_data(time, region, version):
    # Query the 'recent' table for the trailing <time> hours (e.g. '72h')
    # and collapse consecutive duplicate prices into single points.
    # NOTE(review): local_region is the module-level placeholder '' here —
    # the selector assigns local_dynamo_region instead, so this log line
    # always prints an empty region.
    print(f"Function region: {os.environ['AWS_REGION']}\t Dynamo Region: {local_region}")
    time_stripped = int(time[:-1])  # '72h' -> 72
    start_time = datetime.datetime.utcnow() - datetime.timedelta(hours=time_stripped)
    start_time_utc = start_time.replace(tzinfo=datetime.timezone.utc)
    table = dynamodb_client.Table(tables[version]['recent'])
    response = table.query(
        KeyConditionExpression=(
            Key('region').eq(region) &
            Key('timestamp').gte(int(start_time_utc.timestamp()))))
    data = []
    last_price = 0
    for item in response['Items']:
        # Prices are stored in copper; divide by 10 000 to get whole gold.
        price = int(int(item['price']) / 10000)
        # Only emit a point when the price actually changed.
        if last_price != price:
            item_time = datetime.datetime.utcfromtimestamp(int(item['timestamp'])).replace(
                tzinfo=datetime.timezone.utc).isoformat()
            data.append({
                'time': item_time,
                'value': price
            })
            last_price = price
    return data
|
||||
|
||||
|
||||
def aggregate_data(aggregate_function: str, data: list):
    """Apply a named daily/weekly aggregate to a point series.

    Unrecognised names return None (callers validate first).
    """
    dispatch = {
        'daily_max': lambda d: max_min(1, 1, d),
        'daily_min': lambda d: max_min(-1, 1, d),
        'daily_mean': lambda d: mean(1, d),
        'weekly_max': lambda d: max_min(1, 7, d),
        'weekly_min': lambda d: max_min(-1, 7, d),
        'weekly_mean': lambda d: mean(7, d),
    }
    handler = dispatch.get(aggregate_function)
    return handler(data) if handler is not None else None
|
||||
|
||||
|
||||
def date_in_range(day_range: tuple, date: datetime.datetime):
    """Return True when date.day falls inside the [start, end) day bucket.

    The second branch accepts days that wrapped past a month boundary
    (day below both bounds). The original's unused calendar.monthrange
    call has been removed.
    """
    start, end = day_range
    if start <= date.day < end:
        return True
    # Wrapped bucket: the bucket crosses into the next month, so early
    # days of that month sit below both bounds.
    # TODO(review): original noted a sanity check is probably missing here.
    return date.day < end and date.day < start
|
||||
|
||||
|
||||
def day_bucket(bucket_size: int, date: datetime.datetime) -> tuple[datetime.datetime, datetime.datetime]:
    """Return the (start, end) datetimes of the bucket starting at *date*.

    Weekly buckets are anchored to the WoW reset day, Tuesday (weekday 1):
    when *date* is not a Tuesday the first bucket is shortened so the next
    bucket starts on reset day. Monthly boundaries are still TODO.
    The original's unused calendar.monthrange call has been removed.
    """
    # Days remaining until the next Tuesday, keyed by weekday().
    days_to_reset = {0: 1, 1: 0, 2: 6, 3: 5, 4: 4, 5: 3, 6: 2}
    if bucket_size == 7 and date.weekday() != 1:
        # This is WoW, the week starts on Tuesday (datetime index 1).
        bucket_size = days_to_reset[date.weekday()]
    return tuple((date, date + datetime.timedelta(days=bucket_size)))
|
||||
|
||||
|
||||
def is_new_bucket(d_datetime: datetime.datetime, current_bucket_day: datetime.datetime.day, bucket: tuple) -> bool:
    """True when *d_datetime* has left the current bucket.

    A new bucket starts when the day changed AND we either passed the
    bucket's end or landed on reset day (Tuesday).
    """
    if d_datetime.day == current_bucket_day:
        return False
    return d_datetime >= bucket[1] or d_datetime.weekday() == 1
|
||||
|
||||
|
||||
def __sum_total(__data: list) -> int:
    """Sum the 'value' field across a list of point dicts.

    Replaces the original manual accumulation loop with the builtin sum().
    """
    return sum(__d['value'] for __d in __data)
|
||||
|
||||
|
||||
def max_min(fn: int, bucket_size: int, data: list) -> list:
    # Bucketed extreme: fn == 1 keeps each bucket's maximum point,
    # fn == -1 its minimum. Bucket boundaries follow day_bucket()
    # (daily, or reset-day-anchored weekly).
    new_data = []
    first_date = datetime.datetime.fromisoformat(data[0]['time'])
    current_bucket_day = first_date.day
    # I hate working with dates
    bucket = day_bucket(bucket_size, first_date)
    min_max = {'minimum': 999_999_999, 'maximum': 0}
    # NOTE(review): seeded with datetime objects here but re-seeded with
    # .isoformat() strings below; in practice both are overwritten by real
    # data before being emitted — confirm no empty bucket can flush.
    min_max_date = {'minimum_date': datetime.datetime.min, 'maximum_date': datetime.datetime.max}

    for d in data:
        d_datetime = datetime.datetime.fromisoformat(d['time'])
        # current_day is used to check if this 'if' has triggered for a new bucket and bypass if it has
        if is_new_bucket(d_datetime, current_bucket_day, bucket):
            current_bucket_day = d_datetime.day
            bucket = day_bucket(bucket_size, d_datetime)
            # Flush the completed bucket's extreme before starting anew.
            if fn == -1:  # Minimum function
                new_data.append({'time': min_max_date['minimum_date'], 'value': min_max['minimum']})
            elif fn == 1:  # Maximum function
                new_data.append({'time': min_max_date['maximum_date'], 'value': min_max['maximum']})
            min_max = {'minimum': 999_999_999, 'maximum': 0}
            min_max_date = {
                'minimum_date': datetime.datetime.min.isoformat(),
                'maximum_date': datetime.datetime.max.isoformat()
            }

        if d['value'] < min_max['minimum']:
            min_max['minimum'] = d['value']
            min_max_date['minimum_date'] = d_datetime.isoformat()

        if d['value'] > min_max['maximum']:
            min_max['maximum'] = d['value']
            min_max_date['maximum_date'] = d_datetime.isoformat()

    # NOTE(review): the final (partial) bucket is never flushed, so the
    # series ends at the last completed bucket — confirm intended.
    return new_data
|
||||
|
||||
|
||||
def mean(bucket_size: int, data: list) -> list:
    # Bucketed arithmetic mean: one point per completed bucket.
    new_data = []
    first_date = datetime.datetime.fromisoformat(data[0]['time'])
    current_bucket_day = first_date.day
    bucket = day_bucket(bucket_size, first_date)
    mean_bucket = []  # raw values accumulated for the open bucket
    bucket_date = first_date  # NOTE(review): assigned but never used

    for d in data:
        d_datetime = datetime.datetime.fromisoformat(d['time'])
        if is_new_bucket(d_datetime, current_bucket_day, bucket):
            current_bucket_day = d_datetime.day
            bucket = day_bucket(bucket_size, d_datetime)
            # NOTE(review): `bucket` is reassigned to the NEW bucket before
            # this append, so the flushed mean is stamped with the new
            # bucket's start time — confirm intended.
            new_data.append({'time': bucket[0].isoformat(), 'value': int(statistics.mean(mean_bucket))})
            mean_bucket = []

        mean_bucket.append(d['value'])

    # NOTE(review): the final partial bucket is dropped — confirm intended.
    return new_data
|
||||
|
||||
|
||||
# TODO FIXME
|
||||
def simple_moving_average(hours: int, data: list) -> list:
    # The cyclomatic complexity of this function is getting high, I need to figure out a more elegant solution
    # Rolling average over a deque of points spanning *hours* hourly slots.
    # NOTE(review): flagged TODO FIXME upstream. head_date starts at
    # data[8] (magic index) and the refill branch compares against a
    # hard-coded 5 rather than *hours* — both look suspect; verify before
    # relying on this function.
    new_data = []
    queue = deque()
    hours_in_queue = 0
    head_date = datetime.datetime.fromisoformat(data[8]['time'])
    for datum in data:
        datum_datetime = datetime.datetime.fromisoformat(datum['time'])
        if datum_datetime.hour == head_date.hour:
            queue.append(datum)
        elif datum_datetime.hour != head_date.hour:
            if hours_in_queue == hours:
                # Window full: emit the average stamped at the head time.
                q_list = list(queue)
                total = __sum_total(q_list)
                new_datum = {
                    'value': int(total / len(q_list)),
                    'time': head_date.isoformat()
                }
                new_data.append(new_datum)
                # Count entries sharing the head hour so they can be evicted.
                # NOTE(review): '__dt.day == __dt.day' is always True —
                # probably meant to compare against head_date.day.
                deque_val = 0
                for d in q_list:
                    __dt = datetime.datetime.fromisoformat(d['time'])
                    if __dt.hour == head_date.hour and __dt.day == __dt.day:
                        deque_val += 1
                while deque_val != 0:
                    queue.pop()
                    deque_val -= 1
                hours_in_queue -= 1
                head_date = datum_datetime
            elif hours_in_queue < 5:
                queue.append(datum)
                hours_in_queue += 1
    return new_data
|
||||
|
||||
|
||||
def moving_weighted_average(days: int, data: list) -> list:
    """Not yet implemented; placeholder that returns None."""
    pass
|
||||
|
||||
|
||||
def validate_path(split_uri: list) -> bool:
    """Validate a split URI: must end in *.json with a known region and a
    well-formed time range."""
    return (split_uri[-1].endswith('json')
            and validate_region(split_uri[-2])
            and validate_time(split_uri[-1].split('.')[0]))
|
||||
|
||||
|
||||
def validate_time(time: str) -> bool:
    """Validate a range token: '<n>h'/'<n>d'/'<n>m'/'<n>y' within the
    allowed bounds, or the literal 'all'."""
    # Inclusive (low, high) bounds per unit suffix.
    bounds = {'h': (24, 999), 'd': (30, 100), 'm': (1, 12), 'y': (1, 10)}
    suffix = time[-1]
    if suffix in bounds:
        low, high = bounds[suffix]
        return low <= int(time[:-1]) <= high
    return time == 'all'
|
||||
|
||||
|
||||
def validate_region(region: str) -> bool:
    """True for one of the four supported Battle.net regions."""
    return region in ('us', 'eu', 'tw', 'kr')
|
||||
|
||||
|
||||
def validate_aggregate(aggregate_function: str) -> bool:
    """True when the name is a supported {daily,weekly}_{max,min,mean}."""
    supported = {f'{period}_{kind}'
                 for period in ('daily', 'weekly')
                 for kind in ('max', 'min', 'mean')}
    return aggregate_function in supported
|
||||
|
||||
|
||||
def lambda_handler(event, context):
    # Lambda@Edge entry point for the v1 historical API:
    # /.../{aggregate?}/{region}/{range}.json
    uri = event['Records'][0]['cf']['request']['uri']
    split_uri = uri.split('/')
    if validate_path(split_uri):
        # Any 'classic' segment anywhere in the path selects classic data.
        if 'classic' in split_uri:
            version = 'classic'
        else:
            version = 'retail'
        time = split_uri[-1].split('.')[0]
        region = split_uri[-2]
        aggregate_function = split_uri[-3]
        data = historical_data(time, region, version)

        # Optional aggregation; unknown names fall through to the raw series.
        if validate_aggregate(aggregate_function):
            data = aggregate_data(aggregate_function, data)

        response = {'status': '200', 'statusDescription': 'OK', 'headers': {}}
        response['headers']['content-type'] = [{'key': 'Content-Type', 'value': 'application/json'}]
        response['body'] = json.dumps(data)
        return response
    else:
        return {'status': '404', 'statusDescription': 'NotFound', 'headers': {}}
|
||||
|
||||
|
||||
def main():
    """Local entry point; intentionally a stub.

    Example manual check:
        print(dynamo_compacted('1y', 'us', 'retail'))
    """
    pass
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
0
wow_token/__init__.py
Normal file
0
wow_token/__init__.py
Normal file
0
wow_token/db/__init__.py
Normal file
0
wow_token/db/__init__.py
Normal file
28
wow_token/db/cache.py
Normal file
28
wow_token/db/cache.py
Normal file
@ -0,0 +1,28 @@
|
||||
import datetime
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
from wow_token.db.cached_range import CachedRange
|
||||
from wow_token.db.trinity import Trinity
|
||||
|
||||
|
||||
class Cache:
    # In-memory memo of fully-closed month series fetched from the
    # compacted table; lives for the Lambda container's lifetime.

    # str(trinity) -> [(timestamp, price), ...]
    _cache : Dict[str, List[Tuple[datetime.datetime, int]]]
    # Owning Compacted instance, used on cache misses.
    _db : 'Compacted'

    def __init__(self, compacted_db: 'Compacted'):
        self._db = compacted_db
        self._cache = {}


    def get_month(self, trinity: Trinity) -> List[Tuple[datetime.datetime, int]]:
        # Serve a month series, caching it only once the month is closed:
        # the still-running current month is always re-fetched.
        current_time = datetime.datetime.now(datetime.UTC)
        # CachedRange keys ('1y', 'all', ...) are not month-shaped; callers
        # must route those elsewhere.
        if isinstance(trinity.range, CachedRange):
            raise NotImplementedError

        current_month = trinity.range.month == current_time.month and trinity.range.year == current_time.year

        if not current_month and str(trinity) in self._cache:
            return self._cache[str(trinity)]

        # Miss (or current month): fetch from DynamoDB and remember.
        # NOTE(review): the current month is stored too but never served
        # from cache — harmless, though it keeps a stale copy around.
        self._cache[str(trinity)] = self._db.ddb_get_data(trinity)
        return self._cache[str(trinity)]
|
14
wow_token/db/cached_range.py
Normal file
14
wow_token/db/cached_range.py
Normal file
@ -0,0 +1,14 @@
|
||||
class CachedRange:
    """A validated pre-computed range key ('30d', '90d', '6m', '1y', '2y',
    'all').

    I despise magic strings but this is about as good as I can get
    without enum support.
    """
    _PRECOMPUTE_RANGES = ['30d', '90d', '6m', '1y', '2y', 'all']

    def __init__(self, _range: str):
        if _range in self._PRECOMPUTE_RANGES:
            self._range = _range
        else:
            raise ValueError(f'Invalid range: {_range}')

    @property
    def range(self):
        """The validated range string."""
        return self._range

    def __str__(self):
        return self._range
|
84
wow_token/db/compacted.py
Normal file
84
wow_token/db/compacted.py
Normal file
@ -0,0 +1,84 @@
|
||||
import datetime
|
||||
import os
|
||||
from typing import List, Dict, Tuple, Union, Type
|
||||
|
||||
import boto3
|
||||
from boto3.dynamodb.conditions import Key
|
||||
|
||||
from wow_token.db.trinity import Trinity
|
||||
from wow_token.db.year_month import YearMonth
|
||||
from wow_token.db.cache import Cache
|
||||
from wow_token.region import Region
|
||||
|
||||
# TODO: Reduce Compacted Table Sprawl
|
||||
|
||||
REGION_MAP = {
|
||||
'us-west-1': 'us-west-1',
|
||||
'us-west-2': 'us-west-2',
|
||||
'us-east-1': 'us-east-1',
|
||||
'us-east-2': 'us-east-2',
|
||||
'ap-south-1': 'eu-north-1',
|
||||
'ap-northeast-3': 'ap-northeast-1',
|
||||
'ap-northeast-2': 'ap-northeast-1',
|
||||
'ap-southeast-1': 'ap-southeast-1',
|
||||
'ap-southeast-2': 'ap-southeast-2',
|
||||
'ap-northeast-1': 'ap-northeast-1',
|
||||
'ca-central-1': 'us-east-1',
|
||||
'eu-central-1': 'eu-north-1',
|
||||
'eu-west-1': 'eu-west-1',
|
||||
'eu-west-2': 'eu-west-1',
|
||||
'eu-west-3': 'eu-west-3',
|
||||
'eu-north-1': 'eu-north-1',
|
||||
'sa-east-1': 'sa-east-1',
|
||||
'eu-south-1': 'eu-north-1'
|
||||
}
|
||||
|
||||
|
||||
def _region_selector():
    """Pick the replicated DynamoDB region nearest this Lambda, falling
    back to eu-central-1 for unmapped AWS regions."""
    return REGION_MAP.get(os.environ['AWS_REGION'], 'eu-central-1')
|
||||
|
||||
|
||||
def _data_as_str(data: List[Tuple[datetime.datetime, int]]) -> List[Tuple[str, int]]:
    """Render (datetime, price) pairs as (iso8601-string, price) pairs.

    Rewritten from a manual append loop to a list comprehension.
    """
    return [(timestamp.isoformat(), price) for timestamp, price in data]
|
||||
|
||||
|
||||
class Compacted:
    # Read-side accessor for the 'wow-token-compacted' DynamoDB table:
    # one item per region/flavor/range, holding a whole series as a
    # {unix_ts: price} map. Month series are memoised via Cache.

    _cache : Cache
    def __init__(self):
        self._ddb = boto3.resource('dynamodb', region_name=_region_selector())
        self._table = self._ddb.Table('wow-token-compacted')
        self._cache = Cache(self)

    def ddb_get_data(self, trinity: Trinity, _type: Union[Type[str], Type[datetime.datetime]] = datetime.datetime) -> Union[List[Tuple[datetime.datetime, int]], List[Tuple[str, int]]]:
        # Fetch one series by its composite key (str(trinity)); returns
        # [(timestamp, price), ...] sorted ascending. _type=str yields
        # ISO-8601 strings instead of datetimes; missing keys yield [].
        data = []
        response = self._table.query(
            KeyConditionExpression=Key('region-flavor-timestamp').eq(str(trinity))
        )
        if response['Items']:
            for timestamp, price in response['Items'][0]['data'].items():
                date_time = datetime.datetime.fromtimestamp(int(timestamp), datetime.UTC)
                if _type == str:
                    date_time = date_time.isoformat()
                data.append((
                    date_time,
                    int(price)
                ))
        return sorted(data, key=lambda x: x[0])

    def get_month(self, trinity: Trinity, _type: Union[Type[str], Type[datetime.datetime]] = datetime.datetime) -> Union[List[Tuple[datetime.datetime, int]], List[Tuple[str, int]]]:
        # Month series go through the closed-month cache; the cache holds
        # datetimes, so string output is converted on the way out.
        if _type == str:
            return _data_as_str(self._cache.get_month(trinity))
        return self._cache.get_month(trinity)

    def get_precomputed_range(self, trinity: Trinity, _type: Union[Type[str], Type[datetime.datetime]] = datetime.datetime) -> Union[List[Tuple[datetime.datetime, int]], List[Tuple[str, int]]]:
        # YearMonth ranges are cacheable months; rolling ranges change as
        # time advances, so they are always fetched fresh.
        if isinstance(trinity.range, YearMonth):
            return self.get_month(trinity, _type=_type)
        else:
            return self.ddb_get_data(trinity, _type=_type)
|
64
wow_token/db/current.py
Normal file
64
wow_token/db/current.py
Normal file
@ -0,0 +1,64 @@
|
||||
import datetime
|
||||
import os
|
||||
from typing import List, Dict, Tuple
|
||||
|
||||
import boto3
|
||||
from boto3.dynamodb.conditions import Key
|
||||
|
||||
from wow_token.flavor import Flavor
|
||||
from wow_token.region import Region
|
||||
|
||||
REGION_MAP = {
|
||||
'us-west-1': 'us-west-1',
|
||||
'us-west-2': 'us-west-2',
|
||||
'us-east-1': 'us-east-1',
|
||||
'us-east-2': 'us-east-2',
|
||||
'ap-south-1': 'eu-north-1',
|
||||
'ap-northeast-3': 'ap-northeast-1',
|
||||
'ap-northeast-2': 'ap-northeast-1',
|
||||
'ap-southeast-1': 'ap-southeast-1',
|
||||
'ap-southeast-2': 'ap-southeast-2',
|
||||
'ap-northeast-1': 'ap-northeast-1',
|
||||
'ca-central-1': 'us-east-1',
|
||||
'eu-central-1': 'eu-north-1',
|
||||
'eu-west-1': 'eu-west-1',
|
||||
'eu-west-2': 'eu-west-1',
|
||||
'eu-west-3': 'eu-west-3',
|
||||
'eu-north-1': 'eu-north-1',
|
||||
'sa-east-1': 'sa-east-1',
|
||||
'eu-south-1': 'eu-north-1'
|
||||
}
|
||||
|
||||
|
||||
def _region_selector():
    """Pick the replicated DynamoDB region nearest this Lambda, falling
    back to eu-central-1 for unmapped AWS regions."""
    return REGION_MAP.get(os.environ['AWS_REGION'], 'eu-central-1')
|
||||
|
||||
|
||||
class Current:
    # Read-side accessor for the per-flavor current-price tables.

    def __init__(self):
        self._ddb = boto3.resource('dynamodb', region_name=_region_selector())
        self._tables = {
            Flavor.RETAIL: self._ddb.Table('wow-token-price'),
            Flavor.CLASSIC: self._ddb.Table('wow-token-classic-price'),
        }

    def _ddb_get_current_all(self, flavor: Flavor) -> Dict[Region, Tuple[datetime.datetime, int]]:
        # Scan the flavor's table (one item per region) into
        # {Region: (updated_at, price_in_gold)}.
        response = self._tables[flavor].scan()
        data = {}
        for item in response['Items']:
            region = Region(item['region'])
            data[region] = (
                datetime.datetime.fromtimestamp(int(item['current_time']), datetime.UTC),
                int(int(item['price']) / 10_000)  # the raw copper value is what is stored in DynamoDB
            )
        return data

    def get_current_all(self, flavor: Flavor) -> Dict[Region, Tuple[str, int]]:
        # Same as _ddb_get_current_all but with ISO-8601 timestamp strings.
        data = {}
        # NOTE(review): '_' here binds the price, not a throwaway value —
        # consider renaming for clarity.
        for region, (timestamp, _) in self._ddb_get_current_all(flavor).items():
            data[region] = (timestamp.isoformat(), _)
        return data
|
67
wow_token/db/recent.py
Normal file
67
wow_token/db/recent.py
Normal file
@ -0,0 +1,67 @@
|
||||
import datetime
|
||||
import os
|
||||
from typing import List, Dict, Tuple
|
||||
|
||||
import boto3
|
||||
from boto3.dynamodb.conditions import Key
|
||||
|
||||
from wow_token.flavor import Flavor
|
||||
from wow_token.region import Region
|
||||
|
||||
REGION_MAP = {
|
||||
'us-west-1': 'us-west-1',
|
||||
'us-west-2': 'us-west-2',
|
||||
'us-east-1': 'us-east-1',
|
||||
'us-east-2': 'us-east-2',
|
||||
'ap-south-1': 'eu-north-1',
|
||||
'ap-northeast-3': 'ap-northeast-1',
|
||||
'ap-northeast-2': 'ap-northeast-1',
|
||||
'ap-southeast-1': 'ap-southeast-1',
|
||||
'ap-southeast-2': 'ap-southeast-2',
|
||||
'ap-northeast-1': 'ap-northeast-1',
|
||||
'ca-central-1': 'us-east-1',
|
||||
'eu-central-1': 'eu-north-1',
|
||||
'eu-west-1': 'eu-west-1',
|
||||
'eu-west-2': 'eu-west-1',
|
||||
'eu-west-3': 'eu-west-3',
|
||||
'eu-north-1': 'eu-north-1',
|
||||
'sa-east-1': 'sa-east-1',
|
||||
'eu-south-1': 'eu-north-1'
|
||||
}
|
||||
|
||||
|
||||
def _region_selector():
|
||||
if os.environ['AWS_REGION'] in REGION_MAP:
|
||||
local_region = REGION_MAP[os.environ['AWS_REGION']]
|
||||
else:
|
||||
local_region = 'eu-central-1'
|
||||
return local_region
|
||||
|
||||
|
||||
class Recent:
    """Read access to the per-flavor rolling "recent price" DynamoDB tables."""

    def __init__(self):
        self._ddb = boto3.resource('dynamodb', region_name=_region_selector())
        self._tables = {
            Flavor.RETAIL: self._ddb.Table('wow-token-price-recent'),
            Flavor.CLASSIC: self._ddb.Table('wow-token-classic-price-recent'),
        }

    def get_after_unix_timestamp(self, flavor: Flavor, region: Region, timestamp: int) -> List[Tuple[str, int]]:
        """Return (iso_timestamp, gold_price) pairs for *region* at/after *timestamp*.

        Consecutive duplicate prices are collapsed to the first occurrence.
        Follows DynamoDB pagination: query() returns at most 1 MB per call, so
        keep issuing requests until LastEvaluatedKey is absent.
        """
        query_kwargs = {
            'KeyConditionExpression': (
                Key('region').eq(region.value) &
                Key('timestamp').gte(timestamp)
            )
        }
        data: List[Tuple[str, int]] = []
        last_price = 0
        while True:
            response = self._tables[flavor].query(**query_kwargs)
            for item in response['Items']:
                price = int(item['price']) // 10_000  # the raw copper value is what is stored in DynamoDB
                if last_price != price:
                    item_time = datetime.datetime.fromtimestamp(int(item['timestamp']), datetime.UTC).isoformat()
                    data.append((item_time, price))
                    last_price = price
            if 'LastEvaluatedKey' not in response:
                break
            query_kwargs['ExclusiveStartKey'] = response['LastEvaluatedKey']
        return data
|
26
wow_token/db/trinity.py
Normal file
26
wow_token/db/trinity.py
Normal file
@ -0,0 +1,26 @@
|
||||
from wow_token.db.cached_range import CachedRange
|
||||
from wow_token.db.year_month import YearMonth
|
||||
from wow_token.flavor import Flavor
|
||||
from wow_token.region import Region
|
||||
|
||||
|
||||
class Trinity:
    """Immutable triple of (region, flavor, range) identifying one dataset.

    Its string form, ``{region}-{flavor}-{range}``, is used as a lookup key.
    """

    def __init__(self, _region: Region, _flavor: Flavor, _range: CachedRange | YearMonth):
        self._region = _region
        self._flavor = _flavor
        self._range = _range

    @property
    def region(self) -> Region:
        """Game region this dataset belongs to."""
        return self._region

    @property
    def flavor(self) -> Flavor:
        """Game flavor (retail/classic) this dataset belongs to."""
        return self._flavor

    @property
    def range(self) -> CachedRange | YearMonth:
        """Time range (cached range or year-month) this dataset covers."""
        return self._range

    def __str__(self):
        parts = (self._region.value, self._flavor.value, str(self._range))
        return '-'.join(parts)
|
23
wow_token/db/year_month.py
Normal file
23
wow_token/db/year_month.py
Normal file
@ -0,0 +1,23 @@
|
||||
class YearMonth:
    """A validated (year, month) pair whose string form is a DynamoDB sort key.

    NOTE: the month is deliberately NOT zero-padded in __str__ ('2024-3', not
    '2024-03'); existing DynamoDB keys depend on this exact format, so do not
    change it. (Original author's comment: "past me is my own worst enemy and
    used it to make sorting on Dynamo easier".)
    """

    # Closed sets of accepted values; membership is checked in __init__.
    VALID_YEARS = [2020, 2021, 2022, 2023, 2024, 2025, 2026, 2027, 2028, 2029, 2030]
    VALID_MONTHS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]

    def __init__(self, year: int, month: int):
        """Store the pair, raising ValueError for out-of-range year or month."""
        if year not in YearMonth.VALID_YEARS:
            raise ValueError(f'Invalid year: {year}')
        if month not in YearMonth.VALID_MONTHS:
            raise ValueError(f'Invalid month: {month}')
        self._year = year
        self._month = month

    @property
    def month(self) -> int:
        """Month number, 1-12."""
        return self._month

    @property
    def year(self) -> int:
        """Four-digit year."""
        return self._year

    def __str__(self):
        return '{}-{}'.format(self._year, self._month)
|
6
wow_token/flavor.py
Normal file
6
wow_token/flavor.py
Normal file
@ -0,0 +1,6 @@
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class Flavor(str, Enum):
    """Game flavor whose token price is tracked.

    A string-valued enum: values are constructed directly from URI path
    segments and used to select the matching DynamoDB table.
    """
    RETAIL = 'retail'
    CLASSIC = 'classic'
|
0
wow_token/path_handler/__init__.py
Normal file
0
wow_token/path_handler/__init__.py
Normal file
50
wow_token/path_handler/math_path_handler.py
Normal file
50
wow_token/path_handler/math_path_handler.py
Normal file
@ -0,0 +1,50 @@
|
||||
import datetime
|
||||
from typing import List, Tuple
|
||||
|
||||
from wow_token.db.cached_range import CachedRange
|
||||
from wow_token.db.compacted import Compacted
|
||||
from wow_token.db.recent import Recent
|
||||
from wow_token.db.trinity import Trinity
|
||||
from wow_token.flavor import Flavor
|
||||
from wow_token.path_handler.relative_error import InvalidRelativePathError
|
||||
from wow_token.path_handler.relative_path_handler import RelativePathHandler
|
||||
from wow_token.region import Region
|
||||
|
||||
|
||||
class MathPathHandler:
|
||||
_cdb : Compacted
|
||||
_rdb : Recent
|
||||
def __init__(self, cdb: Compacted, rdb: Recent):
|
||||
self._cdb = cdb
|
||||
self._rdb = rdb
|
||||
|
||||
def path_handler(self, uri: str) -> List[Tuple[str, int]]:
|
||||
# This URI takes the form of /v2/math/{math_function}/{flavor}/{region}/{range}
|
||||
split_uri = uri.split('/')
|
||||
math_function = split_uri[-4]
|
||||
data = RelativePathHandler(self._cdb, self._rdb).path_handler(uri)
|
||||
|
||||
match math_function:
|
||||
case 'avg':
|
||||
return self._avg(data)
|
||||
case _:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def _avg(self, data: List[Tuple[str, int]]) -> List[Tuple[str, int]]:
|
||||
avg_buckets = []
|
||||
bucket_timestamp = None
|
||||
bucket_price = 0
|
||||
bucket_count = 0
|
||||
for timestamp, price in data:
|
||||
if bucket_timestamp is None:
|
||||
bucket_timestamp = datetime.datetime.fromisoformat(timestamp)
|
||||
elif bucket_timestamp.date() != datetime.datetime.fromisoformat(timestamp).date():
|
||||
bucket_head = datetime.datetime(year=bucket_timestamp.year, month=bucket_timestamp.month, day=bucket_timestamp.day)
|
||||
avg_buckets.append((bucket_head.isoformat(), int(bucket_price/bucket_count)))
|
||||
bucket_price = 0
|
||||
bucket_count = 0
|
||||
bucket_timestamp = datetime.datetime.fromisoformat(timestamp)
|
||||
bucket_price += price
|
||||
bucket_count += 1
|
||||
return avg_buckets
|
2
wow_token/path_handler/relative_error.py
Normal file
2
wow_token/path_handler/relative_error.py
Normal file
@ -0,0 +1,2 @@
|
||||
class InvalidRelativePathError(Exception):
    """Raised when a /v2/relative/... range segment has an unknown unit or
    exceeds the allowed window (e.g. > 1488h, > 730d, > 48m, > 10y)."""
    pass
|
98
wow_token/path_handler/relative_path_handler.py
Normal file
98
wow_token/path_handler/relative_path_handler.py
Normal file
@ -0,0 +1,98 @@
|
||||
import datetime
|
||||
from typing import List, Tuple
|
||||
|
||||
from wow_token.db.cached_range import CachedRange
|
||||
from wow_token.db.compacted import Compacted
|
||||
from wow_token.db.recent import Recent
|
||||
from wow_token.db.trinity import Trinity
|
||||
from wow_token.flavor import Flavor
|
||||
from wow_token.path_handler.relative_error import InvalidRelativePathError
|
||||
from wow_token.region import Region
|
||||
|
||||
|
||||
class RelativePathHandler:
    """Resolves /v2/relative/... URIs into (iso_timestamp, gold_price) series.

    Short windows are served from the "recent" store; longer windows pick the
    smallest precomputed compacted range that contains the window and trim it.
    """

    # Injected data sources: compacted (long-range) and recent (short-range).
    _cdb : Compacted
    _rdb : Recent

    def __init__(self, cdb: Compacted, rdb: Recent):
        self._cdb = cdb
        self._rdb = rdb

    def get_by_timedelta(self, flavor: Flavor, region: Region, timedelta: datetime.timedelta) -> List[Tuple[str, int]]:
        """Return the price series covering the last *timedelta* of time."""
        current_time = datetime.datetime.now(datetime.UTC)
        start_time = current_time - timedelta

        # NOTE(review): windows under 61 days are answered straight from the
        # recent store — presumably its retention window; confirm before changing.
        if timedelta.days < 61:
            return self._rdb.get_after_unix_timestamp(flavor, region, int(start_time.timestamp()))
        elif timedelta.days <= 90:
            trinity = Trinity(region, flavor, CachedRange('90d'))
        elif timedelta.days <= 183:
            trinity = Trinity(region, flavor, CachedRange('6m'))
        elif timedelta.days <= 365:
            trinity = Trinity(region, flavor, CachedRange('1y'))
        elif timedelta.days <= 730:
            trinity = Trinity(region, flavor, CachedRange('2y'))
        else:
            trinity = Trinity(region, flavor, CachedRange('all'))

        # If the data is exactly the size of the precomputed structure, go ahead and return it directly.
        # (182 is the 6-month window produced below: int(30.437 * 6) == 182.)
        if timedelta.days == 90 or timedelta.days == 182 or timedelta.days == 365 or timedelta.days == 730:
            # NOTE(review): the `str` argument appears to request string (ISO)
            # timestamps in the returned tuples — confirm against Compacted.
            return self._cdb.get_precomputed_range(trinity, str)

        # Otherwise fetch the containing precomputed range (datetime-keyed,
        # per the .isoformat() call below) and trim to the requested start.
        final_data = []
        data = self._cdb.get_precomputed_range(trinity)
        for timestamp, price in data:
            if timestamp >= start_time:
                final_data.append((timestamp.isoformat(), price))
        return final_data

    def relative_time_handler(self, flavor: Flavor, region: Region, relative_range: str) -> List[Tuple[str, int]]:
        """Parse a range token like '72h', '30d', '6m', '1y' or 'all' and fetch its series.

        Raises InvalidRelativePathError for unknown units or out-of-bound spans.
        """
        # '30d' is special-cased to 744 hours (31 days) so it is served by the
        # 'h' branch below, i.e. from the recent store.
        if relative_range == '30d':
            relative_range = '744h'

        relative_unit = relative_range[-1]

        match relative_unit:
            case 'h':
                hours = int(relative_range[:-1])
                if hours > 1488:  # 62 days — cap for the hours form
                    raise InvalidRelativePathError
                start_time = datetime.datetime.now(datetime.UTC) - datetime.timedelta(hours=hours)
                return self._rdb.get_after_unix_timestamp(flavor, region, int(start_time.timestamp()))
            case 'd':
                days = int(relative_range[:-1])
                if days > 730:  # 2 years
                    raise InvalidRelativePathError
                delta = datetime.timedelta(days=days)
                return self.get_by_timedelta(flavor, region, delta)
            case 'm':
                months = int(relative_range[:-1])
                if months > 48:
                    raise InvalidRelativePathError
                # 30.437 ~= mean days per month
                delta = datetime.timedelta(days=int(30.437*months))
                return self.get_by_timedelta(flavor, region, delta)
            case 'y':
                years = int(relative_range[:-1])
                if years > 10:
                    raise InvalidRelativePathError
                # 365.25 ~= mean days per year
                delta = datetime.timedelta(days=int(365.25*years))
                return self.get_by_timedelta(flavor, region, delta)
            case _:
                if relative_range == 'all':
                    return self._cdb.get_precomputed_range(Trinity(region, flavor, CachedRange('all')), str)
                raise InvalidRelativePathError

    def path_handler(self, uri: str) -> List[Tuple[str, int]]:
        """Entry point: dissect *uri* and dispatch to relative_time_handler."""
        # This URI takes the form of /v2/relative/{flavor}/{region}/{range}
        split_uri = uri.split('/')
        flavor = Flavor(split_uri[-3])
        region = Region(split_uri[-2])
        _range = split_uri[-1]
        # The final segment may carry a '.json' suffix; strip it.
        if split_uri[-1].endswith('.json'):
            _range = split_uri[-1][:-5]

        return self.relative_time_handler(flavor, region, _range)
|
||||
|
7
wow_token/region.py
Normal file
7
wow_token/region.py
Normal file
@ -0,0 +1,7 @@
|
||||
from enum import Enum
|
||||
|
||||
class Region(str, Enum):
    """Game region whose token price is tracked.

    A string-valued enum: values are constructed from URI path segments and
    from the 'region' attribute stored in DynamoDB items, and are used as the
    'region' key in DynamoDB queries.
    """
    US = 'us'
    EU = 'eu'
    KR = 'kr'
    TW = 'tw'
|
Loading…
Reference in New Issue
Block a user