Merge pull request #1 from ad-ha/ad-ha-0.1.2
0.1.2
ad-ha authored Nov 12, 2024
2 parents a17efc4 + 05e4d49 commit e3d1528
Showing 10 changed files with 949 additions and 797 deletions.
49 changes: 44 additions & 5 deletions custom_components/advanced_trading_wallet/__init__.py
@@ -1,3 +1,4 @@
from datetime import timedelta
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType
@@ -26,22 +27,33 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b

# If global coordinator doesn't exist, create it
if "coordinator" not in hass.data[DOMAIN]:
update_interval = config_entry.options.get(
"update_interval", DEFAULT_SCAN_INTERVAL
)
coordinator = ATWCoordinator(
hass,
preferred_currency=config_entry.data.get("preferred_currency", "usd"),
update_interval=config_entry.data.get(
"update_interval", DEFAULT_SCAN_INTERVAL
),
preferred_currency=config_entry.data.get("preferred_currency", "USD"),
update_interval=update_interval,
)
hass.data[DOMAIN]["coordinator"] = coordinator
LOGGER.debug("Created global coordinator")
await coordinator.data_store.async_load()
await coordinator.async_config_entry_first_refresh()
else:
coordinator = hass.data[DOMAIN]["coordinator"]
# Update coordinator's update interval if changed
update_interval = config_entry.options.get(
"update_interval", DEFAULT_SCAN_INTERVAL
)
await coordinator.async_set_update_interval(timedelta(minutes=update_interval))

# Store per-entry data
hass.data[DOMAIN][config_entry.entry_id] = {
entry_id = config_entry.entry_id
entry_data = {
"api_provider": config_entry.data.get("api_provider", DEFAULT_API_PROVIDER),
"preferred_currency": config_entry.data.get(
"preferred_currency", "USD"
).upper(),
"stocks_to_track": config_entry.data.get("stocks_to_track", ""),
"crypto_to_track": config_entry.data.get("crypto_to_track", ""),
"stock_amount_owned": config_entry.data.get("stock_amount_owned", 0),
@@ -50,6 +62,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
"crypto_purchase_price": config_entry.data.get("crypto_purchase_price", 0),
}

# Update entry_data with stored data if available
stored_entry_data = coordinator.data_store.get_entry_data(entry_id)
if stored_entry_data:
entry_data.update(stored_entry_data)

hass.data[DOMAIN][entry_id] = entry_data

# Update coordinator's list of symbols and API providers
coordinator.update_symbols(hass.data[DOMAIN])

@@ -58,9 +77,28 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
# Pass the coordinator when setting up services
await async_setup_services(hass, coordinator)

# Add update listener for options
config_entry.async_on_unload(
config_entry.add_update_listener(async_options_updated)
)

return True


async def async_options_updated(hass: HomeAssistant, config_entry: ConfigEntry):
"""Handle options update."""
# Retrieve the coordinator
coordinator = hass.data[DOMAIN]["coordinator"]

# Get the new update_interval from options
update_interval = config_entry.options.get("update_interval", DEFAULT_SCAN_INTERVAL)
new_interval = timedelta(minutes=update_interval)
LOGGER.debug(f"Options updated: new update_interval={new_interval}")

# Update the coordinator's update_interval
await coordinator.async_set_update_interval(new_interval)
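
Both the setup path and this options listener delegate to `coordinator.async_set_update_interval`, passing `timedelta(minutes=update_interval)`, so the stored option is interpreted as minutes. The coordinator itself is not part of this diff; the sketch below only illustrates how such a method could be built on Home Assistant's `DataUpdateCoordinator` (the class name and refresh behaviour are assumptions, not code from this commit).

```python
# Hedged sketch, not part of this commit: coordinator.py is not shown in the
# diff. It illustrates one way async_set_update_interval could work on top of
# Home Assistant's DataUpdateCoordinator; the class name is hypothetical.
from datetime import timedelta

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator


class ATWCoordinatorSketch(DataUpdateCoordinator):
    async def async_set_update_interval(self, new_interval: timedelta) -> None:
        """Apply a new polling interval and refresh against it."""
        if self.update_interval != new_interval:
            # DataUpdateCoordinator schedules its next poll from
            # update_interval, so updating the attribute is enough; the
            # immediate refresh just applies the change right away.
            self.update_interval = new_interval
            await self.async_request_refresh()
```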


async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(
@@ -75,6 +113,7 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->

# If no other entries remain, remove the coordinator and services
if len(hass.data[DOMAIN]) == 1: # Only 'coordinator' remains
await coordinator.async_close()
hass.data[DOMAIN].pop("coordinator")
await async_unload_services(hass)

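`async_setup_entry` above also calls `coordinator.data_store.async_load()` and `coordinator.data_store.get_entry_data(entry_id)`, but the data store module is not among this commit's shown files. A minimal sketch, assuming a `homeassistant.helpers.storage.Store`-backed helper (class name, storage key, and data layout are assumptions):

```python
# Illustrative only — the integration's real data store is not shown in this
# diff. This sketch assumes per-entry data is persisted as a dict keyed by
# config entry id via Home Assistant's Store helper.
from homeassistant.core import HomeAssistant
from homeassistant.helpers.storage import Store

STORAGE_VERSION = 1
STORAGE_KEY = "advanced_trading_wallet"  # assumed storage key


class ATWDataStoreSketch:
    def __init__(self, hass: HomeAssistant) -> None:
        self._store = Store(hass, STORAGE_VERSION, STORAGE_KEY)
        self._data: dict = {}

    async def async_load(self) -> None:
        """Load persisted data from disk, defaulting to an empty dict."""
        self._data = await self._store.async_load() or {}

    def get_entry_data(self, entry_id: str) -> dict | None:
        """Return persisted data for a config entry, if any was saved."""
        return self._data.get(entry_id)
```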
216 changes: 95 additions & 121 deletions custom_components/advanced_trading_wallet/api.py
@@ -21,6 +21,7 @@ def __init__(self, hass: HomeAssistant, api_provider: str):
self.api_provider = api_provider
self.crumb = None
self.cookies = None
self.session = aiohttp.ClientSession()

async def get_stock_data(self, stock_symbol: str):
"""Fetch stock data asynchronously."""
@@ -43,93 +44,67 @@ async def _fetch_yahoo_crumb(self):
url = GET_CRUMB_URL
headers = YAHOO_HEADERS

async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as response:
if response.status == 200:
text = await response.text()
self.crumb = text.strip()
self.cookies = response.cookies
LOGGER.debug(f"Fetched Yahoo Finance crumb: {self.crumb}")
return True
else:
LOGGER.error(
f"Failed to fetch Yahoo Finance crumb: {response.status}"
)
return False
async with self.session.get(url, headers=headers) as response:
if response.status == 200:
text = await response.text()
self.crumb = text.strip()
self.cookies = response.cookies
LOGGER.debug(f"Fetched Yahoo Finance crumb: {self.crumb}")
return True
else:
LOGGER.error(f"Failed to fetch Yahoo Finance crumb: {response.status}")
return False

async def _fetch_yahoo_stock(self, stock_symbol: str):
"""Fetch stock data with crumb handling."""
if not self.crumb:
await self._fetch_yahoo_crumb()

url = f"{YAHOO_FINANCE_BASE_URL}{stock_symbol}&crumb={self.crumb}"
async with aiohttp.ClientSession(cookies=self.cookies) as session:
async with session.get(url, headers=YAHOO_HEADERS) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {stock_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(f"Stock data for {stock_symbol}: {json_data}")
return json_data
else:
LOGGER.error(f"Failed to fetch stock data: {response.status}")
return None
async with self.session.get(
url, headers=YAHOO_HEADERS, cookies=self.cookies
) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {stock_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(f"Stock data for {stock_symbol}: {json_data}")
return json_data
else:
LOGGER.error(f"Failed to fetch stock data: {response.status}")
return None

async def _fetch_coingecko_crypto(self, crypto_symbol: str, currency: str = "usd"):
url = f"{COINGECKO_BASE_URL}/coins/markets?vs_currency={currency}&ids={crypto_symbol}"
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {crypto_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(f"Crypto data for {crypto_symbol}: {json_data}")
return json_data
else:
LOGGER.error(f"Failed to fetch crypto data: {response.status}")
return None

async def fetch_autocomplete(self, query: str, asset_type: str):
"""Fetch autocomplete suggestions from relevant APIs."""
if asset_type == "Stock" and self.api_provider == "Yahoo Finance":
url = f"{YAHOO_FINANCE_BASE_URL}/lookup/autocomplete?q={query}"
elif asset_type == "Crypto" and self.api_provider == "CoinGecko":
url = f"{COINGECKO_BASE_URL}/search?query={query}"
else:
raise ValueError("Invalid API provider")

async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
return await response.json()
else:
LOGGER.error(
f"Failed to fetch autocomplete data: {response.status}"
)
return None
crypto_symbol_lower = crypto_symbol.lower()
url = f"{COINGECKO_BASE_URL}/coins/markets?vs_currency={currency}&ids={crypto_symbol_lower}"
async with self.session.get(url) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {crypto_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
if json_data:
# Retrieve the symbol from the response
symbol = json_data[0].get("symbol", crypto_symbol).upper()
json_data[0]["symbol"] = symbol
LOGGER.debug(f"Crypto data for {crypto_symbol}: {json_data}")
return json_data
else:
LOGGER.error(f"Failed to fetch crypto data: {response.status}")
return None

async def get_stock_historical_data(self, stock_symbol: str, interval: str):
"""Fetch historical stock data asynchronously."""
@@ -138,53 +113,52 @@ async def get_stock_historical_data(stock_symbol: str, interval: str):

url = f"{YAHOO_FINANCE_HISTORICAL_URL}{stock_symbol}?interval={interval}"

async with aiohttp.ClientSession(cookies=self.cookies) as session:
async with session.get(url, headers=YAHOO_HEADERS) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {stock_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(f"Historical data for {stock_symbol}: {json_data}")
return json_data
else:
LOGGER.error(
f"Failed to fetch stock historical data: {response.status}"
)
return None
async with self.session.get(
url, headers=YAHOO_HEADERS, cookies=self.cookies
) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {stock_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(f"Historical data for {stock_symbol}: {json_data}")
return json_data
else:
LOGGER.error(
f"Failed to fetch stock historical data: {response.status}"
)
return None

async def get_crypto_historical_data(self, crypto_symbol: str, interval: str):
"""Fetch historical crypto data asynchronously."""
url = f"{COINGECKO_BASE_URL}/coins/{crypto_symbol}/market_chart?vs_currency=usd&days={interval}"
crypto_symbol_lower = crypto_symbol.lower()
url = f"{COINGECKO_BASE_URL}/coins/{crypto_symbol_lower}/market_chart?vs_currency=usd&days={interval}"
LOGGER.debug(f"Requesting crypto historical data from {url}")
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {crypto_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(
f"Crypto historical data for {crypto_symbol}: {json_data}"
)
return json_data
else:
LOGGER.error(
f"Failed to fetch crypto historical data: {response.status}"
)
return None
async with self.session.get(url) as response:
if response.status == 429:
retry_after = int(
response.headers.get("Retry-After", DEFAULT_RETRY_AFTER)
)
LOGGER.warning(
f"Rate limit hit for {crypto_symbol}. Retrying after {retry_after} seconds."
)
await asyncio.sleep(retry_after)
return None
if response.status == 200:
json_data = await response.json()
LOGGER.debug(f"Crypto historical data for {crypto_symbol}: {json_data}")
return json_data
else:
LOGGER.error(
f"Failed to fetch crypto historical data: {response.status}"
)
return None

async def close(self):
"""Close the aiohttp session."""
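Every request in api.py now reuses the single `aiohttp.ClientSession` created in `__init__`, which is why `async_unload_entry` awaits `coordinator.async_close()` before dropping the coordinator: the session's connection pool has to be released explicitly (the `close()` body is collapsed above). The 429 handling is also repeated verbatim in each fetch method; the helper below is an illustration, not part of the commit, of how that Retry-After pattern could be factored into one function operating on the shared session:

```python
# Illustrative helper, not part of this commit: factors out the repeated
# 429/Retry-After handling used by the fetch methods above.
import asyncio
import logging

import aiohttp

LOGGER = logging.getLogger(__name__)
DEFAULT_RETRY_AFTER = 60  # assumed default, mirroring the constant used in api.py


async def fetch_json(session: aiohttp.ClientSession, url: str, **kwargs):
    """GET a URL with the shared session, honoring Retry-After on HTTP 429."""
    async with session.get(url, **kwargs) as response:
        if response.status == 429:
            retry_after = int(response.headers.get("Retry-After", DEFAULT_RETRY_AFTER))
            LOGGER.warning("Rate limit hit for %s; retrying after %s seconds", url, retry_after)
            await asyncio.sleep(retry_after)
            return None
        if response.status == 200:
            return await response.json()
        LOGGER.error("Request to %s failed with status %s", url, response.status)
        return None
```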
