Skip to content

Commit

Permalink
update version
Browse files Browse the repository at this point in the history
  • Loading branch information
lit26 committed Sep 19, 2020
1 parent ec5a502 commit bb93df7
Show file tree
Hide file tree
Showing 9 changed files with 547 additions and 553 deletions.
2 changes: 1 addition & 1 deletion docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
author = 'Tianning Li'

# The full version, including alpha/beta/rc tags
release = '0.6.2'
release = '0.7'


# -- General configuration ---------------------------------------------------
Expand Down
Binary file modified example/audusd_d1_s.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified example/btcusd_m5_s.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
1,023 changes: 491 additions & 532 deletions example/example.ipynb

Large diffs are not rendered by default.

Binary file modified example/tsla.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
31 changes: 26 additions & 5 deletions finvizfinance/screener/overview.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,14 +178,17 @@ def _get_page(self,soup):
options = soup.findAll('table')[17].findAll('option')
return len(options)

def _get_table(self, rows, df, num_col_index,table_header):
def _get_table(self, rows, df, num_col_index, table_header, limit=-1):
"""Get screener table helper function.
Returns:
df(pandas.DataFrame): screener information table
"""
rows = rows[1:]
for row in rows:
if limit != -1:
rows = rows[0:limit]

for index, row in enumerate(rows):
cols = row.findAll('td')[1:]
info_dict = {}
for i, col in enumerate(cols):
Expand All @@ -197,11 +200,24 @@ def _get_table(self, rows, df, num_col_index,table_header):
df = df.append(info_dict, ignore_index=True)
return df

def ScreenerView(self, order='ticker', verbose=1):
def _screener_helper(self, i, page, rows, df, num_col_index, table_header, limit):
    """Append one result page of the screener to the accumulated DataFrame.

    Args:
        i(int): zero-based index of the page currently being parsed
        page(int): total number of result pages being loaded
        rows(list): <tr> elements of the current page's result table
        df(pandas.DataFrame): table accumulated from the previous pages
        num_col_index(list): indexes of the columns holding numeric values
        table_header(list): column names of the screener table
        limit(int): overall cap on the number of rows; -1 means no limit
    Returns:
        df(pandas.DataFrame): screener information table
    """
    # Truncate only on the final page, and only when a limit was actually
    # requested. The previous code computed (limit - 1) % 20 + 1
    # unconditionally on the last page; for limit == -1 Python's modulo
    # yields 19, silently dropping the 20th row of a full last page.
    if limit != -1 and i == page - 1:
        # Rows still owed on the last page: the limit reduced modulo the
        # 20-row page size (maps 1..20 -> 1..20 rather than 0..19).
        df = self._get_table(rows, df, num_col_index, table_header,
                             limit=((limit - 1) % 20 + 1))
    else:
        df = self._get_table(rows, df, num_col_index, table_header)
    return df

def ScreenerView(self, order='ticker', limit=-1, verbose=1):
"""Get screener table.
Args:
order(str): sort the table by the choice of order
limit(int): set the top k rows of the screener
verbose(int): choice of visual the progress. 1 for visualize progress
Returns:
df(pandas.DataFrame): screener information table
Expand All @@ -212,19 +228,24 @@ def ScreenerView(self, order='ticker', verbose=1):
raise ValueError()
url = self.url+'&'+self.order_dict[order]
soup = webScrap(url)

page = self._get_page(soup)
if page == 0:
print('No ticker found.')
return None

if limit != -1:
if page > (limit-1)//20+1:
page = (limit-1)//20+1

if verbose == 1:
print('[Info] loading page 1/{} ...'.format(page))
table = soup.findAll('table')[18]
rows = table.findAll('tr')
table_header = [i.text for i in rows[0].findAll('td')][1:]
num_col_index = [table_header.index(i) for i in table_header if i in self.NUMBER_COL]
df = pd.DataFrame([], columns=table_header)
df = self._get_table(rows, df, num_col_index, table_header)
df = self._screener_helper(0, page, rows, df, num_col_index, table_header, limit)

for i in range(1, page):
if verbose == 1:
Expand All @@ -235,7 +256,7 @@ def ScreenerView(self, order='ticker', verbose=1):
soup = webScrap(self.url + '&r={}'.format(i * 20 + 1)+'&'+self.order_dict[order])
table = soup.findAll('table')[18]
rows = table.findAll('tr')
df = self._get_table(rows, df, num_col_index, table_header)
df = self._screener_helper(i, page, rows, df, num_col_index, table_header, limit)
return df

def compare(self, ticker, compare_list, order='ticker', verbose=1):
Expand Down
24 changes: 17 additions & 7 deletions finvizfinance/screener/ticker.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,15 @@ def __init__(self):
self.url = self.BASE_URL.format(signal='', filter='',ticker='')
Overview._loadSetting(self)

def ScreenerView(self, verbose=1):
def _screener_helper(self, i, page, soup, tickers, limit):
    """Collect the tickers from one result page of the screener.

    Args:
        i(int): zero-based index of the page currently being parsed
        page(int): total number of result pages being loaded
        soup(beautiful soup): parsed html of the current result page
        tickers(list): tickers accumulated from the previous pages
        limit(int): overall cap on the number of tickers; -1 means no limit
    Returns:
        tickers(list): accumulated list of ticker symbols
    """
    table = soup.findAll('table')[18]
    page_tickers = table.findAll('span')
    # Truncate only on the final page, and only when a limit was actually
    # requested. Computing (limit - 1) % 1000 + 1 unconditionally would
    # yield 999 for limit == -1 and drop the 1000th ticker of a full
    # last page.
    if limit != -1 and i == page - 1:
        page_tickers = page_tickers[:((limit - 1) % 1000 + 1)]
    # Each span's text is split on the non-breaking space; the ticker is
    # the second field. (Loop variable renamed from `i`, which shadowed
    # the page-index parameter inside the comprehension.)
    tickers = tickers + [span.text.split('\xa0')[1] for span in page_tickers]
    return tickers

def ScreenerView(self, limit=-1, verbose=1):
"""Get screener table.
Args:
Expand All @@ -32,17 +40,19 @@ def ScreenerView(self, verbose=1):
print('No ticker found.')
return None

if limit != -1:
if page > (limit-1)//1000+1:
page = (limit-1)//1000+1

if verbose == 1:
print('[Info] loading page 1/{} ...'.format(page))
table = soup.findAll('table')[18]
tickers = table.findAll('span')
tickers = [i.text.split('\xa0')[1] for i in tickers]

tickers = []
tickers = self._screener_helper(0, page, soup, tickers, limit)

for i in range(1, page):
if verbose == 1:
print('[Info] loading page {}/{} ...'.format((i + 1), page))
soup = webScrap(self.url + '&r={}'.format(i * 1000 + 1))
table = soup.findAll('table')[18]
page_tickers = table.findAll('span')
tickers = tickers + [i.text.split('\xa0')[1] for i in page_tickers]
tickers = self._screener_helper(i, page, soup, tickers, limit)
return tickers
18 changes: 11 additions & 7 deletions finvizfinance/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,12 @@ def webScrap(url):
Returns:
soup(beautiful soup): website html
"""
website = requests.get(url, headers=headers)
soup = BeautifulSoup(website.text, 'lxml')
try:
website = requests.get(url, headers=headers)
website.raise_for_status()
soup = BeautifulSoup(website.text, 'lxml')
except requests.exceptions.HTTPError as err:
raise SystemExit(err)
return soup

def imageScrap(url, ticker, out_dir):
Expand All @@ -32,17 +36,17 @@ def imageScrap(url, ticker, out_dir):
ticker(str): output image name
out_dir(str): output directory
"""
r = requests.get(url, stream=True, headers=headers)
if r.status_code == 200:
try:
r = requests.get(url, stream=True, headers=headers)
r.raise_for_status()
r.raw.decode_content = True
if len(out_dir) != 0:
out_dir +='/'
f = open('{}{}.jpg'.format(out_dir, ticker), "wb")
f.write(r.content)
f.close()
else:
print('Error...')
print(r.status_code)
except requests.exceptions.HTTPError as err:
raise SystemExit(err)

def scrapFunction(url):
"""Scrap forex, crypto information.
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

HERE = pathlib.Path(__file__).parent

VERSION = '0.6.2'
VERSION = '0.7'
PACKAGE_NAME = 'finvizfinance'
AUTHOR = 'Tianning Li'
AUTHOR_EMAIL = '[email protected]'
Expand Down

0 comments on commit bb93df7

Please sign in to comment.