Refactor multi.py to handle single tickers and rename columns
ranaroussi committed Oct 25, 2024
1 parent 16072e8 commit 5e942fd
Showing 1 changed file with 7 additions and 6 deletions.
13 changes: 7 additions & 6 deletions yfinance/multi.py
@@ -34,10 +34,11 @@
 
 
 @utils.log_indent_decorator
-def download(tickers, start=None, end=None, actions=False, threads=True, ignore_tz=None,
-             group_by='column', auto_adjust=False, back_adjust=False, repair=False, keepna=False,
-             progress=True, period="max", interval="1d", prepost=False,
-             proxy=None, rounding=False, timeout=10, session=None, return_multi_index=True):
+def download(tickers, start=None, end=None, actions=False, threads=True,
+             ignore_tz=None, group_by='column', auto_adjust=False, back_adjust=False,
+             repair=False, keepna=False, progress=True, period="max", interval="1d",
+             prepost=False, proxy=None, rounding=False, timeout=10, session=None,
+             multi_level_index=True):
     """Download yahoo tickers
     :Parameters:
         tickers : str, list
Expand Down Expand Up @@ -85,7 +86,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
seconds. (Can also be a fraction of a second e.g. 0.01)
session: None or Session
Optional. Pass your own session object to be used for all requests
return_multi_index: bool
multi_level_index: bool
Optional. Always return a MultiIndex DataFrame? Default is False
"""
logger = utils.get_yf_logger()
@@ -217,7 +218,7 @@ def download(tickers, start=None, end=None, actions=False, threads=True, ignore_
         data.columns = data.columns.swaplevel(0, 1)
         data.sort_index(level=0, axis=1, inplace=True)
 
-    if not return_multi_index and len(tickers) == 1:
+    if not multi_level_index and len(tickers) == 1:
         data = data.droplevel(0 if group_by == 'ticker' else 1, axis=1).rename_axis(None, axis=1)
 
     return data
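
For context, a minimal usage sketch of the renamed parameter. It assumes a yfinance build that includes this commit (the keyword is multi_level_index, formerly return_multi_index); the ticker and period are illustrative, not part of the commit. With the default multi_level_index=True, even a single-ticker download keeps a (field, ticker) column MultiIndex; passing multi_level_index=False triggers the new branch above, which drops the ticker level via droplevel.

    # Minimal sketch, assuming a yfinance version containing this commit.
    # The ticker "MSFT" and the period are illustrative values only.
    import yfinance as yf

    # Default: columns keep both levels, e.g. ('Close', 'MSFT'),
    # even though only one ticker was requested.
    df_multi = yf.download("MSFT", period="5d", interval="1d")
    print(df_multi.columns)

    # With multi_level_index=False and a single ticker, the new branch
    # drops the ticker level, leaving flat names like 'Close' and 'Volume'.
    df_flat = yf.download("MSFT", period="5d", interval="1d", multi_level_index=False)
    print(df_flat.columns)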
