This script attempts a basic data comparison of previous bottoms in the stock market during various well-known periods (e.g. the .com bubble, the subprime mortgage crash, and the 2020 pandemic crash). It compares data from the St. Louis Fed (FRED) with stock market data.
import numpy as np
from scipy import signal
import yfinance as yf
import pandas as pd
import requests
import json
from datetime import datetime
%matplotlib inline
import matplotlib.pyplot as plt
import mplcursors
plt.rcParams["figure.figsize"] = (20,4)
# Instead of sharing my API key with the world, I'll hide it here
from fred_api_key import FRED_API_KEY
Let's first define some useful functions to normalize the data so the series are easier to compare visually.
def normalize( series ):
    """
    Return the input series shifted by its minimum and scaled by its maximum.
    """
    s_min = series.min()
    s_max = series.max()
    return (series - s_min) / s_max
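As a quick sanity check, here is a toy example (the numbers are made up) showing what normalize actually does: it shifts by the series minimum and divides by the original maximum, so the result is not quite classic min-max scaling.

# Toy sanity check -- illustrative only, not part of the analysis
toy = pd.Series([2.0, 4.0, 6.0, 10.0])
print( normalize(toy) )  # (x - 2.0) / 10.0 => 0.0, 0.2, 0.4, 0.8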
def normalize_merge( series_array, start_date=None, end_date=None ):
    """
    Normalize each series in the input array and merge them all together.
    Returns both the merged normalized series and their percent change.
    """
    merge_series = normalize( series_array[0][ start_date : end_date ] )
    merge_change_series = merge_series.pct_change()
    for series in series_array[1:]:
        norm_series = normalize( series[ start_date : end_date ] )
        norm_change_series = norm_series.pct_change()
        merge_series = pd.merge( merge_series, norm_series, on=['date'] )
        merge_change_series = pd.merge( merge_change_series, norm_change_series, on=['date'] )
    return merge_series, merge_change_series
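A minimal usage sketch with two synthetic, date-indexed frames (the names A and B and the values are invented) to show the shape of what normalize_merge returns:

# Hypothetical example data -- two daily frames sharing an index named 'date'
idx = pd.date_range('2020-01-01', periods=5, freq='D', name='date')
a = pd.DataFrame({'A': [1.0, 2.0, 3.0, 2.0, 1.0]}, index=idx)
b = pd.DataFrame({'B': [10.0, 20.0, 15.0, 25.0, 30.0]}, index=idx)
levels, changes = normalize_merge([a, b])
print( levels.head() )   # both series rescaled onto a comparable scale
print( changes.head() )  # period-over-period percent change of the normalized series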
Now we define a couple of functions to download data from FRED.
def downloadFredSeriesInfo( series_id, api_key ):
    """
    Download series info from St. Louis Fed (FRED).
    Parameters:
    series_id : str
        string used to denote series on St. Louis Fed (FRED) website
    api_key : str
        API key -- A new key can be requested here: https://fred.stlouisfed.org/docs/api/api_key.html
    """
    json_out = None
    url = f"https://api.stlouisfed.org/fred/series?series_id={series_id}&api_key={api_key}&file_type=json"
    print( f"GET {url.replace(api_key,'???')}" )
    res = requests.get( url )
    if res.status_code == 200:
        json_out = json.loads( res.text )
    else:
        print(f"{res.status_code}: {series_id} not found!")
    return json_out
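A hedged usage sketch of the info endpoint: it is mainly useful for reading metadata such as the native frequency, which the commented-out lookup inside downloadFredSeries below hints at. The field names follow FRED's JSON layout.

# Hypothetical usage -- requires a valid FRED API key
info = downloadFredSeriesInfo( "UNRATE", FRED_API_KEY )
if info is not None:
    meta = info['seriess'][0]         # FRED wraps the record in a 'seriess' list
    print( meta['title'] )            # e.g. "Unemployment Rate"
    print( meta['frequency_short'] )  # e.g. "M" for monthly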
def downloadFredSeries( series_id, api_key, start="", end='9999-12-31' ):
    """
    Download series from St. Louis Fed (FRED).
    Parameters:
    series_id : str
        A string used to denote series on St. Louis Fed (FRED) website
    api_key : str
        The API key needed to access the series data.
        A new key can be requested here: https://fred.stlouisfed.org/docs/api/api_key.html
    start : str
        The start date of the series data
    end : str
        The end date of the series data
    """
    # The resulting pandas dataframe to convert and store the series data into
    df_out = None
    # The data frequency parameter (e.g. yearly, monthly, weekly)
    frequency = ""
    #unrate_info = downloadFredSeriesInfo( "UNRATE", FRED_API )
    #if not unrate_info is None:
    #    frequency = unrate_info['seriess'][0]['frequency_short'].lower()
    if len(start) > 0:
        start = f"&observation_start={start}"
    if len(frequency) > 0:
        frequency = f"&frequency={frequency}"
    url = f"https://api.stlouisfed.org/fred/series/observations?series_id={series_id}{start}&observation_end={end}&api_key={api_key}{frequency}&file_type=json"
    print( f"GET {url.replace(api_key,'???')}" )
    res = requests.get( url )
    if res.status_code == 200:
        # Read the data as JSON and convert to a pandas dataframe
        json_out = json.loads( res.text )
        df_out = pd.DataFrame.from_dict( json_out["observations"] )
        #filt = df_out['realtime_end'] == end
        # pull out just the date and value data from the downloaded series
        df_out = df_out[['date','value']]
        df_out['date'] = pd.to_datetime( df_out['date'] )
        df_out = df_out.set_index('date')
        df_out['value'] = pd.to_numeric( df_out['value'] )
    else:
        print(f"{res.status_code}: {series_id} not found!")
    return df_out
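FRED rate-limits requests, so while iterating on the plots it can help to cache each series on disk. This is a hypothetical convenience wrapper (the function name and cache directory are my own, not part of the script above):

import os

def downloadFredSeriesCached( series_id, api_key, cache_dir="fred_cache", **kwargs ):
    """Hypothetical helper: reuse a CSV on disk if this series was already fetched."""
    os.makedirs( cache_dir, exist_ok=True )
    path = os.path.join( cache_dir, f"{series_id}.csv" )
    if os.path.exists( path ):
        return pd.read_csv( path, parse_dates=['date'], index_col='date' )
    df = downloadFredSeries( series_id, api_key, **kwargs )
    if df is not None:
        df.to_csv( path )
    return df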
And finally we define some functions to normalize, merge, and plot the data.
def normalize_merge_plot_range( series_array, start_date=None, end_date=None ):
    """
    Normalize and merge series in array and plot them
    """
    merge_series = normalize( series_array[0][ start_date : end_date ] )
    for series in series_array[1:]:
        norm_series = normalize( series[ start_date : end_date ] )
        merge_series = pd.merge( merge_series, norm_series, on=['date'] )
    merge_series.plot()
    merge_series.pct_change().plot()
def plot_peaks_and_valleys( frame, name, start=None, stop=None ):
    """
    Locates peaks and valleys (relative extrema) of column {name} in a pandas dataframe between the start and stop dates.
    Plots the data with the peaks and valleys marked.
    peaks => Yellow
    valleys => Red
    Parameters:
    frame : DataFrame
        The pandas dataframe
    name : str
        The name of the column in the pandas dataframe
    start : str
        The start date
    stop : str
        The stop date
    """
    peaks = []
    valleys = []
    frame_idx = 0
    cur_idx = 0
    values = frame[start:stop][name].values
    # dates = frame[start:stop].values
    if start is None:
        start = frame.index[0]
    if stop is None:
        stop = frame.index[-1]
    start = pd.Timestamp( start )
    stop = pd.Timestamp( stop )
    date_str = f"{start.date():%Y-%m-%d} - {stop.date():%Y-%m-%d}"
    while frame_idx < len(values):
        # frame_peaks_idx, _ = signal.find_peaks( data_butter[ frame_idx : ], distance=14, prominence=7 )
        frame_peaks_idx = signal.argrelextrema( values[ frame_idx : ], np.greater, order=18 )[0]
        if len(frame_peaks_idx) > 0:
            cur_idx = frame_peaks_idx[0]
            frame_idx += cur_idx
            peaks.append( frame_idx )
        # frame_valleys_idx, _ = signal.find_peaks( -1 * data_butter[ frame_idx : ], distance=14, prominence=7 )
        frame_valleys_idx = signal.argrelextrema( values[ frame_idx : ], np.less, order=18 )[0]
        if len(frame_valleys_idx) > 0:
            cur_idx = frame_valleys_idx[0]
            frame_idx += cur_idx
            valleys.append( frame_idx )
        if len(frame_peaks_idx) == 0 or len(frame_valleys_idx) == 0:
            break
    r = np.array(range(len(frame[start:stop][name])))
    plt.plot( frame[start:stop][name], zorder=0 )
    plt.plot( frame[start:stop][name][ np.array(peaks) ], 'y*', zorder=0 )
    plt.plot( frame[start:stop][name][ np.array(valleys) ], 'r*', zorder=0 )
    plt.title( name+f" (Peaks & Valleys) ({date_str})", fontsize=25 )
    plt.show()
    plt.plot( frame[start:stop][name][ np.array(peaks) ], 'y', zorder=0 )
    plt.plot( frame[start:stop][name][ np.array(valleys) ], 'r', zorder=0 )
    plt.title( name+f" (Peaks & Valleys Range) ({date_str})", fontsize=25 )
    plt.show()
    # ret_peaks = zip( dates[ peaks ], vix_closes[ np.array(peaks) ] )
    # ret_valleys = zip( dates[ valleys ], vix_closes[ np.array(valleys) ] )
    # ret_peaks_dates = dates[ peaks ]
    # ret_valleys_dates = dates[ valleys ]
    # ret_current_date = dates[ -1 ]
    # return (ret_peaks, ret_valleys, ret_peaks_dates, ret_valleys_dates, ret_current_date)
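The order argument to signal.argrelextrema controls how many neighbouring samples a point must exceed to count as a relative extremum; order=18 above is a choice tuned for daily data. A small synthetic demo (a made-up noisy sine wave) illustrates the effect:

# Synthetic demo of argrelextrema -- illustrative only
rng = np.random.default_rng(0)
x = np.sin( np.linspace(0, 6 * np.pi, 300) ) + rng.normal(0, 0.05, 300)
loose  = signal.argrelextrema( x, np.greater, order=2 )[0]   # small window: noise shows up as peaks
strict = signal.argrelextrema( x, np.greater, order=18 )[0]  # wider window: roughly one peak per sine cycle survives
print( len(loose), len(strict) )  # typically far fewer peaks with the larger window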
def plot_range_with_marker( frame, name, start=None, stop=None ):
    """
    Plot a column of a pandas dataframe between start and stop dates.
    Draw a green vertical line to denote the max value of the column data.
    Draw a red vertical line to denote the min value of the column data.
    Print the min/max and min/max percent change.
    Parameters:
    frame : DataFrame
        The pandas dataframe to plot
    name : str
        The column within the dataframe to plot
    start : str
        The start date
    stop : str
        The stop date
    """
    segment = frame[start:stop][name]
    segment_change = segment.pct_change()
    seg_min = segment.min()
    seg_max = segment.max()
    seg_change_min = segment_change.min()
    seg_change_max = segment_change.max()
    seg_min_idx = segment <= seg_min
    seg_max_idx = segment >= seg_max
    seg_change_min_idx = segment_change <= seg_change_min
    seg_change_max_idx = segment_change >= seg_change_max
    if start is None:
        start = frame.index[0]
    if stop is None:
        stop = frame.index[-1]
    start = pd.Timestamp( start )
    stop = pd.Timestamp( stop )
    date_str = f"{start.date():%Y-%m-%d} - {stop.date():%Y-%m-%d}"
    plt.figure(1)
    plt.title( name+f" ({date_str})", fontsize=25 )
    plt.plot( segment )
    plt.vlines( segment[ seg_min_idx ].index, seg_min, seg_max, color='red' )
    plt.vlines( segment[ seg_max_idx ].index, seg_min, seg_max, color='green' )
    plt.figure(2)
    plt.plot( segment_change )
    plt.vlines( segment_change[ seg_change_min_idx ].index, seg_change_min, seg_change_max, color='red' )
    plt.vlines( segment_change[ seg_change_max_idx ].index, seg_change_min, seg_change_max, color='green' )
    plt.interactive(True)
    plt.title( name+f" % Change ({date_str})", fontsize=25 )
    plt.show()
    plot_peaks_and_valleys( frame, name, start, stop )
    print( f"Value Min: ({segment[ seg_min_idx ].index[0].date()}) => {segment[ seg_min_idx ][0]:12.3f}" )
    print( f"Value Max: ({segment[ seg_max_idx ].index[0].date()}) => {segment[ seg_max_idx ][0]:12.3f}" )
    print( f"% Change Min: ({segment_change[ seg_change_min_idx ].index[0].date()}) => {segment_change[ seg_change_min_idx ][0]:12.3f}" )
    print( f"% Change Max: ({segment_change[ seg_change_max_idx ].index[0].date()}) => {segment_change[ seg_change_max_idx ][0]:12.3f}" )
def plot_ranges_with_marker( frame_dict, start=None, stop=None ):
    """
    Plot columns from the dataframes contained in a dictionary from the start date to the stop date.
    The dictionary keys are strings naming the column to plot in the corresponding dataframe.
    Parameters:
    frame_dict : {str:DataFrame}
        A dictionary containing key/value pairs with column names and dataframes
    start : str
        The start date
    stop : str
        The stop date
    """
    for name,frame in frame_dict.items():
        plot_range_with_marker( frame, name, start, stop )
# Create a new API key at: https://fred.stlouisfed.org/docs/api/api_key.html
# Replace FRED_API_KEY with your API key in quotes (e.g. FRED_API="4fd8asdf8sadf8asdf8")
FRED_API=FRED_API_KEY
# Dictionary to store downloaded FRED data
fred_dict = {}
# Download the unemployment data from FRED
unrate_df = downloadFredSeries( "UNRATE", FRED_API )
unrate_df = unrate_df.rename(columns={'value':'UE Rate'})
fred_dict['UNRATE'] = unrate_df
GET https://api.stlouisfed.org/fred/series/observations?series_id=UNRATE&observation_end=9999-12-31&api_key=???&file_type=json
# Download the unemployment data from FRED for the 1980s only
print("* Download just the 80's...\n")
unrate80_df = downloadFredSeries( "UNRATE", FRED_API, '1980-01-01', '1989-01-01' ).rename(columns={'value':'UE Rate'})
unrate80_df.plot()
unrate80_df
type(unrate80_df)
* Download just the 80's...
GET https://api.stlouisfed.org/fred/series/observations?series_id=UNRATE&observation_start=1980-01-01&observation_end=1989-01-01&api_key=???&file_type=json
pandas.core.frame.DataFrame
# Download the Federal Funds Rate data from FRED
ffr_df = downloadFredSeries( "FEDFUNDS", FRED_API )
ffr_df = ffr_df.rename(columns={'value':'FF Rate'})
fred_dict['FEDFUNDS']=ffr_df
GET https://api.stlouisfed.org/fred/series/observations?series_id=FEDFUNDS&observation_end=9999-12-31&api_key=???&file_type=json
# Download the Consumer Price Index, and Consumer Price Index Less Food And Energy data from the FRED
cpi_df = downloadFredSeries( "CPIAUCSL", FRED_API )
cpi_less_food_energy_df = downloadFredSeries( "CPILFESL", FRED_API )
fred_dict['CPIAUCSL']=cpi_df
fred_dict['CPILFESL']=cpi_less_food_energy_df
GET https://api.stlouisfed.org/fred/series/observations?series_id=CPIAUCSL&observation_end=9999-12-31&api_key=???&file_type=json
GET https://api.stlouisfed.org/fred/series/observations?series_id=CPILFESL&observation_end=9999-12-31&api_key=???&file_type=json
# Rename columns and merge both CPI data sets
cpi_df = cpi_df.rename( columns={'value':'CPI'})
cpi_less_food_energy_df = cpi_less_food_energy_df.rename( columns={'value':'CPI-FE'})
cpi_merged_df = pd.merge( cpi_df, cpi_less_food_energy_df, on=['date'] )
fred_dict['CPI']=cpi_merged_df
# mplcursors.cursor(hover=True)
# mplcursors.cursor().connect(
# "add", lambda sel: sel.annotation.set_text(cpi_merged_df["value"][sel.index]))
# cpi_merged_df.pct_change(12).plot()
# cpi_merged_df.pct_change(12)*100
# Display the data as year-over-year percent change for the past 3 years
(cpi_merged_df[-48:].pct_change(12)*100)[-36:]
date | CPI | CPI-FE
---|---|---
2019-06-01 | 1.689644 | 2.134792 |
2019-07-01 | 1.821162 | 2.188499 |
2019-08-01 | 1.759689 | 2.366797 |
2019-09-01 | 1.727330 | 2.353146 |
2019-10-01 | 1.757085 | 2.325123 |
2019-11-01 | 2.030816 | 2.314852 |
2019-12-01 | 2.261721 | 2.234413 |
2020-01-01 | 2.460490 | 2.258438 |
2020-02-01 | 2.319711 | 2.375759 |
2020-03-01 | 1.530638 | 2.117208 |
2020-04-01 | 0.364865 | 1.455577 |
2020-05-01 | 0.242436 | 1.252283 |
2020-06-01 | 0.726814 | 1.195307 |
2020-07-01 | 1.032825 | 1.541229 |
2020-08-01 | 1.327587 | 1.713896 |
2020-09-01 | 1.400645 | 1.724946 |
2020-10-01 | 1.184198 | 1.627430 |
2020-11-01 | 1.137757 | 1.640542 |
2020-12-01 | 1.278154 | 1.596285 |
2021-01-01 | 1.359971 | 1.387294 |
2021-02-01 | 1.675244 | 1.287615 |
2021-03-01 | 2.658377 | 1.660073 |
2021-04-01 | 4.151991 | 2.971731 |
2021-05-01 | 4.944441 | 3.806013 |
2021-06-01 | 5.341016 | 4.447401 |
2021-07-01 | 5.276105 | 4.201800 |
2021-08-01 | 5.205332 | 3.961855 |
2021-09-01 | 5.389907 | 4.036752 |
2021-10-01 | 6.236941 | 4.593958 |
2021-11-01 | 6.828372 | 4.952634 |
2021-12-01 | 7.096542 | 5.476621 |
2022-01-01 | 7.525934 | 6.040783 |
2022-02-01 | 7.912024 | 6.415744 |
2022-03-01 | 8.557586 | 6.436039 |
2022-04-01 | 8.224139 | 6.133687 |
2022-05-01 | 8.516413 | 6.011280 |
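Since pct_change(12) on monthly CPI levels is the year-over-year inflation rate, a short hedged sketch can mark it against the Fed's informal 2% reference. The 2% threshold is my own annotation, not something taken from the FRED data:

# Year-over-year CPI inflation with a 2% reference line -- a rough visual aid
cpi_yoy = cpi_merged_df.pct_change(12) * 100
cpi_yoy.plot()
plt.axhline( y=2.0, color='red', linestyle='--' )
plt.title( "CPI YoY % Change vs 2% reference", fontsize=25 )
plt.show()
above_2 = cpi_yoy[ cpi_yoy['CPI'] > 2.0 ]
print( "Most recent month with headline CPI above 2% YoY:", above_2.index[-1].date() )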
# Download Real GDP data from FRED
gdp_df = downloadFredSeries( 'GDPC1', FRED_API )
fred_dict['GDPC1']=gdp_df
GET https://api.stlouisfed.org/fred/series/observations?series_id=GDPC1&observation_end=9999-12-31&api_key=???&file_type=json
# Calculate Real GDP percent change over two consecutive quarters and flag where it is negative (a rough recession marker)
gdp_neg = gdp_df.pct_change(2)['value'] < 0.0
gdp_neg = gdp_df.pct_change(2)[ gdp_neg ]
# Plot Real GDP and recession markers
plt.axhline(y=0, color='red', linestyle='--')
plt.plot( gdp_df.pct_change() )
plt.vlines( gdp_neg.index, gdp_df.pct_change().min(), gdp_df.pct_change().max(), color='grey' )
plt.show()
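The marker above flags individual quarters; to read off candidate recession windows it helps to group consecutive negative quarters together. A hedged sketch using a common pandas grouping idiom (the cumsum trick is mine, not part of the FRED data):

# Group consecutive quarters of negative real GDP growth into candidate recession windows
gdp_change = gdp_df.pct_change()['value']
negative = gdp_change < 0.0
# Each run of consecutive True values gets its own group id
group_id = ( negative != negative.shift() ).cumsum()
for _, run in gdp_change[ negative ].groupby( group_id[ negative ] ):
    print( f"{run.index[0].date()} -> {run.index[-1].date()}  ({len(run)} quarter(s) of contraction)" )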
# Display the data as quarter-over-quarter percent change for the last 36 quarters
(gdp_df.pct_change()*100)[-36:]
date | value
---|---
2013-04-01 | 0.139385 |
2013-07-01 | 0.791653 |
2013-10-01 | 0.711171 |
2014-01-01 | -0.350110 |
2014-04-01 | 1.284129 |
2014-07-01 | 1.164962 |
2014-10-01 | 0.448993 |
2015-01-01 | 0.813314 |
2015-04-01 | 0.580001 |
2015-07-01 | 0.323373 |
2015-10-01 | 0.146234 |
2016-01-01 | 0.589180 |
2016-04-01 | 0.302389 |
2016-07-01 | 0.601115 |
2016-10-01 | 0.496889 |
2017-01-01 | 0.471931 |
2017-04-01 | 0.559765 |
2017-07-01 | 0.719150 |
2017-10-01 | 0.940400 |
2018-01-01 | 0.762854 |
2018-04-01 | 0.833911 |
2018-07-01 | 0.481953 |
2018-10-01 | 0.223142 |
2019-01-01 | 0.597790 |
2019-04-01 | 0.792924 |
2019-07-01 | 0.685499 |
2019-10-01 | 0.469098 |
2020-01-01 | -1.303583 |
2020-04-01 | -8.937251 |
2020-07-01 | 7.547535 |
2020-10-01 | 1.115277 |
2021-01-01 | 1.533890 |
2021-04-01 | 1.640747 |
2021-07-01 | 0.570948 |
2021-10-01 | 1.680778 |
2022-01-01 | -0.379531 |
# Get start date of CPI data
start_date = cpi_merged_df.index[0]
print( "CPI Start Date:", start_date )
CPI Start Date: 1957-01-01 00:00:00
# Create frame_dict to store downloaded Stock Market data
frame_dict = {}
# Download VIX data
vix = yf.Ticker("^VIX")
#start_date = "2020-01-01"
vix_df = yf.download("^VIX", start=start_date)
vix_df.index.rename('date',inplace=True)
# Rename column and store
vix_df = vix_df.rename(columns={'Close':'VIX'})
frame_dict[ 'VIX' ] = vix_df
# Download AAPL data
aapl = yf.Ticker("AAPL")
aapl_df = yf.download("AAPL", start=start_date)
aapl_df.index.rename('date',inplace=True)
# Rename column and store
aapl_df = aapl_df.rename(columns={'Close':'AAPL'})
frame_dict['AAPL'] = aapl_df
# Download DJI data
dji = yf.Ticker("^DJI")
dji_df = yf.download("^DJI", start=start_date)
dji_df.index.rename('date',inplace=True)
# Rename column and store
dji_df = dji_df.rename(columns={'Close':'DJI'})
frame_dict['DJI'] = dji_df
# Download NASDAQ data
nasdaq = yf.Ticker("^IXIC")
nasdaq_df = yf.download("^IXIC", start=start_date)
nasdaq_df.index.rename('date',inplace=True)
# Rename column and store
nasdaq_df = nasdaq_df.rename(columns={'Close':'NASDAQ'})
frame_dict['NASDAQ'] = nasdaq_df
# Download S&P500 data
snp500 = yf.Ticker("^GSPC")
snp500_df = yf.download("^GSPC", start=start_date)
snp500_df.index.rename('date',inplace=True)
# Rename column and store
snp500_df = snp500_df.rename(columns={'Close':'SP500'})
frame_dict['SP500'] = snp500_df
# Download Russell 2000 data
rus2000 = yf.Ticker("^RUT")
rus2000_df = yf.download("^RUT", start=start_date)
rus2000_df.index.rename('date',inplace=True)
# Rename column and store
rus2000_df = rus2000_df.rename(columns={'Close':'RUS2K'})
frame_dict['RUS2K'] = rus2000_df
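The six download cells above differ only in the ticker symbol and the column name, so they could be collapsed into a loop. A hedged, equivalent sketch (the ticker-to-name mapping simply mirrors the cells above):

# Equivalent loop-based download of the same tickers -- a sketch, not a change in behavior
tickers = { "^VIX": "VIX", "AAPL": "AAPL", "^DJI": "DJI",
            "^IXIC": "NASDAQ", "^GSPC": "SP500", "^RUT": "RUS2K" }
frame_dict = {}
for ticker, name in tickers.items():
    df = yf.download( ticker, start=start_date )
    df.index.rename( 'date', inplace=True )
    frame_dict[ name ] = df.rename( columns={'Close': name} )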
plot_ranges_with_marker( frame_dict )
Value Min: (2017-11-03) => 9.140 Value Max: (2020-03-16) => 82.690 % Change Min: (2010-05-10) => -0.296 % Change Max: (2018-02-05) => 1.156
Value Min: (1982-07-08) => 0.049 Value Max: (2022-01-03) => 182.010 % Change Min: (2000-09-29) => -0.519 % Change Max: (1997-08-06) => 0.332
Value Min: (1992-10-09) => 3136.600 Value Max: (2022-01-04) => 36799.648 % Change Min: (2020-03-16) => -0.129 % Change Max: (2020-03-24) => 0.114
Value Min: (1974-10-03) => 54.870 Value Max: (2021-11-19) => 16057.440 % Change Min: (2020-03-16) => -0.123 % Change Max: (2001-01-03) => 0.142
Value Min: (1957-10-22) => 38.980 Value Max: (2022-01-03) => 4796.560 % Change Min: (1987-10-19) => -0.205 % Change Max: (2008-10-13) => 0.116
Value Min: (1987-10-28) => 106.080 Value Max: (2021-11-08) => 2442.740 % Change Min: (2020-03-16) => -0.143 % Change Max: (2020-03-24) => 0.094
# Normalize the index data (Russell 2000, Dow Jones, NASDAQ, S&P 500) and plot both value and percent change
norm, norm_change = normalize_merge( [rus2000_df['RUS2K'], dji_df['DJI'], nasdaq_df['NASDAQ'], snp500_df['SP500']])
norm.plot()
norm_change.plot()
# Normalize CPI percent change over 12 months, along with S&P 500 data and plot both value and change
norm, norm_change = normalize_merge( [cpi_df.pct_change(12), cpi_less_food_energy_df.pct_change(12), snp500_df['SP500']])
norm.plot()
norm_change.plot()
# Normalize and plot CPI vs S&P500 vs Fed Funds Rate both value and change
normalize_merge_plot_range( [cpi_df, snp500_df, ffr_df] )
# Normalize and plot CPI vs S&P500 vs Fed Funds Rate vs Unemployment Rate both value and change
normalize_merge_plot_range( [cpi_df, cpi_less_food_energy_df, snp500_df, ffr_df, unrate_df] )
# A date range encompassing the .com bubble
start, stop = ('1999-01-01', '2004-01-01')
# Normalize and plot the 12-month CPI percent change vs the Fed Funds Rate vs the Unemployment Rate (both value and change)
normalize_merge_plot_range( [cpi_df.pct_change(12), cpi_less_food_energy_df.pct_change(12), ffr_df, unrate_df ], start, stop )
# Plot the market indices, normalized over the .com bubble (both value and change)
normalize_merge_plot_range( [rus2000_df['RUS2K'], snp500_df['SP500'], dji_df['DJI'], aapl_df['AAPL']], start, stop )
# Plot Stock Market data over .com bubble (value, change, peaks/valleys, peaks/valleys range)
plot_ranges_with_marker( frame_dict, start, stop )
Value Min: (2003-12-17) => 15.580 Value Max: (2002-08-05) => 45.080 % Change Min: (2000-01-07) => -0.156 % Change Max: (2001-09-17) => 0.312
Value Min: (2003-04-17) => 0.234 Value Max: (2000-03-22) => 1.287 % Change Min: (2000-09-29) => -0.519 % Change Max: (1999-10-14) => 0.143
Value Min: (2002-10-09) => 7286.270 Value Max: (2000-01-14) => 11722.980 % Change Min: (2001-09-17) => -0.071 % Change Max: (2002-07-24) => 0.063
Value Min: (2002-10-09) => 1114.110 Value Max: (2000-03-10) => 5048.620 % Change Min: (2000-04-14) => -0.097 % Change Max: (2001-01-03) => 0.142
Value Min: (2002-10-09) => 776.760 Value Max: (2000-03-24) => 1527.460 % Change Min: (2000-04-14) => -0.058 % Change Max: (2002-07-24) => 0.057
Value Min: (2002-10-09) => 327.040 Value Max: (2000-03-09) => 606.050 % Change Min: (2000-04-14) => -0.073 % Change Max: (2000-04-18) => 0.058
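Since the comparison is really about how far each index fell from its pre-crash high, a peak-to-trough drawdown gives a single summary number for the window. A hedged sketch for the S&P 500 over the .com bubble range (the cummax-based drawdown is a standard idiom, not part of the plotting code above):

# Maximum drawdown of the S&P 500 over the .com bubble window -- illustrative only
window = snp500_df['SP500'][ start : stop ]
drawdown = window / window.cummax() - 1.0
print( f"Max drawdown: {drawdown.min():.1%} on {drawdown.idxmin().date()}" )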
# A date range encompassing the subprime mortgage collapse
start, stop = ('2006-01-01', '2012-01-01')
# Normalize and plot the 12-month CPI percent change vs the Fed Funds Rate vs the Unemployment Rate (both value and change)
normalize_merge_plot_range( [cpi_df.pct_change(12), cpi_less_food_energy_df.pct_change(12), ffr_df, unrate_df ], start, stop )
# Plot the market indices, normalized over the subprime mortgage collapse (both value and change)
normalize_merge_plot_range( [rus2000_df['RUS2K'], snp500_df['SP500'], dji_df['DJI'], aapl_df['AAPL']], start, stop )
# Plot Stock Market data over subprime mortgage collapse (value, change, peaks/valleys, peaks/valleys range)
plot_ranges_with_marker( frame_dict, start, stop )
Value Min: (2007-01-24) => 9.890 Value Max: (2008-11-20) => 80.860 % Change Min: (2010-05-10) => -0.296 % Change Max: (2007-02-27) => 0.642
Value Min: (2006-07-14) => 1.810 Value Max: (2011-10-18) => 15.080 % Change Min: (2008-09-29) => -0.179 % Change Max: (2008-10-13) => 0.139
Value Min: (2009-03-09) => 6547.050 Value Max: (2007-10-09) => 14164.530 % Change Min: (2008-10-15) => -0.079 % Change Max: (2008-10-13) => 0.111
Value Min: (2009-03-09) => 1268.640 Value Max: (2011-04-29) => 2873.540 % Change Min: (2008-09-29) => -0.091 % Change Max: (2008-10-13) => 0.118
Value Min: (2009-03-09) => 676.530 Value Max: (2007-10-09) => 1565.150 % Change Min: (2008-10-15) => -0.090 % Change Max: (2008-10-13) => 0.116
Value Min: (2009-03-09) => 343.260 Value Max: (2011-04-29) => 865.290 % Change Min: (2008-12-01) => -0.119 % Change Max: (2008-10-13) => 0.093
# A date range encompassing the COVID-19 pandemic
start, stop = ('2018-01-01', None)
# Normalize and plot the 12-month CPI percent change vs the Fed Funds Rate vs the Unemployment Rate (both value and change)
normalize_merge_plot_range( [cpi_df.pct_change(12), cpi_less_food_energy_df.pct_change(12), ffr_df, unrate_df ], start, stop )
# Plot the market indices, normalized over the pandemic (both value and change)
normalize_merge_plot_range( [rus2000_df['RUS2K'], snp500_df['SP500'], dji_df['DJI'], aapl_df['AAPL']], start, stop )
# Plot Stock Market data over the pandemic (value, change, peaks/valleys, peaks/valleys range)
plot_ranges_with_marker( frame_dict, start, stop )
Value Min: (2018-01-03) => 9.150 Value Max: (2020-03-16) => 82.690 % Change Min: (2020-03-13) => -0.234 % Change Max: (2018-02-05) => 1.156
Value Min: (2019-01-03) => 35.548 Value Max: (2022-01-03) => 182.010 % Change Min: (2020-03-16) => -0.129 % Change Max: (2020-03-13) => 0.120
Value Min: (2020-03-23) => 18591.930 Value Max: (2022-01-04) => 36799.648 % Change Min: (2020-03-16) => -0.129 % Change Max: (2020-03-24) => 0.114
Value Min: (2018-12-24) => 6192.920 Value Max: (2021-11-19) => 16057.440 % Change Min: (2020-03-16) => -0.123 % Change Max: (2020-03-13) => 0.093
Value Min: (2020-03-23) => 2237.400 Value Max: (2022-01-03) => 4796.560 % Change Min: (2020-03-16) => -0.120 % Change Max: (2020-03-24) => 0.094
Value Min: (2020-03-18) => 991.160 Value Max: (2021-11-08) => 2442.740 % Change Min: (2020-03-16) => -0.143 % Change Max: (2020-03-24) => 0.094
# A somewhat arbitrary date range depicting the pandemic aftermath
start, stop = ('2021-04-01',None)
# Normalize and plot the 12-month CPI percent change vs the Fed Funds Rate vs the Unemployment Rate (both value and change)
normalize_merge_plot_range( [cpi_df.pct_change(12), cpi_less_food_energy_df.pct_change(12), ffr_df, unrate_df ], start, stop )
# Plot the market indices, normalized over the post-pandemic period (both value and change)
normalize_merge_plot_range( [rus2000_df['RUS2K'], snp500_df['SP500'], dji_df['DJI'], aapl_df['AAPL']], start, stop )
# Plot Stock Market data over the post-pandemic period (value, change, peaks/valleys, peaks/valleys range)
plot_ranges_with_marker( frame_dict, start, stop )
Value Min: (2021-10-21) => 15.010 Value Max: (2022-03-07) => 36.450 % Change Min: (2021-11-29) => -0.198 % Change Max: (2021-11-26) => 0.540
Value Min: (2021-05-12) => 122.770 Value Max: (2022-01-03) => 182.010 % Change Min: (2022-05-18) => -0.056 % Change Max: (2022-01-28) => 0.070
Value Min: (2022-05-19) => 31253.131 Value Max: (2022-01-04) => 36799.648 % Change Min: (2022-05-18) => -0.036 % Change Max: (2022-05-04) => 0.028
Value Min: (2022-05-24) => 11264.450 Value Max: (2021-11-19) => 16057.440 % Change Min: (2022-05-05) => -0.050 % Change Max: (2022-05-13) => 0.038
Value Min: (2022-05-19) => 3900.790 Value Max: (2022-01-03) => 4796.560 % Change Min: (2022-05-18) => -0.040 % Change Max: (2022-05-04) => 0.030
Value Min: (2022-05-11) => 1718.140 Value Max: (2021-11-08) => 2442.740 % Change Min: (2022-05-09) => -0.042 % Change Max: (2022-05-17) => 0.032
# A date range encompassing the .com bubble and the subprime mortgage collapse
start, stop = ('1999-01-01', '2012-01-01')
normalize_merge_plot_range( [cpi_df.pct_change(12), cpi_less_food_energy_df.pct_change(12), snp500_df['SP500'], ffr_df, unrate_df ], start, stop )
plot_ranges_with_marker( frame_dict, start, stop )
Value Min: (2007-01-24) => 9.890 Value Max: (2008-11-20) => 80.860 % Change Min: (2010-05-10) => -0.296 % Change Max: (2007-02-27) => 0.642
Value Min: (2003-04-17) => 0.234 Value Max: (2011-10-18) => 15.080 % Change Min: (2000-09-29) => -0.519 % Change Max: (1999-10-14) => 0.143
Value Min: (2009-03-09) => 6547.050 Value Max: (2007-10-09) => 14164.530 % Change Min: (2008-10-15) => -0.079 % Change Max: (2008-10-13) => 0.111
Value Min: (2002-10-09) => 1114.110 Value Max: (2000-03-10) => 5048.620 % Change Min: (2000-04-14) => -0.097 % Change Max: (2001-01-03) => 0.142
Value Min: (2009-03-09) => 676.530 Value Max: (2007-10-09) => 1565.150 % Change Min: (2008-10-15) => -0.090 % Change Max: (2008-10-13) => 0.116
Value Min: (2002-10-09) => 327.040 Value Max: (2011-04-29) => 865.290 % Change Min: (2008-12-01) => -0.119 % Change Max: (2008-10-13) => 0.093
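As a closing cross-check, the bottoms in each crash window can be tabulated directly instead of read off the plots. A hedged sketch that reuses the frames downloaded above (the window labels and date ranges just restate the ones used earlier):

# Compare the bottom (minimum close) of each index in each crash window -- illustrative only
windows = { ".com bubble":       ("1999-01-01", "2004-01-01"),
            "subprime collapse": ("2006-01-01", "2012-01-01"),
            "pandemic":          ("2018-01-01", None) }
for label, (w_start, w_stop) in windows.items():
    print( label )
    for name, frame in frame_dict.items():
        seg = frame[ w_start : w_stop ][ name ]
        print( f"  {name:7s} bottom {seg.min():12.3f} on {seg.idxmin().date()}" )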