From d7204cc7bbda258af75ff9c029095acb0124a512 Mon Sep 17 00:00:00 2001 From: Nate Date: Tue, 31 Mar 2015 11:21:36 -0400 Subject: [PATCH 1/4] Add numexpr to requirements for pandas.Dataframe.query() --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2e0c4a7..6ac2598 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,4 +6,5 @@ python-dateutil==2.2 pytz requests==2.2.1 xlrd==0.9.2 -celery>=3.1 \ No newline at end of file +celery>=3.1 +numexpr==2.4 From 335e713bcf25b9ac0312a992c9738dc4a7f8f906 Mon Sep 17 00:00:00 2001 From: Nate Date: Tue, 31 Mar 2015 11:21:41 -0400 Subject: [PATCH 2/4] Add csv result format parsing to fetch_oasis, add get_lmp functions --- pyiso/caiso.py | 121 +++++++++++++++++++++++++++++++------------- tests/test_caiso.py | 50 ++++++++++++++++-- 2 files changed, 131 insertions(+), 40 deletions(-) diff --git a/pyiso/caiso.py b/pyiso/caiso.py index 4890e67..c8b1920 100644 --- a/pyiso/caiso.py +++ b/pyiso/caiso.py @@ -3,6 +3,8 @@ import copy import re from bs4 import BeautifulSoup +import StringIO +import pandas class CAISOClient(BaseClient): @@ -13,15 +15,15 @@ class CAISOClient(BaseClient): see http://www.caiso.com/Documents/InterfaceSpecifications-OASISv4_1_3.pdf """ NAME = 'CAISO' - + base_url_oasis = 'http://oasis.caiso.com/oasisapi/SingleZip' base_url_gen = 'http://content.caiso.com/green/renewrpt/' base_url_outlook = 'http://content.caiso.com/outlook/SP/' base_payload = {'version': 1} oasis_request_time_format = '%Y%m%dT%H:%M-0000' - + TZ_NAME = 'America/Los_Angeles' - + fuels = { 'GEOTHERMAL': 'geo', 'BIOMASS': 'biomass', @@ -33,7 +35,7 @@ class CAISOClient(BaseClient): 'SOLAR THERMAL': 'solarth', 'NUCLEAR': 'nuclear', 'THERMAL': 'thermal', - 'HYDRO': 'hydro', + 'HYDRO': 'hydro', } oasis_markets = { @@ -167,7 +169,7 @@ def construct_oasis_payload(self, queryname, **kwargs): # get market id market_run_id = self.oasis_markets[self.options['market']] - + # construct payload payload = {'queryname': queryname, 'market_run_id': market_run_id, @@ -178,7 +180,7 @@ def construct_oasis_payload(self, queryname, **kwargs): payload.update(kwargs) # return - return payload + return payload def set_dt_index(self, df, date, hours, end_of_hour=True): if end_of_hour: @@ -208,7 +210,7 @@ def _generation_historical(self): # set up request url_file = this_date.strftime('%Y%m%d_DailyRenewablesWatch.txt') url = self.base_url_gen + url_file - + # carry out request response = self.request(url) if not response: @@ -247,25 +249,25 @@ def _generation_historical(self): # return return parsed_data - + def fetch_oasis(self, payload={}): """Returns a list of report data elements, or an empty list if an error was encountered.""" # set up storage raw_data = [] - + # try get response = self.request(self.base_url_oasis, params=payload) # have request if not response: return [] - + # read data from zip content = self.unzip(response.content) if not content: return [] - + # load xml into soup soup = BeautifulSoup(content) - + # check xml content error = soup.find('m:error') if error: @@ -274,24 +276,71 @@ def fetch_oasis(self, payload={}): msg = 'XML error for CAISO OASIS with payload %s: %s %s' % (payload, code, desc) self.logger.error(msg) return [] - + + else: + if payload.get('resultformat', False) == 6: + return content + else: + raw_data = soup.find_all('report_data') + return raw_data + + def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, + market_run_id='RTM', **kwargs): + + if latest: + 
queryname = 'PRC_CURR_LMP' + + # these are ignored, but must be present + start_at = datetime.now() + end_at = datetime.now() else: - raw_data = soup.find_all('report_data') - return raw_data - + LMP_MARKETS = { + 'RTM': 'PRC_INTVL_LMP', + 'DAM': 'PRC_LMP', + 'HASP': 'PRC_HASP_LMP',} + queryname = LMP_MARKETS[market_run_id] + + payload = {'queryname': queryname, + 'startdatetime': start_at.strftime(self.oasis_request_time_format), + 'enddatetime': end_at.strftime(self.oasis_request_time_format), + 'node': node_id, + } + payload.update(self.base_payload) + payload.update({'resultformat' : 6}) # CSV + payload.update(kwargs) + + # Fetch data + data = self.fetch_oasis(payload=payload) + + # Turn into pandas Dataframe + str_data = StringIO.StringIO(data) + df = pandas.DataFrame.from_csv(str_data, sep=",") + + # strip congestion and loss prices + df = df.query('LMP_TYPE == "LMP"') + + # Get all data indexed on 'INTERVALSTARTTIME_GMT' as panda datetime + if df.index.name != 'INTERVALSTARTTIME_GMT': + df.set_index('INTERVALSTARTTIME_GMT', inplace=True) + df.index.name = 'INTERVALSTARTTIME_GMT' + df.index = pandas.to_datetime(df.index) + + return df + + def parse_oasis_renewable(self, raw_data): """Parse raw data output of fetch_oasis for renewables.""" # set up storage preparsed_data = {} parsed_data = [] - + # extract values from xml for raw_soup_dp in raw_data: # set up storage for timestamp ts = self.utcify(raw_soup_dp.find('interval_start_gmt').string) if ts not in preparsed_data: preparsed_data[ts] = {'wind': 0, 'solar': 0} - + # store generation value try: fuel_name = raw_soup_dp.find('renewable_type').string.lower() @@ -300,7 +349,7 @@ def parse_oasis_renewable(self, raw_data): except TypeError: self.logger.error('Error in schema for CAISO OASIS result %s' % raw_soup_dp.prettify()) continue - + # collect values into dps freq = self.options.get('freq', self.FREQUENCY_CHOICES.hourly) market = self.options.get('market', self.MARKET_CHOICES.hourly) @@ -311,14 +360,14 @@ def parse_oasis_renewable(self, raw_data): 'freq': freq, 'market': market, 'gen_MW': 0, 'ba_name': self.NAME} - + # collect data for fuel_name in ['wind', 'solar']: parsed_dp = copy.deepcopy(base_parsed_dp) parsed_dp['fuel_name'] = fuel_name parsed_dp['gen_MW'] += preparsed_dp[fuel_name] parsed_data.append(parsed_dp) - + # return return parsed_data @@ -337,7 +386,7 @@ def parse_oasis_slrs(self, raw_data): freq = self.options.get('freq', self.FREQUENCY_CHOICES.fivemin) market = self.options.get('market', self.MARKET_CHOICES.fivemin) - + # set up storage extracted_data = {} parsed_data = [] @@ -359,7 +408,7 @@ def parse_oasis_slrs(self, raw_data): try: extracted_data[ts] += val except KeyError: - extracted_data[ts] = val + extracted_data[ts] = val # assemble data for ts in sorted(extracted_data.keys()): @@ -378,7 +427,7 @@ def parse_oasis_demand_forecast(self, raw_data): """Parse raw data output of fetch_oasis for system-wide 5-min RTM demand forecast.""" # set up storage parsed_data = [] - + # set up freq and market freq = self.options.get('freq', self.FREQUENCY_CHOICES.fivemin) market = self.options.get('market', self.MARKET_CHOICES.fivemin) @@ -391,7 +440,7 @@ def parse_oasis_demand_forecast(self, raw_data): for raw_soup_dp in raw_data: if raw_soup_dp.find('data_item').string == data_item_key and \ raw_soup_dp.find('resource_name').string == 'CA ISO-TAC': - + # parse timestamp ts = self.utcify(raw_soup_dp.find('interval_start_gmt').string) @@ -400,14 +449,14 @@ def parse_oasis_demand_forecast(self, raw_data): 'freq': freq, 
'market': market, 'ba_name': self.NAME} - + # store generation value parsed_dp['load_MW'] = float(raw_soup_dp.find('value').string) parsed_data.append(parsed_dp) - + # return return parsed_data - + def todays_outlook_time(self): # get timestamp response = self.request(self.base_url_outlook+'systemconditions.html') @@ -425,7 +474,7 @@ def fetch_todays_outlook_renewables(self): # get renewables data response = self.request(self.base_url_outlook+'renewables.html') return BeautifulSoup(response.content) - + def parse_todays_outlook_renewables(self, soup, ts): # set up storage parsed_data = [] @@ -448,7 +497,7 @@ def parse_todays_outlook_renewables(self, soup, ts): parsed_dp['gen_MW'] = float(match.group('val')) parsed_dp['fuel_name'] = fuel_name parsed_data.append(parsed_dp) - + # actual 'renewable' value should be only renewables that aren't accounted for in other categories accounted_for_ren = 0 for dp in parsed_data: @@ -457,9 +506,9 @@ def parse_todays_outlook_renewables(self, soup, ts): for dp in parsed_data: if dp['fuel_name'] == 'renewable': dp['gen_MW'] -= accounted_for_ren - - return parsed_data - + + return parsed_data + def _generation_latest(self, **kwargs): # set up parsed_data = [] @@ -467,7 +516,7 @@ def _generation_latest(self, **kwargs): # override market and freq to 10 minute self.options['market'] = self.MARKET_CHOICES.tenmin self.options['freq'] = self.FREQUENCY_CHOICES.tenmin - + # get and parse "Today's Outlook" data soup = self.fetch_todays_outlook_renewables() ts = self.todays_outlook_time() @@ -476,7 +525,7 @@ def _generation_latest(self, **kwargs): return parsed_data total_ren_MW = sum([dp['gen_MW'] for dp in parsed_data]) ts = parsed_data[0]['timestamp'] - + # get OASIS total gen data payload = self.construct_oasis_payload(queryname='ENE_SLRS', schedule='ALL') oasis_data = self.fetch_oasis(payload=payload) @@ -529,6 +578,6 @@ def _generation_forecast(self, **kwargs): dp['gen_MW'] -= total_ren_MW[dp['timestamp']] # add to storage parsed_data.append(dp) - + # return return parsed_data diff --git a/tests/test_caiso.py b/tests/test_caiso.py index 5fe85e6..1434d24 100644 --- a/tests/test_caiso.py +++ b/tests/test_caiso.py @@ -6,6 +6,7 @@ import pytz from datetime import date, datetime, timedelta from bs4 import BeautifulSoup +import pandas class TestCAISOBase(TestCase): @@ -413,6 +414,20 @@ def test_fetch_oasis_demand_rtm(self): 26723\n\ ') + def test_fetch_oasis_csv(self): + c = self.create_client('CAISO') + ts = c.utcify('2014-05-08 12:00') + payload = {'queryname': 'SLD_FCST', + 'market_run_id': 'RTM', + 'startdatetime': (ts-timedelta(minutes=20)).strftime(c.oasis_request_time_format), + 'enddatetime': (ts+timedelta(minutes=20)).strftime(c.oasis_request_time_format), + 'resultformat': 6, + } + payload.update(c.base_payload) + data = c.fetch_oasis(payload=payload) + self.assertEqual(len(data), 7828) + self.assertIn('INTERVALSTARTTIME_GMT', data) + def test_parse_oasis_demand_rtm(self): # set up list of data c = self.create_client('CAISO') @@ -425,7 +440,7 @@ def test_parse_oasis_demand_rtm(self): # test self.assertEqual(len(parsed_data), 1) - expected = {'ba_name': 'CAISO', + expected = {'ba_name': 'CAISO', 'timestamp': datetime(2014, 5, 8, 18, 55, tzinfo=pytz.utc), 'freq': '5m', 'market': 'RT5M', 'load_MW': 26755.0} @@ -528,7 +543,7 @@ def test_parse_oasis_slrs_gen_rtm(self): # test self.assertEqual(len(parsed_data), 2) - expected = {'ba_name': 'CAISO', + expected = {'ba_name': 'CAISO', 'timestamp': datetime(2013, 9, 19, 17, 0, tzinfo=pytz.utc), 'freq': '5m', 'market': 
'RT5M', 'fuel_name': 'other', 'gen_MW': 23900.79} @@ -546,7 +561,7 @@ def test_parse_oasis_slrs_trade_dam(self): # test self.assertEqual(len(parsed_data), 3) - expected = {'ba_name': 'CAISO', + expected = {'ba_name': 'CAISO', 'timestamp': datetime(2013, 9, 19, 7, 0, tzinfo=pytz.utc), 'freq': '1hr', 'market': 'DAHR', 'net_exp_MW': -5014.0} @@ -564,8 +579,35 @@ def test_parse_oasis_renewables_dam(self): # test self.assertEqual(len(parsed_data), 6) - expected = {'ba_name': 'CAISO', + expected = {'ba_name': 'CAISO', 'timestamp': datetime(2013, 9, 20, 6, 0, tzinfo=pytz.utc), 'freq': '1hr', 'market': 'DAHR', 'fuel_name': 'wind', 'gen_MW': 580.83} self.assertEqual(expected, parsed_data[0]) + + def test_get_lmp_latest(self): + c = self.create_client('CAISO') + ts = datetime.utcnow() + lmp = c.get_lmp('SLAP_PGP2-APND') + self.assertEqual(len(lmp), 1) + + self.assertGreaterEqual(lmp.iloc[0]['LMP_PRC'], 0) + self.assertLessEqual(lmp.iloc[0]['LMP_PRC'], 1500) + + # lmp is a dataframe, lmp.iloc[0] is a Series, Series.name is the index of that entry + self.assertGreater(lmp.iloc[0].name, ts - timedelta(minutes=5)) + self.assertLess(lmp.iloc[0].name, ts + timedelta(minutes=5)) + + def test_get_lmp_hist(self): + c = self.create_client('CAISO') + ts = datetime.utcnow() + start = ts - timedelta(hours=2) + lmps = c.get_lmp('SLAP_PGP2-APND', latest=False, start_at=start, end_at=ts) + self.assertEqual(len(lmps), 24) + + self.assertGreaterEqual(lmps['MW'].max(), 0) + self.assertLess(lmps['MW'].max(), 1500) + self.assertGreaterEqual(lmps['MW'].min(), -300) + + self.assertGreaterEqual(lmps.index.to_pydatetime().min(), start) + self.assertLessEqual(lmps.index.to_pydatetime().max(), ts) From 8e1223e2cf973251a4825812cd7c50303ff890d0 Mon Sep 17 00:00:00 2001 From: Nate Date: Tue, 31 Mar 2015 18:31:04 -0400 Subject: [PATCH 3/4] get_lmp now returns dict, not pandas.dataframe --- pyiso/caiso.py | 11 +++++++++-- tests/test_caiso.py | 43 +++++++++++++++++++++++++++++++++++-------- 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/pyiso/caiso.py b/pyiso/caiso.py index c8b1920..f5d4d58 100644 --- a/pyiso/caiso.py +++ b/pyiso/caiso.py @@ -284,7 +284,7 @@ def fetch_oasis(self, payload={}): raw_data = soup.find_all('report_data') return raw_data - def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, + def get_lmp_as_dataframe(self, node_id, latest=True, start_at=False, end_at=False, market_run_id='RTM', **kwargs): if latest: @@ -324,9 +324,16 @@ def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, df.set_index('INTERVALSTARTTIME_GMT', inplace=True) df.index.name = 'INTERVALSTARTTIME_GMT' df.index = pandas.to_datetime(df.index) - + df.rename(columns={'MW': 'LMP_PRC'}, inplace=True) return df + def get_lmp(self, node_id, **kwargs): + df = self.get_lmp_as_dataframe(node_id, **kwargs) + lmp_dict = {} + for i, row in df.iterrows(): + lmp_dict[i.to_pydatetime()] = row['LMP_PRC'] + return lmp_dict + def parse_oasis_renewable(self, raw_data): """Parse raw data output of fetch_oasis for renewables.""" diff --git a/tests/test_caiso.py b/tests/test_caiso.py index 1434d24..2d814df 100644 --- a/tests/test_caiso.py +++ b/tests/test_caiso.py @@ -585,10 +585,10 @@ def test_parse_oasis_renewables_dam(self): 'gen_MW': 580.83} self.assertEqual(expected, parsed_data[0]) - def test_get_lmp_latest(self): + def test_get_lmp_dataframe_latest(self): c = self.create_client('CAISO') ts = datetime.utcnow() - lmp = c.get_lmp('SLAP_PGP2-APND') + lmp = c.get_lmp_as_dataframe('SLAP_PGP2-APND') 
self.assertEqual(len(lmp), 1) self.assertGreaterEqual(lmp.iloc[0]['LMP_PRC'], 0) @@ -598,16 +598,43 @@ def test_get_lmp_latest(self): self.assertGreater(lmp.iloc[0].name, ts - timedelta(minutes=5)) self.assertLess(lmp.iloc[0].name, ts + timedelta(minutes=5)) - def test_get_lmp_hist(self): + def test_get_lmp_dataframe_hist(self): c = self.create_client('CAISO') - ts = datetime.utcnow() + ts = datetime(2015, 3, 1, 11, 0, 0) start = ts - timedelta(hours=2) - lmps = c.get_lmp('SLAP_PGP2-APND', latest=False, start_at=start, end_at=ts) + lmps = c.get_lmp_as_dataframe('SLAP_PGP2-APND', latest=False, start_at=start, end_at=ts) self.assertEqual(len(lmps), 24) - self.assertGreaterEqual(lmps['MW'].max(), 0) - self.assertLess(lmps['MW'].max(), 1500) - self.assertGreaterEqual(lmps['MW'].min(), -300) + self.assertGreaterEqual(lmps['LMP_PRC'].max(), 0) + self.assertLess(lmps['LMP_PRC'].max(), 1500) + self.assertGreaterEqual(lmps['LMP_PRC'].min(), -300) self.assertGreaterEqual(lmps.index.to_pydatetime().min(), start) self.assertLessEqual(lmps.index.to_pydatetime().max(), ts) + + def test_get_lmp_latest(self): + c = self.create_client('CAISO') + ts = datetime.utcnow() + lmp = c.get_lmp('SLAP_PGP2-APND') + self.assertEqual(len(lmp), 1) + + self.assertGreaterEqual(min(lmp.keys()), ts - timedelta(minutes=5)) + self.assertLessEqual(max(lmp.keys()), ts + timedelta(minutes=5)) + + self.assertGreaterEqual(min(lmp.values()), -300) + self.assertLess(max(lmp.values()), 1500) + + def test_get_lmp_hist(self): + c = self.create_client('CAISO') + ts = datetime(2015, 3, 1, 11, 0, 0) + start = ts - timedelta(hours=2) + lmp = c.get_lmp('SLAP_PGP2-APND', latest=False, start_at=start, end_at=ts) + self.assertEqual(len(lmp), 24) + + self.assertGreaterEqual(min(lmp.keys()), start) + self.assertLessEqual(max(lmp.keys()), ts) + + self.assertGreaterEqual(min(lmp.values()), -300) + self.assertLess(max(lmp.values()), 1500) + + From f2750e8b15b05de39a15505fdc971c68cb1987da Mon Sep 17 00:00:00 2001 From: anna Date: Tue, 31 Mar 2015 15:43:29 -0700 Subject: [PATCH 4/4] utcify dates, and reuse payload and options methods --- pyiso/caiso.py | 92 +++++++++++++++++++++++---------------------- tests/test_caiso.py | 10 ++--- 2 files changed, 52 insertions(+), 50 deletions(-) diff --git a/pyiso/caiso.py b/pyiso/caiso.py index c8b1920..b173923 100644 --- a/pyiso/caiso.py +++ b/pyiso/caiso.py @@ -43,6 +43,11 @@ class CAISOClient(BaseClient): BaseClient.MARKET_CHOICES.fivemin: 'RTM', BaseClient.MARKET_CHOICES.dam: 'DAM', } + LMP_MARKETS = { + 'RTM': 'PRC_INTVL_LMP', + 'DAM': 'PRC_LMP', + 'HASP': 'PRC_HASP_LMP', + } def get_generation(self, latest=False, yesterday=False, start_at=False, end_at=False, **kwargs): @@ -157,6 +162,44 @@ def get_trade(self, latest=False, # return all data return parsed_data + def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, + market_run_id='RTM', **kwargs): + """Returns a pandas DataFrame, not a list of dicts""" + # set args + self.handle_options(data='lmp', latest=latest, + start_at=start_at, end_at=end_at, + market_run_id=market_run_id, + **kwargs) + + if latest: + queryname = 'PRC_CURR_LMP' + else: + queryname = self.LMP_MARKETS[market_run_id] + payload = self.construct_oasis_payload(queryname, + resultformat=6, # csv + node=node_id) + + # Fetch data + data = self.fetch_oasis(payload=payload) + + # Turn into pandas Dataframe + str_data = StringIO.StringIO(data) + df = pandas.DataFrame.from_csv(str_data, sep=",") + + # strip congestion and loss prices + df = df.query('LMP_TYPE == "LMP"') + + # 
Get all data indexed on 'INTERVALSTARTTIME_GMT' as panda datetime + if df.index.name != 'INTERVALSTARTTIME_GMT': + df.set_index('INTERVALSTARTTIME_GMT', inplace=True) + df.index.name = 'INTERVALSTARTTIME_GMT' + df.index = pandas.to_datetime(df.index) + + # utcify + df.index = self.utcify_index(df.index, tz_name='UTC') + + return df + def construct_oasis_payload(self, queryname, **kwargs): # get start and end times if self.options['latest']: @@ -168,7 +211,10 @@ def construct_oasis_payload(self, queryname, **kwargs): enddatetime = self.options['end_at'] # get market id - market_run_id = self.oasis_markets[self.options['market']] + try: + market_run_id = self.options['market_run_id'] + except KeyError: + market_run_id = self.oasis_markets[self.options['market']] # construct payload payload = {'queryname': queryname, @@ -284,50 +330,6 @@ def fetch_oasis(self, payload={}): raw_data = soup.find_all('report_data') return raw_data - def get_lmp(self, node_id, latest=True, start_at=False, end_at=False, - market_run_id='RTM', **kwargs): - - if latest: - queryname = 'PRC_CURR_LMP' - - # these are ignored, but must be present - start_at = datetime.now() - end_at = datetime.now() - else: - LMP_MARKETS = { - 'RTM': 'PRC_INTVL_LMP', - 'DAM': 'PRC_LMP', - 'HASP': 'PRC_HASP_LMP',} - queryname = LMP_MARKETS[market_run_id] - - payload = {'queryname': queryname, - 'startdatetime': start_at.strftime(self.oasis_request_time_format), - 'enddatetime': end_at.strftime(self.oasis_request_time_format), - 'node': node_id, - } - payload.update(self.base_payload) - payload.update({'resultformat' : 6}) # CSV - payload.update(kwargs) - - # Fetch data - data = self.fetch_oasis(payload=payload) - - # Turn into pandas Dataframe - str_data = StringIO.StringIO(data) - df = pandas.DataFrame.from_csv(str_data, sep=",") - - # strip congestion and loss prices - df = df.query('LMP_TYPE == "LMP"') - - # Get all data indexed on 'INTERVALSTARTTIME_GMT' as panda datetime - if df.index.name != 'INTERVALSTARTTIME_GMT': - df.set_index('INTERVALSTARTTIME_GMT', inplace=True) - df.index.name = 'INTERVALSTARTTIME_GMT' - df.index = pandas.to_datetime(df.index) - - return df - - def parse_oasis_renewable(self, raw_data): """Parse raw data output of fetch_oasis for renewables.""" # set up storage diff --git a/tests/test_caiso.py b/tests/test_caiso.py index 1434d24..ed0b3dd 100644 --- a/tests/test_caiso.py +++ b/tests/test_caiso.py @@ -587,11 +587,11 @@ def test_parse_oasis_renewables_dam(self): def test_get_lmp_latest(self): c = self.create_client('CAISO') - ts = datetime.utcnow() + ts = pytz.utc.localize(datetime.utcnow()) lmp = c.get_lmp('SLAP_PGP2-APND') self.assertEqual(len(lmp), 1) - self.assertGreaterEqual(lmp.iloc[0]['LMP_PRC'], 0) + self.assertGreaterEqual(lmp.iloc[0]['LMP_PRC'], -300) self.assertLessEqual(lmp.iloc[0]['LMP_PRC'], 1500) # lmp is a dataframe, lmp.iloc[0] is a Series, Series.name is the index of that entry @@ -600,14 +600,14 @@ def test_get_lmp_latest(self): def test_get_lmp_hist(self): c = self.create_client('CAISO') - ts = datetime.utcnow() + ts = pytz.utc.localize(datetime(2015, 3, 1, 12)) start = ts - timedelta(hours=2) lmps = c.get_lmp('SLAP_PGP2-APND', latest=False, start_at=start, end_at=ts) self.assertEqual(len(lmps), 24) self.assertGreaterEqual(lmps['MW'].max(), 0) - self.assertLess(lmps['MW'].max(), 1500) - self.assertGreaterEqual(lmps['MW'].min(), -300) + self.assertLess(lmps['MW'].max(), 30) + self.assertGreaterEqual(lmps['MW'].min(), 20) self.assertGreaterEqual(lmps.index.to_pydatetime().min(), start) 
self.assertLessEqual(lmps.index.to_pydatetime().max(), ts)
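
For reference, below is a minimal usage sketch of the LMP interface added in this series. It is illustrative only and not part of the patches. Note that PATCH 3 and PATCH 4 diverge: PATCH 3 changes get_lmp to return a dict of {UTC datetime: price} and moves the DataFrame behavior into get_lmp_as_dataframe, while PATCH 4 (based on PATCH 2) keeps get_lmp returning a pandas DataFrame. The sketch follows PATCH 4's DataFrame-returning get_lmp; client_factory is assumed to be pyiso's public entry point, and 'SLAP_PGP2-APND' is simply the node id used in the tests above.

    # Illustrative sketch only -- not part of the patch series.
    # Assumes the patched pyiso is importable and the CAISO OASIS API is reachable.
    from datetime import datetime, timedelta

    import pytz
    from pyiso import client_factory  # assumed public factory for pyiso clients

    c = client_factory('CAISO')

    # Latest real-time LMP: a one-row DataFrame indexed on INTERVALSTARTTIME_GMT.
    # Per the tests, the PRC_CURR_LMP query exposes the price as LMP_PRC.
    latest = c.get_lmp('SLAP_PGP2-APND')
    print(latest.index[0], latest.iloc[0]['LMP_PRC'])

    # Two hours of 5-minute RTM history; per test_get_lmp_hist in PATCH 4,
    # the PRC_INTVL_LMP query reports the price in the MW column.
    end_at = pytz.utc.localize(datetime.utcnow())
    start_at = end_at - timedelta(hours=2)
    hist = c.get_lmp('SLAP_PGP2-APND', latest=False,
                     start_at=start_at, end_at=end_at, market_run_id='RTM')
    print(hist['MW'].describe())

The dict-returning get_lmp of PATCH 3 trades pandas convenience for a dependency-free return type; whichever variant is merged, only one of PATCH 3 or PATCH 4 should be applied on top of PATCH 2.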