diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index 48a340d8..68714579 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -1028,57 +1028,56 @@ def get_aqs(
             typer.echo("Note that the daily option currently requires monetio >0.2.5")
             raise
 
-    if not daily:
-        with _timer("Fetching site metadata"):
-            # Need UTC offset in order to compute local time
-            # But currently the `meta=True` option doesn't work
-            meta0 = pd.read_csv(
-                "https://aqs.epa.gov/aqsweb/airdata/aqs_sites.zip",
-                encoding="ISO-8859-1",
-                usecols=[0, 1, 2, 17, 24, 25],
-                dtype=str,
+    with _timer("Fetching site metadata"):
+        # Need UTC offset in order to compute local time
+        # But currently the `meta=True` option doesn't work
+        meta0 = pd.read_csv(
+            "https://aqs.epa.gov/aqsweb/airdata/aqs_sites.zip",
+            encoding="ISO-8859-1",
+            usecols=[0, 1, 2, 17, 24, 25],
+            dtype=str,
+        )
+        meta = (
+            meta0.copy()
+            .assign(
+                siteid=meta0["State Code"] + meta0["County Code"] + meta0["Site Number"],
+                utcoffset=meta0["GMT Offset"].astype(int),
             )
-            meta = (
-                meta0.copy()
-                .assign(
-                    siteid=meta0["State Code"] + meta0["County Code"] + meta0["Site Number"],
-                    utcoffset=meta0["GMT Offset"].astype(int),
-                )
-                .drop(
-                    columns=["Site Number", "GMT Offset"],
-                )
-                .rename(
-                    columns={
-                        "State Code": "state_code",
-                        "County Code": "county_code",
-                        "City Name": "city_name",
-                        "CBSA Name": "cbsa_name",
-                    }
-                )
+            .drop(
+                columns=["Site Number", "GMT Offset"],
             )
-            meta.loc[meta["city_name"] == "Not in a City", "city_name"] = "Not in a city"  # normalize
-
-            counties0 = pd.read_csv(
-                "https://aqs.epa.gov/aqsweb/documents/codetables/states_and_counties.csv",
-                encoding="ISO-8859-1",
-                dtype=str,
+            .rename(
+                columns={
+                    "State Code": "state_code",
+                    "County Code": "county_code",
+                    "City Name": "city_name",
+                    "CBSA Name": "cbsa_name",
+                }
             )
-            counties = (
-                counties0.copy()
-                .rename(
-                    columns={
-                        "State Code": "state_code",
-                        "State Name": "state_name",
-                        "State Abbreviation": "state_abbr",
-                        "County Code": "county_code",
-                        "County Name": "county_name",
-                        "EPA Region": "epa_region",  # note without R prefix
-                    }
-                )
+        )
+        meta.loc[meta["city_name"] == "Not in a City", "city_name"] = "Not in a city"  # normalize
+
+        counties0 = pd.read_csv(
+            "https://aqs.epa.gov/aqsweb/documents/codetables/states_and_counties.csv",
+            encoding="ISO-8859-1",
+            dtype=str,
+        )
+        counties = (
+            counties0.copy()
+            .rename(
+                columns={
+                    "State Code": "state_code",
+                    "State Name": "state_name",
+                    "State Abbreviation": "state_abbr",
+                    "County Code": "county_code",
+                    "County Name": "county_name",
+                    "EPA Region": "epa_region",  # note without R prefix
+                }
             )
-            counties["epa_region"] = "R" + counties["epa_region"]
+        )
+        counties["epa_region"] = "R" + counties["epa_region"]
 
-            meta = meta.merge(counties, on=["state_code", "county_code"], how="left")
+        meta = meta.merge(counties, on=["state_code", "county_code"], how="left")
 
     with _timer("Forming xarray Dataset"):
         # Select requested time period (older monetio doesn't do this)
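
For context, here is a minimal standalone sketch of the site-metadata step that the hunk above now runs unconditionally (previously it was guarded by `if not daily:`). The helper name `build_aqs_site_meta` is hypothetical and not part of melodies_monet; the body condenses the added (`+`) lines of the hunk into one function.

```python
import pandas as pd


def build_aqs_site_meta() -> pd.DataFrame:
    # AQS site listing; the GMT offset is kept so local time can be computed later
    meta0 = pd.read_csv(
        "https://aqs.epa.gov/aqsweb/airdata/aqs_sites.zip",
        encoding="ISO-8859-1",
        usecols=[0, 1, 2, 17, 24, 25],
        dtype=str,
    )
    meta = (
        meta0.copy()
        .assign(
            siteid=meta0["State Code"] + meta0["County Code"] + meta0["Site Number"],
            utcoffset=meta0["GMT Offset"].astype(int),
        )
        .drop(columns=["Site Number", "GMT Offset"])
        .rename(
            columns={
                "State Code": "state_code",
                "County Code": "county_code",
                "City Name": "city_name",
                "CBSA Name": "cbsa_name",
            }
        )
    )
    # Normalize the two spellings used in the source file
    meta.loc[meta["city_name"] == "Not in a City", "city_name"] = "Not in a city"

    # State/county code table, used to attach names and EPA region
    counties = pd.read_csv(
        "https://aqs.epa.gov/aqsweb/documents/codetables/states_and_counties.csv",
        encoding="ISO-8859-1",
        dtype=str,
    ).rename(
        columns={
            "State Code": "state_code",
            "State Name": "state_name",
            "State Abbreviation": "state_abbr",
            "County Code": "county_code",
            "County Name": "county_name",
            "EPA Region": "epa_region",  # source file has no "R" prefix
        }
    )
    counties["epa_region"] = "R" + counties["epa_region"]

    return meta.merge(counties, on=["state_code", "county_code"], how="left")
```

The resulting frame carries `siteid`, `utcoffset`, and `epa_region` (with the "R" prefix added), which downstream code can use for local-time computation and region labeling regardless of whether the `--daily` option was requested.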