diff --git a/.gitignore b/.gitignore
index 5a8e42504..cf280575d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -52,3 +52,4 @@ nosetests.xml
 
 # Translations
 *.mo
+docs/sg_execution_times.rst
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 072a992e6..6c7ff13b8 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -83,7 +83,7 @@ Add tests for your change(s).
 Make the tests pass:
 
     py.test
 
-Commit the changes you made. Chris Beams has written a [guide](https://chris.beams.io/posts/git-commit/) on how to write good commit messages.
+Commit the changes you made. Chris Beams has written a [guide](https://cbea.ms/git-commit/) on how to write good commit messages.
 
 Push to your fork and [submit a pull request][pr].
 
@@ -111,8 +111,8 @@ Some things that will increase the chance that your pull request is accepted:
 * Follow [PEP8][pep8] for style. (The `flake8` utility can help with this.)
 * Write a [good commit message][commit].
 
-Pull requests will automatically have tests run by Travis. This includes
-running both the unit tests as well as the `flake8` code linter.
+Pull requests will automatically have tests run by GitHub Actions. This includes
+running both the unit tests as well as the `flake8` and `ruff` code linters.
 
 [pep8]: https://pep8.org
 [commit]: https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html
diff --git a/README.rst b/README.rst
index 733a57920..0680a3e35 100644
--- a/README.rst
+++ b/README.rst
@@ -5,7 +5,7 @@ Siphon
 
 |Docs| |PyPI| |Conda|
 
-|Travis| |AppVeyor| |CodeCov|
+|CodeCov|
 
 |Codacy|
 
diff --git a/TODO.md b/TODO.md
deleted file mode 100644
index 8ca85ec62..000000000
--- a/TODO.md
+++ /dev/null
@@ -1,16 +0,0 @@
-Things to get done before next release:
-- [ ] TDSCatalog and co. design review
-- [x] Refactor CDMR support
-  - [x] CDMR web-api support should be stand-alone
-  - [x] NCStream parsing -- as good as justified without need for more
-- [ ] Complete implementation of spec
-  - [x] unsigned handling
-  - [x] compression
-  - [ ] structure
-  - [ ] seq
-  - [x] opaque ?
-  - [x] enums
-- [x] Profile and optimize
-- [x] Benchmark against opendap and local netcdf
-- [x] Enable Travis
-- [ ] Comment and document code (once API is more stable)
diff --git a/docs/conf.py b/docs/conf.py
index 4c119396f..fbea4c6ba 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -58,8 +58,15 @@
     'numpy': ('https://numpy.org/doc/stable/', None),
     'matplotlib': ('https://matplotlib.org/stable/', None),
     'requests': ('https://requests.kennethreitz.org/en/latest/', None),
+    'pandas': ('https://pandas.pydata.org/docs/', None),
 }
 
+nitpicky = True
+nitpick_ignore = [
+    ('py:class', 'optional'), ('py:class', 'file-like object'), ('py:class', 'iterator')
+]
+nitpick_ignore_regex = [('py:class', r'.*[cC]allable'),]
+
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
 
@@ -304,3 +311,10 @@
 
 # If true, do not generate a @detailmenu in the "Top" node's menu.
 #texinfo_no_detailmenu = False
+
+# Dictionary of URL redirects allowed
+linkcheck_allowed_redirects = {
+    r'https://doi.org/.*': r'https://.*',
+    r'https://gitter.im/Unidata/siphon': r'https://app.gitter.im/.*siphon.*',
+    r'https://codecov.io/github/Unidata/siphon': r'https://app.codecov.io/github/Unidata/siphon',
+}
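With ``nitpicky = True``, Sphinx warns about every cross-reference it cannot resolve, which is what motivates the docstring type cleanups later in this patch. Each ``nitpick_ignore`` entry pairs a reference role with the exact target text to exempt; a rough illustration of how the entries above are read (annotation only, not additional configuration):

```python
nitpick_ignore = [
    ('py:class', 'optional'),          # numpydoc's trailing ', optional' qualifier
    ('py:class', 'file-like object'),  # descriptive phrase, not an importable name
]
# nitpick_ignore_regex works the same way, except both halves are regular
# expressions, so one entry covers both 'callable' and 'Callable'.
```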
diff --git a/docs/developerguide.rst b/docs/developerguide.rst
index bfbd410ea..cf036f34a 100644
--- a/docs/developerguide.rst
+++ b/docs/developerguide.rst
@@ -83,7 +83,7 @@ Code Style
 ----------
 
 Siphon uses the Python code style outlined in `PEP8
-<https://www.python.org/dev/peps/pep-0008/>`_. For better or worse, this is what the majority
+<https://peps.python.org/pep-0008/>`_. For better or worse, this is what the majority
 of the Python world uses. The one deviation is that line length limit is
 95 characters. 80 is a good target, but some times longer lines are needed.
 
@@ -107,7 +107,7 @@ generated from docstrings, written using the
 There are also examples in the ``examples/`` directory.
 
 The documentation is hosted on `GitHub Pages <https://unidata.github.io/siphon/>`_. The docs are
-built automatically from ``main`` with every build on Travis-CI; every merged PR will
+built automatically from ``main`` with every build on GitHub Actions; every merged PR will
 have the built docs upload to GitHub Pages. As part of the build, the documentation is also
 checked with ``doc8``. To see what the docs will look like, you also need to install the
 ``sphinx-rtd-theme`` package.
 
@@ -116,12 +116,19 @@ checked with ``doc8``. To see what the docs will look like, you also need to ins
 Other Tools
 -----------
 
-Continuous integration is performed by `Travis CI <https://travis-ci.org/Unidata/siphon>`_.
-This service runs the unit tests on all support versions, as well as runs against the minimum
-package versions. ``flake8`` is also run against the code to check formatting. Travis is also
-used to build the documentation and to run the examples to ensure they stay working.
-
-Test coverage is monitored by `Codecov.io <https://codecov.io/github/Unidata/siphon>`_.
+Continuous integration is performed by
+`GitHub Actions <https://github.com/Unidata/siphon/actions>`_.
+This integration runs the unit tests on Linux for all supported versions of Python, as well
+as runs against the minimum package versions, using PyPI packages. This also runs against
+a (non-exhaustive) matrix of python versions on macOS and Windows. In addition to these tests,
+GitHub actions also builds the documentation and runs the examples across multiple platforms
+and Python versions, as well as checks for any broken web links. ``flake8`` (along with a
+variety of plugins found in ``ci/linting.txt``) and ``ruff`` are also run against the code to
+check formatting using another job on GitHub Actions. As part of this linting job, the docs
+are also checked using the ``doc8`` tool, and spelling is checked using ``codespell``.
+Configurations for these are in a variety of files in ``.github/workflows``.
+
+Test coverage is monitored by `codecov.io <https://codecov.io/github/Unidata/siphon>`_.
 
 ---------
 Releasing
diff --git a/examples/Radar_Server_Level_3.py b/examples/Radar_Server_Level_3.py
index 0a2ea30bb..aeddf63e6 100644
--- a/examples/Radar_Server_Level_3.py
+++ b/examples/Radar_Server_Level_3.py
@@ -8,7 +8,7 @@
 Use Siphon to get NEXRAD Level 3 data from a TDS.
 """
 
-from datetime import datetime
+from datetime import datetime, timezone
 
 import matplotlib.pyplot as plt
 import numpy as np
@@ -38,7 +38,7 @@
 # N0B, which is reflectivity data for the lowest tilt. We see that when the query
 # is represented as a string, it shows the encoded URL.
 query = rs.query()
-query.stations('CYS').time(datetime.utcnow()).variables('N0B')
+query.stations('CYS').time(datetime.now(timezone.utc)).variables('N0B')
 
 ###########################################
 # We can use the RadarServer instance to check our query, to make
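This and the NCSS examples below swap naive ``datetime.utcnow()`` for timezone-aware ``datetime.now(timezone.utc)``; ``utcnow()`` is deprecated as of Python 3.12 because its naive result is easy to misinterpret. A minimal sketch of the difference:

```python
from datetime import datetime, timezone

naive = datetime.utcnow()           # no tzinfo attached; deprecated in Python 3.12
aware = datetime.now(timezone.utc)  # same UTC wall-clock time, but carries tzinfo

assert naive.tzinfo is None
assert aware.tzinfo is timezone.utc
```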
""" -from datetime import datetime +from datetime import datetime, timezone import cartopy.crs as ccrs import cartopy.feature as cfeature @@ -49,13 +49,13 @@ # will return all surface temperatures for points in our bounding box for a single time, # nearest to that requested. Note the string representation of the query is a properly encoded # query string. -query.lonlat_box(north=43, south=35, east=-100, west=-111).time(datetime.utcnow()) +query.lonlat_box(north=43, south=35, east=-100, west=-111).time(datetime.now(timezone.utc)) query.accept('netcdf4') query.variables('Temperature_surface') ########################################### # We now request data from the server using this query. The `NCSS` class handles parsing -# this NetCDF data (using the `netCDF4` module). If we print out the variable names, we see +# this NetCDF data (using the ``netCDF4`` module). If we print out the variable names, we see # our requested variable, as well as the coordinate variables (needed to properly reference # the data). data = ncss.get_data(query) diff --git a/examples/ncss/NCSS_Example.py b/examples/ncss/NCSS_Example.py index 39b9b69c4..16af1d594 100644 --- a/examples/ncss/NCSS_Example.py +++ b/examples/ncss/NCSS_Example.py @@ -8,7 +8,7 @@ Use Siphon to query the NetCDF Subset Service (NCSS). """ -from datetime import datetime +from datetime import datetime, timezone import matplotlib.pyplot as plt @@ -38,13 +38,13 @@ # 'Temperature_isobaric' and 'Relative_humidity_isobaric'. This request will return all # vertical levels for a single point and single time. Note the string representation of # the query is a properly encoded query string. -query.lonlat_point(-105, 40).time(datetime.utcnow()) +query.lonlat_point(-105, 40).time(datetime.now(timezone.utc)) query.accept('netcdf4') query.variables('Temperature_isobaric', 'Relative_humidity_isobaric') ########################################### # We now request data from the server using this query. The `NCSS` class handles parsing -# this NetCDF data (using the `netCDF4` module). If we print out the variable names, +# this NetCDF data (using the ``netCDF4`` module). If we print out the variable names, # we see our requested variables, as well as a few others (more metadata information) data = ncss.get_data(query) list(data.variables) diff --git a/examples/ncss/NCSS_Timeseries_Examples.py b/examples/ncss/NCSS_Timeseries_Examples.py index 15118f94f..820d57061 100644 --- a/examples/ncss/NCSS_Timeseries_Examples.py +++ b/examples/ncss/NCSS_Timeseries_Examples.py @@ -8,7 +8,7 @@ Use Siphon to query the NetCDF Subset Service for a timeseries. """ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import matplotlib.pyplot as plt from netCDF4 import num2date @@ -39,13 +39,13 @@ # 'Temperature_isobaric', at the vertical level of 100000 Pa (approximately surface). # This request will return all times in the range for a single point. Note the string # representation of the query is a properly encoded query string. -now = datetime.utcnow() +now = datetime.now(timezone.utc) query.lonlat_point(-105, 40).vertical_level(100000).time_range(now, now + timedelta(days=7)) query.variables('Temperature_isobaric').accept('netcdf') ########################################### # We now request data from the server using this query. The `NCSS` class handles parsing -# this NetCDF data (using the `netCDF4` module). If we print out the variable names, we +# this NetCDF data (using the ``netCDF4`` module). 
diff --git a/pyproject.toml b/pyproject.toml
index f10f484aa..ef59c5a06 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,7 +52,7 @@ test = [
     "siphon[extras]"
 ]
 extras = [
-    "xarray>=2022.3.0"
+    "xarray>=2022.6.0"
 ]
 
 [project.urls]
@@ -87,6 +87,11 @@ combine_star = true
 [tool.pytest.ini_options]
 norecursedirs = "build docs .idea"
 doctest_optionflags = "NORMALIZE_WHITESPACE"
+xfail_strict = true
+filterwarnings = [
+    "error",
+    "ignore:numpy.ndarray size changed:RuntimeWarning",
+]
 
 [tool.ruff]
 line-length = 95
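The new ``filterwarnings`` settings make the suite strict about warnings: the ``"error"`` entry promotes every warning raised during a test to a failure, and later entries carve out known-benign exceptions. Roughly the stdlib equivalent of what pytest applies, as a sketch (``run_tests`` is a hypothetical stand-in for the code under test):

```python
import warnings

with warnings.catch_warnings():
    # Mirror of the pyproject.toml list: escalate all warnings to errors...
    warnings.simplefilter('error')
    # ...except the binary-compatibility notice from compiled numpy extensions.
    warnings.filterwarnings('ignore', 'numpy.ndarray size changed', RuntimeWarning)
    run_tests()
```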
""" def __init__(self): - """Construct an empty :class:`DataQuery`.""" + """Construct an empty class representing a query for data.""" self.var = set() self.time_query = OrderedDict() self.spatial_query = OrderedDict() @@ -163,7 +163,7 @@ def variables(self, *var_names): Parameters ---------- - var_names : one or more strings + var_names : str One or more names of variables to request. Use 'all' to request all. Returns @@ -183,7 +183,7 @@ def add_query_parameter(self, **kwargs): Parameters ---------- - kwargs : one or more strings passed as keyword arguments + kwargs Names and values of parameters to add to the query Returns @@ -471,7 +471,7 @@ def get(self, path, params=None): Raises ------ - HTTPError + `~requests.HTTPError` If the server returns anything other than a 200 (OK) code See Also @@ -506,7 +506,7 @@ def validate_query(self, query): Parameters ---------- - query : DataQuery (or subclass) + query : DataQuery Returns ------- diff --git a/src/siphon/ncss.py b/src/siphon/ncss.py index abc762757..37f7596d8 100644 --- a/src/siphon/ncss.py +++ b/src/siphon/ncss.py @@ -49,7 +49,7 @@ class NCSS(HTTPEndPoint): """ # Need staticmethod to keep this from becoming a bound method, where self - # is passed implicitly + # is passed implicitly. Needed to avoid warning about duplicated docstring. unit_handler = staticmethod(lambda *a, **kw: default_unit_handler(*a, **kw)) def _get_metadata(self): diff --git a/src/siphon/radarserver.py b/src/siphon/radarserver.py index 45b9ce00b..70af5eab4 100644 --- a/src/siphon/radarserver.py +++ b/src/siphon/radarserver.py @@ -30,7 +30,7 @@ def stations(self, *stns): Parameters ---------- - stns : one or more strings + stns : str One or more names of variables to request Returns @@ -192,7 +192,7 @@ def get_radarserver_datasets(server): Parameters ---------- - server : string + server : str The base URL to the THREDDS server Returns diff --git a/src/siphon/simplewebservice/acis.py b/src/siphon/simplewebservice/acis.py index 5e0f54a0f..1e26572af 100644 --- a/src/siphon/simplewebservice/acis.py +++ b/src/siphon/simplewebservice/acis.py @@ -37,11 +37,12 @@ def acis_request(method, params): Returns ------- - A dictionary of data based on the JSON parameters + dict[str, Any] + A dictionary of data based on the JSON parameters Raises ------ - :class: `ACIS_API_Exception` + `AcisApiException` When the API is unable to establish a connection or returns unparsable data. diff --git a/src/siphon/simplewebservice/iastate.py b/src/siphon/simplewebservice/iastate.py index 92d908fbe..12a5321a3 100644 --- a/src/siphon/simplewebservice/iastate.py +++ b/src/siphon/simplewebservice/iastate.py @@ -14,8 +14,6 @@ from .._tools import get_wind_components from ..http_util import HTTPEndPoint -warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning) - class IAStateUpperAir(HTTPEndPoint): """Download and parse data from the Iowa State's upper air archive.""" @@ -30,7 +28,7 @@ def request_data(cls, time, site_id, interp_nans=False, **kwargs): Parameters ---------- - time : datetime + time : datetime.datetime The date and time of the desired observation. site_id : str @@ -63,7 +61,7 @@ def request_all_data(cls, time, pressure=None, **kwargs): Parameters ---------- - time : datetime + time : datetime.datetime The date and time of the desired observation. 
diff --git a/src/siphon/ncss.py b/src/siphon/ncss.py
index abc762757..37f7596d8 100644
--- a/src/siphon/ncss.py
+++ b/src/siphon/ncss.py
@@ -49,7 +49,7 @@ class NCSS(HTTPEndPoint):
     """
 
     # Need staticmethod to keep this from becoming a bound method, where self
-    # is passed implicitly
+    # is passed implicitly. Needed to avoid warning about duplicated docstring.
     unit_handler = staticmethod(lambda *a, **kw: default_unit_handler(*a, **kw))
 
     def _get_metadata(self):
diff --git a/src/siphon/radarserver.py b/src/siphon/radarserver.py
index 45b9ce00b..70af5eab4 100644
--- a/src/siphon/radarserver.py
+++ b/src/siphon/radarserver.py
@@ -30,7 +30,7 @@ def stations(self, *stns):
 
         Parameters
         ----------
-        stns : one or more strings
+        stns : str
             One or more names of variables to request
 
         Returns
@@ -192,7 +192,7 @@ def get_radarserver_datasets(server):
 
     Parameters
     ----------
-    server : string
+    server : str
         The base URL to the THREDDS server
 
     Returns
diff --git a/src/siphon/simplewebservice/acis.py b/src/siphon/simplewebservice/acis.py
index 5e0f54a0f..1e26572af 100644
--- a/src/siphon/simplewebservice/acis.py
+++ b/src/siphon/simplewebservice/acis.py
@@ -37,11 +37,12 @@ def acis_request(method, params):
 
     Returns
     -------
-    A dictionary of data based on the JSON parameters
+    dict[str, Any]
+        A dictionary of data based on the JSON parameters
 
     Raises
    ------
-    :class: `ACIS_API_Exception`
+    `AcisApiException`
         When the API is unable to establish a connection or returns
         unparsable data.
 
diff --git a/src/siphon/simplewebservice/iastate.py b/src/siphon/simplewebservice/iastate.py
index 92d908fbe..12a5321a3 100644
--- a/src/siphon/simplewebservice/iastate.py
+++ b/src/siphon/simplewebservice/iastate.py
@@ -14,8 +14,6 @@
 from .._tools import get_wind_components
 from ..http_util import HTTPEndPoint
 
-warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning)
-
 
 class IAStateUpperAir(HTTPEndPoint):
     """Download and parse data from the Iowa State's upper air archive."""
@@ -30,7 +28,7 @@ def request_data(cls, time, site_id, interp_nans=False, **kwargs):
 
         Parameters
         ----------
-        time : datetime
+        time : datetime.datetime
             The date and time of the desired observation.
 
         site_id : str
@@ -63,7 +61,7 @@ def request_all_data(cls, time, pressure=None, **kwargs):
 
         Parameters
         ----------
-        time : datetime
+        time : datetime.datetime
             The date and time of the desired observation.
 
         pressure : float, optional
@@ -87,7 +85,7 @@ def _get_data(self, time, site_id, pressure=None):
 
         Parameters
         ----------
-        time : datetime
+        time : datetime.datetime
             Date and time for which data should be downloaded
         site_id : str
             Site id for which data should be downloaded
@@ -134,16 +132,20 @@ def _get_data(self, time, site_id, pressure=None):
                               'u_wind', 'v_wind'), how='all').reset_index(drop=True)
 
         # Add unit dictionary
-        df.units = {'pressure': 'hPa',
-                    'height': 'meter',
-                    'temperature': 'degC',
-                    'dewpoint': 'degC',
-                    'direction': 'degrees',
-                    'speed': 'knot',
-                    'u_wind': 'knot',
-                    'v_wind': 'knot',
-                    'station': None,
-                    'time': None}
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = {'pressure': 'hPa',
+                        'height': 'meter',
+                        'temperature': 'degC',
+                        'dewpoint': 'degC',
+                        'direction': 'degrees',
+                        'speed': 'knot',
+                        'u_wind': 'knot',
+                        'v_wind': 'knot',
+                        'station': None,
+                        'time': None}
+
         return df
 
     def _get_data_raw(self, time, site_id, pressure=None):
@@ -151,7 +153,7 @@ def _get_data_raw(self, time, site_id, pressure=None):
 
         Parameters
         ----------
-        time : datetime
+        time : datetime.datetime
             Date and time for which data should be downloaded
         site_id : str
             Site id for which data should be downloaded
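The module-level filter these modules used to install at import time silenced the warning for every downstream user of ``warnings``; the replacement scopes the suppression to the single attribute assignment. An illustration of the underlying behavior:

```python
import warnings

import pandas as pd

df = pd.DataFrame({'pressure': [1000.0, 925.0, 850.0]})

# Assigning an attribute that is not a column triggers pandas' UserWarning
# "Pandas doesn't allow columns to be created via a new attribute name".
# catch_warnings() restores the global filter state on exit, so the 'ignore'
# entry lives only for the duration of the block.
with warnings.catch_warnings():
    warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
                            UserWarning)
    df.units = {'pressure': 'hPa'}  # stored as metadata, not as a column
```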
diff --git a/src/siphon/simplewebservice/igra2.py b/src/siphon/simplewebservice/igra2.py
index 66fe07c03..e3dec7154 100644
--- a/src/siphon/simplewebservice/igra2.py
+++ b/src/siphon/simplewebservice/igra2.py
@@ -16,8 +16,6 @@
 from .._tools import get_wind_components
 from ..http_util import HTTPEndPoint, HTTPError
 
-warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning)
-
 
 class IGRAUpperAir(HTTPEndPoint):
     """Download and parse data from NCEI's Integrated Radiosonde Archive version 2."""
@@ -40,7 +38,7 @@ def request_data(cls, time, site_id, derived=False):
         site_id : str
             11-character IGRA2 station identifier.
 
-        time : datetime
+        time : datetime.datetime
             The date and time of the desired observation. If list of two times is given,
             dataframes for all dates within the two dates will be returned.
 
@@ -362,25 +360,25 @@ def _clean_body_df(self, df):
             df = df.dropna(subset=('temperature', 'reported_relative_humidity',
                                    'u_wind', 'v_wind'), how='all').reset_index(drop=True)
 
-            df.units = {'pressure': 'hPa',
-                        'reported_height': 'meter',
-                        'calculated_height': 'meter',
-                        'temperature': 'Kelvin',
-                        'temperature_gradient': 'Kelvin / kilometer',
-                        'potential_temperature': 'Kelvin',
-                        'potential_temperature_gradient': 'Kelvin / kilometer',
-                        'virtual_temperature': 'Kelvin',
-                        'virtual_potential_temperature': 'Kelvin',
-                        'vapor_pressure': 'Pascal',
-                        'saturation_vapor_pressure': 'Pascal',
-                        'reported_relative_humidity': 'percent',
-                        'calculated_relative_humidity': 'percent',
-                        'relative_humidity_gradient': 'percent / kilometer',
-                        'u_wind': 'meter / second',
-                        'u_wind_gradient': '(meter / second) / kilometer)',
-                        'v_wind': 'meter / second',
-                        'v_wind_gradient': '(meter / second) / kilometer)',
-                        'refractive_index': 'unitless'}
+            units = {'pressure': 'hPa',
+                     'reported_height': 'meter',
+                     'calculated_height': 'meter',
+                     'temperature': 'Kelvin',
+                     'temperature_gradient': 'Kelvin / kilometer',
+                     'potential_temperature': 'Kelvin',
+                     'potential_temperature_gradient': 'Kelvin / kilometer',
+                     'virtual_temperature': 'Kelvin',
+                     'virtual_potential_temperature': 'Kelvin',
+                     'vapor_pressure': 'Pascal',
+                     'saturation_vapor_pressure': 'Pascal',
+                     'reported_relative_humidity': 'percent',
+                     'calculated_relative_humidity': 'percent',
+                     'relative_humidity_gradient': 'percent / kilometer',
+                     'u_wind': 'meter / second',
+                     'u_wind_gradient': '(meter / second) / kilometer)',
+                     'v_wind': 'meter / second',
+                     'v_wind_gradient': '(meter / second) / kilometer)',
+                     'refractive_index': 'unitless'}
 
         else:
             df['u_wind'], df['v_wind'] = get_wind_components(df['speed'],
@@ -396,46 +394,56 @@ def _clean_body_df(self, df):
 
             df.drop('dewpoint_depression', axis=1, inplace=True)
 
-            df.units = {'etime': 'second',
-                        'pressure': 'hPa',
-                        'height': 'meter',
-                        'temperature': 'degC',
-                        'dewpoint': 'degC',
-                        'direction': 'degrees',
-                        'speed': 'meter / second',
-                        'u_wind': 'meter / second',
-                        'v_wind': 'meter / second'}
+            units = {'etime': 'second',
+                     'pressure': 'hPa',
+                     'height': 'meter',
+                     'temperature': 'degC',
+                     'dewpoint': 'degC',
+                     'direction': 'degrees',
+                     'speed': 'meter / second',
+                     'u_wind': 'meter / second',
+                     'v_wind': 'meter / second'}
+
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = units
 
         return df
 
     def _clean_header_df(self, df):
         """Format the header dataframe and add units."""
         if self.suffix == '-drvd.txt':
-            df.units = {'release_time': 'second',
-                        'precipitable_water': 'millimeter',
-                        'inv_pressure': 'hPa',
-                        'inv_height': 'meter',
-                        'inv_strength': 'Kelvin',
-                        'mixed_layer_pressure': 'hPa',
-                        'mixed_layer_height': 'meter',
-                        'freezing_point_pressure': 'hPa',
-                        'freezing_point_height': 'meter',
-                        'lcl_pressure': 'hPa',
-                        'lcl_height': 'meter',
-                        'lfc_pressure': 'hPa',
-                        'lfc_height': 'meter',
-                        'lnb_pressure': 'hPa',
-                        'lnb_height': 'meter',
-                        'lifted_index': 'degC',
-                        'showalter_index': 'degC',
-                        'k_index': 'degC',
-                        'total_totals_index': 'degC',
-                        'cape': 'Joule / kilogram',
-                        'convective_inhibition': 'Joule / kilogram'}
+            units = {'release_time': 'second',
+                     'precipitable_water': 'millimeter',
+                     'inv_pressure': 'hPa',
+                     'inv_height': 'meter',
+                     'inv_strength': 'Kelvin',
+                     'mixed_layer_pressure': 'hPa',
+                     'mixed_layer_height': 'meter',
+                     'freezing_point_pressure': 'hPa',
+                     'freezing_point_height': 'meter',
+                     'lcl_pressure': 'hPa',
+                     'lcl_height': 'meter',
+                     'lfc_pressure': 'hPa',
+                     'lfc_height': 'meter',
+                     'lnb_pressure': 'hPa',
+                     'lnb_height': 'meter',
+                     'lifted_index': 'degC',
+                     'showalter_index': 'degC',
+                     'k_index': 'degC',
+                     'total_totals_index': 'degC',
+                     'cape': 'Joule / kilogram',
+                     'convective_inhibition': 'Joule / kilogram'}
         else:
-            df.units = {'release_time': 'second',
+            units = {'release_time': 'second',
                      'latitude': 'degrees',
                      'longitude': 'degrees'}
 
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = units
+
         return df
diff --git a/src/siphon/simplewebservice/ndbc.py b/src/siphon/simplewebservice/ndbc.py
index fcf6331dd..24de935b8 100644
--- a/src/siphon/simplewebservice/ndbc.py
+++ b/src/siphon/simplewebservice/ndbc.py
@@ -12,8 +12,6 @@
 
 from ..http_util import HTTPEndPoint
 
-warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning)
-
 
 class NDBC(HTTPEndPoint):
     """Download and parse data from the National Data Buoy Center."""
@@ -45,7 +43,8 @@ def realtime_observations(cls, buoy, data_type='txt'):
 
         Returns
         -------
-        Raw data string
+        `pandas.DataFrame`
+            Parsed data
 
         """
         endpoint = cls()
@@ -107,7 +106,11 @@ def _parse_met(content):
                            names=col_names, sep=r'\s+')
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -147,7 +150,11 @@ def _parse_drift(content):
         df['minute'] = df['hour_minute'] - df['hour'] * 100
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour_minute', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -188,7 +195,11 @@ def _parse_cwind(content):
                                         utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute',
                               'hours', 'minutes'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -227,7 +238,11 @@ def _parse_spec(content):
                            names=col_names, sep=r'\s+')
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -266,7 +281,11 @@ def _parse_ocean(content):
                            names=col_names, sep=r'\s+')
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -296,7 +315,11 @@ def _parse_srad(content):
                            names=col_names, sep=r'\s+')
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -334,7 +357,11 @@ def _parse_dart(content):
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute',
                                         'second']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute', 'second'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -362,7 +389,11 @@ def _parse_rain(content):
 
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -410,7 +441,11 @@ def _parse_supl(content):
         df['hourly_high_wind_time'] = pd.to_datetime(df[['year', 'month', 'day',
                                                          'hours', 'minutes']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute', 'hours', 'minutes'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @classmethod
@@ -455,7 +490,11 @@ def latest_observations(cls):
                            names=col_names, sep=r'\s+')
         df['time'] = pd.to_datetime(df[['year', 'month', 'day', 'hour', 'minute']], utc=True)
         df = df.drop(columns=['year', 'month', 'day', 'hour', 'minute'])
-        df.units = col_units
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = col_units
+
         return df
 
     @staticmethod
@@ -486,6 +525,7 @@ def buoy_data_types(cls, buoy):
 
         Returns
         -------
+        dict[str, str]
             dict of valid file extensions and their descriptions
 
         """
@@ -545,7 +585,8 @@ def raw_buoy_data(cls, buoy, data_type='txt'):
 
         Returns
         -------
-        Raw data string
+        str
+            Raw data string
 
         """
         endpoint = cls()
diff --git a/src/siphon/simplewebservice/wyoming.py b/src/siphon/simplewebservice/wyoming.py
index d8f0e8f89..b212655dc 100644
--- a/src/siphon/simplewebservice/wyoming.py
+++ b/src/siphon/simplewebservice/wyoming.py
@@ -14,8 +14,6 @@
 from .._tools import get_wind_components
 from ..http_util import HTTPEndPoint
 
-warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning)
-
 
 class WyomingUpperAir(HTTPEndPoint):
     """Download and parse data from the University of Wyoming's upper air archive."""
@@ -30,7 +28,7 @@ def request_data(cls, time, site_id, **kwargs):
 
         Parameters
         ----------
-        time : datetime
+        time : datetime.datetime
             The date and time of the desired observation.
 
         site_id : str
@@ -42,7 +40,8 @@ def request_data(cls, time, site_id, **kwargs):
 
         Returns
         -------
-        :class:`pandas.DataFrame` containing the data
+        `pandas.DataFrame`
+            Parsed data
 
         """
         endpoint = cls()
@@ -54,7 +53,7 @@ def _get_data(self, time, site_id):
 
         Parameters
         ----------
-        time : datetime
+        time : datetime.datetime
             The date and time of the desired observation.
 
         site_id : str
@@ -63,7 +62,7 @@ def _get_data(self, time, site_id):
 
         Returns
         -------
-        :class:`pandas.DataFrame` containing the data
+        `pandas.DataFrame`
 
         """
         raw_data = self._get_data_raw(time, site_id)
@@ -106,21 +105,25 @@ def _get_data(self, time, site_id):
         df['pw'] = pw
 
         # Add unit dictionary
-        df.units = {'pressure': 'hPa',
-                    'height': 'meter',
-                    'temperature': 'degC',
-                    'dewpoint': 'degC',
-                    'direction': 'degrees',
-                    'speed': 'knot',
-                    'u_wind': 'knot',
-                    'v_wind': 'knot',
-                    'station': None,
-                    'station_number': None,
-                    'time': None,
-                    'latitude': 'degrees',
-                    'longitude': 'degrees',
-                    'elevation': 'meter',
-                    'pw': 'millimeter'}
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created",
+                                    UserWarning)
+            df.units = {'pressure': 'hPa',
+                        'height': 'meter',
+                        'temperature': 'degC',
+                        'dewpoint': 'degC',
+                        'direction': 'degrees',
+                        'speed': 'knot',
+                        'u_wind': 'knot',
+                        'v_wind': 'knot',
+                        'station': None,
+                        'station_number': None,
+                        'time': None,
+                        'latitude': 'degrees',
+                        'longitude': 'degrees',
+                        'elevation': 'meter',
+                        'pw': 'millimeter'}
+
         return df
 
     def _get_data_raw(self, time, site_id):
@@ -128,14 +131,15 @@ def _get_data_raw(self, time, site_id):
 
         Parameters
        ----------
-        time : datetime
+        time : datetime.datetime
             Date and time for which data should be downloaded
         site_id : str
             Site id for which data should be downloaded
 
         Returns
         -------
-        text of the server response
+        str
+            text of the server response
 
         """
         path = ('?region=naconf&TYPE=TEXT%3ALIST'
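The recurring docstring edits in these modules follow from the ``nitpicky`` Sphinx setting added in ``docs/conf.py``: every type named in a numpydoc section must resolve to a real cross-reference target, so bare ``datetime`` (which has no class target) becomes ``datetime.datetime``, and prose like 'one or more strings' becomes ``str``. A sketch of the resulting convention, using a hypothetical function:

```python
def request_data(time, site_id):
    """Retrieve data for a site (illustrative docstring only).

    Parameters
    ----------
    time : datetime.datetime
        Resolves via intersphinx to the Python docs; bare ``datetime``
        would trigger a nitpicky warning.
    site_id : str
        Station identifier.

    Returns
    -------
    pandas.DataFrame
        Parsed data; resolves via the new ``pandas`` intersphinx entry.

    """
```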
diff --git a/tests/test_catalog.py b/tests/test_catalog.py
index 93eb53291..4acf4b6dd 100644
--- a/tests/test_catalog.py
+++ b/tests/test_catalog.py
@@ -70,10 +70,7 @@ def test_virtual_access():
            'CONUS_20km/noaaport/catalog.xml')
     cat = TDSCatalog(url)
     # find the 2D time coordinate "full collection" dataset
-    for dataset in list(cat.datasets.values()):
-        if 'Full Collection' in dataset.name:
-            ds = dataset
-            break
+    ds = cat.datasets['Full Collection (Reference / Forecast Time) Dataset']
     assert 'OPENDAP' in ds.access_urls
     # TwoD is a virtual dataset, so HTTPServer
     # should not be listed here
@@ -128,8 +125,8 @@ def test_html_link(recwarn):
     """Test that we fall-back when given an HTML catalog page."""
     url = ('http://thredds-test.unidata.ucar.edu/thredds/catalog/'
            'grib/NCEP/RAP/CONUS_13km/catalog.html')
-    TDSCatalog(url)
-    assert 'Changing' in str(recwarn.pop(UserWarning).message)
+    with pytest.warns(UserWarning, match='Changing'):
+        TDSCatalog(url)
 
 
 @recorder.use_cassette('follow_cat')
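The ``recwarn`` fixture pattern is replaced with ``pytest.warns``, which both asserts that a matching warning is emitted and scopes the check to the block. A minimal self-contained sketch (toy function, not the real test):

```python
import warnings

import pytest


def changing_url():  # toy stand-in for TDSCatalog's HTML fallback
    warnings.warn('Changing URL to accommodate HTML catalog page', UserWarning)


def test_changing_url_warns():
    # Fails if no UserWarning matching 'Changing' is raised inside the block.
    with pytest.warns(UserWarning, match='Changing'):
        changing_url()
```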
diff --git a/tests/test_http_util.py b/tests/test_http_util.py
index 1e325fab6..770d363cd 100644
--- a/tests/test_http_util.py
+++ b/tests/test_http_util.py
@@ -71,7 +71,7 @@ def test_data_query_repeated_vars():
 def test_data_query_time_reset():
     """Test query with multiple time-type query fields."""
-    dr = DataQuery().all_times().time(datetime.utcnow())
+    dr = DataQuery().all_times().time(datetime.now())
     query = str(dr)
 
     assert query.startswith('time='), 'Bad string: ' + query
     assert query.count('=') == 1
@@ -79,7 +79,7 @@ def test_data_query_time_reset():
 
 def test_data_query_time_reset2():
     """Test that time queries replace each other."""
-    dr = DataQuery().time(datetime.utcnow()).all_times()
+    dr = DataQuery().time(datetime.now()).all_times()
 
     assert str(dr) == 'temporal=all'
 
@@ -132,7 +132,7 @@ def test_data_query_spatial_reset2():
 
 def test_data_query_iter():
     """Test converting a query to a dictionary."""
-    dt = datetime.utcnow()
+    dt = datetime.now()
     dr = DataQuery().time(dt).lonlat_point(-1, -2)
 
     d = dict(dr)
@@ -143,7 +143,7 @@ def test_data_query_items():
     """Test the items method of query."""
-    dt = datetime.utcnow()
+    dt = datetime.now()
     dr = DataQuery().time(dt).lonlat_point(-1, -2)
 
     items = list(dr.items())
 
diff --git a/tests/test_igra2.py b/tests/test_igra2.py
index 58052ae1f..2fd0ef344 100644
--- a/tests/test_igra2.py
+++ b/tests/test_igra2.py
@@ -140,6 +140,6 @@ def test_igra2_drvd():
 def test_igra2_nonexistent():
     """Test behavior when requesting non-existent data."""
     with pytest.raises(ValueError) as err:
-        IGRAUpperAir.request_data(datetime.utcnow(), 'NOSUCHSTATION')
+        IGRAUpperAir.request_data(datetime.now(), 'NOSUCHSTATION')
 
     assert 'No data' in str(err.value)