MAINT: Bumping minimum required astropy version #2602

Merged: 3 commits, Nov 30, 2022

Changes from all commits
2 changes: 2 additions & 0 deletions CHANGES.rst
@@ -189,6 +189,8 @@ Infrastructure, Utility and Other Changes and Additions
 
 - Removed deprecated function ``utils.download_list_of_fitsfiles()``. [#2594]
 
+- Versions of astropy <4.2.1 and numpy <1.18 are no longer supported. [#2602]
+
 
 
 0.4.6 (2022-03-22)
2 changes: 1 addition & 1 deletion README.rst
@@ -43,7 +43,7 @@ Installation and Requirements
 -----------------------------
 
 Astroquery works with Python 3.7 or later.
-As an `astropy`_ affiliate, astroquery requires `astropy`_ version 4.0 or later.
+As an `astropy`_ affiliate, astroquery requires `astropy`_ version 4.2.1 or later.
 
 astroquery uses the `requests <https://requests.readthedocs.io/en/latest/>`_
 module to communicate with the internet. `BeautifulSoup
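
Reviewer note: the same floor can also be checked at runtime with the ``minversion`` helper already used in ``astroquery.utils.commons``. A minimal sketch, assuming the 4.2.1 and 1.18 minimums from the changelog entry (illustrative only, not code from this PR)::

    from astropy.utils import minversion

    # Assumed minimums, taken from the CHANGES.rst entry above.
    MIN_ASTROPY = "4.2.1"
    MIN_NUMPY = "1.18"

    if not minversion("astropy", MIN_ASTROPY):
        raise ImportError(f"astropy>={MIN_ASTROPY} is required")
    if not minversion("numpy", MIN_NUMPY):
        raise ImportError(f"numpy>={MIN_NUMPY} is required")
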
12 changes: 3 additions & 9 deletions astroquery/alma/core.py
@@ -579,11 +579,7 @@ def get_data_info(self, uids, *, expand_tarfiles=False,
                 datalink_service_def_dict[adhoc_service.ID] = adhoc_service
 
             temp = res.to_table()
-            if commons.ASTROPY_LT_4_1:
-                # very annoying
-                for col in [x for x in temp.colnames
-                            if x not in ['content_length', 'readable']]:
-                    temp[col] = temp[col].astype(str)
+
             result = temp if result is None else vstack([result, temp])
         to_delete = []
         for index, rr in enumerate(result):
@@ -1214,10 +1210,8 @@ def get_project_metadata(self, projectid, *, cache=True):
         result = self.query_tap(
             "select distinct proposal_abstract from "
             "ivoa.obscore where proposal_id='{}'".format(projectid))
-        if commons.ASTROPY_LT_4_1:
-            return [result[0]['proposal_abstract'].astype(str)]
-        else:
-            return [result[0]['proposal_abstract']]
+
+        return [result[0]['proposal_abstract']]
 
 
 Alma = AlmaClass()
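
Reviewer note: the branches being dropped here all guard against older astropy returning ``bytes`` rather than ``str`` for values read from VOTable/TAP results, hence the ``decode`` and ``astype(str)`` calls they wrap. A small sketch of the behaviour the removed cast compensated for; the table and URL below are invented for illustration::

    import numpy as np
    from astropy.table import Table

    # Hypothetical stand-in for a DataLink/TAP result table.
    temp = Table({'access_url': np.array([b'https://example.org/a.fits'])})

    # A bytes value never compares equal to a str, so matching fails.
    assert temp['access_url'][0] != 'https://example.org/a.fits'

    # The old workaround, unnecessary with astropy >= 4.2.1: cast to str.
    temp['access_url'] = temp['access_url'].astype(str)
    assert temp['access_url'][0] == 'https://example.org/a.fits'
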
13 changes: 4 additions & 9 deletions astroquery/alma/tests/test_alma_remote.py
@@ -17,7 +16,6 @@
     pass
 
 from astroquery.exceptions import CorruptDataWarning
-from astroquery.utils.commons import ASTROPY_LT_4_1
 from .. import Alma
 
 # ALMA tests involving staging take too long, leading to travis timeouts
@@ -283,14 +282,10 @@ def test_doc_example(self, tmp_path, alma):
         content_length_column_name = 'content_length'
 
         uids = np.unique(m83_data['Member ous id'])
-        if ASTROPY_LT_4_1:
-            assert b'uid://A001/X11f/X30' in uids
-            X30 = (m83_data['Member ous id'] == b'uid://A001/X11f/X30')
-            X31 = (m83_data['Member ous id'] == b'uid://A002/X3216af/X31')
-        else:
-            assert 'uid://A001/X11f/X30' in uids
-            X30 = (m83_data['Member ous id'] == 'uid://A001/X11f/X30')
-            X31 = (m83_data['Member ous id'] == 'uid://A002/X3216af/X31')
+
+        assert 'uid://A001/X11f/X30' in uids
+        X30 = (m83_data['Member ous id'] == 'uid://A001/X11f/X30')
+        X31 = (m83_data['Member ous id'] == 'uid://A002/X3216af/X31')
 
         assert X30.sum() == 4  # Jul 13, 2020
         assert X31.sum() == 4  # Jul 13, 2020
4 changes: 1 addition & 3 deletions astroquery/alma/utils.py
@@ -5,7 +5,6 @@
 
 from astropy import units as u
 from astropy.coordinates import SkyCoord
-from astroquery.utils.commons import ASTROPY_LT_4_1
 
 __all__ = ['parse_frequency_support', 'footprint_to_reg', 'approximate_primary_beam_sizes']
 
@@ -48,8 +47,7 @@ def footprint_to_reg(footprint):
     -28.694332 266.521332 -28.699778'
     Some of them have *additional* polygons
     """
-    if ASTROPY_LT_4_1:
-        footprint = footprint.decode('utf-8')
+
     if footprint[:7] != 'Polygon' and footprint[:6] != 'Circle':
         raise ValueError("Unrecognized footprint type")
 
3 changes: 1 addition & 2 deletions astroquery/cadc/core.py
@@ -457,8 +457,7 @@ def get_image_list(self, query_result, coordinates, radius):
                                     session=self.cadcdatalink._session)
         for service_def in datalink.bysemantics('#cutout'):
             access_url = service_def.access_url
-            if isinstance(access_url, bytes):  # ASTROPY_LT_4_1
-                access_url = access_url.decode('ascii')
+
             if '/sync' in access_url:
                 service_params = service_def.input_params
                 input_params = {param.name: param.value
8 changes: 4 additions & 4 deletions astroquery/cadc/tests/test_cadctap.py
@@ -206,7 +206,7 @@ def get(*args, **kwargs):
     class CapsResponse:
         def __init__(self):
             self.status_code = 200
-            self.content = b''
+            self.content = ''
 
         def raise_for_status(self):
             pass
@@ -286,7 +286,7 @@ def get(*args, **kwargs):
     class CapsResponse:
         def __init__(self):
             self.status_code = 200
-            self.content = b''
+            self.content = ''
 
         def raise_for_status(self):
             pass
@@ -308,13 +308,13 @@ def __init__(self, **param_dict):
 
     service_def1 = Mock()
    service_def1.access_url = \
-        b'https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2ops/sync'
+        'https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2ops/sync'
     service_def1.input_params = [Params(name='ID', value=uri),
                                  Params(name='RUNID', value=run_id)]
 
     service_def2 = Mock()
     service_def2.access_url = \
-        b'https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2ops/async'
+        'https://www.cadc-ccda.hia-iha.nrc-cnrc.gc.ca/caom2ops/async'
     service_def2.input_params = [Params(name='ID', value=uri),
                                  Params(name='RUNID', value=run_id)]
 
5 changes: 2 additions & 3 deletions astroquery/gaia/tests/test_gaiatap.py
@@ -30,7 +30,6 @@
 import astropy.units as u
 from astropy.coordinates.sky_coordinate import SkyCoord
 import numpy as np
-from astroquery.utils import ASTROPY_LT_4_1
 from astroquery.utils.tap.xmlparser import utils
 from astroquery.utils.tap.core import TapPlus
 from astroquery.utils.tap import taputils
@@ -50,8 +49,8 @@ def column_attrs():
         "table1_oid": np.int32
     }
     columns = {k: Column(name=k, description=k, dtype=v) for k, v in dtypes.items()}
-    if not ASTROPY_LT_4_1:
-        columns["source_id"].meta = {"_votable_string_dtype": "char"}
+
+    columns["source_id"].meta = {"_votable_string_dtype": "char"}
     return columns
 
 
7 changes: 3 additions & 4 deletions astroquery/ipac/nexsci/nasa_exoplanet_archive/core.py
@@ -115,10 +115,9 @@ def get_tap_tables():
     """Tables accessed by API are gradually migrating to TAP service. Generate current list of tables in TAP."""
     tap = pyvo.dal.tap.TAPService(baseurl=conf.url_tap)
     response = tap.search(query="select * from TAP_SCHEMA.tables", language="ADQL")
-    if not commons.ASTROPY_LT_4_1:
-        tables = [table for table in response["table_name"].data if "TAP_SCHEMA." not in table]
-    else:
-        tables = [table.decode() for table in response["table_name"].data if b"TAP_SCHEMA." not in table]
+
+    tables = [table for table in response["table_name"].data if "TAP_SCHEMA." not in table]
+
     return tables
 
 
3 changes: 1 addition & 2 deletions astroquery/simbad/tests/test_simbad.py
@@ -10,7 +10,6 @@
 
 from ... import simbad
 from astroquery.utils.mocks import MockResponse
-from ...utils import commons
 from ...query import AstroQuery
 from ...exceptions import TableParseError
 from .test_simbad_remote import multicoords
@@ -440,6 +439,6 @@ def test_regression_issue388():
         response.content = f.read()
     parsed_table = simbad.Simbad._parse_result(response,
                                                simbad.core.SimbadVOTableResult)
-    truth = b'M 1' if commons.ASTROPY_LT_4_1 else 'M 1'
+    truth = 'M 1'
     assert parsed_table['MAIN_ID'][0] == truth
     assert len(parsed_table) == 1
3 changes: 1 addition & 2 deletions astroquery/utils/__init__.py
@@ -6,7 +6,7 @@
 from .progressbar import chunk_report, chunk_read
 from .class_or_instance import class_or_instance
 from .commons import (parse_coordinates, TableList, suppress_vo_warnings,
-                      validate_email, ASTROPY_LT_4_1, ASTROPY_LT_4_3, ASTROPY_LT_5_0,
+                      validate_email, ASTROPY_LT_4_3, ASTROPY_LT_5_0,
                       ASTROPY_LT_5_1)
 from .process_asyncs import async_to_sync
 from .docstr_chompers import prepend_docstr_nosections
@@ -19,7 +19,6 @@
            'TableList',
            'suppress_vo_warnings',
            'validate_email',
-           'ASTROPY_LT_4_1',
            'ASTROPY_LT_4_3',
            'ASTROPY_LT_5_0',
            'ASTROPY_LT_5_1',
2 changes: 0 additions & 2 deletions astroquery/utils/commons.py
@@ -29,12 +29,10 @@
            'TableList',
            'suppress_vo_warnings',
            'validate_email',
-           'ASTROPY_LT_4_1',
            'ASTROPY_LT_4_3',
            'ASTROPY_LT_5_0',
            'ASTROPY_LT_5_1']
 
-ASTROPY_LT_4_1 = not minversion('astropy', '4.1')
 ASTROPY_LT_4_3 = not minversion('astropy', '4.3')
 ASTROPY_LT_5_0 = not minversion('astropy', '5.0')
 
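
Reviewer note: the flags that remain follow the pattern visible above; each is computed once from ``minversion`` and then checked wherever behaviour still differs between astropy releases. A short sketch of that pattern; the ``load_table`` function is hypothetical and shown only to illustrate a call site::

    from astropy.utils import minversion

    # Same construction as the flags kept in commons.py: True when the
    # installed astropy predates the named version.
    ASTROPY_LT_4_3 = not minversion('astropy', '4.3')
    ASTROPY_LT_5_0 = not minversion('astropy', '5.0')

    def load_table(source):
        """Hypothetical call site gated on an astropy version flag."""
        from astropy.table import Table
        table = Table.read(source)
        if ASTROPY_LT_5_0:
            # branch kept only for astropy 4.x behaviour
            pass
        return table
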
22 changes: 4 additions & 18 deletions astroquery/vo_conesearch/validator/tstquery.py
@@ -18,16 +18,12 @@
 from astropy.utils.data import get_readable_fileobj
 from astropy.utils.exceptions import AstropyUserWarning
 
-# LOCAL
-from astroquery.utils.commons import ASTROPY_LT_4_1
 
 __all__ = ['parse_cs']
 
 
 def parse_cs(ivoid, cap_index=1):
     """Return test query pars as dict for given IVO ID and capability index."""
-    if isinstance(ivoid, bytes):  # ASTROPY_LT_4_1
-        ivoid = ivoid.decode('ascii')
 
     # Production server.
     url = ("https://vao.stsci.edu/regtap/tapservice.aspx/sync?lang=adql&"
@@ -53,20 +49,10 @@ def parse_cs(ivoid, cap_index=1):
     if not urls_failed:
         try:
             xpath = t_query['detail_xpath']
-            if ASTROPY_LT_4_1:
-                ra = float(
-                    t_query[xpath == b'/capability/testQuery/ra']['detail_value'])
-                dec = float(
-                    t_query[xpath == b'/capability/testQuery/dec']['detail_value'])
-                sr = float(
-                    t_query[xpath == b'/capability/testQuery/sr']['detail_value'])
-            else:
-                ra = float(
-                    t_query[xpath == '/capability/testQuery/ra']['detail_value'])
-                dec = float(
-                    t_query[xpath == '/capability/testQuery/dec']['detail_value'])
-                sr = float(
-                    t_query[xpath == '/capability/testQuery/sr']['detail_value'])
+
+            ra = float(t_query[xpath == '/capability/testQuery/ra']['detail_value'])
+            dec = float(t_query[xpath == '/capability/testQuery/dec']['detail_value'])
+            sr = float(t_query[xpath == '/capability/testQuery/sr']['detail_value'])
 
             # Handle big SR returning too big a table for some queries, causing
             # tests to fail due to timeout.
103 changes: 32 additions & 71 deletions astroquery/vo_conesearch/validator/validate.py
@@ -23,7 +23,6 @@
                          InvalidValidationAttribute)
 from ..exceptions import VOSError
 from ..vos_catalog import VOSDatabase, vo_tab_parse
-from ...utils.commons import ASTROPY_LT_4_1
 from ...utils.timer import timefunc
 
 # Import configurable items declared in __init__.py
@@ -129,16 +128,10 @@ def check_conesearch_sites(destdir=os.curdir, verbose=True, parallel=True,
     # Validate only a subset of the services.
     if url_list is not None:
         # Make sure URL is unique and fixed.
-        if ASTROPY_LT_4_1:
-            url_list = set(map(
-                unescape_all,
-                [cur_url.encode('utf-8') if isinstance(cur_url, str) else cur_url
-                 for cur_url in url_list]))
-        else:
-            url_list = set(map(
-                unescape_all,
-                [cur_url if isinstance(cur_url, str) else cur_url
-                 for cur_url in url_list]))
+        url_list = set(map(
+            unescape_all,
+            [cur_url if isinstance(cur_url, str) else cur_url
+             for cur_url in url_list]))
         uniq_rows = len(url_list)
         url_list_processed = []  # To track if given URL is valid in registry
         if verbose:
@@ -151,48 +144,26 @@
     key_lookup_by_url = {}
 
     # Process each catalog in the registry.
-    if ASTROPY_LT_4_1:
-        for cur_key, cur_cat in js_mstr.get_catalogs():
-            cur_url = cur_cat['url'].encode('utf-8')
-
-            # Skip if:
-            # a. not a Cone Search service
-            # b. not in given subset, if any
-            if ((cur_cat['cap_type'] != b'conesearch')
-                    or (url_list is not None and cur_url not in url_list)):
-                continue
-
-            # Use testQuery to return non-empty VO table with max verbosity.
-            testquery_pars = parse_cs(cur_cat['ivoid'], cur_cat['cap_index'])
-            cs_pars_arr = ['{}={}'.format(key, testquery_pars[key]).encode('utf-8')
-                           for key in testquery_pars]
-            cs_pars_arr += [b'VERB=3']
-
-            # Track the service.
-            key_lookup_by_url[cur_url + b'&'.join(cs_pars_arr)] = cur_key
-            if url_list is not None:
-                url_list_processed.append(cur_url)
-    else:
-        for cur_key, cur_cat in js_mstr.get_catalogs():
-            cur_url = cur_cat['url']
-
-            # Skip if:
-            # a. not a Cone Search service
-            # b. not in given subset, if any
-            if ((cur_cat['cap_type'] != 'conesearch')
-                    or (url_list is not None and cur_url not in url_list)):
-                continue
-
-            # Use testQuery to return non-empty VO table with max verbosity.
-            testquery_pars = parse_cs(cur_cat['ivoid'], cur_cat['cap_index'])
-            cs_pars_arr = ['{}={}'.format(key, testquery_pars[key])
-                           for key in testquery_pars]
-            cs_pars_arr += ['VERB=3']
-
-            # Track the service.
-            key_lookup_by_url[cur_url + '&'.join(cs_pars_arr)] = cur_key
-            if url_list is not None:
-                url_list_processed.append(cur_url)
+    for cur_key, cur_cat in js_mstr.get_catalogs():
+        cur_url = cur_cat['url']
+
+        # Skip if:
+        # a. not a Cone Search service
+        # b. not in given subset, if any
+        if ((cur_cat['cap_type'] != 'conesearch')
+                or (url_list is not None and cur_url not in url_list)):
+            continue
+
+        # Use testQuery to return non-empty VO table with max verbosity.
+        testquery_pars = parse_cs(cur_cat['ivoid'], cur_cat['cap_index'])
+        cs_pars_arr = ['{}={}'.format(key, testquery_pars[key])
+                       for key in testquery_pars]
+        cs_pars_arr += ['VERB=3']
+
+        # Track the service.
+        key_lookup_by_url[cur_url + '&'.join(cs_pars_arr)] = cur_key
+        if url_list is not None:
+            url_list_processed.append(cur_url)
 
     # Give warning if any of the user given subset is not in the registry.
     if url_list is not None:
@@ -207,10 +178,8 @@ def check_conesearch_sites(destdir=os.curdir, verbose=True, parallel=True,
 
     all_urls = list(key_lookup_by_url)
     timeout = data.conf.remote_timeout
-    if ASTROPY_LT_4_1:
-        map_args = [(out_dir, url, timeout) for url in all_urls]
-    else:
-        map_args = [(out_dir, url.encode('utf-8'), timeout) for url in all_urls]
+
+    map_args = [(out_dir, url.encode('utf-8'), timeout) for url in all_urls]
 
     # Validate URLs
     if parallel:
@@ -225,20 +194,12 @@ def check_conesearch_sites(destdir=os.curdir, verbose=True, parallel=True,
         mp_list = map(_do_validation, map_args)
 
     # Categorize validation results
-    if ASTROPY_LT_4_1:
-        for r in mp_list:
-            db_key = r['out_db_name']
-            cat_key = key_lookup_by_url[r.url]
-            cur_cat = js_mstr.get_catalog(cat_key)
-            _copy_r_to_cat(r, cur_cat)
-            js_tree[db_key].add_catalog(cat_key, cur_cat)
-    else:
-        for r in mp_list:
-            db_key = r['out_db_name']
-            cat_key = key_lookup_by_url[r.url.decode('utf-8')]
-            cur_cat = js_mstr.get_catalog(cat_key)
-            _copy_r_to_cat(r, cur_cat)
-            js_tree[db_key].add_catalog(cat_key, cur_cat)
+    for r in mp_list:
+        db_key = r['out_db_name']
+        cat_key = key_lookup_by_url[r.url.decode('utf-8')]
+        cur_cat = js_mstr.get_catalog(cat_key)
+        _copy_r_to_cat(r, cur_cat)
+        js_tree[db_key].add_catalog(cat_key, cur_cat)
 
     # Write to HTML
     html_subsets = result.get_result_subsets(mp_list, out_dir)