From 4249a9b0c152fa430a9c67f2312c4ae905a1e735 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 3 Dec 2018 00:59:54 -0500 Subject: [PATCH 01/13] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 282da53..4f0866e 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ The legacy version of sat-search (<1.0.0) can be used with the legacy version of It is recommended to use [pyenv](https://github.com/pyenv/pyenv) and [virtualenv](https://virtualenv.pypa.io/en/latest/) to to control Python versions and installed dependencies. sat-search can be conveniently installed from PyPi: # install the latest release version - $ pip install satsearch + $ pip install sat-search Sat-search is a very lightweight application, with the only dependency being requests. From 9eef8b5c3bd6e84e6b0b423d549acabc4504aa1f Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Fri, 4 Jan 2019 14:13:17 -0500 Subject: [PATCH 02/13] remove files now in satstac library --- satsearch/__init__.py | 1 - satsearch/config.py | 2 +- satsearch/main.py | 25 ++-- satsearch/scene.py | 336 ------------------------------------------ satsearch/utils.py | 188 ----------------------- test/test_scene.py | 246 ------------------------------- test/test_utils.py | 34 ----- 7 files changed, 14 insertions(+), 818 deletions(-) delete mode 100644 satsearch/scene.py delete mode 100644 satsearch/utils.py delete mode 100644 test/test_scene.py delete mode 100644 test/test_utils.py diff --git a/satsearch/__init__.py b/satsearch/__init__.py index 28d41ff..378bd51 100644 --- a/satsearch/__init__.py +++ b/satsearch/__init__.py @@ -1,5 +1,4 @@ from satsearch.search import Search -from satsearch.scene import Scene, Scenes import logging diff --git a/satsearch/config.py b/satsearch/config.py index 403134d..88d89eb 100644 --- a/satsearch/config.py +++ b/satsearch/config.py @@ -1,7 +1,7 @@ import os # API URL -API_URL = os.getenv('SATUTILS_API_URL', 
'https://sat-api.developmentseed.org') +API_URL = os.getenv('SATUTILS_API_URL', 'https://sat-api-dev.developmentseed.org') # data directory to store downloaded imagery DATADIR = os.getenv('SATUTILS_DATADIR', './${eo:platform}/${date}') diff --git a/satsearch/main.py b/satsearch/main.py index 5828b11..e779f6d 100644 --- a/satsearch/main.py +++ b/satsearch/main.py @@ -2,40 +2,41 @@ import sys import json from .version import __version__ -from satsearch import Search, Scenes +from satsearch import Search +from satstac import Items from satsearch.parser import SatUtilsParser -def main(scenes=None, print_md=None, print_cal=False, +def main(items=None, print_md=None, print_cal=False, save=None, download=None, **kwargs): """ Main function for performing a search """ - if scenes is None: - # get scenes from search + if items is None: + # get items from search search = Search(**kwargs) - scenes = Scenes(search.scenes(), properties=kwargs) + items = Items(search.items(), properties=kwargs) else: - scenes = Scenes.load(scenes) + items = Items.load(items) # print metadata if print_md is not None: - scenes.print_scenes(print_md) + items.print_scenes(print_md) # print calendar if print_cal: - print(scenes.text_calendar()) + print(items.text_calendar()) # save all metadata in JSON file if save is not None: - scenes.save(filename=save) + items.save(filename=save) - print('%s scenes found' % len(scenes)) + print('%s items found' % len(items)) # download files given keys if download is not None: for key in download: - scenes.download(key=key) + items.download(key=key) - return scenes + return items def cli(): diff --git a/satsearch/scene.py b/satsearch/scene.py deleted file mode 100644 index 63af9ed..0000000 --- a/satsearch/scene.py +++ /dev/null @@ -1,336 +0,0 @@ -import os -import logging -import requests -import json -from string import Formatter, Template -from datetime import datetime -import satsearch.utils as utils -import satsearch.config as config -import traceback - - 
-logger = logging.getLogger(__name__) - - -class SatSceneError(Exception): - pass - - -class Scene(object): - - def __init__(self, feature): - """ Initialize a scene object """ - required = ['id', 'datetime'] - if 'geometry' not in feature: - raise SatSceneError('No geometry supplied') - if not set(required).issubset(feature.get('properties', {}).keys()): - raise SatSceneError('Invalid Scene (required parameters: %s' % ' '.join(required)) - self.feature = feature - - # determine common_name to asset mapping - # it will map if an asset contains only a single band - bands = self.eobands - band_to_name = {b: bands[b]['common_name'] for b in bands if bands[b].get('common_name', None)} - self.name_to_band = {} - for a in self.assets: - _bands = self.assets[a].get('eo:bands', []) - if len(_bands) == 1 and _bands[0] in band_to_name: - self.name_to_band[band_to_name[_bands[0]]] = _bands[0] - - # QGIS altered date format when editing this GeoJSON file - #self['datetime'] = self['datetime'].replace('/', '-') - self.filenames = {} - # TODO - add validator - - def __repr__(self): - return self['id'] - - def __getitem__(self, key): - try: - return getattr(self, key) - except: - if key in self.feature['properties']: - return self.feature['properties'][key] - else: - return None - - def keys(self): - return self.feature['properties'].keys() - - @property - def id(self): - return self.feature['properties']['id'] - - @property - def geometry(self): - return self.feature['geometry'] - - @property - def date(self): - dt = self['datetime'].replace('/', '-') - pattern = "%Y-%m-%dT%H:%M:%S.%fZ" - return datetime.strptime(dt, pattern).date() - - @property - def assets(self): - """ Return dictionary of assets """ - return self.feature.get('assets', {}) - #prefix = os.path.commonprefix(files) - #keys = [os.path.splitext(f[len(prefix):])[0] for f in files] - #links = dict(zip(keys, files)) - - @property - def links(self): - """ Return dictionary of links """ - return 
self.feature.get('links', {}) - - @property - def eobands(self): - """ Return dictionary of eo:bands """ - return self.feature.get('eo:bands', {}) - - @property - def bbox(self): - """ Get bounding box of scene """ - lats = [c[1] for c in self.geometry['coordinates'][0]] - lons = [c[0] for c in self.geometry['coordinates'][0]] - return [min(lons), min(lats), max(lons), max(lats)] - - def asset(self, key): - """ Get asset info for this key or common_name """ - if key not in self.assets: - if key not in self.name_to_band: - logging.warning('No such asset (%s)' % key) - return None - else: - key = self.name_to_band[key] - return self.assets[key] - - def download(self, key, overwrite=False): - """ Download this key (e.g., a band, or metadata file) from the scene """ - - asset = self.asset(key) - if asset is None: - return None - - path = self.get_path() - try: - ext = os.path.splitext(asset['href'])[1] - fout = os.path.join(path, self.get_filename(suffix='_'+key) + ext) - if os.path.exists(fout) and overwrite is False: - self.filenames[key] = fout - else: - self.filenames[key] = self.download_file(asset['href'], fout=fout) - except Exception as e: - logger.error('Unable to download %s: %s' % (asset['href'], str(e))) - logger.debug(traceback.format_exc()) - if key in self.filenames: - return self.filenames[key] - else: - return None - - @classmethod - def mkdirp(cls, path): - """ Recursively make directory """ - if not os.path.isdir(path): - os.makedirs(path) - return path - - def string_sub(self, string): - string = string.replace(':', '_colon_') - subs = {} - for key in [i[1] for i in Formatter().parse(string.rstrip('/')) if i[1] is not None]: - if key == 'date': - subs[key] = self.date - else: - subs[key] = self[key.replace('_colon_', ':')] - return Template(string).substitute(**subs) - - def get_path(self, no_create=False): - """ Get local path for this scene """ - path = self.string_sub(config.DATADIR) - if not no_create and path != '': - self.mkdirp(path) - return 
path - - def get_filename(self, suffix=None): - """ Get local filename for this scene """ - fname = self.string_sub(config.FILENAME) - if suffix is not None: - fname = fname + suffix - return fname - - @staticmethod - def download_file(url, fout=None): - """ Download a file """ - fout = os.path.basename(url) if fout is None else fout - logger.info('Downloading %s as %s' % (url, fout)) - # check if on s3 - if 's3.amazonaws.com' in url: - url, headers = utils.get_s3_signed_url(url) - resp = requests.get(url, headers=headers, stream=True) - if resp.status_code != 200: - raise Exception("Unable to download file %s: %s" % (url, resp.text)) - with open(fout, 'wb') as f: - for chunk in resp.iter_content(chunk_size=1024): - if chunk: # filter out keep-alive new chunks - f.write(chunk) - return fout - - @classmethod - def create_derived(cls, scenes): - """ Create metadata for dervied scene from multiple input scenes """ - # data provenance, iterate through links - links = [] - for i, scene in enumerate(scenes): - links.append({ - 'rel': 'derived_from', - 'href': scene.links['self']['href'] - }) - # calculate composite geometry and bbox - geom = scenes[0].geometry - # properties - props = { - 'id': '%s_%s' % (scenes[0].date, scenes[0]['eo:platform']), - 'datetime': scenes[0]['datetime'] - } - collections = [s['c:id'] for s in scenes if s['c:id'] is not None] - if len(collections) == 1: - props['c:id'] = collections[0] - item = { - 'properties': props, - 'geometry': geom, - 'links': links, - 'assets': {} - } - return Scene(item) - - -class Scenes(object): - """ A collection of Scene objects """ - - def __init__(self, scenes, properties={}): - """ Initialize with a list of Scene objects """ - self.scenes = sorted(scenes, key=lambda s: s.date) - self.properties = properties - for p in properties: - if isinstance(properties[p], str): - try: - _p = json.loads(properties[p]) - self.properties[p] = _p - except: - self.properties[p] = properties[p] - # check if FeatureCollection and 
get just first Feature - if p == 'intersects': - if self.properties[p]['type'] == 'FeatureCollection': - self.properties[p] = self.properties[p]['features'][0] - self.collections - - def __len__(self): - """ Number of scenes """ - return len(self.scenes) - - def __getitem__(self, index): - return self.scenes[index] - - def __setitem__(self, index, value): - self.scenes[index] = value - - def __delitem__(self, index): - self.scenes.delete(index) - - def dates(self): - """ Get sorted list of dates for all scenes """ - return sorted(list(set([s.date for s in self.scenes]))) - - def collections(self): - """ Get collection records for this list of scenes """ - return self.collections - - def bbox(self): - """ Get bounding box of search """ - if 'intersects' in self.properties: - coords = self.properties['intersects']['geometry']['coordinates'] - lats = [c[1] for c in coords[0]] - lons = [c[0] for c in coords[0]] - return [min(lons), min(lats), max(lons), max(lats)] - else: - return [] - - def center(self): - if 'intersects' in self.properties: - coords = self.properties['intersects']['geometry']['coordinates'] - lats = [c[1] for c in coords[0]] - lons = [c[0] for c in coords[0]] - return [(min(lats) + max(lats))/2.0, (min(lons) + max(lons))/2.0] - else: - return 0, 0 - - def platforms(self, date=None): - """ List of all available sensors across scenes """ - if date is None: - return list(set([s['eo:platform'] for s in self.scenes])) - else: - return list(set([s['eo:platform'] for s in self.scenes if s.date == date])) - - def print_scenes(self, params=[]): - """ Print summary of all scenes """ - if len(params) == 0: - params = ['date', 'id'] - txt = 'Scenes (%s):\n' % len(self.scenes) - txt += ''.join(['{:<20}'.format(p) for p in params]) + '\n' - for s in self.scenes: - # NOTE - the string conversion is because .date returns a datetime obj - txt += ''.join(['{:<20}'.format(str(s[p])) for p in params]) + '\n' - print(txt) - - def text_calendar(self): - """ Get calendar 
for dates """ - date_labels = {} - dates = self.dates() - if len(dates) == 0: - return '' - for d in self.dates(): - sensors = self.platforms(d) - if len(sensors) > 1: - date_labels[d] = 'Multiple' - else: - date_labels[d] = sensors[0] - return utils.get_text_calendar(date_labels) - - def save(self, filename): - """ Save scene metadata """ - with open(filename, 'w') as f: - f.write(json.dumps(self.geojson())) - - def geojson(self): - """ Get all metadata as GeoJSON """ - features = [s.feature for s in self.scenes] - return { - 'type': 'FeatureCollection', - 'features': features, - 'properties': self.properties - } - - @classmethod - def load(cls, filename): - """ Load a collections class from a GeoJSON file of metadata """ - with open(filename) as f: - geoj = json.loads(f.read()) - scenes = [Scene(feature) for feature in geoj['features']] - return Scenes(scenes, properties=geoj.get('properties', {})) - - def filter(self, key, values): - """ Filter scenes on key matching value """ - scenes = [] - for val in values: - scenes += list(filter(lambda x: x[key] == val, self.scenes)) - self.scenes = scenes - - def download(self, **kwargs): - dls = [] - for s in self.scenes: - fname = s.download(**kwargs) - if fname is not None: - dls.append(fname) - return dls diff --git a/satsearch/utils.py b/satsearch/utils.py deleted file mode 100644 index a49d690..0000000 --- a/satsearch/utils.py +++ /dev/null @@ -1,188 +0,0 @@ -import base64 -import calendar -import collections -import logging -import os -import time - - -logger = logging.getLogger(__name__) - - -# from https://gist.github.com/angstwad/bf22d1822c38a92ec0a9#gistcomment-2622319 -def dict_merge(dct, merge_dct, add_keys=True): - """ Recursive dict merge. Inspired by :meth:``dict.update()``, instead of - updating only top-level keys, dict_merge recurses down into dicts nested - to an arbitrary depth, updating keys. The ``merge_dct`` is merged into - ``dct``. 
- - This version will return a copy of the dictionary and leave the original - arguments untouched. - - The optional argument ``add_keys``, determines whether keys which are - present in ``merge_dict`` but not ``dct`` should be included in the - new dict. - - Args: - dct (dict) onto which the merge is executed - merge_dct (dict): dct merged into dct - add_keys (bool): whether to add new keys - - Returns: - dict: updated dict - """ - dct = dct.copy() - if not add_keys: - merge_dct = { - k: merge_dct[k] - for k in set(dct).intersection(set(merge_dct)) - } - - for k, v in merge_dct.items(): - if (k in dct and isinstance(dct[k], dict) - and isinstance(merge_dct[k], collections.Mapping)): - dct[k] = dict_merge(dct[k], merge_dct[k], add_keys=add_keys) - else: - dct[k] = merge_dct[k] - - return dct - - -def get_s3_signed_url(url, region='eu-central-1'): - import sys, os, base64, datetime, hashlib, hmac - - access_key = os.environ.get('AWS_ACCESS_KEY_ID') - secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY') - if access_key is None or secret_key is None: - # if credentials not provided, just try to download without signed URL - return url, None - - parts = url.replace('https://', '').split('/') - bucket = parts[0].replace('.s3.amazonaws.com', '') - key = '/'.join(parts[1:]) - - service = 's3' - host = '%s.s3-%s.amazonaws.com' % (bucket, region) #parts[0] #'s3-%s.amazonaws.com' % region - host = '%s.s3.amazonaws.com' % (bucket) - request_parameters = '' - - # Key derivation functions. 
See: - # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python - def sign(key, msg): - return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest() - - def getSignatureKey(key, dateStamp, regionName, serviceName): - kDate = sign(('AWS4' + key).encode('utf-8'), dateStamp) - kRegion = sign(kDate, regionName) - kService = sign(kRegion, serviceName) - kSigning = sign(kService, 'aws4_request') - return kSigning - - # Create a date for headers and the credential string - t = datetime.datetime.utcnow() - amzdate = t.strftime('%Y%m%dT%H%M%SZ') - datestamp = t.strftime('%Y%m%d') # Date w/o time, used in credential scope - - # create signed request and headers - canonical_uri = '/' + key - canonical_querystring = request_parameters - payload_hash = 'UNSIGNED-PAYLOAD' - canonical_headers = 'host:%s\nx-amz-content-sha256:%s\nx-amz-date:%s\nx-amz-request-payer:requester\n' % (host, payload_hash, amzdate) - signed_headers = 'host;x-amz-content-sha256;x-amz-date;x-amz-request-payer' - canonical_request = 'GET\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash - algorithm = 'AWS4-HMAC-SHA256' - credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request' - string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + hashlib.sha256(canonical_request.encode('utf-8')).hexdigest() - signing_key = getSignatureKey(secret_key, datestamp, region, service) - signature = hmac.new(signing_key, string_to_sign.encode('utf-8'), hashlib.sha256).hexdigest() - authorization_header = algorithm + ' ' + 'Credential=' + access_key + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature - headers = {'x-amz-date':amzdate, 'x-amz-content-sha256': payload_hash, 'Authorization':authorization_header, 'x-amz-request-payer': 'requester'} - request_url = 'https://%s%s' % (host, canonical_uri) - - 
logger.debug('Request URL = ' + request_url) - #r = requests.get(request_url, headers=headers) - #print('Response code: %d\n' % r.status_code) - #print(r.text) - return request_url, headers - - -def get_text_calendar_dates(date1, date2, cols=3): - """ Get array of datetimes between two dates suitable for formatting """ - """ - The return value is a list of years. - Each year contains a list of month rows. - Each month row contains cols months (default 3). - Each month contains list of 6 weeks (the max possible). - Each week contains 1 to 7 days. - Days are datetime.date objects. - """ - year1 = date1.year - year2 = date2.year - - # start and end rows - row1 = int((date1.month - 1) / cols) - row2 = int((date2.month - 1) / cols) + 1 - - # generate base calendar array - Calendar = calendar.Calendar() - cal = [] - for yr in range(year1, year2+1): - ycal = Calendar.yeardatescalendar(yr, width=cols) - if yr == year1 and yr == year2: - ycal = ycal[row1:row2] - elif yr == year1: - ycal = ycal[row1:] - elif yr == year2: - ycal = ycal[:row2] - cal.append(ycal) - return cal - - -def get_text_calendar(dates, cols=3): - """ Get calendar covering all dates, with provided dates colored and labeled """ - _dates = sorted(dates.keys()) - _labels = set(dates.values()) - labels = dict(zip(_labels, [str(41 + i) for i in range(0, len(_labels))])) - cal = get_text_calendar_dates(_dates[0], _dates[-1]) - - # month and day headers - months = calendar.month_name - days = 'Mo Tu We Th Fr Sa Su' - hformat = '{:^20} {:^20} {:^20}\n' - rformat = ' '.join(['{:>2}'] * 7) + ' ' - - # create output - col0 = '\033[' - col_end = '\033[0m' - out = '' - for iy, yrcal in enumerate(cal): - out += '{:^64}\n\n'.format(_dates[0].year + iy) - for mrow in yrcal: - mnum = mrow[0][2][3].month - names = [months[mnum], months[mnum+1], months[mnum+2]] - out += hformat.format(names[0], names[1], names[2]) - out += hformat.format(days, days, days) - for r in range(0, len(mrow[0])): - for c in range(0, cols): - if 
len(mrow[c]) == 4: - mrow[c].append([''] * 7) - if len(mrow[c]) == 5: - mrow[c].append([''] * 7) - wk = [] - for d in mrow[c][r]: - if d == '' or d.month != (mnum + c): - wk.append('') - else: - string = str(d.day).rjust(2, ' ') - if d in _dates: - string = '%s%sm%s%s' % (col0, labels[dates[d]], string, col_end) - wk.append(string) - out += rformat.format(*wk) - out += '\n' - out += '\n' - # print labels - for lbl, col in labels.items(): - vals = list(dates.values()) - out += '%s%sm%s (%s)%s\n' % (col0, col, lbl, vals.count(lbl), col_end) - out += '%s total dates' % len(_dates) - return out diff --git a/test/test_scene.py b/test/test_scene.py deleted file mode 100644 index e828c16..0000000 --- a/test/test_scene.py +++ /dev/null @@ -1,246 +0,0 @@ -import datetime -import os -import unittest -import shutil -import satsearch.config as config -from satsearch.scene import Scene, Scenes, SatSceneError - - -testpath = os.path.dirname(__file__) - - -class TestScene(unittest.TestCase): - - path = os.path.dirname(__file__) - - prefix = 'http://landsat-pds.s3.amazonaws.com/L8/007/029/LC80070292016240LGN00/LC80070292016240LGN00_', - - item = { - 'geometry': {}, - 'properties': { - 'id': 'testscene', - 'collection': 'test_collection', - 'datetime': '2017-01-01T00:00:00.0000Z', - 'eo:platform': 'test_platform' - }, - "bbox": [ - -71.46676936182894, - 42.338371079679106, - -70.09532154452742, - 43.347431265475954 - ], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - -71.46676936182894, - 43.32623760511659 - ], - [ - -70.11293433656888, - 43.347431265475954 - ], - [ - -70.09532154452742, - 42.35884880571144 - ], - [ - -71.42776890002204, - 42.338371079679106 - ], - [ - -71.46676936182894, - 43.32623760511659 - ] - ] - ] - }, - 'assets': { - 'MTL': { - 'href': '%sMTL.txt' % prefix - }, - 'B1': { - 'href': '%sB1.TIF' % prefix, - 'eo:bands': ['B1'] - }, - 'fake_asset': { - 'href': 'nourl', - }, - 'thumbnail': { - 'href': 
'http://landsat-pds.s3.amazonaws.com/L8/007/029/LC80070292016240LGN00/LC80070292016240LGN00_thumb_small.jpg' - } - }, - 'links': { - 'self': {'href': 'link/to/self'} - }, - 'eo:bands': { - 'B1': {'common_name': 'coastal'} - } - } - - @classmethod - def setUpClass(cls): - """ Configure testing class """ - config.DATADIR = os.path.join(testpath, config.DATADIR) - - def get_test_scene(self): - """ Get valid test scene """ - return Scene(self.item) - - def test_invalid_init(self): - """ Initialize a scene with insufficient metadata """ - with self.assertRaises(SatSceneError): - Scene({'meaninglesskey': 'meaninglessstring'}) - with self.assertRaises(SatSceneError): - Scene({'geometry': {}}) - - def test_init(self): - """ Initialize a scene """ - scene = self.get_test_scene() - dt, tm = self.item['properties']['datetime'].split('T') - self.assertEqual(str(scene.date), dt) - self.assertEqual(scene.id, self.item['properties']['id']) - self.assertEqual(scene.geometry, self.item['geometry']) - self.assertEqual(str(scene), self.item['properties']['id']) - assert(list(scene.keys()) == ['id', 'collection', 'datetime', 'eo:platform']) - - def test_class_properties(self): - """ Test the property functions of the Scene class """ - scene = self.get_test_scene() - assert(scene.links['self']['href'] == 'link/to/self') - assert(scene.bbox == [-71.46676936182894, 42.338371079679106, -70.09532154452742, 43.347431265475954]) - - def test_assets(self): - """ Get assets for download """ - scene = self.get_test_scene() - assert(scene.assets['B1']['href'] == self.item['assets']['B1']['href']) - assert(scene.asset('coastal')['href'] == self.item['assets']['B1']['href']) - - def test_download_thumbnail(self): - """ Get thumbnail for scene """ - scene = self.get_test_scene() - fname = scene.download(key='thumbnail') - self.assertTrue(os.path.exists(fname)) - os.remove(fname) - self.assertFalse(os.path.exists(fname)) - #shutil.rmtree(os.path.join(testpath, self.item['properties']['collection'])) 
- - def test_download(self): - """ Retrieve a data file """ - scene = self.get_test_scene() - fname = scene.download(key='MTL') - self.assertTrue(os.path.exists(fname)) - fname = scene.download(key='MTL') - assert(os.path.exists(fname)) - os.remove(fname) - self.assertFalse(os.path.exists(fname)) - #shutil.rmtree(os.path.join(testpath, self.item['properties']['collection'])) - - def test_download_paths(self): - """ Testing of download paths and filenames """ - scene = self.get_test_scene() - datadir = config.DATADIR - filename = config.FILENAME - config.DATADIR = os.path.join(testpath, '${date}') - config.FILENAME = '${date}_${id}' - fname = scene.download('MTL') - _fname = os.path.join(testpath, '2017-01-01/2017-01-01_testscene_MTL.txt') - assert(fname == _fname) - assert(os.path.exists(fname)) - config.DATADIR = datadir - config.FILENAME = filename - shutil.rmtree(os.path.join(testpath, '2017-01-01')) - assert(os.path.exists(fname) == False) - - def test_download_nonexist(self): - """ Test downloading of non-existent file """ - scene = self.get_test_scene() - fname = scene.download(key='fake_asset') - assert(fname is None) - - def test_download_all(self): - """ Retrieve all data files from a source """ - scene = self.get_test_scene() - fnames = [scene.download(a) for a in scene.assets if a != 'fake_asset'] - for f in fnames: - self.assertTrue(os.path.exists(f)) - os.remove(f) - self.assertFalse(os.path.exists(f)) - - def test_create_derived(self): - """ Create single derived scene """ - scenes = [self.get_test_scene(), self.get_test_scene()] - scene = Scene.create_derived(scenes) - assert(scene.date == scenes[0].date) - assert(scene['c:id'] == scenes[0]['c:id']) - - -class TestScenes(unittest.TestCase): - - @classmethod - def setUpClass(cls): - """ Configure testing class """ - config.DATADIR = testpath - - def load_scenes(self): - return Scenes.load(os.path.join(testpath, 'scenes.geojson')) - - def test_load(self): - """ Initialize Scenes with list of Scene 
objects """ - scenes = self.load_scenes() - self.assertEqual(len(scenes), 2) - self.assertTrue(isinstance(scenes.scenes[0], Scene)) - - def test_save(self): - """ Save scenes list """ - scenes = self.load_scenes() - fname = os.path.join(testpath, 'save-test.json') - scenes.save(fname) - self.assertTrue(os.path.exists(fname)) - os.remove(fname) - self.assertFalse(os.path.exists(fname)) - - def test_print_scenes(self): - """ Print summary of scenes """ - scenes = self.load_scenes() - scenes.print_scenes() - - def test_dates(self): - """ Get dates of all scenes """ - scenes = self.load_scenes() - dates = scenes.dates() - self.assertEqual(len(dates), 1) - - def test_text_calendar(self): - """ Get calendar """ - scenes = self.load_scenes() - cal = scenes.text_calendar() - self.assertTrue(len(cal) > 250) - - def test_download_thumbnails(self): - """ Download all thumbnails """ - scenes = self.load_scenes() - fnames = scenes.download(key='thumbnail') - for f in fnames: - self.assertTrue(os.path.exists(f)) - os.remove(f) - self.assertFalse(os.path.exists(f)) - #shutil.rmtree(os.path.join(testpath, 'landsat-8-l1')) - - def test_filter(self): - scenes = self.load_scenes() - scenes.filter('eo:platform', ['landsat-8']) - assert(len(scenes) == 1) - - def test_download(self): - """ Download a data file from all scenes """ - scenes = self.load_scenes() - - fnames = scenes.download(key='MTL') - assert(len(fnames) == 1) - for f in fnames: - self.assertTrue(os.path.exists(f)) - os.remove(f) - self.assertFalse(os.path.exists(f)) - #shutil.rmtree(os.path.join(testpath, 'landsat-8-l1')) diff --git a/test/test_utils.py b/test/test_utils.py deleted file mode 100644 index ce73af0..0000000 --- a/test/test_utils.py +++ /dev/null @@ -1,34 +0,0 @@ -import os -import unittest -import satsearch.config as config -from satsearch.scene import Scenes - - -class Test(unittest.TestCase): - - path = os.path.dirname(__file__) - - @classmethod - def setUpClass(cls): - """ Configure testing class """ - 
config.DATADIR = cls.path - - def load_scenes(self): - return Scenes.load(os.path.join(self.path, 'scenes.geojson')) - - def test_text_calendar(self): - """ Get calendar """ - scenes = self.load_scenes() - cal = scenes.text_calendar() - self.assertEqual(len(cal), 576) - self.assertTrue(' 2018 ' in cal) - self.assertTrue(' January ' in cal) - self.assertTrue(' March ' in cal) - - def test_text_calendar_multiyear(self): - scenes = self.load_scenes() - scenes[0].feature['properties']['datetime'] = '2010-02-01T00:00:00.000Z' - cal = scenes.text_calendar() - self.assertEqual(len(cal), 16654) - self.assertTrue(' 2016 ' in cal) - self.assertTrue(' 2017 ' in cal) From 7403879061ac9cb5758d50970a9f0f27f8ec71b3 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Fri, 4 Jan 2019 19:43:07 -0500 Subject: [PATCH 03/13] update readme and requirements --- README.md | 8 +++++++- requirements.txt | 1 + 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 4f0866e..d90c644 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,13 @@ Sat-search is a Python 2/3 library and a command line tool for discovering and downloading publicly available satellite imagery using a conformant API such as [sat-api](https://github.com/sat-utils/sat-api). -The legacy version of sat-search (<1.0.0) can be used with the legacy version of sat-api (<1.0.0), currently deployed at https://api.developmentseed.org/satellites. +The STAC version supported by a given version of sat-api is shown in the table below. Additional information can be found in the [CHANGELOG](CHANGELOG.md) + +| sat-search | STAC | +| ---------- | ---- | +| 0.1.0 | 0.5.0 | +| 0.2.0 | 0.6.0 | + ## Installation It is recommended to use [pyenv](https://github.com/pyenv/pyenv) and [virtualenv](https://virtualenv.pypa.io/en/latest/) to to control Python versions and installed dependencies. 
sat-search can be conveniently installed from PyPi: diff --git a/requirements.txt b/requirements.txt index 7d63bef..b633764 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ requests~=2.19 +sat-stac~=0.1.0rc3 From 86ec0afa2a8aff07999d9becf53db7647d0a0687 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Fri, 4 Jan 2019 19:43:38 -0500 Subject: [PATCH 04/13] update parser and tests --- satsearch/parser.py | 15 ++++++++------- test/test_parser.py | 2 +- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/satsearch/parser.py b/satsearch/parser.py index 4d8223e..eafb505 100644 --- a/satsearch/parser.py +++ b/satsearch/parser.py @@ -68,21 +68,22 @@ def newbie(cls, *args, **kwargs): subparser = parser.add_subparsers(dest='command') parents = [parser.pparser, parser.output_parser] - sparser = subparser.add_parser('search', help='Perform new search of scenes', parents=parents) + sparser = subparser.add_parser('search', help='Perform new search of items', parents=parents) """ Adds search arguments to a parser """ parser.search_group = sparser.add_argument_group('search options') - parser.search_group.add_argument('-c', '--c:id', help='Name(s) of collection', nargs='*', default=None) + parser.search_group.add_argument('-c', '--collection', help='Name(s) of collection', nargs='*', default=None) + parser.search_group.add_argument('--bbox', help='Bounding box (min lon, min lat, max lon, max lat)', nargs=4) parser.search_group.add_argument('--intersects', help='GeoJSON Feature (file or string)') + parser.search_group.add_argument('--datetime', help='Single date/time or begin and end date/time (e.g., 2017-01-01/2017-02-15') + parser.search_group.add_argument('--sort', help='Sort by fields') #group.add_argument('--id', help='One or more scene IDs', nargs='*', default=None) #group.add_argument('--contains', help='lon,lat points') - parser.search_group.add_argument('--datetime', help='Single date/time or begin and end date/time (e.g., 
2017-01-01/2017-02-15') - parser.search_group.add_argument('--eo:cloud_cover', help='Range of acceptable cloud cover (e.g., 0/20)') - parser.search_group.add_argument('-p', '--param', nargs='*', help='Additional parameters of form KEY=VALUE', action=SatUtilsParser.KeyValuePair) + parser.search_group.add_argument('-p', '--param', nargs='*', help='Properties of form KEY=VALUE (<, >, <=, >=, = supported)', action=SatUtilsParser.KeyValuePair) parser.search_group.add_argument('--url', help='URL of the API', default=config.API_URL) parents.append(parser.download_parser) - lparser = subparser.add_parser('load', help='Load scenes from previous search', parents=parents) - lparser.add_argument('scenes', help='GeoJSON file of scenes') + lparser = subparser.add_parser('load', help='Load items from previous search', parents=parents) + lparser.add_argument('items', help='GeoJSON file of Items') return parser class KeyValuePair(argparse.Action): diff --git a/test/test_parser.py b/test/test_parser.py index 6ce4585..7fe6e15 100644 --- a/test/test_parser.py +++ b/test/test_parser.py @@ -9,7 +9,7 @@ class Test(unittest.TestCase): """ Test main module """ - args = 'search --datetime 2017-01-01 --eo:cloud_cover 0/20 -p eo:platform=landsat-8' + args = 'search --datetime 2017-01-01 -p eo:cloud_cover=0/20 eo:platform=landsat-8' @classmethod def get_test_parser(cls): From 58d31ea3383f3245b682be6740546716f0a906e7 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Sat, 5 Jan 2019 16:41:09 -0500 Subject: [PATCH 05/13] more fixing of tests and updating code to work with sat-stac --- satsearch/main.py | 4 +- satsearch/parser.py | 5 +- satsearch/search.py | 145 ++++++++---------- test/aoi1.geojson | 20 +-- test/landsat-item1.json | 184 ++++++++++++++++++++++ test/landsat-item2.json | 184 ++++++++++++++++++++++ test/landsat-response.json | 148 ------------------ test/sentinel-response.json | 294 +++++++++++++++++------------------- test/test_main.py | 21 +-- test/test_search.py | 116 
++++---------- 10 files changed, 624 insertions(+), 497 deletions(-) create mode 100644 test/landsat-item1.json create mode 100644 test/landsat-item2.json delete mode 100644 test/landsat-response.json diff --git a/satsearch/main.py b/satsearch/main.py index e779f6d..cd7fbd1 100644 --- a/satsearch/main.py +++ b/satsearch/main.py @@ -13,13 +13,13 @@ def main(items=None, print_md=None, print_cal=False, if items is None: # get items from search search = Search(**kwargs) - items = Items(search.items(), properties=kwargs) + items = search.items() else: items = Items.load(items) # print metadata if print_md is not None: - items.print_scenes(print_md) + items.print_summary(print_md) # print calendar if print_cal: diff --git a/satsearch/parser.py b/satsearch/parser.py index eafb505..eeae699 100644 --- a/satsearch/parser.py +++ b/satsearch/parser.py @@ -58,7 +58,6 @@ def parse_args(self, *args, **kwargs): config.DATADIR = args.pop('datadir') if 'filename' in args: config.FILENAME = args.pop('filename') - return args @classmethod @@ -74,11 +73,11 @@ def newbie(cls, *args, **kwargs): parser.search_group.add_argument('-c', '--collection', help='Name(s) of collection', nargs='*', default=None) parser.search_group.add_argument('--bbox', help='Bounding box (min lon, min lat, max lon, max lat)', nargs=4) parser.search_group.add_argument('--intersects', help='GeoJSON Feature (file or string)') - parser.search_group.add_argument('--datetime', help='Single date/time or begin and end date/time (e.g., 2017-01-01/2017-02-15') + parser.search_group.add_argument('--datetime', help='Single date/time or begin and end date/time (e.g., 2017-01-01/2017-02-15)') parser.search_group.add_argument('--sort', help='Sort by fields') #group.add_argument('--id', help='One or more scene IDs', nargs='*', default=None) #group.add_argument('--contains', help='lon,lat points') - parser.search_group.add_argument('-p', '--param', nargs='*', help='Properties of form KEY=VALUE (<, >, <=, >=, = supported)', 
action=SatUtilsParser.KeyValuePair) + parser.search_group.add_argument('-p', '--property', nargs='*', help='Properties of form KEY=VALUE (<, >, <=, >=, = supported)', action=SatUtilsParser.KeyValuePair) parser.search_group.add_argument('--url', help='URL of the API', default=config.API_URL) parents.append(parser.download_parser) diff --git a/satsearch/search.py b/satsearch/search.py index c6056ea..a6337f0 100644 --- a/satsearch/search.py +++ b/satsearch/search.py @@ -2,9 +2,11 @@ import os import logging import requests + import satsearch.config as config -from satsearch.scene import Scene, Scenes -from satsearch.utils import dict_merge + +from satstac import Collection, Item, Items +from satstac.utils import dict_merge logger = logging.getLogger(__name__) @@ -14,27 +16,35 @@ class SatSearchError(Exception): pass -class Query(object): +class Search(object): """ One search query (possibly multiple pages) """ - def __init__(self, endpoint='search/stac', **kwargs): - """ Initialize a Query object with parameters """ - self.endpoint = endpoint + search_endpoint = 'stac/search' + collections_endpoint = 'collections' + + def __init__(self, **kwargs): + """ Initialize a Search object with parameters """ self.kwargs = kwargs - self.results = None + for k in self.kwargs: + if isinstance(kwargs[k], dict): + kwargs[k] = json.dumps(kwargs[k]) + if k == 'datetime': + self.kwargs['time'] = self.kwargs['datetime'] + del self.kwargs['datetime'] def found(self): """ Small query to determine total number of hits """ - if self.results is None: - self.query(limit=0) - return self.results['properties']['found'] + kwargs = { + 'page': 1, + 'page_size': 0 + } + kwargs.update(self.kwargs) + results = self.query(**kwargs) + return results['meta']['found'] @classmethod - def _query(cls, url,**kwargs): - for k in kwargs: - if isinstance(kwargs[k], list) and k is not "geometry": - kwargs[k] = '"%s"' % (','.join(kwargs[k])) - + def _query(cls, url, **kwargs): + """ Get request """ response = 
requests.get(url, kwargs) logger.debug('Query URL: %s' % response.url) # API error @@ -42,82 +52,45 @@ def _query(cls, url,**kwargs): raise SatSearchError(response.text) return response.json() - def query(self, **kwargs): - """ Make single API call """ - kwargs.update(self.kwargs) - url = os.path.join(config.API_URL, self.endpoint) - self.results = self._query(url, **kwargs) - return self.results - - def items(self, limit=None): - """ Query and return up to limit results """ - if limit is None: - limit = self.found() - limit = min(limit, self.found()) - # split into multiple pages to retrieve - page_size = min(limit, 1000) - items = [] - page = 1 - while len(items) < limit: - results = self.query(page=page, limit=page_size)['features'] - items += results - #for r in results: - # items.append(Scene(r)) - page += 1 - - return items - - -class Search(object): - """ Search the API with multiple queries and combine """ - - def __init__(self, id=[], **kwargs): - """ Initialize a Search object with parameters """ - self.kwargs = kwargs - for k in self.kwargs: - if isinstance(kwargs[k], dict): - kwargs[k] = json.dumps(kwargs[k]) - self.queries = [] - if len(id) == 0: - self.queries.append(Query(**kwargs)) - else: - for s in id: - kwargs.update({'id': s}) - self.queries.append(Query(**kwargs)) - - def found(self): - """ Total number of found scenes """ - found = 0 - for query in self.queries: - found += query.found() - return found + @classmethod + def query(cls, **kwargs): + url = os.path.join(config.API_URL, cls.search_endpoint) + for k in kwargs: + if isinstance(kwargs[k], list) and k is not "geometry": + kwargs[k] = '"%s"' % (','.join(kwargs[k])) + return cls._query(url, **kwargs) @classmethod def collection(cls, cid): """ Get a Collection record """ - url = os.path.join(config.API_URL, 'collections', cid, 'definition') - return Query._query(url)['features'][0] + url = os.path.join(config.API_URL, cls.collections_endpoint, cid) + return Collection(cls._query(url)) - def 
scenes(self): - """ Return all of the scenes """ + def items(self, limit=1000): + """ Return all of the Items and Collections for this search """ items = [] - for query in self.queries: - items += query.items() + found = self.found() + kwargs = { + 'page': 1, + 'page_size': min(limit, found) + } + kwargs.update(self.kwargs) + while len(items) < found: + items += [Item(i) for i in self.query(**kwargs)['features']] + kwargs['page'] += 1 + # retrieve collections - collections = {} - for c in set([item['properties']['c:id'] for item in items if 'c:id' in item['properties']]): - collections[c] = self.collection(c) - del collections[c]['links'] - scenes = [] - for item in items: - if 'c:id' in item['properties']: - item = dict_merge(item, collections[item['properties']['c:id']]) - scenes.append(Scene(item)) - return scenes - - def collections(self): - """ Search collections """ collections = [] - for query in self.queries: - collections += query.collections() - return collections + for c in set([item.properties['collection'] for item in items if 'collection' in item.properties]): + collections.append(self.collection(c)) + #del collections[c]['links'] + + # merge collections into items + #_items = [] + #for item in items: + # import pdb; pdb.set_trace() + # if 'collection' in item['properties']: + # item = dict_merge(item, collections[item['properties']['collection']]) + # _items.append(Item(item)) + + return Items(items, collections=collections, search=self.kwargs) diff --git a/test/aoi1.geojson b/test/aoi1.geojson index c0b466a..f292f8a 100644 --- a/test/aoi1.geojson +++ b/test/aoi1.geojson @@ -6,24 +6,24 @@ "coordinates": [ [ [ - -70.9503936767578, - 43.0287452513488 + -66.3958740234375, + 43.305193797650546 ], [ - -70.78010559082031, - 43.0287452513488 + -64.390869140625, + 43.305193797650546 ], [ - -70.78010559082031, - 43.14258116631987 + -64.390869140625, + 44.22945656830167 ], [ - -70.9503936767578, - 43.14258116631987 + -66.3958740234375, + 44.22945656830167 ], [ 
- -70.9503936767578, - 43.0287452513488 + -66.3958740234375, + 43.305193797650546 ] ] ] diff --git a/test/landsat-item1.json b/test/landsat-item1.json new file mode 100644 index 0000000..53ca960 --- /dev/null +++ b/test/landsat-item1.json @@ -0,0 +1,184 @@ +{ + "type": "Feature", + "id": "LC80080462018268LGN00", + "bbox": [ + -72.07482, + 19.15183, + -69.84647, + 21.29465 + ], + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -72.07482, + 21.26876 + ], + [ + -69.85803, + 21.29465 + ], + [ + -69.84647, + 19.17494 + ], + [ + -72.03347, + 19.15183 + ], + [ + -72.07482, + 21.26876 + ] + ] + ] + }, + "properties": { + "collection": "landsat-8-l1", + "datetime": "2018-09-25T15:08:03.209908+00:00", + "eo:sun_azimuth": 132.18559343, + "eo:sun_elevation": 59.86078361, + "eo:cloud_cover": 8, + "eo:row": "046", + "eo:column": "008", + "landsat:product_id": "LC08_L1TP_008046_20180925_20181009_01_T1", + "landsat:scene_id": "LC80080462018268LGN00", + "landsat:processing_level": "L1TP", + "landsat:tier": "T1", + "eo:epsg": 32619 + }, + "assets": { + "index": { + "type": "text/html", + "title": "HTML index page", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/index.html" + }, + "thumbnail": { + "title": "Thumbnail image", + "type": "image/jpeg", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_thumb_large.jpg" + }, + "B1": { + "type": "image/x.geotiff", + "eo:bands": [ + 0 + ], + "title": "Band 1 (coastal)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B1.TIF" + }, + "B2": { + "type": "image/x.geotiff", + "eo:bands": [ + 1 + ], + "title": "Band 2 (blue)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B2.TIF" + }, + "B3": { 
+ "type": "image/x.geotiff", + "eo:bands": [ + 2 + ], + "title": "Band 3 (green)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B3.TIF" + }, + "B4": { + "type": "image/x.geotiff", + "eo:bands": [ + 3 + ], + "title": "Band 4 (red)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B4.TIF" + }, + "B5": { + "type": "image/x.geotiff", + "eo:bands": [ + 4 + ], + "title": "Band 5 (nir)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B5.TIF" + }, + "B6": { + "type": "image/x.geotiff", + "eo:bands": [ + 5 + ], + "title": "Band 6 (swir16)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B6.TIF" + }, + "B7": { + "type": "image/x.geotiff", + "eo:bands": [ + 6 + ], + "title": "Band 7 (swir22)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B7.TIF" + }, + "B8": { + "type": "image/x.geotiff", + "eo:bands": [ + 7 + ], + "title": "Band 8 (pan)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B8.TIF" + }, + "B9": { + "type": "image/x.geotiff", + "eo:bands": [ + 8 + ], + "title": "Band 9 (cirrus)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B9.TIF" + }, + "B10": { + "type": "image/x.geotiff", + "eo:bands": [ + 9 + ], + "title": "Band 10 (lwir)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B10.TIF" + }, + "B11": { 
+ "type": "image/x.geotiff", + "eo:bands": [ + 10 + ], + "title": "Band 11 (lwir)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_B11.TIF" + }, + "ANG": { + "title": "Angle coefficients file", + "type": "text/plain", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_ANG.txt" + }, + "MTL": { + "title": "original metadata file", + "type": "text/plain", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_MTL.txt" + }, + "BQA": { + "title": "Band quality data", + "type": "image/x.geotiff", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/046/LC08_L1TP_008046_20180925_20181009_01_T1/LC08_L1TP_008046_20180925_20181009_01_T1_BQA.TIF" + } + }, + "links": [ + { + "rel": "self", + "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1/item/LC80080462018268LGN00" + }, + { + "rel": "parent", + "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1" + }, + { + "rel": "collection", + "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1" + }, + { + "rel": "root", + "href": "https://sat-api-dev.developmentseed.org/stac" + } + ] +} \ No newline at end of file diff --git a/test/landsat-item2.json b/test/landsat-item2.json new file mode 100644 index 0000000..e64cd95 --- /dev/null +++ b/test/landsat-item2.json @@ -0,0 +1,184 @@ +{ + "type": "Feature", + "id": "LC80080302018268LGN00", + "bbox": [ + -66.15395, + 42.07238, + -63.19112, + 44.27107 + ], + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -66.15395, + 44.22768 + ], + [ + -63.19797, + 44.27107 + ], + [ + -63.19112, + 42.11264 + ], + [ + -66.04493, + 42.07238 + ], + [ + -66.15395, + 44.22768 + ] + ] + ] + }, + "properties": { + "collection": "landsat-8-l1", + 
"datetime": "2018-09-25T15:01:40.919399+00:00", + "eo:sun_azimuth": 156.16142948, + "eo:sun_elevation": 43.22291973, + "eo:cloud_cover": 41, + "eo:row": "030", + "eo:column": "008", + "landsat:product_id": "LC08_L1TP_008030_20180925_20181009_01_T1", + "landsat:scene_id": "LC80080302018268LGN00", + "landsat:processing_level": "L1TP", + "landsat:tier": "T1", + "eo:epsg": 32620 + }, + "assets": { + "index": { + "type": "text/html", + "title": "HTML index page", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/index.html" + }, + "thumbnail": { + "title": "Thumbnail image", + "type": "image/jpeg", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_thumb_large.jpg" + }, + "B1": { + "type": "image/x.geotiff", + "eo:bands": [ + 0 + ], + "title": "Band 1 (coastal)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B1.TIF" + }, + "B2": { + "type": "image/x.geotiff", + "eo:bands": [ + 1 + ], + "title": "Band 2 (blue)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B2.TIF" + }, + "B3": { + "type": "image/x.geotiff", + "eo:bands": [ + 2 + ], + "title": "Band 3 (green)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B3.TIF" + }, + "B4": { + "type": "image/x.geotiff", + "eo:bands": [ + 3 + ], + "title": "Band 4 (red)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B4.TIF" + }, + "B5": { + "type": "image/x.geotiff", + "eo:bands": [ + 4 + ], + "title": "Band 5 (nir)", + "href": 
"https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B5.TIF" + }, + "B6": { + "type": "image/x.geotiff", + "eo:bands": [ + 5 + ], + "title": "Band 6 (swir16)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B6.TIF" + }, + "B7": { + "type": "image/x.geotiff", + "eo:bands": [ + 6 + ], + "title": "Band 7 (swir22)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B7.TIF" + }, + "B8": { + "type": "image/x.geotiff", + "eo:bands": [ + 7 + ], + "title": "Band 8 (pan)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B8.TIF" + }, + "B9": { + "type": "image/x.geotiff", + "eo:bands": [ + 8 + ], + "title": "Band 9 (cirrus)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B9.TIF" + }, + "B10": { + "type": "image/x.geotiff", + "eo:bands": [ + 9 + ], + "title": "Band 10 (lwir)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B10.TIF" + }, + "B11": { + "type": "image/x.geotiff", + "eo:bands": [ + 10 + ], + "title": "Band 11 (lwir)", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_B11.TIF" + }, + "ANG": { + "title": "Angle coefficients file", + "type": "text/plain", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_ANG.txt" + }, + "MTL": { + "title": "original metadata file", + "type": "text/plain", + "href": 
"https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_MTL.txt" + }, + "BQA": { + "title": "Band quality data", + "type": "image/x.geotiff", + "href": "https://landsat-pds.s3.amazonaws.com/c1/L8/008/030/LC08_L1TP_008030_20180925_20181009_01_T1/LC08_L1TP_008030_20180925_20181009_01_T1_BQA.TIF" + } + }, + "links": [ + { + "rel": "self", + "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1/item/LC80080302018268LGN00" + }, + { + "rel": "parent", + "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1" + }, + { + "rel": "collection", + "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1" + }, + { + "rel": "root", + "href": "https://sat-api-dev.developmentseed.org/stac" + } + ] +} \ No newline at end of file diff --git a/test/landsat-response.json b/test/landsat-response.json deleted file mode 100644 index 8fa0d0c..0000000 --- a/test/landsat-response.json +++ /dev/null @@ -1,148 +0,0 @@ -{ - "type": "FeatureCollection", - "properties": { - "found": 1122867, - "limit": 1, - "page": 1 - }, - "features": [ - { - "type": "Feature", - "properties": { - "id": "LC08_L1TP_183023_20160625_20170323_01_T1", - "c:id": "landsat-8-l1", - "datetime": "2016-06-25T09:00:16.825Z", - "eo:cloud_cover": 0, - "eo:sun_azimuth": 150.6196362, - "eo:sun_elevation": 57.7126878, - "landsat:path": 183, - "landsat:row": 23 - }, - "bbox": [ - 27.19477, - 52.48149, - 30.75449, - 53.69607 - ], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - 30.75449, - 53.69607 - ], - [ - 27.94907, - 54.177 - ], - [ - 27.19477, - 52.48149 - ], - [ - 29.89626, - 52.00994 - ], - [ - 30.75449, - 53.69607 - ] - ] - ] - }, - "assets": { - "ANG": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_ANG.txt" - }, - "B1": { - "href": 
"http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B1.TIF", - "eo:bands": [ - "B1" - ] - }, - "B2": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B2.TIF", - "eo:bands": [ - "B2" - ] - }, - "B3": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B3.TIF", - "eo:bands": [ - "B3" - ] - }, - "B4": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B4.TIF", - "eo:bands": [ - "B4" - ] - }, - "B5": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B5.TIF", - "eo:bands": [ - "B5" - ] - }, - "B6": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B6.TIF", - "eo:bands": [ - "B6" - ] - }, - "B7": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B7.TIF", - "eo:bands": [ - "B7" - ] - }, - "B8": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B8.TIF", - "eo:bands": [ - "B8" - ] - }, - "B9": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B9.TIF", - "eo:bands": [ - "B9" - ] - }, - "B10": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B10.TIF", - "eo:bands": [ - "B10" - ] - }, - "B11": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_B11.TIF", - "eo:bands": [ - "B11" - ] - }, - "BQA": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_BQA.TIF" - }, - "MTL": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_MTL.txt" - }, - "thumbnail": { - "href": 
"http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/LC81830232016177LGN00_thumb_large.jpg" - } - }, - "links": { - "index": { - "href": "http://landsat-pds.s3.amazonaws.com/L8/183/023/LC81830232016177LGN00/index.html" - }, - "self": { - "rel": "self", - "href": "https://sat-api-dev.developmentseed.org/search/stac?id=LC08_L1TP_183023_20160625_20170323_01_T1" - }, - "collection": { - "href": "https://sat-api-dev.developmentseed.org/collections/landsat-8-l1/definition" - } - } - } - ] -} \ No newline at end of file diff --git a/test/sentinel-response.json b/test/sentinel-response.json index bbd1e3d..566f0c8 100644 --- a/test/sentinel-response.json +++ b/test/sentinel-response.json @@ -1,158 +1,148 @@ { - "type": "FeatureCollection", + "type": "Feature", "properties": { - "found": 106013, - "limit": 1, - "page": 1 + "id": "L1C_T49QBA_A006865_20180630T032902", + "c:id": "sentinel-2-l1c", + "datetime": "2018-06-30T03:29:02.990Z", + "eo:platform": "Sentinel-2B", + "eo:cloud_cover": 0, + "eo:epsg": "32649", + "sentinel:product_id": "S2B_MSIL1C_20180630T031539_N0206_R118_T49QBA_20180630T074227" }, - "features": [ - { - "type": "Feature", - "properties": { - "id": "L1C_T49QBA_A006865_20180630T032902", - "c:id": "sentinel-2-l1c", - "datetime": "2018-06-30T03:29:02.990Z", - "eo:platform": "Sentinel-2B", - "eo:cloud_cover": 0, - "eo:epsg": "32649", - "sentinel:product_id": "S2B_MSIL1C_20180630T031539_N0206_R118_T49QBA_20180630T074227" - }, - "bbox": [ - 108.15089336343961, - 17.97943517464906, - 108.69545292812572, - 18.978165276750932 - ], - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - 108.15089336343961, - 18.97058416379064 - ], - [ - 108.69545292812572, - 18.978165276750932 - ], - [ - 108.57427698673041, - 18.486901200780036 - ], - [ - 108.45112222544195, - 17.983376887887612 - ], - [ - 108.16722825280434, - 17.97943517464906 - ], - [ - 108.15089336343961, - 18.97058416379064 - ] - ] + "bbox": [ + 108.15089336343961, + 17.97943517464906, 
+ 108.69545292812572, + 18.978165276750932 + ], + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 108.15089336343961, + 18.97058416379064 + ], + [ + 108.69545292812572, + 18.978165276750932 + ], + [ + 108.57427698673041, + 18.486901200780036 + ], + [ + 108.45112222544195, + 17.983376887887612 + ], + [ + 108.16722825280434, + 17.97943517464906 + ], + [ + 108.15089336343961, + 18.97058416379064 ] - }, - "assets": { - "B01": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B01.jp2", - "eo:bands": [ - "B01" - ] - }, - "B02": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B02.jp2", - "eo:bands": [ - "B02" - ] - }, - "B03": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B03.jp2", - "eo:bands": [ - "B03" - ] - }, - "B04": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B04.jp2", - "eo:bands": [ - "B04" - ] - }, - "B05": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B05.jp2", - "eo:bands": [ - "B05" - ] - }, - "B06": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B06.jp2", - "eo:bands": [ - "B06" - ] - }, - "B07": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B07.jp2", - "eo:bands": [ - "B07" - ] - }, - "B08": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B08.jp2", - "eo:bands": [ - "B08" - ] - }, - "B09": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B09.jp2", - "eo:bands": [ - "B09" - ] - }, - "B10": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B10.jp2", - "eo:bands": [ - "B10" - ] - }, - "B11": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B11.jp2", - "eo:bands": [ - "B11" - ] - }, - "B12": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B12.jp2", - "eo:bands": [ 
- "B12" - ] - }, - "B8A": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B8A.jp2", - "eo:bands": [ - "B8A" - ] - }, - "thumbnail": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/preview.jpg" - }, - "tki": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/TKI.jp2", - "description": "True Color Image" - }, - "metadata": { - "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/metadata.xml" - } - }, - "links": { - "self": { - "rel": "self", - "href": "https://sat-api-dev.developmentseed.org/search/stac?id=L1C_T49QBA_A006865_20180630T032902" - }, - "collection": { - "href": "https://sat-api-dev.developmentseed.org/collections/sentinel-2-l1c/definition" - } - } + ] + ] + }, + "assets": { + "B01": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B01.jp2", + "eo:bands": [ + "B01" + ] + }, + "B02": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B02.jp2", + "eo:bands": [ + "B02" + ] + }, + "B03": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B03.jp2", + "eo:bands": [ + "B03" + ] + }, + "B04": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B04.jp2", + "eo:bands": [ + "B04" + ] + }, + "B05": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B05.jp2", + "eo:bands": [ + "B05" + ] + }, + "B06": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B06.jp2", + "eo:bands": [ + "B06" + ] + }, + "B07": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B07.jp2", + "eo:bands": [ + "B07" + ] + }, + "B08": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B08.jp2", + "eo:bands": [ + "B08" + ] + }, + "B09": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B09.jp2", + "eo:bands": [ + "B09" + ] + }, 
+ "B10": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B10.jp2", + "eo:bands": [ + "B10" + ] + }, + "B11": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B11.jp2", + "eo:bands": [ + "B11" + ] + }, + "B12": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B12.jp2", + "eo:bands": [ + "B12" + ] + }, + "B8A": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/B8A.jp2", + "eo:bands": [ + "B8A" + ] + }, + "thumbnail": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/preview.jpg" + }, + "tki": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/TKI.jp2", + "description": "True Color Image" + }, + "metadata": { + "href": "https://sentinel-s2-l1c.s3.amazonaws.com/tiles/49/Q/BA/2018/6/30/0/metadata.xml" + } + }, + "links": { + "self": { + "rel": "self", + "href": "https://sat-api-dev.developmentseed.org/search/stac?id=L1C_T49QBA_A006865_20180630T032902" + }, + "collection": { + "href": "https://sat-api-dev.developmentseed.org/collections/sentinel-2-l1c/definition" } - ] + } } \ No newline at end of file diff --git a/test/test_main.py b/test/test_main.py index 37ba32d..0b3baf5 100644 --- a/test/test_main.py +++ b/test/test_main.py @@ -19,14 +19,14 @@ class Test(unittest.TestCase): def test_main(self): """ Run main function """ - scenes = main.main(datetime='2017-01-01', **{'c:id': 'Landsat-8-l1'}) - self.assertEqual(len(scenes.scenes), self.num_scenes) + items = main.main(datetime='2019-01-01', **{'collection': 'Landsat-8-l1'}) + self.assertEqual(len(items), self.num_scenes) def test_main_options(self): """ Test main program with output options """ fname = os.path.join(testpath, 'test_main-save.json') - scenes = main.main(datetime='2017-01-01', save=fname, printcal=True, print_md=[], **{'eo:platform': 'landsat-8'}) - self.assertEqual(len(scenes.scenes), self.num_scenes) + items = 
main.main(datetime='2019-01-01', save=fname, printcal=True, print_md=[], **{'eo:platform': 'landsat-8'}) + self.assertEqual(len(items), self.num_scenes) self.assertTrue(os.path.exists(fname)) os.remove(fname) self.assertFalse(os.path.exists(fname)) @@ -40,16 +40,17 @@ def _test_main_review_error(self): def test_cli(self): """ Run CLI program """ with patch.object(sys, 'argv', 'sat-search search --datetime 2017-01-01 -p eo:platform=landsat-8'.split(' ')): - scenes = main.cli() + items = main.cli() + assert(len(items) == 111) def test_main_download(self): """ Test main program with downloading """ with open(os.path.join(testpath, 'aoi1.geojson')) as f: aoi = json.dumps(json.load(f)) config.DATADIR = os.path.join(testpath, "${eo:platform}") - scenes = main.main(datetime='2017-01-05/2017-01-21', intersects=aoi, download=['thumbnail', 'MTL'], **{'eo:platform': 'landsat-8'}) - for scene in scenes.scenes: - self.assertTrue(os.path.exists(scene.filenames['thumbnail'])) - self.assertTrue(os.path.exists(scene.filenames['MTL'])) - shutil.rmtree(os.path.join(testpath, scene['eo:platform'])) + items = main.main(datetime='2017-01-05/2017-01-21', intersects=aoi, download=['thumbnail', 'MTL'], **{'eo:platform': 'landsat-8'}) + for item in items: + self.assertTrue(os.path.exists(item.filenames['thumbnail'])) + self.assertTrue(os.path.exists(item.filenames['MTL'])) + #shutil.rmtree(os.path.join(testpath, item['eo:platform'])) config.DATADIR = testpath diff --git a/test/test_search.py b/test/test_search.py index 3fa7382..6b1d753 100644 --- a/test/test_search.py +++ b/test/test_search.py @@ -2,113 +2,57 @@ import glob import json import unittest -from satsearch.scene import Scene -from satsearch.search import SatSearchError, Query, Search +from satstac import Item +from satsearch.search import SatSearchError, Search -class TestQuery(unittest.TestCase): - path = os.path.dirname(__file__) - results = {} - num_landsat = 558 - num_sentinel = 3854 - - @classmethod - def setUpClass(cls): - 
fnames = glob.glob(os.path.join(cls.path, '*-response.json')) - for fname in fnames: - with open(fname) as f: - cls.results[os.path.basename(fname)[:-14]] = json.load(f) - - def get_queries(self): - """ Initialize and return search object """ - return {s: Query(id=self.results[s]['features'][0]['properties']['id']) for s in self.results} - - def test_search_init(self): - """ Initialize a search object """ - for s, search in self.get_queries().items(): - self.assertEqual(search.kwargs['id'], self.results[s]['features'][0]['properties']['id']) - - def test_hits(self): - """ Check total number of results """ - search = Query(datetime='2017-01-01') - hits = search.found() - self.assertEqual(hits, 4412) - #self.assertEqual(hits, 4267) - - def test_empty_search(self): - """ Perform search for 0 results """ - search = Query(id='nosuchscene') - self.assertEqual(search.found(), 0) - - def test_bad_search(self): - """ Run a bad query """ - q = Query(limit='a') - with self.assertRaises(SatSearchError): - q.found() - - def test_simple_search(self): - """ Perform simple query """ - for search in self.get_queries().values(): - self.assertEqual(search.found(), 1) - scenes = search.items() - assert(isinstance(scenes, list)) - assert(isinstance(scenes[0], dict)) - - def test_big_landsat_search(self): - """ Search for a bunch of Landsat data """ - search = Query(**{'datetime': '2017-01-01', 'eo:platform': 'landsat-8'}) - self.assertEqual(search.found(), self.num_landsat) - - scenes = search.items() - - self.assertEqual(len(scenes), self.num_landsat) - # verify this is 564 unique scenes (it is not) - #ids = set([s.scene_id for s in scenes]) - #self.assertEqual(len(ids), self.num_landsat) - - def test_big_sentinel_search(self): - """ Search for a bunch of Sentinel data """ - search = Query(**{'datetime': '2017-01-01', 'eo:platform': 'sentinel-2a'}) - self.assertEqual(search.found(), self.num_sentinel) - scenes = search.items() - self.assertEqual(len(scenes), self.num_sentinel) - - 
-class TestSearch(unittest.TestCase): +class Test(unittest.TestCase): path = os.path.dirname(__file__) - results = {} + results = [] @classmethod def setUpClass(cls): - fnames = glob.glob(os.path.join(cls.path, '*-response.json')) + fnames = glob.glob(os.path.join(cls.path, '*-item*.json')) for fname in fnames: with open(fname) as f: - cls.results[os.path.basename(fname)[:-14]] = json.load(f) + cls.results.append(json.load(f)) - def get_search(self): + def get_searches(self): """ Initialize and return search object """ - sids = [self.results[s]['features'][0]['properties']['id'] for s in self.results] - return Search(id=sids) + return [Search(datetime=r['properties']['datetime']) for r in self.results] def test_search_init(self): """ Initialize a search object """ - search = self.get_search() - sids = [self.results[s]['features'][0]['properties']['id'] for s in self.results] - for s in search.scenes(): + search = self.get_searches()[0] + dts = [r['properties']['datetime'] for r in self.results] + + assert(len(search.kwargs) == 1) + assert('time' in search.kwargs) + for kw in search.kwargs: + self.assertTrue(search.kwargs[kw] in dts) + + def test_search_for_items_by_date(self): + """ Search for specific item """ + search = self.get_searches()[0] + sids = [r['id'] for r in self.results] + items = search.items() + assert(len(items) == 1) + for s in items: self.assertTrue(s.id in sids) def test_empty_search(self): """ Perform search for 0 results """ - search = Search(id=['nosuchscene']) + search = Search(datetime='2001-01-01') self.assertEqual(search.found(), 0) - def test_search(self): + def test_geo_search(self): """ Perform simple query """ with open(os.path.join(self.path, 'aoi1.geojson')) as f: aoi = json.dumps(json.load(f)) - search = Search(datetime='2017-01-05', intersects=aoi) - self.assertEqual(search.found(), 1) - scenes = search.scenes() - self.assertTrue(isinstance(scenes[0], Scene)) + search = Search(datetime='2018-09-25', intersects=aoi) + 
assert(search.found() == 1) + items = search.items() + assert(len(items) == 1) + assert(isinstance(items[0], Item)) From 853916ce2bc50bb92d6ea795b0cc3ed85490e98f Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 19:01:07 -0500 Subject: [PATCH 06/13] update parser to support logical operators in queries --- satsearch/parser.py | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/satsearch/parser.py b/satsearch/parser.py index eeae699..0ee53ef 100644 --- a/satsearch/parser.py +++ b/satsearch/parser.py @@ -3,7 +3,10 @@ import sys import logging import argparse + import satsearch.config as config + +from satstac.utils import dict_merge from .version import __version__ @@ -58,6 +61,24 @@ def parse_args(self, *args, **kwargs): config.DATADIR = args.pop('datadir') if 'filename' in args: config.FILENAME = args.pop('filename') + + if 'property' in args: + queries = {} + for p in args['property']: + symbols = { + '=': 'eq', + '>': 'gt', + '<': 'lt', + '>=': 'gte', + '<=': 'lte' + } + for s in symbols: + parts = p.split(s) + if len(parts) == 2: + queries = dict_merge(queries, {parts[0]: {symbols[s]: parts[1]}}) + break + args['query'] = queries + del args['property'] return args @classmethod @@ -77,7 +98,7 @@ def newbie(cls, *args, **kwargs): parser.search_group.add_argument('--sort', help='Sort by fields') #group.add_argument('--id', help='One or more scene IDs', nargs='*', default=None) #group.add_argument('--contains', help='lon,lat points') - parser.search_group.add_argument('-p', '--property', nargs='*', help='Properties of form KEY=VALUE (<, >, <=, >=, = supported)', action=SatUtilsParser.KeyValuePair) + parser.search_group.add_argument('-p', '--property', nargs='*', help='Properties of form KEY=VALUE (<, >, <=, >=, = supported)') parser.search_group.add_argument('--url', help='URL of the API', default=config.API_URL) parents.append(parser.download_parser) @@ -90,4 +111,4 @@ class KeyValuePair(argparse.Action): 
def __call__(self, parser, namespace, values, option_string=None): for val in values: n, v = val.split('=') - setattr(namespace, n, v) + setattr(namespace, n, {'eq': v}) From e9a2c7c6b7cb20898bf8fd920afa58517bc86f5d Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 19:01:41 -0500 Subject: [PATCH 07/13] fix paging term --- satsearch/search.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satsearch/search.py b/satsearch/search.py index a6337f0..9984a52 100644 --- a/satsearch/search.py +++ b/satsearch/search.py @@ -36,7 +36,7 @@ def found(self): """ Small query to determine total number of hits """ kwargs = { 'page': 1, - 'page_size': 0 + 'limit': 0 } kwargs.update(self.kwargs) results = self.query(**kwargs) @@ -72,7 +72,7 @@ def items(self, limit=1000): found = self.found() kwargs = { 'page': 1, - 'page_size': min(limit, found) + 'limit': min(limit, found) } kwargs.update(self.kwargs) while len(items) < found: From e429f2d3fdf4f3fbb7c83cb59534c9d469bbf78d Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 19:02:09 -0500 Subject: [PATCH 08/13] return results from cli (primarily for testing --- satsearch/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satsearch/main.py b/satsearch/main.py index cd7fbd1..c5aea97 100644 --- a/satsearch/main.py +++ b/satsearch/main.py @@ -51,7 +51,7 @@ def cli(): cmd = args.pop('command', None) if cmd is not None: - main(**args) + return main(**args) if __name__ == "__main__": From 0bdefa713fa00cb4f3a4d6cac3b2ac1e482a0689 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 19:02:17 -0500 Subject: [PATCH 09/13] bump version --- satsearch/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satsearch/version.py b/satsearch/version.py index 408bb3f..e9a9df7 100644 --- a/satsearch/version.py +++ b/satsearch/version.py @@ -1 +1 @@ -__version__ = '0.1.0b1' +__version__ = '0.2.0b1' From 
c799eaec2687460fc23acbfa56227f91d8055b3b Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 22:47:38 -0500 Subject: [PATCH 10/13] only allow single collection in search --- satsearch/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satsearch/parser.py b/satsearch/parser.py index 0ee53ef..b9d0d58 100644 --- a/satsearch/parser.py +++ b/satsearch/parser.py @@ -91,7 +91,7 @@ def newbie(cls, *args, **kwargs): sparser = subparser.add_parser('search', help='Perform new search of items', parents=parents) """ Adds search arguments to a parser """ parser.search_group = sparser.add_argument_group('search options') - parser.search_group.add_argument('-c', '--collection', help='Name(s) of collection', nargs='*', default=None) + parser.search_group.add_argument('-c', '--collection', help='Name of collection', default=None) parser.search_group.add_argument('--bbox', help='Bounding box (min lon, min lat, max lon, max lat)', nargs=4) parser.search_group.add_argument('--intersects', help='GeoJSON Feature (file or string)') parser.search_group.add_argument('--datetime', help='Single date/time or begin and end date/time (e.g., 2017-01-01/2017-02-15)') From 170fb1883cebf9f9e0dada0190fef13eb7fcc959 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 22:47:58 -0500 Subject: [PATCH 11/13] updated requirements --- requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index b633764..ae0d9d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1 @@ -requests~=2.19 -sat-stac~=0.1.0rc3 +sat-stac~=0.1.0rc5 From 1bfa0c916823a76d8557d0a1a68330ee45fcc3d7 Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 22:48:04 -0500 Subject: [PATCH 12/13] updated README --- README.md | 81 ++++++++++++++++++++++++++----------------------------- 1 file changed, 38 insertions(+), 43 deletions(-) diff --git a/README.md b/README.md index d90c644..f5ddcb4 100644 --- 
a/README.md +++ b/README.md @@ -18,17 +18,13 @@ It is recommended to use [pyenv](https://github.com/pyenv/pyenv) and [virtualenv # install the latest release version $ pip install sat-search -Sat-search is a very lightweight application, with the only dependency being requests. +Sat-search is a very lightweight application, with the only dependency being [sat-stac](https://github.com/sat-utils/sat-stac), which in turn has two dependencies: `requests` and `python-dateutil`. ## Using sat-search -Sat-search has several features: +With sat-search you can search a STAC compliant API with full querying support (if supported by the API). Search results are saved as a GeoJSON FeatureCollection and can be loaded later. Assets can be easily downloaded by the key, or color if provided. -- search catalog -- STAC compliant interface -- save results of a search -- load results of a search -- download assets (e.g. thumbnails, data files) of the results +Sat-search comes with a Command Line Interface (CLI), but is also a Python library that can be incorporated into other applications. This README only covers use of the CLI. #### The CLI The sat-search CLI has an extensive online help that can be printed with the `-h` switch. @@ -36,7 +32,7 @@ The sat-search CLI has an extensive online help that can be printed with the `-h $ sat-search -h usage: sat-search [-h] {search,load} ... -sat-search (v1.0.0b8) +sat-search (v0.2.0b1) positional arguments: {search,load} @@ -52,13 +48,25 @@ As can be seen there are two subcommands, each of which has it's own online help #### Searching ``` -$ sat-search search -h +usage: sat-search [-h] {search,load} ... 
+ +sat-search (v0.2.0b1) + +positional arguments: + {search,load} + search Perform new search of items + load Load items from previous search + +optional arguments: + -h, --help show this help message and exit +(satutils) mhanson@clavius:~/devseed/sat-utils/sat-search/scratch$ sat-search search -h usage: sat-search search [-h] [--version] [-v VERBOSITY] [--print_md [PRINT_MD [PRINT_MD ...]]] [--print_cal] - [--save SAVE] [--append] [-c [C:ID [C:ID ...]]] + [--save SAVE] [-c COLLECTION] + [--bbox BBOX BBOX BBOX BBOX] [--intersects INTERSECTS] [--datetime DATETIME] - [--eo:cloud_cover EO:CLOUD_COVER] - [-p [PARAM [PARAM ...]]] [--url URL] + [--sort SORT] [-p [PROPERTY [PROPERTY ...]]] + [--url URL] optional arguments: -h, --help show this help message and exit @@ -73,33 +81,30 @@ output options: None) --print_cal Print calendar showing dates (default: False) --save SAVE Save results as GeoJSON (default: None) - --append Append scenes to GeoJSON file (specified by save) - (default: False) search options: - -c [C:ID [C:ID ...]], --c:id [C:ID [C:ID ...]] - Name(s) of collection (default: None) + -c COLLECTION, --collection COLLECTION + Name of collection (default: None) + --bbox BBOX BBOX BBOX BBOX + Bounding box (min lon, min lat, max lon, max lat) + (default: None) --intersects INTERSECTS GeoJSON Feature (file or string) (default: None) --datetime DATETIME Single date/time or begin and end date/time (e.g., - 2017-01-01/2017-02-15 (default: None) - --eo:cloud_cover EO:CLOUD_COVER - Range of acceptable cloud cover (e.g., 0/20) (default: - None) - -p [PARAM [PARAM ...]], --param [PARAM [PARAM ...]] - Additional parameters of form KEY=VALUE (default: - None) - --url URL URL of the API (default: https://sat- - api.developmentseed.org) + 2017-01-01/2017-02-15) (default: None) + --sort SORT Sort by fields (default: None) + -p [PROPERTY [PROPERTY ...]], --property [PROPERTY [PROPERTY ...]] + Properties of form KEY=VALUE (<, >, <=, >=, = + supported) (default: None) + --url 
URL URL of the API (default: https://sat-api- + dev.developmentseed.org) ``` **Search options** -- **c:id** - A list of names of collections (i.e. sensors). The collections supported depend on the API, and for sat-api can be seen at the [collections endpoint](https://sat-api.developmentseed.org/collections). If one or more collections are not defined, all collections are searched. - **intersects** - Provide a GeoJSON Feature string or the name of a GeoJSON file containing a single Feature that is a Polygon of an AOI to be searched. - **datetime** - Provide a single partial or full datetime (e.g., 2017, 2017-10, 2017-10-11, 2017-10-11T12:00), or two seperated by a slash that defines a range. e.g., 2017-01-01/2017-06-30 will search for scenes acquired in the first 6 months of 2017. -- **eo:cloud_cover** - Provide a single percent cloud cover to match (e.g., 0) or two numbers separated by a slash indicating the range of acceptable cloud cover (e.g., 0/20 searches for scenes with 0% - 20% cloud cover). -- **param** - Allows searching for any other scene properties by providing the pair as KEY=VALUE (e.g. `-p landsat:row=42`) +- **property** - Allows searching for any other scene properties by providing the pair as KEY=VALUE (e.g. `-p "landsat:row=42"`, `-p "eo:cloud_cover<10"`) - **url** - The URL endpoint of a STAC compliant API, this can also be set with the environment variable SATUTILS_API_URL **Output options** @@ -108,7 +113,6 @@ These options control what to do with the search results, multiple switches can - **print_md** - Prints a list of specific metadata fields for all the scenes. If given without any arguments it will print a list of the dates and scene IDs. Otherwise it will print a list of fields that are provided. 
(e.g., --print_md date eo:cloud_cover eo:platform will print a list of date, cloud cover, and the satellite platform such as WORLDVIEW03) - **print_cal** - Prints a text calendar with specific days colored depending on the platform of the scene (e.g. landsat-8), along with a legend. - **save** - Saves results as a FeatureCollection. The FeatureCollection 'properties' contains all of the arguments used in the search and the 'features' contain all of the individual scenes, with individual scene metadata merged with collection level metadata (metadata fields that are the same across all one collection, such as eo:platform) -- **append** - The save option will always create a new file, even overwriting an existing one. If *append* is provided then the scenes will be appended to the FeatureCollection given by the save filename. #### Loading Scenes that were previously saved with `sat-search search --save ...` can be loaded with the `load` subcommand. @@ -117,13 +121,12 @@ Scenes that were previously saved with `sat-search search --save ...` can be loa $ sat-search load -h usage: sat-search load [-h] [--version] [-v VERBOSITY] [--print_md [PRINT_MD [PRINT_MD ...]]] [--print_cal] - [--save SAVE] [--append] [--datadir DATADIR] - [--filename FILENAME] + [--save SAVE] [--datadir DATADIR] [--filename FILENAME] [--download [DOWNLOAD [DOWNLOAD ...]]] - scenes + items positional arguments: - scenes GeoJSON file of scenes + items GeoJSON file of Items optional arguments: -h, --help show this help message and exit @@ -138,8 +141,6 @@ output options: None) --print_cal Print calendar showing dates (default: False) --save SAVE Save results as GeoJSON (default: None) - --append Append scenes to GeoJSON file (specified by save) - (default: False) download options: --datadir DATADIR Directory pattern to save assets (default: @@ -159,10 +160,10 @@ When loading results from a file, the user now has the option to download assets These control the downloading of assets. 
Both datadir and filename can include metadata patterns that will be substituted per scene.
- **datadir** - This specifies where downloaded assets will be saved to. It can also be specified by setting the environment variable SATUTILS_DATADIR.
- **filename** - The name of the file to save. It can also be set by setting the environment variable SATUTILS_FILENAME
-- **download** - Provide a list of keys to download these assets. For DG currently only **thumbnail** and **full** are supported. More information on downloading data is provided below.
+- **download** - Provide a list of keys to download these assets. More information on downloading data is provided below.

**Metadata patterns**
-Metadata patterns can be within **datadir** and **filename** in order to have custom path and filenames based on the scene metadata. For instance specifying datadir as "./${eo:platform}/${date}" will save assets for each scene under directories of the platform and the date. So a WorldView-3 scene from June 20, 2018 will have it's assets saved in a directory './WORLDVIEW03/2017-06-20'. For filenames these work exactly the same way, except the appropriate extension will be used at the end of the filename, depending on the asset.
+Metadata patterns can be within **datadir** and **filename** in order to have custom path and filenames based on the scene metadata. For instance specifying datadir as "./${eo:platform}/${date}" will save assets for each scene under directories of the platform and the date. So a landsat-8 scene from June 20, 2018 will have its assets saved in a directory './landsat-8/2017-06-20'. For filenames these work exactly the same way, except the appropriate extension will be used at the end of the filename, depending on the asset.
**Assets** The thumbnail for each scene in a *scenes.json* file can be downloaded with @@ -171,11 +172,5 @@ The thumbnail for each scene in a *scenes.json* file can be downloaded with ``` The thumbnails will be saved using a directory and filename according to the `datadir` and `filename` options, and will also have a '_thumbnail` suffix. When thumbnails are downloaded an ESRI Worldfile (.wld) file is created, which is a sidecar file that describes the coordinates and resolution of the images. This enables the thumbnails to be viewed in a GIS program like QGIS in their proper geographical location. The world file does not set the spatial reference system used (lat/lon, or WGS-84, or EPSG:4326), so when opened in QGIS it will need to be selected (EPSG:4326). -## Library - -The sat-search library is made up of several Python classes. The *Scene* class represents a single set of images for an indentical date (or daterange) and footprint. The *Scenes* class is a collection of *Scene* objects that makes it easier to iterate through them and perform common tasks over all the scenes, such as downloading data. - -The *Query* class is a single set of arguments for searching scenes, functions for querying the API with those arguments (and handling of multiple pages if needed) as well storing the results. The higher level *Search* class which is more often used, can deal with multiple *Query* objects, such as individual Scene ids or disparate date ranges that must be issued to the API with different arguments. - ## About sat-search was created by [Development Seed]() and is part of a collection of tools called [sat-utils](https://github.com/sat-utils). 
From cfe61329ec3e4eb73e3306a7dff6b702e3e6501d Mon Sep 17 00:00:00 2001 From: Matthew Hanson Date: Mon, 7 Jan 2019 22:56:56 -0500 Subject: [PATCH 13/13] update tests --- test/test_main.py | 8 ++++---- test/test_parser.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/test/test_main.py b/test/test_main.py index 0b3baf5..0e61d8d 100644 --- a/test/test_main.py +++ b/test/test_main.py @@ -15,17 +15,17 @@ class Test(unittest.TestCase): """ Test main module """ - num_scenes = 558 + num_scenes = 38 def test_main(self): """ Run main function """ - items = main.main(datetime='2019-01-01', **{'collection': 'Landsat-8-l1'}) + items = main.main(datetime='2019-01-02', **{'collection': 'Landsat-8-l1'}) self.assertEqual(len(items), self.num_scenes) def test_main_options(self): """ Test main program with output options """ fname = os.path.join(testpath, 'test_main-save.json') - items = main.main(datetime='2019-01-01', save=fname, printcal=True, print_md=[], **{'eo:platform': 'landsat-8'}) + items = main.main(datetime='2019-01-02', save=fname, printcal=True, print_md=[], **{'eo:platform': 'landsat-8'}) self.assertEqual(len(items), self.num_scenes) self.assertTrue(os.path.exists(fname)) os.remove(fname) @@ -41,7 +41,7 @@ def test_cli(self): """ Run CLI program """ with patch.object(sys, 'argv', 'sat-search search --datetime 2017-01-01 -p eo:platform=landsat-8'.split(' ')): items = main.cli() - assert(len(items) == 111) + assert(len(items) == 0) def test_main_download(self): """ Test main program with downloading """ diff --git a/test/test_parser.py b/test/test_parser.py index 7fe6e15..2f8653d 100644 --- a/test/test_parser.py +++ b/test/test_parser.py @@ -36,9 +36,9 @@ def test_parse_args(self): args = self.args.split(' ') args = parser.parse_args(args) - self.assertEqual(len(args), 5) + self.assertEqual(len(args), 4) self.assertEqual(args['datetime'], '2017-01-01') - assert(args['eo:cloud_cover'] == '0/20') + #assert(args['eo:cloud_cover'] == '0/20') 
#self.assertEqual(args['cloud_from'], 0) #self.assertEqual(args['cloud_to'], 20) #self.assertEqual(args['satellite_name'], 'Landsat-8')