first commit

This commit is contained in:
weixin_46229132 2025-07-03 20:29:02 +08:00
parent c93f9a5624
commit 5bc6302955
85 changed files with 14067 additions and 1 deletions

3
.gitignore vendored
View File

@ -1,4 +1,7 @@
# ---> Python
# Custom dir
/test_geosat1_imgs
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

View File

@ -1,3 +1,20 @@
# dz1-spatial-query
地质一号空间查询服务模块。
## 服务搭建
启动数据库搭建STAC服务。
```
cd stac-fastapi-pgstac
docker-compose up -d --build
```
运行ingest_stac_items.py脚本插入测试数据。
启动fastapi服务。
```
uvicorn stac_mosaic_api:app --host 0.0.0.0 --port 8000 --reload
```

15
del_items.py Normal file
View File

@ -0,0 +1,15 @@
import requests
STAC_API_URL = "http://localhost:8082"
COLLECTION_ID = "geosat1"
def delete_item(item_id: str) -> None:
    """Delete a single item from the configured STAC collection.

    Args:
        item_id: Identifier of the STAC item to remove.
    """
    url = f"{STAC_API_URL}/collections/{COLLECTION_ID}/items/{item_id}"
    # timeout prevents the script from hanging forever if the API is down
    r = requests.delete(url, timeout=30)
    if r.status_code in [200, 204]:
        print(f"✅ 已成功删除: {item_id}")
    else:
        print(f"❌ 删除失败: {item_id} | {r.status_code} - {r.text}")


delete_item("TCI_right")

100
ingest_stac_items.py Normal file
View File

@ -0,0 +1,100 @@
import os
import rasterio
import requests
from shapely.geometry import box, mapping
from datetime import datetime, timezone
from pystac import Item, Asset, MediaType, Collection, Extent, SpatialExtent, TemporalExtent
from pyproj import Transformer
# === 配置部分 ===
STAC_API_URL = "http://localhost:8082"  # base URL of the STAC FastAPI service
IMG_DIR = "test_geosat1_imgs"  # directory scanned for GeoTIFFs to ingest
COLLECTION_ID = "geosat1"  # target STAC collection id
def create_stac_item_from_tif(tif_path: str) -> Item:
    """Build a STAC Item describing a GeoTIFF file.

    Reads the raster's bounds/CRS, reprojects them to WGS84 for the
    item bbox/geometry, and attaches the file as a COG asset.

    Args:
        tif_path: Path to the GeoTIFF file.

    Returns:
        A pystac ``Item`` whose id is the file's base name.
    """
    with rasterio.open(tif_path) as src:
        bounds = src.bounds
        src_crs = src.crs

    dst_crs = "EPSG:4326"  # WGS84

    # 坐标转换器
    transformer = Transformer.from_crs(src_crs, dst_crs, always_xy=True)
    # Reproject all four corners: for rotated/oblique projections the
    # min/max of just two opposite corners can understate the true extent.
    corners = [
        (bounds.left, bounds.bottom),
        (bounds.left, bounds.top),
        (bounds.right, bounds.bottom),
        (bounds.right, bounds.top),
    ]
    xs, ys = zip(*(transformer.transform(x, y) for x, y in corners))
    minx, miny, maxx, maxy = min(xs), min(ys), max(xs), max(ys)

    bbox = [minx, miny, maxx, maxy]
    geom = mapping(box(minx, miny, maxx, maxy))

    # Use the file's modification time as the acquisition datetime (UTC).
    # NOTE(review): this is a proxy — real acquisition time should come
    # from image metadata if available.
    dt = datetime.fromtimestamp(os.path.getmtime(tif_path), timezone.utc)
    item_id = os.path.splitext(os.path.basename(tif_path))[0]

    item = Item(
        id=item_id,
        geometry=geom,
        bbox=bbox,
        datetime=dt,
        properties={},
        stac_extensions=[]
    )
    item.add_asset(
        "image",
        Asset(
            href=os.path.abspath(tif_path),  # 注意路径服务器可访问性
            media_type=MediaType.COG,
            roles=["data"],
            title="GeoTIFF image"
        )
    )
    item.collection_id = COLLECTION_ID
    return item
def create_collection_if_needed() -> None:
    """Ensure the target STAC collection exists, creating it if absent.

    Performs a GET on the collection endpoint; a 404 triggers creation
    via POST. Any other non-200 status is reported instead of being
    silently treated as "already exists".
    """
    url = f"{STAC_API_URL}/collections/{COLLECTION_ID}"
    try:
        r = requests.get(url, timeout=30)
        if r.status_code == 404:
            print(f"Collection `{COLLECTION_ID}` 不存在,正在创建...")
            coll = Collection(
                id=COLLECTION_ID,
                description="Test collection for GeoSat-1 imagery",
                extent=Extent(
                    SpatialExtent([[-180, -90, 180, 90]]),
                    TemporalExtent([[None, None]])
                ),
                license="proprietary"
            )
            r = requests.post(
                f"{STAC_API_URL}/collections", json=coll.to_dict(),
                timeout=30)
            r.raise_for_status()
            print(f"创建成功: {r.status_code}")
        elif r.status_code == 200:
            print(f"Collection `{COLLECTION_ID}` 已存在。")
        else:
            # e.g. 500 from the API — do not mistake it for "exists"
            print(f"检查 Collection 失败: {r.status_code} - {r.text}")
    except requests.RequestException as e:
        print(f"请求失败: {e}")
def register_item(item: Item) -> None:
    """POST a STAC item to the collection's items endpoint.

    Args:
        item: The pystac ``Item`` to register.
    """
    url = f"{STAC_API_URL}/collections/{COLLECTION_ID}/items"
    try:
        # timeout keeps a dead API from blocking the ingest loop forever
        r = requests.post(url, json=item.to_dict(), timeout=30)
        r.raise_for_status()
        print(f"✅ 已成功导入: {item.id}")
    except requests.RequestException as e:
        print(f"❌ 失败: {item.id} | 错误信息: {e}")
def main() -> None:
    """Ingest every GeoTIFF in IMG_DIR into the STAC collection."""
    create_collection_if_needed()
    for filename in os.listdir(IMG_DIR):
        # accept both common GeoTIFF extensions
        if filename.lower().endswith((".tif", ".tiff")):
            tif_path = os.path.join(IMG_DIR, filename)
            item = create_stac_item_from_tif(tif_path)
            register_item(item)


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,15 @@
**/__pycache__
*.pyc
*.pyo
*.pyd
.coverage
.coverage.*
.vscode
coverage.xml
*.log
.git
.envrc
*egg-info
venv
env

131
stac-fastapi-pgstac/.gitignore vendored Normal file
View File

@ -0,0 +1,131 @@
.mypy_cache
PIP_EXTRA_INDEX_URL
!tests/resources/*.jpg
**.pyc
**.log
*.mat
target/*
src/local/*
src/local-test/*
*.iml
.idea/
model/
.DS_Store
#config.yaml
**.save
*.jpg
**.save.*
**.bak
.DS_Store
.mvn/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# user specific overrides
tests/tests.ini
tests/logging.ini
# Distribution / packaging
.Python
env/
venv/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# dotenv
**/.env
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# skaffold temporary build/deploy files
build.out
# Direnv
.envrc
# Virtualenv
venv
# IDE
.vscode

View File

@ -0,0 +1,24 @@
repos:
- repo: https://github.com/PyCQA/isort
rev: 5.13.2
hooks:
- id: isort
language_version: python
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.5
hooks:
- id: ruff
args: ["--fix"]
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.9.0
hooks:
- id: mypy
language_version: python
exclude: tests/.*
additional_dependencies:
- types-requests
- types-attrs
- pydantic~=2.0

View File

@ -0,0 +1,491 @@
# Changelog
## [Unreleased]
### Changed
- rename `POSTGRES_HOST_READER` to `PGHOST` in config **breaking change**
- rename `POSTGRES_USER` to `PGUSER` in config **breaking change**
- rename `POSTGRES_PASS` to `PGPASSWORD` in config **breaking change**
- rename `POSTGRES_PORT` to `PGPORT` in config **breaking change**
- rename `POSTGRES_DBNAME` to `PGDATABASE` in config **breaking change**
```python
from stac_fastapi.pgstac.config import PostgresSettings
# before
settings = PostgresSettings(
postgres_user="user",
postgres_pass="password",
postgres_host_reader="0.0.0.0",
postgres_host_writer="0.0.0.0",
postgres_port=1111,
postgres_dbname="pgstac",
)
# now
settings = PostgresSettings(
pguser="user",
pgpassword="password",
pghost="0.0.0.0",
pgport=1111,
pgdatabase="pgstac",
)
```
- rename `reader_connection_string` to `connection_string` in `PostgresSettings` class **breaking change**
- add `ENABLE_TRANSACTIONS_EXTENSIONS` env variable to enable `transaction` extensions
- disable transaction and bulk_transactions extensions by default **breaking change**
- update `stac-fastapi-*` version requirements to `>=5.2,<6.0`
- add pgstac health-check in `/_mgmt/health`
- switch from using pygeofilter to cql2
### Added
- add `write_connection_pool` option in `stac_fastapi.pgstac.db.connect_to_db` function
- add `write_postgres_settings` option in `stac_fastapi.pgstac.db.connect_to_db` function to set specific settings for the `writer` DB connection pool
- add specific error message when trying to create `Item` with null geometry (not supported by PgSTAC)
### removed
- `stac_fastapi.pgstac.db.DB` class
- `POSTGRES_HOST_WRITER` in config
- `writer_connection_string` in `PostgresSettings` class
- `testing_connection_string` in `PostgresSettings` class
## [5.0.2] - 2025-04-07
### Fixed
- fix root-path handling when setting in uvicorn command
- reduce `db_min_conn_size` to `1` to avoid creating too many db connections when starting the application
## [5.0.1] - 2025-03-27
### Fixed
- fix media type for `self` links in `/search` responses
## [5.0.0] - 2025-03-10
### Changed
- move Postgres settings into separate `PostgresSettings` class and defer loading until connecting to database ([#209](https://github.com/stac-utils/stac-fastapi-pgstac/pull/209))
- update `stac-fastapi-*` version requirements to `>=5.1,<6.0`
## [4.0.3] - 2025-03-10
### Fixed
- fix links when app is mounted behind proxy or has router-prefix ([#195](https://github.com/stac-utils/stac-fastapi-pgstac/pull/195))
## [4.0.2] - 2025-02-18
### Fixed
- use Relation's `value` for `POST` prev/next links
- return `JSONResponse` directly from `/items` endpoint when `fields` parameter is pass and avoid Pydantic validation
### Changed
- avoid re-use of internal `CoreCrudClient.post_search` in `CoreCrudClient.get_search` method to allow customization
## [4.0.1] - 2025-02-06
### Added
- add `numberReturned` and `numberMatched` in `/collections` response
## [4.0.0] - 2025-02-03
### Changed
- remove `python 3.8` support
- update `stac-fastapi-*` requirement to `~=5.0`
- keep `/search` and `/collections` extensions separate ([#158](https://github.com/stac-utils/stac-fastapi-pgstac/pull/158))
- update `pypgstac` requirement to `>=0.8,<0.10`
- set `pypgstac==0.9.*` for test requirements
- renamed `post_request_model` attribute to `pgstac_search_model` in `CoreCrudClient` class
- changed `datetime` input type to `string` in GET endpoint methods
- renamed `filter` to `filter_expr` input attributes in GET endpoint methods
- delete `utils.format_datetime_range` function
### Fixed
- handle `next` and `prev` tokens now returned as links from pgstac>=0.9.0 (author @zstatmanweil, <https://github.com/stac-utils/stac-fastapi-pgstac/pull/140>)
- fix Docker compose file, so example data can be loaded into database (author @zstatmanweil, <https://github.com/stac-utils/stac-fastapi-pgstac/pull/142>)
- fix `filter` extension implementation in `CoreCrudClient`
### Added
- add [collection search extension](https://github.com/stac-api-extensions/collection-search) support ([#139](https://github.com/stac-utils/stac-fastapi-pgstac/pull/139))
- add [free-text extension](https://github.com/stac-api-extensions/freetext-search) to collection search extensions ([#162](https://github.com/stac-utils/stac-fastapi-pgstac/pull/162))
- add [filter extension](https://github.com/stac-api-extensions/filter) support to Item Collection endpoint
- add [sort extension](https://github.com/stac-api-extensions/sort) support to Item Collection endpoint ([#192](https://github.com/stac-utils/stac-fastapi-pgstac/pull/192))
- add [query extension](https://github.com/stac-api-extensions/query) support to Item Collection endpoint ([#162](https://github.com/stac-utils/stac-fastapi-pgstac/pull/192))
- add [fields extension](https://github.com/stac-api-extensions/fields) support to Item Collection endpoint ([#162](https://github.com/stac-utils/stac-fastapi-pgstac/pull/192))
## [3.0.1] - 2024-11-14
- Enable runtime `CORS` configuration using environment variables (`CORS_ORIGINS="https://...,https://..."`, `CORS_METHODS="PUT,OPTIONS"`) (<https://github.com/stac-utils/stac-fastapi-pgstac/pull/168>)
## [3.0.0] - 2024-08-02
- Enable filter extension for `GET /items` requests and add `Queryables` links in `/collections` and `/collections/{collection_id}` responses ([#89](https://github.com/stac-utils/stac-fastapi-pgstac/pull/89))
- Allow to omit `collection` in bulk item insertions. Same identifier checks as with single insertions ([#113](https://github.com/stac-utils/stac-fastapi-pgstac/pull/113))
- Set `stac-fastapi-*` requirements to **~=3.0**
## 3.0.0a4 - 2024-07-10
- Update stac-fastapi libraries to `~=3.0.0b2`
## 3.0.0a3 - 2024-07-01
- Use `quote_plus` instead of `quote` to encode database's password ([#122](https://github.com/stac-utils/stac-fastapi-pgstac/pull/122))
- Update stac-fastapi libraries to `~=3.0.0a4`
## 3.0.0a2 - 2024-06-18
- Update stac-fastapi libraries to `~=3.0.0a3`
- make sure the application can work without any extension
## 3.0.0a1 - 2024-05-22
- Update stac-fastapi libraries to `~=3.0.0a1`
- Update stac-pydantic dependency to `==3.1.*`
## 3.0.0a0 - 2024-05-10
### Changed
- Update stac-fastapi libraries to v3.0.0a0 ([#108](https://github.com/stac-utils/stac-fastapi-pgstac/pull/108))
- Update pgstac version to `0.8.x`
## [2.5.0] - 2024-04-25
### Changed
- Updated stac-fastapi libraries to v2.5.5 ([#101](https://github.com/stac-utils/stac-fastapi-pgstac/pull/101))
### Added
- Ability to configure the database runtime parameters ([#92](https://github.com/stac-utils/stac-fastapi-pgstac/pull/92))
## [2.4.11] - 2023-12-01
### Changed
- Add `method` parameter to Bulk Transactions requests in order to support upserting bulk data ([#64](https://github.com/stac-utils/stac-fastapi-pgstac/pull/64))
## [2.4.10] - 2023-08-18
### Fixed
- Added back the `CMD` to the Dockerfile that was dropped during the repo split ([#52](https://github.com/stac-utils/stac-fastapi-pgstac/pull/52))
## [2.4.9] - 2023-06-21
### Fixed
- Pass `request` by name when calling endpoints from other endpoints ([#44](https://github.com/stac-utils/stac-fastapi-pgstac/pull/44))
## [2.4.8] - 2023-06-08
### Changed
- Updated **stac-fastapi** to v2.4.8, which updates our STAC API version to v1.0.0 ([#40](https://github.com/stac-utils/stac-fastapi-pgstac/pull/40))
## [2.4.7] - 2023-05-17
### Added
- Support for `ItemCollections` in the transaction extension ([#35](https://github.com/stac-utils/stac-fastapi-pgstac/pull/35))
## [2.4.6] - 2023-05-11
As a part of this release, this repository was extracted from the main
[stac-fastapi](https://github.com/stac-utils/stac-fastapi) repository.
### Added
- Ability to customize the database connection ([#22](https://github.com/stac-utils/stac-fastapi-pgstac/pull/22))
- Ability to add ItemCollections through the Transaction API, with more validation ([#35](https://github.com/stac-utils/stac-fastapi-pgstac/pull/35))
### Changed
- Quieter tests ([#22](https://github.com/stac-utils/stac-fastapi-pgstac/pull/22))
## [2.4.5] - 2023-04-04
### Fixed
- Use `V()` instead of f-strings for pgstac queries ([#554](https://github.com/stac-utils/stac-fastapi/pull/554))
## [2.4.4] - 2023-03-09
### Added
- Nginx service as second docker-compose stack to demonstrate proxy ([#503](https://github.com/stac-utils/stac-fastapi/pull/503))
- Validation checks in CI using [stac-api-validator](https://github.com/stac-utils/stac-api-validator) ([#508](https://github.com/stac-utils/stac-fastapi/pull/508))
- Required links to the sqlalchemy ItemCollection endpoint ([#508](https://github.com/stac-utils/stac-fastapi/pull/508))
- Publication of docker images to GHCR ([#525](https://github.com/stac-utils/stac-fastapi/pull/525))
### Changed
- Updated CI to test against [pgstac v0.6.12](https://github.com/stac-utils/pgstac/releases/tag/v0.6.12) ([#511](https://github.com/stac-utils/stac-fastapi/pull/511))
- Reworked `update_openapi` and added a test for it ([#523](https://github.com/stac-utils/stac-fastapi/pull/523))
- Limit values above 10,000 are now replaced with 10,000 instead of returning a 400 error ([#526](https://github.com/stac-utils/stac-fastapi/pull/526))
- Updated pgstac to v0.7.1 ([#535](https://github.com/stac-utils/stac-fastapi/pull/535))
### Removed
- Incorrect context STAC extension url from the landing page ([#508](https://github.com/stac-utils/stac-fastapi/pull/508))
### Fixed
- Allow url encoded values for `query` in GET requests ([#504](https://github.com/stac-utils/stac-fastapi/pull/504))
- Fix path in `register_update_item` docstring ([#507](https://github.com/stac-utils/stac-fastapi/pull/507))
- `self` link rel for `/collections/{c_id}/items` ([#508](https://github.com/stac-utils/stac-fastapi/pull/508))
- Media type of the item collection endpoint ([#508](https://github.com/stac-utils/stac-fastapi/pull/508))
- Manually exclude non-truthy optional values from sqlalchemy serialization of Collections ([#508](https://github.com/stac-utils/stac-fastapi/pull/508))
- Support `intersects` in GET requests ([#521](https://github.com/stac-utils/stac-fastapi/pull/521))
- Deleting items that had repeated ids in other collections ([#520](https://github.com/stac-utils/stac-fastapi/pull/520))
- 404 for missing collection on /items for sqlalchemy ([#528](https://github.com/stac-utils/stac-fastapi/pull/528))
- Conformance URIs for the filter extension ([#540](https://github.com/stac-utils/stac-fastapi/pull/540))
### Deprecated
- Deprecated `VndOaiResponse` and `config_openapi`, will be removed in v3.0 ([#523](https://github.com/stac-utils/stac-fastapi/pull/523))
## [2.4.3] - 2022-11-25
### Added
- Add the `ENABLED_EXTENSIONS` environment variable determining which extensions are enabled in the pgstac application, all extensions are enabled by default ([#495](https://github.com/stac-utils/stac-fastapi/pull/495))
### Changed
### Removed
### Fixed
## [2.4.2] - 2022-11-25
### Added
- Add support in pgstac backend for /queryables and /collections/{collection_id}/queryables endpoints with functions exposed in pgstac 0.6.8 ([#474](https://github.com/stac-utils/stac-fastapi/pull/474))
- Add `bbox` and `datetime` query parameters to `/collections/{collection_id}/items`. ([#476](https://github.com/stac-utils/stac-fastapi/issues/476), [#380](https://github.com/stac-utils/stac-fastapi/issues/380))
- Update pgstac requirement to 0.6.10
- Add `servers` and `description` to OpenAPI ([#459](https://github.com/stac-utils/stac-fastapi/pull/459))
### Changed
### Removed
- Removed `stac_fastapi.api.routes.create_sync_endpoint` function to reduce code duplication ([#471](https://github.com/stac-utils/stac-fastapi/pull/471))
### Fixed
- Quote password in pgsql strings to accomodate special characters. ([#455](https://github.com/stac-utils/stac-fastapi/issues/455))
- Fix pgstac backend for /queryables endpoint to return 404 for non-existent collections ([#482](https://github.com/stac-utils/stac-fastapi/pull/482))
- `/collection/{collection_id}/items` endpoints now return geojson media type ([#488](https://github.com/stac-utils/stac-fastapi/pull/488))
## [2.4.1] - 2022-08-05
### Added
### Changed
### Removed
### Fixed
- `ciso8601` fails to build in some environments, instead use `pyiso8601` to parse datetimes.
## [2.4.0] - 2022-08-04
### Added
- Add hook to allow adding dependencies to routes. ([#295](https://github.com/stac-utils/stac-fastapi/pull/295))
- Ability to POST an ItemCollection to the collections/{collectionId}/items route. ([#367](https://github.com/stac-utils/stac-fastapi/pull/367))
- Add STAC API - Collections conformance class. ([383](https://github.com/stac-utils/stac-fastapi/pull/383))
- Bulk item inserts for pgstac implementation. ([411](https://github.com/stac-utils/stac-fastapi/pull/411))
- Add APIRouter prefix support for pgstac implementation. ([429](https://github.com/stac-utils/stac-fastapi/pull/429))
- Respect `Forwarded` or `X-Forwarded-*` request headers when building links to better accommodate load balancers and proxies.
### Changed
- Update FastAPI requirement to allow version >=0.73 ([#337](https://github.com/stac-utils/stac-fastapi/pull/337))
- Bump version of PGStac to 0.4.5 ([#346](https://github.com/stac-utils/stac-fastapi/pull/346))
- Add support for PGStac Backend to use PyGeofilter to convert Get Request with cql2-text into cql2-json to send to PGStac backend ([#346](https://github.com/stac-utils/stac-fastapi/pull/346))
- Updated all conformance classes to 1.0.0-rc.1. ([383](https://github.com/stac-utils/stac-fastapi/pull/383))
- Bulk Transactions object Items iterator now returns the Item objects rather than the string IDs of the Item objects
([#355](https://github.com/stac-utils/stac-fastapi/issues/355))
- docker-compose now runs uvicorn with hot-reloading enabled
- Bump version of PGStac to 0.6.2 that includes support for hydrating results in the API backed ([#397](https://github.com/stac-utils/stac-fastapi/pull/397))
- Make item geometry and bbox nullable in sqlalchemy backend. ([#398](https://github.com/stac-utils/stac-fastapi/pull/398))
- Transactions Extension update Item endpoint Item is now `/collections/{collection_id}/items/{item_id}` instead of
`/collections/{collection_id}/items` to align with [STAC API
spec](https://github.com/radiantearth/stac-api-spec/tree/main/ogcapi-features/extensions/transaction#methods) ([#425](https://github.com/stac-utils/stac-fastapi/pull/425))
### Removed
- Remove the unused `router_middleware` function ([#439](https://github.com/stac-utils/stac-fastapi/pull/439))
### Fixed
- Bumped uvicorn version to 0.17 (from >=0.12, <=0.14) to resolve security vulnerability related to websockets dependency version ([#343](https://github.com/stac-utils/stac-fastapi/pull/343))
- `AttributeError` and/or missing properties when requesting the complete `properties`-field in searches. Added test. ([#339](https://github.com/stac-utils/stac-fastapi/pull/339))
- Fixes issues (and adds tests) for issues caused by regression in pgstac ([#345](https://github.com/stac-utils/stac-fastapi/issues/345)
- Update error response payloads to match the API spec. ([#361](https://github.com/stac-utils/stac-fastapi/pull/361))
- Fixed stray `/` before the `#` in several extension conformance class strings ([383](https://github.com/stac-utils/stac-fastapi/pull/383))
- SQLAlchemy backend bulk item insert now works ([#356](https://github.com/stac-utils/stac-fastapi/issues/356))
- PGStac Backend has stricter implementation of Fields Extension syntax ([#397](https://github.com/stac-utils/stac-fastapi/pull/397))
- `/queryables` endpoint now has type `application/schema+json` instead of `application/json` ([#421](https://github.com/stac-utils/stac-fastapi/pull/421))
- Transactions Extension update Item endpoint validates that the `{collection_id}` path parameter matches the Item `"collection"` property
from the request body, if present, and falls back to using the path parameter if no `"collection"` property is found in the body
([#425](https://github.com/stac-utils/stac-fastapi/pull/425))
- PGStac Backend Transactions endpoints return added Item/Collection instead of Item/Collection from request ([#424](https://github.com/stac-utils/stac-fastapi/pull/424))
- Application no longer breaks on startup when pagination extension is not included ([#444](https://github.com/stac-utils/stac-fastapi/pull/444))
## [2.3.0] - 2022-01-18
### Added
- Add link with rel-type of 'service-doc', pointing to HTML API documentation ([#298](https://github.com/stac-utils/stac-fastapi/pull/298))
### Changed
- Refactor to remove hardcoded search request models. Request models are now dynamically created based on the enabled extensions.
([#213](https://github.com/stac-utils/stac-fastapi/pull/213))
- Change example data to use correct `type` for the example Joplin collection ([#314](https://github.com/stac-utils/stac-fastapi/pull/314))
- Changed the geometry type in the Item model from Polygon to Geometry.
- Upgrade pgstac backend to use version 0.4.2 ([#321](https://github.com/stac-utils/stac-fastapi/pull/321))
- STAC 1.0.0-beta.4 conformance classes updated ([#298](https://github.com/stac-utils/stac-fastapi/pull/298))
- Upgrade pgstac backend to use version 0.4.3 ([#326](https://github.com/stac-utils/stac-fastapi/pull/326))
### Removed
- The tiles extension and all tiles links, added for demonstration purposes, have been removed. ([#309](https://github.com/stac-utils/stac-fastapi/pull/309))
### Fixed
- Import error using `importlib.util` ([#325](https://github.com/stac-utils/stac-fastapi/pull/325))
- Add environment variables required by upgraded pgstac container ([#313](https://github.com/stac-utils/stac-fastapi/pull/313))
- Enabled `ContextExtension` by default ([#207](https://github.com/stac-utils/stac-fastapi/issues/207))
- Content-type response headers for the /search endpoint now reflect the geojson response expected in the STAC api spec ([#220](https://github.com/stac-utils/stac-fastapi/issues/220))
- The minimum `limit` value for searches is now 1 ([#296](https://github.com/stac-utils/stac-fastapi/pull/296))
- Links stored with Collections and Items (e.g. license links) are now returned with those STAC objects ([#282](https://github.com/stac-utils/stac-fastapi/pull/282))
- Content-type response headers for the /api endpoint now reflect those expected in the STAC api spec ([#287](https://github.com/stac-utils/stac-fastapi/pull/287))
- Changed type options for datetime in BaseSearchGetRequest ([#318](https://github.com/stac-utils/stac-fastapi/pull/318))
- Expanded on tests to ensure properly testing get and post searches ([#318](https://github.com/stac-utils/stac-fastapi/pull/318))
- Ensure invalid datetimes result in 400s ([#323](https://github.com/stac-utils/stac-fastapi/pull/323))
## [2.2.0] - 2021-10-19
### Added
- Add CQL2 support ([#308](https://github.com/stac-utils/stac-fastapi/pull/308))
- Add ability to override ItemCollectionUri and SearchGetRequest models ([#271](https://github.com/stac-utils/stac-fastapi/pull/271))
- Added `collections` attribute to list of default fields to include, so that we satisfy the STAC API spec, which requires a `collections` attribute to be output when an item is part of a collection ([#276](https://github.com/stac-utils/stac-fastapi/pull/276))
### Changed
- Update pgstac to 0.4.0 ([#308](https://github.com/stac-utils/stac-fastapi/pull/308))
- Update get_item in sqlalchemy backend to allow for querying for items with same ids but in different collections. ([#275](https://github.com/stac-utils/stac-fastapi/pull/275))
## [2.1.1] - 2021-09-23
### Added
- Add `middlewares` option in `stac_fastapi.api.StacApi` to allow custom middleware configuration ([#267](https://github.com/stac-utils/stac-fastapi/pull/267))
- Support non-interval datetime queries on sqlalchemy backend ([#262](https://github.com/stac-utils/stac-fastapi/pull/262))
- Restrict `limit` parameter in sqlalchemy backend to between 1 and 10,000. ([#251](https://github.com/stac-utils/stac-fastapi/pull/251))
- Fix OAS conformance URL ([#263](https://github.com/stac-utils/stac-fastapi/pull/263))
- Links to children collections from the landing page always have a title ([#260](https://github.com/stac-utils/stac-fastapi/pull/260))
- Fix collection links in the `all_collections` method in `pgstac` ([#269](https://github.com/stac-utils/stac-fastapi/pull/269))
### Fixed
- Pin FastAPI to 0.67 to avoid issues with rendering OpenAPI documentation ([#246](https://github.com/stac-utils/stac-fastapi/pull/246))
- Add `stac_version` to default search attributes ([#268](https://github.com/stac-utils/stac-fastapi/pull/268))
- pgstac backend specifies collection_id when fetching a single item ([#279](https://github.com/stac-utils/stac-fastapi/pull/270))
## [2.1.0] - 2021-08-26
### Added
- Added filter extension. ([#165](https://github.com/stac-utils/stac-fastapi/pull/165))
- Add Support for CQL JSON to PGStac Backend ([#209](https://github.com/stac-utils/stac-fastapi/pull/209))
- Added item_serializer and item_table to BulkTransactionsClient in sqlalchemy backend ([#210](https://github.com/stac-utils/stac-fastapi/pull/210))
- Enable conformance class configuration ([#214](https://github.com/stac-utils/stac-fastapi/pull/214))
- Add/fix landing page links ([#229](https://github.com/stac-utils/stac-fastapi/pull/229))
- Correct response codes for bad/unusable bboxes ([#235](https://github.com/stac-utils/stac-fastapi/pull/235))
- Add a "method" field for search links ([#236](https://github.com/stac-utils/stac-fastapi/pull/236))
- Add extension schemas to landing ([#237](https://github.com/stac-utils/stac-fastapi/pull/237))
### Removed
- Remove shapely from stac_fastapi.pgstac requirements ([#225](https://github.com/stac-utils/stac-fastapi/pull/225))
### Changed
- Update to STAC API 1.0.0-beta.3 ([#239](https://github.com/stac-utils/stac-fastapi/pull/239))
### Fixed
- Make collection title optional in landing page links ([#198](https://github.com/stac-utils/stac-fastapi/pull/198))
- Preserve relative paths on link generation ([#199](https://github.com/stac-utils/stac-fastapi/pull/199))
- Fix collection endpoint return value to match spec (fixes regression) ([#232](https://github.com/stac-utils/stac-fastapi/pull/232))
- Return empty item collection instead of error when searching ([#233](https://github.com/stac-utils/stac-fastapi/pull/233))
- Correct response codes for bad/unusable bboxes ([#235](https://github.com/stac-utils/stac-fastapi/pull/235))
- Update pgstac to return 400 on invalid date parameter ([#240](https://github.com/stac-utils/stac-fastapi/pull/240))
## [2.0.0] - 2021-07-26
- Refactor stac-fastapi into submodules ([#106](https://github.com/stac-utils/stac-fastapi/pull/106))
- Add pgstac backend ([#126](https://github.com/stac-utils/stac-fastapi/pull/126))
- Upgrade to stac-pydantic 2.0.0 and stac-spec 1.0.0 ([#181](https://github.com/stac-utils/stac-fastapi/pull/181))
## [1.1.0] - 2021-01-28
- Improve how the library declares API extensions ([#54](https://github.com/stac-utils/arturo-stac-api/pull/54))
- Add postgres bulk transactions client ([#59](https://github.com/stac-utils/arturo-stac-api/pull/59))
- Update TiTiler version ([#61](https://github.com/stac-utils/arturo-stac-api/pull/61))
- Use attrs instead of dataclasses ([#73](https://github.com/stac-utils/arturo-stac-api/pull/73))
- Remove postgres database connection from API layer ([#74](https://github.com/stac-utils/arturo-stac-api/pull/74))
- Fix `pre-commit` config ([#75](https://github.com/stac-utils/arturo-stac-api/pull/75))
## [1.0.0] - 2020-09-28
- First PyPi release!
[Unreleased]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/5.0.2..main>
[5.0.2]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/5.0.1..5.0.2>
[5.0.1]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/5.0.0..5.0.1>
[5.0.0]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/4.0.3..5.0.0>
[4.0.3]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/4.0.2..4.0.3>
[4.0.2]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/4.0.1..4.0.2>
[4.0.1]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/4.0.0..4.0.1>
[4.0.0]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/3.0.1..4.0.0>
[3.0.1]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/3.0.0..3.0.1>
[3.0.0]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.5.0..3.0.0>
[2.5.0]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.11..2.5.0>
[2.4.11]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.10..2.4.11>
[2.4.10]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.9..2.4.10>
[2.4.9]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.8..2.4.9>
[2.4.8]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.7..2.4.8>
[2.4.7]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.6..2.4.7>
[2.4.6]: <https://github.com/stac-utils/stac-fastapi-pgstac/compare/2.4.5..2.4.6>
[2.4.5]: <https://github.com/stac-utils/stac-fastapi/compare/2.4.4..2.4.5>
[2.4.4]: <https://github.com/stac-utils/stac-fastapi/compare/2.4.3..2.4.4>
[2.4.3]: <https://github.com/stac-utils/stac-fastapi/compare/2.4.2..2.4.3>
[2.4.2]: <https://github.com/stac-utils/stac-fastapi/compare/2.4.1..2.4.2>
[2.4.1]: <https://github.com/stac-utils/stac-fastapi/compare/2.4.0..2.4.1>
[2.4.0]: <https://github.com/stac-utils/stac-fastapi/compare/2.3.0..2.4.0>
[2.3.0]: <https://github.com/stac-utils/stac-fastapi/compare/2.2.0..2.3.0>
[2.2.0]: <https://github.com/stac-utils/stac-fastapi/compare/2.1.1..2.2.0>
[2.1.1]: <https://github.com/stac-utils/stac-fastapi/compare/2.1.0..2.1.1>
[2.1.0]: <https://github.com/stac-utils/stac-fastapi/compare/2.1.0..main>
[2.0.0]: <https://github.com/stac-utils/stac-fastapi/compare/1.1.0..2.0.0>
[1.1.0]: <https://github.com/stac-utils/stac-fastapi/compare/1.0.0..1.1.0>
[1.0.0]: <https://github.com/stac-utils/stac-fastapi/tree/1.0.0>

View File

@ -0,0 +1,55 @@
# Contributing
Issues and pull requests are more than welcome.
## Development install
```shell
git clone https://github.com/stac-utils/stac-fastapi-pgstac
cd stac-fastapi-pgstac
make install
```
This repo is set to use `pre-commit` to run *isort*, *flake8*, *pydocstyle*, *black* ("uncompromising Python code formatter") and mypy when committing new code.
```shell
pre-commit install
```
To run the service on 0.0.0.0:8082 and ingest example data into the database (the "joplin" collection):
```shell
make run-joplin
```
You can connect to the database with a database tool on port 5439 to inspect and see the data.
To run the tests:
```shell
make test
```
## Docs
```bash
git clone https://github.com/stac-utils/stac-fastapi-pgstac
cd stac-fastapi-pgstac
pip install -e .[docs]
```
Hot-reloading docs:
```bash
mkdocs serve
```
To manually deploy docs (note you should never need to do this because GitHub
Actions deploys automatically for new commits.):
```shell
# Create API documentations
make docs
# deploy
mkdocs gh-deploy
```

View File

@ -0,0 +1,23 @@
# Python minor version is overridable at build time: --build-arg PYTHON_VERSION=...
ARG PYTHON_VERSION=3.12

FROM python:${PYTHON_VERSION}-slim AS base

# Any python libraries that require system libraries to be installed will likely
# need the following packages in order to build
RUN apt-get update && \
    apt-get -y upgrade && \
    apt-get install -y build-essential git && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Point curl (and libraries that honor this variable) at the system CA bundle.
ENV CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt

FROM base AS builder

WORKDIR /app
COPY . /app

# Editable install with the `server` extra (uvicorn).
RUN python -m pip install -e .[server]

# Serve the application on all interfaces, port 8080.
CMD ["uvicorn", "stac_fastapi.pgstac.app:app", "--host", "0.0.0.0", "--port", "8080"]

View File

@ -0,0 +1,14 @@
FROM python:3.10-slim

# build-essential is required to build a wheel for ciso8601
RUN apt update && apt install -y build-essential

RUN python -m pip install --upgrade pip

COPY . /opt/src
WORKDIR /opt/src

# Install the package with its documentation extras (mkdocs and plugins).
RUN python -m pip install ".[docs]"

# Default command builds the static documentation site.
CMD ["mkdocs", "build"]

View File

@ -0,0 +1,19 @@
# Python minor version is overridable at build time: --build-arg PYTHON_VERSION=...
ARG PYTHON_VERSION=3.12

# Use uppercase `AS` to match the other Dockerfiles in this repo; mixed
# `FROM ... as` casing triggers a BuildKit consistency warning.
FROM python:${PYTHON_VERSION}-slim AS base

# Any python libraries that require system libraries to be installed will likely
# need the following packages in order to build
RUN apt-get update && \
    apt-get -y upgrade && \
    apt-get install -y build-essential git libpq-dev postgresql-15-postgis-3 && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Run development work as an unprivileged user.
RUN useradd -ms /bin/bash newuser
USER newuser

WORKDIR /app
COPY . /app

# Editable install with dev + server extras into the user's site-packages
# (--user is required because we are no longer root).
RUN python -m pip install -e .[dev,server] --user

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Arturo AI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,55 @@
#!make

# Host/port the app binds to; override via environment variables.
APP_HOST ?= 0.0.0.0
APP_PORT ?= 8080
EXTERNAL_APP_PORT ?= ${APP_PORT}
LOG_LEVEL ?= warning

# Helper: run a one-off app container with the service port published.
run = docker compose run --rm \
	-p ${EXTERNAL_APP_PORT}:${APP_PORT} \
	-e APP_HOST=${APP_HOST} \
	-e APP_PORT=${APP_PORT} \
	app

# Helper: run a one-off tests container.
runtests = docker compose run --rm tests

# Build the application image.
.PHONY: image
image:
	docker compose build

# Build (if needed) and start the full docker-compose stack.
.PHONY: docker-run
docker-run: image
	docker compose up

# Start the stack behind the nginx reverse proxy.
.PHONY: docker-run-nginx-proxy
docker-run-nginx-proxy:
	docker compose -f docker-compose.yml -f docker-compose.nginx.yml up

# Open an interactive shell in a one-off app container.
.PHONY: docker-shell
docker-shell:
	$(run) /bin/bash

# Run the API test suite inside the tests container.
.PHONY: test
test:
	$(runtests) /bin/bash -c 'export && python -m pytest /app/tests/api/test_api.py --log-cli-level $(LOG_LEVEL)'

# Start only the pgstac database service.
.PHONY: run-database
run-database:
	docker compose run --rm database

# Load the joplin sample data into the running stack.
.PHONY: run-joplin
run-joplin:
	docker compose run --rm loadjoplin

# Local (non-docker) editable install with dev and server extras.
.PHONY: install
install:
	pip install -e .[dev,server]

# Build the documentation image.
.PHONY: docs-image
docs-image:
	docker compose -f docker-compose.docs.yml \
		build

# Build the documentation site inside the docs container.
.PHONY: docs
docs: docs-image
	docker compose -f docker-compose.docs.yml \
		run docs

View File

@ -0,0 +1,100 @@
# stac-fastapi-pgstac
[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/stac-utils/stac-fastapi-pgstac/cicd.yaml?style=for-the-badge)](https://github.com/stac-utils/stac-fastapi-pgstac/actions/workflows/cicd.yaml)
[![PyPI](https://img.shields.io/pypi/v/stac-fastapi.pgstac?style=for-the-badge)](https://pypi.org/project/stac-fastapi.pgstac)
[![Documentation](https://img.shields.io/github/actions/workflow/status/stac-utils/stac-fastapi-pgstac/pages.yml?label=Docs&style=for-the-badge)](https://stac-utils.github.io/stac-fastapi-pgstac/)
[![License](https://img.shields.io/github/license/stac-utils/stac-fastapi-pgstac?style=for-the-badge)](https://github.com/stac-utils/stac-fastapi-pgstac/blob/main/LICENSE)
<p align="center">
<img src="https://user-images.githubusercontent.com/10407788/174893876-7a3b5b7a-95a5-48c4-9ff2-cc408f1b6af9.png" style="vertical-align: middle; max-width: 400px; max-height: 100px;" height=100 />
<img src="https://fastapi.tiangolo.com/img/logo-margin/logo-teal.png" alt="FastAPI" style="vertical-align: middle; max-width: 400px; max-height: 100px;" width=200 />
</p>
[PgSTAC](https://github.com/stac-utils/pgstac) backend for [stac-fastapi](https://github.com/stac-utils/stac-fastapi), the [FastAPI](https://fastapi.tiangolo.com/) implementation of the [STAC API spec](https://github.com/radiantearth/stac-api-spec)
## Overview
**stac-fastapi-pgstac** is an HTTP interface built in FastAPI.
It validates requests and data sent to a [PgSTAC](https://github.com/stac-utils/pgstac) backend, and adds [links](https://github.com/radiantearth/stac-spec/blob/master/item-spec/item-spec.md#link-object) to the returned data.
All other processing and search is provided directly using PgSTAC procedural sql / plpgsql functions on the database.
PgSTAC stores all collection and item records as jsonb fields exactly as they come in allowing for any custom fields to be stored and retrieved transparently.
## `PgSTAC` version
`stac-fastapi-pgstac` depends on [`pgstac`](https://stac-utils.github.io/pgstac/pgstac/) database schema and [`pypgstac`](https://stac-utils.github.io/pgstac/pypgstac/) python package.
| stac-fastapi-pgstac Version | pgstac |
| --| --|
| 2.5 | >=0.7,<0.8 |
| 3.0 | >=0.8,<0.9 |
| 4.0 | >=0.8,<0.10 |
## Usage
PgSTAC is an external project and may be used by multiple front ends.
For Stac FastAPI development, a Docker image (which is pulled as part of the docker-compose) is available via the [Github container registry](https://github.com/stac-utils/pgstac/pkgs/container/pgstac/81689794?tag=latest).
The PgSTAC version required by **stac-fastapi-pgstac** is found in the [setup](http://github.com/stac-utils/stac-fastapi-pgstac/blob/main/setup.py) file.
### Sorting
While the STAC [Sort Extension](https://github.com/stac-api-extensions/sort) is fully supported, [PgSTAC](https://github.com/stac-utils/pgstac) is particularly enhanced to be able to sort by datetime (either ascending or descending).
Sorting by anything other than datetime (the default if no sort is specified) on very large STAC repositories without very specific query limits (ie selecting a single day date range) will not have the same performance.
For more than millions of records it is recommended to either set a low connection timeout on PostgreSQL or to disable use of the Sort Extension.
### Hydration
To configure **stac-fastapi-pgstac** to [hydrate search result items in the API](https://stac-utils.github.io/pgstac/pgstac/#runtime-configurations), set the `USE_API_HYDRATE` environment variable to `true` or explicitly set the option in the PGStac Settings object.
### Migrations
There is a Python utility as part of PgSTAC ([pypgstac](https://stac-utils.github.io/pgstac/pypgstac/)) that includes a migration utility.
To use:
```shell
pypgstac migrate
```
## Contributing
See [CONTRIBUTING](https://github.com/stac-utils/stac-fastapi-pgstac/blob/main/CONTRIBUTING.md) for detailed contribution instructions.
To install:
```shell
git clone https://github.com/stac-utils/stac-fastapi-pgstac
cd stac-fastapi-pgstac
python -m pip install -e ".[dev,server,docs]"
```
To test:
```shell
make test
```
Use Github [Pull Requests](https://github.com/stac-utils/stac-fastapi-pgstac/pulls) to provide new features or to request review of draft code, and use [Issues](https://github.com/stac-utils/stac-fastapi-pgstac/issues) to report bugs or request new features.
### Documentation
To build the docs:
```shell
make docs
```
Then, serve the docs via a local HTTP server:
```shell
mkdocs serve
```
## History
**stac-fastapi-pgstac** was initially added to **stac-fastapi** by [developmentseed](https://github.com/developmentseed).
In April of 2023, it was removed from the core **stac-fastapi** repository and moved to its current location (<http://github.com/stac-utils/stac-fastapi-pgstac>).
## License
[MIT](https://github.com/stac-utils/stac-fastapi-pgstac/blob/main/LICENSE)
<!-- markdownlint-disable-file MD033 -->

View File

@ -0,0 +1,27 @@
# Releasing
This is a checklist for releasing a new version of **stac-fastapi**.
1. Determine the next version.
We currently do not have published versioning guidelines, but there is some text on the subject here: <https://github.com/radiantearth/stac-spec/discussions/1184>.
2. Create a release branch named `release/vX.Y.Z`, where `X.Y.Z` is the new version.
3. Search and replace all instances of the current version number with the new version.
Note: You can use [`bump-my-version`](https://github.com/callowayproject/bump-my-version) CLI
```
bump-my-version bump --new-version 3.1.0
```
4. Update [CHANGES.md](./CHANGES.md) for the new version.
Add the appropriate header, and update the links at the bottom of the file.
5. Audit CHANGES.md for completeness and accuracy.
Also, ensure that the changes in this version are appropriate for the version number change (i.e. if you're making breaking changes, you should be increasing the `MAJOR` version number).
6. (optional) If you have permissions, run `scripts/publish --test` to test your PyPI publish.
If successful, the published packages will be available on <https://test.pypi.org>.
7. Push your release branch, create a PR, and get approval.
8. Once the PR is merged, create a new (annotated, signed) tag on the appropriate commit.
Name the tag `X.Y.Z`, and include `vX.Y.Z` as its annotation message.
9. Push your tag to Github, which will kick off the [publishing workflow](.github/workflows/publish.yml).
10. Create a [new release](https://github.com/stac-utils/stac-fastapi/releases/new) targeting the new tag, and use the "Generate release notes" feature to populate the description.
Publish the release and mark it as the latest.
11. Publicize the release via the appropriate social channels, including [Gitter](https://matrix.to/#/#SpatioTemporal-Asset-Catalog_python:gitter.im).

View File

@ -0,0 +1 @@
5.0.2

View File

@ -0,0 +1,11 @@
version: '3'
services:
docs:
container_name: stac-fastapi-docs-dev
build:
context: .
dockerfile: Dockerfile.docs
platform: linux/amd64
volumes:
- .:/opt/src

View File

@ -0,0 +1,41 @@
version: "3.9"
services:
database:
container_name: stac-db
image: ghcr.io/stac-utils/pgstac:v0.9.2
environment:
- POSTGRES_USER=username
- POSTGRES_PASSWORD=password
- POSTGRES_DB=postgis
- PGUSER=username
- PGPASSWORD=password
- PGDATABASE=postgis
ports:
- "5439:5432"
command: postgres -N 500
app:
container_name: stac-fastapi-pgstac
image: stac-utils/stac-fastapi-pgstac
build: .
environment:
- APP_HOST=0.0.0.0
- APP_PORT=8082
- RELOAD=true
- ENVIRONMENT=local
- PGUSER=username
- PGPASSWORD=password
- PGDATABASE=postgis
- PGHOST=database
- PGPORT=5432
- WEB_CONCURRENCY=10
- ENABLE_TRANSACTIONS_EXTENSIONS=true
ports:
- "8082:8082"
volumes:
- .:/app
depends_on:
- database
command: bash -c "./scripts/wait-for-it.sh database:5432 && python -m stac_fastapi.pgstac.app"

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.app

View File

@ -0,0 +1,3 @@
::: stac_fastapi.pgstac.config
options:
show_source: true

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.core

View File

@ -0,0 +1,3 @@
::: stac_fastapi.pgstac.db
options:
show_source: true

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.extensions.filter

View File

@ -0,0 +1,8 @@
# Module stac_fastapi.pgstac.extensions
pgstac extension customisations.
## Sub-modules
* [stac_fastapi.pgstac.extensions.filter](filter.md)
* [stac_fastapi.pgstac.extensions.query](query.md)

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.extensions.query

View File

@ -0,0 +1,14 @@
# Module stac_fastapi.pgstac
stac_fastapi.pgstac module.
## Sub-modules
* [stac_fastapi.pgstac.app](app.md)
* [stac_fastapi.pgstac.config](config.md)
* [stac_fastapi.pgstac.core](core.md)
* [stac_fastapi.pgstac.db](db.md)
* [stac_fastapi.pgstac.extensions](extensions/index.md)
* [stac_fastapi.pgstac.models](models/index.md)
* [stac_fastapi.pgstac.transactions](transactions.md)
* [stac_fastapi.pgstac.utils](utils.md)

View File

@ -0,0 +1,7 @@
# Module stac_fastapi.pgstac.models
stac_fastapi.pgstac.models module.
## Sub-modules
* [stac_fastapi.pgstac.models.links](links.md)

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.models.links

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.transactions

View File

@ -0,0 +1 @@
::: stac_fastapi.pgstac.utils

View File

@ -0,0 +1 @@
../CONTRIBUTING.md

View File

@ -0,0 +1 @@
../README.md

View File

@ -0,0 +1 @@
../CHANGES.md

View File

@ -0,0 +1,64 @@
### Application Extension
The default `stac-fastapi-pgstac` application comes with **all** extensions enabled (except transaction). Users can use `ENABLED_EXTENSIONS` environment variable to limit the supported extensions.
Available values for `ENABLED_EXTENSIONS`:
- `query`
- `sort`
- `fields`
- `filter`
- `free_text` (only for collection-search)
- `pagination`
- `collection_search`
Example: `ENABLED_EXTENSIONS="pagination,sort"`
Since `6.0.0`, the transaction extension is not enabled by default. To add the transaction endpoints, users can set `ENABLE_TRANSACTIONS_EXTENSIONS=TRUE/YES/1`.
### Database config
- `PGUSER`: postgres username
- `PGPASSWORD`: postgres password
- `PGHOST`: hostname for the connection
- `PGPORT`: database port
- `PGDATABASE`: database name
- `DB_MIN_CONN_SIZE`: Number of connection the pool will be initialized with. Defaults to `1`
- `DB_MAX_CONN_SIZE` Max number of connections in the pool. Defaults to `10`
- `DB_MAX_QUERIES`: Number of queries after a connection is closed and replaced with a new connection. Defaults to `50000`
- `DB_MAX_INACTIVE_CONN_LIFETIME`: Number of seconds after which inactive connections in the pool will be closed. Defaults to `300`
- `SEARCH_PATH`: Postgres search path. Defaults to `"pgstac,public"`
- `APPLICATION_NAME`: PgSTAC Application name. Defaults to `"pgstac"`
##### Deprecated
In version `6.0.0` we've renamed the PG configuration variable to match the official naming convention:
- `POSTGRES_USER` -> `PGUSER`
- `POSTGRES_PASS` -> `PGPASSWORD`
- `POSTGRES_HOST_READER` -> `PGHOST`
- `POSTGRES_HOST_WRITER` -> `PGHOST`*
- `POSTGRES_PORT` -> `PGPORT`
- `POSTGRES_DBNAME` -> `PGDATABASE`
\* Since version `6.0`, users cannot set a different host for `writer` and `reader` database but will need to customize the application and pass a specific `stac_fastapi.pgstac.config.PostgresSettings` instance to the `connect_to_db` function.
### Validation/Serialization
- `ENABLE_RESPONSE_MODELS`: use pydantic models to validate endpoint responses. Defaults to `False`
- `ENABLE_DIRECT_RESPONSE`: by-pass the default FastAPI serialization by wrapping the endpoint responses into `starlette.Response` classes. Defaults to `False`
### Misc
- `STAC_FASTAPI_VERSION` (string) is the version number of your API instance (this is not the STAC version)
- `STAC_FASTAPI_TITLE` (string) should be a self-explanatory title for your API
- `STAC_FASTAPI_DESCRIPTION` (string) should be a good description for your API. It can contain CommonMark
- `STAC_FASTAPI_LANDING_ID` (string) is a unique identifier for your Landing page
- `ROOT_PATH`: set application root-path (when using proxy)
- `CORS_ORIGINS`: A list of origins that should be permitted to make cross-origin requests. Defaults to `*`
- `CORS_METHODS`: A list of HTTP methods that should be allowed for cross-origin requests. Defaults to `"GET,POST,OPTIONS"`
- `USE_API_HYDRATE`: perform hydration of stac items within stac-fastapi
- `INVALID_ID_CHARS`: list of characters that are not allowed in item or collection ids (used in Transaction endpoints)

View File

@ -0,0 +1,3 @@
:root {
--md-primary-fg-color: rgb(13, 118, 160);
}

View File

@ -0,0 +1,107 @@
site_name: stac-fastapi-pgstac
site_description: STAC FastAPI - pgstac backend.
# Repository
repo_name: "stac-utils/stac-fastapi-pgstac"
repo_url: "https://github.com/stac-utils/stac-fastapi-pgstac"
edit_uri: "blob/master/docs/src/"
# Social links
extra:
social:
- icon: "fontawesome/brands/github"
link: "https://github.com/stac-utils"
# Layout
nav:
- Home: "index.md"
- Configuration: "settings.md"
- API:
- stac_fastapi.pgstac:
- module: api/stac_fastapi/pgstac/index.md
- app: api/stac_fastapi/pgstac/app.md
- config: api/stac_fastapi/pgstac/config.md
- core: api/stac_fastapi/pgstac/core.md
- db: api/stac_fastapi/pgstac/db.md
- extensions:
- module: api/stac_fastapi/pgstac/extensions/index.md
- filter: api/stac_fastapi/pgstac/extensions/filter.md
- query: api/stac_fastapi/pgstac/extensions/query.md
- models:
- module: api/stac_fastapi/pgstac/models/index.md
- links: api/stac_fastapi/pgstac/models/links.md
- transactions: api/stac_fastapi/pgstac/transactions.md
- utils: api/stac_fastapi/pgstac/utils.md
- Development - Contributing: "contributing.md"
- Release Notes: "release-notes.md"
plugins:
- search
- mkdocstrings:
enable_inventory: true
handlers:
python:
options:
docstring_section_style: list
docstring_style: google
line_length: 100
separate_signature: true
show_root_heading: true
show_signature_annotations: true
show_source: false
show_symbol_type_toc: true
signature_crossrefs: true
extensions:
- griffe_inherited_docstrings
inventories:
- https://docs.python.org/3/objects.inv
- https://docs.pydantic.dev/latest/objects.inv
- https://fastapi.tiangolo.com/objects.inv
- https://www.starlette.io/objects.inv
- https://magicstack.github.io/asyncpg/current/objects.inv
- https://stac-utils.github.io/stac-fastapi/objects.inv
- https://www.attrs.org/en/stable/objects.inv
# Theme
theme:
icon:
logo: "material/home"
repo: "fontawesome/brands/github"
name: "material"
language: "en"
font:
text: "Nunito Sans"
code: "Fira Code"
extra_css:
- stylesheets/extra.css
# These extensions are chosen to be a superset of Pandoc's Markdown.
# This way, I can write in Pandoc's Markdown and have it be supported here.
# https://pandoc.org/MANUAL.html
markdown_extensions:
- admonition
- attr_list
- codehilite:
guess_lang: false
- def_list
- footnotes
- pymdownx.arithmatex
- pymdownx.betterem
- pymdownx.caret:
insert: false
- pymdownx.details
- pymdownx.emoji
- pymdownx.escapeall:
hardbreak: true
nbsp: true
- pymdownx.magiclink:
hide_protocol: true
repo_url_shortener: true
- pymdownx.smartsymbols
- pymdownx.superfences
- pymdownx.tasklist:
custom_checkbox: true
- pymdownx.tilde
- toc:
permalink: true

View File

@ -0,0 +1,20 @@
events {}
http {
server {
listen 80;
location /api/v1/pgstac {
rewrite ^/api/v1/pgstac(.*)$ $1 break;
proxy_pass http://app-nginx:8082;
proxy_set_header HOST $http_host;
proxy_set_header Referer $http_referer;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
}
location / {
proxy_redirect off;
}
}
}

View File

@ -0,0 +1,66 @@
[tool.isort]
profile = "black"
known_first_party = "stac_fastapi.pgstac"
known_third_party = ["rasterio", "stac-pydantic", "sqlalchemy", "geoalchemy2", "fastapi", "stac_fastapi"]
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
[tool.mypy]
ignore_missing_imports = true
namespace_packages = true
explicit_package_bases = true
exclude = ["tests", ".venv"]
[tool.ruff]
line-length = 90
[tool.ruff.lint]
select = [
"C",
"E",
"F",
"W",
"B",
]
ignore = [
"E203", # line too long, handled by black
"E501", # do not perform function calls in argument defaults
"B028", # No explicit `stacklevel` keyword argument found
]
[tool.bumpversion]
current_version = "5.0.2"
parse = """(?x)
(?P<major>\\d+)\\.
(?P<minor>\\d+)\\.
(?P<patch>\\d+)
(?:
(?P<pre_l>a|b|rc) # pre-release label
(?P<pre_n>\\d+) # pre-release version number
)? # pre-release section is optional
(?:
\\.post
(?P<post_n>\\d+) # post-release version number
)? # post-release section is optional
"""
serialize = [
"{major}.{minor}.{patch}.post{post_n}",
"{major}.{minor}.{patch}{pre_l}{pre_n}",
"{major}.{minor}.{patch}",
]
search = "{current_version}"
replace = "{new_version}"
regex = false
tag = false
commit = true
[[tool.bumpversion.files]]
filename = "VERSION"
search = "{current_version}"
replace = "{new_version}"
[[tool.bumpversion.files]]
filename = "stac_fastapi/pgstac/version.py"
search = '__version__ = "{current_version}"'
replace = '__version__ = "{new_version}"'

View File

@ -0,0 +1,4 @@
[pytest]
testpaths = tests
addopts = -sv
asyncio_mode = auto

View File

@ -0,0 +1,51 @@
"""Ingest sample data during docker-compose"""
import json
import sys
from pathlib import Path
from urllib.parse import urljoin
import requests
workingdir = Path(__file__).parent.absolute()
joplindata = workingdir.parent / "testdata" / "joplin"
app_host = sys.argv[1]
if not app_host:
raise Exception("You must include full path/port to stac instance")
def post_or_put(url: str, data: dict):
    """Create the record via POST, falling back to PUT when it already exists."""
    response = requests.post(url, json=data)
    if response.status_code != 409:
        response.raise_for_status()
        return
    # 409 Conflict: the record already exists, so update it in place.
    item_url = url + f"/{data['id']}"
    response = requests.put(item_url, json=data)
    # An unchanged record may come back as 404; tolerate that.
    if response.status_code != 404:
        response.raise_for_status()
def ingest_joplin_data(app_host: str = app_host, data_dir: Path = joplindata):
    """Load the joplin collection and all of its items into the STAC API."""
    collection = json.loads((data_dir / "collection.json").read_text())
    post_or_put(urljoin(app_host, "/collections"), collection)

    index = json.loads((data_dir / "index.geojson").read_text())
    for feat in index["features"]:
        post_or_put(urljoin(app_host, f"collections/{collection['id']}/items"), feat)
if __name__ == "__main__":
print("Loading Joplin Collection")
ingest_joplin_data()
print("All Done")

View File

@ -0,0 +1,56 @@
#!/bin/bash

# Publish the stac-fastapi-pgstac distribution to PyPI (or, with --test, to
# the test PyPI index).

set -e

# Echo commands in CI for easier debugging.
if [[ -n "${CI}" ]]; then
    set -x
fi

function usage() {
    echo -n \
"Usage: $(basename "$0")
Publish all stac-fastapi packages.
Options:
--test      Publish to test pypi. Requires a 'testpypi' repository
            be defined in your .pypirc;
            See https://packaging.python.org/guides/using-testpypi/#using-testpypi-with-pip
"
}

# Parse arguments, collecting anything unrecognized as positional.
POSITIONAL=()
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        --help)
            usage
            exit 0
            shift  # NOTE(review): unreachable after `exit 0` — harmless, kept as-is
            ;;
        --test)
            TEST_PYPI="--repository testpypi"
            shift
            ;;
        *) # unknown option
            POSITIONAL+=("$1") # save it in an array for later
            shift # past argument
            ;;
    esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters

# Fail if this isn't CI and we aren't publishing to test pypi
if [ -z "${TEST_PYPI}" ] && [ -z "${CI}" ]; then
    echo "Only CI can publish to pypi"
    exit 1
fi

# Only build and upload when executed directly (not when sourced).
if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
    rm -rf dist
    python setup.py sdist bdist_wheel
    twine upload ${TEST_PYPI} dist/*
fi

View File

@ -0,0 +1,28 @@
#!/usr/bin/env sh
#
# Validate a STAC server using [stac-api-validator](https://github.com/stac-utils/stac-api-validator).
#
# Assumptions:
#
# - You have stac-api-validator installed, e.g. via `pip install stac-api-validator`
# - You've loaded the joplin data, probably using `python ./scripts/ingest_joplin.py http://localhost:8082``
#
# Currently, item-search is not checked, because it crashes stac-api-validator (probably a problem on our side).

set -e

# Default to the local docker-compose endpoint unless a root URL is given.
if [ $# -eq 0 ]; then
    root_url=http://localhost:8082
else
    root_url="$1"
fi

# Polygon used for the spatial-filter conformance checks (joplin area).
geometry='{"type":"Polygon","coordinates":[[[-94.6884155,37.0595608],[-94.6884155,37.0332547],[-94.6554565,37.0332547],[-94.6554565,37.0595608],[-94.6884155,37.0595608]]]}'

stac-api-validator --root-url "$root_url" \
    --conformance core \
    --conformance collections \
    --conformance features \
    --conformance filter \
    --collection joplin \
    --geometry "$geometry"
    # --conformance item-search # currently breaks stac-api-validator

View File

@ -0,0 +1,186 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
######################################################
# Copied from https://github.com/vishnubob/wait-for-it
######################################################

# Name of this script, used in log messages.
WAITFORIT_cmdname=${0##*/}

# Echo to stderr unless quiet mode is enabled.
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

# Print usage text to stderr and exit non-zero.
usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}
# Poll the target host:port once per second until it accepts TCP connections.
# Returns 0 on success; loops until success (the caller enforces any timeout).
wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        # Busybox environments probe with `nc`; otherwise use bash's /dev/tcp.
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}
# Re-invoke this script as a child under `timeout` so that the wait can be
# bounded while still responding to SIGINT.
wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    # Forward Ctrl-C to the whole child process group.
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        # Combined host:port form, e.g. `database:5432`.
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        # Internal flag: set when re-invoked by wait_for_wrapper.
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        # Everything after `--` is the command to exec once the port is up.
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

# Defaults for anything not supplied on the command line.
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)

WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi

if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    # Child mode: just wait and report the result back to the parent.
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

# If a trailing command was given, exec it (unless strict mode and the wait failed).
if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi

View File

@ -0,0 +1,2 @@
[metadata]
version = attr: stac_fastapi.pgstac.version.__version__

View File

@ -0,0 +1,78 @@
"""stac_fastapi: pgstac module."""
from setuptools import find_namespace_packages, setup
with open("README.md") as f:
desc = f.read()
install_requires = [
"attrs",
"orjson",
"pydantic",
"stac-fastapi.api>=6.0,<7.0",
"stac-fastapi.extensions>=6.0,<7.0",
"stac-fastapi.types>=6.0,<7.0",
"asyncpg",
"buildpg",
"brotli_asgi",
"cql2>=0.3.6",
"pypgstac>=0.8,<0.10",
"typing_extensions>=4.9.0",
]
extra_reqs = {
"dev": [
"pystac[validation]",
"pypgstac[psycopg]==0.9.*",
"pytest-postgresql",
"pytest",
"pytest-cov",
"pytest-asyncio>=0.17,<1.1",
"pre-commit",
"requests",
"shapely",
"httpx",
"twine",
"wheel",
],
"docs": [
"black>=23.10.1",
"mkdocs>=1.4.3",
"mkdocs-jupyter>=0.24.5",
"mkdocs-material[imaging]>=9.5",
"griffe-inherited-docstrings>=1.0.0",
"mkdocstrings[python]>=0.25.1",
],
"server": ["uvicorn[standard]==0.35.0"],
"awslambda": ["mangum"],
}
setup(
name="stac-fastapi.pgstac",
description="An implementation of STAC API based on the FastAPI framework and using the pgstac backend.",
long_description=desc,
long_description_content_type="text/markdown",
python_requires=">=3.9",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"License :: OSI Approved :: MIT License",
],
keywords="STAC FastAPI COG",
author="David Bitner",
author_email="david@developmentseed.org",
url="https://github.com/stac-utils/stac-fastapi",
license="MIT",
packages=find_namespace_packages(exclude=["tests", "scripts"]),
zip_safe=False,
install_requires=install_requires,
tests_require=extra_reqs["dev"],
extras_require=extra_reqs,
entry_points={"console_scripts": ["stac-fastapi-pgstac=stac_fastapi.pgstac.app:run"]},
)

View File

@ -0,0 +1 @@
"""stac_fastapi.pgstac module."""

View File

@ -0,0 +1,227 @@
"""FastAPI application using PGStac.
Enables the extensions specified as a comma-delimited list in
the ENABLED_EXTENSIONS environment variable (e.g. `transactions,sort,query`).
If the variable is not set, enables all extensions.
"""
import os
from contextlib import asynccontextmanager
from brotli_asgi import BrotliMiddleware
from fastapi import FastAPI
from stac_fastapi.api.app import StacApi
from stac_fastapi.api.middleware import CORSMiddleware, ProxyHeaderMiddleware
from stac_fastapi.api.models import (
EmptyRequest,
ItemCollectionUri,
JSONResponse,
create_get_request_model,
create_post_request_model,
create_request_model,
)
from stac_fastapi.extensions.core import (
CollectionSearchExtension,
CollectionSearchFilterExtension,
FieldsExtension,
FreeTextExtension,
ItemCollectionFilterExtension,
OffsetPaginationExtension,
SearchFilterExtension,
SortExtension,
TokenPaginationExtension,
TransactionExtension,
)
from stac_fastapi.extensions.core.fields import FieldsConformanceClasses
from stac_fastapi.extensions.core.free_text import FreeTextConformanceClasses
from stac_fastapi.extensions.core.query import QueryConformanceClasses
from stac_fastapi.extensions.core.sort import SortConformanceClasses
from stac_fastapi.extensions.third_party import BulkTransactionExtension
from starlette.middleware import Middleware
from stac_fastapi.pgstac.config import Settings
from stac_fastapi.pgstac.core import CoreCrudClient, health_check
from stac_fastapi.pgstac.db import close_db_connection, connect_to_db
from stac_fastapi.pgstac.extensions import QueryExtension
from stac_fastapi.pgstac.extensions.filter import FiltersClient
from stac_fastapi.pgstac.transactions import BulkTransactionsClient, TransactionsClient
from stac_fastapi.pgstac.types.search import PgstacSearch
settings = Settings()
# Each map below associates an extension key (as accepted in the
# ENABLED_EXTENSIONS env var) with its configured extension instance.
# search extensions
search_extensions_map = {
    "query": QueryExtension(),
    "sort": SortExtension(),
    "fields": FieldsExtension(),
    "filter": SearchFilterExtension(client=FiltersClient()),
    "pagination": TokenPaginationExtension(),
}
# collection_search extensions
cs_extensions_map = {
    "query": QueryExtension(conformance_classes=[QueryConformanceClasses.COLLECTIONS]),
    "sort": SortExtension(conformance_classes=[SortConformanceClasses.COLLECTIONS]),
    "fields": FieldsExtension(conformance_classes=[FieldsConformanceClasses.COLLECTIONS]),
    "filter": CollectionSearchFilterExtension(client=FiltersClient()),
    "free_text": FreeTextExtension(
        conformance_classes=[FreeTextConformanceClasses.COLLECTIONS],
    ),
    # Collection search pages with offsets, unlike item search's tokens.
    "pagination": OffsetPaginationExtension(),
}
# item_collection extensions
itm_col_extensions_map = {
    "query": QueryExtension(
        conformance_classes=[QueryConformanceClasses.ITEMS],
    ),
    "sort": SortExtension(
        conformance_classes=[SortConformanceClasses.ITEMS],
    ),
    "fields": FieldsExtension(conformance_classes=[FieldsConformanceClasses.ITEMS]),
    "filter": ItemCollectionFilterExtension(client=FiltersClient()),
    "pagination": TokenPaginationExtension(),
}
# Default: every key from the three maps plus collection_search itself.
enabled_extensions = {
    *search_extensions_map.keys(),
    *cs_extensions_map.keys(),
    *itm_col_extensions_map.keys(),
    "collection_search",
}
# A non-empty ENABLED_EXTENSIONS env var replaces the default set entirely.
if ext := os.environ.get("ENABLED_EXTENSIONS"):
    enabled_extensions = set(ext.split(","))
application_extensions = []
# Transaction endpoints are opt-in (they allow writes to the catalog).
with_transactions = os.environ.get("ENABLE_TRANSACTIONS_EXTENSIONS", "").lower() in [
    "yes",
    "true",
    "1",
]
if with_transactions:
    application_extensions.append(
        TransactionExtension(
            client=TransactionsClient(),
            settings=settings,
            response_class=JSONResponse,
        ),
    )
    application_extensions.append(
        BulkTransactionExtension(client=BulkTransactionsClient()),
    )
# /search models: build GET/POST request models from the enabled extensions.
search_extensions = [
    extension
    for key, extension in search_extensions_map.items()
    if key in enabled_extensions
]
post_request_model = create_post_request_model(search_extensions, base_model=PgstacSearch)
get_request_model = create_get_request_model(search_extensions)
application_extensions.extend(search_extensions)
# /collections/{collectionId}/items model
items_get_request_model = ItemCollectionUri
itm_col_extensions = [
    extension
    for key, extension in itm_col_extensions_map.items()
    if key in enabled_extensions
]
if itm_col_extensions:
    items_get_request_model = create_request_model(
        model_name="ItemCollectionUri",
        base_model=ItemCollectionUri,
        extensions=itm_col_extensions,
        request_type="GET",
    )
    application_extensions.extend(itm_col_extensions)
# /collections model: only built when collection_search itself is enabled.
collections_get_request_model = EmptyRequest
if "collection_search" in enabled_extensions:
    cs_extensions = [
        extension
        for key, extension in cs_extensions_map.items()
        if key in enabled_extensions
    ]
    collection_search_extension = CollectionSearchExtension.from_extensions(cs_extensions)
    collections_get_request_model = collection_search_extension.GET
    application_extensions.append(collection_search_extension)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI Lifespan.

    Opens the asyncpg read pool (and a write pool when transactions are
    enabled) for the lifetime of the application, closing it on shutdown.
    """
    await connect_to_db(app, add_write_connection_pool=with_transactions)
    yield
    await close_db_connection(app)
# Assemble the STAC API application from the settings, the enabled
# extensions and the request models built above.
api = StacApi(
    app=FastAPI(
        openapi_url=settings.openapi_url,
        docs_url=settings.docs_url,
        redoc_url=None,
        root_path=settings.root_path,
        title=settings.stac_fastapi_title,
        version=settings.stac_fastapi_version,
        description=settings.stac_fastapi_description,
        lifespan=lifespan,
    ),
    settings=settings,
    extensions=application_extensions,
    client=CoreCrudClient(pgstac_search_model=post_request_model),
    response_class=JSONResponse,
    items_get_request_model=items_get_request_model,
    search_get_request_model=get_request_model,
    search_post_request_model=post_request_model,
    collections_get_request_model=collections_get_request_model,
    middlewares=[
        # Brotli response compression, proxy header handling, then CORS.
        Middleware(BrotliMiddleware),
        Middleware(ProxyHeaderMiddleware),
        Middleware(
            CORSMiddleware,
            allow_origins=settings.cors_origins,
            allow_methods=settings.cors_methods,
        ),
    ],
    health_check=health_check,
)
# The underlying FastAPI instance, referenced by uvicorn and Mangum below.
app = api.app
def run():
    """Run app from command line using uvicorn if available.

    Raises:
        RuntimeError: when uvicorn is not installed.
    """
    try:
        import uvicorn
        uvicorn.run(
            "stac_fastapi.pgstac.app:app",
            host=settings.app_host,
            port=settings.app_port,
            log_level="info",
            reload=settings.reload,
            # Allow serving behind a path-prefixing reverse proxy.
            root_path=os.getenv("UVICORN_ROOT_PATH", ""),
        )
    except ImportError as e:
        raise RuntimeError("Uvicorn must be installed in order to use command") from e
if __name__ == "__main__":
    run()
def create_handler(app):
    """Create a handler to use with AWS Lambda if mangum available.

    Returns a Mangum adapter wrapping *app*, or None when the optional
    mangum dependency is not installed.
    """
    try:
        from mangum import Mangum
    except ImportError:
        return None
    return Mangum(app)
handler = create_handler(app)

View File

@ -0,0 +1,184 @@
"""Postgres API configuration."""
import warnings
from typing import Annotated, Any, List, Optional, Type
from urllib.parse import quote_plus as quote
from pydantic import BaseModel, Field, field_validator, model_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
from stac_fastapi.types.config import ApiSettings
from stac_fastapi.pgstac.types.base_item_cache import (
BaseItemCache,
DefaultBaseItemCache,
)
DEFAULT_INVALID_ID_CHARS = [
":",
"/",
"?",
"#",
"[",
"]",
"@",
"!",
"$",
"&",
"'",
"(",
")",
"*",
"+",
",",
";",
"=",
]
class ServerSettings(BaseModel):
    """Server runtime parameters.
    Attributes:
        search_path: Postgres search path. Defaults to "pgstac,public".
        application_name: PgSTAC Application name. Defaults to 'pgstac'.
    """
    search_path: str = "pgstac,public"
    application_name: str = "pgstac"
    # extra="allow": any additional keys are passed through verbatim to
    # asyncpg's `server_settings` when the pool is created (see db.py).
    model_config = SettingsConfigDict(extra="allow")
class PostgresSettings(BaseSettings):
    """Postgres-specific API settings.
    Attributes:
        pguser: postgres username.
        pgpassword: postgres password.
        pghost: hostname for the connection.
        pgport: database port.
        pgdatabase: database name.
    """
    # Deprecated aliases kept for backwards compatibility; the model
    # validator below copies their values onto the new pg* fields.
    postgres_user: Annotated[
        Optional[str],
        Field(
            deprecated="`postgres_user` is deprecated, please use `pguser`", default=None
        ),
    ]
    postgres_pass: Annotated[
        Optional[str],
        Field(
            deprecated="`postgres_pass` is deprecated, please use `pgpassword`",
            default=None,
        ),
    ]
    postgres_host_reader: Annotated[
        Optional[str],
        Field(
            deprecated="`postgres_host_reader` is deprecated, please use `pghost`",
            default=None,
        ),
    ]
    postgres_host_writer: Annotated[
        Optional[str],
        Field(
            deprecated="`postgres_host_writer` is deprecated, please use `pghost`",
            default=None,
        ),
    ]
    postgres_port: Annotated[
        Optional[int],
        Field(
            deprecated="`postgres_port` is deprecated, please use `pgport`", default=None
        ),
    ]
    postgres_dbname: Annotated[
        Optional[str],
        Field(
            deprecated="`postgres_dbname` is deprecated, please use `pgdatabase`",
            default=None,
        ),
    ]
    # Required connection parameters: no defaults, must come from the
    # environment or the .env file.
    pguser: str
    pgpassword: str
    pghost: str
    pgport: int
    pgdatabase: str
    # asyncpg pool tuning knobs (see db._create_pool).
    db_min_conn_size: int = 1
    db_max_conn_size: int = 10
    db_max_queries: int = 50000
    db_max_inactive_conn_lifetime: float = 300
    server_settings: ServerSettings = ServerSettings()
    model_config = {"env_file": ".env", "extra": "ignore"}
    @model_validator(mode="before")
    @classmethod
    def _pg_settings_compat(cls, data: Any) -> Any:
        # Copy deprecated postgres_* inputs onto the new pg* fields with a
        # DeprecationWarning. Falsy values (e.g. empty strings) are skipped.
        if isinstance(data, dict):
            compat = {
                "postgres_user": "pguser",
                "postgres_pass": "pgpassword",
                "postgres_host_reader": "pghost",
                "postgres_host_writer": "pghost",
                "postgres_port": "pgport",
                "postgres_dbname": "pgdatabase",
            }
            for old_key, new_key in compat.items():
                if val := data.get(old_key, None):
                    warnings.warn(
                        f"`{old_key}` is deprecated, please use `{new_key}`",
                        DeprecationWarning,
                        stacklevel=1,
                    )
                    data[new_key] = val
            # Split read/write hosts can no longer be expressed in a single
            # settings object; callers must pass write_postgres_settings.
            if (pgh_reader := data.get("postgres_host_reader")) and (
                pgh_writer := data.get("postgres_host_writer")
            ):
                if pgh_reader != pgh_writer:
                    raise ValueError(
                        "In order to use different host values for reading and writing "
                        "you must explicitly provide write_postgres_settings to the connect_to_db function"
                    )
        return data
    @property
    def connection_string(self):
        """Create reader psql connection string."""
        # Password is URL-quoted so special characters survive DSN parsing.
        return f"postgresql://{self.pguser}:{quote(self.pgpassword)}@{self.pghost}:{self.pgport}/{self.pgdatabase}"
class Settings(ApiSettings):
    """API settings.
    Attributes:
        use_api_hydrate: perform hydration of stac items within stac-fastapi.
        invalid_id_chars: list of characters that are not allowed in item or collection ids.
    """
    use_api_hydrate: bool = False
    invalid_id_chars: List[str] = DEFAULT_INVALID_ID_CHARS
    base_item_cache: Type[BaseItemCache] = DefaultBaseItemCache
    # Comma-separated strings in the environment; the validators below
    # split them into lists at load time.
    cors_origins: str = "*"
    cors_methods: str = "GET,POST,OPTIONS"
    testing: bool = False
    @field_validator("cors_origins")
    def parse_cors_origin(cls, v):
        """Parse CORS origins."""
        return [origin.strip() for origin in v.split(",")]
    @field_validator("cors_methods")
    def parse_cors_methods(cls, v):
        """Parse CORS methods."""
        return [method.strip() for method in v.split(",")]

View File

@ -0,0 +1,653 @@
"""Item crud client."""
import json
import re
from typing import Any, Dict, List, Optional, Set, Type, Union
from urllib.parse import unquote_plus, urljoin
import attr
import orjson
from asyncpg.exceptions import InvalidDatetimeFormatError
from buildpg import render
from cql2 import Expr
from fastapi import HTTPException, Request
from pydantic import ValidationError
from pypgstac.hydration import hydrate
from stac_fastapi.api.models import JSONResponse
from stac_fastapi.types.core import AsyncBaseCoreClient, Relations
from stac_fastapi.types.errors import InvalidQueryParameter, NotFoundError
from stac_fastapi.types.requests import get_base_url
from stac_fastapi.types.stac import Collection, Collections, Item, ItemCollection
from stac_pydantic.shared import BBox, MimeTypes
from stac_fastapi.pgstac.config import Settings
from stac_fastapi.pgstac.models.links import (
CollectionLinks,
CollectionSearchPagingLinks,
ItemCollectionLinks,
ItemLinks,
PagingLinks,
SearchLinks,
)
from stac_fastapi.pgstac.types.search import PgstacSearch
from stac_fastapi.pgstac.utils import filter_fields
NumType = Union[float, int]
@attr.s
class CoreCrudClient(AsyncBaseCoreClient):
    """Client for core endpoints defined by stac."""
    # Search model class; replaced at startup with one extended by the
    # enabled extensions (see app.py's create_post_request_model).
    pgstac_search_model: Type[PgstacSearch] = attr.ib(default=PgstacSearch)
    async def all_collections( # noqa: C901
        self,
        request: Request,
        # Extensions
        bbox: Optional[BBox] = None,
        datetime: Optional[str] = None,
        limit: Optional[int] = None,
        offset: Optional[int] = None,
        query: Optional[str] = None,
        fields: Optional[List[str]] = None,
        sortby: Optional[str] = None,
        filter_expr: Optional[str] = None,
        filter_lang: Optional[str] = None,
        q: Optional[List[str]] = None,
        **kwargs,
    ) -> Collections:
        """Cross catalog search (GET).
        Called with `GET /collections`.
        Returns:
            Collections which match the search criteria, returns all
            collections by default.
        """
        base_url = get_base_url(request)
        next_link: Optional[Dict[str, Any]] = None
        prev_link: Optional[Dict[str, Any]] = None
        collections_result: Collections
        if self.extension_is_enabled("CollectionSearchExtension"):
            base_args = {
                "bbox": bbox,
                "limit": limit,
                "offset": offset,
                "query": orjson.loads(unquote_plus(query)) if query else query,
            }
            clean_args = self._clean_search_args(
                base_args=base_args,
                datetime=datetime,
                fields=fields,
                sortby=sortby,
                filter_query=filter_expr,
                filter_lang=filter_lang,
                q=q,
            )
            # NOTE(review): `q` (the free-text parameter) is shadowed by the
            # rendered SQL below; it has already been consumed by
            # _clean_search_args at that point.
            async with request.app.state.get_connection(request, "r") as conn:
                q, p = render(
                    """
                    SELECT * FROM collection_search(:req::text::jsonb);
                    """,
                    req=json.dumps(clean_args),
                )
                collections_result = await conn.fetchval(q, *p)
            # pgstac returns spec-compliant paging links; pull out next/prev.
            if links := collections_result.get("links"):
                for link in links:
                    if link["rel"] == "next":
                        next_link = link
                    elif link["rel"] == "prev":
                        prev_link = link
        else:
            async with request.app.state.get_connection(request, "r") as conn:
                cols = await conn.fetchval(
                    """
                    SELECT * FROM all_collections();
                    """
                )
            collections_result = {"collections": cols, "links": []}
        linked_collections: List[Collection] = []
        collections = collections_result["collections"]
        if collections is not None and len(collections) > 0:
            for c in collections:
                coll = Collection(**c)
                coll["links"] = await CollectionLinks(
                    collection_id=coll["id"], request=request
                ).get_links(extra_links=coll.get("links"))
                # Advertise queryables when any filter extension is on.
                if self.extension_is_enabled(
                    "FilterExtension"
                ) or self.extension_is_enabled("ItemCollectionFilterExtension"):
                    coll["links"].append(
                        {
                            "rel": Relations.queryables.value,
                            "type": MimeTypes.jsonschema.value,
                            "title": "Queryables",
                            "href": urljoin(
                                base_url, f"collections/{coll['id']}/queryables"
                            ),
                        }
                    )
                linked_collections.append(coll)
        links = await CollectionSearchPagingLinks(
            request=request,
            next=next_link,
            prev=prev_link,
        ).get_links()
        return Collections(
            collections=linked_collections or [],
            links=links,
            numberMatched=collections_result.get(
                "numberMatched", len(linked_collections)
            ),
            numberReturned=collections_result.get(
                "numberReturned", len(linked_collections)
            ),
        )
    async def get_collection(
        self, collection_id: str, request: Request, **kwargs
    ) -> Collection:
        """Get collection by id.
        Called with `GET /collections/{collection_id}`.
        Args:
            collection_id: ID of the collection.
        Returns:
            Collection.
        Raises:
            NotFoundError: if the collection does not exist.
        """
        collection: Optional[Dict[str, Any]]
        async with request.app.state.get_connection(request, "r") as conn:
            q, p = render(
                """
                SELECT * FROM get_collection(:id::text);
                """,
                id=collection_id,
            )
            collection = await conn.fetchval(q, *p)
        if collection is None:
            raise NotFoundError(f"Collection {collection_id} does not exist.")
        collection["links"] = await CollectionLinks(
            collection_id=collection_id, request=request
        ).get_links(extra_links=collection.get("links"))
        if self.extension_is_enabled("FilterExtension") or self.extension_is_enabled(
            "ItemCollectionFilterExtension"
        ):
            base_url = get_base_url(request)
            collection["links"].append(
                {
                    "rel": Relations.queryables.value,
                    "type": MimeTypes.jsonschema.value,
                    "title": "Queryables",
                    "href": urljoin(base_url, f"collections/{collection_id}/queryables"),
                }
            )
        return Collection(**collection)
    async def _get_base_item(
        self, collection_id: str, request: Request
    ) -> Dict[str, Any]:
        """Get the base item of a collection for use in rehydrating full item collection properties.
        Args:
            collection_id: ID of the collection.
        Returns:
            Item.
        Raises:
            NotFoundError: if pgstac has no base item for the collection.
        """
        item: Optional[Dict[str, Any]]
        async with request.app.state.get_connection(request, "r") as conn:
            q, p = render(
                """
                SELECT * FROM collection_base_item(:collection_id::text);
                """,
                collection_id=collection_id,
            )
            item = await conn.fetchval(q, *p)
        if item is None:
            raise NotFoundError(f"A base item for {collection_id} does not exist.")
        return item
    async def _search_base( # noqa: C901
        self,
        search_request: PgstacSearch,
        request: Request,
    ) -> ItemCollection:
        """Cross catalog search (POST).
        Called with `POST /search`.
        Args:
            search_request: search request parameters.
        Returns:
            ItemCollection containing items which match the search criteria.
        """
        items: Dict[str, Any]
        settings: Settings = request.app.state.settings
        search_request.conf = search_request.conf or {}
        # When hydrating in the API, ask pgstac for dehydrated (sparse) items.
        search_request.conf["nohydrate"] = settings.use_api_hydrate
        search_request_json = search_request.model_dump_json(
            exclude_none=True, by_alias=True
        )
        try:
            async with request.app.state.get_connection(request, "r") as conn:
                q, p = render(
                    """
                    SELECT * FROM search(:req::text::jsonb);
                    """,
                    req=search_request_json,
                )
                items = await conn.fetchval(q, *p)
        except InvalidDatetimeFormatError as e:
            raise InvalidQueryParameter(
                f"Datetime parameter {search_request.datetime} is invalid."
            ) from e
        # Starting in pgstac 0.9.0, the `next` and `prev` tokens are returned in spec-compliant links with method GET
        next_from_link: Optional[str] = None
        prev_from_link: Optional[str] = None
        for link in items.get("links", []):
            if link.get("rel") == "next":
                next_from_link = link.get("href").split("token=next:")[1]
            if link.get("rel") == "prev":
                prev_from_link = link.get("href").split("token=prev:")[1]
        # Older pgstac returns bare `next`/`prev` keys; prefer those, falling
        # back to the tokens parsed from the links above.
        next: Optional[str] = items.pop("next", next_from_link)
        prev: Optional[str] = items.pop("prev", prev_from_link)
        collection = ItemCollection(**items)
        fields = getattr(search_request, "fields", None)
        include: Set[str] = fields.include if fields and fields.include else set()
        exclude: Set[str] = fields.exclude if fields and fields.exclude else set()
        async def _add_item_links(
            feature: Item,
            collection_id: Optional[str] = None,
            item_id: Optional[str] = None,
        ) -> None:
            """Add ItemLinks to the Item.
            If the fields extension is excluding links, then don't add them.
            Also skip links if the item doesn't provide collection and item ids.
            """
            collection_id = feature.get("collection") or collection_id
            item_id = feature.get("id") or item_id
            # NOTE(review): `and` binds tighter than `or`, so this parses as
            # `not exclude or ("links" not in exclude and all(...))` — when
            # `exclude` is empty the id check is skipped. Confirm intent.
            if not exclude or "links" not in exclude and all([collection_id, item_id]):
                feature["links"] = await ItemLinks(
                    collection_id=collection_id,  # type: ignore
                    item_id=item_id,  # type: ignore
                    request=request,
                ).get_links(extra_links=feature.get("links"))
        cleaned_features: List[Item] = []
        if settings.use_api_hydrate:
            async def _get_base_item(collection_id: str) -> Dict[str, Any]:
                return await self._get_base_item(collection_id, request=request)
            base_item_cache = settings.base_item_cache(
                fetch_base_item=_get_base_item, request=request
            )
            for feature in collection.get("features") or []:
                base_item = await base_item_cache.get(feature.get("collection"))
                # Exclude None values
                base_item = {k: v for k, v in base_item.items() if v is not None}
                feature = hydrate(base_item, feature)
                # Grab ids needed for links that may be removed by the fields extension.
                collection_id = feature.get("collection")
                item_id = feature.get("id")
                feature = filter_fields(feature, include, exclude)
                await _add_item_links(feature, collection_id, item_id)
                cleaned_features.append(feature)
        else:
            for feature in collection.get("features") or []:
                await _add_item_links(feature)
                cleaned_features.append(feature)
        collection["features"] = cleaned_features
        collection["links"] = await PagingLinks(
            request=request,
            next=next,
            prev=prev,
        ).get_links()
        return collection
    async def item_collection(
        self,
        collection_id: str,
        request: Request,
        bbox: Optional[BBox] = None,
        datetime: Optional[str] = None,
        limit: Optional[int] = None,
        # Extensions
        query: Optional[str] = None,
        fields: Optional[List[str]] = None,
        sortby: Optional[str] = None,
        filter_expr: Optional[str] = None,
        filter_lang: Optional[str] = None,
        token: Optional[str] = None,
        **kwargs,
    ) -> ItemCollection:
        """Get all items from a specific collection.
        Called with `GET /collections/{collection_id}/items`
        Args:
            collection_id: id of the collection.
            limit: number of items to return.
            token: pagination token.
        Returns:
            An ItemCollection.
        """
        # If collection does not exist, NotFoundError wil be raised
        await self.get_collection(collection_id, request=request)
        base_args = {
            "collections": [collection_id],
            "bbox": bbox,
            "datetime": datetime,
            "limit": limit,
            "token": token,
            "query": orjson.loads(unquote_plus(query)) if query else query,
        }
        clean = self._clean_search_args(
            base_args=base_args,
            filter_query=filter_expr,
            filter_lang=filter_lang,
            fields=fields,
            sortby=sortby,
        )
        try:
            search_request = self.pgstac_search_model(**clean)
        except ValidationError as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid parameters provided {e}"
            ) from e
        item_collection = await self._search_base(search_request, request=request)
        links = await ItemCollectionLinks(
            collection_id=collection_id, request=request
        ).get_links(extra_links=item_collection["links"])
        item_collection["links"] = links
        # If we have the `fields` extension enabled
        # we need to avoid Pydantic validation because the
        # Items might not be a valid STAC Item objects
        if fields := getattr(search_request, "fields", None):
            if fields.include or fields.exclude:
                return JSONResponse(item_collection)  # type: ignore
        return ItemCollection(**item_collection)
    async def get_item(
        self, item_id: str, collection_id: str, request: Request, **kwargs
    ) -> Item:
        """Get item by id.
        Called with `GET /collections/{collection_id}/items/{item_id}`.
        Args:
            item_id: ID of the item.
            collection_id: ID of the collection the item is in.
        Returns:
            Item.
        Raises:
            NotFoundError: if the collection or the item does not exist.
        """
        # If collection does not exist, NotFoundError wil be raised
        await self.get_collection(collection_id, request=request)
        search_request = self.pgstac_search_model(
            ids=[item_id], collections=[collection_id], limit=1
        )
        item_collection = await self._search_base(search_request, request=request)
        if not item_collection["features"]:
            raise NotFoundError(
                f"Item {item_id} in Collection {collection_id} does not exist."
            )
        return Item(**item_collection["features"][0])
    async def post_search(
        self, search_request: PgstacSearch, request: Request, **kwargs
    ) -> ItemCollection:
        """Cross catalog search (POST).
        Called with `POST /search`.
        Args:
            search_request: search request parameters.
        Returns:
            ItemCollection containing items which match the search criteria.
        """
        item_collection = await self._search_base(search_request, request=request)
        # If we have the `fields` extension enabled
        # we need to avoid Pydantic validation because the
        # Items might not be a valid STAC Item objects
        if fields := getattr(search_request, "fields", None):
            if fields.include or fields.exclude:
                return JSONResponse(item_collection)  # type: ignore
        links = await SearchLinks(request=request).get_links(
            extra_links=item_collection["links"]
        )
        item_collection["links"] = links
        return ItemCollection(**item_collection)
    async def get_search(
        self,
        request: Request,
        collections: Optional[List[str]] = None,
        ids: Optional[List[str]] = None,
        bbox: Optional[BBox] = None,
        intersects: Optional[str] = None,
        datetime: Optional[str] = None,
        limit: Optional[int] = None,
        # Extensions
        query: Optional[str] = None,
        fields: Optional[List[str]] = None,
        sortby: Optional[str] = None,
        filter_expr: Optional[str] = None,
        filter_lang: Optional[str] = None,
        token: Optional[str] = None,
        **kwargs,
    ) -> ItemCollection:
        """Cross catalog search (GET).
        Called with `GET /search`.
        Returns:
            ItemCollection containing items which match the search criteria.
        """
        # Parse request parameters
        base_args = {
            "collections": collections,
            "ids": ids,
            "bbox": bbox,
            "limit": limit,
            "token": token,
            "query": orjson.loads(unquote_plus(query)) if query else query,
        }
        clean = self._clean_search_args(
            base_args=base_args,
            intersects=intersects,
            datetime=datetime,
            fields=fields,
            sortby=sortby,
            filter_query=filter_expr,
            filter_lang=filter_lang,
        )
        try:
            search_request = self.pgstac_search_model(**clean)
        except ValidationError as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid parameters provided {e}"
            ) from e
        item_collection = await self._search_base(search_request, request=request)
        links = await SearchLinks(request=request).get_links(
            extra_links=item_collection["links"]
        )
        item_collection["links"] = links
        # If we have the `fields` extension enabled
        # we need to avoid Pydantic validation because the
        # Items might not be a valid STAC Item objects
        if fields := getattr(search_request, "fields", None):
            if fields.include or fields.exclude:
                return JSONResponse(item_collection)  # type: ignore
        return ItemCollection(**item_collection)
    def _clean_search_args( # noqa: C901
        self,
        base_args: Dict[str, Any],
        intersects: Optional[str] = None,
        datetime: Optional[str] = None,
        fields: Optional[List[str]] = None,
        sortby: Optional[str] = None,
        filter_query: Optional[str] = None,
        filter_lang: Optional[str] = None,
        q: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """Clean up search arguments to match format expected by pgstac"""
        if filter_query:
            if filter_lang == "cql2-text":
                # Convert CQL2 text to CQL2 JSON, the only form pgstac takes.
                e = Expr(filter_query)
                base_args["filter"] = e.to_json()
                base_args["filter_lang"] = "cql2-json"
            else:
                base_args["filter"] = orjson.loads(filter_query)
                base_args["filter_lang"] = filter_lang
        if datetime:
            base_args["datetime"] = datetime
        if intersects:
            base_args["intersects"] = orjson.loads(unquote_plus(intersects))
        if sortby:
            # https://github.com/radiantearth/stac-spec/tree/master/api-spec/extensions/sort#http-get-or-post-form
            # NOTE(review): annotated Optional[str] but iterated as a
            # sequence of "+field"/"-field" strings — confirm callers pass a list.
            sort_param = []
            for sort in sortby:
                sortparts = re.match(r"^([+-]?)(.*)$", sort)
                if sortparts:
                    sort_param.append(
                        {
                            "field": sortparts.group(2).strip(),
                            "direction": "desc" if sortparts.group(1) == "-" else "asc",
                        }
                    )
            base_args["sortby"] = sort_param
        if fields:
            # Leading "-" excludes a field, "+" (or no prefix) includes it.
            includes = set()
            excludes = set()
            for field in fields:
                if field[0] == "-":
                    excludes.add(field[1:])
                elif field[0] == "+":
                    includes.add(field[1:])
                else:
                    includes.add(field)
            base_args["fields"] = {"include": includes, "exclude": excludes}
        if q:
            base_args["q"] = " OR ".join(q)
        # Remove None values from dict
        clean = {}
        for k, v in base_args.items():
            if v is not None and v != []:
                clean[k] = v
        return clean
async def health_check(request: Request) -> Union[Dict, JSONResponse]:
    """PgSTAC HealthCheck.

    Returns a 503 JSONResponse when the application lifespan never ran or
    the database is unreachable; otherwise a dict reporting "UP" along with
    the pgstac version.
    """
    resp = {
        "status": "UP",
        "lifespan": {
            "status": "UP",
        },
    }
    # Lifespan never ran -> no connection getter was attached to app.state.
    if not hasattr(request.app.state, "get_connection"):
        return JSONResponse(
            status_code=503,
            content={
                "status": "DOWN",
                "lifespan": {
                    "status": "DOWN",
                    "message": "application lifespan wasn't run",
                },
                "pgstac": {
                    "status": "DOWN",
                    "message": "Could not connect to database",
                },
            },
        )
    try:
        async with request.app.state.get_connection(request, "r") as conn:
            q, p = render(
                """SELECT pgstac.get_version();""",
            )
            version = await conn.fetchval(q, *p)
    except Exception as e:
        # Any connection/query failure marks the database as down.
        resp["status"] = "DOWN"
        resp["pgstac"] = {
            "status": "DOWN",
            "message": str(e),
        }
        return JSONResponse(status_code=503, content=resp)
    resp["pgstac"] = {
        "status": "UP",
        "pgstac_version": version,
    }
    return resp

View File

@ -0,0 +1,153 @@
"""Database connection handling."""
import json
from contextlib import asynccontextmanager, contextmanager
from typing import (
AsyncIterator,
Callable,
Dict,
Generator,
List,
Literal,
Optional,
Union,
)
import orjson
from asyncpg import Connection, Pool, exceptions
from buildpg import V, asyncpg, render
from fastapi import FastAPI, HTTPException, Request
from stac_fastapi.types.errors import (
ConflictError,
DatabaseError,
ForeignKeyError,
NotFoundError,
)
from stac_fastapi.pgstac.config import PostgresSettings
async def con_init(conn):
    """Use orjson for json returns."""
    # Register orjson as encoder/decoder for both Postgres JSON types.
    for type_name in ("json", "jsonb"):
        await conn.set_type_codec(
            type_name,
            encoder=orjson.dumps,
            decoder=orjson.loads,
            schema="pg_catalog",
        )
ConnectionGetter = Callable[[Request, Literal["r", "w"]], AsyncIterator[Connection]]
async def _create_pool(settings: PostgresSettings) -> Pool:
    """Create a connection pool."""
    return await asyncpg.create_pool(
        settings.connection_string,
        min_size=settings.db_min_conn_size,
        max_size=settings.db_max_conn_size,
        max_queries=settings.db_max_queries,
        max_inactive_connection_lifetime=settings.db_max_inactive_conn_lifetime,
        # Install orjson codecs on every new connection.
        init=con_init,
        # Extra ServerSettings keys are forwarded verbatim to Postgres.
        server_settings=settings.server_settings.model_dump(),
    )
async def connect_to_db(
    app: FastAPI,
    get_conn: Optional[ConnectionGetter] = None,
    postgres_settings: Optional[PostgresSettings] = None,
    add_write_connection_pool: bool = False,
    write_postgres_settings: Optional[PostgresSettings] = None,
) -> None:
    """Create connection pools & connection retriever on application.

    Args:
        app: application to attach the pools and getter to.
        get_conn: optional custom connection getter; defaults to
            `get_connection` below.
        postgres_settings: reader pool settings; loaded from the
            environment when not provided.
        add_write_connection_pool: also create a write pool.
        write_postgres_settings: writer pool settings; falls back to the
            reader settings when omitted.
    """
    if not postgres_settings:
        postgres_settings = PostgresSettings()
    app.state.readpool = await _create_pool(postgres_settings)
    if add_write_connection_pool:
        if not write_postgres_settings:
            write_postgres_settings = postgres_settings
        app.state.writepool = await _create_pool(write_postgres_settings)
    app.state.get_connection = get_conn if get_conn else get_connection
async def close_db_connection(app: FastAPI) -> None:
    """Close connection."""
    # The read pool always exists; the write pool only when transactions
    # were enabled at startup.
    await app.state.readpool.close()
    writepool = getattr(app.state, "writepool", None)
    if writepool:
        await writepool.close()
@asynccontextmanager
async def get_connection(
    request: Request,
    readwrite: Literal["r", "w"] = "r",
) -> AsyncIterator[Connection]:
    """Retrieve connection from database connection pool.

    Raises:
        HTTPException: 500 when a write connection is requested but no
            write pool was created at startup.
    """
    pool = request.app.state.readpool
    if readwrite == "w":
        pool = getattr(request.app.state, "writepool", None)
        if not pool:
            raise HTTPException(
                status_code=500,
                detail="Could not find connection pool for write operations",
            )
    # Translate pgstac/asyncpg errors raised while the connection is in use.
    with translate_pgstac_errors():
        async with pool.acquire() as conn:
            yield conn
async def dbfunc(conn: Connection, func: str, arg: Union[str, Dict, List]):
    """Wrap PLPGSQL Functions.
    Keyword arguments:
    conn -- the asyncpg connection to use to run the query
    func -- the name of the PostgreSQL function to call
    arg -- the argument to the PostgreSQL function as either a string
    or a dict that will be converted into jsonb
    """
    with translate_pgstac_errors():
        if isinstance(arg, str):
            # String arguments are passed through as text.
            q, p = render(
                """
                SELECT * FROM :func(:item::text);
                """,
                func=V(func),
                item=arg,
            )
            return await conn.fetchval(q, *p)
        else:
            # Dict/list arguments are serialized and cast to jsonb.
            q, p = render(
                """
                SELECT * FROM :func(:item::text::jsonb);
                """,
                func=V(func),
                item=json.dumps(arg),
            )
            return await conn.fetchval(q, *p)
@contextmanager
def translate_pgstac_errors() -> Generator[None, None, None]:
    """Context manager that translates pgstac errors into FastAPI errors.

    Maps asyncpg exceptions onto the stac-fastapi error types so the API
    layer renders the appropriate HTTP status codes.
    """
    try:
        yield
    except exceptions.UniqueViolationError as e:
        raise ConflictError from e
    except exceptions.NoDataFoundError as e:
        raise NotFoundError from e
    except exceptions.NotNullViolationError as e:
        raise DatabaseError from e
    except exceptions.ForeignKeyViolationError as e:
        raise ForeignKeyError from e

View File

@ -0,0 +1,6 @@
"""pgstac extension customisations."""
from .filter import FiltersClient
from .query import QueryExtension
__all__ = ["QueryExtension", "FiltersClient"]

View File

@ -0,0 +1,40 @@
"""Get Queryables."""
from typing import Any, Dict, Optional
from buildpg import render
from fastapi import Request
from stac_fastapi.extensions.core.filter.client import AsyncBaseFiltersClient
from stac_fastapi.types.errors import NotFoundError
class FiltersClient(AsyncBaseFiltersClient):
    """Defines a pattern for implementing the STAC filter extension."""
    async def get_queryables(
        self,
        request: Request,
        collection_id: Optional[str] = None,
        **kwargs: Any,
    ) -> Dict[str, Any]:
        """Get the queryables available for the given collection_id.
        If collection_id is None, returns the intersection of all
        queryables over all collections.
        This base implementation returns a blank queryable schema. This is not allowed
        under OGC CQL but it is allowed by the STAC API Filter Extension
        https://github.com/radiantearth/stac-api-spec/tree/master/fragments/filter#queryables
        Raises:
            NotFoundError: when pgstac returns no queryables for the id.
        """
        # collection_id=None is passed as SQL NULL -> global queryables.
        async with request.app.state.get_connection(request, "r") as conn:
            q, p = render(
                """
                SELECT * FROM get_queryables(:collection::text);
                """,
                collection=collection_id,
            )
            queryables = await conn.fetchval(q, *p)
        if not queryables:
            raise NotFoundError(f"Collection {collection_id} not found")
        # Advertise the schema's id as the URL it was served from.
        queryables["$id"] = str(request.url)
        return queryables

View File

@ -0,0 +1,47 @@
"""Pgstac query customisation."""
import operator
from enum import auto
from types import DynamicClassAttribute
from typing import Any, Callable, Dict, Optional
from pydantic import BaseModel
from stac_fastapi.extensions.core.query import QueryExtension as QueryExtensionBase
from stac_pydantic.utils import AutoValueEnum
class Operator(str, AutoValueEnum):
    """Defines the set of operators supported by the API."""
    eq = auto()
    ne = auto()
    lt = auto()
    lte = auto()
    gt = auto()
    gte = auto()
    # TODO: These are defined in the spec but aren't currently implemented by the api
    # startsWith = auto()
    # endsWith = auto()
    # contains = auto()
    # in = auto()
    @DynamicClassAttribute
    def operator(self) -> Callable[[Any, Any], bool]:
        """Return python operator.

        The stdlib `operator` module names the comparisons `le`/`ge`, not
        `lte`/`gte`, so those two spellings are mapped before the lookup
        (the previous bare `getattr(operator, self._value_)` raised
        AttributeError for `lte` and `gte`).
        """
        aliases = {"lte": "le", "gte": "ge"}
        return getattr(operator, aliases.get(self._value_, self._value_))
class QueryExtensionPostRequest(BaseModel):
    """Query Extension POST request model.

    Maps a property name to ``{operator: value}`` pairs, e.g.
    ``{"proj:epsg": {"eq": 3857}}``.
    """

    # Optional: omitted entirely when the request has no "query" member.
    query: Optional[Dict[str, Dict[Operator, Any]]] = None
class QueryExtension(QueryExtensionBase):
    """Query Extension.

    Override the POST request model to add validation against
    supported fields
    """

    # Replace the base POST model so queries are validated against Operator.
    POST = QueryExtensionPostRequest

View File

@ -0,0 +1 @@
"""stac_fastapi.pgstac.models module."""

View File

@ -0,0 +1,350 @@
"""link helpers."""
from typing import Any, Dict, List, Optional
from urllib.parse import ParseResult, parse_qs, unquote, urlencode, urljoin, urlparse
import attr
from stac_fastapi.types.requests import get_base_url
from stac_pydantic.links import Relations
from stac_pydantic.shared import MimeTypes
from starlette.requests import Request
# These can be inferred from the item/collection so they aren't included in the database
# Instead they are dynamically generated when querying the database using the classes defined below
INFERRED_LINK_RELS = ["self", "item", "parent", "collection", "root"]


def filter_links(links: List[Dict]) -> List[Dict]:
    """Return ``links`` with server-inferred relations removed."""
    inferred = set(INFERRED_LINK_RELS)
    return [link for link in links if link["rel"] not in inferred]
def merge_params(url: str, newparams: Dict) -> str:
    """Return ``url`` with ``newparams`` merged into its query string.

    Existing parameters with the same name are overwritten; the rest of the
    URL (scheme, host, path, fragment) is preserved.
    """
    parsed = urlparse(url)
    query = parse_qs(parsed.query)
    query.update(newparams)
    # doseq=True expands list values; unquote keeps hrefs human-readable.
    merged_query = unquote(urlencode(query, True))
    return ParseResult(
        scheme=parsed.scheme,
        netloc=parsed.netloc,
        path=parsed.path,
        params=parsed.params,
        query=merged_query,
        fragment=parsed.fragment,
    ).geturl()
@attr.s
class BaseLinks:
    """Create inferred links common to collections and items."""

    # The incoming request; every generated href is derived from it.
    request: Request = attr.ib()

    @property
    def base_url(self):
        """Get the base url."""
        return get_base_url(self.request)

    @property
    def url(self):
        """Get the current request url.

        Rebuilt from ``request.base_url`` + path so a configured root path
        is never duplicated (see the inline explanation below).
        """
        base_url = self.request.base_url
        path = self.request.url.path

        # root path can be set in the request scope in two different ways:
        # - by uvicorn when running with --root-path
        # - by FastAPI when running with FastAPI(root_path="...")
        #
        # When root path is set by uvicorn, request.url.path will have the root path prefix.
        # eg. if root path is "/api" and the path is "/collections",
        # the request.url.path will be "/api/collections"
        #
        # We need to remove the root path prefix from the path before
        # joining the base_url and path to get the full url to avoid
        # having root_path twice in the url
        if (
            root_path := self.request.scope.get("root_path")
        ) and not self.request.app.root_path:
            # self.request.app.root_path is set by FastAPI when running with FastAPI(root_path="...")
            # If self.request.app.root_path is not set but self.request.scope.get("root_path") is set,
            # then the root path is set by uvicorn
            # So we need to remove the root path prefix from the path before
            # joining the base_url and path to get the full url
            if path.startswith(root_path):
                path = path[len(root_path) :]

        url = urljoin(str(base_url), path.lstrip("/"))

        # Re-attach the original query string, if any.
        if qs := self.request.url.query:
            url += f"?{qs}"

        return url

    def resolve(self, url):
        """Resolve url to the current request url."""
        return urljoin(str(self.base_url), str(url))

    def link_self(self) -> Dict:
        """Return the self link."""
        return {
            "rel": Relations.self.value,
            "type": MimeTypes.json.value,
            "href": self.url,
        }

    def link_root(self) -> Dict:
        """Return the catalog root."""
        return {
            "rel": Relations.root.value,
            "type": MimeTypes.json.value,
            "href": self.base_url,
        }

    def create_links(self) -> List[Dict[str, Any]]:
        """Return all inferred links.

        Collects the result of every ``link_*`` method defined on the
        (sub)class, skipping methods that return ``None``.
        """
        links = []
        for name in dir(self):
            if name.startswith("link_") and callable(getattr(self, name)):
                link = getattr(self, name)()
                if link is not None:
                    links.append(link)
        return links

    async def get_links(
        self, extra_links: Optional[List[Dict[str, Any]]] = None
    ) -> List[Dict[str, Any]]:
        """
        Generate all the links.

        Get the links object for a stac resource by iterating through
        available methods on this class that start with link_.
        """
        # TODO: Pass request.json() into function so this doesn't need to be coroutine
        if self.request.method == "POST":
            # Cache the body for link_* methods that need it (e.g. POST paging).
            self.request.postbody = await self.request.json()

        # join passed in links with generated links
        # and update relative paths
        links = self.create_links()

        if extra_links:
            # For extra links passed in,
            # add links modified with a resolved href.
            # Drop any links that are dynamically
            # determined by the server (e.g. self, parent, etc.)
            # Resolving the href allows for relative paths
            # to be stored in pgstac and for the hrefs in the
            # links of response STAC objects to be resolved
            # to the request url.
            links += [
                {**link, "href": self.resolve(link["href"])}
                for link in extra_links
                if link["rel"] not in INFERRED_LINK_RELS
            ]

        return links
@attr.s
class PagingLinks(BaseLinks):
    """Create next/previous paging links from pgstac paging tokens."""

    next: Optional[str] = attr.ib(kw_only=True, default=None)
    prev: Optional[str] = attr.ib(kw_only=True, default=None)

    def link_next(self) -> Optional[Dict[str, Any]]:
        """Create link for next page."""
        if self.next is None:
            return None

        method = self.request.method
        if method == "GET":
            # GET paging: carry the token in the query string.
            return {
                "rel": Relations.next.value,
                "type": MimeTypes.geojson.value,
                "method": method,
                "href": merge_params(self.url, {"token": f"next:{self.next}"}),
            }
        if method == "POST":
            # POST paging: replay the original body with the token added.
            return {
                "rel": Relations.next.value,
                "type": MimeTypes.geojson.value,
                "method": method,
                "href": self.url,
                "body": {**self.request.postbody, "token": f"next:{self.next}"},
            }
        return None

    def link_prev(self) -> Optional[Dict[str, Any]]:
        """Create link for previous page."""
        if self.prev is None:
            return None

        method = self.request.method
        if method == "GET":
            return {
                "rel": Relations.previous.value,
                "type": MimeTypes.geojson.value,
                "method": method,
                "href": merge_params(self.url, {"token": f"prev:{self.prev}"}),
            }
        if method == "POST":
            return {
                "rel": Relations.previous.value,
                "type": MimeTypes.geojson.value,
                "method": method,
                "href": self.url,
                "body": {**self.request.postbody, "token": f"prev:{self.prev}"},
            }
        return None
@attr.s
class CollectionSearchPagingLinks(BaseLinks):
    """Paging links for collection-search results.

    Unlike :class:`PagingLinks`, ``next``/``prev`` here are dicts whose
    ``"body"`` member holds the query parameters for the adjacent page.
    """

    next: Optional[Dict[str, Any]] = attr.ib(kw_only=True, default=None)
    prev: Optional[Dict[str, Any]] = attr.ib(kw_only=True, default=None)

    def link_next(self) -> Optional[Dict[str, Any]]:
        """Create link for next page."""
        if self.next is not None:
            method = self.request.method
            if method == "GET":
                # if offset is equal to default value (0), drop it
                if self.next["body"].get("offset", -1) == 0:
                    _ = self.next["body"].pop("offset")

                href = merge_params(self.url, self.next["body"])

                # if next link is equal to this link, skip it
                if href == self.url:
                    return None

                return {
                    "rel": Relations.next.value,
                    "type": MimeTypes.geojson.value,
                    "method": method,
                    "href": href,
                }

        return None

    def link_prev(self) -> Optional[Dict[str, Any]]:
        """Create link for previous page."""
        if self.prev is not None:
            method = self.request.method
            if method == "GET":
                href = merge_params(self.url, self.prev["body"])

                # if prev link is equal to this link, skip it
                if href == self.url:
                    return None

                return {
                    "rel": Relations.previous.value,
                    "type": MimeTypes.geojson.value,
                    "method": method,
                    "href": href,
                }
        return None
@attr.s
class CollectionLinksBase(BaseLinks):
    """Shared helper for links that point at a specific collection."""

    collection_id: str = attr.ib()

    def collection_link(self, rel: str = Relations.collection.value) -> Dict:
        """Build a link to this collection with the given ``rel``."""
        href = self.resolve(f"collections/{self.collection_id}")
        return {"rel": rel, "type": MimeTypes.json.value, "href": href}
@attr.s
class CollectionLinks(CollectionLinksBase):
    """Inferred links for a single collection response."""

    def link_self(self) -> Dict:
        """Self link points at the collection itself."""
        return self.collection_link(rel=Relations.self.value)

    def link_parent(self) -> Dict:
        """Create the `parent` link (the catalog root)."""
        return dict(
            rel=Relations.parent.value,
            type=MimeTypes.json.value,
            href=self.base_url,
        )

    def link_items(self) -> Dict:
        """Create the `item` link to the collection's items endpoint."""
        items_href = self.resolve(f"collections/{self.collection_id}/items")
        return dict(rel="items", type=MimeTypes.geojson.value, href=items_href)
@attr.s
class SearchLinks(BaseLinks):
    """Inferred links for search responses."""

    def link_self(self) -> Dict:
        """Self link points at the search endpoint."""
        return dict(
            rel=Relations.self.value,
            type=MimeTypes.geojson.value,
            href=self.resolve("search"),
        )
@attr.s
class ItemCollectionLinks(CollectionLinksBase):
    """Inferred links for a collection's item list."""

    def link_self(self) -> Dict:
        """Self link points at the collection's /items endpoint."""
        return dict(
            rel=Relations.self.value,
            type=MimeTypes.geojson.value,
            href=self.resolve(f"collections/{self.collection_id}/items"),
        )

    def link_parent(self) -> Dict:
        """The parent of the item list is the collection."""
        return self.collection_link(rel=Relations.parent.value)

    def link_collection(self) -> Dict:
        """The `collection` link also points at the collection."""
        return self.collection_link()
@attr.s
class ItemLinks(CollectionLinksBase):
    """Inferred links for a single item response."""

    item_id: str = attr.ib()

    def link_self(self) -> Dict:
        """Self link points at this item within its collection."""
        item_href = self.resolve(
            f"collections/{self.collection_id}/items/{self.item_id}"
        )
        return dict(
            rel=Relations.self.value,
            type=MimeTypes.geojson.value,
            href=item_href,
        )

    def link_parent(self) -> Dict:
        """The parent of an item is its collection."""
        return self.collection_link(rel=Relations.parent.value)

    def link_collection(self) -> Dict:
        """The `collection` link points at the item's collection."""
        return self.collection_link()

View File

@ -0,0 +1,260 @@
"""transactions extension client."""
import logging
import re
from typing import List, Optional, Union
import attr
from buildpg import render
from fastapi import HTTPException, Request
from stac_fastapi.extensions.core.transaction import AsyncBaseTransactionsClient
from stac_fastapi.extensions.core.transaction.request import (
PartialCollection,
PartialItem,
PatchOperation,
)
from stac_fastapi.extensions.third_party.bulk_transactions import (
AsyncBaseBulkTransactionsClient,
BulkTransactionMethod,
Items,
)
from stac_fastapi.types import stac as stac_types
from stac_pydantic import Collection, Item, ItemCollection
from starlette.responses import JSONResponse, Response
from stac_fastapi.pgstac.config import Settings
from stac_fastapi.pgstac.db import dbfunc
from stac_fastapi.pgstac.models.links import CollectionLinks, ItemLinks
# Reuse uvicorn's logger so transaction messages show up in the server log.
logger = logging.getLogger("uvicorn")
logger.setLevel(logging.INFO)
class ClientValidateMixIn:
    """Shared request-payload validation for the transaction clients."""

    def _validate_id(self, id: str, settings: Settings):
        """Raise 400 if ``id`` contains any character from ``settings.invalid_id_chars``."""
        invalid_chars = settings.invalid_id_chars
        pattern = "[" + "".join(re.escape(char) for char in invalid_chars) + "]"
        if re.search(pattern, id):
            raise HTTPException(
                status_code=400,
                detail=f"ID ({id}) cannot contain the following characters: {' '.join(invalid_chars)}",
            )

    def _validate_collection(self, request: Request, collection: stac_types.Collection):
        """Validate a collection payload's id."""
        self._validate_id(collection["id"], request.app.state.settings)

    def _validate_item(
        self,
        request: Request,
        item: stac_types.Item,
        collection_id: str,
        expected_item_id: Optional[str] = None,
    ) -> None:
        """Validate item."""
        body_collection_id = item.get("collection")
        body_item_id = item.get("id")

        self._validate_id(body_item_id, request.app.state.settings)

        # pgstac requires a geometry; reject null/missing up front.
        if item.get("geometry", None) is None:
            raise HTTPException(
                status_code=400,
                detail=f"Missing or null `geometry` for Item ({body_item_id}). Geometry is required in pgstac.",
            )

        # The body's collection (when present) must match the path parameter.
        if body_collection_id is not None and collection_id != body_collection_id:
            raise HTTPException(
                status_code=400,
                detail=f"Collection ID from path parameter ({collection_id}) does not match Collection ID from Item ({body_collection_id})",
            )

        # For update/patch routes, the body's item id must match the path.
        if expected_item_id is not None and expected_item_id != body_item_id:
            raise HTTPException(
                status_code=400,
                detail=f"Item ID from path parameter ({expected_item_id}) does not match Item ID from Item ({body_item_id})",
            )
@attr.s
class TransactionsClient(AsyncBaseTransactionsClient, ClientValidateMixIn):
    """Transactions extension specific CRUD operations.

    All writes go through pgstac database functions (``create_item``,
    ``update_item``, ``create_collection``, ...) on a "w" (write)
    connection obtained from the application state.
    """

    async def create_item(
        self,
        collection_id: str,
        item: Union[Item, ItemCollection],
        request: Request,
        **kwargs,
    ) -> Optional[Union[stac_types.Item, Response]]:
        """Create item.

        Accepts either a single Feature (returns the created Item with its
        links populated) or a FeatureCollection (bulk insert, returns a bare
        201 Response). Any other ``type`` is rejected with a 400.
        """
        item = item.model_dump(mode="json")

        if item["type"] == "FeatureCollection":
            # Bulk path: validate every feature, then insert them in one call.
            valid_items = []
            for item in item["features"]:  # noqa: B020
                self._validate_item(request, item, collection_id)
                item["collection"] = collection_id
                valid_items.append(item)

            async with request.app.state.get_connection(request, "w") as conn:
                await dbfunc(conn, "create_items", valid_items)

            return Response(status_code=201)

        elif item["type"] == "Feature":
            self._validate_item(request, item, collection_id)
            item["collection"] = collection_id

            async with request.app.state.get_connection(request, "w") as conn:
                await dbfunc(conn, "create_item", item)

            # Merge client-supplied links with the server-generated ones.
            item["links"] = await ItemLinks(
                collection_id=collection_id,
                item_id=item["id"],
                request=request,
            ).get_links(extra_links=item.get("links"))

            return stac_types.Item(**item)

        else:
            raise HTTPException(
                status_code=400,
                detail=f"Item body type must be 'Feature' or 'FeatureCollection', not {item['type']}",
            )

    async def update_item(
        self,
        request: Request,
        collection_id: str,
        item_id: str,
        item: Item,
        **kwargs,
    ) -> Optional[Union[stac_types.Item, Response]]:
        """Update item.

        The body's item id and collection must match the path parameters
        (enforced by ``_validate_item``).
        """
        item = item.model_dump(mode="json")

        self._validate_item(request, item, collection_id, item_id)
        item["collection"] = collection_id

        async with request.app.state.get_connection(request, "w") as conn:
            await dbfunc(conn, "update_item", item)

        item["links"] = await ItemLinks(
            collection_id=collection_id,
            item_id=item["id"],
            request=request,
        ).get_links(extra_links=item.get("links"))

        return stac_types.Item(**item)

    async def create_collection(
        self,
        collection: Collection,
        request: Request,
        **kwargs,
    ) -> Optional[Union[stac_types.Collection, Response]]:
        """Create collection.

        Returns the created collection with its inferred links populated.
        """
        collection = collection.model_dump(mode="json")

        self._validate_collection(request, collection)

        async with request.app.state.get_connection(request, "w") as conn:
            await dbfunc(conn, "create_collection", collection)

        collection["links"] = await CollectionLinks(
            collection_id=collection["id"], request=request
        ).get_links(extra_links=collection["links"])

        return stac_types.Collection(**collection)

    async def update_collection(
        self,
        collection: Collection,
        request: Request,
        **kwargs,
    ) -> Optional[Union[stac_types.Collection, Response]]:
        """Update collection."""
        col = collection.model_dump(mode="json")

        async with request.app.state.get_connection(request, "w") as conn:
            await dbfunc(conn, "update_collection", col)

        col["links"] = await CollectionLinks(
            collection_id=col["id"], request=request
        ).get_links(extra_links=col.get("links"))

        return stac_types.Collection(**col)

    async def delete_item(
        self,
        item_id: str,
        collection_id: str,
        request: Request,
        **kwargs,
    ) -> Optional[Union[stac_types.Item, Response]]:
        """Delete item.

        Returns a small JSON acknowledgement rather than the deleted item.
        """
        q, p = render(
            "SELECT * FROM delete_item(:item::text, :collection::text);",
            item=item_id,
            collection=collection_id,
        )
        async with request.app.state.get_connection(request, "w") as conn:
            await conn.fetchval(q, *p)
        return JSONResponse({"deleted item": item_id})

    async def delete_collection(
        self, collection_id: str, request: Request, **kwargs
    ) -> Optional[Union[stac_types.Collection, Response]]:
        """Delete collection.

        Returns a small JSON acknowledgement rather than the deleted collection.
        """
        async with request.app.state.get_connection(request, "w") as conn:
            await dbfunc(conn, "delete_collection", collection_id)
        return JSONResponse({"deleted collection": collection_id})

    async def patch_item(
        self,
        collection_id: str,
        item_id: str,
        patch: Union[PartialItem, List[PatchOperation]],
        **kwargs,
    ) -> Optional[Union[stac_types.Item, Response]]:
        """Patch Item.

        Not implemented for the pgstac backend.
        """
        raise NotImplementedError

    async def patch_collection(
        self,
        collection_id: str,
        patch: Union[PartialCollection, List[PatchOperation]],
        **kwargs,
    ) -> Optional[Union[stac_types.Collection, Response]]:
        """Patch Collection.

        Not implemented for the pgstac backend.
        """
        raise NotImplementedError
@attr.s
class BulkTransactionsClient(AsyncBaseBulkTransactionsClient, ClientValidateMixIn):
    """Postgres bulk transactions."""

    async def bulk_item_insert(self, items: Items, request: Request, **kwargs) -> str:
        """Bulk item insertion using pgstac.

        Validates each item against the collection id from the request path,
        stamps its ``collection`` field, then inserts (or upserts, depending
        on ``items.method``) all items in a single database call.
        """
        collection_id = request.path_params["collection_id"]

        for item_id, item in items.items.items():
            # Each payload's ids/collection must agree with the request path.
            self._validate_item(request, item, collection_id, item_id)
            item["collection"] = collection_id

        items_to_insert = list(items.items.values())

        async with request.app.state.get_connection(request, "w") as conn:
            if items.method == BulkTransactionMethod.INSERT:
                method_verb = "added"
                await dbfunc(conn, "create_items", items_to_insert)
            elif items.method == BulkTransactionMethod.UPSERT:
                method_verb = "upserted"
                await dbfunc(conn, "upsert_items", items_to_insert)

        return_msg = f"Successfully {method_verb} {len(items_to_insert)} items."
        return return_msg

View File

@ -0,0 +1,56 @@
"""base_item_cache classes for pgstac fastapi."""
import abc
from typing import Any, Callable, Coroutine, Dict
from starlette.requests import Request
class BaseItemCache(abc.ABC):
    """
    A cache that returns a base item for a collection.

    If no base item is found in the cache, use the fetch_base_item function
    to fetch the base item from pgstac.
    """

    def __init__(
        self,
        fetch_base_item: Callable[[str], Coroutine[Any, Any, Dict[str, Any]]],
        request: Request,
    ):
        """
        Initialize the base item cache.

        Args:
            fetch_base_item: A function that fetches the base item for a collection.
            request: The request object containing app state that may be used by caches.
        """
        self._fetch_base_item = fetch_base_item
        self._request = request

    @abc.abstractmethod
    async def get(self, collection_id: str) -> Dict[str, Any]:
        """Return the base item for the collection and cache by collection id."""
        pass
class DefaultBaseItemCache(BaseItemCache):
    """Dict-backed, per-request implementation of :class:`BaseItemCache`."""

    def __init__(
        self,
        fetch_base_item: Callable[[str], Coroutine[Any, Any, Dict[str, Any]]],
        request: Request,
    ):
        """Initialize the base item cache."""
        self._base_items: Dict = {}
        super().__init__(fetch_base_item, request)

    async def get(self, collection_id: str):
        """Return the base item for the collection and cache by collection id."""
        # Cache hit: return the stored base item without touching the database.
        if collection_id in self._base_items:
            return self._base_items[collection_id]

        base_item = await self._fetch_base_item(collection_id)
        self._base_items[collection_id] = base_item
        return base_item

View File

@ -0,0 +1,26 @@
"""stac_fastapi.types.search module."""
from typing import Dict, Optional
from pydantic import ValidationInfo, field_validator
from stac_fastapi.types.search import BaseSearchPostRequest
class PgstacSearch(BaseSearchPostRequest):
    """Search model.

    Overrides the validation for datetime from the base request model.
    """

    # Additional configuration dict; not part of the base STAC search request.
    conf: Optional[Dict] = None

    @field_validator("filter_lang", check_fields=False)
    @classmethod
    def validate_query_uses_cql(cls, v: str, info: ValidationInfo):
        """Use of Query Extension is not allowed with cql2."""
        # Reject requests that combine the Query extension with a cql2 filter.
        if info.data.get("query", None) is not None:
            raise ValueError(
                "Query extension is not available when using pgstac with cql2"
            )

        return v

View File

@ -0,0 +1,114 @@
"""stac-fastapi utility methods."""
from typing import Any, Dict, Optional, Set, Union
from stac_fastapi.types.stac import Item
def filter_fields(  # noqa: C901
    item: Union[Item, Dict[str, Any]],
    include: Optional[Set[str]] = None,
    exclude: Optional[Set[str]] = None,
) -> Item:
    """Preserve and remove fields as indicated by the fields extension include/exclude sets.

    Returns a shallow copy of the Item with the fields filtered.

    This will not perform a deep copy; values of the original item will be referenced
    in the return item.

    ``include``/``exclude`` entries may be dotted paths (e.g.
    ``"properties.datetime"``) that address nested dict keys.
    """
    if not include and not exclude:
        return item

    # Build a shallow copy of included fields on an item, or a sub-tree of an item
    def include_fields(
        source: Dict[str, Any], fields: Optional[Set[str]]
    ) -> Dict[str, Any]:
        # An empty/None field set means "include everything".
        if not fields:
            return source

        clean_item: Dict[str, Any] = {}
        for key_path in fields or []:
            key_path_parts = key_path.split(".")
            key_root = key_path_parts[0]
            if key_root in source:
                if isinstance(source[key_root], dict) and len(key_path_parts) > 1:
                    # The root of this key path on the item is a dict, and the
                    # key path indicates a sub-key to be included. Walk the dict
                    # from the root key and get the full nested value to include.
                    value = include_fields(
                        source[key_root], fields={".".join(key_path_parts[1:])}
                    )

                    if isinstance(clean_item.get(key_root), dict):
                        # A previously specified key and sub-keys may have been included
                        # already, so do a deep merge update if the root key already exists.
                        dict_deep_update(clean_item[key_root], value)
                    else:
                        # The root key does not exist, so add it. Fields
                        # extension only allows nested referencing on dicts, so
                        # this won't overwrite anything.
                        clean_item[key_root] = value
                else:
                    # The item value to include is not a dict, or, it is a dict but the
                    # key path is for the whole value, not a sub-key. Include the entire
                    # value in the cleaned item.
                    clean_item[key_root] = source[key_root]
            else:
                # The key, or root key of a multi-part key, is not present in the item,
                # so it is ignored
                pass
        return clean_item

    # For an item built up for included fields, remove excluded fields. This
    # modifies `source` in place.
    def exclude_fields(source: Dict[str, Any], fields: Optional[Set[str]]) -> None:
        for key_path in fields or []:
            key_path_part = key_path.split(".")
            key_root = key_path_part[0]
            if key_root in source:
                if isinstance(source[key_root], dict) and len(key_path_part) > 1:
                    # Walk the nested path of this key to remove the leaf-key
                    exclude_fields(source[key_root], fields={".".join(key_path_part[1:])})
                    # If, after removing the leaf-key, the root is now an empty
                    # dict, remove it entirely
                    if not source[key_root]:
                        del source[key_root]
                else:
                    # The key's value is not a dict, or there is no sub-key to remove. The
                    # entire key can be removed from the source.
                    source.pop(key_root, None)
            else:
                # The key to remove does not exist on the source, so it is ignored
                pass

    # Coalesce incoming type to a dict
    item = dict(item)

    clean_item = include_fields(item, include)

    # If, after including all the specified fields, there are no included properties,
    # return just id and collection.
    if not clean_item:
        return Item({"id": item["id"], "collection": item["collection"]})

    exclude_fields(clean_item, exclude)

    return Item(**clean_item)
def dict_deep_update(merge_to: Dict[str, Any], merge_from: Dict[str, Any]) -> None:
    """Recursively fold ``merge_from`` into ``merge_to`` in place.

    Values from ``merge_from`` win on conflict; when both sides hold a dict
    for the same key, the dicts are merged rather than replaced.
    """
    for key, value in merge_from.items():
        existing = merge_to.get(key)
        if isinstance(existing, dict) and isinstance(value, dict):
            # Both sides are dicts: descend and merge key-by-key.
            dict_deep_update(existing, value)
        else:
            merge_to[key] = value

View File

@ -0,0 +1,3 @@
"""library version."""
__version__ = "5.0.2"

View File

@ -0,0 +1,34 @@
{
"id": "joplin",
"description": "This imagery was acquired by the NOAA Remote Sensing Division to support NOAA national security and emergency response requirements. In addition, it will be used for ongoing research efforts for testing and developing standards for airborne digital imagery. Individual images have been combined into a larger mosaic and tiled for distribution. The approximate ground sample distance (GSD) for each pixel is 35 cm (1.14 feet).",
"stac_version": "1.0.0",
"license": "public-domain",
"links": [
{
"rel": "license",
"href": "https://creativecommons.org/licenses/publicdomain/",
"title": "public domain"
}
],
"type": "Collection",
"extent": {
"spatial": {
"bbox": [
[
-94.6911621,
37.0332547,
-94.402771,
37.1077651
]
]
},
"temporal": {
"interval": [
[
"2000-02-01T00:00:00Z",
"2000-02-12T00:00:00Z"
]
]
}
}
}

View File

@ -0,0 +1,59 @@
{
"id": "f2cca2a3-288b-4518-8a3e-a4492bb60b08",
"type": "Feature",
"collection": "joplin",
"links": [],
"geometry": {
"type": "Polygon",
"coordinates": [
[
[
-94.6884155,
37.0595608
],
[
-94.6884155,
37.0332547
],
[
-94.6554565,
37.0332547
],
[
-94.6554565,
37.0595608
],
[
-94.6884155,
37.0595608
]
]
]
},
"properties": {
"proj:epsg": 3857,
"orientation": "nadir",
"height": 2500,
"width": 2500,
"datetime": "2000-02-02T00:00:00Z",
"gsd": 0.5971642834779395
},
"assets": {
"COG": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"href": "https://arturo-stac-api-test-data.s3.amazonaws.com/joplin/images/may24C350000e4102500n.tif",
"title": "NOAA STORM COG"
}
},
"bbox": [
-94.6884155,
37.0332547,
-94.6554565,
37.0595608
],
"stac_extensions": [
"https://stac-extensions.github.io/eo/v1.0.0/schema.json",
"https://stac-extensions.github.io/projection/v1.0.0/schema.json"
],
"stac_version": "1.0.0"
}

File diff suppressed because it is too large Load Diff

View File

View File

@ -0,0 +1,950 @@
import os
from datetime import datetime, timedelta
from typing import Any, Callable, Coroutine, Dict, List, Optional, TypeVar
from urllib.parse import quote_plus
import orjson
import pytest
from fastapi import Request
from httpx import ASGITransport, AsyncClient
from pypgstac.db import PgstacDB
from pypgstac.load import Loader
from pystac import Collection, Extent, Item, SpatialExtent, TemporalExtent
from stac_fastapi.api.app import StacApi
from stac_fastapi.api.models import create_get_request_model, create_post_request_model
from stac_fastapi.extensions.core import (
CollectionSearchExtension,
FieldsExtension,
TransactionExtension,
)
from stac_fastapi.extensions.core.fields import FieldsConformanceClasses
from stac_fastapi.types import stac as stac_types
from stac_fastapi.pgstac.config import PostgresSettings
from stac_fastapi.pgstac.core import CoreCrudClient, Settings
from stac_fastapi.pgstac.db import close_db_connection, connect_to_db
from stac_fastapi.pgstac.transactions import TransactionsClient
from stac_fastapi.pgstac.types.search import PgstacSearch
# Directory holding the JSON fixtures (test collection/item) used by the tests.
DATA_DIR = os.path.join(os.path.dirname(__file__), "..", "data")

# Routes required by STAC API core; asserted against the app's router below.
STAC_CORE_ROUTES = [
    "GET /",
    "GET /collections",
    "GET /collections/{collection_id}",
    "GET /collections/{collection_id}/items",
    "GET /collections/{collection_id}/items/{item_id}",
    "GET /conformance",
    "GET /search",
    "POST /search",
]

# Routes added by the Transaction extension.
STAC_TRANSACTION_ROUTES = [
    "DELETE /collections/{collection_id}",
    "DELETE /collections/{collection_id}/items/{item_id}",
    "POST /collections",
    "POST /collections/{collection_id}/items",
    "PUT /collections/{collection_id}",
    "PUT /collections/{collection_id}/items/{item_id}",
]

# Whole-world bbox and matching polygon used for synthetic test data.
GLOBAL_BBOX = [-180.0, -90.0, 180.0, 90.0]
GLOBAL_GEOMETRY = {
    "type": "Polygon",
    "coordinates": (
        (
            (180.0, -90.0),
            (180.0, 90.0),
            (-180.0, 90.0),
            (-180.0, -90.0),
            (180.0, -90.0),
        ),
    ),
}
# Default spatial/temporal extent for collections created during tests.
DEFAULT_EXTENT = Extent(
    SpatialExtent(GLOBAL_BBOX),
    TemporalExtent([[datetime.now(), None]]),
)
async def test_default_app_no_transactions(
    app_client_no_transaction, load_test_data, load_test_collection
):
    """POSTing an item must 405 when the transaction extension is disabled."""
    collection = load_test_collection
    new_item = load_test_data("test_item.json")
    resp = await app_client_no_transaction.post(
        f"/collections/{collection['id']}/items", json=new_item
    )

    # the default application does not have the transaction extensions enabled!
    assert resp.status_code == 405
async def test_post_search_content_type(app_client):
    """POST /search responds with the geojson media type."""
    response = await app_client.post("search", json={"limit": 1})
    assert response.headers["content-type"] == "application/geo+json"
async def test_get_search_content_type(app_client):
    """GET /search responds with the geojson media type."""
    response = await app_client.get("search")
    assert response.headers["content-type"] == "application/geo+json"
async def test_landing_links(app_client):
    """test landing page links."""
    landing = await app_client.get("/")
    assert landing.status_code == 200, landing.text

    titles = [link.get("title") for link in landing.json()["links"]]
    assert "Queryables available for this Catalog" in titles
async def test_get_queryables_content_type(app_client, load_test_collection):
    """Queryables endpoints respond with the JSON-Schema media type."""
    # Global queryables first, then the per-collection endpoint.
    global_resp = await app_client.get("queryables")
    assert global_resp.status_code == 200
    assert global_resp.headers["content-type"] == "application/schema+json"

    collection_id = load_test_collection["id"]
    coll_resp = await app_client.get(f"collections/{collection_id}/queryables")
    assert coll_resp.status_code == 200
    assert coll_resp.headers["content-type"] == "application/schema+json"
async def test_get_features_content_type(app_client, load_test_collection):
    """The items endpoint responds with the geojson media type."""
    collection_id = load_test_collection["id"]
    response = await app_client.get(f"collections/{collection_id}/items")
    assert response.headers["content-type"] == "application/geo+json"
async def test_get_features_self_link(app_client, load_test_collection):
    """The items page advertises a self link ending in /items."""
    # https://github.com/stac-utils/stac-fastapi/issues/483
    resp = await app_client.get(f"collections/{load_test_collection['id']}/items")
    assert resp.status_code == 200

    self_links = [link for link in resp.json()["links"] if link["rel"] == "self"]
    assert self_links
    assert self_links[0]["href"].endswith("/items")
async def test_get_feature_content_type(app_client, load_test_collection, load_test_item):
    """A single item responds with the geojson media type."""
    collection_id = load_test_collection["id"]
    item_id = load_test_item["id"]
    response = await app_client.get(f"collections/{collection_id}/items/{item_id}")
    assert response.headers["content-type"] == "application/geo+json"
async def test_api_headers(app_client):
    """The OpenAPI document is served with the versioned OpenAPI media type."""
    response = await app_client.get("/api")
    assert (
        response.headers["content-type"]
        == "application/vnd.oai.openapi+json;version=3.0"
    )
    assert response.status_code == 200
async def test_core_router(api_client, app):
    """Every core STAC route is registered on the application router."""
    prefix = app.state.router_prefix
    expected = set()
    for core_route in STAC_CORE_ROUTES:
        method, path = core_route.split(" ")
        expected.add(f"{method} {prefix}{path}")

    registered = {
        f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes
    }
    assert not expected - registered
async def test_landing_page_stac_extensions(app_client):
    """The landing page advertises no stac_extensions."""
    response = await app_client.get("/")
    assert response.status_code == 200
    assert not response.json()["stac_extensions"]
async def test_transactions_router(api_client, app):
    """Every transaction-extension route is registered on the router."""
    prefix = app.state.router_prefix
    expected = set()
    for transaction_route in STAC_TRANSACTION_ROUTES:
        method, path = transaction_route.split(" ")
        expected.add(f"{method} {prefix}{path}")

    registered = {
        f"{list(route.methods)[0]} {route.path}" for route in api_client.app.routes
    }
    assert not expected - registered
async def test_app_transaction_extension(
    app_client, load_test_data, load_test_collection
):
    """Creating an item through the transaction extension returns 201."""
    new_item = load_test_data("test_item.json")
    response = await app_client.post(
        f"/collections/{load_test_collection['id']}/items", json=new_item
    )
    assert response.status_code == 201
async def test_app_query_extension(load_test_data, app_client, load_test_collection):
    """An 'eq' query matches the freshly created item via POST and GET search."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    created = await app_client.post(f"/collections/{collection['id']}/items", json=item)
    assert created.status_code == 201

    query = {"proj:epsg": {"eq": item["properties"]["proj:epsg"]}}

    post_resp = await app_client.post("/search", json={"query": query})
    assert post_resp.status_code == 200
    assert len(post_resp.json()["features"]) == 1

    # GET variant: the query is JSON-encoded and url-quoted into a parameter.
    get_resp = await app_client.get(
        "/search", params={"query": quote_plus(orjson.dumps(query))}
    )
    assert get_resp.status_code == 200
    assert len(get_resp.json()["features"]) == 1
async def test_app_query_extension_limit_1(
    load_test_data, app_client, load_test_collection
):
    """A limit of 1 returns exactly one feature."""
    item = load_test_data("test_item.json")
    created = await app_client.post(
        f"/collections/{load_test_collection['id']}/items", json=item
    )
    assert created.status_code == 201

    response = await app_client.post("/search", json={"limit": 1})
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
async def test_app_query_extension_limit_eq0(app_client):
    """A limit of 0 is rejected with a 400."""
    response = await app_client.post("/search", json={"limit": 0})
    assert response.status_code == 400
async def test_app_query_extension_limit_lt0(
    load_test_data, app_client, load_test_collection
):
    """A negative limit is rejected with a 400."""
    item = load_test_data("test_item.json")
    created = await app_client.post(
        f"/collections/{load_test_collection['id']}/items", json=item
    )
    assert created.status_code == 201

    response = await app_client.post("/search", json={"limit": -1})
    assert response.status_code == 400
async def test_app_query_extension_limit_gt10000(
    load_test_data, app_client, load_test_collection
):
    """An over-large limit is accepted (capped server-side) rather than rejected."""
    collection = load_test_collection
    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=load_test_data("test_item.json"),
    )
    assert response.status_code == 201

    response = await app_client.post("/search", json={"limit": 10001})
    assert response.status_code == 200
async def test_app_query_extension_gt(load_test_data, app_client, load_test_collection):
    """A strict greater-than query on the item's own value matches nothing."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(
        f"/collections/{collection['id']}/items", json=item
    )
    assert response.status_code == 201

    query = {"proj:epsg": {"gt": item["properties"]["proj:epsg"]}}
    response = await app_client.post("/search", json={"query": query})
    assert response.status_code == 200
    assert len(response.json()["features"]) == 0
async def test_app_query_extension_gte(load_test_data, app_client, load_test_collection):
    """A greater-than-or-equal query on the item's own value matches it."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(
        f"/collections/{collection['id']}/items", json=item
    )
    assert response.status_code == 201

    query = {"proj:epsg": {"gte": item["properties"]["proj:epsg"]}}
    response = await app_client.post("/search", json={"query": query})
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
async def test_app_sort_extension(load_test_data, app_client, load_test_collection):
    """Sorting by datetime returns items in the requested asc/desc order."""
    coll = load_test_collection
    first_item = load_test_data("test_item.json")
    item_date = datetime.strptime(
        first_item["properties"]["datetime"], "%Y-%m-%dT%H:%M:%SZ"
    )
    resp = await app_client.post(f"/collections/{coll['id']}/items", json=first_item)
    assert resp.status_code == 201

    # Second item is one day older than the first so the sort order is
    # deterministic in both directions.
    second_item = load_test_data("test_item.json")
    second_item["id"] = "another-item"
    another_item_date = item_date - timedelta(days=1)
    second_item["properties"]["datetime"] = another_item_date.strftime(
        "%Y-%m-%dT%H:%M:%SZ"
    )
    resp = await app_client.post(f"/collections/{coll['id']}/items", json=second_item)
    assert resp.status_code == 201

    # Descending: newest (first_item) first.
    params = {
        "collections": [coll["id"]],
        "sortby": [{"field": "datetime", "direction": "desc"}],
    }
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp_json["features"][0]["id"] == first_item["id"]
    assert resp_json["features"][1]["id"] == second_item["id"]

    # Ascending: oldest (second_item) first.
    params = {
        "collections": [coll["id"]],
        "sortby": [{"field": "datetime", "direction": "asc"}],
    }
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp_json["features"][1]["id"] == first_item["id"]
    assert resp_json["features"][0]["id"] == second_item["id"]
async def test_search_invalid_date(load_test_data, app_client, load_test_collection):
    """A malformed datetime in the search body yields a 400."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(
        f"/collections/{collection['id']}/items", json=item
    )
    assert response.status_code == 201

    body = {
        "datetime": "2020-XX-01/2020-10-30",
        "collections": [collection["id"]],
    }
    response = await app_client.post("/search", json=body)
    assert response.status_code == 400
async def test_bbox_3d(load_test_data, app_client, load_test_collection):
    """A 3D (six-element) bbox search matches the test item."""
    collection = load_test_collection
    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=load_test_data("test_item.json"),
    )
    assert response.status_code == 201

    australia_bbox = [106.343365, -47.199523, 0.1, 168.218365, -19.437288, 0.1]
    response = await app_client.post(
        "/search",
        json={"bbox": australia_bbox, "collections": [collection["id"]]},
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
async def test_app_search_response(load_test_data, app_client, load_test_collection):
    """Search responses are FeatureCollections without top-level STAC fields."""
    collection = load_test_collection
    response = await app_client.post(
        "/search", json={"collections": [collection["id"]]}
    )
    assert response.status_code == 200
    body = response.json()
    assert body.get("type") == "FeatureCollection"
    # stac_version and stac_extensions were removed in v1.0.0-beta.3
    assert body.get("stac_version") is None
    assert body.get("stac_extensions") is None
async def test_search_point_intersects(load_test_data, app_client, load_test_collection):
    """A Point intersects query matches only the geometry containing the point.

    A mirrored copy of the item is inserted in the opposite hemispheres so the
    query has a non-matching candidate in the same collection.
    """
    coll = load_test_collection
    item = load_test_data("test_item.json")
    resp = await app_client.post(f"/collections/{coll['id']}/items", json=item)
    assert resp.status_code == 201

    # Mirror the geometry into the opposite hemispheres.
    new_coordinates = []
    for coordinate in item["geometry"]["coordinates"][0]:
        new_coordinates.append([coordinate[0] * -1, coordinate[1] * -1])
    item["id"] = "test-item-other-hemispheres"
    item["geometry"]["coordinates"] = [new_coordinates]
    # Bug fix: negating [minx, miny, maxx, maxy] element-wise would leave the
    # mins greater than the maxes; swap the corners so the bbox stays valid.
    minx, miny, maxx, maxy = item["bbox"]
    item["bbox"] = [-maxx, -maxy, -minx, -miny]
    resp = await app_client.post(f"/collections/{coll['id']}/items", json=item)
    assert resp.status_code == 201

    point = [150.04, -33.14]
    intersects = {"type": "Point", "coordinates": point}
    params = {
        "intersects": intersects,
        "collections": [item["collection"]],
    }
    resp = await app_client.post("/search", json=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 1

    # GET form: intersects is passed as serialized JSON.
    params["intersects"] = orjson.dumps(params["intersects"]).decode("utf-8")
    resp = await app_client.get("/search", params=params)
    assert resp.status_code == 200
    resp_json = resp.json()
    assert len(resp_json["features"]) == 1
async def test_search_line_string_intersects(
    load_test_data, app_client, load_test_collection
):
    """A LineString intersects query matches the overlapping item."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    response = await app_client.post(
        f"/collections/{collection['id']}/items", json=item
    )
    assert response.status_code == 201

    line = [[150.04, -33.14], [150.22, -33.89]]
    body = {
        "intersects": {"type": "LineString", "coordinates": line},
        "collections": [item["collection"]],
    }
    response = await app_client.post("/search", json=body)
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1
@pytest.mark.asyncio
async def test_landing_forwarded_header(load_test_data, app_client, load_test_collection):
    """Landing-page links honor the Forwarded header over X-Forwarded-*."""
    collection = load_test_collection
    await app_client.post(
        f"/collections/{collection['id']}/items",
        json=load_test_data("test_item.json"),
    )

    response = await app_client.get(
        "/",
        headers={
            "Forwarded": "proto=https;host=test:1234",
            "X-Forwarded-Proto": "http",
            "X-Forwarded-Port": "4321",
        },
    )
    for link in response.json()["links"]:
        assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_search_forwarded_header(load_test_data, app_client, load_test_collection):
    """Feature links honor the Forwarded header's proto and host."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    await app_client.post(f"/collections/{collection['id']}/items", json=item)

    response = await app_client.post(
        "/search",
        json={"collections": [item["collection"]]},
        headers={"Forwarded": "proto=https;host=test:1234"},
    )
    features = response.json()["features"]
    assert len(features) > 0
    for feature in features:
        for link in feature["links"]:
            assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_search_x_forwarded_headers(
    load_test_data, app_client, load_test_collection
):
    """Feature links honor X-Forwarded-Proto and X-Forwarded-Port."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    await app_client.post(f"/collections/{collection['id']}/items", json=item)

    response = await app_client.post(
        "/search",
        json={"collections": [item["collection"]]},
        headers={
            "X-Forwarded-Proto": "https",
            "X-Forwarded-Port": "1234",
        },
    )
    features = response.json()["features"]
    assert len(features) > 0
    for feature in features:
        for link in feature["links"]:
            assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_search_duplicate_forward_headers(
    load_test_data, app_client, load_test_collection
):
    """When both Forwarded and X-Forwarded-* are sent, Forwarded wins."""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    await app_client.post(f"/collections/{collection['id']}/items", json=item)

    response = await app_client.post(
        "/search",
        json={"collections": [item["collection"]]},
        headers={
            "Forwarded": "proto=https;host=test:1234",
            "X-Forwarded-Proto": "http",
            "X-Forwarded-Port": "4321",
        },
    )
    features = response.json()["features"]
    assert len(features) > 0
    for feature in features:
        for link in feature["links"]:
            assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_base_queryables(load_test_data, app_client, load_test_collection):
    """The global queryables document is JSON Schema exposing an id property."""
    response = await app_client.get("/queryables")
    assert response.status_code == 200
    assert response.headers["Content-Type"] == "application/schema+json"
    queryables = response.json()
    assert queryables["$id"].endswith("/queryables")
    assert queryables["type"] == "object"
    assert "properties" in queryables
    assert "id" in queryables["properties"]
@pytest.mark.asyncio
async def test_collection_queryables(load_test_data, app_client, load_test_collection):
    """Per-collection queryables are served with the schema content type."""
    response = await app_client.get("/collections/test-collection/queryables")
    assert response.status_code == 200
    assert response.headers["Content-Type"] == "application/schema+json"
    queryables = response.json()
    assert queryables["$id"].endswith("/collections/test-collection/queryables")
    assert queryables["type"] == "object"
    assert "properties" in queryables
    assert "id" in queryables["properties"]
@pytest.mark.asyncio
async def test_get_collections_search(
    app_client, load_test_collection, load_test2_collection
):
    """Collection search filters by datetime and orders by the sortby param."""
    # this search should only return a single collection
    resp = await app_client.get(
        "/collections",
        params={"datetime": "2010-01-01T00:00:00Z/2010-01-02T00:00:00Z"},
    )
    assert len(resp.json()["collections"]) == 1
    assert resp.json()["collections"][0]["id"] == load_test2_collection.id

    # same with this one
    resp = await app_client.get(
        "/collections",
        params={"datetime": "2020-01-01T00:00:00Z/.."},
    )
    assert len(resp.json()["collections"]) == 1
    assert resp.json()["collections"][0]["id"] == load_test_collection["id"]

    # no params should return both collections
    resp = await app_client.get(
        "/collections",
    )
    assert len(resp.json()["collections"]) == 2

    # this search should return test collection 1 first
    resp = await app_client.get(
        "/collections",
        params={"sortby": "title"},
    )
    assert resp.json()["collections"][0]["id"] == load_test_collection["id"]
    assert resp.json()["collections"][1]["id"] == load_test2_collection.id

    # this search should return test collection 2 first ("-" prefix = descending)
    resp = await app_client.get(
        "/collections",
        params={"sortby": "-title"},
    )
    assert resp.json()["collections"][1]["id"] == load_test_collection["id"]
    assert resp.json()["collections"][0]["id"] == load_test2_collection.id
@pytest.mark.asyncio
async def test_item_collection_filter_bbox(
    load_test_data, app_client, load_test_collection
):
    """The items endpoint filters by bbox: a hit inside, a miss outside."""
    collection_id = load_test_collection["id"]
    response = await app_client.post(
        f"/collections/{collection_id}/items", json=load_test_data("test_item.json")
    )
    assert response.status_code == 201

    response = await app_client.get(
        f"/collections/{collection_id}/items", params={"bbox": "100,-50,170,-20"}
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1

    response = await app_client.get(
        f"/collections/{collection_id}/items", params={"bbox": "1,2,3,4"}
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 0
@pytest.mark.asyncio
async def test_item_collection_filter_datetime(
    load_test_data, app_client, load_test_collection
):
    """The items endpoint filters by datetime: open range hits, past range misses."""
    collection_id = load_test_collection["id"]
    response = await app_client.post(
        f"/collections/{collection_id}/items", json=load_test_data("test_item.json")
    )
    assert response.status_code == 201

    response = await app_client.get(
        f"/collections/{collection_id}/items",
        params={"datetime": "2020-01-01T00:00:00.00Z/.."},
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 1

    response = await app_client.get(
        f"/collections/{collection_id}/items",
        params={"datetime": "2018-01-01T00:00:00.00Z/2019-01-01T00:00:00.00Z"},
    )
    assert response.status_code == 200
    assert len(response.json()["features"]) == 0
@pytest.mark.asyncio
async def test_bad_collection_queryables(
    load_test_data, app_client, load_test_collection
):
    """Queryables for an unknown collection return 404."""
    response = await app_client.get("/collections/bad-collection/queryables")
    assert response.status_code == 404
async def test_deleting_items_with_identical_ids(app_client):
    """Deleting an item id shared by two collections only affects that collection."""
    collection_a = Collection("collection-a", "The first collection", DEFAULT_EXTENT)
    collection_b = Collection("collection-b", "The second collection", DEFAULT_EXTENT)
    # NOTE(review): datetime.now() is naive; the API appears to accept it here —
    # confirm, or prefer datetime.now(timezone.utc).
    item = Item("the-item", GLOBAL_GEOMETRY, GLOBAL_BBOX, datetime.now(), {})

    # Create both collections, each holding an item with the same id.
    for collection in (collection_a, collection_b):
        response = await app_client.post(
            "/collections", json=collection.to_dict(include_self_link=False)
        )
        assert response.status_code == 201
        item_as_dict = item.to_dict(include_self_link=False)
        item_as_dict["collection"] = collection.id
        response = await app_client.post(
            f"/collections/{collection.id}/items", json=item_as_dict
        )
        assert response.status_code == 201
        response = await app_client.get(f"/collections/{collection.id}/items")
        assert response.status_code == 200, response.json()
        assert len(response.json()["features"]) == 1

    # Deleting from one collection must not remove the twin in the other.
    for collection in (collection_a, collection_b):
        response = await app_client.delete(
            f"/collections/{collection.id}/items/{item.id}"
        )
        assert response.status_code == 200, response.json()
        response = await app_client.get(f"/collections/{collection.id}/items")
        assert response.status_code == 200, response.json()
        assert not response.json()["features"]
@pytest.mark.parametrize("direction", ("asc", "desc"))
async def test_sorting_and_paging(app_client, load_test_collection, direction: str):
    """Sorting on a property with NULLs pages through all items without loss.

    Every third item has a null ``eo:cloud_cover`` so paging must cope with
    missing sort keys in both directions.
    """
    collection_id = load_test_collection["id"]
    for i in range(10):
        item = Item(
            id=f"item-{i}",
            geometry={"type": "Point", "coordinates": [-105.1019, 40.1672]},
            bbox=[-105.1019, 40.1672, -105.1019, 40.1672],
            datetime=datetime.now(),
            properties={
                "eo:cloud_cover": 42 + i if i % 3 != 0 else None,
            },
        )
        item.collection_id = collection_id
        response = await app_client.post(
            f"/collections/{collection_id}/items",
            json=item.to_dict(include_self_link=False, transform_hrefs=False),
        )
        assert response.status_code == 201

    # Follow "next" links until the server stops returning them, accumulating
    # every page's features.
    async def search(query: Dict[str, Any]) -> List[Item]:
        items: List[Item] = []
        while True:
            response = await app_client.post("/search", json=query)
            json = response.json()
            assert response.status_code == 200, json
            items.extend((Item.from_dict(d) for d in json["features"]))
            next_link = next(
                (link for link in json["links"] if link["rel"] == "next"), None
            )
            if next_link is None:
                return items
            else:
                query = next_link["body"]

    query = {
        "collections": [collection_id],
        "sortby": [{"field": "properties.eo:cloud_cover", "direction": direction}],
        "limit": 5,
    }
    items = await search(query)
    assert len(items) == 10, items
@pytest.mark.asyncio
async def test_wrapped_function(load_test_data, database) -> None:
    # Ensure wrappers, e.g. Planetary Computer's rate limiting, work.
    # https://github.com/gadomski/planetary-computer-apis/blob/2719ccf6ead3e06de0784c39a2918d4d1811368b/pccommon/pccommon/redis.py#L205-L238
    T = TypeVar("T")

    # Decorator factory mirroring the Planetary Computer wrapper shape: it
    # requires a ``request`` kwarg to be present before delegating to ``fn``.
    def wrap() -> (
        Callable[
            [Callable[..., Coroutine[Any, Any, T]]],
            Callable[..., Coroutine[Any, Any, T]],
        ]
    ):
        def decorator(
            fn: Callable[..., Coroutine[Any, Any, T]],
        ) -> Callable[..., Coroutine[Any, Any, T]]:
            async def _wrapper(*args: Any, **kwargs: Any) -> T:
                request: Optional[Request] = kwargs.get("request")
                if request:
                    pass  # This is where rate limiting would be applied
                else:
                    raise ValueError(f"Missing request in {fn.__name__}")
                return await fn(*args, **kwargs)

            return _wrapper

        return decorator

    # Client whose get_collection is wrapped — exercises that the decorated
    # method still receives the request kwarg through the FastAPI plumbing.
    class Client(CoreCrudClient):
        @wrap()
        async def get_collection(
            self, collection_id: str, request: Request, **kwargs
        ) -> stac_types.Collection:  # annotation corrected: returns a Collection
            return await super().get_collection(collection_id, request=request, **kwargs)

    settings = Settings(
        testing=True,
    )
    postgres_settings = PostgresSettings(
        pguser=database.user,
        pgpassword=database.password,
        pghost=database.host,
        pgport=database.port,
        pgdatabase=database.dbname,
    )
    extensions = [
        TransactionExtension(client=TransactionsClient(), settings=settings),
        FieldsExtension(),
    ]
    post_request_model = create_post_request_model(extensions, base_model=PgstacSearch)
    get_request_model = create_get_request_model(extensions)
    collection_search_extension = CollectionSearchExtension.from_extensions(
        extensions=[
            FieldsExtension(conformance_classes=[FieldsConformanceClasses.COLLECTIONS]),
        ]
    )
    api = StacApi(
        client=Client(pgstac_search_model=post_request_model),
        settings=settings,
        extensions=extensions,
        search_post_request_model=post_request_model,
        search_get_request_model=get_request_model,
        collections_get_request_model=collection_search_extension.GET,
    )
    app = api.app
    await connect_to_db(
        app,
        postgres_settings=postgres_settings,
        add_write_connection_pool=True,
    )
    try:
        # Create a collection and item, then fetch the item through the
        # wrapped client to prove the decorator did not break dispatch.
        async with AsyncClient(transport=ASGITransport(app=app)) as client:
            response = await client.post(
                "http://test/collections",
                json=load_test_data("test_collection.json"),
            )
            assert response.status_code == 201
            response = await client.post(
                "http://test/collections/test-collection/items",
                json=load_test_data("test_item.json"),
            )
            assert response.status_code == 201
            response = await client.get(
                "http://test/collections/test-collection/items/test-item"
            )
            assert response.status_code == 200
    finally:
        await close_db_connection(app)
@pytest.mark.asyncio
@pytest.mark.parametrize("validation", [True, False])
@pytest.mark.parametrize("hydrate", [True, False])
async def test_no_extension(
    hydrate, validation, load_test_data, database, pgstac
) -> None:
    """test PgSTAC with no extension.

    With no extensions registered, ``fields`` and collection ``datetime``
    parameters must be ignored rather than applied.
    """
    connection = f"postgresql://{database.user}:{quote_plus(database.password)}@{database.host}:{database.port}/{database.dbname}"
    with PgstacDB(dsn=connection) as db:
        loader = Loader(db=db)
        loader.load_collections(os.path.join(DATA_DIR, "test_collection.json"))
        loader.load_items(os.path.join(DATA_DIR, "test_item.json"))

    settings = Settings(
        testing=True,
        use_api_hydrate=hydrate,
        enable_response_models=validation,
    )
    postgres_settings = PostgresSettings(
        pguser=database.user,
        pgpassword=database.password,
        pghost=database.host,
        pgport=database.port,
        pgdatabase=database.dbname,
    )
    extensions = []
    post_request_model = create_post_request_model(extensions, base_model=PgstacSearch)
    api = StacApi(
        client=CoreCrudClient(pgstac_search_model=post_request_model),
        settings=settings,
        extensions=extensions,
        search_post_request_model=post_request_model,
    )
    app = api.app
    await connect_to_db(
        app,
        postgres_settings=postgres_settings,
        add_write_connection_pool=True,
    )
    try:
        async with AsyncClient(transport=ASGITransport(app=app)) as client:
            landing = await client.get("http://test/")
            assert landing.status_code == 200, landing.text
            # No filter extension: queryables must not be advertised.
            assert "Queryables" not in [
                link.get("title") for link in landing.json()["links"]
            ]

            collection = await client.get("http://test/collections/test-collection")
            assert collection.status_code == 200, collection.text

            collections = await client.get("http://test/collections")
            assert collections.status_code == 200, collections.text

            # datetime should be ignored
            collection_datetime = await client.get(
                "http://test/collections/test-collection",
                params={
                    "datetime": "2000-01-01T00:00:00Z/2000-12-31T00:00:00Z",
                },
            )
            assert collection_datetime.text == collection.text

            item = await client.get(
                "http://test/collections/test-collection/items/test-item"
            )
            assert item.status_code == 200, item.text

            item_collection = await client.get(
                "http://test/collections/test-collection/items",
                params={"limit": 10},
            )
            assert item_collection.status_code == 200, item_collection.text

            get_search = await client.get(
                "http://test/search",
                params={
                    "collections": ["test-collection"],
                },
            )
            assert get_search.status_code == 200, get_search.text

            post_search = await client.post(
                "http://test/search",
                json={
                    "collections": ["test-collection"],
                },
            )
            assert post_search.status_code == 200, post_search.text

            get_search = await client.get(
                "http://test/search",
                params={
                    "collections": ["test-collection"],
                    "fields": "properties.datetime",
                },
            )
            # fields should be ignored
            assert get_search.status_code == 200, get_search.text
            props = get_search.json()["features"][0]["properties"]
            assert len(props) > 1

            post_search = await client.post(
                "http://test/search",
                json={
                    "collections": ["test-collection"],
                    "fields": {
                        "include": ["properties.datetime"],
                    },
                },
            )
            # fields should be ignored
            assert post_search.status_code == 200, post_search.text
            # Bug fix: this previously re-inspected get_search; assert on the
            # POST response that was just made.
            props = post_search.json()["features"][0]["properties"]
            assert len(props) > 1
    finally:
        await close_db_connection(app)
async def test_default_app(default_client, default_app, load_test_data):
    """The default app exposes core + transaction routes and conformance classes."""
    api_routes = {
        f"{list(route.methods)[0]} {route.path}" for route in default_app.routes
    }
    assert set(STAC_CORE_ROUTES).issubset(api_routes)
    assert set(STAC_TRANSACTION_ROUTES).issubset(api_routes)

    # Load collections
    col = load_test_data("test_collection.json")
    resp = await default_client.post("/collections", json=col)
    assert resp.status_code == 201

    # Load items
    item = load_test_data("test_item.json")
    resp = await default_client.post(f"/collections/{col['id']}/items", json=item)
    assert resp.status_code == 201

    # The conformance document must advertise every default capability.
    resp = await default_client.get("/conformance")
    assert resp.status_code == 200
    conf = resp.json()["conformsTo"]
    assert (
        "https://api.stacspec.org/v1.0.0/ogcapi-features/extensions/transaction" in conf
    )
    assert "https://api.stacspec.org/v1.0.0/collections/extensions/transaction" in conf
    assert "http://www.opengis.net/spec/cql2/1.0/conf/basic-cql2" in conf
    assert "http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/simple-query" in conf
    assert "http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core" in conf
    assert (
        "http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/features-filter" in conf
    )
    assert "http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter" in conf
    assert "https://api.stacspec.org/v1.0.0-rc.1/collection-search" in conf
    assert "https://api.stacspec.org/v1.0.0/collections" in conf
    assert "https://api.stacspec.org/v1.0.0/ogcapi-features#query" in conf
    assert "https://api.stacspec.org/v1.0.0/ogcapi-features#sort" in conf

View File

@ -0,0 +1,106 @@
import pytest
from fastapi import APIRouter, FastAPI
from starlette.requests import Request
from starlette.testclient import TestClient
from stac_fastapi.pgstac.models import links as app_links
@pytest.mark.parametrize("root_path", ["", "/api/v1"])
@pytest.mark.parametrize("prefix", ["", "/stac"])
def tests_app_links(prefix, root_path):  # noqa: C901
    """Paging links keep the router prefix and root_path in every combination."""
    endpoint_prefix = root_path + prefix
    url_prefix = "http://stac.io" + endpoint_prefix

    app = FastAPI(root_path=root_path)
    router = APIRouter(prefix=prefix)
    app.state.router_prefix = router.prefix

    @router.get("/search")
    @router.post("/search")
    async def search(request: Request):
        # Echo back what PagingLinks computes so the client can assert on it.
        links = app_links.PagingLinks(request, next="yo:2", prev="yo:1")
        return {
            "url": links.url,
            "base_url": links.base_url,
            "links": await links.get_links(),
        }

    @router.get("/collections")
    async def collections(request: Request):
        # Simulate pgstac-style paging link records for the collections endpoint.
        pgstac_next = {
            "rel": "next",
            "body": {"offset": 1},
            "href": "./collections",
            "type": "application/json",
            "merge": True,
            "method": "GET",
        }
        pgstac_prev = {
            "rel": "prev",
            "body": {"offset": 0},
            "href": "./collections",
            "type": "application/json",
            "merge": True,
            "method": "GET",
        }
        links = app_links.CollectionSearchPagingLinks(
            request, next=pgstac_next, prev=pgstac_prev
        )
        return {
            "url": links.url,
            "base_url": links.base_url,
            "links": await links.get_links(),
        }

    app.include_router(router)

    with TestClient(
        app,
        base_url="http://stac.io",
        root_path=root_path,
    ) as client:
        response = client.get(f"{prefix}/search")
        assert response.status_code == 200
        assert response.json()["url"] == url_prefix + "/search"
        assert response.json()["base_url"].rstrip("/") == url_prefix
        links = response.json()["links"]
        for link in links:
            if link["rel"] in ["previous", "next"]:
                assert link["method"] == "GET"
            assert link["href"].startswith(url_prefix)
        assert {"next", "previous", "root", "self"} == {link["rel"] for link in links}

        # Query-string parameters must be preserved in paging links.
        response = client.get(f"{prefix}/search", params={"limit": 1})
        assert response.status_code == 200
        assert response.json()["url"] == url_prefix + "/search?limit=1"
        assert response.json()["base_url"].rstrip("/") == url_prefix
        links = response.json()["links"]
        for link in links:
            if link["rel"] in ["previous", "next"]:
                assert link["method"] == "GET"
                assert "limit=1" in link["href"]
            assert link["href"].startswith(url_prefix)
        assert {"next", "previous", "root", "self"} == {link["rel"] for link in links}

        # POST searches must produce POST paging links.
        response = client.post(f"{prefix}/search", json={})
        assert response.status_code == 200
        assert response.json()["url"] == url_prefix + "/search"
        assert response.json()["base_url"].rstrip("/") == url_prefix
        links = response.json()["links"]
        for link in links:
            if link["rel"] in ["previous", "next"]:
                assert link["method"] == "POST"
            assert link["href"].startswith(url_prefix)
        assert {"next", "previous", "root", "self"} == {link["rel"] for link in links}

        response = client.get(f"{prefix}/collections")
        assert response.status_code == 200
        assert response.json()["url"] == url_prefix + "/collections"
        assert response.json()["base_url"].rstrip("/") == url_prefix
        links = response.json()["links"]
        for link in links:
            if link["rel"] in ["previous", "next"]:
                assert link["method"] == "GET"
            assert link["href"].startswith(url_prefix)
        assert {"next", "previous", "root", "self"} == {link["rel"] for link in links}

View File

@ -0,0 +1,601 @@
import logging
import uuid
from contextlib import asynccontextmanager
from copy import deepcopy
from typing import Callable, Literal
import pytest
from fastapi import Request
from pydantic import ValidationError
from stac_pydantic import Collection, Item
from stac_fastapi.pgstac.config import PostgresSettings
from stac_fastapi.pgstac.db import close_db_connection, connect_to_db, get_connection
# from tests.conftest import MockStarletteRequest
logger = logging.getLogger(__name__)
async def test_create_collection(app_client, load_test_data: Callable):
    """POSTing a collection round-trips through GET unchanged (links aside)."""
    in_json = load_test_data("test_collection.json")
    expected = Collection.model_validate(in_json)

    response = await app_client.post("/collections", json=in_json)
    assert response.status_code == 201
    created = Collection.model_validate(response.json())
    assert expected.model_dump(exclude={"links"}) == created.model_dump(
        exclude={"links"}
    )

    response = await app_client.get(f"/collections/{created.id}")
    assert response.status_code == 200
    fetched = Collection.model_validate(response.json())
    assert created.model_dump(exclude={"links"}) == fetched.model_dump(
        exclude={"links"}
    )
async def test_update_collection(app_client, load_test_collection, load_test_data):
    """PUTting a modified collection persists the change."""
    updated = load_test_collection
    updated["keywords"].append("newkeyword")
    response = await app_client.put(f"/collections/{updated['id']}", json=updated)
    assert response.status_code == 200

    response = await app_client.get(f"/collections/{updated['id']}")
    assert response.status_code == 200
    fetched = Collection.model_validate(response.json())
    expected = Collection(**updated)
    assert expected.model_dump(exclude={"links"}) == fetched.model_dump(
        exclude={"links"}
    )
    assert "newkeyword" in fetched.keywords
async def test_delete_collection(app_client, load_test_collection):
    """Deleting a collection makes subsequent GETs return 404."""
    collection_id = load_test_collection["id"]
    response = await app_client.delete(f"/collections/{collection_id}")
    assert response.status_code == 200

    response = await app_client.get(f"/collections/{collection_id}")
    assert response.status_code == 404
async def test_create_item(app_client, load_test_data: Callable, load_test_collection):
    """POSTing an item round-trips through GET unchanged (links aside)."""
    collection = load_test_collection
    in_json = load_test_data("test_item.json")
    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=in_json,
    )
    assert response.status_code == 201
    expected = Item.model_validate(in_json)
    created = Item.model_validate(response.json())
    assert expected.model_dump(exclude={"links"}) == created.model_dump(
        exclude={"links"}
    )

    response = await app_client.get(
        f"/collections/{collection['id']}/items/{created.id}"
    )
    assert response.status_code == 200
    fetched = Item.model_validate(response.json())
    assert expected.model_dump(exclude={"links"}) == fetched.model_dump(
        exclude={"links"}
    )
async def test_create_item_no_collection_id(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items with no collection id should be set with the collection id from the path"""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    item["collection"] = None

    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=item,
    )
    assert response.status_code == 201

    response = await app_client.get(
        f"/collections/{collection['id']}/items/{item['id']}"
    )
    assert response.status_code == 200
    fetched = Item.model_validate(response.json())
    assert fetched.collection == collection["id"]
async def test_create_item_invalid_ids(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items with invalid ids should return an error"""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    item["id"] = "invalid/id"

    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=item,
    )
    assert response.status_code == 400
async def test_create_item_invalid_collection_id(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items with invalid collection ids should return an error"""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    item["collection"] = "wrong-collection-id"

    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=item,
    )
    assert response.status_code == 400
async def test_create_item_bad_body(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items with invalid type should return an error"""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    item["type"] = "not-a-type"

    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json=item,
    )
    assert response.status_code == 400
async def test_create_item_no_geometry(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items with missing or null Geometry should return an error"""
    collection = load_test_collection
    item = load_test_data("test_item.json")
    item.pop("bbox")
    item["geometry"] = None

    response = await app_client.post(
        f"/collections/{collection['id']}/items", json=item
    )
    assert response.status_code == 400
    assert "Geometry is required in pgstac." in response.json()["detail"]
async def test_update_item(app_client, load_test_collection, load_test_item):
    """PUTting a modified item persists the change."""
    collection = load_test_collection
    item = load_test_item
    item["properties"]["description"] = "Update Test"

    response = await app_client.put(
        f"/collections/{collection['id']}/items/{item['id']}", json=item
    )
    assert response.status_code == 200

    response = await app_client.get(
        f"/collections/{collection['id']}/items/{item['id']}"
    )
    assert response.status_code == 200
    fetched = Item.model_validate(response.json())
    expected = Item(**item)
    assert expected.model_dump(exclude={"links"}) == fetched.model_dump(
        exclude={"links"}
    )
    assert fetched.properties.description == "Update Test"
async def test_delete_item(app_client, load_test_collection, load_test_item):
    """Deleting an item makes subsequent GETs return 404."""
    collection = load_test_collection
    item = load_test_item

    response = await app_client.delete(
        f"/collections/{collection['id']}/items/{item['id']}"
    )
    assert response.status_code == 200

    response = await app_client.get(
        f"/collections/{collection['id']}/items/{item['id']}"
    )
    assert response.status_code == 404
async def test_get_collection_items(app_client, load_test_collection, load_test_item):
    """Listing items returns the preloaded item plus four freshly created copies."""
    collection = load_test_collection
    item = load_test_item
    for _ in range(4):
        item["id"] = str(uuid.uuid4())
        response = await app_client.post(
            f"/collections/{collection['id']}/items",
            json=item,
        )
        assert response.status_code == 201

    response = await app_client.get(
        f"/collections/{collection['id']}/items",
    )
    assert response.status_code == 200
    feature_collection = response.json()
    assert "features" in feature_collection
    assert len(feature_collection["features"]) == 5
async def test_create_item_collection(
    app_client, load_test_data: Callable, load_test_collection
):
    """POSTing a FeatureCollection to the items endpoint should create the items"""
    collection = load_test_collection
    base_item = load_test_data("test_item.json")
    items = []
    for _ in range(5):
        clone = deepcopy(base_item)
        clone["id"] = str(uuid.uuid4())
        items.append(clone)

    response = await app_client.post(
        f"/collections/{collection['id']}/items",
        json={"type": "FeatureCollection", "features": items, "links": []},
    )
    assert response.status_code == 201

    response = await app_client.get(
        f"/collections/{collection['id']}/items",
    )
    for created in items:
        response = await app_client.get(
            f"/collections/{collection['id']}/items/{created['id']}"
        )
        assert response.status_code == 200
async def test_create_item_collection_no_collection_ids(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items in ItemCollection with no collection ids should be set with the collection id from the path"""
    coll = load_test_collection
    base_item = load_test_data("test_item.json")

    # Build five copies with explicit ``collection: None`` so the server must
    # fill the collection in from the path parameter.
    items = []
    for _ in range(5):
        item = deepcopy(base_item)
        item["id"] = str(uuid.uuid4())
        item["collection"] = None
        items.append(item)

    item_collection = {"type": "FeatureCollection", "features": items, "links": []}

    resp = await app_client.post(
        f"/collections/{coll['id']}/items",
        json=item_collection,
    )
    assert resp.status_code == 201

    # Fix: this listing response was previously fetched but never checked.
    resp = await app_client.get(
        f"/collections/{coll['id']}/items",
    )
    assert resp.status_code == 200

    # Each item exists and carries the collection id taken from the path.
    for item in items:
        resp = await app_client.get(f"/collections/{coll['id']}/items/{item['id']}")
        assert resp.status_code == 200
        assert resp.json()["collection"] == coll["id"]
async def test_create_item_collection_invalid_collection_ids(
    app_client, load_test_data: Callable, load_test_collection
):
    """Feature collection containing items with invalid collection ids should return an error"""
    coll = load_test_collection
    base_item = load_test_data("test_item.json")

    def _bad_feature():
        # Each feature points at a collection that does not exist.
        bad = deepcopy(base_item)
        bad["id"] = str(uuid.uuid4())
        bad["collection"] = "wrong-collection-id"
        return bad

    item_collection = {
        "type": "FeatureCollection",
        "features": [_bad_feature() for _ in range(5)],
        "links": [],
    }

    resp = await app_client.post(
        f"/collections/{coll['id']}/items",
        json=item_collection,
    )
    assert resp.status_code == 400
async def test_create_item_collection_invalid_item_ids(
    app_client, load_test_data: Callable, load_test_collection
):
    """Feature collection containing items with invalid ids should return an error"""
    coll = load_test_collection
    base_item = load_test_data("test_item.json")

    features = []
    for _ in range(5):
        feature = deepcopy(base_item)
        # "/" is not a legal character inside an item id.
        feature["id"] = str(uuid.uuid4()) + "/bad/id"
        features.append(feature)

    item_collection = {"type": "FeatureCollection", "features": features, "links": []}

    resp = await app_client.post(
        f"/collections/{coll['id']}/items",
        json=item_collection,
    )
    assert resp.status_code == 400
async def test_create_bulk_items(
    app_client, load_test_data: Callable, load_test_collection
):
    """The bulk endpoint creates every item keyed in the payload."""
    coll = load_test_collection
    item = load_test_data("test_item.json")

    items = {}
    for _ in range(2):
        _item = deepcopy(item)
        _item["id"] = str(uuid.uuid4())
        items[_item["id"]] = _item

    resp = await app_client.post(
        f"/collections/{coll['id']}/bulk_items",
        json={"items": items},
    )
    assert resp.status_code == 200
    assert resp.text == '"Successfully added 2 items."'

    # Each bulk-inserted item is retrievable on its own.
    for item_id in items:
        resp = await app_client.get(f"/collections/{coll['id']}/items/{item_id}")
        assert resp.status_code == 200
async def test_create_bulk_items_already_exist_insert(
    app_client, load_test_data: Callable, load_test_collection
):
    """Re-inserting existing items with method=insert must conflict (409)."""
    coll = load_test_collection
    item = load_test_data("test_item.json")

    items = {}
    for _ in range(2):
        _item = deepcopy(item)
        _item["id"] = str(uuid.uuid4())
        items[_item["id"]] = _item
    payload = {"items": items, "method": "insert"}

    bulk_url = f"/collections/{coll['id']}/bulk_items"

    resp = await app_client.post(bulk_url, json=payload)
    assert resp.status_code == 200
    assert resp.text == '"Successfully added 2 items."'

    for item_id in items:
        resp = await app_client.get(f"/collections/{coll['id']}/items/{item_id}")
        assert resp.status_code == 200

    # Posting the identical payload again must fail: "insert" never upserts.
    resp = await app_client.post(bulk_url, json=payload)
    assert resp.status_code == 409
async def test_create_bulk_items_already_exist_upsert(
    app_client, load_test_data: Callable, load_test_collection
):
    """Re-sending existing items succeeds once the method is switched to upsert."""
    coll = load_test_collection
    item = load_test_data("test_item.json")

    items = {}
    for _ in range(2):
        _item = deepcopy(item)
        _item["id"] = str(uuid.uuid4())
        items[_item["id"]] = _item
    payload = {"items": items, "method": "insert"}

    bulk_url = f"/collections/{coll['id']}/bulk_items"

    resp = await app_client.post(bulk_url, json=payload)
    assert resp.status_code == 200
    assert resp.text == '"Successfully added 2 items."'

    for item_id in items:
        resp = await app_client.get(f"/collections/{coll['id']}/items/{item_id}")
        assert resp.status_code == 200

    # Same items again, but with upsert this time: the request should succeed.
    payload["method"] = "upsert"
    resp = await app_client.post(bulk_url, json=payload)
    assert resp.status_code == 200
    assert resp.text == '"Successfully upserted 2 items."'
async def test_create_bulk_items_omit_collection(
    app_client, load_test_data: Callable, load_test_collection
):
    """Bulk items may omit their collection id; it is taken from the URL path."""
    coll = load_test_collection
    item = load_test_data("test_item.json")

    items = {}
    for _ in range(2):
        _item = deepcopy(item)
        _item["id"] = str(uuid.uuid4())
        # Drop the collection id so the server must infer it from the path.
        del _item["collection"]
        items[_item["id"]] = _item
    payload = {"items": items, "method": "insert"}

    bulk_url = f"/collections/{coll['id']}/bulk_items"

    resp = await app_client.post(bulk_url, json=payload)
    assert resp.status_code == 200
    assert resp.text == '"Successfully added 2 items."'

    for item_id in items:
        resp = await app_client.get(f"/collections/{coll['id']}/items/{item_id}")
        assert resp.status_code == 200

    # Upserting the same payload should work as well.
    payload["method"] = "upsert"
    resp = await app_client.post(bulk_url, json=payload)
    assert resp.status_code == 200
    assert resp.text == '"Successfully upserted 2 items."'
async def test_create_bulk_items_collection_mismatch(
    app_client, load_test_data: Callable, load_test_collection
):
    """Items whose collection id disagrees with the path must be rejected."""
    coll = load_test_collection
    item = load_test_data("test_item.json")

    items = {}
    for _ in range(2):
        _item = deepcopy(item)
        _item["id"] = str(uuid.uuid4())
        _item["collection"] = "wrong-collection"
        items[_item["id"]] = _item

    resp = await app_client.post(
        f"/collections/{coll['id']}/bulk_items",
        json={"items": items, "method": "insert"},
    )
    assert resp.status_code == 400
    assert (
        resp.json()["detail"]
        == "Collection ID from path parameter (test-collection) does not match Collection ID from Item (wrong-collection)"
    )
async def test_create_bulk_items_id_mismatch(
    app_client, load_test_data: Callable, load_test_collection
):
    """Bulk payloads whose dict keys do not match the item ids should be rejected.

    NOTE(review): every item here also carries ``collection="wrong-collection"``,
    and the error asserted below is the *collection* mismatch message — so the
    id mismatch this test is named after is never what actually triggers the
    400. Consider dropping the collection override so the id check itself is
    exercised; confirm the server's id-mismatch message first.
    """
    coll = load_test_collection
    item = load_test_data("test_item.json")
    items = {}
    for _ in range(2):
        _item = deepcopy(item)
        _item["id"] = str(uuid.uuid4())
        _item["collection"] = "wrong-collection"
        # Dict key deliberately differs from the item's own id.
        items[_item["id"] + "wrong"] = _item
    payload = {"items": items, "method": "insert"}
    resp = await app_client.post(
        f"/collections/{coll['id']}/bulk_items",
        json=payload,
    )
    assert resp.status_code == 400
    assert (
        resp.json()["detail"]
        == "Collection ID from path parameter (test-collection) does not match Collection ID from Item (wrong-collection)"
    )
# TODO since right now puts implement upsert
# test_create_collection_already_exists
# test create_item_already_exists
# def test_get_collection_items(
# postgres_core: CoreCrudClient,
# postgres_transactions: TransactionsClient,
# load_test_data: Callable,
# ):
# coll = Collection.model_validate(load_test_data("test_collection.json"))
# postgres_transactions.create_collection(coll, request=MockStarletteRequest)
# item = Item.model_validate(load_test_data("test_item.json"))
# for _ in range(5):
# item.id = str(uuid.uuid4())
# postgres_transactions.create_item(item, request=MockStarletteRequest)
# fc = postgres_core.item_collection(coll.id, request=MockStarletteRequest)
# assert len(fc.features) == 5
# for item in fc.features:
# assert item.collection == coll.id
async def test_db_setup_works_with_env_vars(api_client, database, monkeypatch):
    """Test that the application starts successfully if the POSTGRES_* environment variables are set"""
    env = {
        "PGUSER": database.user,
        "PGPASSWORD": database.password,
        "PGHOST": database.host,
        "PGPORT": str(database.port),
        "PGDATABASE": database.dbname,
    }
    for name, value in env.items():
        monkeypatch.setenv(name, value)

    await connect_to_db(api_client.app)
    await close_db_connection(api_client.app)
async def test_db_setup_fails_without_env_vars(api_client):
    """Test that the application fails to start if database environment variables are not set."""
    # Fix: the old try/except followed by a bare ``pytest.raises(ValidationError)``
    # call asserted nothing — if connect_to_db succeeded, the test silently
    # passed. ``pytest.raises`` must be used as a context manager.
    with pytest.raises(ValidationError):
        await connect_to_db(api_client.app)
        # Only reached if connect_to_db unexpectedly succeeded; close the pool
        # before pytest.raises reports the missing exception.
        await close_db_connection(api_client.app)
@asynccontextmanager
async def custom_get_connection(
    request: Request,
    readwrite: Literal["r", "w"],
):
    """An example of customizing the connection getter"""
    async with get_connection(request, readwrite) as connection:
        # Tag the session so request handlers can observe the customization.
        await connection.execute("SELECT set_config('api.test', 'added-config', false)")
        yield connection
class TestDbConnect:
    """Tests ``connect_to_db`` when a custom connection getter is supplied."""

    @pytest.fixture
    async def app(self, api_client, database):
        """
        app fixture override to setup app with a customized db connection getter
        """
        postgres_settings = PostgresSettings(
            pguser=database.user,
            pgpassword=database.password,
            pghost=database.host,
            pgport=database.port,
            pgdatabase=database.dbname,
        )
        logger.debug("Customizing app setup")
        # Pass custom_get_connection so every request-scoped connection has
        # the 'api.test' session setting applied before it is handed out.
        await connect_to_db(api_client.app, custom_get_connection, postgres_settings)
        yield api_client.app
        await close_db_connection(api_client.app)

    async def test_db_setup(self, api_client, app_client):
        # Register a throwaway endpoint that reads back the session setting
        # installed by custom_get_connection.
        @api_client.app.get(f"{api_client.router.prefix}/db-test")
        async def example_view(request: Request):
            async with request.app.state.get_connection(request, "r") as conn:
                return await conn.fetchval("SELECT current_setting('api.test', true)")

        response = await app_client.get("/db-test")
        assert response.status_code == 200
        assert response.json() == "added-config"

View File

@ -0,0 +1,404 @@
import json
import logging
import os
import time
from typing import Callable, Dict
from urllib.parse import quote_plus as quote
from urllib.parse import urljoin
import asyncpg
import pytest
from fastapi import APIRouter
from httpx import ASGITransport, AsyncClient
from pypgstac import __version__ as pgstac_version
from pypgstac.db import PgstacDB
from pypgstac.migrate import Migrate
from pytest_postgresql.janitor import DatabaseJanitor
from stac_fastapi.api.app import StacApi
from stac_fastapi.api.models import (
ItemCollectionUri,
JSONResponse,
create_get_request_model,
create_post_request_model,
create_request_model,
)
from stac_fastapi.extensions.core import (
CollectionSearchExtension,
CollectionSearchFilterExtension,
FieldsExtension,
FreeTextExtension,
ItemCollectionFilterExtension,
OffsetPaginationExtension,
SearchFilterExtension,
SortExtension,
TokenPaginationExtension,
TransactionExtension,
)
from stac_fastapi.extensions.core.fields import FieldsConformanceClasses
from stac_fastapi.extensions.core.free_text import FreeTextConformanceClasses
from stac_fastapi.extensions.core.query import QueryConformanceClasses
from stac_fastapi.extensions.core.sort import SortConformanceClasses
from stac_fastapi.extensions.third_party import BulkTransactionExtension
from stac_pydantic import Collection, Item
from stac_fastapi.pgstac.config import PostgresSettings, Settings
from stac_fastapi.pgstac.core import CoreCrudClient, health_check
from stac_fastapi.pgstac.db import close_db_connection, connect_to_db
from stac_fastapi.pgstac.extensions import QueryExtension
from stac_fastapi.pgstac.extensions.filter import FiltersClient
from stac_fastapi.pgstac.transactions import BulkTransactionsClient, TransactionsClient
from stac_fastapi.pgstac.types.search import PgstacSearch
# Directory holding the JSON fixtures used by the tests below.
DATA_DIR = os.path.join(os.path.dirname(__file__), "data")

logger = logging.getLogger(__name__)

# Skip marker for tests that need features introduced in pgstac 0.9.2.
# NOTE(review): the tuple(map(int, ...)) parse assumes a purely numeric
# version string; a pre-release such as "0.9.2.dev1" would raise ValueError —
# confirm pgstac never reports such a version here.
requires_pgstac_0_9_2 = pytest.mark.skipif(
    tuple(map(int, pgstac_version.split("."))) < (0, 9, 2),
    reason="PgSTAC>=0.9.2 required",
)
@pytest.fixture(scope="session")
def database(postgresql_proc):
    """Session-scoped throwaway database, migrated to the current pgstac schema."""
    janitor = DatabaseJanitor(
        user=postgresql_proc.user,
        host=postgresql_proc.host,
        port=postgresql_proc.port,
        dbname="pgstactestdb",
        version=postgresql_proc.version,
        password="a2Vw:yk=)CdSis[fek]tW=/o",
    )
    with janitor as jan:
        dsn = f"postgresql://{jan.user}:{quote(jan.password)}@{jan.host}:{jan.port}/{jan.dbname}"
        # Install the pgstac schema before handing the database to the tests.
        with PgstacDB(dsn=dsn) as db:
            assert Migrate(db).run_migration()
        yield jan
@pytest.fixture(autouse=True)
async def pgstac(database):
    """Autouse fixture: drop and re-migrate the pgstac schema after each test."""
    connection = f"postgresql://{database.user}:{quote(database.password)}@{database.host}:{database.port}/{database.dbname}"
    yield
    # Teardown: remove whatever state the test left behind...
    conn = await asyncpg.connect(dsn=connection)
    await conn.execute(
        """
        DROP SCHEMA IF EXISTS pgstac CASCADE;
        """
    )
    await conn.close()
    # ...then reinstall a pristine schema for the next test.
    with PgstacDB(dsn=connection) as db:
        version = Migrate(db).run_migration()
        logger.info(f"PGStac Migrated to {version}")
# Run all the tests that use the api_client in both db hydrate and api hydrate mode
@pytest.fixture(
    params=[
        # hydratation, prefix, model_validation
        (False, "", False),
        (False, "/router_prefix", False),
        (True, "", False),
        (True, "/router_prefix", False),
        (False, "", True),
        (True, "", True),
    ],
    scope="session",
)
def api_client(request):
    """Build a session-scoped StacApi, parametrized over hydrate mode,
    router prefix, and response-model validation (see ``params`` above)."""
    hydrate, prefix, response_model = request.param
    api_settings = Settings(
        enable_response_models=response_model,
        testing=True,
        use_api_hydrate=hydrate,
    )
    # The docs/OpenAPI endpoints must live under the same router prefix.
    api_settings.openapi_url = prefix + api_settings.openapi_url
    api_settings.docs_url = prefix + api_settings.docs_url
    logger.info(
        "creating client with settings, hydrate: {}, router prefix: '{}'".format(
            api_settings.use_api_hydrate, prefix
        )
    )
    # Application-wide extensions: transactions + bulk transactions.
    application_extensions = [
        TransactionExtension(client=TransactionsClient(), settings=api_settings),
        BulkTransactionExtension(client=BulkTransactionsClient()),
    ]
    # Extensions wired into the /search endpoints.
    search_extensions = [
        QueryExtension(),
        SortExtension(),
        FieldsExtension(),
        SearchFilterExtension(client=FiltersClient()),
        TokenPaginationExtension(),
    ]
    application_extensions.extend(search_extensions)
    # Extensions scoped to collection search (/collections).
    collection_extensions = [
        QueryExtension(conformance_classes=[QueryConformanceClasses.COLLECTIONS]),
        SortExtension(conformance_classes=[SortConformanceClasses.COLLECTIONS]),
        FieldsExtension(conformance_classes=[FieldsConformanceClasses.COLLECTIONS]),
        CollectionSearchFilterExtension(client=FiltersClient()),
        FreeTextExtension(
            conformance_classes=[FreeTextConformanceClasses.COLLECTIONS],
        ),
        OffsetPaginationExtension(),
    ]
    collection_search_extension = CollectionSearchExtension.from_extensions(
        collection_extensions
    )
    application_extensions.append(collection_search_extension)
    # Extensions scoped to the items endpoint (/collections/{id}/items).
    item_collection_extensions = [
        QueryExtension(
            conformance_classes=[QueryConformanceClasses.ITEMS],
        ),
        SortExtension(
            conformance_classes=[SortConformanceClasses.ITEMS],
        ),
        FieldsExtension(conformance_classes=[FieldsConformanceClasses.ITEMS]),
        ItemCollectionFilterExtension(client=FiltersClient()),
        TokenPaginationExtension(),
    ]
    application_extensions.extend(item_collection_extensions)
    # Request models derived from the extension sets above.
    items_get_request_model = create_request_model(
        model_name="ItemCollectionUri",
        base_model=ItemCollectionUri,
        extensions=item_collection_extensions,
        request_type="GET",
    )
    search_get_request_model = create_get_request_model(search_extensions)
    search_post_request_model = create_post_request_model(
        search_extensions, base_model=PgstacSearch
    )
    api = StacApi(
        settings=api_settings,
        extensions=application_extensions,
        client=CoreCrudClient(pgstac_search_model=search_post_request_model),
        items_get_request_model=items_get_request_model,
        search_get_request_model=search_get_request_model,
        search_post_request_model=search_post_request_model,
        collections_get_request_model=collection_search_extension.GET,
        response_class=JSONResponse,
        router=APIRouter(prefix=prefix),
        health_check=health_check,
    )
    return api
@pytest.fixture(scope="function")
async def app(api_client, database):
    """Function-scoped app wired to the test database.

    Opens the read and write connection pools before the test and closes
    them afterwards.
    """
    postgres_settings = PostgresSettings(
        pguser=database.user,
        pgpassword=database.password,
        pghost=database.host,
        pgport=database.port,
        pgdatabase=database.dbname,
    )
    logger.info("Creating app Fixture")
    # Fix: removed a stray ``time.time()`` call whose result was discarded
    # (leftover timing scaffolding).
    app = api_client.app
    await connect_to_db(
        app,
        postgres_settings=postgres_settings,
        add_write_connection_pool=True,
    )
    yield app
    await close_db_connection(app)
    logger.info("Closed Pools.")
@pytest.fixture(scope="function")
async def app_client(app):
    """HTTP test client bound to the app, honoring any configured router prefix."""
    logger.info("creating app_client")
    prefix = app.state.router_prefix
    base_url = urljoin("http://test", prefix) if prefix != "" else "http://test"
    async with AsyncClient(transport=ASGITransport(app=app), base_url=base_url) as c:
        yield c
@pytest.fixture
def load_test_data() -> Callable[[str], Dict]:
    """Return a loader that reads a JSON fixture from ``DATA_DIR`` by filename."""

    def load_file(filename: str) -> Dict:
        path = os.path.join(DATA_DIR, filename)
        with open(path) as file:
            return json.load(file)

    return load_file
@pytest.fixture
async def load_test_collection(app_client, load_test_data):
    """Create the test collection via the API and return it as a plain dict."""
    payload = load_test_data("test_collection.json")
    resp = await app_client.post("/collections", json=payload)
    assert resp.status_code == 201
    # Round-trip through the pydantic model to normalize the payload.
    return Collection.model_validate(resp.json()).model_dump(mode="json")
@pytest.fixture
async def load_test_item(app_client, load_test_data, load_test_collection):
    """Create the test item inside the test collection; returns it as a dict."""
    coll = load_test_collection
    payload = load_test_data("test_item.json")
    resp = await app_client.post(
        f"/collections/{coll['id']}/items",
        json=payload,
    )
    assert resp.status_code == 201
    return Item.model_validate(resp.json()).model_dump(mode="json")
@pytest.fixture
async def load_test2_collection(app_client, load_test_data):
    """Create the second test collection; returns the pydantic model itself."""
    payload = load_test_data("test2_collection.json")
    resp = await app_client.post("/collections", json=payload)
    assert resp.status_code == 201
    return Collection.model_validate(resp.json())
@pytest.fixture
async def load_test2_item(app_client, load_test_data, load_test2_collection):
    """Create the second test item in the second collection; returns the model."""
    coll = load_test2_collection
    payload = load_test_data("test2_item.json")
    resp = await app_client.post(
        f"/collections/{coll.id}/items",
        json=payload,
    )
    assert resp.status_code == 201
    return Item.model_validate(resp.json())
@pytest.fixture(scope="function")
async def app_no_ext(database):
    """Default stac-fastapi-pgstac application with only the transaction extension."""
    # Fixes: corrected the garbled docstring ("without only the transaction
    # extensions") and removed a stray ``time.time()`` whose result was
    # discarded.
    api_settings = Settings(testing=True)
    api_client_no_ext = StacApi(
        settings=api_settings,
        extensions=[
            TransactionExtension(client=TransactionsClient(), settings=api_settings)
        ],
        client=CoreCrudClient(),
        health_check=health_check,
    )
    postgres_settings = PostgresSettings(
        pguser=database.user,
        pgpassword=database.password,
        pghost=database.host,
        pgport=database.port,
        pgdatabase=database.dbname,
    )
    logger.info("Creating app Fixture")
    await connect_to_db(
        api_client_no_ext.app,
        postgres_settings=postgres_settings,
        add_write_connection_pool=True,
    )
    yield api_client_no_ext.app
    await close_db_connection(api_client_no_ext.app)
    logger.info("Closed Pools.")
@pytest.fixture(scope="function")
async def app_client_no_ext(app_no_ext):
    """HTTP client for the transaction-extension-only application."""
    logger.info("creating app_client")
    transport = ASGITransport(app=app_no_ext)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        yield client
@pytest.fixture(scope="function")
async def app_no_transaction(database):
    """Default stac-fastapi-pgstac application without any extensions."""
    # Fix: removed a stray ``time.time()`` call whose result was discarded.
    api_settings = Settings(testing=True)
    api = StacApi(
        settings=api_settings,
        extensions=[],
        client=CoreCrudClient(),
        health_check=health_check,
    )
    postgres_settings = PostgresSettings(
        pguser=database.user,
        pgpassword=database.password,
        pghost=database.host,
        pgport=database.port,
        pgdatabase=database.dbname,
    )
    logger.info("Creating app Fixture")
    # No write pool: this app exposes no transaction endpoints.
    await connect_to_db(
        api.app,
        postgres_settings=postgres_settings,
        add_write_connection_pool=False,
    )
    yield api.app
    await close_db_connection(api.app)
    logger.info("Closed Pools.")
@pytest.fixture(scope="function")
async def app_client_no_transaction(app_no_transaction):
    """HTTP client for the extension-free application."""
    logger.info("creating app_client")
    transport = ASGITransport(app=app_no_transaction)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        yield client
@pytest.fixture(scope="function")
async def default_app(database, monkeypatch):
    """Test default stac-fastapi-pgstac application."""
    for name, value in {
        "PGUSER": database.user,
        "PGPASSWORD": database.password,
        "PGHOST": database.host,
        "PGPORT": str(database.port),
        "PGDATABASE": database.dbname,
    }.items():
        monkeypatch.setenv(name, value)
    monkeypatch.delenv("ENABLED_EXTENSIONS", raising=False)
    monkeypatch.setenv("ENABLE_TRANSACTIONS_EXTENSIONS", "TRUE")
    monkeypatch.setenv("USE_API_HYDRATE", "TRUE")
    monkeypatch.setenv("ENABLE_RESPONSE_MODELS", "TRUE")

    # Deferred on purpose: building the app reads the env vars set above.
    from stac_fastapi.pgstac.app import app

    await connect_to_db(app, add_write_connection_pool=True)
    yield app
    await close_db_connection(app)
@pytest.fixture(scope="function")
async def default_client(default_app):
    """HTTP client for the default application fixture."""
    transport = ASGITransport(app=default_app)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        yield client

View File

@ -0,0 +1,28 @@
{
"id": "joplin",
"description": "This imagery was acquired by the NOAA Remote Sensing Division to support NOAA national security and emergency response requirements. In addition, it will be used for ongoing research efforts for testing and developing standards for airborne digital imagery. Individual images have been combined into a larger mosaic and tiled for distribution. The approximate ground sample distance (GSD) for each pixel is 35 cm (1.14 feet).",
"stac_version": "1.0.0",
"license": "public-domain",
"links": [],
"type": "collection",
"extent": {
"spatial": {
"bbox": [
[
-94.6911621,
37.0332547,
-94.402771,
37.1077651
]
]
},
"temporal": {
"interval": [
[
"2000-02-01T00:00:00Z",
"2000-02-12T00:00:00Z"
]
]
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,271 @@
{
"id": "test2-collection",
"type": "Collection",
"links": [
{
"rel": "items",
"type": "application/geo+json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1/items"
},
{
"rel": "parent",
"type": "application/json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/"
},
{
"rel": "root",
"type": "application/json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/"
},
{
"rel": "self",
"type": "application/json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1"
},
{
"rel": "cite-as",
"href": "https://doi.org/10.5066/P9AF14YV",
"title": "Landsat 1-5 MSS Collection 2 Level-1"
},
{
"rel": "license",
"href": "https://www.usgs.gov/core-science-systems/hdds/data-policy",
"title": "Public Domain"
},
{
"rel": "describedby",
"href": "https://planetarycomputer.microsoft.com/dataset/landsat-c2-l1",
"title": "Human readable dataset overview and reference",
"type": "text/html"
}
],
"title": "Landsat Collection 2 Level-1",
"assets": {
"thumbnail": {
"href": "https://ai4edatasetspublicassets.blob.core.windows.net/assets/pc_thumbnails/landsat-c2-l1-thumb.png",
"type": "image/png",
"roles": ["thumbnail"],
"title": "Landsat Collection 2 Level-1 thumbnail"
}
},
"extent": {
"spatial": {
"bbox": [[-180, -90, 180, 90]]
},
"temporal": {
"interval": [["1972-07-25T00:00:00Z", "2013-01-07T23:23:59Z"]]
}
},
"license": "proprietary",
"keywords": ["Landsat", "USGS", "NASA", "Satellite", "Global", "Imagery"],
"providers": [
{
"url": "https://landsat.gsfc.nasa.gov/",
"name": "NASA",
"roles": ["producer", "licensor"]
},
{
"url": "https://www.usgs.gov/landsat-missions/landsat-collection-2-level-1-data",
"name": "USGS",
"roles": ["producer", "processor", "licensor"]
},
{
"url": "https://planetarycomputer.microsoft.com",
"name": "Microsoft",
"roles": ["host"]
}
],
"summaries": {
"gsd": [79],
"sci:doi": ["10.5066/P9AF14YV"],
"eo:bands": [
{
"name": "B4",
"common_name": "green",
"description": "Visible green (Landsat 1-3 Band B4)",
"center_wavelength": 0.55,
"full_width_half_max": 0.1
},
{
"name": "B5",
"common_name": "red",
"description": "Visible red (Landsat 1-3 Band B5)",
"center_wavelength": 0.65,
"full_width_half_max": 0.1
},
{
"name": "B6",
"common_name": "nir08",
"description": "Near infrared (Landsat 1-3 Band B6)",
"center_wavelength": 0.75,
"full_width_half_max": 0.1
},
{
"name": "B7",
"common_name": "nir09",
"description": "Near infrared (Landsat 1-3 Band B7)",
"center_wavelength": 0.95,
"full_width_half_max": 0.3
},
{
"name": "B1",
"common_name": "green",
"description": "Visible green (Landsat 4-5 Band B1)",
"center_wavelength": 0.55,
"full_width_half_max": 0.1
},
{
"name": "B2",
"common_name": "red",
"description": "Visible red (Landsat 4-5 Band B2)",
"center_wavelength": 0.65,
"full_width_half_max": 0.1
},
{
"name": "B3",
"common_name": "nir08",
"description": "Near infrared (Landsat 4-5 Band B3)",
"center_wavelength": 0.75,
"full_width_half_max": 0.1
},
{
"name": "B4",
"common_name": "nir09",
"description": "Near infrared (Landsat 4-5 Band B4)",
"center_wavelength": 0.95,
"full_width_half_max": 0.3
}
],
"platform": [
"landsat-1",
"landsat-2",
"landsat-3",
"landsat-4",
"landsat-5"
],
"instruments": ["mss"],
"view:off_nadir": [0]
},
"description": "Landsat Collection 2 Level-1 data, consisting of quantized and calibrated scaled Digital Numbers (DN) representing the multispectral image data. These [Level-1](https://www.usgs.gov/landsat-missions/landsat-collection-2-level-1-data) data can be [rescaled](https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product) to top of atmosphere (TOA) reflectance and/or radiance. Thermal band data can be rescaled to TOA brightness temperature.\\n\\nThis dataset represents the global archive of Level-1 data from [Landsat Collection 2](https://www.usgs.gov/core-science-systems/nli/landsat/landsat-collection-2) acquired by the [Multispectral Scanner System](https://landsat.gsfc.nasa.gov/multispectral-scanner-system/) onboard Landsat 1 through Landsat 5 from July 7, 1972 to January 7, 2013. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\\n",
"item_assets": {
"red": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Red Band",
"description": "Collection 2 Level-1 Red Band Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"nodata": 0,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"green": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Green Band",
"description": "Collection 2 Level-1 Green Band Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"nodata": 0,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"nir08": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Near Infrared Band 0.8",
"description": "Collection 2 Level-1 Near Infrared Band 0.8 Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"nodata": 0,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"nir09": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Near Infrared Band 0.9",
"description": "Collection 2 Level-1 Near Infrared Band 0.9 Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"nodata": 0,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"mtl.txt": {
"type": "text/plain",
"roles": ["metadata"],
"title": "Product Metadata File (txt)",
"description": "Collection 2 Level-1 Product Metadata File (txt)"
},
"mtl.xml": {
"type": "application/xml",
"roles": ["metadata"],
"title": "Product Metadata File (xml)",
"description": "Collection 2 Level-1 Product Metadata File (xml)"
},
"mtl.json": {
"type": "application/json",
"roles": ["metadata"],
"title": "Product Metadata File (json)",
"description": "Collection 2 Level-1 Product Metadata File (json)"
},
"qa_pixel": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["cloud"],
"title": "Pixel Quality Assessment Band",
"description": "Collection 2 Level-1 Pixel Quality Assessment Band",
"raster:bands": [
{
"unit": "bit index",
"nodata": 1,
"data_type": "uint16",
"spatial_resolution": 60
}
]
},
"qa_radsat": {
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["saturation"],
"title": "Radiometric Saturation and Dropped Pixel Quality Assessment Band",
"description": "Collection 2 Level-1 Radiometric Saturation and Dropped Pixel Quality Assessment Band",
"raster:bands": [
{
"unit": "bit index",
"data_type": "uint16",
"spatial_resolution": 60
}
]
},
"thumbnail": {
"type": "image/jpeg",
"roles": ["thumbnail"],
"title": "Thumbnail image"
},
"reduced_resolution_browse": {
"type": "image/jpeg",
"roles": ["overview"],
"title": "Reduced resolution browse image"
}
},
"stac_version": "1.0.0",
"stac_extensions": [
"https://stac-extensions.github.io/item-assets/v1.0.0/schema.json",
"https://stac-extensions.github.io/view/v1.0.0/schema.json",
"https://stac-extensions.github.io/scientific/v1.0.0/schema.json",
"https://stac-extensions.github.io/raster/v1.0.0/schema.json",
"https://stac-extensions.github.io/eo/v1.0.0/schema.json"
]
}

View File

@ -0,0 +1,258 @@
{
"id": "test2-item",
"bbox": [-84.7340712, 30.8344014, -82.3892149, 32.6891482],
"type": "Feature",
"links": [
{
"rel": "collection",
"type": "application/json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1"
},
{
"rel": "parent",
"type": "application/json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1"
},
{
"rel": "root",
"type": "application/json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/"
},
{
"rel": "self",
"type": "application/geo+json",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/stac/collections/landsat-c2-l1/items/LM05_L1GS_018038_19901223_02_T2"
},
{
"rel": "cite-as",
"href": "https://doi.org/10.5066/P9AF14YV",
"title": "Landsat 1-5 MSS Collection 2 Level-1"
},
{
"rel": "via",
"href": "https://landsatlook.usgs.gov/stac-server/collections/landsat-c2l1/items/LM05_L1GS_018038_19901223_20200827_02_T2",
"type": "application/json",
"title": "USGS STAC Item"
},
{
"rel": "preview",
"href": "https://pct-apis-staging.westeurope.cloudapp.azure.com/data/item/map?collection=landsat-c2-l1&item=LM05_L1GS_018038_19901223_02_T2",
"title": "Map of item",
"type": "text/html"
}
],
"assets": {
"red": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B2.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Red Band (B2)",
"eo:bands": [
{
"name": "B2",
"common_name": "red",
"description": "Landsat 4-5 Band B2",
"center_wavelength": 0.65,
"full_width_half_max": 0.1
}
],
"description": "Collection 2 Level-1 Red Band Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"scale": 0.66024,
"nodata": 0,
"offset": 2.03976,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"green": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B1.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Green Band (B1)",
"eo:bands": [
{
"name": "B1",
"common_name": "green",
"description": "Landsat 4-5 Band B1",
"center_wavelength": 0.55,
"full_width_half_max": 0.1
}
],
"description": "Collection 2 Level-1 Green Band Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"scale": 0.88504,
"nodata": 0,
"offset": 1.51496,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"nir08": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B3.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Near Infrared Band 0.8 (B3)",
"eo:bands": [
{
"name": "B3",
"common_name": "nir08",
"description": "Landsat 4-5 Band B3",
"center_wavelength": 0.75,
"full_width_half_max": 0.1
}
],
      "description": "Collection 2 Level-1 Near Infrared Band 0.8 Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"scale": 0.55866,
"nodata": 0,
"offset": 4.34134,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"nir09": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_B4.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["data"],
"title": "Near Infrared Band 0.9 (B4)",
"eo:bands": [
{
"name": "B4",
"common_name": "nir09",
"description": "Landsat 4-5 Band B4",
"center_wavelength": 0.95,
"full_width_half_max": 0.3
}
],
"description": "Collection 2 Level-1 Near Infrared Band 0.9 Top of Atmosphere Radiance",
"raster:bands": [
{
"unit": "watt/steradian/square_meter/micrometer",
"scale": 0.46654,
"nodata": 0,
"offset": 1.03346,
"data_type": "uint8",
"spatial_resolution": 60
}
]
},
"mtl.txt": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_MTL.txt",
"type": "text/plain",
"roles": ["metadata"],
"title": "Product Metadata File (txt)",
"description": "Collection 2 Level-1 Product Metadata File (txt)"
},
"mtl.xml": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_MTL.xml",
"type": "application/xml",
"roles": ["metadata"],
"title": "Product Metadata File (xml)",
"description": "Collection 2 Level-1 Product Metadata File (xml)"
},
"mtl.json": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_MTL.json",
"type": "application/json",
"roles": ["metadata"],
"title": "Product Metadata File (json)",
"description": "Collection 2 Level-1 Product Metadata File (json)"
},
"qa_pixel": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_QA_PIXEL.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["cloud"],
"title": "Pixel Quality Assessment Band (QA_PIXEL)",
"description": "Collection 2 Level-1 Pixel Quality Assessment Band",
"raster:bands": [
{
"unit": "bit index",
"nodata": 1,
"data_type": "uint16",
"spatial_resolution": 60
}
]
},
"qa_radsat": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_QA_RADSAT.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"roles": ["saturation"],
"title": "Radiometric Saturation and Dropped Pixel Quality Assessment Band (QA_RADSAT)",
"description": "Collection 2 Level-1 Radiometric Saturation and Dropped Pixel Quality Assessment Band",
"raster:bands": [
{
"unit": "bit index",
"data_type": "uint16",
"spatial_resolution": 60
}
]
},
"thumbnail": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_thumb_small.jpeg",
"type": "image/jpeg",
"roles": ["thumbnail"],
"title": "Thumbnail image"
},
"reduced_resolution_browse": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-1/standard/mss/1990/018/038/LM05_L1GS_018038_19901223_20200827_02_T2/LM05_L1GS_018038_19901223_20200827_02_T2_thumb_large.jpeg",
"type": "image/jpeg",
"roles": ["overview"],
"title": "Reduced resolution browse image"
}
},
"geometry": {
"type": "Polygon",
"coordinates": [
[
[-84.3264316, 32.6891482],
[-84.7340712, 31.1114869],
[-82.8283452, 30.8344014],
[-82.3892149, 32.4079117],
[-84.3264316, 32.6891482]
]
]
},
"collection": "test2-collection",
"properties": {
"gsd": 79,
"created": "2022-03-31T16:51:57.476085Z",
"sci:doi": "10.5066/P9AF14YV",
"datetime": "1990-12-23T15:26:35.581000Z",
"platform": "landsat-5",
"proj:epsg": 32617,
"proj:shape": [3525, 3946],
"description": "Landsat Collection 2 Level-1",
"instruments": ["mss"],
"eo:cloud_cover": 23,
"proj:transform": [60, 0, 140790, 0, -60, 3622110],
"view:off_nadir": 0,
"landsat:wrs_row": "038",
"landsat:scene_id": "LM50180381990357AAA03",
"landsat:wrs_path": "018",
"landsat:wrs_type": "2",
"view:sun_azimuth": 147.23255058,
"landsat:correction": "L1GS",
"view:sun_elevation": 27.04507311,
"landsat:cloud_cover_land": 28,
"landsat:collection_number": "02",
"landsat:collection_category": "T2"
},
"stac_version": "1.0.0",
"stac_extensions": [
"https://stac-extensions.github.io/raster/v1.0.0/schema.json",
"https://stac-extensions.github.io/eo/v1.0.0/schema.json",
"https://stac-extensions.github.io/view/v1.0.0/schema.json",
"https://stac-extensions.github.io/projection/v1.0.0/schema.json",
"https://landsat.usgs.gov/stac/landsat-extension/v1.1.1/schema.json",
"https://stac-extensions.github.io/scientific/v1.0.0/schema.json"
]
}

View File

@ -0,0 +1,152 @@
{
"id": "test-collection",
"stac_extensions": [],
"type": "Collection",
  "description": "Landsat 8 imagery radiometrically calibrated and orthorectified using ground points and Digital Elevation Model (DEM) data to correct relief displacement.",
"stac_version": "1.0.0",
"license": "PDDL-1.0",
"summaries": {
"platform": ["landsat-8"],
"instruments": ["oli", "tirs"],
"gsd": [30],
"eo:bands": [
{
"name": "B1",
"common_name": "coastal",
"center_wavelength": 0.44,
"full_width_half_max": 0.02
},
{
"name": "B2",
"common_name": "blue",
"center_wavelength": 0.48,
"full_width_half_max": 0.06
},
{
"name": "B3",
"common_name": "green",
"center_wavelength": 0.56,
"full_width_half_max": 0.06
},
{
"name": "B4",
"common_name": "red",
"center_wavelength": 0.65,
"full_width_half_max": 0.04
},
{
"name": "B5",
"common_name": "nir",
"center_wavelength": 0.86,
"full_width_half_max": 0.03
},
{
"name": "B6",
"common_name": "swir16",
"center_wavelength": 1.6,
"full_width_half_max": 0.08
},
{
"name": "B7",
"common_name": "swir22",
"center_wavelength": 2.2,
"full_width_half_max": 0.2
},
{
"name": "B8",
"common_name": "pan",
"center_wavelength": 0.59,
"full_width_half_max": 0.18
},
{
"name": "B9",
"common_name": "cirrus",
"center_wavelength": 1.37,
"full_width_half_max": 0.02
},
{
"name": "B10",
"common_name": "lwir11",
"center_wavelength": 10.9,
"full_width_half_max": 0.8
},
{
"name": "B11",
"common_name": "lwir12",
"center_wavelength": 12,
"full_width_half_max": 1
}
]
},
"extent": {
"spatial": {
"bbox": [
[
-180.0,
-90.0,
180.0,
90.0
]
]
},
"temporal": {
"interval": [
[
"2013-06-01T00:00:00Z",
null
]
]
}
},
"links": [
{
"rel": "license",
"href": "https://creativecommons.org/licenses/publicdomain/",
"title": "public domain"
}
],
"title": "Landsat 8 L1",
"keywords": [
"landsat",
"earth observation",
"usgs"
],
"providers": [
{
"name": "USGS",
"roles": [
"producer"
],
"url": "https://landsat.usgs.gov/"
},
{
"name": "Planet Labs",
"roles": [
"processor"
],
"url": "https://github.com/landsat-pds/landsat_ingestor"
},
{
"name": "AWS",
"roles": [
"host"
],
"url": "https://landsatonaws.com/"
},
{
"name": "Development Seed",
"roles": [
"processor"
],
"url": "https://github.com/sat-utils/sat-api"
},
{
"name": "Earth Search by Element84",
"description": "API of Earth on AWS datasets",
"roles": [
"host"
],
"url": "https://element84.com"
}
]
}

View File

@ -0,0 +1,510 @@
{
"type": "Feature",
"id": "test-item",
"stac_version": "1.0.0",
"stac_extensions": [
"https://stac-extensions.github.io/eo/v1.0.0/schema.json",
"https://stac-extensions.github.io/projection/v1.0.0/schema.json"
],
"geometry": {
"coordinates": [
[
[
152.15052873427666,
-33.82243006904891
],
[
150.1000346138806,
-34.257132625788756
],
[
149.5776607193635,
-32.514709769700254
],
[
151.6262528041627,
-32.08081674221862
],
[
152.15052873427666,
-33.82243006904891
]
]
],
"type": "Polygon"
},
"properties": {
"datetime": "2020-02-12T12:30:22Z",
"landsat:scene_id": "LC82081612020043LGN00",
"landsat:row": "161",
"gsd": 15,
"eo:bands": [
{
"gsd": 30,
"name": "B1",
"common_name": "coastal",
"center_wavelength": 0.44,
"full_width_half_max": 0.02
},
{
"gsd": 30,
"name": "B2",
"common_name": "blue",
"center_wavelength": 0.48,
"full_width_half_max": 0.06
},
{
"gsd": 30,
"name": "B3",
"common_name": "green",
"center_wavelength": 0.56,
"full_width_half_max": 0.06
},
{
"gsd": 30,
"name": "B4",
"common_name": "red",
"center_wavelength": 0.65,
"full_width_half_max": 0.04
},
{
"gsd": 30,
"name": "B5",
"common_name": "nir",
"center_wavelength": 0.86,
"full_width_half_max": 0.03
},
{
"gsd": 30,
"name": "B6",
"common_name": "swir16",
"center_wavelength": 1.6,
"full_width_half_max": 0.08
},
{
"gsd": 30,
"name": "B7",
"common_name": "swir22",
"center_wavelength": 2.2,
"full_width_half_max": 0.2
},
{
"gsd": 15,
"name": "B8",
"common_name": "pan",
"center_wavelength": 0.59,
"full_width_half_max": 0.18
},
{
"gsd": 30,
"name": "B9",
"common_name": "cirrus",
"center_wavelength": 1.37,
"full_width_half_max": 0.02
},
{
"gsd": 100,
"name": "B10",
"common_name": "lwir11",
"center_wavelength": 10.9,
"full_width_half_max": 0.8
},
{
"gsd": 100,
"name": "B11",
"common_name": "lwir12",
"center_wavelength": 12,
"full_width_half_max": 1
}
],
"landsat:revision": "00",
"view:sun_azimuth": -148.83296771,
"instrument": "OLI_TIRS",
"landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT",
"eo:cloud_cover": 0,
"landsat:tier": "RT",
"landsat:processing_level": "L1GT",
"landsat:column": "208",
"platform": "landsat-8",
"proj:epsg": 32756,
"view:sun_elevation": -37.30791534,
"view:off_nadir": 0,
"height": 2500,
"width": 2500
},
"bbox": [
149.57574,
-34.25796,
152.15194,
-32.07915
],
"collection": "test-collection",
"assets": {
"ANG": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ANG.txt",
"type": "text/plain",
"title": "Angle Coefficients File",
"description": "Collection 2 Level-1 Angle Coefficients File (ANG)"
},
"SR_B1": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B1.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Coastal/Aerosol Band (B1)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B1",
"common_name": "coastal",
"center_wavelength": 0.44,
"full_width_half_max": 0.02
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"SR_B2": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B2.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Blue Band (B2)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B2",
"common_name": "blue",
"center_wavelength": 0.48,
"full_width_half_max": 0.06
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"SR_B3": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B3.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Green Band (B3)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B3",
"common_name": "green",
"center_wavelength": 0.56,
"full_width_half_max": 0.06
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"SR_B4": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B4.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Red Band (B4)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B4",
"common_name": "red",
"center_wavelength": 0.65,
"full_width_half_max": 0.04
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"SR_B5": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B5.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Near Infrared Band 0.8 (B5)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B5",
"common_name": "nir08",
"center_wavelength": 0.86,
"full_width_half_max": 0.03
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"SR_B6": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B6.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Short-wave Infrared Band 1.6 (B6)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B6",
"common_name": "swir16",
"center_wavelength": 1.6,
"full_width_half_max": 0.08
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"SR_B7": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_SR_B7.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Short-wave Infrared Band 2.2 (B7)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B7",
"common_name": "swir22",
"center_wavelength": 2.2,
"full_width_half_max": 0.2
}
],
"proj:shape": [
7731,
7591
],
"description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"ST_QA": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_QA.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Surface Temperature Quality Assessment Band",
"proj:shape": [
7731,
7591
],
"description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"ST_B10": {
"gsd": 100,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_B10.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Surface Temperature Band (B10)",
"eo:bands": [
{
"gsd": 100,
"name": "ST_B10",
"common_name": "lwir11",
"center_wavelength": 10.9,
"full_width_half_max": 0.8
}
],
"proj:shape": [
7731,
7591
],
"description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"MTL.txt": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.txt",
"type": "text/plain",
"title": "Product Metadata File",
"description": "Collection 2 Level-1 Product Metadata File (MTL)"
},
"MTL.xml": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_MTL.xml",
"type": "application/xml",
"title": "Product Metadata File (xml)",
"description": "Collection 2 Level-1 Product Metadata File (xml)"
},
"ST_DRAD": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_DRAD.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Downwelled Radiance Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_DRAD",
"description": "downwelled radiance"
}
],
"proj:shape": [
7731,
7591
],
"description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"ST_EMIS": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMIS.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Emissivity Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_EMIS",
"description": "emissivity"
}
],
"proj:shape": [
7731,
7591
],
"description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
},
"ST_EMSD": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2021/108/066/LC08_L2SP_108066_20210712_20210720_02_T1/LC08_L2SP_108066_20210712_20210720_02_T1_ST_EMSD.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Emissivity Standard Deviation Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_EMSD",
"description": "emissivity standard deviation"
}
],
"proj:shape": [
7731,
7591
],
"description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product",
"proj:transform": [
30,
0,
304185,
0,
-30,
-843585
]
}
},
"links": [
{
"href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043",
"rel": "self",
"type": "application/geo+json"
},
{
"href": "http://localhost:8081/collections/landsat-8-l1",
"rel": "parent",
"type": "application/json"
},
{
"href": "http://localhost:8081/collections/landsat-8-l1",
"rel": "collection",
"type": "application/json"
},
{
"href": "http://localhost:8081/",
"rel": "root",
"type": "application/json"
},
{
"href": "preview.html",
"rel": "preview",
"type": "text/html"
}
]
}

View File

@ -0,0 +1,646 @@
{
"type": "Feature",
"id": "test_item2",
"stac_version": "1.0.0",
"stac_extensions": [
"https://stac-extensions.github.io/eo/v1.0.0/schema.json",
"https://stac-extensions.github.io/projection/v1.0.0/schema.json"
],
"bbox": [
-123.37257493384075,
46.35430508465464,
-120.21745704411174,
48.51504491534536
],
"links": [
{
"rel": "collection",
"type": "application/json",
"href": "http://localhost:8081/api/stac/v1/collections/landsat-8-c2-l2"
},
{
"rel": "parent",
"type": "application/json",
"href": "http://localhost:8081/api/stac/v1/collections/landsat-8-c2-l2"
},
{
"rel": "root",
"type": "application/json",
"href": "http://localhost:8081/api/stac/v1/"
},
{
"rel": "self",
"type": "application/geo+json",
"href": "http://localhost:8081/api/stac/v1/collections/landsat-8-c2-l2/items/LC08_L2SP_046027_20200908_02_T1"
},
{
"rel": "alternate",
"type": "application/json",
"title": "tiles",
"href": "http://localhost:8081/api/stac/v1/collections/landsat-8-c2-l2/items/LC08_L2SP_046027_20200908_02_T1/tiles"
},
{
"rel": "alternate",
"href": "https://landsatlook.usgs.gov/stac-browser/collection02/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_02_T1",
"type": "text/html",
"title": "USGS stac-browser page"
},
{
"rel": "preview",
"href": "http://localhost:8081/api/data/v1/item/map?collection=landsat-8-c2-l2&item=LC08_L2SP_046027_20200908_02_T1",
"title": "Map of item",
"type": "text/html"
}
],
"assets": {
"ANG": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ANG.txt",
"type": "text/plain",
"title": "Angle Coefficients File",
"description": "Collection 2 Level-1 Angle Coefficients File (ANG)"
},
"SR_B1": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B1.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Coastal/Aerosol Band (B1)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B1",
"common_name": "coastal",
"center_wavelength": 0.44,
"full_width_half_max": 0.02
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Coastal/Aerosol Band (B1) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"SR_B2": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B2.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Blue Band (B2)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B2",
"common_name": "blue",
"center_wavelength": 0.48,
"full_width_half_max": 0.06
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Blue Band (B2) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"SR_B3": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B3.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Green Band (B3)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B3",
"common_name": "green",
"center_wavelength": 0.56,
"full_width_half_max": 0.06
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Green Band (B3) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"SR_B4": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B4.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Red Band (B4)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B4",
"common_name": "red",
"center_wavelength": 0.65,
"full_width_half_max": 0.04
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Red Band (B4) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"SR_B5": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B5.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Near Infrared Band 0.8 (B5)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B5",
"common_name": "nir08",
"center_wavelength": 0.86,
"full_width_half_max": 0.03
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Near Infrared Band 0.8 (B5) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"SR_B6": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B6.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Short-wave Infrared Band 1.6 (B6)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B6",
"common_name": "swir16",
"center_wavelength": 1.6,
"full_width_half_max": 0.08
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Short-wave Infrared Band 1.6 (B6) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"SR_B7": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_B7.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Short-wave Infrared Band 2.2 (B7)",
"eo:bands": [
{
"gsd": 30,
"name": "SR_B7",
"common_name": "swir22",
"center_wavelength": 2.2,
"full_width_half_max": 0.2
}
],
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Short-wave Infrared Band 2.2 (B7) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_QA": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_QA.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Surface Temperature Quality Assessment Band",
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Surface Temperature Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_B10": {
"gsd": 100,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_B10.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Surface Temperature Band (B10)",
"eo:bands": [
{
"gsd": 100,
"name": "ST_B10",
"common_name": "lwir11",
"center_wavelength": 10.9,
"full_width_half_max": 0.8
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Surface Temperature Band (B10) Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"MTL.txt": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_MTL.txt",
"type": "text/plain",
"title": "Product Metadata File",
"description": "Collection 2 Level-1 Product Metadata File (MTL)"
},
"MTL.xml": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_MTL.xml",
"type": "application/xml",
"title": "Product Metadata File (xml)",
"description": "Collection 2 Level-1 Product Metadata File (xml)"
},
"ST_DRAD": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_DRAD.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Downwelled Radiance Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_DRAD",
"description": "downwelled radiance"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Downwelled Radiance Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_EMIS": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_EMIS.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Emissivity Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_EMIS",
"description": "emissivity"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Emissivity Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_EMSD": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_EMSD.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Emissivity Standard Deviation Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_EMSD",
"description": "emissivity standard deviation"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Emissivity Standard Deviation Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_TRAD": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_TRAD.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Thermal Radiance Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_TRAD",
"description": "thermal radiance"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Thermal Radiance Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_URAD": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_URAD.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Upwelled Radiance Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_URAD",
"description": "upwelled radiance"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Upwelled Radiance Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"MTL.json": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_MTL.json",
"type": "application/json",
"title": "Product Metadata File (json)",
"description": "Collection 2 Level-1 Product Metadata File (json)"
},
"QA_PIXEL": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_QA_PIXEL.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Pixel Quality Assessment Band",
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-1 Pixel Quality Assessment Band",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_ATRAN": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_ATRAN.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Atmospheric Transmittance Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_ATRAN",
"description": "atmospheric transmission"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Atmospheric Transmittance Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"ST_CDIST": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_ST_CDIST.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Cloud Distance Band",
"eo:bands": [
{
"gsd": 30,
"name": "ST_CDIST",
"description": "distance to nearest cloud"
}
],
"proj:shape": [
7891,
7771
],
"description": "Landsat Collection 2 Level-2 Cloud Distance Band Surface Temperature Product",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"QA_RADSAT": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_QA_RADSAT.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Radiometric Saturation Quality Assessment Band",
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-1 Radiometric Saturation Quality Assessment Band",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"thumbnail": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_thumb_small.jpeg",
"type": "image/jpeg",
"title": "Thumbnail image"
},
"SR_QA_AEROSOL": {
"gsd": 30,
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_SR_QA_AEROSOL.TIF",
"type": "image/tiff; application=geotiff; profile=cloud-optimized",
"title": "Aerosol Quality Analysis Band",
"proj:shape": [
7891,
7771
],
"description": "Collection 2 Level-2 Aerosol Quality Analysis Band (ANG) Surface Reflectance",
"proj:transform": [
30,
0,
472485,
0,
-30,
5373615
]
},
"reduced_resolution_browse": {
"href": "https://landsateuwest.blob.core.windows.net/landsat-c2/level-2/standard/oli-tirs/2020/046/027/LC08_L2SP_046027_20200908_20200919_02_T1/LC08_L2SP_046027_20200908_20200919_02_T1_thumb_large.jpeg",
"type": "image/jpeg",
"title": "Reduced resolution browse image"
},
"tilejson": {
"title": "TileJSON with default rendering",
"href": "https://planetarycomputer.microsoft.com/api/data/v1/item/tilejson.json?collection=landsat-8-c2-l2&items=LC08_L2SP_046027_20200908_02_T1&assets=SR_B4,SR_B3,SR_B2&color_formula=gamma+RGB+2.7%2C+saturation+1.5%2C+sigmoidal+RGB+15+0.55",
"type": "application/json",
"roles": [
"tiles"
]
},
"rendered_preview": {
"title": "Rendered preview",
"rel": "preview",
"href": "https://planetarycomputer.microsoft.com/api/data/v1/item/preview.png?collection=landsat-8-c2-l2&items=LC08_L2SP_046027_20200908_02_T1&assets=SR_B4,SR_B3,SR_B2&color_formula=gamma+RGB+2.7%2C+saturation+1.5%2C+sigmoidal+RGB+15+0.55",
"roles": [
"overview"
],
"type": "image/png"
}
},
"geometry": {
"type": "Polygon",
"coordinates": [
[
[
-122.73659863,
48.512551
],
[
-120.21828301,
48.09736515
],
[
-120.85665503,
46.35688928
],
[
-123.37063967,
46.78158223
],
[
-122.73659863,
48.512551
]
]
]
},
"collection": "test-collection",
"properties": {
"datetime": "2020-09-08T18:55:51.575595Z",
"platform": "landsat-8",
"proj:bbox": [
472485,
5136885,
705615,
5373615
],
"proj:epsg": 32610,
"description": "Landsat Collection 2 Level-2 Surface Reflectance Product",
"instruments": [
"oli",
"tirs"
],
"eo:cloud_cover": 0.19,
"view:off_nadir": 0,
"landsat:wrs_row": "027",
"landsat:scene_id": "LC80460272020252LGN00",
"landsat:wrs_path": "046",
"landsat:wrs_type": "2",
"view:sun_azimuth": 155.2327918,
"view:sun_elevation": 45.33819766,
"landsat:cloud_cover_land": 0.21,
"landsat:processing_level": "L2SP",
"landsat:collection_number": "02",
"landsat:collection_category": "T1"
}
}

View File

@ -0,0 +1,541 @@
from typing import Callable, Optional
import pystac
import pytest
from stac_pydantic import Collection
from ..conftest import requires_pgstac_0_9_2
async def test_create_collection(app_client, load_test_data: Callable):
    """POST a collection, then GET it back and verify a lossless round-trip."""
    in_json = load_test_data("test_collection.json")
    in_coll = Collection.model_validate(in_json)
    resp = await app_client.post(
        "/collections",
        json=in_json,
    )
    assert resp.status_code == 201
    post_coll = Collection.model_validate(resp.json())
    # links are server-generated, so they are excluded from the equality check
    assert in_coll.model_dump(exclude={"links"}) == post_coll.model_dump(
        exclude={"links"}
    )
    resp = await app_client.get(f"/collections/{post_coll.id}")
    assert resp.status_code == 200
    get_coll = Collection.model_validate(resp.json())
    assert post_coll.model_dump(exclude={"links"}) == get_coll.model_dump(
        exclude={"links"}
    )
    post_coll = post_coll.model_dump(mode="json")
    get_coll = get_coll.model_dump(mode="json")
    # the self link must be identical between the POST and GET responses
    post_self_link = next(
        (link for link in post_coll["links"] if link["rel"] == "self"), None
    )
    get_self_link = next(
        (link for link in get_coll["links"] if link["rel"] == "self"), None
    )
    assert post_self_link is not None and get_self_link is not None
    assert post_self_link["href"] == get_self_link["href"]
async def test_update_collection(app_client, load_test_data, load_test_collection):
    """PUT an updated collection and verify the change is persisted on GET."""
    in_coll = load_test_collection
    in_coll["keywords"].append("newkeyword")
    resp = await app_client.put(f"/collections/{in_coll['id']}", json=in_coll)
    assert resp.status_code == 200
    put_coll = Collection.model_validate(resp.json())
    resp = await app_client.get(f"/collections/{in_coll['id']}")
    assert resp.status_code == 200
    get_coll = Collection.model_validate(resp.json())
    in_coll = Collection(**in_coll)
    assert in_coll.model_dump(exclude={"links"}) == get_coll.model_dump(exclude={"links"})
    assert "newkeyword" in get_coll.keywords
    get_coll = get_coll.model_dump(mode="json")
    put_coll = put_coll.model_dump(mode="json")
    # the self link must be stable across PUT and GET responses
    put_self_link = next(
        (link for link in put_coll["links"] if link["rel"] == "self"), None
    )
    get_self_link = next(
        (link for link in get_coll["links"] if link["rel"] == "self"), None
    )
    assert put_self_link is not None and get_self_link is not None
    assert put_self_link["href"] == get_self_link["href"]
async def test_delete_collection(
    app_client, load_test_data: Callable, load_test_collection
):
    """Deleting a collection succeeds and subsequent GETs return 404."""
    collection_id = load_test_collection["id"]
    delete_resp = await app_client.delete(f"/collections/{collection_id}")
    assert delete_resp.status_code == 200
    get_resp = await app_client.get(f"/collections/{collection_id}")
    assert get_resp.status_code == 404
async def test_create_collection_conflict(app_client, load_test_data: Callable):
    """Creating the same collection twice yields 409 Conflict on the second POST."""
    in_json = load_test_data("test_collection.json")
    Collection.model_validate(in_json)
    resp = await app_client.post(
        "/collections",
        json=in_json,
    )
    assert resp.status_code == 201
    Collection.model_validate(resp.json())
    # second creation with the same id must be rejected
    resp = await app_client.post(
        "/collections",
        json=in_json,
    )
    assert resp.status_code == 409
async def test_delete_missing_collection(
    app_client,
):
    """DELETE on the collections root (no id in the path) returns 405."""
    response = await app_client.delete("/collections")
    assert response.status_code == 405
async def test_update_new_collection(app_client, load_test_collection):
    """PUT to a collection id that was never created returns 404."""
    coll = load_test_collection
    coll["id"] = "test-updatenew"
    response = await app_client.put(f"/collections/{coll['id']}", json=coll)
    assert response.status_code == 404
async def test_nocollections(
    app_client,
):
    """With nothing ingested, the collections list reports zero results."""
    response = await app_client.get("/collections")
    assert response.status_code == 200
    assert response.json()["numberReturned"] == 0
async def test_returns_valid_collection(app_client, load_test_data):
    """A created collection validates against the STAC spec via pystac."""
    in_json = load_test_data("test_collection.json")
    resp = await app_client.post(
        "/collections",
        json=in_json,
    )
    assert resp.status_code == 201
    resp = await app_client.get(f"/collections/{in_json['id']}")
    assert resp.status_code == 200
    resp_json = resp.json()
    # Mock root to allow validation
    mock_root = pystac.Catalog(
        id="test", description="test desc", href="https://example.com"
    )
    collection = pystac.Collection.from_dict(
        resp_json, root=mock_root, preserve_dict=False
    )
    collection.validate()
async def test_returns_valid_links_in_collections(app_client, load_test_data):
    """Test links from listing collections"""
    in_json = load_test_data("test_collection.json")
    resp = await app_client.post(
        "/collections",
        json=in_json,
    )
    assert resp.status_code == 201
    # Get collection by ID
    resp = await app_client.get(f"/collections/{in_json['id']}")
    assert resp.status_code == 200
    resp_json = resp.json()
    # Mock root to allow validation
    mock_root = pystac.Catalog(
        id="test", description="test desc", href="https://example.com"
    )
    collection = pystac.Collection.from_dict(
        resp_json, root=mock_root, preserve_dict=False
    )
    assert collection.validate()
    # List collections
    resp = await app_client.get("/collections")
    assert resp.status_code == 200
    resp_json = resp.json()
    assert resp.json()["numberReturned"]
    assert resp.json()["numberMatched"]
    collections = resp_json["collections"]
    # Find collection in list by ID
    single_coll = next(coll for coll in collections if coll["id"] == in_json["id"])
    is_coll_from_list_valid = False
    single_coll_mocked_link: Optional[pystac.Collection] = None
    if single_coll is not None:
        single_coll_mocked_link = pystac.Collection.from_dict(
            single_coll, root=mock_root, preserve_dict=False
        )
        is_coll_from_list_valid = single_coll_mocked_link.validate()
    assert is_coll_from_list_valid
    # Check links from the collection GET and list
    # (every link on the single GET must also appear in the listing entry)
    assert [
        i
        for i in collection.to_dict()["links"]
        if i not in single_coll_mocked_link.to_dict()["links"]
    ] == []
async def test_returns_license_link(app_client, load_test_collection):
    """The collection's links must include a rel="license" entry."""
    collection = load_test_collection
    response = await app_client.get(f"/collections/{collection['id']}")
    assert response.status_code == 200
    rel_types = {link["rel"] for link in response.json()["links"]}
    assert "license" in rel_types
@pytest.mark.asyncio
async def test_get_collection_forwarded_header(app_client, load_test_collection):
    """Navigation links are rewritten per the RFC 7239 Forwarded header."""
    coll = load_test_collection
    resp = await app_client.get(
        f"/collections/{coll['id']}",
        headers={"Forwarded": "proto=https;host=test:1234"},
    )
    for link in [
        link
        for link in resp.json()["links"]
        if link["rel"] in ["items", "parent", "root", "self"]
    ]:
        assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_get_collection_x_forwarded_headers(app_client, load_test_collection):
    """X-Forwarded-Proto/Port override the scheme and port in response links."""
    coll = load_test_collection
    resp = await app_client.get(
        f"/collections/{coll['id']}",
        headers={
            "X-Forwarded-Port": "1234",
            "X-Forwarded-Proto": "https",
        },
    )
    # NOTE(review): only proto/port are forwarded here; the "test" host in the
    # expected prefix presumably comes from the client's base_url — verify fixture
    for link in [
        link
        for link in resp.json()["links"]
        if link["rel"] in ["items", "parent", "root", "self"]
    ]:
        assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_get_collection_duplicate_forwarded_headers(
    app_client, load_test_collection
):
    """When both header styles are sent, the Forwarded header wins over X-Forwarded-*."""
    coll = load_test_collection
    resp = await app_client.get(
        f"/collections/{coll['id']}",
        headers={
            "Forwarded": "proto=https;host=test:1234",
            "X-Forwarded-Port": "4321",
            "X-Forwarded-Proto": "http",
        },
    )
    # links reflect the Forwarded values, not the conflicting X-Forwarded-* ones
    for link in [
        link
        for link in resp.json()["links"]
        if link["rel"] in ["items", "parent", "root", "self"]
    ]:
        assert link["href"].startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_get_collections_forwarded_header(app_client, load_test_collection):
    """Links in the collections listing honor the Forwarded header."""
    response = await app_client.get(
        "/collections",
        headers={"Forwarded": "proto=https;host=test:1234"},
    )
    hrefs = [link["href"] for link in response.json()["links"]]
    for href in hrefs:
        assert href.startswith("https://test:1234/")
@pytest.mark.asyncio
async def test_get_collections_queryables_links(app_client, load_test_collection):
    """Both the listing and the single-collection response link to Queryables."""
    resp = await app_client.get(
        "/collections",
    )
    assert "Queryables" in [
        link.get("title") for link in resp.json()["collections"][0]["links"]
    ]
    collection_id = resp.json()["collections"][0]["id"]
    resp = await app_client.get(
        f"/collections/{collection_id}",
    )
    assert "Queryables" in [link.get("title") for link in resp.json()["links"]]
@pytest.mark.asyncio
async def test_get_collections_search(
    app_client, load_test_collection, load_test2_collection
):
    """Datetime filters on /collections return only the matching collection."""
    # this search should only return a single collection
    resp = await app_client.get(
        "/collections",
        params={"datetime": "2010-01-01T00:00:00Z/2010-01-02T00:00:00Z"},
    )
    assert len(resp.json()["collections"]) == 1
    assert resp.json()["collections"][0]["id"] == load_test2_collection.id
    # same with this one
    resp = await app_client.get(
        "/collections",
        params={"datetime": "2020-01-01T00:00:00Z/.."},
    )
    assert len(resp.json()["collections"]) == 1
    assert resp.json()["collections"][0]["id"] == load_test_collection["id"]
    # no params should return both collections
    resp = await app_client.get(
        "/collections",
    )
    assert len(resp.json()["collections"]) == 2
@requires_pgstac_0_9_2
@pytest.mark.asyncio
async def test_collection_search_freetext(
    app_client, load_test_collection, load_test2_collection
):
    """Free-text search (q=) matches one collection and misses on gibberish."""
    # free-text
    resp = await app_client.get(
        "/collections",
        params={"q": "temperature"},
    )
    assert resp.json()["numberReturned"] == 1
    assert resp.json()["numberMatched"] == 1
    assert len(resp.json()["collections"]) == 1
    assert resp.json()["collections"][0]["id"] == load_test2_collection.id
    # a term appearing in neither collection returns nothing
    resp = await app_client.get(
        "/collections",
        params={"q": "nosuchthing"},
    )
    assert len(resp.json()["collections"]) == 0
@requires_pgstac_0_9_2
@pytest.mark.asyncio
async def test_all_collections_with_pagination(app_client, load_test_data):
    """With 12 collections, the default page holds 10 and exposes a next link."""
    data = load_test_data("test_collection.json")
    collection_id = data["id"]
    for ii in range(0, 12):
        data["id"] = collection_id + f"_{ii}"
        resp = await app_client.post(
            "/collections",
            json=data,
        )
        assert resp.status_code == 201
    # default page size is 10, so 10 of 12 come back with a `next` link
    resp = await app_client.get("/collections")
    assert resp.json()["numberReturned"] == 10
    assert resp.json()["numberMatched"] == 12
    cols = resp.json()["collections"]
    assert len(cols) == 10
    links = resp.json()["links"]
    assert len(links) == 3
    assert {"root", "self", "next"} == {link["rel"] for link in links}
    # raising the limit to 12 returns everything and drops the `next` link
    resp = await app_client.get("/collections", params={"limit": 12})
    assert resp.json()["numberReturned"] == 12
    assert resp.json()["numberMatched"] == 12
    cols = resp.json()["collections"]
    assert len(cols) == 12
    links = resp.json()["links"]
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
@requires_pgstac_0_9_2
@pytest.mark.asyncio
async def test_all_collections_without_pagination(app_client_no_ext, load_test_data):
    """Without the pagination extension, all collections come back in one page."""
    data = load_test_data("test_collection.json")
    collection_id = data["id"]
    for ii in range(0, 12):
        data["id"] = collection_id + f"_{ii}"
        resp = await app_client_no_ext.post(
            "/collections",
            json=data,
        )
        assert resp.status_code == 201
    resp = await app_client_no_ext.get("/collections")
    assert resp.json()["numberReturned"] == 12
    assert resp.json()["numberMatched"] == 12
    cols = resp.json()["collections"]
    assert len(cols) == 12
    links = resp.json()["links"]
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
@requires_pgstac_0_9_2
@pytest.mark.asyncio
async def test_get_collections_search_pagination(
    app_client, load_test_collection, load_test2_collection
):
    """Exercise limit/offset pagination and next/previous links over 2 collections."""
    resp = await app_client.get("/collections")
    assert resp.json()["numberReturned"] == 2
    assert resp.json()["numberMatched"] == 2
    cols = resp.json()["collections"]
    assert len(cols) == 2
    links = resp.json()["links"]
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
    ###################
    # limit should be positive
    resp = await app_client.get("/collections", params={"limit": 0})
    assert resp.status_code == 400
    ###################
    # limit=1, should have a `next` link
    resp = await app_client.get(
        "/collections",
        params={"limit": 1},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 1
    assert cols[0]["id"] == load_test_collection["id"]
    assert len(links) == 3
    assert {"root", "self", "next"} == {link["rel"] for link in links}
    next_link = list(filter(lambda link: link["rel"] == "next", links))[0]
    assert next_link["href"].endswith("?limit=1&offset=1")
    ###################
    # limit=2, there should not be a next link
    resp = await app_client.get(
        "/collections",
        params={"limit": 2},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 2
    assert cols[0]["id"] == load_test_collection["id"]
    assert cols[1]["id"] == load_test2_collection.id
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
    ###################
    # limit=3, there should not be a next/previous link
    resp = await app_client.get(
        "/collections",
        params={"limit": 3},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 2
    assert cols[0]["id"] == load_test_collection["id"]
    assert cols[1]["id"] == load_test2_collection.id
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
    ###################
    # offset=3, because there are 2 collections, we should not have `next` or `prev` links
    resp = await app_client.get(
        "/collections",
        params={"offset": 3},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 0
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
    ###################
    # offset=3,limit=1
    resp = await app_client.get(
        "/collections",
        params={"limit": 1, "offset": 3},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 0
    assert len(links) == 3
    assert {"root", "self", "previous"} == {link["rel"] for link in links}
    prev_link = list(filter(lambda link: link["rel"] == "previous", links))[0]
    assert prev_link["href"].endswith("?limit=1&offset=2")
    ###################
    # limit=2, offset=3, there should not be a next link
    resp = await app_client.get(
        "/collections",
        params={"limit": 2, "offset": 3},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 0
    assert len(links) == 3
    assert {"root", "self", "previous"} == {link["rel"] for link in links}
    prev_link = list(filter(lambda link: link["rel"] == "previous", links))[0]
    assert prev_link["href"].endswith("?limit=2&offset=1")
    ###################
    # offset=1,limit=1 should have a `previous` link
    resp = await app_client.get(
        "/collections",
        params={"offset": 1, "limit": 1},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 1
    assert cols[0]["id"] == load_test2_collection.id
    assert len(links) == 3
    assert {"root", "self", "previous"} == {link["rel"] for link in links}
    prev_link = list(filter(lambda link: link["rel"] == "previous", links))[0]
    assert "offset" in prev_link["href"]
    ###################
    # offset=0, should not have next/previous link
    resp = await app_client.get(
        "/collections",
        params={"offset": 0},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 2
    assert len(links) == 2
    assert {"root", "self"} == {link["rel"] for link in links}
@requires_pgstac_0_9_2
@pytest.mark.xfail(strict=False)
@pytest.mark.asyncio
async def test_get_collections_search_offset_1(
    app_client, load_test_collection, load_test2_collection
):
    """Document a known pgstac bug: no `prev` link when only offset is set (xfail)."""
    # BUG: pgstac doesn't return a `prev` link when limit is not set
    # offset=1, should have a `previous` link
    resp = await app_client.get(
        "/collections",
        params={"offset": 1},
    )
    cols = resp.json()["collections"]
    links = resp.json()["links"]
    assert len(cols) == 1
    assert cols[0]["id"] == load_test2_collection.id
    assert len(links) == 3
    assert {"root", "self", "previous"} == {link["rel"] for link in links}
    prev_link = list(filter(lambda link: link["rel"] == "previous", links))[0]
    # offset=0 should not be in the previous link (because it's useless)
    assert "offset" not in prev_link["href"]

View File

@ -0,0 +1,76 @@
import urllib.parse
from typing import Dict, Optional
import pytest
@pytest.fixture(scope="function")
async def response(app_client):
    """Fetch the landing page ("/") once per test."""
    return await app_client.get("/")
@pytest.fixture(scope="function")
async def response_json(response) -> Dict:
    """Landing-page response body parsed as JSON."""
    return response.json()
def get_link(landing_page, rel_type, method: Optional[str] = None):
    """Return the first link with the given rel (and optional method), or None."""
    for candidate in landing_page["links"]:
        if candidate["rel"] != rel_type:
            continue
        if method and candidate.get("method") != method:
            continue
        return candidate
    return None
def test_landing_page_health(response):
    """Landing page responds 200 with a JSON content type."""
    assert response.status_code == 200
    assert response.headers["content-type"] == "application/json"
# Parameters for test_landing_page_links test below.
# Each tuple has the following values (in this order):
# - Rel type of link to test
# - Expected MIME/Media Type
# - Expected relative path
link_tests = [
    ("root", "application/json", "/"),
    ("conformance", "application/json", "/conformance"),
    ("service-doc", "text/html", "/api.html"),
    ("service-desc", "application/vnd.oai.openapi+json;version=3.0", "/api"),
]
@pytest.mark.parametrize("rel_type,expected_media_type,expected_path", link_tests)
async def test_landing_page_links(
    response_json: Dict, app_client, app, rel_type, expected_media_type, expected_path
):
    """Each advertised landing-page link has the expected type and resolves with 200."""
    link = get_link(response_json, rel_type)
    assert link is not None, f"Missing {rel_type} link in landing page"
    assert link.get("type") == expected_media_type
    link_path = urllib.parse.urlsplit(link.get("href")).path
    assert link_path == app.state.router_prefix + expected_path
    # follow the last path segment relative to the client base and expect success
    resp = await app_client.get(link_path.rsplit("/", 1)[-1])
    assert resp.status_code == 200
# This endpoint currently returns a 404 for empty result sets, but testing for this response
# code here seems meaningless since it would be the same as if the endpoint did not exist. Once
# https://github.com/stac-utils/stac-fastapi/pull/227 has been merged we can add this to the
# parameterized tests above.
def test_search_link(response_json: Dict, app):
    """The landing page advertises both GET and POST search links with geo+json type."""
    for search_link in [
        get_link(response_json, "search", "GET"),
        get_link(response_json, "search", "POST"),
    ]:
        assert search_link is not None
        assert search_link.get("type") == "application/geo+json"
        search_path = urllib.parse.urlsplit(search_link.get("href")).path
        assert search_path == app.state.router_prefix + "/search"

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,80 @@
from httpx import ASGITransport, AsyncClient
from stac_fastapi.api.app import StacApi
from stac_fastapi.pgstac.config import PostgresSettings, Settings
from stac_fastapi.pgstac.core import CoreCrudClient, health_check
from stac_fastapi.pgstac.db import close_db_connection, connect_to_db
async def test_ping_no_param(app_client):
    """The /_mgmt/ping endpoint answers 200 with a PONG message.

    Args:
        app_client (TestClient): mocked client fixture
    """
    response = await app_client.get("/_mgmt/ping")
    assert response.status_code == 200
    assert response.json() == {"message": "PONG"}
async def test_health(app_client):
    """
    Test health endpoint: overall and pgstac status are UP and a version is reported.

    Args:
        app_client (TestClient): mocked client fixture
    """
    res = await app_client.get("/_mgmt/health")
    assert res.status_code == 200
    body = res.json()
    assert body["status"] == "UP"
    assert body["pgstac"]["status"] == "UP"
    assert body["pgstac"]["pgstac_version"]
async def test_health_503(database):
    """Test health endpoint error paths: missing lifespan, then a closed pool."""
    # No lifespan so no `get_connection` is application state
    api = StacApi(
        settings=Settings(testing=True),
        extensions=[],
        client=CoreCrudClient(),
        health_check=health_check,
    )
    async with AsyncClient(
        transport=ASGITransport(app=api.app), base_url="http://test"
    ) as client:
        res = await client.get("/_mgmt/health")
        assert res.status_code == 503
        body = res.json()
        assert body["status"] == "DOWN"
        assert body["lifespan"]["status"] == "DOWN"
        assert body["lifespan"]["message"] == "application lifespan wasn't run"
        assert body["pgstac"]["status"] == "DOWN"
        assert body["pgstac"]["message"] == "Could not connect to database"
    # No lifespan so no `get_connection` is application state
    postgres_settings = PostgresSettings(
        pguser=database.user,
        pgpassword=database.password,
        pghost=database.host,
        pgport=database.port,
        pgdatabase=database.dbname,
    )
    # Create connection pool but close it just after
    await connect_to_db(api.app, postgres_settings=postgres_settings)
    await close_db_connection(api.app)
    async with AsyncClient(
        transport=ASGITransport(app=api.app), base_url="http://test"
    ) as client:
        res = await client.get("/_mgmt/health")
        assert res.status_code == 503
        body = res.json()
        assert body["status"] == "DOWN"
        # lifespan ran this time, so only the pgstac pool is reported DOWN
        assert body["lifespan"]["status"] == "UP"
        assert body["pgstac"]["status"] == "DOWN"
        assert body["pgstac"]["message"] == "pool is closed"

View File

@ -0,0 +1,76 @@
"""test config."""
import warnings
import pytest
from pydantic import ValidationError
from stac_fastapi.pgstac.config import PostgresSettings
async def test_pg_settings_with_env(monkeypatch):
    """Test PostgresSettings with PG* environment variables"""
    monkeypatch.setenv("PGUSER", "username")
    monkeypatch.setenv("PGPASSWORD", "password")
    monkeypatch.setenv("PGHOST", "0.0.0.0")
    monkeypatch.setenv("PGPORT", "1111")
    monkeypatch.setenv("PGDATABASE", "pgstac")
    # _env_file=None keeps a local .env file from leaking into the test
    assert PostgresSettings(_env_file=None)
async def test_pg_settings_with_env_postgres(monkeypatch):
    """Test PostgresSettings with POSTGRES_* environment variables"""
    monkeypatch.setenv("POSTGRES_USER", "username")
    monkeypatch.setenv("POSTGRES_PASS", "password")
    monkeypatch.setenv("POSTGRES_HOST_READER", "0.0.0.0")
    monkeypatch.setenv("POSTGRES_HOST_WRITER", "0.0.0.0")
    monkeypatch.setenv("POSTGRES_PORT", "1111")
    monkeypatch.setenv("POSTGRES_DBNAME", "pgstac")
    # the legacy variables still work, but each of the 6 emits a DeprecationWarning
    with pytest.warns(DeprecationWarning) as record:
        assert PostgresSettings(_env_file=None)
    assert len(record) == 6
async def test_pg_settings_attributes(monkeypatch):
    """Test PostgresSettings with attributes"""
    # modern pg* attributes must not warn at all
    with warnings.catch_warnings():
        warnings.simplefilter("error")
        settings = PostgresSettings(
            pguser="user",
            pgpassword="password",
            pghost="0.0.0.0",
            pgport=1111,
            pgdatabase="pgstac",
            _env_file=None,
        )
        assert settings.pghost == "0.0.0.0"
    # Compat, should work with old style postgres_ attributes
    # Should raise warnings on set attribute
    with pytest.warns(DeprecationWarning) as record:
        settings = PostgresSettings(
            postgres_user="user",
            postgres_pass="password",
            postgres_host_reader="0.0.0.0",
            postgres_port=1111,
            postgres_dbname="pgstac",
            _env_file=None,
        )
        assert settings.pghost == "0.0.0.0"
    assert len(record) == 5
    # Should raise warning when accessing deprecated attributes
    with pytest.warns(DeprecationWarning):
        assert settings.postgres_host_reader == "0.0.0.0"
    # conflicting reader/writer hosts are rejected outright
    with pytest.raises(ValidationError):
        with pytest.warns(DeprecationWarning) as record:
            PostgresSettings(
                postgres_user="user",
                postgres_pass="password",
                postgres_host_reader="0.0.0.0",
                postgres_host_writer="1.1.1.1",
                postgres_port=1111,
                postgres_dbname="pgstac",
                _env_file=None,
            )

94
stac_mosaic_api.py Normal file
View File

@ -0,0 +1,94 @@
from fastapi import FastAPI, Query, Body, HTTPException
from typing import List
from pydantic import BaseModel
from shapely.geometry import Polygon, mapping
from rio_tiler.io import Reader
from rio_tiler.mosaic import mosaic_reader
import requests
from fastapi.responses import StreamingResponse
from io import BytesIO
# FastAPI application instance
app = FastAPI(title="STAC Mosaic API")
# STAC service configuration
STAC_API_URL = "http://localhost:8082"  # base URL of the local STAC API
COLLECTION_ID = "geosat1"  # collection queried by every endpoint below
# ---------- 数据模型 ----------
class BBoxQuery(BaseModel):
    """Request body for /bbox_mosaic."""

    bbox: List[float]  # [minx, miny, maxx, maxy]
class GeoJSONPolygon(BaseModel):
    """Minimal GeoJSON polygon geometry, the request body for /polygon_mosaic."""

    type: str  # expected to be "Polygon" — not validated here
    coordinates: List  # nested ring coordinate arrays
# ---------- 实用函数 ----------
def fetch_items_by_bbox(bbox: List[float]) -> List[str]:
    """Query the STAC API for items intersecting *bbox* and return their image hrefs.

    Args:
        bbox: [minx, miny, maxx, maxy] — assumed WGS84, matching the STAC API.

    Returns:
        hrefs of each feature's "image" asset; features lacking that asset
        are skipped instead of raising KeyError.

    Raises:
        requests.HTTPError: if the STAC API responds with an error status.
        requests.Timeout: if the STAC API does not answer within 30s.
    """
    url = f"{STAC_API_URL}/collections/{COLLECTION_ID}/items"
    params = {"bbox": ",".join(map(str, bbox)),
              # "limit": 10
              }
    # explicit timeout so a hung STAC service cannot block a request handler forever
    r = requests.get(url, params=params, timeout=30)
    r.raise_for_status()
    data = r.json()
    return [
        feat["assets"]["image"]["href"]
        for feat in data.get("features", [])
        if "image" in feat.get("assets", {})
    ]
def fetch_items_by_polygon(geojson: dict) -> List[str]:
    """POST a STAC /search for items intersecting *geojson* and return image hrefs.

    Args:
        geojson: a GeoJSON geometry dict used as the `intersects` filter.

    Returns:
        hrefs of each feature's "image" asset; features lacking that asset
        are skipped instead of raising KeyError.

    Raises:
        requests.HTTPError: if the STAC API responds with an error status.
        requests.Timeout: if the STAC API does not answer within 30s.
    """
    url = f"{STAC_API_URL}/search"
    headers = {"Content-Type": "application/json"}
    body = {
        "collections": [COLLECTION_ID],
        "intersects": geojson,
        # "limit": 10
    }
    # explicit timeout so a hung STAC service cannot block a request handler forever
    r = requests.post(url, headers=headers, json=body, timeout=30)
    r.raise_for_status()
    data = r.json()
    return [
        feat["assets"]["image"]["href"]
        for feat in data.get("features", [])
        if "image" in feat.get("assets", {})
    ]
def render_mosaic(image_paths: List[str], reader_fn, geo: object) -> BytesIO:
    """Mosaic *image_paths* over *geo* using *reader_fn* and return a PNG buffer."""
    mosaic_image, _assets_used = mosaic_reader(image_paths, reader_fn, geo)
    png_bytes = mosaic_image.render(img_format="PNG")
    buffer = BytesIO(png_bytes)
    buffer.seek(0)
    return buffer
# ---------- API 路由 ----------
@app.post("/bbox_mosaic", summary="基于 bbox 的图像镶嵌")
def bbox_mosaic(query: BBoxQuery):
    """Mosaic all collection images intersecting the bbox and stream a PNG.

    Returns 404 when no imagery intersects the bbox, 500 on any other failure.
    """
    try:
        image_paths = fetch_items_by_bbox(query.bbox)
        if not image_paths:
            raise HTTPException(status_code=404, detail="未查询到图像")

        def part_reader(src_path, part):
            # read only the bbox window of each COG, capped at 1024px
            with Reader(src_path) as cog:
                return cog.part(part, max_size=1024)

        img_buf = render_mosaic(image_paths, part_reader, query.bbox)
        return StreamingResponse(img_buf, media_type="image/png")
    except HTTPException:
        # BUG FIX: without this re-raise, the 404 above was caught by the
        # generic handler below and converted into a 500
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/polygon_mosaic", summary="基于多边形的图像镶嵌")
def polygon_mosaic(polygon: GeoJSONPolygon = Body(...)):
    """Mosaic all collection images intersecting the polygon and stream a PNG.

    Returns 404 when no imagery intersects the polygon, 500 on any other failure.
    """
    try:
        image_paths = fetch_items_by_polygon(polygon.model_dump())
        if not image_paths:
            raise HTTPException(status_code=404, detail="未查询到图像")

        def feature_reader(src_path, feat):
            # clip each COG to the polygon, capped at 1024px
            with Reader(src_path) as cog:
                return cog.feature(feat, max_size=1024)

        img_buf = render_mosaic(image_paths, feature_reader, polygon.model_dump())
        return StreamingResponse(img_buf, media_type="image/png")
    except HTTPException:
        # BUG FIX: without this re-raise, the 404 above was caught by the
        # generic handler below and converted into a 500
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))