Skip to content

Commit d77e9f7

Browse files
authored
build: add github workflows (#2)
Adds the GitHub workflows to be executed in the CI: test workflow (linters, mypy, tests on Linux and Windows); deploy workflow (publish to PyPI); build workflow (create Docker image and push to registry). NOTE: in the first execution of the deploy workflow, the step "Check that the current version isn't already on PyPI" has to be skipped, because the PyPI project first has to be created.
1 parent f9cbca9 commit d77e9f7

14 files changed

Lines changed: 321 additions & 50 deletions

File tree

.github/workflows/build.yml

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
---
# Workflow: build and publish the project's Docker image to the GitHub
# Container registry (GHCR) on every push to main, or on manual dispatch.
name: Create and publish a Docker image

on:
  push:
    branches: ["main"]
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    # Sets the permissions granted to the GITHUB_TOKEN for the actions in this job.
    permissions:
      contents: read
      packages: write
      attestations: write
      id-token: write

    # Uses the docker/login-action action to log in to the Container registry
    # using the account and password that will publish the packages. Once published,
    # the packages are scoped to the account defined here.
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # This step uses docker/metadata-action to extract tags and labels that will be applied to the
      # specified image. The id "meta" allows the output of this step to be referenced in a subsequent step.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      # Use the docker/build-push-action action to build the image based on Dockerfile. If the build succeeds,
      # it pushes the image to GitHub Packages. It uses the tags and labels parameters to tag and label the image
      # with the output from the "meta" step.
      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      # Attest build provenance so consumers can verify where/how the image was built.
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v2
        with:
          # NOTE: fixed missing space before "}}" for consistency with the
          # other ${{ ... }} expressions in this file.
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true

.github/workflows/deploy.yml

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
---
# Workflow: build the distribution and publish it to PyPI when a version tag
# (v*) is pushed, or on manual dispatch.
name: Publish to PyPI

on:
  push:
    tags:
      - "v*"
  workflow_dispatch:

jobs:
  build-n-publish:
    name: Build and publish to PyPI
    runs-on: ubuntu-latest

    steps:
      - name: Checkout source
        uses: actions/checkout@v4
        with:
          # Get history and tags for SCM versioning to work
          fetch-depth: 0
      - name: Install the latest version of uv with cache enabled
        uses: astral-sh/setup-uv@v3
        with:
          version: "latest"
          enable-cache: true
          cache-dependency-glob: ""
      # TODO: uncomment after stac-fastapi-eodag first release
      # (the PyPI project must exist before this check can succeed)
      # - name: Check that the current version isn't already on PyPI
      #   run: |
      #     if [ "$(./get_pypi_latest_version.sh)" != "$(git describe --tags)" ]
      #     then
      #       echo "Current version is not on PyPI, proceed with building"
      #     else
      #       echo "Current version is the latest version uploaded to PyPI"
      #       exit 1
      #     fi

      - name: Check long description is OK for PyPI with tox
        run: uvx --with tox-uv --with sphinx tox -e pypi

      - name: Build a binary wheel and a source tarball
        run: uvx --from build pyproject-build --sdist --wheel

      - name: Publish distribution to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}

.github/workflows/test.yml

Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
---
# Workflow: linting (pre-commit + mypy), the test matrix (Linux/Windows,
# oldest/newest supported Python), test-result publishing, and a PyPI
# long-description check. Runs on pushes/PRs to main, weekly, or manually.
name: Run Linting and Tests

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  schedule:
    - cron: "0 7 * * 1"
  workflow_dispatch:

jobs:
  lint:
    name: Linting (pre-commit and mypy)
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repo
        uses: actions/checkout@v4
        with:
          # Get history and tags for SCM versioning to work
          fetch-depth: 0
      - name: Install the latest version of uv with cache enabled
        uses: astral-sh/setup-uv@v3
        with:
          version: "latest"
          enable-cache: true
          cache-dependency-glob: ""
      - name: Run linters with tox
        run: uvx --python 3.9 --with tox-uv tox -e linters

  tests:
    name: Test it!
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # Versions are quoted so YAML never parses them as floats
        # (e.g. an unquoted 3.10 would become 3.1).
        python-version: ["3.9", "3.13"]
        os: [ubuntu-latest, windows-latest]
    steps:
      - name: Checkout the repo
        uses: actions/checkout@v4
        with:
          # Get history and tags for SCM versioning to work
          fetch-depth: 0
      - name: Install the latest version of uv with cache enabled
        uses: astral-sh/setup-uv@v3
        with:
          version: "latest"
          enable-cache: true
          cache-dependency-glob: ""
      - name: Test with tox
        run: uvx --python ${{ matrix.python-version }} --with tox-uv --with tox-gh-actions tox
      - name: Upload Unit Test Results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: unit-test-results-python${{ matrix.python-version }}-${{ matrix.os }}
          path: |
            test-reports/junit-report.xml
            test-reports/coverage.xml

  publish-test-results:
    name: "Publish Unit Tests Results"
    needs: tests
    runs-on: ubuntu-latest
    if: always()

    steps:
      - name: Download Artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Publish Unit Test Results
        uses: EnricoMi/publish-unit-test-result-action@v2
        continue-on-error: true
        with:
          files: artifacts/*/junit-report.xml

      - name: Produce the coverage report for Ubuntu
        uses: insightsengineering/coverage-action@v2
        with:
          # Path to the Cobertura XML report.
          path: artifacts/unit-test-results-python3.13-ubuntu-latest/coverage.xml
          # Minimum total coverage, if you want the
          # workflow to enforce it as a standard.
          # This has no effect if the `fail` arg is set to `false`.
          threshold: 70
          # Fail the workflow if the minimum code coverage
          # requirements are not satisfied.
          fail: false
          # Publish the rendered output as a PR comment
          publish: true
          # Create a coverage diff report.
          diff: true
          # Branch to diff against.
          # Compare the current coverage to the coverage
          # determined on this branch.
          diff-branch: main
          # This is where the coverage reports for the
          # `diff-branch` are stored.
          # Branch is created if it doesn't already exist.
          diff-storage: _xml_coverage_reports
          # A custom title that can be added to the code
          # coverage summary in the PR comment.
          coverage-summary-title: "Code Coverage"
          # Make the code coverage report togglable
          togglable-report: true

  check-pypi:
    name: Long description check for PyPI
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repo
        uses: actions/checkout@v4
        with:
          # Get history and tags for SCM versioning to work
          fetch-depth: 0
      - name: Install the latest version of uv with cache enabled
        uses: astral-sh/setup-uv@v3
        with:
          version: "latest"
          enable-cache: true
          cache-dependency-glob: ""
      - name: Testing with tox and sphinx (to have rst2html.py utility available)
        run: uvx --with tox-uv --with sphinx tox -e pypi

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,7 @@ nosetests.xml
7171
coverage.xml
7272
*,cover
7373
.hypothesis/
74+
test-reports*
7475

7576
# Translations
7677
*.mo
@@ -137,5 +138,4 @@ docs/api/*
137138
node_modules
138139

139140
# Helm
140-
141141
helm/**/charts/**

get_pypi_latest_version.sh

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
#!/usr/bin/env bash
# Print the latest version of stac-fastapi-eodag published on PyPI.
#
# Exits non-zero if the request fails, so callers (e.g. the deploy workflow's
# version check) see a clean error instead of a confusing JSON parse failure.
set -euo pipefail

# -f: fail on HTTP errors (don't pipe an error page into python)
# -sS: silence the progress meter but keep error messages
# -L: follow redirects
curl -fsSL https://pypi.org/pypi/stac-fastapi-eodag/json |
  python -c "import sys, json; print(json.load(sys.stdin)['info']['version']);"

pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@ dev = [
5353
"pytest-cov",
5454
"pytest-mock",
5555
"pytest-socket",
56+
"pytest-html",
5657
"responses",
5758
"stdlib-list",
5859
"tox",
@@ -74,7 +75,7 @@ module = [
7475
ignore_missing_imports = true
7576

7677
[tool.pytest.ini_options]
77-
addopts = "--disable-socket --allow-unix-socket"
78+
addopts = "--disable-socket --allow-unix-socket --allow-hosts=localhost"
7879
asyncio_mode = "auto"
7980
asyncio_default_fixture_loop_scope = "session"
8081
testpaths = [

stac_fastapi/eodag/eodag_types/queryables.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,11 +37,13 @@ class QueryablesGetParams(BaseModel):
3737

3838
@field_validator("start_datetime", "end_datetime")
3939
@classmethod
40-
def validate_start_end_datetime(cls, values: Optional[str]) -> Optional[str]:
40+
def validate_start_end_datetime(cls, values: Optional[list[str]]) -> Optional[str]:
4141
"""
4242
datetimes must be valid RFC3339 strings
4343
we assume that only one start_datetime/end_datetime filter is used
4444
"""
45+
if not values:
46+
raise ValueError
4547
try:
4648
parse_single_date(values[0])
4749
return values[0]
@@ -50,11 +52,13 @@ def validate_start_end_datetime(cls, values: Optional[str]) -> Optional[str]:
5052

5153
@field_validator("datetime")
5254
@classmethod
53-
def validate_datetime(cls, values: Optional[str]) -> Optional[str]:
55+
def validate_datetime(cls, values: Optional[list[str]]) -> Optional[str]:
5456
"""
5557
datetimes must be either single datetime or range separated by "/",
5658
we assume that only one datetime filter is used
5759
"""
60+
if not values:
61+
raise ValueError
5862
try:
5963
str_to_interval(values[0])
6064
return values[0]

stac_fastapi/eodag/extensions/data_download.py

Lines changed: 11 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
import attr
2828
from eodag.api.core import EODataAccessGateway
2929
from eodag.api.product._product import EOProduct
30-
from eodag.api.product.metadata_mapping import ONLINE_STATUS, STAGING_STATUS
30+
from eodag.api.product.metadata_mapping import ONLINE_STATUS, STAGING_STATUS, get_metadata_path_value
3131
from fastapi import APIRouter, FastAPI, Path, Request
3232
from fastapi.responses import StreamingResponse
3333
from stac_fastapi.api.errors import NotFoundError
@@ -132,25 +132,24 @@ def get_data(
132132
# (the same one as order ID) to make error message clearer
133133
product.properties["title"] = product.properties["id"]
134134
# "orderLink" property is set to auth provider conf matching url to create its auth plugin
135-
product.properties["orderLink"] = product.properties["orderStatusLink"] = (
136-
product.downloader.config.order_on_response["metadata_mapping"]["orderStatusLink"].format(
137-
orderId=item_id
138-
)
139-
)
135+
status_link_metadata = product.downloader.config.order_on_response["metadata_mapping"]["orderStatusLink"]
136+
product.properties["orderLink"] = product.properties["orderStatusLink"] = get_metadata_path_value(
137+
status_link_metadata
138+
).format(orderId=item_id)
140139

141-
if product.downloader.config.order_on_response["metadata_mapping"].get("searchLink"):
142-
product.properties["searchLink"] = product.downloader.config.order_on_response["metadata_mapping"][
143-
"searchLink"
144-
].format(orderId=item_id)
140+
search_link_metadata = product.downloader.config.order_on_response["metadata_mapping"].get("searchLink")
141+
if search_link_metadata:
142+
product.properties["searchLink"] = get_metadata_path_value(search_link_metadata).format(orderId=item_id)
145143

146-
if not getattr(product.downloader, "_order_status", None):
144+
order_status_method = getattr(product.downloader, "_order_status", None)
145+
if not order_status_method:
147146
raise MisconfiguredError("Product downloader must have the order status request method")
148147

149148
auth = product.downloader_auth.authenticate() if product.downloader_auth else None
150149

151150
logger.debug("Poll product")
152151
try:
153-
product.downloader._order_status(product=product, auth=auth)
152+
order_status_method(product=product, auth=auth)
154153
# when a NotAvailableError is caught, it means the product is not ready and still needs to be polled
155154
except NotAvailableError:
156155
product.properties["storageStatus"] = STAGING_STATUS

0 commit comments

Comments
 (0)