Merge pull request #111 from alliander-opensource/feature/workflows
Run tests in multiple python versions
TonyXiang8787 committed Dec 21, 2022
2 parents 9c4883d + 937cc88 · commit 6693220
Showing 6 changed files with 35 additions and 14 deletions.
31 changes: 23 additions & 8 deletions .github/workflows/build-test-and-sonar.yml
@@ -9,6 +9,7 @@ on:
push:
branches:
- main
- 'release/**'
# run pipeline on pull request
pull_request:
# Allows you to run this workflow manually from the Actions tab
@@ -17,6 +18,7 @@
jobs:

build-python:
if: (github.event_name == 'push') || (github.event_name == 'workflow_dispatch') || !startsWith(github.head_ref, 'release')
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
@@ -45,7 +47,7 @@ jobs:

sonar-cloud:
# only run sonar server in push event or pull request event from own repo
if: ${{ (github.event_name == 'push') || (github.event.pull_request.head.repo.owner.login == 'alliander-opensource') }}
if: (github.event_name == 'push') || (github.event_name == 'workflow_dispatch') || (!startsWith(github.head_ref, 'release') && (github.event.pull_request.head.repo.owner.login == 'alliander-opensource'))
permissions:
contents: write
runs-on: ubuntu-latest
@@ -81,16 +83,22 @@ jobs:

unit-tests:
needs: build-python
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: ["3.8", "3.9", "3.10", "3.11"]
fail-fast: false
runs-on: ${{ matrix.os }}

steps:

- name: Checkout source code
uses: actions/checkout@v3

- name: Setup Python 3.10
- name: Setup Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: "3.10"
python-version: ${{ matrix.python }}

- name: Load built wheel file
uses: actions/download-artifact@v3
@@ -106,16 +114,22 @@

validation-tests:
needs: build-python
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python: ["3.8", "3.9", "3.10", "3.11"]
fail-fast: false
runs-on: ${{ matrix.os }}

steps:

- name: Checkout source code
uses: actions/checkout@v3

- name: Setup Python 3.10
- name: Setup Python ${{ matrix.python }}
uses: actions/setup-python@v4
with:
python-version: "3.10"
python-version: ${{ matrix.python }}

- name: Load built wheel file
uses: actions/download-artifact@v3
@@ -135,7 +149,6 @@ jobs:
- unit-tests
- validation-tests
- sonar-cloud
if: (github.event_name == 'push') || (github.event_name == 'workflow_dispatch')
permissions:
contents: write
env:
@@ -155,12 +168,14 @@ jobs:
path: wheelhouse/

- name: Upload wheels
if: (github.event_name == 'push') || (github.event_name == 'workflow_dispatch')
run: |
pip install twine
echo "Publish to PyPI..."
twine upload --verbose wheelhouse/*
- name: Release
if: (github.event_name == 'push') || (github.event_name == 'workflow_dispatch')
uses: softprops/action-gh-release@v1
with:
files: |
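
As an aside (not part of the commit): the new `strategy` block turns each test job into one run per operating-system/Python combination. A minimal sketch, assuming the matrix values shown above, of what the matrix expands to:

```python
# Illustration only: enumerate the runs produced by the new test matrix.
from itertools import product

operating_systems = ["ubuntu-latest", "macos-latest", "windows-latest"]
python_versions = ["3.8", "3.9", "3.10", "3.11"]

for os_label, py in product(operating_systems, python_versions):
    print(f"unit-tests ({os_label}, Python {py})")

# 3 operating systems x 4 Python versions = 12 runs per test job;
# fail-fast: false lets the remaining runs finish even if one of them fails.
```
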
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -27,7 +27,7 @@ classifiers=[
"Operating System :: MacOS",
"Topic :: Scientific/Engineering :: Physics",
]
requires-python = ">=3.9"
requires-python = ">=3.8"
dependencies = [
"numpy>=1.20",
"openpyxl",
10 changes: 8 additions & 2 deletions src/power_grid_model_io/converters/pgm_json_converter.py
@@ -233,8 +233,14 @@ def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfoLookup
# For example: {"node": [{"id": 0, ...}, {"id": 1, ...}], "line": [{"id": 2, ...}]}
return {
component: [
{attribute: obj[attribute].tolist() for attribute in objects.dtype.names if not is_nan(obj[attribute])}
| extra_info.get(obj["id"], {})
dict(
**{
attribute: obj[attribute].tolist()
for attribute in objects.dtype.names
if not is_nan(obj[attribute])
},
**extra_info.get(obj["id"], {}),
)
for obj in objects
]
for component, objects in data.items()
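
For context (not part of the commit): the `|` dict union operator used here before only exists on Python 3.9+, so lowering `requires-python` to `>=3.8` means merging the serialized attributes with the extra info via `dict(**a, **b)` instead. A minimal sketch with made-up values:

```python
# Illustration only; the attribute and extra_info values below are made up.
attributes = {"id": 0, "u_rated": 10500.0}  # serialized component attributes
extra = {"name": "node_0"}                  # extra_info entry for this object id

# Python 3.9+ only; raises TypeError on Python 3.8:
# merged = attributes | extra

# Equivalent merge that also works on Python 3.8:
merged = dict(**attributes, **extra)
print(merged)  # {'id': 0, 'u_rated': 10500.0, 'name': 'node_0'}
```

The same substitution appears in tests/unit/converters/test_pgm_json_converter.py below.
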
2 changes: 1 addition & 1 deletion src/power_grid_model_io/converters/tabular_converter.py
@@ -109,7 +109,7 @@ def _parse_data(self, data: TabularData, data_type: str, extra_info: Optional[Ex
# For each table in the mapping
for table in self._mapping.tables():
if table not in data or len(data[table]) == 0:
continue
continue # pragma: no cover (bug in python 3.9)
for component, attributes in self._mapping.instances(table=table):
component_data = self._convert_table_to_component(
data=data,
2 changes: 1 addition & 1 deletion src/power_grid_model_io/mappings/unit_mapping.py
@@ -61,7 +61,7 @@ def set_mapping(self, mapping: Units):
raise ValueError(
f"Invalid unit definition for '{unit}': 1{unit} cannot be {multiplier}{si_unit}"
)
continue
continue # pragma: no cover (bug in python 3.9)
self._mapping[unit] = (multiplier, si_unit)
self._log.debug(
"Set unit definitions", n_units=len(self._si_units | self._mapping.keys()), n_si_units=len(self._si_units)
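
A side note (not part of the commit): `# pragma: no cover` tells coverage.py to exclude a line from its coverage report. The two `continue` statements above get the pragma because of what the in-code comments call a Python 3.9 bug, presumably these lines not being traced on 3.9 and therefore being reported as uncovered. A minimal, self-contained sketch of the pattern:

```python
# Illustration only: exclude a continue statement from coverage measurement.
def keep_valid(values):
    result = []
    for value in values:
        if value is None:
            continue  # pragma: no cover (bug in python 3.9)
        result.append(value)
    return result

print(keep_valid([1, None, 2]))  # prints [1, 2]
```
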
2 changes: 1 addition & 1 deletion tests/unit/converters/test_pgm_json_converter.py
@@ -134,7 +134,7 @@ def test_is_batch(
# Sparse batch dataset
assert converter._is_batch(pgm_sparse_batch_data)
# Wrong dataset with both single and batch data
combined_input_batch = pgm_input_data | pgm_batch_data
combined_input_batch = dict(**pgm_input_data, **pgm_batch_data)
with pytest.raises(ValueError, match=r"Mixed non-batch data with batch data \(line\)."):
converter._is_batch(combined_input_batch)

