diff --git a/.commitlintrc.mjs b/.commitlintrc.mjs
new file mode 100644
index 000000000..ae4ffd168
--- /dev/null
+++ b/.commitlintrc.mjs
@@ -0,0 +1,42 @@
+export default {
+ extends: [
+ '@commitlint/config-conventional'
+ ],
+ ignores: [
+ (message) => message.includes('Signed-off-by: dependabot[bot]')
+ ],
+ rules: {
+ 'header-max-length': [
+ 2,
+ 'always',
+ 72
+ ],
+ 'body-max-line-length': [
+ 2,
+ 'always',
+ 72
+ ],
+ 'body-leading-blank': [
+ 2,
+ 'always'
+ ],
+ 'type-enum': [
+ 2,
+ 'always',
+ [
+ 'build',
+ 'chore',
+ 'ci',
+ 'deps',
+ 'docs',
+ 'feat',
+ 'fix',
+ 'perf',
+ 'refactor',
+ 'revert',
+ 'style',
+ 'test'
+ ]
+ ]
+ }
+};
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 635e62ddb..25cf4fb31 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,76 +1,20 @@
-name: Continuous Integration
+name: CI
on:
push:
branches: [ develop, master ]
pull_request:
branches: [ develop, master ]
-jobs:
- build:
- runs-on: ubuntu-22.04
- strategy:
- matrix:
- python: [ "3.10" , "3.11" ]
- env:
- DATABASE_URL: postgis://postgres:postgres@localhost/smbackend
- SECRET_KEY: test-secret
- services:
- postgres:
- image: postgis/postgis:13-3.3
- env:
- POSTGRES_HOST_AUTH_METHOD: trust
- options: >-
- --health-cmd pg_isready
- --health-interval 10s
- --health-timeout 5s
- --health-retries 5
- ports:
- - 5432:5432
- steps:
- - name: Checkout
- uses: actions/checkout@v3
- with:
- fetch-depth: 0
-
- - name: Setup Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python }}
- cache: pip
-
- - name: Install required Ubuntu packages
- run: |
- sudo apt-get update && sudo apt-get install gdal-bin voikko-fi libvoikko-dev
-
- - name: Create needed postgis extensions
- run: |
- psql -h localhost -U postgres template1 -c 'create extension hstore;create extension pg_trgm;'
-
- - name: Install PyPI dependencies
- run: |
- python -m pip install --upgrade pip
- pip install codecov -r requirements.txt -r requirements-dev.txt
+ workflow_dispatch:
- - name: Run Python side code neatness tests
- run: |
- flake8
- black --check .
- isort . -c
-
- - name: Run pytest code functionality tests
- run: |
- pytest -ra -vvv --cov=.
-
- - name: Upload coverage reports to Codecov
- uses: codecov/codecov-action@v3
-
- # Without this workaround Sonar reports a warning about an incorrect source path
- - name: Override coverage report source path for Sonar
- if: github.event_name == 'push'
- run: sed -i 's@'$GITHUB_WORKSPACE'@/github/workspace/@g' coverage.xml
-
- - name: SonarCloud Scan
- if: github.event_name == 'push'
- uses: SonarSource/sonarcloud-github-action@master
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+jobs:
+ common:
+ uses: City-of-Helsinki/.github/.github/workflows/ci-django-api.yml@main
+ secrets: inherit
+ with:
+ python-version: "3.10"
+ postgres-major-version: 13
+ use-postgis: true
+ extra-commands: |
+ sudo apt-get install voikko-fi libvoikko-dev
+ psql postgresql://test_user:test_password@localhost/template1 -c 'CREATE EXTENSION IF NOT EXISTS hstore;'
+ psql postgresql://test_user:test_password@localhost/template1 -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..212fb0f2d
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,32 @@
+# Keep tool versions in sync with the versions in requirements-dev.txt
+minimum_pre_commit_version: 2.13.0
+default_install_hook_types: [pre-commit, commit-msg]
+default_language_version:
+ python: python3
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ - id: check-toml
+ - id: check-added-large-files
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.8.6 # Sync with requirements-dev.in
+ hooks:
+ - id: ruff
+ name: ruff lint
+ - id: ruff-format
+ name: ruff format
+ args: [ --check ]
+ - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
+ rev: v9.13.0
+ hooks:
+ - id: commitlint
+ stages: [commit-msg, manual]
+ additional_dependencies: ["@commitlint/config-conventional"]
+ - repo: https://github.com/koalaman/shellcheck-precommit
+ rev: v0.10.0
+ hooks:
+ - id: shellcheck
diff --git a/README.md b/README.md
index b37fb7a88..44c3e9240 100644
--- a/README.md
+++ b/README.md
@@ -21,11 +21,11 @@ This will startup and bind local postgres, servicemap backend and servicemap fro
### Run migrations
-When building the application for the first time, migrations need to be run. This can be done with the following command:
+When building the application for the first time, migrations need to be run. This can be done with the following
+command:
`docker compose exec servicemap python manage.py migrate`
-
### Importing data
To import data for development usage and automatically index it, run command:
@@ -40,18 +40,19 @@ Installation without Docker
1. First, install the necessary Debian packages.
-* libpython3.10-dev
+* libpython3.10-dev
* python3.10-distutils
-* virtualenvwrapper
-* libyaml-dev
-* libxml2-dev
+* virtualenvwrapper
+* libyaml-dev
+* libxml2-dev
* libxslt1-dev
* voikko-fi
* libvoikko-dev
2. Clone the repository.
-Use pyenv to manage python version and create a virtualenv with virtualenvwrapper.
-The virtualenv that will be created and used here is named "servicemap"
+ Use pyenv to manage python version and create a virtualenv with virtualenvwrapper.
+ The virtualenv that will be created and used here is named "servicemap"
+
```
pyenv install -v 3.10.1
pyenv virtualenv 3.10.1 smbackend
@@ -60,25 +61,27 @@ pyenv virtualenvwrapper
mkvirtualenv servicemap
```
-Installation and usage info for pyenv, pyenv-virtualenvwrapper and
- virtualenvwrapper can be found here:
+Installation and usage info for pyenv, pyenv-virtualenvwrapper and
+virtualenvwrapper can be found here:
https://github.com/pyenv/pyenv-virtualenv
https://github.com/pyenv/pyenv-virtualenvwrapper
https://virtualenvwrapper.readthedocs.io/en/latest/install.html
-
3. Install pip requirements.
-Be sure to load the virtualenv before installing the requirements:
-Example with virtualenv named servicemap as created in example above.
-```workon servicemap```
-Install the requirements:
-```pip install -r requirements.txt -r requirements-dev.txt```
-
- If this error occurs:
-```
+ Be sure to load the virtualenv before installing the requirements:
+ Example with virtualenv named servicemap as created in example above.
+ ```workon servicemap```
+ Install the requirements:
+ ```pip install -r requirements.txt -r requirements-dev.txt```
+
+If this error occurs:
+
+```
ImportError: cannot import name 'html5lib' from 'pip._vendor' (/home/johndoe/.virtualenvs/servicemap/lib/python3.10/site-packages/pip/_vendor/__init__.py)
```
-Try installing latest pip.
+
+Try installing latest pip.
+
```
curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
```
@@ -90,13 +93,14 @@ Please note, we recommend PostgreSQL version 13 or higher.
Local setup:
First, ensure that the collation fi_FI.UTF-8 exists by entering the
postgresql shell with the psql command.
+
```
sudo su postgres
psql
SELECT * FROM pg_collation where collname like '%fi%';
```
-There should be a `collname` fi_FI.UTF-8 . If not, you must create the collation.
+There should be a `collname` fi_FI.UTF-8 . If not, you must create the collation.
```
sudo su postgres
@@ -105,7 +109,7 @@ ALTER database template1 is_template=false;
DROP database template1;
CREATE DATABASE template1 WITH OWNER = postgres ENCODING = 'UTF8' TABLESPACE = pg_default LC_COLLATE = 'fi_FI.UTF-8' LC_CTYPE = 'fi_FI.UTF-8' CONNECTION LIMIT = -1 TEMPLATE template0;
ALTER database template1 is_template=true;
-\q
+\q
psql template1 -c 'CREATE EXTENSION IF NOT EXISTS postgis;'
psql template1 -c 'CREATE EXTENSION IF NOT EXISTS hstore;'
psql template1 -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
@@ -116,19 +120,21 @@ createdb -O servicemap -T template1 -l fi_FI.UTF-8 -E utf8 servicemap
```
ERROR: could not open extension control file "/usr/share/postgresql/14/extension/postgis.control": No such file or directory
```
+
Solution for ubuntu and Postgresql 14:
+
```
sudo apt install postgis postgresql-14-postgis-3
```
Docker setup (modify as needed, starts the database on local port 8765):
+
```
docker run --name servicemap-psql -e POSTGRES_USE.R=servicemap -e POSTGRES_PASSWORD=servicemap -p 8765:5432 -d mdillon/postgis
# you'll need the hstore extension enabled:
echo "CREATE EXTENSION hstore;" | docker exec -i servicemap-psql psql -U servicemap
```
-
5. Create database tables.
```
@@ -137,11 +143,11 @@ echo "CREATE EXTENSION hstore;" | docker exec -i servicemap-psql psql -U service
If this command fails with: `django.core.exceptions.ImproperlyConfigured: GEOS is required and has not been detected.`,
then install the GEOS library. On a Mac this can be achieved with HomeBrew:
+
```
brew install geos
```
-
6. Import geo data.
```
@@ -151,23 +157,27 @@ brew install geos
```
### Importing addresses from geo-search
+
```
./manage.py geo_import uusimaa --addresses
./manage.py update_postal_code_areas
```
+
Note, this imports all the addresses from Uusimaa-region and might take ~6 hours.
Postal code area datas can be enriched from geo-search using `update_postal_code_areas` -management-command.
-### Indexing search columns
-The search columns must be indexed after the first time data is imported or geo-search addresses are imported or addresses are enriched with geo-search data.
+### Indexing search columns
+
+The search columns must be indexed after the first time data is imported or geo-search addresses are imported or
+addresses are enriched with geo-search data.
+
```
./manage.py index_search_columns
```
-
7. Redis
-Redis is used for caching and as a message broker for Celery.
-Install Redis. Ubuntu: `sudo apt-get install redis-server`
+ Redis is used for caching and as a message broker for Celery.
+ Install Redis. Ubuntu: `sudo apt-get install redis-server`
8. Celery
@@ -175,12 +185,15 @@ Install and run a message broker such as Redis or RabbitMQ.
Redis is recommended as it is also used for caching.
Configure the message broker in the environment variable "CELERY_BROKER_URL".
Start a Celery worker to handle asynchronous tasks locally with command:
+
```
celery -A smbackend worker -l INFO
```
+
Note, in production environment the celery worker can be run as a daemon.
https://docs.celeryproject.org/en/stable/userguide/daemonizing.html#daemonizing
Start Celery beat to handle scheduled periodic tasks with command:
+
```
celery -A smbackend beat -l INFO
```
@@ -189,48 +202,72 @@ Updating requirements
---------------------
pip-tools is used to manage requirements. To update the requirements, run:
+
```
pip-compile -U requirements.in
pip-compile -U requirements-dev.in
```
-Code formatting
+Code format
+-----------
+This project uses [Ruff](https://docs.astral.sh/ruff/) for code formatting and quality checking.
+
+Basic `ruff` commands:
+
+* lint: `ruff check`
+* apply safe lint fixes: `ruff check --fix`
+* check formatting: `ruff format --check`
+* format: `ruff format`
+
+[`pre-commit`](https://pre-commit.com/) can be used to install and
+run all the formatting tools as git hooks automatically before a
+commit.
+
+Commit message format
---------------------
-The code is formatted with black, flake8 and isort. To format the code, run:
-```
-isort .
-black .
-```
+New commit messages must adhere to the [Conventional Commits](https://www.conventionalcommits.org/)
+specification, and line length is limited to 72 characters.
+
+When [`pre-commit`](https://pre-commit.com/) is in use, [
+`commitlint`](https://github.com/conventional-changelog/commitlint)
+checks new commit messages for the correct format.
Observations
------------
Load the initial observation data with the command:
+
```
./scripts/import_observation_initial_data.sh
```
-
Troubleshooting
---------------
The error:
+
```
OSError: dlopen(/usr/local/lib/libgdal.dylib, 6): Symbol not found: _GEOSArea
```
+
Can be fixed by adding this to local_settings.py:
+
```python
GDAL_LIBRARY_PATH = "/usr/local/lib/libgdal.dylib"
import ctypes
+
ctypes.CDLL(GDAL_LIBRARY_PATH)
```
The error:
+
```
psycopg2.errors.UndefinedObject: operator class "gin_trgm_ops" does not exist for access method "gin"
```
+
Can be fixed by adding the pg_trgm extension to the database:
+
```
psql template1 -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
```
diff --git a/data/accessibility_rules.csv b/data/accessibility_rules.csv
index 96ddf6607..179dae53d 100644
--- a/data/accessibility_rules.csv
+++ b/data/accessibility_rules.csv
@@ -2866,4 +2866,4 @@ AND;;;;;;;;;;;;;;;
AND;;;;;;;;;;;;;;;
[89] INTERIOR.FIXED_INDUCTION_LOOP.assembly_hall = true;6;179;608;89;E;true;;;S;Juhlasalissa pitää olla kiinteä induktiosilmukka.;Ei kiinteää induktiosilmukkaa.;BIG;Juhlasalissa ei ole kiinteää induktiosilmukkaa.;Det finns ingen fast induktionsslinga i festsalen.;The festival hall does not have a fixed induction loop.
AND;;;;;;;;;;;;;;;
-[90] INTERIOR.FIXED_INDUCTION_LOOP.gym = true;6;180;609;90;E;true;;;S;Liikuntasalissa pitää olla kiinteä induktiosilmukka.;Ei kiinteää induktiosilmukkaa.;BIG;Liikuntasalissa ei ole kiinteää induktiosilmukkaa.;Det finns ingen fast induktionsslinga i idrottssalen.;The sports hall does not have a fixed induction loop.
\ No newline at end of file
+[90] INTERIOR.FIXED_INDUCTION_LOOP.gym = true;6;180;609;90;E;true;;;S;Liikuntasalissa pitää olla kiinteä induktiosilmukka.;Ei kiinteää induktiosilmukkaa.;BIG;Liikuntasalissa ei ole kiinteää induktiosilmukkaa.;Det finns ingen fast induktionsslinga i idrottssalen.;The sports hall does not have a fixed induction loop.
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index 91a32738e..6bbcf2e39 100755
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -27,7 +27,7 @@ else
exec uwsgi --plugin http,python3 --master --http :8000 \
--processes 4 --threads 1 \
--need-app \
- --mount ${URL_PREFIX:-/}=smbackend/wsgi.py \
+ --mount "${URL_PREFIX:-/}=smbackend/wsgi.py" \
--manage-script-name \
--die-on-term \
--strict \
diff --git a/observations/fixtures/maintenance_users.yaml b/observations/fixtures/maintenance_users.yaml
index 8d73761ee..d94d4c615 100644
--- a/observations/fixtures/maintenance_users.yaml
+++ b/observations/fixtures/maintenance_users.yaml
@@ -24,4 +24,4 @@
- model: 'observations.UserOrganization'
fields:
organization_id: 49
- user_id: 3
\ No newline at end of file
+ user_id: 3
diff --git a/observations/tests/data.py b/observations/tests/data.py
index dc219b3e5..9fe543392 100644
--- a/observations/tests/data.py
+++ b/observations/tests/data.py
@@ -1,4 +1,6 @@
-def observation_raw_data(observable_property_name, unit, allowed_values=set()):
+def observation_raw_data(observable_property_name, unit, allowed_values=None):
+ if allowed_values is None:
+ allowed_values = set()
if observable_property_name == "skiing_trail_condition":
for val in allowed_values:
yield dict(unit=unit.pk, value=val, property=observable_property_name)
diff --git a/observations/tests/fixtures.py b/observations/tests/fixtures.py
index 910653136..4fab15990 100644
--- a/observations/tests/fixtures.py
+++ b/observations/tests/fixtures.py
@@ -124,21 +124,30 @@ def observable_property(service, unit):
observation_type="observations.CategoricalObservation",
)
p.services.add(service)
- AllowedValue.objects.create(
- identifier="no_snow", name="No snow", description="There is no snow", property=p
- ),
- AllowedValue.objects.create(
- identifier="good",
- name="Good condition",
- description="The trail is in good condition",
- property=p,
- ),
- AllowedValue.objects.create(
- identifier="poor",
- name="Poor condition",
- description="Poor skiing condition",
- property=p,
- ),
+ (
+ AllowedValue.objects.create(
+ identifier="no_snow",
+ name="No snow",
+ description="There is no snow",
+ property=p,
+ ),
+ )
+ (
+ AllowedValue.objects.create(
+ identifier="good",
+ name="Good condition",
+ description="The trail is in good condition",
+ property=p,
+ ),
+ )
+ (
+ AllowedValue.objects.create(
+ identifier="poor",
+ name="Poor condition",
+ description="Poor skiing condition",
+ property=p,
+ ),
+ )
AllowedValue.objects.create(
identifier="closed",
name="Closed",
diff --git a/profiler/middleware.py b/profiler/middleware.py
index 543c96d5f..c6c06f517 100644
--- a/profiler/middleware.py
+++ b/profiler/middleware.py
@@ -1,5 +1,5 @@
try:
- import cProfile as profile
+ import cProfile as profile # noqa: N813
except ImportError:
import profile
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..263d1315b
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,31 @@
+[tool.ruff]
+target-version = "py39"  # TODO(review): CI/README target Python 3.10 — confirm py39 is intended
+
+[tool.ruff.lint]
+select = [
+ # Pyflakes
+ "F",
+ # pycodestyle
+ "E",
+ "W",
+ # isort
+ "I",
+ # pep8-naming
+ "N",
+ # flake8-bugbear without opinionated rules
+ "B0",
+ # flake8-pie
+ "PIE",
+ # flake8-print
+ "T20",
+]
+[tool.ruff.lint.extend-per-file-ignores]
+"*/migrations/*" = ["E501"]
+"*/tests/*" = ["E501"]
+"api/schemas.py" = ["N815"]
+"api/tests/test_api.py" = ["N815"]
+
+[tool.pytest.ini_options]
+DJANGO_SETTINGS_MODULE = "smbackend.settings"
+python_files = ["tests.py", "test_*.py", "*_tests.py"]
+filterwarnings = ["ignore:No directory at:UserWarning"]
diff --git a/requirements-dev.in b/requirements-dev.in
index f647e289d..49582370f 100644
--- a/requirements-dev.in
+++ b/requirements-dev.in
@@ -1,14 +1,11 @@
-c requirements.txt
urllib3<2
-black
-flake8
ipython
-isort
jedi
parso
pip-tools
-pep8-naming
pytest-django
pytest-cov
PyGithub
requests-mock
+ruff==0.8.6  # sync with .pre-commit-config.yaml
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 347219164..d79c57b90 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -6,8 +6,6 @@
#
asttokens==2.4.1
# via stack-data
-black==24.10.0
- # via -r requirements-dev.in
build==1.2.2.post1
# via pip-tools
certifi==2024.8.30
@@ -23,9 +21,7 @@ charset-normalizer==3.4.0
# -c requirements.txt
# requests
click==8.1.7
- # via
- # black
- # pip-tools
+ # via pip-tools
coverage==7.6.9
# via pytest-cov
cryptography==43.0.1
@@ -41,10 +37,6 @@ exceptiongroup==1.2.2
# pytest
executing==2.1.0
# via stack-data
-flake8==7.1.1
- # via
- # -r requirements-dev.in
- # pep8-naming
idna==3.10
# via
# -c requirements.txt
@@ -53,39 +45,24 @@ iniconfig==2.0.0
# via pytest
ipython==8.27.0
# via -r requirements-dev.in
-isort==5.13.2
- # via -r requirements-dev.in
jedi==0.19.1
# via
# -r requirements-dev.in
# ipython
matplotlib-inline==0.1.7
# via ipython
-mccabe==0.7.0
- # via flake8
-mypy-extensions==1.0.0
- # via black
packaging==24.2
# via
- # black
# build
# pytest
parso==0.8.4
# via
# -r requirements-dev.in
# jedi
-pathspec==0.12.1
- # via black
-pep8-naming==0.14.1
- # via -r requirements-dev.in
pexpect==4.9.0
# via ipython
pip-tools==7.4.1
# via -r requirements-dev.in
-platformdirs==4.3.6
- # via
- # -c requirements.txt
- # black
pluggy==1.5.0
# via pytest
prompt-toolkit==3.0.47
@@ -94,12 +71,8 @@ ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
-pycodestyle==2.12.1
- # via flake8
pycparser==2.22
# via cffi
-pyflakes==3.2.0
- # via flake8
pygithub==2.4.0
# via -r requirements-dev.in
pygments==2.18.0
@@ -127,6 +100,8 @@ requests==2.32.3
# requests-mock
requests-mock==1.12.1
# via -r requirements-dev.in
+ruff==0.8.6
+ # via -r requirements-dev.in
six==1.16.0
# via
# -c requirements.txt
@@ -135,7 +110,6 @@ stack-data==0.6.3
# via ipython
tomli==2.2.1
# via
- # black
# build
# coverage
# pip-tools
@@ -147,7 +121,6 @@ traitlets==5.14.3
typing-extensions==4.12.2
# via
# -c requirements.txt
- # black
# ipython
# pygithub
urllib3==1.26.20
diff --git a/scripts/import_observation_initial_data.sh b/scripts/import_observation_initial_data.sh
index df671043e..919c3a3f1 100755
--- a/scripts/import_observation_initial_data.sh
+++ b/scripts/import_observation_initial_data.sh
@@ -1,20 +1,20 @@
#!/bin/bash
-% export PYTHONIOENCODING=utf-8
+export PYTHONIOENCODING=utf-8
set -e
TIMESTAMP_FORMAT="+%Y-%m-%d %H:%M:%S"
ROOT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
-if [ -f $ROOT_PATH/local_update_config ]; then
- $ROOT_PATH/local_update_config
+if [ -f "$ROOT_PATH/local_update_config" ]; then
+ "$ROOT_PATH/local_update_config"
fi
echo ---------------------------------
echo "$(date "$TIMESTAMP_FORMAT") Importing observation initial data"
echo ---------------------------------
-cd $ROOT_PATH
+cd "$ROOT_PATH"
timeout 20m nice python manage.py loaddata observations/fixtures/maintenance_users.yaml 2>&1
timeout 20m nice python manage.py loaddata observations/fixtures/initial_observable_properties_common.yaml 2>&1
diff --git a/scripts/random_test_observations.py b/scripts/random_test_observations.py
index 25db104f7..5c1c5549e 100755
--- a/scripts/random_test_observations.py
+++ b/scripts/random_test_observations.py
@@ -47,7 +47,7 @@ def main(base_url):
headers={"Authorization": "Token " + os.environ["API_TOKEN"]},
)
if response.status_code != 201:
- print("error")
+ print("error") # noqa: T201
sys.stderr.write(response.text)
exit(1)
diff --git a/scripts/release_notes.py b/scripts/release_notes.py
index aa498d8fe..4bfcec476 100644
--- a/scripts/release_notes.py
+++ b/scripts/release_notes.py
@@ -8,9 +8,9 @@
def print_section(title, items):
if items:
- print(f"## {title}")
- for item in items:
- print(f"- {item.title} [(#{item.number})]({item.html_url})")
+ print(f"## {title}") # noqa: T201
+        for item in items:
+ print(f"- {item.title} [(#{item.number})]({item.html_url})") # noqa: T201
def create_release_notes(start_tag, end_tag):
@@ -41,7 +41,7 @@ def create_release_notes(start_tag, end_tag):
else:
other.append(p)
- print(f"# Release Notes - {end_tag}")
+ print(f"# Release Notes - {end_tag}") # noqa: T201
print_section("Features", features)
print_section("Fixes", fixes)
print_section("Improvements", improvements)
diff --git a/scripts/update.sh b/scripts/update.sh
index 1436a1e16..b963c93fa 100755
--- a/scripts/update.sh
+++ b/scripts/update.sh
@@ -6,15 +6,15 @@ set -e
TIMESTAMP_FORMAT="+%Y-%m-%d %H:%M:%S"
ROOT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
-if [ -f $ROOT_PATH/local_update_config ]; then
- $ROOT_PATH/local_update_config
+if [ -f "$ROOT_PATH/local_update_config" ]; then
+ "$ROOT_PATH/local_update_config"
fi
echo ---------------------------------
echo "$(date "$TIMESTAMP_FORMAT") Starting import"
echo ---------------------------------
-cd $ROOT_PATH
+cd "$ROOT_PATH"
timeout 20m nice python manage.py services_import_v4 departments services units entrances 2>&1
timeout 20m nice python manage.py lipas_import --muni-id=92 --muni-id=91 --muni-id=49 --muni-id=235 --muni-id=257 2>&1
diff --git a/services/accessibility.py b/services/accessibility.py
index aabae967c..310e81f77 100644
--- a/services/accessibility.py
+++ b/services/accessibility.py
@@ -41,7 +41,7 @@ def _parse(self):
self.tree = {}
mode_letters = "ABC"
for case, expression in tree.items():
- for mode in range(0, len(expression.messages["case_names"])):
+ for mode in range(len(expression.messages["case_names"])):
expression.set_mode(mode)
self.tree[str(case) + mode_letters[mode]] = expression.val()
diff --git a/services/api.py b/services/api.py
index 3402a20e5..1951ac9c2 100644
--- a/services/api.py
+++ b/services/api.py
@@ -181,8 +181,8 @@ def to_internal_value(self, data):
if not isinstance(obj, dict):
raise ValidationError(
{
- field_name: "This field is a translated field. Instead of a string,"
- " you must supply an object with strings corresponding"
+ field_name: "This field is a translated field. Instead of a"
+ " string, you must supply an object with strings corresponding"
" to desired language ids."
}
)
@@ -232,7 +232,7 @@ def to_representation(self, obj):
d[lang] = val
# If no text provided, leave the field as null
- for key, val in d.items():
+ for val in d.values():
if val is not None:
break
else:
@@ -327,7 +327,7 @@ def get_uuid(self, obj):
return obj.uuid
def get_parent(self, obj):
- parent = getattr(obj, "parent")
+ parent = obj.parent
if parent is not None:
return parent.uuid
return None
@@ -450,9 +450,9 @@ def to_representation(self, obj):
else None
)
if organization_name:
- ret["unit_count"]["organization"][
- organization_name
- ] = organization_unit_count.count
+ ret["unit_count"]["organization"][organization_name] = (
+ organization_unit_count.count
+ )
divisions = self.context.get("divisions", [])
include_fields = self.context.get("include", [])
@@ -809,13 +809,13 @@ def get_organizer_type(self, obj):
return choicefield_string(ORGANIZER_TYPES, "organizer_type", obj)
def get_contract_type(self, obj):
- key = getattr(obj, "displayed_service_owner_type")
+ key = obj.displayed_service_owner_type
if not key:
return None
translations = {
- "fi": getattr(obj, "displayed_service_owner_fi"),
- "sv": getattr(obj, "displayed_service_owner_sv"),
- "en": getattr(obj, "displayed_service_owner_en"),
+ "fi": obj.displayed_service_owner_fi,
+ "sv": obj.displayed_service_owner_sv,
+ "en": obj.displayed_service_owner_en,
}
return {"id": key, "description": translations}
diff --git a/services/content_metrics.py b/services/content_metrics.py
index e7607cc26..39d0be2e5 100644
--- a/services/content_metrics.py
+++ b/services/content_metrics.py
@@ -77,5 +77,5 @@ def format_unit(unit):
def print_units(units):
for u, value in units:
- print(format_unit(u), "measured value:", value)
- print()
+ print(format_unit(u), "measured value:", value) # noqa: T201
+ print() # noqa: T201
diff --git a/services/fixtures/exclusion_words.json b/services/fixtures/exclusion_words.json
index dd36151f8..d9574802c 100644
--- a/services/fixtures/exclusion_words.json
+++ b/services/fixtures/exclusion_words.json
@@ -15,4 +15,4 @@
"language_short": "fi"
}
}
-]
\ No newline at end of file
+]
diff --git a/services/management/commands/calculate_accessibility_shortcomings.py b/services/management/commands/calculate_accessibility_shortcomings.py
index 40e4870ff..16848df71 100644
--- a/services/management/commands/calculate_accessibility_shortcomings.py
+++ b/services/management/commands/calculate_accessibility_shortcomings.py
@@ -61,7 +61,7 @@ def print_rule(rule, indent=""):
)
except AccessibilityVariable.DoesNotExist:
evaluation = "**MISSING**"
- print(
+ print( # noqa: T201
"{}{} {}{}{}".format(
indent, rule["id"], rule["operator"], evaluation, message
)
@@ -71,5 +71,5 @@ def print_rule(rule, indent=""):
print_rule(operand, indent + " ")
for name, rule in Calculator().rules.items():
- print("=== RULE {} ===".format(name))
+ print("=== RULE {} ===".format(name)) # noqa: T201
print_rule(rule)
diff --git a/services/management/commands/empty_search_columns.py b/services/management/commands/empty_search_columns.py
index 85992304b..b5400517f 100644
--- a/services/management/commands/empty_search_columns.py
+++ b/services/management/commands/empty_search_columns.py
@@ -16,7 +16,8 @@ def handle(self, *args, **kwargs):
for model in MODELS:
for lang in ["fi", "sv", "en"]:
logger.info(
- f"Emptying search columns for model: {model.__name__} and language {lang}."
+ f"Emptying search columns for model: {model.__name__} and language"
+                f" {lang}."
)
key = "search_column_%s" % lang
model.objects.update(**{key: None})
diff --git a/services/management/commands/index_search_columns.py b/services/management/commands/index_search_columns.py
index 85fc05ba8..ed69321d0 100644
--- a/services/management/commands/index_search_columns.py
+++ b/services/management/commands/index_search_columns.py
@@ -106,7 +106,8 @@ def add_arguments(self, parser):
"--hyphenate_addresses_from",
nargs="?",
type=str,
- help="Hyphenate addresses whose modified_at timestamp starts at given timestamp YYYY-MM-DDTHH:MM:SS",
+ help="Hyphenate addresses whose modified_at timestamp starts at given"
+ " timestamp YYYY-MM-DDTHH:MM:SS",
)
parser.add_argument(
@@ -142,20 +143,28 @@ def handle(self, *args, **options):
f"Syllables generated for {generate_syllables(Service)} Services"
)
logger.info(
- f"Syllables generated for {generate_syllables(ServiceNode)} ServiceNodes"
+            "Syllables generated for"
+ f" {generate_syllables(ServiceNode)} ServiceNodes"
)
logger.info(
- f"{lang} Units indexed: {Unit.objects.update(**{key: get_search_column(Unit, lang)})}"
+ f"{lang} Units indexed:"
+ f" {Unit.objects.update(**{key: get_search_column(Unit, lang)})}"
)
logger.info(
- f"{lang} Services indexed: {Service.objects.update(**{key: get_search_column(Service, lang)})}"
+ f"{lang} Services indexed:"
+ f" {Service.objects.update(**{key: get_search_column(Service, lang)})}"
)
logger.info(f"{lang} ServiceNodes indexed: {index_servicenodes(lang)}")
logger.info(
- f"{lang} AdministrativeDivisions indexed: "
- f"{AdministrativeDivision.objects.update(**{key: get_search_column(AdministrativeDivision, lang)})}"
+ "{lang} AdministrativeDivisions indexed: {divisions}".format(
+ lang=lang,
+ divisions=AdministrativeDivision.objects.update(
+ **{key: get_search_column(AdministrativeDivision, lang)}
+ ),
+ )
)
logger.info(
- f"{lang} Addresses indexed: {Address.objects.update(**{key: get_search_column(Address, lang)})}"
+ f"{lang} Addresses indexed:"
+ f" {Address.objects.update(**{key: get_search_column(Address, lang)})}"
)
diff --git a/services/management/commands/lipas_import.py b/services/management/commands/lipas_import.py
index 6c5e7700c..eb4d808b6 100644
--- a/services/management/commands/lipas_import.py
+++ b/services/management/commands/lipas_import.py
@@ -145,9 +145,8 @@ def clean_name(name):
if clean_name(feature["nimi_fi"].value) != clean_name(unit.name_fi):
logger.warning(
- "id {} has non-matching name fields (Lipas: {}, db: {}).".format(
- lipas_id, feature["nimi_fi"].value, unit.name_fi
- )
+ f"id {lipas_id} has non-matching name fields (Lipas:"
+ f" {feature['nimi_fi'].value}, db: {unit.name_fi})."
)
try:
@@ -159,21 +158,27 @@ def clean_name(name):
geometries[lipas_id].append(feature.geom.geos)
except TypeError:
raise TypeError(
- "The lipas database contains mixed geometries, this is unsupported!"
+ "The lipas database contains mixed geometries, this is"
+ " unsupported!"
)
- # If mixed geometry types ever begin to appear in the lipas database,
- # uncommenting the following might make everything work straight
- # away. Please note that it's completely untested.
-
- # logger.warning("id {} has mixed geometries, "
- # "creating a GeometryCollection as fallback".format(lipas_id))
- # geometries[lipas_id] = GeometryCollection(list(geometries[lipas_id]) + feature.geom.geos)
+ # If mixed geometry types ever begin to appear in the lipas
+ # database, uncommenting the following might make everything
+ # work straight away. Please note that it's completely
+ # untested.
+
+ # logger.warning(
+ # f"id {lipas_id} has mixed geometries, creating a"
+ # " GeometryCollection as fallback"
+ # )
+ # geometries[lipas_id] = GeometryCollection(
+ # list(geometries[lipas_id]) + feature.geom.geos)
else:
geometries[lipas_id] = get_multi(feature.geom.geos)
except GDALException as err:
- # We might be dealing with something weird that the Python GDAL lib doesn't handle.
- # One example is a CurvePolygon as defined here http://www.gdal.org/ogr__core_8h.html
+ # We might be dealing with something weird that the Python GDAL lib
+ # doesn't handle. One example is a CurvePolygon as defined here
+ # http://www.gdal.org/ogr__core_8h.html
logger.error("Error while processing a geometry: {}".format(err))
logger.info("Found {} matches.".format(len(geometries)))
diff --git a/services/management/commands/lipas_import_3d.py b/services/management/commands/lipas_import_3d.py
index 24074e0bd..28eada54e 100644
--- a/services/management/commands/lipas_import_3d.py
+++ b/services/management/commands/lipas_import_3d.py
@@ -44,7 +44,8 @@ def _save_geometries(self, geometries, units_by_lipas_id):
unit.save()
else:
logger.warning(
- f"Failed to save unit {unit.name_fi} because of a missing z coordinate.",
+ f"Failed to save unit {unit.name_fi} because of a missing z"
+ " coordinate.",
)
def _get_types(self):
diff --git a/services/management/commands/school_district_import/school_district_importer.py b/services/management/commands/school_district_import/school_district_importer.py
index f0070e341..72d56b347 100644
--- a/services/management/commands/school_district_import/school_district_importer.py
+++ b/services/management/commands/school_district_import/school_district_importer.py
@@ -145,7 +145,8 @@ def remove_old_school_year(self, division_type):
During 1.8.-15.12. only the current school year is shown.
During 16.12.-31.7. both the current and the next school year are shown.
- The source might be named as "tuleva" but it might still actually be the current school year.
+ The source might be named as "tuleva" but it might still actually be the current
+ school year.
If today is between 1.8.-15.12 delete the previous year.
"""
diff --git a/services/management/commands/services_import/aliases.py b/services/management/commands/services_import/aliases.py
index 492e7a96a..04723a698 100644
--- a/services/management/commands/services_import/aliases.py
+++ b/services/management/commands/services_import/aliases.py
@@ -14,7 +14,7 @@ def import_aliases():
try:
f = open(path, "r")
except FileNotFoundError:
- print("Aliases file {} not found".format(path))
+ print("Aliases file {} not found".format(path)) # noqa: T201
return
value_sets = {}
@@ -27,7 +27,7 @@ def import_aliases():
)
if len(value_sets) == 0:
- print("No aliases found in file.")
+ print("No aliases found in file.") # noqa: T201
return
counts = {"success": 0, "duplicate": 0, "notfound": 0}
@@ -41,13 +41,12 @@ def import_aliases():
counts["success"] += 1
except db.IntegrityError:
counts["duplicate"] += 1
- pass
except Unit.DoesNotExist:
counts["notfound"] += 1
if counts["success"]:
- print("Imported {} aliases.".format(counts["success"]))
+ print("Imported {} aliases.".format(counts["success"])) # noqa: T201
if counts["notfound"]:
- print("{} units not found.".format(counts["notfound"]))
+ print("{} units not found.".format(counts["notfound"])) # noqa: T201
if counts["duplicate"]:
- print("Skipped {} aliases already in database.".format(counts["duplicate"]))
+ print("Skipped {} aliases already in database.".format(counts["duplicate"])) # noqa: T201
diff --git a/services/management/commands/services_import/entrances.py b/services/management/commands/services_import/entrances.py
index e6602eaeb..92d293c4c 100644
--- a/services/management/commands/services_import/entrances.py
+++ b/services/management/commands/services_import/entrances.py
@@ -91,8 +91,8 @@ def _import_unit_entrance(
obj_changed = True
is_main_entrance = info["is_main_entrance"] == "Y"
- if is_main_entrance != getattr(obj, "is_main_entrance"):
- setattr(obj, "is_main_entrance", is_main_entrance)
+ if is_main_entrance != obj.is_main_entrance:
+ obj.is_main_entrance = is_main_entrance
obj_changed = True
n = float(info.get("latitude", 0))
diff --git a/services/management/commands/services_import/services.py b/services/management/commands/services_import/services.py
index 4503902b7..61a6f18f4 100644
--- a/services/management/commands/services_import/services.py
+++ b/services/management/commands/services_import/services.py
@@ -53,9 +53,14 @@ def import_services(
noop=False,
logger=None,
importer=None,
- ontologytrees=pk_get("ontologytree"),
- ontologywords=pk_get("ontologyword"),
+ ontologytrees=None,
+ ontologywords=None,
):
+ if ontologytrees is None:
+ ontologytrees = pk_get("ontologytree")
+ if ontologywords is None:
+ ontologywords = pk_get("ontologyword")
+
nodesyncher = ModelSyncher(ServiceNode.objects.all(), lambda obj: obj.id)
servicesyncher = ModelSyncher(Service.objects.all(), lambda obj: obj.id)
@@ -168,7 +173,7 @@ def update_service_node(node, units_by_service):
s.update(v)
units[k] = s
node._unit_count = {}
- for k, v in units.items():
+ for k in units:
node._unit_count[k] = len(units[k])
return units
@@ -227,9 +232,9 @@ def update_count_objects(service_node_unit_count_objects, node, node_count_model
elif obj.count != count:
obj.count = count
yield obj
- for node in node.get_children():
+ for child_node in node.get_children():
yield from update_count_objects(
- service_node_unit_count_objects, node, node_count_model
+ service_node_unit_count_objects, child_node, node_count_model
)
@@ -414,7 +419,7 @@ def remove_empty_service_nodes(logger):
@db.transaction.atomic
def update_mobility_service_nodes():
service_node_count = 0
- for root_node_name, root_node_dict in MOBILITY_SERVICE_NODE_MAPPING.items():
+ for root_node_dict in MOBILITY_SERVICE_NODE_MAPPING.values():
service_nodes = root_node_dict.pop("service_nodes")
root_node, __ = MobilityServiceNode.objects.update_or_create(
id=root_node_dict["id"],
diff --git a/services/management/commands/services_import/tests/test_update_service_counts.py b/services/management/commands/services_import/tests/test_update_service_counts.py
index 87cb46d6a..9efa5eebd 100644
--- a/services/management/commands/services_import/tests/test_update_service_counts.py
+++ b/services/management/commands/services_import/tests/test_update_service_counts.py
@@ -43,7 +43,7 @@ def municipalities(municipality_type):
@pytest.fixture
def services():
os = []
- for i in range(0, 5):
+ for i in range(5):
o = Service.objects.create(
name="service{}".format(i), id=i, last_modified_time=now()
)
@@ -64,7 +64,7 @@ def units(services, municipalities):
for service in services:
if index % max_unit_count > 0:
distinct_service_muni_counts.add((service.id, municipality.id))
- for i in range(0, index % max_unit_count):
+ for i in range(index % max_unit_count):
name = "unit_s{}_m{}_{}".format(service.id, municipality.id, i)
unit = Unit.objects.create(
id=unit_id,
@@ -194,7 +194,7 @@ def test_update_service_counts(municipalities, services, units, api_client):
# Step 4: add single unit at a time
service = Service.objects.get(pk=0)
count = 0
- for i in range(0, 10):
+ for i in range(10):
u = Unit.objects.create(
name="test_{}",
id=i + 100000,
diff --git a/services/management/commands/services_import/units.py b/services/management/commands/services_import/units.py
index 4c6b66cd6..80f714a99 100644
--- a/services/management/commands/services_import/units.py
+++ b/services/management/commands/services_import/units.py
@@ -168,7 +168,7 @@ def import_units(
)
syncher = ModelSyncher(queryset, lambda obj: obj.id)
- for idx, info in enumerate(obj_list):
+ for info in obj_list:
uid = info["id"]
info["connections"] = conn_by_unit.get(uid, [])
info["accessibility_properties"] = acc_by_unit.get(uid, [])
@@ -508,8 +508,6 @@ def _import_unit_service_nodes(obj, info, obj_changed, update_fields):
obj_service_node_ids = sorted(obj.service_nodes.values_list("id", flat=True))
if obj_service_node_ids != service_node_ids:
- # if not obj_created and VERBOSITY:
- # LOGGER.warning("%s service set changed: %s -> %s" % (obj, obj_service_node_ids, service_node_ids))
obj.service_nodes.set(service_node_ids)
# Update root service cache
@@ -670,8 +668,8 @@ def _import_unit_connections(obj, info, obj_changed, update_fields):
c.order = i
tags = conn.get("tags", [])
- if tags and getattr(c, "tags") != tags:
- setattr(c, "tags", tags)
+ if tags and c.tags != tags:
+ c.tags = tags
c._changed = True
fields = ["email", "phone", "contact_person"]
@@ -679,9 +677,8 @@ def _import_unit_connections(obj, info, obj_changed, update_fields):
val = conn.get(field, None)
if val and len(val) > UnitConnection._meta.get_field(field).max_length:
LOGGER.info(
- "Ignoring too long value of field {} in unit {} connections".format(
- field, obj.pk
- )
+ f"Ignoring too long value of field {field} in unit {obj.pk}"
+ " connections"
)
continue
if getattr(c, field) != val:
diff --git a/services/management/commands/services_import/utils.py b/services/management/commands/services_import/utils.py
index 5175f7731..5866ae48c 100644
--- a/services/management/commands/services_import/utils.py
+++ b/services/management/commands/services_import/utils.py
@@ -54,7 +54,7 @@ def pk_get(resource_name, res_id=None, params=None):
url = "%s%s/" % (url, res_id)
if params:
url += "?" + urlencode(params)
- print("CALLING URL >>> ", url)
+ print("CALLING URL >>> ", url) # noqa: T201
resp = requests.get(url, timeout=300)
assert resp.status_code == 200, "fuu status code {}".format(resp.status_code)
return resp.json()
@@ -88,7 +88,7 @@ def clean_text(text):
return text
text = text.replace("\r\n", "\n")
# remove consecutive whitespaces
- text = re.sub(r"[ \t][ \t]+", " ", text, re.U)
+ text = re.sub(r"[ \t][ \t]+", " ", text)
# remove nil bytes
text = text.replace("\u0000", " ")
text = text.replace("\r", "\n")
@@ -116,9 +116,9 @@ def update_service_names_fields(obj, info, obj_changed, update_fields):
):
return obj_changed, update_fields
- setattr(obj, "service_names_fi", service_names_fi)
- setattr(obj, "service_names_sv", service_names_sv)
- setattr(obj, "service_names_en", service_names_en)
+ obj.service_names_fi = service_names_fi
+ obj.service_names_sv = service_names_sv
+ obj.service_names_en = service_names_en
update_fields.extend(["service_names_fi", "service_names_sv", "service_names_en"])
obj_changed = True
return obj_changed, update_fields
@@ -148,13 +148,13 @@ def update_extra_searchwords(obj, info, obj_changed, update_fields):
return obj_changed, update_fields
if extra_searchwords_fi:
- setattr(obj, "extra_searchwords_fi", extra_searchwords_fi)
+ obj.extra_searchwords_fi = extra_searchwords_fi
update_fields.append("extra_searchwords_fi")
if extra_searchwords_sv:
- setattr(obj, "extra_searchwords_sv", extra_searchwords_sv)
+ obj.extra_searchwords_sv = extra_searchwords_sv
update_fields.append("extra_searchwords_sv")
if extra_searchwords_en:
- setattr(obj, "extra_searchwords_en", extra_searchwords_en)
+ obj.extra_searchwords_en = extra_searchwords_en
update_fields.append("extra_searchwords_en")
obj_changed = True
return obj_changed, update_fields
diff --git a/services/management/commands/services_import_v4.py b/services/management/commands/services_import_v4.py
index 6eeccbfb7..4502540df 100644
--- a/services/management/commands/services_import_v4.py
+++ b/services/management/commands/services_import_v4.py
@@ -77,7 +77,7 @@ def clean_text(self, text):
# text = text.replace('\n', ' ')
# text = text.replace(u'\u00a0', ' ')
# remove consecutive whitespaces
- text = re.sub(r"\s\s+", " ", text, re.U)
+ text = re.sub(r"\s\s+", " ", text)
# remove nil bytes
text = text.replace("\u0000", " ")
text = text.strip()
@@ -120,7 +120,7 @@ def _save_translated_field(
def _set_field(self, obj, field_name, val):
if not hasattr(obj, field_name):
- print(vars(obj))
+ print(vars(obj)) # noqa: T201
obj_val = getattr(obj, field_name)
if obj_val == val:
return
@@ -186,7 +186,7 @@ def handle(self, **options):
continue
method = getattr(self, "import_%s" % imp)
if self.verbosity:
- print("Importing %s..." % imp)
+ print("Importing %s..." % imp) # noqa: T201
if "id" in options and options.get("id"):
method(pk=options["id"])
else:
diff --git a/services/management/commands/update_helsinki_preschool_districts.py b/services/management/commands/update_helsinki_preschool_districts.py
index 3f0d7db89..5e307a1b3 100644
--- a/services/management/commands/update_helsinki_preschool_districts.py
+++ b/services/management/commands/update_helsinki_preschool_districts.py
@@ -1,7 +1,7 @@
from django.core.management.base import BaseCommand
from munigeo.models import AdministrativeDivision
-from services.management.commands.school_district_import.school_district_importer import (
+from services.management.commands.school_district_import.school_district_importer import ( # noqa: E501
SchoolDistrictImporter,
)
@@ -40,7 +40,8 @@ def handle(self, *args, **options):
{data["division_type"] for data in PRESCHOOL_DISTRICT_DATA}
)
- # Remove old divisions before importing new ones to avoid possible duplicates as the source layers may change
+ # Remove old divisions before importing new ones to avoid possible duplicates
+ # as the source layers may change
AdministrativeDivision.objects.filter(
type__type__in=division_types, municipality__id="helsinki"
).delete()
diff --git a/services/management/commands/update_helsinki_school_districts.py b/services/management/commands/update_helsinki_school_districts.py
index 4256b8e57..2aa250820 100644
--- a/services/management/commands/update_helsinki_school_districts.py
+++ b/services/management/commands/update_helsinki_school_districts.py
@@ -1,7 +1,7 @@
from django.core.management.base import BaseCommand
from munigeo.models import AdministrativeDivision
-from services.management.commands.school_district_import.school_district_importer import (
+from services.management.commands.school_district_import.school_district_importer import ( # noqa: E501
SchoolDistrictImporter,
)
@@ -58,7 +58,8 @@ class Command(BaseCommand):
def handle(self, *args, **options):
division_types = list({data["division_type"] for data in SCHOOL_DISTRICT_DATA})
- # Remove old divisions before importing new ones to avoid possible duplicates as the source layers may change
+ # Remove old divisions before importing new ones to avoid possible duplicates
+ # as the source layers may change
AdministrativeDivision.objects.filter(
type__type__in=division_types, municipality__id="helsinki"
).delete()
diff --git a/services/management/commands/update_vantaa_nature_reserves.py b/services/management/commands/update_vantaa_nature_reserves.py
index c4d7cddd9..b2514f58b 100644
--- a/services/management/commands/update_vantaa_nature_reserves.py
+++ b/services/management/commands/update_vantaa_nature_reserves.py
@@ -98,6 +98,6 @@ def update_nature_reserves(self):
)
num_nature_reserves_deleted = deleted_nature_reserves.delete()[0]
logger.info(
- f"Import completed. {num_nature_reserves_updated} nature reserves updated and "
- f"{num_nature_reserves_deleted} deleted."
+ f"Import completed. {num_nature_reserves_updated} nature reserves updated"
+ f" and {num_nature_reserves_deleted} deleted."
)
diff --git a/services/management/commands/update_vantaa_parking_areas.py b/services/management/commands/update_vantaa_parking_areas.py
index 0cffbfcc0..7762049bf 100644
--- a/services/management/commands/update_vantaa_parking_areas.py
+++ b/services/management/commands/update_vantaa_parking_areas.py
@@ -35,7 +35,7 @@
"service_url": "https://matti.vantaa.fi/server2/rest/services/Hosted/Kadunvarsipys%C3%A4k%C3%B6inti/"
"FeatureServer",
"layer_name": "Kadunvarsipysäköinti MUOKATTAVA",
- "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/kadunvarsipysakointi-alue:",
+ "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/kadunvarsipysakointi-alue:", # noqa: E501
},
{
"type": "park_and_ride_area",
@@ -49,21 +49,21 @@
"service_url": "https://matti.vantaa.fi/server2/rest/services/Hosted/Raskaan_liikenteen_"
"pys%C3%A4k%C3%B6intialueet/FeatureServer",
"layer_name": "Raskaan liikenteen pysäköintialueet MUOKATTAVA",
- "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/raskaanliikenteen-pysakointipaikka-alue:",
+ "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/raskaanliikenteen-pysakointipaikka-alue:", # noqa: E501
},
{
"type": "hgv_street_parking_area",
"service_url": "https://matti.vantaa.fi/server2/rest/services/Hosted/Raskaan_liikenteen_sallitut_kadunvarret/"
"FeatureServer",
"layer_name": "Raskaan liikenteen sallitut kadunvarret MUOKATTAVA",
- "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/raskaanliikenteen-sallittu-kadunvarsi-alue:",
+ "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/raskaanliikenteen-sallittu-kadunvarsi-alue:", # noqa: E501
},
{
"type": "hgv_no_parking_area",
"service_url": "https://matti.vantaa.fi/server2/rest/services/Hosted/Raskaan_liikenteen_kielletyt_kadunvarret/"
"FeatureServer",
"layer_name": "Raskaan liikenteen kielletyt kadunvarret MUOKATTAVA",
- "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/raskaanliikenteen-kielletty-kadunvarsi-alue:",
+ "ocd_id_base": "ocd-division/country:fi/kunta:vantaa/raskaanliikenteen-kielletty-kadunvarsi-alue:", # noqa: E501
},
]
@@ -217,6 +217,7 @@ def update_parking_areas(self):
num_parking_areas_deleted = removed_parking_areas.delete()[0]
logger.info(
- f"Import completed. {num_parking_areas_updated} {readable_name} updated and {num_parking_areas_deleted}"
- f" deleted in {time() - start_time:.0f} seconds."
+ f"Import completed. {num_parking_areas_updated} {readable_name} updated"
+ f" and {num_parking_areas_deleted} deleted in"
+ f" {time() - start_time:.0f} seconds."
)
diff --git a/services/management/commands/update_vantaa_parking_payzones.py b/services/management/commands/update_vantaa_parking_payzones.py
index 643bb48ad..c452057c7 100644
--- a/services/management/commands/update_vantaa_parking_payzones.py
+++ b/services/management/commands/update_vantaa_parking_payzones.py
@@ -113,6 +113,7 @@ def update_parking_payzones(self):
num_parking_payzones_deleted = removed_parking_payzones.delete()[0]
logger.info(
- f"Import completed. {num_parking_payzones_updated} parking payzones updated and "
- f"{num_parking_payzones_deleted} deleted in {time() - start_time:.0f} seconds."
+ f"Import completed. {num_parking_payzones_updated} parking payzones updated"
+ f" and {num_parking_payzones_deleted} deleted in"
+ f" {time() - start_time:.0f} seconds."
)
diff --git a/services/management/commands/verify_school_districts.py b/services/management/commands/verify_school_districts.py
index 6d380c189..a72f9c4f5 100644
--- a/services/management/commands/verify_school_districts.py
+++ b/services/management/commands/verify_school_districts.py
@@ -55,7 +55,7 @@ def verify_school_units_found():
success = True
error_report = []
- for key, val in missing.items():
+ for val in missing.values():
if len(val) > 0:
success = False
error_report.append(pprint.pformat(val, indent=4))
diff --git a/services/migrations/0005_auto_20170403_1131.py b/services/migrations/0005_auto_20170403_1131.py
index 44b56f0d0..e8e35a6f5 100644
--- a/services/migrations/0005_auto_20170403_1131.py
+++ b/services/migrations/0005_auto_20170403_1131.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
-from django.db import migrations, models
+from django.db import migrations
class Migration(migrations.Migration):
diff --git a/services/migrations/0092_trigram_index_service_and_unit_names.py b/services/migrations/0092_trigram_index_service_and_unit_names.py
index 48dec60f8..d1a4d1ab2 100644
--- a/services/migrations/0092_trigram_index_service_and_unit_names.py
+++ b/services/migrations/0092_trigram_index_service_and_unit_names.py
@@ -21,7 +21,7 @@ class Migration(migrations.Migration):
DROP INDEX unit_name_en_trgm_idx;
DROP INDEX service_name_fi_trgm_idx;
DROP INDEX service_name_sv_trgm_idx;
- DROP INDEX service_name_en_trgm_idx;
+ DROP INDEX service_name_en_trgm_idx;
""",
),
]
diff --git a/services/migrations/0094_update_search_view.py b/services/migrations/0094_update_search_view.py
index ef440813d..c1aac55e7 100644
--- a/services/migrations/0094_update_search_view.py
+++ b/services/migrations/0094_update_search_view.py
@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
SELECT concat('service_', id) AS id, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'Service' AS type_name from services_service
UNION
SELECT concat('servicenode_', id) AS id, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'ServiceNode' AS type_name from services_servicenode
- UNION
+ UNION
SELECT concat('administrativedivision_', id) AS id, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'AdministrativeDivision' AS type_name from munigeo_administrativedivision
UNION
SELECT concat('address_', id) AS id, full_name_fi as name_fi, full_name_sv as name_sv, full_name_en as name_en, search_column_fi, search_column_sv, search_column_en, 'Address' AS type_name from munigeo_address;
diff --git a/services/migrations/0095_combine_servicenodes_with_same_name.py b/services/migrations/0095_combine_servicenodes_with_same_name.py
index e951bf525..e98e5cdb2 100644
--- a/services/migrations/0095_combine_servicenodes_with_same_name.py
+++ b/services/migrations/0095_combine_servicenodes_with_same_name.py
@@ -12,8 +12,8 @@ class Migration(migrations.Migration):
SELECT concat('unit_', services_unit.id) AS id, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'Unit' AS type_name from services_unit
UNION
SELECT concat('service_', id) AS id, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'Service' AS type_name from services_service
- UNION
- SELECT concat('servicenode_', string_agg(id::text, '_')) AS ids, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'ServiceNode' AS type_name from services_servicenode group by 2,3,4,5,6,7,8
+ UNION
+ SELECT concat('servicenode_', string_agg(id::text, '_')) AS ids, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'ServiceNode' AS type_name from services_servicenode group by 2,3,4,5,6,7,8
UNION
SELECT concat('administrativedivision_', id) AS id, name_fi, name_sv, name_en, search_column_fi, search_column_sv, search_column_en, 'AdministrativeDivision' AS type_name from munigeo_administrativedivision
UNION
diff --git a/services/migrations/0116_alter_unit_address_postal_full_and_more.py b/services/migrations/0116_alter_unit_address_postal_full_and_more.py
index 98bfbf385..3adbaa854 100644
--- a/services/migrations/0116_alter_unit_address_postal_full_and_more.py
+++ b/services/migrations/0116_alter_unit_address_postal_full_and_more.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("services", "0115_unit_geometry_3d"),
]
diff --git a/services/migrations/0117_exclusionword.py b/services/migrations/0117_exclusionword.py
index 4fae5f0ad..260b4c3dd 100644
--- a/services/migrations/0117_exclusionword.py
+++ b/services/migrations/0117_exclusionword.py
@@ -4,7 +4,6 @@
class Migration(migrations.Migration):
-
dependencies = [
("services", "0116_alter_unit_address_postal_full_and_more"),
]
diff --git a/services/models/__init__.py b/services/models/__init__.py
index 2b74047de..8c1d64eb0 100644
--- a/services/models/__init__.py
+++ b/services/models/__init__.py
@@ -22,3 +22,31 @@
)
from .unit_entrance import UnitEntrance
from .unit_identifier import UnitIdentifier
+
+__all__ = [
+ "AccessibilityVariable",
+ "Department",
+ "FeedbackMapping",
+ "Keyword",
+ "MobilityServiceNode",
+ "Announcement",
+ "ErrorMessage",
+ "ExclusionRule",
+ "ExclusionWord",
+ "Service",
+ "UnitServiceDetails",
+ "ServiceMapping",
+ "ServiceNode",
+ "RequestStatistic",
+ "Unit",
+ "UnitAccessibilityProperty",
+ "UnitAccessibilityShortcomings",
+ "UnitAlias",
+ "UnitConnection",
+ "MobilityServiceNodeUnitCount",
+ "OrganizationServiceUnitCount",
+ "ServiceNodeUnitCount",
+ "ServiceUnitCount",
+ "UnitEntrance",
+ "UnitIdentifier",
+]
diff --git a/services/models/unit.py b/services/models/unit.py
index 74044672c..5c77dc2b5 100644
--- a/services/models/unit.py
+++ b/services/models/unit.py
@@ -7,7 +7,7 @@
from django.contrib.postgres.search import SearchVectorField
from django.db.models import JSONField, Manager
from django.utils import timezone
-from django.utils.translation import gettext as _
+from django.utils.translation import gettext_lazy as _
from munigeo.models import Municipality
from munigeo.utils import get_default_srid
diff --git a/services/open_api_parameters.py b/services/open_api_parameters.py
index e19f4f18e..46ccb19d5 100644
--- a/services/open_api_parameters.py
+++ b/services/open_api_parameters.py
@@ -19,7 +19,8 @@
BBOX_PARAMETER = OpenApiParameter(
name="bbox",
location=OpenApiParameter.QUERY,
- description="Bounding box in the format 'left,bottom,right,top'. Values must be floating points or integers.",
+ description="Bounding box in the format 'left,bottom,right,top'. Values must be"
+ " floating points or integers.",
required=False,
type=str,
)
@@ -44,7 +45,8 @@
OpenApiParameter(
name="date",
location=OpenApiParameter.QUERY,
- description="Filter divisions based on their validity date. Format: YYYY-MM-DD.",
+ description="Filter divisions based on their validity date. Format:"
+ " YYYY-MM-DD.",
required=False,
type=str,
),
@@ -53,12 +55,14 @@
DISTANCE_PARAMETER = OpenApiParameter(
name="distance",
location=OpenApiParameter.QUERY,
- description="The maximum distance from the provided location, defined by the lat and lon parameters. If this"
- " parameter is given also the 'lat' and 'lon' parameters are required.",
+ description="The maximum distance from the provided location, defined by the lat"
+ " and lon parameters. If this parameter is given also the 'lat' and 'lon'"
+ " parameters are required.",
required=False,
type=float,
)
+
DIVISION_TYPE_PARAMETER = OpenApiParameter(
name="type",
location=OpenApiParameter.QUERY,
@@ -94,8 +98,8 @@
LATITUDE_PARAMETER = OpenApiParameter(
name="lat",
location=OpenApiParameter.QUERY,
- description="Filter by location. Give latitude in WGS84 system. If this parameter is given also the 'lon' "
- "parameter is required.",
+ description="Filter by location. Give latitude in WGS84 system. If this parameter"
+ " is given also the 'lon' parameter is required.",
required=False,
type=float,
)
@@ -119,8 +123,8 @@
LONGITUDE_PARAMETER = OpenApiParameter(
name="lon",
location=OpenApiParameter.QUERY,
- description="Filter by location. Give longitude in WGS84 system. If this parameter is given also the 'lat' "
- "parameter is required.",
+ description="Filter by location. Give longitude in WGS84 system. If this parameter"
+ " is given also the 'lat' parameter is required.",
required=False,
type=float,
)
diff --git a/services/scripts/accessibility_rules.py b/services/scripts/accessibility_rules.py
index 18b420306..3175cff0d 100644
--- a/services/scripts/accessibility_rules.py
+++ b/services/scripts/accessibility_rules.py
@@ -2,7 +2,7 @@
import pprint
import re
import sys
-from collections import OrderedDict as odict
+from collections import OrderedDict
from sys import argv
"""
@@ -102,10 +102,8 @@ def set_operator(self, operator, row):
msg = """
Error, trying to change operator of a compound expression at {}.
Probable cause: missing closing parenthesis right before said line.
- """.format(
- row[-1]
- )
- print(msg)
+ """.format(row[-1])
+ print(msg) # noqa: T201
def set_mode(self, mode):
self.mode = mode
@@ -203,14 +201,14 @@ def next_line(reader):
def exit_on_error(message, expression=None, lineno=None):
- print("Error: " + message)
+ print("Error: " + message) # noqa: T201
if expression:
- print(
+ print( # noqa: T201
" beginning at line %s, expression %s"
% (expression.first_line, str(expression))
)
if lineno:
- print(" beginning at line %s" % lineno)
+ print(" beginning at line %s" % lineno) # noqa: T201
sys.exit(2)
@@ -271,7 +269,7 @@ def update_flags(row, expression):
string_parts = raw_string.split(":")
human_keys = {"Q": "include", "R": "reports", "S": "detailed_choice"}
bits = []
- for i, part in enumerate(string_parts):
+ for part in string_parts:
vals = set()
for char in part:
if char not in human_keys.keys():
@@ -291,13 +289,13 @@ def build_comparison(iterator, row, depth=0, requirement_id=None):
try:
variable, operator, value = int(row[VARIABLE]), row[OPERATOR], row[VALUE]
except ValueError:
- exit_on_error("Value error %s." % row)
+ return exit_on_error("Value error %s." % row)
if operator == "I":
operator = "NEQ"
elif operator == "E":
operator = "EQ"
else:
- exit_on_error("Unknown comparison operator %s." % operator)
+ return exit_on_error("Unknown comparison operator %s." % operator)
expression = Comparison(depth, variable, operator, value)
match = VARIABLE_NAME.match(row[EXPRESSION])
@@ -306,7 +304,7 @@ def build_comparison(iterator, row, depth=0, requirement_id=None):
path[0] = path[0].lower()
expression.variable_path = path
else:
- print("nomatch")
+ print("nomatch") # noqa: T201
update_messages(row, expression)
update_flags(row, expression)
return expression
@@ -361,7 +359,7 @@ def build_expression(iterator, row, depth=0, requirement_id=None):
iterator, depth=depth, requirement_id=requirement_id
)
except ParseError as e:
- exit_on_error(str(e), lineno=first_line)
+ return exit_on_error(str(e), lineno=first_line)
expression.first_line = row[-1]
expression.requirement_id = requirement_id
return expression
@@ -428,7 +426,7 @@ def save_message(multilingual_message):
message_id_incr += 1
message_ids[msg_key] = msg_id
msg_id = message_ids[msg_key]
- for lang, message in multilingual_message.items():
+ for lang in multilingual_message:
try:
current_message = messages[msg_id]
except IndexError:
@@ -458,8 +456,8 @@ def gather_messages(expression):
def build_tree(reader):
global messages
- tree = odict()
- row_groups = odict()
+ tree = OrderedDict()
+ row_groups = OrderedDict()
_, row = next_line(reader)
accessibility_case_id = None
while True:
@@ -478,10 +476,10 @@ def build_tree(reader):
it = iter(rows)
row = next(it)
tree[acid] = build_expression(it, row, depth=0)
- for acid, expression in tree.items():
+ for expression in tree.values():
rescope(expression, "messages")
rescope(expression, "flags")
- for acid, expression in tree.items():
+ for expression in tree.values():
gather_messages(expression)
return tree, messages
@@ -496,7 +494,7 @@ def parse_accessibility_rules(filename):
WIDTH = 140
if __name__ == "__main__":
if len(argv) != 3:
- print(
+ print( # noqa: T201
"Please provide the desired operation and the input csv filename "
"as the first and second parameters.\n\nOperation is one of\n"
" values, messages or debug."
@@ -506,13 +504,13 @@ def parse_accessibility_rules(filename):
tree, messages = parse_accessibility_rules(filename)
if op == "debug":
for i, v in tree.items():
- print("Case " + i)
- print(str(v))
+ print("Case " + i) # noqa: T201
+ print(str(v)) # noqa: T201
elif op == "values":
key_qualifiers = "ABC"
- for i, v in tree.items():
- for mode in range(0, len(v.messages["case_names"])):
+ for v in tree.values():
+ for mode in range(len(v.messages["case_names"])):
v.set_mode(mode)
- pprint.pprint(v.val(), width=WIDTH)
+ pprint.pprint(v.val(), width=WIDTH) # noqa: T203
elif op == "messages":
- pprint.pprint(messages, width=WIDTH)
+ pprint.pprint(messages, width=WIDTH) # noqa: T203
diff --git a/services/search/api.py b/services/search/api.py
index 2d33fef7a..6997d3ce4 100644
--- a/services/search/api.py
+++ b/services/search/api.py
@@ -4,11 +4,11 @@
munigeo_Address, munigeo_Administrative_division.
- For every model that is included in the search a search column is added
for every language of type SearchVector. These are also defined as a Gindex.
- The models that are searched also implements a function called get_search_column_indexing
- where the name, configuration(language) and weight of the columns that will be indexed
- are defined. This function is used by the indexing script and signals when
- the search_column is populated.
-- A view called search_view is created and it contains the search_columns of the models
+ The models that are searched also implement a function called
+ get_search_column_indexing where the name, configuration(language) and weight of the
+ columns that will be indexed are defined. This function is used by the indexing script
+ and signals when the search_column is populated.
+- A view called search_view is created. It contains the search_columns of the models
and a couple auxiliary columns: id. type_name and name. This view is created by a
raw SQL migration 008X_create_search_view.py.
- The search if performed by querying the views search_columns.
@@ -25,7 +25,7 @@
from django.contrib.gis.gdal import SpatialReference
from django.db import connection, reset_queries
from django.db.models import Count
-from drf_spectacular.utils import extend_schema, OpenApiParameter
+from drf_spectacular.utils import OpenApiParameter, extend_schema
from munigeo import api as munigeo_api
from munigeo.models import Address, AdministrativeDivision
from munigeo.utils import get_default_srid
@@ -122,28 +122,28 @@ def to_representation(self, obj):
# Address IDs are not serialized thus they changes after every import.
if object_type not in ["address", "servicenode"]:
- representation["id"] = getattr(obj, "id")
+ representation["id"] = obj.id
representation["object_type"] = object_type
names = {}
if object_type == "address":
- names["fi"] = getattr(obj, "full_name_fi")
- names["sv"] = getattr(obj, "full_name_sv")
- names["en"] = getattr(obj, "full_name_en")
+ names["fi"] = obj.full_name_fi
+ names["sv"] = obj.full_name_sv
+ names["en"] = obj.full_name_en
representation["name"] = names
else:
- names["fi"] = getattr(obj, "name_fi")
- names["sv"] = getattr(obj, "name_sv")
- names["en"] = getattr(obj, "name_en")
+ names["fi"] = obj.name_fi
+ names["sv"] = obj.name_sv
+ names["en"] = obj.name_en
representation["name"] = names
if object_type == "unit":
representation["street_address"] = {
- "fi": getattr(obj, "street_address_fi"),
- "sv": getattr(obj, "street_address_sv"),
- "en": getattr(obj, "street_address_en"),
+ "fi": obj.street_address_fi,
+ "sv": obj.street_address_sv,
+ "en": obj.street_address_en,
}
if hasattr(obj.municipality, "id"):
- representation["municipality"] = getattr(obj.municipality, "id")
+ representation["municipality"] = obj.municipality.id
try:
shortcomings = obj.accessibility_shortcomings
except UnitAccessibilityShortcomings.DoesNotExist:
@@ -191,7 +191,8 @@ def to_representation(self, obj):
include_object_type, include_field = include.split(".")
except ValueError:
raise ParseError(
- "'include' list elements must be in format: entity.field, e.g., unit.connections."
+ "'include' list elements must be in format: entity.field, e.g.,"
+ " unit.connections."
)
if object_type == "unit" and include_object_type == "unit":
@@ -222,7 +223,8 @@ def to_representation(self, obj):
)
else:
raise ParseError(
- f"Entity {object_type} does not contain a {include_field} field."
+ f"Entity {object_type} does not contain a {include_field}"
+ " field."
)
return representation
@@ -262,8 +264,10 @@ def build_search_query(query: str):
OpenApiParameter(
name="q",
location=OpenApiParameter.QUERY,
- description="The query string used for searching. Searches the search_columns for the given models. Commas "
- "between words are interpreted as 'and' operator. Words ending with the '|' sign are interpreted as 'or' "
+ description="The query string used for searching. Searches the"
+ " search_columns for the given models. Commas "
+ "between words are interpreted as 'and' operator. Words ending with the '|'"
+ " sign are interpreted as 'or' "
"operator.",
required=False,
type=str,
@@ -271,7 +275,8 @@ def build_search_query(query: str):
OpenApiParameter(
name="type",
location=OpenApiParameter.QUERY,
- description="Comma separated list of types to search for. Valid values are: unit, service, servicenode, "
+ description="Comma separated list of types to search for. Valid values are:"
+ " unit, service, servicenode, "
"address, administrativedivision. If not given defaults to all.",
required=False,
type=str,
@@ -279,8 +284,10 @@ def build_search_query(query: str):
OpenApiParameter(
name="use_trigram",
location=OpenApiParameter.QUERY,
- description="Comma separated list of types that will include trigram results in search if no results are "
- "found. Valid values are: unit, service, servicenode, address, administrativedivision. If not given "
+ description="Comma separated list of types that will include trigram"
+ " results in search if no results are "
+ "found. Valid values are: unit, service, servicenode, address,"
+ " administrativedivision. If not given "
"trigram will not be used.",
required=False,
type=str,
@@ -288,14 +295,16 @@ def build_search_query(query: str):
OpenApiParameter(
name="trigram_threshold",
location=OpenApiParameter.QUERY,
- description="Threshold value for trigram search. If not given defaults to 0.15.",
+ description="Threshold value for trigram search. If not given defaults to"
+ " 0.15.",
required=False,
type=float,
),
OpenApiParameter(
name="rank_threshold",
location=OpenApiParameter.QUERY,
- description="Include results with search rank greater than or equal to the value. If not given defaults to "
+ description="Include results with search rank greater than or equal to the"
+ " value. If not given defaults to "
"0.",
required=False,
type=float,
@@ -303,21 +312,24 @@ def build_search_query(query: str):
OpenApiParameter(
name="use_websearch",
location=OpenApiParameter.QUERY,
- description="Use websearch_to_tsquery instead of to_tsquery if exclusion rules are defined for the search.",
+ description="Use websearch_to_tsquery instead of to_tsquery if exclusion"
+ " rules are defined for the search.",
required=False,
type=bool,
),
OpenApiParameter(
name="geometry",
location=OpenApiParameter.QUERY,
- description="Display geometry of the search result. If not given defaults to false.",
+ description="Display geometry of the search result. If not given defaults"
+ " to false.",
required=False,
type=bool,
),
OpenApiParameter(
name="order_units_by_num_services",
location=OpenApiParameter.QUERY,
- description="Order units by number of services. If not given defaults to true.",
+ description="Order units by number of services. If not given defaults to"
+ " true.",
required=False,
type=bool,
),
@@ -331,7 +343,8 @@ def build_search_query(query: str):
OpenApiParameter(
name="include",
location=OpenApiParameter.QUERY,
- description="Comma separated list of fields to include in the response. Format: entity.field, e.g., "
+ description="Comma separated list of fields to include in the response."
+ " Format: entity.field, e.g., "
"unit.connections.",
required=False,
type=str,
@@ -367,7 +380,8 @@ def build_search_query(query: str):
OpenApiParameter(
name="administrativedivision_limit",
location=OpenApiParameter.QUERY,
- description="Limit the number of administrative divisions in the search results.",
+ description="Limit the number of administrative divisions in the search"
+ " results.",
required=False,
type=int,
),
@@ -381,7 +395,8 @@ def build_search_query(query: str):
OpenApiParameter(
name="language",
location=OpenApiParameter.QUERY,
- description="The language to be used in the search. If not given defaults to Finnish. Format: fi, sv, en.",
+ description="The language to be used in the search. If not given defaults"
+ " to Finnish. Format: fi, sv, en.",
required=False,
type=str,
),
@@ -393,7 +408,8 @@ def build_search_query(query: str):
type=str,
),
],
- description="Search for units, services, service nodes, addresses and administrative divisions.",
+ description="Search for units, services, service nodes, addresses and"
+ " administrative divisions.",
)
class SearchViewSet(GenericAPIView):
queryset = Unit.objects.all()
@@ -412,7 +428,8 @@ def get(self, request):
if not re.match(r"^[\w\såäö.,'+&|-]+$", q_val):
raise ParseError(
- "Invalid search terms, only letters, numbers, spaces and .,'+-&| allowed."
+ "Invalid search terms, only letters, numbers, spaces and .,'+-&|"
+ " allowed."
)
types_str = ",".join([elem for elem in QUERY_PARAM_TYPE_NAMES])
@@ -531,8 +548,9 @@ def get(self, request):
search_fn = "websearch_to_tsquery"
search_query_str += f" {exclusions}"
- # This is ~100 times faster than using Djangos SearchRank and allows searching using wildcard "|*"
- # and by ranking gives better results, e.g. extra fields weight is counted.
+ # This is ~100 times faster than using Django's SearchRank and allows searching
+ # using wildcard "|*" and by ranking gives better results, e.g. extra fields
+ # weight is counted.
sql = f"""
SELECT * from (
SELECT id, type_name, name_{language_short}, ts_rank_cd(search_column_{language_short}, search_query)
@@ -540,7 +558,7 @@ def get(self, request):
WHERE search_query @@ search_column_{language_short}
ORDER BY rank DESC LIMIT {sql_query_limit}
) AS sub_query where sub_query.rank >= {rank_threshold};
- """
+ """ # noqa: E501
cursor = connection.cursor()
try:
@@ -708,11 +726,13 @@ def get(self, request):
cursor = connection.cursor()
cursor.execute(sql)
addresses = cursor.fetchall()
- # addresses are in format e.g. [(12755,), (4067,)], remove comma and parenthesis
+ # addresses are in format e.g. [(12755,), (4067,)], remove comma and
+ # parenthesis
ids = [re.sub(r"[(,)]", "", str(a)) for a in addresses]
preserved = get_preserved_order(ids)
addresses_qs = Address.objects.filter(id__in=ids).order_by(preserved)
- # if no units has been found without trigram search and addresses are found,
+ # if no units has been found without trigram search and addresses are
+ # found,
# do not return any units, thus they might confuse in the results.
if addresses_qs.exists() and show_only_address:
units_qs = Unit.objects.none()
@@ -723,7 +743,8 @@ def get(self, request):
logger.debug(connection.queries)
queries_time = sum([float(s["time"]) for s in connection.queries])
logger.debug(
- f"Search queries total execution time: {queries_time} Num queries: {len(connection.queries)}"
+ f"Search queries total execution time: {queries_time}"
+ f" Num queries: {len(connection.queries)}"
)
reset_queries()
diff --git a/services/search/specification.swagger.yaml b/services/search/specification.swagger.yaml
index 94997e20d..80d1a6275 100644
--- a/services/search/specification.swagger.yaml
+++ b/services/search/specification.swagger.yaml
@@ -307,4 +307,3 @@ definitions:
type: string
name_en:
type: string
-
diff --git a/services/templates/search/indexes/services/service_text.txt b/services/templates/search/indexes/services/service_text.txt
index 8b1378917..e69de29bb 100644
--- a/services/templates/search/indexes/services/service_text.txt
+++ b/services/templates/search/indexes/services/service_text.txt
@@ -1 +0,0 @@
-
diff --git a/services/templates/search/indexes/services/servicenode_text.txt b/services/templates/search/indexes/services/servicenode_text.txt
index 8b1378917..e69de29bb 100644
--- a/services/templates/search/indexes/services/servicenode_text.txt
+++ b/services/templates/search/indexes/services/servicenode_text.txt
@@ -1 +0,0 @@
-
diff --git a/services/tests/data/Esiopetusalue_suomi.gml b/services/tests/data/Esiopetusalue_suomi.gml
index 9960eeda5..6bec91f1b 100644
--- a/services/tests/data/Esiopetusalue_suomi.gml
+++ b/services/tests/data/Esiopetusalue_suomi.gml
@@ -66,4 +66,4 @@
2023-2024
-
\ No newline at end of file
+
diff --git a/services/tests/data/Esiopetusalue_suomi_tuleva.gml b/services/tests/data/Esiopetusalue_suomi_tuleva.gml
index 339df53bb..96785f145 100644
--- a/services/tests/data/Esiopetusalue_suomi_tuleva.gml
+++ b/services/tests/data/Esiopetusalue_suomi_tuleva.gml
@@ -66,4 +66,4 @@
2024-2025
-
\ No newline at end of file
+
diff --git a/services/tests/data/Opev_ooa_alaaste_suomi.gml b/services/tests/data/Opev_ooa_alaaste_suomi.gml
index d58f71da0..a77e4db51 100644
--- a/services/tests/data/Opev_ooa_alaaste_suomi.gml
+++ b/services/tests/data/Opev_ooa_alaaste_suomi.gml
@@ -114,4 +114,4 @@
-
\ No newline at end of file
+
diff --git a/services/tests/data/Opev_ooa_alaaste_suomi_tuleva.gml b/services/tests/data/Opev_ooa_alaaste_suomi_tuleva.gml
index ea1291709..a5dc809f4 100644
--- a/services/tests/data/Opev_ooa_alaaste_suomi_tuleva.gml
+++ b/services/tests/data/Opev_ooa_alaaste_suomi_tuleva.gml
@@ -114,4 +114,4 @@
-
\ No newline at end of file
+
diff --git a/services/tests/data/melontareitti_3d.gml b/services/tests/data/melontareitti_3d.gml
index 290117d65..92e91586b 100644
--- a/services/tests/data/melontareitti_3d.gml
+++ b/services/tests/data/melontareitti_3d.gml
@@ -75,4 +75,4 @@
4.20000000000000018
-
\ No newline at end of file
+
diff --git a/services/utils/__init__.py b/services/utils/__init__.py
index edd6e8b68..a85dbb52c 100644
--- a/services/utils/__init__.py
+++ b/services/utils/__init__.py
@@ -2,3 +2,10 @@
from .models import check_valid_concrete_field
from .translator import get_translated
from .types import strtobool
+
+__all__ = [
+ "AccessibilityShortcomingCalculator",
+ "check_valid_concrete_field",
+ "get_translated",
+ "strtobool",
+]
diff --git a/services/utils/accessibility_shortcoming_calculator.py b/services/utils/accessibility_shortcoming_calculator.py
index 8e2744aa8..8861585cf 100644
--- a/services/utils/accessibility_shortcoming_calculator.py
+++ b/services/utils/accessibility_shortcoming_calculator.py
@@ -153,7 +153,8 @@ def _calculate_shortcomings(self, rule, properties, messages, profile_id):
op, properties, messages, profile_id
)
if rule["operator"] == "AND" and not is_ok and not message_recorded:
- # Short circuit AND evaluation when no message was emitted. This edge case is required!
+ # Short circuit AND evaluation when no message was emitted.
+ # This edge case is required!
# NOTE: No messages are emitted from the AND clause itself.
logger.debug("{}: AND short circuited".format(rule["id"]))
return False, False
@@ -200,7 +201,8 @@ def record(segment, message):
messages[segment] = messages.get(segment, {})
messages[segment][requirement_id] = messages[segment].get(requirement_id, [])
if rule["id"] == requirement_id:
- # This is a top level requirement - only add top level message if there are no specific messages.
+ # This is a top level requirement - only add top level message if there are
+ # no specific messages.
if not messages[segment][requirement_id]:
messages[segment][requirement_id].append(rule["msg"])
record(segment, rule["msg"])
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 1b5f465f3..000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,38 +0,0 @@
-[pep8]
-max-line-length = 125
-exclude = *migrations*
-
-[flake8]
-exclude = .git,
- *migrations*,
- venv,
- services/models/__init__.py,
- services/utils/__init__.py,
- local_settings.py,
-max-line-length = 125
-ignore = E203,W503,N813
-
-[tool:pytest]
-DJANGO_SETTINGS_MODULE=smbackend.settings
-python_files = tests.py test_*.py *_tests.py
-filterwarnings = ignore:No directory at:UserWarning
-
-[coverage:run]
-branch = True
-omit = *migrations*,*site-packages*,*venv*
-
-[isort]
-atomic = true
-combine_as_imports = true
-indent = 4
-length_sort = false
-multi_line_output = 3
-order_by_type = false
-skip = venv
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = True
-line_length = 88
-
-[pydocstyle]
-ignore = D100,D104,D105,D200,D203,D400
diff --git a/setup.py b/setup.py
deleted file mode 100644
index ab678c290..000000000
--- a/setup.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from setuptools import find_packages, setup
-
-setup(
- name="smbackend",
- version="241126",
- license="AGPLv3",
- packages=find_packages(),
- include_package_data=True,
- install_requires=[
- p
- for p in open("requirements.txt", "rt").readlines()
- if p and not p.startswith("#")
- ],
- zip_safe=False,
-)
diff --git a/sonar-project.properties b/sonar-project.properties
index 787569695..23bceecbc 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -1,9 +1,7 @@
sonar.projectKey=City-of-Helsinki_smbackend
sonar.organization=city-of-helsinki
sonar.projectName=smbackend
-sonar.projectVersion=1.0
-sonar.sourceEncoding=UTF-8
-sonar.python.version=3.8
+sonar.python.version=3.10
sonar.python.coverage.reportPaths=coverage.xml
sonar.test.inclusions=**/tests/**/*
sonar.exclusions=**/tests/**/*